summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorhans@chromium.org <hans@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-05-23 10:34:30 +0000
committerhans@chromium.org <hans@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2013-05-23 10:34:30 +0000
commit45ca26175a14f33d8667608983b0c2de7571cc1f (patch)
tree36e5ddcc847c797d80680fd3678756e13dbb0743
parenta369f129ab2b845c229550cc7375e3c9955319c1 (diff)
downloadchromium_src-45ca26175a14f33d8667608983b0c2de7571cc1f.zip
chromium_src-45ca26175a14f33d8667608983b0c2de7571cc1f.tar.gz
chromium_src-45ca26175a14f33d8667608983b0c2de7571cc1f.tar.bz2
base/time_posix.cc: Work around new integer overflow warning.
A new version of Clang warns about the previous version of this code: ../../base/time_posix.cc:212:58: warning: overflow in expression; result is 0 with type 'long' [-Winteger-overflow] milliseconds = std::numeric_limits<SysTime>::min() * ^ ../../base/time_posix.cc:215:59: warning: overflow in expression; result is -1000 with type 'long' [-Winteger-overflow] milliseconds = (std::numeric_limits<SysTime>::max() * ^ The warning is legitimate: that code does overflow on LP64 platforms. However, we will never hit that code on 64-bit platforms, because mktime() will not overflow when time_t is a 64-bit type. I have added a CHECK about this and worked around the warning by splitting up the expression. BUG=24737 R=brettw@chromium.org, thakis@chromium.org Review URL: https://codereview.chromium.org/15560002 git-svn-id: svn://svn.chromium.org/chrome/trunk/src@201735 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--base/time_posix.cc12
1 file changed, 7 insertions, 5 deletions
diff --git a/base/time_posix.cc b/base/time_posix.cc
index 288c6463..3870201 100644
--- a/base/time_posix.cc
+++ b/base/time_posix.cc
@@ -209,12 +209,14 @@ Time Time::FromExploded(bool is_local, const Exploded& exploded) {
// 999ms to avoid the time being less than any other possible value that
// this function can return.
if (exploded.year < 1969) {
- milliseconds = std::numeric_limits<SysTime>::min() *
- kMillisecondsPerSecond;
+ CHECK(sizeof(SysTime) < sizeof(int64)) << "integer overflow";
+ milliseconds = std::numeric_limits<SysTime>::min();
+ milliseconds *= kMillisecondsPerSecond;
} else {
- milliseconds = (std::numeric_limits<SysTime>::max() *
- kMillisecondsPerSecond) +
- kMillisecondsPerSecond - 1;
+ CHECK(sizeof(SysTime) < sizeof(int64)) << "integer overflow";
+ milliseconds = std::numeric_limits<SysTime>::max();
+ milliseconds *= kMillisecondsPerSecond;
+ milliseconds += (kMillisecondsPerSecond - 1);
}
} else {
milliseconds = seconds * kMillisecondsPerSecond + exploded.millisecond;