author     brettw@chromium.org <brettw@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-08-26 02:53:36 +0000
committer  brettw@chromium.org <brettw@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2009-08-26 02:53:36 +0000
commit  a4a3292e978cca3ad8c0baa5205054b5b3802e64 (patch)
tree    9490d74f9760c4b841f1188e13b1a91db374c327 /base
parent  67d0d62d638f7b15e031dd2c22756df0109e021d (diff)
Convert internal time format to Windows 1601 epoch on Linux & Mac.
Although we represent time internally starting from 1601, there are still
things like time explosion that will not work before the year 1900. This
limitation is the same as it was previously.

BUG=14734
Review URL: http://codereview.chromium.org/173296

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@24417 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'base')
-rw-r--r--   base/time.h             9
-rw-r--r--   base/time_mac.cc       34
-rw-r--r--   base/time_posix.cc     76
-rw-r--r--   base/time_unittest.cc  30
4 files changed, 93 insertions, 56 deletions
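
The whole change hinges on one constant: the 11,644,473,600-second gap between the 1601 (Windows) and 1970 (Unix) epochs. As a sanity check only (this sketch is not part of the patch), the figure follows from the calendar: 1601 through 1969 is 369 years, of which 92 are divisible by 4 (1604..1968) and three of those (1700, 1800, 1900) are non-leap centuries, giving 89 leap days.

  // Illustrative sketch, not Chromium code: derive the 1601->1970 epoch delta.
  #include <cstdint>

  constexpr int64_t kYears = 369;                      // 1601..1969 inclusive
  constexpr int64_t kLeapDays = 92 - 3;                // minus 1700, 1800, 1900
  constexpr int64_t kDays = kYears * 365 + kLeapDays;  // 134,774 days
  constexpr int64_t kSecondsPerDay = 24 * 60 * 60;
  static_assert(kDays * kSecondsPerDay == INT64_C(11644473600),
                "seconds between the 1601 and 1970 epochs");

That 11,644,473,600 is the GG_INT64_C constant used in both time_mac.cc and time_posix.cc below; multiplied by 10^6 it is the 11644473600000000 value checked by the new unit test.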
diff --git a/base/time.h b/base/time.h
index beeca27..a57c04f 100644
--- a/base/time.h
+++ b/base/time.h
@@ -178,6 +178,15 @@ class Time {
static const int64 kNanosecondsPerSecond = kNanosecondsPerMicrosecond *
kMicrosecondsPerSecond;
+#if !defined(OS_WIN)
+ // On Mac & Linux, this value is the delta from the Windows epoch of 1601 to
+ // the Posix epoch of 1970. This is used for migrating from the old
+ // 1970-based epoch to the new 1601-based one. It should be removed from
+ // this global header and put in the platform-specific ones when we remove the
+ // migration code.
+ static const int64 kWindowsEpochDeltaMicroseconds;
+#endif
+
// Represents an exploded time that can be formatted nicely. This is kind of
// like the Win32 SYSTEMTIME structure or the Unix "struct tm" with a few
// additions and changes to prevent errors.
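
The migration note above matters because the internal value is what gets serialized (the new unit test at the bottom of this change spells that out). A hypothetical upgrade helper would simply add the delta; the name below is illustrative and is not something this patch introduces.

  #include <cstdint>

  // Delta between the Unix (1970) and Windows (1601) epochs, in microseconds.
  static const int64_t kWindowsEpochDeltaMicroseconds =
      INT64_C(11644473600) * 1000000;

  // Hypothetical migration helper: a value serialized under the old
  // 1970-based internal format becomes 1601-based by adding the delta.
  int64_t MigrateSerializedTime(int64_t old_microseconds_since_1970) {
    return old_microseconds_since_1970 + kWindowsEpochDeltaMicroseconds;
  }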
diff --git a/base/time_mac.cc b/base/time_mac.cc
index 3e5e14a..6b46b95 100644
--- a/base/time_mac.cc
+++ b/base/time_mac.cc
@@ -24,20 +24,33 @@ namespace base {
// Time -----------------------------------------------------------------------
-// The internal representation of Time uses a 64-bit microsecond count
-// from 1970-01-01 00:00:00 UTC. Core Foundation uses a double second count
-// since 2001-01-01 00:00:00 UTC.
+// Core Foundation uses a double second count since 2001-01-01 00:00:00 UTC.
+// The UNIX epoch is 1970-01-01 00:00:00 UTC.
+// Windows uses a Gregorian epoch of 1601. We need to match this internally
+// so that our time representations match across all platforms. See bug 14734.
+// irb(main):010:0> Time.at(0).getutc()
+// => Thu Jan 01 00:00:00 UTC 1970
+// irb(main):011:0> Time.at(-11644473600).getutc()
+// => Mon Jan 01 00:00:00 UTC 1601
+static const int64 kWindowsEpochDeltaSeconds = GG_INT64_C(11644473600);
+static const int64 kWindowsEpochDeltaMilliseconds =
+ kWindowsEpochDeltaSeconds * Time::kMillisecondsPerSecond;
-// Some functions in time.cc use time_t directly, so we provide a zero offset
-// for them. The epoch is 1970-01-01 00:00:00 UTC.
// static
-const int64 Time::kTimeTToMicrosecondsOffset = GG_INT64_C(0);
+const int64 Time::kWindowsEpochDeltaMicroseconds =
+ kWindowsEpochDeltaSeconds * Time::kMicrosecondsPerSecond;
+
+// Some functions in time.cc use time_t directly, so we provide an offset
+// to convert between time_t (Unix epoch) and internal time (Windows epoch).
+// static
+const int64 Time::kTimeTToMicrosecondsOffset = kWindowsEpochDeltaMicroseconds;
// static
Time Time::Now() {
CFAbsoluteTime now =
CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970;
- return Time(static_cast<int64>(now * kMicrosecondsPerSecond));
+ return Time(static_cast<int64>(now * kMicrosecondsPerSecond) +
+ kWindowsEpochDeltaMicroseconds);
}
// static
@@ -61,13 +74,14 @@ Time Time::FromExploded(bool is_local, const Exploded& exploded) {
time_zone(is_local ? CFTimeZoneCopySystem() : NULL);
CFAbsoluteTime seconds = CFGregorianDateGetAbsoluteTime(date, time_zone) +
kCFAbsoluteTimeIntervalSince1970;
- return Time(static_cast<int64>(seconds * kMicrosecondsPerSecond));
+ return Time(static_cast<int64>(seconds * kMicrosecondsPerSecond) +
+ kWindowsEpochDeltaMicroseconds);
}
void Time::Explode(bool is_local, Exploded* exploded) const {
CFAbsoluteTime seconds =
- (static_cast<double>(us_) / kMicrosecondsPerSecond) -
- kCFAbsoluteTimeIntervalSince1970;
+ (static_cast<double>((us_ - kWindowsEpochDeltaMicroseconds) /
+ kMicrosecondsPerSecond) - kCFAbsoluteTimeIntervalSince1970);
scoped_cftyperef<CFTimeZoneRef>
time_zone(is_local ? CFTimeZoneCopySystem() : NULL);
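
Putting the Mac pieces together: Core Foundation counts seconds from 2001, kCFAbsoluteTimeIntervalSince1970 shifts that to the Unix epoch, and the new delta shifts it again to 1601. A standalone sketch of the round trip follows; the function names are illustrative, not Chromium's, and the arithmetic is done in double here purely for symmetry.

  #include <CoreFoundation/CoreFoundation.h>
  #include <cstdint>

  static const int64_t kMicrosecondsPerSecond = 1000000;
  static const int64_t kWindowsEpochDeltaMicroseconds =
      INT64_C(11644473600) * kMicrosecondsPerSecond;

  // CFAbsoluteTime (seconds since 2001) -> internal microseconds since 1601.
  int64_t InternalFromCFAbsoluteTime(CFAbsoluteTime t) {
    double seconds_since_1970 = t + kCFAbsoluteTimeIntervalSince1970;
    return static_cast<int64_t>(seconds_since_1970 * kMicrosecondsPerSecond) +
           kWindowsEpochDeltaMicroseconds;
  }

  // Internal microseconds since 1601 -> CFAbsoluteTime (seconds since 2001).
  CFAbsoluteTime CFAbsoluteTimeFromInternal(int64_t us) {
    return static_cast<double>(us - kWindowsEpochDeltaMicroseconds) /
               kMicrosecondsPerSecond -
           kCFAbsoluteTimeIntervalSince1970;
  }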
diff --git a/base/time_posix.cc b/base/time_posix.cc
index 8b04be9..66f41d3 100644
--- a/base/time_posix.cc
+++ b/base/time_posix.cc
@@ -4,9 +4,6 @@
#include "base/time.h"
-#ifdef OS_MACOSX
-#include <mach/mach_time.h>
-#endif
#include <sys/time.h>
#include <time.h>
@@ -23,10 +20,24 @@ namespace base {
// Time -----------------------------------------------------------------------
-// Some functions in time.cc use time_t directly, so we provide a zero offset
-// for them. The epoch is 1970-01-01 00:00:00 UTC.
+// Windows uses a Gregorian epoch of 1601. We need to match this internally
+// so that our time representations match across all platforms. See bug 14734.
+// irb(main):010:0> Time.at(0).getutc()
+// => Thu Jan 01 00:00:00 UTC 1970
+// irb(main):011:0> Time.at(-11644473600).getutc()
+// => Mon Jan 01 00:00:00 UTC 1601
+static const int64 kWindowsEpochDeltaSeconds = GG_INT64_C(11644473600);
+static const int64 kWindowsEpochDeltaMilliseconds =
+ kWindowsEpochDeltaSeconds * Time::kMillisecondsPerSecond;
+
+// static
+const int64 Time::kWindowsEpochDeltaMicroseconds =
+ kWindowsEpochDeltaSeconds * Time::kMicrosecondsPerSecond;
+
+// Some functions in time.cc use time_t directly, so we provide an offset
+// to convert between time_t (Unix epoch) and internal time (Windows epoch).
// static
-const int64 Time::kTimeTToMicrosecondsOffset = GG_INT64_C(0);
+const int64 Time::kTimeTToMicrosecondsOffset = kWindowsEpochDeltaMicroseconds;
// static
Time Time::Now() {
@@ -36,8 +47,10 @@ Time Time::Now() {
DCHECK(0) << "Could not determine time of day";
}
// Combine seconds and microseconds in a 64-bit field containing microseconds
- // since the epoch. That's enough for nearly 600 centuries.
- return tv.tv_sec * kMicrosecondsPerSecond + tv.tv_usec;
+ // since the epoch. That's enough for nearly 600 centuries. Adjust from
+ // Unix (1970) to Windows (1601) epoch.
+ return Time((tv.tv_sec * kMicrosecondsPerSecond + tv.tv_usec) +
+ kWindowsEpochDeltaMicroseconds);
}
// static
@@ -100,13 +113,17 @@ Time Time::FromExploded(bool is_local, const Exploded& exploded) {
milliseconds = seconds * kMillisecondsPerSecond + exploded.millisecond;
}
- return Time(milliseconds * kMicrosecondsPerMillisecond);
+ // Adjust from Unix (1970) to Windows (1601) epoch.
+ return Time((milliseconds * kMicrosecondsPerMillisecond) +
+ kWindowsEpochDeltaMicroseconds);
}
void Time::Explode(bool is_local, Exploded* exploded) const {
// Time stores times with microsecond resolution, but Exploded only carries
- // millisecond resolution, so begin by being lossy.
- int64 milliseconds = us_ / kMicrosecondsPerMillisecond;
+ // millisecond resolution, so begin by being lossy. Adjust from Windows
+  // epoch (1601) to Unix epoch (1970).
+ int64 milliseconds = (us_ - kWindowsEpochDeltaMicroseconds) /
+ kMicrosecondsPerMillisecond;
time_t seconds = milliseconds / kMillisecondsPerSecond;
struct tm timestruct;
@@ -127,38 +144,13 @@ void Time::Explode(bool is_local, Exploded* exploded) const {
// TimeTicks ------------------------------------------------------------------
+#if defined(OS_POSIX) && \
+ defined(_POSIX_MONOTONIC_CLOCK) && _POSIX_MONOTONIC_CLOCK >= 0
+
// static
TimeTicks TimeTicks::Now() {
uint64_t absolute_micro;
-#if defined(OS_MACOSX)
- static mach_timebase_info_data_t timebase_info;
- if (timebase_info.denom == 0) {
- // Zero-initialization of statics guarantees that denom will be 0 before
- // calling mach_timebase_info. mach_timebase_info will never set denom to
- // 0 as that would be invalid, so the zero-check can be used to determine
- // whether mach_timebase_info has already been called. This is
- // recommended by Apple's QA1398.
- kern_return_t kr = mach_timebase_info(&timebase_info);
- DCHECK(kr == KERN_SUCCESS);
- }
-
- // mach_absolute_time is it when it comes to ticks on the Mac. Other calls
- // with less precision (such as TickCount) just call through to
- // mach_absolute_time.
-
- // timebase_info converts absolute time tick units into nanoseconds. Convert
- // to microseconds up front to stave off overflows.
- absolute_micro = mach_absolute_time() / Time::kNanosecondsPerMicrosecond *
- timebase_info.numer / timebase_info.denom;
-
- // Don't bother with the rollover handling that the Windows version does.
- // With numer and denom = 1 (the expected case), the 64-bit absolute time
- // reported in nanoseconds is enough to last nearly 585 years.
-
-#elif defined(OS_POSIX) && \
- defined(_POSIX_MONOTONIC_CLOCK) && _POSIX_MONOTONIC_CLOCK >= 0
-
struct timespec ts;
if (clock_gettime(CLOCK_MONOTONIC, &ts) != 0) {
NOTREACHED() << "clock_gettime(CLOCK_MONOTONIC) failed.";
@@ -169,13 +161,13 @@ TimeTicks TimeTicks::Now() {
(static_cast<int64>(ts.tv_sec) * Time::kMicrosecondsPerSecond) +
(static_cast<int64>(ts.tv_nsec) / Time::kNanosecondsPerMicrosecond);
+ return TimeTicks(absolute_micro);
+}
+
#else // _POSIX_MONOTONIC_CLOCK
#error No usable tick clock function on this platform.
#endif // _POSIX_MONOTONIC_CLOCK
- return TimeTicks(absolute_micro);
-}
-
// static
TimeTicks TimeTicks::HighResNow() {
return Now();
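
The "some functions in time.cc use time_t directly" comment above refers to shared conversions that now go through kTimeTToMicrosecondsOffset instead of a zero offset. The real code lives in time.cc, which is not part of this diff; the sketch below only shows the shape of such a conversion under the new epoch, with names of my own choosing.

  #include <ctime>
  #include <cstdint>

  static const int64_t kMicrosecondsPerSecond = 1000000;
  static const int64_t kTimeTToMicrosecondsOffset =
      INT64_C(11644473600) * kMicrosecondsPerSecond;

  // time_t (seconds since 1970) -> internal count (microseconds since 1601).
  int64_t InternalFromTimeT(time_t tt) {
    return static_cast<int64_t>(tt) * kMicrosecondsPerSecond +
           kTimeTToMicrosecondsOffset;
  }

  // Internal count (microseconds since 1601) -> time_t (seconds since 1970).
  time_t TimeTFromInternal(int64_t us) {
    return static_cast<time_t>((us - kTimeTToMicrosecondsOffset) /
                               kMicrosecondsPerSecond);
  }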
diff --git a/base/time_unittest.cc b/base/time_unittest.cc
index ebe69eb..f8a62cb 100644
--- a/base/time_unittest.cc
+++ b/base/time_unittest.cc
@@ -66,9 +66,10 @@ TEST(Time, LocalExplode) {
Time b = Time::FromLocalExploded(exploded);
- // The exploded structure doesn't have microseconds, so the result will be
- // rounded to the nearest millisecond.
- EXPECT_TRUE((a - b) < TimeDelta::FromMilliseconds(1));
+ // The exploded structure doesn't have microseconds, and on Mac & Linux, the
+ // internal OS conversion uses seconds, which will cause truncation. So we
+ // can only make sure that the delta is within one second.
+ EXPECT_TRUE((a - b) < TimeDelta::FromSeconds(1));
}
TEST(Time, UTCExplode) {
@@ -77,7 +78,7 @@ TEST(Time, UTCExplode) {
a.UTCExplode(&exploded);
Time b = Time::FromUTCExploded(exploded);
- EXPECT_TRUE((a - b) < TimeDelta::FromMilliseconds(1));
+ EXPECT_TRUE((a - b) < TimeDelta::FromSeconds(1));
}
TEST(Time, LocalMidnight) {
@@ -140,3 +141,24 @@ TEST(TimeDelta, FromAndIn) {
EXPECT_EQ(13.0, TimeDelta::FromMilliseconds(13).InMillisecondsF());
EXPECT_EQ(13, TimeDelta::FromMicroseconds(13).InMicroseconds());
}
+
+// Our internal time format is serialized in things like databases, so it's
+// important that it's consistent across all our platforms. We use the 1601
+// Windows epoch as the internal format across all platforms.
+TEST(TimeDelta, WindowsEpoch) {
+ Time::Exploded exploded;
+ exploded.year = 1970;
+ exploded.month = 1;
+ exploded.day_of_week = 0; // Should be unused.
+ exploded.day_of_month = 1;
+ exploded.hour = 0;
+ exploded.minute = 0;
+ exploded.second = 0;
+ exploded.millisecond = 0;
+ Time t = Time::FromUTCExploded(exploded);
+ // Unix 1970 epoch.
+ EXPECT_EQ(GG_INT64_C(11644473600000000), t.ToInternalValue());
+
+ // We can't test the 1601 epoch, since the system time functions on Linux
+ // only compute years starting from 1900.
+}
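
The closing comment about 1900 is a property of the C library interfaces rather than of the new format: struct tm counts years from 1900, and with a 32-bit time_t the value for 1601-01-01 (-11,644,473,600 seconds) does not fit at all. A tiny standalone probe, purely to illustrate the limitation (timegm is a glibc/BSD extension and its behaviour for dates before 1970 varies by platform):

  #include <stdio.h>
  #include <time.h>

  int main() {
    struct tm t1601 = {};
    t1601.tm_year = 1601 - 1900;  // -299: tm_year counts years since 1900
    t1601.tm_mday = 1;            // 1 January (tm_mon == 0 means January)
    time_t result = timegm(&t1601);  // may fail or truncate before 1970
    // With a 32-bit time_t the true value, -11644473600 seconds, is
    // unrepresentable, which is why the test above stops at the 1970 check.
    printf("timegm(1601-01-01) = %lld\n", (long long)result);
    return 0;
  }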