commit     82df52ead5f3e27ac2c745b89c3d4e7475d54918 (patch)
author     phajdan.jr@chromium.org <phajdan.jr@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-04-30 08:15:31 +0000
committer  phajdan.jr@chromium.org <phajdan.jr@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2010-04-30 08:15:31 +0000
tree       0203f2bcc6ec0d20c24437b763f3aad1bf9ea863
parent     228cb40ea43f32a355844adf3ef0602439aeecc3 (diff)
Revert "[Third time landing] Python implementation of sync server, for testing."

This broke the memory waterfall: the Python test server failed to import protobuf.

TBR=nick
Review URL: http://codereview.chromium.org/1822001
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@46048 0039d316-1c4b-4281-b951-d872f2087c98
-rw-r--r--  chrome/browser/sync/profile_sync_service.cc                    |    2
-rwxr-xr-x  chrome/browser/sync/protocol/sync_proto.gyp                    |   73
-rw-r--r--  chrome/chrome.gyp                                              |   58
-rw-r--r--  chrome/chrome_browser.gypi                                     |    2
-rw-r--r--  chrome/chrome_tests.gypi                                       |   12
-rw-r--r--  chrome/test/in_process_browser_test.cc                         |   12
-rw-r--r--  chrome/test/in_process_browser_test.h                          |   10
-rw-r--r--  chrome/test/live_sync/live_sync_test.h                         |   23
-rw-r--r--  chrome/test/live_sync/two_client_live_bookmarks_sync_test.cc   |   42
-rw-r--r--  chrome/test/memory_test/memory_test.cc                         |    2
-rw-r--r--  net/net.gyp                                                    |    2
-rw-r--r--  net/socket/ssl_test_util.cc                                    |    9
-rwxr-xr-x  net/tools/testserver/chromiumsync.py                           |  653
-rwxr-xr-x  net/tools/testserver/chromiumsync_test.py                      |  317
-rw-r--r--  net/tools/testserver/testserver.py                             |   61
-rwxr-xr-x  third_party/protobuf2/__init__.py                              |    2
-rwxr-xr-x  third_party/protobuf2/descriptor_pb2.py                        | 1172
-rw-r--r--  third_party/protobuf2/protobuf.gyp                             |   81
18 files changed, 98 insertions, 2435 deletions
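
The "failed to import protobuf" in the message refers to the Python test server: testserver.py imported chromiumsync, which in turn needs the protoc-generated sync_pb2 / *_specifics_pb2 modules and the google.protobuf runtime vendored under third_party/protobuf2 (all removed below). The exact bot error is not part of this commit, so the following dependency check is only an illustrative sketch of what had to be importable for the server to start:

# Illustrative sketch only: the modules chromiumsync.py imported. If any of
# these are missing from PYTHONPATH (for example, because the pyproto outputs
# were never built), testserver.py fails at import time.
def sync_protos_importable():
    try:
        import google.protobuf             # runtime vendored in third_party/protobuf2
        import sync_pb2                    # generated from sync.proto
        import autofill_specifics_pb2
        import bookmark_specifics_pb2
        import preference_specifics_pb2
        import theme_specifics_pb2
        import typed_url_specifics_pb2
    except ImportError:
        return False
    return True

if __name__ == '__main__':
    print('sync protos importable: %s' % sync_protos_importable())
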
diff --git a/chrome/browser/sync/profile_sync_service.cc b/chrome/browser/sync/profile_sync_service.cc
index 47b1438..9618418 100644
--- a/chrome/browser/sync/profile_sync_service.cc
+++ b/chrome/browser/sync/profile_sync_service.cc
@@ -519,7 +519,7 @@ void ProfileSyncService::ChangePreferredDataTypes(
void ProfileSyncService::GetPreferredDataTypes(
syncable::ModelTypeSet* preferred_types) const {
- preferred_types->clear();
+ preferred_types->clear();
// Filter out any datatypes which aren't registered, or for which
// the preference can't be read.
diff --git a/chrome/browser/sync/protocol/sync_proto.gyp b/chrome/browser/sync/protocol/sync_proto.gyp
deleted file mode 100755
index 4628ab6..0000000
--- a/chrome/browser/sync/protocol/sync_proto.gyp
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'variables': {
- 'chromium_code': 1,
- },
- 'targets': [
- {
- # Protobuf compiler / generate rule for sync.proto. This is used by
- # test code in net, which is why it's isolated into its own .gyp file.
- 'target_name': 'sync_proto',
- 'type': 'none',
- 'sources': [
- 'sync.proto',
- 'autofill_specifics.proto',
- 'bookmark_specifics.proto',
- 'preference_specifics.proto',
- 'theme_specifics.proto',
- 'typed_url_specifics.proto',
- ],
- 'rules': [
- {
- 'rule_name': 'genproto',
- 'extension': 'proto',
- 'inputs': [
- '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
- ],
- 'outputs': [
- '<(PRODUCT_DIR)/pyproto/sync_pb/<(RULE_INPUT_ROOT)_pb2.py',
- '<(SHARED_INTERMEDIATE_DIR)/protoc_out/chrome/browser/sync/protocol/<(RULE_INPUT_ROOT).pb.h',
- '<(SHARED_INTERMEDIATE_DIR)/protoc_out/chrome/browser/sync/protocol/<(RULE_INPUT_ROOT).pb.cc',
- ],
- 'action': [
- '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
- '--proto_path=.',
- './<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
- '--cpp_out=<(SHARED_INTERMEDIATE_DIR)/protoc_out/chrome/browser/sync/protocol',
- '--python_out=<(PRODUCT_DIR)/pyproto/sync_pb',
- ],
- 'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
- },
- ],
- 'dependencies': [
- '../../../../third_party/protobuf2/protobuf.gyp:protoc#host',
- ],
- },
- {
- 'target_name': 'sync_proto_cpp',
- 'type': 'none',
- 'export_dependent_settings': [
- '../../../../third_party/protobuf2/protobuf.gyp:protobuf_lite',
- 'sync_proto',
- ],
- 'dependencies': [
- '../../../../third_party/protobuf2/protobuf.gyp:protobuf_lite',
- 'sync_proto',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
- ],
- },
- },
- ],
-}
-
-# Local Variables:
-# tab-width:2
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=2 shiftwidth=2:
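
The deleted genproto rule above boils down to one protoc run per .proto file, producing both C++ and Python bindings; the rule re-added to chrome.gyp in the next hunk performs the same invocation without --python_out. A rough standalone equivalent in Python, with a placeholder protoc path and placeholder output directories instead of the gyp-expanded PRODUCT_DIR / SHARED_INTERMEDIATE_DIR values:

# Rough equivalent of the gyp 'genproto' rule; 'protoc' and the output paths
# are placeholders for the values gyp expands at build time.
import os
import subprocess

PROTOS = ['sync.proto', 'autofill_specifics.proto', 'bookmark_specifics.proto',
          'preference_specifics.proto', 'theme_specifics.proto',
          'typed_url_specifics.proto']

def generate(proto_dir, cpp_out, py_out, protoc='protoc'):
    for out_dir in (cpp_out, py_out):
        if not os.path.isdir(out_dir):
            os.makedirs(out_dir)
    for proto in PROTOS:
        # One invocation per .proto file, mirroring the per-file gyp rule.
        subprocess.check_call([protoc,
                               '--proto_path=' + proto_dir,
                               os.path.join(proto_dir, proto),
                               '--cpp_out=' + cpp_out,
                               '--python_out=' + py_out])

if __name__ == '__main__':
    generate('chrome/browser/sync/protocol',
             'protoc_out/chrome/browser/sync/protocol',
             'pyproto/sync_pb')
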
diff --git a/chrome/chrome.gyp b/chrome/chrome.gyp
index 86af93c..35d7319 100644
--- a/chrome/chrome.gyp
+++ b/chrome/chrome.gyp
@@ -689,10 +689,61 @@
'../third_party/icu/icu.gyp:icuuc',
'../third_party/libjingle/libjingle.gyp:libjingle',
'../third_party/sqlite/sqlite.gyp:sqlite',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'common_constants',
'notifier',
'sync',
+ 'sync_proto',
+ ],
+ },
+ {
+ # Protobuf compiler / generate rule for sync.proto
+ 'target_name': 'sync_proto',
+ 'type': 'none',
+ 'sources': [
+ 'browser/sync/protocol/sync.proto',
+ 'browser/sync/protocol/autofill_specifics.proto',
+ 'browser/sync/protocol/bookmark_specifics.proto',
+ 'browser/sync/protocol/preference_specifics.proto',
+ 'browser/sync/protocol/theme_specifics.proto',
+ 'browser/sync/protocol/typed_url_specifics.proto',
+ ],
+ 'rules': [
+ {
+ 'rule_name': 'genproto',
+ 'extension': 'proto',
+ 'inputs': [
+ '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+ ],
+ 'variables': {
+ # The protoc compiler requires a proto_path argument with the
+ # directory containing the .proto file.
+ # There's no generator variable that corresponds to this, so fake it.
+ 'rule_input_relpath': 'browser/sync/protocol',
+ },
+ 'outputs': [
+ '<(protoc_out_dir)/chrome/<(rule_input_relpath)/<(RULE_INPUT_ROOT).pb.h',
+ '<(protoc_out_dir)/chrome/<(rule_input_relpath)/<(RULE_INPUT_ROOT).pb.cc',
+ ],
+ 'action': [
+ '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+ '--proto_path=./<(rule_input_relpath)',
+ './<(rule_input_relpath)/<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+ '--cpp_out=<(protoc_out_dir)/chrome/<(rule_input_relpath)',
+ ],
+ 'message': 'Generating C++ code from <(RULE_INPUT_PATH)',
+ },
+ ],
+ 'dependencies': [
+ '../third_party/protobuf2/protobuf.gyp:protobuf_lite',
+ '../third_party/protobuf2/protobuf.gyp:protoc#host',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(protoc_out_dir)',
+ ]
+ },
+ 'export_dependent_settings': [
+ '../third_party/protobuf2/protobuf.gyp:protobuf_lite',
],
},
{
@@ -785,7 +836,7 @@
'../net/net.gyp:net',
'../third_party/expat/expat.gyp:expat',
'../third_party/libjingle/libjingle.gyp:libjingle',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
+ 'sync_proto',
],
'conditions': [
['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
@@ -939,8 +990,7 @@
'dependencies': [
'../skia/skia.gyp:skia',
'../third_party/libjingle/libjingle.gyp:libjingle',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
- '../third_party/protobuf2/protobuf.gyp:protobuf_lite#target',
+ 'sync_proto',
],
'conditions': [
['OS=="win"', {
diff --git a/chrome/chrome_browser.gypi b/chrome/chrome_browser.gypi
index dc046fe..1a7486a 100644
--- a/chrome/chrome_browser.gypi
+++ b/chrome/chrome_browser.gypi
@@ -17,7 +17,7 @@
'domui_shared_resources',
'net_internals_resources',
'platform_locale_settings',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
+ 'sync_proto',
'syncapi',
'theme_resources',
'../app/app.gyp:app_resources',
diff --git a/chrome/chrome_tests.gypi b/chrome/chrome_tests.gypi
index 68bbca1..0a9af8d 100644
--- a/chrome/chrome_tests.gypi
+++ b/chrome/chrome_tests.gypi
@@ -503,13 +503,13 @@
'msvs_guid': 'ECFC2BEC-9FC0-4AD9-9649-5F26793F65FC',
'dependencies': [
'browser',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'chrome',
'chrome_resources',
'chrome_strings',
'common',
'debugger',
'renderer',
+ 'sync_proto',
'test_support_unit',
'utility',
'../app/app.gyp:app_base',
@@ -1715,7 +1715,6 @@
'_USE_32BIT_TIME_T',
],
'dependencies': [
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'common',
'debugger',
'../skia/skia.gyp:skia',
@@ -1723,6 +1722,7 @@
'../testing/gtest.gyp:gtest',
'../third_party/libjingle/libjingle.gyp:libjingle',
'syncapi',
+ 'sync_proto',
'test_support_unit',
],
'conditions': [
@@ -1810,14 +1810,15 @@
'type': 'executable',
'dependencies': [
'browser',
- 'browser/sync/protocol/sync_proto.gyp:sync_proto_cpp',
'chrome',
'chrome_resources',
'common',
'debugger',
'renderer',
+ 'chrome_resources',
'chrome_strings',
- 'test_support_common',
+ 'sync_proto',
+ 'test_support_unit',
'../net/net.gyp:net_test_support',
'../printing/printing.gyp:printing',
'../skia/skia.gyp:skia',
@@ -1839,9 +1840,6 @@
'app/chrome_dll_resource.h',
'test/in_process_browser_test.cc',
'test/in_process_browser_test.h',
- 'test/test_launcher/out_of_proc_test_runner.cc',
- 'test/test_launcher/test_runner.cc',
- 'test/test_launcher/test_runner.h',
'test/live_sync/bookmark_model_verifier.cc',
'test/live_sync/bookmark_model_verifier.h',
'test/live_sync/live_sync_test.cc',
diff --git a/chrome/test/in_process_browser_test.cc b/chrome/test/in_process_browser_test.cc
index 941150e..451dd2e 100644
--- a/chrome/test/in_process_browser_test.cc
+++ b/chrome/test/in_process_browser_test.cc
@@ -93,14 +93,12 @@ void InProcessBrowserTest::SetUp() {
"The user data directory name passed into this test was too "
"short to delete safely. Please check the user-data-dir "
"argument and try again.";
- ASSERT_TRUE(file_util::DieFileDie(user_data_dir, true));
+ if (ShouldDeleteProfile())
+ ASSERT_TRUE(file_util::DieFileDie(user_data_dir, true));
// The unit test suite creates a testingbrowser, but we want the real thing.
// Delete the current one. We'll install the testing one in TearDown.
delete g_browser_process;
- g_browser_process = NULL;
-
- SetUpUserDataDirectory();
// Don't delete the resources when BrowserMain returns. Many ui classes
// cache SkBitmaps in a static field so that if we delete the resource
@@ -142,7 +140,7 @@ void InProcessBrowserTest::SetUp() {
ASCIIToWide(kBrowserTestType));
// Single-process mode is not set in BrowserMain so it needs to be processed
- // explicitly.
+ // explicitlty.
original_single_process_ = RenderProcessHost::run_renderer_in_process();
if (command_line->HasSwitch(switches::kSingleProcess))
RenderProcessHost::set_run_renderer_in_process(true);
@@ -281,9 +279,7 @@ void InProcessBrowserTest::RunTestOnMainThreadLoopDeprecated() {
NewRunnableMethod(this, &InProcessBrowserTest::TimedOut),
initial_timeout_);
- // If an ASSERT_ failed during SetUp, skip the InProcessBrowserTest test body.
- if (!HasFatalFailure())
- RunTestOnMainThread();
+ RunTestOnMainThread();
CleanUpOnMainThread();
// Close all browser windows. This might not happen immediately, since some
diff --git a/chrome/test/in_process_browser_test.h b/chrome/test/in_process_browser_test.h
index 63cad3b..2b63b7f 100644
--- a/chrome/test/in_process_browser_test.h
+++ b/chrome/test/in_process_browser_test.h
@@ -56,6 +56,10 @@ class InProcessBrowserTest : public testing::Test {
// Restores state configured in SetUp.
virtual void TearDown();
+ // This method is used to decide if user data dir
+ // needs to be deleted or not.
+ virtual bool ShouldDeleteProfile() { return true; }
+
protected:
// Returns the browser created by CreateBrowser.
Browser* browser() const { return browser_; }
@@ -63,12 +67,6 @@ class InProcessBrowserTest : public testing::Test {
// Override this rather than TestBody.
virtual void RunTestOnMainThread() = 0;
- // Helper to initialize the user data directory. Called by SetUp() after
- // erasing the user data directory, but before any browser is launched.
- // If a test wishes to set up some initial non-empty state in the user
- // data directory before the browser starts up, it can do so here.
- virtual void SetUpUserDataDirectory() {};
-
// We need these special methods because InProcessBrowserTest::SetUp is the
// bottom of the stack that winds up calling your test method, so it is not
// always an option to do what you want by overriding it and calling the
diff --git a/chrome/test/live_sync/live_sync_test.h b/chrome/test/live_sync/live_sync_test.h
index cb11795..9fd3373 100644
--- a/chrome/test/live_sync/live_sync_test.h
+++ b/chrome/test/live_sync/live_sync_test.h
@@ -12,7 +12,6 @@
#include "chrome/common/chrome_switches.h"
#include "chrome/test/in_process_browser_test.h"
#include "googleurl/src/gurl.h"
-#include "net/socket/ssl_test_util.h"
class BookmarkModel;
class BookmarkNode;
@@ -46,30 +45,10 @@ class LiveSyncTest : public InProcessBrowserTest {
ASSERT_FALSE(password_.empty()) << "Can't run live server test "
<< "without specifying --" << switches::kSyncPasswordForTest;
- // Unless a sync server was explicitly provided, run a test one locally.
- // TODO(ncarter): It might be better to allow the user to specify a choice
- // of sync server "providers" -- a script that could locate (or allocate)
- // a sync server instance, possibly on some remote host. The provider
- // would be invoked before each test.
- if (!cl->HasSwitch(switches::kSyncServiceURL))
- SetUpLocalTestServer();
-
// Yield control back to the InProcessBrowserTest framework.
InProcessBrowserTest::SetUp();
}
- virtual void SetUpLocalTestServer() {
- bool success = server_.Start(net::TestServerLauncher::ProtoHTTP,
- server_.kHostName, server_.kOKHTTPSPort,
- FilePath(), FilePath(), std::wstring());
- ASSERT_TRUE(success);
-
- CommandLine* cl = CommandLine::ForCurrentProcess();
- cl->AppendSwitchWithValue(switches::kSyncServiceURL,
- StringPrintf("http://%s:%d/chromiumsync", server_.kHostName,
- server_.kOKHTTPSPort));
- }
-
// Append command line flag to enable sync.
virtual void SetUpCommandLine(CommandLine* command_line) {
}
@@ -99,8 +78,6 @@ class LiveSyncTest : public InProcessBrowserTest {
// to override the default resolver while the test is active.
scoped_ptr<net::ScopedDefaultHostResolverProc> mock_host_resolver_override_;
- net::TestServerLauncher server_;
-
DISALLOW_COPY_AND_ASSIGN(LiveSyncTest);
};
diff --git a/chrome/test/live_sync/two_client_live_bookmarks_sync_test.cc b/chrome/test/live_sync/two_client_live_bookmarks_sync_test.cc
index 62f5b37..47300bf 100644
--- a/chrome/test/live_sync/two_client_live_bookmarks_sync_test.cc
+++ b/chrome/test/live_sync/two_client_live_bookmarks_sync_test.cc
@@ -50,6 +50,11 @@ class TwoClientLiveBookmarksSyncTest : public LiveSyncTest {
}
}
+ // Overwrites ShouldDeleteProfile, so profile doesn't get deleted.
+ virtual bool ShouldDeleteProfile() {
+ return false;
+ }
+
// Overload this method in inherited class and return false to avoid
// race condition (two clients trying to sync/commit at the same time).
// Race condition may lead to duplicate bookmarks if there is existing
@@ -89,8 +94,7 @@ class TwoClientLiveBookmarksSyncTest : public LiveSyncTest {
FILE_PATH_LITERAL("live_sync_data"));
FilePath source_file = sync_data_source.Append(
bookmarks_file_name);
- ASSERT_TRUE(file_util::PathExists(source_file))
- << "Could not locate test data file: " << source_file.value();
+ ASSERT_TRUE(file_util::PathExists(source_file));
// Now copy pre-generated bookmark file to default profile.
ASSERT_TRUE(file_util::CopyFile(source_file,
dest_user_data_dir_default.Append(FILE_PATH_LITERAL("bookmarks"))));
@@ -147,10 +151,9 @@ class LiveSyncTestPrePopulatedHistory1K
LiveSyncTestPrePopulatedHistory1K() {}
virtual ~LiveSyncTestPrePopulatedHistory1K() {}
- // This is used to pre-populate history data (1K URL Visit) to Client1
- // and Verifier Client. Invoked by InProcBrowserTest::SetUp before
- // the browser is started.
- virtual void SetUpUserDataDirectory() {
+ // This is used to pre-populate history data (1K URL Visit)to Client1
+ // and Verifier Client.
+ void PrePopulateHistory1K() {
// Let's copy history files to default profile.
FilePath dest_user_data_dir;
PathService::Get(chrome::DIR_USER_DATA, &dest_user_data_dir);
@@ -179,6 +182,11 @@ class LiveSyncTestPrePopulatedHistory1K
}
}
+ virtual void SetUp() {
+ PrePopulateHistory1K();
+ LiveSyncTest::SetUp();
+ }
+
private:
DISALLOW_COPY_AND_ASSIGN(LiveSyncTestPrePopulatedHistory1K);
};
@@ -189,10 +197,10 @@ class LiveSyncTestBasicHierarchy50BM
LiveSyncTestBasicHierarchy50BM() {}
virtual ~LiveSyncTestBasicHierarchy50BM() {}
- // Invoked by InProcBrowserTest::SetUp before the browser is started.
- virtual void SetUpUserDataDirectory() {
+ virtual void SetUp() {
FilePath file_name(FILE_PATH_LITERAL("bookmarks_50BM5F3L"));
PrePopulateBookmarksHierarchy(file_name);
+ LiveSyncTest::SetUp();
}
private:
@@ -225,11 +233,10 @@ class LiveSyncTestComplexHierarchy800BM
public:
LiveSyncTestComplexHierarchy800BM() {}
virtual ~LiveSyncTestComplexHierarchy800BM() {}
-
- // Invoked by InProcBrowserTest::SetUp before the browser is started.
- virtual void SetUpUserDataDirectory() {
+ virtual void SetUp() {
FilePath file_name(FILE_PATH_LITERAL("bookmarks_800BM32F8L"));
- PrePopulateBookmarksHierarchy(file_name);
+ TwoClientLiveBookmarksSyncTest::PrePopulateBookmarksHierarchy(file_name);
+ LiveSyncTest::SetUp();
}
private:
@@ -241,11 +248,10 @@ class LiveSyncTestHugeHierarchy5500BM
public:
LiveSyncTestHugeHierarchy5500BM() {}
virtual ~LiveSyncTestHugeHierarchy5500BM() {}
-
- // Invoked by InProcBrowserTest::SetUp before the browser is started.
- virtual void SetUpUserDataDirectory() {
+ virtual void SetUp() {
FilePath file_name(FILE_PATH_LITERAL("bookmarks_5500BM125F25L"));
- PrePopulateBookmarksHierarchy(file_name);
+ TwoClientLiveBookmarksSyncTest::PrePopulateBookmarksHierarchy(file_name);
+ LiveSyncTest::SetUp();
}
virtual bool ShouldSetupSyncWithRace() {
return false;
@@ -261,11 +267,11 @@ class LiveSyncTestDefaultIEFavorites
LiveSyncTestDefaultIEFavorites() {}
virtual ~LiveSyncTestDefaultIEFavorites() {}
- // Invoked by InProcBrowserTest::SetUp before the browser is started.
- virtual void SetUpUserDataDirectory() {
+ virtual void SetUp() {
const FilePath file_name(
FILE_PATH_LITERAL("bookmarks_default_IE_favorites"));
TwoClientLiveBookmarksSyncTest::PrePopulateBookmarksHierarchy(file_name);
+ LiveSyncTest::SetUp();
}
private:
diff --git a/chrome/test/memory_test/memory_test.cc b/chrome/test/memory_test/memory_test.cc
index 079e0a4..ef1eebc 100644
--- a/chrome/test/memory_test/memory_test.cc
+++ b/chrome/test/memory_test/memory_test.cc
@@ -143,7 +143,7 @@ class MemoryTest : public UITest {
int expected_tab_count = 1;
for (unsigned counter = 0; counter < urls_length; ++counter) {
std::string url = urls[counter];
-
+
SCOPED_TRACE(url);
if (url == "<PAUSE>") { // Special command to delay on this page
diff --git a/net/net.gyp b/net/net.gyp
index 76c9f4a..d659731 100644
--- a/net/net.gyp
+++ b/net/net.gyp
@@ -832,9 +832,7 @@
'dependencies': [
'net',
'../base/base.gyp:base',
- '../chrome/browser/sync/protocol/sync_proto.gyp:sync_proto',
'../testing/gtest.gyp:gtest',
- '../third_party/protobuf2/protobuf.gyp:py_proto',
],
'sources': [
'base/cert_test_util.cc',
diff --git a/net/socket/ssl_test_util.cc b/net/socket/ssl_test_util.cc
index 81f5022..ce3589a 100644
--- a/net/socket/ssl_test_util.cc
+++ b/net/socket/ssl_test_util.cc
@@ -89,7 +89,7 @@ void AppendToPythonPath(const FilePath& dir) {
SetEnvironmentVariableW(kPythonPath, dir.value().c_str());
} else if (!wcsstr(oldpath, dir.value().c_str())) {
std::wstring newpath(oldpath);
- newpath.append(L";");
+ newpath.append(L":");
newpath.append(dir.value());
SetEnvironmentVariableW(kPythonPath, newpath.c_str());
}
@@ -117,13 +117,6 @@ void TestServerLauncher::SetPythonPath() {
AppendToPythonPath(third_party_dir.Append(FILE_PATH_LITERAL("tlslite")));
AppendToPythonPath(third_party_dir.Append(FILE_PATH_LITERAL("pyftpdlib")));
-
- // Locate the Python code generated by the protocol buffers compiler.
- FilePath generated_code_dir;
- CHECK(PathService::Get(base::DIR_EXE, &generated_code_dir));
- generated_code_dir = generated_code_dir.Append(FILE_PATH_LITERAL("pyproto"));
- AppendToPythonPath(generated_code_dir);
- AppendToPythonPath(generated_code_dir.Append(FILE_PATH_LITERAL("sync_pb")));
}
bool TestServerLauncher::Start(Protocol protocol,
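
The lines removed above are what made the generated Python modules visible to testserver.py: TestServerLauncher::SetPythonPath() appended <build dir>/pyproto and <build dir>/pyproto/sync_pb to the PYTHONPATH environment variable before launching the server. A rough Python equivalent of that setup, where build_dir stands in for base::DIR_EXE (the directory containing the built binaries and pyproto/):

# Rough equivalent of the removed PYTHONPATH setup in SetPythonPath();
# build_dir is a placeholder for base::DIR_EXE.
import os

def add_pyproto_to_pythonpath(build_dir):
    generated = os.path.join(build_dir, 'pyproto')
    wanted = [generated, os.path.join(generated, 'sync_pb')]
    parts = [p for p in os.environ.get('PYTHONPATH', '').split(os.pathsep) if p]
    for path in wanted:
        if path not in parts:
            parts.append(path)
    # Picked up by the python child process that runs testserver.py.
    os.environ['PYTHONPATH'] = os.pathsep.join(parts)

if __name__ == '__main__':
    add_pyproto_to_pythonpath(os.getcwd())
    print(os.environ['PYTHONPATH'])
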
diff --git a/net/tools/testserver/chromiumsync.py b/net/tools/testserver/chromiumsync.py
deleted file mode 100755
index 2268bbd..0000000
--- a/net/tools/testserver/chromiumsync.py
+++ /dev/null
@@ -1,653 +0,0 @@
-#!/usr/bin/python2.4
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""An implementation of the server side of the Chromium sync protocol.
-
-The details of the protocol are described mostly by comments in the protocol
-buffer definition at chrome/browser/sync/protocol/sync.proto.
-"""
-
-import operator
-import random
-import threading
-
-import autofill_specifics_pb2
-import bookmark_specifics_pb2
-import preference_specifics_pb2
-import theme_specifics_pb2
-import typed_url_specifics_pb2
-import sync_pb2
-
-# An enumeration of the various kinds of data that can be synced.
-# Over the wire, this enumeration is not used: a sync object's type is
-# inferred by which EntitySpecifics extension it has. But in the context
-# of a program, it is useful to have an enumeration.
-ALL_TYPES = (
- TOP_LEVEL, # The type of the 'Google Chrome' folder.
- BOOKMARK,
- AUTOFILL,
- TYPED_URL,
- PREFERENCE,
- # PASSWORD, # Disabled since there's no specifics proto.
- # SESSION,
- THEME) = range(6)
-
-# Given a sync type from ALL_TYPES, find the extension token corresponding
-# to that datatype. Note that TOP_LEVEL has no such token.
-SYNC_TYPE_TO_EXTENSION = {
- BOOKMARK: bookmark_specifics_pb2.bookmark,
- AUTOFILL: autofill_specifics_pb2.autofill,
- TYPED_URL: typed_url_specifics_pb2.typed_url,
- PREFERENCE: preference_specifics_pb2.preference,
- # PASSWORD: password_specifics_pb2.password, # Disabled
- # SESSION: session_specifics_pb2.session, # Disabled
- THEME: theme_specifics_pb2.theme,
- }
-
-# The parent ID used to indicate a top-level node.
-ROOT_ID = '0'
-
-def GetEntryType(entry):
- """Extract the sync type from a SyncEntry.
-
- Args:
- entry: A SyncEntity protobuf object whose type to determine.
- Returns:
- A value from ALL_TYPES if the entry's type can be determined, or None
- if the type cannot be determined.
- """
- if entry.server_defined_unique_tag == 'google_chrome':
- return TOP_LEVEL
- entry_types = GetEntryTypesFromSpecifics(entry.specifics)
- if not entry_types:
- return None
- # It is presupposed that the entry has at most one specifics extension
- # present. If there is more than one, either there's a bug, or else
- # the caller should use GetEntryTypes.
- if len(entry_types) > 1:
- raise 'GetEntryType called with multiple extensions present.'
- return entry_types[0]
-
-def GetEntryTypesFromSpecifics(specifics):
- """Determine the sync types indicated by an EntitySpecifics's extension(s).
-
- If the specifics have more than one recognized extension (as commonly
- happens with the requested_types field of GetUpdatesMessage), all types
- will be returned. Callers must handle the possibility of the returned
- value having more than one item.
-
- Args:
- specifics: A EntitySpecifics protobuf message whose extensions to
- enumerate.
- Returns:
- A list of the sync types (values from ALL_TYPES) assocated with each
- recognized extension of the specifics message.
- """
- entry_types = []
- for data_type, extension in SYNC_TYPE_TO_EXTENSION.iteritems():
- if specifics.HasExtension(extension):
- entry_types.append(data_type)
- return entry_types
-
-def GetRequestedTypes(get_updates_message):
- """Determine the sync types requested by a client GetUpdates operation."""
- types = GetEntryTypesFromSpecifics(
- get_updates_message.requested_types)
- if types:
- types.append(TOP_LEVEL)
- return types
-
-def GetDefaultEntitySpecifics(data_type):
- """Get an EntitySpecifics having a sync type's default extension value.
- """
- specifics = sync_pb2.EntitySpecifics()
- if data_type in SYNC_TYPE_TO_EXTENSION:
- extension_handle = SYNC_TYPE_TO_EXTENSION[data_type]
- specifics.Extensions[extension_handle].SetInParent()
- return specifics
-
-def DeepCopyOfProto(proto):
- """Return a deep copy of a protocol buffer."""
- new_proto = type(proto)()
- new_proto.MergeFrom(proto)
- return new_proto
-
-
-class PermanentItem(object):
- """A specification of one server-created permanent item.
-
- Attributes:
- tag: A known-to-the-client value that uniquely identifies a server-created
- permanent item.
- name: The human-readable display name for this item.
- parent_tag: The tag of the permanent item's parent. If ROOT_ID, indicates
- a top-level item. Otherwise, this must be the tag value of some other
- server-created permanent item.
- sync_type: A value from ALL_TYPES, giving the datatype of this permanent
- item. This controls which types of client GetUpdates requests will
- cause the permanent item to be created and returned.
- """
-
- def __init__(self, tag, name, parent_tag, sync_type):
- self.tag = tag
- self.name = name
- self.parent_tag = parent_tag
- self.sync_type = sync_type
-
-class SyncDataModel(object):
- """Models the account state of one sync user.
- """
- _BATCH_SIZE = 100
-
- # Specify all the permanent items that a model might need.
- _PERMANENT_ITEM_SPECS = [
- PermanentItem('google_chrome', name='Google Chrome',
- parent_tag=ROOT_ID, sync_type=TOP_LEVEL),
- PermanentItem('google_chrome_bookmarks', name='Bookmarks',
- parent_tag='google_chrome', sync_type=BOOKMARK),
- PermanentItem('bookmark_bar', name='Bookmark Bar',
- parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
- PermanentItem('other_bookmarks', name='Other Bookmarks',
- parent_tag='google_chrome_bookmarks', sync_type=BOOKMARK),
- PermanentItem('google_chrome_preferences', name='Preferences',
- parent_tag='google_chrome', sync_type=PREFERENCE),
- PermanentItem('google_chrome_autofill', name='Autofill',
- parent_tag='google_chrome', sync_type=AUTOFILL),
- # TODO(nick): Disabled since the protocol does not support them yet.
- # PermanentItem('google_chrome_passwords', name='Passwords',
- # parent_tag='google_chrome', sync_type=PASSWORD),
- # PermanentItem('google_chrome_sessions', name='Sessions',
- # parent_tag='google_chrome', SESSION),
- PermanentItem('google_chrome_themes', name='Themes',
- parent_tag='google_chrome', sync_type=THEME),
- PermanentItem('google_chrome_typed_urls', name='Typed URLs',
- parent_tag='google_chrome', sync_type=TYPED_URL),
- ]
-
- def __init__(self):
- self._version = 0
-
- # Monotonically increasing version number. The next object change will
- # take on this value + 1.
- self._entries = {}
-
- # TODO(nick): uuid.uuid1() is better, but python 2.5 only.
- self.store_birthday = '%0.30f' % random.random()
-
- def _SaveEntry(self, entry):
- """Insert or update an entry in the change log, and give it a new version.
-
- The ID fields of this entry are assumed to be valid server IDs. This
- entry will be updated with a new version number and sync_timestamp.
-
- Args:
- entry: The entry to be added or updated.
- """
- self._version = self._version + 1
- entry.version = self._version
- entry.sync_timestamp = self._version
-
- # Preserve the originator info, which the client is not required to send
- # when updating.
- base_entry = self._entries.get(entry.id_string)
- if base_entry:
- entry.originator_cache_guid = base_entry.originator_cache_guid
- entry.originator_client_item_id = base_entry.originator_client_item_id
-
- self._entries[entry.id_string] = DeepCopyOfProto(entry)
-
- def _ServerTagToId(self, tag):
- """Determine the server ID from a server-unique tag.
-
- The resulting value is guaranteed not to collide with the other ID
- generation methods.
-
- Args:
- tag: The unique, known-to-the-client tag of a server-generated item.
- """
- if tag and tag != ROOT_ID:
- return '<server tag>%s' % tag
- else:
- return tag
-
- def _ClientTagToId(self, tag):
- """Determine the server ID from a client-unique tag.
-
- The resulting value is guaranteed not to collide with the other ID
- generation methods.
-
- Args:
- tag: The unique, opaque-to-the-server tag of a client-tagged item.
- """
- return '<client tag>%s' % tag
-
- def _ClientIdToId(self, client_guid, client_item_id):
- """Compute a unique server ID from a client-local ID tag.
-
- The resulting value is guaranteed not to collide with the other ID
- generation methods.
-
- Args:
- client_guid: A globally unique ID that identifies the client which
- created this item.
- client_item_id: An ID that uniquely identifies this item on the client
- which created it.
- """
- # Using the client ID info is not required here (we could instead generate
- # a random ID), but it's useful for debugging.
- return '<server ID originally>%s/%s' % (client_guid, client_item_id)
-
- def _WritePosition(self, entry, parent_id, prev_id=None):
- """Convert from a relative position into an absolute, numeric position.
-
- Clients specify positions using the predecessor-based references; the
- server stores and reports item positions using sparse integer values.
- This method converts from the former to the latter.
-
- Args:
- entry: The entry for which to compute a position. Its ID field are
- assumed to be server IDs. This entry will have its parent_id_string
- and position_in_parent fields updated; its insert_after_item_id field
- will be cleared.
- parent_id: The ID of the entry intended as the new parent.
- prev_id: The ID of the entry intended as the new predecessor. If this
- is None, or an ID of an object which is not a child of the new parent,
- the entry will be positioned at the end (right) of the ordering. If
- the empty ID (''), this will be positioned at the front (left) of the
- ordering. Otherwise, the entry will be given a position_in_parent
- value placing it just after (to the right of) the new predecessor.
- """
- PREFERRED_GAP = 2 ** 20
- # Compute values at the beginning or end.
- def ExtendRange(current_limit_entry, sign_multiplier):
- if current_limit_entry.id_string == entry.id_string:
- step = 0
- else:
- step = sign_multiplier * PREFERRED_GAP
- return current_limit_entry.position_in_parent + step
-
- siblings = [x for x in self._entries.values()
- if x.parent_id_string == parent_id and not x.deleted]
- siblings = sorted(siblings, key=operator.attrgetter('position_in_parent'))
- if prev_id == entry.id_string:
- prev_id = ''
- if not siblings:
- # First item in this container; start in the middle.
- entry.position_in_parent = 0
- elif prev_id == '':
- # A special value in the protocol. Insert at first position.
- entry.position_in_parent = ExtendRange(siblings[0], -1)
- else:
- # Consider items along with their successors.
- for a, b in zip(siblings, siblings[1:]):
- if a.id_string != prev_id:
- continue
- elif b.id_string == entry.id_string:
- # We're already in place; don't change anything.
- entry.position_in_parent = b.position_in_parent
- else:
- # Interpolate new position between two others.
- entry.position_in_parent = (
- a.position_in_parent * 7 + b.position_in_parent) / 8
- break
- else:
- # Insert at end. Includes the case where prev_id is None.
- entry.position_in_parent = ExtendRange(siblings[-1], +1)
-
- entry.parent_id_string = parent_id
- entry.ClearField('insert_after_item_id')
-
- def _ItemExists(self, id_string):
- """Determine whether an item exists in the changelog."""
- return id_string in self._entries
-
- def _CreatePermanentItem(self, spec):
- """Create one permanent item from its spec, if it doesn't exist.
-
- The resulting item is added to the changelog.
-
- Args:
- spec: A PermanentItem object holding the properties of the item to create.
- """
- id_string = self._ServerTagToId(spec.tag)
- if self._ItemExists(id_string):
- return
- print 'Creating permanent item: %s' % spec.name
- entry = sync_pb2.SyncEntity()
- entry.id_string = id_string
- entry.non_unique_name = spec.name
- entry.name = spec.name
- entry.server_defined_unique_tag = spec.tag
- entry.folder = True
- entry.deleted = False
- entry.specifics.CopyFrom(GetDefaultEntitySpecifics(spec.sync_type))
- self._WritePosition(entry, self._ServerTagToId(spec.parent_tag))
- self._SaveEntry(entry)
-
- def _CreatePermanentItems(self, requested_types):
- """Ensure creation of all permanent items for a given set of sync types.
-
- Args:
- requested_types: A list of sync data types from ALL_TYPES.
- Permanent items of only these types will be created.
- """
- for spec in self._PERMANENT_ITEM_SPECS:
- if spec.sync_type in requested_types:
- self._CreatePermanentItem(spec)
-
- def GetChangesFromTimestamp(self, requested_types, timestamp):
- """Get entries which have changed since a given timestamp, oldest first.
-
- The returned entries are limited to being _BATCH_SIZE many. The entries
- are returned in strict version order.
-
- Args:
- requested_types: A list of sync data types from ALL_TYPES.
- Only items of these types will be retrieved; others will be filtered
- out.
- timestamp: A timestamp / version number. Only items that have changed
- more recently than this value will be retrieved; older items will
- be filtered out.
- Returns:
- A tuple of (version, entries). Version is a new timestamp value, which
- should be used as the starting point for the next query. Entries is the
- batch of entries meeting the current timestamp query.
- """
- if timestamp == 0:
- self._CreatePermanentItems(requested_types)
- change_log = sorted(self._entries.values(),
- key=operator.attrgetter('version'))
- new_changes = [x for x in change_log if x.version > timestamp]
- # Pick batch_size new changes, and then filter them. This matches
- # the RPC behavior of the production sync server.
- batch = new_changes[:self._BATCH_SIZE]
- if not batch:
- # Client is up to date.
- return (timestamp, [])
-
- # Restrict batch to requested types. Tombstones are untyped
- # and will always get included.
- filtered = []
- for x in batch:
- if (GetEntryType(x) in requested_types) or x.deleted:
- filtered.append(DeepCopyOfProto(x))
- # The new client timestamp is the timestamp of the last item in the
- # batch, even if that item was filtered out.
- return (batch[-1].version, filtered)
-
- def _CheckVersionForCommit(self, entry):
- """Perform an optimistic concurrency check on the version number.
-
- Clients are only allowed to commit if they report having seen the most
- recent version of an object.
-
- Args:
- entry: A sync entity from the client. It is assumed that ID fields
- have been converted to server IDs.
- Returns:
- A boolean value indicating whether the client's version matches the
- newest server version for the given entry.
- """
- if entry.id_string in self._entries:
- if (self._entries[entry.id_string].version != entry.version and
- not self._entries[entry.id_string].deleted):
- # Version mismatch that is not a tombstone recreation.
- return False
- else:
- if entry.version != 0:
- # Edit to an item that does not exist.
- return False
- return True
-
- def _CheckParentIdForCommit(self, entry):
- """Check that the parent ID referenced in a SyncEntity actually exists.
-
- Args:
- entry: A sync entity from the client. It is assumed that ID fields
- have been converted to server IDs.
- Returns:
- A boolean value indicating whether the entity's parent ID is an object
- that actually exists (and is not deleted) in the current account state.
- """
- if entry.parent_id_string == ROOT_ID:
- # This is generally allowed.
- return True
- if entry.parent_id_string not in self._entries:
- print 'Warning: Client sent unknown ID. Should never happen.'
- return False
- if entry.parent_id_string == entry.id_string:
- print 'Warning: Client sent circular reference. Should never happen.'
- return False
- if self._entries[entry.parent_id_string].deleted:
- # This can happen in a race condition between two clients.
- return False
- if not self._entries[entry.parent_id_string].folder:
- print 'Warning: Client sent non-folder parent. Should never happen.'
- return False
- return True
-
- def _RewriteIdsAsServerIds(self, entry, cache_guid, commit_session):
- """Convert ID fields in a client sync entry to server IDs.
-
- A commit batch sent by a client may contain new items for which the
- server has not generated IDs yet. And within a commit batch, later
- items are allowed to refer to earlier items. This method will
- generate server IDs for new items, as well as rewrite references
- to items whose server IDs were generated earlier in the batch.
-
- Args:
- entry: The client sync entry to modify.
- cache_guid: The globally unique ID of the client that sent this
- commit request.
- commit_session: A dictionary mapping the original IDs to the new server
- IDs, for any items committed earlier in the batch.
- """
- if entry.version == 0:
- if entry.HasField('client_defined_unique_tag'):
- # When present, this should determine the item's ID.
- new_id = self._ClientTagToId(entry.client_defined_unique_tag)
- else:
- new_id = self._ClientIdToId(cache_guid, entry.id_string)
- entry.originator_cache_guid = cache_guid
- entry.originator_client_item_id = entry.id_string
- commit_session[entry.id_string] = new_id # Remember the remapping.
- entry.id_string = new_id
- if entry.parent_id_string in commit_session:
- entry.parent_id_string = commit_session[entry.parent_id_string]
- if entry.insert_after_item_id in commit_session:
- entry.insert_after_item_id = commit_session[entry.insert_after_item_id]
-
- def CommitEntry(self, entry, cache_guid, commit_session):
- """Attempt to commit one entry to the user's account.
-
- Args:
- entry: A SyncEntity protobuf representing desired object changes.
- cache_guid: A string value uniquely identifying the client; this
- is used for ID generation and will determine the originator_cache_guid
- if the entry is new.
- commit_session: A dictionary mapping client IDs to server IDs for any
- objects committed earlier this session. If the entry gets a new ID
- during commit, the change will be recorded here.
- Returns:
- A SyncEntity reflecting the post-commit value of the entry, or None
- if the entry was not committed due to an error.
- """
- entry = DeepCopyOfProto(entry)
-
- # Generate server IDs for this entry, and write generated server IDs
- # from earlier entries into the message's fields, as appropriate. The
- # ID generation state is stored in 'commit_session'.
- self._RewriteIdsAsServerIds(entry, cache_guid, commit_session)
-
- # Perform the optimistic concurrency check on the entry's version number.
- # Clients are not allowed to commit unless they indicate that they've seen
- # the most recent version of an object.
- if not self._CheckVersionForCommit(entry):
- return None
-
- # Check the validity of the parent ID; it must exist at this point.
- # TODO(nick): Implement cycle detection and resolution.
- if not self._CheckParentIdForCommit(entry):
- return None
-
- # At this point, the commit is definitely going to happen.
-
- # Deletion works by storing a limited record for an entry, called a
- # tombstone. A sync server must track deleted IDs forever, since it does
- # not keep track of client knowledge (there's no deletion ACK event).
- if entry.deleted:
- # Only the ID, version and deletion state are preserved on a tombstone.
- # TODO(nick): Does the production server not preserve the type? Not
- # doing so means that tombstones cannot be filtered based on
- # requested_types at GetUpdates time.
- tombstone = sync_pb2.SyncEntity()
- tombstone.id_string = entry.id_string
- tombstone.deleted = True
- tombstone.name = '' # 'name' is a required field; we're stuck with it.
- entry = tombstone
- else:
- # Comments in sync.proto detail how the representation of positional
- # ordering works: the 'insert_after_item_id' field specifies a
- # predecessor during Commit operations, but the 'position_in_parent'
- # field provides an absolute ordering in GetUpdates contexts. Here
- # we convert from the former to the latter. Specifically, we'll
- # generate a numeric position placing the item just after the object
- # identified by 'insert_after_item_id', and then clear the
- # 'insert_after_item_id' field so that it's not sent back to the client
- # during later GetUpdates requests.
- if entry.HasField('insert_after_item_id'):
- self._WritePosition(entry, entry.parent_id_string,
- entry.insert_after_item_id)
- else:
- self._WritePosition(entry, entry.parent_id_string)
-
- # Preserve the originator info, which the client is not required to send
- # when updating.
- base_entry = self._entries.get(entry.id_string)
- if base_entry and not entry.HasField("originator_cache_guid"):
- entry.originator_cache_guid = base_entry.originator_cache_guid
- entry.originator_client_item_id = base_entry.originator_client_item_id
-
- # Commit the change. This also updates the version number.
- self._SaveEntry(entry)
- # TODO(nick): Handle recursive deletion.
- return entry
-
-class TestServer(object):
- """An object to handle requests for one (and only one) Chrome Sync account.
-
- TestServer consumes the sync command messages that are the outermost
- layers of the protocol, performs the corresponding actions on its
- SyncDataModel, and constructs an appropropriate response message.
- """
-
- def __init__(self):
- # The implementation supports exactly one account; its state is here.
- self.account = SyncDataModel()
- self.account_lock = threading.Lock()
-
- def HandleCommand(self, raw_request):
- """Decode and handle a sync command from a raw input of bytes.
-
- This is the main entry point for this class. It is safe to call this
- method from multiple threads.
-
- Args:
- raw_request: An iterable byte sequence to be interpreted as a sync
- protocol command.
- Returns:
- A tuple (response_code, raw_response); the first value is an HTTP
- result code, while the second value is a string of bytes which is the
- serialized reply to the command.
- """
- self.account_lock.acquire()
- try:
- request = sync_pb2.ClientToServerMessage()
- request.MergeFromString(raw_request)
- contents = request.message_contents
-
- response = sync_pb2.ClientToServerResponse()
- response.error_code = sync_pb2.ClientToServerResponse.SUCCESS
- response.store_birthday = self.account.store_birthday
-
- if contents == sync_pb2.ClientToServerMessage.AUTHENTICATE:
- print 'Authenticate'
- # We accept any authentication token, and support only one account.
- # TODO(nick): Mock out the GAIA authentication as well; hook up here.
- response.authenticate.user.email = 'syncjuser@chromium'
- response.authenticate.user.display_name = 'Sync J User'
- elif contents == sync_pb2.ClientToServerMessage.COMMIT:
- print 'Commit'
- self.HandleCommit(request.commit, response.commit)
- elif contents == sync_pb2.ClientToServerMessage.GET_UPDATES:
- print ('GetUpdates from timestamp %d' %
- request.get_updates.from_timestamp)
- self.HandleGetUpdates(request.get_updates, response.get_updates)
- return (200, response.SerializeToString())
- finally:
- self.account_lock.release()
-
- def HandleCommit(self, commit_message, commit_response):
- """Respond to a Commit request by updating the user's account state.
-
- Commit attempts stop after the first error, returning a CONFLICT result
- for any unattempted entries.
-
- Args:
- commit_message: A sync_pb.CommitMessage protobuf holding the content
- of the client's request.
- commit_response: A sync_pb.CommitResponse protobuf into which a reply
- to the client request will be written.
- """
- commit_response.SetInParent()
- batch_failure = False
- session = {} # Tracks ID renaming during the commit operation.
- guid = commit_message.cache_guid
- for entry in commit_message.entries:
- server_entry = None
- if not batch_failure:
- # Try to commit the change to the account.
- server_entry = self.account.CommitEntry(entry, guid, session)
-
- # An entryresponse is returned in both success and failure cases.
- reply = commit_response.entryresponse.add()
- if not server_entry:
- reply.response_type = sync_pb2.CommitResponse.CONFLICT
- reply.error_message = 'Conflict.'
- batch_failure = True # One failure halts the batch.
- else:
- reply.response_type = sync_pb2.CommitResponse.SUCCESS
- # These are the properties that the server is allowed to override
- # during commit; the client wants to know their values at the end
- # of the operation.
- reply.id_string = server_entry.id_string
- if not server_entry.deleted:
- reply.parent_id_string = server_entry.parent_id_string
- reply.position_in_parent = server_entry.position_in_parent
- reply.version = server_entry.version
- reply.name = server_entry.name
- reply.non_unique_name = server_entry.non_unique_name
-
- def HandleGetUpdates(self, update_request, update_response):
- """Respond to a GetUpdates request by querying the user's account.
-
- Args:
- update_request: A sync_pb.GetUpdatesMessage protobuf holding the content
- of the client's request.
- update_response: A sync_pb.GetUpdatesResponse protobuf into which a reply
- to the client request will be written.
- """
- update_response.SetInParent()
- requested_types = GetRequestedTypes(update_request)
- new_timestamp, entries = self.account.GetChangesFromTimestamp(
- requested_types, update_request.from_timestamp)
-
- # If the client is up to date, we are careful not to set the
- # new_timestamp field.
- if new_timestamp != update_request.from_timestamp:
- update_response.new_timestamp = new_timestamp
- for e in entries:
- reply = update_response.entries.add()
- reply.CopyFrom(e)
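
For reference, the module deleted above is self-contained enough to drive directly, the way chromiumsync_test.py (deleted next) does: create a SyncDataModel, let a GetUpdates from timestamp 0 create the permanent folders, then commit an entry. The sketch below assumes the deleted chromiumsync.py and the generated sync_pb2 module are importable:

# Minimal driving sketch for the deleted in-memory sync model, mirroring the
# usage in chromiumsync_test.py. Assumes chromiumsync.py and sync_pb2 (both
# removed by this revert) are on sys.path.
import chromiumsync
import sync_pb2

model = chromiumsync.SyncDataModel()
types = [chromiumsync.BOOKMARK, chromiumsync.TOP_LEVEL]

# A first GetUpdates from timestamp 0 lazily creates the permanent folders.
version, changes = model.GetChangesFromTimestamp(types, 0)
parent_folder = changes[-1]  # the last permanent bookmark folder created

# Commit one new bookmark folder under it, the way a client would.
entry = sync_pb2.SyncEntity()
entry.id_string = 'client-local-id-1'    # pre-commit, client-assigned ID
entry.version = 0                        # 0 means "new item"
entry.name = 'Example folder'
entry.parent_id_string = parent_folder.id_string
entry.insert_after_item_id = ''          # position at the front of the folder
entry.folder = True
entry.deleted = False
entry.specifics.CopyFrom(
    chromiumsync.GetDefaultEntitySpecifics(chromiumsync.BOOKMARK))

session = {}                             # maps client IDs to new server IDs
result = model.CommitEntry(entry, 'example-cache-guid', session)
print('committed as %s at version %d' % (result.id_string, result.version))

# A follow-up GetUpdates from the previous timestamp returns just the new item.
version, changes = model.GetChangesFromTimestamp(types, version)
print('%d change(s), new timestamp %d' % (len(changes), version))
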
diff --git a/net/tools/testserver/chromiumsync_test.py b/net/tools/testserver/chromiumsync_test.py
deleted file mode 100755
index bb73d05..0000000
--- a/net/tools/testserver/chromiumsync_test.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#!/usr/bin/python2.4
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests exercising chromiumsync and SyncDataModel."""
-
-import unittest
-
-from google.protobuf import text_format
-
-import chromiumsync
-import sync_pb2
-
-class SyncDataModelTest(unittest.TestCase):
- def setUp(self):
- self.model = chromiumsync.SyncDataModel()
-
- def AddToModel(self, proto):
- self.model._entries[proto.id_string] = proto
-
- def testPermanentItemSpecs(self):
- SPECS = chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS
- # parent_tags must be declared before use.
- declared_specs = set(['0'])
- for spec in SPECS:
- self.assertTrue(spec.parent_tag in declared_specs)
- declared_specs.add(spec.tag)
- # Every sync datatype should have a permanent folder associated with it.
- unique_datatypes = set([x.sync_type for x in SPECS])
- self.assertEqual(unique_datatypes,
- set(chromiumsync.ALL_TYPES))
-
- def testSaveEntry(self):
- proto = sync_pb2.SyncEntity()
- proto.id_string = 'abcd';
- proto.version = 0;
- self.assertFalse(self.model._ItemExists(proto.id_string))
- self.model._SaveEntry(proto)
- self.assertEqual(1, proto.version)
- self.assertTrue(self.model._ItemExists(proto.id_string))
- self.model._SaveEntry(proto)
- self.assertEqual(2, proto.version)
- proto.version = 0
- self.assertTrue(self.model._ItemExists(proto.id_string))
- self.assertEqual(2, self.model._entries[proto.id_string].version)
-
- def testWritePosition(self):
- def MakeProto(id_string, parent, position):
- proto = sync_pb2.SyncEntity()
- proto.id_string = id_string
- proto.position_in_parent = position
- proto.parent_id_string = parent
- self.AddToModel(proto)
-
- MakeProto('a', 'X', 1000)
- MakeProto('b', 'X', 1800)
- MakeProto('c', 'X', 2600)
- MakeProto('a1', 'Z', 1007)
- MakeProto('a2', 'Z', 1807)
- MakeProto('a3', 'Z', 2607)
- MakeProto('s', 'Y', 10000)
-
- def AssertPositionResult(my_id, parent_id, prev_id, expected_position):
- entry = sync_pb2.SyncEntity()
- entry.id_string = my_id
- self.model._WritePosition(entry, parent_id, prev_id)
- self.assertEqual(expected_position, entry.position_in_parent)
- self.assertEqual(parent_id, entry.parent_id_string)
- self.assertFalse(entry.HasField('insert_after_item_id'))
-
- AssertPositionResult('new', 'new_parent', '', 0)
- AssertPositionResult('new', 'Y', '', 10000 - (2 ** 20))
- AssertPositionResult('new', 'Y', 's', 10000 + (2 ** 20))
- AssertPositionResult('s', 'Y', '', 10000)
- AssertPositionResult('s', 'Y', 's', 10000)
- AssertPositionResult('a1', 'Z', '', 1007)
-
- AssertPositionResult('new', 'X', '', 1000 - (2 ** 20))
- AssertPositionResult('new', 'X', 'a', 1100)
- AssertPositionResult('new', 'X', 'b', 1900)
- AssertPositionResult('new', 'X', 'c', 2600 + (2 ** 20))
-
- AssertPositionResult('a1', 'X', '', 1000 - (2 ** 20))
- AssertPositionResult('a1', 'X', 'a', 1100)
- AssertPositionResult('a1', 'X', 'b', 1900)
- AssertPositionResult('a1', 'X', 'c', 2600 + (2 ** 20))
-
- AssertPositionResult('a', 'X', '', 1000)
- AssertPositionResult('a', 'X', 'b', 1900)
- AssertPositionResult('a', 'X', 'c', 2600 + (2 ** 20))
-
- AssertPositionResult('b', 'X', '', 1000 - (2 ** 20))
- AssertPositionResult('b', 'X', 'a', 1800)
- AssertPositionResult('b', 'X', 'c', 2600 + (2 ** 20))
-
- AssertPositionResult('c', 'X', '', 1000 - (2 ** 20))
- AssertPositionResult('c', 'X', 'a', 1100)
- AssertPositionResult('c', 'X', 'b', 2600)
-
- def testCreatePermanentItems(self):
- self.model._CreatePermanentItems(chromiumsync.ALL_TYPES)
- self.assertEqual(len(chromiumsync.ALL_TYPES) + 2,
- len(self.model._entries))
-
- def ExpectedPermanentItemCount(self, sync_type):
- if sync_type == chromiumsync.BOOKMARK:
- return 4
- elif sync_type == chromiumsync.TOP_LEVEL:
- return 1
- else:
- return 2
-
- def testGetChangesFromTimestampZeroForEachType(self):
- for sync_type in chromiumsync.ALL_TYPES:
- self.model = chromiumsync.SyncDataModel()
- request_types = [sync_type, chromiumsync.TOP_LEVEL]
-
- version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
-
- expected_count = self.ExpectedPermanentItemCount(sync_type)
- self.assertEqual(expected_count, version)
- self.assertEqual(expected_count, len(changes))
- self.assertEqual('google_chrome', changes[0].server_defined_unique_tag)
- for change in changes:
- self.assertTrue(change.HasField('server_defined_unique_tag'))
- self.assertEqual(change.version, change.sync_timestamp)
- self.assertTrue(change.version <= version)
-
- # Test idempotence: another GetUpdates from ts=0 shouldn't recreate.
- version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
- self.assertEqual(expected_count, version)
- self.assertEqual(expected_count, len(changes))
-
- # Doing a wider GetUpdates from timestamp zero shouldn't recreate either.
- new_version, changes = self.model.GetChangesFromTimestamp(
- chromiumsync.ALL_TYPES, 0)
- self.assertEqual(len(chromiumsync.SyncDataModel._PERMANENT_ITEM_SPECS),
- new_version)
- self.assertEqual(new_version, len(changes))
- version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
- self.assertEqual(new_version, version)
- self.assertEqual(expected_count, len(changes))
-
- def testBatchSize(self):
- for sync_type in chromiumsync.ALL_TYPES[1:]:
- specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
- self.model = chromiumsync.SyncDataModel()
- request_types = [sync_type, chromiumsync.TOP_LEVEL]
-
- for i in range(self.model._BATCH_SIZE*3):
- entry = sync_pb2.SyncEntity()
- entry.id_string = 'batch test %d' % i
- entry.specifics.CopyFrom(specifics)
- self.model._SaveEntry(entry)
- version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
- self.assertEqual(self.model._BATCH_SIZE, version)
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(self.model._BATCH_SIZE*2, version)
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(self.model._BATCH_SIZE*3, version)
- expected_dingleberry = self.ExpectedPermanentItemCount(sync_type)
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(self.model._BATCH_SIZE*3 + expected_dingleberry,
- version)
-
- # Now delete a third of the items.
- for i in xrange(self.model._BATCH_SIZE*3 - 1, 0, -3):
- entry = sync_pb2.SyncEntity()
- entry.id_string = 'batch test %d' % i
- entry.deleted = True
- self.model._SaveEntry(entry)
-
- # The batch counts shouldn't change.
- version, changes = self.model.GetChangesFromTimestamp(request_types, 0)
- self.assertEqual(self.model._BATCH_SIZE, len(changes))
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(self.model._BATCH_SIZE, len(changes))
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(self.model._BATCH_SIZE, len(changes))
- expected_dingleberry = self.ExpectedPermanentItemCount(sync_type)
- version, changes = self.model.GetChangesFromTimestamp(request_types,
- version)
- self.assertEqual(expected_dingleberry, len(changes))
- self.assertEqual(self.model._BATCH_SIZE*4 + expected_dingleberry, version)
-
- def testCommitEachDataType(self):
- for sync_type in chromiumsync.ALL_TYPES[1:]:
- specifics = chromiumsync.GetDefaultEntitySpecifics(sync_type)
- self.model = chromiumsync.SyncDataModel()
- my_cache_guid = '112358132134'
- parent = 'foobar'
- commit_session = {}
-
- # Start with a GetUpdates from timestamp 0, to populate permanent items.
- original_version, original_changes = (
- self.model.GetChangesFromTimestamp([sync_type], 0))
-
- def DoCommit(original=None, id='', name=None, parent=None, prev=None):
- proto = sync_pb2.SyncEntity()
- if original is not None:
- proto.version = original.version
- proto.id_string = original.id_string
- proto.parent_id_string = original.parent_id_string
- proto.name = original.name
- else:
- proto.id_string = id
- proto.version = 0
- proto.specifics.CopyFrom(specifics)
- if name is not None:
- proto.name = name
- if parent:
- proto.parent_id_string = parent.id_string
- if prev:
- proto.insert_after_item_id = prev.id_string
- else:
- proto.insert_after_item_id = ''
- proto.folder = True
- proto.deleted = False
- result = self.model.CommitEntry(proto, my_cache_guid, commit_session)
- self.assertTrue(result)
- return (proto, result)
-
- # Commit a new item.
- proto1, result1 = DoCommit(name='namae', id='Foo',
- parent=original_changes[-1])
- # Commit an item whose parent is another item (referenced via the
- # pre-commit ID).
- proto2, result2 = DoCommit(name='Secondo', id='Bar',
- parent=proto1)
- # Commit a sibling of the second item.
- proto3, result3 = DoCommit(name='Third!', id='Baz',
- parent=proto1, prev=proto2)
-
- self.assertEqual(3, len(commit_session))
- for p, r in [(proto1, result1), (proto2, result2), (proto3, result3)]:
- self.assertNotEqual(r.id_string, p.id_string)
- self.assertEqual(r.originator_client_item_id, p.id_string)
- self.assertEqual(r.originator_cache_guid, my_cache_guid)
- self.assertTrue(r is not self.model._entries[r.id_string],
- "Commit result didn't make a defensive copy.")
- self.assertTrue(p is not self.model._entries[r.id_string],
- "Commit result didn't make a defensive copy.")
- self.assertEqual(commit_session.get(p.id_string), r.id_string)
- self.assertTrue(r.version > original_version)
- self.assertEqual(result1.parent_id_string, proto1.parent_id_string)
- self.assertEqual(result2.parent_id_string, result1.id_string)
- version, changes = self.model.GetChangesFromTimestamp([sync_type],
- original_version)
- self.assertEqual(3, len(changes))
- self.assertEqual(original_version + 3, version)
- self.assertEqual([result1, result2, result3], changes)
- for c in changes:
- self.assertTrue(c is not self.model._entries[c.id_string],
- "GetChanges didn't make a defensive copy.")
- self.assertTrue(result2.position_in_parent < result3.position_in_parent)
- self.assertEqual(0, result2.position_in_parent)
-
- # Now update the items so that the second item is the parent of the
- # first; with the first sandwiched between two new items (4 and 5).
- # Do this in a new commit session, meaning we'll reference items from
- # the first batch by their post-commit, server IDs.
- commit_session = {}
- old_cache_guid = my_cache_guid
- my_cache_guid = 'A different GUID'
- proto2b, result2b = DoCommit(original=result2,
- parent=original_changes[-1])
- proto4, result4 = DoCommit(id='ID4', name='Four',
- parent=result2, prev=None)
- proto1b, result1b = DoCommit(original=result1,
- parent=result2, prev=proto4)
- proto5, result5 = DoCommit(id='ID5', name='Five', parent=result2,
- prev=result1)
-
- self.assertEqual(2, len(commit_session),
- 'Only new items in second batch should be in the session')
- for p, r, original in [(proto2b, result2b, proto2),
- (proto4, result4, proto4),
- (proto1b, result1b, proto1),
- (proto5, result5, proto5)]:
- self.assertEqual(r.originator_client_item_id, original.id_string)
- if original is not p:
- self.assertEqual(r.id_string, p.id_string,
- 'Ids should be stable after first commit')
- self.assertEqual(r.originator_cache_guid, old_cache_guid)
- else:
- self.assertNotEqual(r.id_string, p.id_string)
- self.assertEqual(r.originator_cache_guid, my_cache_guid)
- self.assertEqual(commit_session.get(p.id_string), r.id_string)
- self.assertTrue(r is not self.model._entries[r.id_string],
- "Commit result didn't make a defensive copy.")
- self.assertTrue(p is not self.model._entries[r.id_string],
- "Commit didn't make a defensive copy.")
- self.assertTrue(r.version > p.version)
- version, changes = self.model.GetChangesFromTimestamp([sync_type],
- original_version)
- self.assertEqual(5, len(changes))
- self.assertEqual(original_version + 7, version)
- self.assertEqual([result3, result2b, result4, result1b, result5], changes)
- for c in changes:
- self.assertTrue(c is not self.model._entries[c.id_string],
- "GetChanges didn't make a defensive copy.")
- self.assertTrue(result4.parent_id_string ==
- result1b.parent_id_string ==
- result5.parent_id_string ==
- result2b.id_string)
- self.assertTrue(result4.position_in_parent <
- result1b.position_in_parent <
- result5.position_in_parent)
-
-if __name__ == '__main__':
- unittest.main() \ No newline at end of file
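The assertions in the deleted test above pin down the ID-remapping contract of CommitEntry: within one commit session each pre-commit client ID is mapped to a fresh server ID exactly once, originator_cache_guid and originator_client_item_id record where the entry came from, and the committed version moves past the pre-commit snapshot. A rough sketch of that contract, using a plain dict in place of the reverted model; commit_entry and its field names here are illustrative, not the module's API:

    import uuid

    def commit_entry(entry, cache_guid, commit_session, server_version):
        # First commit of this client ID in the session: mint a server ID and
        # remember the mapping so later references resolve to the same entry.
        if entry['id'] not in commit_session:
            commit_session[entry['id']] = 's_' + uuid.uuid4().hex
        result = dict(entry)
        result['originator_client_item_id'] = entry['id']
        result['originator_cache_guid'] = cache_guid
        result['id'] = commit_session[entry['id']]
        result['version'] = server_version + 1
        return result

    session = {}
    committed = commit_entry({'id': 'Foo', 'name': 'namae', 'version': 0},
                             '112358132134', session, 1)
    assert committed['id'] != 'Foo'
    assert session['Foo'] == committed['id']
    assert committed['originator_client_item_id'] == 'Foo'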
diff --git a/net/tools/testserver/testserver.py b/net/tools/testserver/testserver.py
index 8e3df5e..94ad3da 100644
--- a/net/tools/testserver/testserver.py
+++ b/net/tools/testserver/testserver.py
@@ -1,5 +1,5 @@
#!/usr/bin/python2.4
-# Copyright (c) 2006-2010 The Chromium Authors. All rights reserved.
+# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -22,13 +22,9 @@ import shutil
import SocketServer
import sys
import time
-import urllib2
-
-import pyftpdlib.ftpserver
import tlslite
import tlslite.api
-
-import chromiumsync
+import pyftpdlib.ftpserver
try:
import hashlib
@@ -129,14 +125,12 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
self.ContentTypeHandler,
self.ServerRedirectHandler,
self.ClientRedirectHandler,
- self.ChromiumSyncTimeHandler,
self.MultipartHandler,
self.DefaultResponseHandler]
self._post_handlers = [
self.WriteFile,
self.EchoTitleHandler,
self.EchoAllHandler,
- self.ChromiumSyncCommandHandler,
self.EchoHandler] + self._get_handlers
self._put_handlers = [
self.WriteFile,
@@ -155,8 +149,6 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, request,
client_address,
socket_server)
- # Class variable; shared across requests.
- _sync_handler = chromiumsync.TestServer()
def _ShouldHandleRequest(self, handler_name):
"""Determines if the path can be handled by the handler.
@@ -1004,39 +996,6 @@ class TestPageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
return True
- def ChromiumSyncTimeHandler(self):
- """Handle Chromium sync .../time requests.
-
- The syncer sometimes checks server reachability by examining /time.
- """
- test_name = "/chromiumsync/time"
- if not self._ShouldHandleRequest(test_name):
- return False
-
- self.send_response(200)
- self.send_header('Content-type', 'text/html')
- self.end_headers()
- return True
-
- def ChromiumSyncCommandHandler(self):
- """Handle a chromiumsync command arriving via http.
-
- This covers all sync protocol commands: authentication, getupdates, and
- commit.
- """
- test_name = "/chromiumsync/command"
- if not self._ShouldHandleRequest(test_name):
- return False
-
- length = int(self.headers.getheader('content-length'))
- raw_request = self.rfile.read(length)
-
- http_response, raw_reply = self._sync_handler.HandleCommand(raw_request)
- self.send_response(http_response)
- self.end_headers()
- self.wfile.write(raw_reply)
- return True
-
def MultipartHandler(self):
"""Send a multipart response (10 text/html pages)."""
test_name = "/multipart"
@@ -1166,24 +1125,13 @@ def MakeDataDir():
# Create the default path to our data dir, relative to the exe dir.
my_data_dir = os.path.dirname(sys.argv[0])
my_data_dir = os.path.join(my_data_dir, "..", "..", "..", "..",
- "test", "data")
+ "test", "data")
 #TODO(ibrar): Must use Find* function defined in google\tools
#i.e my_data_dir = FindUpward(my_data_dir, "test", "data")
return my_data_dir
-def TryKillingOldServer(port):
- # Note that an HTTP /kill request to the FTP server has the effect of
- # killing it.
- for protocol in ["http", "https"]:
- try:
- urllib2.urlopen("%s://localhost:%d/kill" % (protocol, port)).read()
- print "Killed old server instance on port %d (via %s)" % (port, protocol)
- except urllib2.URLError:
- # Common case, indicates no server running.
- pass
-
def main(options, args):
# redirect output to a log file so it doesn't spam the unit test output
logfile = open('testserver.log', 'w')
@@ -1191,9 +1139,6 @@ def main(options, args):
port = options.port
- # Try to free up the port if there's an orphaned old instance.
- TryKillingOldServer(port)
-
if options.server_type == SERVER_HTTP:
if options.cert:
# let's make sure the cert file exists.
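The two deleted handlers above define the transport for the sync test server: a GET to /chromiumsync/time only needs to return 200 (the syncer uses it as a reachability probe), and a POST to /chromiumsync/command carries the raw serialized sync request in the body, with the serialized reply returned as the response body. A minimal client-side sketch, assuming the test server is listening on localhost:8888 and that a pre-serialized request already exists in request.bin (both are assumptions, not values taken from this change):

    import urllib2

    BASE = 'http://localhost:8888'  # assumed test server address

    # Reachability probe: only the 200 status matters.
    urllib2.urlopen(BASE + '/chromiumsync/time').read()

    # Sync command: POST the raw serialized request, read the raw reply.
    request_bytes = open('request.bin', 'rb').read()
    reply_bytes = urllib2.urlopen(BASE + '/chromiumsync/command',
                                  data=request_bytes).read()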
diff --git a/third_party/protobuf2/__init__.py b/third_party/protobuf2/__init__.py
deleted file mode 100755
index 139597f..0000000
--- a/third_party/protobuf2/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-
-
diff --git a/third_party/protobuf2/descriptor_pb2.py b/third_party/protobuf2/descriptor_pb2.py
deleted file mode 100755
index cbacd34..0000000
--- a/third_party/protobuf2/descriptor_pb2.py
+++ /dev/null
@@ -1,1172 +0,0 @@
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-
-from google.protobuf import descriptor
-from google.protobuf import message
-from google.protobuf import reflection
-# @@protoc_insertion_point(imports)
-
-
-DESCRIPTOR = descriptor.FileDescriptor(
- name='google/protobuf/descriptor.proto',
- package='google.protobuf',
- serialized_pb='\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdc\x02\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\"\xa9\x03\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\x94\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\x7f\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\"\xa4\x03\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 
\x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12!\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x04true\x12#\n\x15java_generic_services\x18\x11 \x01(\x08:\x04true\x12!\n\x13py_generic_services\x18\x12 \x01(\x08:\x04true\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xb8\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x94\x02\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\x14\x65xperimental_map_key\x18\t \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"]\n\x0b\x45numOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"b\n\x10\x45numValueOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"`\n\x0eServiceOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"_\n\rMethodOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x85\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\x42)\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01')
-
-
-
-_FIELDDESCRIPTORPROTO_TYPE = descriptor.EnumDescriptor(
- name='Type',
- full_name='google.protobuf.FieldDescriptorProto.Type',
- filename=None,
- file=DESCRIPTOR,
- values=[
- descriptor.EnumValueDescriptor(
- name='TYPE_DOUBLE', index=0, number=1,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_FLOAT', index=1, number=2,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_INT64', index=2, number=3,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_UINT64', index=3, number=4,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_INT32', index=4, number=5,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_FIXED64', index=5, number=6,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_FIXED32', index=6, number=7,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_BOOL', index=7, number=8,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_STRING', index=8, number=9,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_GROUP', index=9, number=10,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_MESSAGE', index=10, number=11,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_BYTES', index=11, number=12,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_UINT32', index=12, number=13,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_ENUM', index=13, number=14,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_SFIXED32', index=14, number=15,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_SFIXED64', index=15, number=16,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_SINT32', index=16, number=17,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='TYPE_SINT64', index=17, number=18,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=1187,
- serialized_end=1497,
-)
-
-_FIELDDESCRIPTORPROTO_LABEL = descriptor.EnumDescriptor(
- name='Label',
- full_name='google.protobuf.FieldDescriptorProto.Label',
- filename=None,
- file=DESCRIPTOR,
- values=[
- descriptor.EnumValueDescriptor(
- name='LABEL_OPTIONAL', index=0, number=1,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='LABEL_REQUIRED', index=1, number=2,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='LABEL_REPEATED', index=2, number=3,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=1499,
- serialized_end=1566,
-)
-
-_FILEOPTIONS_OPTIMIZEMODE = descriptor.EnumDescriptor(
- name='OptimizeMode',
- full_name='google.protobuf.FileOptions.OptimizeMode',
- filename=None,
- file=DESCRIPTOR,
- values=[
- descriptor.EnumValueDescriptor(
- name='SPEED', index=0, number=1,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='CODE_SIZE', index=1, number=2,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='LITE_RUNTIME', index=2, number=3,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=2449,
- serialized_end=2507,
-)
-
-_FIELDOPTIONS_CTYPE = descriptor.EnumDescriptor(
- name='CType',
- full_name='google.protobuf.FieldOptions.CType',
- filename=None,
- file=DESCRIPTOR,
- values=[
- descriptor.EnumValueDescriptor(
- name='STRING', index=0, number=0,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='CORD', index=1, number=1,
- options=None,
- type=None),
- descriptor.EnumValueDescriptor(
- name='STRING_PIECE', index=2, number=2,
- options=None,
- type=None),
- ],
- containing_type=None,
- options=None,
- serialized_start=2926,
- serialized_end=2973,
-)
-
-
-_FILEDESCRIPTORSET = descriptor.Descriptor(
- name='FileDescriptorSet',
- full_name='google.protobuf.FileDescriptorSet',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=53,
- serialized_end=124,
-)
-
-
-_FILEDESCRIPTORPROTO = descriptor.Descriptor(
- name='FileDescriptorProto',
- full_name='google.protobuf.FileDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
- number=3, type=9, cpp_type=9, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=3,
- number=4, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=4,
- number=5, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='service', full_name='google.protobuf.FileDescriptorProto.service', index=5,
- number=6, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=6,
- number=7, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.FileDescriptorProto.options', index=7,
- number=8, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=127,
- serialized_end=475,
-)
-
-
-_DESCRIPTORPROTO_EXTENSIONRANGE = descriptor.Descriptor(
- name='ExtensionRange',
- full_name='google.protobuf.DescriptorProto.ExtensionRange',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
- number=1, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
- number=2, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=859,
- serialized_end=903,
-)
-
-_DESCRIPTORPROTO = descriptor.Descriptor(
- name='DescriptorProto',
- full_name='google.protobuf.DescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
- number=6, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
- number=3, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
- number=4, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
- number=5, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.DescriptorProto.options', index=6,
- number=7, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, ],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=478,
- serialized_end=903,
-)
-
-
-_FIELDDESCRIPTORPROTO = descriptor.Descriptor(
- name='FieldDescriptorProto',
- full_name='google.protobuf.FieldDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
- number=3, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
- number=4, type=14, cpp_type=8, label=1,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
- number=5, type=14, cpp_type=8, label=1,
- has_default_value=False, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
- number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
- number=7, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=7,
- number=8, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _FIELDDESCRIPTORPROTO_TYPE,
- _FIELDDESCRIPTORPROTO_LABEL,
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=906,
- serialized_end=1566,
-)
-
-
-_ENUMDESCRIPTORPROTO = descriptor.Descriptor(
- name='EnumDescriptorProto',
- full_name='google.protobuf.EnumDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=1569,
- serialized_end=1709,
-)
-
-
-_ENUMVALUEDESCRIPTORPROTO = descriptor.Descriptor(
- name='EnumValueDescriptorProto',
- full_name='google.protobuf.EnumValueDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
- number=2, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=1711,
- serialized_end=1819,
-)
-
-
-_SERVICEDESCRIPTORPROTO = descriptor.Descriptor(
- name='ServiceDescriptorProto',
- full_name='google.protobuf.ServiceDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
- number=3, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=1822,
- serialized_end=1966,
-)
-
-
-_METHODDESCRIPTORPROTO = descriptor.Descriptor(
- name='MethodDescriptorProto',
- full_name='google.protobuf.MethodDescriptorProto',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
- number=4, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=1968,
- serialized_end=2095,
-)
-
-
-_FILEOPTIONS = descriptor.Descriptor(
- name='FileOptions',
- full_name='google.protobuf.FileOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
- number=8, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
- number=10, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=3,
- number=9, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=1,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=4,
- number=16, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=True,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=5,
- number=17, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=True,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=6,
- number=18, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=True,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=7,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _FILEOPTIONS_OPTIMIZEMODE,
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=2098,
- serialized_end=2518,
-)
-
-
-_MESSAGEOPTIONS = descriptor.Descriptor(
- name='MessageOptions',
- full_name='google.protobuf.MessageOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
- number=1, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=2,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=2521,
- serialized_end=2705,
-)
-
-
-_FIELDOPTIONS = descriptor.Descriptor(
- name='FieldOptions',
- full_name='google.protobuf.FieldOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
- number=1, type=14, cpp_type=8, label=1,
- has_default_value=True, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
- number=2, type=8, cpp_type=7, label=1,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=2,
- number=3, type=8, cpp_type=7, label=1,
- has_default_value=True, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='experimental_map_key', full_name='google.protobuf.FieldOptions.experimental_map_key', index=3,
- number=9, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=4,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- _FIELDOPTIONS_CTYPE,
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=2708,
- serialized_end=2984,
-)
-
-
-_ENUMOPTIONS = descriptor.Descriptor(
- name='EnumOptions',
- full_name='google.protobuf.EnumOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=0,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=2986,
- serialized_end=3079,
-)
-
-
-_ENUMVALUEOPTIONS = descriptor.Descriptor(
- name='EnumValueOptions',
- full_name='google.protobuf.EnumValueOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=0,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=3081,
- serialized_end=3179,
-)
-
-
-_SERVICEOPTIONS = descriptor.Descriptor(
- name='ServiceOptions',
- full_name='google.protobuf.ServiceOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=0,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=3181,
- serialized_end=3277,
-)
-
-
-_METHODOPTIONS = descriptor.Descriptor(
- name='MethodOptions',
- full_name='google.protobuf.MethodOptions',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=0,
- number=999, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=True,
- extension_ranges=[(1000, 536870912), ],
- serialized_start=3279,
- serialized_end=3374,
-)
-
-
-_UNINTERPRETEDOPTION_NAMEPART = descriptor.Descriptor(
- name='NamePart',
- full_name='google.protobuf.UninterpretedOption.NamePart',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
- number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
- number=2, type=8, cpp_type=7, label=2,
- has_default_value=False, default_value=False,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=3587,
- serialized_end=3638,
-)
-
-_UNINTERPRETEDOPTION = descriptor.Descriptor(
- name='UninterpretedOption',
- full_name='google.protobuf.UninterpretedOption',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- descriptor.FieldDescriptor(
- name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
- number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
- number=4, type=4, cpp_type=4, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
- number=5, type=3, cpp_type=2, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
- number=6, type=1, cpp_type=5, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- descriptor.FieldDescriptor(
- name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
- number=7, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- options=None),
- ],
- extensions=[
- ],
- nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
- enum_types=[
- ],
- options=None,
- is_extendable=False,
- extension_ranges=[],
- serialized_start=3377,
- serialized_end=3638,
-)
-
-
-_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
-_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO;
-_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
-_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
-_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
-_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
-_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
-_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
-_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO;
-_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO;
-_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
-_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
-_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
-_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
-_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
-_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
-_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
-_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS;
-_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
-_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS;
-_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
-_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION;
-_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
-
-class FileDescriptorSet(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FILEDESCRIPTORSET
-
- # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
-
-class FileDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FILEDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
-
-class DescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
-
- class ExtensionRange(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE
-
- # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
- DESCRIPTOR = _DESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto)
-
-class FieldDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FIELDDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto)
-
-class EnumDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENUMDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto)
-
-class EnumValueDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto)
-
-class ServiceDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _SERVICEDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto)
-
-class MethodDescriptorProto(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _METHODDESCRIPTORPROTO
-
- # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto)
-
-class FileOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FILEOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions)
-
-class MessageOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _MESSAGEOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions)
-
-class FieldOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FIELDOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions)
-
-class EnumOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENUMOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions)
-
-class EnumValueOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENUMVALUEOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions)
-
-class ServiceOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _SERVICEOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions)
-
-class MethodOptions(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _METHODOPTIONS
-
- # @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions)
-
-class UninterpretedOption(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
-
- class NamePart(message.Message):
- __metaclass__ = reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART
-
- # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart)
- DESCRIPTOR = _UNINTERPRETEDOPTION
-
- # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
-
-# @@protoc_insertion_point(module_scope)
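The deleted module above is ordinary protoc-generated Python: each class gets its behavior from reflection.GeneratedProtocolMessageType plus its DESCRIPTOR, so callers only assign fields and serialize. A short usage sketch, assuming the upstream google.protobuf runtime (which this checked-in copy mirrors) is importable:

    from google.protobuf import descriptor_pb2

    proto = descriptor_pb2.FileDescriptorProto()
    proto.name = 'example.proto'
    proto.package = 'example'
    data = proto.SerializeToString()      # wire-format bytes

    parsed = descriptor_pb2.FileDescriptorProto()
    parsed.ParseFromString(data)
    assert parsed.package == 'example'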
diff --git a/third_party/protobuf2/protobuf.gyp b/third_party/protobuf2/protobuf.gyp
index c812d52..95272e9c 100644
--- a/third_party/protobuf2/protobuf.gyp
+++ b/third_party/protobuf2/protobuf.gyp
@@ -78,7 +78,6 @@
'defines': [
'GOOGLE_PROTOBUF_NO_RTTI',
],
-
'direct_dependent_settings': {
'include_dirs': [
'<(config_h_dir)',
@@ -219,86 +218,6 @@
'src/src',
],
},
- {
- # Generate the python module needed by all protoc-generated Python code.
- 'target_name': 'py_proto',
- 'type': 'none',
- 'copies': [
- {
- 'destination': '<(PRODUCT_DIR)/pyproto/google/',
- 'files': [
- # google/ module gets an empty __init__.py.
- '__init__.py',
- ],
- },
- {
- 'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf',
- 'files': [
- 'src/python/google/protobuf/__init__.py',
- 'src/python/google/protobuf/descriptor.py',
- 'src/python/google/protobuf/message.py',
- 'src/python/google/protobuf/reflection.py',
- 'src/python/google/protobuf/service.py',
- 'src/python/google/protobuf/service_reflection.py',
- 'src/python/google/protobuf/text_format.py',
-
- # TODO(ncarter): protoc's python generator treats descriptor.proto
- # specially, but it's not possible to trigger the special treatment
- # unless you run protoc from ./src/src (the treatment is based
- # on the path to the .proto file matching a constant exactly).
- # I'm not sure how to convince gyp to execute a rule from a
- # different directory. Until this is resolved, use a copy of
- # descriptor_pb2.py that I manually generated.
- 'descriptor_pb2.py',
- ],
- },
- {
- 'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf/internal',
- 'files': [
- 'src/python/google/protobuf/internal/__init__.py',
- 'src/python/google/protobuf/internal/containers.py',
- 'src/python/google/protobuf/internal/decoder.py',
- 'src/python/google/protobuf/internal/encoder.py',
- 'src/python/google/protobuf/internal/generator_test.py',
- 'src/python/google/protobuf/internal/message_listener.py',
- 'src/python/google/protobuf/internal/type_checkers.py',
- 'src/python/google/protobuf/internal/wire_format.py',
- ],
- },
- ],
- # # We can't generate a proper descriptor_pb2.py -- see earlier comment.
- # 'rules': [
- # {
- # 'rule_name': 'genproto',
- # 'extension': 'proto',
- # 'inputs': [
- # '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
- # ],
- # 'variables': {
- # # The protoc compiler requires a proto_path argument with the
- # # directory containing the .proto file.
- # 'rule_input_relpath': 'src/src/google/protobuf',
- # },
- # 'outputs': [
- # '<(PRODUCT_DIR)/pyproto/google/protobuf/<(RULE_INPUT_ROOT)_pb2.py',
- # ],
- # 'action': [
- # '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
- # '-I./src/src',
- # '-I./src',
- # '--python_out=<(PRODUCT_DIR)/pyproto/google/protobuf',
- # 'google/protobuf/descriptor.proto',
- # ],
- # 'message': 'Generating Python code from <(RULE_INPUT_PATH)',
- # },
- # ],
- # 'dependencies': [
- # 'protoc#host',
- # ],
- # 'sources': [
- # 'src/src/google/protobuf/descriptor.proto',
- # ],
- },
],
}