summaryrefslogtreecommitdiffstats
path: root/chrome/common/extensions/docs/server2/integration_test.py
blob: 7bc6dc398c76199ce37f05d72d093dd97341ee46 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Run build_server so that files needed by tests are copied to the local
# third_party directory.
import build_server
build_server.main()

import json
import optparse
import os
import posixpath
import sys
import time
import unittest

from branch_utility import BranchUtility
from chroot_file_system import ChrootFileSystem
from extensions_paths import CONTENT_PROVIDERS, EXTENSIONS, PUBLIC_TEMPLATES
from fake_fetchers import ConfigureFakeFetchers
from third_party.json_schema_compiler import json_parse
from handler import Handler
from link_error_detector import LinkErrorDetector, StringifyBrokenLinks
from local_file_system import LocalFileSystem
from local_renderer import LocalRenderer
from servlet import Request
from test_util import ChromiumPath, DisableLogging, EnableLogging, ReadFile


# Arguments set up if __main__ specifies them.
_EXPLICIT_TEST_FILES = None
_REBASE = False
_VERBOSE = False


def _ToPosixPath(os_path):
  return os_path.replace(os.sep, '/')


def _FilterHidden(paths):
  '''Returns a list of the non-hidden paths from |paths|.
  '''
  # Hidden files start with a '.' but paths like './foo' and '../foo' are not
  # hidden.
  return [path for path in paths if (not path.startswith('.')) or
                                     path.startswith('./') or
                                     path.startswith('../')]


def _GetPublicFiles():
  '''Gets all public file paths mapped to their contents.
  '''
  def collect(chromium_dir, serve_from):
    '''Reads every non-hidden file under |chromium_dir| and maps its request
    path (rooted at |serve_from|) to that file's contents.
    '''
    base = ChromiumPath(chromium_dir)
    collected = {}
    for root, dirs, files in os.walk(base, topdown=True):
      relative_root = root[len(base):].lstrip(os.path.sep)
      # Prune hidden directories in-place so os.walk never descends into them.
      dirs[:] = _FilterHidden(dirs)
      for name in _FilterHidden(files):
        request_path = posixpath.join(serve_from, relative_root, name)
        with open(os.path.join(root, name), 'r') as f:
          collected[request_path] = f.read()
    return collected

  # Public file locations are defined in content_providers.json, sort of.  Epic
  # hack to pull them out; list all the files from the directories that
  # Chromium content providers ask for.
  content_providers = json_parse.Parse(ReadFile(CONTENT_PROVIDERS))
  public_files = {}
  for provider in content_providers.itervalues():
    if 'chromium' not in provider:
      continue
    public_files.update(collect(provider['chromium']['dir'],
                                provider['serveFrom']))
  return public_files


class IntegrationTest(unittest.TestCase):
  '''End-to-end tests for the docs server: runs the cron job then renders
  every public file through the real request Handler. Network access is
  replaced by fake fetchers in setUp, so everything runs offline.
  '''

  def setUp(self):
    # Swap network-backed fetchers for in-memory fakes so tests are offline
    # and deterministic.
    ConfigureFakeFetchers()

  @EnableLogging('info')
  def testCronAndPublicFiles(self):
    '''Runs cron then requests every public file. Cron needs to be run first
    because the public file requests are offline.
    '''
    # Skipped when an explicit file list was given on the command line;
    # testExplicitFiles handles that mode instead.
    if _EXPLICIT_TEST_FILES is not None:
      return

    print('Running cron...')
    start_time = time.time()
    try:
      # The cron endpoint warms every cache; anything other than a 200 with
      # body 'Success' means some page failed to render server-side.
      response = Handler(Request.ForTest('/_cron')).Get()
      self.assertEqual(200, response.status)
      self.assertEqual('Success', response.content.ToString())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

    # TODO(kalman): Re-enable this, but it takes about an hour at the moment,
    # presumably because every page now has a lot of links on it from the
    # topnav.

    #print("Checking for broken links...")
    #start_time = time.time()
    #link_error_detector = LinkErrorDetector(
    #    # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
    #    ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
    #    lambda path: Handler(Request.ForTest(path)).Get(),
    #    'templates/public',
    #    ('extensions/index.html', 'apps/about_apps.html'))

    #broken_links = link_error_detector.GetBrokenLinks()
    #if broken_links:
    #  print('Found %d broken links.' % (
    #    len(broken_links)))
    #  if _VERBOSE:
    #    print(StringifyBrokenLinks(broken_links))

    #broken_links_set = set(broken_links)

    #known_broken_links_path = os.path.join(
    #    sys.path[0], 'known_broken_links.json')
    #try:
    #  with open(known_broken_links_path, 'r') as f:
    #    # The JSON file converts tuples and sets into lists, and for this
    #    # set union/difference logic they need to be converted back.
    #    known_broken_links = set(tuple(item) for item in json.load(f))
    #except IOError:
    #  known_broken_links = set()

    #newly_broken_links = broken_links_set - known_broken_links
    #fixed_links = known_broken_links - broken_links_set

    #print('Took %s seconds.' % (time.time() - start_time))

    #print('Searching for orphaned pages...')
    #start_time = time.time()
    #orphaned_pages = link_error_detector.GetOrphanedPages()
    #if orphaned_pages:
    #  # TODO(jshumway): Test should fail when orphaned pages are detected.
    #  print('Found %d orphaned pages:' % len(orphaned_pages))
    #  for page in orphaned_pages:
    #    print(page)
    #print('Took %s seconds.' % (time.time() - start_time))

    public_files = _GetPublicFiles()

    print('Rendering %s public files...' % len(public_files.keys()))
    start_time = time.time()
    try:
      for path, content in public_files.iteritems():
        assert not path.startswith('/')
        # redirects.json files configure redirects; they are not rendered
        # pages themselves.
        if path.endswith('redirects.json'):
          continue

        def check_result(response):
          # Verifies a successful render of |path|.
          self.assertEqual(200, response.status,
              'Got %s when rendering %s' % (response.status, path))
          # This is reaaaaally rough since usually these will be tiny templates
          # that render large files. At least it'll catch zero-length responses.
          self.assertTrue(len(response.content) >= len(content),
              'Rendered content length was %s vs template content length %s '
              'when rendering %s' % (len(response.content), len(content), path))

        check_result(Handler(Request.ForTest(path)).Get())

        if path.startswith(('apps/', 'extensions/')):
          # Make sure that leaving out the .html will temporarily redirect to
          # the path with the .html for APIs and articles.
          if '/examples/' not in path:
            base, _ = posixpath.splitext(path)
            self.assertEqual(
                ('/' + path, False),
                Handler(Request.ForTest(base)).Get().GetRedirect(),
                '%s did not (temporarily) redirect to %s.html' % (path, path))

          # Make sure including a channel will permanently redirect to the same
          # path without a channel.
          for channel in BranchUtility.GetAllChannelNames():
            redirect_result = Handler(
                Request.ForTest(posixpath.join(channel, path))).Get()
            self.assertEqual(
                ('/' + path, True),
                redirect_result.GetRedirect(),
                '%s did not redirect to strip channel %s' % (path, channel))

        # Samples are internationalized, test some locales.
        if path.endswith('/samples.html'):
          for lang in ('en-US', 'es', 'ar'):
            check_result(Handler(Request.ForTest(
                path,
                headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
    finally:
      print('Took %s seconds' % (time.time() - start_time))

    #if _REBASE:
    #  print('Rebasing broken links with %s newly broken and %s fixed links.' %
    #        (len(newly_broken_links), len(fixed_links)))
    #  with open(known_broken_links_path, 'w') as f:
    #    json.dump(broken_links, f,
    #              indent=2, separators=(',', ': '), sort_keys=True)
    #else:
    #  if fixed_links or newly_broken_links:
    #    print('**********************************************\n'
    #          'CHANGE DETECTED IN BROKEN LINKS WITHOUT REBASE\n'
    #          '**********************************************')
    #    print('Found %s broken links, and some have changed. '
    #          'If this is acceptable or expected then run %s with the --rebase '
    #          'option.' % (len(broken_links), os.path.split(__file__)[-1]))
    #  elif broken_links:
    #    print('%s existing broken links' % len(broken_links))
    #  if fixed_links:
    #    print('%s broken links have been fixed:' % len(fixed_links))
    #    print(StringifyBrokenLinks(fixed_links))
    #  if newly_broken_links:
    #    print('There are %s new broken links:' % len(newly_broken_links))
    #    print(StringifyBrokenLinks(newly_broken_links))
    #    self.fail('See logging for details.')

  # TODO(kalman): Move this test elsewhere, it's not an integration test.
  # Perhaps like "presubmit_tests" or something.
  def testExplicitFiles(self):
    '''Tests just the files in _EXPLICIT_TEST_FILES.
    '''
    if _EXPLICIT_TEST_FILES is None:
      return
    for filename in _EXPLICIT_TEST_FILES:
      print('Rendering %s...' % filename)
      start_time = time.time()
      try:
        # LocalRenderer renders directly from the local file system without
        # going through cron.
        response = LocalRenderer.Render(_ToPosixPath(filename))
        self.assertEqual(200, response.status)
        self.assertTrue(response.content != '')
      finally:
        print('Took %s seconds' % (time.time() - start_time))

    # TODO(jshumway): Check page for broken links (currently prohibited by the
    # time it takes to render the pages).

  @DisableLogging('warning')
  def testFileNotFound(self):
    '''Requests a nonexistent page and expects a 404.
    '''
    response = Handler(Request.ForTest('/extensions/notfound.html')).Get()
    self.assertEqual(404, response.status)

if __name__ == '__main__':
  parser = optparse.OptionParser()
  parser.add_option('-a', '--all', action='store_true', default=False,
                    help='Render all pages, not just the one specified')
  parser.add_option('-r', '--rebase', action='store_true', default=False,
                    help='Rewrites the known_broken_links.json file with '
                         'the current set of broken links')
  parser.add_option('-v', '--verbose', action='store_true', default=False,
                    help='Show verbose output like currently broken links')
  (opts, args) = parser.parse_args()
  # Positional arguments are an explicit list of files to render (consumed by
  # testExplicitFiles); with --all they are ignored and every page is rendered.
  if not opts.all:
    _EXPLICIT_TEST_FILES = args
  _REBASE = opts.rebase
  _VERBOSE = opts.verbose
  # Kill sys.argv because we have our own flags.
  sys.argv = [sys.argv[0]]
  unittest.main()