Diffstat (limited to 'tools/site_compare/commands')
-rw-r--r--  tools/site_compare/commands/compare2.py    38
-rw-r--r--  tools/site_compare/commands/maskmaker.py   46
-rw-r--r--  tools/site_compare/commands/measure.py      4
-rw-r--r--  tools/site_compare/commands/scrape.py       6
-rw-r--r--  tools/site_compare/commands/timeload.py    38
5 files changed, 66 insertions, 66 deletions
diff --git a/tools/site_compare/commands/compare2.py b/tools/site_compare/commands/compare2.py
index e970c24..045141b 100644
--- a/tools/site_compare/commands/compare2.py
+++ b/tools/site_compare/commands/compare2.py
@@ -29,7 +29,7 @@ def CreateCommand(cmdline):
"Compares the output of two browsers on the same URL or list of URLs",
ValidateCompare2,
ExecuteCompare2)
-
+
cmd.AddArgument(
["-b1", "--browser1"], "Full path to first browser's executable",
type="readfile", metaname="PATH", required=True)
@@ -81,7 +81,7 @@ def CreateCommand(cmdline):
cmd.AddArgument(
["-d", "--diffdir"], "Path to hold the difference of comparisons that fail")
-
+
def ValidateCompare2(command):
"""Validate the arguments to compare2. Raises ParseError if failed."""
executables = [".exe", ".com", ".bat"]
@@ -102,68 +102,68 @@ def ExecuteCompare2(command):
endline = command["--endline"]
url_list = [url.strip() for url in
open(command["--list"], "r").readlines()[startline:endline]]
-
+
log_file = open(command["--logfile"], "w")
outdir = command["--outdir"]
if not outdir: outdir = tempfile.gettempdir()
-
+
scrape_info_list = []
-
+
class ScrapeInfo(object):
"""Helper class to hold information about a scrape."""
__slots__ = ["browser_path", "scraper", "outdir", "result"]
-
+
for index in xrange(1, 3):
scrape_info = ScrapeInfo()
scrape_info.browser_path = command["--browser%d" % index]
scrape_info.scraper = scrapers.GetScraper(
(command["--browser"], command["--browser%dver" % index]))
-
+
if command["--browser%dname" % index]:
scrape_info.outdir = os.path.join(outdir,
command["--browser%dname" % index])
else:
scrape_info.outdir = os.path.join(outdir, str(index))
-
+
drivers.windowing.PreparePath(scrape_info.outdir)
scrape_info_list.append(scrape_info)
-
+
compare = operators.GetOperator("equals_with_mask")
-
+
for url in url_list:
success = True
-
+
for scrape_info in scrape_info_list:
scrape_info.result = scrape_info.scraper.Scrape(
[url], scrape_info.outdir, command["--size"], (0, 0),
command["--timeout"], path=scrape_info.browser_path)
-
+
if not scrape_info.result:
scrape_info.result = "success"
else:
success = False
-
+
result = "unknown"
-
+
if success:
result = "equal"
-
+
file1 = drivers.windowing.URLtoFilename(
url, scrape_info_list[0].outdir, ".bmp")
file2 = drivers.windowing.URLtoFilename(
url, scrape_info_list[1].outdir, ".bmp")
-
+
comparison_result = compare.Compare(file1, file2,
maskdir=command["--maskdir"])
-
+
if comparison_result is not None:
result = "not-equal"
-
+
if command["--diffdir"]:
comparison_result[1].save(
drivers.windowing.URLtoFilename(url, command["--diffdir"], ".bmp"))
-
+
# TODO(jhaas): maybe use the logging module rather than raw file writes
log_file.write("%s %s %s %s\n" % (url,
scrape_info_list[0].result,
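The compare2 flow above scrapes the same URL in both browsers, then hands the two bitmaps to the "equals_with_mask" operator. A minimal sketch of what such a masked comparison amounts to, assuming PIL, equal-size RGB scrapes, and a white-means-compare mask convention; CompareWithMask is a hypothetical helper, not the operator's actual interface:

# Hypothetical sketch of a masked bitmap comparison in the spirit of
# the "equals_with_mask" operator used by compare2. Assumes PIL, RGB
# scrapes of equal size, and a mask where white = compare, black = ignore.
from PIL import Image, ImageChops

def CompareWithMask(file1, file2, mask_file=None):
  """Return None if the two images match under the mask, else the diff."""
  diff = ImageChops.difference(Image.open(file1), Image.open(file2))
  if mask_file:
    # Multiplying by a black (0) mask pixel zeroes out the difference there
    diff = ImageChops.multiply(diff, Image.open(mask_file).convert(diff.mode))
  # getextrema() is (min, max) per band; all zeros means a masked match
  if max(diff.getextrema()) == (0, 0):
    return None
  return diff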
diff --git a/tools/site_compare/commands/maskmaker.py b/tools/site_compare/commands/maskmaker.py
index a5bf6e4..73b732c 100644
--- a/tools/site_compare/commands/maskmaker.py
+++ b/tools/site_compare/commands/maskmaker.py
@@ -96,7 +96,7 @@ def ValidateMaskmaker(command):
def ExecuteMaskmaker(command):
"""Performs automatic mask generation."""
-
+
# Get the list of URLs to generate masks for
class MaskmakerURL(object):
"""Helper class for holding information about a URL passed to maskmaker."""
@@ -105,7 +105,7 @@ def ExecuteMaskmaker(command):
self.url = url
self.consecutive_successes = 0
self.errors = 0
-
+
if command["--url"]:
url_list = [MaskmakerURL(command["--url"])]
else:
@@ -116,22 +116,22 @@ def ExecuteMaskmaker(command):
endline = command["--endline"]
url_list = [MaskmakerURL(url.strip()) for url in
open(command["--list"], "r").readlines()[startline:endline]]
-
+
complete_list = []
error_list = []
-
+
outdir = command["--outdir"]
scrapes = command["--scrapes"]
errors = command["--errors"]
size = command["--size"]
scrape_pass = 0
-
+
scrapedir = command["--scrapedir"]
if not scrapedir: scrapedir = tempfile.gettempdir()
-
+
# Get the scraper
scraper = scrapers.GetScraper((command["--browser"], command["--browserver"]))
-
+
# Repeatedly iterate through the list of URLs until either every URL has
# a successful mask or too many errors, or we've exceeded the giveup limit
while url_list and scrape_pass < command["--giveup"]:
@@ -157,31 +157,31 @@ def ExecuteMaskmaker(command):
print " %r does not exist, creating" % mask_filename
mask = Image.new("1", size, 1)
mask.save(mask_filename)
-
+
# Find the stored scrape path
mask_scrape_dir = os.path.join(
scrapedir, os.path.splitext(os.path.basename(mask_filename))[0])
drivers.windowing.PreparePath(mask_scrape_dir)
-
+
# Find the baseline image
mask_scrapes = os.listdir(mask_scrape_dir)
mask_scrapes.sort()
-
+
if not mask_scrapes:
print " No baseline image found, mask will not be updated"
baseline = None
else:
baseline = Image.open(os.path.join(mask_scrape_dir, mask_scrapes[0]))
-
+
mask_scrape_filename = os.path.join(mask_scrape_dir,
time.strftime("%y%m%d-%H%M%S.bmp"))
-
+
# Do the scrape
result = scraper.Scrape(
[url.url], mask_scrape_dir, size, (0, 0),
command["--timeout"], path=command["--browserpath"],
filename=mask_scrape_filename)
-
+
if result:
# Return value other than None means an error
print " Scrape failed with error '%r'" % result
@@ -189,16 +189,16 @@ def ExecuteMaskmaker(command):
if url.errors >= errors:
print " ** Exceeded maximum error count for this URL, giving up"
continue
-
+
# Load the new scrape
scrape = Image.open(mask_scrape_filename)
-
+
# Calculate the difference between the new scrape and the baseline,
# subject to the current mask
if baseline:
diff = ImageChops.multiply(ImageChops.difference(scrape, baseline),
mask.convert(scrape.mode))
-
+
# If the difference is none, there's nothing to update
if max(diff.getextrema()) == (0, 0):
print " Scrape identical to baseline, no change in mask"
@@ -221,10 +221,10 @@ def ExecuteMaskmaker(command):
# a monochrome bitmap. If the original RGB image were converted
# directly to monochrome, PIL would dither it.
diff = diff.convert("L").point([255]+[0]*255, "1")
-
+
# count the number of different pixels
diff_pixels = diff.getcolors()[0][0]
-
+
# is this too much?
diff_pixel_percent = diff_pixels * 100.0 / (mask.size[0]*mask.size[1])
if diff_pixel_percent > command["--threshhold"]:
@@ -234,10 +234,10 @@ def ExecuteMaskmaker(command):
print " Scrape differed in %d pixels, updating mask" % diff_pixels
mask = ImageChops.multiply(mask, diff)
mask.save(mask_filename)
-
+
# reset the number of consecutive "good" scrapes
url.consecutive_successes = 0
-
+
# Remove URLs whose mask is deemed done
complete_list.extend(
[url for url in url_list if url.consecutive_successes >= scrapes])
@@ -247,16 +247,16 @@ def ExecuteMaskmaker(command):
url for url in url_list if
url.consecutive_successes < scrapes and
url.errors < errors]
-
+
scrape_pass += 1
print "**Done with scrape pass %d\n" % scrape_pass
-
+
if scrape_pass >= command["--giveup"]:
print "**Exceeded giveup threshhold. Giving up."
else:
print "Waiting %d seconds..." % command["--wait"]
time.sleep(command["--wait"])
-
+
print
print "*** MASKMAKER COMPLETE ***"
print "Summary report:"
diff --git a/tools/site_compare/commands/measure.py b/tools/site_compare/commands/measure.py
index 1815a3d2..086fcbe 100644
--- a/tools/site_compare/commands/measure.py
+++ b/tools/site_compare/commands/measure.py
@@ -40,14 +40,14 @@ def CreateCommand(cmdline):
def ExecuteMeasure(command):
"""Executes the Measure command."""
-
+
def LogResult(url, proc, wnd, result):
"""Write the result of the browse to the log file."""
log_file.write(result)
log_file = open(command["--logfile"], "w")
- browser_iterate.Iterate(command, LogResult)
+ browser_iterate.Iterate(command, LogResult)
# Close the log file and return. We're done.
log_file.close()
diff --git a/tools/site_compare/commands/scrape.py b/tools/site_compare/commands/scrape.py
index 21a00ce..1c47cab 100644
--- a/tools/site_compare/commands/scrape.py
+++ b/tools/site_compare/commands/scrape.py
@@ -41,7 +41,7 @@ def CreateCommand(cmdline):
def ExecuteScrape(command):
"""Executes the Scrape command."""
-
+
def ScrapeResult(url, proc, wnd, result):
"""Capture and save the scrape."""
if log_file: log_file.write(result)
@@ -49,12 +49,12 @@ def ExecuteScrape(command):
# Scrape the page
image = windowing.ScrapeWindow(wnd)
filename = windowing.URLtoFilename(url, command["--outdir"], ".bmp")
- image.save(filename)
+ image.save(filename)
if command["--logfile"]: log_file = open(command["--logfile"], "w")
else: log_file = None
- browser_iterate.Iterate(command, ScrapeResult)
+ browser_iterate.Iterate(command, ScrapeResult)
# Close the log file and return. We're done.
if log_file: log_file.close()
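Both measure and scrape delegate the actual browsing to browser_iterate.Iterate, which drives the browser over the URL list and invokes the supplied callback once per page. The (url, proc, wnd, result) signature below is taken from the callbacks in this diff; everything else is an illustrative stub, not browser_iterate's actual implementation:

# Hypothetical example of the Iterate callback convention used by the
# measure and scrape commands; only the callback signature comes from
# the diff above, the callback body is illustrative.
def PrintResult(url, proc, wnd, result):
  """Example callback: report each page's outcome to stdout."""
  print "%s: %s" % (url, result)

# Wired up the same way as LogResult and ScrapeResult above:
#   browser_iterate.Iterate(command, PrintResult)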
diff --git a/tools/site_compare/commands/timeload.py b/tools/site_compare/commands/timeload.py
index 554d3b6..ca5b0db 100644
--- a/tools/site_compare/commands/timeload.py
+++ b/tools/site_compare/commands/timeload.py
@@ -6,7 +6,7 @@
"""SiteCompare command to time page loads
Loads a series of URLs in a series of browsers (and browser versions)
-and measures how long the page takes to load in each. Outputs a
+and measures how long the page takes to load in each. Outputs a
comma-delimited file. The first line is "URL,[browser names]", each
additional line is a URL followed by comma-delimited times (in seconds),
or the string "timeout" or "crashed".
@@ -67,44 +67,44 @@ def CreateCommand(cmdline):
cmd.AddArgument(
["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
-
+
def ExecuteTimeLoad(command):
"""Executes the TimeLoad command."""
browsers = command["--browsers"].split(",")
num_browsers = len(browsers)
-
+
if command["--browserversions"]:
browser_versions = command["--browserversions"].split(",")
else:
browser_versions = [None] * num_browsers
-
+
if command["--browserpaths"]:
browser_paths = command["--browserpaths"].split(",")
else:
browser_paths = [None] * num_browsers
-
+
if len(browser_versions) != num_browsers:
  raise ValueError(
      "--browserversions must be same length as --browsers")
if len(browser_paths) != num_browsers:
  raise ValueError(
      "--browserpaths must be same length as --browsers")
-
+
bad_browsers = [b for b in browsers if b not in ["chrome", "ie", "firefox"]]
if bad_browsers:
  raise ValueError("unknown browsers: %r" % bad_browsers)
-
+
scraper_list = []
-
+
for b in xrange(num_browsers):
version = browser_versions[b]
if not version: version = None
-
+
scraper = scrapers.GetScraper( (browsers[b], version) )
if not scraper:
- raise ValueError("could not find scraper for (%r, %r)" %
+ raise ValueError("could not find scraper for (%r, %r)" %
(browsers[b], version))
scraper_list.append(scraper)
-
+
if command["--url"]:
url_list = [command["--url"]]
else:
@@ -115,32 +115,32 @@ def ExecuteTimeLoad(command):
endline = command["--endline"]
url_list = [url.strip() for url in
open(command["--list"], "r").readlines()[startline:endline]]
-
+
log_file = open(command["--logfile"], "w")
-
+
log_file.write("URL")
for b in xrange(num_browsers):
log_file.write(",%s" % browsers[b])
-
+
if browser_versions[b]: log_file.write(" %s" % browser_versions[b])
log_file.write("\n")
-
+
results = {}
for url in url_list:
results[url] = [None] * num_browsers
-
+
for b in xrange(num_browsers):
result = scraper_list[b].Time(url_list, command["--size"],
command["--timeout"],
path=browser_paths[b])
-
+
for (url, time) in result:
results[url][b] = time
-
+
# output the results
for url in url_list:
log_file.write(url)
for b in xrange(num_browsers):
log_file.write(",%r" % results[url][b])
-
+
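For reference, the log that ExecuteTimeLoad writes is a plain comma-delimited table: a header row of "URL" plus browser names (with versions appended when given), then one row per URL holding a time in seconds, or "timeout"/"crashed", per browser. A small hedged sketch of reading it back, assuming exactly the format described in the module docstring; ReadTimeloadLog is hypothetical:

# Hypothetical reader for the comma-delimited log written above,
# assuming the format from timeload's docstring: a "URL,<browsers>"
# header row, then one row per URL of times or "timeout"/"crashed".
import csv

def ReadTimeloadLog(path):
  results = {}
  with open(path, "r") as f:
    reader = csv.reader(f)
    browsers = next(reader)[1:]  # drop the leading "URL" column
    for row in reader:
      results[row[0]] = dict(zip(browsers, row[1:]))
  return results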