From c2e8542075aa6d00d26a4af74ecd98db413324d2 Mon Sep 17 00:00:00 2001 From: Joseph Wynn Date: Wed, 18 Dec 2019 10:05:47 +1300 Subject: [PATCH 01/16] Install patched ujson in Docker image --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index e418b1978..9166ebe14 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,6 +5,7 @@ RUN apt-get update && \ apt-get install -y \ wget \ curl \ + git \ python \ python-pip \ xvfb \ @@ -61,7 +62,7 @@ RUN apt-get update && \ pillow \ psutil \ requests \ - ujson \ + git+git://github.com/marshallpierce/ultrajson.git@v1.35-gentoo-fixes \ tornado \ wsaccel \ xvfbwrapper \ From 064e3f36d7278d6e05d2aee1cca8908b9d255d2d Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 18 Dec 2019 10:18:44 -0800 Subject: [PATCH 02/16] Prep work for python 3 --- alive.py | 10 ++-- internal/adb.py | 23 ++++---- internal/android_browser.py | 7 ++- internal/blackbox_android.py | 23 ++++---- internal/chrome_android.py | 9 ++- internal/desktop_browser.py | 31 ++++++----- internal/devtools.py | 59 ++++++++++---------- internal/devtools_browser.py | 11 ++-- internal/firefox.py | 29 +++++----- internal/ios_device.py | 29 +++++----- internal/message_server.py | 9 ++- internal/microsoft_edge.py | 37 +++++++------ internal/optimization_checks.py | 31 ++++++----- internal/safari_ios.py | 47 ++++++++-------- internal/support/devtools_parser.py | 4 +- internal/support/firefox_log_parser.py | 11 ++-- internal/support/ios/usbmux.py | 6 +- internal/support/pcap-parser.py | 6 +- internal/support/trace_parser.py | 2 +- internal/support/visualmetrics.py | 64 ++++++++++----------- internal/traceroute.py | 2 +- internal/video_processing.py | 2 +- internal/webpagetest.py | 19 ++++--- wptagent.py | 77 ++++++++++++++------------ 24 files changed, 297 insertions(+), 251 deletions(-) diff --git a/alive.py b/alive.py index ac4cf65f8..c4ffe52b1 100644 --- a/alive.py +++ b/alive.py @@ -25,33 +25,33 @@ def main(): with open('/proc/uptime', 'r') as f_in: uptime_seconds = int(float(f_in.readline().split()[0])) if uptime_seconds < 3600: - print 'OK: Freshly booted ({0:d} seconds)'.format(uptime_seconds) + print('OK: Freshly booted ({0:d} seconds)'.format(uptime_seconds)) exit(0) elif platform.system() == "Windows": uptime_seconds = int(time.time()) - int(psutil.boot_time()) if uptime_seconds < 3600: - print 'OK: Freshly booted ({0:d} seconds)'.format(uptime_seconds) + print('OK: Freshly booted ({0:d} seconds)'.format(uptime_seconds)) exit(0) # Check if the watchdog file has been updated in the last hour. if options.file and os.path.isfile(options.file): elapsed = int(time.time() - os.path.getmtime(options.file)) if elapsed < 3600: - print 'OK: File last modified {0:d} seconds ago'.format(elapsed) + print('OK: File last modified {0:d} seconds ago'.format(elapsed)) exit(0) # Ping the provided address if requested. 
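    # Illustrative aside (not part of the patch): os.system() returns the
    # shell's exit status, so response == 0 below means at least one of the
    # two pings got a reply; any non-zero status falls through toward FAIL.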
if options.ping and platform.system() != "Windows": response = os.system('ping -c 2 -i 0.2 -n -W 1 {0} > /dev/null 2>&1'.format(options.ping)) if response == 0: - print 'OK: ping succeeded' + print('OK: ping succeeded') # Update the alive file to avoid pinging all the time if options.file: with open(options.file, 'a'): os.utime(options.file, None) exit(0) - print 'FAIL: No checks passed' + print('FAIL: No checks passed') if options.reboot: if platform.system() == 'Windows': subprocess.call(['shutdown', '/r', '/f']) diff --git a/internal/adb.py b/internal/adb.py index 57a986646..83fe2d0c1 100644 --- a/internal/adb.py +++ b/internal/adb.py @@ -9,7 +9,10 @@ import subprocess from threading import Timer import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic # cSpell:ignore vpndialogs, sysctl, iptables, ifconfig, dstaddr, clientidbase, nsecs @@ -33,7 +36,7 @@ def __init__(self, options, cache_dir): self.simplert_path = None self.simplert = None self.no_network_count = 0 - self.last_network_ok = monotonic.monotonic() + self.last_network_ok = monotonic() self.needs_exit = False self.rebooted = False self.vpn_forwarder = None @@ -305,7 +308,7 @@ def is_installed(self, package): def cleanup_device(self): """Do some device-level cleanup""" - start = monotonic.monotonic() + start = monotonic() # Simulate pressing the home button to dismiss any UI self.shell(['input', 'keyevent', '3']) # Clear notifications @@ -343,7 +346,7 @@ def cleanup_device(self): if out.find('com.motorola.ccc.ota/com.motorola.ccc.ota.ui.DownloadActivity') >= 0: self.shell(['am', 'force-stop', 'com.motorola.ccc.ota']) # reboot the phone and exit the agent if it is running EXTREMELY slowly - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start if elapsed > 300: logging.debug("Cleanup took %0.3f seconds. 
Rebooting the phone and restarting agent", elapsed) @@ -506,8 +509,8 @@ def check_simplert(self): self.su('setprop sys.usb.config adb') self.adb(['wait-for-device']) # wait up to 30 seconds for the interface to come up - end_time = monotonic.monotonic() + 30 - while not is_ready and monotonic.monotonic() < end_time: + end_time = monotonic() + 30 + while not is_ready and monotonic() < end_time: time.sleep(1) self.dismiss_vpn_dialog() is_ready = self.is_tun_interface_available() @@ -608,8 +611,8 @@ def check_gnirehtet(self): self.dismiss_vpn_dialog() # Simulate pressing the home button to dismiss any UI self.shell(['input', 'keyevent', '3']) - end = monotonic.monotonic() + 30 - while not is_ready and monotonic.monotonic() < end: + end = monotonic() + 30 + while not is_ready and monotonic() < end: if self.is_tun_interface_available(): is_ready = True else: @@ -672,7 +675,7 @@ def is_device_ready(self): net_ok = False if self.ping(self.ping_address) is not None: self.no_network_count = 0 - self.last_network_ok = monotonic.monotonic() + self.last_network_ok = monotonic() self.rebooted = False net_ok = True else: @@ -714,7 +717,7 @@ def is_device_ready(self): is_ready = False if not is_ready: needs_kick = False - elapsed = monotonic.monotonic() - self.last_network_ok + elapsed = monotonic() - self.last_network_ok if self.no_network_count > 20: needs_kick = True elif self.no_network_count > 1 and elapsed > 1800: diff --git a/internal/android_browser.py b/internal/android_browser.py index 1ea542c4e..d3e228a8b 100644 --- a/internal/android_browser.py +++ b/internal/android_browser.py @@ -9,7 +9,10 @@ import shutil import subprocess import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -281,7 +284,7 @@ def step_complete(self, task): task['page_data']['eventName'] = task['step_name'] if 'run_start_time' in task: task['page_data']['test_run_time_ms'] = \ - int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) + int(round((monotonic() - task['run_start_time']) * 1000.0)) path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) diff --git a/internal/blackbox_android.py b/internal/blackbox_android.py index e0cbc0a7e..5e786cf1d 100644 --- a/internal/blackbox_android.py +++ b/internal/blackbox_android.py @@ -6,7 +6,10 @@ import os import re import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic from .android_browser import AndroidBrowser CHROME_COMMAND_LINE_OPTIONS = [ @@ -104,7 +107,7 @@ def launch(self, job, task): def run_task(self, task): """Skip anything that isn't a navigate command""" logging.debug("Running test") - end_time = monotonic.monotonic() + task['test_time_limit'] + end_time = monotonic() + task['test_time_limit'] task['log_data'] = True task['current_step'] = 1 task['prefix'] = task['task_prefix'] @@ -112,9 +115,9 @@ def run_task(self, task): if self.job['video']: task['video_directories'].append(task['video_subdirectory']) task['step_name'] = 'Navigate' - task['run_start_time'] = monotonic.monotonic() + task['run_start_time'] = monotonic() self.on_start_recording(task) - while len(task['script']) and monotonic.monotonic() < end_time: + while len(task['script']) and monotonic() < end_time: command = task['script'].pop(0) if command['command'] == 'navigate': task['page_data']['URL'] = 
command['target'] @@ -211,10 +214,10 @@ def prepare_opera_mini_settings(self): def wait_for_network_idle(self): """Wait for 5 one-second intervals that receive less than 1KB""" logging.debug('Waiting for network idle') - end_time = monotonic.monotonic() + 60 + end_time = monotonic() + 60 self.adb.get_bytes_rx() idle_count = 0 - while idle_count < 5 and monotonic.monotonic() < end_time: + while idle_count < 5 and monotonic() < end_time: time.sleep(1) bytes_rx = self.adb.get_bytes_rx() logging.debug("Bytes received: %d", bytes_rx) @@ -227,12 +230,12 @@ def wait_for_page_load(self): """Once the video starts growing, wait for it to stop""" logging.debug('Waiting for the page to load') # Wait for the video to start (up to 30 seconds) - end_startup = monotonic.monotonic() + 30 - end_time = monotonic.monotonic() + self.task['time_limit'] + end_startup = monotonic() + 30 + end_time = monotonic() + self.task['time_limit'] last_size = self.adb.get_video_size() video_started = False bytes_rx = self.adb.get_bytes_rx() - while not video_started and monotonic.monotonic() < end_startup: + while not video_started and monotonic() < end_startup: time.sleep(5) video_size = self.adb.get_video_size() bytes_rx = self.adb.get_bytes_rx() @@ -243,7 +246,7 @@ def wait_for_page_load(self): video_started = True # Wait for the activity to stop video_idle_count = 0 - while video_idle_count <= 3 and monotonic.monotonic() < end_time: + while video_idle_count <= 3 and monotonic() < end_time: time.sleep(5) video_size = self.adb.get_video_size() bytes_rx = self.adb.get_bytes_rx() diff --git a/internal/chrome_android.py b/internal/chrome_android.py index 845b2fefa..683a4c2be 100644 --- a/internal/chrome_android.py +++ b/internal/chrome_android.py @@ -8,7 +8,10 @@ import re import shutil import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic from .devtools_browser import DevtoolsBrowser from .android_browser import AndroidBrowser @@ -252,9 +255,9 @@ def write_prefs(self, prefs, file_base): def get_devtools_socket(self): """Get the socket name of the remote devtools socket. @..._devtools_remote""" socket_name = None - end_time = monotonic.monotonic() + 120 + end_time = monotonic() + 120 time.sleep(1) - while socket_name is None and monotonic.monotonic() < end_time: + while socket_name is None and monotonic() < end_time: out = self.adb.shell(['cat', '/proc/net/unix']) if out is not None: for line in out.splitlines(): diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index c0eb36e77..4a0b32d60 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -13,7 +13,10 @@ import subprocess import threading import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -331,15 +334,15 @@ def wait_for_idle(self): if cpu_count > 0: target_pct = 50. 
/ float(cpu_count) idle_start = None - end_time = monotonic.monotonic() + self.START_BROWSER_TIME_LIMIT + end_time = monotonic() + self.START_BROWSER_TIME_LIMIT idle = False - while not idle and monotonic.monotonic() < end_time: - check_start = monotonic.monotonic() + while not idle and monotonic() < end_time: + check_start = monotonic() pct = psutil.cpu_percent(interval=0.1) if pct <= target_pct: if idle_start is None: idle_start = check_start - if monotonic.monotonic() - idle_start >= 0.4: + if monotonic() - idle_start >= 0.4: idle = True else: idle_start = None @@ -347,8 +350,8 @@ def wait_for_idle(self): def clear_profile(self, task): """Delete the browser profile directory""" if os.path.isdir(task['profile']): - end_time = monotonic.monotonic() + 30 - while monotonic.monotonic() < end_time: + end_time = monotonic() + 30 + while monotonic() < end_time: try: shutil.rmtree(task['profile']) except Exception: @@ -413,9 +416,9 @@ def on_start_recording(self, task): logging.debug(' '.join(args)) self.tcpdump = subprocess.Popen(args) # Wait for the capture file to start growing - end_time = monotonic.monotonic() + 5 + end_time = monotonic() + 5 started = False - while not started and monotonic.monotonic() < end_time: + while not started and monotonic() < end_time: if os.path.isfile(self.pcap_file): started = True time.sleep(0.1) @@ -469,10 +472,10 @@ def on_start_recording(self, task): else: self.ffmpeg = subprocess.Popen(args) # Wait up to 5 seconds for something to be captured - end_time = monotonic.monotonic() + 5 + end_time = monotonic() + 5 started = False initial_size = None - while not started and monotonic.monotonic() < end_time: + while not started and monotonic() < end_time: if os.path.isfile(task['video_file']): video_size = os.path.getsize(task['video_file']) if initial_size == None: @@ -645,7 +648,7 @@ def step_complete(self, task): task['page_data']['eventName'] = task['step_name'] if 'run_start_time' in task: task['page_data']['test_run_time_ms'] = \ - int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) + int(round((monotonic() - task['run_start_time']) * 1000.0)) path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) @@ -692,14 +695,14 @@ def get_net_bytes(self): def background_thread(self): """Background thread for monitoring CPU and bandwidth usage""" import psutil - last_time = start_time = monotonic.monotonic() + last_time = start_time = monotonic() last_bytes = self.get_net_bytes() snapshot = {'time': 0, 'cpu': 0.0, 'bw': 0} self.usage_queue.put(snapshot) while self.recording: snapshot = {'bw': 0} snapshot['cpu'] = psutil.cpu_percent(interval=0.1) - now = monotonic.monotonic() + now = monotonic() snapshot['time'] = int((now - start_time) * 1000) # calculate the bandwidth over the last interval in Kbps bytes_in = self.get_net_bytes() diff --git a/internal/devtools.py b/internal/devtools.py index 3194cbce8..8ea9aebb1 100644 --- a/internal/devtools.py +++ b/internal/devtools.py @@ -12,7 +12,10 @@ import time import zipfile from urlparse import urlsplit -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -36,7 +39,7 @@ def __init__(self, options, job, task, use_devtools_video): self.main_frame = None self.response_started = False self.is_navigating = False - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() 
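        # Illustrative aside (not part of the patch): last_activity gates the
        # network-idle wait in wait_for_page_load(), so it must come from a
        # monotonic clock; time.time() can step backwards under NTP adjustment
        # and would corrupt the elapsed-time math.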
self.dev_tools_file = None self.trace_file = None self.trace_enabled = False @@ -105,15 +108,15 @@ def wait_for_available(self, timeout): import requests proxies = {"http": None, "https": None} ret = False - end_time = monotonic.monotonic() + timeout - while not ret and monotonic.monotonic() < end_time: + end_time = monotonic() + timeout + while not ret and monotonic() < end_time: try: response = requests.get(self.url, timeout=timeout, proxies=proxies) if len(response.text): tabs = response.json() logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) if len(tabs): - for index in xrange(len(tabs)): + for index in range(len(tabs)): if 'type' in tabs[index] and \ tabs[index]['type'] == 'page' and \ 'webSocketDebuggerUrl' in tabs[index] and \ @@ -131,8 +134,8 @@ def connect(self, timeout): session = requests.session() proxies = {"http": None, "https": None} ret = False - end_time = monotonic.monotonic() + timeout - while not ret and monotonic.monotonic() < end_time: + end_time = monotonic() + timeout + while not ret and monotonic() < end_time: try: response = session.get(self.url, timeout=timeout, proxies=proxies) if len(response.text): @@ -140,7 +143,7 @@ def connect(self, timeout): logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) if len(tabs): websocket_url = None - for index in xrange(len(tabs)): + for index in range(len(tabs)): if 'type' in tabs[index] and \ tabs[index]['type'] == 'page' and \ 'webSocketDebuggerUrl' in tabs[index] and \ @@ -332,7 +335,7 @@ def start_recording(self): self.send_command('Tracing.start', {'traceConfig': trace_config}, wait=True) - now = monotonic.monotonic() + now = monotonic() if not self.task['stop_at_onload']: self.last_activity = now if self.page_loaded is not None: @@ -454,7 +457,7 @@ def collect_trace(self): """Stop tracing and collect the results""" if self.trace_enabled: self.trace_enabled = False - start = monotonic.monotonic() + start = monotonic() try: # Keep pumping messages until we get tracingComplete or # we get a gap of 30 seconds between messages @@ -462,7 +465,7 @@ def collect_trace(self): logging.info('Collecting trace events') done = False no_message_count = 0 - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start while not done and no_message_count < 30 and elapsed < 60: try: raw = self.websocket.get_message(1) @@ -480,7 +483,7 @@ def collect_trace(self): self.websocket.stop_processing_trace() except Exception: pass - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start logging.debug("Time to collect trace: %0.3f sec", elapsed) self.recording_video = False @@ -665,7 +668,7 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): msg = {'id': command_id, 'method': method, 'params': params} if wait: self.pending_commands.append(command_id) - end_time = monotonic.monotonic() + timeout + end_time = monotonic() + timeout self.send_command('Target.sendMessageToTarget', {'targetId': target_id, 'message': json.dumps(msg)}, wait=True, timeout=timeout) @@ -674,7 +677,7 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): ret = self.command_responses[command_id] del self.command_responses[command_id] else: - while ret is None and monotonic.monotonic() < end_time: + while ret is None and monotonic() < end_time: try: raw = self.websocket.get_message(1) if raw is not None and len(raw): @@ -697,8 +700,8 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): logging.debug("Sending: %s", out[:1000]) self.websocket.send(out) if 
wait: - end_time = monotonic.monotonic() + timeout - while ret is None and monotonic.monotonic() < end_time: + end_time = monotonic() + timeout + while ret is None and monotonic() < end_time: try: raw = self.websocket.get_message(1) if raw is not None and len(raw): @@ -717,7 +720,7 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): def wait_for_page_load(self): """Wait for the page load and activity to finish""" if self.websocket: - start_time = monotonic.monotonic() + start_time = monotonic() end_time = start_time + self.task['time_limit'] done = False interval = 1 @@ -733,7 +736,7 @@ def wait_for_page_load(self): except Exception: # ignore timeouts when we're in a polling read loop pass - now = monotonic.monotonic() + now = monotonic() elapsed_test = now - start_time if self.nav_error is not None: done = True @@ -760,7 +763,7 @@ def wait_for_page_load(self): def grab_screenshot(self, path, png=True, resize=0): """Save the screen shot (png or jpeg)""" if not self.main_thread_blocked: - response = self.send_command("Page.captureScreenshot", {}, wait=True, timeout=10) + response = self.send_command("Page.captureScreenshot", {}, wait=True, timeout=30) if response is not None and 'result' in response and 'data' in response['result']: resize_string = '' if not resize else '-resize {0:d}x{0:d} '.format(resize) if png: @@ -791,7 +794,7 @@ def colors_are_similar(self, color1, color2, threshold=15): """See if 2 given pixels are of similar color""" similar = True delta_sum = 0 - for value in xrange(3): + for value in range(3): delta = abs(color1[value] - color2[value]) delta_sum += delta if delta > threshold: @@ -871,14 +874,14 @@ def process_message(self, msg, target_id=None): def process_page_event(self, event, msg): """Process Page.* dev tools events""" if event == 'loadEventFired': - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() elif event == 'frameStartedLoading' and 'params' in msg and 'frameId' in msg['params']: if self.is_navigating and self.main_frame is None: self.is_navigating = False self.main_frame = msg['params']['frameId'] if self.main_frame == msg['params']['frameId']: logging.debug("Navigating main frame") - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() self.page_loaded = None elif event == 'frameNavigated' and 'params' in msg and \ 'frame' in msg['params'] and 'id' in msg['params']['frame']: @@ -897,7 +900,7 @@ def process_page_event(self, event, msg): self.task['page_data']['result'] = self.nav_error_code else: self.task['page_data']['result'] = 12999 - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() elif event == 'javascriptDialogOpening': result = self.send_command("Page.handleJavaScriptDialog", {"accept": False}, wait=True) if result is not None and 'error' in result: @@ -1033,7 +1036,7 @@ def process_network_event(self, event, msg, target_id=None): else: ignore_activity = True if not self.task['stop_at_onload'] and not ignore_activity: - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() def process_inspector_event(self, event): """Process Inspector.* dev tools events""" @@ -1111,7 +1114,7 @@ def bytes_from_range(self, text, range_info): else: # count the whole lines between the partial start and end lines if end_line > start_line + 1: - for row in xrange(start_line + 1, end_line): + for row in range(start_line + 1, end_line): byte_count += len(lines[row]) byte_count += len(lines[start_line][start_column:]) byte_count += end_column 
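The hunks above all lean on the same compatibility import that this commit adds
to the top of each module: prefer the `monotonic` PyPI backport when it is
installed (Python 2), and fall back to the standard library's `time.monotonic`
(Python 3.3+). A minimal standalone sketch of the idiom (the deadline loop is
illustrative, not lifted from the patch):

    try:
        from monotonic import monotonic  # PyPI backport; needed on Python 2
    except BaseException:
        from time import monotonic       # standard library on Python 3.3+

    # Call sites use the bare name, so either source is interchangeable:
    deadline = monotonic() + 30          # e.g. a 30-second command timeout
    while monotonic() < deadline:
        break  # poll for a response here; give up when the budget expires

The broad `except BaseException` mirrors the guard these files already use for
the optional `ujson` dependency (`import ujson as json`, falling back to the
stdlib `json`), which is also why patch 01 can swap in a patched ujson build in
the Dockerfile without touching any call sites.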
@@ -1161,7 +1164,7 @@ def received_message(self, raw): message = raw.data.decode(raw.encoding) if raw.encoding is not None else raw.data compare = message[:50] if self.path_base is not None and compare.find('"Tracing.dataCollected') > -1: - now = monotonic.monotonic() + now = monotonic() msg = json.loads(message) message = None if msg is not None: @@ -1227,7 +1230,7 @@ def stop_processing_trace(self): self.video_viewport = None self.last_image = None if self.trace_parser is not None and self.path_base is not None: - start = monotonic.monotonic() + start = monotonic() logging.debug("Post-Processing the trace netlog events") self.trace_parser.post_process_netlog_events() logging.debug("Processing the trace timeline events") @@ -1239,7 +1242,7 @@ def stop_processing_trace(self): self.trace_parser.WriteInteractive(self.path_base + '_interactive.json.gz') self.trace_parser.WriteNetlog(self.path_base + '_netlog_requests.json.gz') self.trace_parser.WriteV8Stats(self.path_base + '_v8stats.json.gz') - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start logging.debug("Done processing the trace events: %0.3fs", elapsed) self.trace_parser = None self.path_base = None diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py index 938fe80f0..fb4532a93 100644 --- a/internal/devtools_browser.py +++ b/internal/devtools_browser.py @@ -11,7 +11,10 @@ import subprocess import threading import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -156,7 +159,7 @@ def on_start_recording(self, task): """Start recording""" task['page_data'] = {'date': time.time()} task['page_result'] = None - task['run_start_time'] = monotonic.monotonic() + task['run_start_time'] = monotonic() if self.browser_version is not None and 'browserVersion' not in task['page_data']: task['page_data']['browserVersion'] = self.browser_version task['page_data']['browser_version'] = self.browser_version @@ -195,11 +198,11 @@ def run_task(self, task): if self.devtools is not None: self.task = task logging.debug("Running test") - end_time = monotonic.monotonic() + task['test_time_limit'] + end_time = monotonic() + task['test_time_limit'] task['current_step'] = 1 recording = False while len(task['script']) and task['error'] is None and \ - monotonic.monotonic() < end_time: + monotonic() < end_time: self.prepare_task(task) command = task['script'].pop(0) if not recording and command['record']: diff --git a/internal/firefox.py b/internal/firefox.py index 724f1382e..37f32f6df 100644 --- a/internal/firefox.py +++ b/internal/firefox.py @@ -13,7 +13,10 @@ import subprocess import time import urlparse -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -48,7 +51,7 @@ def __init__(self, path, options, job): self.log_level = job['browser_info']['log_level'] self.page = {} self.requests = {} - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') self.start_page = 'http://127.0.0.1:8888/orange.html' self.block_domains = [ @@ -281,11 +284,11 @@ def run_task(self, task): if self.marionette is not None and self.connected: self.task = task logging.debug("Running test") - end_time = monotonic.monotonic() + task['test_time_limit'] + end_time = monotonic() + task['test_time_limit'] task['current_step'] = 1 
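            # Illustrative aside (not part of the patch): the loop below pops
            # script commands until the script is exhausted, an error is
            # recorded, or the monotonic deadline computed above expires.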
recording = False while len(task['script']) and task['error'] is None and \ - monotonic.monotonic() < end_time: + monotonic() < end_time: self.prepare_task(task) command = task['script'].pop(0) if not recording and command['record']: @@ -320,8 +323,8 @@ def run_task(self, task): def wait_for_extension(self): """Wait for the extension to send the started message""" if self.job['message_server'] is not None: - end_time = monotonic.monotonic() + 30 - while monotonic.monotonic() < end_time: + end_time = monotonic() + 30 + while monotonic() < end_time: try: self.job['message_server'].get_message(1) logging.debug('Extension started') @@ -333,7 +336,7 @@ def wait_for_extension(self): def wait_for_page_load(self): """Wait for the onload event from the extension""" if self.job['message_server'] is not None and self.connected: - start_time = monotonic.monotonic() + start_time = monotonic() end_time = start_time + self.task['time_limit'] done = False interval = 1 @@ -344,7 +347,7 @@ def wait_for_page_load(self): self.process_message(self.job['message_server'].get_message(interval)) except Exception: pass - now = monotonic.monotonic() + now = monotonic() elapsed_test = now - start_time # Allow up to 5 seconds after a navigation for a re-navigation to happen # (bizarre sequence Firefox seems to do) @@ -449,7 +452,7 @@ def process_message(self, message): """Process a message from the extension""" logging.debug(message) if self.recording: - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() try: # Make all of the timestamps relative to the test start to match the log events if 'timeStamp' in message['body']: @@ -495,7 +498,7 @@ def process_web_navigation(self, message, evt): self.page['DOMContentLoaded'] = evt['timeStamp'] elif message == 'onCompleted': if 'frameId' in evt and evt['frameId'] == 0: - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() logging.debug("Page loaded") if 'timeStamp' in evt: self.page['loaded'] = evt['timeStamp'] @@ -504,7 +507,7 @@ def process_web_navigation(self, message, evt): logging.debug("Possible navigation error") err_msg = evt['error'] if 'error' in evt else 'Navigation failed' self.possible_navigation_error = { - 'time': monotonic.monotonic(), + 'time': monotonic(), 'error': err_msg } @@ -593,7 +596,7 @@ def on_start_recording(self, task): self.requests = {} task['page_data'] = {'date': time.time()} task['page_result'] = None - task['run_start_time'] = monotonic.monotonic() + task['run_start_time'] = monotonic() if self.browser_version is not None and 'browserVersion' not in task['page_data']: task['page_data']['browserVersion'] = self.browser_version task['page_data']['browser_version'] = self.browser_version @@ -604,7 +607,7 @@ def on_start_recording(self, task): for path in files: self.log_pos[path] = os.path.getsize(path) self.recording = True - now = monotonic.monotonic() + now = monotonic() if not self.task['stop_at_onload']: self.last_activity = now if self.page_loaded is not None: diff --git a/internal/ios_device.py b/internal/ios_device.py index fee26ffd7..7a1db2f60 100644 --- a/internal/ios_device.py +++ b/internal/ios_device.py @@ -11,7 +11,10 @@ import shutil import subprocess import threading -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -32,7 +35,7 @@ def __init__(self, serial=None): self.video_file = None self.last_video_data = None self.video_size = 0 - self.last_restart = 
monotonic.monotonic() + self.last_restart = monotonic() def check_install(self): """Check to make sure usbmux is installed and the device is available""" @@ -45,9 +48,9 @@ def check_install(self): if os.path.exists('/var/run/usbmuxd'): ret = True else: - print "usbmuxd is not available, please try installing it manually" + print("usbmuxd is not available, please try installing it manually") else: - print "iOS is only supported on Mac and Linux" + print("iOS is only supported on Mac and Linux") return ret def startup(self): @@ -218,10 +221,10 @@ def connect(self): break except Exception: # If the app isn't running restart the device (no more than every 10 minutes) - if connecting and monotonic.monotonic() - self.last_restart > 600: + if connecting and monotonic() - self.last_restart > 600: needs_restart = True if needs_restart: - self.last_restart = monotonic.monotonic() + self.last_restart = monotonic() try: subprocess.call(['idevicediagnostics', 'restart']) except Exception: @@ -255,8 +258,8 @@ def send_message(self, message, data=None, wait=True, timeout=30): try: self.socket.send(msg + "\n") if wait: - end = monotonic.monotonic() + timeout - while response is None and monotonic.monotonic() < end: + end = monotonic() + timeout + while response is None and monotonic() < end: try: msg = self.messages.get(timeout=1) self.messages.task_done() @@ -348,7 +351,7 @@ def process_raw_message(self, message): def process_message(self, msg): """Handle a single decoded message""" if msg['msg'] == 'VideoData' and 'data' in msg: - now = monotonic.monotonic() + now = monotonic() self.video_size += len(msg['data']) if self.last_video_data is None or now - self.last_video_data >= 0.5: logging.debug('<<< Video data (current size: %d)', self.video_size) @@ -372,10 +375,10 @@ def process_message(self, msg): def install_main(): """Main entry-point when running as an installer (under sudo permissions)""" if os.getuid() != 0: - print "Must run as sudo" + print("Must run as sudo") exit(1) if not os.path.exists('/var/run/usbmuxd') and platform.system() == "Linux": - print "Installing usbmuxd" + print("Installing usbmuxd") if os.uname()[4].startswith('arm'): src_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'support', 'ios', 'arm') @@ -394,12 +397,12 @@ def install_main(): elif filename.find('.so') >= 0: dest = os.path.join('/usr/local/lib', filename) if dest is not None and not os.path.isfile(dest): - print "Copying {0} to {1}".format(filename, dest) + print("Copying {0} to {1}".format(filename, dest)) shutil.copy(src, dest) # Update the library cache subprocess.call(['ldconfig']) # Start and initialize usbmuxd - print "Starting usbmuxd" + print("Starting usbmuxd") subprocess.call(['/usr/local/sbin/usbmuxd']) subprocess.call(['/usr/local/bin/ideviceinfo']) diff --git a/internal/message_server.py b/internal/message_server.py index d48a17047..ebc5c3a91 100644 --- a/internal/message_server.py +++ b/internal/message_server.py @@ -152,11 +152,14 @@ def stop(self): def is_ok(self): """Check that the server is responding and restart it if necessary""" import requests - import monotonic - end_time = monotonic.monotonic() + 30 + try: + from monotonic import monotonic + except BaseException: + from time import monotonic + end_time = monotonic() + 30 server_ok = False proxies = {"http": None, "https": None} - while not server_ok and monotonic.monotonic() < end_time: + while not server_ok and monotonic() < end_time: try: response = requests.get('http://127.0.0.1:8888/ping', timeout=10, proxies=proxies) if 
response.text == 'pong': diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py index f81efcdc0..3aa2cfde5 100644 --- a/internal/microsoft_edge.py +++ b/internal/microsoft_edge.py @@ -12,7 +12,10 @@ import subprocess import time import urlparse -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -261,11 +264,11 @@ def run_task(self, task): if self.driver is not None and self.extension_loaded: self.task = task logging.debug("Running test") - end_time = monotonic.monotonic() + task['test_time_limit'] + end_time = monotonic() + task['test_time_limit'] task['current_step'] = 1 recording = False while len(task['script']) and task['error'] is None and \ - monotonic.monotonic() < end_time: + monotonic() < end_time: self.prepare_task(task) command = task['script'].pop(0) if not recording and command['record']: @@ -300,8 +303,8 @@ def run_task(self, task): def wait_for_extension(self): """Wait for the extension to send the started message""" if self.job['message_server'] is not None: - end_time = monotonic.monotonic() + 30 - while monotonic.monotonic() < end_time: + end_time = monotonic() + 30 + while monotonic() < end_time: try: message = self.job['message_server'].get_message(1) logging.debug(message) @@ -315,7 +318,7 @@ def wait_for_page_load(self): """Wait for the onload event from the extension""" if self.job['message_server'] is not None: logging.debug("Waiting for page load...") - start_time = monotonic.monotonic() + start_time = monotonic() end_time = start_time + self.task['time_limit'] done = False self.last_activity = None @@ -324,7 +327,7 @@ def wait_for_page_load(self): self.process_message(self.job['message_server'].get_message(1)) except Exception: pass - now = monotonic.monotonic() + now = monotonic() elapsed_test = now - start_time if self.nav_error is not None: done = True @@ -387,7 +390,7 @@ def process_ie_message(self, message): self.pageContexts.append(message['data']['EventContextId']) self.CMarkup.append(message['data']['CMarkup']) self.navigating = False - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() if 'start' not in self.page: logging.debug("Navigation started") self.page['start'] = message['ts'] @@ -409,7 +412,7 @@ def process_ie_message(self, message): if 'loadEventStart' not in self.page: self.page['loadEventStart'] = elapsed logging.debug("Page Loaded") - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() if message['Event'] == 'Mshtml_CMarkup_DOMContentLoadedEvent_Start/Start': self.page['domContentLoadedEventStart'] = elapsed elif message['Event'] == 'Mshtml_CMarkup_DOMContentLoadedEvent_Stop/Stop': @@ -419,7 +422,7 @@ def process_ie_message(self, message): elif message['Event'] == 'Mshtml_CMarkup_LoadEvent_Stop/Stop': self.page['loadEventEnd'] = elapsed logging.debug("Page loadEventEnd") - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() def process_ieframe_message(self, message): """Handle IEFRAME trace events""" @@ -428,13 +431,13 @@ def process_ieframe_message(self, message): if message['Event'] == 'Shdocvw_BaseBrowser_DocumentComplete': self.page['loadEventStart'] = elapsed self.page['loadEventEnd'] = elapsed - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() logging.debug("Page loaded (Document Complete)") def process_wininet_message(self, message): """Handle WinInet trace events""" if 'Activity' in message: - self.last_activity = 
monotonic.monotonic() + self.last_activity = monotonic() self.process_dns_message(message) self.process_socket_message(message) self.process_request_message(message) @@ -720,9 +723,9 @@ def collect_browser_metrics(self, task): outfile.write(json.dumps(hero_elements)) # Wait for the interactive periods to be written if self.supports_interactive: - end_time = monotonic.monotonic() + 10 + end_time = monotonic() + 10 interactive = None - while interactive is None and monotonic.monotonic() < end_time: + while interactive is None and monotonic() < end_time: interactive = self.execute_js( 'return document.getElementById("wptagentLongTasks").innerText;') if interactive is None: @@ -757,7 +760,7 @@ def on_start_recording(self, task): self.reset() task['page_data'] = {'date': time.time()} task['page_result'] = None - task['run_start_time'] = monotonic.monotonic() + task['run_start_time'] = monotonic() if self.job['message_server'] is not None: self.job['message_server'].flush_messages() if self.browser_version is not None and 'browserVersion' not in task['page_data']: @@ -765,7 +768,7 @@ def on_start_recording(self, task): task['page_data']['browser_version'] = self.browser_version self.recording = True self.navigating = True - now = monotonic.monotonic() + now = monotonic() if self.page_loaded is not None: self.page_loaded = now DesktopBrowser.on_start_recording(self, task) @@ -953,7 +956,7 @@ def process_sockets(self): first_request = None first_request_time = None count = len(self.sockets[event_id]['requests']) - for i in xrange(0, count): + for i in range(0, count): rid = self.sockets[event_id]['requests'][i] if rid in self.requests and 'start' in self.requests[rid]: if first_request is None or \ diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index 40a3c7dc6..c4d1e0a32 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -13,7 +13,10 @@ import subprocess import threading import time -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -495,7 +498,7 @@ def check_cache_static(self): def check_hosting(self): """Pull the data needed to determine the hosting""" - start = monotonic.monotonic() + start = monotonic() self.hosting_results['base_page_ip_ptr'] = '' self.hosting_results['base_page_cname'] = '' self.hosting_results['base_page_dns_server'] = '' @@ -550,12 +553,12 @@ def check_hosting(self): domain = None except Exception: pass - self.hosting_time = monotonic.monotonic() - start + self.hosting_time = monotonic() - start def check_cdn(self): """Check each request to see if it was served from a CDN""" from urlparse import urlparse - start = monotonic.monotonic() + start = monotonic() # First pass, build a list of domains and see if the headers or domain matches static_requests = {} domains = {} @@ -583,7 +586,7 @@ def check_cdn(self): if count: thread_count = min(10, count) threads = [] - for _ in xrange(thread_count): + for _ in range(thread_count): thread = threading.Thread(target=self.dns_worker) thread.start() threads.append(thread) @@ -614,7 +617,7 @@ def check_cdn(self): check['score'] = 100 check['provider'] = provider self.cdn_results[request_id] = check - self.cdn_time = monotonic.monotonic() - start + self.cdn_time = monotonic() - start def find_dns_cdn(self, domain, depth=0): """Recursively check a CNAME chain""" @@ -703,7 +706,7 @@ def check_cdn_headers(self, headers): def check_gzip(self): """Check each request 
to see if it can be compressed""" - start = monotonic.monotonic() + start = monotonic() for request_id in self.requests: try: request = self.requests[request_id] @@ -765,11 +768,11 @@ def check_gzip(self): self.gzip_results[request_id] = check except Exception: pass - self.gzip_time = monotonic.monotonic() - start + self.gzip_time = monotonic() - start def check_images(self): """Check each request to see if images can be compressed better""" - start = monotonic.monotonic() + start = monotonic() for request_id in self.requests: try: request = self.requests[request_id] @@ -885,12 +888,12 @@ def check_images(self): self.image_results[request_id] = check except Exception: pass - self.image_time = monotonic.monotonic() - start + self.image_time = monotonic() - start def check_progressive(self): """Count the number of scan lines in each jpeg""" from PIL import Image - start = monotonic.monotonic() + start = monotonic() for request_id in self.requests: try: request = self.requests[request_id] @@ -947,11 +950,11 @@ def check_progressive(self): self.progressive_results[request_id] = check except Exception: pass - self.progressive_time = monotonic.monotonic() - start + self.progressive_time = monotonic() - start def check_fonts(self): """Check each request to extract metadata about fonts""" - start = monotonic.monotonic() + start = monotonic() try: from fontTools.ttLib import TTFont for request_id in self.requests: @@ -976,7 +979,7 @@ def check_fonts(self): pass except Exception: pass - self.font_time = monotonic.monotonic() - start + self.font_time = monotonic() - start def get_header_value(self, headers, name): """Get the value for the requested header""" diff --git a/internal/safari_ios.py b/internal/safari_ios.py index decc33f2e..8c3809cf6 100644 --- a/internal/safari_ios.py +++ b/internal/safari_ios.py @@ -14,7 +14,10 @@ import time import urlparse import zipfile -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -57,7 +60,7 @@ def __init__(self, ios_device, options, job): self.bodies_zip_file = None self.body_fail_count = 0 self.body_index = 0 - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() self.script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'js') self.path_base = None self.websocket = None @@ -164,8 +167,8 @@ def connect(self, timeout=30): proxies = {"http": None, "https": None} ret = False self.default_target = None - end_time = monotonic.monotonic() + timeout - while not ret and monotonic.monotonic() < end_time: + end_time = monotonic() + timeout + while not ret and monotonic() < end_time: try: response = requests.get("http://localhost:9222/json", timeout=timeout, proxies=proxies) if response.text: @@ -173,7 +176,7 @@ def connect(self, timeout=30): logging.debug("Dev Tools tabs: %s", json.dumps(tabs)) if tabs: websocket_url = None - for index in xrange(len(tabs)): + for index in range(len(tabs)): if 'webSocketDebuggerUrl' in tabs[index]: websocket_url = tabs[index]['webSocketDebuggerUrl'] break @@ -229,11 +232,11 @@ def run_task(self, task): if self.connected: self.task = task logging.debug("Running test") - end_time = monotonic.monotonic() + task['test_time_limit'] + end_time = monotonic() + task['test_time_limit'] task['current_step'] = 1 recording = False while task['script'] and task['error'] is None and \ - monotonic.monotonic() < end_time: + monotonic() < end_time: self.prepare_task(task) command = task['script'].pop(0) if 
not recording and command['record']: @@ -262,7 +265,7 @@ def run_task(self, task): def wait_for_page_load(self): """Wait for the onload event from the extension""" if self.connected: - start_time = monotonic.monotonic() + start_time = monotonic() end_time = start_time + self.task['time_limit'] done = False interval = 1 @@ -273,7 +276,7 @@ def wait_for_page_load(self): self.process_message(self.messages.get(timeout=interval)) except Exception: pass - now = monotonic.monotonic() + now = monotonic() elapsed_test = now - start_time if self.nav_error is not None: done = True @@ -426,7 +429,7 @@ def process_page_event(self, event, msg): if 'start' not in self.page and 'params' in msg and 'timestamp' in msg['params']: self.page['start'] = msg['params']['timestamp'] if event == 'loadEventFired': - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() self.page['loaded'] = msg['params']['timestamp'] elif event == 'domContentEventFired': self.page['DOMContentLoaded'] = msg['params']['timestamp'] @@ -436,7 +439,7 @@ def process_page_event(self, event, msg): self.main_frame = msg['params']['frameId'] if self.main_frame == msg['params']['frameId']: logging.debug("Navigating main frame") - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() self.page_loaded = None elif event == 'frameStoppedLoading': if self.main_frame is not None and \ @@ -447,7 +450,7 @@ def process_page_event(self, event, msg): logging.debug("Page load failed: %s", self.nav_error) if self.nav_error_code is not None: self.task['page_data']['result'] = self.nav_error_code - self.page_loaded = monotonic.monotonic() + self.page_loaded = monotonic() def process_network_event(self, event, msg): """Process Network.* dev tools events""" @@ -634,7 +637,7 @@ def process_network_event(self, event, msg): else: ignore_activity = True if not self.task['stop_at_onload'] and not ignore_activity: - self.last_activity = monotonic.monotonic() + self.last_activity = monotonic() def process_inspector_event(self, event): """Process Inspector.* dev tools events""" @@ -822,7 +825,7 @@ def on_start_recording(self, task): self.wpt_result = None task['page_data'] = {'date': time.time()} task['page_result'] = None - task['run_start_time'] = monotonic.monotonic() + task['run_start_time'] = monotonic() self.flush_messages() self.enable_safari_events() if self.task['log_data']: @@ -854,7 +857,7 @@ def on_start_recording(self, task): task['page_data']['browserVersion'] = self.ios_version task['page_data']['browser_version'] = self.ios_version self.recording = True - now = monotonic.monotonic() + now = monotonic() if not self.task['stop_at_onload']: self.last_activity = now if self.page_loaded is not None: @@ -961,13 +964,13 @@ def on_start_processing(self, task): json.dump(self.console_log, f_out) # Process the timeline data if self.trace_parser is not None and self.path_base is not None: - start = monotonic.monotonic() + start = monotonic() logging.debug("Processing the trace timeline events") self.trace_parser.ProcessTimelineEvents() self.trace_parser.WriteCPUSlices(self.path_base + '_timeline_cpu.json.gz') self.trace_parser.WriteScriptTimings(self.path_base + '_script_timing.json.gz') self.trace_parser.WriteInteractive(self.path_base + '_interactive.json.gz') - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start logging.debug("Done processing the trace events: %0.3fs", elapsed) self.trace_parser = None # Calculate the request and page stats @@ -1008,7 +1011,7 @@ def step_complete(self, task): 
task['page_data']['eventName'] = task['step_name'] if 'run_start_time' in task: task['page_data']['test_run_time_ms'] = \ - int(round((monotonic.monotonic() - task['run_start_time']) * 1000.0)) + int(round((monotonic() - task['run_start_time']) * 1000.0)) if self.path_base is not None: path = self.path_base + '_page_data.json.gz' json_page_data = json.dumps(task['page_data']) @@ -1029,7 +1032,7 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): msg = {'id': command_id, 'method': method, 'params': params} if wait: self.pending_commands.append(command_id) - end_time = monotonic.monotonic() + timeout + end_time = monotonic() + timeout self.send_command('Target.sendMessageToTarget', {'targetId': target_id, 'message': json.dumps(msg)}, wait=True, timeout=timeout) @@ -1038,7 +1041,7 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): ret = self.command_responses[command_id] del self.command_responses[command_id] else: - while ret is None and monotonic.monotonic() < end_time: + while ret is None and monotonic() < end_time: try: raw = self.websocket.get_message(1) if raw is not None and len(raw): @@ -1061,8 +1064,8 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None): logging.debug("Sending: %s", out) self.websocket.send(out) if wait: - end_time = monotonic.monotonic() + timeout - while ret is None and monotonic.monotonic() < end_time: + end_time = monotonic() + timeout + while ret is None and monotonic() < end_time: try: msg = self.messages.get(timeout=1) if msg: diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py index dc81f44d1..284b5d9d6 100644 --- a/internal/support/devtools_parser.py +++ b/internal/support/devtools_parser.py @@ -81,7 +81,7 @@ def make_utf8(self, data): except Exception: pass elif isinstance(data, list): - for key in xrange(len(data)): + for key in range(len(data)): entry = data[key] if isinstance(entry, dict) or isinstance(entry, list): self.make_utf8(entry) @@ -1230,7 +1230,7 @@ def process_cpu_times(self): page_data['cpuTimesDoc'][name] = 0 slices = all_slices[name] last_slice = min(int(math.ceil((end * 1000) / usecs)), len(slices)) - for index in xrange(last_slice): + for index in range(last_slice): slice_time = float(slices[index]) / 1000.0 page_data['cpuTimes'][name] += slice_time busy += slice_time diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py index 9cd8d093f..f3f59fbb0 100644 --- a/internal/support/firefox_log_parser.py +++ b/internal/support/firefox_log_parser.py @@ -20,7 +20,10 @@ import os import re import urlparse -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -33,7 +36,7 @@ def __init__(self): self.start_day = None self.unique_id = 0 self.int_map = {} - for val in xrange(0, 100): + for val in range(0, 100): self.int_map['{0:02d}'.format(val)] = float(val) self.dns = {} self.http = {'channels': {}, 'requests': {}, 'connections': {}, 'sockets': {}, 'streams': {}} @@ -118,7 +121,7 @@ def finish_processing(self): def process_log_file(self, path): """Process a single log file""" logging.debug("Processing %s", path) - start = monotonic.monotonic() + start = monotonic() _, ext = os.path.splitext(path) line_count = 0 if ext.lower() == '.gz': @@ -130,7 +133,7 @@ def process_log_file(self, path): line = line.rstrip("\r\n") self.process_log_line(line) f_in.close() - elapsed = 
monotonic.monotonic() - start + elapsed = monotonic() - start logging.debug("%0.3f s to process %s (%d lines)", elapsed, path, line_count) def process_log_line(self, line): diff --git a/internal/support/ios/usbmux.py b/internal/support/ios/usbmux.py index 79ec26aee..eeef1fe4c 100644 --- a/internal/support/ios/usbmux.py +++ b/internal/support/ios/usbmux.py @@ -236,11 +236,11 @@ def connect(self, device, port): if __name__ == "__main__": mux = USBMux() - print "Waiting for devices..." + print("Waiting for devices...") if not mux.devices: mux.process(0.1) while True: - print "Devices:" + print("Devices:") for dev in mux.devices: - print dev + print(dev) mux.process() diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py index 3dbd0be11..f7272fbc6 100644 --- a/internal/support/pcap-parser.py +++ b/internal/support/pcap-parser.py @@ -74,9 +74,9 @@ def Print(self): if options.json: print(json.dumps(self.bytes, indent=2)) else: - print "Bytes Out: {0:d}".format(self.bytes['out']) - print "Bytes In: {0:d}".format(self.bytes['in']) - print "Duplicate Bytes In: {0:d}".format(self.bytes['in_dup']) + print("Bytes Out: {0:d}".format(self.bytes['out'])) + print("Bytes In: {0:d}".format(self.bytes['in'])) + print("Duplicate Bytes In: {0:d}".format(self.bytes['in_dup'])) def Process(self, pcap): diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py index f6aed39a7..1bb460fd4 100644 --- a/internal/support/trace_parser.py +++ b/internal/support/trace_parser.py @@ -573,7 +573,7 @@ def ProcessTimelineEvent(self, timeline_event, parent, stack=None): slice_usecs = self.cpu['slice_usecs'] first_slice = int(float(start) / float(slice_usecs)) last_slice = int(float(end) / float(slice_usecs)) - for slice_number in xrange(first_slice, last_slice + 1): + for slice_number in range(first_slice, last_slice + 1): slice_start = slice_number * slice_usecs slice_end = slice_start + slice_usecs used_start = max(slice_start, start) diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index 641fb9515..68c1a6762 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -446,7 +446,7 @@ def find_first_frame(directory, white_file): count = len(files) if count > 1: from PIL import Image - for i in xrange(count): + for i in range(count): if is_white_frame(files[i], white_file): break else: @@ -474,7 +474,7 @@ def find_first_frame(directory, white_file): first_frame = None if white_file is None: found_white_frame = True - for i in xrange(count): + for i in range(count): if not found_first_change: different = not frames_match( files[i], files[i + 1], 5, 100, crop, None) @@ -513,7 +513,7 @@ def find_last_frame(directory, white_file): if count > 2: found_end = False from PIL import Image - for i in xrange(2, count): + for i in range(2, count): if found_end: logging.debug( 'Removing frame {0} from the end'.format( @@ -582,7 +582,7 @@ def find_render_start(directory, orange_file, gray_file): top += client_viewport['y'] crop = '{0:d}x{1:d}+{2:d}+{3:d}'.format( width, height, left, top) - for i in xrange(1, count): + for i in range(1, count): if frames_match(first, files[i], 10, 0, crop, mask): logging.debug('Removing pre-render frame %s', files[i]) os.remove(files[i]) @@ -641,7 +641,7 @@ def eliminate_duplicate_frames(directory): # for up to a 10% per-pixel difference for noise in the white # field. 
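        # Illustrative aside (not part of the patch): on Python 3, range() is
        # already the lazy sequence that xrange() was on Python 2; on Python 2
        # these converted loops now build a real list instead, a harmless cost
        # at typical video-frame counts.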
count = len(files) - for i in xrange(1, count): + for i in range(1, count): if frames_match(blank, files[i], 10, 0, crop, None): logging.debug( 'Removing duplicate frame {0} from the beginning'.format( @@ -660,7 +660,7 @@ def eliminate_duplicate_frames(directory): files.reverse() baseline = files[0] previous_frame = baseline - for i in xrange(1, count): + for i in range(1, count): if frames_match(baseline, files[i], 10, 0, crop, None): if previous_frame is baseline: duplicates.append(previous_frame) @@ -694,7 +694,7 @@ def eliminate_similar_frames(directory): crop = '{0:d}x{1:d}+{2:d}+{3:d}'.format(client_viewport['width'], client_viewport['height'], client_viewport['x'], client_viewport['y']) baseline = files[1] - for i in xrange(2, count - 1): + for i in range(2, count - 1): if frames_match(baseline, files[i], 1, 0, crop, None): logging.debug( 'Removing similar frame {0}'.format( @@ -730,7 +730,7 @@ def crop_viewport(directory): if count > 0: crop = '{0:d}x{1:d}+{2:d}+{3:d}'.format(client_viewport['width'], client_viewport['height'], client_viewport['x'], client_viewport['y']) - for i in xrange(count): + for i in range(count): command = '{0} "{1}" -crop {2} "{1}"'.format( image_magick['convert'], files[i], crop) subprocess.call(command, shell=True) @@ -847,7 +847,7 @@ def is_white_frame(file, white_file): def colors_are_similar(a, b, threshold=15): similar = True sum = 0 - for x in xrange(3): + for x in range(3): delta = abs(a[x] - b[x]) sum += delta if delta > threshold: @@ -1108,9 +1108,9 @@ def calculate_image_histogram(file): im = Image.open(file) width, height = im.size colors = im.getcolors(width * height) - histogram = {'r': [0 for i in xrange(256)], - 'g': [0 for i in xrange(256)], - 'b': [0 for i in xrange(256)]} + histogram = {'r': [0 for i in range(256)], + 'g': [0 for i in range(256)], + 'b': [0 for i in range(256)]} for entry in colors: try: count = entry[0] @@ -1223,7 +1223,7 @@ def render_video(directory, video_file): current_frame += 1 # hold the end frame for one second so it's actually # visible - for i in xrange(30): + for i in range(30): proc.stdin.write(current_image) proc.stdin.close() proc.communicate() @@ -1432,16 +1432,16 @@ def calculate_frame_progress(histogram, start, final): channel_total = 0 channel_matched = 0 buckets = 256 - available = [0 for i in xrange(buckets)] - for i in xrange(buckets): + available = [0 for i in range(buckets)] + for i in range(buckets): available[i] = abs(histogram[channel][i] - start[channel][i]) - for i in xrange(buckets): + for i in range(buckets): target = abs(final[channel][i] - start[channel][i]) if (target): channel_total += target low = max(0, i - slop) high = min(buckets, i + slop) - for j in xrange(low, high): + for j in range(low, high): this_match = min(target, available[j]) available[j] -= this_match channel_matched += this_match @@ -1492,7 +1492,6 @@ def calculate_perceptual_speed_index(progress, directory): ssim = ssim_1 for p in progress[1:]: elapsed = p['time'] - last_ms - # print '*******elapsed %f'%elapsed # Full Path of the Current Frame current_frame = os.path.join(dir, "ms_{0:06d}.png".format(p["time"])) logging.debug("Current Image is %s" % current_frame) @@ -1602,43 +1601,38 @@ def calculate_hero_time(progress, directory, hero, viewport): def check_config(): ok = True - print 'ffmpeg: ', if get_decimate_filter() is not None: - print 'OK' + print('ffmpeg: OK') else: - print 'FAIL' + print('ffmpeg: FAIL') ok = False - print 'convert: ', if check_process('{0} -version'.format(image_magick['convert']), 
'ImageMagick'): - print 'OK' + print('convert: OK') else: - print 'FAIL' + print('convert: FAIL') ok = False - print 'compare: ', if check_process('{0} -version'.format(image_magick['compare']), 'ImageMagick'): - print 'OK' + print('compare: OK') else: - print 'FAIL' + print('compare: FAIL') ok = False - print 'Pillow: ', try: from PIL import Image, ImageDraw - print 'OK' + print('Pillow: OK') except BaseException: - print 'FAIL' + print('Pillow: FAIL') ok = False - print 'SSIM: ', try: from ssim import compute_ssim - print 'OK' + print('SSIM: OK') except BaseException: - print 'FAIL' + print('SSIM: FAIL') ok = False return ok @@ -1878,10 +1872,10 @@ def main(): for metric in metrics: data[metric['name'].replace( ' ', '')] = metric['value'] - print json.dumps(data) + print(json.dumps(data)) else: for metric in metrics: - print "{0}: {1}".format(metric['name'], metric['value']) + print("{0}: {1}".format(metric['name'], metric['value'])) else: ok = check_config() except Exception as e: diff --git a/internal/traceroute.py b/internal/traceroute.py index 2099f80e1..f05d381f2 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -42,7 +42,7 @@ def run_task(self, task): f_out.write('-1,{0},0,{1}\n'.format(results[0]['addr'], hostname)) else: f_out.write('-1,,0,{0}\n'.format(hostname)) - for hop in xrange(1, last_hop + 1): + for hop in range(1, last_hop + 1): if hop in results: entry = results[hop] f_out.write('{0:d},{1},{2},{3}\n'.format(hop, entry['addr'], diff --git a/internal/video_processing.py b/internal/video_processing.py index db80c6620..2ef2044dd 100644 --- a/internal/video_processing.py +++ b/internal/video_processing.py @@ -61,7 +61,7 @@ def process(self): count = len(files) if count > 1: baseline = files[0] - for index in xrange(1, count): + for index in range(1, count): if self.frames_match(baseline, files[index], crop, 1, 0): logging.debug('Removing similar frame %s', os.path.basename(files[index])) try: diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 8378e2029..d528da5c9 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -17,7 +17,10 @@ import urllib import zipfile import psutil -import monotonic +try: + from monotonic import monotonic +except BaseException: + from time import monotonic try: import ujson as json except BaseException: @@ -152,11 +155,13 @@ def benchmark_cpu(self): hash_val = hashlib.sha256() with open(__file__, 'rb') as f_in: hash_data = f_in.read(4096) - start = monotonic.monotonic() + start = monotonic() # 106k iterations takes ~1 second on the reference machine - for _ in xrange(106000): + iteration = 0 + while iteration < 106000: hash_val.update(hash_data) - elapsed = monotonic.monotonic() - start + iteration += 1 + elapsed = monotonic() - start self.cpu_scale_multiplier = 1.0 / elapsed logging.debug('CPU Benchmark elapsed time: %0.3f, multiplier: %0.3f', elapsed, self.cpu_scale_multiplier) @@ -485,7 +490,7 @@ def get_test(self, browsers): except requests.exceptions.RequestException as err: logging.critical("Get Work Error: %s", err.strerror) retry = True - now = monotonic.monotonic() + now = monotonic() if self.first_failure is None: self.first_failure = now # Reboot if we haven't been able to reach the server for 30 minutes @@ -633,7 +638,7 @@ def get_task(self, job): task['time_limit'] = job['timeout'] task['test_time_limit'] = task['time_limit'] * task['script_step_count'] task['stop_at_onload'] = bool('web10' in job and job['web10']) - task['run_start_time'] = monotonic.monotonic() + 
task['run_start_time'] = monotonic() # Keep the full resolution video frames if the browser window is smaller than 600px if 'thumbsize' not in job and (task['width'] < 600 or task['height'] < 600): job['fullSizeVideo'] = 1 @@ -1003,7 +1008,7 @@ def get_bodies(self, task): logging.debug("Fetching bodies for %d requests", count) threads = [] thread_count = min(count, 10) - for _ in xrange(thread_count): + for _ in range(thread_count): thread = threading.Thread(target=self.body_fetch_thread) thread.daemon = True thread.start() diff --git a/wptagent.py b/wptagent.py index 4cccd4a9c..2f4601490 100644 --- a/wptagent.py +++ b/wptagent.py @@ -68,8 +68,11 @@ def __init__(self, options, browsers): def run_testing(self): """Main testing flow""" - import monotonic - start_time = monotonic.monotonic() + try: + from monotonic import monotonic + except BaseException: + from time import monotonic + start_time = monotonic() browser = None exit_file = os.path.join(self.root_path, 'exit') message_server = None @@ -103,7 +106,7 @@ def run_testing(self): self.job['shaper'] = self.shaper self.task = self.wpt.get_task(self.job) while self.task is not None: - start = monotonic.monotonic() + start = monotonic() try: self.task['running_lighthouse'] = False if self.job['type'] != 'lighthouse': @@ -120,7 +123,7 @@ def run_testing(self): self.task['running_lighthouse'] = True self.wpt.running_another_test(self.task) self.run_single_test() - elapsed = monotonic.monotonic() - start + elapsed = monotonic() - start logging.debug('Test run time: %0.3f sec', elapsed) except Exception as err: msg = '' @@ -151,7 +154,7 @@ def run_testing(self): browser.on_stop_recording(None) browser = None if self.options.exit > 0: - run_time = (monotonic.monotonic() - start_time) / 60.0 + run_time = (monotonic() - start_time) / 60.0 if run_time > self.options.exit: break # Exit if adb is having issues (will cause a reboot after several tries) @@ -203,9 +206,9 @@ def signal_handler(self, *_): if self.must_exit: exit(1) if self.job is None: - print "Exiting..." + print("Exiting...") else: - print "Will exit after test completes. Hit Ctrl+C again to exit immediately" + print("Will exit after test completes. Hit Ctrl+C again to exit immediately") self.must_exit = True def cleanup(self): @@ -228,23 +231,26 @@ def sleep(self, seconds): def wait_for_idle(self, timeout=30): """Wait for the system to go idle for at least 2 seconds""" - import monotonic + try: + from monotonic import monotonic + except BaseException: + from time import monotonic import psutil logging.debug("Waiting for Idle...") cpu_count = psutil.cpu_count() if cpu_count > 0: target_pct = 50. / float(cpu_count) idle_start = None - end_time = monotonic.monotonic() + timeout + end_time = monotonic() + timeout idle = False - while not idle and monotonic.monotonic() < end_time: + while not idle and monotonic() < end_time: self.alive() - check_start = monotonic.monotonic() + check_start = monotonic() pct = psutil.cpu_percent(interval=0.5) if pct <= target_pct: if idle_start is None: idle_start = check_start - if monotonic.monotonic() - idle_start > 2: + if monotonic() - idle_start > 2: idle = True else: idle_start = None @@ -278,7 +284,7 @@ def requires(self, module, module_name=None): except ImportError: pass if not ret: - print "Missing {0} module. Please run 'pip install {1}'".format(module, module_name) + print("Missing {0} module. 
Please run 'pip install {1}'".format(module, module_name)) return ret def startup(self): @@ -318,21 +324,19 @@ def startup(self): try: subprocess.check_output(['python', '--version']) except Exception: - print "Make sure python 2.7 is available in the path." + print("Make sure python 2.7 is available in the path.") ret = False try: subprocess.check_output('{0} -version'.format(self.image_magick['convert']), shell=True) except Exception: - print "Missing convert utility. Please install ImageMagick " \ - "and make sure it is in the path." + print("Missing convert utility. Please install ImageMagick and make sure it is in the path.") ret = False try: subprocess.check_output('{0} -version'.format(self.image_magick['mogrify']), shell=True) except Exception: - print "Missing mogrify utility. Please install ImageMagick " \ - "and make sure it is in the path." + print("Missing mogrify utility. Please install ImageMagick and make sure it is in the path.") ret = False if platform.system() == "Linux": @@ -374,7 +378,7 @@ def startup(self): try: subprocess.check_output('sudo cgset -h', shell=True) except Exception: - print "Missing cgroups, make sure cgroup-tools is installed." + print("Missing cgroups, make sure cgroup-tools is installed.") ret = False # Fix Lighthouse install permissions @@ -406,14 +410,14 @@ def startup(self): self.wait_for_idle(300) if self.adb is not None: if not self.adb.start(): - print "Error configuring adb. Make sure it is installed and in the path." + print("Error configuring adb. Make sure it is installed and in the path.") ret = False self.shaper.remove() if not self.shaper.install(): if platform.system() == "Windows": - print "Error configuring traffic shaping, make sure secure boot is disabled." + print("Error configuring traffic shaping, make sure secure boot is disabled.") else: - print "Error configuring traffic shaping, make sure it is installed." + print("Error configuring traffic shaping, make sure it is installed.") ret = False # Update the Windows root certs @@ -786,7 +790,7 @@ def main(): # Video capture/display settings parser.add_argument('--xvfb', action='store_true', default=False, help="Use an xvfb virtual display (Linux only).") - parser.add_argument('--fps', type=int, choices=xrange(1, 61), default=10, + parser.add_argument('--fps', type=int, choices=range(1, 61), default=10, help='Video capture frame rate (defaults to 10). 
' 'Valid range is 1-60 (Linux only).') @@ -868,23 +872,24 @@ def main(): options, _ = parser.parse_known_args() # Make sure we are running python 2.7.11 or newer (required for Windows 8.1) - if platform.system() == "Windows": - if sys.version_info[0] != 2 or \ - sys.version_info[1] != 7 or \ - sys.version_info[2] < 11: - print "Requires python 2.7.11 (or later)" + if sys.version_info[0] < 3: + if platform.system() == "Windows": + if sys.version_info[0] != 2 or \ + sys.version_info[1] != 7 or \ + sys.version_info[2] < 11: + print("Requires python 2.7.11 (or later)") + exit(1) + elif sys.version_info[0] != 2 or sys.version_info[1] != 7: + print("Requires python 2.7") exit(1) - elif sys.version_info[0] != 2 or sys.version_info[1] != 7: - print "Requires python 2.7" - exit(1) if options.list: from internal.ios_device import iOSDevice ios = iOSDevice() devices = ios.get_devices() - print "Available iOS devices:" + print("Available iOS devices:") for device in devices: - print device + print(device) exit(1) # Set up logging @@ -921,7 +926,7 @@ def main(): if not options.android and not options.iOS: browsers = find_browsers() if len(browsers) == 0: - print "No browsers configured. Check that browsers.ini is present and correct." + print("No browsers configured. Check that browsers.ini is present and correct.") exit(1) if options.collectversion and platform.system() == "Windows": @@ -930,9 +935,9 @@ def main(): agent = WPTAgent(options, browsers) if agent.startup(): # Create a work directory relative to where we are running - print "Running agent, hit Ctrl+C to exit" + print("Running agent, hit Ctrl+C to exit") agent.run_testing() - print "Done" + print("Done") if __name__ == '__main__': From eec030def0e358575c3b2de1064fa1287076766a Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 18 Dec 2019 11:16:04 -0800 Subject: [PATCH 03/16] More python 3 prep --- internal/adb.py | 6 +++++- internal/desktop_browser.py | 6 +++++- internal/support/visualmetrics.py | 13 +++++++++---- internal/traceroute.py | 11 +++++++++-- internal/traffic_shaping.py | 6 +++++- internal/webpagetest.py | 7 +++++-- wptagent.py | 20 +++++++++++++++----- 7 files changed, 53 insertions(+), 16 deletions(-) diff --git a/internal/adb.py b/internal/adb.py index 83fe2d0c1..1efb9bc65 100644 --- a/internal/adb.py +++ b/internal/adb.py @@ -7,6 +7,7 @@ import platform import re import subprocess +import sys from threading import Timer import time try: @@ -138,7 +139,10 @@ def start(self): self.simplert_path = None if self.options.simplert is not None and platform.system() == 'Linux': running = False - stdout = subprocess.check_output(['ps', 'ax']) + if (sys.version_info > (3, 0)): + stdout = subprocess.check_output(['ps', 'ax'], encoding='UTF-8') + else: + stdout = subprocess.check_output(['ps', 'ax']) if stdout.find('simple-rt ') > -1: running = True logging.debug('simple-rt is already running') diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index 4a0b32d60..9e5ffe097 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -11,6 +11,7 @@ import shutil import signal import subprocess +import sys import threading import time try: @@ -666,7 +667,10 @@ def process_pcap(self): cmd = ['python', pcap_parser, '--json', '-i', pcap_file, '-d', slices_file] logging.debug(cmd) try: - stdout = subprocess.check_output(cmd) + if (sys.version_info > (3, 0)): + stdout = subprocess.check_output(cmd, encoding='UTF-8') + else: + stdout = subprocess.check_output(cmd) if stdout is not None: result = 
json.loads(stdout) if result: diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index 68c1a6762..977975f03 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -37,6 +37,7 @@ import re import shutil import subprocess +import sys import tempfile # Globals @@ -742,8 +743,10 @@ def crop_viewport(directory): def get_decimate_filter(): decimate = None try: - filters = subprocess.check_output( - ['ffmpeg', '-filters'], stderr=subprocess.STDOUT) + if (sys.version_info > (3, 0)): + filters = subprocess.check_output(['ffmpeg', '-filters'], stderr=subprocess.STDOUT, encoding='UTF-8') + else: + filters = subprocess.check_output(['ffmpeg', '-filters'], stderr=subprocess.STDOUT) lines = filters.split("\n") match = re.compile( r'(?P[\w]*decimate).*V->V.*Remove near-duplicate frames') @@ -1641,8 +1644,10 @@ def check_config(): def check_process(command, output): ok = False try: - out = subprocess.check_output( - command, stderr=subprocess.STDOUT, shell=True) + if (sys.version_info > (3, 0)): + out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True, encoding='UTF-8') + else: + out = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True) if out.find(output) > -1: ok = True except BaseException: diff --git a/internal/traceroute.py b/internal/traceroute.py index f05d381f2..d21f9ed58 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -8,6 +8,7 @@ import platform import re import subprocess +import sys import urlparse @@ -57,7 +58,10 @@ def windows_traceroute(self, hostname): last_hop = 0 command = ['tracert', '-h', '30', '-w', '500', hostname] logging.debug(' '.join(command)) - out = subprocess.check_output(command) + if (sys.version_info > (3, 0)): + out = subprocess.check_output(command, encoding='UTF-8') + else: + out = subprocess.check_output(command) lines = out.splitlines() dest = re.compile(r'^Tracing route to.*\[([\d\.]+)\]') timeout = re.compile(r'^\s*(\d+).*Request timed out') @@ -125,7 +129,10 @@ def unix_traceroute(self, hostname): last_hop = 0 command = ['traceroute', '-m', '30', '-w', '0.5', hostname] logging.debug(' '.join(command)) - out = subprocess.check_output(command) + if (sys.version_info > (3, 0)): + out = subprocess.check_output(command, encoding='UTF-8') + else: + out = subprocess.check_output(command) lines = out.splitlines() dest = re.compile(r'^traceroute to [^\(]+\(([\d\.]+)\)') timeout = re.compile(r'^\s*(\d+)\s+\*\s+\*\s+\*') diff --git a/internal/traffic_shaping.py b/internal/traffic_shaping.py index 1700e4449..0c00e713b 100644 --- a/internal/traffic_shaping.py +++ b/internal/traffic_shaping.py @@ -7,6 +7,7 @@ import platform import re import subprocess +import sys import time class TrafficShaper(object): @@ -391,7 +392,10 @@ def install(self): # Figure out the default interface try: if self.interface is None: - out = subprocess.check_output(['route']) + if (sys.version_info > (3, 0)): + out = subprocess.check_output(['route'], encoding='UTF-8') + else: + out = subprocess.check_output(['route']) routes = out.splitlines() match = re.compile(r'^([^\s]+)\s+[^\s]+\s+[^\s]+\s+[^\s]+\s+'\ r'[^\s]+\s+[^\s]+\s+[^\s]+\s+([^\s]+)') diff --git a/internal/webpagetest.py b/internal/webpagetest.py index d528da5c9..583be2ae1 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -12,6 +12,7 @@ import shutil import socket import subprocess +import sys import threading import time import urllib @@ -125,8 +126,10 @@ def __init__(self, options, workdir): 
self.version = '19.04' try: directory = os.path.abspath(os.path.dirname(__file__)) - out = subprocess.check_output('git log -1 --format=%cd --date=raw', - shell=True, cwd=directory) + if (sys.version_info > (3, 0)): + out = subprocess.check_output('git log -1 --format=%cd --date=raw', shell=True, cwd=directory, encoding='UTF-8') + else: + out = subprocess.check_output('git log -1 --format=%cd --date=raw', shell=True, cwd=directory) if out is not None: matches = re.search(r'^(\d+)', out) if matches: diff --git a/wptagent.py b/wptagent.py index 2f4601490..cbd1b3f3e 100644 --- a/wptagent.py +++ b/wptagent.py @@ -430,7 +430,10 @@ def get_node_version(self): """Get the installed version of Node.js""" version = 0 try: - stdout = subprocess.check_output(['node', '--version']) + if (sys.version_info > (3, 0)): + stdout = subprocess.check_output(['node', '--version'], encoding='UTF-8') + else: + stdout = subprocess.check_output(['node', '--version']) matches = re.match(r'^v(\d+\.\d+)', stdout) if matches: version = float(matches.group(1)) @@ -463,10 +466,15 @@ def update_windows_certificates(self): def parse_ini(ini): """Parse an ini file and convert it to a dictionary""" - import ConfigParser ret = None if os.path.isfile(ini): - parser = ConfigParser.SafeConfigParser() + parser = None + try: + import ConfigParser + parser = ConfigParser.SafeConfigParser() + except BaseException: + import configparser + parser = configparser.ConfigParser() parser.read(ini) ret = {} for section in parser.sections(): @@ -738,8 +746,10 @@ def upgrade_pip_modules(): from internal.os_util import run_elevated subprocess.call([sys.executable, '-m', 'pip', 'install', '--upgrade', 'pip']) run_elevated(sys.executable, '-m pip install --upgrade pip') - out = subprocess.check_output([sys.executable, '-m', 'pip', 'list', - '--outdated', '--format', 'freeze']) + if (sys.version_info > (3, 0)): + out = subprocess.check_output([sys.executable, '-m', 'pip', 'list', '--outdated', '--format', 'freeze'], encoding='UTF-8') + else: + out = subprocess.check_output([sys.executable, '-m', 'pip', 'list', '--outdated', '--format', 'freeze']) for line in out.splitlines(): separator = line.find('==') if separator > 0: From 753045337c29dca7a2d6b2cf4340c1295dc496fb Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 18 Dec 2019 12:02:07 -0800 Subject: [PATCH 04/16] More python 3 compatibility --- internal/message_server.py | 8 ++++++++ internal/webpagetest.py | 19 +++++++++++-------- 2 files changed, 19 insertions(+), 8 deletions(-) diff --git a/internal/message_server.py b/internal/message_server.py index ebc5c3a91..cf283ac4b 100644 --- a/internal/message_server.py +++ b/internal/message_server.py @@ -1,6 +1,10 @@ # Copyright 2017 Google Inc. All rights reserved. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. 
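The version-gated subprocess.check_output() calls repeated throughout this patch address a real Python 3 difference: check_output() returns bytes by default on Python 3 but str on Python 2, so the existing .find() and regex logic would break without decoding. Passing encoding='UTF-8' (accepted on Python 3.6 and later) makes Python 3 return str. A hypothetical helper that centralizes the pattern, rather than inlining the version check at every call site as the patch does:

    import subprocess
    import sys

    def check_output_text(*args, **kwargs):
        """Run a command and return its output as str on Python 2 and 3."""
        if sys.version_info > (3, 0):
            # Python 3.6+: have check_output() decode bytes to str for us.
            kwargs.setdefault('encoding', 'UTF-8')
        return subprocess.check_output(*args, **kwargs)

    # Usage sketch:
    # stdout = check_output_text(['node', '--version'])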
+try: + import asyncio +except Exception: + pass from multiprocessing import JoinableQueue import logging import threading @@ -173,6 +177,10 @@ def is_ok(self): def run(self): """Main server loop""" logging.debug('Starting extension server on port 8888') + try: + asyncio.set_event_loop(asyncio.new_event_loop()) + except Exception: + pass application = tornado.web.Application([(r"/.*", TornadoRequestHandler)]) application.listen(8888, '127.0.0.1') self.__is_started.set() diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 583be2ae1..557fb40e9 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -15,7 +15,10 @@ import sys import threading import time -import urllib +try: + from urllib import quote_plus +except BaseException: + from urllib.parse import quote_plus import zipfile import psutil try: @@ -393,14 +396,14 @@ def get_test(self, browsers): retry = False count += 1 url = self.url + "getwork.php?f=json&shards=1&reboot=1" - url += "&location=" + urllib.quote_plus(location) - url += "&pc=" + urllib.quote_plus(self.pc_name) + url += "&location=" + quote_plus(location) + url += "&pc=" + quote_plus(self.pc_name) if self.key is not None: - url += "&key=" + urllib.quote_plus(self.key) + url += "&key=" + quote_plus(self.key) if self.instance_id is not None: - url += "&ec2=" + urllib.quote_plus(self.instance_id) + url += "&ec2=" + quote_plus(self.instance_id) if self.zone is not None: - url += "&ec2zone=" + urllib.quote_plus(self.zone) + url += "&ec2zone=" + quote_plus(self.zone) if self.options.android: url += '&apk=1' url += '&version={0}'.format(self.version) @@ -421,7 +424,7 @@ def get_test(self, browsers): versions.append('{0}:{1}'.format(name, \ browsers[name]['version'])) browser_versions = ','.join(versions) - url += '&browsers=' + urllib.quote_plus(browser_versions) + url += '&browsers=' + quote_plus(browser_versions) logging.info("Checking for work: %s", url) try: response = self.session.get(url, timeout=30, proxies=proxies) @@ -1169,7 +1172,7 @@ def post_data(self, url, data, file_path, filename): url += "?" 
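The guarded asyncio import and the asyncio.set_event_loop(asyncio.new_event_loop()) call added to the message server above are needed because, on Python 3, Tornado 5+ runs on the asyncio event loop, and asyncio only creates a default loop for the main thread. A background thread that hosts a Tornado IOLoop must therefore install its own loop before listen() is called. A minimal standalone sketch of the pattern (the empty handler list is illustrative):

    import asyncio
    import threading

    import tornado.ioloop
    import tornado.web

    def run_server():
        # Background threads get no default event loop on Python 3.
        asyncio.set_event_loop(asyncio.new_event_loop())
        app = tornado.web.Application([])
        app.listen(8888, '127.0.0.1')
        tornado.ioloop.IOLoop.current().start()

    threading.Thread(target=run_server, daemon=True).start()

The try/except wrapper in the patch keeps the same file importable on Python 2, where the asyncio module does not exist.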
for key in data: if data[key] != None: - url += key + '=' + urllib.quote_plus(data[key]) + '&' + url += key + '=' + quote_plus(data[key]) + '&' logging.debug(url) try: if file_path is not None and os.path.isfile(file_path): From 49da5cde0910bfdcde37f30e0a0314259100eed1 Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 18 Dec 2019 16:08:14 -0800 Subject: [PATCH 05/16] Updated copyrights --- LICENSE | 1 + alive.py | 3 ++- internal/adb.py | 3 ++- internal/android_browser.py | 3 ++- internal/base_browser.py | 3 ++- internal/blackbox_android.py | 3 ++- internal/browsers.py | 3 ++- internal/chrome_android.py | 3 ++- internal/chrome_desktop.py | 3 ++- internal/desktop_browser.py | 3 ++- internal/devtools.py | 3 ++- internal/devtools_browser.py | 3 ++- internal/firefox.py | 3 ++- internal/internet_explorer.py | 3 ++- internal/ios_device.py | 3 ++- internal/message_server.py | 3 ++- internal/microsoft_edge.py | 3 ++- internal/optimization_checks.py | 3 ++- internal/os_util.py | 3 ++- internal/safari_ios.py | 3 ++- internal/support/devtools_parser.py | 3 ++- internal/support/firefox_log_parser.py | 3 ++- internal/support/pcap-parser.py | 3 ++- internal/support/trace_parser.py | 3 ++- internal/support/visualmetrics.py | 1 + internal/traceroute.py | 3 ++- internal/traffic_shaping.py | 3 ++- internal/video_processing.py | 3 ++- internal/webpagetest.py | 3 ++- wptagent.py | 3 ++- 30 files changed, 58 insertions(+), 28 deletions(-) diff --git a/LICENSE b/LICENSE index a7216ca5d..07fe602c1 100644 --- a/LICENSE +++ b/LICENSE @@ -1,3 +1,4 @@ +Copyright 2019 WebPageTest LLC. Copyright 2017, Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/alive.py b/alive.py index c4ffe52b1..7f407d54a 100644 --- a/alive.py +++ b/alive.py @@ -1,5 +1,6 @@ #!/usr/bin/env python -# Copyright 2018 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2018 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Watchdog helper""" diff --git a/internal/adb.py b/internal/adb.py index 1efb9bc65..dab12455e 100644 --- a/internal/adb.py +++ b/internal/adb.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """ADB command-line interface""" diff --git a/internal/android_browser.py b/internal/android_browser.py index d3e228a8b..544b55cc1 100644 --- a/internal/android_browser.py +++ b/internal/android_browser.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Base class support for android browsers""" diff --git a/internal/base_browser.py b/internal/base_browser.py index eb48d201e..873a110b7 100644 --- a/internal/base_browser.py +++ b/internal/base_browser.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. 
"""Base class support for browsers""" diff --git a/internal/blackbox_android.py b/internal/blackbox_android.py index 5e786cf1d..41efca96c 100644 --- a/internal/blackbox_android.py +++ b/internal/blackbox_android.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Chrome browser on Android""" diff --git a/internal/browsers.py b/internal/browsers.py index bdec522fa..a682c8926 100644 --- a/internal/browsers.py +++ b/internal/browsers.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Main entry point for controlling browsers""" diff --git a/internal/chrome_android.py b/internal/chrome_android.py index 683a4c2be..2fe0b0061 100644 --- a/internal/chrome_android.py +++ b/internal/chrome_android.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Chrome browser on Android""" diff --git a/internal/chrome_desktop.py b/internal/chrome_desktop.py index 36d878b50..2faf64731 100644 --- a/internal/chrome_desktop.py +++ b/internal/chrome_desktop.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Logic for controlling a desktop Chrome browser""" diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index 9e5ffe097..7546b8dc6 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Base class support for desktop browsers""" diff --git a/internal/devtools.py b/internal/devtools.py index 8ea9aebb1..3c403a443 100644 --- a/internal/devtools.py +++ b/internal/devtools.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Main entry point for interfacing with Chrome's remote debugging protocol""" diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py index fb4532a93..65e33bd53 100644 --- a/internal/devtools_browser.py +++ b/internal/devtools_browser.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Base class support for browsers that speak the dev tools protocol""" diff --git a/internal/firefox.py b/internal/firefox.py index 37f32f6df..d8114477e 100644 --- a/internal/firefox.py +++ b/internal/firefox.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. 
# Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Support for Firefox""" diff --git a/internal/internet_explorer.py b/internal/internet_explorer.py index 218b2df3c..74633361b 100644 --- a/internal/internet_explorer.py +++ b/internal/internet_explorer.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Microsoft Internet Explorer testing (based on the Edge support)""" diff --git a/internal/ios_device.py b/internal/ios_device.py index 7a1db2f60..3960fc58c 100644 --- a/internal/ios_device.py +++ b/internal/ios_device.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Interface for iWptBrowser on iOS devices""" diff --git a/internal/message_server.py b/internal/message_server.py index cf283ac4b..74f15c409 100644 --- a/internal/message_server.py +++ b/internal/message_server.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. try: diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py index 3aa2cfde5..4938f8772 100644 --- a/internal/microsoft_edge.py +++ b/internal/microsoft_edge.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Microsoft Edge testing""" diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index c4d1e0a32..61fac0eeb 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Run the various optimization checks""" diff --git a/internal/os_util.py b/internal/os_util.py index 72019bc6a..74c1daacb 100644 --- a/internal/os_util.py +++ b/internal/os_util.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Cross-platform support for os-level things that differ on different platforms""" diff --git a/internal/safari_ios.py b/internal/safari_ios.py index 8c3809cf6..7b287cc39 100644 --- a/internal/safari_ios.py +++ b/internal/safari_ios.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Support for Safari on iOS using iWptBrowser""" diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py index 284b5d9d6..f0fc0eea0 100644 --- a/internal/support/devtools_parser.py +++ b/internal/support/devtools_parser.py @@ -1,6 +1,7 @@ #!/usr/bin/env python """ -Copyright 2016 Google Inc. 
All Rights Reserved. +Copyright 2019 WebPageTest LLC. +Copyright 2016 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py index f3f59fbb0..ef0e1ff9a 100644 --- a/internal/support/firefox_log_parser.py +++ b/internal/support/firefox_log_parser.py @@ -1,6 +1,7 @@ #!/usr/bin/env python """ -Copyright 2016 Google Inc. All Rights Reserved. +Copyright 2019 WebPageTest LLC. +Copyright 2016 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py index f7272fbc6..3648f3250 100644 --- a/internal/support/pcap-parser.py +++ b/internal/support/pcap-parser.py @@ -1,6 +1,7 @@ #!/usr/bin/env python """ -Copyright 2016 Google Inc. All Rights Reserved. +Copyright 2019 WebPageTest LLC. +Copyright 2016 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py index 1bb460fd4..f50158009 100644 --- a/internal/support/trace_parser.py +++ b/internal/support/trace_parser.py @@ -1,6 +1,7 @@ #!/usr/bin/env python """ -Copyright 2016 Google Inc. All Rights Reserved. +Copyright 2019 WebPageTest LLC. +Copyright 2016 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index 977975f03..7bd33569e 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -1,5 +1,6 @@ #!/usr/bin/env python """ +Copyright 2019 WebPageTest LLC. Copyright (c) 2014, Google Inc. All rights reserved. diff --git a/internal/traceroute.py b/internal/traceroute.py index d21f9ed58..e75f348c5 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Logic for running a traceroute test""" diff --git a/internal/traffic_shaping.py b/internal/traffic_shaping.py index 0c00e713b..ac716304a 100644 --- a/internal/traffic_shaping.py +++ b/internal/traffic_shaping.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Cross-platform support for traffic-shaping""" diff --git a/internal/video_processing.py b/internal/video_processing.py index 2ef2044dd..cb023b7e8 100644 --- a/internal/video_processing.py +++ b/internal/video_processing.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Video processing logic""" diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 557fb40e9..35c20668f 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -1,4 +1,5 @@ -# Copyright 2017 Google Inc. All rights reserved. 
+# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """Main entry point for interfacing with WebPageTest server""" diff --git a/wptagent.py b/wptagent.py index cbd1b3f3e..0813dc124 100644 --- a/wptagent.py +++ b/wptagent.py @@ -1,5 +1,6 @@ #!/usr/bin/env python -# Copyright 2017 Google Inc. All rights reserved. +# Copyright 2019 WebPageTest LLC. +# Copyright 2017 Google Inc. # Use of this source code is governed by the Apache 2.0 license that can be # found in the LICENSE file. """WebPageTest cross-platform agent""" From 569e25d2c1f4277645e96981f8a5a14fdbaf93dd Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Thu, 19 Dec 2019 11:28:00 -0800 Subject: [PATCH 06/16] More python 3 patches --- internal/adb.py | 7 ++--- internal/android_browser.py | 8 +++-- internal/blackbox_android.py | 9 +++--- internal/chrome_android.py | 9 +++--- internal/desktop_browser.py | 20 ++++++------ internal/devtools.py | 12 +++++--- internal/devtools_browser.py | 18 ++++++----- internal/firefox.py | 16 +++++----- internal/ios_device.py | 7 +++-- internal/message_server.py | 7 +++-- internal/microsoft_edge.py | 14 +++++---- internal/optimization_checks.py | 18 ++++++++--- internal/safari_ios.py | 18 ++++++----- internal/support/devtools_parser.py | 15 +++++---- internal/support/firefox_log_parser.py | 12 +++++--- internal/support/pcap-parser.py | 8 ++--- internal/support/trace_parser.py | 15 ++++++--- internal/support/visualmetrics.py | 8 ++--- internal/traceroute.py | 7 +++-- internal/traffic_shaping.py | 42 +++++++++++++------------- internal/webpagetest.py | 18 +++++------ wptagent.py | 12 ++++---- ws4py/compat.py | 4 +-- 23 files changed, 169 insertions(+), 135 deletions(-) diff --git a/internal/adb.py b/internal/adb.py index dab12455e..a6461460f 100644 --- a/internal/adb.py +++ b/internal/adb.py @@ -11,10 +11,10 @@ import sys from threading import Timer import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic # cSpell:ignore vpndialogs, sysctl, iptables, ifconfig, dstaddr, clientidbase, nsecs @@ -590,7 +590,6 @@ def check_gnirehtet(self): if self.is_tun_interface_available(): is_ready = True elif self.gnirehtet_exe is not None: - interface, dns_server = self.options.gnirehtet.split(',', 1) if self.gnirehtet is not None: try: subprocess.call([self.gnirehtet_exe, 'stop']) diff --git a/internal/android_browser.py b/internal/android_browser.py index 544b55cc1..6a79210d7 100644 --- a/internal/android_browser.py +++ b/internal/android_browser.py @@ -9,15 +9,17 @@ import os import shutil import subprocess +import sys import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic try: import ujson as json except BaseException: import json + from .base_browser import BaseBrowser diff --git a/internal/blackbox_android.py b/internal/blackbox_android.py index 41efca96c..fb1762fa6 100644 --- a/internal/blackbox_android.py +++ b/internal/blackbox_android.py @@ -6,11 +6,12 @@ import logging import os import re +import sys import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic from .android_browser import AndroidBrowser CHROME_COMMAND_LINE_OPTIONS = [ @@ -181,7 
+182,7 @@ def clear_profile(self, _): def ensure_xml_setting(self, settings, key, value): """Make sure the provided setting exists in the setting string""" if settings.find('name="{0}" value="{1}"'.format(key, value)) == -1: - modified = True + self.modified = True settings = re.sub(r'name=\"{0}\" value=\"[^\"]\"'.format(key), 'name="{0}" value="{1}"'.format(key, value), settings) if settings.find('name="{0}" value="{1}"'.format(key, value)) == -1: diff --git a/internal/chrome_android.py b/internal/chrome_android.py index 2fe0b0061..aa8e8a0a2 100644 --- a/internal/chrome_android.py +++ b/internal/chrome_android.py @@ -8,11 +8,12 @@ import os import re import shutil +import sys import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic from .devtools_browser import DevtoolsBrowser from .android_browser import AndroidBrowser @@ -179,7 +180,6 @@ def launch(self, job, task): def setup_prefs(self): """Install our base set of preferences""" # Crashes chrome on the Moto G4's so disabled for now - return """ src = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'support', 'chrome', 'prefs.json') @@ -196,6 +196,7 @@ def setup_prefs(self): self.adb.shell(['rm', remote_prefs]) self.adb.su('chmod 777 {0}'.format(dest)) """ + return def configure_prefs(self): """Configure browser-specific shared_prefs""" diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index 7546b8dc6..522d399a4 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -15,10 +15,10 @@ import sys import threading import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic try: import ujson as json except BaseException: @@ -192,7 +192,7 @@ def close_top_window(self, hwnd, _): window_title, window_class) win32api.TerminateProcess(handle, 0) win32api.CloseHandle(handle) - except Exception as err: + except Exception: pass def close_top_dialog(self, hwnd, _): @@ -383,7 +383,7 @@ def on_start_recording(self, task): if self.device_pixel_ratio is None: self.device_pixel_ratio = 1.0 try: - ratio = self.execute_js('window.devicePixelRatio') + ratio = self.execute_js('window.devicePixelRatio') #pylint: disable=assignment-from-none if ratio is not None: self.device_pixel_ratio = max(1.0, float(ratio)) except Exception: @@ -432,12 +432,12 @@ def on_start_recording(self, task): time.sleep(1) task['video_file'] = os.path.join(task['dir'], task['prefix']) + '_video.mp4' if platform.system() == 'Windows': - from win32api import GetSystemMetrics + from win32api import GetSystemMetrics #pylint: disable=import-error self.screen_width = GetSystemMetrics(0) self.screen_height = GetSystemMetrics(1) elif platform.system() == 'Darwin': try: - from AppKit import NSScreen + from AppKit import NSScreen #pylint: disable=import-error self.screen_width = int(NSScreen.screens()[0].frame().size.width) self.screen_height = int(NSScreen.screens()[0].frame().size.height) except Exception: @@ -504,7 +504,7 @@ def on_stop_capture(self, task): logging.debug('Stopping tcpdump') from .os_util import kill_all if platform.system() == 'Windows': - os.kill(self.tcpdump.pid, signal.CTRL_BREAK_EVENT) + os.kill(self.tcpdump.pid, signal.CTRL_BREAK_EVENT) #pylint: disable=no-member kill_all('WinDump', False) else: subprocess.call(['sudo', 'killall', 'tcpdump']) @@ -513,7 +513,7 @@ def on_stop_capture(self, 
task): logging.debug('Stopping video capture') self.video_capture_running = False if platform.system() == 'Windows': - os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) + os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) #pylint: disable=no-member else: self.ffmpeg.terminate() @@ -726,7 +726,7 @@ def background_thread(self): logging.debug('Stopping video capture - File is too big: %d', video_size) self.video_capture_running = False if platform.system() == 'Windows': - os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) + os.kill(self.ffmpeg.pid, signal.CTRL_BREAK_EVENT) #pylint: disable=no-member else: self.ffmpeg.terminate() diff --git a/internal/devtools.py b/internal/devtools.py index 3c403a443..7013058bf 100644 --- a/internal/devtools.py +++ b/internal/devtools.py @@ -10,13 +10,15 @@ import os import re import subprocess +import sys import time import zipfile -from urlparse import urlsplit -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import urlsplit # pylint: disable=import-error +else: + from monotonic import monotonic + from urlparse import urlsplit # pylint: disable=import-error try: import ujson as json except BaseException: @@ -417,7 +419,7 @@ def stop_recording(self): summary[url]['{0}_bytes_used'.format(category)] = used_bytes summary[url]['{0}_percent_used'.format(category)] = used_pct path = self.path_base + '_coverage.json.gz' - with gzip.open(path, 'wb', 7) as f_out: + with gzip.open(path, 'w', 7) as f_out: json.dump(summary, f_out) self.send_command('CSS.disable', {}) self.send_command('DOM.disable', {}) diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py index 65e33bd53..eb792cbae 100644 --- a/internal/devtools_browser.py +++ b/internal/devtools_browser.py @@ -10,12 +10,14 @@ import re import shutil import subprocess +import sys import threading import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + unicode = str +else: + from monotonic import monotonic try: import ujson as json except BaseException: @@ -219,7 +221,7 @@ def run_task(self, task): self.on_start_processing(task) self.wait_for_processing(task) self.process_devtools_requests(task) - self.step_complete(task) + self.step_complete(task) #pylint: disable=no-member if task['log_data']: # Move on to the next step task['current_step'] += 1 @@ -364,7 +366,7 @@ def process_command(self, command): self.task['page_data']['URL'] = command['target'] url = str(command['target']).replace('"', '\"') script = 'window.location="{0}";'.format(url) - script = self.prepare_script_for_record(script) + script = self.prepare_script_for_record(script) #pylint: disable=no-member self.devtools.start_navigating() self.devtools.execute_js(script) elif command['command'] == 'logdata': @@ -383,7 +385,7 @@ def process_command(self, command): elif command['command'] == 'exec': script = command['target'] if command['record']: - script = self.prepare_script_for_record(script) + script = self.prepare_script_for_record(script) #pylint: disable=no-member self.devtools.start_navigating() self.devtools.execute_js(script) elif command['command'] == 'sleep': @@ -507,7 +509,7 @@ def run_lighthouse_test(self, task): command.extend(['--blocked-url-patterns', pattern]) if 'headers' in task: headers_file = os.path.join(task['dir'], 'lighthouse-headers.json') - with open(headers_file, 'wb') as f_out: + with open(headers_file, 'w') as f_out: 
json.dump(task['headers'], f_out) command.extend(['--extra-headers', '"{0}"'.format(headers_file)]) cmd = ' '.join(command) @@ -611,7 +613,7 @@ def run_lighthouse_test(self, task): elif 'numericValue' in audit: audits[name] = audit['numericValue'] audits_gzip = os.path.join(task['dir'], 'lighthouse_audits.json.gz') - with gzip.open(audits_gzip, 'wb', 7) as f_out: + with gzip.open(audits_gzip, 'w', 7) as f_out: json.dump(audits, f_out) # Compress the HTML lighthouse report if os.path.isfile(html_file): diff --git a/internal/firefox.py b/internal/firefox.py index d8114477e..36d7a4595 100644 --- a/internal/firefox.py +++ b/internal/firefox.py @@ -12,12 +12,14 @@ import re import shutil import subprocess +import sys import time -import urlparse -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import urlsplit # pylint: disable=import-error +else: + from monotonic import monotonic + from urlparse import urlsplit # pylint: disable=import-error try: import ujson as json except BaseException: @@ -120,7 +122,7 @@ def disable_fsync(self, command_line): logging.debug(' '.join(cmd)) subprocess.check_call(cmd) command_line = 'eatmydata ' + command_line - except Exception as err: + except Exception: pass return command_line @@ -838,12 +840,12 @@ def process_requests(self, request_timings, task): result['requests'] = self.merge_requests(request_timings) result['pageData'] = self.calculate_page_stats(result['requests']) devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'wb', 7) as f_out: + with gzip.open(devtools_file, 'w', 7) as f_out: json.dump(result, f_out) def get_empty_request(self, request_id, url): """Return and empty, initialized request""" - parts = urlparse.urlsplit(url) + parts = urlsplit(url) request = {'type': 3, 'id': request_id, 'request_id': request_id, diff --git a/internal/ios_device.py b/internal/ios_device.py index 3960fc58c..bd2800f06 100644 --- a/internal/ios_device.py +++ b/internal/ios_device.py @@ -11,11 +11,12 @@ import select import shutil import subprocess +import sys import threading -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic try: import ujson as json except BaseException: diff --git a/internal/message_server.py b/internal/message_server.py index 74f15c409..b574f313a 100644 --- a/internal/message_server.py +++ b/internal/message_server.py @@ -8,6 +8,7 @@ pass from multiprocessing import JoinableQueue import logging +import sys import threading import time import tornado.ioloop @@ -157,10 +158,10 @@ def stop(self): def is_ok(self): """Check that the server is responding and restart it if necessary""" import requests - try: - from monotonic import monotonic - except BaseException: + if (sys.version_info > (3, 0)): from time import monotonic + else: + from monotonic import monotonic end_time = monotonic() + 30 server_ok = False proxies = {"http": None, "https": None} diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py index 4938f8772..d73413ef6 100644 --- a/internal/microsoft_edge.py +++ b/internal/microsoft_edge.py @@ -11,12 +11,14 @@ import re import shutil import subprocess +import sys import time -import urlparse -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import urlsplit # pylint: 
disable=import-error +else: + from monotonic import monotonic + from urlparse import urlsplit # pylint: disable=import-error try: import ujson as json except BaseException: @@ -931,7 +933,7 @@ def process_requests(self, task): result['pageData'] = self.calculate_page_stats(result['requests']) self.check_optimization(task, result['requests'], result['pageData']) devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'wb', 7) as f_out: + with gzip.open(devtools_file, 'w', 7) as f_out: json.dump(result, f_out) def process_sockets(self): @@ -989,7 +991,7 @@ def process_sockets(self): def get_empty_request(self, request_id, url): """Return and empty, initialized request""" - parts = urlparse.urlsplit(url) + parts = urlsplit(url) request = {'type': 3, 'id': request_id, 'request_id': request_id, diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index 61fac0eeb..d05c797b9 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -12,12 +12,13 @@ import shutil import struct import subprocess +import sys import threading import time -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic +else: + from monotonic import monotonic try: import ujson as json except BaseException: @@ -383,7 +384,11 @@ def join(self): def check_keep_alive(self): """Check for requests where the connection is force-closed""" - from urlparse import urlsplit + if (sys.version_info > (3, 0)): + from urllib.parse import urlsplit # pylint: disable=import-error + else: + from urlparse import urlsplit # pylint: disable=import-error + # build a list of origins and how many requests were issued to each origins = {} for request_id in self.requests: @@ -558,7 +563,10 @@ def check_hosting(self): def check_cdn(self): """Check each request to see if it was served from a CDN""" - from urlparse import urlparse + if (sys.version_info > (3, 0)): + from urllib.parse import urlparse # pylint: disable=import-error + else: + from urlparse import urlparse # pylint: disable=import-error start = monotonic() # First pass, build a list of domains and see if the headers or domain matches static_requests = {} diff --git a/internal/safari_ios.py b/internal/safari_ios.py index 7b287cc39..1c5b46ead 100644 --- a/internal/safari_ios.py +++ b/internal/safari_ios.py @@ -12,13 +12,15 @@ import platform import re import subprocess +import sys import time -import urlparse import zipfile -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import urlsplit # pylint: disable=import-error +else: + from monotonic import monotonic + from urlparse import urlsplit # pylint: disable=import-error try: import ujson as json except BaseException: @@ -841,7 +843,7 @@ def on_start_recording(self, task): if 'timeline' in self.job and self.job['timeline']: if self.path_base is not None: timeline_path = self.path_base + '_devtools.json.gz' - self.timeline = gzip.open(timeline_path, 'wb', 7) + self.timeline = gzip.open(timeline_path, 'w', 7) if self.timeline: self.timeline.write('[\n') from internal.support.trace_parser import Trace @@ -961,7 +963,7 @@ def on_start_processing(self, task): # Save the console logs if self.console_log and self.path_base is not None: log_file = self.path_base + '_console_log.json.gz' - with gzip.open(log_file, 'wb', 7) as f_out: + with gzip.open(log_file, 'w', 7) as f_out: 
json.dump(self.console_log, f_out) # Process the timeline data if self.trace_parser is not None and self.path_base is not None: @@ -998,7 +1000,7 @@ def wait_for_processing(self, task): self.wpt_result['requests'], opt) if self.path_base is not None: devtools_file = self.path_base + '_devtools_requests.json.gz' - with gzip.open(devtools_file, 'wb', 7) as f_out: + with gzip.open(devtools_file, 'w', 7) as f_out: json.dump(self.wpt_result, f_out) def step_complete(self, task): @@ -1193,7 +1195,7 @@ def grab_screenshot(self, path, png=True, resize=0): def get_empty_request(self, request_id, url): """Return and empty, initialized request""" - parts = urlparse.urlsplit(url) + parts = urlsplit(url) request = {'type': 3, 'id': request_id, 'request_id': request_id, diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py index f0fc0eea0..d8569ae13 100644 --- a/internal/support/devtools_parser.py +++ b/internal/support/devtools_parser.py @@ -19,8 +19,13 @@ import logging import os import re +import sys import time -import urlparse +if (sys.version_info > (3, 0)): + from urllib.parse import urlsplit # pylint: disable=import-error + unicode = str +else: + from urlparse import urlsplit # pylint: disable=import-error # try a fast json parser if it is installed try: @@ -99,7 +104,7 @@ def write(self): try: _, ext = os.path.splitext(self.out_file) if ext.lower() == '.gz': - with gzip.open(self.out_file, 'wb') as f_out: + with gzip.open(self.out_file, 'w') as f_out: json.dump(self.result, f_out) else: with open(self.out_file, 'w') as f_out: @@ -124,7 +129,6 @@ def extract_net_requests(self): ('params' in x and 'timestamp' in x['params']) else 9999999) f_in.close() if raw_events is not None and len(raw_events): - end_timestamp = None first_timestamp = None raw_requests = {} id_map = {} @@ -396,7 +400,7 @@ def process_requests(self, raw_requests, raw_page_data): for raw_request in raw_requests: if 'url' in raw_request: url = raw_request['url'].split('#', 1)[0] - parts = urlparse.urlsplit(url) + parts = urlsplit(url) request = {'type': 3, 'id': raw_request['id'], 'request_id': raw_request['id']} request['ip_addr'] = '' request['full_url'] = url @@ -709,7 +713,6 @@ def process_netlog_requests(self): if 'request_id' not in request and 'id' in request: request['request_id'] = request['id'] if 'full_url' in request: - matched = False for entry in netlog: if 'url' in entry and 'start' in entry and 'claimed' not in entry and \ entry['url'] == request['full_url']: @@ -795,7 +798,7 @@ def process_netlog_requests(self): if 'claimed' not in entry and 'url' in entry and 'start' in entry: index += 1 request = {'type': 3, 'full_url': entry['url']} - parts = urlparse.urlsplit(entry['url']) + parts = urlsplit(entry['url']) request['id'] = '99999.99999.{0:d}'.format(index) request['is_secure'] = 1 if parts.scheme == 'https' else 0 request['host'] = parts.netloc diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py index ef0e1ff9a..2d65fd5a4 100644 --- a/internal/support/firefox_log_parser.py +++ b/internal/support/firefox_log_parser.py @@ -20,11 +20,13 @@ import logging import os import re -import urlparse -try: - from monotonic import monotonic -except BaseException: +import sys +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import urlsplit # pylint: disable=import-error +else: + from monotonic import monotonic + from urlparse import urlsplit # pylint: disable=import-error try: import ujson as json except BaseException: @@ 
-94,7 +96,7 @@ def finish_processing(self): for domain in self.dns: if 'claimed' not in self.dns[domain]: for request in requests: - host = urlparse.urlsplit(request['url']).hostname + host = urlsplit(request['url']).hostname if host == domain: self.dns[domain]['claimed'] = True if 'start' in self.dns[domain]: diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py index 3648f3250..c3415fd51 100644 --- a/internal/support/pcap-parser.py +++ b/internal/support/pcap-parser.py @@ -44,9 +44,9 @@ def __init__(self): def SaveStats(self, out): file_name, ext = os.path.splitext(out) if ext.lower() == '.gz': - f = gzip.open(out, 'wb') + f = gzip.open(out, 'w') else: - f = open(out, 'wb') + f = open(out, 'w') try: result = {"bytes": self.bytes} json.dump(result, f) @@ -59,9 +59,9 @@ def SaveStats(self, out): def SaveDetails(self, out): file_name, ext = os.path.splitext(out) if ext.lower() == '.gz': - f = gzip.open(out, 'wb') + f = gzip.open(out, 'w') else: - f = open(out, 'wb') + f = open(out, 'w') try: json.dump(self.slices, f) logging.info('Result details written to {0}'.format(out)) diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py index f50158009..953b6b963 100644 --- a/internal/support/trace_parser.py +++ b/internal/support/trace_parser.py @@ -20,8 +20,13 @@ import math import os import re +import sys import time -import urlparse +if (sys.version_info > (3, 0)): + from urllib.parse import urlparse # pylint: disable=import-error + unicode = str +else: + from urlparse import urlparse # pylint: disable=import-error # try a fast json parser if it is installed try: @@ -66,7 +71,7 @@ def write_json(self, out_file, json_data): try: _, ext = os.path.splitext(out_file) if ext.lower() == '.gz': - with gzip.open(out_file, 'wb') as f: + with gzip.open(out_file, 'w') as f: json.dump(json_data, f) else: with open(out_file, 'w') as f: @@ -721,7 +726,7 @@ def post_process_netlog_events(self): not request['url'].startswith('http://192.168.10.'): # Match orphaned request streams with their h2 sessions if 'stream_id' in request and 'h2_session' not in request and 'url' in request: - request_host = urlparse.urlparse(request['url']).hostname + request_host = urlparse(request['url']).hostname for h2_session_id in self.netlog['h2_session']: h2_session = self.netlog['h2_session'][h2_session_id] if 'host' in h2_session: @@ -841,7 +846,7 @@ def post_process_netlog_events(self): # Go through the requests and assign the DNS lookups as needed for request in requests: if 'connect_start' in request: - hostname = urlparse.urlparse(request['url']).hostname + hostname = urlparse(request['url']).hostname if hostname in dns_lookups and 'claimed' not in dns_lookups[hostname]: dns = dns_lookups[hostname] dns['claimed'] = True @@ -1335,7 +1340,7 @@ def main(): help="Output list of interactive times.") parser.add_argument('-n', '--netlog', help="Output netlog details file.") parser.add_argument('-s', '--stats', help="Output v8 Call stats file.") - options, unknown = parser.parse_known_args() + options, _ = parser.parse_known_args() # Set up logging log_level = logging.CRITICAL diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index 7bd33569e..af3baaa78 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -1092,7 +1092,7 @@ def calculate_histograms(directory, histograms_file, force): 'histogram': histogram}) if os.path.isfile(histograms_file): os.remove(histograms_file) - f = gzip.open(histograms_file, 'wb') + f = 
gzip.open(histograms_file, 'w') json.dump(histograms, f) f.close() else: @@ -1325,9 +1325,9 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog if progress and progress_file is not None: file_name, ext = os.path.splitext(progress_file) if ext.lower() == '.gz': - f = gzip.open(progress_file, 'wb', 7) + f = gzip.open(progress_file, 'w', 7) else: - f = open(progress_file, 'wb') + f = open(progress_file, 'w') json.dump(progress, f) f.close() if len(histograms) > 1: @@ -1367,7 +1367,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog hero_data['timings'] = hero_timings metrics += hero_timings - with gzip.open(hero_elements_file, 'wb', 7) as hero_f_out: + with gzip.open(hero_elements_file, 'w', 7) as hero_f_out: json.dump(hero_data, hero_f_out) else: logging.warn('Hero elements file is not valid: ' + str(hero_elements_file)) diff --git a/internal/traceroute.py b/internal/traceroute.py index e75f348c5..9f7f8883c 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -10,7 +10,10 @@ import re import subprocess import sys -import urlparse +if (sys.version_info > (3, 0)): + from urllib.parse import urlparse # pylint: disable=import-error +else: + from urlparse import urlparse # pylint: disable=import-error class Traceroute(object): @@ -31,7 +34,7 @@ def run_task(self, task): """Run an individual test""" if 'url' in self.job: results = None - hostname = urlparse.urlparse(self.job['url']).hostname + hostname = urlparse(self.job['url']).hostname if platform.system() == 'Windows': last_hop, results = self.windows_traceroute(hostname) else: diff --git a/internal/traffic_shaping.py b/internal/traffic_shaping.py index ac716304a..6415e3d23 100644 --- a/internal/traffic_shaping.py +++ b/internal/traffic_shaping.py @@ -162,12 +162,12 @@ def reset(self): def configure(self, in_bps, out_bps, rtt, plr): """Enable traffic-shaping""" return self.shaper(['set', - 'inbps={0:d}'.format(in_bps), - 'outbps={0:d}'.format(out_bps), - 'rtt={0:d}'.format(rtt), - 'plr={0:.2f}'.format(plr), - 'inbuff={0:d}'.format(self.in_buff), - 'outbuff={0:d}'.format(self.out_buff)]) + 'inbps={0:d}'.format(int(in_bps)), + 'outbps={0:d}'.format(int(out_bps)), + 'rtt={0:d}'.format(int(rtt)), + 'plr={0:.2f}'.format(float(plr)), + 'inbuff={0:d}'.format(int(self.in_buff)), + 'outbuff={0:d}'.format(int(self.out_buff))]) # # Dummynet @@ -231,18 +231,18 @@ def configure(self, in_bps, out_bps, rtt, plr): in_latency += 1 in_command = ['pipe', self.in_pipe, 'config'] if in_kbps > 0: - in_command.extend(['bw', '{0:d}Kbit/s'.format(in_kbps)]) + in_command.extend(['bw', '{0:d}Kbit/s'.format(int(in_kbps))]) if in_latency >= 0: - in_command.extend(['delay', '{0:d}ms'.format(in_latency)]) + in_command.extend(['delay', '{0:d}ms'.format(int(in_latency))]) # outbound connection out_kbps = int(out_bps / 1000) out_latency = rtt / 2 out_command = ['pipe', self.out_pipe, 'config'] if out_kbps > 0: - out_command.extend(['bw', '{0:d}Kbit/s'.format(out_kbps)]) + out_command.extend(['bw', '{0:d}Kbit/s'.format(int(out_kbps))]) if out_latency >= 0: - out_command.extend(['delay', '{0:d}ms'.format(out_latency)]) + out_command.extend(['delay', '{0:d}ms'.format(int(out_latency))]) # Packet loss get applied to the queues plr = plr / 100.0 @@ -250,8 +250,8 @@ def configure(self, in_bps, out_bps, rtt, plr): out_queue_command = ['queue', self.out_pipe, 'config', 'pipe', self.out_pipe, 'queue', '100'] if plr > 0.0 and plr <= 1.0: - in_queue_command.extend(['plr', '{0:.4f}'.format(plr)]) - 
out_queue_command.extend(['plr', '{0:.4f}'.format(plr)]) + in_queue_command.extend(['plr', '{0:.4f}'.format(float(plr))]) + out_queue_command.extend(['plr', '{0:.4f}'.format(float(plr))]) in_queue_command.extend(['mask', 'dst-port', '0xffff']) out_queue_command.extend(['mask', 'dst-port', '0xffff']) @@ -318,24 +318,24 @@ def configure(self, in_bps, out_bps, rtt, plr): in_latency += 1 in_command = ['pipe', self.in_pipe, 'config'] if in_kbps > 0: - in_command.extend(['bw', '{0:d}Kbit/s'.format(in_kbps)]) + in_command.extend(['bw', '{0:d}Kbit/s'.format(int(in_kbps))]) if in_latency >= 0: - in_command.extend(['delay', '{0:d}ms'.format(in_latency)]) + in_command.extend(['delay', '{0:d}ms'.format(int(in_latency))]) # outbound connection out_kbps = int(out_bps / 1000) out_latency = rtt / 2 out_command = ['pipe', self.out_pipe, 'config'] if out_kbps > 0: - out_command.extend(['bw', '{0:d}Kbit/s'.format(out_kbps)]) + out_command.extend(['bw', '{0:d}Kbit/s'.format(int(out_kbps))]) if out_latency >= 0: - out_command.extend(['delay', '{0:d}ms'.format(out_latency)]) + out_command.extend(['delay', '{0:d}ms'.format(int(out_latency))]) # Packet loss get applied to the queues plr = plr / 100.0 if plr > 0.0 and plr <= 1.0: - in_command.extend(['plr', '{0:.4f}'.format(plr)]) - out_command.extend(['plr', '{0:.4f}'.format(plr)]) + in_command.extend(['plr', '{0:.4f}'.format(float(plr))]) + out_command.extend(['plr', '{0:.4f}'.format(float(plr))]) return self.dnctl(in_command) and\ self.dnctl(out_command) @@ -466,12 +466,12 @@ def configure_interface(self, interface, bps, latency, plr): """Configure traffic-shaping for a single interface""" ret = False args = ['sudo', 'tc', 'qdisc', 'add', 'dev', interface, 'root', - 'netem', 'delay', '{0:d}ms'.format(latency)] + 'netem', 'delay', '{0:d}ms'.format(int(latency))] if bps > 0: kbps = int(bps / 1000) - args.extend(['rate', '{0:d}kbit'.format(kbps)]) + args.extend(['rate', '{0:d}kbit'.format(int(kbps))]) if plr > 0: - args.extend(['loss', '{0:.2f}%'.format(plr)]) + args.extend(['loss', '{0:.2f}%'.format(float(plr))]) logging.debug(' '.join(args)) ret = subprocess.call(args) == 0 return ret diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 35c20668f..8fee61b73 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -16,16 +16,14 @@ import sys import threading import time -try: - from urllib import quote_plus -except BaseException: - from urllib.parse import quote_plus import zipfile import psutil -try: - from monotonic import monotonic -except BaseException: +if (sys.version_info > (3, 0)): from time import monotonic + from urllib.parse import quote_plus # pylint: disable=import-error +else: + from monotonic import monotonic + from urllib import quote_plus # pylint: disable=import-error,no-name-in-module try: import ujson as json except BaseException: @@ -90,14 +88,14 @@ def __init__(self, options, workdir): self.screen_height = 1200 elif platform.system() == 'Windows': try: - from win32api import GetSystemMetrics + from win32api import GetSystemMetrics # pylint: disable=import-error self.screen_width = GetSystemMetrics(0) self.screen_height = GetSystemMetrics(1) except Exception: pass elif platform.system() == 'Darwin': try: - from AppKit import NSScreen + from AppKit import NSScreen # pylint: disable=import-error self.screen_width = int(NSScreen.screens()[0].frame().size.width) self.screen_height = int(NSScreen.screens()[0].frame().size.height) except Exception: @@ -893,7 +891,7 @@ def update_browser_viewport(self, task): if not 
os.path.isdir(self.persistent_dir): os.makedirs(self.persistent_dir) margins_file = os.path.join(self.persistent_dir, 'margins.json') - with open(margins_file, 'wb') as f_out: + with open(margins_file, 'w') as f_out: json.dump(self.margins, f_out) def body_fetch_thread(self): diff --git a/wptagent.py b/wptagent.py index 0813dc124..f9738e983 100644 --- a/wptagent.py +++ b/wptagent.py @@ -69,10 +69,10 @@ def __init__(self, options, browsers): def run_testing(self): """Main testing flow""" - try: - from monotonic import monotonic - except BaseException: + if (sys.version_info > (3, 0)): from time import monotonic + else: + from monotonic import monotonic start_time = monotonic() browser = None exit_file = os.path.join(self.root_path, 'exit') @@ -232,10 +232,10 @@ def sleep(self, seconds): def wait_for_idle(self, timeout=30): """Wait for the system to go idle for at least 2 seconds""" - try: - from monotonic import monotonic - except BaseException: + if (sys.version_info > (3, 0)): from time import monotonic + else: + from monotonic import monotonic import psutil logging.debug("Waiting for Idle...") cpu_count = psutil.cpu_count() diff --git a/ws4py/compat.py b/ws4py/compat.py index e986e338a..33f934b16 100644 --- a/ws4py/compat.py +++ b/ws4py/compat.py @@ -33,8 +33,8 @@ def ord(c): return _ord(c) else: py3k = False - from urlparse import urlsplit - range = xrange + from urlparse import urlsplit # pylint: disable=import-error + range = xrange # pylint: disable=undefined-variable unicode = unicode basestring = basestring ord = ord From a86cbd747252690f8d060fa180ecf9cf287e4fde Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Thu, 19 Dec 2019 11:31:02 -0800 Subject: [PATCH 07/16] updated ujson install --- ubuntu_install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ubuntu_install.sh b/ubuntu_install.sh index 82e759be6..1dcfd2cd3 100755 --- a/ubuntu_install.sh +++ b/ubuntu_install.sh @@ -3,14 +3,14 @@ until sudo apt-get update do sleep 1 done -until sudo apt-get install -y python2.7 python-pip imagemagick ffmpeg xvfb dbus-x11 cgroup-tools traceroute software-properties-common psmisc libnss3-tools iproute2 net-tools +until sudo apt-get install -y python2.7 python-pip imagemagick ffmpeg xvfb dbus-x11 cgroup-tools traceroute software-properties-common psmisc libnss3-tools iproute2 net-tools git do sleep 1 done # Unavailable on Ubuntu 18.04 but needed on earlier releases sudo apt-get install -y python-software-properties sudo dbus-uuidgen --ensure -until sudo pip install dnspython monotonic pillow psutil requests ujson tornado wsaccel xvfbwrapper brotli marionette_driver +until sudo pip install dnspython monotonic pillow psutil requests git+git://github.com/marshallpierce/ultrajson.git@v1.35-gentoo-fixes tornado wsaccel xvfbwrapper brotli marionette_driver do sleep 1 done From e4aae2f82f78135310d32b65ce0cb31e2125daa7 Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 25 Dec 2019 09:32:16 -0800 Subject: [PATCH 08/16] More python 3 support (working now) --- internal/android_browser.py | 6 +-- internal/blackbox_android.py | 6 +-- internal/browsers.py | 2 +- internal/chrome_android.py | 4 +- internal/desktop_browser.py | 4 +- internal/devtools.py | 64 ++++++++++++++------------ internal/devtools_browser.py | 18 ++++---- internal/firefox.py | 14 +++--- internal/microsoft_edge.py | 24 +++++----- internal/optimization_checks.py | 2 +- internal/safari_ios.py | 23 ++++----- internal/support/devtools_parser.py | 36 +++++++-------- internal/support/firefox_log_parser.py | 2 +- 
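A note on the traffic_shaping.py hunks earlier in this series: every '{0:d}' format argument gains an int() (and every '{0:.2f}' a float()) because Python 3's true division turns expressions like in_bps / 1000 and rtt / 2 into floats, and the 'd' format code rejects floats outright. A minimal sketch with an illustrative value:

    in_bps = 5000000              # illustrative value
    kbps = in_bps / 1000          # int 5000 on Python 2, float 5000.0 on Python 3
    # '{0:d}Kbit/s'.format(kbps)  # Python 3: ValueError: Unknown format code 'd'
    print('{0:d}Kbit/s'.format(int(kbps)))   # explicit coercion works on both
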
internal/support/pcap-parser.py | 4 +- internal/support/trace_parser.py | 6 +-- internal/support/visualmetrics.py | 14 +++--- internal/traceroute.py | 2 +- internal/webpagetest.py | 6 +-- wptagent.py | 2 +- 19 files changed, 122 insertions(+), 117 deletions(-) diff --git a/internal/android_browser.py b/internal/android_browser.py index 6a79210d7..0ec0bbe8b 100644 --- a/internal/android_browser.py +++ b/internal/android_browser.py @@ -71,7 +71,7 @@ def prepare(self, job, task): self.config['package'] + '.md5') last_md5 = None if os.path.isfile(last_install_file): - with open(last_install_file, 'rb') as f_in: + with open(last_install_file, 'r') as f_in: last_md5 = f_in.read() if last_md5 is None or last_md5 != self.config['md5']: valid = False @@ -106,7 +106,7 @@ def prepare(self, job, task): self.adb.adb(['uninstall', self.config['package']]) logging.debug('Installing browser APK') self.adb.adb(['install', '-rg', tmp_file]) - with open(last_install_file, 'wb') as f_out: + with open(last_install_file, 'w') as f_out: f_out.write(md5) else: logging.error('Error downloading browser APK') @@ -291,7 +291,7 @@ def step_complete(self, task): path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json_page_data) def screenshot(self, task): diff --git a/internal/blackbox_android.py b/internal/blackbox_android.py index fb1762fa6..a89a295a5 100644 --- a/internal/blackbox_android.py +++ b/internal/blackbox_android.py @@ -88,7 +88,7 @@ def launch(self, job, task): remote_command_line = '/data/local/tmp/chrome-command-line' root_command_line = '/data/local/chrome-command-line' logging.debug(command_line) - with open(local_command_line, 'wb') as f_out: + with open(local_command_line, 'w') as f_out: f_out.write(command_line) if self.adb.adb(['push', local_command_line, remote_command_line]): os.remove(local_command_line) @@ -130,7 +130,7 @@ def run_task(self, task): local_intent = os.path.join(task['dir'], 'wpt_intent.sh') remote_intent = '/data/local/tmp/wpt_intent.sh' self.adb.shell(['rm', remote_intent]) - with open(local_intent, 'wb') as f_out: + with open(local_intent, 'w') as f_out: f_out.write(cmd) if self.adb.adb(['push', local_intent, remote_intent]): os.remove(local_intent) @@ -206,7 +206,7 @@ def prepare_opera_mini_settings(self): if settings != original_settings: local_settings = os.path.join(self.task['dir'], 'user_settings.xml') remote_temp = '/data/local/tmp/user_settings.xml' - with open(local_settings, 'wb') as f_out: + with open(local_settings, 'w') as f_out: f_out.write(settings) if self.adb.adb(['push', local_settings, remote_temp]): self.adb.su('chmod 666 /data/local/tmp/user_settings.xml') diff --git a/internal/browsers.py b/internal/browsers.py index a682c8926..39323cd8a 100644 --- a/internal/browsers.py +++ b/internal/browsers.py @@ -22,7 +22,7 @@ def __init__(self, options, browsers, adb, ios): self.ios = ios android_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'android_browsers.json') - with open(android_file, 'rb') as f_in: + with open(android_file, 'r') as f_in: self.android_browsers = {k.lower(): v for k, v in json.load(f_in).items()} def is_ready(self): diff --git a/internal/chrome_android.py b/internal/chrome_android.py index aa8e8a0a2..df45b1a37 100644 --- a/internal/chrome_android.py +++ b/internal/chrome_android.py @@ -152,7 +152,7 @@ def 
launch(self, job, task): remote_command_line = '/data/local/tmp/' + self.config['command_line_file'] root_command_line = '/data/local/' + self.config['command_line_file'] logging.debug(command_line) - with open(local_command_line, 'wb') as f_out: + with open(local_command_line, 'w') as f_out: f_out.write(command_line) if self.adb.adb(['push', local_command_line, remote_command_line]): os.remove(local_command_line) @@ -244,7 +244,7 @@ def write_prefs(self, prefs, file_base): if modified: local = os.path.join(self.task['dir'], 'pref.xml') remote = '/data/local/tmp/pref.xml' - with open(local, 'wb') as f_out: + with open(local, 'w') as f_out: f_out.write(out) if os.path.isfile(local): self.adb.shell(['rm', remote]) diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index 522d399a4..a88ddab45 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -537,7 +537,7 @@ def on_stop_recording(self, task): # record the CPU/Bandwidth/memory info if self.usage_queue is not None and not self.usage_queue.empty() and task is not None: file_path = os.path.join(task['dir'], task['prefix']) + '_progress.csv.gz' - gzfile = gzip.open(file_path, 'wb', 7) + gzfile = gzip.open(file_path, 'wt', 7) if gzfile: gzfile.write("Offset Time (ms),Bandwidth In (bps),CPU Utilization (%),Memory\n") while not self.usage_queue.empty(): @@ -654,7 +654,7 @@ def step_complete(self, task): path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json_page_data) def process_pcap(self): diff --git a/internal/devtools.py b/internal/devtools.py index 7013058bf..e29129744 100644 --- a/internal/devtools.py +++ b/internal/devtools.py @@ -16,6 +16,7 @@ if (sys.version_info > (3, 0)): from time import monotonic from urllib.parse import urlsplit # pylint: disable=import-error + unicode = str else: from monotonic import monotonic from urlparse import urlsplit # pylint: disable=import-error @@ -419,7 +420,7 @@ def stop_recording(self): summary[url]['{0}_bytes_used'.format(category)] = used_bytes summary[url]['{0}_percent_used'.format(category)] = used_pct path = self.path_base + '_coverage.json.gz' - with gzip.open(path, 'w', 7) as f_out: + with gzip.open(path, 'wt', 7) as f_out: json.dump(summary, f_out) self.send_command('CSS.disable', {}) self.send_command('DOM.disable', {}) @@ -547,33 +548,36 @@ def get_response_body(self, request_id): logging.warning('Missing response body for request %s', request_id) elif len(response['result']['body']): - self.body_fail_count = 0 - # Write the raw body to a file (all bodies) - if 'base64Encoded' in response['result'] and \ - response['result']['base64Encoded']: - body = base64.b64decode(response['result']['body']) - # Run a sanity check to make sure it isn't binary - if self.bodies_zip_file is not None and is_text: - try: - json.loads('"' + body.replace('"', '\\"') + '"') - except Exception: - is_text = False - else: - body = response['result']['body'].encode('utf-8') - is_text = True - # Add text bodies to the zip archive - store_body = self.all_bodies - if self.html_body and request_id == self.main_request: - store_body = True - if store_body and self.bodies_zip_file is not None and is_text: - self.body_index += 1 - name = '{0:03d}-{1}-body.txt'.format(self.body_index, request_id) - self.bodies_zip_file.writestr(name, body) - logging.debug('%s: Stored 
body in zip', request_id)
-                logging.debug('%s: Body length: %d', request_id, len(body))
-                self.response_bodies[request_id] = body
-                with open(body_file_path, 'wb') as body_file:
-                    body_file.write(body)
+            try:
+                self.body_fail_count = 0
+                # Write the raw body to a file (all bodies)
+                if 'base64Encoded' in response['result'] and \
+                        response['result']['base64Encoded']:
+                    body = base64.b64decode(response['result']['body'])
+                    # Run a sanity check to make sure it isn't binary
+                    if self.bodies_zip_file is not None and is_text:
+                        try:
+                            json.loads('"' + body.replace('"', '\\"') + '"')
+                        except Exception:
+                            is_text = False
+                else:
+                    body = unicode(response['result']['body'])
+                    is_text = True
+                # Add text bodies to the zip archive
+                store_body = self.all_bodies
+                if self.html_body and request_id == self.main_request:
+                    store_body = True
+                if store_body and self.bodies_zip_file is not None and is_text:
+                    self.body_index += 1
+                    name = '{0:03d}-{1}-body.txt'.format(self.body_index, request_id)
+                    self.bodies_zip_file.writestr(name, body)
+                    logging.debug('%s: Stored body in zip', request_id)
+                logging.debug('%s: Body length: %d', request_id, len(body))
+                self.response_bodies[request_id] = body
+                with open(body_file_path, 'w') as body_file:
+                    body_file.write(body)
+            except Exception:
+                logging.exception('Exception retrieving body')
         else:
             self.body_fail_count = 0
             self.response_bodies[request_id] = response['result']['body']
@@ -1079,7 +1083,7 @@ def log_dev_tools_event(self, msg):
         if self.task['log_data']:
             if self.dev_tools_file is None:
                 path = self.path_base + '_devtools.json.gz'
-                self.dev_tools_file = gzip.open(path, 'wb', 7)
+                self.dev_tools_file = gzip.open(path, 'wt', 7)
                 self.dev_tools_file.write("[{}")
             if self.dev_tools_file is not None:
                 self.dev_tools_file.write(",\n")
@@ -1259,7 +1263,7 @@ def process_trace_event(self, msg):
         if 'params' in msg and 'value' in msg['params'] and len(msg['params']['value']):
             if self.trace_file is None and self.keep_timeline:
                 self.trace_file = gzip.open(self.path_base + '_trace.json.gz',
-                                            'wb', compresslevel=7)
+                                            'wt', compresslevel=7)
                 self.trace_file.write('{"traceEvents":[{}')
             if self.trace_parser is None:
                 from internal.support.trace_parser import Trace
diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py
index eb792cbae..f107cfeb8 100644
--- a/internal/devtools_browser.py
+++ b/internal/devtools_browser.py
@@ -316,7 +316,7 @@ def run_js_file(self, file_name):
         script = None
         script_file_path = os.path.join(self.script_dir, file_name)
         if os.path.isfile(script_file_path):
-            with open(script_file_path, 'rb') as script_file:
+            with open(script_file_path, 'r') as script_file:
                 script = script_file.read()
         if script is not None:
             ret = self.devtools.execute_js(script)
@@ -327,7 +327,7 @@ def collect_browser_metrics(self, task):
         user_timing = self.run_js_file('user_timing.js')
         if user_timing is not None:
             path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz')
-            with gzip.open(path, 'wb', 7) as outfile:
+            with gzip.open(path, 'wt', 7) as outfile:
                 outfile.write(json.dumps(user_timing))
         page_data = self.run_js_file('page_data.js')
         if page_data is not None:
@@ -340,21 +340,21 @@ def collect_browser_metrics(self, task):
                          '};try{wptCustomMetric();}catch(e){};'
                 custom_metrics[name] = self.devtools.execute_js(script)
             path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz')
-            with gzip.open(path, 'wb', 7) as outfile:
+            with gzip.open(path, 'wt', 7) as outfile:
                 outfile.write(json.dumps(custom_metrics))
         if 'heroElementTimes' in self.job
and self.job['heroElementTimes']: hero_elements = None custom_hero_selectors = {} if 'heroElements' in self.job: custom_hero_selectors = self.job['heroElements'] - with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: + with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file: hero_elements_script = script_file.read() script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' hero_elements = self.devtools.execute_js(script) if hero_elements is not None: logging.debug('Hero Elements: %s', json.dumps(hero_elements)) path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(hero_elements)) @@ -530,12 +530,12 @@ def run_lighthouse_test(self, task): lh_trace_src = os.path.join(task['dir'], 'lighthouse-0.trace.json') if os.path.isfile(lh_trace_src): # read the JSON in and re-write it line by line to match the other traces - with open(lh_trace_src, 'rb') as f_in: + with open(lh_trace_src, 'r') as f_in: trace = json.load(f_in) if trace is not None and 'traceEvents' in trace: lighthouse_trace = os.path.join(task['dir'], 'lighthouse_trace.json.gz') - with gzip.open(lighthouse_trace, 'wb', 7) as f_out: + with gzip.open(lighthouse_trace, 'wt', 7) as f_out: f_out.write('{"traceEvents":[{}') for trace_event in trace['traceEvents']: f_out.write(",\n") @@ -552,7 +552,7 @@ def run_lighthouse_test(self, task): pass if os.path.isfile(json_file): lh_report = None - with open(json_file, 'rb') as f_in: + with open(json_file, 'r') as f_in: lh_report = json.load(f_in) with open(json_file, 'rb') as f_in: @@ -613,7 +613,7 @@ def run_lighthouse_test(self, task): elif 'numericValue' in audit: audits[name] = audit['numericValue'] audits_gzip = os.path.join(task['dir'], 'lighthouse_audits.json.gz') - with gzip.open(audits_gzip, 'w', 7) as f_out: + with gzip.open(audits_gzip, 'wt', 7) as f_out: json.dump(audits, f_out) # Compress the HTML lighthouse report if os.path.isfile(html_file): diff --git a/internal/firefox.py b/internal/firefox.py index 36d7a4595..3e5869281 100644 --- a/internal/firefox.py +++ b/internal/firefox.py @@ -397,7 +397,7 @@ def run_js_file(self, file_name): script = None script_file_path = os.path.join(self.script_dir, file_name) if os.path.isfile(script_file_path): - with open(script_file_path, 'rb') as script_file: + with open(script_file_path, 'r') as script_file: script = script_file.read() if script is not None: try: @@ -414,7 +414,7 @@ def collect_browser_metrics(self, task): user_timing = self.run_js_file('user_timing.js') if user_timing is not None: path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(user_timing)) logging.debug("Collecting page-level metrics") page_data = self.run_js_file('page_data.js') @@ -434,7 +434,7 @@ def collect_browser_metrics(self, task): except Exception: pass path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and self.job['heroElementTimes']: hero_elements = None @@ -442,13 +442,13 @@ def collect_browser_metrics(self, task): if 'heroElements' in self.job: custom_hero_selectors = self.job['heroElements'] logging.debug('Collecting hero element positions') 
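The recurring 'wb' to 'wt' change in these hunks is the heart of this commit: json.dumps returns str, and on Python 3 a gzip file opened in 'wb' (or plain 'w') mode only accepts bytes, so every JSON write raised a TypeError. A minimal sketch of the failure and the fix (the path is illustrative):

    import gzip
    import json

    data = {'firstPaint': 1234}
    # Python 3: gzip.open(path, 'w') returns a binary file object, and
    # json.dump(data, f) raises TypeError because json produces str.
    with gzip.open('page_data.json.gz', 'wt', 7) as f_out:
        json.dump(data, f_out)   # 'wt' wraps the stream in text mode
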
- with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: + with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file: hero_elements_script = script_file.read() script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' hero_elements = self.execute_js(script) if hero_elements is not None: path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(hero_elements)) def process_message(self, message): @@ -640,7 +640,7 @@ def on_stop_recording(self, task): interactive = self.execute_js('window.wrappedJSObject.wptagentGetInteractivePeriods();') if interactive is not None and len(interactive): interactive_file = os.path.join(task['dir'], task['prefix'] + '_interactive.json.gz') - with gzip.open(interactive_file, 'wb', 7) as f_out: + with gzip.open(interactive_file, 'wt', 7) as f_out: f_out.write(interactive) # Close the browser if we are done testing (helps flush logs) if not len(task['script']): @@ -840,7 +840,7 @@ def process_requests(self, request_timings, task): result['requests'] = self.merge_requests(request_timings) result['pageData'] = self.calculate_page_stats(result['requests']) devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'w', 7) as f_out: + with gzip.open(devtools_file, 'wt', 7) as f_out: json.dump(result, f_out) def get_empty_request(self, request_id, url): diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py index d73413ef6..2b658c246 100644 --- a/internal/microsoft_edge.py +++ b/internal/microsoft_edge.py @@ -80,7 +80,7 @@ def prepare(self, job, task): if not os.path.isdir(self.bodies_path): os.makedirs(self.bodies_path) try: - import _winreg + import _winreg # pylint: disable=import-error registry_key = _winreg.CreateKeyEx(_winreg.HKEY_CURRENT_USER, self.edge_registry_path, 0, _winreg.KEY_READ | _winreg.KEY_WRITE) self.edge_registry_key_value = _winreg.QueryValueEx(registry_key, "ClearBrowsingHistoryOnExit")[0] if not task['cached']: @@ -112,7 +112,7 @@ def prepare(self, job, task): def get_driver(self, task): """Get the webdriver instance""" - from selenium import webdriver + from selenium import webdriver # pylint: disable=import-error from .os_util import run_elevated path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'support', 'edge') @@ -211,7 +211,7 @@ def stop(self, job, task): except Exception: pass try: - import _winreg + import _winreg # pylint: disable=import-error registry_key = _winreg.CreateKeyEx(_winreg.HKEY_CURRENT_USER, self.edge_registry_path, 0, _winreg.KEY_WRITE) _winreg.SetValueEx(registry_key, "ClearBrowsingHistoryOnExit", 0, _winreg.REG_DWORD, self.edge_registry_key_value) _winreg.CloseKey(registry_key) @@ -665,7 +665,7 @@ def run_js_file(self, file_name): script = None script_file_path = os.path.join(self.script_dir, file_name) if os.path.isfile(script_file_path): - with open(script_file_path, 'rb') as script_file: + with open(script_file_path, 'r') as script_file: script = script_file.read() if script is not None: try: @@ -687,7 +687,7 @@ def collect_browser_metrics(self, task): user_timing = self.run_js_file('user_timing.js') if user_timing is not None: path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(user_timing)) 
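One caveat on the _winreg hunks above: the pylint pragma only quiets the linter; the module is named _winreg on Python 2 and winreg on Python 3, so this code path still assumes a Python 2 interpreter. A version-tolerant import would look roughly like this (the subkey path is illustrative, not the actual value of self.edge_registry_path):

    try:
        import winreg                  # Python 3 name
    except ImportError:
        import _winreg as winreg       # Python 2 name

    # Read a value the same way the Edge code reads ClearBrowsingHistoryOnExit
    key = winreg.CreateKeyEx(winreg.HKEY_CURRENT_USER,
                             r'Software\Microsoft\Example', 0, winreg.KEY_READ)
    value = winreg.QueryValueEx(key, 'ClearBrowsingHistoryOnExit')[0]
    winreg.CloseKey(key)
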
logging.debug("Collecting page-level metrics") page_data = self.run_js_file('page_data.js') @@ -708,7 +708,7 @@ def collect_browser_metrics(self, task): except Exception: pass path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and self.job['heroElementTimes']: hero_elements = None @@ -716,13 +716,13 @@ def collect_browser_metrics(self, task): if 'heroElements' in self.job: custom_hero_selectors = self.job['heroElements'] logging.debug('Collecting hero element positions') - with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: + with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file: hero_elements_script = script_file.read() script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' hero_elements = self.execute_js(script) if hero_elements is not None: path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(hero_elements)) # Wait for the interactive periods to be written if self.supports_interactive: @@ -736,7 +736,7 @@ def collect_browser_metrics(self, task): if interactive is not None and len(interactive): interactive_file = os.path.join(task['dir'], task['prefix'] + '_interactive.json.gz') - with gzip.open(interactive_file, 'wb', 7) as f_out: + with gzip.open(interactive_file, 'wt', 7) as f_out: f_out.write(interactive) def prepare_task(self, task): @@ -873,7 +873,7 @@ def process_command(self, command): except Exception: pass try: - import win32inet + import win32inet # pylint: disable=import-error cookie_string = cookie if cookie.find('xpires') == -1: expires = datetime.utcnow() + timedelta(days=30) @@ -933,7 +933,7 @@ def process_requests(self, task): result['pageData'] = self.calculate_page_stats(result['requests']) self.check_optimization(task, result['requests'], result['pageData']) devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'w', 7) as f_out: + with gzip.open(devtools_file, 'wt', 7) as f_out: json.dump(result, f_out) def process_sockets(self): @@ -1225,7 +1225,7 @@ def check_optimization(self, task, requests, page_data): optimization_file = os.path.join(self.task['dir'], self.task['prefix']) + \ '_optimization.json.gz' if os.path.isfile(optimization_file): - with gzip.open(optimization_file, 'rb') as f_in: + with gzip.open(optimization_file, 'r') as f_in: optimization_results = json.load(f_in) page_data['score_cache'] = -1 page_data['score_cdn'] = -1 diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index d05c797b9..b7fc905fd 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -375,7 +375,7 @@ def join(self): # Save the results if self.results: path = os.path.join(self.task['dir'], self.task['prefix']) + '_optimization.json.gz' - gz_file = gzip.open(path, 'wb', 7) + gz_file = gzip.open(path, 'wt', 7) if gz_file: gz_file.write(json.dumps(self.results)) gz_file.close() diff --git a/internal/safari_ios.py b/internal/safari_ios.py index 1c5b46ead..50d784ffa 100644 --- a/internal/safari_ios.py +++ b/internal/safari_ios.py @@ -18,6 +18,7 @@ if (sys.version_info > (3, 0)): from time import monotonic from urllib.parse import urlsplit # pylint: disable=import-error + 
unicode = str else: from monotonic import monotonic from urlparse import urlsplit # pylint: disable=import-error @@ -316,7 +317,7 @@ def run_js_file(self, file_name): script = None script_file_path = os.path.join(self.script_dir, file_name) if os.path.isfile(script_file_path): - with open(script_file_path, 'rb') as script_file: + with open(script_file_path, 'r') as script_file: script = script_file.read() if script is not None: ret = self.ios.execute_js(script) @@ -356,7 +357,7 @@ def collect_browser_metrics(self, task): logging.debug(user_timing) if user_timing is not None and self.path_base is not None: path = self.path_base + '_timed_events.json.gz' - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(user_timing)) logging.debug("Collecting page-level metrics") page_data = self.run_js_file('page_data.js') @@ -378,7 +379,7 @@ def collect_browser_metrics(self, task): pass if self.path_base is not None: path = self.path_base + '_metrics.json.gz' - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and self.job['heroElementTimes']: hero_elements = None @@ -386,13 +387,13 @@ def collect_browser_metrics(self, task): if 'heroElements' in self.job: custom_hero_selectors = self.job['heroElements'] logging.debug('Collecting hero element positions') - with open(os.path.join(self.script_dir, 'hero_elements.js'), 'rb') as script_file: + with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file: hero_elements_script = script_file.read() script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')' hero_elements = self.ios.execute_js(script) if hero_elements is not None: path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json.dumps(hero_elements)) def process_message(self, msg, target_id=None): @@ -742,7 +743,7 @@ def get_response_body(self, request_id, original_id): response['result']['base64Encoded']: body = base64.b64decode(response['result']['body']) else: - body = response['result']['body'].encode('utf-8') + body = unicode(response['result']['body'].encode('utf-8')) is_text = True # Add text bodies to the zip archive if self.bodies_zip_file is not None and is_text: @@ -752,7 +753,7 @@ def get_response_body(self, request_id, original_id): logging.debug('%s: Stored body in zip', request_id) logging.debug('%s: Body length: %d', request_id, len(body)) self.response_bodies[request_id] = body - with open(body_file_path, 'wb') as body_file: + with open(body_file_path, 'w') as body_file: body_file.write(body) else: self.body_fail_count = 0 @@ -843,7 +844,7 @@ def on_start_recording(self, task): if 'timeline' in self.job and self.job['timeline']: if self.path_base is not None: timeline_path = self.path_base + '_devtools.json.gz' - self.timeline = gzip.open(timeline_path, 'w', 7) + self.timeline = gzip.open(timeline_path, 'wt', 7) if self.timeline: self.timeline.write('[\n') from internal.support.trace_parser import Trace @@ -963,7 +964,7 @@ def on_start_processing(self, task): # Save the console logs if self.console_log and self.path_base is not None: log_file = self.path_base + '_console_log.json.gz' - with gzip.open(log_file, 'w', 7) as f_out: + with gzip.open(log_file, 'wt', 7) as f_out: json.dump(self.console_log, f_out) # Process the timeline data if self.trace_parser 
is not None and self.path_base is not None: @@ -1000,7 +1001,7 @@ def wait_for_processing(self, task): self.wpt_result['requests'], opt) if self.path_base is not None: devtools_file = self.path_base + '_devtools_requests.json.gz' - with gzip.open(devtools_file, 'w', 7) as f_out: + with gzip.open(devtools_file, 'wt', 7) as f_out: json.dump(self.wpt_result, f_out) def step_complete(self, task): @@ -1019,7 +1020,7 @@ def step_complete(self, task): path = self.path_base + '_page_data.json.gz' json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) - with gzip.open(path, 'wb', 7) as outfile: + with gzip.open(path, 'wt', 7) as outfile: outfile.write(json_page_data) def send_command(self, method, params, wait=False, timeout=10, target_id=None): diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py index d8569ae13..957f24a03 100644 --- a/internal/support/devtools_parser.py +++ b/internal/support/devtools_parser.py @@ -104,7 +104,7 @@ def write(self): try: _, ext = os.path.splitext(self.out_file) if ext.lower() == '.gz': - with gzip.open(self.out_file, 'w') as f_out: + with gzip.open(self.out_file, 'wt') as f_out: json.dump(self.result, f_out) else: with open(self.out_file, 'w') as f_out: @@ -119,7 +119,7 @@ def extract_net_requests(self): page_data = {'endTime': 0} _, ext = os.path.splitext(self.devtools_file) if ext.lower() == '.gz': - f_in = gzip.open(self.devtools_file, 'rb') + f_in = gzip.open(self.devtools_file, 'rt') else: f_in = open(self.devtools_file, 'r') raw_events = json.load(f_in) @@ -504,7 +504,7 @@ def process_requests(self, raw_requests, raw_page_data): # Add the socket timing (always assigned to the first request on a connection) if request['socket'] != -1 and request['socket'] not in connections: connections[request['socket']] = timing - if 'dnsStart' in timing and 'dnsStart' >= 0: + if 'dnsStart' in timing and timing['dnsStart'] >= 0: dns_key = request['host'] if dns_key not in dns_times: dns_times[dns_key] = True @@ -564,7 +564,7 @@ def process_requests(self, raw_requests, raw_page_data): request['headers'] = {'request': [], 'response': []} if 'response' in raw_request and 'requestHeadersText' in raw_request['response']: for line in raw_request['response']['requestHeadersText'].splitlines(): - line = line.encode('utf-8').strip() + line = unicode(line.encode('utf-8')).strip() if len(line): request['headers']['request'].append(line) elif 'response' in raw_request and 'requestHeaders' in raw_request['response']: @@ -572,8 +572,8 @@ def process_requests(self, raw_requests, raw_page_data): for value in raw_request['response']['requestHeaders'][key].splitlines(): try: request['headers']['request'].append(\ - u'{0}: {1}'.format(key.encode('utf-8'), - value.encode('utf-8').strip())) + u'{0}: {1}'.format(unicode(key.encode('utf-8')), + unicode(value.encode('utf-8')).strip())) except Exception: pass elif 'headers' in raw_request: @@ -581,13 +581,13 @@ def process_requests(self, raw_requests, raw_page_data): for value in raw_request['headers'][key].splitlines(): try: request['headers']['request'].append(\ - u'{0}: {1}'.format(key.encode('utf-8'), - value.encode('utf-8').strip())) + u'{0}: {1}'.format(unicode(key.encode('utf-8')), + unicode(value.encode('utf-8')).strip())) except Exception: pass if 'response' in raw_request and 'headersText' in raw_request['response']: for line in raw_request['response']['headersText'].splitlines(): - line = line.encode('utf-8').strip() + line = unicode(line.encode('utf-8')).strip() if 
len(line):
                         request['headers']['response'].append(line)
             elif 'response' in raw_request and 'headers' in raw_request['response']:
@@ -595,11 +595,11 @@
                 for value in raw_request['response']['headers'][key].splitlines():
                     try:
                         request['headers']['response'].append(\
-                            u'{0}: {1}'.format(key.encode('utf-8'),
-                                               value.encode('utf-8').strip()))
+                            u'{0}: {1}'.format(unicode(key.encode('utf-8')),
+                                               unicode(value.encode('utf-8')).strip()))
                     except Exception:
                         pass
-        request['bytesOut'] = len("\r\n".join(request['headers']['request']))
+        request['bytesOut'] = len("\r\n".join([str(h) for h in request['headers']['request']]))
         request['score_cache'] = -1
         request['score_cdn'] = -1
         request['score_gzip'] = -1
@@ -703,7 +703,7 @@ def process_netlog_requests(self):
         if self.netlog_requests_file is not None and os.path.isfile(self.netlog_requests_file):
             _, ext = os.path.splitext(self.netlog_requests_file)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.netlog_requests_file, 'rb')
+                f_in = gzip.open(self.netlog_requests_file, 'rt')
             else:
                 f_in = open(self.netlog_requests_file, 'r')
             netlog = json.load(f_in)
@@ -995,7 +995,7 @@ def process_user_timing(self):
         if self.user_timing_file is not None and os.path.isfile(self.user_timing_file):
             _, ext = os.path.splitext(self.user_timing_file)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.user_timing_file, 'rb')
+                f_in = gzip.open(self.user_timing_file, 'rt')
             else:
                 f_in = open(self.user_timing_file, 'r')
             user_timing_events = json.load(f_in)
@@ -1055,7 +1055,7 @@ def process_optimization_results(self):
         if self.optimization is not None and os.path.isfile(self.optimization):
             _, ext = os.path.splitext(self.optimization)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.optimization, 'rb')
+                f_in = gzip.open(self.optimization, 'rt')
             else:
                 f_in = open(self.optimization, 'r')
             optimization_results = json.load(f_in)
@@ -1167,7 +1167,7 @@ def process_code_coverage(self):
         if self.coverage is not None and os.path.isfile(self.coverage):
             _, ext = os.path.splitext(self.coverage)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.coverage, 'rb')
+                f_in = gzip.open(self.coverage, 'rt')
             else:
                 f_in = open(self.coverage, 'r')
             coverage = json.load(f_in)
@@ -1216,7 +1216,7 @@ def process_cpu_times(self):
         if end > 0 and self.cpu_times is not None and os.path.isfile(self.cpu_times):
             _, ext = os.path.splitext(self.cpu_times)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.cpu_times, 'rb')
+                f_in = gzip.open(self.cpu_times, 'rt')
             else:
                 f_in = open(self.cpu_times, 'r')
             cpu = json.load(f_in)
@@ -1262,7 +1262,7 @@ def process_v8_stats(self):
         if os.path.isfile(self.v8_stats):
             _, ext = os.path.splitext(self.v8_stats)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.v8_stats, 'rb')
+                f_in = gzip.open(self.v8_stats, 'rt')
             else:
                 f_in = open(self.v8_stats, 'r')
             stats = json.load(f_in)
diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py
index 2d65fd5a4..64a6448a1 100644
--- a/internal/support/firefox_log_parser.py
+++ b/internal/support/firefox_log_parser.py
@@ -128,7 +128,7 @@ def process_log_file(self, path):
         _, ext = os.path.splitext(path)
         line_count = 0
         if ext.lower() == '.gz':
-            f_in = gzip.open(path, 'rb')
+            f_in = gzip.open(path, 'rt')
         else:
             f_in = open(path, 'r')
         for line in f_in:
diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py
index c3415fd51..01fb04e23 100644
--- a/internal/support/pcap-parser.py
+++ b/internal/support/pcap-parser.py
@@ -44,7 +44,7 @@ def
SaveStats(self, out): file_name, ext = os.path.splitext(out) if ext.lower() == '.gz': - f = gzip.open(out, 'w') + f = gzip.open(out, 'wt') else: f = open(out, 'w') try: @@ -59,7 +59,7 @@ def SaveStats(self, out): def SaveDetails(self, out): file_name, ext = os.path.splitext(out) if ext.lower() == '.gz': - f = gzip.open(out, 'w') + f = gzip.open(out, 'wt') else: f = open(out, 'w') try: diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py index 953b6b963..21bab1354 100644 --- a/internal/support/trace_parser.py +++ b/internal/support/trace_parser.py @@ -71,7 +71,7 @@ def write_json(self, out_file, json_data): try: _, ext = os.path.splitext(out_file) if ext.lower() == '.gz': - with gzip.open(out_file, 'w') as f: + with gzip.open(out_file, 'wt') as f: json.dump(json_data, f) else: with open(out_file, 'w') as f: @@ -123,7 +123,7 @@ def Process(self, trace): try: _, ext = os.path.splitext(trace) if ext.lower() == '.gz': - f = gzip.open(trace, 'rb') + f = gzip.open(trace, 'rt') else: f = open(trace, 'r') for line in f: @@ -152,7 +152,7 @@ def ProcessTimeline(self, timeline): try: _, ext = os.path.splitext(timeline) if ext.lower() == '.gz': - f = gzip.open(timeline, 'rb') + f = gzip.open(timeline, 'rt') else: f = open(timeline, 'r') events = json.load(f) diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index af3baaa78..4ef25f4da 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -958,7 +958,7 @@ def get_timeline_offset(timeline_file): try: file_name, ext = os.path.splitext(timeline_file) if ext.lower() == '.gz': - f = gzip.open(timeline_file, 'rb') + f = gzip.open(timeline_file, 'rt') else: f = open(timeline_file, 'r') timeline = json.load(f) @@ -1092,7 +1092,7 @@ def calculate_histograms(directory, histograms_file, force): 'histogram': histogram}) if os.path.isfile(histograms_file): os.remove(histograms_file) - f = gzip.open(histograms_file, 'w') + f = gzip.open(histograms_file, 'wt') json.dump(histograms, f) f.close() else: @@ -1325,7 +1325,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog if progress and progress_file is not None: file_name, ext = os.path.splitext(progress_file) if ext.lower() == '.gz': - f = gzip.open(progress_file, 'w', 7) + f = gzip.open(progress_file, 'wt', 7) else: f = open(progress_file, 'w') json.dump(progress, f) @@ -1345,7 +1345,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog if hero_elements_file is not None and os.path.isfile(hero_elements_file): logging.debug('Calculating hero element times') hero_data = None - with gzip.open(hero_elements_file, 'rb') as hero_f_in: + with gzip.open(hero_elements_file, 'rt') as hero_f_in: try: hero_data = json.load(hero_f_in) except Exception as e: @@ -1367,7 +1367,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog hero_data['timings'] = hero_timings metrics += hero_timings - with gzip.open(hero_elements_file, 'w', 7) as hero_f_out: + with gzip.open(hero_elements_file, 'wt', 7) as hero_f_out: json.dump(hero_data, hero_f_out) else: logging.warn('Hero elements file is not valid: ' + str(hero_elements_file)) @@ -1480,7 +1480,7 @@ def calculate_speed_index(progress): def calculate_perceptual_speed_index(progress, directory): - from ssim import compute_ssim + from ssim import compute_ssim # pylint: disable=import-error x = len(progress) dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), directory) first_paint_frame = 
os.path.join( @@ -1632,7 +1632,7 @@ def check_config(): ok = False try: - from ssim import compute_ssim + from ssim import compute_ssim # pylint: disable=import-error print('SSIM: OK') except BaseException: diff --git a/internal/traceroute.py b/internal/traceroute.py index 9f7f8883c..acb41edec 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -41,7 +41,7 @@ def run_task(self, task): last_hop, results = self.unix_traceroute(hostname) if last_hop > 0 and results is not None and len(results): out_file = os.path.join(task['dir'], task['prefix']) + '_traceroute.txt.gz' - with gzip.open(out_file, 'wb', 7) as f_out: + with gzip.open(out_file, 'wt', 7) as f_out: f_out.write('Hop,IP,ms,FQDN\n') if 0 in results: f_out.write('-1,{0},0,{1}\n'.format(results[0]['addr'], hostname)) diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 8fee61b73..5cf3a9144 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -144,7 +144,7 @@ def __init__(self, options, workdir): self.margins = {} margins_file = os.path.join(self.persistent_dir, 'margins.json') if os.path.isfile(margins_file): - with open(margins_file, 'rb') as f_in: + with open(margins_file, 'r') as f_in: self.margins = json.load(f_in) # Override the public webpagetest server automatically if self.url is not None and self.url.find('www.webpagetest.org') >= 0: @@ -950,7 +950,7 @@ def get_bodies(self, task): path = os.path.join(task['dir'], 'bodies') requests = [] devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'rb') as f_in: + with gzip.open(devtools_file, 'rt') as f_in: requests = json.load(f_in) count = 0 bodies_zip = path_base + '_bodies.zip' @@ -1029,7 +1029,7 @@ def get_bodies(self, task): # check to see if it is text or utf-8 data try: data = '' - with open(task['file'], 'rb') as f_in: + with open(task['file'], 'r') as f_in: data = f_in.read() json.loads('"' + data.replace('"', '\\"') + '"') body_index += 1 diff --git a/wptagent.py b/wptagent.py index f9738e983..1725086cd 100644 --- a/wptagent.py +++ b/wptagent.py @@ -180,7 +180,7 @@ def run_single_test(self): pass if self.task['lighthouse_log']: log_file = os.path.join(self.task['dir'], 'lighthouse.log.gz') - with gzip.open(log_file, 'wb', 7) as f_out: + with gzip.open(log_file, 'wt', 7) as f_out: f_out.write(self.task['lighthouse_log']) else: browser.run_task(self.task) From e96f61ca0ba704f44cce5a058da86b09166b937c Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Thu, 26 Dec 2019 10:34:58 -0500 Subject: [PATCH 09/16] Cross-platform compatibility fixes with the python 3 changes (particularly for Windows) --- internal/android_browser.py | 4 +++- internal/desktop_browser.py | 6 ++++-- internal/devtools.py | 8 +++++--- internal/devtools_browser.py | 12 +++++++----- internal/firefox.py | 12 +++++++----- internal/microsoft_edge.py | 12 +++++++----- internal/optimization_checks.py | 4 +++- internal/safari_ios.py | 16 +++++++++------- internal/support/devtools_parser.py | 20 ++++++++++++-------- internal/support/firefox_log_parser.py | 2 +- internal/support/pcap-parser.py | 9 +++++++-- internal/support/trace_parser.py | 10 +++++++--- internal/support/visualmetrics.py | 16 +++++++++++----- internal/traceroute.py | 5 +++-- internal/webpagetest.py | 4 +++- wptagent.py | 6 +++++- 16 files changed, 94 insertions(+), 52 deletions(-) diff --git a/internal/android_browser.py b/internal/android_browser.py index 0ec0bbe8b..1395fd135 100644 --- a/internal/android_browser.py +++ 
b/internal/android_browser.py @@ -13,8 +13,10 @@ import time if (sys.version_info > (3, 0)): from time import monotonic + GZIP_TEXT = 'wt' else: from monotonic import monotonic + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -291,7 +293,7 @@ def step_complete(self, task): path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json_page_data) def screenshot(self, task): diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py index a88ddab45..defe56091 100644 --- a/internal/desktop_browser.py +++ b/internal/desktop_browser.py @@ -17,8 +17,10 @@ import time if (sys.version_info > (3, 0)): from time import monotonic + GZIP_TEXT = 'wt' else: from monotonic import monotonic + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -537,7 +539,7 @@ def on_stop_recording(self, task): # record the CPU/Bandwidth/memory info if self.usage_queue is not None and not self.usage_queue.empty() and task is not None: file_path = os.path.join(task['dir'], task['prefix']) + '_progress.csv.gz' - gzfile = gzip.open(file_path, 'wt', 7) + gzfile = gzip.open(file_path, GZIP_TEXT, 7) if gzfile: gzfile.write("Offset Time (ms),Bandwidth In (bps),CPU Utilization (%),Memory\n") while not self.usage_queue.empty(): @@ -654,7 +656,7 @@ def step_complete(self, task): path = os.path.join(task['dir'], task['prefix'] + '_page_data.json.gz') json_page_data = json.dumps(task['page_data']) logging.debug('Page Data: %s', json_page_data) - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json_page_data) def process_pcap(self): diff --git a/internal/devtools.py b/internal/devtools.py index e29129744..9e7676ba1 100644 --- a/internal/devtools.py +++ b/internal/devtools.py @@ -17,9 +17,11 @@ from time import monotonic from urllib.parse import urlsplit # pylint: disable=import-error unicode = str + GZIP_TEXT = 'wt' else: from monotonic import monotonic from urlparse import urlsplit # pylint: disable=import-error + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -420,7 +422,7 @@ def stop_recording(self): summary[url]['{0}_bytes_used'.format(category)] = used_bytes summary[url]['{0}_percent_used'.format(category)] = used_pct path = self.path_base + '_coverage.json.gz' - with gzip.open(path, 'wt', 7) as f_out: + with gzip.open(path, GZIP_TEXT, 7) as f_out: json.dump(summary, f_out) self.send_command('CSS.disable', {}) self.send_command('DOM.disable', {}) @@ -1083,7 +1085,7 @@ def log_dev_tools_event(self, msg): if self.task['log_data']: if self.dev_tools_file is None: path = self.path_base + '_devtools.json.gz' - self.dev_tools_file = gzip.open(path, 'wt', 7) + self.dev_tools_file = gzip.open(path, GZIP_TEXT, 7) self.dev_tools_file.write("[{}") if self.dev_tools_file is not None: self.dev_tools_file.write(",\n") @@ -1263,7 +1265,7 @@ def process_trace_event(self, msg): if 'params' in msg and 'value' in msg['params'] and len(msg['params']['value']): if self.trace_file is None and self.keep_timeline: self.trace_file = gzip.open(self.path_base + '_trace.json.gz', - 'wt', compresslevel=7) + GZIP_TEXT, compresslevel=7) self.trace_file.write('{"traceEvents":[{}') if self.trace_parser is None: from internal.support.trace_parser import Trace diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py 
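The pattern this commit repeats across modules is worth spelling out once: instead of scattering version checks around each write, every file defines a GZIP_TEXT mode constant at import time and passes it to gzip.open. A condensed sketch of that convention:

    import gzip
    import json
    import sys

    if sys.version_info > (3, 0):
        GZIP_TEXT = 'wt'   # Python 3: request a text-mode wrapper explicitly
    else:
        GZIP_TEXT = 'w'    # Python 2: plain write mode already accepts str

    def save_json_gz(path, data):
        # Writes gzipped JSON identically on both interpreters
        with gzip.open(path, GZIP_TEXT, 7) as f_out:
            json.dump(data, f_out)
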
index f107cfeb8..f9bb60a9b 100644 --- a/internal/devtools_browser.py +++ b/internal/devtools_browser.py @@ -16,8 +16,10 @@ if (sys.version_info > (3, 0)): from time import monotonic unicode = str + GZIP_TEXT = 'wt' else: from monotonic import monotonic + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -327,7 +329,7 @@ def collect_browser_metrics(self, task): user_timing = self.run_js_file('user_timing.js') if user_timing is not None: path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(user_timing)) page_data = self.run_js_file('page_data.js') if page_data is not None: @@ -340,7 +342,7 @@ def collect_browser_metrics(self, task): '};try{wptCustomMetric();}catch(e){};' custom_metrics[name] = self.devtools.execute_js(script) path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and self.job['heroElementTimes']: hero_elements = None @@ -354,7 +356,7 @@ def collect_browser_metrics(self, task): if hero_elements is not None: logging.debug('Hero Elements: %s', json.dumps(hero_elements)) path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(hero_elements)) @@ -535,7 +537,7 @@ def run_lighthouse_test(self, task): if trace is not None and 'traceEvents' in trace: lighthouse_trace = os.path.join(task['dir'], 'lighthouse_trace.json.gz') - with gzip.open(lighthouse_trace, 'wt', 7) as f_out: + with gzip.open(lighthouse_trace, GZIP_TEXT, 7) as f_out: f_out.write('{"traceEvents":[{}') for trace_event in trace['traceEvents']: f_out.write(",\n") @@ -613,7 +615,7 @@ def run_lighthouse_test(self, task): elif 'numericValue' in audit: audits[name] = audit['numericValue'] audits_gzip = os.path.join(task['dir'], 'lighthouse_audits.json.gz') - with gzip.open(audits_gzip, 'wt', 7) as f_out: + with gzip.open(audits_gzip, GZIP_TEXT, 7) as f_out: json.dump(audits, f_out) # Compress the HTML lighthouse report if os.path.isfile(html_file): diff --git a/internal/firefox.py b/internal/firefox.py index 3e5869281..d97f7b111 100644 --- a/internal/firefox.py +++ b/internal/firefox.py @@ -17,9 +17,11 @@ if (sys.version_info > (3, 0)): from time import monotonic from urllib.parse import urlsplit # pylint: disable=import-error + GZIP_TEXT = 'wt' else: from monotonic import monotonic from urlparse import urlsplit # pylint: disable=import-error + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -414,7 +416,7 @@ def collect_browser_metrics(self, task): user_timing = self.run_js_file('user_timing.js') if user_timing is not None: path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(user_timing)) logging.debug("Collecting page-level metrics") page_data = self.run_js_file('page_data.js') @@ -434,7 +436,7 @@ def collect_browser_metrics(self, task): except Exception: pass path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and 
self.job['heroElementTimes']: hero_elements = None @@ -448,7 +450,7 @@ def collect_browser_metrics(self, task): hero_elements = self.execute_js(script) if hero_elements is not None: path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(hero_elements)) def process_message(self, message): @@ -640,7 +642,7 @@ def on_stop_recording(self, task): interactive = self.execute_js('window.wrappedJSObject.wptagentGetInteractivePeriods();') if interactive is not None and len(interactive): interactive_file = os.path.join(task['dir'], task['prefix'] + '_interactive.json.gz') - with gzip.open(interactive_file, 'wt', 7) as f_out: + with gzip.open(interactive_file, GZIP_TEXT, 7) as f_out: f_out.write(interactive) # Close the browser if we are done testing (helps flush logs) if not len(task['script']): @@ -840,7 +842,7 @@ def process_requests(self, request_timings, task): result['requests'] = self.merge_requests(request_timings) result['pageData'] = self.calculate_page_stats(result['requests']) devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz') - with gzip.open(devtools_file, 'wt', 7) as f_out: + with gzip.open(devtools_file, GZIP_TEXT, 7) as f_out: json.dump(result, f_out) def get_empty_request(self, request_id, url): diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py index 2b658c246..d6395e70e 100644 --- a/internal/microsoft_edge.py +++ b/internal/microsoft_edge.py @@ -16,9 +16,11 @@ if (sys.version_info > (3, 0)): from time import monotonic from urllib.parse import urlsplit # pylint: disable=import-error + GZIP_TEXT = 'wt' else: from monotonic import monotonic from urlparse import urlsplit # pylint: disable=import-error + GZIP_TEXT = 'w' try: import ujson as json except BaseException: @@ -687,7 +689,7 @@ def collect_browser_metrics(self, task): user_timing = self.run_js_file('user_timing.js') if user_timing is not None: path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(user_timing)) logging.debug("Collecting page-level metrics") page_data = self.run_js_file('page_data.js') @@ -708,7 +710,7 @@ def collect_browser_metrics(self, task): except Exception: pass path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(custom_metrics)) if 'heroElementTimes' in self.job and self.job['heroElementTimes']: hero_elements = None @@ -722,7 +724,7 @@ def collect_browser_metrics(self, task): hero_elements = self.execute_js(script) if hero_elements is not None: path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz') - with gzip.open(path, 'wt', 7) as outfile: + with gzip.open(path, GZIP_TEXT, 7) as outfile: outfile.write(json.dumps(hero_elements)) # Wait for the interactive periods to be written if self.supports_interactive: @@ -736,7 +738,7 @@ def collect_browser_metrics(self, task): if interactive is not None and len(interactive): interactive_file = os.path.join(task['dir'], task['prefix'] + '_interactive.json.gz') - with gzip.open(interactive_file, 'wt', 7) as f_out: + with gzip.open(interactive_file, GZIP_TEXT, 7) as f_out: f_out.write(interactive) def prepare_task(self, task): @@ -933,7 +935,7 @@ def process_requests(self, task): 
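The import preamble these modules now share handles the stdlib relocations this port keeps running into: monotonic graduated into the standard time module in Python 3.3, and urlsplit/quote_plus moved under urllib.parse. Consolidated, the pattern reads:

    import sys

    if sys.version_info > (3, 0):
        from time import monotonic                      # stdlib since 3.3
        from urllib.parse import urlsplit, quote_plus   # pylint: disable=import-error
        unicode = str      # keep the Python 2 name alive for shared code paths
        GZIP_TEXT = 'wt'
    else:
        from monotonic import monotonic                 # PyPI backport
        from urlparse import urlsplit                   # pylint: disable=import-error
        from urllib import quote_plus
        GZIP_TEXT = 'w'
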
diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py
index 2b658c246..d6395e70e 100644
--- a/internal/microsoft_edge.py
+++ b/internal/microsoft_edge.py
@@ -16,9 +16,11 @@
 if (sys.version_info > (3, 0)):
     from time import monotonic
     from urllib.parse import urlsplit # pylint: disable=import-error
+    GZIP_TEXT = 'wt'
 else:
     from monotonic import monotonic
     from urlparse import urlsplit # pylint: disable=import-error
+    GZIP_TEXT = 'w'
 try:
     import ujson as json
 except BaseException:
@@ -687,7 +689,7 @@ def collect_browser_metrics(self, task):
         user_timing = self.run_js_file('user_timing.js')
         if user_timing is not None:
             path = os.path.join(task['dir'], task['prefix'] + '_timed_events.json.gz')
-            with gzip.open(path, 'wt', 7) as outfile:
+            with gzip.open(path, GZIP_TEXT, 7) as outfile:
                 outfile.write(json.dumps(user_timing))
         logging.debug("Collecting page-level metrics")
         page_data = self.run_js_file('page_data.js')
@@ -708,7 +710,7 @@ def collect_browser_metrics(self, task):
                 except Exception:
                     pass
             path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz')
-            with gzip.open(path, 'wt', 7) as outfile:
+            with gzip.open(path, GZIP_TEXT, 7) as outfile:
                 outfile.write(json.dumps(custom_metrics))
         if 'heroElementTimes' in self.job and self.job['heroElementTimes']:
             hero_elements = None
@@ -722,7 +724,7 @@ def collect_browser_metrics(self, task):
             hero_elements = self.execute_js(script)
             if hero_elements is not None:
                 path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz')
-                with gzip.open(path, 'wt', 7) as outfile:
+                with gzip.open(path, GZIP_TEXT, 7) as outfile:
                     outfile.write(json.dumps(hero_elements))
         # Wait for the interactive periods to be written
         if self.supports_interactive:
@@ -736,7 +738,7 @@ def collect_browser_metrics(self, task):
             if interactive is not None and len(interactive):
                 interactive_file = os.path.join(task['dir'],
                                                 task['prefix'] + '_interactive.json.gz')
-                with gzip.open(interactive_file, 'wt', 7) as f_out:
+                with gzip.open(interactive_file, GZIP_TEXT, 7) as f_out:
                     f_out.write(interactive)
 
     def prepare_task(self, task):
@@ -933,7 +935,7 @@ def process_requests(self, task):
         result['pageData'] = self.calculate_page_stats(result['requests'])
         self.check_optimization(task, result['requests'], result['pageData'])
         devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz')
-        with gzip.open(devtools_file, 'wt', 7) as f_out:
+        with gzip.open(devtools_file, GZIP_TEXT, 7) as f_out:
             json.dump(result, f_out)
 
     def process_sockets(self):
diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py
index b7fc905fd..7c92781ea 100644
--- a/internal/optimization_checks.py
+++ b/internal/optimization_checks.py
@@ -17,8 +17,10 @@
 import time
 if (sys.version_info > (3, 0)):
     from time import monotonic
+    GZIP_TEXT = 'wt'
 else:
     from monotonic import monotonic
+    GZIP_TEXT = 'w'
 try:
     import ujson as json
 except BaseException:
@@ -375,7 +377,7 @@ def join(self):
         # Save the results
         if self.results:
             path = os.path.join(self.task['dir'], self.task['prefix']) + '_optimization.json.gz'
-            gz_file = gzip.open(path, 'wt', 7)
+            gz_file = gzip.open(path, GZIP_TEXT, 7)
             if gz_file:
                 gz_file.write(json.dumps(self.results))
                 gz_file.close()
diff --git a/internal/safari_ios.py b/internal/safari_ios.py
index 50d784ffa..edaff9196 100644
--- a/internal/safari_ios.py
+++ b/internal/safari_ios.py
@@ -19,9 +19,11 @@
     from time import monotonic
     from urllib.parse import urlsplit # pylint: disable=import-error
     unicode = str
+    GZIP_TEXT = 'wt'
 else:
     from monotonic import monotonic
     from urlparse import urlsplit # pylint: disable=import-error
+    GZIP_TEXT = 'w'
 try:
     import ujson as json
 except BaseException:
@@ -357,7 +359,7 @@ def collect_browser_metrics(self, task):
         logging.debug(user_timing)
         if user_timing is not None and self.path_base is not None:
             path = self.path_base + '_timed_events.json.gz'
-            with gzip.open(path, 'wt', 7) as outfile:
+            with gzip.open(path, GZIP_TEXT, 7) as outfile:
                 outfile.write(json.dumps(user_timing))
         logging.debug("Collecting page-level metrics")
         page_data = self.run_js_file('page_data.js')
@@ -379,7 +381,7 @@ def collect_browser_metrics(self, task):
                     pass
             if self.path_base is not None:
                 path = self.path_base + '_metrics.json.gz'
-                with gzip.open(path, 'wt', 7) as outfile:
+                with gzip.open(path, GZIP_TEXT, 7) as outfile:
                     outfile.write(json.dumps(custom_metrics))
         if 'heroElementTimes' in self.job and self.job['heroElementTimes']:
             hero_elements = None
@@ -393,7 +395,7 @@ def collect_browser_metrics(self, task):
             hero_elements = self.ios.execute_js(script)
             if hero_elements is not None:
                 path = os.path.join(task['dir'], task['prefix'] + '_hero_elements.json.gz')
-                with gzip.open(path, 'wt', 7) as outfile:
+                with gzip.open(path, GZIP_TEXT, 7) as outfile:
                     outfile.write(json.dumps(hero_elements))
 
     def process_message(self, msg, target_id=None):
@@ -844,7 +846,7 @@ def on_start_recording(self, task):
         if 'timeline' in self.job and self.job['timeline']:
             if self.path_base is not None:
                 timeline_path = self.path_base + '_devtools.json.gz'
-                self.timeline = gzip.open(timeline_path, 'wt', 7)
+                self.timeline = gzip.open(timeline_path, GZIP_TEXT, 7)
                 if self.timeline:
                     self.timeline.write('[\n')
             from internal.support.trace_parser import Trace
@@ -964,7 +966,7 @@ def on_start_processing(self, task):
         # Save the console logs
         if self.console_log and self.path_base is not None:
             log_file = self.path_base + '_console_log.json.gz'
-            with gzip.open(log_file, 'wt', 7) as f_out:
+            with gzip.open(log_file, GZIP_TEXT, 7) as f_out:
                 json.dump(self.console_log, f_out)
         # Process the timeline data
         if self.trace_parser is not None and self.path_base is not None:
@@ -1001,7 +1003,7 @@ def wait_for_processing(self, task):
                                                     self.wpt_result['requests'], opt)
             if self.path_base is not None:
                 devtools_file = self.path_base + '_devtools_requests.json.gz'
-                with gzip.open(devtools_file, 'wt', 7) as f_out:
+                with gzip.open(devtools_file, GZIP_TEXT, 7) as f_out:
                     json.dump(self.wpt_result, f_out)
 
     def step_complete(self, task):
@@ -1020,7 +1022,7 @@ def step_complete(self, task):
             path = self.path_base + '_page_data.json.gz'
             json_page_data = json.dumps(task['page_data'])
             logging.debug('Page Data: %s', json_page_data)
-            with gzip.open(path, 'wt', 7) as outfile:
+            with gzip.open(path, GZIP_TEXT, 7) as outfile:
                 outfile.write(json_page_data)

     def send_command(self, method, params, wait=False, timeout=10, target_id=None):
diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py
index 957f24a03..d7e3d99c4 100644
--- a/internal/support/devtools_parser.py
+++ b/internal/support/devtools_parser.py
@@ -24,8 +24,12 @@
 if (sys.version_info > (3, 0)):
     from urllib.parse import urlsplit # pylint: disable=import-error
     unicode = str
+    GZIP_TEXT = 'wt'
+    GZIP_READ_TEXT = 'rt'
 else:
     from urlparse import urlsplit # pylint: disable=import-error
+    GZIP_TEXT = 'w'
+    GZIP_READ_TEXT = 'r'
 
 # try a fast json parser if it is installed
 try:
@@ -104,7 +108,7 @@ def write(self):
         try:
             _, ext = os.path.splitext(self.out_file)
             if ext.lower() == '.gz':
-                with gzip.open(self.out_file, 'wt') as f_out:
+                with gzip.open(self.out_file, GZIP_TEXT) as f_out:
                     json.dump(self.result, f_out)
             else:
                 with open(self.out_file, 'w') as f_out:
@@ -119,7 +123,7 @@ def extract_net_requests(self):
         page_data = {'endTime': 0}
         _, ext = os.path.splitext(self.devtools_file)
         if ext.lower() == '.gz':
-            f_in = gzip.open(self.devtools_file, 'rt')
+            f_in = gzip.open(self.devtools_file, GZIP_READ_TEXT)
         else:
             f_in = open(self.devtools_file, 'r')
         raw_events = json.load(f_in)
@@ -703,7 +707,7 @@ def process_netlog_requests(self):
         if self.netlog_requests_file is not None and os.path.isfile(self.netlog_requests_file):
             _, ext = os.path.splitext(self.netlog_requests_file)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.netlog_requests_file, 'rt')
+                f_in = gzip.open(self.netlog_requests_file, GZIP_READ_TEXT)
             else:
                 f_in = open(self.netlog_requests_file, 'r')
             netlog = json.load(f_in)
@@ -995,7 +999,7 @@ def process_user_timing(self):
         if self.user_timing_file is not None and os.path.isfile(self.user_timing_file):
             _, ext = os.path.splitext(self.user_timing_file)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.user_timing_file, 'rt')
+                f_in = gzip.open(self.user_timing_file, GZIP_READ_TEXT)
             else:
                 f_in = open(self.user_timing_file, 'r')
             user_timing_events = json.load(f_in)
@@ -1055,7 +1059,7 @@ def process_optimization_results(self):
         if self.optimization is not None and os.path.isfile(self.optimization):
             _, ext = os.path.splitext(self.optimization)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.optimization, 'rt')
+                f_in = gzip.open(self.optimization, GZIP_READ_TEXT)
             else:
                 f_in = open(self.optimization, 'r')
             optimization_results = json.load(f_in)
@@ -1167,7 +1171,7 @@ def process_code_coverage(self):
         if self.coverage is not None and os.path.isfile(self.coverage):
             _, ext = os.path.splitext(self.coverage)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.coverage, 'rt')
+                f_in = gzip.open(self.coverage, GZIP_READ_TEXT)
             else:
                 f_in = open(self.coverage, 'r')
             coverage = json.load(f_in)
@@ -1216,7 +1220,7 @@ def process_cpu_times(self):
         if end > 0 and self.cpu_times is not None and os.path.isfile(self.cpu_times):
             _, ext = os.path.splitext(self.cpu_times)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.cpu_times, 'rt')
+                f_in = gzip.open(self.cpu_times, GZIP_READ_TEXT)
             else:
                 f_in = open(self.cpu_times, 'r')
             cpu = json.load(f_in)
@@ -1262,7 +1266,7 @@ def process_v8_stats(self):
         if os.path.isfile(self.v8_stats):
             _, ext = os.path.splitext(self.v8_stats)
             if ext.lower() == '.gz':
-                f_in = gzip.open(self.v8_stats, 'rt')
+                f_in = gzip.open(self.v8_stats, GZIP_READ_TEXT)
             else:
                 f_in = open(self.v8_stats, 'r')
             stats = json.load(f_in)
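The parser repeats the same "gzip or plain text" branch before every read. A hypothetical helper (not part of the patch; open_text is an invented name) that captures the pattern, shown here only to make the repeated hunks above easier to follow:

import gzip
import os
import sys

GZIP_READ_TEXT = 'rt' if sys.version_info > (3, 0) else 'r'

def open_text(path):
    """Open a possibly-gzipped file for reading as text on Python 2 or 3."""
    _, ext = os.path.splitext(path)
    if ext.lower() == '.gz':
        return gzip.open(path, GZIP_READ_TEXT)
    return open(path, 'r')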
diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py
index 64a6448a1..14705aa66 100644
--- a/internal/support/firefox_log_parser.py
+++ b/internal/support/firefox_log_parser.py
@@ -128,7 +128,7 @@ def process_log_file(self, path):
         _, ext = os.path.splitext(path)
         line_count = 0
         if ext.lower() == '.gz':
-            f_in = gzip.open(path, 'rt')
+            f_in = gzip.open(path, GZIP_READ_TEXT)
         else:
             f_in = open(path, 'r')
         for line in f_in:
diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py
index 01fb04e23..c7f8e53ad 100644
--- a/internal/support/pcap-parser.py
+++ b/internal/support/pcap-parser.py
@@ -21,7 +21,12 @@
 import math
 import os
 import struct
+import sys
 import time
+if (sys.version_info > (3, 0)):
+    GZIP_TEXT = 'wt'
+else:
+    GZIP_TEXT = 'w'
 
 #Globals
 options = None
@@ -44,7 +49,7 @@ def __init__(self):
     def SaveStats(self, out):
         file_name, ext = os.path.splitext(out)
         if ext.lower() == '.gz':
-            f = gzip.open(out, 'wt')
+            f = gzip.open(out, GZIP_TEXT)
         else:
             f = open(out, 'w')
         try:
@@ -59,7 +64,7 @@ def SaveStats(self, out):
     def SaveDetails(self, out):
         file_name, ext = os.path.splitext(out)
         if ext.lower() == '.gz':
-            f = gzip.open(out, 'wt')
+            f = gzip.open(out, GZIP_TEXT)
         else:
             f = open(out, 'w')
         try:
diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py
index 21bab1354..f2273f416 100644
--- a/internal/support/trace_parser.py
+++ b/internal/support/trace_parser.py
@@ -25,8 +25,12 @@
 if (sys.version_info > (3, 0)):
     from urllib.parse import urlparse # pylint: disable=import-error
     unicode = str
+    GZIP_TEXT = 'wt'
+    GZIP_READ_TEXT = 'rt'
 else:
     from urlparse import urlparse # pylint: disable=import-error
+    GZIP_TEXT = 'w'
+    GZIP_READ_TEXT = 'r'
 
 # try a fast json parser if it is installed
 try:
@@ -71,7 +75,7 @@ def write_json(self, out_file, json_data):
         try:
             _, ext = os.path.splitext(out_file)
             if ext.lower() == '.gz':
-                with gzip.open(out_file, 'wt') as f:
+                with gzip.open(out_file, GZIP_TEXT) as f:
                     json.dump(json_data, f)
             else:
                 with open(out_file, 'w') as f:
@@ -123,7 +127,7 @@ def Process(self, trace):
         try:
             _, ext = os.path.splitext(trace)
             if ext.lower() == '.gz':
-                f = gzip.open(trace, 'rt')
+                f = gzip.open(trace, GZIP_READ_TEXT)
             else:
                 f = open(trace, 'r')
             for line in f:
@@ -152,7 +156,7 @@ def ProcessTimeline(self, timeline):
         try:
             _, ext = os.path.splitext(timeline)
             if ext.lower() == '.gz':
-                f = gzip.open(timeline, 'rt')
+                f = gzip.open(timeline, GZIP_READ_TEXT)
             else:
                 f = open(timeline, 'r')
             events = json.load(f)
diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py
index 4ef25f4da..8ed72f75b 100644
--- a/internal/support/visualmetrics.py
+++ b/internal/support/visualmetrics.py
@@ -40,6 +40,12 @@
 import subprocess
 import sys
 import tempfile
+if (sys.version_info > (3, 0)):
+    GZIP_TEXT = 'wt'
+    GZIP_READ_TEXT = 'rt'
+else:
+    GZIP_TEXT = 'w'
+    GZIP_READ_TEXT = 'r'
 
 # Globals
 options = None
@@ -958,7 +964,7 @@ def get_timeline_offset(timeline_file):
     try:
         file_name, ext = os.path.splitext(timeline_file)
         if ext.lower() == '.gz':
-            f = gzip.open(timeline_file, 'rt')
+            f = gzip.open(timeline_file, GZIP_READ_TEXT)
         else:
            f = open(timeline_file, 'r')
         timeline = json.load(f)
@@ -1092,7 +1098,7 @@ def calculate_histograms(directory, histograms_file, force):
                                        'histogram': histogram})
             if os.path.isfile(histograms_file):
                 os.remove(histograms_file)
-            f = gzip.open(histograms_file, 'wt')
+            f = gzip.open(histograms_file, GZIP_TEXT)
             json.dump(histograms, f)
             f.close()
         else:
@@ -1325,7 +1331,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog
         if progress and progress_file is not None:
             file_name, ext = os.path.splitext(progress_file)
             if ext.lower() == '.gz':
-                f = gzip.open(progress_file, 'wt', 7)
+                f = gzip.open(progress_file, GZIP_TEXT, 7)
             else:
                 f = open(progress_file, 'w')
             json.dump(progress, f)
@@ -1345,7 +1351,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog
         if hero_elements_file is not None and os.path.isfile(hero_elements_file):
             logging.debug('Calculating hero element times')
             hero_data = None
-            with gzip.open(hero_elements_file, 'rt') as hero_f_in:
+            with gzip.open(hero_elements_file, GZIP_READ_TEXT) as hero_f_in:
                 try:
                     hero_data = json.load(hero_f_in)
                 except Exception as e:
@@ -1367,7 +1373,7 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog
                     hero_data['timings'] = hero_timings
                     metrics += hero_timings
 
-                with gzip.open(hero_elements_file, 'wt', 7) as hero_f_out:
+                with gzip.open(hero_elements_file, GZIP_TEXT, 7) as hero_f_out:
                     json.dump(hero_data, hero_f_out)
             else:
                 logging.warn('Hero elements file is not valid: ' + str(hero_elements_file))
diff --git a/internal/traceroute.py b/internal/traceroute.py
index acb41edec..825607aef 100644
--- a/internal/traceroute.py
+++ b/internal/traceroute.py
@@ -12,9 +12,10 @@
 import sys
 if (sys.version_info > (3, 0)):
     from urllib.parse import urlparse # pylint: disable=import-error
+    GZIP_TEXT = 'wt'
 else:
     from urlparse import urlparse # pylint: disable=import-error
-
+    GZIP_TEXT = 'w'
 
 class Traceroute(object):
     """Traceroute (desktop)"""
@@ -41,7 +42,7 @@ def run_task(self, task):
             last_hop, results = self.unix_traceroute(hostname)
         if last_hop > 0 and results is not None and len(results):
             out_file = os.path.join(task['dir'], task['prefix']) + '_traceroute.txt.gz'
-            with gzip.open(out_file, 'wt', 7) as f_out:
+            with gzip.open(out_file, GZIP_TEXT, 7) as f_out:
                 f_out.write('Hop,IP,ms,FQDN\n')
                 if 0 in results:
                     f_out.write('-1,{0},0,{1}\n'.format(results[0]['addr'], hostname))
diff --git a/internal/webpagetest.py b/internal/webpagetest.py
index 5cf3a9144..8f5044b0d 100644
--- a/internal/webpagetest.py
+++ b/internal/webpagetest.py
@@ -21,9 +21,11 @@
 if (sys.version_info > (3, 0)):
     from time import monotonic
     from urllib.parse import quote_plus # pylint: disable=import-error
+    GZIP_READ_TEXT = 'rt'
 else:
     from monotonic import monotonic
     from urllib import quote_plus # pylint: disable=import-error,no-name-in-module
+    GZIP_READ_TEXT = 'r'
 try:
     import ujson as json
 except BaseException:
@@ -950,7 +952,7 @@ def get_bodies(self, task):
         path = os.path.join(task['dir'], 'bodies')
         requests = []
         devtools_file = os.path.join(task['dir'], task['prefix'] + '_devtools_requests.json.gz')
-        with gzip.open(devtools_file, 'rt') as f_in:
+        with gzip.open(devtools_file, GZIP_READ_TEXT) as f_in:
             requests = json.load(f_in)
         count = 0
         bodies_zip = path_base + '_bodies.zip'
diff --git a/wptagent.py b/wptagent.py
index 1725086cd..1e9483b3e 100644
--- a/wptagent.py
+++ b/wptagent.py
@@ -16,6 +16,10 @@
 import sys
 import time
 import traceback
+if (sys.version_info > (3, 0)):
+    GZIP_TEXT = 'wt'
+else:
+    GZIP_TEXT = 'w'
 
 
 class WPTAgent(object):
@@ -180,7 +184,7 @@ def run_single_test(self):
                         pass
                 if self.task['lighthouse_log']:
                     log_file = os.path.join(self.task['dir'], 'lighthouse.log.gz')
-                    with gzip.open(log_file, 'wt', 7) as f_out:
+                    with gzip.open(log_file, GZIP_TEXT, 7) as f_out:
                         f_out.write(self.task['lighthouse_log'])
             else:
                 browser.run_task(self.task)
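The other shim that recurs throughout this patch is the monotonic-clock import. For reference, it reduces to this standalone sketch: Python 3 ships time.monotonic(), while Python 2 needs the third-party "monotonic" backport, and importing the function itself lets call sites use monotonic() unqualified either way. The timed loop is illustrative.

import sys

if sys.version_info > (3, 0):
    from time import monotonic       # Python 3 standard library
else:
    from monotonic import monotonic  # Python 2 backport package

start = monotonic()
# ... timed work would go here ...
elapsed = monotonic() - start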
From e9a806b84bd98388f228ca137b83ce910594bccd Mon Sep 17 00:00:00 2001
From: Patrick Meenan
Date: Sun, 29 Dec 2019 10:38:05 -0800
Subject: [PATCH 10/16] Added MUCH better exception logging

---
 internal/adb.py                        |   4 +-
 internal/android_browser.py            |   4 +-
 internal/chrome_desktop.py             |   2 +-
 internal/desktop_browser.py            |  14 +--
 internal/devtools.py                   | 114 ++++++++++++++----------
 internal/devtools_browser.py           |  26 +++---
 internal/firefox.py                    |  51 ++++++-----
 internal/internet_explorer.py          |   2 +-
 internal/ios_device.py                 |  74 +++++++++------
 internal/message_server.py             |   2 +-
 internal/microsoft_edge.py             |  32 +++----
 internal/optimization_checks.py        |  39 +++++----
 internal/os_util.py                    |   4 +-
 internal/safari_ios.py                 |  72 +++++++++------
 internal/support/devtools_parser.py    |  23 +++--
 internal/support/firefox_log_parser.py |   4 +-
 internal/support/pcap-parser.py        |  14 +--
 internal/support/trace_parser.py       |  19 ++---
 internal/support/visualmetrics.py      |  23 ++---
 internal/traceroute.py                 |   4 +-
 internal/traffic_shaping.py            |   2 +-
 internal/webpagetest.py                |  24 +++---
 wptagent.py                            |   2 +-
 23 files changed, 299 insertions(+), 256 deletions(-)

diff --git a/internal/adb.py b/internal/adb.py
index a6461460f..0f4b2038c 100644
--- a/internal/adb.py
+++ b/internal/adb.py
@@ -226,7 +226,7 @@ def start_screenrecord(self):
                         '/data/local/tmp/wpt_video.mp4'])
             self.screenrecord = subprocess.Popen(cmd)
         except Exception:
-            pass
+            logging.exception('Error starting screenrecord')
 
     def stop_screenrecord(self, local_file):
         """Stop a screen record and download the video to local_file"""
@@ -256,7 +256,7 @@ def start_tcpdump(self):
             logging.debug(' '.join(cmd))
             self.tcpdump = subprocess.Popen(cmd)
         except Exception:
-            pass
+            logging.exception('Error starting tcpdump')
 
     def stop_tcpdump(self, local_file):
         """Stop a tcpdump capture and download to local_file"""
diff --git a/internal/android_browser.py b/internal/android_browser.py
index 0ec0bbe8b..9a23105de 100644
--- a/internal/android_browser.py
+++ b/internal/android_browser.py
@@ -97,7 +97,7 @@ def prepare(self, job, task):
                     if md5 == self.config['md5']:
                         valid = True
                 except Exception:
-                    pass
+                    logging.exception('Error downloading browser update')
                 if os.path.isfile(tmp_file):
                     if valid:
                         # Uninstall the previous install of the same package if we are installing a custom browser.
@@ -275,7 +275,7 @@ def wait_for_processing(self, task):
             if self.tcpdump_file is not None:
                 os.remove(self.tcpdump_file)
         except Exception:
-            pass
+            logging.exception('Error processing tcpdump')
 
     def step_complete(self, task):
         """All of the processing for the current test step is complete"""
diff --git a/internal/chrome_desktop.py b/internal/chrome_desktop.py
index 2faf64731..32cc7f88d 100644
--- a/internal/chrome_desktop.py
+++ b/internal/chrome_desktop.py
@@ -183,7 +183,7 @@ def setup_prefs(self, profile_dir):
                 os.makedirs(dest_dir)
             shutil.copy(src, os.path.join(dest_dir, 'Preferences'))
         except Exception:
-            pass
+            logging.exception('Error copying prefs file')
 
     def install_policy(self):
         """Install the required policy list (Linux only right now)"""
diff --git a/internal/desktop_browser.py b/internal/desktop_browser.py
index a88ddab45..a95db2dd6 100644
--- a/internal/desktop_browser.py
+++ b/internal/desktop_browser.py
@@ -275,7 +275,7 @@ def find_default_interface(self):
                     for interface in remove:
                         del self.interfaces[interface]
         except Exception:
-            pass
+            logging.exception('Error finding default interface')
 
     def launch_browser(self, command_line):
         """Launch the browser and keep track of the process"""
@@ -489,7 +489,7 @@ def on_start_recording(self, task):
                         time.sleep(0.1)
                 self.video_capture_running = True
             except Exception:
-                pass
+                logging.exception('Error starting video capture')
 
         # start the background thread for monitoring CPU and bandwidth
         self.usage_queue = multiprocessing.JoinableQueue()
@@ -682,7 +682,7 @@ def process_pcap(self):
                     if 'in_dup' in result:
                         self.task['page_data']['pcapBytesInDup'] = result['in_dup']
             except Exception:
-                pass
+                logging.exception('Error processing tcpdump')
 
     def get_net_bytes(self):
         """Get the bytes received, ignoring the loopback interface"""
@@ -753,7 +753,7 @@ def enable_cpu_throttling(self, command_line):
                 subprocess.check_call(cmd)
                 command_line = 'cgexec -g cpu:wptagent ' + command_line
             except Exception as err:
-                logging.critical("Exception enabling throttling: %s", err.__str__())
+                logging.exception("Exception enabling throttling: %s", err.__str__())
             self.throttling_cpu = True
         return command_line
 
@@ -765,7 +765,7 @@ def disable_cpu_throttling(self):
                 logging.debug(' '.join(cmd))
                 subprocess.check_call(cmd)
             except Exception:
-                pass
+                logging.exception('Error disabling throttling')
 
     def start_cpu_throttling(self):
         """Start the CPU throttling if necessary"""
@@ -780,7 +780,7 @@ def start_cpu_throttling(self):
                 logging.debug(' '.join(cmd))
                 subprocess.check_call(cmd)
             except Exception:
-                pass
+                logging.exception('Error starting throttling')
 
     def stop_cpu_throttling(self):
         """Start the CPU throttling if necessary"""
@@ -790,4 +790,4 @@ def stop_cpu_throttling(self):
                 logging.debug(' '.join(cmd))
                 subprocess.check_call(cmd)
             except Exception:
-                pass
+                logging.exception('Error stopping throttling')
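What replacing these bare "pass" handlers buys: logging.exception() logs at ERROR level and automatically appends the active traceback, so a swallowed failure still leaves a diagnosable record in the agent log. A toy illustration (risky() is a stand-in, not agent code):

import logging

def risky():
    raise ValueError('boom')

try:
    risky()
except Exception:
    # Before this patch: pass (silent). After: message plus full traceback.
    logging.exception('Error in risky()')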
diff --git a/internal/devtools.py b/internal/devtools.py
index e29129744..034b9d77c 100644
--- a/internal/devtools.py
+++ b/internal/devtools.py
@@ -128,7 +128,7 @@ def wait_for_available(self, timeout):
                     ret = True
                     logging.debug('Dev tools interface is available')
             except Exception as err:
-                logging.debug("Connect to dev tools Error: %s", err.__str__())
+                logging.exception("Connect to dev tools Error: %s", err.__str__())
                 time.sleep(0.5)
         return ret
 
@@ -161,15 +161,14 @@ def connect(self, timeout):
                                 session.get(self.url + '/close/' + tabs[index]['id'],
                                             proxies=proxies)
                             except Exception:
-                                pass
+                                logging.exception('Error closing tabs')
                 if websocket_url is not None:
                     try:
                         self.websocket = DevToolsClient(websocket_url)
                         self.websocket.connect()
                         ret = True
                     except Exception as err:
-                        logging.debug("Connect to dev tools websocket Error: %s",
-                                      err.__str__())
+                        logging.exception("Connect to dev tools websocket Error: %s", err.__str__())
                     if not ret:
                         # try connecting to 127.0.0.1 instead of localhost
                         try:
@@ -178,8 +177,7 @@ def connect(self, timeout):
                             self.websocket.connect()
                             ret = True
                         except Exception as err:
-                            logging.debug("Connect to dev tools websocket Error: %s",
-                                          err.__str__())
+                            logging.exception("Connect to dev tools websocket Error: %s", err.__str__())
                 else:
                     time.sleep(0.5)
             else:
@@ -208,7 +206,7 @@ def close(self, close_tab=True):
             try:
                 self.websocket.close()
             except Exception:
-                pass
+                logging.exception('Error closing websocket')
             self.websocket = None
         if close_tab and self.tab_id is not None:
             import requests
@@ -216,7 +214,7 @@ def close(self, close_tab=True):
             try:
                 requests.get(self.url + '/close/' + self.tab_id, proxies=proxies)
             except Exception:
-                pass
+                logging.exception('Error closing tab')
         self.tab_id = None
 
     def start_recording(self):
@@ -235,7 +233,7 @@ def start_recording(self):
                 self.grab_screenshot(tmp_file)
                 os.remove(tmp_file)
             except Exception:
-                pass
+                logging.exception('Error grabbing screenshot')
         self.flush_pending_messages()
         self.send_command('Page.enable', {})
         self.send_command('Inspector.enable', {})
@@ -424,8 +422,8 @@ def stop_recording(self):
                     json.dump(summary, f_out)
             self.send_command('CSS.disable', {})
             self.send_command('DOM.disable', {})
-        except Exception as err:
-            logging.exception(err)
+        except Exception:
+            logging.exception('Error stopping devtools')
         self.recording = False
         self.flush_pending_messages()
         if self.task['log_data']:
@@ -473,20 +471,24 @@ def collect_trace(self):
                 while not done and no_message_count < 30 and elapsed < 60:
                     try:
                         raw = self.websocket.get_message(1)
-                        if raw is not None and len(raw):
-                            no_message_count = 0
-                            msg = json.loads(raw)
-                            if 'method' in msg and msg['method'] == 'Tracing.tracingComplete':
-                                done = True
-                        else:
+                        try:
+                            if raw is not None and len(raw):
+                                no_message_count = 0
+                                msg = json.loads(raw)
+                                if 'method' in msg and msg['method'] == 'Tracing.tracingComplete':
+                                    done = True
+                            else:
+                                no_message_count += 1
+                        except Exception:
                             no_message_count += 1
+                            logging.exception('Error processing devtools message')
                     except Exception:
                         no_message_count += 1
                         time.sleep(1)
                         pass
             self.websocket.stop_processing_trace()
         except Exception:
-            pass
+            logging.exception('Error processing trace events')
         elapsed = monotonic() - start
         logging.debug("Time to collect trace: %0.3f sec", elapsed)
         self.recording_video = False
@@ -561,7 +563,7 @@ def get_response_body(self, request_id):
                         except Exception:
                             is_text = False
                     else:
-                        body = unicode(response['result']['body'])
+                        body = response['result']['body'].encode('utf-8')
                         is_text = True
                     # Add text bodies to the zip archive
                     store_body = self.all_bodies
@@ -574,10 +576,10 @@ def get_response_body(self, request_id):
                             logging.debug('%s: Stored body in zip', request_id)
                     logging.debug('%s: Body length: %d', request_id, len(body))
                     self.response_bodies[request_id] = body
-                    with open(body_file_path, 'w') as body_file:
+                    with open(body_file_path, 'wb') as body_file:
                         body_file.write(body)
                 except Exception:
-                    logging.Exception('Exception retrieving body')
+                    logging.exception('Exception retrieving body')
             else:
                 self.body_fail_count = 0
                 self.response_bodies[request_id] = response['result']['body']
@@ -656,13 +658,16 @@ def flush_pending_messages(self):
             try:
                 while True:
                     raw = self.websocket.get_message(0)
-                    if raw is not None and len(raw):
-                        if self.recording:
-                            logging.debug(raw[:200])
-                        msg = json.loads(raw)
-                        self.process_message(msg)
-                    if not raw:
-                        break
+                    try:
+                        if raw is not None and len(raw):
+                            if self.recording:
+                                logging.debug(raw[:200])
+                            msg = json.loads(raw)
+                            self.process_message(msg)
+                        if not raw:
+                            break
+                    except Exception:
+                        logging.exception('Error flushing websocket messages')
             except Exception:
                 pass
 
@@ -687,13 +692,16 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None):
                 while ret is None and monotonic() < end_time:
                     try:
                         raw = self.websocket.get_message(1)
-                        if raw is not None and len(raw):
-                            logging.debug(raw[:200])
-                            msg = json.loads(raw)
-                            self.process_message(msg)
-                            if command_id in self.command_responses:
-                                ret = self.command_responses[command_id]
-                                del self.command_responses[command_id]
+                        try:
+                            if raw is not None and len(raw):
+                                logging.debug(raw[:200])
+                                msg = json.loads(raw)
+                                self.process_message(msg)
+                                if command_id in self.command_responses:
+                                    ret = self.command_responses[command_id]
+                                    del self.command_responses[command_id]
+                        except Exception:
+                            logging.exception('Error processing websocket message')
                     except Exception:
                         pass
         elif self.websocket:
@@ -711,17 +719,20 @@ def send_command(self, method, params, wait=False, timeout=10, target_id=None):
                 while ret is None and monotonic() < end_time:
                     try:
                         raw = self.websocket.get_message(1)
-                        if raw is not None and len(raw):
-                            logging.debug(raw[:200])
-                            msg = json.loads(raw)
-                            self.process_message(msg)
-                            if command_id in self.command_responses:
-                                ret = self.command_responses[command_id]
-                                del self.command_responses[command_id]
+                        try:
+                            if raw is not None and len(raw):
+                                logging.debug(raw[:200])
+                                msg = json.loads(raw)
+                                self.process_message(msg)
+                                if command_id in self.command_responses:
+                                    ret = self.command_responses[command_id]
+                                    del self.command_responses[command_id]
+                        except Exception as err:
+                            logging.error('Error processing websocket message: %s', err.__str__())
                     except Exception:
                         pass
             except Exception as err:
-                logging.debug("Websocket send error: %s", err.__str__())
+                logging.exception("Websocket send error: %s", err.__str__())
         return ret
 
     def wait_for_page_load(self):
@@ -736,10 +747,13 @@ def wait_for_page_load(self):
             interval = 0.1
             try:
                 raw = self.websocket.get_message(interval)
-                if raw is not None and len(raw):
-                    logging.debug(raw[:200])
-                    msg = json.loads(raw)
-                    self.process_message(msg)
+                try:
+                    if raw is not None and len(raw):
+                        logging.debug(raw[:200])
+                        msg = json.loads(raw)
+                        self.process_message(msg)
+                except Exception:
+                    logging.exception('Error processing message while waiting for page load')
             except Exception:
                 # ignore timeouts when we're in a polling read loop
                 pass
@@ -952,7 +966,7 @@ def process_network_event(self, event, msg, target_id=None):
                                 params['url'] = url.replace(host, self.task['overrideHosts'][host_match], 1)
                                 break
                 except Exception:
-                    pass
+                    logging.exception('Error processing host override')
                 self.send_command('Network.continueInterceptedRequest', params)
         elif 'requestId' in msg['params']:
             request_id = msg['params']['requestId']
@@ -1126,7 +1140,7 @@ def bytes_from_range(self, text, range_info):
                 byte_count += len(lines[start_line][start_column:])
                 byte_count += end_column
         except Exception:
-            pass
+            logging.exception('Error in bytes_from_range')
         return byte_count
 
@@ -1190,7 +1204,7 @@ def received_message(self, raw):
                 if message is not None:
                     self.messages.put(message)
         except Exception:
-            pass
+            logging.exception('Error processing received websocket message')
 
     def get_message(self, timeout):
         """Wait for and return a message from the queue"""
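The shape shared by all the reworked read loops above: the outer try treats websocket read timeouts as normal polling behavior, while a new inner try logs genuine processing failures instead of silently dropping the message. A minimal sketch of that structure, assuming Python 3 and using stand-ins (the websocket object, process(), and deadline are not agent APIs):

import json
import logging
from time import monotonic

def pump(websocket, process, deadline):
    """Poll for messages until the deadline, logging processing errors."""
    while monotonic() < deadline:
        try:
            raw = websocket.get_message(1)  # may raise on a read timeout
            try:
                if raw:
                    process(json.loads(raw))
            except Exception:
                # A bad message should be logged, not end the loop silently.
                logging.exception('Error processing websocket message')
        except Exception:
            pass  # read timeouts are expected while polling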
diff --git a/internal/devtools_browser.py b/internal/devtools_browser.py
index f107cfeb8..84d6fb997 100644
--- a/internal/devtools_browser.py
+++ b/internal/devtools_browser.py
@@ -5,6 +5,7 @@
 """Base class support for browsers that speak the dev tools protocol"""
 import glob
 import gzip
+import io
 import logging
 import os
 import re
@@ -138,7 +139,7 @@ def prepare_browser(self, task):
                                            {'latitude': lat, 'longitude': lng,
                                             'accuracy': 0})
             except Exception:
-                pass
+                logging.exception('Error overriding location')
 
         # UA String
         ua_string = self.devtools.execute_js("navigator.userAgent")
@@ -316,7 +317,7 @@ def run_js_file(self, file_name):
         script = None
         script_file_path = os.path.join(self.script_dir, file_name)
         if os.path.isfile(script_file_path):
-            with open(script_file_path, 'r') as script_file:
+            with io.open(script_file_path, 'r', encoding='utf-8') as script_file:
                 script = script_file.read()
         if script is not None:
             ret = self.devtools.execute_js(script)
@@ -347,7 +348,7 @@ def collect_browser_metrics(self, task):
             custom_hero_selectors = {}
             if 'heroElements' in self.job:
                 custom_hero_selectors = self.job['heroElements']
-            with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file:
+            with io.open(os.path.join(self.script_dir, 'hero_elements.js'), 'r', encoding='utf-8') as script_file:
                 hero_elements_script = script_file.read()
             script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')'
             hero_elements = self.devtools.execute_js(script)
@@ -430,7 +431,7 @@ def process_command(self, command):
                                            {'latitude': lat, 'longitude': lng,
                                             'accuracy': accuracy})
             except Exception:
-                pass
+                logging.exception('Error setting location')
         elif command['command'] == 'addheader':
             self.devtools.set_header(command['target'])
         elif command['command'] == 'setheader':
@@ -465,10 +466,11 @@ def lighthouse_thread(self):
         proc = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
         for line in iter(proc.stderr.readline, b''):
             try:
+                line = unicode(line)
                 logging.debug(line.rstrip())
                 self.task['lighthouse_log'] += line
             except Exception:
-                pass
+                logging.exception('Error recording lighthouse log')
         proc.communicate()
 
     def run_lighthouse_test(self, task):
@@ -509,7 +511,7 @@ def run_lighthouse_test(self, task):
                 command.extend(['--blocked-url-patterns', pattern])
         if 'headers' in task:
             headers_file = os.path.join(task['dir'], 'lighthouse-headers.json')
-            with open(headers_file, 'w') as f_out:
+            with io.open(headers_file, 'w', encoding='utf-8') as f_out:
                 json.dump(task['headers'], f_out)
             command.extend(['--extra-headers', '"{0}"'.format(headers_file)])
         cmd = ' '.join(command)
@@ -520,7 +522,7 @@ def run_lighthouse_test(self, task):
             lh_thread.start()
             lh_thread.join(600)
         except Exception:
-            pass
+            logging.exception('Error running lighthouse audits')
         from .os_util import kill_all
         kill_all('node', True)
         self.job['shaper'].reset()
@@ -530,7 +532,7 @@ def run_lighthouse_test(self, task):
             lh_trace_src = os.path.join(task['dir'], 'lighthouse-0.trace.json')
             if os.path.isfile(lh_trace_src):
                 # read the JSON in and re-write it line by line to match the other traces
-                with open(lh_trace_src, 'r') as f_in:
+                with io.open(lh_trace_src, 'r', encoding='utf-8') as f_in:
                     trace = json.load(f_in)
                     if trace is not None and 'traceEvents' in trace:
                         lighthouse_trace = os.path.join(task['dir'],
@@ -542,7 +544,7 @@ def run_lighthouse_test(self, task):
                             f_out.write(json.dumps(trace_event))
                         f_out.write("\n]}")
         except Exception:
-            pass
+            logging.exception('Error processing lighthouse trace')
         # Delete all the left-over lighthouse assets
         files = glob.glob(os.path.join(task['dir'], 'lighthouse-*'))
         for file_path in files:
@@ -552,7 +554,7 @@ def run_lighthouse_test(self, task):
                 pass
         if os.path.isfile(json_file):
             lh_report = None
-            with open(json_file, 'r') as f_in:
+            with io.open(json_file, 'r', encoding='utf-8') as f_in:
                 lh_report = json.load(f_in)
 
             with open(json_file, 'rb') as f_in:
@@ -623,7 +625,7 @@ def run_lighthouse_test(self, task):
                     shutil.copyfileobj(f_in, f_out)
                 os.remove(html_file)
             except Exception:
-                pass
+                logging.exception('Error compressing lighthouse report')
 
     def wappalyzer_detect(self, task, request_headers):
         """Run the wappalyzer detection"""
@@ -699,5 +701,5 @@ def wappalyzer_script(self, response_headers):
                 script = script.replace('%JSON%', json_data)
                 script = script.replace('%RESPONSE_HEADERS%', json.dumps(headers))
         except Exception:
-            pass
+            logging.exception('Error building wappalyzer script')
         return script
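The io.open() changes in this file pin the injected script files to UTF-8 so reads behave the same under Python 2 and 3 (the builtin open() on 2.7 has no encoding argument; io.open() is the portable spelling). A minimal sketch of the pattern; the path and selector payload are illustrative:

import io
import json

with io.open('hero_elements.js', 'r', encoding='utf-8') as script_file:
    script = script_file.read()
# Append the custom selectors as a JSON argument, as the patched code does.
script += '(' + json.dumps({'hero': '#main'}) + ')'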
diff --git a/internal/firefox.py b/internal/firefox.py
index 3e5869281..e0954bcf8 100644
--- a/internal/firefox.py
+++ b/internal/firefox.py
@@ -6,6 +6,7 @@
 from datetime import datetime, timedelta
 import glob
 import gzip
+import io
 import logging
 import os
 import platform
@@ -82,7 +83,7 @@ def prepare(self, job, task):
                     shutil.rmtree(task['profile'])
                 shutil.copytree(profile_template, task['profile'])
             except Exception:
-                pass
+                logging.exception('Error copying Firefox profile')
         # Delete any unsent crash reports
         crash_dir = None
         if platform.system() == 'Windows':
@@ -188,7 +189,7 @@ def launch(self, job, task):
                     logging.debug('Setting location: %s', location_uri)
                     self.set_pref('geo.wifi.uri', location_uri)
             except Exception:
-                pass
+                logging.exception('Error overriding location')
         # Figure out the native viewport size
         size = self.execute_js("[window.innerWidth, window.innerHeight]")
         logging.debug(size)
@@ -240,7 +241,7 @@ def configure_prefs(self):
         try:
             self.marionette.set_prefs(prefs, True)
         except Exception:
-            pass
+            logging.exception('Error setting prefs through marionette')
 
     def close_browser(self, job, task):
         """Terminate the browser but don't do all of the cleanup that stop does"""
@@ -248,14 +249,14 @@ def close_browser(self, job, task):
             try:
                 self.addons.uninstall(self.extension_id)
             except Exception:
-                pass
+                logging.exception('Error removing addons')
             self.extension_id = None
             self.addons = None
         if self.marionette is not None:
             try:
                 self.marionette.close()
             except Exception:
-                pass
+                logging.exception('Error closing marionette')
             self.marionette = None
         DesktopBrowser.close_browser(self, job, task)
         # make SURE the Firefox processes are gone
@@ -320,7 +321,7 @@ def run_task(self, task):
             try:
                 self.marionette.navigate('about:blank')
             except Exception:
-                logging.debug('Marionette exception navigating to about:blank after the test')
+                logging.exception('Marionette exception navigating to about:blank after the test')
             self.task = None
 
     def wait_for_extension(self):
@@ -347,7 +348,11 @@ def wait_for_page_load(self):
             if self.page_loaded is not None:
                 interval = 0.1
             try:
-                self.process_message(self.job['message_server'].get_message(interval))
+                message = self.job['message_server'].get_message(interval)
+                try:
+                    self.process_message(message)
+                except Exception:
+                    logging.exception('Error processing message')
             except Exception:
                 pass
             now = monotonic()
@@ -388,7 +393,7 @@ def execute_js(self, script):
         try:
             ret = self.marionette.execute_script('return ' + script, script_timeout=30)
         except Exception:
-            pass
+            logging.exception('Error executing script')
         return ret
 
     def run_js_file(self, file_name):
@@ -403,7 +408,7 @@ def run_js_file(self, file_name):
             try:
                 ret = self.marionette.execute_script('return ' + script, script_timeout=30)
             except Exception:
-                pass
+                logging.exception('Error executing script file')
             if ret is not None:
                 logging.debug(ret)
         return ret
@@ -432,7 +437,7 @@ def collect_browser_metrics(self, task):
                     if custom_metrics[name] is not None:
                         logging.debug(custom_metrics[name])
                 except Exception:
-                    pass
+                    logging.exception('Error collecting custom metrics')
             path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz')
             with gzip.open(path, 'wt', 7) as outfile:
                 outfile.write(json.dumps(custom_metrics))
@@ -442,7 +447,7 @@ def collect_browser_metrics(self, task):
             if 'heroElements' in self.job:
                 custom_hero_selectors = self.job['heroElements']
             logging.debug('Collecting hero element positions')
-            with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file:
+            with io.open(os.path.join(self.script_dir, 'hero_elements.js'), 'r', encoding='utf-8') as script_file:
                 hero_elements_script = script_file.read()
             script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')'
             hero_elements = self.execute_js(script)
@@ -473,7 +478,7 @@ def process_message(self, message):
                     elif cat == 'webRequest':
                         self.process_web_request(msg, message['body'])
         except Exception:
-            pass
+            logging.exception('Error processing message')
 
     def process_web_navigation(self, message, evt):
         """Handle webNavigation.*"""
@@ -495,7 +500,7 @@ def process_web_navigation(self, message, evt):
                         self.marionette.execute_script(self.job['injectScript'],
                                                        script_timeout=30)
                     except Exception:
-                        pass
+                        logging.exception('Error injecting script')
             elif message == 'onDOMContentLoaded':
                 if 'timeStamp' in evt and 'frameId' in evt and evt['frameId'] == 0:
                     self.page['DOMContentLoaded'] = evt['timeStamp']
@@ -670,7 +675,7 @@ def on_stop_recording(self, task):
                             f_out.write(buff)
                             length -= read_bytes
             except Exception:
-                pass
+                logging.exception('Error copying log files')
 
     def on_start_processing(self, task):
         """Start any processing of the captured data"""
@@ -781,7 +786,7 @@ def process_command(self, command):
                     logging.debug('Setting location: %s', location_uri)
                     self.set_pref('geo.wifi.uri', location_uri)
             except Exception:
-                pass
+                logging.exception('Error setting location')
 
     def navigate(self, url):
         """Navigate to the given URL"""
@@ -789,7 +794,7 @@ def navigate(self, url):
             try:
                 self.marionette.navigate(url)
             except Exception as err:
-                logging.debug("Error navigating Firefox: %s", str(err))
+                logging.exception("Error navigating Firefox: %s", str(err))
 
     def set_pref(self, key, value_str):
         """Set an individual pref value"""
@@ -799,7 +804,7 @@ def set_pref(self, key, value_str):
                 logging.debug('Setting Pref "%s" to %s', key, value_str)
                 self.marionette.set_pref(key, value)
             except Exception:
-                pass
+                logging.exception('Error setting pref')
 
     def grab_screenshot(self, path, png=True, resize=0):
         """Save the screen shot (png or jpeg)"""
@@ -832,7 +837,7 @@ def grab_screenshot(self, path, png=True, resize=0):
                     except Exception:
                         pass
         except Exception as err:
-            logging.debug('Exception grabbing screen shot: %s', str(err))
+            logging.exception('Exception grabbing screen shot: %s', str(err))
 
     def process_requests(self, request_timings, task):
         """Convert all of the request and page events into the format needed for WPT"""
@@ -949,7 +954,7 @@ def merge_requests(self, request_timings):
                             request['bytesIn'] += len(header_text) + 2
                             request['headers']['response'].append(header_text)
                 except Exception:
-                    pass
+                    logging.exception('Error appending response header')
                 if 'created' in req:
                     request['created'] = req['created']
                 request['load_start'] = int(round(req['start'] * 1000.0))
@@ -965,7 +970,7 @@ def merge_requests(self, request_timings):
                         request['bytesIn'] += int(re.search(r'\d+', str(size)).group())
                 requests.append(request)
             except Exception:
-                pass
+                logging.exception('Error merging request')
         # Overwrite them with the same requests from the logs
         for request in requests:
             for req in request_timings:
@@ -975,7 +980,7 @@ def merge_requests(self, request_timings):
                         req['claimed'] = True
                         self.populate_request(request, req)
                 except Exception:
-                    pass
+                    logging.exception('Error populating request')
        # Add any events from the logs that weren't reported by the extension
         for req in request_timings:
             try:
@@ -984,7 +989,7 @@ def merge_requests(self, request_timings):
                     self.populate_request(request, req)
                     requests.append(request)
             except Exception:
-                pass
+                logging.exception('Error adding request from logs')
         # parse values out of the headers
         for request in requests:
             try:
@@ -1004,7 +1009,7 @@ def merge_requests(self, request_timings):
                     if value:
                         request['objectSize'] = value
             except Exception:
-                pass
+                logging.exception('Error processing headers')
         requests.sort(key=lambda x: x['startTime'] if 'startTime' in x else 0)
         return requests
diff --git a/internal/internet_explorer.py b/internal/internet_explorer.py
index 74633361b..0508f9d7f 100644
--- a/internal/internet_explorer.py
+++ b/internal/internet_explorer.py
@@ -52,7 +52,7 @@ def prepare(self, job, task):
                     values.append(value[0])
                     index += 1
                 except Exception:
-                    pass
+                    logging.exception('Error processing registry')
             for value in values:
                 _winreg.DeleteValue(key, value)
         if 'AppendUA' in task and len(task['AppendUA']):
diff --git a/internal/ios_device.py b/internal/ios_device.py
index bd2800f06..c0ed0e74e 100644
--- a/internal/ios_device.py
+++ b/internal/ios_device.py
@@ -62,7 +62,7 @@ def startup(self):
             from .support.ios.usbmux import USBMux
             self.mux = USBMux()
         except Exception:
-            logging.critical("Error initializing usbmux")
+            logging.exception("Error initializing usbmux")
 
     def get_devices(self):
         """Get a list of available devices"""
@@ -131,7 +131,7 @@ def execute_js(self, script, remove_orange=False):
             try:
                 ret = json.loads(ret)
             except Exception:
-                pass
+                logging.exception('Error running script')
         return ret
 
     def set_user_agent(self, ua_string):
@@ -222,6 +222,7 @@ def connect(self):
                     self.message_thread.start()
                     break
             except Exception:
+                logging.exception('Error connecting to device')
                 # If the app isn't running restart the device (no more than every 10 minutes)
                 if connecting and monotonic() - self.last_restart > 600:
                     needs_restart = True
@@ -230,7 +231,7 @@ def connect(self):
             try:
                 subprocess.call(['idevicediagnostics', 'restart'])
             except Exception:
-                pass
+                logging.exception('Error restarting device')
         return self.socket is not None
 
     def disconnect(self):
@@ -264,22 +265,26 @@ def send_message(self, message, data=None, wait=True, timeout=30):
                 while response is None and monotonic() < end:
                     try:
                         msg = self.messages.get(timeout=1)
-                        self.messages.task_done()
-                        if msg:
-                            if msg['msg'] == 'disconnected':
-                                self.disconnect()
-                                self.connect()
-                            elif 'id' in msg and msg['id'] == str(message_id):
-                                if msg['msg'] == 'OK':
-                                    if 'data' in msg:
-                                        response = msg['data']
+                        try:
+                            self.messages.task_done()
+                            if msg:
+                                if msg['msg'] == 'disconnected':
+                                    self.disconnect()
+                                    self.connect()
+                                elif 'id' in msg and msg['id'] == str(message_id):
+                                    if msg['msg'] == 'OK':
+                                        if 'data' in msg:
+                                            response = msg['data']
+                                        else:
+                                            response = True
                                     else:
-                                        response = True
-                                else:
-                                    break
+                                        break
+                        except Exception:
+                            logging.exception('Error processing message')
                     except Exception:
                         pass
         except Exception:
+            logging.exception('Error sending message')
             self.disconnect()
         return response
@@ -298,25 +303,28 @@ def pump_messages(self):
         try:
             while not self.must_disconnect and self.socket != None:
                 rlo, _, xlo = select.select([self.socket], [], [self.socket])
-                if xlo:
-                    logging.debug("iWptBrowser disconnected")
-                    self.messages.put({"msg": "disconnected"})
-                    return
-                if rlo:
-                    data_in = self.socket.recv(8192)
-                    if not data_in:
+                try:
+                    if xlo:
                         logging.debug("iWptBrowser disconnected")
                         self.messages.put({"msg": "disconnected"})
                         return
-                    buff += data_in
-                    pos = 0
-                    while pos >= 0:
-                        pos = buff.find("\n")
-                        if pos >= 0:
-                            message = buff[:pos].strip()
-                            buff = buff[pos + 1:]
-                            if message:
-                                self.process_raw_message(message)
+                    if rlo:
+                        data_in = self.socket.recv(8192)
+                        if not data_in:
+                            logging.debug("iWptBrowser disconnected")
+                            self.messages.put({"msg": "disconnected"})
+                            return
+                        buff += data_in
+                        pos = 0
+                        while pos >= 0:
+                            pos = buff.find("\n")
+                            if pos >= 0:
+                                message = buff[:pos].strip()
+                                buff = buff[pos + 1:]
+                                if message:
+                                    self.process_raw_message(message)
+                except Exception:
+                    logging.exception('Error pumping message')
         except Exception:
             pass
 
@@ -365,13 +373,13 @@ def process_message(self, msg):
             try:
                 self.messages.put(msg)
             except Exception:
-                pass
+                logging.exception('Error adding message to queue')
         elif self.notification_queue is not None:
             logging.debug('<<< %s', msg['msg'])
             try:
                 self.notification_queue.put(msg)
             except Exception:
-                pass
+                logging.exception('Error adding message to notification queue')
 
 def install_main():
diff --git a/internal/message_server.py b/internal/message_server.py
index b574f313a..69d96dd62 100644
--- a/internal/message_server.py
+++ b/internal/message_server.py
@@ -103,7 +103,7 @@ def post(self):
                 message['body'] = None
             MESSAGE_SERVER.handle_message(message)
         except Exception:
-            pass
+            logging.exception('Error processing POST message')
         self.set_status(200)
diff --git a/internal/microsoft_edge.py b/internal/microsoft_edge.py
index 2b658c246..907cff12d 100644
--- a/internal/microsoft_edge.py
+++ b/internal/microsoft_edge.py
@@ -6,6 +6,7 @@
 from datetime import datetime, timedelta
 import glob
 import gzip
+import io
 import logging
 import os
 import re
@@ -134,7 +135,7 @@ def get_driver(self, task):
                 if os.path.isfile(src):
                     shutil.copy(src, extension_dir)
         except Exception:
-            pass
+            logging.exception('Error copying extension')
         capabilities['extensionPaths'] = [extension_dir]
         capabilities['ms:extensionPaths'] = [extension_dir]
         driver = webdriver.Edge(executable_path=self.path, capabilities=capabilities)
@@ -187,6 +188,7 @@ def launch(self, job, task):
             else:
                 task['error'] = 'Error waiting for wpt-etw to start. Make sure .net is installed'
         except Exception as err:
+            logging.exception('Error starting browser')
             task['error'] = 'Error starting browser: {0}'.format(err.__str__())
 
     def stop(self, job, task):
@@ -196,7 +198,7 @@ def stop(self, job, task):
             try:
                 self.driver.quit()
             except Exception:
-                pass
+                logging.exception('Error quitting webdriver')
             self.driver = None
         DesktopBrowser.stop(self, job, task)
         if self.wpt_etw_proc is not None:
@@ -300,7 +302,7 @@ def run_task(self, task):
             try:
                 self.driver.get('about:blank')
             except Exception:
-                logging.debug('Webdriver exception navigating to about:blank after the test')
+                logging.exception('Webdriver exception navigating to about:blank after the test')
             self.task = None
 
     def wait_for_extension(self):
@@ -377,7 +379,7 @@ def process_message(self, message):
                 if message['pid'] == self.pid:
                     self.process_ieframe_message(message)
         except Exception:
-            pass
+            logging.exception('Error processing message')
 
     def process_ie_message(self, message):
         """Handle IE trace events"""
@@ -656,7 +658,7 @@ def execute_js(self, script):
             self.driver.set_script_timeout(30)
             ret = self.driver.execute_script(script)
         except Exception:
-            pass
+            logging.exception('Error executing script')
         return ret
 
     def run_js_file(self, file_name):
@@ -672,7 +674,7 @@ def run_js_file(self, file_name):
                 self.driver.set_script_timeout(30)
                 ret = self.driver.execute_script('return ' + script)
             except Exception:
-                pass
+                logging.exception('Error executing script file')
             if ret is not None:
                 logging.debug(ret)
         return ret
@@ -706,7 +708,7 @@ def collect_browser_metrics(self, task):
                     if custom_metrics[name] is not None:
                         logging.debug(custom_metrics[name])
                 except Exception:
-                    pass
+                    logging.exception('Error collecting custom metric')
             path = os.path.join(task['dir'], task['prefix'] + '_metrics.json.gz')
             with gzip.open(path, 'wt', 7) as outfile:
                 outfile.write(json.dumps(custom_metrics))
@@ -716,7 +718,7 @@ def collect_browser_metrics(self, task):
             if 'heroElements' in self.job:
                 custom_hero_selectors = self.job['heroElements']
             logging.debug('Collecting hero element positions')
-            with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file:
+            with io.open(os.path.join(self.script_dir, 'hero_elements.js'), 'r', encoding='utf-8') as script_file:
                 hero_elements_script = script_file.read()
             script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')'
             hero_elements = self.execute_js(script)
@@ -818,7 +820,7 @@ def process_command(self, command):
                 self.driver.set_script_timeout(30)
                 self.driver.execute_script(script)
             except Exception:
-                pass
+                logging.exception('Error navigating')
             self.page_loaded = None
         elif command['command'] == 'logdata':
             self.task['combine_steps'] = False
@@ -841,7 +843,7 @@ def process_command(self, command):
                 self.driver.set_script_timeout(30)
                 self.driver.execute_script(script)
             except Exception:
-                pass
+                logging.exception('Error executing script command')
         elif command['command'] == 'sleep':
             delay = min(60, max(0, int(re.search(r'\d+', str(command['target'])).group())))
             if delay > 0:
@@ -871,7 +873,7 @@ def process_command(self, command):
                 try:
                     self.driver.add_cookie({'url': url, 'name': name, 'value': value})
                 except Exception:
-                    pass
+                    logging.exception('Error adding cookie')
                 try:
                     import win32inet # pylint: disable=import-error
                     cookie_string = cookie
@@ -890,7 +892,7 @@ def navigate(self, url):
         try:
             self.driver.get(url)
         except Exception as err:
-            logging.debug("Error navigating Edge: %s", str(err))
+            logging.exception("Error navigating Edge: %s", str(err))
 
     def grab_screenshot(self, path, png=True, resize=0):
         """Save the screen shot (png or jpeg)"""
@@ -923,7 +925,7 @@ def grab_screenshot(self, path, png=True, resize=0):
                     except Exception:
                         pass
         except Exception as err:
-            logging.debug('Exception grabbing screen shot: %s', str(err))
+            logging.exception('Exception grabbing screen shot: %s', str(err))
 
     def process_requests(self, task):
         """Convert all of the request and page events into the format needed for WPT"""
@@ -987,7 +989,7 @@ def process_sockets(self):
                         self.requests[first_request]['dnsEnd'] = \
                             self.dns[event_id]['end']
             except Exception:
-                pass
+                logging.exception('Error processing request timings')
 
     def get_empty_request(self, request_id, url):
         """Return and empty, initialized request"""
@@ -1193,7 +1195,7 @@ def process_raw_requests(self):
                                   request['id'], request['url'])
                 requests.append(request)
             except Exception:
-                pass
+                logging.exception('Error processing request')
         if bodies_zip_file is not None:
             bodies_zip_file.close()
         requests.sort(key=lambda x: x['load_start'])
diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py
index b7fc905fd..73b368c84 100644
--- a/internal/optimization_checks.py
+++ b/internal/optimization_checks.py
@@ -418,7 +418,7 @@ def check_keep_alive(self):
                     self.results[request_id] = {}
                 self.results[request_id]['keep_alive'] = check
             except Exception:
-                pass
+                logging.exception('Error checking keep-alive')
 
     def get_time_remaining(self, request):
         """See if a request is static and how long it can be cached for"""
@@ -476,7 +476,7 @@ def get_time_remaining(self, request):
                     if time_remaining < 0:
                         is_static = False
         except Exception:
-            pass
+            logging.exception('Error calculating time remaining')
         return is_static, time_remaining
 
     def check_cache_static(self):
@@ -500,7 +500,7 @@ def check_cache_static(self):
                     self.results[request_id] = {}
                 self.results[request_id]['cache'] = check
             except Exception:
-                pass
+                logging.exception('Error checking cache static')
 
     def check_hosting(self):
         """Pull the data needed to determine the hosting"""
@@ -558,7 +558,7 @@ def check_hosting(self):
                 else:
                     domain = None
         except Exception:
-            pass
+            logging.exception('Error checking hosting')
         self.hosting_time = monotonic() - start
 
     def check_cdn(self):
@@ -670,9 +670,12 @@ def dns_worker(self):
         try:
             while True:
                 domain = self.dns_lookup_queue.get_nowait()
-                provider = self.find_dns_cdn(domain)
-                if provider is not None:
-                    self.dns_result_queue.put({'domain': domain, 'provider': provider})
+                try:
+                    provider = self.find_dns_cdn(domain)
+                    if provider is not None:
+                        self.dns_result_queue.put({'domain': domain, 'provider': provider})
+                except Exception:
+                    logging.debug('Error in dns worker')
                 self.dns_lookup_queue.task_done()
         except Exception:
             pass
@@ -706,7 +709,7 @@ def check_cdn_headers(self, headers):
                     break
             if all_match:
                 matched_cdns.append(cdn)
-                break;
+                break
 
         if not len(matched_cdns):
             return None
@@ -719,8 +722,9 @@ def check_gzip(self):
         for request_id in self.requests:
             try:
                 request = self.requests[request_id]
-                content_length = self.get_header_value(request['response_headers'],
-                                                       'Content-Length')
+                content_length = None
+                if 'response_headers' in request:
+                    content_length = self.get_header_value(request['response_headers'], 'Content-Length')
                 if 'objectSize' in request:
                     content_length = request['objectSize']
                 elif content_length is not None:
@@ -776,7 +780,7 @@ def check_gzip(self):
                     if check['score'] >= 0:
                         self.gzip_results[request_id] = check
             except Exception:
-                pass
+                logging.exception('Error checking gzip')
         self.gzip_time = monotonic() - start
 
     def check_images(self):
@@ -785,8 +789,9 @@ def check_images(self):
         for request_id in self.requests:
             try:
                 request = self.requests[request_id]
-                content_length = self.get_header_value(request['response_headers'],
-                                                       'Content-Length')
+                content_length = None
+                if 'response_headers' in request:
+                    content_length = self.get_header_value(request['response_headers'], 'Content-Length')
                 if content_length is not None:
                     content_length = int(re.search(r'\d+', str(content_length)).group())
                 elif 'transfer_size' in request:
@@ -896,7 +901,7 @@ def check_images(self):
                     if check['score'] >= 0:
                         self.image_results[request_id] = check
             except Exception:
-                pass
+                logging.exception('Error checking images')
         self.image_time = monotonic() - start
 
     def check_progressive(self):
@@ -955,10 +960,10 @@ def check_progressive(self):
                                 block_size = block_size[0] * 256 + block_size[1] - 2
                             pos += block_size
                     except Exception:
-                        pass
+                        logging.exception('Error scanning JPEG')
                     self.progressive_results[request_id] = check
             except Exception:
-                pass
+                logging.exception('Error checking progressive')
         self.progressive_time = monotonic() - start
 
     def check_fonts(self):
@@ -985,7 +990,7 @@ def check_fonts(self):
                         if tables is not None:
                             self.font_results[request_id] = {'table_sizes': tables}
                     except Exception:
-                        pass
+                        logging.exception('Error checking font')
             except Exception:
                 pass
         self.font_time = monotonic() - start
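The dns_worker() change above illustrates a queue-draining worker pattern: get_nowait() raises Empty to end the loop, and the new per-item try keeps one failed lookup from killing the worker or skipping task_done(). A minimal sketch under those assumptions; find_provider and the queue arguments are stand-ins, not agent APIs:

import logging
try:
    from queue import Empty  # Python 3
except ImportError:
    from Queue import Empty  # Python 2

def worker(lookup_queue, result_queue, find_provider):
    """Drain the lookup queue, logging per-item failures."""
    try:
        while True:
            domain = lookup_queue.get_nowait()
            try:
                provider = find_provider(domain)
                if provider is not None:
                    result_queue.put({'domain': domain, 'provider': provider})
            except Exception:
                # Lookup failures are routine, so debug level is enough here.
                logging.debug('Error looking up %s', domain)
            lookup_queue.task_done()
    except Exception:
        pass  # queue drained (Empty raised) - the worker exits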
diff --git a/internal/os_util.py b/internal/os_util.py
index 74c1daacb..6acfdf1b7 100644
--- a/internal/os_util.py
+++ b/internal/os_util.py
@@ -86,7 +86,7 @@ def run_elevated(command, args, wait=True):
             logging.debug('sudo ' + command + ' ' + args)
             ret = subprocess.call('sudo ' + command + ' ' + args, shell=True)
         except Exception:
-            pass
+            logging.exception('Error running elevated command: %s', command)
     return ret
 
 def wait_for_elevated_process(process_info):
@@ -125,5 +125,5 @@ def get_file_version(filename):
         ls = info['FileVersionLS']
         version = '{0}.{1}.{2}.{3}'.format(HIWORD(ms), LOWORD(ms), HIWORD(ls), LOWORD(ls))
     except:
-        pass
+        logging.exception('Error getting file version for %s', filename)
     return version
diff --git a/internal/safari_ios.py b/internal/safari_ios.py
index 50d784ffa..ec6b627a2 100644
--- a/internal/safari_ios.py
+++ b/internal/safari_ios.py
@@ -6,6 +6,7 @@
 import base64
 from datetime import datetime
 import gzip
+import io
 import logging
 import multiprocessing
 import os
@@ -191,8 +192,7 @@ def connect(self, timeout=30):
                         self.websocket.connect()
                         ret = True
                     except Exception as err:
-                        logging.debug("Connect to dev tools websocket Error: %s",
-                                      err.__str__())
+                        logging.exception("Connect to dev tools websocket Error: %s", err.__str__())
                     if not ret:
                         # try connecting to 127.0.0.1 instead of localhost
                         try:
@@ -202,14 +202,13 @@ def connect(self, timeout=30):
                             self.websocket.connect()
                             ret = True
                         except Exception as err:
-                            logging.debug("Connect to dev tools websocket Error: %s",
-                                          err.__str__())
+                            logging.exception("Connect to dev tools websocket Error: %s", err.__str__())
                 else:
                     time.sleep(0.5)
             else:
                 time.sleep(0.5)
         except Exception as err:
-            logging.debug("Connect to dev tools Error: %s", err.__str__())
+            logging.exception("Connect to dev tools Error: %s", err.__str__())
             time.sleep(0.5)
         return ret
 
@@ -219,7 +218,7 @@ def stop(self, job, task):
             try:
                 self.websocket.close()
             except Exception:
-                pass
+                logging.exception('Error closing websocket')
             self.websocket = None
         if self.webinspector_proxy:
             self.webinspector_proxy.terminate()
@@ -277,7 +276,11 @@ def wait_for_page_load(self):
             if self.page_loaded is not None:
                 interval = 0.1
             try:
-                self.process_message(self.messages.get(timeout=interval))
+                message = self.messages.get(timeout=interval)
+                try:
+                    self.process_message(message)
+                except Exception:
+                    logging.exception('Error processing message')
            except Exception:
                 pass
             now = monotonic()
@@ -376,7 +379,7 @@ def collect_browser_metrics(self, task):
                     if custom_metrics[name] is not None:
                         logging.debug(custom_metrics[name])
                 except Exception:
-                    pass
+                    logging.exception('Error collecting custom metric')
             if self.path_base is not None:
                 path = self.path_base + '_metrics.json.gz'
                 with gzip.open(path, 'wt', 7) as outfile:
                     outfile.write(json.dumps(custom_metrics))
@@ -387,7 +390,7 @@ def collect_browser_metrics(self, task):
             if 'heroElements' in self.job:
                 custom_hero_selectors = self.job['heroElements']
             logging.debug('Collecting hero element positions')
-            with open(os.path.join(self.script_dir, 'hero_elements.js'), 'r') as script_file:
+            with io.open(os.path.join(self.script_dir, 'hero_elements.js'), 'r', encoding='utf-8') as script_file:
                 hero_elements_script = script_file.read()
             script = hero_elements_script + '(' + json.dumps(custom_hero_selectors) + ')'
             hero_elements = self.ios.execute_js(script)
@@ -418,7 +421,7 @@ def process_message(self, msg, target_id=None):
                 elif category == 'Target':
                     self.process_target_event(event, msg)
         except Exception:
-            pass
+            logging.exception('Error processing browser message')
         if self.timeline and 'method' in msg and not msg['method'].startswith('Target.') and self.recording:
             json.dump(msg, self.timeline)
             self.timeline.write(",\n")
self.command_responses: - ret = self.command_responses[command_id] - del self.command_responses[command_id] + try: + if msg: + self.process_message(msg) + if command_id in self.command_responses: + ret = self.command_responses[command_id] + del self.command_responses[command_id] + except Exception: + logging.exception('Error processing response to command') except Exception: pass except Exception as err: - logging.debug("Websocket send error: %s", err.__str__()) + logging.exception("Websocket send error: %s", err.__str__()) return ret def flush_pending_messages(self): @@ -1089,8 +1098,11 @@ def flush_pending_messages(self): try: while True: msg = self.messages.get(timeout=0) - if msg: - self.process_message(msg) + try: + if msg: + self.process_message(msg) + except Exception: + logging.exception('Error processing message') except Exception: pass @@ -1359,7 +1371,7 @@ def process_requests(self, raw_requests): request['bytesIn'] = content_length + \ sum(len(s) for s in request['headers']['response']) except Exception: - pass + logging.exception('Error processing response length') requests.append(request) requests.sort(key=lambda x: x['load_start']) return requests @@ -1572,4 +1584,4 @@ def received_message(self, raw): if message: self.messages.put(message) except Exception: - pass + logging.exception('Error processing received message') diff --git a/internal/support/devtools_parser.py b/internal/support/devtools_parser.py index 957f24a03..bd4deee2c 100644 --- a/internal/support/devtools_parser.py +++ b/internal/support/devtools_parser.py @@ -85,7 +85,7 @@ def make_utf8(self, data): try: data[key] = unicode(entry) except Exception: - pass + logging.exception('Error making utf8') elif isinstance(data, list): for key in range(len(data)): entry = data[key] @@ -95,7 +95,7 @@ def make_utf8(self, data): try: data[key] = unicode(entry) except Exception: - pass + logging.exception('Error making utf8') def write(self): """Write out the resulting json data""" @@ -110,7 +110,7 @@ def write(self): with open(self.out_file, 'w') as f_out: json.dump(self.result, f_out) except Exception: - logging.critical("Error writing to " + self.out_file) + logging.exception("Error writing to " + self.out_file) def extract_net_requests(self): """Load the events we are interested in""" @@ -575,7 +575,7 @@ def process_requests(self, raw_requests, raw_page_data): u'{0}: {1}'.format(unicode(key.encode('utf-8')), unicode(value.encode('utf-8')).strip())) except Exception: - pass + logging.exception('Error processing response headers') elif 'headers' in raw_request: for key in raw_request['headers']: for value in raw_request['headers'][key].splitlines(): @@ -584,7 +584,7 @@ def process_requests(self, raw_requests, raw_page_data): u'{0}: {1}'.format(unicode(key.encode('utf-8')), unicode(value.encode('utf-8')).strip())) except Exception: - pass + logging.exception('Error processing request headers') if 'response' in raw_request and 'headersText' in raw_request['response']: for line in raw_request['response']['headersText'].splitlines(): line = unicode(line.encode('utf-8')).strip() @@ -598,7 +598,7 @@ def process_requests(self, raw_requests, raw_page_data): u'{0}: {1}'.format(unicode(key.encode('utf-8')), unicode(value.encode('utf-8')).strip())) except Exception: - pass + logging.exception('Error processing response headers') request['bytesOut'] = len("\r\n".join(str(request['headers']['request']))) request['score_cache'] = -1 request['score_cdn'] = -1 @@ -732,7 +732,7 @@ def process_netlog_requests(self): else: 
request[mapping[key]] = str(entry[key]) except Exception: - pass + logging.exception('Error copying request key %s', key) if protocol is not None: request['protocol'] = protocol if 'start' in entry: @@ -851,7 +851,7 @@ def process_netlog_requests(self): else: request[mapping[key]] = str(entry[key]) except Exception: - pass + logging.exception('Error processing request key %s', key) if 'first_byte' in entry: request['ttfb_ms'] = int(round(entry['first_byte'] - entry['start'])) @@ -1203,7 +1203,7 @@ def process_code_coverage(self): / page_coverage[total]) / 100.0 page_data['code_coverage'] = dict(page_coverage) except Exception: - pass + logging.exception('Error processing code coverage') def process_cpu_times(self): """Calculate the main thread CPU times from the time slices file""" @@ -1253,7 +1253,7 @@ def process_cpu_times(self): page_data[entry] = page_data['cpuTimes'][name] pass except Exception: - pass + logging.exception('Error processing CPU times') def process_v8_stats(self): """Add the v8 stats to the page data""" @@ -1287,8 +1287,7 @@ def process_v8_stats(self): if remainder > 0.0: page_data['v8Stats'][group]['{0}unaccounted'.format(prefix)] = remainder except Exception: - pass - pass + logging.exception('Error processing V8 stats') def main(): """Main entry point""" diff --git a/internal/support/firefox_log_parser.py b/internal/support/firefox_log_parser.py index 64a6448a1..296f0c500 100644 --- a/internal/support/firefox_log_parser.py +++ b/internal/support/firefox_log_parser.py @@ -65,7 +65,7 @@ def process_logs(self, log_file, start_time): try: self.process_log_file(path) except Exception: - pass + logging.exception('Error processing log file') return self.finish_processing() def finish_processing(self): @@ -182,7 +182,7 @@ def process_log_line(self, line): elif msg['category'] == 'nsHostResolver': self.dns_entry(msg) except Exception: - pass + logging.exception('Error processing log line') def main_thread_http_entry(self, msg): """Process a single HTTP log line from the main thread""" diff --git a/internal/support/pcap-parser.py b/internal/support/pcap-parser.py index 01fb04e23..9bfcf9230 100644 --- a/internal/support/pcap-parser.py +++ b/internal/support/pcap-parser.py @@ -42,7 +42,7 @@ def __init__(self): def SaveStats(self, out): - file_name, ext = os.path.splitext(out) + _, ext = os.path.splitext(out) if ext.lower() == '.gz': f = gzip.open(out, 'wt') else: @@ -52,12 +52,12 @@ def SaveStats(self, out): json.dump(result, f) logging.info('Result stats written to {0}'.format(out)) except: - logging.critical('Error writing result stats to {0}'.format(out)) + logging.exception('Error writing result stats to {0}'.format(out)) f.close() def SaveDetails(self, out): - file_name, ext = os.path.splitext(out) + _, ext = os.path.splitext(out) if ext.lower() == '.gz': f = gzip.open(out, 'wt') else: @@ -66,7 +66,7 @@ def SaveDetails(self, out): json.dump(self.slices, f) logging.info('Result details written to {0}'.format(out)) except: - logging.critical('Error writing result details to {0}'.format(out)) + logging.exception('Error writing result details to {0}'.format(out)) f.close() @@ -144,12 +144,12 @@ def Process(self, pcap): if len(packet_data) >= self.linklen: try: self.ProcessPacket(packet_data, packet_info) - except Exception as e: - print(e) + except Exception: + logging.exception('Error processing packet') else: logging.critical("Invalid pcap file " + pcap) except: - logging.critical("Error processing pcap " + pcap) + logging.exception("Error processing pcap " + pcap) if f is 
not None: f.close() diff --git a/internal/support/trace_parser.py b/internal/support/trace_parser.py index 21bab1354..4f77b7b05 100644 --- a/internal/support/trace_parser.py +++ b/internal/support/trace_parser.py @@ -77,7 +77,7 @@ def write_json(self, out_file, json_data): with open(out_file, 'w') as f: json.dump(json_data, f) except BaseException: - logging.critical("Error writing to " + out_file) + logging.exception("Error writing to " + out_file) def WriteUserTiming(self, out_file): out = self.post_process_netlog_events() @@ -136,9 +136,9 @@ def Process(self, trace): line_mode = True self.FilterTraceEvent(trace_event) except BaseException: - pass + logging.exception('Error processing trace line') except BaseException: - logging.critical("Error processing trace " + trace) + logging.exception("Error processing trace " + trace) if f is not None: f.close() self.ProcessTraceEvents() @@ -179,7 +179,7 @@ def ProcessTimeline(self, timeline): self.timeline_events.append(e) self.ProcessTimelineEvents() except BaseException: - logging.critical("Error processing timeline " + timeline) + logging.exception("Error processing timeline " + timeline) if f is not None: f.close() @@ -302,7 +302,7 @@ def post_process_user_timing(self): if not consumed: out.append(event) except Exception: - pass + logging.exception('Error processing user timing event') if lcp_event is not None and 'LargestContentfulPaint' not in candidates: lcp_event['name'] = 'LargestContentfulPaint' out.append(lcp_event) @@ -517,7 +517,7 @@ def ProcessTimelineEvents(self): main_thread = thread main_thread_cpu = thread_cpu except Exception: - pass + logging.exception('Error processing thread') if main_thread is not None: self.cpu['main_thread'] = main_thread @@ -624,7 +624,7 @@ def AdjustTimelineSlice(self, thread, slice_number, name, parent, elapsed): self.cpu['slices'][thread]['total'][slice_number] = min( 1.0, max(0.0, 1.0 - available)) except BaseException: - pass + logging.exception('Error adjusting timeline slice') ########################################################################## # Blink Features @@ -711,7 +711,7 @@ def ProcessNetlogEvent(self, trace_event): elif event_type == 'URL_REQUEST': self.ProcessNetlogUrlRequestEvent(trace_event) except Exception: - pass + logging.exception('Error processing netlog event') def post_process_netlog_events(self): """Post-process the raw netlog events into request data""" @@ -1314,8 +1314,7 @@ def ProcessV8Event(self, trace_event): self.v8stats['threads'][thread][name]['breakdown'][stat]["count"] += int(trace_event["args"]["runtime-call-stats"][stat][0]) self.v8stats['threads'][thread][name]['breakdown'][stat]["dur"] += float(trace_event["args"]["runtime-call-stats"][stat][1]) / 1000.0 except BaseException: - pass - pass + logging.exception('Error processing V8 event') ########################################################################## diff --git a/internal/support/visualmetrics.py b/internal/support/visualmetrics.py index 4ef25f4da..89296e8e5 100644 --- a/internal/support/visualmetrics.py +++ b/internal/support/visualmetrics.py @@ -307,7 +307,8 @@ def find_image_viewport(file): 'width': (right - left), 'height': (bottom - top)} - except Exception as e: + except Exception: + logging.exception('Error calculating viewport') viewport = None if im is not None: @@ -379,7 +380,7 @@ def find_video_viewport(video, directory, find_viewport, viewport_time): bottom - top)} except Exception: - pass + logging.exception('Error finding viewport pixels') if im is not None: try: @@ -393,7 +394,8 @@
def find_video_viewport(video, directory, find_viewport, viewport_time): viewport = {'x': 0, 'y': 0, 'width': width, 'height': height} os.remove(frame) - except Exception as e: + except Exception: + logging.exception('Error finding viewport') viewport = None return viewport @@ -757,7 +759,7 @@ def get_decimate_filter(): decimate = m.groupdict().get('filter') break except BaseException: - logging.critical('Error checking ffmpeg filters for decimate') + logging.exception('Error checking ffmpeg filters for decimate') decimate = None return decimate @@ -804,14 +806,14 @@ def is_color_frame(file, color_file): ).format(image_magick['convert'], color_file, file, crop, image_magick['compare']) compare = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True) - out, err = compare.communicate() + _, err = compare.communicate() if re.match('^[0-9]+$', err): different_pixels = int(err) if different_pixels < 100: match = True break except Exception: - pass + logging.exception('Error checking frame color') if file not in frame_cache: frame_cache[file] = {} frame_cache[file][color_file] = bool(match) @@ -984,7 +986,7 @@ def get_timeline_offset(timeline_file): logging.info( "Trimming {0:d}ms from the start of the video based on timeline synchronization".format(offset)) except BaseException: - logging.critical("Error processing timeline file " + timeline_file) + logging.exception("Error processing timeline file " + timeline_file) return offset @@ -1126,7 +1128,7 @@ def calculate_image_histogram(file): histogram['g'][pixel[1]] += count histogram['b'][pixel[2]] += count except Exception: - pass + logging.exception('Error processing histogram pixel') colors = None except Exception: histogram = None @@ -1232,7 +1234,7 @@ def render_video(directory, video_file): proc.stdin.close() proc.communicate() except Exception: - pass + logging.exception('Error rendering video') ########################################################################## @@ -1348,9 +1350,8 @@ def calculate_visual_metrics(histograms_file, start, end, perceptual, dirs, prog with gzip.open(hero_elements_file, 'rt') as hero_f_in: try: hero_data = json.load(hero_f_in) - except Exception as e: + except Exception: logging.exception('Could not load hero elements data') - logging.exception(e) if hero_data is not None and hero_data['heroes'] is not None and \ hero_data['viewport'] is not None and len(hero_data['heroes']) > 0: diff --git a/internal/traceroute.py b/internal/traceroute.py index acb41edec..7b59b7f4e 100644 --- a/internal/traceroute.py +++ b/internal/traceroute.py @@ -122,7 +122,7 @@ def windows_traceroute(self, hostname): if fields: ret[0] = {'ms': '', 'hostname': hostname, 'addr': fields.group(1)} except Exception: - pass + logging.exception('Error processing traceroute') return last_hop, ret def unix_traceroute(self, hostname): @@ -172,7 +172,7 @@ def unix_traceroute(self, hostname): if fields: ret[0] = {'ms': '', 'hostname': hostname, 'addr': fields.group(1)} except Exception: - pass + logging.exception('Error processing traceroute') return last_hop, ret def run_lighthouse_test(self, task): diff --git a/internal/traffic_shaping.py b/internal/traffic_shaping.py index 6415e3d23..7ed5c62f8 100644 --- a/internal/traffic_shaping.py +++ b/internal/traffic_shaping.py @@ -430,7 +430,7 @@ def install(self): else: logging.critical("Unable to identify default interface using 'route'") except Exception as err: - logging.debug("Error configuring netem: %s", err.__str__()) + logging.exception("Error configuring netem: %s", err.__str__()) 
return ret def remove(self): diff --git a/internal/webpagetest.py b/internal/webpagetest.py index 5cf3a9144..296dfa540 100644 --- a/internal/webpagetest.py +++ b/internal/webpagetest.py @@ -92,14 +92,14 @@ def __init__(self, options, workdir): self.screen_width = GetSystemMetrics(0) self.screen_height = GetSystemMetrics(1) except Exception: - pass + logging.exception('Error getting screen resolution') elif platform.system() == 'Darwin': try: from AppKit import NSScreen # pylint: disable=import-error self.screen_width = int(NSScreen.screens()[0].frame().size.width) self.screen_height = int(NSScreen.screens()[0].frame().size.height) except Exception: - pass + logging.exception('Error getting screen resolution') # See if we have to load dynamic config options if self.options.ec2: self.load_from_ec2() @@ -193,8 +193,7 @@ def load_from_ec2(self): ok = False while not ok: try: - response = session.get('http://169.254.169.254/latest/user-data', - timeout=30, proxies=proxies) + response = session.get('http://169.254.169.254/latest/user-data', timeout=30, proxies=proxies) if len(response.text): self.parse_user_data(response.text) ok = True @@ -205,8 +204,7 @@ def load_from_ec2(self): ok = False while not ok: try: - response = session.get('http://169.254.169.254/latest/meta-data/instance-id', - timeout=30, proxies=proxies) + response = session.get('http://169.254.169.254/latest/meta-data/instance-id', timeout=30, proxies=proxies) if len(response.text): self.instance_id = response.text.strip() ok = True @@ -217,9 +215,7 @@ def load_from_ec2(self): ok = False while not ok: try: - response = session.get( - 'http://169.254.169.254/latest/meta-data/placement/availability-zone', - timeout=30, proxies=proxies) + response = session.get('http://169.254.169.254/latest/meta-data/placement/availability-zone', timeout=30, proxies=proxies) if len(response.text): self.zone = response.text.strip() if not len(self.test_locations): @@ -340,7 +336,7 @@ def parse_user_data(self, user_data): elif key == 'fps': self.fps = int(re.search(r'\d+', str(value)).group()) except Exception: - pass + logging.exception('Error parsing metadata') # pylint: disable=E1101 def get_uptime_minutes(self): @@ -819,7 +815,7 @@ def build_script(self, job, task): addr = sockaddr[0] break except Exception: - pass + logging.exception('Error resolving DNS for %s', value) if addr is not None and target.find('"') == -1: if 'dns_override' not in task: task['dns_override'] = [] @@ -970,7 +966,7 @@ def get_bodies(self, task): body_index = index bodies.append(request_id) except Exception: - pass + logging.exception('Error matching requests to bodies') for request in requests['requests']: if 'full_url' in request and \ 'responseCode' in request \ @@ -1036,7 +1032,7 @@ def get_bodies(self, task): file_name = '{0:03d}-{1}-body.txt'.format(body_index, task['id']) bodies.append({'name': file_name, 'file': task['file']}) except Exception: - pass + logging.exception('Error appending bodies') self.fetch_result_queue.task_done() except Exception: pass @@ -1046,7 +1042,7 @@ def get_bodies(self, task): for body in bodies: zip_file.write(body['file'], body['name']) except Exception: - pass + logging.exception('Error backfilling bodies') def upload_task_result(self, task): """Upload the result of an individual test run""" diff --git a/wptagent.py b/wptagent.py index 1725086cd..25863f310 100644 --- a/wptagent.py +++ b/wptagent.py @@ -177,7 +177,7 @@ def run_single_test(self): try: browser.run_lighthouse_test(self.task) except Exception: - pass + 
logging.exception('Error running lighthouse test') if self.task['lighthouse_log']: log_file = os.path.join(self.task['dir'], 'lighthouse.log.gz') with gzip.open(log_file, 'wt', 7) as f_out: From d85d7eb4ece1601e65519b73ce607b9b0449e041 Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Sun, 29 Dec 2019 10:55:21 -0800 Subject: [PATCH 11/16] Fixed content sniffing in python 3 --- internal/optimization_checks.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index 2ae4c5268..ea6371072 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -18,6 +18,7 @@ if (sys.version_info > (3, 0)): from time import monotonic GZIP_TEXT = 'wt' + unicode = str else: from monotonic import monotonic GZIP_TEXT = 'w' @@ -1015,23 +1016,23 @@ def get_header_value(self, headers, name): def sniff_content(self, raw_bytes): """Check the beginning of the file to see if it is a known image type""" content_type = None - hex_bytes = binascii.hexlify(raw_bytes[:14]).lower() + hex_bytes = binascii.hexlify(raw_bytes[:14]) # spell-checker: disable - if hex_bytes[0:6] == 'ffd8ff': + if hex_bytes[0:6] == b'ffd8ff': content_type = 'jpeg' - elif hex_bytes[0:16] == '89504e470d0a1a0a': + elif hex_bytes[0:16] == b'89504e470d0a1a0a': content_type = 'png' - elif raw_bytes[:6] == 'GIF87a' or raw_bytes[:6] == 'GIF89a': + elif raw_bytes[:6] == b'GIF87a' or raw_bytes[:6] == b'GIF89a': content_type = 'gif' - elif raw_bytes[:4] == 'RIFF' and raw_bytes[8:14] == 'WEBPVP': + elif raw_bytes[:4] == b'RIFF' and raw_bytes[8:14] == b'WEBPVP': content_type = 'webp' - elif raw_bytes[:4] == 'OTTO': + elif raw_bytes[:4] == b'OTTO': content_type = 'OTF' - elif raw_bytes[:4] == 'ttcf': + elif raw_bytes[:4] == b'ttcf': content_type = 'TTF' - elif raw_bytes[:4] == 'wOFF': + elif raw_bytes[:4] == b'wOFF': content_type = 'WOFF' - elif raw_bytes[:4] == 'wOF2': + elif raw_bytes[:4] == b'wOF2': content_type = 'WOFF2' # spell-checker: enable return content_type From cb0bb532cad86f994771e1eaa5d0ec8b8a84a5a5 Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Sun, 29 Dec 2019 11:01:26 -0800 Subject: [PATCH 12/16] Fixed png processing for python 3 --- internal/optimization_checks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/optimization_checks.py b/internal/optimization_checks.py index ea6371072..976839f2f 100644 --- a/internal/optimization_checks.py +++ b/internal/optimization_checks.py @@ -835,9 +835,9 @@ def check_images(self): check['score'] = 100 else: # spell-checker: disable - image_chunks = ["iCCP", "tIME", "gAMA", "PLTE", "acTL", "IHDR", "cHRM", - "bKGD", "tRNS", "sBIT", "sRGB", "pHYs", "hIST", "vpAg", - "oFFs", "fcTL", "fdAT", "IDAT"] + image_chunks = [b"iCCP", b"tIME", b"gAMA", b"PLTE", b"acTL", b"IHDR", b"cHRM", + b"bKGD", b"tRNS", b"sBIT", b"sRGB", b"pHYs", b"hIST", b"vpAg", + b"oFFs", b"fcTL", b"fdAT", b"IDAT"] # spell-checker: enable body = request['response_body'] image_size = len(body) From e2fa5127cb5f160c0803150c083f46c7f723402a Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Mon, 30 Dec 2019 11:38:14 -0800 Subject: [PATCH 13/16] Don't auto-install modules or force marionette to update when running in python3 --- wptagent.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/wptagent.py b/wptagent.py index 6e970097e..52acb5bbf 100644 --- a/wptagent.py +++ b/wptagent.py @@ -276,7 +276,7 @@ def requires(self, module, module_name=None): ret = 
True except ImportError: pass - if not ret: + if not ret and sys.version_info < (3, 0): from internal.os_util import run_elevated logging.debug('Trying to install %s...', module_name) subprocess.call([sys.executable, '-m', 'pip', 'uninstall', '-y', module_name]) @@ -289,7 +289,10 @@ def requires(self, module, module_name=None): except ImportError: pass if not ret: - print("Missing {0} module. Please run 'pip install {1}'".format(module, module_name)) + if (sys.version_info > (3, 0)): + print("Missing {0} module. Please run 'pip3 install {1}'".format(module, module_name)) + else: + print("Missing {0} module. Please run 'pip install {1}'".format(module, module_name)) return ret def startup(self): @@ -324,7 +327,7 @@ def startup(self): import wsaccel wsaccel.patch_ws4py() except Exception: - pass + logging.debug('wsaccel not installed, Chrome debug interface will be slower than it could be') try: subprocess.check_output(['python', '--version']) @@ -387,7 +390,7 @@ def startup(self): ret = False # Fix Lighthouse install permissions - if platform.system() != "Windows": + if platform.system() != "Windows" and sys.version_info < (3, 0): from internal.os_util import run_elevated run_elevated('chmod', '-R 777 ~/.config/configstore/') try: @@ -733,7 +736,7 @@ def find_browsers(): logging.debug('Detected Browsers:') for browser in browsers: logging.debug('%s: %s', browser, browsers[browser]['exe']) - if 'Firefox' in browsers: + if 'Firefox' in browsers and sys.version_info < (3, 0): try: # make sure marionette is up to date from internal.os_util import run_elevated From 87b8f0daadd8ce86324b26cfb393fe2a6f3ef0db Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Mon, 30 Dec 2019 11:48:11 -0800 Subject: [PATCH 14/16] Updated the fonttools dependency to the version that still supports python 2.7 --- Dockerfile | 2 +- ubuntu_install.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9166ebe14..4259b666c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -67,7 +67,7 @@ RUN apt-get update && \ wsaccel \ xvfbwrapper \ brotli \ - fonttools \ + 'fonttools>=3.44.0,<4.0.0' \ marionette_driver COPY wptagent.py /wptagent/wptagent.py diff --git a/ubuntu_install.sh b/ubuntu_install.sh index 1dcfd2cd3..9af34e6ad 100755 --- a/ubuntu_install.sh +++ b/ubuntu_install.sh @@ -14,7 +14,7 @@ until sudo pip install dnspython monotonic pillow psutil requests git+git://gith do sleep 1 done -sudo pip install fonttools +sudo pip install 'fonttools>=3.44.0,<4.0.0' curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash - until sudo apt-get install -y nodejs do From f231dcf9130642d85c3fbad24a9a44cb9324ba44 Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Sat, 4 Jan 2020 12:42:18 -0500 Subject: [PATCH 15/16] Updated to node 12 --- Dockerfile | 2 +- docs/install.md | 2 +- ubuntu_install.sh | 4 ++-- wptagent.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4259b666c..3a2ef83a3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,7 +26,7 @@ RUN apt-get update && \ iproute2 \ software-properties-common && \ # Node setup - curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash - && \ + curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - && \ wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \ wget -qO- https://deb.opera.com/archive.key | apt-key add - && \ echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \ diff --git 
a/docs/install.md b/docs/install.md index 0a25f1d8a..0175e9348 100644 --- a/docs/install.md +++ b/docs/install.md @@ -49,7 +49,7 @@ wptagent currently supports Windows, Linux and OSX for desktop browsers as well ## For lighthouse testing * NodeJS * Ubuntu/Debian: - * ```curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -``` + * ```curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -``` * ```sudo apt-get install -y nodejs``` * The lighthouse npm module * ```sudo npm install -g lighthouse``` diff --git a/ubuntu_install.sh b/ubuntu_install.sh index 9af34e6ad..f902f3048 100755 --- a/ubuntu_install.sh +++ b/ubuntu_install.sh @@ -3,7 +3,7 @@ until sudo apt-get update do sleep 1 done -until sudo apt-get install -y python2.7 python-pip imagemagick ffmpeg xvfb dbus-x11 cgroup-tools traceroute software-properties-common psmisc libnss3-tools iproute2 net-tools git +until sudo apt-get install -y python2.7 python-pip imagemagick ffmpeg xvfb dbus-x11 cgroup-tools traceroute software-properties-common psmisc libnss3-tools iproute2 net-tools git curl do sleep 1 done @@ -15,7 +15,7 @@ do sleep 1 done sudo pip install 'fonttools>=3.44.0,<4.0.0' -curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash - +curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - until sudo apt-get install -y nodejs do sleep 1 diff --git a/wptagent.py b/wptagent.py index 52acb5bbf..d0d8ac15f 100644 --- a/wptagent.py +++ b/wptagent.py @@ -403,8 +403,8 @@ def startup(self): if self.get_node_version() < 10.0: if platform.system() == "Linux": # This only works on debian-based systems - logging.debug('Updating Node.js to 10.x') - subprocess.call('curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -', + logging.debug('Updating Node.js to 12.x') + subprocess.call('curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash -', shell=True) subprocess.call(['sudo', 'apt-get', 'install', '-y', 'nodejs']) if self.get_node_version() < 10.0: From 1c051a5331f450ad938ae251bb65432cd3b0e24a Mon Sep 17 00:00:00 2001 From: Patrick Meenan Date: Wed, 15 Jan 2020 13:20:48 -0500 Subject: [PATCH 16/16] Added support for Microsoft Edge stable release --- wptagent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wptagent.py b/wptagent.py index d0d8ac15f..8a9301dcc 100644 --- a/wptagent.py +++ b/wptagent.py @@ -623,7 +623,7 @@ def find_browsers(): browsers['Edge'] = dict(edge) # Microsoft Edge (Chromium) paths = [program_files, program_files_x86, local_appdata] - channels = ['Edge Dev'] + channels = ['Edge', 'Edge Dev'] for channel in channels: for path in paths: if path is not None and channel not in browsers:
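
For reference, here is a minimal standalone sketch (not part of the patch series itself) collecting the Python 2/3 compatibility idioms the patches above apply: the version-gated monotonic/GZIP_TEXT/unicode shim and the bytes-literal magic-number sniffing from PATCH 11. The helper name and the demo at the bottom are illustrative only; on Python 2 it assumes the monotonic backport package is installed, as the install scripts above arrange.

    import binascii
    import sys

    if sys.version_info > (3, 0):
        from time import monotonic
        GZIP_TEXT = 'wt'     # py3 gzip needs an explicit text mode
        unicode = str        # alias so py2-era unicode() calls keep working
    else:
        from monotonic import monotonic  # backport package on py2
        GZIP_TEXT = 'w'

    def sniff_content(raw_bytes):
        """Identify common image types from their magic numbers.
        Comparisons use bytes literals so they still match on py3,
        where file and network payloads are bytes rather than str."""
        content_type = None
        hex_bytes = binascii.hexlify(raw_bytes[:14])  # bytes on py3, str on py2
        if hex_bytes[0:6] == b'ffd8ff':
            content_type = 'jpeg'
        elif hex_bytes[0:16] == b'89504e470d0a1a0a':
            content_type = 'png'
        elif raw_bytes[:6] == b'GIF87a' or raw_bytes[:6] == b'GIF89a':
            content_type = 'gif'
        elif raw_bytes[:4] == b'RIFF' and raw_bytes[8:14] == b'WEBPVP':
            content_type = 'webp'
        return content_type

    if __name__ == '__main__':
        start = monotonic()
        # PNG signature: \x89 P N G \r \n \x1a \n
        assert sniff_content(b'\x89PNG\r\n\x1a\n' + b'\x00' * 6) == 'png'
        print('sniff check passed in {0:0.6f}s'.format(monotonic() - start))

Under this shim the same source runs unchanged on both interpreters, which is why the patches can convert files incrementally instead of forking the agent.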