class BrowserMobProxy:
    """Owns one BrowserMob Proxy server process and one proxy created on it.

    Keyword arguments:
        trust_all_servers: truthy -> the proxy accepts any TLS certificate.
        capture_content:   truthy -> response bodies are recorded in the HAR.
    """

    def __init__(self, **kwargs):
        self.server = Server(BrowserMobProxyConfig.binary)
        self.server.start()
        self.host = 'localhost'
        self.trust_all_servers = kwargs.get('trust_all_servers')
        self.create_proxy()
        if kwargs.get('capture_content'):
            self.capture_content()
        self.blacklist_extras()

    @property
    def api_base_url(self):
        # REST endpoint of the BrowserMob server process itself.
        return "http://{}:{}".format(self.host, self.server.port)

    @property
    def proxy_server(self):
        # host:port of the proxy allocated by create_proxy().
        return "{}:{}".format(self.host, self.port)

    def _at(self, suffix):
        # Absolute REST URL for a path relative to the server root.
        return "{}/{}".format(self.api_base_url, suffix)

    def create_proxy(self):
        """Allocate a new proxy port on the server and attach a Client to it."""
        payload = {'trustAllServers': 'true'} if self.trust_all_servers else {}
        resp = requests.post(self._at('proxy'), data=payload)
        self.port = json.loads(resp.text)['port']
        self.proxy = Client(
            self.api_base_url.replace('http://', ''),
            options={'existing_proxy_port_to_use': self.port},
        )

    def capture_content(self):
        """Enable HAR body capture for this proxy."""
        requests.put(
            self._at('proxy/{}/har'.format(self.port)),
            data={'captureContent': 'true'},
        )

    def blacklist_extras(self):
        """Answer 401 for every known third-party 'extras' site."""
        for site in extras:
            self.proxy.blacklist(get_generic_url_regex(site), 401)

    def close(self):
        """Release the proxy, then stop the underlying server process."""
        self.proxy.close()
        self.server.stop()
class TestTwo(object):
    """Drives Firefox through a BrowserMob proxy client and inspects the HAR."""

    def setup_method(self, method):
        # Fresh proxy client and a browser wired through it for every test.
        self.client = Client("http://localhost:8080")
        ff_profile = selenium.webdriver.FirefoxProfile()
        ff_profile.set_proxy(self.client.webdriver_proxy())
        self.driver = selenium.webdriver.Firefox(firefox_profile=ff_profile)

    def teardown_method(self, method):
        self.driver.quit()
        self.client.close()

    @pytest.mark.pycon
    @pytest.mark.deep
    @pytest.mark.blacklist
    def test_one(self):
        # Short-circuit Google Analytics, then record traffic for the home page.
        self.client.blacklist("http://www\\.google-analytics\\.com/.*", 309)
        self.client.new_har()
        HomePage(self.driver).open().wait_until_loaded()
        recorded_har = self.client.har
        print(recorded_har)
def get(self, URL):
    """Load *URL* in headless Chrome behind a BrowserMob proxy and return
    size/timing statistics gathered from the resulting HAR.

    Returns:
        dict of stringified metrics (bodySize, browser_download_time,
        startDownloadTime, LastStartDownloadTime, total_download_time),
        or the string 'GET ERROR: ...' when the page load itself fails.
    """
    proxy = Client('im-expservices1.gksm.local:8999')
    try:
        chromedriver = './chromedriver'
        os.environ['webdriver.chrome.driver'] = chromedriver
        # Client.proxy is bare 'host:port' (no scheme), so urlparse puts
        # the whole thing in .path — presumably intentional; verify.
        url = urlparse.urlparse(proxy.proxy).path
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--no-sandbox')
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-dev-shm-usage')
        chrome_options.add_argument('--proxy-server={0}'.format(url))
        driver = webdriver.Chrome(chromedriver, chrome_options=chrome_options)
        try:
            driver.set_window_size(1920, 1080)
            try:
                proxy.new_har(str(URL), options={
                    'captureHeaders': True,
                    'captureContent': True,
                    'captureBinaryContent': True,
                })
                driver.get(URL)
                proxy.wait_for_traffic_to_stop(100, 20000)
            except Exception as err:
                # Preserve the best-effort contract: report the failure as a
                # string instead of raising.  Cleanup happens in the finally
                # blocks below (the original leaked driver+proxy here).
                return 'GET ERROR: ' + str(err)
            # Resize the window to the full rendered document height/width.
            S = lambda X: driver.execute_script(
                'return document.body.parentNode.scroll' + X)
            driver.set_window_size(S('Width'), S('Height'))
            # Snapshot the HAR once — each proxy.har access refetches it
            # from the remote REST API.
            entries = proxy.har['log']['entries']
            bodySize = [int(e['response']['bodySize']) for e in entries]
            download_time = [int(e['time']) for e in entries]
            startDownloadTime = datetime.datetime.strptime(
                str(entries[0]['startedDateTime']),
                '%Y-%m-%dT%H:%M:%S.%fZ')
            LastStartDownloadTime = datetime.datetime.strptime(
                str(entries[-1]['startedDateTime']),
                '%Y-%m-%dT%H:%M:%S.%fZ')
            return {
                'bodySize': str(sum(bodySize)),
                'browser_download_time': str(sum(download_time)),
                'LastStartDownloadTime': str(LastStartDownloadTime),
                'startDownloadTime': str(startDownloadTime),
                'total_download_time': str(
                    (LastStartDownloadTime - startDownloadTime).total_seconds()),
            }
        finally:
            driver.quit()
    finally:
        proxy.close()