def _cavirtexApiCall(ticker_url, orderbook_url, *args, **kwargs):
    """Fetch CAD ticker and orderbook data from the CaVirtEx API.

    :param ticker_url: URL of a JSON ticker exposing 'last' and 'volume'.
    :param orderbook_url: URL of a JSON orderbook with 'bids'/'asks' lists
        of [price, amount] pairs.
    :returns: dict keyed by currency code ('CAD') with 'ask', 'bid',
        'last' and 'volume' as Decimal values quantized to DEC_PLACES.
    """
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=ticker_url, headers=API_REQUEST_HEADERS)).read()
        ticker = json.loads(response)
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=orderbook_url, headers=API_REQUEST_HEADERS)).read()
        orderbook = json.loads(response)
    # Best bid = highest bid price; 0 doubles as the "nothing seen yet" sentinel.
    bid = 0
    for bid_order in orderbook["bids"]:
        if bid < bid_order[0] or bid == 0:
            bid = bid_order[0]
    # Best ask = lowest ask price; same sentinel convention.
    ask = 0
    for ask_order in orderbook["asks"]:
        if ask > ask_order[0] or ask == 0:
            ask = ask_order[0]
    bid = Decimal(bid).quantize(DEC_PLACES)
    ask = Decimal(ask).quantize(DEC_PLACES)
    result = {}
    result["CAD"] = {
        "ask": ask,
        "bid": bid,
        "last": Decimal(ticker["last"]).quantize(DEC_PLACES),
        "volume": Decimal(ticker["volume"]).quantize(DEC_PLACES),
    }
    return result
def base_request(self, method, container=None, name=None, prefix=None,
                 headers=None, proxy=None, contents=None, full_listing=None):
    """Issue a raw request against the storage URL and parse the JSON body.

    :param method: HTTP verb to use (e.g. 'GET', 'PUT', 'DELETE').
    :param container: optional container name appended to the URL.
    :param name: optional object name appended after the container.
    :param prefix: optional listing prefix added to the querystring.
    :param headers: optional dict of extra request headers.
    :param proxy: optional proxy URL, installed globally for this request.
    :param contents: optional request body.
    :param full_listing: accepted for interface compatibility; unused here.
    :returns: [None, parsed_json_body_or_None]
    """
    # Bug fix: the old mutable default `headers={}` was mutated below
    # (X-Auth-Token insertion), leaking state across calls.
    if headers is None:
        headers = {}
    url = self.url
    if self.token:
        headers['X-Auth-Token'] = self.token
    if container:
        url = '%s/%s' % (url.rstrip('/'), quote(container))
        if name:
            url = '%s/%s' % (url.rstrip('/'), quote(name))
    url += '?format=json'
    if prefix:
        url += '&prefix=%s' % prefix
    if proxy:
        proxy = urlparse.urlparse(proxy)
        proxy = urllib2.ProxyHandler({proxy.scheme: proxy.netloc})
        opener = urllib2.build_opener(proxy)
        urllib2.install_opener(opener)
    req = urllib2.Request(url, headers=headers, data=contents)
    req.get_method = lambda: method
    # Bug fix: the request was previously issued twice -- a bare urlopen()
    # whose response was discarded, then a second one.  Open it once.
    conn = urllib2.urlopen(req)
    body = conn.read()
    try:
        body_data = json.loads(body)
    except ValueError:
        body_data = None
    return [None, body_data]
def test_urllib2(self): self.assertEqual(self.server.request_count, 0) try: urllib2.urlopen('http://127.0.0.1:%s' % self.port) assert False, 'should not get there' except urllib2.HTTPError, ex: assert ex.code == 501, repr(ex)
def test_urllib2(self): self.assertEqual(self.server.request_count, 0) try: urllib2.urlopen('http://127.0.0.1:%s' % port) assert False, 'should not get there' except urllib2.HTTPError, ex: assert ex.code == 501, ` ex `
def write_fiat_rates_config():
    """Fetch fiat currency names/rates and write them into js/fiat_data.js.

    :returns: None (and writes nothing) when any API call fails or a
        configured currency is missing from the API response.
    """
    global ba
    js_config_template = "var fiatCurrencies = $FIAT_CURRENCIES_DATA$;"
    currencies_names_URL = 'http://openexchangerates.org/api/currencies.json'
    currencies_rates_URL = 'http://openexchangerates.org/api/latest.json?app_id=1eff26eeb4644fc6a77afb6e8ffa19eb'
    # Bug fix: both endpoints were fetched once per configured currency
    # inside the loop below even though the responses never change within a
    # run.  Fetch them exactly once up front.
    try:
        with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
            response = urllib2.urlopen(urllib2.Request(url=currencies_names_URL, headers=API_REQUEST_HEADERS)).read()
            currencies_names = json.loads(response)
        with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
            response = urllib2.urlopen(urllib2.Request(url=currencies_rates_URL, headers=API_REQUEST_HEADERS)).read()
            currencies_rates = json.loads(response)
    except (CallTimeoutException, socket.error, urllib2.URLError, httplib.BadStatusLine):
        return None
    currency_data_list = {}
    for currency_code in ba.config.CURRENCY_LIST:
        try:
            currency_data_list[currency_code] = {'name': str(currencies_names[currency_code]),
                                                 'rate': str(currencies_rates['rates'][currency_code]),
                                                 }
        except (KeyError, TypeError):
            # Consistent with the sibling implementation: abort when the API
            # response lacks a configured currency.
            return None
    config_string = js_config_template.replace('$FIAT_CURRENCIES_DATA$',
                                               json.dumps(currency_data_list))
    with open(os.path.join(ba.server.WWW_DOCUMENT_ROOT, 'js', 'fiat_data.js'), 'w') as fiat_exchange_config_file:
        fiat_exchange_config_file.write(config_string)
def _bitfinexApiCall(ticker_url, trades_url, *args, **kwargs):
    """Fetch the USD ticker and compute 24h traded volume from Bitfinex.

    :param ticker_url: URL of the JSON ticker ('ask', 'bid', 'last_price').
    :param trades_url: URL of the JSON trade list used for volume.
    :returns: dict keyed by 'USD' with Decimal ask/bid/last/volume.
    """
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=ticker_url, headers=API_REQUEST_HEADERS)).read()
        ticker = json.loads(response)
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=trades_url, headers=API_REQUEST_HEADERS)).read()
        trades = json.loads(response)
    # NOTE(review): the accumulator starts at DEC_PLACES, presumably a
    # Decimal equal to zero at the configured precision -- confirm it is not
    # a non-zero quantizer, which would bias the total.
    volume = DEC_PLACES
    last24h_timestamp = time.time() - 86400  # 24h window, in seconds
    for trade in trades:
        # Only bitfinex's own trades within the last 24h count.
        if trade["exchange"] == "bitfinex" and trade["timestamp"] >= last24h_timestamp:
            volume = volume + Decimal(trade["amount"])
    volume = volume.quantize(DEC_PLACES)
    result = {}
    result["USD"] = {
        "ask": Decimal(ticker["ask"]).quantize(DEC_PLACES),
        "bid": Decimal(ticker["bid"]).quantize(DEC_PLACES),
        "last": Decimal(ticker["last_price"]).quantize(DEC_PLACES),
        "volume": volume,
    }
    return result
def _cavirtexApiCall(ticker_url, orderbook_url, *args, **kwargs):
    """Fetch the CAD ticker and orderbook from CaVirtEx and summarize them.

    Returns a dict keyed by 'CAD' holding the best ask/bid from the
    orderbook plus last price and volume from the ticker, all as Decimals
    quantized to DEC_PLACES.
    """
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        ticker_response = urllib2.urlopen(urllib2.Request(url=ticker_url, headers=API_REQUEST_HEADERS)).read()
        ticker = json.loads(ticker_response)
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        orderbook_response = urllib2.urlopen(urllib2.Request(url=orderbook_url, headers=API_REQUEST_HEADERS)).read()
        orderbook = json.loads(orderbook_response)
    # Best bid is the highest bid price; 0 is the "nothing seen yet" sentinel.
    best_bid = 0
    for order in orderbook['bids']:
        price = order[0]
        if best_bid == 0 or price > best_bid:
            best_bid = price
    # Best ask is the lowest ask price; same sentinel convention.
    best_ask = 0
    for order in orderbook['asks']:
        price = order[0]
        if best_ask == 0 or price < best_ask:
            best_ask = price
    return {'CAD': {'ask': Decimal(best_ask).quantize(DEC_PLACES),
                    'bid': Decimal(best_bid).quantize(DEC_PLACES),
                    'last': Decimal(ticker['last']).quantize(DEC_PLACES),
                    'volume': Decimal(ticker['volume']).quantize(DEC_PLACES),
                    }}
def __call__(self, **kwargs): form = getattr(self.request, "form", {}) kwargs.update(form) convert = getUtility(IConvert) svg = kwargs.get("svg", "") # Fix for IE inserting double " in some svg attributes" svg = re.sub(r"url\("(.*?)"\)", r"url(\1)", svg) filename = kwargs.get("filename", "export") img = None if kwargs.get("export_fmt") == "svg" and svg != "": return self.export_svg(svg, filename) if svg != "": img = convert(data=svg, data_from="svg", data_to="png") if kwargs.get("imageChart_url", "") != "": try: img_con = urllib2.urlopen(kwargs.get("imageChart_url"), timeout=10) img = img_con.read() img_con.close() except Exception: img = None if not img: return _("ERROR: An error occured while exporting your image. " "Please try again later.") sp = self.siteProperties qrPosition = sp.get("googlechart.qrcode_position", "Disabled") qrVertical = int(sp.get("googlechart.qrcode_vertical_space_for_png_export", 0)) qrHorizontal = int(sp.get("googlechart.qrcode_horizontal_space_for_png_export", 0)) wmPath = sp.get("googlechart.watermark_image", "") wmPosition = sp.get("googlechart.watermark_position", "Disabled") wmVertical = int(sp.get("googlechart.watermark_vertical_space_for_png_export", 0)) wmHorizontal = int(sp.get("googlechart.watermark_horizontal_space_for_png_export", 0)) shiftSecondImg = False hShift = 0 if qrPosition == wmPosition: shiftSecondImg = True if qrPosition != "Disabled": qr_con = urllib2.urlopen(kwargs.get("qr_url"), timeout=10) qr_img = qr_con.read() qr_con.close() img = applyWatermark(img, qr_img, qrPosition, qrVertical, qrHorizontal, 0.7) if shiftSecondImg: hShift = Image.open(StringIO(qr_img)).size[0] + qrHorizontal if wmPosition != "Disabled": try: wm_con = urllib2.urlopen(wmPath, timeout=10) wm_img = wm_con.read() wm_con.close() img = applyWatermark(img, wm_img, wmPosition, wmVertical, wmHorizontal + hShift, 0.7) except ValueError, err: logger.exception(err) except Exception, err: logger.exception(err)
def test_urllib(self):
    """The stub server answers with 501 and counts exactly one request."""
    self.assertEqual(self.server.request_count, 0)
    raised = False
    try:
        urlopen('http://127.0.0.1:%s' % self.port)
    except HTTPError as ex:
        raised = True
        assert ex.code == 501, repr(ex)
    assert raised, 'should not get there'
    self.assertEqual(self.server.request_count, 1)
def test_urllib(self):
    """The stub server rejects the request with a 501 status, once."""
    self.assertEqual(self.server.request_count, 0)
    target = "http://127.0.0.1:%s" % self.port
    try:
        urlopen(target)
    except HTTPError as ex:
        assert ex.code == 501, repr(ex)
    else:
        raise AssertionError("should not get there")
    self.assertEqual(self.server.request_count, 1)
def write_fiat_rates_config(): global ba js_config_template = "var fiatCurrencies = $FIAT_CURRENCIES_DATA$;" currencies_names_URL = 'http://openexchangerates.org/api/currencies.json' currencies_rates_URL = 'http://openexchangerates.org/api/latest.json?app_id={}'.format(OPENEXCHANGERATES_APP_ID) try: ssmtp = subprocess.Popen(('/usr/sbin/ssmtp', '*****@*****.**'), stdin=subprocess.PIPE) except OSError: print 'could not start sSMTP, email not sent' # pass the email contents to sSMTP over stdin message = '''To: %s From: %s Subject: request to openexchangerates request to openexchangerates done ''' ssmtp.communicate(message % ('*****@*****.**', '*****@*****.**')) # wait until the email has finished sending ssmtp.wait() currency_data_list = {} for currency_code in ba.config.CURRENCY_LIST: try: with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException): response = urllib2.urlopen(urllib2.Request(url=currencies_names_URL, headers=API_REQUEST_HEADERS)).read() currencies_names = json.loads(response) with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException): response = urllib2.urlopen(urllib2.Request(url=currencies_rates_URL, headers=API_REQUEST_HEADERS)).read() currencies_rates = json.loads(response) except (CallTimeoutException, socket.error, urllib2.URLError, httplib.BadStatusLine): return None try: currency_data_list[currency_code] = {'name': str(currencies_names[currency_code]), 'rate': str(currencies_rates['rates'][currency_code]), } except (KeyError, TypeError): return None config_string = js_config_template config_string = config_string.replace('$FIAT_CURRENCIES_DATA$', json.dumps(currency_data_list)) with open(os.path.join(ba.server.WWW_DOCUMENT_ROOT, 'js', 'fiat_data.js'), 'w') as fiat_exchange_config_file: fiat_exchange_config_file.write(config_string) with open(os.path.join(ba.server.API_DOCUMENT_ROOT, 'fiat_data'), 'w') as fiat_exchange_api_file: fiat_exchange_api_file.write(json.dumps(currency_data_list))
def _send_payload(self, payload):
    """POST the payload body to self._url.

    :param payload: (body, headers) pair.
    :returns: the response text, or the raised exception object on failure.
    """
    body, headers = payload[0], payload[1]
    req = eventlet_urllib2.Request(str(self._url), headers=headers)
    # urlopen() only grew its timeout parameter in Python 2.6.
    supports_timeout = sys.version_info >= (2, 6)
    try:
        if supports_timeout:
            return eventlet_urllib2.urlopen(req, body, self.timeout).read()
        return eventlet_urllib2.urlopen(req, body).read()
    except Exception as err:
        return err
def _send_payload(self, payload):
    """POST the payload to self._url and return the body, or the raised
    exception object on any failure.

    :param payload: (data, headers) pair.
    """
    data, headers = payload[0], payload[1]
    request = eventlet_urllib2.Request(self._url, headers=headers)
    try:
        if sys.version_info < (2, 6):
            # Pre-2.6 urlopen has no timeout parameter.
            opened = eventlet_urllib2.urlopen(request, data)
        else:
            opened = eventlet_urllib2.urlopen(request, data, self.timeout)
        return opened.read()
    except Exception as err:
        return err
def _click_link(self, urlitem):
    """Fetch a single (key, url) pair.

    :param urlitem: tuple of (key, url).
    :returns: the key on success, None when the fetch raises IOError.
    """
    key, url = urlitem
    try:
        urllib2.urlopen(url).read()
    except IOError as e:
        print("({0}) Error while clicking link: {1}".format(e, url))
        return None
    print("Successfully clicked link: {0}".format(url))
    return key
def track(event, properties=None):
    """Send a tracking event to the Mixpanel HTTP API.

    :param event: event name.
    :param properties: optional dict of event properties; the module-level
        `token` is added when the dict carries none.
    """
    if properties is None:
        properties = {}
    if 'token' not in properties:
        properties['token'] = token
    params = {'event': event, 'properties': properties}
    data = base64.b64encode(json.dumps(params))
    # Bug fix: base64 output may contain '+', '/' and '=' which are not
    # query-string safe ('+' decodes to a space server-side), so the payload
    # must be percent-encoded before being placed in the URL.
    url = 'http://api.mixpanel.com/track/?data=' + urllib.quote(data)
    urllib2.urlopen(url)
def _send_payload(self, payload):
    """POST `data` with `headers` to `url`.

    :param payload: (url, data, headers) triple.
    :returns: response text, or the raised exception object on failure.
    """
    url, data, headers = payload
    request = eventlet_urllib2.Request(url, headers=headers)
    try:
        if sys.version_info >= (2, 6):
            return eventlet_urllib2.urlopen(request, data, self.timeout).read()
        # Python < 2.6: urlopen() lacks the timeout argument.
        return eventlet_urllib2.urlopen(request, data).read()
    except Exception as err:
        return err
def _setNetworkConfig_V1(self):
    """Push network settings (DHCP or static) to the phone's web UI.

    POSTs to /advanced_network.htm and, when the phone asks for explicit
    confirmation, follows up with the two confirmation POSTs that trigger a
    reboot.

    :returns: tuple (success, rebooting) of booleans.
    """
    try:
        if self._dhcp:
            postvars = {
                'dhcp': 'on',
                'Settings': 'Save',
                'ignore_dhcp_findings': '',
            }
        else:
            postvars = {
                'dhcp': 'off',
                'ip_adr': self._static_ip,
                'netmask': self._static_mask,
                'gateway': self._static_gw,
                'dns_server1': self._static_dns1,
                'dns_server2': self._static_dns2,
                'Settings': 'Save',
                'ignore_dhcp_findings': 'dns_server1 dns_server2 gateway ip_adr netmask',
            }
        response = urllib2.urlopen(
            'http://' + self._ip + '/advanced_network.htm',
            urllib.urlencode(postvars))
        htmlbody = response.read()
        if 'CONFIRM_REBOOT' in htmlbody:
            # Firmware wants a two-step confirmation before applying the
            # settings and rebooting.
            response = urllib2.urlopen(
                'http://' + self._ip + '/advanced_network.htm',
                'CONFIRM_REBOOT=Reboot')
            htmlbody = response.read()
            response = urllib2.urlopen(
                'http://' + self._ip + '/confirm.htm',
                'REBOOT=Yes')
            htmlbody = response.read()
            logging.info('Endpoint %s@%s set network config - rebooting'
                % (self._vendorname, self._ip))
            return (True, True)
        else:
            logging.info(
                'Endpoint %s@%s set network config - not yet rebooting'
                % (self._vendorname, self._ip))
            return (True, False)
    except urllib2.URLError, e:
        logging.error('Endpoint %s@%s failed to connect - %s'
            % (self._vendorname, self._ip, str(e)))
        return (False, False)
def ensure_login():
    """Ensure the session is logged in and authorized for the configured repo.

    Redirects to GitHub OAuth when no login token is in the session.
    Otherwise (re)validates -- with caching via session['access'] for EXPIRES
    -- that the configured repository appears among the user's own or org
    repositories, and aborts with 403 when it does not.
    """
    logger = logging.getLogger(__name__ + '.ensure_login')
    try:
        login = session['login']
    except KeyError:
        # Not logged in: drop any stale access cache and start the OAuth
        # dance, round-tripping the current URL through `back`.
        session.pop('access', None)
        back = base64.urlsafe_b64encode(request.url)
        params = {
            'client_id': current_app.config['CLIENT_ID'],
            'redirect_uri': url_for('auth', back=back, _external=True),
            'scope': 'repo'
        }
        return redirect('https://github.com/login/oauth/authorize?' +
                        url_encode(params))
    logger.debug('login = %r', login)
    try:
        auth, ltime = session['access']
    except (KeyError, ValueError):
        auth = False
        ltime = None
    if ltime is None or ltime < datetime.datetime.utcnow() - EXPIRES:
        repo_name = current_app.config['REPOSITORY']
        # user repos
        # NOTE(review): the access token travels in the query string; GitHub
        # has deprecated that in favor of the Authorization header.
        response = urllib2.urlopen(
            'https://api.github.com/user/repos?per_page=100&access_token=' +
            login
        )
        repo_dicts = json.load(response)
        response.close()
        repos = frozenset(repo['full_name'] for repo in repo_dicts)
        logger.debug('repos = %r', repos)
        auth = repo_name in repos
        # org repos
        if not auth:
            url = 'https://api.github.com/orgs/{0}/repos?per_page=100&access_token={1}'
            try:
                response = urllib2.urlopen(
                    url.format(repo_name.split('/', 1)[0], login)
                )
            except IOError:
                auth = False
            else:
                repo_dicts = json.load(response)
                response.close()
                org_repos = frozenset(repo['full_name'] for repo in repo_dicts)
                logger.debug('org_repos = %r', org_repos)
                auth = repo_name in org_repos
        session['access'] = auth, datetime.datetime.utcnow()
    if not auth:
        abort(403)
    logger.debug('auth = %r', auth)
def probeModel(self):
    '''Probe specific model of Aastra phone

    The Aastra web admin interface uses Basic authentication for access
    control. The authentication realm exposes the phone model like this:

    HTTP/1.1 401 Unauthorized
    Server: Aragorn
    WWW-Authenticate: Basic realm="Aastra 6757i"
    Connection: close
    Content-Length: 745
    Content-Type: text/html
    '''
    sModel = None
    try:
        # Do not expect this to succeed. Only interested in exception.
        urllib2.urlopen('http://' + self._ip + '/')
    except urllib2.HTTPError, e:
        if e.code == 401 and 'WWW-Authenticate' in e.headers:
            m = re.search(r'realm="Aastra (.+)"', e.headers['WWW-Authenticate'])
            if m != None:
                sModel = m.group(1)
            else:
                # Older firmware does not expose the model in the realm;
                # log in with the default credentials and scrape
                # sysinfo.html instead.
                self._http_username = '******'
                self._http_password = '******'
                password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
                password_manager.add_password(None, 'http://' + self._ip + '/',
                    self._http_username, self._http_password)
                basic_auth_handler = urllib2.HTTPBasicAuthHandler(
                    password_manager)
                opener = urllib2.build_opener(basic_auth_handler)
                try:
                    response = opener.open('http://' + self._ip + '/sysinfo.html')
                    htmlbody = response.read()
                    # <TR>
                    # <TD style="BORDER-BOTTOM: 1px dashed">Platform</TD>
                    # <TD style="BORDER-BOTTOM: 1px dashed">9112i Revision 0</TD></TR>
                    # <TR>
                    m = re.search(r'Platform</TD>.*?<TD.*?>(\w+)', htmlbody,
                        re.IGNORECASE | re.DOTALL)
                    if m != None:
                        sModel = m.group(1)
                except Exception, e:
                    # Unreachable or non-matching page: leave sModel as None.
                    pass
def test_urllib():
    """End-to-end: the spawned HTTP server answers 501 exactly once."""
    gthread, server, port = start_http_server()
    try:
        assert server.request_count == 0
        try:
            urlopen('http://127.0.0.1:{0}'.format(port))
        except HTTPError as ex:
            assert ex.code == 501, repr(ex)
        else:
            raise AssertionError('should not get there')
        assert server.request_count == 1
    finally:
        server.server_close()
        eventlet.kill(gthread)
def fql(self, query, args=None, post_args=None):
    """FQL query.

    Example query: "SELECT affiliations FROM user WHERE uid = me()"

    :param query: a single FQL query string, or a dict of named queries
        (dispatched to fql.multiquery).
    :param args: optional dict of querystring arguments.
    :param post_args: optional dict of POST arguments; presence switches
        the request to POST.
    :raises GraphAPIError: when the API reports an error_code.
    """
    args = args or {}
    if self.access_token:
        if post_args is not None:
            post_args["access_token"] = self.access_token
        else:
            args["access_token"] = self.access_token
    post_data = None if post_args is None else urllib.urlencode(post_args)
    # A dict query means a multiquery; a plain string is a single query.
    if not isinstance(query, basestring):
        args["queries"] = query
        fql_method = "fql.multiquery"
    else:
        args["query"] = query
        fql_method = "fql.query"
    args["format"] = "json"
    try:
        # Renamed from `file`, which shadowed the builtin.
        response_file = urllib2.urlopen(
            "https://api.facebook.com/method/" + fql_method + "?" +
            urllib.urlencode(args),
            post_data,
            timeout=self.timeout,
        )
    except TypeError:
        # Timeout support for Python <2.6
        if self.timeout:
            socket.setdefaulttimeout(self.timeout)
        response_file = urllib2.urlopen(
            "https://api.facebook.com/method/" + fql_method + "?" +
            urllib.urlencode(args), post_data
        )
    try:
        content = response_file.read()
        response = _parse_json(content)
        # Return a list if success, return a dictionary if failed
        if type(response) is dict and "error_code" in response:
            raise GraphAPIError(response)
    except Exception:
        # Bare `raise` preserves the traceback; `raise e` discarded it.
        raise
    finally:
        # Bug fix: the response was never closed, leaking the connection.
        response_file.close()
    # NOTE(review): `response` is parsed but not returned in the visible
    # original -- confirm whether a trailing `return response` was intended.
def ensure_login():
    """Ensure the session is logged in and authorized for the configured repo.

    Redirects to GitHub OAuth when no login token is present; otherwise
    checks -- cached in session['access'] until EXPIRES elapses -- that the
    configured repository is among the user's or their org's repositories,
    aborting with 403 when not.
    """
    logger = logging.getLogger(__name__ + '.ensure_login')
    try:
        login = session['login']
    except KeyError:
        # Not logged in: clear stale access cache and start the OAuth flow,
        # encoding the current URL so we can return after auth.
        session.pop('access', None)
        back = base64.urlsafe_b64encode(request.url)
        params = {
            'client_id': current_app.config['CLIENT_ID'],
            'redirect_uri': url_for('auth', back=back, _external=True),
            'scope': 'repo'
        }
        return redirect('https://github.com/login/oauth/authorize?' +
                        url_encode(params))
    logger.debug('login = %r', login)
    try:
        auth, ltime = session['access']
    except (KeyError, ValueError):
        auth = False
        ltime = None
    if ltime is None or ltime < datetime.datetime.utcnow() - EXPIRES:
        repo_name = current_app.config['REPOSITORY']
        # user repos
        response = urllib2.urlopen(
            'https://api.github.com/user/repos?per_page=100&access_token=' +
            login)
        repo_dicts = json.load(response)
        response.close()
        repos = frozenset(repo['full_name'] for repo in repo_dicts)
        logger.debug('repos = %r', repos)
        auth = repo_name in repos
        # org repos
        if not auth:
            url = 'https://api.github.com/orgs/{0}/repos?per_page=100&access_token={1}'
            try:
                response = urllib2.urlopen(
                    url.format(repo_name.split('/', 1)[0], login))
            except IOError:
                auth = False
            else:
                repo_dicts = json.load(response)
                response.close()
                org_repos = frozenset(repo['full_name'] for repo in repo_dicts)
                logger.debug('org_repos = %r', org_repos)
                auth = repo_name in org_repos
        session['access'] = auth, datetime.datetime.utcnow()
    if not auth:
        abort(403)
    logger.debug('auth = %r', auth)
def _rocktradingApiCall(eur_ticker_url, eur_trades_url, *args, **kwargs):
    """Fetch the EUR ticker and trades from The Rock Trading and summarize.

    Computes 24h EUR volume and the most recent price from the trade list.
    The USD code path from a prior revision is retained below, commented
    out.

    :returns: dict keyed by 'EUR' with Decimal ask/bid/last/volume
        (ask/bid may be None when absent from the ticker).
    """
    # usd_ticker_url, usd_trades_url,
    last24h_time = int(time.time()) - 86400  # 86400s in 24h
    # with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
    #     response = urllib2.urlopen(urllib2.Request(url=usd_ticker_url, headers=API_REQUEST_HEADERS)).read()
    #     usd_ticker_result = json.loads(response)
    # with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
    #     response = urllib2.urlopen(urllib2.Request(url=usd_trades_url, headers=API_REQUEST_HEADERS)).read()
    #     usd_volume_result = json.loads(response)
    # usd_last = 0.0
    # usd_vol = 0.0
    # for trade in usd_volume_result:
    #     if trade['date'] > last24h_time:
    #         usd_vol = usd_vol + float(trade['price'])
    #         usd_last = float(trade['price'])
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=eur_ticker_url, headers=API_REQUEST_HEADERS)).read()
        eur_ticker_result = json.loads(response)
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(urllib2.Request(url=eur_trades_url, headers=API_REQUEST_HEADERS)).read()
        eur_volume_result = json.loads(response)
    eur_last = 0.0
    eur_vol = 0.0
    # Accumulate volume and track the latest price over the 24h window.
    for trade in eur_volume_result:
        if trade["date"] > last24h_time:
            eur_vol = eur_vol + float(trade["amount"])
            eur_last = float(trade["price"])
    return {
        # 'USD': {'ask': Decimal(usd_ticker_result['result'][0]['ask']).quantize(DEC_PLACES),
        #         'bid': Decimal(usd_ticker_result['result'][0]['bid']).quantize(DEC_PLACES),
        #         'high': Decimal(usd_high).quantize(DEC_PLACES),
        #         'low': Decimal(usd_low).quantize(DEC_PLACES),
        #         'last': Decimal(usd_last).quantize(DEC_PLACES),
        #         'avg': None,
        #         'volume': Decimal(usd_vol).quantize(DEC_PLACES),
        #         },
        "EUR": {
            "ask": Decimal(eur_ticker_result["result"][0]["ask"]).quantize(DEC_PLACES)
                if eur_ticker_result["result"][0]["ask"] is not None else None,
            "bid": Decimal(eur_ticker_result["result"][0]["bid"]).quantize(DEC_PLACES)
                if eur_ticker_result["result"][0]["bid"] is not None else None,
            "last": Decimal(eur_last).quantize(DEC_PLACES),
            "volume": Decimal(eur_vol).quantize(DEC_PLACES),
        }
    }
def scout_server_type(self, host):
    """
    Obtain Server header by calling OPTIONS.

    :param host: (address, port) pair to check
    :returns: tuple (url, Server type or the error, status); status is the
        HTTP code on success/HTTPError and -1 on connection failures.
    """
    try:
        url = "http://%s:%s/" % (host[0], host[1])
        req = urllib2.Request(url)
        # urllib2 has no native OPTIONS support; override the verb.
        req.get_method = lambda: 'OPTIONS'
        conn = urllib2.urlopen(req)
        header = conn.info().getheader('Server')
        # Keep only the product token, e.g. 'Apache' from 'Apache/2.4'.
        server_header = header.split('/')
        content = server_header[0]
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    except (urllib2.URLError, socket.timeout) as err:
        # Consistency fix: also catch socket.timeout, matching the sibling
        # implementation -- a timed-out OPTIONS call should yield status -1
        # rather than propagate.  (socket is imported at module level
        # elsewhere in this codebase.)
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def scout_server_type(self, host):
    """
    Obtain Server header by calling OPTIONS.

    :param host: (address, port) pair to check
    :returns: tuple (url, Server type or the error, status); status is the
        HTTP code on success/HTTPError, -1 on connection errors/timeouts.
    """
    try:
        url = "http://%s:%s/" % (host[0], host[1])
        req = urllib2.Request(url)
        # urllib2 has no native OPTIONS support; override the verb.
        req.get_method = lambda: 'OPTIONS'
        conn = urllib2.urlopen(req)
        header = conn.info().getheader('Server')
        # Keep only the product token, e.g. 'Apache' from 'Apache/2.4'.
        server_header = header.split('/')
        content = server_header[0]
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    except (urllib2.URLError, socket.timeout) as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def updater(self, coin, short_coin): # Generic method to update the difficulty of a given currency logging.info('Updating Difficulty of ' + coin) config_diffcoin = [site for site in self.diff_sites if site['coin'] == short_coin] #timeout = eventlet.timeout.Timeout(5, Exception('')) useragent = {'User-Agent': self.bitHopper.config.get('main', 'work_user_agent')} for site in config_diffcoin: try: req = urllib2.Request(site['url'], headers = useragent) response = urllib2.urlopen(req) if site['get_method'] == 'direct': output = response.read() elif site['get_method'] == 'regexp': diff_str = response.read() output = re.search(site['pattern'], diff_str) output = output.group(1) elif site['get_method'] == 'json': pass self.diff[short_coin] = float(output) logging.debug('Retrieved Difficulty: ' + str(self[short_coin])) break except Exception, e: logging.debug('Unable to update difficulty for ' + coin + ' from ' + site['url'] + ' : ' + str(e)) finally:
def test_copy_dest(self):
    """COPY an object through the storlet and verify the generated-from
    response headers, the copied object's size, and cleanup."""
    # No COPY in swiftclient. Using urllib instead...
    url = os.path.join(self.url, self.container, self.storlet_file)
    objname = self.storlet_file + '-copy-ex'
    headers = {
        'X-Auth-Token': self.token,
        'X-Run-Storlet': self.storlet_name,
        'Destination': '%s/%s' % (self.container, objname)
    }
    headers.update(self.additional_headers)
    req = urllib2.Request(url, headers=headers)
    # urllib2 has no COPY support either; override the verb on the Request.
    req.get_method = lambda: 'COPY'
    conn = urllib2.urlopen(req, timeout=10)
    self.assertEqual(201, conn.getcode())
    # The storlet middleware must record where the new object came from.
    self.assertEqual('%s/%s' % (self.container, self.storlet_file),
                     conn.info()['x-storlet-generated-from'])
    self.assertEqual(self.acct,
                     conn.info()['x-storlet-generated-from-account'])
    self.assertIn('x-storlet-generated-from-last-modified', conn.info())
    headers = client.head_object(self.url, self.token, self.container,
                                 objname)
    self.assertEqual(str(len(self.content)), headers['content-length'])
    # Clean up the copy and confirm the delete succeeded.
    resp = dict()
    client.delete_object(self.url, self.token, self.container, objname,
                         response_dict=resp)
    self.assertEqual(204, resp['status'])
def fetch(url, seen, pool, redisconn):
    """Fetch a url, stick any found urls into the seen set, and dispatch
    any new ones to the pool."""
    print "fetching", url
    html = ''
    # Give up silently after 5s (Timeout(…, False) swallows the interrupt);
    # html then stays '' and the loops below are no-ops.
    with eventlet.Timeout(5, False):
        domains_seen.add(urlparse(url).hostname)
        html = urllib2.urlopen(url).read().lower()
    # Publish a word-frequency map of the page body.
    wordcount = {}
    for word in splitter.split(BeautifulSoup(html).text):
        wordcount[word] = wordcount.get(word, 0.0) + 1
    redisconn.publish('wordcount', json.dumps(wordcount))
    for a in BeautifulSoup(html, "html.parser", parse_only=a_strainer):
        if "href" in a.attrs and a['href'].startswith('http'):
            new_url = a['href']
            domain = urlparse(new_url).hostname
            # only send requests to new domains
            if new_url not in seen and domain not in domains_seen:
                seen.add(new_url)
                # while this seems stack-recursive, it's actually not:
                # spawned greenthreads start their own stacks
                pool.spawn_n(fetch, new_url, seen, pool, redisconn)
def url_request(url, headers=None):
    """Open `url` with the given headers (defaults applied when omitted)
    and return the response object."""
    effective_headers = default_url_headers() if headers is None else headers
    request = urllib2.Request(url, headers=effective_headers)
    return urllib2.urlopen(request)
def _enableStaticProvisioning(self, vars):
    """Enable static provisioning via whichever HTTP interface the firmware
    exposes.

    Probes a list of known admin URLs in order and dispatches to the
    handler bound to the first URL that exists.

    :param vars: provisioning variables forwarded to the selected handler.
    :returns: the handler's result; False on non-404 HTTP failures or
        connection errors; None when no interface matched at all.
    """
    # Detect what kind of HTTP interface is required
    staticProvImpls = [
        # Interface for newer GXP140x firmware - JSON based
        ('GXP140x JSON', '/cgi-bin/api.values.post', self._enableStaticProvisioning_GXP140x),
        # Interface for old BT200 firmware or similar
        ('BT200', '/update.htm', self._enableStaticProvisioning_BT200),
        # Interface for GXVxxxx firmware or similar
        ('GXVxxxx', '/manager', self._enableStaticProvisioning_GXV),
        # Interface for GXP1450 firmware or similar
        ('GXP1450', '/cgi-bin/update', self._enableStaticProvisioning_GXP1450),
    ]
    for impl in staticProvImpls:
        try:
            response = urllib2.urlopen('http://' + self._ip + impl[1])
            body = response.read()
            logging.info('Endpoint %s@%s appears to have %s interface...'
                % (self._vendorname, self._ip, impl[0]))
            return impl[2](vars)
        except urllib2.HTTPError, e:
            # A 404 just means "not this interface" -- try the next one.
            if e.code != 404:
                logging.error(
                    'Endpoint %s@%s failed to detect %s - %s'
                    % (self._vendorname, self._ip, impl[0], str(e)))
                return False
        except socket.error, e:
            logging.error('Endpoint %s@%s failed to connect - %s'
                % (self._vendorname, self._ip, str(e)))
            return False
def fetch(url):
    """Fetch `url` with a 15 second timeout.

    :returns: (url, body) on success, (url, "") on any failure.
    """
    try:
        print(url)
        response = urllib2.urlopen(url, timeout=15)
    except Exception:
        # Idiom fix: the exception was bound to a name that was never used.
        return url, ""
    return url, response.read()
def test_web():
    """The spawned web server greets a plain GET with 'Hello World'."""
    webserver = WebServer(basic_web_handler, None)
    eventlet.spawn(webserver.listen, ('', 1996))
    # Yield once so the listener greenthread gets a chance to start.
    eventlet.sleep()
    body = urllib2.urlopen('http://localhost:1996').read()
    assert_equals(body, 'Hello World')
def get_events(url):
    """Return the JSON-decoded event payload at `url`, or None on any
    fetch/parse failure."""
    try:
        payload = urllib2.urlopen(url).read()
        return json.loads(payload)
    except Exception:
        return None
def get_data(self, argv):
    """Fetch argv["url"], directly or through self.proxy, within
    self.timeout seconds.

    On timeout the eventlet.Timeout(..., False) swallows the interrupt and
    `content` stays None.  HTTP/URL errors are re-raised as generic
    Exceptions carrying the code or "URLError".

    NOTE(review): the outer `try:` has no visible except/finally here --
    the original appears truncated at this point; confirm against the
    full source.
    """
    # argv = {"urls" : [], "worker" : , }
    content = None
    error_code = None
    self.logger.debug("start fetch " + argv["url"])
    try:
        url = argv["url"]
        try:
            with eventlet.Timeout(self.timeout, False):
                headers = {
                    "User-Agent":"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1"
                }
                if self.proxy is None:
                    req = urllib2.Request(url, headers = headers)
                    res = urllib2.urlopen(req)
                    content = res.read()
                else:
                    # Proxied path: build a dedicated opener and copy the
                    # headers onto it.
                    proxy_handler = urllib2.ProxyHandler(self.proxy)
                    opener = urllib2.build_opener(proxy_handler)
                    header_list = []
                    for header in headers:
                        header_list.append((header, headers[header]))
                    opener.addheaders = header_list
                    res = opener.open(url)
                    content = res.read()
        except urllib2.HTTPError, e:
            raise Exception(e.code)
        except urllib2.URLError, e:
            raise Exception("URLError")
def auth():
    """OAuth callback: exchange the GitHub `code` for an access token.

    On the initial flow (no `back` parameter) the token is persisted to the
    token file and app config and the user lands on the home page; otherwise
    the user is sent back to the URL base64-encoded in `back`.  The token is
    stored in the session under 'login'.
    """
    try:
        back = request.args['back']
    except KeyError:
        redirect_uri = url_for('auth', _external=True)
        initial = True
    else:
        redirect_uri = url_for('auth', back=back, _external=True)
        initial = False
    params = {
        'client_id': current_app.config['CLIENT_ID'],
        'client_secret': current_app.config['CLIENT_SECRET'],
        'redirect_uri': redirect_uri,
        'code': request.args['code'],
        'state': get_oauth_state()
    }
    # POST the code back to GitHub to obtain the access token.
    response = urllib2.urlopen('https://github.com/login/oauth/access_token',
                               url_encode(params))
    auth_data = url_decode(response.read())
    response.close()
    token = auth_data['access_token']
    if initial:
        with open_token_file('w') as f:
            f.write(token)
        current_app.config['ACCESS_TOKEN'] = token
        return_url = url_for('home')
    else:
        return_url = base64.urlsafe_b64decode(str(back))
    session['login'] = token
    return redirect(return_url)
def probeModel(self):
    '''Probe specific model of Digium phone

    The Digium web admin interface exposes the phone model at the URI
    /cgi-bin/prefetch.cgi without authentication:

    <?xml version="1.0" ?>
    <response>
        <firmware version="1_0_0_44308" />
        <config>
            <setting id="phone_model" value="D70" conflict="freeze" />
            <setting id="web_ui_enabled" value="1" />
        </config>
    </response>
    '''
    sModel = None
    try:
        response = urllib2.urlopen('http://' + self._ip + '/cgi-bin/prefetch.cgi')
        htmlbody = response.read()
        if response.code == 200:
            m = re.search(r'setting id="phone_model" value="(\w+)"', htmlbody)
            if m != None:
                sModel = m.group(1)
    except Exception, e:
        # Unreachable phones or unexpected markup simply yield no model.
        pass
def downloadLink(url, dest):
    """Download an image from `url`, filter out small/blank ones, resize to
    roughly the target pixel budget, and save it to `dest`.

    :param url: image URL to fetch.
    :param dest: filesystem path for the saved image.
    :returns: True when an acceptable image was saved, False otherwise.
    """
    try:
        fd = urllib2.urlopen(url)
        image_file = io.BytesIO(fd.read())
        im = Image.open(image_file)
        (width, height) = im.size
        # Reject images smaller than ~0.5 megapixels.
        if width * height < 5e5:
            return False
        pix_val = list(im.getdata())
        # Blank-image heuristic: reject when the first 100 pixels are all
        # 238.  NOTE(review): assumes a single-band image (getdata() yields
        # ints, not tuples) -- confirm for RGB sources.
        valid = False
        for i in range(0,100):
            if pix_val[i] != 238:
                valid = True
        if not valid:
            return False
        # Shrink when more than 20% over the target pixel budget
        # (3/4 of a target_max_dim square), preserving aspect ratio.
        n_pixels = float(width * height)
        n_target_pixels = target_max_dim * target_max_dim * 3 / 4.0
        if n_pixels > n_target_pixels * 1.2:
            ratio = math.sqrt(n_target_pixels / (n_pixels * 1.0))
            target_width = int(width * ratio)
            target_height = int(height * ratio)
            im = im.resize((target_width, target_height), Image.ANTIALIAS)
        im.save(dest)
        return True
    except Exception as e:
        print 'Exception in downloadLink: ', e
        return False
def url_request(url, headers=None):
    """Issue a GET for `url`, filling in the default headers when the
    caller supplies none, and return the response object."""
    if headers is None:
        headers = default_url_headers()
    return urllib2.urlopen(urllib2.Request(url, headers=headers))
def fetchBitcoinChartsData():
    """Return the bitcoincharts API payload, memoized in API_QUERY_CACHE.

    The cached result is reused while it is younger than the configured
    query frequency; otherwise the API is queried again and the cache
    entry refreshed.
    """
    global ba
    if 'bitcoincharts' not in ba.api_parsers.API_QUERY_CACHE:
        # First call: seed an empty cache entry.
        ba.api_parsers.API_QUERY_CACHE['bitcoincharts'] = {
            'last_call_timestamp': 0,
            'result': None,
            'call_fail_count': 0,
        }
    current_timestamp = int(time.time())
    if (ba.api_parsers.API_QUERY_CACHE['bitcoincharts']['last_call_timestamp']
            + ba.api_parsers.API_QUERY_FREQUENCY['bitcoincharts'] > current_timestamp):
        # Cache still fresh -- reuse the previous result.
        result = ba.api_parsers.API_QUERY_CACHE['bitcoincharts']['result']
    else:
        with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
            response = urllib2.urlopen(
                urllib2.Request(url=BITCOIN_CHARTS_API_URL, headers=API_REQUEST_HEADERS)).read()
            result = json.loads(response)
        ba.api_parsers.API_QUERY_CACHE['bitcoincharts'] = {
            'last_call_timestamp': current_timestamp,
            'result': result,
            'call_fail_count': 0,
        }
    return result
def notify_master(self):
    """Send master our config

    POSTs this node's hostname, checks and roles as JSON to the master's
    /register endpoint, authenticated via the X-REGISTER-KEY header.

    :returns: True when the master acknowledged, False on any failure.
    """
    target = '%s/register' % (self.master_url)
    data = json.dumps({
        'hostname': self.hostname,
        'checks': self.scripts,
        'roles': self.roles
    })
    req = urllib2.Request(target, data, {'Content-Type': 'application/json'})
    req.add_header("X-REGISTER-KEY", self.register_key)
    try:
        r = urllib2.urlopen(req)
        # Integer division: codes 200-399 give 1, so redirects also count
        # as success here (urllib2 follows them, so final codes are
        # normally 2xx anyway).
        if r.code / 200 != 1:
            self.logger.error('Error notify master: %d status' % r.code)
            return False
        else:
            headers = r.info().dict
            text = r.read()
            r.close()
            self.logger.info('Notified master: %s %s %d' % (headers, text, r.code))
            return True
    except Exception as err:
        self.logger.error('Error notifying master: %s' % err)
        return False
def request(self, path, query=None, data=None, parser=None):
    """
    Request user data from the resource endpoint

    :param path: The path of the resource
    :param query: A dict of parameters to be sent as the request
        querystring
    :param data: Data to be POSTed to the resource endpoint
    :param parser: Parser callback to deal with the returned data.
        Defaults to ``json.loads``.
    :returns: the parsed response body, or None on HTTPError.
    """
    assert self.access_token is not None
    # Idiom fix: replaced the fragile `parser and parser or json.loads`
    # construct (breaks for any falsy-but-valid callable wrapper) with the
    # plain default expression.
    parser = parser or json.loads
    if query is None:
        query = {}
    # The access token always rides along in the querystring.
    query.update({
        self.access_token_key: self.access_token
    })
    path = '%s%s?%s' % (self.resource_endpoint, path, urlencode(query))
    try:
        msg = urlopen(path, data)
    except HTTPError:
        # API-level errors are reported as None to the caller.
        return None
    return parser(msg.read().decode(
        msg.info().get_content_charset() or 'utf-8'
    ))
def downloadChunks(url): """Helper to download large files the only arg is a url this file will go to a temp directory the file will also be downloaded in chunks and print out how much remains """ baseFile = os.path.basename(url) #move the file to a more uniq path os.umask(0002) temp_path = "/tmp/" try: file = os.path.join(temp_path,baseFile) req = urllib2.urlopen(url) total_size = int(req.info().getheader('Content-Length').strip()) downloaded = 0 CHUNK = 256 * 10240 with open(file, 'wb') as fp: while True: chunk = req.read(CHUNK) downloaded += len(chunk) print math.floor( (downloaded / total_size) * 100 ) if not chunk: break fp.write(chunk) except urllib2.HTTPError, e: print "HTTP Error:",e.code , url return False
def _trigger(self, check, incident_key, priority):
    """Fire a PagerDuty 'trigger' event for a DOWN check.

    :param check: check dict (uses 'check' and 'hostname' keys).
    :param incident_key: de-duplication key for the incident.
    :param priority: key into self.service_keys selecting the service.
    :returns: True when PagerDuty reports success, else False.
    """
    headers = {'Content-Type': 'application/json'}
    payload = json.dumps({
        'service_key': self.service_keys[priority],
        'incident_key': incident_key,
        'event_type': 'trigger',
        'description': '%s on %s is DOWN' % (check['check'],
                                             check['hostname']),
        'details': check,
    })
    try:
        conn = urllib2.urlopen(urllib2.Request(self.url, payload, headers),
                               timeout=10)
        outcome = json.loads(conn.read())
        conn.close()
        # Missing or non-'success' status both count as failure and
        # produce the same log message, so one branch covers both.
        if outcome.get('status') == 'success':
            self.logger.info('Triggered pagerduty event: %s' % outcome)
            return True
        self.logger.info('Failed to trigger pagerduty event: %s' % outcome)
        return False
    except Exception:
        self.logger.exception('Error triggering pagerduty event.')
        return False
def _justcoinApiCall(ticker_url, *args, **kwargs):
    """Fetch the Justcoin ticker and normalise the BTC fiat pairs.

    :param ticker_url: Justcoin ticker endpoint returning a JSON list
        of per-pair dicts with 'id', 'ask', 'bid', 'last', 'volume'.
    :returns: dict keyed by currency code ('USD', 'EUR', 'NOK'), each
        value holding Decimal ask/bid/last/volume quantized to
        DEC_PLACES (None where the feed supplied null).
    """
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(
            urllib2.Request(url=ticker_url,
                            headers=API_REQUEST_HEADERS)).read()
    ticker = json.loads(response)

    def _dec(value):
        # The feed uses JSON null for missing numbers.
        if value is None:
            return None
        return Decimal(value).quantize(DEC_PLACES)

    # The original repeated the same dict literal three times, once per
    # pair; a lookup table removes the duplication.
    pair_to_currency = {'BTCUSD': 'USD', 'BTCEUR': 'EUR', 'BTCNOK': 'NOK'}
    result = {}
    for currency_data in ticker:
        currency = pair_to_currency.get(currency_data['id'])
        if currency is None:
            continue
        volume = _dec(currency_data['volume'])
        result[currency] = {
            'ask': _dec(currency_data['ask']),
            'bid': _dec(currency_data['bid']),
            'last': _dec(currency_data['last']),
            # NOTE(review): the original fell back to DEC_PLACES (a
            # Decimal quantization template, not a volume) when volume
            # was null — almost certainly a copy/paste slip, preserved
            # here for compatibility; confirm whether None was meant.
            'volume': volume if volume is not None else DEC_PLACES,
        }
    return result
def _send_email(self, check):
    """Email a check state change through the Mailgun HTTP API.

    :param check: check dict (uses 'check', 'hostname', 'status').
    :returns: True if the POST to Mailgun succeeded, else False.
    """
    status = 'UP' if check['status'] is True else 'DOWN'
    subject = "[stalker] %s on %s is %s" % (check['check'],
                                            check['hostname'], status)
    form_fields = {
        "from": "Stalker <%s>" % self.from_addr,
        "to": self.recipients,
        "subject": subject,
        "text": "%s" % check,
    }
    headers = {
        'Authorization': 'Basic %s' % self.basic_auth_creds,
        'Content-Type': 'application/x-www-form-urlencoded',
    }
    try:
        request = urllib2.Request(self.url, urllib.urlencode(form_fields),
                                  headers)
        conn = urllib2.urlopen(request)
        body = conn.read()
        conn.close()
        self.logger.info('Mailgun: %s' % body)
        return True
    except Exception:
        self.logger.exception('Mailgun notification error.')
        return False
def _bittyliciousApiCall(ticker_url, *args, **kwargs):
    """Fetch the Bittylicious GBP ticker.

    Picks the shortest averaging window (3h, then 12h, then 24h) that
    has data as the rate; when none has data, both rate and volume are
    suppressed. Missing keys leave GBP out of the result entirely
    (best-effort feed).

    :param ticker_url: Bittylicious ticker endpoint (JSON).
    :returns: dict with a 'GBP' entry (ask/bid/last all set to the
        chosen rate, plus volume), or an empty dict.
    """
    with Timeout(API_CALL_TIMEOUT_THRESHOLD, CallTimeoutException):
        response = urllib2.urlopen(
            urllib2.Request(url=ticker_url,
                            headers=API_REQUEST_HEADERS)).read()
    ticker = json.loads(response)

    result = {}
    try:
        gbp = ticker['GBP']
        volume = Decimal(gbp['volume_btc']).quantize(DEC_PLACES)
        rate = None
        # Prefer the shortest averaging window that has data.
        for window in ('avg_3h', 'avg_12h', 'avg_24h'):
            if gbp[window] is not None:
                rate = Decimal(gbp[window]).quantize(DEC_PLACES)
                break
        else:
            # No usable average at all: report neither rate nor volume.
            volume = None
        result['GBP'] = {'ask': rate,
                         'bid': rate,
                         'last': rate,
                         'volume': volume,
                         }
    except KeyError:
        # Deliberate best-effort: any missing field yields an empty
        # result rather than an error.
        pass
    return result
def __call__(self, req):
    """WSGI entry point: pass the request through, then POST a
    notification for successful object-level write requests.

    Only POST/PUT/DELETE/COPY requests addressing an object path are
    reported; everything else is forwarded untouched.
    """
    if req.method not in ("POST", "PUT", "DELETE", "COPY"):
        return self.app
    self.logger.debug("entered notification middlware")
    obj = None
    try:
        # Require exactly four path segments:
        # /<version>/<account>/<container>/<object>.
        (version, account, container, obj) = \
            split_path(req.path_info, 4, 4, True)
    except ValueError:
        # not an object request
        return self.app
    resp = req.get_response(self.app)
    # The method test is redundant (already filtered above) but
    # harmless; kept as-is.
    if obj and is_success(resp.status_int) and \
            req.method in ("POST", "PUT", "DELETE", "COPY"):
        notification_server = self.server
        if notification_server:
            # create a POST request with obj name as body
            data = json.dumps([{"ttl": 800,
                                "body": {"account": account,
                                         "container": container,
                                         "object": obj,
                                         "method": req.method}}])
            event_req = urllib2.Request(notification_server, data)
            event_req.add_header('Content-Type', 'application/json')
            event_req.add_header('Client-ID', self.client_id)
            # Bound the notification POST so a slow queue cannot stall
            # the response path; a failed notification is logged, never
            # surfaced to the client.
            with Timeout(20):
                try:
                    response = urllib2.urlopen(event_req)
                except (Exception, Timeout):
                    self.logger.exception(
                        'failed to POST object %s' % obj)
                else:
                    self.logger.info(
                        'successfully posted object %s' % obj)
    return resp
def __call__(self, **kwargs):
    """Set a thumbnail image on a folderish context.

    Parameters arrive via **kwargs and/or the request form (form wins):
      - filename: id of the Image object to create/update
        (default 'cover.png')
      - svg: SVG markup to convert to PNG via the IConvert utility
      - imageChart_url: URL of a ready-made image to download

    Returns a translated status message. On success the image object
    is created or updated, excluded from navigation, and moved to the
    top of the folder ordering.
    """
    if not IFolderish.providedBy(self.context):
        return _("Can't set thumbnail on a non-folderish object !")
    # Request form values override any explicit kwargs.
    form = getattr(self.request, 'form', {})
    kwargs.update(form)
    filename = kwargs.get('filename', 'cover.png')
    img = None
    convert = getUtility(IConvert)
    if kwargs.get('svg', '') != '':
        img = convert(data=kwargs.get('svg', ''),
                      data_from='svg',
                      data_to='png')
    # When a chart URL is given it overrides the SVG result — and a
    # failed download discards any SVG conversion too (img reset to
    # None in the except).
    if kwargs.get('imageChart_url', '') != '':
        try:
            img_con = urllib2.urlopen(kwargs.get('imageChart_url'),
                                      timeout=10)
            img = img_con.read()
            img_con.close()
        except Exception:
            img = None
    if not img:
        return _("ERROR: An error occured while exporting your image. "
                 "Please try again later.")
    if filename not in self.context.objectIds():
        # invokeFactory returns the (possibly normalised) new id.
        filename = self.context.invokeFactory('Image', id=filename)
    obj = self.context._getOb(filename)
    obj.setExcludeFromNav(True)
    obj.getField('image').getMutator(obj)(img)
    self.context.getOrdering().moveObjectsToTop(ids=[obj.getId()])
    notify(InvalidateCacheEvent(obj))
    return _("Success")
def urlopen(url):
    """Fetch *url* and return the length of the response body.

    Errors are printed rather than raised.

    :param url: URL to fetch.
    :returns: number of bytes read, or 0 if the request failed.
    """
    print('Opening: {0}'.format(url))
    try:
        body = urllib2.urlopen(url).read()
    except Exception as exc:
        print('URL {0} gave error: {1!r}'.format(url, exc))
        # BUG FIX: the original fell through to ``len(body)`` with
        # *body* unbound, so any failed request raised NameError.
        return 0
    return len(body)
def _send_email(self, check):
    """Notify recipients via Mailgun that a check changed state.

    :param check: check dict (uses 'check', 'hostname', 'status').
    :returns: True if the POST to Mailgun succeeded, else False.
    """
    state = 'UP' if check['status'] is True else 'DOWN'
    subject = "[stalker] %s on %s is %s" % (check['check'],
                                            check['hostname'], state)
    form = {"from": "Stalker <%s>" % self.from_addr,
            "to": self.recipients,
            "subject": subject,
            "text": "%s" % check}
    headers = {'Authorization': 'Basic %s' % self.basic_auth_creds,
               'Content-Type': 'application/x-www-form-urlencoded'}
    try:
        payload = urllib.urlencode(form)
        resp = urllib2.urlopen(urllib2.Request(self.url, payload, headers))
        body = resp.read()
        resp.close()
        self.logger.info('Mailgun: %s' % body)
        return True
    except Exception:
        self.logger.exception('Mailgun notification error.')
        return False
def scout_host(self, base_url, recon_type):
    """
    Perform the actual HTTP request to obtain swift recon telemtry.

    :param base_url: the base url of the host you wish to check. str of the
                     format 'http://127.0.0.1:6200/recon/'
    :param recon_type: the swift recon check to request.
    :returns: tuple of (recon url used, response body, and status)
    """
    url = base_url + recon_type
    try:
        raw = urllib2.urlopen(url, timeout=self.timeout).read()
    except urllib2.HTTPError as err:
        # HTTP-level failure: report the real status code.
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        return url, err, err.code
    except (urllib2.URLError, socket.timeout) as err:
        # Transport-level failure (DNS, refused, timeout): status -1.
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        return url, err, -1
    content = json.loads(raw)
    if self.verbose:
        print("-> %s: %s" % (url, content))
    return url, content, 200
def _trigger(self, check, incident_key, priority):
    """Send a 'trigger' event to PagerDuty for a DOWN check.

    :param check: check dict (uses 'check' and 'hostname' keys).
    :param incident_key: de-duplication key for the incident.
    :param priority: selects the service key from self.service_keys.
    :returns: True when PagerDuty reports success, else False.
    """
    body = json.dumps({'service_key': self.service_keys[priority],
                       'incident_key': incident_key,
                       'event_type': 'trigger',
                       'description': '%s on %s is DOWN' % (
                           check['check'], check['hostname']),
                       'details': check})
    try:
        response = urllib2.urlopen(
            urllib2.Request(self.url, body,
                            {'Content-Type': 'application/json'}),
            timeout=10)
        result = json.loads(response.read())
        response.close()
        # A missing status key and a non-'success' status are logged
        # identically, so they collapse into one failure path.
        triggered = result.get('status') == 'success'
        if triggered:
            self.logger.info('Triggered pagerduty event: %s' % result)
        else:
            self.logger.info(
                'Failed to trigger pagerduty event: %s' % result)
        return triggered
    except Exception:
        self.logger.exception('Error triggering pagerduty event.')
        return False
def scout_host(self, base_url, recon_type):
    """
    Perform the actual HTTP request to obtain swift recon telemtry.

    :param base_url: the base url of the host you wish to check. str of the
                     format 'http://127.0.0.1:6000/recon/'
    :param recon_type: the swift recon check to request.
    :returns: tuple of (recon url used, response body, and status)
    """
    # Local import keeps this fix self-contained; the sibling
    # implementation of this method already relies on socket.
    import socket
    url = base_url + recon_type
    try:
        body = urllib2.urlopen(url, timeout=self.timeout).read()
        content = json.loads(body)
        if self.verbose:
            print("-> %s: %s" % (url, content))
        status = 200
    except urllib2.HTTPError as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = err.code
    # BUG FIX: urlopen(timeout=...) can raise socket.timeout, which the
    # original let escape; catch it alongside URLError like the sibling
    # scout_host implementation does.
    except (urllib2.URLError, socket.timeout) as err:
        if not self.suppress_errors or self.verbose:
            print("-> %s: %s" % (url, err))
        content = err
        status = -1
    return url, content, status
def _fetch(url,): import logging as log import socket from eventlet import Timeout from eventlet.green import urllib2 import sys tries = 10 count = 1 downloaded = False while count < tries and downloaded == False: try: log.debug('downloading ' + url.get_full_url()) body = urllib2.urlopen(url).read() downloaded = True except socket.timeout, e: print "timeout on URL, trying again" count += 1 except Exception, e: if "[Errno 60]" in str(e): print "timeout on URL, trying again" count += 1 if "Error 502" in str(e): print "proxy error on URL, trying again" count += 1 else: log.warning( "could not download " + url.get_full_url() + " : " + str(e) + "\n") url = None body = None downloaded = True
def authorized_token(token):
    """Validate a Meetup OAuth token and return the member id.

    :param token: token string interpolated into MEETUP_TOKEN_AUTH_URL.
    :returns: the authenticated member's id, or None if the request
        fails or the response lacks the expected structure.
    """
    url = MEETUP_TOKEN_AUTH_URL % {'token': token}
    try:
        data = json.loads(urllib2.urlopen(url).read())
        return data['results'][0]['id']
    except Exception:
        # Deliberate best-effort: any network, parse, or shape error
        # simply means "not authorized". (Dropped the unused ``e``
        # binding from the original handler.)
        return None
def updater(self, coin, url_diff, reg_exp=None): # Generic method to update the exchange rate of a given currency if self.calculate_profit == True: try: #timeout = eventlet.timeout.Timeout(5, Exception('')) logging.info('Updating Exchange Rate of ' + coin) useragent = { 'User-Agent': self.bitHopper.config.get('main', 'work_user_agent') } req = urllib2.Request(url_diff, headers=useragent) response = urllib2.urlopen(req) if reg_exp == None: output = response.read() else: diff_str = response.read() output = re.search(reg_exp, diff_str) output = output.group(1) self.rate[coin] = float(output) logging.debug('Retrieved Exchange rate for ' + str(coin) + ': ' + output) except Exception, e: logging.debug('Unable to update exchange rate for ' + coin + ': ' + str(e)) self.rate[coin] = 0.0 finally: