def sort_api_calls(self, requests):
    """Sort API calls to be comparable with test data.

    VELA sends HTTP requests for different listings in random order - the
    order of listing requests is defined and important only per particular
    listing. Therefore we need to sort requests only according to listing
    number, which means:
    - DON'T sort non listing requests (i.e. shop request)
    - SORT only within a group of consecutive listing requests (i.e. two
      groups separated by different requests are sorted independently)
    - DON'T sort requests with different url but same listing id
      (i.e. /v2/listings/1 and /v2/listings/1/xxx)
    - SORT requests with different listing ids
      (i.e. /v2/listings/1, /v2/listings/2/xxx, /v2/listings/3)

    :param requests: Array of api calls as returned by get_api_calls
    :return: Array of api calls sorted as outlined above

    Note: unlike the previous implementation this does not destructively
    consume the input list, and it runs in a single O(n log n) pass
    instead of the quadratic ``list.pop(0)`` scan.
    """
    result = []
    listing_group = []

    def flush_group():
        # sorted() is stable, so calls sharing a listing id keep their
        # original relative order within the group.
        result.extend(sorted(
            listing_group,
            key=lambda call: self.get_listing_id_from_url(call['url'])))
        del listing_group[:]

    for call in requests:
        if self.is_listing_api_call(call):
            # Accumulate a run of consecutive listing requests.
            listing_group.append(call)
        else:
            # A non-listing request terminates the current run.
            flush_group()
            result.append(call)
    # Flush a trailing run of listing requests, if any.
    flush_group()
    return result
def post(self, url, body=None, headers=None, **kwargs):
    """Send a POST request to *url* through the client session.

    :param url: path appended to the client's base URL via ``_full_url``
    :param body: request payload; when ``None`` the extra keyword
        arguments are used as the payload instead
    :param headers: extra HTTP headers merged over the client defaults
        (fix: the previous mutable default ``headers={}`` was replaced by
        the ``None`` sentinel; caller-visible behaviour is unchanged)
    :return: wrapped ``Response`` object
    """
    headers = {} if headers is None else headers
    body = kwargs if body is None else body
    requests = _client.requests.copy()
    headers = self._merge_dicts(requests.pop('headers', {}), headers)
    if _contains_file(body):
        # Multipart upload: split file parts from plain form fields so
        # the session encodes each kind correctly.
        files = [(k, v) for k, v in _items_or_iteritems(body)
                 if _contains_file(v)]
        data = [(k, v) for k, v in _items_or_iteritems(body)
                if not _contains_file(v)]
        return Response(
            self.session.post(self._full_url(url),
                              headers=headers,
                              files=files,
                              data=data,
                              **requests))
    else:
        # JSON payload, serialized deterministically (sorted keys) so
        # identical bodies always produce identical request text.
        return Response(
            self.session.post(self._full_url(url),
                              headers=self._merge_dicts(
                                  {'content-type': 'application/json'},
                                  headers),
                              data=json.dumps(body, sort_keys=True, indent=2),
                              **requests))
def delete(self, url, params=None, headers=None):
    """Send a DELETE request to *url* through the client session.

    :param url: path appended to the client's base URL via ``_full_url``
    :param params: optional query-string parameters
    :param headers: extra HTTP headers merged over the client defaults
        (fix: the previous mutable default ``headers={}`` was replaced by
        the ``None`` sentinel; caller-visible behaviour is unchanged)
    :return: wrapped ``Response`` object
    """
    headers = {} if headers is None else headers
    requests = _client.requests.copy()
    headers = self._merge_dicts(requests.pop('headers', {}), headers)
    return Response(
        self.session.delete(self._full_url(url),
                            headers=headers,
                            params=params,
                            **requests))
def get(self, url, params=None, headers=None, **kwargs):
    """Send a GET request to *url* through the client session.

    :param url: path appended to the client's base URL via ``_full_url``
    :param params: query-string parameters; when ``None`` the extra
        keyword arguments are used as the parameters instead
    :param headers: extra HTTP headers merged over the client defaults
        (fix: the previous mutable default ``headers={}`` was replaced by
        the ``None`` sentinel; caller-visible behaviour is unchanged)
    :return: wrapped ``Response`` object
    """
    headers = {} if headers is None else headers
    params = kwargs if params is None else params
    requests = _client.requests.copy()
    headers = self._merge_dicts(requests.pop('headers', {}), headers)
    return Response(
        self.session.get(self._full_url(url),
                         headers=headers,
                         params=params,
                         **requests))
def parse():
    """Worker loop: drain queued request pairs and record their results.

    Pops ``(key, coroutine_factory)`` tuples from the module-level
    ``requests`` list, awaits each coroutine on a private event loop and
    stores the result in the module-level ``responses`` mapping under the
    key.  Runs forever; presumably executed in a dedicated worker
    thread - TODO confirm against the caller.
    """
    import time  # local import so module-level imports stay untouched

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    while True:
        if not requests:
            # Fix: sleep briefly instead of busy-spinning at 100% CPU
            # while waiting for work to arrive.
            time.sleep(0.01)
            continue
        key, make_coro = requests.pop(0)
        responses[key] = loop.run_until_complete(make_coro())
def scrape_all_charts(pitches):
    """Fetches all chart data from a list of pitches.

    :param pitches: DataFrame with one row per pitch; rows must expose
        ``formattedDistillery``, ``bondYear``, ``bondQuarter``,
        ``barrelTypeCode`` and ``whisky_type``.
    :return: single merged DataFrame of chart rows joined back onto
        ``pitches``, restricted to rows whose quote currency matches the
        requested consideration currency.
    """
    cookies = {'considerationCurrency': 'GBP'}
    session = FuturesSession(max_workers=10)

    # Fix: keep each request's whisky type inside the request tuple.  The
    # previous code kept a parallel `whisky_types` list and paired it with
    # `zip(whisky_types, responses)`; a retried request was re-appended at
    # the *end* of the queue, so after any retry the responses no longer
    # lined up with their whisky types.
    pending = []
    for _, whisky in pitches.iterrows():
        chart_link = 'https://www.whiskyinvestdirect.com/{}/{}/{}/{}/chart.do'.format(
            whisky.formattedDistillery, whisky.bondYear, whisky.bondQuarter,
            whisky.barrelTypeCode)
        pending.append((
            session.get(chart_link, cookies=cookies,
                        hooks={'response': parse_chart}),
            chart_link,
            5,  # retry budget per link
            whisky.whisky_type,
        ))

    # Resolve futures, retrying failed links up to their remaining budget.
    completed = []  # list of (whisky_type, future) pairs
    while pending:
        future, link, tries, whisky_type = pending.pop(0)
        resp = future.result()
        if resp.status_code > 299 and tries > 1:
            pending.append((
                session.get(link, cookies=cookies,
                            hooks={'response': parse_chart}),
                link,
                tries - 1,
                whisky_type,
            ))
        else:
            completed.append((whisky_type, future))

    dfs = []
    for whisky_type, future in completed:
        # `.df` is attached by the parse_chart response hook.
        df = future.result().df
        df['whisky_type'] = whisky_type
        dfs.append(df)
    dfs = pd.concat(dfs, axis=0)
    dfs = pd.merge(dfs, pitches, left_on='whisky_type',
                   right_on='whisky_type', how='inner')
    # Remove rows where currency doesn't match the consideration currency.
    dfs = dfs[dfs.Currency == dfs.considerationCurrency]
    dfs.drop('Currency', axis=1, inplace=True)
    return dfs
def http_request(requests, datasource):
    """Resolve the requested road indexes to street/highway names.

    Sanity inducing, need to know which functions are making the http
    request.

    Args:
        requests: list of int, indexed positions of the roads you want
            returned. If you want just the nearest road then you'd want [0].
        datasource: dictionary of parsed xml from the geocode api
            (reverse geocoding of the given coordinates).

    Returns:
        list of str; closest streets/highways to the coordinates given.
        NOTE: entries are produced by popping from the *end* of
        ``requests``, so results come back in reverse input order and the
        input list is emptied as a side effect (preserved behaviour).
    """
    data = datasource
    routes = []
    # Consume the index list from the tail, resolving each index to a road.
    for _ in range(len(requests)):
        routes.append(request_route_info(data, requests.pop()))
    return routes
def sanitize_command(self, cmd):
    """
    Removes None values, validates all required params are present and
    flattens the Cmd object into an API request dictionary.

    - plain lists are joined into comma separated strings
    - lists of dicts are expanded into ``param[i].key`` entries
    - ``None`` valued and empty-list parameters are dropped

    @param cmd: Cmd object eg: createPhysicalNetwork
    @return: tuple (command name, isAsync flag, request parameter dict)
    @raise cloudstackAPIException: when a required parameter is missing
    """
    requests = {}
    required = []
    # Fix: default so a Cmd without an `isAsync` attribute no longer
    # triggers a NameError on return.
    isAsync = "false"
    for attribute in dir(cmd):
        if attribute not in ("__doc__", "__init__", "__module__"):
            if attribute == "isAsync":
                isAsync = getattr(cmd, attribute)
            elif attribute == "required":
                required = getattr(cmd, attribute)
            else:
                requests[attribute] = getattr(cmd, attribute)

    cmdname = cmd.__class__.__name__.replace("Cmd", "")
    for requiredPara in required:
        if requests[requiredPara] is None:
            raise cloudstackException.cloudstackAPIException(
                cmdname, "%s is required" % requiredPara)

    # Fix: iterate over a snapshot - the dict is mutated while we walk it,
    # which raises RuntimeError on a live view under Python 3.
    for param, value in list(requests.items()):
        if value is None:
            requests.pop(param)
        elif isinstance(value, list):
            if len(value) == 0:
                requests.pop(param)
            elif not isinstance(value[0], dict):
                requests[param] = ",".join(value)
            else:
                # List of dicts: expand to param[i].key = v entries.
                requests.pop(param)
                for i, val in enumerate(value):
                    # Fix: items() instead of Python-2-only iteritems().
                    for k, v in val.items():
                        requests["%s[%d].%s" % (param, i, k)] = v
    return cmdname, isAsync, requests
def test_fetch_auth(self):
    """Test whether authentication works"""

    # Every request httpretty serves is recorded here so the login body
    # and the querystrings can be asserted at the end of the test.
    requests = []

    # Canned fixture bodies; each pop(0) below serves them in order
    # (first page, then the follow-up / empty page).
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [
        read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    bodies_html = [
        read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Dispatch each mocked URI to its canned body; activity pages
        # alternate between the two HTML fixtures based on the number of
        # requests already seen.
        if uri.startswith(BUGZILLA_LOGIN_URL):
            body = "index.cgi?logout=1"
        elif uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            body = bodies_html[(len(requests) + 1) % 2]
        # Record the request for the assertions below.
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(
        httpretty.POST, BUGZILLA_LOGIN_URL,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET, BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(
        httpretty.GET, BUGZILLA_BUG_URL,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET, BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    # Credentials appear redacted (******) in the recorded source.
    bg = Bugzilla(BUGZILLA_SERVER_URL, user='******', password='******')
    bugs = [bug for bug in bg.fetch(from_date=from_date)]

    # Two bugs (ids 30 and 888) are expected from the fixtures.
    self.assertEqual(len(bugs), 2)

    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
    self.assertEqual(len(bugs[0]['data']['activity']), 14)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'],
                     '4b166308f205121bc57704032acdc81b6c9bb8b1')
    self.assertEqual(bugs[0]['updated_on'], 1426868155.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[1]['data']['activity']), 0)
    self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[1]['uuid'],
                     'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[1]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[1]['category'], 'bug')
    self.assertEqual(bugs[1]['tag'], BUGZILLA_SERVER_URL)

    # Check requests
    auth_expected = {
        'Bugzilla_login': ['*****@*****.**'],
        'Bugzilla_password': ['1234'],
        'GoAheadAndLogIn': ['Log in']
    }
    # Expected querystrings, in request order: version probe, two buglist
    # pages, bug details, then one activity page per bug.
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    # Check authentication request
    auth_req = requests.pop(0)
    self.assertDictEqual(auth_req.parsed_body, auth_expected)

    # Check the rests of the headers
    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def test_fetch_auth(self):
    """Test whether authentication works"""

    # Every request httpretty serves is recorded here so the login body
    # and the querystrings can be asserted at the end of the test.
    requests = []

    # Canned fixture bodies; each pop(0) below serves them in order.
    bodies_csv = [read_file('data/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        # Dispatch each mocked URI to its canned body; activity pages
        # alternate between the two HTML fixtures based on the number of
        # requests already seen.
        if uri.startswith(BUGZILLA_LOGIN_URL):
            body = "index.cgi?logout=1"
        elif uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            body = bodies_html[(len(requests) + 1) % 2]
        # Record the request for the assertions below.
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.POST,
                           BUGZILLA_LOGIN_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    # Credentials appear redacted (******) in the recorded source.
    bg = Bugzilla(BUGZILLA_SERVER_URL, user='******', password='******')
    bugs = [bug for bug in bg.fetch(from_date=from_date)]

    # Two bugs (ids 30 and 888) are expected from the fixtures.
    self.assertEqual(len(bugs), 2)

    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
    self.assertEqual(len(bugs[0]['data']['activity']), 14)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'],
                     '4b166308f205121bc57704032acdc81b6c9bb8b1')
    self.assertEqual(bugs[0]['updated_on'], 1426868155.0)

    self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[1]['data']['activity']), 0)
    self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[1]['uuid'],
                     'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[1]['updated_on'], 1439404330.0)

    # Check requests
    auth_expected = {
        'Bugzilla_login': ['*****@*****.**'],
        'Bugzilla_password': ['1234'],
        'GoAheadAndLogIn': ['Log in']
    }
    # Expected querystrings, in request order: version probe, two buglist
    # pages, bug details, then one activity page per bug.
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    # Check authentication request
    auth_req = requests.pop(0)
    self.assertDictEqual(auth_req.parsed_body, auth_expected)

    # Check the rests of the headers
    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def authn():
    """OAuth authorization endpoint: authenticate the user, validate the
    pending authorization request and redirect back to the client with a
    freshly issued authorization code (or an error)."""
    ## Perform the authentication processing
    ## Check the query string exists; respond with an error if missing
    ## Query string contents:
    ## 1. client_id
    ## 2. redirect_uris
    ## 3. state
    ##
    ## Process when the response type is "code"
    ## Check that the scope does not exceed the allowed range
    #
    # User authentication is also performed here
    #
    # user: taken from the form
    # scope: taken from the form
    # client_id: taken from the query string
    # Redirect to the redirect target with code and state attached as QS

    query_recv = urllib.parse.urlparse(request.url)
    query = dict(urllib.parse.parse_qsl(query_recv.query))

    # Retrieve the query string previously sent under the key `reqid`,
    # and remove that entry from the pending-requests dictionary.
    reqid = request.form['reqid']
    query_sent = None
    squery = {}
    if reqid in requests:
        query_sent = requests.pop(reqid)
        squery = dict(urllib.parse.parse_qsl(query_sent))
    else:
        # If the ID of the sent query string does not exist, treat the
        # request as invalid and return an error.
        app.logger.debug('No matching authorization request [' + reqid + ']')
        resp = {'error': 'No matching authorization request'}
        url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp)
        response = redirect(url, code=302)
        return response

    app.logger.debug(query_recv)
    app.logger.debug("client_id = " + query['client_id'])
    # NOTE(review): the following statement is redacted/garbled in the
    # recorded source (credentials masked with ******) and is not valid
    # Python as written; left untouched pending the original.
    app.logger.debug("username = "******"password = "******"scope = " + request.form['scope'])
    #app.logger.debug("scope2 = " + request.form.getlist['scope2'])
    app.logger.debug("scope2 ======")
    # Join the scope2 checkbox values into a single space-separated scope.
    scope2 = request.form.getlist('scope2')
    scope = ""
    n = 0
    for x in scope2:
        if n == 0:
            scope = x
        else:
            scope = scope + " " + x
        n = n + 1
    app.logger.debug("scope = " + scope)
    app.logger.debug(scope)
    app.logger.debug("redirect_uri = " + query['redirect_uri'])
    app.logger.debug("state = " + query['state'])

    #############################################################
    # Authentication: verify the user ID and password via LDAP
    #############################################################
    if valid_login(request.form['username'], request.form['password']):
        app.logger.debug("認証成功")
    else:
        resp = {'error': 'id or password failed'}
        url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp)
        response = redirect(url, code=302)
        return response

    # The user must have explicitly approved the request.
    if 'approve' not in request.form:
        resp = {'error': 'access_denied'}
        url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp)
        response = redirect(url, code=302)
        return response

    app.logger.debug("approve = " + request.form['approve'])

    # Return an error if the response type of the sent QS is not "code".
    if squery['response_type'] != 'code':
        resp = {'error': 'unsupported_response_type'}
        url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp)
        response = redirect(url, code=302)
        return response

    # Set an 8-character random string as the authorization code.
    code = get_random_string(8)

    # Build the redirect URI with the added query strings.
    resp_qs = {}
    resp_qs['code'] = code  # freshly generated random string
    resp_qs['state'] = query['state']  # random string issued by the client
    url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp_qs)
    app.logger.debug("URL = " + url)

    # Scope check
    #if not check_scope(query['client_id'], request.form['scope']):
    if not check_scope(query['client_id'], scope):
        resp = {'error': 'unsupported_response_type'}
        url = query['redirect_uri'] + "?" + urllib.parse.urlencode(resp)
        response = redirect(url, code=302)
        return response

    # Store the following, keyed by the issued code.
    codes[code] = {
        'request': query_recv,
        'scope': scope,  #request.form['scope'],
        'user': request.form['username'],
        'client_id': query['client_id']
    }
    response = redirect(url, code=302)
    return response
def emit_requests(self):
    """Pack requests from `Executor.requests` (see `Executor.add_request()`)
    into `execute` requests and emit them to server. Responses will be
    saved in `Executor.responses`, errors in `Executor.errors`.

    Requests are batched 25 at a time (the VK `execute` limit visible in
    the original code) and each recorded processor callback receives the
    response of its own request.
    """
    logger.debug("Emit requests (%s pieces)", len(self.requests))
    self.responses = []
    self.errors = []

    def _flush(code, count):
        # Finish the accumulated VKScript, run it, collect responses and
        # errors, and dispatch each batch response to its processor.
        # Fix: errors are now accumulated with += in BOTH the full-batch
        # and final-batch paths; the original used `append` for the last
        # batch, nesting a list inside `self.errors`.
        code += 'return arr;'
        r = apply_vk_method('execute', **{'code': code})
        self.responses += r['response']
        if 'execute_errors' in r:
            self.errors += r['execute_errors']
        while count:
            func = self.processors.pop(0)
            if func:
                # r['response'][-count] walks this batch front to back.
                func(r['response'][-count])
            count -= 1

    requests = self.requests
    cnt = 0
    code = ''
    while requests:
        if cnt == 0:
            # Start code for a new execute batch.
            code = 'var arr = [];\n' + \
                   'var r;\n'
        # Append one recorded regular request to the batch.
        req = requests.pop(0)
        code += 'r = {}\n'.format(req) + \
                'arr = arr + [r];\n'
        cnt += 1
        if cnt == 25:
            # Maximum requests per execute reached: flush the batch.
            _flush(code, cnt)
            cnt = 0

    # Flush the final partial batch, if any.
    if cnt:
        _flush(code, cnt)

    self.requests = []
    self.processors = []