def requestURL(baseurl, params={}):
    # Build the request without sending it, just to get the final encoded URL.
    req = requests.Request(method='GET', url=baseurl,
                           params=canonical_order(params))
    prepped = req.prepare()
    return prepped.url
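# Usage sketch for requestURL above. canonical_order is referenced but not
# defined in this snippet; here we assume it simply sorts the query
# parameters by key so the generated URL is deterministic (useful for
# caching or signing). requests encodes a list of (key, value) pairs in order.
def canonical_order(params):
    return sorted(params.items())

# requestURL('https://httpbin.org/get', {'b': '2', 'a': '1'})
# -> 'https://httpbin.org/get?a=1&b=2'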
# Open a new browser
browser = Browser('chrome', **executable_path, headless=False)

# Define the components of the request
method = 'GET'
url = 'https://auth.tdameritrade.com/auth?'
client_code = api_key + '@AMER.OAUTHAP'

# Define the payload. MAKE SURE TO HAVE THE CORRECT REDIRECT URI.
payload_auth = {
    'response_type': 'code',
    'redirect_uri': 'http://127.0.0.1',
    'client_id': client_code
}

built_url = requests.Request(method, url, params=payload_auth).prepare()

# Go to the URL
my_url = built_url.url
browser.visit(my_url)

# Fill out the form
payload_fill = {'username': username, 'password': password}
browser.find_by_id('username').first.fill(payload_fill['username'])
browser.find_by_id('password').first.fill(payload_fill['password'])
browser.find_by_id('accept').first.click()
time.sleep(1)

# Get the text message box
browser.find_by_text("Can't get the text message?").first.click()
def _curl_bitmex(self, api, query=None, postdict=None, timeout=3, verb=None,
                 rethrow_errors=False):
    """Send a request to BitMEX Servers."""
    # Handle URL
    url = self.base_url + api

    # Default to POST if data is attached, GET otherwise
    if not verb:
        verb = 'POST' if postdict else 'GET'

    # Auth: use the access token by default, API key/secret if provided
    auth = AccessTokenAuth(self.token)
    if self.apiKey:
        auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)

    def maybe_exit(e):
        if rethrow_errors:
            raise e
        else:
            exit(1)

    # Make the request
    try:
        req = requests.Request(verb, url, json=postdict, auth=auth, params=query)
        prepped = self.session.prepare_request(req)
        response = self.session.send(prepped, timeout=timeout)
        # Make non-2xx responses throw
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        # 401 - Auth error. Re-auth and re-run this request.
        if response.status_code == 401:
            if self.token is None:
                self.logger.error("Login information or API Key incorrect, please check and restart.")
                self.logger.error("Error: " + response.text)
                if postdict:
                    self.logger.error(postdict)
                # Always exit, even if rethrow_errors, because this is fatal
                exit(1)
            self.logger.warning("Token expired, reauthenticating...")
            sleep(1)
            self.authenticate()
            return self._curl_bitmex(api, query, postdict, timeout, verb)

        # 404 can be thrown if the order being canceled does not exist.
        elif response.status_code == 404:
            if verb == 'DELETE':
                self.logger.error("Order not found: %s" % postdict['orderID'])
                return
            self.logger.error("Unable to contact the BitMEX API (404). " +
                              "Request: %s \n %s" % (url, json.dumps(postdict)))
            maybe_exit(e)

        # 429 - ratelimited
        elif response.status_code == 429:
            self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
                              "order pairs or contact [email protected] to raise your limits. " +
                              "Request: %s \n %s" % (url, json.dumps(postdict)))
            sleep(1)
            return self._curl_bitmex(api, query, postdict, timeout, verb)

        # 503 - BitMEX temporary downtime, likely due to a deploy. Try again.
        elif response.status_code == 503:
            self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
                                "Request: %s \n %s" % (url, json.dumps(postdict)))
            sleep(1)
            return self._curl_bitmex(api, query, postdict, timeout, verb)

        # Duplicate clOrdID: that's fine, probably a deploy, go get the order and return it
        elif (response.status_code == 400 and
              response.json()['error'] and
              response.json()['error']['message'] == 'Duplicate clOrdID'):
            order = self._curl_bitmex(
                '/order',
                query={'filter': json.dumps({'clOrdID': postdict['clOrdID']})},
                verb='GET')[0]
            if (order['orderQty'] != postdict['quantity'] or
                    order['price'] != postdict['price'] or
                    order['symbol'] != postdict['symbol']):
                raise Exception(
                    'Attempted to recover from duplicate clOrdID, but order returned from API ' +
                    'did not match POST.\nPOST data: %s\nReturned order: %s' %
                    (json.dumps(postdict), json.dumps(order)))
            # All good
            return order

        # Unknown error
        else:
            self.logger.error("Unhandled Error: %s: %s %s" % (e, e.message, response.text))
            self.logger.error("Endpoint was: %s %s: %s" % (verb, api, json.dumps(postdict)))
            maybe_exit(e)

    except requests.exceptions.Timeout as e:
        # Timeout: re-run this request
        self.logger.warning("Timed out, retrying...")
        return self._curl_bitmex(api, query, postdict, timeout, verb)

    except requests.exceptions.ConnectionError as e:
        self.logger.warning("Unable to contact the BitMEX API (ConnectionError). Please check the URL. Retrying. " +
                            "Request: %s \n %s" % (url, json.dumps(postdict)))
        sleep(1)
        return self._curl_bitmex(api, query, postdict, timeout, verb)

    return response.json()
def test_params_accepts_kv_list(self):
    # requests.Request takes the method first; the encoded URL lives on the
    # prepared request.
    request = requests.Request('GET', 'http://example.com/path',
                               params=[('a', 'b')]).prepare()
    self.assertEqual(request.url, 'http://example.com/path?a=b')
def debug_request(method, url, json_data, headers):
    req = requests.Request(method, url, json=json_data, headers=headers).prepare()
    print_request(req)
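# print_request is assumed to be a small helper defined elsewhere; a minimal
# sketch that dumps the prepared request line, headers, and body:
def print_request(prepped):
    print(prepped.method, prepped.url)
    for name, value in prepped.headers.items():
        print('{}: {}'.format(name, value))
    if prepped.body:
        print()
        print(prepped.body)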
def sherlock(username, site_data, verbose=False, tor=False, unique_tor=False, proxy=None):
    """Run Sherlock Analysis.

    Checks for existence of username on various social media sites.

    Keyword Arguments:
    username   -- String indicating username that report should be created against.
    site_data  -- Dictionary containing all of the site data.
    verbose    -- Boolean indicating whether to give verbose output.
    tor        -- Boolean indicating whether to use a tor circuit for the requests.
    unique_tor -- Boolean indicating whether to use a new tor circuit for each request.
    proxy      -- String indicating the proxy URL

    Return Value:
    Dictionary containing results from report. Key of dictionary is the name
    of the social network site, and the value is another dictionary with
    the following keys:
        url_main:      URL of main site.
        url_user:      URL of user on site (if account exists).
        exists:        String indicating results of test for account existence.
        http_status:   HTTP status code of query which checked for existence on site.
        response_text: Text that came back from request. May be None if
                       there was an HTTP error when checking for existence.
    """
    global amount
    fname = username.lower() + ".txt"

    if os.path.isfile(fname):
        os.remove(fname)
        print((Style.BRIGHT + Fore.GREEN + "[" +
               Fore.YELLOW + "*" +
               Fore.GREEN + "] Removing previous file:" +
               Fore.WHITE + " {}").format(fname))

    print((Style.BRIGHT + Fore.GREEN + "[" +
           Fore.YELLOW + "*" +
           Fore.GREEN + "] Checking username" +
           Fore.WHITE + " {}" +
           Fore.GREEN + " on:").format(username))

    # A user agent is needed because some sites don't return the correct
    # information since they think that we are bots.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0'
    }

    # Allow 1 thread for each external service, so `len(site_data)` threads total
    executor = ThreadPoolExecutor(max_workers=len(site_data))

    # Create session based on request methodology
    underlying_session = requests.session()
    underlying_request = requests.Request()
    if tor or unique_tor:
        underlying_request = TorRequest()
        underlying_session = underlying_request.session

    # Create multi-threaded session for all requests.
    # Use our custom FuturesSession that exposes response time.
    session = ElapsedFuturesSession(executor=executor, session=underlying_session)

    # Results from analysis of all sites
    results_total = {}

    # First create futures for all requests. This allows the requests to run in parallel.
    for social_network, net_info in site_data.items():

        # Results from analysis of this specific site
        results_site = {}

        # Record URL of main site
        results_site['url_main'] = net_info.get("urlMain")

        # Don't make a request if the username is invalid for the site
        regex_check = net_info.get("regexCheck")
        if regex_check and re.search(regex_check, username) is None:
            # No need to do the check at the site: this user name is not allowed.
            print((Style.BRIGHT + Fore.WHITE + "[" +
                   Fore.RED + "-" +
                   Fore.WHITE + "]" +
                   Fore.GREEN + " {}:" +
                   Fore.YELLOW + " Illegal Username Format For This Site!"
                   ).format(social_network))
            results_site["exists"] = "illegal"
        else:
            # URL of user on site (if it exists)
            url = net_info["url"].format(username)
            results_site["url_user"] = url

            request_method = session.get
            if social_network != "GitHub":
                # If only the status code is needed, don't download the body
                if net_info["errorType"] == 'status_code':
                    request_method = session.head

            # This future starts running the request in a new thread; it doesn't block the main thread
            if proxy is not None:
                proxies = {"http": proxy, "https": proxy}
                future = request_method(url=url, headers=headers, proxies=proxies)
            else:
                future = request_method(url=url, headers=headers)

            # Store future in data for access later
            net_info["request_future"] = future

            # Reset identity for tor (if needed)
            if unique_tor:
                underlying_request.reset_identity()

        # Add this site's results into the final dictionary with all of the other results.
        results_total[social_network] = results_site

    # Open the file containing account links
    f = open_file(fname)

    # Core logic: If tor requests, make them here. If multi-threaded requests, wait for responses.
    for social_network, net_info in site_data.items():

        # Retrieve results again
        results_site = results_total.get(social_network)

        # Retrieve other site information again
        url = results_site.get("url_user")
        exists = results_site.get("exists")
        if exists is not None:
            # We have already determined the user doesn't exist here
            continue

        # Get the expected error type
        error_type = net_info["errorType"]

        # Default data in case there are any failures in doing a request.
        http_status = "?"
        response_text = ""

        # Retrieve future and ensure it has finished
        future = net_info["request_future"]
        r, error_type, response_time = get_response(request_future=future,
                                                    error_type=error_type,
                                                    social_network=social_network,
                                                    verbose=verbose)

        # Attempt to get request information
        try:
            http_status = r.status_code
        except:
            pass
        try:
            response_text = r.text.encode(r.encoding)
        except:
            pass

        if error_type == "message":
            error = net_info.get("errorMsg")
            # Checks if the error message is in the HTML
            if error not in r.text:
                print_found(social_network, url, response_time, verbose)
                write_to_file(url, f)
                exists = "yes"
                amount = amount + 1
            else:
                print_not_found(social_network, response_time, verbose)
                exists = "no"

        elif error_type == "status_code":
            # Checks if the status code of the response is 2XX
            if 200 <= r.status_code < 300:
                print_found(social_network, url, response_time, verbose)
                write_to_file(url, f)
                exists = "yes"
                amount = amount + 1
            else:
                print_not_found(social_network, response_time, verbose)
                exists = "no"

        elif error_type == "response_url":
            error = net_info.get("errorUrl")
            # Checks if the redirect URL is the same as the one defined in data.json
            if error not in r.url:
                print_found(social_network, url, response_time, verbose)
                write_to_file(url, f)
                exists = "yes"
                amount = amount + 1
            else:
                print_not_found(social_network, response_time, verbose)
                exists = "no"

        elif error_type == "":
            print((Style.BRIGHT + Fore.WHITE + "[" +
                   Fore.RED + "-" +
                   Fore.WHITE + "]" +
                   Fore.GREEN + " {}:" +
                   Fore.YELLOW + " Error!").format(social_network))
            exists = "error"

        # Save exists flag
        results_site['exists'] = exists

        # Save results from request
        results_site['http_status'] = http_status
        results_site['response_text'] = response_text
        results_site['response_time_ms'] = response_time

        # Add this site's results into the final dictionary with all of the other results.
        results_total[social_network] = results_site

    print((Style.BRIGHT + Fore.GREEN + "[" +
           Fore.YELLOW + "*" +
           Fore.GREEN + "] Saved: " +
           Fore.WHITE + "{}").format(fname))

    final_score(amount, f)

    return results_total
def http_request(self, verb, path, query_data=None, post_data=None,
                 streamed=False, files=None, **kwargs):
    """Make an HTTP request to the Gitlab server.

    Args:
        verb (str): The HTTP method to call ('get', 'post', 'put', 'delete')
        path (str): Path or full URL to query ('/projects' or
                    'http://whatever/v4/api/projects')
        query_data (dict): Data to send as query parameters
        post_data (dict): Data to send in the body (will be converted to json)
        streamed (bool): Whether the data should be streamed
        files (dict): The files to send to the server
        **kwargs: Extra options to send to the server (e.g. sudo)

    Returns:
        A requests result object.

    Raises:
        GitlabHttpError: When the return code is not 2xx
    """
    query_data = query_data or {}
    url = self._build_url(path)

    params = {}
    utils.copy_dict(params, query_data)

    # Deal with kwargs: by default a user uses kwargs to send data to the
    # gitlab server, but this generates problems (python keyword conflicts
    # and python-gitlab/gitlab conflicts).
    # So we provide a `query_parameters` key: if it's there we use its dict
    # value as arguments for the gitlab server, and ignore the other
    # arguments, except pagination ones (per_page and page).
    if "query_parameters" in kwargs:
        utils.copy_dict(params, kwargs["query_parameters"])
        for arg in ("per_page", "page"):
            if arg in kwargs:
                params[arg] = kwargs[arg]
    else:
        utils.copy_dict(params, kwargs)

    opts = self._get_session_opts(content_type="application/json")

    verify = opts.pop("verify")
    timeout = opts.pop("timeout")
    # If timeout was passed into kwargs, allow it to override the default
    timeout = kwargs.get("timeout", timeout)

    # We need to deal with json vs. data when uploading files
    if files:
        json = None
        post_data["file"] = files.get("file")
        post_data["avatar"] = files.get("avatar")
        data = MultipartEncoder(post_data)
        opts["headers"]["Content-type"] = data.content_type
    else:
        json = post_data
        data = None

    # Requests assumes that `.` should not be encoded as %2E and will make
    # changes to urls using this encoding. Using a prepped request we can
    # get the desired behavior.
    # The Requests behavior is right but it seems that web servers don't
    # always agree with this decision (this is the case with a default
    # gitlab installation).
    req = requests.Request(verb, url, json=json, data=data, params=params, **opts)
    prepped = self.session.prepare_request(req)
    prepped.url = utils.sanitized_url(prepped.url)
    settings = self.session.merge_environment_settings(
        prepped.url, {}, streamed, verify, None)

    # obey the rate limit by default
    obey_rate_limit = kwargs.get("obey_rate_limit", True)
    # do not retry transient errors by default
    retry_transient_errors = kwargs.get("retry_transient_errors", False)
    # set max_retries to 10 by default, disable by setting it to -1
    max_retries = kwargs.get("max_retries", 10)
    cur_retries = 0

    while True:
        result = self.session.send(prepped, timeout=timeout, **settings)

        self._check_redirects(result)

        if 200 <= result.status_code < 300:
            return result

        if (429 == result.status_code and obey_rate_limit) or (
                result.status_code in [500, 502, 503, 504]
                and retry_transient_errors):
            if max_retries == -1 or cur_retries < max_retries:
                wait_time = 2 ** cur_retries * 0.1
                if "Retry-After" in result.headers:
                    wait_time = int(result.headers["Retry-After"])
                cur_retries += 1
                time.sleep(wait_time)
                continue

        error_message = result.content
        try:
            error_json = result.json()
            for k in ("message", "error"):
                if k in error_json:
                    error_message = error_json[k]
        except (KeyError, ValueError, TypeError):
            pass

        if result.status_code == 401:
            raise GitlabAuthenticationError(
                response_code=result.status_code,
                error_message=error_message,
                response_body=result.content,
            )

        raise GitlabHttpError(
            response_code=result.status_code,
            error_message=error_message,
            response_body=result.content,
        )
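# A quick worked example of the backoff schedule above: when the server sends
# no Retry-After header, the wait grows as 2**n * 0.1 seconds per retry, so
# the first attempts sleep 0.1, 0.2, 0.4, 0.8, 1.6, ... seconds, up to
# max_retries attempts.
for n in range(5):
    print(n, 2 ** n * 0.1)  # 0.1, 0.2, 0.4, 0.8, 1.6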
def test_sign(self):
    request = requests.Request(data={})
    self._APP.sign(request)
    self.assertDictEqual(request.data, {'token': self._APP_TOKEN})
def list_agencies(self):
    return requests.Request('GET', self.feed_url, params={
        'command': 'agencyList',
    })
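# The method above returns an unprepared requests.Request; a caller sketch
# (the `client` name is illustrative) prepares and sends it through a session:
session = requests.Session()
response = session.send(session.prepare_request(client.list_agencies()))
print(response.text)  # body format depends on the feed being queried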
def prepare_request(self, url):
    request = requests.Request('GET', url)
    return request.prepare()
def translate(self, text, lang_tgt='auto', lang_src='auto', pronounce=False):
    # Fall back to auto-detection for unknown language codes.
    try:
        lang = LANGUAGES[lang_src]
    except:
        lang_src = 'auto'
    try:
        lang = LANGUAGES[lang_tgt]
    except:
        lang_tgt = 'auto'

    text = str(text)
    if len(text) >= 5000:
        return "Warning: Can only detect less than 5000 characters"
    if len(text) == 0:
        return ""

    headers = {
        "Referer": "http://translate.google.{}/".format(self.url_suffix),
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/47.0.2526.106 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded;charset=utf-8"
    }

    freq = self._package_rpc(text, lang_src, lang_tgt)
    response = requests.Request(method='POST',
                                url=self.url,
                                data=freq,
                                headers=headers)

    try:
        if self.proxies is None or type(self.proxies) != dict:
            self.proxies = {}
        with requests.Session() as s:
            s.proxies = self.proxies
            r = s.send(request=response.prepare(),
                       verify=False,
                       timeout=self.timeout)

        for line in r.iter_lines(chunk_size=1024):
            decoded_line = line.decode('utf-8')
            if "MkEWBc" in decoded_line:
                try:
                    # Peel apart the nested JSON of the batchexecute response.
                    response = decoded_line
                    response = json.loads(response)
                    response = list(response)
                    response = json.loads(response[0][2])
                    response_ = list(response)
                    response = response_[1][0]
                    if len(response) == 1:
                        if len(response[0]) > 5:
                            sentences = response[0][5]
                        else:  ## only url
                            sentences = response[0][0]
                            if pronounce == False:
                                return sentences
                            elif pronounce == True:
                                return [sentences, None, None]
                        translate_text = ""
                        for sentence in sentences:
                            sentence = sentence[0]
                            translate_text += sentence.strip() + ' '
                        translate_text = translate_text
                        if pronounce == False:
                            return translate_text
                        elif pronounce == True:
                            pronounce_src = (response_[0][0])
                            pronounce_tgt = (response_[1][0][0][1])
                            return [translate_text, pronounce_src, pronounce_tgt]
                    elif len(response) == 2:
                        sentences = []
                        for i in response:
                            sentences.append(i[0])
                        if pronounce == False:
                            return sentences
                        elif pronounce == True:
                            pronounce_src = (response_[0][0])
                            pronounce_tgt = (response_[1][0][0][1])
                            return [sentences, pronounce_src, pronounce_tgt]
                except Exception as e:
                    raise e
        r.raise_for_status()
    except requests.exceptions.ConnectTimeout as e:
        raise e
    except requests.exceptions.HTTPError as e:
        # Request successful, bad response
        raise google_new_transError(tts=self, response=r)
    except requests.exceptions.RequestException as e:
        # Request failed
        raise google_new_transError(tts=self)
def open(self, url, language='en', post_data=None, get_data=None, headers=None):
    """Opens a connection to a webpage and saves its HTML content in ``self.content``

    Args:
        url        (str): The URL to open
        language   (str): The language code for the ``Content-Language`` header
        post_data (dict): POST data for the request
        get_data  (dict): GET data for the request
    """
    if get_data:
        url += '?' + urlencode(get_data)

    log.debug("Opening URL: %s" % repr(url))
    if self.session.proxies:
        log.debug("Proxies: %s" % (repr(self.session.proxies)))

    self._read_cookies(url)
    self.session.cookies = self._cookies
    log.debug("Cookies for %s: %s" % (repr(url), repr(self._cookies)))

    # Default headers for any request. Pretend like we are the usual browser.
    req_headers = {
        'User-Agent': self.user_agent,
        'Content-Language': language,
        'Cache-Control': 'no-cache',
        'Accept-Encoding': 'deflate, compress, gzip',
        'Origin': url,
        'Referer': url
    }

    # If headers were passed to the open() call - we overwrite the defaults.
    if headers:
        for key, value in headers.iteritems():
            if value:
                req_headers[key] = value
            elif key.capitalize() in req_headers:
                del req_headers[key.capitalize()]

    if self.token:
        req_headers["Authorization"] = self.token

    req = None
    if post_data:
        req = requests.Request('POST', url, data=post_data, headers=req_headers)
    else:
        req = requests.Request('GET', url, headers=req_headers)
    prepped = self.session.prepare_request(req)
    self.request_headers = prepped.headers

    try:
        self._good_spider()
        with self.session.send(prepped) as response:
            self.headers = response.headers
            self.status = response.status_code
            self.url = response.url
            self._save_cookies()
            if self.response_charset:
                self.content = response.content.decode(self.response_charset, 'ignore')
            else:
                self.content = response.text
    except Exception as e:
        import traceback
        log.error("%s failed with %s:" % (repr(url), repr(e)))
        map(log.debug, traceback.format_exc().split("\n"))

    log.debug("Status for %s : %s" % (repr(url), str(self.status)))

    return self.status == 200
def scrape(url):
    req = requests.get(url)
    content = req.content
    soup = BeautifulSoup(content, 'html.parser')
    images = soup.find_all("div", {"class": "woocommerce-product-gallery__image"})
    price = soup.find("span", {"class": "woocommerce-Price-amount"}).text.strip("$")
    desc_tag = soup.find_all("div", {"class": "tab-content"})[0]
    print(desc_tag.text)
    # desc = None
    # desc_list = []
    # for tag in desc_tag:
    #     print(tag)
    #     desc_list.append(tag)
    #
    # desc = ''.join(desc_list)
    # print(desc)
    # print("----")
    code = url.split("/")[-2]
    image_url = []
    tags = soup.find("span", {"class": "posted_in"})
    tag_list = []
    category_tag = None
    category_tag = 'Chanel'
    # for tag in tags.find_all("a", href=True):
    #     try:
    #         print(tag.text.strip())
    #     except:
    #         logging.error("tag error %s" % url)
    #         continue
    #     print("============")
    #     if tag.text.strip() in category_list:
    #         tag_list.append(tag.text)
    #         category_tag = tag.text.strip()
    #         continue
    #
    for image_tmp in images:
        for image in image_tmp.find_all("a", href=True):
            image_url.append(image['href'])

    # if not len(tag_list):
    #     logging.error("logging tag not exits %s" % url)
    #     return
    # print(tag_list[0])

    relative_path = []
    for index, image in enumerate(image_url):
        try:
            # Normalize the image URL through a prepared request before fetching.
            r = requests.get(requests.Request('GET', image).prepare().url, stream=True)
        except:
            logging.error("logging tag not exits,%s" % url)
            continue
        if r.status_code == 200:
            path = "catalog/images/bag/%s_%s.jpg" % (code, index)
            full_path = os.path.join("/var/www/html/replica/upload/image", path)
            print(full_path)
            if not os.path.exists(os.path.dirname(full_path)):
                print(os.path.dirname(full_path))
                os.makedirs(os.path.dirname(full_path))
            try:
                with open(full_path, 'wb') as f:
                    for chunk in r.iter_content(1024):
                        f.write(chunk)
                relative_path.append(path)
            except:
                logging.error("error get image,%s" % url)
                continue

    price = price.replace(",", "")
    print(price, desc_tag.text, relative_path, code)
def _make_request(self,
                  endpoint: str,
                  method: str,
                  headers: str = 'json',
                  params: dict = None,
                  json: dict = None,
                  data: dict = None) -> List[Dict]:
    """Used to make a request for each of the news clients.

    Arguments:
    ----
    endpoint (str): The endpoint to build the URL.

    method (str): The request method, can be one of the following:
        ['get', 'post', 'put', 'delete', 'patch'].

    headers (str): The content-type mode, can be one of the following:
        ['form', 'json'].

    params (dict): The URL params for the request.

    json (dict): A json data payload for a request.

    data (dict): A data payload for a request.

    Returns:
    ----
    List[Dict]: A list of news items objects.
    """

    # First validate the token before making the request.
    if not self._validate_token():
        return {'token': 'Expired or invalid.'}

    # Build the URL.
    url = self._build_url(endpoint=endpoint)

    # Grab the headers.
    headers = self._headers(mode=headers)

    # Define a new session.
    new_session = requests.Session()
    new_session.verify = True

    # Prepare the request.
    new_request = requests.Request(
        method=method.upper(),
        headers=headers,
        params=params,
        data=data,
        json=json,
        url=url
    ).prepare()

    # Send the request.
    response: requests.Response = new_session.send(request=new_request)

    # If it was okay, return the data.
    if response.ok:
        return response.json()
    else:
        print('Invalid Request')
        return response.json()
def create_get_request(self, path, params):
    search = 'q' in params
    return requests.Request(method="GET", url=self.url(path, search), params=params)
def perform_request(
    self,
    method,
    path,
    params=None,
    body=None,
    request_timeout=DEFAULT,
    ignore_status=(),
    headers=None,
):
    url = self.base_url + path
    headers = normalize_headers(headers)

    if params:
        url = "%s?%s" % (url, urlencode(params or {}))

    orig_body = body
    if self.http_compress and body:
        body = self._gzip_compress(body)
        headers["content-encoding"] = "gzip"

    start = time.time()
    request = requests.Request(method=method, headers=headers, url=url, data=body)
    prepared_request = self.session.prepare_request(request)
    send_kwargs = {
        "timeout": request_timeout
        if request_timeout is not DEFAULT
        else self.request_timeout
    }
    send_kwargs.update(
        self.session.merge_environment_settings(prepared_request.url, {}, None, None, None)
    )
    try:
        response = self.session.send(prepared_request, **send_kwargs)
        duration = time.time() - start
        raw_data = response.content.decode("utf-8", "surrogatepass")
    except Exception as e:
        self.log_request_fail(
            method=method,
            url=url,
            body=orig_body,
            duration=time.time() - start,
            exception=e,
        )
        if isinstance(e, requests.Timeout):
            raise ConnectionTimeout("Connection timed out during request", errors=(e,))
        raise ConnectionError(str(e), errors=(e,))

    # Raise errors based on http status codes; let the client handle those if needed.
    if (
        not (200 <= response.status_code < 300)
        and response.status_code not in ignore_status
    ):
        self.log_request_fail(
            method=method,
            url=url,
            body=orig_body,
            duration=duration,
            status=response.status_code,
            response=raw_data,
        )
        self._raise_error(response.status_code, raw_data)

    self.log_request_success(
        method=method,
        url=url,
        body=orig_body,
        status=response.status_code,
        response=raw_data,
        duration=duration,
    )

    return response.status_code, response.headers, raw_data
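# _gzip_compress is referenced above but not shown; a minimal sketch,
# assuming it simply gzips the (possibly str) request body:
import gzip

def _gzip_compress(self, body):
    if isinstance(body, str):
        body = body.encode("utf-8")
    return gzip.compress(body)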
def Call(method, url, payload=None, session=None, debug=False):
    """Execute v2 API call.

    :param url: URL paths associated with the API call
    :param payload: dict containing all parameters to submit with POST call

    :returns: decoded API json result
    """
    if not clc._LOGIN_TOKEN_V2:
        API._Login()

    if session is None:
        session = clc._REQUESTS_SESSION

    if payload is None:
        payload = {}

    # If executing refs provided in API they are abs paths,
    # else refs we build in the sdk are relative.
    if url[0] == '/':
        fq_url = "%s%s" % (clc.defaults.ENDPOINT_URL_V2, url)
    else:
        fq_url = "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2, url)

    session.headers.update({'Authorization': "Bearer %s" % clc._LOGIN_TOKEN_V2})

    if isinstance(payload, basestring):
        # Added for server ops with str payload
        session.headers['content-type'] = "Application/json"
    else:
        session.headers['content-type'] = "application/x-www-form-urlencoded"

    if method == "GET":
        r = session.request(method, fq_url,
                            params=payload,
                            verify=API._ResourcePath('clc/cacert.pem'))
    else:
        r = session.request(method, fq_url,
                            data=payload,
                            verify=API._ResourcePath('clc/cacert.pem'))

    if debug:
        API._DebugRequest(request=requests.Request(method, fq_url, data=payload,
                                                   headers=session.headers).prepare(),
                          response=r)

    if r.status_code >= 200 and r.status_code < 300:
        try:
            return r.json()
        except:
            return {}
    else:
        try:
            e = clc.APIFailedResponse("Response code %s. %s %s %s" %
                                      (r.status_code, r.json()['message'], method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = r.json()
            e.response_text = r.text
            raise e
        except clc.APIFailedResponse:
            raise
        except:
            e = clc.APIFailedResponse("Response code %s. %s. %s %s" %
                                      (r.status_code, r.text, method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = {}  # or should this be None?
            e.response_text = r.text
            raise e
def _getToken(self, username: str, password: str):
    """Obtain a new token using the passed credentials.

    Args:
        username: str, s-number (including s)
        password: str, corresponding password

    Return:
        Authentication token, or throws an error if something went wrong.
    """
    ses = requests.Session()

    # Step 1: start the OAuth2 authorization flow.
    req1 = requests.Request(
        'GET',
        'https://auth-app-ruprd-ruprd.xpaas.caci.nl/oauth2/authorize'
        '?response_type=token&client_id=osiris-student-mobile-ruprd'
        '&redirect_uri=https://ru.osiris-student.nl'
    ).prepare()
    r = ses.send(req1)
    self._assureSuccess(r)

    auth_state_start_idx = r.url.find('AuthState=') + 10
    auth_state = r.url[auth_state_start_idx:]

    language = 'EN'
    submit = 'Login'
    payload = {
        'username': username,
        'password': password,
        'submit': submit,
        'AuthState': requests.compat.unquote(auth_state)
    }

    # Step 2: submit the login form.
    req2 = requests.Request(
        'POST',
        'https://conext.authenticatie.ru.nl/simplesaml/module.php/core/loginuserpass.php?',
        params=payload,
        cookies=r.cookies).prepare()
    r2 = ses.send(req2)
    self._assureSuccess(r2)

    saml_form = r2.text[r2.text.find('name="SAMLResponse"') + 27:]
    saml_form = saml_form[:saml_form.find('"')]

    # Step 3: forward the SAML assertion to SURFconext.
    req = requests.Request(
        'POST',
        'https://engine.surfconext.nl/authentication/sp/consume-assertion',
        data={'SAMLResponse': saml_form},
        cookies={
            'main': ses.cookies.get('main'),
            'HTTPSERVERID': ses.cookies.get('HTTPSERVERID')
        })
    r3 = req.prepare()
    ret = ses.send(r3)
    self._assureSuccess(ret)

    saml_form = ret.text[ret.text.find('name="SAMLResponse"') + 27:]
    saml_form = saml_form[:saml_form.find('"')]
    relay_state = ret.text[ret.text.find('name="RelayState"') + 25:]
    relay_state = relay_state[:relay_state.find('"')]

    # Step 4: complete the flow and extract the access token from the URL.
    req = requests.Request(
        'POST',
        'https://auth-app-ruprd-ruprd.xpaas.caci.nl/oauth2/authorize',
        data={
            'SAMLResponse': saml_form,
            'RelayState': relay_state
        },
        cookies={}).prepare()
    ret = ses.send(req)
    self._assureSuccess(ret)

    access_token = ret.url[ret.url.find('access_token') + 13:]
    access_token = access_token[:access_token.find('&')]

    return access_token
def _call(self, call, path, params=None, headers=None, authenticated=False):
    try:
        url = '{url}/{path}'.format(url=self.API_URL, path=path)
        if not params:
            params = {}
        params['a'] = call

        authenticate = False
        if authenticated:
            if self.api_key and self.api_secret:
                authenticate = True
                params['apikey'] = self.api_key
                params['nonce'] = int(time())
            else:
                raise CCexAPIError('This call requires an API key and secret')

        req = requests.Request(method='GET', url=url, params=params, headers=headers)
        prep_req = req.prepare()

        if authenticate:
            # Sign the full prepared URL with the API secret.
            signature = hmac.new(self.api_secret.encode('utf-8'),
                                 prep_req.url.encode('utf-8'),
                                 'sha512').hexdigest()
            prep_req.headers['apisign'] = signature

        try:
            res = self.session.send(prep_req)
        except Exception as exc:
            raise CCexAPIRequestError(prep_req, exc)

        if 'Maintenance' in res.text:
            raise CCexAPIResponseError(res, {'message': res.text})

        try:
            data = res.json()
        except Exception as exc:
            raise CCexAPIResponseFormatError(res, exc)

        # for tickers
        if url.endswith('json'):
            return data

        # for api methods
        if data.get('success') is not True:
            raise CCexAPIResponseError(res, data)
        return data.get('result')
    except CCexAPIError:
        raise
    except Exception as exc:
        raise CCexAPIError('Unexpected error during CCex API call: {}'.format(repr(exc)))
def Submit(url):
    with requests.Session() as s:
        r = requests.Request(method='GET', url=url)
        prep = r.prepare()
        # Keep the URL exactly as given, bypassing requests' URL normalization.
        prep.url = url
        return s.send(prep, verify=False, timeout=2)
def test_path_is_not_double_encoded(self):
    # requests.Request takes the method first; path_url is available on the
    # prepared request.
    request = requests.Request('GET', 'http://0.0.0.0/get/test case').prepare()
    self.assertEqual(request.path_url, '/get/test%20case')
def create_get_request(self, stream, params, item_id=None):
    if item_id:
        url = _join(self.v3_url(stream), self.v3_endpoint(stream, item_id))
    else:
        url = self.v3_url(stream)
    return requests.Request(method="GET", url=url, params=params)
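# _join is referenced above but not shown; a plausible sketch (name and
# behavior assumed) that glues URL segments together without doubling slashes:
def _join(base, *segments):
    return '/'.join([base.rstrip('/')] + [s.strip('/') for s in segments])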
def _curl_bitmex(self, path, query=None, postdict=None, timeout=None, verb=None,
                 rethrow_errors=True, max_retries=None):
    """Send a request to BitMEX Servers."""
    # Handle URL
    url = self.base_url + path

    if timeout is None:
        timeout = self.timeout

    # Default to POST if data is attached, GET otherwise
    if not verb:
        verb = 'POST' if postdict else 'GET'

    # By default don't retry POST or PUT. Retrying GET/DELETE is okay because they are idempotent.
    # In the future we could allow retrying PUT, so long as 'leavesQty' is not used (not idempotent),
    # or you could change the clOrdID (set {"clOrdID": "new", "origClOrdID": "old"}) so that an amend
    # can't erroneously be applied twice.
    if max_retries is None:
        max_retries = 0 if verb in ['POST', 'PUT'] else 3

    # Auth: API Key/Secret
    auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)

    def exit_or_throw(e):
        if rethrow_errors:
            raise e
        else:
            exit(1)

    def retry():
        self.retries += 1
        if self.retries > max_retries:
            raise Exception("Max retries on %s (%s) hit, raising." %
                            (path, json.dumps(postdict or '')))
        return self._curl_bitmex(path, query, postdict, timeout, verb,
                                 rethrow_errors, max_retries)

    # Make the request
    response = None
    try:
        self.logger.info("sending req to %s: %s" %
                         (url, json.dumps(postdict or query or '')))
        req = requests.Request(verb, url, json=postdict, auth=auth, params=query)
        prepped = self.session.prepare_request(req)
        response = self.session.send(prepped, timeout=timeout)
        # Make non-2xx responses throw
        response.raise_for_status()

    except requests.exceptions.HTTPError as e:
        if response is None:
            raise e

        # 401 - Auth error. This is fatal.
        if response.status_code == 401:
            self.logger.error("API Key or Secret incorrect, please check and restart.")
            self.logger.error("Error: " + response.text)
            if postdict:
                self.logger.error(postdict)
            # Always exit, even if rethrow_errors, because this is fatal
            exit(1)

        # 404 can be thrown if the order being canceled does not exist.
        elif response.status_code == 404:
            if verb == 'DELETE':
                self.logger.error("Order not found: %s" % postdict['orderID'])
                return
            self.logger.error("Unable to contact the BitMEX API (404). " +
                              "Request: %s \n %s" % (url, json.dumps(postdict)))
            exit_or_throw(e)

        # 429 - ratelimited; cancel orders & wait until X-Ratelimit-Reset
        elif response.status_code == 429:
            self.logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
                              "order pairs or contact [email protected] to raise your limits. " +
                              "Request: %s \n %s" % (url, json.dumps(postdict)))

            # Figure out how long we need to wait.
            ratelimit_reset = response.headers['X-Ratelimit-Reset']
            to_sleep = int(ratelimit_reset) - int(time.time())
            reset_str = datetime.datetime.fromtimestamp(int(ratelimit_reset)).strftime('%X')

            # We're ratelimited, and we may be waiting for a long time. Cancel orders.
            self.logger.warning("Canceling all known orders in the meantime.")
            self.cancel([o['orderID'] for o in self.open_orders()])

            self.logger.error("Your ratelimit will reset at %s. Sleeping for %d seconds." %
                              (reset_str, to_sleep))
            time.sleep(to_sleep)

            # Retry the request.
            return retry()

        # 503 - BitMEX temporary downtime, likely due to a deploy. Try again.
        elif response.status_code == 503:
            self.logger.warning("Unable to contact the BitMEX API (503), retrying. " +
                                "Request: %s \n %s" % (url, json.dumps(postdict)))
            time.sleep(3)
            return retry()

        elif response.status_code == 400:
            error = response.json()['error']
            message = error['message'].lower() if error else ''

            # Duplicate clOrdID: that's fine, probably a deploy, go get the order(s) and return it
            if 'duplicate clordid' in message:
                orders = postdict['orders'] if 'orders' in postdict else postdict

                IDs = json.dumps({'clOrdID': [order['clOrdID'] for order in orders]})
                orderResults = self._curl_bitmex('/order', query={'filter': IDs}, verb='GET')

                for i, order in enumerate(orderResults):
                    if (order['orderQty'] != abs(postdict['orderQty']) or
                            order['side'] != ('Buy' if postdict['orderQty'] > 0 else 'Sell') or
                            order['price'] != postdict['price'] or
                            order['symbol'] != postdict['symbol']):
                        raise Exception(
                            'Attempted to recover from duplicate clOrdID, but order returned from API ' +
                            'did not match POST.\nPOST data: %s\nReturned order: %s' %
                            (json.dumps(orders[i]), json.dumps(order)))
                # All good
                return orderResults

            elif 'insufficient available balance' in message:
                self.logger.error('Account out of funds. The message: %s' % error['message'])
                exit_or_throw(Exception('Insufficient Funds'))

        # If we haven't returned or re-raised yet, we get here.
        self.logger.error("Unhandled Error: %s: %s" % (e, response.text))
        self.logger.error("Endpoint was: %s %s: %s" % (verb, path, json.dumps(postdict)))
        exit_or_throw(e)

    except requests.exceptions.Timeout as e:
        # Timeout: re-run this request
        self.logger.warning("Timed out on request: %s (%s), retrying..." %
                            (path, json.dumps(postdict or '')))
        return retry()

    except requests.exceptions.ConnectionError as e:
        self.logger.warning("Unable to contact the BitMEX API (%s). Please check the URL. Retrying. " +
                            "Request: %s %s \n %s" % (e, url, json.dumps(postdict)))
        time.sleep(1)
        return retry()

    # Reset retry counter on success
    self.retries = 0

    return response.json()
def update(self):
    random.seed()
    time.sleep(random.randint(0, 120))
    logger.info('>>>>> Starting update task')

    user_agent = fake_useragent.UserAgent(fallback=LEO_DEFAULT_USER_AGENT,
                                          path=LEO_FAKE_USER_AGENT_CACHE)
    current_agent = user_agent.random
    logger.info('Current user agent: {0}'.format(current_agent))
    http_headers = {'User-Agent': current_agent}

    web_session = requests.Session()
    web_request = requests.Request('GET', CONTENT_URL, headers=http_headers)
    prepared_request = web_request.prepare()

    logger.info('Executing HTTP request (url: "{0}")'.format(CONTENT_URL))
    try:
        response = web_session.send(prepared_request, timeout=LEO_TASK_TIMEOUT)
    except requests.RequestException as err:
        logger.exception('Error occurred while trying to update masterclasses: {0}'.format(err))
        logger.warning('Task "download_images" will be retried (attempt {0} of {1})'.format(
            self.request.retries, LEO_DOWNLOAD_MAX_RETRIES))
        raise self.retry(exc=err, countdown=(2 ** self.request.retries) * LEO_RETRY_DELAY)

    logger.info('Open parser config file: "{0}"'.format(LEO_PARSER_CONFIG_PATH))
    with open(LEO_PARSER_CONFIG_PATH, 'r', encoding='utf8') as f_dsc:
        config = f_dsc.read()
    logger.info('Config file closed')

    config_dict = json.loads(config)
    lp_fabric = LeoParserFabric(config_dict)
    logger.info('Creating parser')
    lp = lp_fabric.create_parser()

    logger.info('Extracting page content (encoding: {0})'.format(response.encoding))
    raw_content = response.content.decode(encoding=response.encoding).encode(
        encoding='utf-8').decode(encoding='utf-8')

    logger.info('Parsing')
    parsed_content = lp.parse_to_dict(raw_content)

    for key, body in parsed_content.items():
        logger.info('Masterclass has been found (uid: {0})'.format(key))

        master, created = Master.objects.get_or_create(name=body.pop('master', ''))
        if created:
            logger.info('New master was added: "{0}"'.format(master.name))
        else:
            logger.info('Master already exists: "{0}"'.format(master.name))

        location, created = Location.objects.get_or_create(name=body.pop('location', ''))
        if created:
            logger.info('New location was added: "{0}"'.format(location.name))
        else:
            logger.info('Location already exists: "{0}"'.format(location.name))

        if not Masterclass.objects.filter(uid=key).exists():
            mc = Masterclass.objects.create(**body, master=master, location=location)
            logger.info('New masterclass was added: "{0}"'.format(mc.uid))
            chain = celery.chain(
                download_images.si(mc.id).set(queue='downloads'),
                notify.si(mc.id).set(queue='notifications'),
            )
            chain.delay()
        else:
            body.pop('uid')
            if body.get('duration') == 0:
                body.pop('duration')
            body = {key: value for key, value in body.items()
                    if value not in {'', None}}
            Masterclass.objects.filter(uid=key).update(**body, master=master, location=location)
            logger.info('Masterclass already exists: "{0}"'.format(key))

    logger.info('<<<<< Update task finished')
def authenticated_request(self, request_params):
    return self.apply_authentication(requests.Request(**request_params))
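# Usage sketch: request_params mirrors the requests.Request keyword
# arguments, and apply_authentication (defined elsewhere) is expected to
# attach credentials, e.g. by setting request.auth, before the caller
# prepares and sends the result. The `client` name is illustrative.
req = client.authenticated_request({'method': 'GET',
                                    'url': 'https://api.example.com/v1/me'})
response = requests.Session().send(req.prepare())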
def prepared_get(self, url='http://example.com', **kwargs):
    return requests.Request('GET', url, **kwargs).prepare()
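# Example use of the helper above (inside the same test class): build a
# prepared GET and inspect the final URL without sending anything.
prepped = self.prepared_get(params={'q': 'test'})
assert prepped.url == 'http://example.com/?q=test'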
def ship_payload(self, payload):
    self._logger.debug("Begin doing HTTP POST with SOAP message")
    if self.session:
        # Prepare the http request
        self._logger.debug("Begin preparing POST request with payload:\n%s", payload)
        try:
            request = requests.Request('POST', self.endpoint, data=str(payload))
            prepared_request = self.session.prepare_request(request)
        except requests.exceptions.RequestException:
            error_message = "Error preparing HTTP request"
            # self._logger.exception(error_message)
            raise HttpEndPointProtocolException(error_message)
        else:
            self._logger.debug("Finished preparing POST request")

        # Submit the http request
        self._logger.debug("Begin submitting POST request")
        try:
            response = self.session.send(
                prepared_request,
                verify=self.pOptions.verify_ssl,
                timeout=(self.pOptions.connection_timeout, self.pOptions.read_timeout))
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
            error_message = "HTTP connection error"
            # self._logger.exception(error_message)
            raise HttpEndPointProtocolException(error_message)
        except requests.exceptions.RequestException:
            error_message = "Error preparing HTTP request"
            self._logger.exception(error_message)
            raise HttpEndPointTransportException(error_message)
        else:
            self._logger.debug("Finished submitting POST request")

        # Now check the response for errors
        self._logger.debug("Begin checking POST response")
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError:
            error_message = (
                "DRAC WSMAN endpoint returned HTTP code '{}' Reason '{}'"
            ).format(response.status_code, response.reason)
            # self._logger.exception(error_message)
            if response.status_code == 401:
                raise HttpEndPointProtocolAuthException(error_message)
            else:
                raise HttpEndPointProtocolException(error_message)
        else:
            self._logger.debug("Received non-error HTTP response")
        finally:
            self._logger.debug("Finished checking POST response")

        # Make sure it's a string; avoid unicode difficulties
        reply = response.content
        self._logger.debug("Received SOAP reply:\n%s", reply)

        # Return it
        return reply
        'date': dateforwebservice,
        'provenanceUri': provenanceUri,
        'concernedItems': [{
            'uri': concernedItemUri,
            'typeURI': concernedItemType
        }],
        'metadata': {
            'sensor': sensor,
            'position': position
        }
    })),
    'file': (imagename, imagetosend, filetype[0])
}

# Send file and metadata to the webservice
req = requests.Request('POST', urlfilepost,
                       headers=headersfilepost,
                       files=multipart_form_data)
prepared = req.prepare()


def pretty_print_POST(req):
    """
    At this point it is completely built and ready to be fired; it is
    "prepared". However pay attention at the formatting used in this
    function because it is programmed to be pretty printed and may differ
    from the actual request.
    """
    print('{}\n{}\n{}\n\n{}'.format(
        '-----------START-----------',
        req.method + ' ' + req.url,
        '\n'.join('{}: {}'.format(k, v) for k, v in req.headers.items()),
        req.body,
    ))
def _loadUrl(self, url, data=None, recache=False, language=None):
    """Return response from The TVDB API"""
    if not language:
        language = self.config['language']
    if language not in self.config['valid_languages']:
        raise ValueError("Invalid language %s, options are: %s" %
                         (language, self.config['valid_languages']))
    self.headers['Accept-Language'] = language

    # TODO: Handle exceptions
    # TODO: Update the token

    # The encoded url is used for hashing in the cache so
    # python 2 and 3 generate the same hash.
    if not self.__authorized:
        # Only authorize if we haven't before and we
        # don't have the url in the cache.
        fake_session_for_key = requests.Session()
        fake_session_for_key.headers['Accept-Language'] = language
        cache_key = None
        try:
            # In case the session class has no cache object, fail gracefully.
            cache_key = self.session.cache.create_key(
                fake_session_for_key.prepare_request(requests.Request('GET', url)))
        except:
            pass
        if not cache_key or not self.session.cache.has_key(cache_key):
            self.authorize()

    response = self.session.get(url, headers=self.headers)
    r = response.json()
    log().debug("loadurl: %s lid=%s" % (url, language))
    log().debug("response:")
    log().debug(r)
    error = r.get('Error')
    errors = r.get('errors')
    r_data = r.get('data')
    links = r.get('links')

    if error:
        if error == u'Resource not found':
            # raise(tvdb_resourcenotfound)
            # Handle no data at a different level so it is more specific.
            pass
        elif error == u'Not Authorized':
            raise tvdb_notauthorized
        elif error.startswith(u"ID: ") and error.endswith("not found"):
            # FIXME: Refactor error out of this method
            raise tvdb_shownotfound("%s" % error)
        else:
            raise tvdb_error("%s" % error)

    if errors:
        if errors and u'invalidLanguage' in errors:
            # raise(tvdb_invalidlanguage(errors[u'invalidLanguage']))
            # invalidLanguage does not mean there is no data,
            # there is just less data.
            pass

    if data and isinstance(data, list):
        data.extend(r_data)
    else:
        data = r_data

    if links and links['next']:
        url = url.split('?')[0]
        _url = url + "?page=%s" % links['next']
        self._loadUrl(_url, data)

    return data
def __init__(self, method, resource, auth, headers, data, verify_ssl):
    """Initialize the data object."""
    self._request = requests.Request(
        method, resource, headers=headers, auth=auth, data=data).prepare()
    self._verify_ssl = verify_ssl
    self.data = None
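# Sketch of how the prepared request stored above might be sent later;
# update() is a hypothetical method name, not part of the snippet above:
def update(self):
    with requests.Session() as session:
        response = session.send(self._request, verify=self._verify_ssl)
        self.data = response.text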