def get_http_request(url, payload, method='POST', headers=None, use_proxy=False,
                     use_proxy_auth=False, trust_env=True):
    try:
        session = Session()
        session.trust_env = trust_env
        session.proxies = Util.get_proxies() if use_proxy else None
        session.auth = Util.get_proxy_auth() if use_proxy_auth else None
        request = Request(
            'POST' if method not in ('GET', 'POST') else method,
            url,
            data=payload if method == 'POST' else None,
            params=payload if method == 'GET' else None,
            headers=headers
        )
        prepped = request.prepare()
        response = session.send(
            prepped,
            timeout=app.config['HTTP_REQUESTS_TIMEOUT']
        )
        session.close()
    except Exception as e:  # `except Exception, e` is Python 2 only
        # Return an empty Response plus the error message; calling
        # raise_for_status() on a bare Response (status_code is None)
        # would itself crash.
        response = Response()
        return response, 'Error al realizar la consulta - Motivo: {}'.format(e)
    # Success path: no error message (assumed convention, mirroring the
    # two-element tuple returned above).
    return response, None
def estatus_cuenta(self):
    transaccion_id = get_epoch()
    user_token = self._get_token()
    if not user_token:
        return ''
    method = 'estatus_cuenta'
    data = {
        'rfc': self.rfc,
        'user_token': user_token,
        'transaccion_id': transaccion_id
    }
    data = self._SOAP[method].format(**data).encode('utf-8')
    headers = {
        'SOAPAction': '"{}"'.format(self._ACTIONS[method]),
        'Content-length': str(len(data)),  # header values must be strings
        'Content-type': 'text/xml; charset="UTF-8"'
    }
    s = Session()
    req = Request('POST', self.WS['clientes'], data=data, headers=headers)
    prepped = req.prepare()
    try:
        response = s.send(prepped, timeout=TIMEOUT)
        res = xml2dict.parse(response.text)
        ok, res = self._check_fault(res)
        if ok:
            return ''
        return res['RespuestaEstatusCuenta']['Estatus']
    except exceptions.Timeout:
        self.error = 'Tiempo de espera agotado'  # "timeout expired"
        print(self.error)
    except Exception as e:
        print('Estatus Cuenta', e)
    return ''
def post_collection(project, th_collection, status=None, expect_errors=False,
                    consumer_key=None, consumer_secret=None):
    # Set the credentials
    OAuthCredentials.set_credentials(SampleData.get_credentials())
    credentials = OAuthCredentials.get_credentials(project)

    # The only time the credentials should be overridden is when
    # a client needs to test authentication failure confirmation
    consumer_key = consumer_key or credentials['consumer_key']
    consumer_secret = consumer_secret or credentials['consumer_secret']

    auth = TreeherderAuth(consumer_key, consumer_secret, project)
    client = TreeherderClient(protocol='http', host='localhost', auth=auth)
    uri = client._get_project_uri(project, th_collection.endpoint_base)

    req = Request('POST', uri, json=th_collection.get_collection_data(),
                  auth=auth)
    prepped_request = req.prepare()

    response = TestApp(application).post_json(
        prepped_request.url,
        params=th_collection.get_collection_data(),
        status=status
    )
    return response
def doRequest(self, method, url, params=None, parse=True, data=None):
    """ Send HTTP request, with given method, credentials and data to
        the given URL, and return the success and the result on success.
    """
    if not self.bitbucket.auth:
        raise ValueError("No auth credentials.")

    if data:
        data = dict(data)
    else:
        data = {}

    r = Request(
        method=method,
        url=url,
        auth=self.bitbucket.auth,
        params=params,
        data=data
    )
    s = Session()
    resp = s.send(r.prepare())
    status = resp.status_code
    text = resp.text
    error = resp.reason
    if status >= 200 and status < 300:
        if parse:
            return json.loads(text)
        else:
            return text
    else:
        raise exceptions.DispatchError(text, url, error, status)
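# A minimal usage sketch for doRequest. The endpoint URL and the `api` object
# below are illustrative assumptions, not part of the snippet above:
#
#     user = api.doRequest('GET', 'https://api.bitbucket.org/1.0/user')
#
# With parse=True (the default) a 2xx response yields the decoded JSON; any
# other status raises exceptions.DispatchError with the body, URL, reason
# phrase and status code.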
def timbra_xml(self, xml, id_original=0):
    user_token = self._get_token(id_original)
    if not user_token:
        return ''
    method = 'timbra_xml'
    data = {
        'xml': escape(xml.encode('ascii', 'xmlcharrefreplace').decode('utf-8')),
        'rfc': self.rfc,
        'user_token': user_token,
        'transaccion_id': id_original,
    }
    data = self._SOAP[method].format(**data).encode('utf-8')
    headers = {
        'SOAPAction': '"{}"'.format(self._ACTIONS[method]),
        'Content-length': str(len(data)),  # header values must be strings
        'Content-type': 'text/xml; charset="UTF-8"'
    }
    s = Session()
    req = Request('POST', self.WS['timbrado'], data=data, headers=headers)
    prepped = req.prepare()
    try:
        response = s.send(prepped, timeout=TIMEOUT)
        res = xml2dict.parse(response.text)
        ok, res = self._check_fault(res)
        if ok:
            return ''
        return res['RespuestaTimbraXML']['ComprobanteXML']['DatosXML']
    except exceptions.Timeout:
        self.error = 'Tiempo de espera agotado'  # "timeout expired"
    except Exception as e:
        self.error = str(e)
        log.debug(e)
    return ''
def __init__(self, url, callback, method='GET', headers=None, need_proxy=False,
             fail_times=0, timeout=TIMEOUT):
    # `url` is required and must precede the defaulted parameters; a default
    # before `callback` is a SyntaxError.
    Request.__init__(self, method, url, headers)
    self.callback = callback
    self.need_proxy = need_proxy
    self.fail_times = fail_times
    self.timeout = timeout
def test_contents_copy_no_content(self):
    self.session.put(self.addr_m2_ref)
    req = Request('COPY', self.addr_m2_ref_path,
                  headers={'Destination': self.path_paste_wrong})
    resp = self.session.send(req.prepare())
    # assertEqual, not assertTrue: assertTrue(x, 400) treats 400 as the
    # failure message and passes for any truthy status code.
    self.assertEqual(resp.status_code, 400)
def _get_response(self, content_type, url, headers, file_descriptor):
    s = Session()
    response = None
    req = Request(content_type, url, headers=headers, data=file_descriptor)
    prepared = req.prepare()
    try:
        response = s.send(prepared)
    except exceptions.Timeout:
        raise
    except exceptions.TooManyRedirects:
        raise
    except exceptions.RequestException:
        raise
    if response.status_code != requests.codes.ok:
        try:
            raise BackblazeException(response.status_code,
                                     response.json()['message'],
                                     response, headers)
        except ValueError:
            raise BackblazeException(response.status_code,
                                     response.text,
                                     response, headers)
    return response
def login(self, username, password):
    url = 'http://www.renren.com/PLogin.do'
    data = {
        'email': username,
        'password': password,
        'icode': '',
        'origURL': 'http://www.renren.com/home',  # was 'ttp://...' (typo)
        'domain': 'renren.com',
        'key_id': 1,
        'captcha_type': 'web_login',
    }
    req = Request('POST', url, data=data, headers=self.headers)
    prepped = req.prepare()
    # do something with prepped.body
    # do something with prepped.headers
    resp = self.send(prepped,
                     # stream=stream,
                     # verify=verify,
                     # proxies=proxies,
                     # cert=cert,
                     # timeout=timeout
                     )
    self._html = resp.text
    self._extract_params()
    # login success:
    # ver=7.0; domain=.renren.com; path=/, loginfrom=null; domain=.renren.com; path=/, JSESSIONID=abcnxywIfD6yF6Gelpsov; path=/
    print('login succeeded!' if u'王那' in resp.text else 'login failed')
def _http(self, method, path, json=True, **kw):
    """
    A wrapper for http requests to streamtools.
    """
    # serialize all incoming json
    if 'data' in kw:
        kw['data'] = ujson.dumps(kw['data'])

    # construct the url endpoint
    url = 'http://{}/{}'.format(self.url, path)

    # special handling for streaming kwarg
    stream = kw.pop('stream', False)

    # format the request
    req = Request(method, url, **kw)

    # execute
    resp = self.s.send(req.prepare(), stream=stream)

    # return
    if json:
        return ujson.loads(resp.content)
    return resp
def generate_prepared_request(method, url, headers, data, params, handlers):
    """Add handlers and prepare a Request.

    Parameters
        method (str)
            HTTP Method. (e.g. 'POST')
        url (str)
            URL to send the request to.
        headers (dict)
            Headers to send.
        data (JSON-formatted str)
            Body to attach to the request.
        params (dict)
            Dictionary of URL parameters to append to the URL.
        handlers (list)
            List of callback hooks, for error handling.

    Returns
        (requests.PreparedRequest)
            The fully mutable PreparedRequest object, containing the exact
            bytes to send to the server.
    """
    request = Request(
        method=method,
        url=url,
        headers=headers,
        data=data,
        params=params,
    )
    handlers.append(error_handler)
    for handler in handlers:
        request.register_hook('response', handler)
    return request.prepare()
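# A hedged usage sketch for generate_prepared_request. The hook and URL below
# are illustrative assumptions; `error_handler` itself must already exist in
# this module, since the function above appends it unconditionally.
#
#     import json
#     from requests import Session
#
#     def log_hook(response, **kwargs):
#         # response hooks receive the Response plus transport kwargs
#         print(response.status_code, response.url)
#
#     prepped = generate_prepared_request(
#         method='POST',
#         url='https://httpbin.org/post',
#         headers={'Content-Type': 'application/json'},
#         data=json.dumps({'ping': 'pong'}),
#         params={'debug': '1'},
#         handlers=[log_hook],
#     )
#     response = Session().send(prepped)  # hooks fire when the response arrives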
def get_request(url, header):
    req = Request(method="GET", url=url, headers=header)
    req_prepared = req.prepare()
    s = Session()
    res = s.send(req_prepared)
    return res
def _upload_file(self, params, prepped, api_session):
    # header for upload
    s3_session = Session()
    api_url = self._endpoint_url + "getS3url"
    req = Request('POST', api_url)
    upload_prepped = req.prepare()
    self._encode_headers(upload_prepped.headers)
    upload_prepped.headers['Content-Type'] = 'application/json'
    upload_prepped.headers['User-Agent'] = self._build_user_agent_header()
    self._signer.sign(upload_prepped)

    # prepare params for s3 url
    url_parameters = {'fileName': '', 'tenant': ''}
    if 'fileName' in params and params['fileName']:
        # the key is 'fileName' throughout; params['file_name'] was a KeyError
        url_parameters['fileName'] = params['fileName']
    elif 'fileLocation' in params and params['fileLocation']:
        if os.path.isfile(params['fileLocation']):
            fileName = os.path.basename(params['fileLocation'])
            url_parameters['fileName'] = fileName
    if 'tenant' in params and params['tenant']:
        url_parameters['tenant'] = params['tenant']

    # prepare the body
    serializer = Serializer()
    serial_obj = serializer.serialize_to_request(url_parameters, None)
    upload_prepped.prepare_body(serial_obj['body'], None)
    resp = s3_session.send(upload_prepped)
    resp = json.loads(json.dumps(resp.json()))

    # upload file to S3 bucket
    if 'url' in resp and 'fileLocation' in params and params['fileLocation']:
        # binary mode so the bytes reach S3 unmodified
        with open(params['fileLocation'], 'rb') as upload_fh:
            put(resp['url'], data=upload_fh.read())

    # build upload parameters
    upload_params = {'rowDelim': '', 'colDelim': '', 'headerFields': [],
                     'tenant': '', 'fileType': 0}

    # now do actual upload
    if 'tenant' in params and params['tenant']:
        upload_params['tenant'] = params['tenant']
    upload_params['fileLocation'] = params['fileLocation']
    if os.path.isfile(params['fileLocation']):
        fileName = os.path.basename(params['fileLocation'])
        upload_params['fileName'] = fileName
    if 'fileName' in params and params['fileName']:
        upload_params['fileName'] = params['fileName']
    if 'sourcePlatform' in params and params['sourcePlatform']:
        upload_params['sourcePlatform'] = params['sourcePlatform']
    if 'colDelim' in params and params['colDelim']:
        upload_params['colDelim'] = params['colDelim']
    if 'rowDelim' in params and params['rowDelim']:
        upload_params['rowDelim'] = params['rowDelim']
    if 'headerFields' in params and params['headerFields']:
        upload_params['headerFields'] = params['headerFields']
    if 'fileType' in params and params['fileType']:
        upload_params['fileType'] = params['fileType']

    # prepare the body
    serializer = Serializer()
    serial_obj = serializer.serialize_to_request(upload_params, None)
    prepped.prepare_body(serial_obj['body'], None)
    resp = api_session.send(prepped)
    resp = json.dumps(resp.json())
    return resp
def prepare(self):
    headers = {}
    if self.common_headers:
        headers.update(self.common_headers)
    if self.headers:
        headers.update(self.headers)

    query_params = {}
    data = {}
    if self.method in ["OPTIONS", "GET", "DELETE"]:
        query_params.update(self.params)
    else:
        if self.files:
            # ignore data param formatting so that a multipart form will be made
            data = self.params
        else:
            data = self.data_param_format.format(self.params)
            headers["Content-Type"] = self.data_param_format.content_type

    raw_req = Request(
        method=self.method,
        url=self.url,
        headers=headers,
        data=data,
        params=query_params,
        files=self.files
    )
    self.prepared_request = raw_req.prepare()
    return self.prepared_request
def build_request(self, verb, data, verb_attrs, files=None):
    self.verb = verb
    self._request_dict = data
    self._request_id = uuid.uuid4()

    url = self.build_request_url(verb)

    headers = self.build_request_headers(verb)
    headers.update({'User-Agent': UserAgent,
                    'X-EBAY-SDK-REQUEST-ID': str(self._request_id)})

    # if we are adding files, we ensure there is no Content-Type header
    # already defined; otherwise Request will use the existing one, which is
    # likely not to be multipart/form-data. data must also be a dict, so we
    # make it so if needed
    requestData = self.build_request_data(verb, data, verb_attrs)
    if files:
        del headers['Content-Type']
        if isinstance(requestData, str):  # `basestring` under Python 2
            requestData = {'XMLPayload': requestData}

    request = Request(self.method,
                      url,
                      data=requestData,
                      headers=headers,
                      files=files,
                      )
    self.request = request.prepare()
def _ads_request(method, url, headers=None, data=None, url_params=None):
    logging.warning('Ads Request:%s' % url)
    s = Session()
    # `string.upper()` was removed in Python 3; use the str method instead
    req = Request(method.upper(), url, headers=headers, data=data,
                  params=url_params)
    prepped = req.prepare()
    rsp = s.send(prepped)
    return rsp.text
def dispatch(self, method, url, auth=None, params=None, **kwargs):
    """ Send HTTP request, with given method, credentials and data to
        the given URL, and return the status code and the result on success.
    """
    r = Request(
        method=method,
        url=url,
        auth=auth,
        params=params,
        data=kwargs)
    s = Session()
    resp = s.send(r.prepare())
    status = resp.status_code
    text = resp.text
    error = resp.reason
    if status >= 200 and status < 300:
        if text:
            try:
                return (True, json.loads(text))
            except TypeError:
                pass
            except ValueError:
                pass
        return (True, text)
    elif status >= 300 and status < 400:
        return (False, 'Unauthorized access, '
                'please check your credentials.')
    elif status >= 400 and status < 500:
        return (False, 'Service not found.')
    elif status >= 500 and status < 600:
        return (False, 'Server error.')
    else:
        return (False, error)
def post_request(self, url, data, timeout=10, retries=3):
    """Wrap requests.post for asynchronous posts - includes timeout & retry"""
    logger.debug("task: %s retries:%s", self.request.id, self.request.retries)
    s = Session()
    req = Request('POST', url, data=data)
    prepped = req.prepare()
    try:
        resp = s.send(prepped, timeout=timeout)
        if resp.status_code < 400:
            logger.info("{} received from {}".format(resp.status_code, url))
        else:
            logger.error("{} received from {}".format(resp.status_code, url))
    except RequestException as exc:
        # Typically raised on timeout or connection error; retry after
        # `countdown` seconds unless the retry threshold has been exceeded.
        logger.warning("{} on {}".format(exc, url))  # exc.message is Python 2 only
        if self.request.retries < retries:
            raise self.retry(exc=exc, countdown=20)
        else:
            logger.error(
                "max retries exceeded for {}, last failure: {}".format(
                    url, exc))
    except Exception as exc:
        logger.error("Unexpected exception on {} : {}".format(url, exc))
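# Hedged usage note: the `self.request` / `self.retry` pair implies this is a
# bound Celery-style task, so it would be declared and invoked roughly like
# the sketch below (the `@app.task` decorator and the endpoint are
# assumptions, not part of the snippet above):
#
#     @app.task(bind=True)
#     def post_request(self, url, data, timeout=10, retries=3):
#         ...
#
#     post_request.delay('https://hooks.example/endpoint', {'event': 'ping'})
#
# On RequestException the task re-enqueues itself with a 20-second countdown
# until `retries` is exhausted.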
def get_subscription_permissions(token_det, subscription_id):
    initialise()
    url = ("https://management.azure.com/subscriptions/" + subscription_id +
           "/providers/microsoft.authorization/permissions"
           "?api-version=2014-07-01-preview")
    headers = {"Authorization": "Bearer " + token_det}
    req = Request(method="GET", url=url, headers=headers)
    req_prepped = req.prepare()
    s = Session()
    hasPermission = False
    # hasPermission = {}
    res = s.send(req_prepped)
    per_sp = "microsoft.authorization/roleassignments/write"
    per_gen = "microsoft.authorization/*/write"
    hasPermission = res.content
    if res.status_code == 200:
        resJSON = json.loads(res.content)
        actions_result = resJSON["value"]
        for actions_r in actions_result:
            actions = actions_r["actions"]
            notactions = actions_r["notActions"]
            if per_sp in actions:
                if per_gen not in notactions:
                    hasPermission = True
            else:
                if per_gen in actions:
                    if per_gen not in notactions:
                        hasPermission = True
    return hasPermission
def main(args):
    if len(args) <= 1:
        print("Usage getentries.py startindex endindex")
        sys.exit(1)
    elif not args[0].isdigit() or not args[1].isdigit():
        print("Usage getentries.py startindex endindex")
        sys.exit(1)
    startindex = int(args[0])
    endindex = int(args[1])
    operation = 'ct/v1/get-entries'
    url = 'http://ct.googleapis.com/aviator/{}'.format(operation)
    # urllib.urlencode moved to urllib.parse.urlencode in Python 3
    params = urllib.parse.urlencode({'start': startindex, 'end': endindex - 1})
    s = Session()
    r = Request('GET', '{}?{}'.format(url, params))
    prepped = r.prepare()
    r = s.send(prepped)
    if r.status_code == 200:
        entries = r.json()['entries']
        print(entries)
    else:
        print(r.status_code)
        print(r.text)
def ban_url_list(self, url_list):
    """ Bans a list of urls. """
    if isinstance(url_list, list) and url_list:
        if self.hostnames and self.varnish_nodes:
            url_combo = '(' + '|'.join(url_list) + ')'
            for hostname in self.hostnames:
                header = {'X-Ban-Url': url_combo, 'X-Ban-Host': hostname}
                s = Session()
                for node in self.varnish_nodes:
                    try:
                        req = Request('BAN', node, headers=header)
                        prepped = req.prepare()
                        resp = s.send(prepped, timeout=2)
                    except Exception:
                        log.error('Error sending ban to ' + node)
                    else:
                        if codes.ok != resp.status_code:
                            log.error('Error sending ban to ' + node)
        else:
            log.warning('No varnish nodes provided to clear the cache')
    else:
        log.warning('No URLs provided')
def get_acacess_token_app(tenant_id, client_id, client_secret, resource):
    initialise()
    url = "https://login.windows.net/" + tenant_id + "/oauth2/token"
    # urllib.quote_plus moved to urllib.parse.quote_plus in Python 3
    body_data = ("&grant_type=client_credentials&resource=" + resource +
                 "&client_id=" + client_id +
                 "&client_secret=" + urllib.parse.quote_plus(client_secret))
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    # pass the headers dict; it was built but never attached to the request,
    # so the form content type was never sent
    req = Request(method="POST", url=url, data=body_data, headers=headers)
    req_prepped = req.prepare()
    s = Session()
    res = s.send(req_prepped)
    access_token_det = {}
    if res.status_code == 200:
        responseJSON = json.loads(res.content)
        access_token_det["details"] = responseJSON["access_token"]
        access_token_det["status"] = "1"
        access_token_det["exp_time"] = responseJSON["expires_in"]
        access_token_det["exp_date"] = responseJSON["expires_on"]
        access_token_det["accessDetails"] = responseJSON
    else:
        access_token_det["details"] = str(res.status_code) + str(res.json())
        access_token_det["status"] = "0"
    return access_token_det
def build_environ(method, path, host='localhost', accept_type='text/html',
                  content_type=None, query=None, form=None, files=None,
                  cookies=None):
    if '://' in host:
        url = host.rstrip('/') + '/' + path.lstrip('/')
    else:
        url = 'http://' + host.strip('/') + '/' + path.lstrip('/')
    request = Request(method, url, None, files, form, query, cookies)
    prepared = request.prepare()
    parsed_url = parse_url(prepared.url)
    environ = {
        'HTTP_HOST': parsed_url.host,
        'PATH_INFO': parsed_url.path,
        'REQUEST_METHOD': prepared.method,
        'HTTP_ACCEPT': accept_type,
        'QUERY_STRING': parsed_url.query or '',
    }
    for key, value in iteritems(prepared.headers):
        key = underscore(key)
        if key not in ['content_type', 'content_length']:
            key = 'http_' + key
        environ[key.upper()] = value
    if content_type is not None:
        environ['CONTENT_TYPE'] = content_type
    environ['wsgi.input'] = BytesIO(prepared.body)
    return environ
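# A hedged example of build_environ in use (assuming `underscore` comes from
# the `inflection` package and `iteritems` from `six`, as the call shapes
# suggest). The host and form values are illustrative:
#
#     environ = build_environ(
#         'POST', '/login',
#         host='example.test',
#         form={'user': 'alice', 'password': 'secret'},
#     )
#     environ['REQUEST_METHOD']        # 'POST'
#     environ['PATH_INFO']             # '/login'
#     environ['CONTENT_TYPE']          # form-encoded, set by requests
#     environ['wsgi.input'].read()     # b'user=alice&password=secret'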
def __init__(self, url, callback, method='GET', headers=None, timeout=3,
             proxy=None, params=None, data=None):
    # requests.Request.__init__ takes (self, method, url, headers, ...);
    # the original call dropped `self` and swapped the first two arguments.
    Request.__init__(self, method, url, headers)
    self.callback = callback
    self.timeout = timeout
    self.proxy = proxy
    self.params = params
    self.data = data
def call_api(self, operation_name, params=None):
    if not operation_name:
        return
    if params is None:
        params = {}
    api_session = Session()
    api_url = self._endpoint_url + operation_name
    req = Request('POST', api_url)
    prepped = req.prepare()
    self._encode_headers(prepped.headers)
    prepped.headers['Content-Type'] = 'application/json'
    prepped.headers['User-Agent'] = self._build_user_agent_header()
    self._signer.sign(prepped)

    # check if operation is for 'upload'
    if operation_name == 'upload':
        # get s3url for the upload and then do a upload
        resp = self._upload_file(params, prepped, api_session)
        return resp

    # prepare the body
    serializer = Serializer()
    serial_obj = serializer.serialize_to_request(params, None)
    prepped.prepare_body(serial_obj['body'], None)
    resp = api_session.send(prepped)
    return resp
def _make_request(self, method, url, data=None, params=None, **kwargs):
    if params:
        self.params.update(params)
    if kwargs.get('headers'):
        self.headers.update(kwargs['headers'])
    if data:
        data = self._stringify_dict_list(data)

    url = self.BASE_URL + self.VERSION + url
    req = Request(method, url, data=data, headers=self.headers,
                  params=self.params)
    prepped = req.prepare()
    try:
        response = self._session.send(prepped, timeout=self.timeout)
        if response.status_code > 299:
            if 'message' in str(response.content):
                response.reason += ': {}'.format(response.json()['message'])
            response.raise_for_status()
    except requests.HTTPError:
        msg = '{} {}'.format(response.status_code, response.reason)
        raise HttpError(msg)
    except requests.Timeout:
        raise TimeoutError('{} {} timed out after {} seconds'.format(
            method, url, self.timeout[0] + self.timeout[1]
        ))
    except requests.ConnectionError as e:
        raise ClientError('Could not reach: {} {} {}'.format(method, url, e))
    return response.json()
def prepare_and_send_request(request_method, request_url, payload=None,
                             authorization=None):
    headers = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip,deflate,sdch',
        'Accept-Language': 'en-US,en;q=0.8,pt-BR;q=0.6,pt;q=0.4',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
    }
    if authorization is not None:
        # b64encode works on bytes in Python 3, and the header must be text
        credentials = '%s:%s' % (authorization, TEST_USER_PASSWORD)
        headers['Authorization'] = 'Basic ' + base64.b64encode(
            credentials.encode('utf-8')).decode('ascii')
    if payload is None:
        req = Request(request_method, request_url, headers=headers)
        r = req.prepare()
    else:
        headers['Content-Type'] = 'application/json'
        req = Request(request_method, request_url, data=json.dumps(payload),
                      headers=headers)
        r = req.prepare()
    s = Session()
    s.mount('https://', SSLAdapter('TLSv1'))
    try:
        resp = s.send(r, verify=False, timeout=1)
    except (Timeout, SSLError):
        # `except Timeout, SSLError:` was Python 2 syntax that caught only
        # Timeout and bound it to the name SSLError; a tuple catches both.
        return prepare_and_send_request(request_method, request_url, payload,
                                        authorization)
    return resp  # assumed success-path return, mirroring the recursive retry
def recuperar_acuse(self, uuid):
    transaccion_id = get_epoch()
    user_token = self._get_token(transaccion_id)
    if not user_token:
        return ''
    method = 'recuperar_acuse'
    data = {
        'uuid': uuid,
        'rfc': self.rfc,
        'user_token': user_token,
        'transaccion_id': transaccion_id,
    }
    data = self._SOAP[method].format(**data).encode('utf-8')
    headers = {
        'SOAPAction': '"{}"'.format(self._ACTIONS[method]),
        'Content-length': str(len(data)),  # header values must be strings
        'Content-type': 'text/xml; charset="UTF-8"'
    }
    s = Session()
    req = Request('POST', self.WS['cancelacion'], data=data, headers=headers)
    prepped = req.prepare()
    try:
        response = s.send(prepped, timeout=TIMEOUT)
        res = xml2dict.parse(response.text)
        ok, res = self._check_fault(res)
        if ok:
            return ''
        return res['RespuestaRecuperarAcuse']['AcuseXML'].replace("'", '"')
    except exceptions.Timeout:
        self.error = 'Tiempo de espera agotado'  # "timeout expired"
        print(self.error)
    except Exception as e:
        print('Recuperar Acuse', e)
    return ''  # on failure, matching the early-exit and fault paths above
def get_estatus(self, data):
    data['emisor_rfc'] = escape(data['emisor_rfc'])
    data['receptor_rfc'] = escape(data['receptor_rfc'])
    data = self._soap.format(**data).encode('utf-8')
    headers = {
        'SOAPAction': '"http://tempuri.org/IConsultaCFDIService/Consulta"',
        'Content-length': str(len(data)),  # header values must be strings
        'Content-type': 'text/xml; charset="UTF-8"'
    }
    s = Session()
    s.verify = False
    req = Request('POST', self._webservice, data=data, headers=headers)
    prepped = req.prepare()
    try:
        response = s.send(prepped, timeout=5)
        res = xml2dict.parse(response.text)
        ok, res = self._check_fault(res)
        if ok:
            return False
        self.msg = res['ConsultaResponse']['ConsultaResult']['a:Estado']
        return True
    except exceptions.Timeout:
        self.error = 'Tiempo de espera agotado'  # "timeout expired"
    except Exception as e:
        print(e)
    return False
def authorize_url(self, duration, scopes, state, implicit=False):
    """Return the URL used out-of-band to grant access to your application.

    :param duration: Either ``permanent`` or ``temporary``. ``temporary``
        authorizations generate access tokens that last only 1 hour.
        ``permanent`` authorizations additionally generate a refresh token
        that can be used indefinitely to generate new hour-long access
        tokens. Only ``temporary`` can be specified if ``implicit`` is set
        to ``True``.
    :param scopes: A list of OAuth scopes to request authorization for.
    :param state: A string that will be reflected in the callback to
        ``redirect_uri``. This value should be temporarily unique to the
        client for whom the URL was generated.
    :param implicit: (optional) Use the implicit grant flow (default:
        False). This flow is only available for UntrustedAuthenticators.

    """
    if self.redirect_uri is None:
        raise InvalidInvocation('redirect URI not provided')
    if implicit and not isinstance(self, UntrustedAuthenticator):
        raise InvalidInvocation('Only UntrustedAuthenticator instances can '
                                'use the implicit grant flow.')
    if implicit and duration != 'temporary':
        raise InvalidInvocation('The implicit grant flow only supports '
                                'temporary access tokens.')

    params = {'client_id': self.client_id, 'duration': duration,
              'redirect_uri': self.redirect_uri,
              'response_type': 'token' if implicit else 'code',
              'scope': ' '.join(scopes), 'state': state}
    url = self._requestor.reddit_url + const.AUTHORIZATION_PATH
    request = Request('GET', url, params=params)
    return request.prepare().url
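# Hedged usage sketch (prawcore-style API; the client id, redirect URI and
# requestor wiring below are illustrative assumptions):
#
#     authenticator = UntrustedAuthenticator(
#         requestor, 'CLIENT_ID', redirect_uri='https://app.example/callback')
#     url = authenticator.authorize_url(duration='temporary',
#                                       scopes=['identity', 'read'],
#                                       state='unique-nonce',
#                                       implicit=True)
#
# Since the method ends with Request('GET', ...).prepare().url, the result is
# simply the authorization endpoint with the params dict percent-encoded into
# the query string; nothing is sent over the network.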
def get_statuses(self, timeout=None):
    request = Request('GET', self.__create_url(None, 'statuses'))
    response = self.__send_request(request, timeout)
    return response.json()
def test_encode_request(self):
    r = Request('GET', "http://www.example.com/lala")
    rs = self.encoder.encode(r)
    self.assertIn(r.method, rs)
    self.assertIn(r.url, rs)
def send_http_req(req):
    req = RequestParser(req)
    s = Session()
    r = Request(req.method, req.url, data=req.body)
    r = set_headers(r, req.headers)
    # Session.send() only accepts a PreparedRequest, so prepare first
    return as_str(s.send(r.prepare()))
# r = requests.get('https://www.taobao.com', timeout = 1)
# print(r.status_code)

# A request has two phases: connect and read.
# To set their timeouts separately, pass a (connect, read) tuple
# (requests rejects tuples longer than two elements):
# r = requests.get('https://www.taobao.com', timeout=(5, 30))
# To wait forever, set timeout to None, or simply leave it out, since None is the default:
# r = requests.get('https://www.taobao.com', timeout=None)
# r = requests.get('https://www.taobao.com')

# Authentication
from requests.auth import HTTPBasicAuth

r = requests.get('http://localhost', auth=HTTPBasicAuth('username', 'password'))
print(r.status_code)

# You can also pass a plain tuple; it is wrapped in HTTPBasicAuth by default
r = requests.get('http://localhost', auth=('username', 'password'))
print(r.status_code)

# Prepared Requests
from requests import Request, Session

url = 'http://httpbin.org/post'
data = {
    'name': 'germy'
}
headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36'
}
s = Session()
req = Request('POST', url, data=data, headers=headers)
prepped = s.prepare_request(req)
r = s.send(prepped)
print(r.text)
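# Follow-on sketch: a key benefit of prepared requests is that the exact bytes
# can be inspected or tweaked before sending. This continues from the Session
# `s` and PreparedRequest `prepped` above and uses only standard requests APIs:
prepped.headers['X-Debug'] = '1'    # mutate a header on the prepared object
print(prepped.method, prepped.url)  # the exact method and URL to be sent
print(prepped.body)                 # the encoded form body, e.g. 'name=germy'
r = s.send(prepped)
print(r.status_code)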
def get_rated_list():
    req = Request('GET', url=get_api_url("user.ratedList"),
                  params={'activeOnly': 'true'})
    return get_from_api(req)
def extract(self):
    req_params = {'handle': self._handle, 'count': self._count, 'from': 1}
    req = Request('GET', url=get_api_url('user.status'), params=req_params)
    return self._pool.submit(req)
def _request(self, method: str, path: str, **kwargs) -> Any:
    request = Request(method, self._ENDPOINT + path, **kwargs)
    self._sign_request(request)
    response = self._session.send(request.prepare())
    return self._process_response(response)
def test_CT_TC_WS_FED_BROKER_ACCESS_CONTROL_RBAC_OK_SP_initiated(self, settings):
    """
    Scenario: User logs in to SP1 where he has the appropriate role.
    Same user tries to log in to SP2, SP that he is authorized to access.
    He should be able to access SP2 without authenticating again.
    :param settings:
    :return:
    """
    s = Session()

    # Service provider settings
    sp = settings["sps_wsfed"][0]
    sp_ip = sp["ip"]
    sp_port = sp["port"]
    sp_scheme = sp["http_scheme"]
    sp_path = sp["path"]
    sp_message = sp["logged_in_message"]

    # Service provider 2 settings
    sp2 = settings["sps_wsfed"][1]
    sp2_ip = sp2["ip"]
    sp2_port = sp2["port"]
    sp2_scheme = sp2["http_scheme"]
    sp2_path = sp2["path"]
    sp2_message = sp2["logged_in_message"]

    # Identity provider settings
    idp_ip = settings["idp"]["ip"]
    idp_port = settings["idp"]["port"]
    idp_scheme = settings["idp"]["http_scheme"]
    idp_broker = settings["idp"]["wsfed_broker"]
    idp_form_id = settings["idp"]["login_form_update"]

    idp_username = settings["idp_external"]["test_realm"]["username"]
    idp_password = settings["idp_external"]["test_realm"]["password"]

    idp2_ip = settings["idp_external"]["ip"]
    idp2_port = settings["idp_external"]["port"]
    idp2_scheme = settings["idp_external"]["http_scheme"]

    keycloak_login_form_id = settings["idp"]["login_form_id"]

    # Common header for all the requests
    header = req.get_header()

    # We check that test works for both types of identity provider
    idp_brokers = [settings["idp"]["saml_broker"],
                   settings["idp"]["wsfed_broker"]]

    for idp_broker in idp_brokers:
        response = req.access_sp_ws_fed(logger, s, header, sp_ip, sp_port,
                                        sp_scheme, sp_path)
        session_cookie = response.cookies

        redirect_url = response.headers['Location']
        header_redirect_idp = {
            **header,
            'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
            'Referer': "{ip}:{port}".format(ip=sp_ip, port=sp_port)
        }
        response = req.redirect_to_idp(logger, s, redirect_url,
                                       header_redirect_idp, session_cookie)
        keycloak_cookie = response.cookies

        if response.status_code == HTTPStatus.UNAUTHORIZED and \
                response.headers['WWW-Authenticate'] == 'Negotiate':
            response = req.kerberos_form_fallback(
                logger, s, response, header,
                {**keycloak_cookie, **session_cookie})

        # In the login page we can choose to login with the external IDP
        soup = BeautifulSoup(response.content, 'html.parser')
        div = soup.find("div", {"id": "kc-social-providers"})

        assert div is not None

        # we can have several idp external; choose the one needed for the test
        all_li = div.find_all('li')
        for li in all_li:
            if li.span.text == idp_broker:
                external_idp_url = "{scheme}://{ip}:{port}".format(
                    scheme=idp_scheme, ip=idp_ip, port=idp_port) + li.a['href']

        assert external_idp_url is not None

        # Select to login with the external IDP
        req_choose_external_idp = Request(
            method='GET',
            url="{url}".format(url=external_idp_url),
            headers=header,
            cookies=keycloak_cookie)
        prepared_request = req_choose_external_idp.prepare()
        log_request(logger, req_choose_external_idp)
        response = s.send(prepared_request, verify=False,
                          allow_redirects=False)
        logger.debug(response.status_code)

        assert response.status_code == HTTPStatus.OK or \
            response.status_code == HTTPStatus.FOUND

        # get the HTTP binding response with the url to the external IDP
        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.body.form
        url_form = form.get('action')
        inputs = form.find_all('input')
        method_form = form.get('method')
        params = {}
        for input in inputs:
            params[input.get('name')] = input.get('value')

        header_redirect_external_idp = {
            **header,
            'Host': "{ip}:{port}".format(ip=idp2_ip, port=idp2_port),
            'Referer': "{ip}:{port}".format(ip=idp_ip, port=idp_port)
        }

        # Redirect to external IDP
        if idp_broker == "cloudtrust_saml":
            req_redirect_external_idp = Request(
                method=method_form,
                url="{url}".format(url=url_form),
                data=params,
                headers=header_redirect_external_idp)
        else:
            req_redirect_external_idp = Request(
                method=method_form,
                url="{url}".format(url=url_form),
                params=params,
                headers=header_redirect_external_idp)

        # url_parts = list(urlparse.urlparse(url_form))
        # query = dict(urlparse.parse_qsl(url_parts[4]))
        # query.update(params)
        # url_parts[4] = urlencode(query)
        # referer_url = urlparse.urlunparse(url_parts)
        referer_url = url_form

        prepared_request = req_redirect_external_idp.prepare()
        log_request(logger, req_redirect_external_idp)
        response = s.send(prepared_request, verify=False,
                          allow_redirects=False)
        logger.debug(response.status_code)

        # if we have an identity provider saml, we do an extra redirect
        if idp_broker == "cloudtrust_saml":
            redirect_url = response.headers['Location']
            keycloak_cookie2 = response.cookies
            response = req.redirect_to_idp(logger, s, redirect_url, header,
                                           keycloak_cookie2)
        else:
            keycloak_cookie2 = response.cookies

        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.find("form", {"id": keycloak_login_form_id})

        assert form is not None

        url_form = form.get('action')
        method_form = form.get('method')
        inputs = form.find_all('input')

        input_name = []
        for input in inputs:
            input_name.append(input.get('name'))

        assert "username" in input_name
        assert "password" in input_name

        credentials_data = {}
        credentials_data["username"] = idp_username
        credentials_data["password"] = idp_password

        # Authenticate to the external IDP
        response = req.send_credentials_to_idp(
            logger, s, header, idp2_ip, idp2_port, referer_url, url_form,
            credentials_data, {**keycloak_cookie2, **session_cookie},
            method_form)

        assert response.status_code == HTTPStatus.OK or \
            response.status_code == HTTPStatus.FOUND

        # get the HTTP binding response with the url to the broker IDP
        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.body.form
        url_form = form.get('action')
        inputs = form.find_all('input')
        method_form = form.get('method')

        token = {}
        for input in inputs:
            token[input.get('name')] = input.get('value')

        req_token_from_external_idp = Request(
            method=method_form,
            url="{url}".format(url=url_form),
            data=token,
            cookies=keycloak_cookie,
            headers=header)
        prepared_request = req_token_from_external_idp.prepare()
        log_request(logger, req_token_from_external_idp)
        response = s.send(prepared_request, verify=False,
                          allow_redirects=False)

        if response.status_code == HTTPStatus.FOUND:
            new_cookie = response.cookies
            redirect_url = response.headers['Location']
            response = req.redirect_to_idp(logger, s, redirect_url, header,
                                           {**keycloak_cookie, **new_cookie})
            response = req.broker_fill_in_form(logger, s, response, header,
                                               keycloak_cookie, new_cookie,
                                               idp_broker, idp_form_id)

        keycloak_cookie3 = response.cookies
        logger.debug(response.status_code)

        # Get the token from the broker IDP
        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.body.form
        url_form = form.get('action')
        inputs = form.find_all('input')
        method_form = form.get('method')

        token = {}
        for input in inputs:
            token[input.get('name')] = input.get('value')

        # Access SP with the token
        (response, sp_cookie) = req.access_sp_with_token(
            logger, s, header, sp_ip, sp_port, sp_scheme, idp_scheme, idp_ip,
            idp_port, method_form, url_form, token, session_cookie,
            keycloak_cookie2)

        assert response.status_code == HTTPStatus.OK

        # assert that we are logged in
        assert re.search(sp_message, response.text) is not None

        # User is logged in on SP1

        # Attempt to perform login on SP2
        response = req.access_sp_ws_fed(logger, s, header, sp2_ip, sp2_port,
                                        sp2_scheme, sp2_path)
        session_cookie2 = response.cookies

        redirect_url = response.headers['Location']
        header_redirect_idp = {
            **header,
            'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
            'Referer': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port)
        }
        response = req.redirect_to_idp(logger, s, redirect_url,
                                       header_redirect_idp,
                                       {**keycloak_cookie3})

        soup = BeautifulSoup(response.content, 'html.parser')
        form = soup.body.form
        url_form = form.get('action')
        inputs = form.find_all('input')
        method_form = form.get('method')

        token = {}
        for input in inputs:
            token[input.get('name')] = input.get('value')

        (response, sp2_cookie) = req.access_sp_with_token(
            logger, s, header, sp2_ip, sp2_port, sp2_scheme, idp_scheme,
            idp_ip, idp_port, method_form, url_form, token, session_cookie2,
            keycloak_cookie2)

        assert response.status_code == HTTPStatus.OK
        assert re.search(sp2_message, response.text) is not None
def _prepare_request(reddit_session, url, params, data, auth, files,
                     method=None):
    """Return a requests Request object that can be "prepared"."""
    # Requests using OAuth for authorization must switch to using the oauth
    # domain.
    if getattr(reddit_session, '_use_oauth', False):
        headers = {'Authorization': 'bearer %s' % reddit_session.access_token}
        config = reddit_session.config
        for prefix in (config.api_url, config.permalink_url):
            if url.startswith(prefix):
                if config.log_requests >= 1:
                    sys.stderr.write('substituting {} for {} in url\n'
                                     .format(config.oauth_url, prefix))
                url = config.oauth_url + url[len(prefix):]
                break
    else:
        headers = {}
    headers.update(reddit_session.http.headers)

    if method:
        pass
    elif data or files:
        method = 'POST'
    else:
        method = 'GET'

    # Log the request if logging is enabled
    if reddit_session.config.log_requests >= 1:
        sys.stderr.write('{0}: {1}\n'.format(method, url))
    if reddit_session.config.log_requests >= 2:
        if params:
            sys.stderr.write('params: {0}\n'.format(params))
        if data:
            sys.stderr.write('data: {0}\n'.format(data))
        if auth:
            sys.stderr.write('auth: {0}\n'.format(auth))
    # Prepare request
    request = Request(method=method, url=url, headers=headers, params=params,
                      auth=auth, cookies=reddit_session.http.cookies)
    if method == 'GET':
        return request
    # Most POST requests require adding `api_type` and `uh` to the data.
    if data is True:
        data = {}
    if isinstance(data, dict):
        if not auth:
            data.setdefault('api_type', 'json')
            if reddit_session.modhash:
                data.setdefault('uh', reddit_session.modhash)
    else:
        request.headers.setdefault('Content-Type', 'application/json')
    request.data = data
    request.files = files
    return request
def perform_request(self, nest_request, verbose_errors=True):
    url = self._make_nest_api_url(nest_request)

    data_payload = None
    http_code = None
    exception = None
    headers = None

    num_tries = nest_request.get_num_tries()
    no_success = True
    while num_tries > 0 and no_success:
        # if not our first try, wait for the delay period
        if num_tries != nest_request.get_num_tries():
            time.sleep(nest_request.get_retry_delay_secs())
        num_tries = num_tries - 1

        try:
            headers = nest_request.get_headers()
            # set the optional auth_token
            if self.auth_token is not None:
                headers = headers.copy()  # don't modify caller's object
                headers['Authorization'] = 'Bearer ' + self.auth_token

            # see http://docs.python-requests.org/en/latest/user/advanced/#request-and-response-objects
            request = Request(
                nest_request.get_http_op(),
                url,
                data=nest_request.get_data_payload(),
                files=nest_request.get_files_payload(),
                headers=headers,
                params=nest_request.get_query_params(),
            )
            prepped_request = self.session.prepare_request(request)

            op = nest_request.get_http_op()
            full_url = prepped_request.url
            if VERBOSE:
                log(op + ': ' + str(full_url))
                log('request body: ')
                log(str(nest_request.get_data_payload()))

            resp = self.session.send(
                prepped_request,
                timeout=nest_request.get_timeout_secs(),
                allow_redirects=True,
                verify=False)

            data_payload = resp.text
            if VERBOSE:
                log('response code: ' + str(resp.status_code))
                if resp.headers['Content-Type'] == 'text/html; charset=utf-8':
                    log('response body: <<suppressing html response>>')
                else:
                    log('response body: ' + resp.text)
            http_code = resp.status_code
            headers = resp.headers
        except Timeout as te:
            exception = te
        except ConnectionError as te:
            exception = te
        except Exception as e:
            traceback.print_exc()
            exception = e

        nest_response = NestHttpResponse(nest_request,
                                         http_code=http_code,
                                         exception=exception,
                                         data_payload=data_payload,
                                         headers=headers)
        if nest_response.did_succeed():
            no_success = False
        else:
            if verbose_errors:
                log(nest_response.get_error_message())
                log("num tries remaining: " + str(num_tries))

    return nest_response
def extract_page(self, parameters):
    from requests import Request, Session
    from random import randint
    from time import sleep

    # Check if we have an url and filename
    if 'url' not in parameters:
        return {
            'error': True,
            'error_message': 'No URL in the parameters',
        }
    if 'filename' not in parameters:
        return {
            'error': True,
            'error_message': 'No FILENAME in the parameters',
        }

    # print(' Url: ' + parameters['url'])  # TODO Test output to debug the code

    if 'requestType' not in parameters:
        parameters['requestType'] = 'GET'

    # Set the request Headers
    headers = parameters['headers'] if 'headers' in parameters else None
    # GET Parameters
    params = parameters['params'] if 'params' in parameters else None
    # Request Body for 'x-url-encoded' parameters
    data = parameters['data'] if 'data' in parameters else None
    # Set the request Cookies
    cookies = parameters['cookies'] if 'cookies' in parameters else None
    # Set the request PoolManager
    proxy = parameters['proxy'] if 'proxy' in parameters else None
    # Set the max retries number
    max_retries = parameters['retries'] if 'retries' in parameters else 1
    retries_wait_range = parameters['retries_wait_range'] \
        if 'retries_wait_range' in parameters else [3, 5]
    current_retry = 1
    html_source = None

    # Prepare the desired Request and make the download
    session = Session()
    # html_source = requests.request('GET', url, headers=headers, proxies=self.proxies, cookies=self.cookieJar)
    desired_request = Request(parameters['requestType'], parameters['url'],
                              headers=headers, params=params, data=data,
                              cookies=cookies)
    prepared_request = desired_request.prepare()

    error = {
        'error': True,
        'error_message': 'First request',
    }
    while error['error'] and current_retry <= max_retries:
        html_source = session.send(prepared_request, proxies=proxy)

        # Check for error after the data download
        error = self.extraction_error_check(html_source)
        if error['error']:
            if current_retry == max_retries:
                filename = 'error-' + parameters['filename']
            else:
                sleep(randint(retries_wait_range[0], retries_wait_range[1]))
                filename = 'retry-' + str(current_retry) + '-' + parameters['filename']
        else:
            filename = parameters['filename']

        # Save the downloaded data into the HDD
        self.save_source_to_file(html_source,
                                 os.path.dirname(self.tempDir) + '/' + filename)

        # Increase the request number and sleep a random time from the range
        current_retry = current_retry + 1

    return html_source
def test_CT_TC_WS_FED_IDP_LOGOUT_PERIMETRIC(self, settings, login_sso_form):
    """
    Scenario: user is logged in on several SPs. The user logs out of one SP.
    Access to all the other SPs should require a new log in.
    :param settings:
    :return:
    """
    s = Session()

    # Service provider settings
    sp_ip = settings["service_provider"]["ip"]
    sp_port = settings["service_provider"]["port"]
    sp_scheme = settings["service_provider"]["http_scheme"]
    sp_logout_path = settings["service_provider"]["logout_path"]
    sp_message = settings["service_provider"]["logged_out_message"]
    sp_path = settings["service_provider"]["path"]

    # Service provider 2 settings
    sp2_ip = settings["service_provider2"]["ip"]
    sp2_port = settings["service_provider2"]["port"]
    sp2_scheme = settings["service_provider2"]["http_scheme"]
    sp2_logout_path = settings["service_provider2"]["logout_path"]
    sp2_path = settings["service_provider2"]["path"]
    sp2_message = settings["service_provider2"]["logged_in_message"]

    # Identity provider settings
    idp_ip = settings["identity_provider"]["ip"]
    idp_port = settings["identity_provider"]["port"]
    idp_scheme = settings["identity_provider"]["http_scheme"]

    # Common header for all the requests
    header = {
        'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        'Accept-Encoding': "gzip, deflate",
        'Accept-Language': "en-US,en;q=0.5",
        'User-Agent': "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0",
        'Connection': "keep-alive",
        'Upgrade-Insecure-Requests': "1",
    }

    # Perform login using the fixture login_sso_form
    sp_cookie, keycloak_cookie = login_sso_form

    # User is logged in on SP1

    # Perform login on SP2
    response = req.access_sp_ws_fed(s, header, sp2_ip, sp2_port, sp2_scheme,
                                    sp2_path)
    session_cookie = response.cookies

    redirect_url = response.headers['Location']
    header_redirect_idp = {
        **header,
        'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
        'Referer': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port)
    }
    response = req.redirect_to_idp(s, redirect_url, header_redirect_idp,
                                   {**keycloak_cookie})

    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form
    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    ws_fed_response = {}
    for input in inputs:
        ws_fed_response[input.get('name')] = input.get('value')

    (response, sp2_cookie) = req.access_sp_with_token(
        s, header, sp2_ip, sp2_port, idp_scheme, idp_ip, idp_port,
        method_form, url_form, ws_fed_response, session_cookie,
        keycloak_cookie)

    # req_get_sp_login_reload_page = Request(
    #     method='GET',
    #     url="{scheme}://{ip}:{port}/{path}".format(
    #         scheme=sp2_scheme,
    #         port=sp2_port,
    #         ip=sp2_ip,
    #         path=sp2_path
    #     ),
    #     headers=header_sp2_reload_page,
    #     cookies={**session_cookie}
    # )
    #
    # prepared_request = req_get_sp_login_reload_page.prepare()
    #
    # logger.debug(
    #     json.dumps(
    #         prepared_request_to_json(req_get_sp_login_reload_page),
    #         sort_keys=True,
    #         indent=4,
    #         separators=(',', ': ')
    #     )
    # )
    #
    # response = s.send(prepared_request, verify=False, allow_redirects=False)
    #
    # logger.debug(response.status_code)
    #
    # # the user is logged in and refreshing the page will return an OK
    # assert response.status_code == 200

    # User is now logged in on both applications: SP1 and SP2

    # Logout from the first application
    header_sp_logout_page = {
        **header,
        'Host': "{ip}:{port}".format(ip=sp_ip, port=sp_port),
        'Referer': "{scheme}://{ip}:{port}".format(scheme=sp_scheme, ip=sp_ip,
                                                   port=sp_port)
    }
    req_get_sp_logout_page = Request(
        method='GET',
        url="{scheme}://{ip}:{port}/{path}".format(scheme=sp_scheme,
                                                   port=sp_port,
                                                   ip=sp_ip,
                                                   path=sp_logout_path),
        headers=header_sp_logout_page,
        cookies={**sp_cookie})
    prepared_request = req_get_sp_logout_page.prepare()

    logger.debug(
        json.dumps(prepared_request_to_json(req_get_sp_logout_page),
                   sort_keys=True, indent=4, separators=(',', ': ')))

    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    # new session cookie
    session_cookie2 = response.cookies

    redirect_url = response.headers['Location']
    req_sp_logout_redirect = Request(method='GET',
                                     url=redirect_url,
                                     headers=header_sp_logout_page,
                                     cookies={**sp_cookie})
    prepared_request = req_sp_logout_redirect.prepare()

    logger.debug(
        json.dumps(prepared_request_to_json(req_sp_logout_redirect),
                   sort_keys=True, indent=4, separators=(',', ': ')))

    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    redirect_url = response.headers['Location']
    response = req.redirect_to_idp(s, redirect_url, header, sp_cookie)

    assert response.status_code == 200

    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form
    url_form = form.get('action')
    method_form = form.get('method')
    inputs = form.find_all('input')

    # Send ws fed response
    token = {}
    for input in inputs:
        token[input.get('name')] = input.get('value')

    (response, cookie) = req.access_sp_with_token(s, header, sp_ip, sp_port,
                                                  idp_scheme, idp_ip, idp_port,
                                                  method_form, url_form, token,
                                                  sp_cookie, sp_cookie)

    assert response.status_code == 200
    assert re.search(sp_message, response.text) is not None

    # Check that when the user accesses the secured page of SP1 with the old
    # session cookie, he is redirected to log in
    req_get_sp_login_reload_page = Request(
        method='GET',
        url="{scheme}://{ip}:{port}/{path}".format(scheme=sp_scheme,
                                                   port=sp_port,
                                                   ip=sp_ip,
                                                   path=sp_path),
        headers=header,
        cookies={**session_cookie})
    prepared_request = req_get_sp_login_reload_page.prepare()

    logger.debug(
        json.dumps(prepared_request_to_json(req_get_sp_login_reload_page),
                   sort_keys=True, indent=4, separators=(',', ': ')))

    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    # Assert that the refresh page gives a 302, which signals that the user
    # is logged out of SP
    assert response.status_code == 302

    # Check if the user is logged out from SP2: perform a refresh of the
    # page; we expect to get a redirect
    header_sp2_reload_page = {
        **header,
        'Host': "{ip}:{port}".format(ip=sp2_ip, port=sp2_port),
    }
    req_get_sp_login_reload_page = Request(
        method='GET',
        url="{scheme}://{ip}:{port}/{path}".format(scheme=sp2_scheme,
                                                   port=sp2_port,
                                                   ip=sp2_ip,
                                                   path=sp2_path),
        headers=header_sp2_reload_page,
        cookies={**session_cookie})
    prepared_request = req_get_sp_login_reload_page.prepare()

    logger.debug(
        json.dumps(prepared_request_to_json(req_get_sp_login_reload_page),
                   sort_keys=True, indent=4, separators=(',', ': ')))

    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    # Assert that the refresh page gives a 302, which signals that the user
    # is logged out of SP2
    assert response.status_code == 302
def login_broker_sso_form(settings, pytestconfig):
    """
    Fixture to perform the log in when we have a broker and an external IDP
    :param settings: settings of the IDP and SP
    :param pytestconfig: fixture that provides the standard used for log in: WSFED or SAML
    :return:
    """
    standard = pytestconfig.getoption('standard')

    s = Session()

    # Standard
    if standard == "WSFED":
        client = "sps_wsfed"
        idp_broker = settings["idp"]["saml_broker"]
    elif standard == "SAML":
        client = "sps_saml"
        idp_broker = settings["idp"]["wsfed_broker"]

    # Service provider settings
    sp = settings[client][0]
    sp_ip = sp["ip"]
    sp_port = sp["port"]
    sp_scheme = sp["http_scheme"]
    sp_path = sp["path"]

    # Identity provider settings
    idp_ip = settings["idp"]["ip"]
    idp_port = settings["idp"]["port"]
    idp_scheme = settings["idp"]["http_scheme"]

    idp2_ip = settings["idp_external"]["ip"]
    idp2_port = settings["idp_external"]["port"]
    idp2_scheme = settings["idp_external"]["http_scheme"]

    idp_username = settings["idp_external"]["test_realm"]["username"]
    idp_password = settings["idp_external"]["test_realm"]["password"]

    keycloak_login_form_id = settings["idp"]["login_form_id"]

    # Common header for all the requests
    header = req.get_header()

    (session_cookie, response) = req.access_sp_saml(logger, s, header, sp_ip,
                                                    sp_port, sp_scheme,
                                                    sp_path, idp_ip, idp_port)

    # store the cookie received from keycloak
    keycloak_cookie = response.cookies

    redirect_url = response.headers['Location']
    header_redirect_idp = {
        **header,
        'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
        'Referer': "{ip}:{port}".format(ip=sp_ip, port=sp_port)
    }
    response = req.redirect_to_idp(logger, s, redirect_url,
                                   header_redirect_idp, keycloak_cookie)

    # In the login page we can choose to login with the external IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    div = soup.find("div", {"id": "kc-social-providers"})

    # we can have several idp external; choose the one needed for the test
    all_li = div.find_all('li')
    for li in all_li:
        if li.span.text == idp_broker:
            external_idp_url = "{scheme}://{ip}:{port}".format(
                scheme=idp_scheme, ip=idp_ip, port=idp_port) + li.a['href']

    # Select to login with the external IDP
    req_choose_external_idp = Request(
        method='GET',
        url="{url}".format(url=external_idp_url),
        headers=header,
        cookies=keycloak_cookie)
    prepared_request = req_choose_external_idp.prepare()
    log_request(logger, req_choose_external_idp)
    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    # get the HTTP binding response with the url to the external IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form
    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')
    params = {}
    for input in inputs:
        params[input.get('name')] = input.get('value')

    header_redirect_external_idp = {
        **header,
        'Host': "{ip}:{port}".format(ip=idp2_ip, port=idp2_port),
        'Referer': "{ip}:{port}".format(ip=idp_ip, port=idp_port)
    }

    # Redirect to external IDP
    if idp_broker == "cloudtrust_saml":
        req_redirect_external_idp = Request(
            method=method_form,
            url="{url}".format(url=url_form),
            data=params,
            headers=header_redirect_external_idp)
    else:
        req_redirect_external_idp = Request(
            method=method_form,
            url="{url}".format(url=url_form),
            params=params,
            headers=header_redirect_external_idp)

    referer_url = url_form
    prepared_request = req_redirect_external_idp.prepare()
    log_request(logger, req_redirect_external_idp)
    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    # if we have an identity provider saml, we do an extra redirect
    if idp_broker == "cloudtrust_saml":
        redirect_url = response.headers['Location']
        keycloak_cookie2 = response.cookies
        response = req.redirect_to_idp(logger, s, redirect_url, header,
                                       keycloak_cookie2)
    else:
        keycloak_cookie2 = response.cookies

    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.find("form", {"id": keycloak_login_form_id})
    url_form = form.get('action')
    method_form = form.get('method')
    inputs = form.find_all('input')

    input_name = []
    for input in inputs:
        input_name.append(input.get('name'))

    credentials_data = {}
    credentials_data["username"] = idp_username
    credentials_data["password"] = idp_password

    # Authenticate to the external IDP
    response = req.send_credentials_to_idp(
        logger, s, header, idp2_ip, idp2_port, referer_url, url_form,
        credentials_data, {**keycloak_cookie2, **session_cookie}, method_form)

    keycloak_cookie3 = response.cookies

    # get the HTTP binding response with the url to the broker IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form
    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    token = {}
    for input in inputs:
        token[input.get('name')] = input.get('value')

    req_token_from_external_idp = Request(
        method=method_form,
        url="{url}".format(url=url_form),
        data=token,
        cookies=keycloak_cookie,
        headers=header)
    prepared_request = req_token_from_external_idp.prepare()
    log_request(logger, req_token_from_external_idp)
    response = s.send(prepared_request, verify=False, allow_redirects=False)
    logger.debug(response.status_code)

    if response.status_code == HTTPStatus.FOUND:
        # user logs in for the first time and has to fill in a form
        response = req.broker_fill_in_form(logger, s, response, header,
                                           keycloak_cookie, idp_broker,
                                           settings)

    # Get the token (SAML response) from the broker IDP
    soup = BeautifulSoup(response.content, 'html.parser')
    form = soup.body.form
    url_form = form.get('action')
    inputs = form.find_all('input')
    method_form = form.get('method')

    token = {}
    for input in inputs:
        token[input.get('name')] = input.get('value')

    # Access SP with the token
    (response, sp_cookie) = req.access_sp_with_token(
        logger, s, header, sp_ip, sp_port, sp_scheme, idp_scheme, idp_ip,
        idp_port, method_form, url_form, token, session_cookie,
        keycloak_cookie)

    return sp_cookie, keycloak_cookie3, response.status_code
def delete_realm(settings):
    """
    Fixture to perform the deletion of a realm from Keycloak
    :param settings:
    :return:
    """
    # Identity provider settings
    idp_ip = settings["idp"]["ip"]
    idp_port = settings["idp"]["port"]
    idp_scheme = settings["idp"]["http_scheme"]

    idp_username = settings["idp"]["master_realm"]["username"]
    idp_password = settings["idp"]["master_realm"]["password"]
    idp_client_id = settings["idp"]["master_realm"]["client_id"]

    idp_realm_id = settings["idp"]["master_realm"]["name"]
    idp_realm_test = settings["idp"]["test_realm"]["name"]

    s = Session()

    access_token_data = {
        "client_id": idp_client_id,
        "username": idp_username,
        "password": idp_password,
        "grant_type": "password"
    }

    access_token = req.get_access_token(logger, s, access_token_data,
                                        idp_scheme, idp_port, idp_ip,
                                        idp_realm_id)

    header = {
        'Accept': "application/json,text/plain, */*",
        'Accept-Encoding': "gzip, deflate",
        'Accept-Language': "en-US,en;q=0.5",
        'User-Agent': "Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:59.0) Gecko/20100101 Firefox/59.0",
        'Connection': "keep-alive",
        'Content-Type': "application/json",
        'Referer': "{scheme}://{ip}:{port}/auth/admin/master/console/".format(
            scheme=idp_scheme, ip=idp_ip, port=idp_port),
        'Host': "{ip}:{port}".format(ip=idp_ip, port=idp_port),
        "DNT": "1",
        "Keep-Alive": "timeout=15, max=3",
        'Authorization': 'Bearer ' + access_token
    }

    req_delete_realm = Request(
        method='DELETE',
        url="{scheme}://{ip}:{port}/auth/admin/realms/{realm}".format(
            scheme=idp_scheme,
            ip=idp_ip,
            port=idp_port,
            realm=idp_realm_test),
        headers=header,
    )

    prepared_request = req_delete_realm.prepare()
    log_request(logger, req_delete_realm)
    response = s.send(prepared_request, verify=False)
    logger.debug(response.status_code)

    return response
def get_activity(self):
    now = datetime.now(timezone.utc).astimezone().isoformat()
    influx_payload = []
    params = {'cmd': 'get_activity'}

    req = self.session.prepare_request(
        Request('GET', self.server.url + self.endpoint, params=params))
    g = connection_handler(self.session, req, self.server.verify_ssl)

    if not g:
        return

    get = g['response']['data']

    fields = itemgetter_with_default(**TautulliStream._field_defaults)
    try:
        sessions = [TautulliStream(*fields(session))
                    for session in get['sessions']]
    except TypeError as e:
        self.logger.error(
            'TypeError has occurred : %s while creating TautulliStream structure', e)
        return

    for session in sessions:
        # Check to see if ip_address_public attribute exists as it was
        # introduced in v2
        try:
            getattr(session, 'ip_address_public')
        except AttributeError:
            self.logger.error(
                'Public IP attribute missing!!! Do you have an old version of Tautulli (v1)?')
            exit(1)

        try:
            geodata = self.geoiphandler.lookup(session.ip_address_public)
        except (ValueError, AddressNotFoundError):
            self.logger.debug('Public IP missing for Tautulli session...')
            if not self.my_ip:
                # Try the fallback ip in the config file
                try:
                    self.logger.debug('Attempting to use the fallback IP...')
                    geodata = self.geoiphandler.lookup(self.server.fallback_ip)
                except AddressNotFoundError as e:
                    self.logger.error('%s', e)
                    self.my_ip = self.session.get('http://ip.42.pl/raw').text
                    self.logger.debug('Looked the public IP and set it to %s',
                                      self.my_ip)
                    geodata = self.geoiphandler.lookup(self.my_ip)
            else:
                geodata = self.geoiphandler.lookup(self.my_ip)

        if not all([geodata.location.latitude, geodata.location.longitude]):
            latitude = 37.234332396
            longitude = -115.80666344
        else:
            latitude = geodata.location.latitude
            longitude = geodata.location.longitude

        if not geodata.city.name:
            location = '👽'
        else:
            location = geodata.city.name

        decision = session.transcode_decision
        if decision == 'copy':
            decision = 'direct stream'

        video_decision = session.stream_video_decision
        if video_decision == 'copy':
            video_decision = 'direct stream'
        elif video_decision == '':
            video_decision = 'Music'

        quality = session.stream_video_resolution
        if not quality:
            quality = session.container.upper()
        elif quality in ('SD', 'sd', '4k'):
            quality = session.stream_video_resolution.upper()
        elif session.stream_video_full_resolution:
            quality = session.stream_video_full_resolution
        else:
            quality = session.stream_video_resolution + 'p'

        player_state = session.state.lower()
        if player_state == 'playing':
            player_state = 0
        elif player_state == 'paused':
            player_state = 1
        elif player_state == 'buffering':
            player_state = 3

        # Platform Version Overrides
        product_version = session.product_version
        if session.platform in ('Roku', 'osx', 'windows'):
            product_version = session.product_version.split('-')[0]

        # Platform Overrides
        platform_name = session.platform
        if platform_name in 'osx':
            platform_name = 'macOS'
        if platform_name in 'windows':
            platform_name = 'Windows'

        # TV Show episode renaming
        if session.media_type == "episode":
            season = session.parent_title.split(' ')
            # Season number padding
            season = "S" + season[1].zfill(2)
            # Episode number padding
            if len(session.media_index) == 1:
                episode = "E" + session.media_index.zfill(2)
            else:
                episode = "E" + session.media_index
            # Update title for episodes only
            ep_title = session.grandparent_title + " - " + season + episode + " - " + session.title
        else:
            ep_title = session.full_title

        hash_id = hashit(
            f'{session.session_id}{session.session_key}{session.username}{session.full_title}')
        influx_payload.append({
            "measurement": "Tautulli",
            "tags": {
                "type": "Session",
                "session_id": session.session_id,
                "friendly_name": session.friendly_name,
                "username": session.username,
                "title": ep_title,
                "product": session.product,
                "platform": platform_name,
                "product_version": product_version,
                "quality": quality,
                "video_decision": video_decision.title(),
                "transcode_decision": decision.title(),
                "transcode_hw_decoding": session.transcode_hw_decoding,
                "transcode_hw_encoding": session.transcode_hw_encoding,
                "media_type": session.media_type.title(),
                "audio_codec": session.audio_codec.upper(),
                "audio_profile": session.audio_profile.upper(),
                "stream_audio_codec": session.stream_audio_codec.upper(),
                "quality_profile": session.quality_profile,
                "progress_percent": session.progress_percent,
                "region_code": geodata.subdivisions.most_specific.iso_code,
                "location": location,
                "full_location": f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}',
                "latitude": latitude,
                "longitude": longitude,
                "player_state": player_state,
                "device_type": platform_name,
                "relayed": session.relayed,
                "secure": session.secure,
                "server": self.server.id
            },
            "time": now,
            "fields": {
                "hash": hash_id
            }
        })

    influx_payload.append({
        "measurement": "Tautulli",
        "tags": {
            "type": "current_stream_stats",
            "server": self.server.id
        },
        "time": now,
        "fields": {
            "stream_count": int(get['stream_count']),
            "total_bandwidth": int(get['total_bandwidth']),
            "wan_bandwidth": int(get['wan_bandwidth']),
            "lan_bandwidth": int(get['lan_bandwidth']),
            "transcode_streams": int(get['stream_count_transcode']),
            "direct_play_streams": int(get['stream_count_direct_play']),
            "direct_streams": int(get['stream_count_direct_stream'])
        }
    })

    self.dbmanager.write_points(influx_payload)
def tile_url(layer, z, x, y, style=None, internal=True):
    """Construct an actual tile request to QGIS Server.

    Unlike tile_url_format, this method returns the url for requesting a
    tile, with all parameters filled in.

    :param layer: Layer to use
    :type layer: Layer

    :param z: TMS coordinate, zoom parameter
    :type z: int, str

    :param x: TMS coordinate, longitude parameter
    :type x: int, str

    :param y: TMS coordinate, latitude parameter
    :type y: int, str

    :param style: Layer style to choose
    :type style: str

    :param internal: Flag to switch between the public url and the internal
        url. The public url is served by Django GeoNode (proxied).
    :type internal: bool

    :return: Tile url
    :rtype: str
    """
    try:
        qgis_layer = QGISServerLayer.objects.get(layer=layer)
    except QGISServerLayer.DoesNotExist:
        msg = 'No QGIS Server Layer for existing layer {0}'.format(layer.name)
        logger.debug(msg)
        raise

    x = int(x)
    y = int(y)
    z = int(z)

    # Compute the tile's bounding box, then call the WMS
    top, left = num2deg(x, y, z)
    bottom, right = num2deg(x + 1, y + 1, z)

    transform = CoordTransform(SpatialReference(4326), SpatialReference(3857))
    top_left_corner = Point(left, top, srid=4326)
    bottom_right_corner = Point(right, bottom, srid=4326)
    top_left_corner.transform(transform)
    bottom_right_corner.transform(transform)

    bottom = bottom_right_corner.y
    right = bottom_right_corner.x
    top = top_left_corner.y
    left = top_left_corner.x

    bbox = ','.join(str(val) for val in [left, bottom, right, top])

    if not style:
        style = 'default'
    if style not in [s.name for s in qgis_layer.styles.all()]:
        if qgis_layer.default_style:
            style = qgis_layer.default_style.name

    query_string = {
        'SERVICE': 'WMS',
        'VERSION': '1.3.0',
        'REQUEST': 'GetMap',
        'BBOX': bbox,
        'CRS': 'EPSG:3857',
        'WIDTH': '256',
        'HEIGHT': '256',
        'MAP': qgis_layer.qgis_project_path,
        'LAYERS': layer.name,
        'STYLE': style,
        'FORMAT': 'image/png',
        'TRANSPARENT': 'true',
        'DPI': '96',
        'MAP_RESOLUTION': '96',
        'FORMAT_OPTIONS': 'dpi:96'
    }

    qgis_server_url = qgis_server_endpoint(internal)
    url = Request('GET', qgis_server_url, params=query_string).prepare().url
    return url
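# num2deg is used by tile_url above but not shown in this section. A minimal
# sketch follows, assuming the standard OSM/slippy-map convention of returning
# the (lat, lon) of the tile's north-west corner, which matches the
# `top, left = num2deg(x, y, z)` call site; the project's actual helper may
# differ in signature or rounding.
import math

def num2deg(xtile, ytile, zoom):
    """Convert tile coordinates at `zoom` to (lat_deg, lon_deg) of the NW corner."""
    n = 2.0 ** zoom
    lon_deg = xtile / n * 360.0 - 180.0
    lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * ytile / n)))
    return math.degrees(lat_rad), lon_deg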
def get_project(self, project):
    """Retrieves a project."""
    url = self.gerrit.url('PROJECT', project_name=project)
    r = Request(method='GET', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
def __execute(self, client=None, timeout=None, **kwargs):
    if not kwargs.get('type'):
        kwargs['type'] = 'LIMIT'
    request = Request('POST', self.__create_order_url(client), json=kwargs)
    # Pass timeout through, as the sibling methods do; it was previously ignored
    response = self.__send_request(request, timeout)
    return response.json()
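# __send_request is referenced by the order methods above but not defined in
# this section. A minimal sketch, assuming it simply prepares and sends the
# Request on a fresh Session; the real client may attach signatures or API
# keys before sending.
def __send_request(self, request, timeout=None):
    prepared = request.prepare()
    with Session() as session:
        response = session.send(prepared, timeout=timeout)
        response.raise_for_status()  # surface HTTP errors to the caller
        return response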
def get_account(self, client=None, timeout=None):
    request = Request('GET', self.__create_url(client, 'accounts'))
    response = self.__send_request(request, timeout)
    return response.json()
def __change_clients_status(self, status, timeout=None):
    request = Request('PATCH', self.__create_url(None, 'clients'),
                      json={'status': status})
    self.__send_request(request, timeout)
def get_records(
        output,
        email,
        unpack=False,
        process_level='raw',
        group_by='event',
        minpga=None,
        maxpga=None,
        min_station_dist=None,
        max_station_dist=None,
        network=None,
        station_type='Ground',
        include_inactive=False,
        station_name=None,
        min_station_latitude=None,
        max_station_latitude=None,
        min_station_longitude=None,
        max_station_longitude=None,
        station_latitude=None,
        station_longitude=None,
        radius_km=None,
        station_code=None,
        event_name=None,
        minmag=None,
        maxmag=None,
        fault_type=None,
        startdate=None,
        enddate=None,
        min_event_latitude=None,
        max_event_latitude=None,
        min_event_longitude=None,
        max_event_longitude=None,
        event_latitude=None,
        event_longitude=None,
        event_radius=None,
        eventid=None,
):
    """Retrieve strong motion waveform records from CESMD website.

    Args:
        output (str): Filename or directory where downloaded zip data will
            be written.
        unpack (bool): If True, all zipped files will be unpacked (output
            will become a directory name).
        email (str): Email address of requesting user.
        process_level (str): One of 'raw', 'processed', 'plots', 'all'.
        group_by (str): One of 'event', 'station'.
        minpga (float): Minimum PGA value.
        maxpga (float): Maximum PGA value.
        min_station_dist (float): Minimum station distance from epicenter.
        max_station_dist (float): Maximum station distance from epicenter.
        network (str): Source network of strong motion data.
        station_type (str): Type of strong motion station (array, dam, etc.)
        include_inactive (bool): Include results from stations that are no
            longer active.
        station_name (str): Search only for station matching input name.
        min_station_latitude (float): Station latitude min when using a box
            search.
        max_station_latitude (float): Station latitude max when using a box
            search.
        min_station_longitude (float): Station longitude min when using a
            box search.
        max_station_longitude (float): Station longitude max when using a
            box search.
        station_latitude (float): Center latitude for station search.
        station_longitude (float): Center longitude for station search.
        radius_km (float): Radius (km) for station search.
        station_code (str): Particular station code to search for.
        event_name (str): Earthquake name to search for.
        minmag (float): Magnitude minimum when using a magnitude search.
        maxmag (float): Magnitude maximum when using a magnitude search.
        fault_type (str): Fault type.
        startdate (str): Start date/time in YYYY-MM-DD HH:MM:SS format.
        enddate (str): End date/time in YYYY-MM-DD HH:MM:SS format.
        min_event_latitude (float): Event latitude min when using a box
            search.
        max_event_latitude (float): Event latitude max when using a box
            search.
        min_event_longitude (float): Event longitude min when using a box
            search.
        max_event_longitude (float): Event longitude max when using a box
            search.
        event_latitude (float): Center earthquake latitude for radius search.
        event_longitude (float): Center earthquake longitude for radius
            search.
        event_radius (float): Earthquake search radius (km).
        eventid (str): NEIC or other ANSS event ID.

    Returns:
        tuple: (Top level output directory, list of data files)

    Raises:
        KeyError
    """
    # getting the inputargs must be the first line of the method!
    inputargs = locals().copy()
    del inputargs['output']
    del inputargs['unpack']

    # note: this only supports one of the options or all of them,
    # no other combinations. ??
    if process_level not in PROCESS_LEVELS:
        fmt = 'Only process levels of %s are supported (%s was input)'
        tpl = (','.join(PROCESS_LEVELS), process_level)
        raise KeyError(fmt % tpl)

    if group_by not in GROUP_OPTIONS:
        fmt = 'Only group options of %s are supported (%s was input)'
        tpl = (','.join(GROUP_OPTIONS), group_by)
        raise KeyError(fmt % tpl)

    # determine which network the user wanted
    if network is not None and network not in NETWORKS:
        fmt = 'Network with ID %s not found in list of supported networks.'
        tpl = network
        raise KeyError(fmt % tpl)

    if station_type is not None and station_type not in STATION_TYPES:
        fmt = 'Station type %s not found in list of supported types.'
        tpl = station_type
        raise KeyError(fmt % tpl)

    # convert 'Ground' to 'G', for example
    inputargs['station_type'] = STATION_TYPES[inputargs['station_type']]

    # check against list of fault types
    if fault_type is not None and fault_type not in FAULT_TYPES:
        fmt = 'Fault type %s not found in supported fault types %s.'
        tpl = (fault_type, ','.join(FAULT_TYPES))
        raise KeyError(fmt % tpl)

    # make sure only one method is being used to select stations geographically
    if min_station_latitude is not None and station_latitude is not None:
        raise Exception(
            'Select stations either by bounding box or by radius, not both.')

    # make sure only one method is being used to select events geographically
    if min_event_latitude is not None and event_latitude is not None:
        raise Exception(
            'Select events either by bounding box or by radius, not both.')

    # now convert process levels to the strings the web service expects
    levels = {'processed': 'P', 'raw': 'R', 'plots': 'T', 'all': 'P,R,T'}
    inputargs['process_level'] = levels[process_level]

    # now convert input args to the parameter keys the web service expects
    params = {}
    for key, value in inputargs.items():
        if key in KEY_TABLE:
            params[KEY_TABLE[key]] = value
        else:
            params[key] = value

    # convert all booleans to the strings 'true' and 'false'
    for key, value in params.items():
        if isinstance(value, bool):
            params[key] = 'true' if value else 'false'

    # add in a couple of parameters that seem to be required
    params['orderby'] = 'epidist-asc'
    params['nodata'] = '404'
    params['rettype'] = 'dataset'

    session = Session()
    request = Request('GET', URL_TEMPLATE, params=params).prepare()
    url = request.url

    response = session.get(request.url)
    if response.status_code != 200:
        fmt = 'Your url "%s" returned a status code of %i with message: "%s"'
        raise ConnectionError(fmt % (url, response.status_code, response.reason))

    if unpack:
        if not os.path.exists(output):
            os.makedirs(output)
        fbytes = io.BytesIO(response.content)
        myzip = zipfile.ZipFile(fbytes, mode='r')
        members = myzip.namelist()
        for member in members:
            finfo = myzip.getinfo(member)
            if finfo.is_dir():
                continue
            if not member.lower().endswith('.zip'):
                fin = myzip.open(member)
                flatfile = member.replace('/', '_')
                outfile = os.path.join(output, flatfile)
                with open(outfile, 'wb') as fout:
                    fout.write(fin.read())
                fin.close()
            else:
                zfiledata = io.BytesIO(myzip.read(member))
                try:
                    tmpzip = zipfile.ZipFile(zfiledata, mode='r')
                    tmp_members = tmpzip.namelist()
                    for tmp_member in tmp_members:
                        tfinfo = tmpzip.getinfo(tmp_member)
                        if not tfinfo.is_dir():
                            fin = tmpzip.open(tmp_member)
                            flatfile = tmp_member.replace('/', '_')
                            parent, _ = os.path.splitext(member)
                            parent = parent.replace('/', '_')
                            # sometimes the member ends with .zip.zip (??)
                            parent = parent.replace('.zip', '')
                            datadir = os.path.join(output, parent)
                            if not os.path.exists(datadir):
                                os.makedirs(datadir)
                            outfile = os.path.join(datadir, flatfile)
                            with open(outfile, 'wb') as fout:
                                fout.write(fin.read())
                            fin.close()
                    tmpzip.close()
                    zfiledata.close()
                except Exception as e:
                    fmt = ('Could not unpack sub-zip file "%s" due to error '
                           '"%s". Skipping.')
                    print(fmt % (member, str(e)))
                    continue
        myzip.close()

        datafiles = []
        for root, fdir, files in os.walk(output):
            for tfile in files:
                if not tfile.endswith('.json'):
                    datafile = os.path.join(root, tfile)
                    datafiles.append(datafile)

        return (os.path.abspath(output), datafiles)
    else:
        if not output.endswith('.zip'):
            output += '.zip'
        # binary mode cannot take an encoding argument
        with open(output, 'wb') as f:
            f.write(response.content)
        return (output, [])
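# A hedged usage sketch for get_records. The output path, email and event ID
# below are illustrative placeholders, not values from the source.
if __name__ == '__main__':
    outdir, files = get_records(
        '/tmp/cesmd_data',            # output directory (since unpack=True)
        'user@example.com',           # requesting user's email
        unpack=True,
        process_level='processed',
        eventid='ci12345678',         # hypothetical ANSS event ID
    )
    print('Downloaded %i files to %s' % (len(files), outdir))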
def cancel_all(self, client=None, timeout=None):
    request = Request('DELETE', self.__create_order_url(client))
    self.__send_request(request, timeout)
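# A hedged sketch tying the trading-client methods above together. The class
# name and constructor are assumptions, since this section only shows
# individual methods.
client = TradingClient()              # hypothetical constructor
account = client.get_account(timeout=10)
print(account)
client.cancel_all(timeout=10)         # cancel all open orders for the default client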
def test_multiple_settings_webhook(self, requests_mock):
    def handle_get(url, auth):
        with open(os.path.join(os.path.dirname(__file__), 'test_data',
                               'shipments.json')) as shipment_file:
            shipment_json = json.load(shipment_file)
        shipment = shipment_json.get("shipments")[0]
        shipment["orderKey"] = self.pick_list
        shipment["orderNumber"] = self.pick_list
        response_mock = Mock()
        response_mock.status_code = 200
        response_mock.json.return_value = shipment_json
        return response_mock

    def handle_delete(url, auth):
        # If called with the webhook authentication, raise an error
        webhook_auth = (self.settingId.api_key,
                        self.settingId.get_password('api_secret'))
        if auth == webhook_auth:
            raise RuntimeError("Requests delete called with webhook settings")
        else:
            response_mock = Mock()
            response_mock.status_code = 200
            return response_mock

    # Add a second setting
    shipstation_settings = frappe.new_doc("Shipstation Settings")
    shipstation_settings.update({
        "api_key": '_Test_98980898989890',
        "api_secret": '98989898989898',
    })

    # Map lead source
    shipstation_settings.append("store_mapping", {
        "source": self.lead_source.name,
        "store_id": "_Test_Store_ID"
    })

    # Map transporter
    shipstation_settings.append("transporter_mapping", {
        "transporter": "_Test Supplier",
        "carrier_code": "canada_post"
    })
    second_setting = shipstation_settings.insert(ignore_permissions=True)

    # Add the two settings (orderIds) to the delivery note
    delivery_note = frappe.get_doc('Delivery Note', self.delivery_note.name)
    order_table = frappe.new_doc('Shipstation Order ID', delivery_note,
                                 'ais_shipstation_order_ids')
    order_table.update({
        'settings_id': self.settingId.name,
        'shipstation_order_id': '_Test_orderId'
    })
    order_table.save()

    order_table = frappe.new_doc('Shipstation Order ID', delivery_note,
                                 'ais_shipstation_order_ids')
    order_table.update({
        'settings_id': second_setting.name,
        'shipstation_order_id': '_Test_orderId2'
    })
    order_table.save()

    # Mock the request URL
    url = ("http://deverp.metactical.com/api/method/metactical.api.shipstation"
           ".orders_shipped_webhook?settingid=" + self.settingId.name)
    data = '{"resource_url": "https://test.shipstationurl.com", "resource_type": "SHIP_NOTIFY"}'
    frappe.request = Request('POST', url, data=data)
    requests_mock.get = Mock(side_effect=handle_get)
    requests_mock.delete = Mock(side_effect=handle_delete)
    orders_shipped_webhook()

    # Assert delete was called with the second authentication settings
    auth = (second_setting.api_key, second_setting.get_password('api_secret'))
    requests_mock.delete.assert_called_with(
        'https://ssapi.shipstation.com/orders/_Test_orderId2', auth=auth)

    # Verify the delivery note is submitted and the shipment data has been saved
    delivery_note_name = frappe.db.get_value("Delivery Note",
                                             {"pick_list": self.pick_list})
    delivery_note = frappe.get_doc("Delivery Note", delivery_note_name)
    self.assertEqual(delivery_note.docstatus, 1)
    self.assertEqual(delivery_note.transporter, "_Test Supplier")
    self.assertEqual(delivery_note.lr_no, "7302361059843272")
    self.assertEqual(delivery_note.ais_package_weight, "192.0 ounces")
    self.assertEqual(delivery_note.lr_date, datetime.date(datetime(2021, 9, 27)))
    self.assertEqual(delivery_note.ais_shipment_cost, 22.25)
    self.assertEqual(delivery_note.ais_package_size, "30.0l x 10.0w x 10.0h")
    self.assertEqual(delivery_note.ais_updated_by_shipstation, 1)
def get_time_series_for_fields(
    self,
    fields: List[Field],
    start_time: Optional[datetime] = None,
    window: Window = Window.RAW,
    end_time: Optional[datetime] = None,
) -> List[FieldTimeSeries]:
    """Get complete (non-paginated) time series data for each field in `fields`."""
    assert (start_time is None) == (
        end_time is None
    ), "Either both start and end time should be provided, or both should be missing"

    # Build the requests queue
    params = {
        "timeStart": int(start_time.timestamp()) if start_time else None,
        "timeEnd": int(end_time.timestamp()) if end_time else None,
        "window": window.value,
        "limit": 5000,
    }
    queue: List[Tuple[str, BatchRequest]] = [
        (
            f.field_human_name,  # type: ignore
            BatchRequest.from_request(
                Request(
                    method="GET",
                    url=self._url(f"outputs/{f.output_id}/fields/{f.field_human_name}/data"),
                    params=params,
                )
            ),
        )
        for f in fields
    ]

    # Make all requests in the queue, MAX_BATCH_REQUESTS at a time
    MAX_BATCH_REQUESTS = 200
    records: Dict[str, Dict[datetime, str]] = defaultdict(dict)
    while queue:
        # Make the next batch of requests
        requests = dict(queue[:MAX_BATCH_REQUESTS])
        del queue[:MAX_BATCH_REQUESTS]
        logger.info(f"Making {len(requests)} batched requests to IOT API")
        responses = self._batch_request(requests)
        any_success = False

        # Process responses
        for name, resp in responses.items():
            if resp.ok:
                any_success = True
                series = {
                    Parsers.datetime(r["event_time"]): Parsers.unknown(r["value"])
                    for r in resp.body["records"]
                }
                records[name].update(series)

                # Add a request for the next page
                next_page_url = resp.body["meta"]["next_page_url"]
                if next_page_url:
                    queue.append((name, BatchRequest(method="GET", uri=next_page_url)))
            else:
                # Retry the failed request
                logger.warning(f"Got bad response from IOT API ({resp.body}). Retrying...")
                queue.append((name, requests[name]))

        if not any_success:
            raise IOError(f"All {len(requests)} batched requests to IOT API failed")

    fields_by_name = {f.field_human_name: f for f in fields}
    return [
        FieldTimeSeries(field=fields_by_name[name], time_series=series)
        for name, series in records.items()
    ]
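# A hedged usage sketch for get_time_series_for_fields. The client instance
# and `my_fields` list are assumptions; Window.RAW is the default shown in
# the signature above.
from datetime import datetime, timedelta, timezone

end = datetime.now(timezone.utc)
start = end - timedelta(days=1)
series_list = iot_client.get_time_series_for_fields(  # iot_client: hypothetical instance
    fields=my_fields,                                 # my_fields: previously fetched Field objects
    start_time=start,
    end_time=end,
    window=Window.RAW,
)
for fts in series_list:
    print(fts.field.field_human_name, len(fts.time_series))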
def delete_project_description(self, project):
    """Deletes the description of a project."""
    url = self.gerrit.url('PROJECT_DESCRIPTION', project_name=project)
    r = Request(method='DELETE', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
def get_metadata(
        eqlat=None,
        eqlon=None,
        eqtime=None,
        eqradius=10,
        abandoned=False,
        station_type='Ground',
        eqtimewindow=10,  # seconds
        station_radius=200):
    """Retrieve station metadata JSON from CESMD web service.

    Args:
        eqlat (float): Earthquake latitude.
        eqlon (float): Earthquake longitude.
        eqtime (datetime): Earthquake origin time.
        eqradius (float): Earthquake search radius (km).
        abandoned (bool): Whether or not to include abandoned stations in
            the search.
        station_type (str): One of the following station types: [%s]
        eqtimewindow (float): Earthquake time search window in sec.
        station_radius (float): Radius (km) to search for stations from
            epicenter.
    Returns:
        dict: Dictionary of event/station information.
    Raises:
        ValueError
        ConnectionError
    """ % (','.join(STATION_TYPES))
    params = {
        'rettype': 'metadata',
        'groupby': 'event',
        'format': 'json',
        'nodata': 404,
        'sttype': STATION_TYPES[station_type],
        'abandoned': abandoned
    }
    has_event_info = (eqlat is not None) and (eqlon is not None) \
        and (eqtime is not None)
    if not has_event_info:
        raise ValueError(
            'get_metadata must get either event id or event information.')
    else:
        starttime = eqtime - timedelta(seconds=eqtimewindow // 2)
        endtime = eqtime + timedelta(seconds=eqtimewindow // 2)
        params['elat'] = eqlat
        params['elon'] = eqlon
        params['erad'] = eqradius
        params['startdate'] = starttime.strftime('%Y-%m-%dT%H:%M:%S')
        params['enddate'] = endtime.strftime('%Y-%m-%dT%H:%M:%S')
    params['maxepidist'] = station_radius
    session = Session()
    request = Request('GET', URL_TEMPLATE, params=params).prepare()
    response = session.get(request.url)
    if response.status_code != 200:
        fmt = 'Could not retrieve data from url "%s": Server response %i'
        raise ConnectionError(fmt % (request.url, response.status_code))
    metadata = response.json()
    return metadata
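# A hedged usage sketch for get_metadata; the coordinates and origin time
# below are illustrative placeholders, not values from the source.
from datetime import datetime

metadata = get_metadata(
    eqlat=35.77,
    eqlon=-117.60,
    eqtime=datetime(2019, 7, 6, 3, 19, 53),  # hypothetical origin time (UTC)
    eqradius=20,
    station_radius=100,
)
print(sorted(metadata.keys()))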
def list_projects(self):
    """Lists the projects accessible by the caller."""
    url = self.gerrit.url('LIST_PROJECTS')
    r = Request(method='GET', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
def get_project_parent(self, project):
    """Retrieves the name of a project's parent project.

    For the All-Projects root project an empty string is returned."""
    url = self.gerrit.url('PROJECT_PARENT', project_name=project)
    r = Request(method='GET', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
def list_dashboards(self, project):
    """Lists custom dashboards for a project."""
    url = self.gerrit.url('LIST_DASHBOARDS', project_name=project)
    r = Request(method='GET', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
def get_project_description(self, project):
    """Retrieves the description of a project."""
    url = self.gerrit.url('PROJECT_DESCRIPTION', project_name=project)
    r = Request(method='GET', url=url, auth=self.gerrit.auth)
    return self.gerrit.dispatch(r)
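# The project methods above all delegate to self.gerrit.dispatch, which is
# not shown in this section. A minimal sketch, assuming dispatch prepares and
# sends the Request and strips Gerrit's ")]}'" anti-XSSI prefix before
# decoding JSON; the real implementation may differ.
import json

def dispatch(self, request):
    with Session() as session:
        response = session.send(request.prepare())
    response.raise_for_status()
    body = response.text
    if body.startswith(")]}'"):  # Gerrit prepends this to every JSON response
        body = body[len(")]}'"):]
    return json.loads(body)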