def search(query, data_path, gdocs_public_doc, size=1000):
    """Search Google Docs "explore" suggestions for `query`.

    Reads the auth token and cookies from the JSON file at `data_path`,
    posts the search request against the public document `gdocs_public_doc`,
    and returns a list of {"title", "desc", "link"} dicts.
    Exits the process on HTTP errors or when the cookies have expired.
    """
    with open(data_path, 'r') as f:
        out = json.loads(f.read())
    token = out["keys"]["gdoc"]
    cookies = out["cookies"]
    data = {
        "request": '["documentsuggest.search.search_request","{}",[{}],null,1]'.format(
            query, size)
    }
    req = httpx.post(
        'https://docs.google.com/document/d/{}/explore/search?token={}'.format(
            gdocs_public_doc, token),
        cookies=cookies, data=data)
    if req.status_code != 200:
        exit("Error (GDocs): request gives {}".format(req.status_code))
    # The response is JSON prefixed with an anti-XSSI guard that must be stripped.
    output = json.loads(req.text.replace(")]}'", ""))
    #pprint(output)
    # BUG FIX: the original tested `type(output[0][1]) == "str"`, comparing a
    # type object against the string "str" — always False, so the
    # XSRF/expired-cookie case was never detected. isinstance() is correct.
    if isinstance(output[0][1], str) and output[0][1].lower() == "xsrf":
        exit(
            f"\n[-] Error : XSRF detected.\nIt means your cookies have expired, please generate new ones."
        )
    results = []
    for result in output[0][1]:
        link = result[0][0]
        title = result[0][1]
        desc = result[0][2]
        results.append({"title": title, "desc": desc, "link": link})
    return results
def authenticate(self):
    """Obtain an OAuth2 access token from the Zayo API.

    Reads the client-id / client-secret from the environment, performs the
    client-credentials grant against the Zayo auth endpoint, and stores the
    JSON response so the token can later be read via the `access_token`
    property. Called during instance initialization.

    Notes
    -----
    According to the Zayo API documentation, a token is valid for 1hr.
    Plan accordingly.
    """
    credentials = {
        "client_id": getenv("ZAYO_CLIENT_ID"),
        "client_secret": getenv("ZAYO_CLIENT_SECRET"),
        "grant_type": "client_credentials",
        "scope": "openid",
    }
    res = httpx.post(url=consts.ZAYO_URL_AUTH, data=credentials)
    res.raise_for_status()
    self._auth_payload = res.json()
def retrieve_kelvin_access_token(host: str, username: str, password: str, verify: bool) -> str:
    """Log in to the Kelvin REST API and return the access token string.

    Raises TestServerConnectionError on network failure or non-200 replies.
    """
    logger.info("Retrieving Kelvin access token...")
    form = {"username": username, "password": password}
    try:
        resp = httpx.post(
            URL_TOKEN.format(host=host),
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data=form,
            verify=verify,
        )
    except httpx.NetworkError as exc:
        raise TestServerConnectionError(
            f"Error retrieving token from Kelvin REST API: {exc}")
    if resp.status_code != 200:
        raise TestServerConnectionError(  # pragma: no cover
            f"Error retrieving token from Kelvin REST API: [{resp.status_code}] {resp.reason_phrase}",
            reason=resp.reason_phrase,
            status=resp.status_code,
        )
    return resp.json()["access_token"]
def sync_detailed(
    *,
    client: Client,
    string_prop: Union[Unset, str] = "the default string",
    datetime_prop: Union[Unset, datetime.datetime] = isoparse("1010-10-10T00:00:00"),
    date_prop: Union[Unset, datetime.date] = isoparse("1010-10-10").date(),
    float_prop: Union[Unset, float] = 3.14,
    int_prop: Union[Unset, int] = 7,
    boolean_prop: Union[Unset, bool] = False,
    list_prop: Union[Unset, List[AnEnum]] = UNSET,
    union_prop: Union[Unset, float, str] = "not a float",
    union_prop_with_ref: Union[Unset, float, AnEnum] = 0.6,
    enum_prop: Union[Unset, AnEnum] = UNSET,
    model_prop: Union[ModelWithUnionProperty, Unset] = UNSET,
    required_model_prop: ModelWithUnionProperty,
) -> Response[Union[None, HTTPValidationError]]:
    """Build the request kwargs, perform a blocking POST, and wrap the reply.

    All parameters are forwarded unchanged to `_get_kwargs`; the raw httpx
    response is converted into the generated `Response` wrapper.
    """
    request_kwargs = _get_kwargs(
        client=client,
        string_prop=string_prop,
        datetime_prop=datetime_prop,
        date_prop=date_prop,
        float_prop=float_prop,
        int_prop=int_prop,
        boolean_prop=boolean_prop,
        list_prop=list_prop,
        union_prop=union_prop,
        union_prop_with_ref=union_prop_with_ref,
        enum_prop=enum_prop,
        model_prop=model_prop,
        required_model_prop=required_model_prop,
    )
    http_response = httpx.post(**request_kwargs)
    return _build_response(response=http_response)
def getFinalResult(self):
    """Fetch the final result page and parse balance / remaining power.

    Returns a dict with keys "账户余额" (account balance) and "剩余电量"
    (remaining electricity); both are -1.0 when parsing fails. Returns None
    (and sets self.result to self.errmsg) when the HTTP request itself fails.
    """
    try:
        self.resp.append(httpx.post(
            self.url, data=self.data, cookies=self.cookies))
    # BUG FIX: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception so those still propagate.
    except Exception:
        self.result = self.errmsg
        return None
    self.doc.append(HTML(self.resp[2].text))
    try:
        # The page presents the summary text in the element with id "lableft".
        self.finalResult = self.doc[2].xpath(
            '//*[@id="lableft"]/text()')[0]
        df = self.findPat("账户余额:.*元", 5, -1, float)
        du = self.findPat("剩余电量:.*度", 5, -1, float)
        return {
            "账户余额": df,
            "剩余电量": du
        }
    except IndexError:
        # xpath matched nothing — return sentinel values instead of raising.
        return {
            "账户余额": -1.0,
            "剩余电量": -1.0
        }
def authenticate(self) -> bool:
    """Authenticate the client against the API's /auth/ endpoint.

    Returns False on HTTP 401; True when an access token was obtained and
    stored on the instance. Raises ValueError when the 2xx reply lacks an
    access_token key.
    """
    credentials = {"username": self.username, "password": self.password}
    response = post(f"{self.endpoint}/auth/", data=credentials)
    if response.status_code == 401:
        return False
    token = response.json().get("access_token")
    if not token:
        raise ValueError(
            "The access_token key is not present "
            "on Authorization response."
        )
    self.access_token = token
    return True
def promote_app(self, app):
    """Promote an app's staging tag to the latest tag via the tags endpoint.

    Reads the current staging tag/version, falls back to self.tag or a freshly
    computed version when missing, then posts the promotion and writes the
    resulting latest image tag to stdout.
    """
    tags_url = f"{self.config.cs_url}/apps/api/v1/{app['owner']}/{app['title']}/tags/"
    auth_headers = {"Authorization": f"Token {self.cs_api_token}"}
    resp = httpx.get(tags_url, headers=auth_headers)
    assert (resp.status_code == 200
            ), f"Got: {resp.url} {resp.status_code} {resp.text}"
    staging = resp.json()["staging_tag"]
    staging_tag = staging["image_tag"]
    app_version = staging["version"]
    if app_version is None:
        app_version = self.get_version(app, print_stdout=False)
    resp = httpx.post(
        tags_url,
        json={
            "latest_tag": staging_tag or self.tag,
            "staging_tag": None,
            "version": app_version,
        },
        headers=auth_headers,
    )
    assert (resp.status_code == 200
            ), f"Got: {resp.url} {resp.status_code} {resp.text}"
    sys.stdout.write(resp.json()["latest_tag"]["image_tag"])
def iterate_request_ids_with_responses() -> Iterable[Tuple[str, bytes]]:
    """Send requests to webKnossos and record the schema of their replies"""
    # Generated endpoint modules are imported lazily to avoid import cycles
    # at module load time.
    from webknossos.client._generated.api.default import (
        annotation_info,
        annotation_infos_by_task_id,
        build_info,
        current_user_info,
        dataset_info,
        dataset_list,
        dataset_sharing_token,
        datastore_list,
        generate_token_for_data_store,
        project_info_by_id,
        project_info_by_name,
        task_info,
        task_infos_by_project_id,
        team_list,
        user_info_by_id,
        user_list,
        user_logged_time,
    )
    from webknossos.client.context import _get_generated_client
    from webknossos.utils import snake_to_camel_case

    # Fixed IDs of objects expected to exist on the test server.
    organization_id = "Organization_X"
    dataset_name = "e2006_knossos"
    task_id = "581367a82faeb37a008a5352"
    user_id = "570b9f4d2a7c0e4d008da6ef"
    project_id = "58135bfd2faeb3190181c057"
    project_name = "Test_Project"
    explorative_annotation_id = "58135c192faeb34c0081c05d"

    # Make the datastore scan its inbox so the dataset below is registered.
    extract_200_response(
        httpx.post(
            url=f"{WK_URL}/data/triggers/checkInboxBlocking?token={WK_TOKEN}",
        ))

    # Sanity check: the dataset must exist and be active before recording.
    response = httpx.get(
        url=f"{WK_URL}/api/datasets/{organization_id}/{dataset_name}",
        headers={"X-Auth-Token": f"{WK_TOKEN}"},
    )
    assert (
        response.status_code == 200 and response.json()["isActive"]
    ), f"You need to copy or link any dataset to binaryData/{organization_id}/{dataset_name}."
    # Current time as a unix timestamp, used as the logged-time query bound.
    d = datetime.utcnow()
    unixtime = calendar.timegm(d.utctimetuple())
    client = _get_generated_client(enforce_auth=True)

    # Yield one (camelCased endpoint name, raw 200-response body) pair per
    # endpoint that needs explicit arguments.
    yield (
        "annotationInfo",
        extract_200_response(
            annotation_info.sync_detailed(
                id=explorative_annotation_id,
                client=client,
                timestamp=unixtime,
            )),
    )
    yield (
        "datasetInfo",
        extract_200_response(
            dataset_info.sync_detailed(
                organization_name=organization_id,
                data_set_name=dataset_name,
                client=client,
            )),
    )
    yield (
        "datasetList",
        extract_200_response(dataset_list.sync_detailed(client=client, )),
    )
    yield (
        "datasetSharingToken",
        extract_200_response(
            dataset_sharing_token.sync_detailed(
                organization_name=organization_id,
                data_set_name=dataset_name,
                client=client,
            )),
    )
    yield (
        "taskInfo",
        extract_200_response(
            task_info.sync_detailed(
                id=task_id,
                client=client,
            ),
        ),
    )
    yield (
        "userInfoById",
        extract_200_response(
            user_info_by_id.sync_detailed(
                id=user_id,
                client=client,
            ),
        ),
    )
    yield (
        "teamList",
        extract_200_response(team_list.sync_detailed(client=client, ), ),
    )
    yield (
        "projectInfoById",
        extract_200_response(
            project_info_by_id.sync_detailed(
                id=project_id,
                client=client,
            ),
        ),
    )
    yield (
        "projectInfoByName",
        extract_200_response(
            project_info_by_name.sync_detailed(name=project_name,
                                               client=client),
        ),
    )
    yield (
        "taskInfosByProjectId",
        extract_200_response(
            task_infos_by_project_id.sync_detailed(
                id=project_id,
                client=client,
            ),
        ),
    )
    yield (
        "annotationInfosByTaskId",
        extract_200_response(
            annotation_infos_by_task_id.sync_detailed(id=task_id,
                                                      client=client),
        ),
    )
    yield (
        "userLoggedTime",
        extract_200_response(
            user_logged_time.sync_detailed(
                id=user_id,
                client=client,
            ),
        ),
    )
    # The remaining endpoints take no arguments beyond the client; derive the
    # yielded name from the module name.
    for api_endpoint in [
            datastore_list,
            build_info,
            current_user_info,
            generate_token_for_data_store,
            user_list,
    ]:
        api_endpoint_name = api_endpoint.__name__.split(".")[-1]
        api_endpoint_name = snake_to_camel_case(api_endpoint_name)
        yield (
            api_endpoint_name,
            extract_200_response(api_endpoint.sync_detailed(client=client)),
        )
def post_01():
    """POST a small form payload to the local test server and print the JSON reply."""
    payload = {"name": "daine", "sex": "male"}
    response = httpx.post("http://localhost:8889/postSecond", data=payload)
    print(response.json())
def main():
    """CLI entry point: init/delete the DB, run the server, or upload data."""
    parser = ArgumentParser(description='An app to rate and suggest university courses')
    sp = parser.add_subparsers(dest='action')
    sp.required = True
    sp.add_parser('init')
    sp.add_parser('delete')
    p = sp.add_parser('run')
    p.add_argument('-p', '--port', help='Port to run on. Default: 8001', type=int, default=8001)
    p = sp.add_parser('load')
    p.add_argument('data_json', help='JSON file with list of {"CourseNumber": "CS100", "CourseName": "", "CourseDescription": ""}')
    p.add_argument('university', help='University code to upload to')
    p.add_argument('auth', help='Authentication in form of username:password')
    p.add_argument('-e', '--existing-only', help="Don't create an account/university if they don't exist")
    p.add_argument('-s', '--server-url', help='URL of server to upload to', default='http://localhost:8001')
    p = sp.add_parser('add-default', help='Add default attributes')
    p.add_argument('-s', '--server-url', help='URL of server to upload to', default='http://localhost:8001')
    p.add_argument('auth', help='Authentication in form of username:password')
    args = parser.parse_args()
    if args.action == 'init':
        init_db()
    elif args.action == 'delete':
        delete_db()
    elif args.action == 'load':
        # Bulk-upload courses from a JSON file to the given university.
        with open(args.data_json) as f:
            courses = json.load(f)
        base = args.server_url.rstrip('/')
        token = get_token(args.auth, args.server_url, args.existing_only)
        auth_headers = {'Authorization': 'Bearer {}'.format(token)}
        r = httpx.get(base + '/university/' + args.university)
        if r.is_error:
            # University missing: create it unless --existing-only was given.
            if args.existing_only:
                print('University does not exist')
                raise SystemExit(1)
            print('Creating university...')
            r = httpx.post(base + '/university',
                           json=dict(name=args.university, code=args.university),
                           headers=auth_headers)
            assert not r.is_error
        for course in courses:
            code = course['CourseNumber']
            title = course['CourseName']
            description = course['CourseDescription']
            print('Uploading {}...'.format(code))
            r = httpx.post(base + '/university/{}/course'.format(args.university),
                           json=dict(code=code,
                                     title=title,
                                     description=description),
                           headers=auth_headers)
            if r.is_error:
                # Failures are reported but do not abort the batch.
                print('Failed to upload ({}): {}'.format(r.status_code, r.content))
    elif args.action == 'add-default':
        # Upload the built-in set of rating attributes.
        base = args.server_url.rstrip('/')
        token = get_token(args.auth, args.server_url)
        auth_headers = {'Authorization': 'Bearer {}'.format(token)}
        defaults_attributes = [
            {'name': 'Difficulty', 'description': 'How hard you found the course'},
            {'name': 'Curriculum', 'description': 'How well the course was layed out'},
            {'name': 'Usefulness', 'description': 'How applicable the course is to the real world'},
            {'name': '_Overall', 'description': 'Overall course rating'}
        ]
        for attr in defaults_attributes:
            print('Uploading attribute "{}"...'.format(attr['name']))
            r = httpx.post(base + '/ratingAttribute', json=attr, headers=auth_headers)
            if r.is_error:
                print('Failed to upload attribute {} ({}): {}'.format(attr, r.status_code, r.content))
    elif args.action == 'run':
        # Run the web app; DEBUG switches on verbose logging and auto-reload.
        log_level = 'info'
        reload = False
        if DEBUG:
            log_level = 'debug'
            reload = True
        uvicorn.run("courator:app", host="0.0.0.0", port=args.port,
                    log_level=log_level, reload=reload)
def test_unhandled(self):
    """Exercise the mock server's unhandled-request capture toggle.

    Verifies that captured unhandled requests (and which of their headers
    are retained) change as repeated requests vary their header values.
    """
    # testing disabled
    resp = httpx.post(MGMT + '/unhandled', json=False, verify=False)
    resp.raise_for_status()
    resp = httpx.delete(MGMT + '/unhandled', verify=False)
    resp.raise_for_status()
    # Unique path per run so earlier captures cannot interfere.
    path = '/unhandled-%s' % time.time()
    resp = httpx.get(SRV1 + path)
    self.assertEqual(404, resp.status_code)
    resp = httpx.get(MGMT + '/unhandled?format=yaml', verify=False)
    resp.raise_for_status()
    # With capture disabled nothing is recorded.
    self.assertEqual(resp.headers['x-mockintosh-unhandled-data'], 'false')
    self.assertEqual(resp.text, 'services: []\n')
    # testing enabled
    resp = httpx.post(SRV1 + '/__admin/unhandled', json=True)
    resp.raise_for_status()
    path = '/unhandled-%s' % time.time()
    resp = httpx.get(SRV1 + path, headers={
        "hdr1": "val1",
        "hdr2": "val2",
        "hdr3": "val3"
    })
    self.assertEqual(404, resp.status_code)
    resp = httpx.get(MGMT + '/unhandled?format=yaml', verify=False)
    resp.raise_for_status()
    self.assertTrue(resp.text.startswith('services:'))
    self.assertNotEqual(resp.text, 'services: []\n')
    self.assertEqual(resp.headers['x-mockintosh-unhandled-data'], 'true')
    resp = httpx.get(MGMT + '/unhandled', verify=False)
    resp.raise_for_status()
    # Default format is JSON.
    self.assertEqual('{', resp.text[0])
    config = resp.json()
    self.assertFalse([x for x in config['services'] if not x['endpoints']])
    # Hop-by-hop/client headers must be stripped; custom headers retained.
    for endp in config['services'][0]['endpoints']:
        if endp['path'] == path:
            hdrs = [x.lower() for x in endp.get('headers', {}).keys()]
            self.assertNotIn('host', hdrs)
            self.assertNotIn('user-agent', hdrs)
            self.assertNotIn('connection', hdrs)
            self.assertIn('hdr1', hdrs)
            self.assertIn('hdr2', hdrs)
            self.assertIn('hdr3', hdrs)
            break
    else:
        self.fail("Did not find endpoint")
    # Repeat with a changed hdr2 and missing hdr3: only the header whose
    # value stayed constant (hdr1) should remain in the capture.
    resp = httpx.get(SRV1 + path, headers={
        "hdr1": "val1",
        "hdr2": "val22"
    })
    self.assertEqual(404, resp.status_code)
    resp = httpx.get(SRV1 + '/__admin/unhandled')
    resp.raise_for_status()
    config = resp.json()
    for endp in config['services'][0]['endpoints']:
        if endp['path'] == path:
            hdrs = [x.lower() for x in endp.get('headers', {}).keys()]
            self.assertIn('hdr1', hdrs)
            self.assertNotIn('hdr2', hdrs)
            self.assertNotIn('hdr3', hdrs)
            break
    else:
        self.fail("Did not find endpoint")
    # A third request restoring the original header values must not bring
    # the dropped headers back.
    resp = httpx.get(SRV1 + path, headers={
        "hdr1": "val1",
        "hdr2": "val2",
        "hdr3": "val3"
    })
    self.assertEqual(404, resp.status_code)
    resp = httpx.get(SRV1 + '/__admin/unhandled')
    resp.raise_for_status()
    config = resp.json()
    for endp in config['services'][0]['endpoints']:
        if endp['path'] == path:
            hdrs = [x.lower() for x in endp.get('headers', {}).keys()]
            self.assertIn('hdr1', hdrs)
            self.assertNotIn('hdr2', hdrs)
            self.assertNotIn('hdr3', hdrs)
            break
    else:
        self.fail("Did not find endpoint")
def add_crontab_CRONTAB(
    self,
    post_name: str = '',
    post_type: str = 'day',
    post_where1: str = '',
    post_hour: str = '1',
    post_minute: str = '30',
    post_week: str = '',
    post_sType: str = 'toShell',
    post_sBody: str = '',
    post_sName: str = '',
    post_backupTo: str = 'localhost',
    post_save: str = '',
    post_urladdress: str = 'undefined',
    post_save_local: str = 'undefined',
    post_notice: str = 'undefined',
    post_notice_channel: str = 'undefined',
) -> dict:
    '''
    Add a scheduled (crontab) task via the panel API.

    post_name: task name
    post_type: schedule period type [day, week]
    post_where1: pass
    post_hour: hour
    post_minute: minute
    post_week: day of week [1, 2, 3, 4, 5, 6, 0 (Sunday)]
    post_sType: task type [toShell, ]
    post_sBody: script content / exclusion rules, `\\n` as line break
    post_sName: backup directory / site / database
    post_backupTo: backup destination [localhost, ]
    post_save: number of most recent backups to keep
    post_urladdress: pass
    post_save_local: pass
    post_notice: pass
    post_notice_channel: pass
    '''
    # Endpoint for creating a crontab entry.
    url = self.__PANEL_URL + '/crontab?action=AddCrontab'
    # Signed base payload, then the task fields merged on top.
    post_data = self.__get_key_data()
    post_data.update({
        'name': post_name,
        'type': post_type,
        'where1': post_where1,
        'hour': post_hour,
        'minute': post_minute,
        'week': post_week,
        'sType': post_sType,
        'sBody': post_sBody,
        'sName': post_sName,
        'backupTo': post_backupTo,
        'save': post_save,
        'urladdress': post_urladdress,
        'save_local': post_save_local,
        'notice': post_notice,
        'notice_channel': post_notice_channel,
    })
    res = httpx.post(url, data=post_data)
    return res.json()
def axios(req):
    """Perform an HTTP request described by `req` (a dict, or a bare URL string).

    Returns a dict {'status', 'headers', 'data'}; 'status' is -1 on any error.
    Proxy and timeout come from the global CONFIG['CONFIG_Axios'] settings.
    """
    fres = {'status': 200, 'headers': {}, 'data': ''}
    # BUG FIX: normalize a bare-URL request FIRST. The original evaluated
    # `'headers' in req` before this check, which on a string argument is a
    # substring test rather than a key lookup.
    if isinstance(req, str):
        req = {'url': req, 'method': 'get'}
    headers = {}
    if 'headers' in req:
        headers = req['headers']
    # BUG FIX: use `and` — the original `or` was true whenever EITHER spelling
    # was absent, so a caller-supplied 'Content-Type' header got clobbered by
    # the form-urlencoded default.
    if 'content-type' not in headers and 'Content-Type' not in headers:
        headers[
            'content-type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
    if 'user-agent' not in headers:
        try:
            headers['user-agent'] = uagent[CONFIG['CONFIG_Axios']
                                           ['uagent']]['header']
        except Exception as e:
            console.error(e)
            headers[
                'user-agent'] = 'Mozilla/5.0 (Linux; U; elecV2P; x64) ePhone Super Max Plus++'
    # Optional proxy / timeout from the global config; config errors fall
    # back to no proxy and a 5s timeout.
    proxies = {}
    timeout = 5
    try:
        if CONFIG['CONFIG_Axios']['proxy']:
            proxy = CONFIG['CONFIG_Axios']['proxy']
            if 'host' not in proxy:
                proxy['host'] = 'localhost'
            if 'port' not in proxy:
                proxy['port'] = 8001
            phttp = proxy['host'] + ':' + str(proxy['port'])
            if 'auth' in proxy and 'username' in proxy[
                    'auth'] and 'password' in proxy['auth']:
                phttp = proxy['auth']['username'] + ':' + proxy['auth'][
                    'password'] + '@' + phttp
            proxies['http://'] = 'http://' + phttp
        if CONFIG['CONFIG_Axios']['timeout']:
            # Config stores milliseconds; httpx expects seconds.
            timeout = round(CONFIG['CONFIG_Axios']['timeout'] / 1000, 2)
    except Exception as e:
        console.error(e)
    if 'url' not in req:
        fres['status'] = -1
        fres['data'] = 'axios error: a request url is expect'
        console.error(fres['data'])
        return fres
    # 'body' is accepted as an alias for 'data'.
    if 'body' in req:
        req['data'] = req['body']
    if 'data' not in req:
        req['data'] = ''
    if 'method' not in req:
        req['method'] = 'get'
    console.debug('axios request:', req, 'proxies:', proxies, 'timeout:',
                  timeout)
    try:
        req['method'] = req['method'].lower()
        if req['method'] == 'get':
            res = httpx.get(req['url'], headers=headers, proxies=proxies,
                            timeout=timeout)
        elif req['method'] == 'post':
            res = httpx.post(req['url'], headers=headers, data=req['data'],
                             proxies=proxies, timeout=timeout)
        elif req['method'] == 'put':
            res = httpx.put(req['url'], headers=headers, data=req['data'],
                            proxies=proxies, timeout=timeout)
        elif req['method'] == 'delete':
            res = httpx.delete(req['url'], headers=headers, data=req['data'],
                               proxies=proxies, timeout=timeout)
        elif req['method'] == 'options':
            res = httpx.options(req['url'], headers=headers, data=req['data'],
                                proxies=proxies, timeout=timeout)
        else:
            # Stand-in object mimicking a response for unknown methods.
            class res:
                status_code = -1
                headers = {}
                text = 'unknow request method ' + req['method']
            console.error('unknow request method', req['method'])
        fres['status'] = res.status_code
        fres['headers'] = res.headers
        fres['data'] = res.text
    except Exception as e:
        console.error(e)
        fres['status'] = -1
        fres['data'] = 'axios ' + req['method'] + ' ' + req[
            'url'] + ' error: ' + str(e)
    return fres
import json

import httpx

from credentials import kibana_url

# Export all dashboards (with their referenced objects) from the "search"
# Kibana space as NDJSON.
export_response = (httpx.post(
    f"{kibana_url}/s/search/api/saved_objects/_export",
    data={
        "type": "dashboard",
        "includeReferencesDeep": True
    },
    headers={
        "kbn-xsrf": "true"
    },
).read().decode("utf-8"))

# The export is newline-delimited JSON: one saved object per line.
saved_objects = export_response.split("\n")
for raw_object in saved_objects:
    # BUG FIX: a trailing newline in the export yields an empty final element
    # which json.loads() rejects with an uncaught JSONDecodeError — skip
    # blank lines. (Loop variable also renamed so it no longer shadows the
    # builtin `object`.)
    if not raw_object.strip():
        continue
    parsed_object = json.loads(raw_object)
    try:
        file_name = f"{parsed_object['type']}-{parsed_object['id']}.json"
        print(f"Writing object to file: {file_name}")
        with open(f"/data/{file_name}", "w", encoding="utf-8") as f:
            json.dump(parsed_object, f, ensure_ascii=False, indent=4)
    except KeyError:
        # Lines without type/id (e.g. the export summary) go to stdout.
        print(parsed_object)
def post(self, suburl, params, auth=None):
    """POST `params` as JSON to self.addr + suburl.

    When `auth` is given, its .headers() output is sent as request headers.
    TLS verification follows self.verify. Returns the httpx response.
    """
    # `is not None` is the correct identity check (PEP 8); `!= None` invokes
    # __eq__ and can misbehave for objects that override equality.
    headers = auth.headers() if auth is not None else None
    return httpx.post(self.addr + suburl,
                      verify=self.verify,
                      json=params,
                      headers=headers)
def post_to_slack(slack_webhook, msg):
    """Post `msg` to a Slack incoming webhook (path fragment `slack_webhook`)."""
    import json
    # BUG FIX: the payload was assembled by string concatenation, which
    # produced invalid JSON whenever `msg` contained a double quote,
    # backslash, or newline. json.dumps escapes the text correctly.
    httpx.post(
        "https://hooks.slack.com/services/{}".format(slack_webhook),
        data=json.dumps({"text": msg}),
        headers={"Content-type": "application/json"},
    )
def build_database(repo_path):
    """Build/refresh the til.db SQLite database from the repo's markdown files.

    For each */*.md file, upserts a row keyed by its path slug, re-rendering
    the body to HTML via the GitHub markdown API only when the body changed
    (or no cached HTML exists), then rebuilds the FTS index.
    """
    all_times = created_changed_times(repo_path)
    db = sqlite_utils.Database(repo_path / "til.db")
    table = db.table("til", pk="path")
    # NOTE(review): globbing uses the module-level `root`, not `repo_path` —
    # confirm they always point at the same directory.
    for filepath in root.glob("*/*.md"):
        fp = filepath.open()
        # First line is the markdown H1 title; the rest is the body.
        title = fp.readline().lstrip("#").strip()
        body = fp.read().strip()
        path = str(filepath.relative_to(root))
        slug = filepath.stem
        url = "https://github.com/shireenrao/til/blob/master/{}".format(path)
        # Do we need to render the markdown?
        path_slug = path.replace("/", "_")
        try:
            row = table.get(path_slug)
            previous_body = row["body"]
            previous_html = row["html"]
        except (NotFoundError, KeyError):
            previous_body = None
            previous_html = None
        record = {
            "path": path_slug,
            "slug": slug,
            "topic": path.split("/")[0],
            "title": title,
            "url": url,
            "relative_url": path,
            "body": body,
        }
        if (body != previous_body) or not previous_html:
            # Render via the GitHub markdown API; retry up to 3 times,
            # sleeping 60s between attempts (rate limiting).
            retries = 0
            response = None
            while retries < 3:
                headers = {}
                if os.environ.get("GITHUB_TOKEN"):
                    headers = {
                        "authorization": "Bearer {}".format(os.environ["GITHUB_TOKEN"])
                    }
                response = httpx.post(
                    "https://api.github.com/markdown",
                    json={
                        # mode=gfm would expand #13 issue links and suchlike
                        "mode": "markdown",
                        "text": body,
                    },
                    headers=headers,
                )
                if response.status_code == 200:
                    record["html"] = response.text
                    print("Rendered HTML for {}".format(path))
                    break
                else:
                    print(" sleeping 60s")
                    time.sleep(60)
                    retries += 1
            else:
                # while-else: all retries exhausted without a 200.
                assert False, "Could not render {} - last response was {}".format(
                    path, response.headers)
        record.update(all_times[path])
        with db.conn:
            table.upsert(record, alter=True)
    table.enable_fts(["title", "body"], tokenize="porter",
                     create_triggers=True, replace=True)
def get_quakes(**kwargs):
    """Query the Icelandic Met Office earthquake API and return GeoJSON.

    Keyword args: start/end (time window), maxlon/minlon/maxlat/minlat
    (bounding box, defaulting to an Iceland-area box), min_depth/max_depth,
    min_size/max_size. Returns a FeatureCollection dict of quake points.
    """
    start = kwargs.get('start', default_start_time())
    end = kwargs.get('end', default_end_time())
    maxlon = kwargs.get('maxlon', -4)
    minlon = kwargs.get('minlon', -32)
    maxlat = kwargs.get('maxlat', 68)
    minlat = kwargs.get('minlat', 61)
    form_data = {
        "start_time": convert_separator_to_space(start),
        "end_time": convert_separator_to_space(end),
        "depth_min": kwargs.get('min_depth', 0),
        "depth_max": kwargs.get('max_depth', 25),
        "size_min": kwargs.get('min_size', 0),
        "size_max": kwargs.get('max_size', 9),
        #"magnitude_preference": ["Mlw", "Autmag"],
        #"event_type": ["qu"],
        #"originating_system": ["SIL picks"],
        # Bounding box as a closed polygon of [lat, lon] corners.
        "area": [
            [maxlat, minlon],
            [minlat, minlon],
            [minlat, maxlon],
            [maxlat, maxlon]
        ],
        "fields": [
            "event_id",
            "lat",
            "long",
            "time",
            "magnitude",
            "depth"
            #"event_type",
            #"originating_system"
        ]
    }
    rsp = httpx.post(
        'https://api.vedur.is/skjalftalisa/v1/quake/array/',
        json=form_data
    )
    rsp.raise_for_status()
    data = rsp.json()['data']
    # Sample of the column-oriented API response shape:
    """
    {'data': {'event_type': ['qu', 'qu', 'qu', 'qu'],
    'event_id': [888019, 889516, 889775, 890064],
    'magnitude': [4.9, 4.06, 4.21, 5.07],
    'originating_system': ['SIL picks', 'SIL picks', 'SIL picks', 'SIL picks'],
    'long': [-22.21036, -22.21167, -22.20224, -22.14738],
    'time': [1614562298, 1614600737, 1614607957, 1614616546],
    'lat': [63.92493, 63.91733, 63.93121, 63.93831]}}
    """
    #event_types = data['event_type']
    event_ids = data['event_id']
    magnitudes = data['magnitude']
    #orig_systems = data['originating_system']
    longitudes = data['long']
    times = data['time']
    latitudes = data['lat']
    depths = data['depth']
    # Columns are parallel arrays; zip them back into per-event rows.
    lists = [
        event_ids,
        magnitudes,
        longitudes,
        times,
        latitudes,
        depths
    ]
    ret = {}
    ret['type'] = 'FeatureCollection'
    ret['metadata'] = {}
    quakes = []
    for (event_id, magnitude, lon, time, lat, depth) in zip(*lists):
        q = {}
        # GeoJSON: depth is encoded as negative altitude.
        q['geometry'] = {
            'type': 'Point',
            'coordinates': [lon, lat, -depth]
        }
        q['type'] = 'Feature'
        q['id'] = event_id
        q['properties'] = {
            'lon': lon,
            'lat': lat,
            'depth': depth,
            'mag': magnitude,
            #'time': time*1000
            'time': datetime.fromtimestamp(time).isoformat()
        }
        quakes.append(q)
        #quakes.append(
        #    {
        #        'event_id': event_id,
        #        'magnitude': magnitude,
        #        'longitude': lon,
        #        'time': datetime.fromtimestamp(time).isoformat(),
        #        'latitude': lat
        #    }
        #)
    ret['features'] = quakes
    return ret
# Parse the GitHub Actions event payload describing the completed workflow run.
try:
    event = PartialGithubEvent.parse_file(settings.github_event_path)
except ValidationError as e:
    # Malformed event file is non-fatal for the workflow: exit 0.
    logging.error(f"Error parsing event file: {e.errors()}")
    sys.exit(0)
# Find the open PR whose head commit matches the workflow run's head commit.
use_pr: Optional[PullRequest] = None
for pr in repo.get_pulls():
    if pr.head.sha == event.workflow_run.head_commit.id:
        use_pr = pr
        break
if not use_pr:
    logging.error(
        f"No PR found for hash: {event.workflow_run.head_commit.id}")
    sys.exit(0)
github_headers = {
    "Authorization": f"token {settings.input_token.get_secret_value()}"
}
url = f"{github_api}/repos/{settings.github_repository}/issues/{use_pr.number}/comments"
logging.info(f"Using comments URL: {url}")
# Post the deploy-preview link as a PR comment.
response = httpx.post(
    url,
    headers=github_headers,
    json={
        "body": f"🚀 Preview for commit {use_pr.head.sha} at: {settings.input_deploy_url}"
    },
)
# Any non-2xx/3xx reply is fatal (exit 1) so the workflow step fails visibly.
if not (200 <= response.status_code <= 300):
    logging.error(f"Error posting comment: {response.text}")
    sys.exit(1)
logging.info("Finished")
def stop(self, limit: int):
    """Stop the wallet via the /wallet/stop/ endpoint, passing `limit`."""
    payload = {"limit": limit}
    return httpx.post(self.base_url + '/wallet/stop/',
                      data=payload,
                      headers=self.wallet_id_headers)
def register(access_token: str, domain: str) -> Dict[str, Any]:
    """
    Register a dummy audible device with access token from ``auth.login``.
    Returns important credentials needed for access audible api.
    """
    body = {
        "requested_token_type": [
            "bearer", "mac_dms", "website_cookies",
            "store_authentication_cookie"
        ],
        "cookies": {
            "website_cookies": [],
            "domain": f".amazon.{domain}"
        },
        # Impersonates the Audible iPhone app so the endpoint accepts the
        # registration; the serial is randomized per call.
        "registration_data": {
            "domain": "Device",
            "app_version": "3.26.1",
            "device_serial": get_random_device_serial(),
            "device_type": "A2CZJZGLK2JJVM",
            "device_name": ("%FIRST_NAME%%FIRST_NAME_POSSESSIVE_STRING%%DUPE_"
                            "STRATEGY_1ST%Audible for iPhone"),
            "os_version": "13.5.1",
            "device_model": "iPhone",
            "app_name": "Audible"
        },
        "auth_data": {
            "access_token": access_token
        },
        "requested_extensions": ["device_info", "customer_info"]
    }
    resp = httpx.post(f"https://api.amazon.{domain}/auth/register", json=body)
    resp_json = resp.json()
    # Non-200 replies still carry a JSON error body; surface it to the caller.
    if resp.status_code != 200:
        raise Exception(resp_json)
    success_response = resp_json["response"]["success"]
    # Pull the individual credentials out of the success payload.
    tokens = success_response["tokens"]
    adp_token = tokens["mac_dms"]["adp_token"]
    device_private_key = tokens["mac_dms"]["device_private_key"]
    store_authentication_cookie = tokens["store_authentication_cookie"]
    access_token = tokens["bearer"]["access_token"]
    refresh_token = tokens["bearer"]["refresh_token"]
    expires_s = int(tokens["bearer"]["expires_in"])
    # Convert the relative expiry into an absolute UTC POSIX timestamp.
    expires = (datetime.utcnow() + timedelta(seconds=expires_s)).timestamp()
    extensions = success_response["extensions"]
    device_info = extensions["device_info"]
    customer_info = extensions["customer_info"]
    website_cookies = dict()
    for cookie in tokens["website_cookies"]:
        # Cookie values arrive wrapped in literal double quotes; strip them.
        website_cookies[cookie["Name"]] = cookie["Value"].replace(r'"', r'')
    return {
        "adp_token": adp_token,
        "device_private_key": device_private_key,
        "access_token": access_token,
        "refresh_token": refresh_token,
        "expires": expires,
        "website_cookies": website_cookies,
        "store_authentication_cookie": store_authentication_cookie,
        "device_info": device_info,
        "customer_info": customer_info
    }
def utxo_consolidation(self, destination_address: str):
    """Consolidate wallet UTXOs into `destination_address` via the wallet API."""
    payload = {"destination_address": destination_address}
    return httpx.post(self.base_url + '/wallet/utxo-filter',
                      data=payload,
                      headers=self.wallet_id_headers)
def test_tagged_responses(self):
    """Verify that setting a tag switches which canned responses are served.

    Untagged responses (3.x) are always in rotation; tagged ones (1.x for
    "first", 2.x for "second") are interleaved only while their tag is set.
    """
    # Reset iterator state so the response rotation starts from the beginning.
    resp = httpx.post(SRV1 + '/__admin/reset-iterators')
    resp.raise_for_status()
    # no tag set - only untagged responses
    resp = httpx.post(SRV1 + '/__admin/tag', data="")
    resp.raise_for_status()
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.1", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.2", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.3", resp.text)
    # first tag set - "first" + untagged responses
    resp = httpx.post(SRV1 + '/__admin/tag', data="first")
    resp.raise_for_status()
    # The admin endpoint should report the active tag back.
    resp = httpx.get(SRV1 + '/__admin/tag')
    resp.raise_for_status()
    data = resp.json()
    self.assertIn("first", data['tags'])
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.1", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("1.1", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("1.2", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.2", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.3", resp.text)
    # first tag set - "second" + untagged responses
    # (tag switched via the management API this time)
    resp = httpx.post(MGMT + '/tag', data="second", verify=False)
    resp.raise_for_status()
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.1", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("2.1", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.2", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("2.2", resp.text)
    resp = httpx.get(SRV1 + '/tagged')
    self.assertEqual("3.3", resp.text)
    # case of no valid response
    resp = httpx.get(SRV1 + '/tagged-confusing')
    self.assertEqual(410, resp.status_code)
def simple_send_tx(self, simple_transaction: models.SimpleTransaction):
    """Submit a simple send transaction through the wallet API."""
    payload = simple_transaction.dict()
    return httpx.post(self.base_url + '/wallet/simple-send-tx',
                      data=payload,
                      headers=self.wallet_id_headers)
#!/usr/bin/env python # -*- encoding: utf-8 -*- ''' @File : 1_request_method.py @Time : 2021-02-23 @Author : EvilRecluse @Contact : https://github.com/RecluseXU @Desc : 常用的请求方法GET, POST, PUT, DELETE, HEAD, OPTIONS ''' # here put the import lib import httpx # 常用的请求方法GET, POST, PUT, DELETE, HEAD, OPTIONS r = httpx.get('https://httpbin.org/get') r = httpx.post('https://httpbin.org/post', data={'key': 'value'}) r = httpx.put('https://httpbin.org/put', data={'key': 'value'}) r = httpx.delete('https://httpbin.org/delete') r = httpx.head('https://httpbin.org/get') r = httpx.options('https://httpbin.org/get') # 设置headers headers = {'user-agent': 'my-app/0.0.1'} r = httpx.get('http://httpbin.org/headers', headers=headers) print(r.json())
def send_tx(self, transaction: models.Transaction = None):
    """Submit a transaction through the wallet API's /wallet/send-tx endpoint.

    Raises:
        ValueError: if `transaction` is None — the parameter keeps its None
        default for backward compatibility, but a transaction is required.
    """
    # BUG FIX: the None default combined with the unconditional .dict() call
    # produced an opaque AttributeError; fail early with a clear message.
    if transaction is None:
        raise ValueError("send_tx requires a transaction")
    data = transaction.dict()
    endpoint = '/wallet/send-tx'
    response = httpx.post(self.base_url + endpoint, data=data, headers=self.wallet_id_headers)
    return response
        }
    }
}


def bootstrap_response_schemas(openapi_schema: Dict) -> None:
    """Inserts the response schemas into openapi_schema (in-place), as recorded by example requests."""
    assert_valid_schema(openapi_schema)
    for operation_id, example_response in iterate_request_ids_with_responses():
        set_response_schema_by_example(openapi_schema,
                                       example_response=example_response,
                                       operation_id=operation_id)


if __name__ == "__main__":
    # Download the current schema and convert it via the external converter
    # service.
    schema_response = httpx.get(SCHEMA_URL)
    schema_response.raise_for_status()
    converter_response = httpx.post(
        CONVERTER_URL,
        content=schema_response.text,
        headers={"content-type": "application/json"},
    )
    converter_response.raise_for_status()
    schema = json.loads(converter_response.text)
    add_api_prefix_for_non_data_paths(schema)
    # Generate once so the recorded example requests can run, then patch the
    # schema with the recorded response shapes and regenerate.
    generate_client(schema)
    fix_request_body(schema)
    bootstrap_response_schemas(schema)
    generate_client(schema)
def create_token(self, create_tokens: models.CreateTokens):
    """Create a custom token through the wallet API."""
    payload = create_tokens.dict()
    return httpx.post(self.base_url + '/wallet/create-token',
                      data=payload,
                      headers=self.wallet_id_headers)
def _send_to_server(self, bookmark: Bookmark):
    """HTTP post proxy."""
    print(f">> Sending bookmark {bookmark.url} to web server")
    auth = ""  # TODO: Add token
    # NOTE(review): `data=bookmark` passes the Bookmark object straight to
    # httpx — this only works if Bookmark is dict-like; confirm, otherwise it
    # needs explicit serialization before posting.
    return httpx.post(self.url, data=bookmark)
def melt_tokens(self, melt_tokens: models.MintTokens):
    """Melt (destroy) custom tokens through the wallet API's /wallet/melt-tokens endpoint."""
    # NOTE(review): the annotation says models.MintTokens although this method
    # melts tokens — looks like a copy/paste from the mint path; confirm
    # whether a dedicated MeltTokens model exists and is intended here.
    data = melt_tokens.dict()
    endpoint = '/wallet/melt-tokens'
    response = httpx.post(self.base_url + endpoint, data=data, headers=self.wallet_id_headers)
    return response