async def create_or_update_pull_request(*, title: str, body: str, branch_name: str, session: aiohttp.ClientSession) -> None:
    """Open a PR from the fork's branch against typeshed master, or refresh the existing one.

    If GitHub answers 422 with "A pull request already exists", the open PR for
    this head branch is located and its title/body are patched instead.
    """
    secret = os.environ["GITHUB_TOKEN"]
    # Classic tokens start with "ghp" and use the "token" scheme; others use "Bearer".
    auth = f"token {secret}" if secret.startswith("ghp") else f"Bearer {secret}"
    fork_owner = get_origin_owner()
    api_headers = {"Accept": "application/vnd.github.v3+json", "Authorization": auth}
    pulls_url = f"https://api.github.com/repos/{TYPESHED_OWNER}/typeshed/pulls"
    head = f"{fork_owner}:{branch_name}"
    async with session.post(
        pulls_url,
        json={"title": title, "body": body, "head": head, "base": "master"},
        headers=api_headers,
    ) as response:
        resp_json = await response.json()
        pr_exists = response.status == 422 and any(
            "A pull request already exists" in e.get("message", "")
            for e in resp_json.get("errors", [])
        )
        if pr_exists:
            # Find the existing PR
            async with session.get(
                pulls_url,
                params={"state": "open", "head": head, "base": "master"},
                headers=api_headers,
            ) as response:
                response.raise_for_status()
                resp_json = await response.json()
                assert len(resp_json) >= 1
                pr_number = resp_json[0]["number"]
            # Update the PR's title and body
            async with session.patch(
                f"{pulls_url}/{pr_number}",
                json={"title": title, "body": body},
                headers=api_headers,
            ) as response:
                response.raise_for_status()
            return
    response.raise_for_status()
async def set_deployment_replicas(name, replicas, kubeapi: aiohttp.ClientSession):
    """Scale a deployment to *replicas*, remembering the pre-scale count.

    The original replica count is stored once in the
    ``kubeonoff/original-replicas`` annotation so it can be restored later.
    Returns the parsed API response; raises KubeApiError on non-2xx status.
    """
    deployment = await get_deployment(name, kubeapi)
    annotations = deployment["metadata"]["annotations"]
    # Only record the original count the first time we scale this deployment.
    annotations.setdefault(
        "kubeonoff/original-replicas", str(deployment["spec"]["replicas"])
    )
    patch_body = json.dumps({
        "metadata": {"annotations": annotations},
        "spec": {"replicas": replicas},
    }).encode("utf-8")
    url = (
        f"{KUBE_API_URL}/apis/apps/v1/"
        f"namespaces/{NAMESPACE}/deployments/{name}/"
    )
    async with kubeapi.patch(
        url,
        headers={"Content-Type": "application/strategic-merge-patch+json"},
        data=patch_body,
    ) as resp:
        body = await resp.text()
        if not 200 <= resp.status < 300:
            raise KubeApiError(body=body, status=resp.status)
        return json.loads(body)
async def acquire_job_by_id(job_id: str, http: aiohttp.ClientSession, jobs_api_url: str, mem: int, proc: int):
    """
    Acquire the job with a given ID using the jobs API.

    :param job_id: The id of the job to acquire
    :param http: An aiohttp.ClientSession to use to make the request.
    :param jobs_api_url: The url for the jobs API.
    :param mem: Fallback memory value used when the job document has no "mem" field.
    :param proc: Fallback process count used when the job document has no "proc" field.
    :return: a :class:`virtool_workflow.data_model.Job` instance with an api key (.key attribute)
    """
    async with http.patch(f"{jobs_api_url}/jobs/{job_id}", json={"acquired": True}) as response:
        async with raising_errors_by_status_code(
            response, status_codes_to_exceptions={"400": JobAlreadyAcquired}
        ) as document:
            logger.info(document)
            return Job(
                id=document["id"],
                args=document["args"],
                # dict.get replaces the verbose `x if "k" in d else default` conditionals
                mem=document.get("mem", mem),
                proc=document.get("proc", proc),
                status=document["status"],
                task=document["task"],
                key=document["key"],
            )
class Client:
    """Thin wrapper around :class:`ClientSession` that prefixes every path with a base URL."""

    def __init__(self, base_url: str = BASE_URL) -> None:
        self.base_url = base_url
        self.session = ClientSession()

    def _url(self, method: str) -> str:
        # Resolve an endpoint name against the configured base URL.
        return f"{self.base_url}/{method}"

    def get(self, method: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any:
        return self.session.get(self._url(method), *args, **kwargs)

    def patch(self, method: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any:
        return self.session.patch(self._url(method), *args, **kwargs)
def execute(self, session: aiohttp.ClientSession):
    """Send this check-run update to GitHub and return the pending PATCH request.

    Preconditions: the run must already exist (``id`` set) and must not carry
    head branch/sha fields when updating.
    """
    assert self.run.id is not None
    assert self.run.head_branch is None
    assert self.run.head_sha is None
    url = f"https://api.github.com/repos/{self.owner}/{self.repo}/check-runs/{self.run.id}"
    payload = cattr.unstructure(self.run)
    logger.info('PATCH %s\n%s', url, payload)
    return session.patch(url, headers=api_headers, json=payload)
async def link_backend_user(
    slack_id: str,
    auth_header: Dict[str, str],
    slack_api: SlackAPI,
    session: ClientSession,
) -> None:
    """
    Updates the slack user with their profile in the backend
    """
    # Resolve the user's email address from their Slack profile first.
    user_info = await slack_api.query(methods.USERS_INFO, {"user": slack_id})
    email = user_info["user"]["profile"]["email"]

    async with session.patch(
        f"{BACKEND_URL}/auth/profile/admin/",
        headers=auth_header,
        params={"email": email},
        json={"slackId": slack_id},
    ) as response:
        data = await response.json()
        logger.info(f"Backend response from user linking: {data}")
async def onReceive(http: aiohttp.ClientSession, data: Mapping[str, Any]) -> Optional[str]:
    """Update the endpoint's threat severity/status attributes and send a CoA to the NAS.

    Returns an error message string on failure, or None on success.
    """
    # Fake config object. Only the client_credentials grant is supported.
    cfg = {
        "clearpass": {
            "grant_type": "client_credentials",
            "api_host": data["host"],
            "client_id": data["user"],
            "client_secret": data["pass"],
        },
    }
    nas_ip = data["nas_ip"]
    endpoint_mac = data["endpoint_mac"]
    threat = data["threat"]

    # Open a session against the ClearPass host configured above.
    # To change the credentials: python -m aruba.clearpass
    with clearpass.session(cfg, verify=False) as session:
        # Raise or clear the endpoint's threat level depending on the flag.
        if threat:
            update = {
                "attributes": {
                    "Threat Severity": "Critical",
                    "Threat Status": "In Progress",
                },
            }
        else:
            update = {
                "attributes": {
                    "Threat Severity": "Low",
                    "Threat Status": "Resolved",
                },
            }
        async with http.patch(
            session.api_url + "/endpoint/mac-address/{}".format(endpoint_mac),
            headers=session.headers(),
            json=update,
        ) as response:
            if response.status != 200:
                return "Error actualizando endpoint: ({}) {}".format(response.status, await response.text())

        # Find the endpoint's most recent session on the switch.
        query = {
            "filter": json.dumps({
                "callingstationid": endpoint_mac,
            }),
            "sort": "-acctstarttime",
            "limit": 1,
        }
        session_id = ""
        async with http.get(
            session.api_url + "/session",
            headers=session.headers(),
            params=query,
        ) as response:
            if response.status != 200:
                return "Error localizando sesion: ({}) {}".format(response.status, await response.text())
            for item in (await response.json())["_embedded"]["items"]:
                if nas_ip in item["nasipaddress"]:
                    session_id = item["id"]
                    break

        # Force a reconnect of that session.
        confirm = {"confirm_disconnect": True}
        async with http.post(
            session.api_url + "/session/{}/disconnect".format(session_id),
            headers=session.headers(),
            json=confirm,
        ) as response:
            if response.status != 200:
                return "Error desconectando sesion: ({}) {}".format(response.status, await response.text())

    # No error occurred.
    return None
class AioHttpClient(HttpClient):
    def __init__(self, *, connector=None, loop=None, cookies=None, headers=None,
                 skip_auto_headers=None, auth=None, json_serialize=json.dumps,
                 request_class=ClientRequest, response_class=ClientResponse,
                 ws_response_class=ClientWebSocketResponse, version=http.HttpVersion11,
                 cookie_jar=None, connector_owner=True, raise_for_status=False,
                 read_timeout=sentinel, conn_timeout=None, auto_decompress=True,
                 trust_env=False, **kwargs):
        """
        The class packaging a class ClientSession to perform HTTP request
        and manage these HTTP connections.
        For details of the params:
        http://aiohttp.readthedocs.io/en/stable/client_advanced.html#client-session
        """
        super().__init__(**kwargs)
        self.client = ClientSession(
            connector=connector, loop=loop, cookies=cookies, headers=headers,
            skip_auto_headers=skip_auto_headers, auth=auth,
            json_serialize=json_serialize, request_class=request_class,
            response_class=response_class, ws_response_class=ws_response_class,
            version=version, cookie_jar=cookie_jar,
            connector_owner=connector_owner, raise_for_status=raise_for_status,
            read_timeout=read_timeout, conn_timeout=conn_timeout,
            auto_decompress=auto_decompress, trust_env=trust_env,
        )

    # NOTE(review): every verb helper below accepts extra positional *args but
    # silently discards them — presumably kept for interface compatibility with
    # HttpClient; confirm before relying on positional arguments here.
    def request(self, method, url, *args, **kwargs):
        return self.client.request(method=method, url=url, **kwargs)

    def get(self, url, *args, **kwargs):
        return self.client.get(url=url, **kwargs)

    def post(self, url, *args, data=None, **kwargs):
        return self.client.post(url=url, data=data, **kwargs)

    def put(self, url, *args, data=None, **kwargs):
        return self.client.put(url=url, data=data, **kwargs)

    def delete(self, url, *args, **kwargs):
        return self.client.delete(url=url, **kwargs)

    def options(self, url, *args, **kwargs):
        return self.client.options(url=url, **kwargs)

    def head(self, url, *args, **kwargs):
        return self.client.head(url=url, **kwargs)

    def patch(self, url, *args, data=None, **kwargs):
        return self.client.patch(url=url, data=data, **kwargs)

    async def close(self):
        # Release the underlying connector and any pooled connections.
        await self.client.close()

    async def get_response(self, response):
        """Read the body and wrap the aiohttp response in the project's Response type."""
        text = await response.text()
        return Response(
            url=response.url,
            status=response.status,
            charset=response.charset,
            content_type=response.content_type,
            content_length=response.content_length,
            reason=response.reason,
            headers=response.headers,
            text=text,
            selector=etree.HTML(text),
        )

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()
class ApiInstance:
    """Small JSON-over-HTTP client with automatic session renewal and one retry
    on unexpected failures."""

    def __init__(self, base_url: str, timeout: int = 5000, headers: dict = {}, *, logger: logging.Logger):
        self.base_url = base_url
        self.headers = headers
        self.timeout = timeout
        self.session = ClientSession(timeout=ClientTimeout(total=timeout))
        self.logger = logger

    async def __aenter__(self) -> "ApiInstance":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        await self.close()

    @staticmethod
    def check_result(res: Any) -> Any:
        """Decode a response body; an empty body maps to None."""
        if res == "":
            return None
        return json_loads_attrs(res)

    async def call(self, method: str, prefix: str, data: Any, headers: dict = {}):
        """Issue one HTTP request and return the decoded body.

        Deduplicates the former per-verb copy/paste: all verbs share one
        request/response path; GET and DELETE simply carry no body.
        Unknown methods return None (preserving the original fall-through).
        """
        new_headers = CIMultiDict()
        new_headers.update(self.headers)
        new_headers.update(headers)
        url = self.base_url + "/" + prefix
        if method in ("get", "delete"):
            ctx = getattr(self.session, method)(url, headers=new_headers)
        elif method in ("post", "put", "patch"):
            data_binary = json.dumps(data).encode("utf-8")
            ctx = getattr(self.session, method)(url, data=data_binary, headers=new_headers)
        else:
            return None
        async with ctx as resp:
            await check_response(resp, self.logger)
            res = await resp.text()
        return self.check_result(res)

    async def make_request(self, method: str, prefix: str, data: Any, headers: dict):
        """Call the API; on an unexpected error renew the session and retry once.

        Known API exceptions are re-raised unchanged so callers can handle them.
        """
        try:
            return await self.call(method, prefix, data, headers)
        except (ConflictingEntityException, EntityNotFoundException,
                PermissionDeniedException, ProcedureInvocationException,
                UnauthorizedException, ValidationException, BadRequestException,
                PapieaServerException, ApiException):
            raise
        except Exception:
            # A bare "except:" here would also swallow KeyboardInterrupt/SystemExit;
            # catch Exception only, then retry once with a fresh session.
            self.logger.debug("RENEWING SESSION")
            await self.renew_session()
            return await self.call(method, prefix, data, headers)

    async def post(self, prefix: str, data: Any, headers: dict = {}) -> Any:
        return await self.make_request("post", prefix, data, headers)

    async def put(self, prefix: str, data: Any, headers: dict = {}) -> Any:
        return await self.make_request("put", prefix, data, headers)

    async def patch(self, prefix: str, data: Any, headers: dict = {}) -> Any:
        return await self.make_request("patch", prefix, data, headers)

    async def get(self, prefix: str, headers: dict = {}) -> Any:
        return await self.make_request("get", prefix, {}, headers)

    async def delete(self, prefix: str, headers: dict = {}) -> Any:
        return await self.make_request("delete", prefix, {}, headers)

    async def close(self):
        await self.session.close()

    async def renew_session(self):
        """Tear down the current session and create a fresh one with the same timeout."""
        await self.close()
        self.session = ClientSession(timeout=ClientTimeout(total=self.timeout))
async def _request(
    self, method: str, url: str, bucket: str, data: Optional[str] = None, caller=None, **kwargs
) -> Union[int, Balance, UnbGuild, Dict[str, Union[List[Balance], str]], List[Balance]]:
    """
    Processes requests to the Unbelivaboat's API

    Args:
        method (str): 'PUT', 'PATCH' or 'GET'
        url (str): request url
        data (json str): server data
        kwargs:
            guild_id: id (int) of the guild
            page: int number of page in the case of get_leaderboard
    """
    cs = ClientSession(headers=self._header)
    if caller is None:
        caller = self._get_caller()
    guild_id = kwargs.pop('guild_id', None)
    page = kwargs.pop('page', None)

    # Build the request coroutine for the requested verb; anything
    # other than PUT/PATCH defaults to GET.
    if method == 'PUT':
        request_manager = cs.put(url=url, data=data)
    elif method == 'PATCH':
        request_manager = cs.patch(url=url, data=data)
    else:
        request_manager = cs.get(url=url)

    bucket_handler: BucketHandler = self._get_bucket_handler(bucket)
    bucket_handler.prevent_429 = self._prevent_rate_limits
    async with self.rate_limits.global_limiter:
        async with bucket_handler as bh:
            async with cs:
                r = await request_manager
                # Refresh the bucket's rate-limit state from the response headers.
                bh.check_limit_headers(r)
                response_data = await r.json()
                try:
                    if await self._check_response(response=r, bucket=bucket):
                        if caller in ('edit_balance', 'set_balance', 'get_balance'):
                            return _process_bal(response_data, guild_id, bucket)
                        elif caller == 'get_leaderboard':
                            if page is None:
                                return _process_lb(response_data, guild_id, bucket)
                            response_data['users'] = _process_lb(
                                response_data['users'], guild_id, bucket)
                            return response_data
                        elif caller == 'get_permissions':
                            return response_data['permissions']
                        elif caller == 'get_guild':
                            response_data['bucket'] = bucket
                            return UnbGuild(**response_data)
                except TooManyRequests as E:
                    if self._retry_rate_limits is True:
                        # Sleep out the advertised cooldown (+1s margin),
                        # then reschedule the same request.
                        timeout = response_data['retry_after'] / 1000 + 1
                        await asyncio.sleep(timeout)
                        return await self._request(
                            method, url, bucket, data, caller=caller, **kwargs)
                    else:
                        raise E
async def update_repo_name(client: aiohttp.ClientSession, name: str):
    """Rename the test repository via the GitHub API; return (status, response JSON)."""
    url = "https://api.github.com/repos/Officeyutong/testrepo"
    async with client.patch(url, json={"name": name}) as resp:
        resp: aiohttp.client.ClientResponse
        return resp.status, await resp.json()
async def patch(session: ClientSession) -> None:
    """PATCH the new state to the device, raising on any HTTP error status.

    Relies on closure variables (`self`, `path`, `newState`) from the
    enclosing scope.
    """
    url = f"http://{self._host}{path}"
    async with session.patch(url, **self._api_kwargs, json=newState) as response:
        response.raise_for_status()
async def _patch(cls, session: aiohttp.ClientSession, url_fragment: str, data: dict, **kwargs: Any) -> Any:
    """PATCH *data* as JSON to the REST endpoint; return (status, response JSON)."""
    endpoint = f"{Settings.rest_url}/{url_fragment}"
    async with session.patch(endpoint, json=data, **kwargs) as resp:
        return resp.status, await resp.json()