def __init__(self, url: str, secret: str, initial_fetch: bool = True,
             timeout: Union[int, tuple, None] = None):
    """
    :param url: The url to reach your OpenVidu Server instance. Typically something like https://localhost:4443/
    :param secret: Secret for your OpenVidu Server
    :param initial_fetch: Enable the initial fetching on object creation. Defaults to `True`. If set to `False`, `fetch()` must be called before doing anything with the object. In most scenarios you won't need to change this.
    :param timeout: Set a timeout for all requests to the OpenVidu server. Default: None = no timeout. See https://2.python-requests.org/en/latest/user/advanced/#timeouts for possible values.
    """
    self._session = BaseUrlSession(base_url=url)
    self._session.auth = HTTPBasicAuth('OPENVIDUAPP', secret)
    self._session.headers.update(
        {'User-Agent': user_agent('PyOpenVidu', __version__)})
    self._session.request = partial(self._session.request, timeout=timeout)

    self._lock = RLock()
    self._openvidu_sessions = {}  # id:object

    if initial_fetch:
        self.fetch()  # initial fetch
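# A minimal sketch of the default-timeout idiom above, relying on the fact
# that requests routes every verb helper (get, post, ...) through
# Session.request: wrapping request() with functools.partial therefore
# applies the timeout to every call. The base URL is a placeholder.
from functools import partial

from requests_toolbelt.sessions import BaseUrlSession

session = BaseUrlSession(base_url='https://localhost:4443/')
session.request = partial(session.request, timeout=(3.05, 27))  # (connect, read)
response = session.get('config')  # inherits the default timeout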
class CredentialedGirderTask(Task):
    """
    Provide a task with a requests session via self.session; this is the
    default task.

    This base task should always be used in conjunction with setting
    bind=True in order to access the session.
    """

    def __call__(self, *args, **kwargs):
        """
        Create a token and configure a requests session object with it.

        The child class overrides run, so __call__ must be used to hook in
        before a task is executed.
        """
        # TODO: Revoke token in post task signal
        self.token = Token().createToken(
            user=getAdminUser(), days=1,
            scope=[TokenScope.DATA_READ, TokenScope.DATA_WRITE])
        self.session = BaseUrlSession(settings.ISIC_API_URL)
        self.session.headers.update({'Girder-Token': str(self.token['_id'])})
        # Note: urllib3 >= 1.26 renames method_whitelist to allowed_methods.
        retry = Retry(total=10, read=10, connect=10, backoff_factor=.2,
                      method_whitelist=False,
                      status_forcelist=[500, 502, 503, 504])
        adapter = HTTPAdapter(max_retries=retry)
        self.session.mount('http://', adapter)
        self.session.mount('https://', adapter)

        # super(CredentialedGirderTask, self).__call__(*args, **kwargs)
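# Hedged variant of the retry setup above for urllib3 >= 1.26, where
# method_whitelist was renamed allowed_methods (None means retry on all
# HTTP methods). The base URL is a placeholder.
from requests.adapters import HTTPAdapter
from requests_toolbelt.sessions import BaseUrlSession
from urllib3.util.retry import Retry

session = BaseUrlSession('https://example.invalid/api/v1/')
retry = Retry(total=10, read=10, connect=10, backoff_factor=.2,
              allowed_methods=None,
              status_forcelist=[500, 502, 503, 504])
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)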
def session(self) -> BaseUrlSession:
    session = BaseUrlSession(
        base_url=(url if (url := self.context.config.get(
            "resource.ampel-ztf/archive", str, raise_exc=True)
        ).endswith("/") else url + "/"))
    session.auth = BearerAuth(self.archive_token.get())
    return session
def __init__(self, api_token: str, account_id: int) -> None:
    self.account_id = account_id
    # Rather than assigning directly to `self`, this is the recommended
    # idiom so atexit.register behaves nicely with GC.
    session = BaseUrlSession(
        base_url=f'{self.api_domain}/{self.api_version}/{account_id}/')
    session.auth = (api_token, '')
    session.headers.update({"User-Agent": self.drip_py_ua,
                            "Content-Type": 'application/json'})
    register(session.close)
    self.session = session
def __init__(self, api_token: str, account_id: int) -> None:
    self.account_id = account_id
    # Rather than assigning directly to `self`, this is the recommended
    # idiom so atexit.register behaves nicely with GC.
    session = BaseUrlSession(
        base_url=f"{self.api_domain}/{self.api_version}/{account_id}/")
    session.auth = (api_token, "")
    session.headers.update({"User-Agent": self.drip_py_ua})
    session.mount(self.api_domain, GzipAdapter())
    register(session.close)
    self.session = session
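# Minimal sketch of the atexit idiom used above: registering the *bound*
# session.close keeps the session object alive until interpreter exit, at
# which point it is closed; binding to a local first avoids registering a
# method of a half-initialized object. The base URL is a placeholder.
from atexit import register

from requests_toolbelt.sessions import BaseUrlSession

session = BaseUrlSession(base_url='https://example.invalid/v2/1/')
register(session.close)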
def __init__(self, token, *args, **kwargs):
    self.session = BaseUrlSession('https://api.everhour.com')
    self.session.headers.update({
        'X-Api-Key': token,
        'X-Accept-Version': '1.2',
        'Content-Type': 'application/json'
    })
class ZTFCutoutImages(AbsBufferComplement):
    """
    Add cutout images from ZTF archive database
    """

    #: Which detection to retrieve cutouts for
    eligible: Literal["first", "last", "brightest", "all"] = "last"

    def __init__(self, context: AmpelContext, **kwargs) -> None:
        super().__init__(**kwargs)
        self.session = BaseUrlSession(base_url=context.config.get(
            "resource.ampel-ztf/archive", str, raise_exc=True))

    @backoff.on_exception(
        backoff.expo,
        requests.ConnectionError,
        max_tries=5,
        factor=10,
    )
    @backoff.on_exception(
        backoff.expo,
        requests.HTTPError,
        giveup=lambda e: e.response.status_code not in {503, 504, 429, 408},
        max_time=60,
    )
    def get_cutout(self, candid: int) -> None | dict[str, bytes]:
        response = self.session.get(f"cutouts/{candid}")
        if response.status_code == 404:
            return None
        else:
            response.raise_for_status()
        return {k: b64decode(v) for k, v in response.json().items()}

    def complement(self, records: Iterable[AmpelBuffer], t3s: T3Store) -> None:
        for record in records:
            if (photopoints := record.get("t0")) is None:
                raise ValueError(f"{type(self).__name__} requires t0 records")
            pps = sorted(
                [pp for pp in photopoints if pp["id"] > 0],
                key=lambda pp: pp["body"]["jd"],
            )
            if not pps:
                return
            if self.eligible == "last":
                candids = [pps[-1]["id"]]
            elif self.eligible == "first":
                candids = [pps[0]["id"]]
            elif self.eligible == "brightest":
                candids = [min(pps, key=lambda pp: pp["body"]["magpsf"])["id"]]
            elif self.eligible == "all":
                candids = [pp["id"] for pp in pps]
            cutouts = {candid: self.get_cutout(candid) for candid in candids}
            if "extra" not in record or record["extra"] is None:
                record["extra"] = {self.__class__.__name__: cutouts}
            else:
                record["extra"][self.__class__.__name__] = cutouts
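# Hedged sketch of the backoff pattern above: on_exception retries the
# wrapped call with exponential waits, and giveup short-circuits retries
# for HTTP errors that are not transient. fetch() and its URL argument
# are illustrative, not part of the snippet's project.
import backoff
import requests

@backoff.on_exception(
    backoff.expo,
    requests.HTTPError,
    giveup=lambda e: e.response.status_code not in {503, 504, 429, 408},
    max_time=60,
)
def fetch(url: str) -> dict:
    response = requests.get(url, timeout=10)
    response.raise_for_status()  # 4xx/5xx -> HTTPError, possibly retried
    return response.json()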
def mbta_session() -> BaseUrlSession:
    """Return a requests.Session object for accessing the MBTA API."""
    cfg = config()
    session = BaseUrlSession(cfg.api_root)
    if cfg.api_key:
        session.headers.update({"x-api-key": cfg.api_key})
    return session
class TelegramBot:
    """Telegram Bot API wrapper

    Examples:
        >>> TelegramBot('YOuRAwEsomeBOtToKen', '@myprettydebugchat').send_message('Hi, darling')
        >>> TelegramBot('YOuRAwEsomeBOtToKen').send_message('Hi, darling', chat_id=-1762374628374)
    """
    API_ENDPOINT = 'https://api.telegram.org/bot{}/'

    def __init__(self, tg_bot_token: str, chat_id: Union[int, str, None] = None):
        self._base_url = TelegramBot.API_ENDPOINT.format(tg_bot_token)
        self.session = BaseUrlSession(self._base_url)
        self.chat_id = chat_id

    def send_message(self, message: str, chat_id: Union[int, str, None] = None):
        method = 'sendMessage'
        payload = {
            'chat_id': chat_id or self.chat_id,
            'text': message,
            'parse_mode': 'MarkdownV2'
        }
        response = self.session.post(method, data=payload).json()
        if not response.get('ok'):
            raise TelegramBotException(response)
def session(self) -> Session:
    """The session to use for requests.

    :return: The session to use for requests
    :rtype: Session
    """
    if not hasattr(self, "_session"):
        self._session = BaseUrlSession(self.host)
    return self._session
def register(
    cls,
    email: str,
    password: str,
    host: str = constants.DEFAULT_HOST,
    cost: int = 60000,
    *args,
    **kwargs,
) -> T_User:
    """Registers a new user in the Standard File server.

    :param email: The email to register
    :type email: str
    :param password: The password to register
    :type password: str
    :param host: The host to register to, defaults to constants.DEFAULT_HOST
    :type host: str, optional
    :param cost: The password iteration cost, defaults to 60000
    :type cost: int, optional
    :return: A new user instance
    :rtype: T_User
    """
    session = BaseUrlSession(host)
    salt = hashlib.sha1(
        f"{email}:{secrets.token_hex(128 // 8 // 2)}".encode()
    ).hexdigest()
    auth_keys = cls._build_keys(password, salt, cost)
    response = session.post(
        constants.ENDPOINTS["register"],
        params={
            "email": email,
            "password": auth_keys.password_key,
            "pw_cost": cost,
            "pw_salt": salt,
        },
    )
    result = ujson.loads(response.text)
    if response.status_code != 200:
        cls._handle_error(result.get("error"))
    user = cls(email, host, *args, **kwargs)
    user.auth_keys = auth_keys
    user.uuid = result["user"]["uuid"]
    user.session.headers.update({"Authorization": f"Bearer {result['token']}"})
    user._authenticated = True
    return user
class USBSneaky:
    """
    Detects plugged-in USBs, gets their info and sends it to a WorkbenchServer.

    USBSneaky constantly sends the info about the USB that is plugged in and
    notifies when it has been removed. If USBSneaky doesn't update for some
    time, WorkbenchServer interprets the silence as the computer being off
    and then unplugs the USB. This is done because the computer can die or
    be shut down at any moment.

    USBSneaky is meant to be executed as a worker in a single process.
    """

    def __init__(self, uuid: UUID, workbench_server: str):
        self.uuid = str(uuid)
        self.session = BaseUrlSession(base_url=workbench_server)
        self.session.verify = False
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        while True:
            sleep(1)
            try:
                # We keep sending this so workbench server will
                # notice our silence if we die and remove the USB
                # from its list of plugged in USBs
                pen = plugged_usbs(multiple=False)
            except NoUSBFound:
                with suppress(NameError):
                    # Pen was defined, therefore we had a pen before
                    self.send_unplug(pen['hid'])
                    # We remove it so we are not sending it all the time
                    del pen
            else:
                # We have found a USB
                pen['_uuid'] = self.uuid
                self.send_plug(pen)
                sleep(2.25)  # Don't stress Workbench Server

    def send_plug(self, pen: dict):
        self.session.post('/usbs/plugged/{}'.format(pen['hid']), json=pen)

    def send_unplug(self, hid: str):
        self.session.delete('/usbs/plugged/{}'.format(hid))
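# Minimal sketch of the suppress(NameError) trick in USBSneaky: if `pen`
# was never bound (no USB has been seen yet), referencing it raises
# NameError, which suppress swallows, so the unplug notification is only
# sent when a pen actually existed before.
from contextlib import suppress

with suppress(NameError):
    pen  # noqa: F821 -- NameError here is silently ignored
print('still running')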
def __init__(self, config: typing.Dict[str, typing.Any]) -> None:
    super().__init__()
    proxy = config.get("PROXY_HOST") or None

    # Header Settings
    self.session = BaseUrlSession("http://openapi.nsdi.go.kr/")
    self.session.headers.update({"User-Agent": USER_AGENT})
    if proxy:
        apply_proxy(self.session, proxy)

    self.retryer = Retryer(
        strategy_factory=(
            ExponentialModulusBackoffStrategy.create_factory(2, 10)
        ),
        should_retry=lambda e: isinstance(
            e, (requests.exceptions.ConnectionError,)
        ),
        default_max_trials=3,
    )
def http_client(api_url: str, token: str, version: int = 0):
    if not api_url.endswith("/"):
        api_url = api_url + "/"
    api_url = api_url + f"api/{version}/"
    s = BaseUrlSession(api_url)
    s.headers.update({"Authorization": f"Bearer {token}"})
    s.headers.update({"content-type": "application/json"})
    return s
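# Hedged sketch of why helpers like the one above force a trailing slash:
# BaseUrlSession joins base_url and the request path with
# urllib.parse.urljoin, which drops the last path segment of a base URL
# that lacks a trailing slash. The host names are placeholders.
from urllib.parse import urljoin

assert urljoin('https://host/api/0/', 'users') == 'https://host/api/0/users'
assert urljoin('https://host/api/0', 'users') == 'https://host/api/users'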
def test_connection_keepalive(simple_wsgi_server):
    """Test the connection keepalive works (duh)."""
    session = Session(base_url=simple_wsgi_server['url'])
    pooled = requests.adapters.HTTPAdapter(
        pool_connections=1, pool_maxsize=1000,
    )
    session.mount('http://', pooled)

    def do_request():
        with ExceptionTrap(requests.exceptions.ConnectionError) as trap:
            resp = session.get('info')
            resp.raise_for_status()
        return bool(trap)

    with ThreadPoolExecutor(max_workers=50) as pool:
        tasks = [
            pool.submit(do_request)
            for n in range(1000)
        ]
        failures = sum(task.result() for task in tasks)

    assert not failures
def __init__(self, url_base: str, token: str, org_id: str = None):
    # Append /api/v1 to the url_base
    url_base = url_base.rstrip("/") + "/api/v1/"
    logger = getLogger()
    session = BaseUrlSession(url_base)
    # hook on responses, raise error when response is not successful
    session.hooks = {
        "response": lambda r, *args, **kwargs: raise_response_error(r)
    }
    session.headers.update({
        "Authorization": "Bearer %s" % token,
        # set a default Content-Type header, can be overridden by requests.
        "Content-Type": "application/json",
    })

    # if there is an organization id passed, set it in the header
    if org_id:
        logger.info("found organization id: %s" % org_id)
        session.headers.update({"X-Axiom-Org-Id": org_id})

    self.datasets = DatasetsClient(session, logger)
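# Minimal sketch of the response-hook pattern above: callables registered
# under the "response" hook run after every completed request, so a single
# raise_for_status() centralizes HTTP error handling. The base URL is a
# placeholder.
from requests_toolbelt.sessions import BaseUrlSession

session = BaseUrlSession('https://example.invalid/api/v1/')
session.hooks['response'] = [
    lambda r, *args, **kwargs: r.raise_for_status()
]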
class Client:
    """
    cvr.dev API client.
    Contains a dedicated client for the /cvr/ part of the API.

    See https://docs.cvr.dev/#ra-cvr-data for more info.
    """

    def __init__(self, api_key):
        self._session = BaseUrlSession(_BASE_URL)
        self._session.headers.update({'Authorization': api_key})

        self.cvr = CVRClient(self._session)

    def test_api_key(self):
        resp = self._session.get('test/apikey')
        handle_response_status_code(resp)

    def close(self):
        self._session.close()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
def await_tasks_finished(api_client: BaseUrlSession, tasks: List[Dict]):
    tasks_set: Set[int] = {t["id"] for t in tasks}
    sleep_time = 0.1
    while tasks_set:
        sleep_time *= 2
        time.sleep(sleep_time)
        for task_id in list(tasks_set):
            r = api_client.get(f"uploads/{task_id}/")
            raise_for_status(r)

            if r.json()["status"] == "FAILED":
                errors = r.json()["error_messages"]
                raise Exception(
                    f"Upload with Task ID {task_id} failed with errors: {errors}"
                )

            if r.json()["status"] == "FINISHED":
                tasks_set.remove(task_id)
def __init__(self, *, client_delay: typing.Optional[str] = None,
             proxy: typing.Optional[str] = None) -> None:
    super().__init__()
    self.client_delay = client_delay

    # Header Settings
    self.session = BaseUrlSession("https://www.taein.co.kr/")
    self.session.headers.update({"User-Agent": USER_AGENT})
    if proxy:
        apply_proxy(self.session, proxy)

    self.retryer = Retryer(
        strategy_factory=(
            ExponentialModulusBackoffStrategy.create_factory(2, 10)
        ),
        should_retry=lambda e: isinstance(
            e, (requests.exceptions.ConnectionError,)
        ),
        default_max_trials=3,
    )
def session():
    s = BaseUrlSession('http://girder:8080/api/v1/')
    r = s.get('user/authentication', auth=('admin', 'password'))
    r.raise_for_status()
    s.headers.update({'Girder-Token': r.json()['authToken']['token']})
    yield s
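# Hedged sketch of how a test might consume the fixture above; GET user/me
# is a standard Girder endpoint, but the test name and assertion field are
# illustrative.
def test_current_user(session):
    r = session.get('user/me')
    r.raise_for_status()
    assert r.json()['login'] == 'admin'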
class MyTurnCA:
    """Main API class"""

    def __init__(self, api_key: str):
        self.logger = logging.getLogger(__name__)
        self.session = BaseUrlSession(base_url=MY_TURN_URL)
        self.session.mount('https://', HTTPAdapter(max_retries=DEFAULT_RETRY_STRATEGY))
        self.session.headers.update({**REQUEST_HEADERS, GOOD_BOT_HEADER: api_key})
        self.vaccine_data = self._get_vaccine_data()

    def _get_vaccine_data(self) -> str:
        """Retrieve initial vaccine data"""
        response = self._send_request(url=ELIGIBILITY_URL, body=ELIGIBLE_REQUEST_BODY).json()
        if response['eligible'] is False:
            raise RuntimeError('something is wrong, default /eligibility body returned \'eligible\' = False')

        return response['vaccineData']

    def get_locations(self, latitude: float, longitude: float) -> List[Location]:
        """Gets available locations near the given coordinates"""
        body = {
            'location': {
                'lat': latitude,
                'lng': longitude
            },
            'fromDate': datetime.now(tz=pytz.timezone('US/Pacific')).strftime('%Y-%m-%d'),
            'vaccineData': self.vaccine_data,
            'locationQuery': {
                'includePools': LOCATION_POOLS,
                'excludeTags': [],
                'includeTags': []
            }
        }
        response = self._send_request(url=LOCATIONS_URL, body=body)
        try:
            return [Location(location_id=x['extId'],
                             name=x['name'],
                             address=x['displayAddress'],
                             booking_type=x['type'],
                             vaccine_data=x['vaccineData'],
                             distance=x['distanceInMeters'])
                    for x in response.json()['locations']]
        except json.JSONDecodeError:
            self.logger.error(JSON_DECODE_ERROR_MSG.format(body=response.text))
            return []

    def get_availability(self, location: Location, start_date: date,
                         end_date: date) -> LocationAvailability:
        """Gets a given vaccination location's availability"""
        body = {
            'startDate': start_date.strftime('%Y-%m-%d'),
            'endDate': end_date.strftime('%Y-%m-%d'),
            'vaccineData': location.vaccine_data,
            'doseNumber': 1
        }
        response = self._send_request(
            url=LOCATION_AVAILABILITY_URL.format(location_id=location.location_id),
            body=body)
        try:
            return LocationAvailability(
                location=location,
                dates_available=[datetime.strptime(x['date'], '%Y-%m-%d').date()
                                 for x in response.json()['availability']
                                 if x['available'] is True])
        except json.JSONDecodeError:
            self.logger.error(JSON_DECODE_ERROR_MSG.format(body=response.text))
            return LocationAvailability(location=location, dates_available=[])

    def get_slots(self, location: Location, start_date: date) -> LocationAvailabilitySlots:
        """Gets a given location's available appointments"""
        body = {
            'vaccineData': location.vaccine_data
        }
        response = self._send_request(
            url=LOCATION_AVAILABILITY_SLOTS_URL.format(
                location_id=location.location_id,
                start_date=start_date.strftime('%Y-%m-%d')),
            body=body)
        try:
            return LocationAvailabilitySlots(
                location=location,
                slots=[self._combine_date_and_time(start_date, x['localStartTime'])
                       for x in response.json()['slotsWithAvailability']
                       if self._combine_date_and_time(start_date, x['localStartTime'])
                       > datetime.now(tz=pytz.timezone('US/Pacific'))])
        except json.JSONDecodeError:
            self.logger.error(JSON_DECODE_ERROR_MSG.format(body=response.text))
            return LocationAvailabilitySlots(location=location, slots=[])

    def get_appointments(self, latitude: float, longitude: float, start_date: date,
                         end_date: date) -> List[LocationAvailabilitySlots]:
        """Retrieves available appointments from all vaccination locations near the given coordinates"""
        locations = self.get_locations(latitude=latitude, longitude=longitude)
        if not locations:
            return []

        if start_date > end_date:
            raise ValueError('Provided start_date must be before end_date')

        appointments = []
        for location in locations:
            days_available = self.get_availability(location=location,
                                                   start_date=start_date,
                                                   end_date=end_date).dates_available
            if not days_available:
                continue

            location_appointments = [location_appointment for location_appointment
                                     in [self.get_slots(location=location, start_date=day_available)
                                         for day_available in days_available]
                                     if location_appointment.slots]
            if not location_appointments:
                continue

            # combines appointments on different days for the same location
            appointments.append(LocationAvailabilitySlots(
                location=location_appointments[0].location,
                slots=functools.reduce(operator.add,
                                       [location_appointment.slots
                                        for location_appointment in location_appointments])))

        return appointments

    @staticmethod
    def _combine_date_and_time(start_date: date, timestamp: str) -> datetime:
        """Private helper function to combine a date and timestamp"""
        return datetime.combine(start_date,
                                datetime.strptime(timestamp, '%H:%M:%S').time(),
                                tzinfo=pytz.timezone('US/Pacific'))

    def _send_request(self, url: str, body: dict) -> Response:
        """Private helper function to make HTTP POST requests"""
        self.logger.info(f'sending request to {MY_TURN_URL}{url} with body - {body}')
        response = self.session.post(url=url, json=body)
        self.logger.info(f'got response from /{url} - {response.__dict__}')
        return response
class OpenVidu(object):
    """
    This object represents an OpenVidu server instance.
    """

    def __init__(self, url: str, secret: str, initial_fetch: bool = True,
                 timeout: Union[int, tuple, None] = None):
        """
        :param url: The url to reach your OpenVidu Server instance. Typically something like https://localhost:4443/
        :param secret: Secret for your OpenVidu Server
        :param initial_fetch: Enable the initial fetching on object creation. Defaults to `True`. If set to `False`, `fetch()` must be called before doing anything with the object. In most scenarios you won't need to change this.
        :param timeout: Set a timeout for all requests to the OpenVidu server. Default: None = no timeout. See https://2.python-requests.org/en/latest/user/advanced/#timeouts for possible values.
        """
        self._session = BaseUrlSession(base_url=url)
        self._session.auth = HTTPBasicAuth('OPENVIDUAPP', secret)
        self._session.headers.update(
            {'User-Agent': user_agent('PyOpenVidu', __version__)})
        self._session.request = partial(self._session.request, timeout=timeout)

        self._openvidu_sessions = {}  # id:object

        self._last_fetch_result = {}  # Used only to calculate the return value of the fetch() call

        if initial_fetch:
            self.fetch()  # initial fetch

    def fetch(self) -> bool:
        """
        Updates every property of every active Session with the current status they have in OpenVidu Server.
        After calling this method you can access the updated list of active sessions through the `sessions` property.

        :return: True if the Session status has changed with respect to the server, False if not. This applies to any property or sub-property of the object.
        """
        r = self._session.get("sessions")
        r.raise_for_status()
        new_data = r.json()['content']

        data_changed = new_data != self._last_fetch_result
        self._last_fetch_result = new_data

        if data_changed:
            self._openvidu_sessions = {}

            # update, create valid streams
            for session_data in new_data:
                session_id = session_data['id']
                self._openvidu_sessions[session_id] = OpenViduSession(
                    self._session, session_data)

        return data_changed

    def get_session(self, session_id: str) -> OpenViduSession:
        """
        Get a currently active session from the server.

        :param session_id: The ID of the session to acquire.
        :return: An OpenViduSession object.
        """
        if session_id not in self._openvidu_sessions:
            raise OpenViduSessionDoesNotExistsError()

        session = self._openvidu_sessions[session_id]

        if not session.is_valid:
            raise OpenViduSessionDoesNotExistsError()

        return session

    def create_session(self, custom_session_id: str = None,
                       media_mode: str = None) -> OpenViduSession:
        """
        Creates a new OpenVidu session.

        This method does not need to call fetch(): as of OpenVidu 2.16.0 the server returns the created session object directly.
        https://docs.openvidu.io/en/2.16.0/reference-docs/REST-API/#post-openviduapisessions

        :param custom_session_id: You can fix the sessionId that will be assigned to the session with this parameter.
        :param media_mode: ROUTED (default) or RELAYED
        :return: The created OpenViduSession instance.
        """
        # Prepare parameters
        if media_mode not in ['ROUTED', 'RELAYED', None]:
            raise ValueError(
                f"media_mode must be one of ROUTED or RELAYED, not {media_mode}"
            )

        parameters = {
            "mediaMode": media_mode,
            "customSessionId": custom_session_id
        }
        parameters = {k: v for k, v in parameters.items() if v is not None}

        # send request
        r = self._session.post('sessions', json=parameters)

        if r.status_code == 409:
            raise OpenViduSessionExistsError()
        elif r.status_code == 400:
            raise ValueError()

        r.raise_for_status()

        # As of OpenVidu 2.16.0 the server returns the created session object
        new_session = OpenViduSession(self._session, r.json())
        self._openvidu_sessions[new_session.id] = new_session

        return new_session

    @property
    def sessions(self) -> List[OpenViduSession]:
        """
        Get a list of currently active sessions on the server.

        :return: A list of OpenViduSession objects.
        """
        return [
            sess for sess in self._openvidu_sessions.values() if sess.is_valid
        ]

    @property
    def session_count(self) -> int:
        """
        Get the number of active sessions on the server.

        :return: The number of active sessions.
        """
        return len(self.sessions)

    def get_config(self) -> dict:
        """
        Get the OpenVidu active configuration.

        Unlike session-related calls, this call does not require a prior call to the fetch() method. Using this function will always result in an API call to the backend.

        https://docs.openvidu.io/en/2.16.0/reference-docs/REST-API/#get-openviduapiconfig

        :return: The exact response from the server as a dict.
        """
        # Note: Since 2.16.0 this endpoint moved from the top level to under /api
        # https://docs.openvidu.io/en/2.16.0/reference-docs/REST-API/#get-openviduapiconfig
        r = self._session.get('config')
        r.raise_for_status()

        return r.json()
def main():
    if len(sys.argv) < 5:
        print(
            "usage: multinet.py <instance-url> <workspace> <api-token> <volume>",
            file=sys.stderr
        )
        return 1

    # Extract args
    _, base_url, workspace, api_token, volume = sys.argv

    # Inject auth token into every request
    api_client = BaseUrlSession(base_url=base_url)
    api_client.headers.update({"Authorization": f"Bearer {api_token}"})

    print("Uploading files...")

    # Upload all files to S3
    s3ff_client = S3FileFieldClient("/api/s3-upload/", api_client)

    # Upload nodes.csv
    with open("artifacts/nodes.csv", "rb") as file_stream:
        nodes_field_value = s3ff_client.upload_file(
            file_stream, "nodes.csv", "api.Upload.blob"
        )["field_value"]

    # Upload links.csv
    with open("artifacts/links.csv", "rb") as file_stream:
        links_field_value = s3ff_client.upload_file(
            file_stream, "links.csv", "api.Upload.blob"
        )["field_value"]

    # Update base url, since only workspace endpoints are needed now
    api_client.base_url = f"{base_url}/api/workspaces/{workspace}/"

    # Get names of all networks and tables
    networks = [x["name"] for x in api_client.get("networks/").json().get("results")]
    tables = [x["name"] for x in api_client.get("tables/").json().get("results")]

    # Filter names to ones we want to remove (like the volume)
    networks = list(filter(lambda x: volume in x, networks))
    tables = list(filter(lambda x: volume in x, tables))

    # Delete network and tables if they exist
    for network in networks:
        api_client.delete(f"networks/{network}/")
    for table in tables:
        api_client.delete(f"tables/{table}/")

    # Generate new network and table names
    NODE_TABLE_NAME = f"{volume}_nodes"
    EDGE_TABLE_NAME = f"{volume}_links"
    NETWORK_NAME = f"{volume}_{datetime.now(pytz.timezone('America/Denver')).strftime('%Y-%m-%d_%H-%M')}"

    # Create nodes table
    r = api_client.post(
        "uploads/csv/",
        json={
            "field_value": nodes_field_value,
            "edge": False,
            "table_name": NODE_TABLE_NAME,
            "columns": {
                "TypeID": "category",
                "Verified": "boolean",
                "Confidence": "number",
                "ParentID": "category",
                "Created": "date",
                "LastModified": "date",
                "TypeLabel": "category",
                "Volume (nm^3)": "number",
                "MaxDimension": "number",
                "MinZ": "number",
                "MaxZ": "number",
            },
        },
    )
    raise_for_status(r)
    nodes_upload = r.json()

    # Create links table
    r = api_client.post(
        "uploads/csv/",
        json={
            "field_value": links_field_value,
            "edge": True,
            "table_name": EDGE_TABLE_NAME,
            "columns": {
                "TotalChildren": "number",
                "LastModified": "date",
                "Bidirectional": "boolean",
                "Type": "category",
                "TotalSourceArea(nm^2)": "number",
                "TotalTargetArea(nm^2)": "number",
            },
        },
    )
    raise_for_status(r)
    links_upload = r.json()

    print("Processing files...")

    # Wait for nodes and links tables to be created
    await_tasks_finished(api_client, [nodes_upload, links_upload])

    # Create network
    raise_for_status(
        api_client.post(
            "networks/",
            json={"name": NETWORK_NAME, "edge_table": EDGE_TABLE_NAME},
        )
    )

    print("Network created.")
    print("Synchronization finished.")
class RCTFAdminV1:
    session: requests.Session

    def __init__(self, endpoint: str, login_token: Optional[str]):
        self.session = BaseUrlSession(urljoin(endpoint, "api/v1/admin/"))

        if login_token is not None:
            login_resp = requests.post(
                urljoin(endpoint, "api/v1/auth/login"),
                json={"teamToken": login_token}
            ).json()
            if login_resp["kind"] == "goodLogin":
                auth_token = login_resp["data"]["authToken"]
                self.session.headers["Authorization"] = f"Bearer {auth_token}"
            else:
                raise ValueError(
                    f"Invalid login_token provided (reason: {login_resp['kind']})"
                )

    @staticmethod
    def assertResponseKind(response: Any, kind: str) -> None:
        if response["kind"] != kind:
            raise RuntimeError(f"Server error: {response['kind']}")

    def list_challenges(self) -> List[Dict[str, Any]]:
        r = self.session.get("challs").json()
        self.assertResponseKind(r, "goodChallenges")
        return r["data"]

    def put_challenge(self, chall_id: str, data: Dict[str, Any]) -> None:
        r = self.session.put("challs/" + quote(chall_id), json={"data": data}).json()
        self.assertResponseKind(r, "goodChallengeUpdate")

    def delete_challenge(self, chall_id: str) -> None:
        r = self.session.delete("challs/" + quote(chall_id)).json()
        self.assertResponseKind(r, "goodChallengeDelete")

    def create_upload(self, uploads: Dict[str, bytes]) -> Dict[str, str]:
        """
        :param uploads: uploads {name: data}
        :return: urls {name: url}
        """
        if len(uploads) == 0:
            return {}

        payload = [
            {"name": name, "data": "data:;base64," + b64encode(data).decode()}
            for name, data in uploads.items()
        ]
        r = self.session.post("upload", json={"files": payload}).json()
        self.assertResponseKind(r, "goodFilesUpload")
        return {f["name"]: f["url"] for f in r["data"]}

    def get_url_for_files(self, files: Dict[str, str]) -> Dict[str, Optional[str]]:
        """
        :param files: files to get {name: sha256}
        :return: urls {name: url}
        """
        payload = [{"name": name, "sha256": sha256} for name, sha256 in files.items()]
        r = self.session.post("upload/query", json={"uploads": payload}).json()
        self.assertResponseKind(r, "goodUploadsQuery")
        return {f["name"]: f["url"] for f in r["data"]}
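# Minimal sketch of the data-URI encoding used by create_upload above;
# the flag bytes are an illustrative input.
from base64 import b64encode

payload = 'data:;base64,' + b64encode(b'flag{example}').decode()
assert payload == 'data:;base64,ZmxhZ3tleGFtcGxlfQ=='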
class AppConnect:
    """App connection object.

    A wrapper for requests BaseUrlSession to hold Atlassian keys across
    command runs.

    Parameters
    ----------
    server: base url of app server.
    username: username for connection.
    password: password for connection.
    cookie_store: path to file for cookie_store.
    session_headers: default headers added to every call.
    """
    _server: str
    username: str
    _password: str
    session: BaseUrlSession = None
    auth: HTTPBasicAuth = None
    _response: requests.Response = None
    cookie_store: os.path = None

    def __init__(self, server: str, username: str = None, password: str = None,
                 cookie_store: os.path = None,
                 session_headers: dict = None) -> None:
        self.server = server
        self.session = BaseUrlSession(base_url=server)

        if username:
            self.username = username

        if password:
            self.password = password

        if cookie_store:
            self.cookie_store = cookie_store

        if username and password:
            self.auth = HTTPBasicAuth(self.username, self.password)

        if session_headers:
            self.session.headers.update(session_headers)

        self.reload_cookies()

    @property
    def server(self):
        """server baseUrl for connection."""
        return self._server

    @server.setter
    def server(self, server: str):
        self._server = server
        if self.session:
            self.session.base_url = server

    @property
    def password(self):
        """password for connection."""
        return base64.decodebytes(self._password).decode()

    @password.setter
    def password(self, password: str):
        self._password = base64.encodebytes(password.encode())

    def get(self, api, headers: dict = None, params: dict = None,
            data: dict = None, auth: bool = False, allow_redirects=True):
        """send http get request.

        Parameters
        ----------
        api: str url path appended to baseUrl.
        headers: dict of headers.
        params: dict of url query parameters.
        data: dict of data to send.
        auth: bool(False) send BasicAuth.
        allow_redirects

        Returns
        -------
        ->json
        """
        # url = urljoin(self.server, api)
        url = api
        try:
            self._response = self.session.get(url,
                                              headers=headers,
                                              params=params,
                                              data=data,
                                              auth=self.auth if auth else None,
                                              allow_redirects=allow_redirects)
            self._response.raise_for_status()
        except requests.exceptions.ConnectionError as err:
            raise SystemExit(err)
        except requests.exceptions.Timeout as err:
            raise SystemExit(err)
        except requests.exceptions.TooManyRedirects as err:
            raise SystemExit(err)
        except requests.exceptions.HTTPError as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def delete(self, api, headers: dict = None, params=None, auth: bool = False):
        """send http delete request.

        Parameters
        ----------
        api: str url path appended to baseUrl.
        headers: dict of headers.
        params: dict of url query parameters.
        auth: bool(False) send BasicAuth.

        Returns
        -------
        ->json
        """
        url = api
        try:
            self._response = self.session.delete(
                url, headers=headers, params=params,
                auth=self.auth if auth else None)
            self._response.raise_for_status()
        except requests.exceptions.ConnectionError as err:
            raise SystemExit(err)
        except requests.exceptions.Timeout as err:
            raise SystemExit(err)
        except requests.exceptions.TooManyRedirects as err:
            raise SystemExit(err)
        except requests.exceptions.HTTPError as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def post(self, api: str, headers: dict = None, params: dict = None,
             data: dict = None, auth: bool = False,
             allow_redirects: bool = True):
        """send http post request.

        Parameters
        ----------
        api: str url path appended to baseUrl.
        headers: dict of headers.
        params: dict of url query parameters.
        data: dict of data to send.
        auth: bool(False) send BasicAuth.
        allow_redirects

        Returns
        -------
        ->json
        """
        # url = urljoin(self.server, api)
        url = api
        try:
            self._response = self.session.post(
                url, headers=headers, params=params, data=data,
                auth=self.auth if auth else None,
                allow_redirects=allow_redirects)
            # self._response.raise_for_status()
        except requests.exceptions.ConnectionError as err:
            raise SystemExit(err)
        except requests.exceptions.Timeout as err:
            raise SystemExit(err)
        except requests.exceptions.TooManyRedirects as err:
            raise SystemExit(err)
        # except requests.exceptions.HTTPError as err:
        #     raise SystemExit(err)

        return self.json_response(self._response)

    def put(self, api: str, headers: dict = None, params: dict = None,
            data: dict = None, auth: bool = False):
        """send http put request.

        Parameters
        ----------
        api: str url path appended to baseUrl.
        headers: dict of headers.
        params: dict of url query parameters.
        data: dict of data to send.
        auth: bool(False) send BasicAuth.

        Returns
        -------
        ->json
        """
        url = api
        try:
            self._response = self.session.put(url,
                                              headers=headers,
                                              params=params,
                                              data=data,
                                              auth=self.auth if auth else None)
            self._response.raise_for_status()
        except requests.exceptions.ConnectionError as err:
            raise SystemExit(err)
        except requests.exceptions.Timeout as err:
            raise SystemExit(err)
        except requests.exceptions.TooManyRedirects as err:
            raise SystemExit(err)
        except requests.exceptions.HTTPError as err:
            raise SystemExit(err)

        return self.json_response(self._response)

    def json_response(self, res: requests.Response):
        """Always return a json response.

        Parameters
        ----------
        res: requests response.

        Returns
        -------
        ->json
        """
        _json = None

        if res.ok:
            if res.cookies:
                self.session.cookies.update(res.cookies)
                self.cache_cookies()
            try:
                _json = res.json()
            except JSONDecodeError:
                pass  # not JSON; fall through and synthesize a response below

        if not _json:
            if res.ok:
                _json = json.dumps({
                    'success': self._response.ok,
                    'status code': self._response.status_code,
                    'elapsed seconds': self._response.elapsed.seconds
                })
            else:
                _json = json.dumps({
                    'ok': self._response.ok,
                    'status_code': self._response.status_code,
                    'reason': self._response.text,
                    'request-url': self._response.request.url,
                    'request-method': self._response.request.method,
                    'text': self._response.text,
                    'redirect': self._response.is_redirect,
                    'elapsed': self._response.elapsed.seconds
                })

        return _json

    def update_cookies(self, cookies: dict = None):
        """add cookie(s) to cookie jar.

        Parameters
        ----------
        cookies
        """
        self.session.cookies.update(cookies)
        self.cache_cookies()

    def cache_cookies(self):
        """cache cookies to file."""
        if self.session.cookies:
            with open(self.cookie_store, 'wb') as f:
                pickle.dump(self.session.cookies, f)

    def reload_cookies(self):
        """reload cookies from file."""
        if os.path.isfile(self.cookie_store):
            with open(self.cookie_store, 'rb') as f:
                self.session.cookies.update(pickle.load(f))
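# Hedged sketch of the cookie persistence used by AppConnect: a requests
# cookie jar pickles cleanly, so it can be written to disk after a run and
# restored on the next one. The file path is illustrative.
import pickle

import requests

session = requests.Session()
with open('/tmp/cookies.pkl', 'wb') as f:
    pickle.dump(session.cookies, f)
with open('/tmp/cookies.pkl', 'rb') as f:
    session.cookies.update(pickle.load(f))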