def delete_datacatalog_object(object_name):
    """Delete the Data Catalog object definition named *object_name*.

    Issues a DELETE against the datacatalog v1 object endpoint, logs the
    response body, and returns the Response for the caller to inspect.
    """
    endpoint = "/IONSERVICES/datacatalog/v1/object/{}".format(object_name)
    res = requests.delete(inforlogin.base_url() + endpoint,
                          headers=inforlogin.header())
    log.info("datacatalog delete: {}".format(res.content))
    return res
async def get_v1_payloads_list(session, filter=None, sort=None, page=None,
                               records=None, retries=3):
    """ List data object properties using a filter. """
    # Async variant; on HTTP 401 it re-authenticates and retries itself,
    # decrementing *retries* until it runs out, then returns the JSON body.
    url = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/list"
    headers = inforlogin.header()
    # Only forward query parameters that were actually supplied.
    params = {
        key: value
        for key, value in (
            ("filter", filter),
            ("sort", sort),
            ("page", page),
            ("records", records),
        )
        if value is not None
    }
    async with session.get(url, headers=headers, params=params) as resp:
        if resp.status == 401 and retries > 0:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_list(session, filter, sort, page,
                                              records, retries - 1)
        return await resp.json()
def post_messaging_v2_multipart_message(parameter_request, message_payload):
    """Send a multipart message to the ION Messaging v2 service.

    The request carries two parts: ``ParameterRequest`` (the JSON-encoded
    *parameter_request*) and ``MessagePayload`` (*message_payload* as an
    opaque octet stream). Transient 502/503/504 responses are retried with
    exponential backoff.

    Returns the requests Response, or None if an exception was raised
    (the error is logged, preserving the original best-effort behavior).
    """
    try:
        url = (inforlogin.base_url()
               + "/IONSERVICES/api/ion/messaging/service/v2/multipartMessage")
        data = MultipartEncoder(
            fields={
                "ParameterRequest": (
                    "filename",
                    json.dumps(parameter_request),
                    "application/json",
                ),
                "MessagePayload": (
                    "filename",
                    message_payload,
                    "application/octet-stream",
                ),
            })
        headers = inforlogin.header()
        # The encoder supplies the multipart boundary in its content type.
        headers.update({"Content-Type": data.content_type})
        session = requests.Session()
        retries = Retry(total=10, backoff_factor=1,
                        status_forcelist=[502, 503, 504])
        session.mount("https://", HTTPAdapter(max_retries=retries))
        res = session.post(url, headers=headers, data=data)
        log.info("messaging v2 multipart message: {}".format(res.content))
        return res
    except Exception as e:
        # Bug fix: the log message previously read "Error ocurred" (typo).
        log.error("Error occurred " + str(e))
def delete_v1_purge_filter(purge_filter):
    """ Deletes Data Objects based on the given Filter. """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/purge/filter"
    return requests.delete(endpoint,
                           headers=inforlogin.header(),
                           params={"filter": purge_filter})
def delete_v1_purge_id(ids):
    """ Deletes Data Objects based on the given Data Object identifiers. """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/purge/ids"
    return requests.delete(endpoint,
                           headers=inforlogin.header(),
                           params={"id": ids})
def get_v1_payloads_stream_by_id(dl_id):
    """ Retrieve payload based on id from datalake. """
    # NOTE(review): an async coroutine with this exact name is defined later
    # in this module; at import time the later definition shadows this one,
    # so this synchronous variant is unreachable by its module-level name.
    # Confirm which variant callers expect and rename one of them.
    url = inforlogin.base_url(
    ) + "/IONSERVICES/datalakeapi/v1/payloads/streambyid"
    headers = inforlogin.header()
    # datalakeId selects the stored Data Object whose payload is streamed.
    payload = {"datalakeId": dl_id}
    res = requests.get(url, headers=headers, params=payload)
    return res
def get_messaging_ping():
    """Ping the ION Messaging service.

    Returns the requests Response, or None if an exception was raised
    (the error is logged, preserving the original best-effort behavior).
    """
    try:
        url = inforlogin.base_url(
        ) + "/IONSERVICES/api/ion/messaging/service/ping"
        headers = inforlogin.header()
        res = requests.get(url, headers=headers)
        log.info("messaging ping: {}".format(res.content))
        return res
    except Exception as e:
        # Bug fix: the log message previously read "Error ocurred" (typo).
        log.error("Error occurred " + str(e))
async def get_v1_payloads_stream_by_id(dl_id, session, retries=3):
    """ Retrieve payload based on id from datalake. """
    # On HTTP 401 the login is refreshed and the call retries itself until
    # *retries* is exhausted; the payload body is returned as text.
    endpoint = inforlogin.base_url(
    ) + "/IONSERVICES/datalakeapi/v1/payloads/streambyid"
    request_headers = inforlogin.header()
    query = {"datalakeId": dl_id}
    async with session.get(endpoint, headers=request_headers,
                           params=query) as resp:
        if retries > 0 and resp.status == 401:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_stream_by_id(dl_id, session,
                                                      retries - 1)
        return await resp.text()
async def get_v1_payloads_splitquery(filter, session, sort=None, retries=3):
    """ Split a demanding filter (producing more than 10K results) into
    several smaller filters producing the same result (up to 9500 results
    per one filter). """
    # On HTTP 401 the login is refreshed and the call retries itself until
    # *retries* is exhausted; the split filters are returned as JSON.
    endpoint = inforlogin.base_url(
    ) + "/IONSERVICES/datalakeapi/v1/payloads/splitquery"
    query = {"filter": filter}
    if sort is not None:
        query["sort"] = sort
    async with session.get(endpoint, headers=inforlogin.header(),
                           params=query) as resp:
        if retries > 0 and resp.status == 401:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_splitquery(filter, session, sort,
                                                    retries - 1)
        return await resp.json()
def get_v1_payloads_list(filter=None, sort=None, page=None, records=None):
    """ List data object properties using a filter. """
    # Synchronous variant; only query parameters actually supplied by the
    # caller are forwarded to the service.
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/list"
    query = {
        name: value
        for name, value in (("filter", filter), ("sort", sort),
                            ("page", page), ("records", records))
        if value is not None
    }
    return requests.get(endpoint, headers=inforlogin.header(), params=query)
def post_datacatalog_object(object_name, object_type: ObjectSchemaType,
                            schema, properties):
    """Register an object definition in the Data Catalog.

    Validates the schema/properties combination for the given *object_type*,
    then POSTs the definition as JSON.

    Raises:
        ValueError: if *object_type* is ANY but a schema or properties were
            supplied, or if *object_type* is DSV without a schema.

    Returns the requests Response.
    """
    if object_type == ObjectSchemaType.ANY and (schema is not None
                                                or properties is not None):
        raise ValueError("Schema and properties should be None")
    # Bug fix: the original condition compared against ObjectSchemaType.DSV
    # twice ("DSV or DSV"), a copy-paste error making the second comparison
    # redundant. Behavior is unchanged by the simplification.
    # TODO(review): confirm whether a second schema type (e.g. JSON) was
    # intended in the duplicated comparison.
    if object_type == ObjectSchemaType.DSV and schema is None:
        raise ValueError("Schema cannot be None")
    url = inforlogin.base_url() + "/IONSERVICES/datacatalog/v1/object"
    headers = inforlogin.header()
    data = {
        "name": object_name,
        "type": object_type.value,
        "schema": schema,
        "properties": properties,
    }
    res = requests.post(url, headers=headers, data=json.dumps(data))
    log.info("datacatalog post: {}".format(res.content))
    return res
def test_reconnect():
    """Reconnecting must issue a fresh Authorization (bearer) token."""
    inforlogin.load_config("FellowKey.ionapi")
    inforlogin.login()
    first = inforlogin.header()
    second = inforlogin.reconnect()
    assert first["Authorization"] != second["Authorization"]
def sendresults(url, _headers, data, timeout=65, stream=False):
    """POST *data* (JSON-encoded) to *url*, retrying transient failures.

    Two retry layers are used: a urllib3 Retry policy mounted on the session
    for 429/5xx responses, and an outer loop that re-authenticates and
    retries on non-200 responses up to ``max_attempt`` times.

    Returns the decoded JSON body on success, or an error string
    ("JSON Error" / "Session Error n" / "Error") on failure.

    Raises:
        SystemExit: when retries are exhausted, on too many redirects, or on
            any other requests-level exception.
    """
    retry_strategy = Retry(
        total=5,
        backoff_factor=1,
        status_forcelist=[429, 500, 502, 503, 504],
        # NOTE(review): method_whitelist is deprecated in urllib3 >= 1.26
        # (renamed allowed_methods) — confirm the pinned urllib3 version
        # before renaming, as older versions reject the new keyword.
        method_whitelist=["HEAD", "POST", "GET", "OPTIONS"],
    )
    adapter = HTTPAdapter(max_retries=retry_strategy,
                          pool_connections=100,
                          pool_maxsize=100)
    http = requests.Session()
    http.mount("https://", adapter)
    http.mount("http://", adapter)
    # Proactively refresh the login if the cached session has expired.
    if datetime.now() > inforlogin._GLOBAL_session_expire:
        inforlogin.reconnect()  # bug fix: result was bound to an unused local
        logger.info(" Reconnect and Next Reconnect will be "
                    + str(inforlogin._GLOBAL_session_expire))
    r = "Error"
    try:
        max_attempt = 10
        for z in range(max_attempt):
            response = http.request(
                "POST",
                url,
                headers=inforlogin.header(),
                data=json.dumps(data),
                timeout=timeout,
                stream=stream,
            )
            logger.debug("Sending request: " + curlify.to_curl(response.request))
            if response.status_code == 200:
                try:
                    r = response.json()
                    logger.debug("Response received: " + json.dumps(r))
                    break
                except ValueError:
                    r = "JSON Error"
                    logger.error(r)
            else:
                r = "Session Error " + str(z)
                logger.error(r)
                logger.error(
                    f'Response: {response.status_code}: {response.content}')
                logger.error(f'Response content {response.content}')
                # Bug fix: the original tested "z < max_attempt" (always true
                # for a range(max_attempt) index) and "z == max_attempt"
                # (never true), so exhausted retries never raised SystemExit.
                if z < max_attempt - 1:
                    logger.info(" Error try to get new session "
                                + str(z) + "/" + str(max_attempt))
                    inforlogin.reconnect()
                    time.sleep(10)
                else:
                    raise SystemExit(r)
    except requests.exceptions.TooManyRedirects as e:
        logger.error("Too many redirects")
        raise SystemExit(e)
    except requests.exceptions.RequestException as e:
        # catastrophic error. bail.
        # Bug fix: the original passed *e* as a logging argument with no
        # format placeholder in the message, so it was silently dropped.
        logger.error("OOps: Something Else: %s", e)
        raise SystemExit(e)
    return r
def main_load(
    url=None,
    ionfile=None,
    program=None,
    method=None,
    dataframe=None,
    outputfile=None,
    start=None,
    end=None,
    on_progress=None,
):
    """Entry point: validate inputs, log in, and dispatch the requested call.

    Validates *url* and *ionfile*, loads the ION API configuration, then
    routes to ``execute`` / ``executeSnd`` based on the "Call" component of
    the split URL, or returns the Authorization header for
    ``method == "checklogin"``.

    Returns the dispatched call's result, an error string on validation or
    login failure, or None when nothing matches.
    """
    # Idiom fix: validators.url returns a falsy ValidationFailure object on
    # invalid input, so a plain truthiness test replaces "!= True".
    if not validators.url(url):
        logging.info("Error: URL is not valid")
        return "Error: URL is not valid"
    # Idiom fix: "== False" replaced with "not".
    if not os.path.exists(ionfile):
        logging.info("Error: File does not exist")
        return "Error: File does not exist"
    inforlogin.load_config(ionfile)
    result = spliturl(url)
    if "Call" in result and len(result["Call"]) > 0:
        if result["Call"] == "execute":
            inforlogin.load_config(ionfile)
            token = inforlogin.login()
            headers = inforlogin.header()
            if "Bearer" not in headers["Authorization"]:
                return "Error: InforION Login is not working"
            # NOTE(review): both branches pass positionally, so when start/end
            # are supplied every later argument shifts one slot — confirm
            # execute()'s signature really accepts both call shapes.
            if start is None or end is None:
                return execute(
                    url,
                    headers,
                    program,
                    method,
                    dataframe,
                    outputfile,
                    on_progress,
                )
            return execute(
                url,
                headers,
                program,
                method,
                dataframe,
                outputfile,
                start,
                end,
                on_progress,
            )
        if result["Call"] == "executeSnd":
            config = inforlogin.load_config(ionfile)
            token = inforlogin.login()
            headers = inforlogin.header()
            if "Bearer" not in headers["Authorization"]:
                return "InforION Login is not working"
            return executeSnd(
                url, headers, program, method, dataframe, outputfile, start, end
            )
    if method == "checklogin":
        token = inforlogin.login()
        headers = inforlogin.header()
        return headers["Authorization"]
def get_datacatalog_ping():
    """Ping the Data Catalog service, log the reply, return the Response."""
    endpoint = inforlogin.base_url() + "/IONSERVICES/datacatalog/v1/status/ping"
    res = requests.get(endpoint, headers=inforlogin.header())
    log.info("datacatalog ping: {}".format(res.content))
    return res