def delete_datacatalog_object(object_name):
    """Delete a Data Catalog object definition identified by *object_name*."""
    endpoint = "{}/IONSERVICES/datacatalog/v1/object/{}".format(
        inforlogin.base_url(), object_name)
    res = requests.delete(endpoint, headers=inforlogin.header())
    log.info("datacatalog delete: {}".format(res.content))
    return res
async def get_v1_payloads_list(session, filter=None, sort=None, page=None, records=None, retries=3):
    """ List data object properties using a filter. """
    url = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/list"
    headers = inforlogin.header()
    # Only forward the query parameters the caller actually supplied.
    params = {
        name: value
        for name, value in (("filter", filter), ("sort", sort),
                            ("page", page), ("records", records))
        if value is not None
    }
    async with session.get(url, headers=headers, params=params) as resp:
        # On an expired token (401), refresh credentials and retry a bounded
        # number of times before surfacing whatever the server returned.
        if resp.status == 401 and retries > 0:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_list(session, filter, sort, page,
                                              records, retries - 1)
        return await resp.json()
def post_messaging_v2_multipart_message(parameter_request, message_payload):
    """Send a multipart message (parameters + payload) to the ION messaging v2 API.

    Args:
        parameter_request: JSON-serializable object describing the message;
            it is serialized and sent as the ``ParameterRequest`` part.
        message_payload: payload sent as the ``MessagePayload`` part with
            content type ``application/octet-stream``.

    Returns:
        The ``requests`` response on success, or ``None`` if any exception was
        raised (the error is logged, preserving the module's best-effort style).
    """
    try:
        url = (inforlogin.base_url()
               + "/IONSERVICES/api/ion/messaging/service/v2/multipartMessage")
        data = MultipartEncoder(
            fields={
                "ParameterRequest": (
                    "filename",
                    json.dumps(parameter_request),
                    "application/json",
                ),
                "MessagePayload": (
                    "filename",
                    message_payload,
                    "application/octet-stream",
                ),
            })
        headers = inforlogin.header()
        # The encoder generates the multipart boundary, so it must supply
        # the Content-Type header.
        headers.update({"Content-Type": data.content_type})
        retries = Retry(total=10, backoff_factor=1,
                        status_forcelist=[502, 503, 504])
        # Fix: close the Session deterministically (the original leaked it).
        with requests.Session() as session:
            session.mount("https://", HTTPAdapter(max_retries=retries))
            res = session.post(url, headers=headers, data=data)
        log.info("messaging v2 multipart message: {}".format(res.content))
        return res
    except Exception as e:
        # Fix: corrected typo in the log message ("ocurred" -> "occurred").
        log.error("Error occurred " + str(e))
def delete_v1_purge_filter(purge_filter):
    """ Deletes Data Objects based on the given Filter. """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/purge/filter"
    return requests.delete(
        endpoint,
        headers=inforlogin.header(),
        params={"filter": purge_filter},
    )
def delete_v1_purge_id(ids):
    """ Deletes Data Objects based on the given Data Object identifiers. """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/purge/ids"
    return requests.delete(
        endpoint,
        headers=inforlogin.header(),
        params={"id": ids},
    )
def get_v1_payloads_stream_by_id(dl_id):
    """ Retrieve payload based on id from datalake. """
    # NOTE(review): an async function later in this module reuses this exact
    # name, so that later definition shadows this one at import time — confirm
    # which of the two callers are meant to get.
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/streambyid"
    return requests.get(endpoint,
                        headers=inforlogin.header(),
                        params={"datalakeId": dl_id})
def get_messaging_ping():
    """Ping the ION messaging service.

    Returns:
        The ``requests`` response on success, or ``None`` if any exception was
        raised (the error is logged instead of propagating, matching the
        module's best-effort style).
    """
    try:
        url = inforlogin.base_url() + "/IONSERVICES/api/ion/messaging/service/ping"
        headers = inforlogin.header()
        res = requests.get(url, headers=headers)
        log.info("messaging ping: {}".format(res.content))
        return res
    except Exception as e:
        # Fix: corrected typo in the log message ("ocurred" -> "occurred").
        log.error("Error occurred " + str(e))
async def get_v1_payloads_stream_by_id(dl_id, session, retries=3):
    """ Retrieve payload based on id from datalake. """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/streambyid"
    query = {"datalakeId": dl_id}
    async with session.get(endpoint, headers=inforlogin.header(), params=query) as resp:
        # On an expired token (401), refresh credentials and retry a bounded
        # number of times before returning the response body.
        if resp.status == 401 and retries > 0:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_stream_by_id(dl_id, session, retries - 1)
        return await resp.text()
async def get_v1_payloads_splitquery(filter, session, sort=None, retries=3):
    """ Split a demanding filter (producing more than 10K results) into several smaller filters producing the same result (up to 9500 results per one filter). """
    endpoint = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/splitquery"
    query = {"filter": filter}
    if sort is not None:
        query["sort"] = sort
    async with session.get(endpoint, headers=inforlogin.header(), params=query) as resp:
        # On an expired token (401), refresh credentials and retry a bounded
        # number of times before decoding the JSON body.
        if resp.status == 401 and retries > 0:
            inforlogin.check_and_reconnect()
            return await get_v1_payloads_splitquery(filter, session, sort, retries - 1)
        return await resp.json()
def get_v1_payloads_list(filter=None, sort=None, page=None, records=None):
    """ List data object properties using a filter. """
    # NOTE(review): an async function earlier in this module uses this exact
    # name; this later definition shadows it at import time — confirm intent.
    url = inforlogin.base_url() + "/IONSERVICES/datalakeapi/v1/payloads/list"
    # Only forward the query parameters the caller actually supplied.
    query = {
        name: value
        for name, value in (("filter", filter), ("sort", sort),
                            ("page", page), ("records", records))
        if value is not None
    }
    return requests.get(url, headers=inforlogin.header(), params=query)
def post_datacatalog_object(object_name, object_type: ObjectSchemaType, schema, properties):
    """Create a Data Catalog object definition.

    Args:
        object_name: name of the data object to register.
        object_type: ``ObjectSchemaType`` member controlling validation rules.
        schema: schema document; must be ``None`` for ``ANY`` and is required
            for ``DSV``.
        properties: extra object properties; must be ``None`` for ``ANY``.

    Returns:
        The ``requests`` response from the POST.

    Raises:
        ValueError: if schema/properties are supplied for ``ANY``, or schema
            is missing for a type that requires one.
    """
    if object_type == ObjectSchemaType.ANY and (schema is not None
                                                or properties is not None):
        raise ValueError("Schema and properties should be None")
    # Fix: the original condition compared against ObjectSchemaType.DSV twice
    # ("DSV or DSV"); the second comparison was redundant, so this collapses
    # it behavior-identically to a single check.
    # NOTE(review): the duplicate looks like a copy-paste slip — the second
    # operand was probably meant to be a different member; confirm which types
    # require a schema before extending this check.
    if object_type == ObjectSchemaType.DSV and schema is None:
        raise ValueError("Schema cannot be None")
    url = inforlogin.base_url() + "/IONSERVICES/datacatalog/v1/object"
    headers = inforlogin.header()
    data = {
        "name": object_name,
        "type": object_type.value,
        "schema": schema,
        "properties": properties,
    }
    res = requests.post(url, headers=headers, data=json.dumps(data))
    log.info("datacatalog post: {}".format(res.content))
    return res
def get_datacatalog_ping():
    """Ping the Data Catalog service and return the raw response."""
    endpoint = inforlogin.base_url() + "/IONSERVICES/datacatalog/v1/status/ping"
    res = requests.get(endpoint, headers=inforlogin.header())
    log.info("datacatalog ping: {}".format(res.content))
    return res