def _create_job(self, operation, object_name, external_id_field=None):
    """Create a new job on the Salesforce Bulk API.

    Arguments:

    * operation -- Bulk operation to be performed by job
    * object_name -- SF object
    * external_id_field -- unique identifier field for upsert operations
    """
    job_spec = {
        'operation': operation,
        'object': object_name,
        'contentType': 'JSON',
    }
    # Upserts additionally need the field Salesforce matches records on.
    if operation == 'upsert':
        job_spec['externalIdFieldName'] = external_id_field
    response = call_salesforce(
        url="{}{}".format(self.bulk_url, 'job'),
        method='POST',
        session=self.session,
        headers=self.headers,
        data=json.dumps(job_spec),
    )
    # OrderedDict preserves the key order of the Salesforce response.
    return response.json(object_pairs_hook=OrderedDict)
def _create_job(self, operation, object_name, external_id_field=None):
    """Create a bulk job.

    Arguments:

    * operation -- Bulk operation to be performed by job
    * object_name -- SF object
    * external_id_field -- unique identifier field for upsert operations
    """
    payload = {"operation": operation, "object": object_name}
    payload["contentType"] = "JSON"
    if operation == "upsert":
        # Salesforce needs to know which field to match existing rows on.
        payload["externalIdFieldName"] = external_id_field

    result = call_salesforce(
        url=self.bulk_url + "job",
        method="POST",
        session=self.session,
        headers=self.headers,
        data=json.dumps(payload),
    )
    return result.json(object_pairs_hook=OrderedDict)
def _get_job(self, job_id):
    """Fetch an existing bulk job so its status can be inspected."""
    status_url = "{}{}{}".format(self.bulk_url, 'job/', job_id)
    response = call_salesforce(url=status_url, method='GET',
                               session=self.session, headers=self.headers)
    # Keep the response keys in the order Salesforce sent them.
    return response.json(object_pairs_hook=OrderedDict)
def _get_batch_results(self, job_id, batch_id, operation):
    """Retrieve a set of results from a completed job.

    Arguments:

    * job_id -- id of the bulk job the batch belongs to
    * batch_id -- id of the completed batch
    * operation -- bulk operation the job was created with; for 'query'
      and 'queryAll' a second request is needed to fetch the records

    Returns the parsed JSON body of the results.
    """
    url = "{}{}{}{}{}{}".format(self.bulk_url, 'job/', job_id,
                                '/batch/', batch_id, '/result')
    result = call_salesforce(url=url, method='GET', session=self.session,
                             headers=self.headers)
    # Bug fix: 'queryAll' batches, like 'query', return a list of
    # result-set ids rather than records, so they too need the follow-up
    # request (previously only 'query' was handled).
    if operation in ('query', 'queryAll'):
        url_query_results = "{}{}{}".format(url, '/', result.json()[0])
        query_result = call_salesforce(url=url_query_results, method='GET',
                                       session=self.session,
                                       headers=self.headers)
        return query_result.json()
    return result.json()
def _get_batch(self, job_id, batch_id):
    """Fetch an existing batch so its status can be inspected."""
    batch_url = "{}{}{}{}{}".format(self.bulk_url, "job/", job_id,
                                    "/batch/", batch_id)
    response = call_salesforce(
        url=batch_url,
        method="GET",
        session=self.session,
        headers=self.headers,
    )
    # Preserve the key order of the Salesforce response.
    return response.json(object_pairs_hook=OrderedDict)
def _close_job(self, job_id):
    """Close a bulk job so no further batches may be added to it."""
    close_url = "{}{}{}".format(self.bulk_url, 'job/', job_id)
    # Salesforce closes a job via a state-change POST.
    response = call_salesforce(
        url=close_url,
        method='POST',
        session=self.session,
        headers=self.headers,
        data=json.dumps({'state': 'Closed'}),
    )
    return response.json(object_pairs_hook=OrderedDict)
def _get_batch_results(self, job_id, batch_id, operation):
    """Retrieve a set of results from a completed job.

    Arguments:

    * job_id -- id of the bulk job the batch belongs to
    * batch_id -- id of the completed batch
    * operation -- bulk operation the job was created with

    Returns the parsed JSON results; for 'query'/'queryAll' this is the
    combined list of records from every result set of the batch.
    """
    url = "{}{}{}{}{}{}".format(self.bulk_url, "job/", job_id, "/batch/",
                                batch_id, "/result")
    result = call_salesforce(url=url, method="GET", session=self.session,
                             headers=self.headers)
    if operation == "query" or operation == "queryAll":
        # Query batches return a list of result-set ids; Salesforce may
        # split large results across several ids. Bug fix: fetch and
        # combine ALL result sets instead of only the first one
        # (result.json()[0]), which silently dropped later pages.
        records = []
        for result_id in result.json():
            url_query_results = "{}{}{}".format(url, "/", result_id)
            query_result = call_salesforce(
                url=url_query_results,
                method="GET",
                session=self.session,
                headers=self.headers,
            )
            records.extend(query_result.json())
        return records
    return result.json()
def _close_job(self, job_id):
    """Close a bulk job."""
    body = json.dumps({"state": "Closed"})
    job_url = "{}{}{}".format(self.bulk_url, "job/", job_id)
    response = call_salesforce(url=job_url, method="POST",
                               session=self.session, headers=self.headers,
                               data=body)
    # Keep the response keys in their original order.
    return response.json(object_pairs_hook=OrderedDict)
def _add_batch(self, job_id, data, operation):
    """Add a set of data as a batch to an existing job.

    Separating this out in case of later implementations involving
    multiple batches.
    """
    batch_url = "{}{}{}{}".format(self.bulk_url, 'job/', job_id, '/batch')
    # Query batches post the raw SOQL string as-is; everything else is
    # serialized to JSON first.
    payload = data if operation == 'query' else json.dumps(data)
    response = call_salesforce(url=batch_url, method='POST',
                               session=self.session, headers=self.headers,
                               data=payload)
    return response.json(object_pairs_hook=OrderedDict)