def showBatchResult(self, jobinfo, batchId, only_invalid=False):
    """
    Show the specific batch result.

    @type JobInfo
    @param jobinfo: job information
    @type: string
    @param batchId: batch id
    @param only_invalid: when True, return only rows whose second CSV
        column is 'false' (failed rows), annotated with their row number
    """
    resp = self._bulkHttp(
        self.__join((self.JOB, self.runningJobId, self.BATCH, batchId,
                     self.RESULT)),
        None, self.__content_csv, 'GET')
    if jobinfo.operation == 'query':
        # BUG FIX: a query batch may return SEVERAL result chunks
        # (keys 'result', 'result-002', ...); the old code fetched only
        # the first id and silently dropped the rest of the data.
        result_ids = parseXMLResult(resp)
        results = []
        for chunk_name in sorted(result_ids.keys()):
            resp = self._bulkHttp(
                self.__join((self.JOB, self.runningJobId, self.BATCH,
                             batchId, self.RESULT,
                             result_ids[chunk_name])),
                None, self.__content_csv, 'GET')
            chunk = resp.split('\n')
            # drop the empty element produced by the trailing newline
            if chunk and chunk[-1] == '':
                chunk = chunk[:-1]
            if not results:
                results = chunk
            elif chunk:
                # skip the CSV header on every chunk after the first
                results.extend(chunk[1:])
        if results:
            # restore a trailing empty line so callers see the same
            # shape as a plain resp.split('\n')
            results.append('')
    else:
        results = resp.split('\n')
    # TODO: improve parsing response
    if only_invalid:
        invalid_results = []
        counter = 0
        for result in results:
            counter += 1
            result = result.replace('"', '')
            split = result.split(',')
            if len(split) > 2 and split[1] == 'false':
                invalid_results.append(
                    "%s - Row number: %s" % (split, counter))
        results = invalid_results
    return results
def showBatchResult(self, jobinfo, batchId, only_invalid=False):
    """
    Show the specific batch result.

    @type JobInfo
    @param jobinfo: job information
    @type: string
    @param batchId: batch id
    @param only_invalid: when True, return only rows whose second CSV
        column is 'false' (failed rows), annotated with their row number
    """
    results = []
    # First request lists the result chunk ids for this batch.
    resp = self._bulkHttp(
        self.__join((self.JOB, self.runningJobId, self.BATCH, batchId,
                     self.RESULT)),
        None, self.__content_csv, 'GET')
    getLogger(STDERR).debug(resp)
    if jobinfo.operation == 'query':
        # Query results may span several chunks (keys 'result',
        # 'result-002', ...); fetch each one in sorted-key order.
        result_ids = parseXMLResult(resp)
        for chunk_name in sorted(result_ids.keys()):
            resultid = result_ids[chunk_name]
            try:
                resp = self._bulkHttp(
                    self.__join((self.JOB, self.runningJobId, self.BATCH,
                                 batchId, self.RESULT, resultid)),
                    None, self.__content_csv, 'GET')
            except SocketError as e:
                if e.errno != errno.ECONNRESET:
                    raise  # Not error we are looking for
                # Connection was reset by the peer:
                # send request again (single retry)
                resp = self._bulkHttp(
                    self.__join((self.JOB, self.runningJobId, self.BATCH,
                                 batchId, self.RESULT, resultid)),
                    None, self.__content_csv, 'GET')
            getLogger(STDERR).debug(resp)
            result_chunk = resp.split('\n')
            # get rid of last empty line
            if result_chunk and result_chunk[-1] == '':
                result_chunk = result_chunk[:-1]
            if not results:
                results = result_chunk
            elif result_chunk:
                # for other chunks header will not be added
                results.extend(result_chunk[1:])
        # add trailing empty line to mimic standard behaviour
        if results:
            results.append('')
    else:
        # Non-query operations return the CSV body directly.
        results = resp.split('\n')
    # TODO: improve parsing response
    if only_invalid:
        invalid_results = []
        counter = 0
        for result in results:
            # NOTE(review): counter includes the header line, so row
            # numbers are 1-based counting the header — confirm intent.
            counter += 1
            result = result.replace('"', '')
            split = result.split(',')
            if len(split) > 2 and split[1] == 'false':
                invalid_results.append("%s - Row number: %s" % (split, counter))
        results = invalid_results
    return results
def _check_response(response):
    """Raise BulkException when the HTTP response is not 200 OK."""
    if response.status_code == 200:
        return
    parsed = parseXMLResult(response.content)
    raise BulkException('{message}: {code}'.format(
        message=parsed['sf:exceptionMessage'],
        code=parsed['sf:exceptionCode']))
def createBatch(self, jobinfo, batchdata):
    """
    Create individual batch operation with batchdata.

    @type: JobInfo
    @param jobinfo: job information
    @type: string
    @param batchdata: information which will be sent (e.g SOQL, CSV lines in string)
    """
    # A batch can only be attached to an already-running job.
    if self.runningJobId is None:
        self.__raise('Job in running not found')
    url = self.__join((self.JOB, self.runningJobId, self.BATCH))
    resp = self._bulkHttp(url, batchdata, self.__content_csv)
    getLogger(STDERR).debug(resp)
    parsed = parseXMLResult(resp)
    if not self.__check_result(parsed):
        # Error path: recoverable errors close the job, the rest abort.
        if self._handle_errors(parsed):
            return self.closeJob(jobinfo)
        self.__raise('Batch creating failed')
        return None
    self.__update_batch_state(jobinfo, parsed)
    getLogger(STDERR).debug("Batch: %s status is: %s" % (parsed['id'], parsed['state']))
    return parsed['id']
def test_many_results():
    """Two <result> elements parse into 'result' and 'result-002' keys."""
    raw_xml = (
        '<result-list xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
        '<result>75261000003W0MS</result>'
        '<result>75261000003W0Mc</result>'
        '</result-list>'
    )
    expected = {
        u'result': u'75261000003W0MS',
        u'result-002': u'75261000003W0Mc',
    }
    assert parseXMLResult(raw_xml) == expected
def createBatch(self, jobinfo, batchdata):
    """
    Create individual batch operation with batchdata.

    @type: JobInfo
    @param jobinfo: job information
    @type: string
    @param batchdata: information which will be sent (e.g SOQL, CSV lines in string)
    """
    # A batch needs a running job to attach to.
    if self.runningJobId is None:
        self.__raise('Job in running not found')
    endpoint = self.__join((self.JOB, self.runningJobId, self.BATCH))
    parsed = parseXMLResult(
        self._bulkHttp(endpoint, batchdata, self.__content_csv))
    if not self.__check_result(parsed):
        # Recoverable errors close the job; anything else aborts.
        if self._handle_errors(parsed):
            return self.closeJob(jobinfo)
        self.__raise('Batch creating failed')
        return None
    self.__update_batch_state(jobinfo, parsed)
    self.logger.info("Batch: %s status is: %s" % (parsed['id'], parsed['state']))
    return parsed['id']
def _check_response(response):
    """Translate a non-200 HTTP response into a BulkException."""
    if response.status_code != 200:
        details = parseXMLResult(response.content)
        msg = '{message}: {code}'.format(
            message=details['sf:exceptionMessage'],
            code=details['sf:exceptionCode'])
        raise BulkException(msg)
def test_big():
    """A multi-line batchInfo document parses into a flat string dict."""
    # NOTE: removed a leftover debug `print res` — it was Python-2-only
    # syntax and polluted the test output.
    raw_xml = ('<batchInfo>\n'
               '    <state>Completed</state>\n'
               '    <numberRecordsProcessed>50000</numberRecordsProcessed>\n'
               '    <totalProcessingTime>500</totalProcessingTime>\n'
               '</batchInfo>')
    res = parseXMLResult(raw_xml)
    assert res == {
        'state': 'Completed',
        'numberRecordsProcessed': '50000',
        'totalProcessingTime': '500'
    }
def createJob(self, jobinfo):
    """
    Creating new job.

    @type: JobInfo
    @param jobinfo: will be used to populate the job information
    """
    resp = self._bulkHttp(self.JOB, jobinfo.createJob(), self.__content_xml)
    getLogger(STDERR).debug(resp)
    parsed = parseXMLResult(resp)
    if self.__check_result(parsed):
        # Success: remember the running job and refresh the job info.
        self.__update_running_job(parsed)
        self.__update_jobinfo(jobinfo, parsed)
        getLogger(STDERR).debug("Job: %s created" % jobinfo.id)
        return
    if self._handle_errors(parsed):
        # Error was handled (e.g. session refreshed) — retry creation.
        self.createJob(jobinfo)
    else:
        self.__raise('Job creating failed')
def createJob(self, jobinfo):
    """
    Creating new job.

    @type: JobInfo
    @param jobinfo: will be used to populate the job information
    """
    parsed = parseXMLResult(
        self._bulkHttp(self.JOB, jobinfo.createJob(), self.__content_xml))
    if not self.__check_result(parsed):
        if self._handle_errors(parsed):
            # Error was handled — attempt creation again.
            self.createJob(jobinfo)
        else:
            self.__raise('Job creating failed')
        return
    # Success: track the running job and refresh the job info.
    self.__update_running_job(parsed)
    self.__update_jobinfo(jobinfo, parsed)
    self.logger.info("Job: %s created" % jobinfo.id)
def updateBatchStatus(self, jobinfo, batchId):
    """
    Update individual batch status.

    @type: JobInfo
    @param jobinfo: job information
    @type: string
    @param batchId: batch id
    """
    endpoint = self.__join((self.JOB, self.runningJobId, self.BATCH, batchId))
    parsed = parseXMLResult(
        self._bulkHttp(endpoint, None, self.__content_csv, 'GET'))
    if self.__check_result(parsed):
        # Only refresh batches that are already tracked on the job.
        if parsed['id'] in jobinfo.batch:
            self.__update_batch_state(jobinfo, parsed)
        return
    if self._handle_errors(parsed):
        # Error was handled — poll the batch again.
        self.updateBatchStatus(jobinfo, batchId)
    else:
        self.__raise("Batch: %s updateing status failed" % batchId)
def closeJob(self, jobinfo):
    """
    Closing job.

    @type: JobInfo
    @param jobinfo: indicate job information which needs to be closed
    """
    jobinfo.state = self.CLOSED
    parsed = parseXMLResult(
        self._bulkHttp(self.__join((self.JOB, jobinfo.id)),
                       jobinfo.closeJob(), self.__content_xml))
    if not self.__check_result(parsed):
        if self._handle_errors(parsed):
            # Error was handled — retry the close.
            self.closeJob(jobinfo)
        else:
            self.__raise("Job: %s closing failed" % jobinfo.id)
        return
    self.__update_running_job(parsed)
    self.__update_jobinfo(jobinfo, parsed)
    self.logger.info("Job: %s state: %s" % (jobinfo.id, jobinfo.state))
def closeJob(self, jobinfo):
    """
    Closing job.

    @type: JobInfo
    @param jobinfo: indicate job information which needs to be closed
    """
    jobinfo.state = self.CLOSED
    resp = self._bulkHttp(self.__join((self.JOB, jobinfo.id)),
                          jobinfo.closeJob(), self.__content_xml)
    getLogger(STDERR).debug(resp)
    parsed = parseXMLResult(resp)
    if self.__check_result(parsed):
        self.__update_running_job(parsed)
        self.__update_jobinfo(jobinfo, parsed)
        getLogger(STDERR).debug("Job: %s state: %s" % (jobinfo.id, jobinfo.state))
        return
    if self._handle_errors(parsed):
        # Error was handled — retry the close.
        self.closeJob(jobinfo)
    else:
        self.__raise("Job: %s closing failed" % jobinfo.id)
def updateBatchStatus(self, jobinfo, batchId):
    """
    Update individual batch status.

    @type: JobInfo
    @param jobinfo: job information
    @type: string
    @param batchId: batch id
    """
    resp = self._bulkHttp(
        self.__join((self.JOB, self.runningJobId, self.BATCH, batchId)),
        None, self.__content_csv, 'GET')
    getLogger(STDERR).debug(resp)
    parsed = parseXMLResult(resp)
    if not self.__check_result(parsed):
        if self._handle_errors(parsed):
            # Error was handled — poll the batch again.
            self.updateBatchStatus(jobinfo, batchId)
        else:
            self.__raise("Batch: %s updateing status failed" % batchId)
        return
    # Only refresh batches that are already tracked on the job.
    if parsed['id'] in jobinfo.batch:
        self.__update_batch_state(jobinfo, parsed)
def test_empty():
    """An element with no children parses to an empty dict."""
    assert parseXMLResult('<empty></empty>') == {}
def _set_credentials(self, response):
    """Extract session id and server URLs from the login response body."""
    parsed = parseXMLResult(response.content)
    self.sessionid = parsed['sessionId']
    server_url = parsed['serverUrl']
    self.soap_server = server_url
    # Bulk endpoint host = everything before 'services', minus the
    # trailing character (NOTE(review): presumably the '/' separator —
    # verify against an actual serverUrl from login).
    self.bulk_server = server_url.split('services')[0][:-1]
def _set_credentials(self, response):
    """Pull session id and server URLs out of the login response body."""
    creds = parseXMLResult(response.content)
    self.sessionid = creds['sessionId']
    self.soap_server = creds['serverUrl']
    # Bulk endpoint host is the serverUrl prefix before 'services'.
    self.bulk_server = creds['serverUrl'].split('services')[0]
def test_one_result():
    """A single <result> element maps to the plain 'result' key."""
    raw_xml = (
        '<result-list xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
        '<result>75238000003axrT</result>'
        '</result-list>'
    )
    expected = {u'result': u'75238000003axrT'}
    assert parseXMLResult(raw_xml) == expected