def commit(self):
    """Execute the queued requests in a single batch API request.

    If **return_result** parameter was set to True during initialization,
    :class:`arango.job.BatchJob` instances are populated with results.

    :return: Batch jobs or None if **return_result** parameter was set to
        False during initialization.
    :rtype: [arango.job.BatchJob] | None
    :raise arango.exceptions.BatchStateError: If batch state is invalid
        (e.g. batch was already committed or size of response from server
        did not match the expected).
    :raise arango.exceptions.BatchExecuteError: If commit fails.
    """
    if self._committed:
        raise BatchStateError('batch already committed')

    self._committed = True

    # Nothing was queued: return the (empty) job list immediately.
    if not self._queue:
        return self.jobs

    # Boundary used for multipart request
    boundary = uuid4().hex

    # Buffer for building the batch request payload: one sub-part per
    # queued request, identified by the job ID, then a closing boundary.
    buffer = []
    for req, job in self._queue.values():
        buffer.append('--{}'.format(boundary))
        buffer.append('Content-Type: application/x-arango-batchpart')
        buffer.append('Content-Id: {}'.format(job.id))
        buffer.append('\r\n{}'.format(req))
    buffer.append('--{}--'.format(boundary))

    request = Request(
        method='post',
        endpoint='/_api/batch',
        headers={
            'Content-Type':
                'multipart/form-data; boundary={}'.format(boundary)
        },
        data='\r\n'.join(buffer)
    )
    # Suppress the urllib3 warning triggered by the multipart payload.
    with suppress_warning('requests.packages.urllib3.connectionpool'):
        resp = self._conn.send_request(request)

    if not resp.is_success:
        raise BatchExecuteError(resp, request)

    if not self._return_result:
        return None

    # Split the multipart response back into one chunk per job; the first
    # and last fragments are the preamble and the trailing terminator.
    raw_resps = resp.raw_body.split('--{}'.format(boundary))[1:-1]

    if len(self._queue) != len(raw_resps):
        raise BatchStateError(
            'expecting {} parts in batch response but got {}'
            .format(len(self._queue), len(raw_resps))
        )
    for raw_resp in raw_resps:
        # Parse and breakdown the batch response body. The part layout is
        # fixed: line 1 holds the Content-Id header, line 3 the HTTP status
        # line, and the last line the response body.
        resp_parts = raw_resp.strip().split('\r\n')
        raw_content_id = resp_parts[1]
        raw_body = resp_parts[-1]
        raw_status = resp_parts[3]
        job_id = raw_content_id.split(' ')[1]
        _, status_code, status_text = raw_status.split(' ', 2)

        # Update the corresponding batch job with a synthesized response.
        queued_req, queued_job = self._queue[job_id]
        queued_job._response = Response(
            method=queued_req.method,
            url=self._conn.url_prefix + queued_req.endpoint,
            headers={},
            status_code=int(status_code),
            status_text=status_text,
            raw_body=raw_body
        )
        queued_job._status = 'done'

    return self.jobs
def commit(self):
    """Execute the queued requests in a single transaction API request.

    If **return_result** parameter was set to True during initialization,
    :class:`arango.job.TransactionJob` instances are populated with
    results.

    :return: Transaction jobs or None if **return_result** parameter was
        set to False during initialization.
    :rtype: [arango.job.TransactionJob] | None
    :raise arango.exceptions.TransactionStateError: If the transaction was
        already committed.
    :raise arango.exceptions.TransactionExecuteError: If commit fails.
    """
    if self._committed:
        raise TransactionStateError('transaction already committed')

    self._committed = True

    # Nothing was queued: return the (empty) job list immediately.
    if not self._queue:
        return self.jobs

    write_collections = set()
    read_collections = set()

    # Buffer for building the transaction javascript command. Each job's
    # result is stored under its job ID so it can be matched up later.
    cmd_buffer = [
        'var db = require("internal").db',
        'var gm = require("@arangodb/general-graph")',
        'var result = {}'
    ]
    for req, job in self._queue.values():
        # A request may declare its read/write collections either as a
        # single name or as an iterable of names.
        if isinstance(req.read, string_types):
            read_collections.add(req.read)
        elif req.read is not None:
            read_collections |= set(req.read)

        if isinstance(req.write, string_types):
            write_collections.add(req.write)
        elif req.write is not None:
            write_collections |= set(req.write)

        cmd_buffer.append('result["{}"] = {}'.format(job.id, req.command))
    cmd_buffer.append('return result;')

    data = {
        'action': 'function () {{ {} }}'.format(';'.join(cmd_buffer)),
        'collections': {
            'read': list(read_collections),
            'write': list(write_collections),
            'allowImplicit': True
        }
    }
    if self._timeout is not None:
        data['lockTimeout'] = self._timeout
    if self._sync is not None:
        data['waitForSync'] = self._sync

    request = Request(
        method='post',
        endpoint='/_api/transaction',
        data=data,
    )
    resp = self._conn.send_request(request)

    if not resp.is_success:
        raise TransactionExecuteError(resp, request)

    if not self._return_result:
        return None

    result = resp.body['result']
    for req, job in self._queue.values():
        # Synthesize a per-job HTTP 200 response from the transaction
        # result so jobs look like individually executed requests.
        job._response = Response(
            method=req.method,
            url=self._conn.url_prefix + req.endpoint,
            headers={},
            status_code=200,
            status_text='OK',
            raw_body=result.get(job.id)
        )
        job._status = 'done'

    return self.jobs
def commit(self):
    """Send all queued API requests to the server in one HTTP round trip.

    When `return_response` was enabled at initialization, each response is
    stored in its :class:`arango.batch.BatchJob` and can later be fetched
    through :func:`arango.batch.BatchJob.result`.

    :raises arango.exceptions.BatchExecuteError: if the batch request
        cannot be executed
    """
    boundary = 'XXXsubpartXXX'
    try:
        # Guard clause: nothing queued means nothing to send.
        if not self._requests:
            return

        # Build the multipart payload: one sub-part per queued request
        # with a 1-based Content-Id, followed by the closing boundary.
        payload_parts = []
        for content_id, request in enumerate(self._requests, start=1):
            payload_parts.append(
                '--{}\r\n'
                'Content-Type: application/x-arango-batchpart\r\n'
                'Content-Id: {}\r\n\r\n'
                '{}\r\n'.format(boundary, content_id, request.stringify())
            )
        payload_parts.append('--{}--\r\n\r\n'.format(boundary))

        res = self.post(
            endpoint='/_api/batch',
            headers={
                'Content-Type': (
                    'multipart/form-data; boundary={}'.format(boundary)
                )
            },
            data=''.join(payload_parts),
        )
        if res.status_code not in HTTP_OK:
            raise BatchExecuteError(res)
        if not self._return_result:
            return

        # Drop the preamble and trailing terminator, then walk the
        # remaining chunks in queue order.
        raw_responses = res.raw_body.split('--{}'.format(boundary))[1:-1]
        for index, raw_response in enumerate(raw_responses):
            request = self._requests[index]
            handler = self._handlers[index]
            job = self._batch_jobs[index]

            # Fixed part layout: line 3 is the HTTP status line and the
            # last line is the response body.
            fragments = raw_response.strip().split('\r\n')
            status_line, body_text = fragments[3], fragments[-1]
            _, code, text = status_line.split(' ', 2)
            try:
                outcome = handler(Response(
                    method=request.method,
                    url=self._url_prefix + request.endpoint,
                    headers=request.headers,
                    http_code=int(code),
                    http_text=text,
                    body=body_text
                ))
            except ArangoError as err:
                job.update(status='error', result=err)
            else:
                job.update(status='done', result=outcome)
    finally:
        # Always reset the queues so this batch object starts clean,
        # even when an error was raised above.
        self._requests = []
        self._handlers = []
        self._batch_jobs = []