async def request_and_raise_for_status(self, method, url, raise_for_status=True, **kwargs):
    """Issue an HTTP request, optionally raising on a 4xx/5xx response.

    If a ``json`` keyword argument is supplied, it is serialized with
    ``orjson`` into an ``aiohttp.BytesPayload`` and sent as the request
    body; passing both ``json`` and ``data`` raises ``ValueError``.

    NOTE(review): the original signature was garbled (empty parameter
    list while the body used ``self``, ``method``, ``url``,
    ``raise_for_status`` and ``**kwargs``); it has been restored here —
    confirm the parameter order/defaults against callers.

    :param method: HTTP method name passed through to the session.
    :param url: request URL.
    :param raise_for_status: when True, a response with status >= 400 is
        converted into a ``ClientResponseError`` carrying the decoded body.
    :param kwargs: forwarded to ``self.client_session._request``; ``json``
        is consumed here and replaced by ``data``.
    :returns: the (possibly unreleased) aiohttp response object.
    :raises ValueError: if both ``data`` and ``json`` are supplied.
    :raises ClientResponseError: on status >= 400 when ``raise_for_status``.
    """
    json_data = kwargs.pop('json', None)
    if json_data is not None:
        if kwargs.get('data') is not None:
            raise ValueError(
                'data and json parameters cannot be used at the same time'
            )
        kwargs['data'] = aiohttp.BytesPayload(
            value=orjson.dumps(json_data),
            # https://github.com/ijl/orjson#serialize
            #
            # "The output is a bytes object containing UTF-8"
            encoding="utf-8",
            content_type="application/json",
        )
    resp = await self.client_session._request(method, url, **kwargs)
    if raise_for_status and resp.status >= 400:
        # reason should always be not None for a started response
        assert resp.reason is not None
        # Read the body before releasing the connection so the error can
        # carry the server's message.
        body = (await resp.read()).decode()
        resp.release()
        raise ClientResponseError(resp.request_info,
                                  resp.history,
                                  status=resp.status,
                                  message=resp.reason,
                                  headers=resp.headers,
                                  body=body)
    return resp
async def _open_submit_close(self, byte_job_specs: List[bytes], n_jobs: int, pbar) -> Batch:
    """Create a batch and submit all of its jobs in one fast-path request.

    The job specs arrive pre-serialized as JSON bytes, so the request
    body ``{"bunch": [...], "batch": {...}}`` is assembled directly as
    bytes — only the batch spec itself goes through ``json.dumps``.

    :param byte_job_specs: one JSON-encoded job spec per job.
    :param n_jobs: total job count; must equal ``len(self._job_specs)``.
    :param pbar: progress bar advanced by ``n_jobs`` once submitted.
    :returns: a ``Batch`` handle for the newly created batch.
    """
    assert n_jobs == len(self._job_specs)
    b = bytearray(b'{"bunch":[')
    # The specs are already JSON; joining with ',' avoids re-encoding
    # them (and replaces the original per-element ord()/append loop).
    b.extend(b','.join(byte_job_specs))
    b.extend(b'],"batch":')
    b.extend(json.dumps(self._batch_spec()).encode('utf-8'))
    b.extend(b'}')
    resp = await self._client._post(
        '/api/v1alpha/batches/create-fast',
        data=aiohttp.BytesPayload(b, content_type='application/json', encoding='utf-8'),
    )
    batch_json = await resp.json()
    pbar.update(n_jobs)
    return Batch(self._client,
                 batch_json['id'],
                 self.attributes,
                 n_jobs,
                 self.token,
                 submission_info=BatchSubmissionInfo(True))
async def _submit_jobs(self, batch_id, byte_job_specs, n_jobs, pbar):
    """Submit one bunch of pre-serialized job specs to an existing batch.

    :param batch_id: id of the batch the jobs belong to.
    :param byte_job_specs: non-empty list of JSON-encoded job specs.
    :param n_jobs: number of jobs represented by this bunch; used only to
        advance the progress bar.
    :param pbar: progress bar advanced by ``n_jobs`` once submitted.
    """
    assert len(byte_job_specs) > 0, byte_job_specs
    # The specs are already JSON bytes; join them into a JSON array
    # instead of the original index-based while loop with manual commas.
    body = bytearray(b'[')
    body.extend(b','.join(byte_job_specs))
    body.extend(b']')
    await self._client._post(
        f'/api/v1alpha/batches/{batch_id}/jobs/create',
        data=aiohttp.BytesPayload(
            body, content_type='application/json', encoding='utf-8'))
    pbar.update(n_jobs)