async def inputs(infile, _):
    await write_int(infile, ServiceBackend.LOAD_REFERENCES_FROM_DATASET)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, path)

async def inputs(infile, token):
    await write_int(infile, ServiceBackend.EXECUTE)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, self.render(ir))
    await write_str(infile, token)

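# The async `inputs` callbacks shown here rely on write_int / write_str /
# write_bool helpers defined elsewhere. A minimal sketch of what such helpers
# could look like, assuming an awaitable byte stream and a little-endian,
# length-prefixed encoding (the actual wire format is an assumption here):
import struct

async def write_int(strm, v: int):
    # 4-byte little-endian signed integer
    await strm.write(struct.pack('<i', v))

async def write_bool(strm, v: bool):
    # single byte: 0x01 for True, 0x00 for False
    await strm.write(b'\x01' if v else b'\x00')

async def write_str(strm, s: str):
    # UTF-8 bytes prefixed by their byte length
    b = s.encode('utf-8')
    await write_int(strm, len(b))
    await strm.write(b)
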
async def create_inputs():
    with self.fs.open(dir + '/in', 'wb') as infile:
        write_int(infile, ServiceBackend.EXECUTE)
        write_str(infile, tmp_dir())
        write_str(infile, self.billing_project)
        write_str(infile, self.bucket)
        write_str(infile, self.render(ir))
        write_str(infile, token)

async def inputs(infile, _):
    await write_int(infile, ServiceBackend.IMPORT_FAM)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, path)
    await write_bool(infile, quant_pheno)
    await write_str(infile, delimiter)
    await write_str(infile, missing)

def load_references_from_dataset(self, path):
    token = secret_alnum_string()
    with TemporaryDirectory(ensure_exists=False) as dir:
        # write the request to <dir>/in
        with self.fs.open(dir + '/in', 'wb') as infile:
            write_int(infile, ServiceBackend.LOAD_REFERENCES_FROM_DATASET)
            write_str(infile, tmp_dir())
            write_str(infile, self.billing_project)
            write_str(infile, self.bucket)
            write_str(infile, path)

        # run the driver as a JVM job in a single-job batch
        batch_attributes = self.batch_attributes
        if 'name' not in batch_attributes:
            batch_attributes = {**batch_attributes, 'name': 'load_references_from_dataset(...)'}
        bb = self.bc.create_batch(token=token, attributes=batch_attributes)

        j = bb.create_jvm_job([
            'is.hail.backend.service.ServiceBackendSocketAPI2',
            os.environ['HAIL_SHA'],
            os.environ['HAIL_JAR_URL'],
            batch_attributes['name'],
            dir + '/in',
            dir + '/out',
        ], mount_tokens=True)
        b = bb.submit(disable_progress_bar=self.disable_progress_bar)
        status = b.wait(disable_progress_bar=self.disable_progress_bar)
        if status['n_succeeded'] != 1:
            raise ValueError(f'batch failed {status} {j.log()}')

        # read the reply from <dir>/out: a success flag, then either the JSON
        # payload or a Java stack trace
        with self.fs.open(dir + '/out', 'rb') as outfile:
            success = read_bool(outfile)
            if success:
                s = read_str(outfile)
                try:
                    # FIXME: do we not have to parse the result?
                    return json.loads(s)
                except json.decoder.JSONDecodeError as err:
                    raise ValueError(f'could not decode {s}') from err
            else:
                jstacktrace = read_str(outfile)
                raise FatalError(jstacktrace)

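# load_references_from_dataset consumes the job's reply with read_bool and
# read_str. A minimal sketch of matching synchronous readers, under the same
# assumed encoding as above (little-endian ints, length-prefixed UTF-8
# strings); a robust version would loop until read() returns the requested
# number of bytes:
import struct

def read_int(strm) -> int:
    return struct.unpack('<i', strm.read(4))[0]

def read_bool(strm) -> bool:
    return strm.read(1) != b'\x00'

def read_str(strm) -> str:
    n = read_int(strm)
    return strm.read(n).decode('utf-8')
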
async def inputs(infile, _):
    await write_int(infile, ServiceBackend.INDEX_BGEN)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_int(infile, len(files))
    for fname in files:
        await write_str(infile, fname)
    await write_int(infile, len(index_file_map))
    for k, v in index_file_map.items():
        await write_str(infile, k)
        await write_str(infile, v)
    if referenceGenomeName is None:
        await write_bool(infile, False)
    else:
        await write_bool(infile, True)
        await write_str(infile, referenceGenomeName)
    await write_int(infile, len(contig_recoding))
    for k, v in contig_recoding.items():
        await write_str(infile, k)
        await write_str(infile, v)
    await write_bool(infile, skip_invalid_loci)

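# The INDEX_BGEN payload hand-encodes a list, two string maps, and an optional
# reference genome name. Hypothetical helpers capturing the same
# length-prefixed pattern (the original code inlines these loops instead):
async def write_str_list(strm, xs):
    await write_int(strm, len(xs))
    for x in xs:
        await write_str(strm, x)

async def write_str_map(strm, d):
    await write_int(strm, len(d))
    for k, v in d.items():
        await write_str(strm, k)
        await write_str(strm, v)

async def write_optional_str(strm, s):
    # presence flag, then the value only when present
    if s is None:
        await write_bool(strm, False)
    else:
        await write_bool(strm, True)
        await write_str(strm, s)
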
def blockmatrix_type(self, bmir):
    token = secret_alnum_string()
    with TemporaryDirectory(ensure_exists=False) as dir:
        # write the request to <dir>/in
        with self.fs.open(dir + '/in', 'wb') as infile:
            write_int(infile, ServiceBackend.BLOCK_MATRIX_TYPE)
            write_str(infile, tmp_dir())
            write_str(infile, self.render(bmir))

        # run the driver as a JVM job in a single-job batch
        batch_attributes = self.batch_attributes
        if 'name' not in batch_attributes:
            batch_attributes = {**batch_attributes, 'name': 'blockmatrix_type(...)'}
        bb = self.bc.create_batch(token=token, attributes=batch_attributes)

        j = bb.create_jvm_job([
            'is.hail.backend.service.ServiceBackendSocketAPI2',
            os.environ['HAIL_SHA'],
            os.environ['HAIL_JAR_URL'],
            batch_attributes['name'],
            dir + '/in',
            dir + '/out',
        ], mount_tokens=True)
        b = bb.submit(disable_progress_bar=self.disable_progress_bar)
        status = b.wait(disable_progress_bar=self.disable_progress_bar)
        if status['n_succeeded'] != 1:
            raise ValueError(f'batch failed {status} {j.log()}')

        # read the reply from <dir>/out: a success flag, then either the
        # serialized type or a Java stack trace
        with self.fs.open(dir + '/out', 'rb') as outfile:
            success = read_bool(outfile)
            if success:
                s = read_str(outfile)
                try:
                    return tblockmatrix._from_json(json.loads(s))
                except json.decoder.JSONDecodeError as err:
                    raise ValueError(f'could not decode {s}') from err
            else:
                jstacktrace = read_str(outfile)
                raise FatalError(jstacktrace)

async def inputs(infile, _):
    await write_int(infile, ServiceBackend.PARSE_VCF_METADATA)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, path)

async def inputs(infile, _):
    await write_int(infile, ServiceBackend.REFERENCE_GENOME)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, name)

async def inputs(infile, _):
    await write_int(infile, ServiceBackend.BLOCK_MATRIX_TYPE)
    await write_str(infile, tmp_dir())
    await write_str(infile, self.billing_project)
    await write_str(infile, self.remote_tmpdir)
    await write_str(infile, self.render(bmir))

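# Each async `inputs(infile, token)` callback above is presumably handed to a
# shared driver that mirrors the synchronous flow in load_references_from_dataset
# and blockmatrix_type: write the request, run the JVM job, read the reply. A
# rough sketch under that assumption; the method name `_rpc` and `self.afs.create`
# (an async counterpart of self.fs.open returning an async context manager) are
# placeholders, not the project's actual API:
async def _rpc(self, name, inputs):
    token = secret_alnum_string()
    with TemporaryDirectory(ensure_exists=False) as dir:
        async with self.afs.create(dir + '/in') as infile:
            await inputs(infile, token)

        batch_attributes = self.batch_attributes
        if 'name' not in batch_attributes:
            batch_attributes = {**batch_attributes, 'name': name}
        bb = self.bc.create_batch(token=token, attributes=batch_attributes)
        j = bb.create_jvm_job([
            'is.hail.backend.service.ServiceBackendSocketAPI2',
            os.environ['HAIL_SHA'],
            os.environ['HAIL_JAR_URL'],
            batch_attributes['name'],
            dir + '/in',
            dir + '/out',
        ], mount_tokens=True)
        # reuses the blocking batch client shown above; a fully async client
        # would await submission and waiting instead
        b = bb.submit(disable_progress_bar=self.disable_progress_bar)
        status = b.wait(disable_progress_bar=self.disable_progress_bar)
        if status['n_succeeded'] != 1:
            raise ValueError(f'batch failed {status} {j.log()}')

        # read the reply exactly as the synchronous methods above do
        with self.fs.open(dir + '/out', 'rb') as outfile:
            if read_bool(outfile):
                return read_str(outfile)
            raise FatalError(read_str(outfile))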