def get_deployables(self, path_to, on_board_perspective=True):
    """Build the (message, command) pairs needed to deploy this artifact's files.

    For each file in the schema a download command is generated; when the
    artifact is compressed, a single tarball is downloaded instead and a
    final extraction command is appended.

    :param path_to: Destination directory on the target machine.
    :param on_board_perspective: Passed through to the warehouse so the
           download command is phrased for execution on the board (default)
           or from the outside.
    :returns: An iterator of ``(message, command)`` tuples, message first.
    """
    if self.compressed:
        file_schema = [FileSpec(name=self.compressed)]
    else:
        file_schema = self.infer_schema_from_repo()

    cmds = [
        self.warehouse.get_download_cmd(
            path_from=hierarchy.join_as_path(file_spec.name),
            path_to=os.path.join(path_to, file_spec.name),
            on_board_perspective=on_board_perspective
        )
        for hierarchy in [self.make_hierarchy()]  # hoisted: one hierarchy for all files
        for file_spec in file_schema
    ]

    msgs = [
        'Injecting file: {}'.format(file_spec.name)
        for file_spec in file_schema
    ]

    if self.compressed:
        msgs.append('Extracting {} to {}'.format(self.compressed, path_to))
        # BUGFIX: the previous command was `tar -xzf <tgz> <path>`, which asks
        # tar to extract a *member* named <path> from the archive — not to
        # extract into <path>. Use -C to set the output directory and anchor
        # the archive path under path_to, where it was downloaded.
        tgz_path = os.path.join(path_to, self.compressed)
        cmds.append('tar -xzf {tgz_file} -C {path} && rm -f {tgz_file}'.format(
            tgz_file=tgz_path,
            path=path_to
        ))

    return zip(msgs, cmds)
def infer_schema_from_dict(self, dyct):
    """Return the file schema, deriving one from a dict's keys if none is set.

    :param dyct: Mapping whose keys are taken as file names when no explicit
           schema was configured.
    :returns: The configured schema, or a list of :class:`FileSpec` built
              from the dictionary keys.
    """
    if self.schema is not None:
        return self.schema

    inferred = [FileSpec(name=key) for key in dyct]
    self._print_files(inferred)
    return inferred
def __init__(self, proj: Project, notebook: str, file_name: str):
    """Describe a notebook-based deployable for a project.

    :param proj: The owning project (only its name is kept).
    :param notebook: Notebook name, with or without its extension.
    :param file_name: The single required file in this deployable's schema.
    """
    self.proj_name = proj.name
    # BUGFIX: str.rstrip(chars) strips any trailing characters drawn from the
    # given *set*, not a suffix — e.g. 'train.ipynb'.rstrip('ipynb').rstrip('.')
    # yields 'tra'. Strip the extension as a real suffix instead.
    # Assumes Extension.IPYNB holds the extension without the dot (the
    # original chained .rstrip('.') suggests this) — TODO confirm.
    suffix = '.{}'.format(Extension.IPYNB)
    if notebook.endswith(suffix):
        notebook = notebook[:-len(suffix)]
    self.notebook = notebook
    self.subject = "notebook '{}'".format(self.notebook)
    super().__init__(
        schema=[
            FileSpec(name=file_name, required=True)
        ]
    )
def infer_schema_from_repo(self):
    """Return the file schema, listing the repository's files if none is set.

    When no explicit schema was configured, every file found under this
    artifact's hierarchy in the warehouse becomes a :class:`FileSpec`, and
    a warning is logged since no strict file definition exists.
    """
    if self.schema is not None:
        return self.schema

    self.LOG.warn("Deploying {} without a strict definition of files".format(self.subject))
    listed = self.warehouse.lyst(self.make_hierarchy())
    inferred = [FileSpec(name=entry) for entry in listed]
    self._print_files(inferred)
    return inferred
def infer_schema_from_path(self, path):
    """Resolve a local path into a (directory, file schema) pair.

    If no schema is configured, one is inferred: a directory yields one
    FileSpec per entry; a single file yields one FileSpec and *path* is
    rewritten to its parent directory. If a schema is configured and *path*
    is a file, a one-entry schema is aliased to that file's basename
    (logging the rename); otherwise exactly one required file must exist
    in the schema.

    :param path: An existing file or directory path.
    :returns: Tuple of (directory path, list of FileSpec).
    :raises AssertionError: If *path* does not exist, or if a multi-file
            schema cannot be satisfied by a single file path.
            NOTE(review): the second assert argument is an exception
            *instance* used as the assertion message — project convention,
            and stripped under ``python -O``.
    """
    assert os.path.exists(path), NhaStorageError("Path not found: {}".format(path))

    if self.schema is None:
        if os.path.isdir(path):
            # One spec per directory entry, names taken as-is.
            schema = [FileSpec(name=name) for name in os.listdir(path)]
        else:
            # Single file: schema is its basename, path becomes its parent.
            schema = [FileSpec(name=os.path.basename(path))]
            path = os.path.dirname(path)
        self._print_files(schema)
    else:
        schema = self.schema
        if os.path.isfile(path):
            if len(schema) == 1:
                # The given file will be stored under the schema's declared
                # name; keep its real basename as the alias for reading it.
                schema = [FileSpec(name=schema[0].name, alias=os.path.basename(path))]
                path = os.path.dirname(path)
                self.LOG.warn("File '{}' will be renamed to '{}'".format(schema[0].alias, schema[0].name))
            else:
                # A single file cannot satisfy a schema with several required
                # entries; exactly one required file is tolerated here.
                # NOTE(review): when this passes, path/schema are returned
                # unmodified — presumably handled downstream; verify.
                n_reqs = len(list(filter(lambda f: f.required, schema)))
                assert n_reqs == 1, NhaStorageError("Cannot find all required files in path {}".format(path))

    return path, schema
def __init__(self, schema: List[FileSpec] = None, compress_to: str = None, log=None, lightweight=False):
    """Set up the warehouse connection and normalize the file schema.

    :param schema: Optional list of file spec documents; ``None`` or an
           empty list means "no strict schema".
    :param compress_to: Optional base name; when given, files are handled
           as a single ``<compress_to>.tar.gz`` archive.
    :param log: Logger forwarded to the base class and the warehouse.
    :param lightweight: Forwarded to the warehouse factory.
    :raises NotImplementedError: If *schema* is neither ``None`` nor a list.
    """
    Logged.__init__(self, log=log)
    self.warehouse = get_warehouse(section=self.section, log=log, lightweight=lightweight)
    self.compressed = '{}.tar.gz'.format(compress_to) if compress_to else None

    if schema is None:
        self.schema = None
    elif not isinstance(schema, list):
        raise NotImplementedError()
    elif not schema:  # empty list is treated the same as no schema
        self.schema = None
    else:
        self.schema = [FileSpec.from_doc(doc) for doc in schema]
def deploy(self, path_to):
    """Deploy this artifact's files from the warehouse into a directory.

    When compressed, only the tarball is fetched, then decompressed in
    place; the resulting files are verified against the schema either way.

    :param path_to: Destination directory for the deployed files.
    """
    file_schema = (
        [FileSpec(name=self.compressed)]
        if self.compressed
        else self.infer_schema_from_repo()
    )

    self.warehouse.deploy_files(
        hierarchy=self.make_hierarchy(),
        file_schema=file_schema,
        path_to=path_to
    )

    self._decompress(path_to)
    self._verify_schema(path_to)
def _compress_and_store(self, path: str, to_compress: List[FileSpec] = None):
    """Pack the given files into this artifact's tar.gz and upload it.

    Files are read from *path* under each spec's alias and stored in the
    archive under the spec's canonical name. The archive is built in a
    temporary workpath that is always disposed of, even on failure.

    :param path: Directory containing the files to pack.
    :param to_compress: File specs to include; ``None`` is treated as an
           empty list (previously this raised TypeError when iterated).
    """
    work = Workpath.get_tmp()
    try:
        target = work.join(self.compressed)
        with tarfile.open(target, 'w:gz') as f:
            for file_spec in (to_compress or []):
                # alias = name on disk, name = canonical name in the archive
                file_path = os.path.join(path, file_spec.alias)
                f.add(file_path, arcname=file_spec.name)
        # A distinct variable for the archive's own spec: the original
        # reused the loop variable `file_spec` here, which was confusing.
        tgz_spec = FileSpec(name=self.compressed)
        self.validate_file_sizes([tgz_spec])
        self.warehouse.store_files(
            hierarchy=self.make_hierarchy(),
            file_schema=[tgz_spec]
        )
    finally:
        work.dispose()