def existing(workspace: String, full: SmartBoolean = False, order: Int = 1):
    """List existing region datasets in *workspace*.

    Args:
        workspace: Workspace identifier.
        full: When true, keys are prefixed with the region group
            (``'<group>/<name>'``); otherwise bare dataset names are used.
        order: Index into ``__region_names__`` selecting the region kind
            used to filter the listing.

    Returns:
        Mapping of dataset key -> ``dataset_repr`` dict.
    """
    # Renamed from ``filter`` to avoid shadowing the builtin; the keyword
    # argument name expected by ``ws.existing_datasets`` is unchanged.
    region_kind = __region_names__[order]
    datasets = ws.existing_datasets(
        workspace, group=__region_group__, filter=region_kind
    )
    if full:
        return {
            '{}/{}'.format(__region_group__, k): dataset_repr(v)
            for k, v in datasets.items()
        }
    return {k: dataset_repr(v) for k, v in datasets.items()}
def get_levels(workspace: String, full: SmartBoolean = False):
    """Return repr dicts for every level dataset in *workspace*.

    When *full* is set, each entry's ``id`` is rewritten to include the
    group pattern prefix (``'<group>/<id>'``).
    """
    found = ws.existing_datasets(workspace, group=__group_pattern__)
    levels = [dataset_repr(entry) for entry in found.values()]
    if full:
        for level in levels:
            level['id'] = '{}/{}'.format(__group_pattern__, level['id'])
    return levels
def create(workspace: String, order: Int = 1, big: bool = False):
    """Create a region dataset in *workspace*.

    Args:
        workspace: Workspace identifier.
        order: Index into ``__region_names__`` selecting the region kind.
        big: When true, labels use an unsigned 64-bit dtype; otherwise
            unsigned 32-bit.

    Returns:
        ``dataset_repr`` of the newly created dataset.
    """
    region_type = __region_names__[order]
    # The original duplicated the whole auto_create_dataset call in both
    # branches; only the dtype (and the log text) actually differed.
    # Log messages are kept verbatim.
    if big:
        logger.debug("Creating int64 regions")
        label_dtype = np.uint64
    else:
        logger.debug("Creating int32 regions")
        label_dtype = np.uint32
    ds = ws.auto_create_dataset(
        workspace,
        region_type,
        __region_group__,
        __region_dtype__,
        dtype=label_dtype,
        fill=__region_fill__,
    )
    ds.set_attr("kind", region_type)
    return dataset_repr(ds)
def existing(workspace: String, full: SmartBoolean = False, filter: SmartBoolean = True):
    """List existing feature datasets in *workspace*.

    Args:
        workspace: Workspace identifier.
        full: When true, keys are prefixed with the feature group.
        filter: When true, entries whose repr ``kind`` is 'unknown' are
            dropped from the result.

    Returns:
        Mapping of dataset key -> ``dataset_repr`` dict.
    """
    found = ws.existing_datasets(workspace, group=__feature_group__)
    if full:
        result = {
            '{}/{}'.format(__feature_group__, name): dataset_repr(ds)
            for name, ds in found.items()
        }
    else:
        result = {name: dataset_repr(ds) for name, ds in found.items()}
    if filter:
        result = {
            name: info for name, info in result.items()
            if info['kind'] != 'unknown'
        }
    return result
def create(workspace: String, feature_type: String):
    """Create a feature dataset of *feature_type* and return its repr."""
    ds = ws.auto_create_dataset(
        workspace,
        feature_type,
        __feature_group__,
        __feature_dtype__,
        fill=__feature_fill__,
    )
    ds.set_attr('kind', feature_type)
    return dataset_repr(ds)
def existing(
    workspace: String, full: SmartBoolean = False, filter: SmartBoolean = True
):
    """List existing pipeline datasets in *workspace*.

    Args:
        workspace: Workspace identifier.
        full: When true, keys are prefixed with the pipeline group.
        filter: When true, entries whose repr ``kind`` is 'unknown' are
            excluded.

    Returns:
        Mapping of dataset key -> ``dataset_repr`` dict.
    """
    found = ws.existing_datasets(workspace, group=__pipeline_group__)
    if full:
        result = {
            "{}/{}".format(__pipeline_group__, name): dataset_repr(ds)
            for name, ds in found.items()
        }
    else:
        result = {name: dataset_repr(ds) for name, ds in found.items()}
    if not filter:
        return result
    return {name: info for name, info in result.items() if info["kind"] != "unknown"}
def get_levels(workspace: String, full: SmartBoolean = False):
    """Return repr dicts for every level dataset in *workspace*.

    With *full* set, each entry's ``id`` gains the group-pattern prefix.
    NOTE(review): the original marked the *full* branch "unreached" —
    confirm whether any caller actually passes full=True.
    """
    found = ws.existing_datasets(workspace, group=__group_pattern__)
    levels = [dataset_repr(entry) for entry in found.values()]
    if full:
        for level in levels:
            level["id"] = "{}/{}".format(__group_pattern__, level["id"])
    return levels
def create(workspace: String, feature_type: String):
    """Create an empty feature dataset of *feature_type*.

    Returns:
        ``dataset_repr`` of the newly created dataset.
    """
    ds = ws.auto_create_dataset(
        workspace,
        feature_type,
        __feature_group__,
        __feature_dtype__,
        fill=__feature_fill__,
    )
    ds.set_attr("kind", feature_type)
    logger.debug(f"Created (empty) feature of kind {feature_type}")
    return dataset_repr(ds)
def add_level(workspace: String):
    """Create a new 'level' dataset in *workspace* and return its repr.

    The dataset is chunked (``CHUNK_SIZE``) and carries a per-chunk
    'modified' flag list initialised to zeros.
    """
    ds = ws.auto_create_dataset(
        workspace,
        'level',
        __group_pattern__,
        __level_dtype__,
        fill=__level_fill__,
        chunks=CHUNK_SIZE,
    )
    # Replaced a stray debug ``print(ds, type(ds))`` with the module
    # logger, matching the other add_level helper in this file.
    logger.debug(ds)
    ds.set_attr('kind', 'level')
    ds.set_attr('modified', [0] * ds.total_chunks)
    return dataset_repr(ds)
def create(workspace: String, pipeline_type: String):
    """Create a pipeline dataset of *pipeline_type* and return its repr."""
    ds = ws.auto_create_dataset(
        workspace,
        pipeline_type,
        __pipeline_group__,
        __pipeline_dtype__,
        fill=__pipeline_fill__,
    )
    ds.set_attr("kind", pipeline_type)
    return dataset_repr(ds)
def create(workspace: String, order: Int = 0):
    """Create an analyzer dataset.

    Args:
        workspace: Workspace identifier.
        order: Index into ``__analyzer_names__`` selecting the analyzer kind.

    Returns:
        ``dataset_repr`` of the newly created dataset.
    """
    kind = __analyzer_names__[order]
    ds = ws.auto_create_dataset(
        workspace,
        kind,
        __analyzer_group__,
        __analyzer_dtype__,
        fill=__analyzer_fill__,
    )
    ds.set_attr("kind", kind)
    return dataset_repr(ds)
def create(workspace: String, fullname: String, order: Int = 0):
    """Create an objects dataset.

    Args:
        workspace: Workspace identifier.
        fullname: Stored verbatim as the dataset's 'fullname' attribute.
        order: Index into ``__objects_names__`` selecting the objects kind.

    Returns:
        ``dataset_repr`` of the newly created dataset.
    """
    kind = __objects_names__[order]
    ds = ws.auto_create_dataset(
        workspace,
        kind,
        __objects_group__,
        __objects_dtype__,
        fill=__objects_fill__,
    )
    ds.set_attr("kind", kind)
    ds.set_attr("fullname", fullname)
    return dataset_repr(ds)
def add_level(workspace: String):
    """Create a new 'level' dataset in *workspace* and return its repr.

    The dataset is chunked (``CHUNK_SIZE``); a per-chunk 'modified'
    attribute is initialised to all zeros.
    """
    ds = ws.auto_create_dataset(
        workspace,
        "level",
        __group_pattern__,
        __level_dtype__,
        fill=__level_fill__,
        chunks=CHUNK_SIZE,
    )
    logger.debug(ds)
    ds.set_attr("kind", "level")
    ds.set_attr("modified", [0] * ds.total_chunks)
    return dataset_repr(ds)
def get_single_level(workspace: String, level: String):
    """Fetch one level dataset by name and return its repr dict."""
    dataset = ws.get_dataset(workspace, level, group=__group_pattern__)
    return dataset_repr(dataset)
def create(workspace: String, order: Int = 1):
    """Create a region dataset of the kind selected by *order*.

    Returns:
        ``dataset_repr`` of the newly created dataset.
    """
    kind = __region_names__[order]
    ds = ws.auto_create_dataset(
        workspace,
        kind,
        __region_group__,
        __region_dtype__,
        fill=__region_fill__,
    )
    ds.set_attr('kind', kind)
    return dataset_repr(ds)