import logging

from datacube.utils import changes

# NOTE: create_dataset() is a helper defined elsewhere in the indexing script;
# it builds a datacube Dataset object from the metadata doc, its URI and the
# dataset-type matching rules.


def add_dataset(doc, uri, rules, index):
    dataset = create_dataset(doc, uri, rules)
    try:
        index.datasets.add(dataset, sources_policy='skip')
    except changes.DocumentMismatchError as e:
        # The dataset is already indexed with a mismatching document;
        # update it in place, allowing any change.
        index.datasets.update(dataset, {tuple(): changes.allow_any})
    return uri

def add_dataset(doc, uri, rules, index):
    dataset = create_dataset(doc, uri, rules)
    try:
        # Sources policy to be checked in Sentinel-2 dataset types
        index.datasets.add(dataset)
    except changes.DocumentMismatchError as e:
        index.datasets.update(dataset, {tuple(): changes.allow_any})
    return uri

def archive_dataset(doc, uri, rules, index, sources_policy):
    def get_ids(dataset):
        ds = index.datasets.get(dataset.id, include_sources=True)
        for source in ds.sources.values():
            yield source.id
        yield dataset.id

    dataset = create_dataset(doc, uri, rules)
    index.datasets.archive(get_ids(dataset))
    logging.debug("Archiving %s and all sources of %s", dataset.id, dataset.id)

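# Usage sketch for archive_dataset (a minimal sketch, not part of the original
# script: it assumes an index obtained via datacube's index_connect() and that
# doc/uri/rules are produced by the surrounding indexing code; the
# sources_policy argument is accepted but unused by the function body):
#
#     from datacube.index import index_connect
#
#     index = index_connect()
#     archive_dataset(doc, uri, rules, index, sources_policy='skip')
#
# get_ids() yields the ids of the dataset's *direct* sources only, so one
# level of provenance is archived along with the dataset itself.
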
def add_dataset(doc, uri, rules, index, sources_policy):
    dataset = create_dataset(doc, uri, rules)
    try:
        # Sources policy to be checked in Sentinel-2 dataset types
        index.datasets.add(dataset, sources_policy=sources_policy)
    except changes.DocumentMismatchError as e:
        index.datasets.update(dataset, {tuple(): changes.allow_any})
    logging.info("Indexing %s", uri)
    return uri

def add_dataset(doc, uri, rules, index):
    """
    Add a single dataset to the PostgreSQL index.

    1. Call create_dataset() to build the dataset object from the
       newly created JSON doc.
    2. Call index.datasets.add() with the dataset object to populate
       the DB.
    """
    dataset = create_dataset(doc, uri, rules)
    try:
        index.datasets.add(dataset, sources_policy='skip')
    except changes.DocumentMismatchError as e:
        index.datasets.update(dataset, {tuple(): changes.allow_any})
    return uri
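
# End-to-end usage sketch (a minimal sketch under assumptions, not the
# script's actual entry point: the YAML path is hypothetical and `rules`
# are assumed to be built elsewhere in the script):
#
#     import yaml
#     from datacube.index import index_connect
#
#     uri = 'file:///data/scene/agdc-metadata.yaml'
#     with open('/data/scene/agdc-metadata.yaml') as f:
#         doc = yaml.safe_load(f)
#
#     index = index_connect()
#     add_dataset(doc, uri, rules, index)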