Example 1
    def can_update(self, dataset, updates_allowed=None):
        """
        Check if dataset can be updated. Return bool,safe_changes,unsafe_changes

        :param datacube.model.Dataset dataset: Dataset to update
        :param dict updates_allowed: Allowed updates
        :rtype: bool,list[change],list[change]
        """
        existing = self.get(dataset.id, include_sources=True)
        if not existing:
            raise ValueError(
                'Unknown dataset %s, cannot update – did you intend to add it?'
                % dataset.id)

        if dataset.type.name != existing.type.name:
            raise ValueError(
                'Changing product is not supported. From %s to %s in %s' %
                (existing.type.name, dataset.type.name, dataset.id))

        # TODO: figure out (un)safe changes from metadata type?
        allowed = {
            # can always add more metadata
            tuple(): changes.allow_extension,
        }
        allowed.update(updates_allowed or {})

        doc_changes = get_doc_changes(existing.metadata_doc,
                                      jsonify_document(dataset.metadata_doc))
        good_changes, bad_changes = changes.classify_changes(
            doc_changes, allowed)

        return not bad_changes, good_changes, bad_changes
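
A minimal usage sketch for the triple returned above; the names are illustrative, assuming `index` is a datacube index whose `datasets` resource exposes `can_update` and a matching `update` method, and `ds` is the locally modified Dataset:

can_apply, safe_changes, unsafe_changes = index.datasets.can_update(ds)

# Each change is an (offset, old_value, new_value) tuple; offsets are tuples of document keys.
for offset, old_val, new_val in unsafe_changes:
    print('unsafe change at %s: %r -> %r' % ('.'.join(map(str, offset)), old_val, new_val))

if can_apply:
    index.datasets.update(ds)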
Example 2
    def can_update(self, product, allow_unsafe_updates=False):
        """
        Check if product can be updated. Return bool,safe_changes,unsafe_changes

        (An unsafe change is anything that may potentially make the product
        incompatible with existing datasets of that type)

        :param datacube.model.DatasetType product: Product to update
        :param bool allow_unsafe_updates: Allow unsafe changes. Use with caution.
        :rtype: bool,list[change],list[change]
        """
        DatasetType.validate(product.definition)

        existing = self.get_by_name(product.name)
        if not existing:
            raise ValueError('Unknown product %s, cannot update – did you intend to add it?' % product.name)

        updates_allowed = {
            ('description',): changes.allow_any,
            ('metadata_type',): changes.allow_any,

            # You can safely make the match rules looser but not tighter.
            # Tightening them could exclude datasets already matched to the product.
            # (which would make search results wrong)
            ('metadata',): changes.allow_truncation
        }

        doc_changes = get_doc_changes(existing.definition, jsonify_document(product.definition))
        good_changes, bad_changes = changes.classify_changes(doc_changes, updates_allowed)

        return allow_unsafe_updates or not bad_changes, good_changes, bad_changes
Example 3
    def can_update(self, metadata_type, allow_unsafe_updates=False):
        """
        Check if metadata type can be updated. Return bool,safe_changes,unsafe_changes

        Safe updates currently allow new search fields to be added, description to be changed.

        :param datacube.model.MetadataType metadata_type: updated MetadataType
        :param bool allow_unsafe_updates: Allow unsafe changes. Use with caution.
        :rtype: bool,list[change],list[change]
        """
        MetadataType.validate(metadata_type.definition)

        existing = self.get_by_name(metadata_type.name)
        if not existing:
            raise ValueError(
                'Unknown metadata type %s, cannot update – did you intend to add it?'
                % metadata_type.name)

        updates_allowed = {
            ('description',): changes.allow_any,
            # You can add new fields safely but not modify existing ones.
            ('dataset',): changes.allow_extension,
            ('dataset', 'search_fields'): changes.allow_extension,
        }

        doc_changes = get_doc_changes(
            existing.definition, jsonify_document(metadata_type.definition))
        good_changes, bad_changes = changes.classify_changes(
            doc_changes, updates_allowed)

        return allow_unsafe_updates or not bad_changes, good_changes, bad_changes
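
A small sketch of how the allow_extension rule classifies such a change; the documents are illustrative, and it assumes these helpers live in `datacube.utils.changes` (the module the examples above use via the `changes` name):

from datacube.utils import changes

old_doc = {'dataset': {'search_fields': {'platform': {'offset': ['platform', 'code']}}}}
new_doc = {'dataset': {'search_fields': {'platform': {'offset': ['platform', 'code']},
                                         'instrument': {'offset': ['instrument', 'name']}}}}

# Expected: a single change at ('dataset', 'search_fields', 'instrument'), from None to the new field.
doc_changes = changes.get_doc_changes(old_doc, new_doc)

good, bad = changes.classify_changes(doc_changes, {('dataset', 'search_fields'): changes.allow_extension})
# A brand-new search field only adds information, so it should land in `good`;
# modifying an existing field would land in `bad`, per the comment in the code above.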
Example 4
    def can_update(self, product, allow_unsafe_updates=False):
        """
        Check if product can be updated. Return bool,safe_changes,unsafe_changes

        (An unsafe change is anything that may potentially make the product
        incompatible with existing datasets of that type)

        :param DatasetType product: Product to update
        :param bool allow_unsafe_updates: Allow unsafe changes. Use with caution.
        :rtype: bool,list[change],list[change]
        """
        DatasetType.validate(product.definition)

        existing = self.get_by_name(product.name)
        if not existing:
            raise ValueError(
                'Unknown product %s, cannot update – did you intend to add it?'
                % product.name)

        updates_allowed = {
            ('description',): changes.allow_any,
            ('license',): changes.allow_any,
            ('metadata_type',): changes.allow_any,

            # You can safely make the match rules looser but not tighter.
            # Tightening them could exclude datasets already matched to the product.
            # (which would make search results wrong)
            ('metadata',): changes.allow_truncation,

            # Some old storage fields should not be in the product definition any more: allow removal.
            ('storage', 'chunking'): changes.allow_removal,
            ('storage', 'driver'): changes.allow_removal,
            ('storage', 'dimension_order'): changes.allow_removal,
        }

        doc_changes = get_doc_changes(existing.definition,
                                      jsonify_document(product.definition))
        good_changes, bad_changes = changes.classify_changes(
            doc_changes, updates_allowed)

        for offset, old_val, new_val in good_changes:
            _LOG.info("Safe change in %s from %r to %r",
                      _readable_offset(offset), old_val, new_val)

        for offset, old_val, new_val in bad_changes:
            _LOG.warning("Unsafe change in %s from %r to %r",
                         _readable_offset(offset), old_val, new_val)

        return allow_unsafe_updates or not bad_changes, good_changes, bad_changes
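
A usage sketch for the product variant, again with illustrative names (`index` as a datacube index, `product` the updated DatasetType); the forced-update call mirrors the one used in Example 6 below:

can_apply, safe_changes, unsafe_changes = index.products.can_update(product)

if can_apply:
    index.products.update(product)
else:
    # Review unsafe_changes first; if they are acceptable, the update can be forced:
    index.products.update(product, allow_unsafe_updates=True)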
Example 5
def check_consistent(a, b):
    """Return (True, None) if documents a and b match, else (False, readable summary of differences)."""
    diffs = get_doc_changes(a, b)
    if len(diffs) == 0:
        return True, None

    def render_diff(offset, a, b):
        offset = '.'.join(map(str, offset))
        return '{}: {!r}!={!r}'.format(offset, a, b)

    return False, ", ".join([render_diff(offset, a, b) for offset, a, b in diffs])
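
For illustration, matching documents yield (True, None), while a nested mismatch yields False plus a dotted-offset summary (expected values, assuming get_doc_changes reports nested differences as in the tests further below):

ok, message = check_consistent({'a': 1, 'b': {'c': 2}}, {'a': 1, 'b': {'c': 2}})
# -> (True, None)

ok, message = check_consistent({'a': 1, 'b': {'c': 2}}, {'a': 1, 'b': {'c': 3}})
# -> (False, 'b.c: 2!=3')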
Example 6
def make_output_type(index, config):
    """Derive an output DatasetType from the configured source type, add or update it in the index,
    and return (source_type, output_type)."""
    source_type = index.products.get_by_name(config['source_type'])
    if not source_type:
        click.echo("Source DatasetType %s does not exist" %
                   config['source_type'])
        click.get_current_context().exit(1)

    output_type = morph_dataset_type(source_type, config)
    _LOG.info('Created DatasetType %s', output_type.name)

    # Some storage fields should not be in the product definition, and should be removed.
    # To handle backwards compatibility for now, ignore them with custom rules,
    # rather than using the default checks done by index.products.add
    existing = index.products.get_by_name(output_type.name)
    backwards_compatible_fields = True
    if existing and backwards_compatible_fields:
        updates_allowed = {
            ('description', ): changes.allow_any,
            ('metadata_type', ): changes.allow_any,
            ('storage', 'chunking'): changes.allow_any,
            ('storage', 'driver'): changes.allow_any,
            ('storage', 'dimension_order'): changes.allow_any,
            ('metadata', ): changes.allow_truncation
        }

        doc_changes = changes.get_doc_changes(
            output_type.definition,
            datacube.utils.jsonify_document(existing.definition))
        good_changes, bad_changes = changes.classify_changes(
            doc_changes, updates_allowed)
        if bad_changes:
            raise ValueError('{} differs from stored ({})'.format(
                output_type.name, ', '.join([
                    '{}: {!r}!={!r}'.format('.'.join(offset), v1, v2)
                    for offset, v1, v2 in bad_changes
                ])))
        output_type = index.products.update(output_type,
                                            allow_unsafe_updates=True)
    else:
        output_type = index.products.add(output_type)

    return source_type, output_type
Example 7
def test_get_doc_changes_w_baseprefix():
    rval = get_doc_changes({}, None, base_prefix=('a', ))
    assert rval == [(('a', ), {}, None)]
Example 8
def test_get_doc_changes(v1, v2, expected):
    rval = get_doc_changes(v1, v2)
    assert rval == expected
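
The parametrized values for the last test are not shown above. A few illustrative cases of the (offset, old_value, new_value) triples that get_doc_changes produces, consistent with the behaviour seen in the earlier examples (expected outputs, not taken from the original test data):

# Identical documents: no changes.
assert get_doc_changes({'a': 1}, {'a': 1}) == []

# A changed top-level value is reported with a single-element offset.
assert get_doc_changes({'a': 1}, {'a': 2}) == [(('a',), 1, 2)]

# A changed nested value is reported at its full offset.
assert get_doc_changes({'a': {'b': 1}}, {'a': {'b': 2}}) == [(('a', 'b'), 1, 2)]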