def get_catalog_description(self):
    try:
        active_cache = config.get('ckan.cache.active', 'false')
        catalog = None  # type: CatalogDcatApOp

        if active_cache == 'true':
            # try to get the catalog from the cache first
            catalog_string = redis_cache.get_from_cache(
                self.cache_id, pool=redis_cache.MISC_POOL)
            if catalog_string:
                catalog = pickle.loads(catalog_string)
                log.info('Loaded catalog from cache: {0}'.format(
                    self.cache_id))

        if active_cache != 'true' or catalog is None:
            # cache miss: load from the triplestore and repopulate the cache
            self.get_description_from_ts()
            catalog = self
            redis_cache.flush_all_from_db(redis_cache.MISC_POOL)
            redis_cache.set_value_in_cache(self.cache_id,
                                           pickle.dumps(self),
                                           864000,
                                           pool=redis_cache.MISC_POOL)
        return catalog
    except BaseException:
        log.error(
            '[Catalog] Get catalog description failed for {0}'.format(
                self.catalog_uri))
        log.error(traceback.format_exc())
        return None
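
# A minimal usage sketch of the caching round-trip above (assuming a
# CatalogDcatApOp-like object exposing `cache_id` and this method, as in this
# module; the URI below is illustrative):
#
#     catalog = CatalogDcatApOp('http://data.europa.eu/88u/catalog/example')
#     description = catalog.get_catalog_description()  # miss: reads the triplestore
#     description = catalog.get_catalog_description()  # hit: served from Redis
#
# Cached entries expire after 864000 seconds (ten days).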
def package_delete(context, data_dict):
    '''Delete a dataset (package).

    You must be authorized to delete the dataset.

    :param id: the id or name of the dataset to delete
    :type id: string

    '''
    model = context['model']
    user = context['user']

    # package_show loads the dataset object into the context
    _get_action('package_show')(context, data_dict)

    entity = context['package']  # type: DatasetDcatApOp

    if entity is None:
        raise NotFound

    _check_access('package_delete', context, data_dict)

    if entity.has_doi_identifier():
        raise NotAuthorized('Cannot delete a dataset with a DOI.')

    rev = model.repo.new_revision()
    rev.author = user
    rev.message = _(u'REST API: Delete Package: %s') % entity.dataset_uri

    for item in plugins.PluginImplementations(plugins.IPackageController):
        item.delete(entity)
        item.after_delete(context, data_dict)

    result = entity.delete_from_ts()
    if result:
        mapping = DatasetIdMapping.by_internal_id(
            entity.dataset_uri.split('/')[-1])
        if mapping:
            mapping.delete_from_db()
        redis_cache.delete_value_from_cache(entity.dataset_uri)
        redis_cache.flush_all_from_db(redis_cache.MISC_POOL)
        package_index.remove_dict(entity)
        model.repo.commit()
        return True
    else:
        return False
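
# A hedged usage sketch: like core CKAN actions, package_delete is normally
# dispatched through get_action (the dataset id below is illustrative):
#
#     import ckan.logic as logic
#     context = {'model': model, 'user': 'editor-name'}
#     deleted = logic.get_action('package_delete')(context, {'id': 'my-dataset'})
#     # True when the triplestore removal succeeded, False otherwise.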
def dcat_term_translation_update_many(context, data_dict):
    '''Create or update many term translations at once.

    :param data_dict: the term translation dictionaries to create or update,
        e.g.::

            {
                "data": [
                    {
                        "lang_code": "fr",
                        "term": "English term",
                        "term_translation": "Translated term"
                    },
                    {
                        "lang_code": "de",
                        "term": "English term",
                        "term_translation": "Translated term"
                    }
                ],
                "resources": [
                    {
                        "de": [
                            {
                                "resource_id": "id of the resource",
                                "field": "name",
                                "field_translation": "Translated term",
                                "lang_code": "de"
                            }
                        ]
                    },
                    {
                        "fr": [
                            {
                                "resource_id": "id of the resource",
                                "field": "name",
                                "field_translation": "Translated term",
                                "lang_code": "fr"
                            }
                        ]
                    }
                ],
                "uri": "URI of the dataset"
            }
    :type data_dict: dict

    :returns: a dictionary with key ``'success'`` whose value is a string
        stating that the term translations of the dataset were updated
    :rtype: dictionary

    '''
    model = context['model']
    active_cache = config.get('ckan.cache.active', 'false')

    if not (data_dict.get('data') and isinstance(data_dict.get('data'), list)):
        raise ValidationError({
            'error':
            'term_translation_update_many needs to have a '
            'list of dicts in field data'
        })

    sorted_dict = {}

    for item in data_dict.get('data'):
        if not sorted_dict.get(item.get('term')):
            sorted_dict[item.get('term')] = {
                item.get('lang_code'): item.get('term_translation')
            }
        elif not sorted_dict.get(item.get('term')).get(item.get('lang_code')):
            sorted_dict[item.get('term')][item.get('lang_code')] = item.get(
                'term_translation')
    sorted_resources = {}

    for resource in data_dict.get('resources', []):
        for language in resource.values():
            for value in language:
                resource_fields = sorted_resources.setdefault(
                    value.get('resource_id'), {})
                resource_fields.setdefault(value.get('field'), []).append({
                    'lang': value.get('lang_code'),
                    'field_translation': value.get('field_translation')
                })
    context['defer_commit'] = True
    ds_uri = data_dict.get('uri')
    action = _get_action('package_show')
    action(context, {'uri': ds_uri})
    dataset = context.get('package')  # type: DatasetDcatApOp

    title_en = next((value for value in dataset.schema.title_dcterms.values()
                     if not value.lang or value.lang == 'en'), None)
    description_en = next(
        (value for value in dataset.schema.description_dcterms.values()
         if not value.lang or value.lang == 'en'), None)
    alt_title_en = next(
        (value for value in dataset.schema.alternative_dcterms.values()
         if not value.lang or value.lang == 'en'), None)

    if title_en:
        new_titles = {'0': title_en}
        for text, value in sorted_dict.items():
            if text == title_en.value_or_uri:
                for lang, translation in value.items():
                    new_titles[str(len(new_titles))] = ResourceValue(
                        translation, lang=lang)

        dataset.schema.title_dcterms = new_titles

    if description_en:
        new_descriptions = {'0': description_en}
        for text, value in sorted_dict.items():
            if text == description_en.value_or_uri:
                for lang, translation in value.items():
                    new_descriptions[str(
                        len(new_descriptions))] = ResourceValue(
                            translation, lang=lang)

        dataset.schema.description_dcterms = new_descriptions

    if alt_title_en:
        new_alt_titles = {'0': alt_title_en}
        for text, value in sorted_dict.items():
            if text == alt_title_en.value_or_uri:
                for lang, translation in value.items():
                    new_alt_titles[str(len(new_alt_titles))] = ResourceValue(
                        translation, lang=lang)

        dataset.schema.alternative_dcterms = new_alt_titles
    if dataset.schema.distribution_dcat:
        for uri, fields in sorted_resources.items():
            uri = "{0}/{1}".format("http://data.europa.eu/88u/distribution",
                                   uri)
            src_distribution = next(
                (dstr for dstr in dataset.schema.distribution_dcat.values()
                 if dstr.uri == uri), None)  # type: DistributionSchemaDcatApOp
            if not src_distribution:
                continue

            for field, translations in fields.items():
                if 'name' == field:
                    title_en = next(
                        (title
                         for title in src_distribution.title_dcterms.values()
                         if not title.lang or title.lang == 'en'))
                    new_translations = {'0': title_en}
                    for translation in translations:
                        new_translations[str(
                            len(new_translations))] = ResourceValue(
                                translation.get('field_translation'),
                                lang=translation.get('lang'))

                    src_distribution.title_dcterms = new_translations
                elif 'description' == field:
                    description_en = next(
                        (desc for desc in
                         src_distribution.description_dcterms.values()
                         if not desc.lang or desc.lang == 'en'))
                    new_translations = {'0': description_en}
                    for translation in translations:
                        new_translations[str(
                            len(new_translations))] = ResourceValue(
                                translation.get('field_translation'),
                                lang=translation.get('lang'))

                    src_distribution.description_dcterms = new_translations

    if dataset.schema.topic_foaf:
        for uri, fields in sorted_resources.items():
            uri = "{0}/{1}".format("http://data.europa.eu/88u/document", uri)
            src_document = next(
                (doc for doc in dataset.schema.topic_foaf.values()
                 if doc.uri == uri), None)  # type: DocumentSchemaDcatApOp
            if not src_document:
                continue

            for field, translations in fields.items():
                if 'name' == field:
                    title_en = next(
                        (title
                         for title in src_document.title_dcterms.values()
                         if not title.lang or title.lang == 'en'))
                    new_translations = {'0': title_en}
                    for translation in translations:
                        new_translations[str(
                            len(new_translations))] = ResourceValue(
                                translation.get('field_translation'),
                                lang=translation.get('lang'))

                    src_document.title_dcterms = new_translations
                elif 'description' == field:
                    description_en = next(
                        (desc
                         for desc in src_document.description_dcterms.values()
                         if not desc.lang or desc.lang == 'en'))
                    new_translations = {'0': description_en}
                    for translation in translations:
                        new_translations[str(
                            len(new_translations))] = ResourceValue(
                                translation.get('field_translation'),
                                lang=translation.get('lang'))

                    src_document.description_dcterms = new_translations

    try:
        result = dataset.save_to_ts()
    except BaseException:
        log.error('Error while saving the package {0} to Virtuoso.'.format(
            dataset.dataset_uri))
        raise ActionError(
            'Error while saving the package {0} to Virtuoso.'.format(
                dataset.dataset_uri))

    if dataset.privacy_state == 'public' and active_cache == 'true':
        redis_cache.set_value_no_ttl_in_cache(dataset.dataset_uri,
                                              pickle.dumps(dataset))
    else:
        redis_cache.delete_value_from_cache(dataset.dataset_uri)

    try:
        redis_cache.flush_all_from_db(redis_cache.MISC_POOL)
        search.rebuild(dataset.dataset_uri.split('/')[-1])
    except Exception:
        log.error('Error while indexing the package {0} to Solr.'.format(
            dataset.dataset_uri))
    if result:
        return {'success': '%s updated' % ds_uri}
    else:
        return {'fails': '%s not updated' % ds_uri}
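
# A minimal payload sketch for the action above, following the docstring
# structure (URIs, ids and translations are illustrative):
#
#     data_dict = {
#         'uri': 'http://data.europa.eu/88u/dataset/example',
#         'data': [
#             {'lang_code': 'fr', 'term': 'Population by region',
#              'term_translation': 'Population par region'},
#         ],
#         'resources': [
#             {'fr': [{'resource_id': 'abc123', 'field': 'name',
#                      'field_translation': 'Nom traduit',
#                      'lang_code': 'fr'}]},
#         ],
#     }
#     result = dcat_term_translation_update_many(context, data_dict)
#     # => {'success': 'http://data.europa.eu/88u/dataset/example updated'}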
def package_update(context, data_dict):
    '''
    Override of core package_update to deal with DCAT-AP datasets.

    It also handles the old-style CKAN property keys used by the API.
    :param context:
    :param data_dict:
    :return:
    '''
    user = context['user']
    dataset = None  # type: DatasetDcatApOp
    active_cache = config.get('ckan.cache.active', 'false')
    _check_access('package_update', context, data_dict)
    old_dataset = None
    rdft = True
    if 'DCATAP' == context.get('model', ''):
        package_show_action = 'package_show'
        # package_show populates context['package'] with the dataset object
        logic.get_action('package_show')(context, {
            'id': data_dict.get('id')
        })
        dataset = context['package']
        dataset.update_dataset_for_package_dict(data_dict, {}, context)
        old_dataset = pickle.dumps(dataset)
        context['package'] = dataset

    else:  # old model, use migration; this can also be the new model coming from the UI
        # prepare the dataset object with the migration function
        package_show_action = 'legacy_package_show'
        rdft = False
        if config.get('ckan.ecodp.backward_compatibility',
                      'true') in ('false', 'False'):
            raise logic.NotFound('Function not available')

        logic.get_action('package_show')(context, {
            'id': data_dict.get('name')
        })
        dataset = context['package']
        old_dataset = pickle.dumps(dataset)
        try:
            dataset = dataset_transition_util.update_dataset_for_package_dict(
                dataset, data_dict)
        except ValidationError as e:
            import traceback
            log.error('{0}'.format(e))
            log.error(traceback.format_exc())
            raise e
        except BaseException:
            import traceback
            log.error(traceback.format_exc())
            raise ValidationError(
                'Could not transform {0} to the new model'.format(
                    dataset.dataset_uri))
    start = time.time()
    dataset, errors = validation.validate_dacat_dataset(dataset, context)
    context['errors'] = errors
    log.info('validation took {0} sec'.format(time.time() - start))
    # TODO check the business rule of save
    if errors.get('fatal'):
        raise ValidationError(errors)
    elif errors.get(
            'error') and dataset.privacy_state == DCATAPOP_PUBLIC_DATASET:
        raise ValidationError(errors)
    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(
            u'REST API: Update object %s') % dataset.dataset_uri.split('/')[-1]

    try:
        save_to_ts_status = dataset.save_to_ts(rev.id)
    except BaseException:
        log.error('Error while saving the package {0} to Virtuoso.'.format(
            dataset.dataset_uri))
        model.repo.rollback()
        raise ActionError(
            'Error while saving the package {0} to Virtuoso.'.format(
                dataset.dataset_uri))

    if save_to_ts_status:
        context_org_update = context.copy()
        context_org_update['ignore_auth'] = True
        context_org_update['defer_commit'] = True
        if not rdft:
            ext_id = data_dict.get('url')
            publisher = data_dict.get('owner_org')
            int_id = dataset.dataset_uri.split('/')[-1]
            mapping = DatasetIdMapping.by_internal_id(int_id)
            if not mapping:
                mapping = DatasetIdMapping(ext_id, int_id, publisher)
                mapping.save_to_db()
            else:
                mapping.publisher = publisher
                mapping.external_id = ext_id
                mapping.update_db()

        for item in plugins.PluginImplementations(plugins.IPackageController):
            item.edit(dataset)
            item.after_update(context, dataset)

        log.debug('Updated object %s' % dataset.dataset_uri)

        # Make sure that a user provided schema is not used on package_show
        context.pop('schema', None)

        if dataset.privacy_state == 'public' and active_cache == 'true':
            redis_cache.set_value_no_ttl_in_cache(dataset.dataset_uri,
                                                  pickle.dumps(dataset))
        else:
            redis_cache.delete_value_from_cache(dataset.dataset_uri)

        try:
            redis_cache.flush_all_from_db(redis_cache.MISC_POOL)
            search.rebuild(dataset.dataset_uri.split('/')[-1])
        except BaseException:
            log.error('Error while indexing the package {0} to Solr.'.format(
                dataset.dataset_uri))
            # Restore the previous state of the dataset in the triplestore
            # and the index, then abort.
            old_dataset = pickle.loads(old_dataset)
            dataset.schema = old_dataset.schema
            dataset.schema_catalog_record = old_dataset.schema_catalog_record
            dataset.privacy_state = old_dataset.privacy_state
            dataset.save_to_ts()
            search.rebuild(dataset.dataset_uri.split('/')[-1])
            model.repo.rollback()
            raise ActionError(
                'Error while indexing the package {0} to Solr.'.format(
                    dataset.dataset_uri))

        if not context.get('defer_commit'):
            model.repo.commit()

        for item in plugins.PluginImplementations(plugins.IResourceUrlChange):
            if item.name != 'qa':
                item.notify(dataset,
                            model.domain_object.DomainObjectOperation.changed)

        # The user just updated the dataset, so they should still be allowed
        # to read it.
        context['ignore_auth'] = True
        return_id_only = context.get('return_id_only', False)
        if return_id_only:
            output = dataset.dataset_uri
        elif 'legacy_package_show' == package_show_action:
            output = _get_action(package_show_action)(
                context, {
                    'uri': dataset.dataset_uri
                })
        else:
            _get_action(package_show_action)(context, {
                'uri': dataset.dataset_uri
            })
            output = context.get('package')

        return output
    else:
        log.error('[Action] [Update] [Failed] [Dataset:<{0}>]'.format(
            dataset.dataset_uri))
        raise ActionError(
            'Error while saving the package {0} to Virtuoso.'.format(
                dataset.dataset_uri))
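
# A hedged usage sketch for the override above (identifiers are illustrative).
# Note that this code path treats context['model'] as a flag: the literal
# string 'DCATAP' selects the new-model branch, anything else the legacy
# migration branch:
#
#     context = {'model': 'DCATAP', 'user': 'editor-name'}
#     updated = package_update(context, {
#         'id': 'http://data.europa.eu/88u/dataset/example',
#         'title': 'Updated title',
#     })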
def resource_create(context, data_dict):
    '''Create a resource on a DCAT-AP dataset and re-save the dataset.

    The parent dataset is looked up via ``package_show``, migrated and
    validated, then written back to the triplestore, cached and re-indexed.

    :param package_id: the id of the dataset the resource belongs to
    :type package_id: string
    '''
    user = context['user']
    dataset = None  # type: DatasetDcatApOp
    active_cache = config.get('ckan.cache.active', 'false')
    _check_access('package_update', context, data_dict)

    # package_show populates context['package'] with the dataset object
    logic.get_action('package_show')(context, {
        'id': data_dict.pop('package_id', '')
    })
    dataset = context['package']

    old_dataset = pickle.dumps(dataset)
    try:
        dataset = dataset_transition_util.update_dataset_for_package_dict(
            dataset, data_dict)
        dataset = dataset_transition_util.update_resources_for_dataset(
            [data_dict], dataset, dataset)
    except ValidationError as e:
        import traceback
        log.error('{0}'.format(e))
        log.error(traceback.format_exc())
        raise e
    except Exception:
        import traceback
        log.error(traceback.format_exc())
        raise ValidationError(
            'Could not transform {0} to the new model'.format(
                dataset.dataset_uri))
    start = time.time()
    dataset, errors = validation.validate_dacat_dataset(dataset, context)
    context['errors'] = errors
    log.info('validation took {0} sec'.format(time.time() - start))
    # TODO check the business rule of save
    if errors.get('fatal'):
        raise ValidationError(errors)
    elif errors.get(
            'error') and dataset.privacy_state == DCATAPOP_PUBLIC_DATASET:
        raise ValidationError(errors)
    # Non-fatal errors on a private dataset are tolerated; the draft can be
    # corrected before publication.

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(
            u'REST API: Update object %s') % dataset.dataset_uri.split('/')[-1]

    try:
        result = dataset.save_to_ts(rev.id)
    except BaseException:
        log.error('Error while saving the package {0} to Virtuoso.'.format(
            dataset.dataset_uri))
        model.repo.rollback()
        raise ActionError(
            'Error while saving the package {0} to Virtuoso.'.format(
                dataset.dataset_uri))

    context_org_update = context.copy()
    context_org_update['ignore_auth'] = True
    context_org_update['defer_commit'] = True

    for item in lib_plugins.PluginImplementations(
            lib_plugins.IPackageController):
        item.edit(dataset)
        item.after_update(context, dataset)

    log.debug('Updated object %s' % dataset.dataset_uri)

    # Make sure that a user provided schema is not used on package_show
    context.pop('schema', None)

    if dataset.privacy_state == 'public' and active_cache == 'true':
        redis_cache.set_value_no_ttl_in_cache(dataset.dataset_uri,
                                              pickle.dumps(dataset))
    else:
        redis_cache.delete_value_from_cache(dataset.dataset_uri)

    try:
        redis_cache.flush_all_from_db(redis_cache.MISC_POOL)
        search.rebuild(dataset.dataset_uri.split('/')[-1])
    except Exception:
        log.error('Error while indexing the package {0} to Solr.'.format(
            dataset.dataset_uri))
        # Restore the previous state of the dataset in the triplestore and
        # the index, then abort.
        old_dataset = pickle.loads(old_dataset)
        dataset.schema = old_dataset.schema
        dataset.schema_catalog_record = old_dataset.schema_catalog_record
        dataset.privacy_state = old_dataset.privacy_state
        dataset.save_to_ts()
        search.rebuild(dataset.dataset_uri.split('/')[-1])
        model.repo.rollback()
        raise ActionError(
            'Error while indexing the package {0} to Solr.'.format(
                dataset.dataset_uri))

    if not context.get('defer_commit'):
        model.repo.commit()

    for item in lib_plugins.PluginImplementations(
            lib_plugins.IResourceUrlChange):
        if item.name != 'qa':
            item.notify(dataset,
                        model.domain_object.DomainObjectOperation.changed)

    # The user just updated the dataset, so they should still be allowed to
    # read it.
    context['ignore_auth'] = True

    output = _get_action('legacy_package_show')(context, {
        'uri': dataset.dataset_uri
    })

    return output
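
# A hedged usage sketch for resource_create (identifiers are illustrative):
# the parent dataset id travels in data_dict as 'package_id' and is popped off
# before the migration helpers run; the remaining keys describe the resource.
#
#     context = {'user': 'editor-name'}
#     output = resource_create(context, {
#         'package_id': 'my-dataset',
#         'url': 'http://example.org/data.csv',
#         'name': 'Example CSV',
#         'format': 'CSV',
#     })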