Example #1
    def handle(self, *args, **options):
        for resource in Resource.objects.all():
            logger.warning('Save resource: {pk}'.format(pk=resource.pk))
            try:
                resource.save(current_user=None, synchronize=True)
            except Exception as e:
                logger.error(e)
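
For context, a minimal sketch of the management command this handle() method would sit in, assuming the usual Django layout (the import path, help text and logger name are illustrative, not taken from the source):

# Sketch only; the surrounding class is assumed, not shown in the example.
import logging

from django.core.management.base import BaseCommand

from idgo_admin.models import Resource  # assumed import path

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    help = "Force-save every Resource so it is re-synchronized."

    def handle(self, *args, **options):
        # Body exactly as in the example above.
        ...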
Example #2
    def delete(self, *args, current_user=None, **kwargs):
        with_user = current_user

        # Delete the CKAN resource
        if with_user:
            username = with_user.username
            apikey = CkanHandler.get_user(username)['apikey']
            with CkanUserHandler(apikey=apikey) as ckan_user:
                ckan_user.delete_resource(self.name)
        else:
            CkanHandler.delete_resource(self.name)

        # Delete the MRA resources
        try:
            MRAHandler.del_layer(self.name)
            ws_name = self.resource.dataset.organisation.slug
            if self.type == 'vector':
                MRAHandler.del_featuretype(ws_name, 'public', self.name)
            if self.type == 'raster':
                MRAHandler.del_coverage(ws_name, self.name, self.name)
                # MRAHandler.del_coveragestore(ws_name, self.name)
        except Exception as e:
            logger.error(e)

        # Delete the PostGIS data table
        try:
            drop_table(self.name)
        except Exception as e:
            logger.error(e)

        # Finally, delete the instance
        super().delete(*args, **kwargs)
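
Judging by the references to self.resource and self.type, this delete() appears to live on a layer-like model. A hedged usage sketch (the Layer model name and import path are assumptions):

# Illustrative only; the model name and import path are assumed.
from idgo_admin.models import Layer


def delete_layer_as(user, layer_name):
    layer = Layer.objects.get(name=layer_name)
    # With current_user set, the CKAN call uses that user's API key;
    # without it, CkanHandler falls back to the service account.
    layer.delete(current_user=user)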
Example #3
def synchronize(modeladmin, request, queryset):
    for dataset in queryset:
        logger.info('Force save dataset {pk}: {slug}'.format(
            slug=dataset.slug or slugify(dataset.title), pk=dataset.pk))
        try:
            dataset.save(current_user=None, synchronize=True)
        except Exception as e:
            logger.error(e)
            continue
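
This function has the signature of a Django admin action; a sketch of how it might be registered (the DatasetAdmin class and the Dataset import path are assumptions, not part of the source):

# Sketch of registering the action; the ModelAdmin is hypothetical.
from django.contrib import admin

from idgo_admin.models import Dataset  # assumed import path

synchronize.short_description = "Synchronize the selected datasets"


@admin.register(Dataset)
class DatasetAdmin(admin.ModelAdmin):
    actions = [synchronize]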
Example #4
    def export_users_view(self, request):

        try:
            data = self.get_users_data()
        except Exception as e:
            logger.error(e)
            messages.error(request, "Une erreur est survenue.")
            url = reverse(
                'admin:auth_user_changelist', current_app=self.admin_site.name)
            return HttpResponseRedirect(url)

        response = HttpResponse(content_type='text/csv')
        writer = csv.DictWriter(response, fieldnames=data[0].keys())
        writer.writeheader()
        writer.writerows(data)
        response['Content-Disposition'] = "attachment; filename=export_users_{}.csv".format(
            timezone.now())

        return response
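
A hedged sketch of how such a view is typically exposed from a custom admin class via get_urls(); the URL path, URL name and admin class below are illustrative assumptions:

# Sketch only; path, URL name and admin class are assumed.
from django.contrib.auth.admin import UserAdmin
from django.urls import path


class CustomUserAdmin(UserAdmin):
    # export_users_view() and get_users_data() as in the example above.

    def get_urls(self):
        # Expose the export view before the default patterns so it is matched first.
        urls = [
            path('export/',
                 self.admin_site.admin_view(self.export_users_view),
                 name='export_users'),
        ]
        return urls + super().get_urls()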
Example #5
def sender(template_name, to=None, cc=None, bcc=None, attach_files=None, **kvp):
    try:
        tmpl = Mail.objects.get(template_name=template_name)
    except Mail.DoesNotExist:
        return

    if to and cc:
        for v in to:
            try:
                cc.remove(v)
            except ValueError:
                continue

    if to and bcc:
        for v in to:
            try:
                bcc.remove(v)
            except ValueError:
                continue

    subject = tmpl.subject.format(**kvp)
    body = PartialFormatter().format(tmpl.message, **kvp)
    from_email = DEFAULT_FROM_EMAIL
    connection = get_connection(fail_silently=False)

    mail = EmailMultiAlternatives(
        subject=subject, body=body,
        from_email=from_email, to=to,
        cc=cc, bcc=bcc, connection=connection)

    for attach_file in attach_files or []:
        mail.attach_file(attach_file)

    try:
        mail.send()
    except SMTPDataError as e:
        logger.error(e)
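
PartialFormatter is not shown in the example; presumably it formats the template while leaving unknown placeholders untouched instead of raising. A minimal sketch under that assumption:

# Assumed behaviour of PartialFormatter: missing keys survive formatting.
from string import Formatter


class PartialFormatter(Formatter):

    def get_value(self, key, args, kwargs):
        try:
            return super().get_value(key, args, kwargs)
        except (KeyError, IndexError):
            # Re-emit the placeholder instead of raising.
            return '{' + str(key) + '}'

With that, PartialFormatter().format('Hello {first_name} {unknown}', first_name='Ada') would yield 'Hello Ada {unknown}'.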
Example #6
    def save(self, *args, harvest=True, **kwargs):
        Category = apps.get_model(app_label='idgo_admin',
                                  model_name='Category')
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        License = apps.get_model(app_label='idgo_admin', model_name='License')
        Resource = apps.get_model(app_label='idgo_admin',
                                  model_name='Resource')
        ResourceFormats = apps.get_model(app_label='idgo_admin',
                                         model_name='ResourceFormats')

        # (1) Delete the datasets that are no longer synchronized
        previous = self.pk and RemoteCsw.objects.get(pk=self.pk)
        if previous:
            for dataset in Dataset.harvested_csw.filter(
                    remote_instance=previous):
                dataset.delete()
        else:
            # On creation, check that the CSW URL is valid
            try:
                with CswBaseHandler(self.url):
                    pass
            except CswBaseError as e:
                raise ValidationError(e.__str__(), code='url')

        # (2) Save the instance
        super().save(*args, **kwargs)

        # (3) Create/update the synchronized datasets

        # Retrieve from the call `stack` the user performing the operation
        editor = None
        for entry in inspect.stack():
            try:
                editor = entry[0].f_locals['request'].user._wrapped
            except (KeyError, AttributeError):
                continue
            break

        if not previous:
            return

        if harvest:
            # Then harvest the catalogue
            try:
                ckan_ids = []
                geonet_ids = []
                with transaction.atomic():

                    with CswBaseHandler(self.url) as csw:
                        packages = csw.get_packages(
                            xml=self.getrecords or None)

                    for package in packages:
                        if not package['type'] == 'dataset':
                            continue

                        geonet_id = package['id']
                        update_frequency = package.get('frequency')
                        if update_frequency not in dict(Dataset.FREQUENCY_CHOICES):
                            update_frequency = 'unknown'

                        date_creation = package.get('dataset_creation_date',
                                                    None)
                        if date_creation:
                            try:
                                date_creation = datetime.strptime(
                                    date_creation, ISOFORMAT_DATE)
                            except ValueError as e:
                                logger.warning(e)
                                date_creation = None

                        date_modification = package.get(
                            'dataset_modification_date', None)
                        if date_modification:
                            try:
                                date_modification = datetime.strptime(
                                    date_modification, ISOFORMAT_DATE)
                            except ValueError as e:
                                logger.warning(e)
                                date_modification = None

                        date_publication = package.get(
                            'dataset_publication_date', None)
                        if date_publication:
                            try:
                                date_publication = datetime.strptime(
                                    date_publication, ISOFORMAT_DATE)
                            except ValueError as e:
                                logger.warning(e)
                                date_publication = None

                        # Licence
                        license_titles = package.get('license_titles')
                        filters = [
                            Q(slug__in=license_titles),
                            Q(title__in=license_titles),
                            Q(alternate_titles__overlap=license_titles),
                        ]
                        license = License.objects.filter(reduce(
                            ior, filters)).distinct().first()
                        if not license:
                            try:
                                license = License.objects.get(
                                    slug=settings.DEFAULTS_VALUES.get(
                                        'LICENSE'))
                            except License.DoesNotExist:
                                license = License.objects.first()

                        # Push the metadata record to GeoNetwork
                        if not geonet.get_record(geonet_id):
                            try:
                                geonet.create_record(geonet_id, package['xml'])
                            except Exception as e:
                                logger.warning(
                                    'La création de la fiche de métadonnées a échoué.'
                                )
                                logger.error(e)
                            else:
                                geonet_ids.append(geonet_id)
                                # Always publish the record
                                geonet.publish(geonet_id)
                        else:
                            try:
                                geonet.update_record(geonet_id, package['xml'])
                            except Exception as e:
                                logger.warning(
                                    'La mise à jour de la fiche de métadonnées a échoué.'
                                )
                                logger.error(e)

                        slug = 'sync{}-{}'.format(
                            str(uuid.uuid4())[:7].lower(),
                            slugify(geonet_id))[:100]
                        kvp = {
                            'slug': slug,
                            'title': package.get('title'),
                            'description': package.get('notes'),
                            'date_creation': date_creation and date_creation.date(),
                            'date_modification': date_modification and date_modification.date(),
                            'date_publication': date_publication and date_publication.date(),
                            'editor': editor,
                            'license': license,
                            'owner_email': self.organisation.email or DEFAULT_CONTACT_EMAIL,
                            'owner_name': self.organisation.legal_name or DEFAULT_PLATFORM_NAME,
                            'organisation': self.organisation,
                            'published': not package.get('private'),
                            'remote_instance': self,
                            'remote_dataset': geonet_id,
                            'update_frequency': update_frequency,
                            'bbox': package.get('bbox'),
                            # broadcaster_email
                            # broadcaster_name
                            # data_type
                            # geocover
                            'geonet_id': geonet_id,
                            # granularity
                            # thumbnail
                            # support
                        }

                        dataset, created = Dataset.harvested_csw.update_or_create(
                            **kvp)
                        if created:
                            ckan_ids.append(dataset.ckan_id)

                        categories_name = [
                            m['name'] for m in package.get('groups', [])
                        ]
                        iso_topic_reverse = dict(
                            (v, k)
                            for k, v in Category._meta.fields[5].choices)

                        filters = [
                            Q(slug__in=categories_name),
                            Q(name__in=categories_name),
                            Q(iso_topic__in=[
                                m['name'] for m in package.get('groups', [])
                            ]),
                            Q(iso_topic__in=[
                                iso_topic_reverse.get(name)
                                for name in categories_name
                            ]),
                            Q(alternate_titles__overlap=categories_name),
                        ]

                        categories = Category.objects.filter(
                            reduce(ior, filters)).distinct()
                        if categories:
                            dataset.categories.set(categories, clear=True)

                        if not created:
                            dataset.keywords.clear()
                        keywords = [
                            tag['display_name'] for tag in package.get('tags')
                        ]
                        dataset.keywords.add(*keywords)

                        dataset.save(current_user=None,
                                     synchronize=True,
                                     activate=False)

                        for resource in package.get('resources', []):
                            try:
                                ckan_id = uuid.uuid4()
                            except ValueError as e:
                                logger.exception(e)
                                logger.error(
                                    "I can't crash here, so I do not pay any attention to this error."
                                )
                                continue

                            filters = []
                            protocol = resource.get('protocol')
                            protocol and filters.append(Q(protocol=protocol))
                            mimetype = resource.get('mimetype')
                            mimetype and filters.append(
                                Q(mimetype__overlap=[mimetype]))
                            try:
                                format_type = ResourceFormats.objects.get(
                                    reduce(iand, filters))
                            except (ResourceFormats.MultipleObjectsReturned,
                                    ResourceFormats.DoesNotExist, TypeError):
                                format_type = None

                            kvp = {
                                'ckan_id': ckan_id,
                                'dataset': dataset,
                                'format_type': format_type,
                                'title': resource['name'] or resource['url'],
                                'referenced_url': resource['url'],
                            }

                            try:
                                resource = Resource.objects.get(
                                    ckan_id=ckan_id)
                            except Resource.DoesNotExist:
                                resource = Resource.default.create(
                                    save_opts={
                                        'current_user': editor,
                                        'synchronize': True,
                                    },
                                    **kvp)
                            else:
                                for k, v in kvp.items():
                                    setattr(resource, k, v)
                            resource.save(current_user=editor,
                                          synchronize=True)

            except Exception as e:
                for id in ckan_ids:
                    logger.warning(
                        'Delete CKAN package : {id}.'.format(id=str(id)))
                    CkanHandler.purge_dataset(str(id))
                for id in geonet_ids:
                    logger.warning('Delete MD : {id}.'.format(id=str(id)))
                    geonet.delete_record(id)
                logger.error(e)
                raise CriticalError()
            else:
                for id in ckan_ids:
                    CkanHandler.publish_dataset(id=str(id), state='active')
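
The outer try/except around the transaction.atomic() block above is a compensation pattern: the database rolls back on its own, but CKAN and GeoNetwork are external systems, so the identifiers created there are collected and purged by hand when anything fails. A generic sketch of the same idea (the function and handler names are placeholders, not from the source):

# Generic sketch of the compensation pattern; names are placeholders.
from django.db import transaction


def harvest_with_compensation(packages, create_locally, create_remotely, purge_remotely):
    remote_ids = []
    try:
        with transaction.atomic():
            for package in packages:
                remote_ids.append(create_remotely(package))
                create_locally(package)
    except Exception:
        # The database transaction has already rolled back; undo the
        # external side effects by hand, then re-raise.
        for remote_id in remote_ids:
            purge_remotely(remote_id)
        raise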
Example #7
    def save(self, *args, **kwargs):
        Category = apps.get_model(app_label='idgo_admin',
                                  model_name='Category')
        Dataset = apps.get_model(app_label='idgo_admin', model_name='Dataset')
        License = apps.get_model(app_label='idgo_admin', model_name='License')
        Resource = apps.get_model(app_label='idgo_admin',
                                  model_name='Resource')
        ResourceFormats = apps.get_model(app_label='idgo_admin',
                                         model_name='ResourceFormats')

        # (1) Delete the datasets that are no longer synchronized
        previous = self.pk and RemoteCkan.objects.get(pk=self.pk)

        if previous:
            remote_organisation__in = [
                x for x in (previous.sync_with or [])
                if x not in (self.sync_with or [])
            ]
            filter = {
                'remote_instance': previous,
                'remote_organisation__in': remote_organisation__in,
            }

            # TODO: 'Dataset.harvested_ckan.filter(**filter).delete()' does not work
            for dataset in Dataset.harvested_ckan.filter(**filter):
                dataset.delete()
        else:
            # On creation, check that the CKAN URL is valid
            try:
                with CkanBaseHandler(self.url):
                    pass
            except CkanBaseError as e:
                raise ValidationError(e.__str__(), code='url')

        # (2) Save the instance
        super().save(*args, **kwargs)

        # (3) Create/update the synchronized datasets

        # Retrieve from the call `stack` the user performing the operation
        editor = User.objects.get(pk=DEFAULT_USER_ID)
        for entry in inspect.stack():
            try:
                editor = entry[0].f_locals['request'].user._wrapped
            except (KeyError, AttributeError):
                continue
            break

        # Then harvest the catalogue
        if self.sync_with:
            try:
                ckan_ids = []
                with transaction.atomic():

                    # TODO: Factor this out
                    for value in self.sync_with:
                        with CkanBaseHandler(self.url) as ckan:
                            ckan_organisation = ckan.get_organisation(
                                value,
                                include_datasets=True,
                                include_groups=True,
                                include_tags=True)

                        if not ckan_organisation.get('package_count', 0):
                            continue
                        for package in ckan_organisation.get('packages'):
                            if not package['state'] == 'active' \
                                    or not package['type'] == 'dataset':
                                continue
                            with CkanBaseHandler(self.url) as ckan:
                                package = ckan.get_package(package['id'])

                            ckan_id = uuid.UUID(package['id'])

                            update_frequency = package.get('frequency')
                            if update_frequency not in dict(Dataset.FREQUENCY_CHOICES):
                                update_frequency = 'unknown'
                            metadata_created = package.get(
                                'metadata_created', None)
                            if metadata_created:
                                metadata_created = datetime.strptime(
                                    metadata_created, ISOFORMAT_DATETIME)
                            metadata_modified = package.get(
                                'metadata_modified', None)
                            if metadata_modified:
                                metadata_modified = datetime.strptime(
                                    metadata_modified, ISOFORMAT_DATETIME)

                            try:
                                mapping_licence = MappingLicence.objects.get(
                                    remote_ckan=self,
                                    slug=package.get('license_id'))
                            except MappingLicence.DoesNotExist:
                                logger.warning("'{}' non trouvé".format(
                                    package.get('license_id')))
                                try:
                                    license = License.objects.get(
                                        slug='other-at')
                                except License.DoesNotExist:
                                    license = None
                            else:
                                license = mapping_licence.licence

                            slug = 'sync{}-{}'.format(
                                str(uuid.uuid4())[:7].lower(),
                                package.get('name'))[:100]
                            kvp = {
                                'slug': slug,
                                'title': package.get('title'),
                                'description': package.get('notes'),
                                'date_creation': metadata_created and metadata_created.date(),
                                'date_modification': metadata_modified and metadata_modified.date(),
                                # date_publication
                                'editor': editor,
                                'license': license,
                                'owner_email': self.organisation.email or DEFAULT_CONTACT_EMAIL,
                                'owner_name': self.organisation.legal_name or DEFAULT_PLATFORM_NAME,
                                'organisation': self.organisation,
                                'published': not package.get('private'),
                                'remote_instance': self,
                                'remote_dataset': ckan_id,
                                'remote_organisation': value,
                                'update_frequency': update_frequency,
                                # bbox
                                # broadcaster_email
                                # broadcaster_name
                                # data_type
                                # geocover
                                # geonet_id
                                # granularity
                                # thumbnail
                                # support
                            }

                            dataset, created = Dataset.harvested_ckan.update_or_create(
                                **kvp)

                            mapping_categories = MappingCategory.objects.filter(
                                remote_ckan=self,
                                slug__in=[
                                    m['name']
                                    for m in package.get('groups', [])
                                ])
                            if mapping_categories:
                                dataset.categories.set(
                                    mc.category for mc in mapping_categories)

                            if not created:
                                dataset.keywords.clear()
                            keywords = [
                                tag['display_name']
                                for tag in package.get('tags')
                            ]
                            dataset.keywords.add(*keywords)
                            dataset.save(current_user=None,
                                         synchronize=True,
                                         activate=False)

                            ckan_ids.append(dataset.ckan_id)

                            for resource in package.get('resources', []):
                                try:
                                    ckan_id = uuid.UUID(resource['id'])
                                except ValueError as e:
                                    logger.exception(e)
                                    logger.error(
                                        "I can't crash here, so I do not pay any attention to this error."
                                    )
                                    continue

                                try:
                                    ckan_format = resource['format'].upper()
                                    format_type = ResourceFormats.objects.get(
                                        ckan_format=ckan_format)
                                except (ResourceFormats.MultipleObjectsReturned,
                                        ResourceFormats.DoesNotExist,
                                        TypeError) as e:
                                    logger.exception(e)
                                    logger.error(
                                        "I can't crash here, so I do not pay any attention to this error."
                                    )
                                    format_type = None

                                kvp = {
                                    'ckan_id': ckan_id,
                                    'dataset': dataset,
                                    'format_type': format_type,
                                    'title': resource['name'],
                                    'referenced_url': resource['url'],
                                }

                                try:
                                    resource = Resource.objects.get(
                                        ckan_id=ckan_id)
                                except Resource.DoesNotExist:
                                    resource = Resource.default.create(
                                        save_opts={
                                            'current_user': None,
                                            'synchronize': True
                                        },
                                        **kvp)
                                else:
                                    for k, v in kvp.items():
                                        setattr(resource, k, v)
                                resource.save(current_user=None,
                                              synchronize=True)

            except Exception as e:
                for id in ckan_ids:
                    CkanHandler.purge_dataset(str(id))
                logger.error(e)
                raise CriticalError()
            else:
                for id in ckan_ids:
                    CkanHandler.publish_dataset(id=str(id), state='active')
Example #8
    def __init__(self, *args, **kwargs):
        self.cleaned_data = {}
        super().__init__(*args, **kwargs)

        instance = kwargs.get('instance', None)
        if instance and instance.url:
            self.fields['url'].widget.attrs['readonly'] = True
            # Retrieve the list of organisations
            try:
                with CkanBaseHandler(instance.url) as ckan:
                    organisations = ckan.get_all_organisations(
                        all_fields=True, include_dataset_count=True)
            except CkanBaseError as e:
                self.add_error('url', e.message)
            else:
                self.fields['sync_with'].choices = (
                    (organisation['name'], '{} ({})'.format(
                        organisation['display_name'],
                        organisation.get('package_count',
                                         organisation.get('packages', None))))
                    for organisation in organisations)

            mapping = []

            # Initialize categories mapping
            # =============================
            try:
                with CkanBaseHandler(instance.url) as ckan:
                    remote_categories = ckan.get_all_categories(
                        all_fields=True)
            except CkanBaseError as e:
                logger.error(e)
            else:
                fields_name = []
                for remote_category in remote_categories:
                    field_name = ''.join(['cat_', remote_category['name']])
                    fields_name.append(field_name)
                    try:
                        filter = {
                            'remote_ckan': instance,
                            'slug': field_name[4:]
                        }
                        initial = MappingCategory.objects.filter(
                            **filter).first().category
                    except Exception as e:
                        logger.warning(e)
                        try:
                            initial = Category.objects.get(slug=field_name[4:])
                        except Exception as e:
                            logger.warning(e)
                            initial = None

                    self.fields[field_name] = forms.ModelChoiceField(
                        label=remote_category['title'],
                        empty_label="Sélectionnez une valeur",
                        required=False,
                        queryset=Category.objects.all(),
                        initial=initial,
                    )

                mapping.append({
                    'name': 'Category',
                    'title': 'Categories',
                    'fields_name': fields_name,
                })

            # Initialize licences mapping
            # ===========================
            try:
                with CkanBaseHandler(instance.url) as ckan:
                    remote_licenses = ckan.get_all_licenses(all_fields=True)
            except CkanBaseError as e:
                logger.error(e)
            else:
                fields_name = []
                for remote_license in remote_licenses:
                    field_name = ''.join(['lic_', remote_license['id']])
                    fields_name.append(field_name)
                    try:
                        filter = {
                            'remote_ckan': instance,
                            'slug': field_name[4:]
                        }
                        initial = MappingLicence.objects.filter(
                            **filter).first().licence
                    except Exception as e:
                        logger.warning(e)
                        try:
                            initial = License.objects.get(slug=field_name[4:])
                        except Exception as e:
                            logger.warning(e)
                            initial = None

                    self.fields[field_name] = forms.ModelChoiceField(
                        label=remote_license['title'],
                        empty_label="Sélectionnez une valeur",
                        required=False,
                        queryset=License.objects.all(),
                        initial=initial,
                    )

                mapping.append({
                    'name': 'License',
                    'title': 'Licences',
                    'fields_name': fields_name,
                })

        else:
            self.fields['sync_with'].widget = forms.HiddenInput()
            self.fields['sync_frequency'].widget = forms.HiddenInput()
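
The form above only reads MappingCategory and MappingLicence when building its initial values; a hypothetical counterpart (not in the source) showing how the dynamically added cat_*/lic_* fields could be written back from cleaned_data, assuming the remote_ckan/slug/category/licence field names seen in the example:

    # Hypothetical save helper, not part of the source form.
    def save_mapping(self, instance):
        for name, value in self.cleaned_data.items():
            if not value:
                continue
            if name.startswith('cat_'):
                MappingCategory.objects.update_or_create(
                    remote_ckan=instance, slug=name[4:],
                    defaults={'category': value})
            elif name.startswith('lic_'):
                MappingLicence.objects.update_or_create(
                    remote_ckan=instance, slug=name[4:],
                    defaults={'licence': value})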
Example #9
    def save(self,
             *args,
             current_user=None,
             synchronize=False,
             file_extras=None,
             **kwargs):

        # Previous version of the resource (before modification)
        previous, created = (
            (Resource.objects.get(pk=self.pk), False) if self.pk else (None, True))

        if previous:
            # crs is immutable unless the dataset changes (see below)
            self.crs = previous.crs

        # A few default values when creating the instance
        if created or not (
                # Or if the editor is not a CRIGE partner
                current_user and current_user.profile.crige_membership):

            # But only if this is GIS data, except
            # that we do not know that yet...
            self.geo_restriction = False
            self.ogc_services = True
            self.extractable = True

        # Restricting to the territory of competence always disables the OGC services
        if self.geo_restriction:
            self.ogc_services = False

        # A few checks on the uploaded or to-be-downloaded data files
        filename = False
        content_type = None
        file_must_be_deleted = False  # whether the files must be deleted at the end of the processing chain
        publish_raw_resource = True  # whether the raw resources are published to CKAN

        if self.ftp_file:
            filename = self.ftp_file.file.name
            # If the file size exceeds the allowed limit,
            # process the data according to the detected type
            if self.ftp_file.size > DOWNLOAD_SIZE_LIMIT:
                extension = self.format_type.extension.lower()
                if self.format_type.is_gis_format:
                    try:
                        gdalogr_obj = get_gdalogr_object(filename, extension)
                    except NotDataGISError:
                        # Try to process the dataset normally, even though it may take a while.
                        pass
                    else:
                        if gdalogr_obj.__class__.__name__ == 'GdalOpener':
                            s0 = str(self.ckan_id)
                            s1, s2, s3 = s0[:3], s0[3:6], s0[6:]
                            dir = os.path.join(CKAN_STORAGE_PATH, s1, s2)
                            os.makedirs(dir, mode=0o777, exist_ok=True)
                            shutil.copyfile(filename, os.path.join(dir, s3))

                            src = os.path.join(dir, s3)
                            dst = os.path.join(dir, filename.split('/')[-1])
                            try:
                                os.symlink(src, dst)
                            except FileNotFoundError as e:
                                logger.error(e)
                            else:
                                logger.debug(
                                    'Created a symbolic link {dst} pointing to {src}.'
                                    .format(dst=dst, src=src))

                        # if gdalogr_obj.__class__.__name__ == 'OgrOpener':
                        # Only the OGC service is published to CKAN
                        publish_raw_resource = False

        elif (self.up_file and file_extras):
            # GDAL/OGR does not seem to accept in-memory files..
            # ..to be checked, but if it is possible, how do we prefix the filename with the vsi?
            super().save(*args, **kwargs)
            kwargs['force_insert'] = False

            filename = self.up_file.file.name
            file_must_be_deleted = True

        elif self.dl_url:
            try:
                directory, filename, content_type = download(
                    self.dl_url,
                    settings.MEDIA_ROOT,
                    max_size=DOWNLOAD_SIZE_LIMIT)
            except SizeLimitExceededError as e:
                l = len(str(e.max_size))
                if l > 6:
                    m = '{0} mo'.format(Decimal(int(e.max_size) / 1024 / 1024))
                elif l > 3:
                    m = '{0} ko'.format(Decimal(int(e.max_size) / 1024))
                else:
                    m = '{0} octets'.format(int(e.max_size))
                raise ValidationError(('La taille du fichier dépasse '
                                       'la limite autorisée : {0}.').format(m),
                                      code='dl_url')
            except Exception as e:
                if e.__class__.__name__ == 'HTTPError':
                    if e.response.status_code == 404:
                        msg = ('La ressource distante ne semble pas exister. '
                               "Assurez-vous que l'URL soit correcte.")
                    if e.response.status_code == 403:
                        msg = ("Vous n'avez pas l'autorisation pour "
                               'accéder à la ressource.')
                    if e.response.status_code == 401:
                        msg = ('Une authentification est nécessaire '
                               'pour accéder à la ressource.')
                else:
                    msg = 'Le téléchargement du fichier a échoué.'
                raise ValidationError(msg, code='dl_url')
            file_must_be_deleted = True

        # Synchronization with CKAN
        # =========================

        # Synchronization must happen before any GIS data layers are
        # published because, for "raster" data, we use the CKAN filestore.
        if synchronize and publish_raw_resource:
            self.synchronize(content_type=content_type,
                             file_extras=file_extras,
                             filename=filename,
                             with_user=current_user)
        elif synchronize and not publish_raw_resource:
            url = reduce(urljoin, [
                settings.CKAN_URL, 'dataset/',
                str(self.dataset.ckan_id) + '/', 'resource/',
                str(self.ckan_id) + '/', 'download/',
                Path(self.ftp_file.name).name
            ])
            self.synchronize(url=url, with_user=current_user)

        # GIS data detection
        # ==================

        if filename:
            # Check whether this is GIS data, but only for
            # the allowed file extensions..
            extension = self.format_type.extension.lower()
            if self.format_type.is_gis_format:
                # If so, load the data into the dedicated PostGIS database
                # and declare the layer in the organisation's OGC:WxS service.

                # But first, check whether the resource already
                # contains "Layers", in which case we will need to check
                # whether the data table has changed.
                existing_layers = {}
                if not created:
                    existing_layers = dict(
                        (re.sub(r'^(\w+)_[a-z0-9]{7}$', r'\g<1>', layer.name),
                         layer.name) for layer in self.get_layers())

                try:

                    # This is downright ugly, but it was done for speed.
                    # All of this should be factored out, with a
                    # decorator to handle the roll-back on CKAN.

                    try:
                        gdalogr_obj = get_gdalogr_object(filename, extension)
                    except NotDataGISError:
                        tables = []
                    else:

                        try:
                            self.format_type = ResourceFormats.objects.get(
                                extension=extension,
                                ckan_format=gdalogr_obj.format)
                        # except ResourceFormats.MultipleObjectsReturned:
                        #     pass
                        except Exception:
                            pass

                        # ==============
                        # Vector dataset
                        # ==============

                        if gdalogr_obj.__class__.__name__ == 'OgrOpener':

                            # Convert the data to PostGIS

                            try:
                                tables = ogr2postgis(
                                    gdalogr_obj,
                                    update=existing_layers,
                                    epsg=self.crs and self.crs.auth_code
                                    or None,
                                    encoding=self.encoding)

                            except NotOGRError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    "Le fichier reçu n'est pas reconnu "
                                    'comme étant un jeu de données SIG correct.'
                                )
                                raise ValidationError(msg, code='__all__')

                            except DataDecodingError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Impossible de décoder correctement les '
                                    "données. Merci d'indiquer l'encodage "
                                    'ci-dessous.')
                                raise ValidationError(msg, code='encoding')

                            except WrongDataError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Votre ressource contient des données SIG que '
                                    'nous ne parvenons pas à lire correctement. '
                                    'Un ou plusieurs objets sont erronés.')
                                raise ValidationError(msg)

                            except NotFoundSrsError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Votre ressource semble contenir des données SIG '
                                    'mais nous ne parvenons pas à détecter le système '
                                    'de coordonnées. Merci de sélectionner le code du '
                                    'CRS dans la liste ci-dessous.')
                                raise ValidationError(msg, code='crs')

                            except NotSupportedSrsError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Votre ressource semble contenir des données SIG '
                                    'mais le système de coordonnées de celles-ci '
                                    "n'est pas supporté par l'application.")
                                raise ValidationError(msg, code='__all__')

                            except ExceedsMaximumLayerNumberFixedError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                raise ValidationError(e.__str__(),
                                                      code='__all__')

                            else:
                                # The object must exist before creating relations
                                if created:
                                    # If this is a creation, save the object.
                                    super().save(*args, **kwargs)
                                    kwargs['force_insert'] = False

                                # Then, for every GIS dataset found, create
                                # the OWS service by creating `Layer` objects
                                try:
                                    Layer = apps.get_model(
                                        app_label='idgo_admin',
                                        model_name='Layer')
                                    for table in tables:
                                        try:
                                            Layer.objects.get(name=table['id'],
                                                              resource=self)
                                        except Layer.DoesNotExist:
                                            save_opts = {
                                                'synchronize': synchronize
                                            }
                                            Layer.vector.create(
                                                bbox=table['bbox'],
                                                name=table['id'],
                                                resource=self,
                                                save_opts=save_opts)
                                except Exception as e:
                                    logger.error(e)
                                    file_must_be_deleted and remove_file(
                                        filename)
                                    for table in tables:
                                        drop_table(table['id'])
                                    raise e

                        # ==============
                        # Raster dataset
                        # ==============

                        if gdalogr_obj.__class__.__name__ == 'GdalOpener':

                            coverage = gdalogr_obj.get_coverage()

                            try:
                                tables = [
                                    gdalinfo(
                                        coverage,
                                        update=existing_layers,
                                        epsg=self.crs and self.crs.auth_code
                                        or None)
                                ]

                            except NotFoundSrsError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Votre ressource semble contenir des données SIG '
                                    'mais nous ne parvenons pas à détecter le système '
                                    'de coordonnées. Merci de sélectionner le code du '
                                    'CRS dans la liste ci-dessous.')
                                raise ValidationError(msg, code='crs')

                            except NotSupportedSrsError as e:
                                logger.warning(e)
                                file_must_be_deleted and remove_file(filename)
                                msg = (
                                    'Votre ressource semble contenir des données SIG '
                                    'mais le système de coordonnées de celles-ci '
                                    "n'est pas supporté par l'application.")
                                raise ValidationError(msg, code='__all__')

                            else:
                                if created:
                                    # If this is a creation, save the object.
                                    super().save(*args, **kwargs)
                                    kwargs['force_insert'] = False

                            # Really dirty code
                            s0 = str(self.ckan_id)
                            s1, s2, s3 = s0[:3], s0[3:6], s0[6:]
                            dir = os.path.join(CKAN_STORAGE_PATH, s1, s2)
                            src = os.path.join(dir, s3)
                            dst = os.path.join(dir, filename.split('/')[-1])
                            try:
                                os.symlink(src, dst)
                            except FileExistsError as e:
                                logger.warning(e)
                            except FileNotFoundError as e:
                                logger.error(e)
                            else:
                                logger.debug(
                                    'Created a symbolic link {dst} pointing to {src}.'
                                    .format(dst=dst, src=src))

                            try:
                                Layer = apps.get_model(app_label='idgo_admin',
                                                       model_name='Layer')
                                for table in tables:
                                    try:
                                        Layer.objects.get(name=table['id'],
                                                          resource=self)
                                    except Layer.DoesNotExist:
                                        Layer.raster.create(bbox=table['bbox'],
                                                            name=table['id'],
                                                            resource=self)
                            except Exception as e:
                                logger.error(e)
                                file_must_be_deleted and remove_file(filename)
                                raise e

                except Exception as e:
                    if created:
                        if current_user:
                            username = current_user.username
                            apikey = CkanHandler.get_user(username)['apikey']
                            with CkanUserHandler(apikey) as ckan:
                                ckan.delete_resource(str(self.ckan_id))
                        else:
                            CkanHandler.delete_resource(str(self.ckan_id))
                        for layer in self.get_layers():
                            layer.delete(current_user=current_user)
                    # Then re-raise the error
                    raise e

                # Update the resource fields
                SupportedCrs = apps.get_model(app_label='idgo_admin',
                                              model_name='SupportedCrs')
                crs = [
                    SupportedCrs.objects.get(auth_name='EPSG',
                                             auth_code=table['epsg'])
                    for table in tables
                ]
                # Take the first value (this is ugly)
                self.crs = crs and crs[0] or None

                # If the data changes..
                if existing_layers and \
                        previous.get_layers() != self.get_layers():
                    # delete the old `layers`..
                    for layer in previous.get_layers():
                        layer.delete()
        ####
        if self.get_layers():
            extent = self.get_layers().aggregate(
                models.Extent('bbox')).get('bbox__extent')
            if extent:
                xmin, ymin = extent[0], extent[1]
                xmax, ymax = extent[2], extent[3]
                setattr(self, 'bbox', bounds_to_wkt(xmin, ymin, xmax, ymax))
        else:
            # If the resource is not GIS data, set the three
            # arguments that only apply to GIS data to "False".
            self.geo_restriction = False
            self.ogc_services = False
            self.extractable = False

        super().save(*args, **kwargs)

        # Then, in all cases..
        # update the status of the map service layers..
        if not created:
            self.update_enable_layers_status()

        # delete the uploaded or downloaded data..
        if file_must_be_deleted:
            remove_file(filename)

        # [Dirty] update the CKAN resource
        if synchronize:
            CkanHandler.update_resource(str(self.ckan_id),
                                        extracting_service=str(
                                            self.extractable))

        for layer in self.get_layers():
            layer.save(synchronize=synchronize)

        self.dataset.date_modification = timezone.now().date()
        self.dataset.save(current_user=None,
                          synchronize=True,
                          update_fields=['date_modification'])
Example #10
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        organisation = self.resource.dataset.organisation
        ws_name = organisation.slug

        self.mra_info = {
            'name': None,
            'title': None,
            'type': None,
            'enabled': None,
            'abstract': None,
            'bbox': None,
            'attributes': None,
            'styles': {
                'default': None,
                'styles': None
            }
        }

        try:
            l = MRAHandler.get_layer(self.name)
        except MraBaseError as e:
            logger.error(e)
            return

        # Retrieving vector layer information
        # ===================================

        if self.type == 'vector':
            try:
                ft = MRAHandler.get_featuretype(ws_name, 'public', self.name)
            except MraBaseError:
                return
            if not l or not ft:
                return

            ll = ft['featureType']['latLonBoundingBox']
            bbox = [[ll['miny'], ll['minx']], [ll['maxy'], ll['maxx']]]
            attributes = [
                item['name'] for item in ft['featureType']['attributes']
            ]

            default_style_name = None
            styles = []
            if 'defaultStyle' in l:
                default_style_name = l['defaultStyle']['name']
                try:
                    sld = MRAHandler.get_style(l['defaultStyle']['name'])
                except MraBaseError as e:
                    logger.error(e)
                    styles = {}
                else:
                    styles = [{
                        'name': 'default',
                        'text': 'Style par défaut',
                        'url': l['defaultStyle']['href'].replace('json', 'sld'),
                        'sld': sld,
                    }]

            if l.get('styles'):
                for style in l.get('styles')['style']:
                    styles.append({
                        'name': style['name'],
                        'text': style['name'],
                        'url': style['href'].replace('json', 'sld'),
                        'sld': MRAHandler.get_style(style['name']),
                    })

        # Retrieving raster layer information
        # ===================================

        elif self.type == 'raster':
            try:
                c = MRAHandler.get_coverage(ws_name, self.name, self.name)
            except MraBaseError:
                return
            if not l or not c:
                return

            ll = c['coverage']['latLonBoundingBox']
            bbox = [[ll['miny'], ll['minx']], [ll['maxy'], ll['maxx']]]
            attributes = []
            default_style_name = None
            styles = []

        # Then..
        self.mra_info = {
            'name': l['name'],
            'title': l['title'],
            'type': l['type'],
            'enabled': l['enabled'],
            'abstract': l['abstract'],
            'bbox': bbox,
            'attributes': attributes,
            'styles': {
                'default': default_style_name,
                'styles': styles
            }
        }