Example #1
    def get_matching_share(self,
                           metadata: Metadata,
                           shares: list,
                           mode: str = "simple") -> Share:
        """Get the first Share which contains the metadata, basing match on workgroup UUID.

        :param Metadata metadata: metadata object to use for the matching
        :param list shares: list of shares to use for the matching
        :param str mode: simple mode is based only on the UID
        """
        if mode != "simple":
            raise NotImplementedError

        matching_share = [
            share for share in shares
            if share.get("_creator").get("_id") == metadata.groupId
        ]
        if matching_share:
            matching_share = Share(**matching_share[0])
        else:
            logger.warning(
                "No matching share found for {} ({}). The OpenCatalog URL will not be build."
                .format(metadata.title_or_name(), metadata._id))
            matching_share = None

        return matching_share
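
To make the matching rule concrete, here is a minimal standalone sketch of the same lookup with hypothetical share payloads (no Isogeo SDK required); the workgroup ids and share names are invented for illustration.

# Minimal sketch of the matching rule above, using hypothetical share payloads
shares = [
    {"_creator": {"_id": "wg-001"}, "name": "OpenCatalog A"},
    {"_creator": {"_id": "wg-002"}, "name": "OpenCatalog B"},
]
group_id = "wg-002"  # in the method above, this value comes from metadata.groupId

# first share whose creator workgroup matches, else None
match = next(
    (s for s in shares if s.get("_creator", {}).get("_id") == group_id), None
)
print(match["name"] if match else "no matching share")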
Example #2
    async def update(self, metadata: Metadata):
        """Push the updated metadata to the Isogeo API and log the result.

        :param Metadata metadata: metadata object to update
        """
        logger.debug("Updating metadata: " + metadata.title_or_name())
        md_updated = self.isogeo.metadata.update(metadata)
        # await asyncio.sleep(2)
        if isinstance(md_updated, Metadata):
            logger.debug(f"{metadata._id} has been updated")
        elif isinstance(md_updated, tuple):
            logger.error(f"{metadata._id} can't be updated: {md_updated[1]}")
Example #3
    def md2docx(self,
                docx_template: DocxTemplate,
                md: Metadata,
                share: Share = None):
        """Dump Isogeo metadata into a docx template.

        :param DocxTemplate docx_template: Word template to fill
        :param Metadata md: metadata to dump into the template
        :param Share share: share in which the metadata is. Used to build the view URL.
        """
        logger.debug("Starting the export into Word .docx of {} ({})".format(
            md.title_or_name(slugged=1), md._id))

        # template context starting with metadata attributes which do not require any special formatting
        context = {
            # IDENTIFICATION
            "varType": self.isogeo_tr("formatTypes", md.type),
            "varTitle": self.fmt.clean_xml(md.title),
            "varAbstract": self.fmt.clean_xml(md.abstract),
            "varNameTech": self.fmt.clean_xml(md.name),
            "varOwner": md.groupName,
            "varPath": self.fmt.clean_xml(md.path),
            # QUALITY
            "varTopologyInfo": self.fmt.clean_xml(md.topologicalConsistency),
            # HISTORY
            "varCollectContext": self.fmt.clean_xml(md.collectionContext),
            "varCollectMethod": self.fmt.clean_xml(md.collectionMethod),
            "varValidityComment": self.fmt.clean_xml(md.validityComment),
            # GEOGRAPHY
            "varEncoding": self.fmt.clean_xml(md.encoding),
            "varScale": self.fmt.clean_xml(md.scale),
            "varGeometry": self.fmt.clean_xml(md.geometry),
            "varObjectsCount": self.fmt.clean_xml(md.features),
            # METADATA
            "varMdDtCrea": utils.hlpr_datetimes(md._created).strftime(self.datetimes_fmt),
            "varMdDtUpda": utils.hlpr_datetimes(md._modified).strftime(self.datetimes_fmt),
            "varMdDtExp": datetime.now().strftime(self.datetimes_fmt),
        }

        # TAGS #
        # extracting & parsing tags
        li_motscles = []
        li_theminspire = []

        # default values
        context["varInspireConformity"] = self.isogeo_tr(
            "quality", "isNotConform")

        # looping on tags
        for tag in md.tags.keys():
            # free keywords
            if tag.startswith("keyword:isogeo"):
                li_motscles.append(md.tags.get(tag))
                continue

            # INSPIRE themes
            if tag.startswith("keyword:inspire-theme"):
                li_theminspire.append(md.tags.get(tag))
                continue

            # coordinate system
            if tag.startswith("coordinate-system"):
                context["varSRS"] = md.tags.get(tag)
                continue

            # format
            if tag.startswith("format"):
                context["varFormat"] = md.tags.get(tag)
                if md.formatVersion:
                    context["varFormat"] += " " + md.formatVersion
                continue

            # INSPIRE conformity
            if tag.startswith("conformity:inspire"):
                context["varInspireConformity"] = self.isogeo_tr(
                    "quality", "isConform")
                continue

        # add tags to the template context
        context["varKeywords"] = " ; ".join(li_motscles)
        context["varKeywordsCount"] = len(li_motscles)
        context["varInspireTheme"] = " ; ".join(li_theminspire)

        # formatting links to visualize on OpenCatalog and edit on APP
        if share is not None:
            context["varViewOC"] = utils.get_view_url(
                md_id=md._id, share_id=share._id, share_token=share.urlToken)
        else:
            logger.debug("No OpenCatalog URL for metadata: {} ({})".format(
                md.title_or_name(), md._id))

        # link to APP
        context["varEditAPP"] = utils.get_edit_url(md)

        # ---- CONTACTS # ----------------------------------------------------
        contacts_out = []
        if md.contacts:
            # formatting contacts
            for ct_in in md.contacts:
                ct = {}
                # translate contact role
                ct["role"] = self.isogeo_tr("roles", ct_in.get("role"))
                # ensure other contacts fields
                ct["name"] = ct_in.get("contact").get("name", "NR")
                ct["organization"] = ct_in.get("contact").get(
                    "organization", "")
                ct["email"] = ct_in.get("contact").get("email", "")
                ct["phone"] = ct_in.get("contact").get("phone", "")
                ct["fax"] = ct_in.get("contact").get("fax", "")
                ct["addressLine1"] = ct_in.get("contact").get(
                    "addressLine1", "")
                ct["addressLine2"] = ct_in.get("contact").get(
                    "addressLine2", "")
                ct["zipCode"] = ct_in.get("contact").get("zipCode", "")
                ct["city"] = ct_in.get("contact").get("city", "")
                ct["countryCode"] = ct_in.get("contact").get("countryCode", "")
                # store into the final list
                contacts_out.append(ct)

            # add it to final context
            context["varContactsCount"] = len(contacts_out)
            context["varContactsDetails"] = contacts_out

        # ---- ATTRIBUTES --------------------------------------------------
        fields_out = []
        if md.type == "vectorDataset" and isinstance(md.featureAttributes,
                                                     list):
            for f_in in md.featureAttributes:
                field = {}
                # ensure other fields
                field["name"] = self.fmt.clean_xml(f_in.get("name", ""))
                field["alias"] = self.fmt.clean_xml(f_in.get("alias", ""))
                field["description"] = self.fmt.clean_xml(
                    f_in.get("description", ""))
                field["dataType"] = f_in.get("dataType", "")
                field["language"] = f_in.get("language", "")
                # store into the final list
                fields_out.append(field)

            # add to the final context
            context["varFieldsCount"] = len(fields_out)
            context["varFields"] = fields_out

        # ---- EVENTS ------------------------------------------------------
        events_out = []
        if md.events:
            for e in md.events:
                evt = Event(**e)
                # pop creation events (already in the export document)
                if evt.kind == "creation":
                    continue
                # prevent invalid character for XML formatting in description
                evt.description = self.fmt.clean_xml(evt.description)
                # make data human readable
                evt.date = utils.hlpr_datetimes(evt.date).strftime(
                    self.dates_fmt)
                # translate event kind
                # evt.kind = self.isogeo_tr("events", evt.kind)
                # append
                events_out.append(evt.to_dict())

            # add to the final context
            context["varEventsCount"] = len(events_out)
            context["varEvents"] = events_out

        # ---- HISTORY # -----------------------------------------------------
        # data events
        if md.created:
            context["varDataDtCrea"] = utils.hlpr_datetimes(
                md.created).strftime(self.dates_fmt)

        if md.modified:
            context["varDataDtUpda"] = utils.hlpr_datetimes(
                md.modified).strftime(self.dates_fmt)

        if md.published:
            context["varDataDtPubl"] = utils.hlpr_datetimes(
                md.published).strftime(self.dates_fmt)

        # validity
        if md.validFrom:
            context["varValidityStart"] = utils.hlpr_datetimes(
                md.validFrom).strftime(self.dates_fmt)

        # end validity date
        if md.validTo:
            context["varValidityEnd"] = utils.hlpr_datetimes(
                md.validTo).strftime(self.dates_fmt)

        # ---- SPECIFICATIONS # -----------------------------------------------
        if md.specifications:
            context["varSpecifications"] = self.fmt.specifications(
                md_specifications=md.specifications)

        # ---- CGUs # --------------------------------------------------------
        if md.conditions:
            context["varConditions"] = self.fmt.conditions(
                md_conditions=md.conditions)

        # ---- LIMITATIONS # -------------------------------------------------
        if md.limitations:
            context["varLimitations"] = self.fmt.limitations(
                md_limitations=md.limitations)

        # -- THUMBNAIL -----------------------------------------------------------------
        if md._id in self.thumbnails and Path(self.thumbnails.get(
                md._id)).is_file():
            thumbnail = str(Path(self.thumbnails.get(md._id)).resolve())
            context["varThumbnail"] = InlineImage(docx_template, thumbnail)
            logger.info("Thumbnail found for {}: {}".format(
                md.title_or_name(1), thumbnail))

        # fill the template file with the context and render it
        try:
            docx_template.render(context, autoescape=True)
            logger.info("Vector metadata stored: {} ({})".format(
                md.title_or_name(slugged=1), md._id))
        except etree.XMLSyntaxError as e:
            logger.error(
                "Invalid character in XML: {}. "
                "Any special character (<, <, &...)? Check: {}".format(
                    e, context.get("varEditAPP")))
        except (UnicodeEncodeError, UnicodeDecodeError) as e:
            logger.error(
                "Encoding error: {}. "
                "Any special character (<, <, &...)? Check: {}".format(
                    e, context.get("varEditAPP")))
        except Exception as e:
            logger.error("Unexpected error: {}. Check: {}".format(
                e, context.get("varEditAPP")))

        # end of function
        return
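
A typical call sequence around `md2docx` loads the Word template with docxtpl, lets the method render the context, then saves the result. In the sketch below, `exporter`, `md` and `share` are assumed to exist already, and both file names are placeholders.

# Driver sketch (assumes an `exporter` instance, a Metadata `md` and an optional Share `share`)
from docxtpl import DocxTemplate

tpl = DocxTemplate("template_isogeo.docx")  # template containing {{ varTitle }}, {{ varAbstract }}, ...
exporter.md2docx(docx_template=tpl, md=md, share=share)  # builds the context and renders it
tpl.save("metadata_export.docx")  # persist the filled document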
Example #4
    def store_md_generic(self, md: Metadata, ws: Worksheet, idx: int):
        """Exports generic metadata attributes into Excel worksheet with some dynamic
        adaptations based on metadata type.

        :param Metadata md: metadata object to export
        :param Worksheet ws: Excel worksheet to store the exported info
        :param int idx: row index in the worksheet
        """
        # pick columns referential table depending on metadata type
        if md.type == "rasterDataset":
            col = self.columns_raster
        elif md.type == "resource":
            col = self.columns_resource
        elif md.type == "service":
            col = self.columns_service
        elif md.type == "vectorDataset":
            col = self.columns_vector
        else:
            raise TypeError("Unknown metadata type: {}".format(md.type))

        logger.debug(
            "Start storing metadata {} ({}) using the reference columns matching its type ({})..."
            .format(md.title_or_name(slugged=1), md._id, md.type))

        # -- IDENTIFICATION ------------------------------------------------------------
        if md.title:
            ws["{}{}".format(col.get("title").letter, idx)] = md.title
        if md.name:
            ws["{}{}".format(col.get("name").letter, idx)] = md.name
        if md.abstract:
            ws["{}{}".format(col.get("abstract").letter, idx)] = md.abstract

        # path to source
        try:
            src_path = Path(str(md.path))
        except OSError as e:
            logger.debug(
                "Metadata.path value is not a valid system path. Maybe a URL? Original error: {}"
                .format(e))
            # a non-path value (e.g. a URL) is written as-is by the `elif md.path:` branch below
            src_path = None

        if isinstance(md.path, Path) and md.type != "service":
            if src_path.is_file():
                link_path = r'=HYPERLINK("{0}","{1}")'.format(
                    src_path.parent, src_path.resolve())
                ws["{}{}".format(col.get("path").letter, idx)] = link_path
                logger.debug("Path reachable: {}".format(src_path))
            else:
                ws["{}{}".format(col.get("path").letter,
                                 idx)] = str(src_path.resolve())
                logger.debug("Path unreachable: {}".format(str(src_path)))
        elif md.path and md.type == "service":
            link_path = r'=HYPERLINK("{0}","{1}")'.format(md.path, md.path)
            ws["{}{}".format(col.get("path").letter, idx)] = link_path
        elif md.path:
            ws["{}{}".format(col.get("path").letter, idx)] = md.path
            logger.debug("Path not recognized: {}".format(str(src_path)))
        else:
            pass

        # -- TAGS ----------------------------------------------------------------------
        keywords = []
        inspire = []
        if md.keywords:
            for k in md.keywords:
                if k.get("_tag").startswith("keyword:is"):
                    keywords.append(k.get("text"))
                elif k.get("_tag").startswith("keyword:in"):
                    inspire.append(k.get("text"))
                else:
                    logger.info("Unknown keyword type: " + k.get("_tag"))
                    continue
            if keywords:
                ws["{}{}".format(col.get("keywords").letter,
                                 idx)] = " ;\n".join(sorted(keywords))
            if inspire:
                ws["{}{}".format(col.get("inspireThemes").letter,
                                 idx)] = " ;\n".join(sorted(inspire))
        else:
            self.stats.md_empty_fields[md._id].append("keywords")
            logger.info("Vector dataset without any keyword or INSPIRE theme")

        # INSPIRE conformity
        if col.get("inspireConformance").letter is not None:
            ws["{}{}".format(col.get("inspireConformance").letter,
                             idx)] = ("conformity:inspire" in md.tags)

        # owner
        ws["{}{}".format(col.get("_creator").letter,
                         idx)] = next(v for k, v in md.tags.items()
                                      if "owner:" in k)

        # -- HISTORY -------------------------------------------------------------------
        if md.collectionContext:
            ws["{}{}".format(col.get("collectionContext").letter,
                             idx)] = md.collectionContext
        if md.collectionMethod:
            ws["{}{}".format(col.get("collectionMethod").letter,
                             idx)] = md.collectionMethod

        # validity
        if md.validFrom:
            ws["{}{}".format(col.get("validFrom").letter,
                             idx)] = utils.hlpr_datetimes(md.validFrom)

        if md.validTo:
            ws["{}{}".format(col.get("validTo").letter,
                             idx)] = utils.hlpr_datetimes(md.validTo)

        if md.updateFrequency:
            ws["{}{}".format(col.get("updateFrequency").letter,
                             idx)] = self.fmt.frequency_as_explicit_str(
                                 md.updateFrequency)
        if md.validityComment:
            ws["{}{}".format(col.get("validityComment").letter,
                             idx)] = md.validityComment

        # -- EVENTS --------------------------------------------------------------------
        # data creation date
        if md.created:
            ws["{}{}".format(col.get("created").letter,
                             idx)] = utils.hlpr_datetimes(md.created)

        # events count
        if md.events:
            ws["{}{}".format(col.get("events").letter, idx)] = len(md.events)

        # data last update
        if md.modified:
            ws["{}{}".format(col.get("modified").letter,
                             idx)] = utils.hlpr_datetimes(md.modified)

        # -- TECHNICAL -----------------------------------------------------------------
        # format
        if md.format and md.type in ("rasterDataset", "vectorDataset"):
            format_lbl = next(v for k, v in md.tags.items() if "format:" in k)
            ws["{}{}".format(col.get("format").letter,
                             idx)] = "{0} ({1} - {2})".format(
                                 format_lbl, md.formatVersion, md.encoding)
            self.stats.li_data_formats.append(format_lbl)
        elif md.format:
            ws["{}{}".format(col.get("format").letter,
                             idx)] = "{0} {1}".format(md.format,
                                                      md.formatVersion)
            self.stats.li_data_formats.append(md.format)
        else:
            pass

        # SRS
        if isinstance(md.coordinateSystem, dict):
            ws["{}{}".format(col.get("coordinateSystem").letter,
                             idx)] = "{0} ({1})".format(
                                 md.coordinateSystem.get("name"),
                                 md.coordinateSystem.get("code"))

        # bounding box (envelope)
        if md.type != "resource" and md.envelope and md.envelope.get("bbox"):
            coords = md.envelope.get("coordinates")
            if md.envelope.get("type") == "Polygon":
                bbox = ",\n".join(
                    format(coord, ".4f") for coord in md.envelope.get("bbox"))
            elif md.envelope.get("type") == "Point":
                bbox = "Centroïde : {}{}".format(coords[0], coords[1])
            else:
                bbox = ",\n".join(
                    format(coord, ".4f") for coord in md.envelope.get("bbox"))
            ws["{}{}".format(col.get("envelope").letter, idx)] = bbox

        # geometry
        if md.geometry:
            ws["{}{}".format(col.get("geometry").letter, idx)] = md.geometry

        # resolution
        if md.distance:
            ws["{}{}".format(col.get("distance").letter, idx)] = md.distance

        # scale
        if md.scale:
            ws["{}{}".format(col.get("scale").letter, idx)] = md.scale

        # features objects
        if md.features:
            ws["{}{}".format(col.get("features").letter, idx)] = md.features

        # -- QUALITY -------------------------------------------------------------------
        if md.specifications:
            ws["{}{}".format(col.get("specifications").letter,
                             idx)] = " ;\n".join(
                                 self.fmt.specifications(md.specifications))

        # topology
        if md.topologicalConsistency:
            ws["AC{}".format(idx)] = md.topologicalConsistency

        # -- FEATURE ATTRIBUTES --------------------------------------------------------
        if md.type == "vectorDataset" and isinstance(md.featureAttributes,
                                                     list):
            fields = md.featureAttributes

            # count
            ws["{}{}".format(col.get("featureAttributesCount").letter,
                             idx)] = len(fields)
            # alphabetic list
            fields_cct = sorted([
                "{} ({}) - Type : {} - Descripion : {:.20} [...]".format(
                    field.get("name"),
                    field.get("alias"),
                    field.get("dataType"),
                    # field.get("language"),
                    field.get("description", ""),
                ) for field in fields
            ])
            ws["{}{}".format(col.get("featureAttributes").letter,
                             idx)] = " ;\n".join(fields_cct)
            # if attributes analysis is activated, append the fields dict
            if hasattr(self, "ws_fa"):
                self.fa_all.append(fields)
            else:
                pass

        # -- CGUs ----------------------------------------------------------------------
        if md.conditions:
            ws["{}{}".format(col.get("conditions").letter, idx)] = " ;\n".join(
                self.fmt.conditions(md.conditions))

        # -- LIMITATIONS ---------------------------------------------------------------
        if md.limitations:
            ws["{}{}".format(col.get("limitations").letter,
                             idx)] = " ;\n".join(
                                 self.fmt.limitations(md.limitations))

        # -- CONTACTS ------------------------------------------------------------------
        if md.contacts:
            contacts = [
                "{0} ({1})".format(
                    contact.get("contact").get("name"),
                    contact.get("contact").get("email"),
                ) for contact in md.contacts
            ]
            ws["{}{}".format(col.get("contacts").letter,
                             idx)] = " ;\n".join(contacts)

        # -- ACTIONS -------------------------------------------------------------------
        ws["{}{}".format(col.get("hasLinkDownload").letter,
                         idx)] = ("action:download" in md.tags)
        ws["{}{}".format(col.get("hasLinkView").letter,
                         idx)] = "action:view" in md.tags
        ws["{}{}".format(col.get("hasLinkOther").letter,
                         idx)] = ("action:other" in md.tags)

        # -- METADATA ------------------------------------------------------------------
        # id
        ws["{}{}".format(col.get("_id").letter, idx)] = md._id

        # creation
        if md._created:
            ws["{}{}".format(col.get("_created").letter,
                             idx)] = utils.hlpr_datetimes(md._created)
            # add creation date (not datetime) for later stats
            self.stats.li_dates_md_created.append(
                utils.hlpr_datetimes(md._created).date())

        # last update
        if md._modified:
            ws["{}{}".format(col.get("_modified").letter,
                             idx)] = utils.hlpr_datetimes(md._modified)
            # add modification date (not datetime) for later stats, only if different from the creation date
            if md._modified != md._created:
                self.stats.li_dates_md_modified.append(
                    utils.hlpr_datetimes(md._modified).date())

        # edit
        ws["{}{}".format(col.get("linkEdit").letter,
                         idx)] = utils.get_edit_url(md)
        if self.share is not None:
            link_visu = utils.get_view_url(md_id=md._id,
                                           share_id=self.share._id,
                                           share_token=self.share.urlToken)
            ws["{}{}".format(col.get("linkView").letter, idx)] = link_visu

        # lang
        ws["{}{}".format(col.get("language").letter, idx)] = md.language

        # log
        logger.info("Metadata stored: {} ({})".format(
            md.title_or_name(slugged=1), md._id))
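
The method fills one worksheet row per metadata, so the surrounding export loop is simply an enumeration over the search results. In the sketch below, `exporter` and `search.results` are assumed names, and row 1 is assumed to hold the column headers.

# Driver sketch (assumes an `exporter` instance and an iterable of Metadata in search.results)
from openpyxl import Workbook

wb = Workbook()
ws = wb.active
for idx, md in enumerate(search.results, start=2):  # row 1 is assumed to carry the headers
    exporter.store_md_generic(md=md, ws=ws, idx=idx)
wb.save("isogeo_export.xlsx")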