Example #1
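    # Original export(): flattens each search hit's tiles, groups the records by graph, and emits geojson, tilecsv, or shp output.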
    def export(self, format):
        ret = []
        search_res_json = SearchView.search_results(self.search_request)
        if search_res_json.status_code == 500:
            return ret, ""
        results = JSONDeserializer().deserialize(search_res_json.content)
        instances = results["results"]["hits"]["hits"]
        output = {}

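        # Flatten each hit's tiles into a single record and group the records by graph_id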
        for resource_instance in instances:
            use_fieldname = self.format in ("shp",)
            resource_obj = self.flatten_tiles(
                resource_instance["_source"]["tiles"], self.datatype_factory, compact=self.compact, use_fieldname=use_fieldname
            )
            has_geom = resource_obj.pop("has_geometry")
            skip_resource = self.format in ("shp",) and has_geom is False
            if skip_resource is False:
                try:
                    output[resource_instance["_source"]["graph_id"]]["output"].append(resource_obj)
                except KeyError:
                    output[resource_instance["_source"]["graph_id"]] = {"output": []}
                    output[resource_instance["_source"]["graph_id"]]["output"].append(resource_obj)

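        # Build the export payload for each graph in the requested format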
        for graph_id, resources in output.items():
            graph = models.GraphModel.objects.get(pk=graph_id)
            if format == "geojson":
                headers = list(graph.node_set.filter(exportable=True).values_list("name", flat=True))
                ret = self.to_geojson(resources["output"], headers=headers, name=graph.name)
                return ret, ""

            if format == "tilecsv":
                headers = list(graph.node_set.filter(exportable=True).values_list("name", flat=True))
                headers.append("resourceid")
                ret.append(self.to_csv(resources["output"], headers=headers, name=graph.name))
            if format == "shp":
                headers = graph.node_set.filter(exportable=True).values("fieldname", "datatype", "name")[::1]
                missing_field_names = []
                for header in headers:
                    if not header["fieldname"]:
                        missing_field_names.append(header["name"])
                    header.pop("name")
                if len(missing_field_names) > 0:
                    message = _("Shapefile are fieldnames required for the following nodes: {0}".format(", ".join(missing_field_names)))
                    logger.error(message)
                    raise (Exception(message))

                headers = graph.node_set.filter(exportable=True).values("fieldname", "datatype")[::1]
                headers.append({"fieldname": "resourceid", "datatype": "str"})
                ret += self.to_shp(resources["output"], headers=headers, name=graph.name)

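        # Record this export run against the requesting user in the search export history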
        full_path = self.search_request.get_full_path()
        search_request_path = self.search_request.path if full_path is None else full_path
        search_export_info = models.SearchExportHistory(
            user=self.search_request.user, numberofinstances=len(instances), url=search_request_path
        )
        search_export_info.save()

        return ret, search_export_info
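
    # Revised export(): adds a report_link flag, optional card-ordered headers, and tilexl/html output formats.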
    def export(self, format, report_link):
        ret = []
        search_res_json = SearchView.search_results(self.search_request)
        if search_res_json.status_code == 500:
            return ret, ""
        results = JSONDeserializer().deserialize(search_res_json.content)
        instances = results["results"]["hits"]["hits"]
        output = {}

        for resource_instance in instances:
            use_fieldname = self.format in ("shp",)
            resource_obj = self.flatten_tiles(
                resource_instance["_source"]["tiles"], self.datatype_factory, compact=self.compact, use_fieldname=use_fieldname
            )
            has_geom = resource_obj.pop("has_geometry")
            skip_resource = self.format in ("shp",) and has_geom is False
            if skip_resource is False:
                try:
                    output[resource_instance["_source"]["graph_id"]]["output"].append(resource_obj)
                except KeyError:
                    output[resource_instance["_source"]["graph_id"]] = {"output": []}
                    output[resource_instance["_source"]["graph_id"]]["output"].append(resource_obj)

        for graph_id, resources in output.items():
            graph = models.GraphModel.objects.get(pk=graph_id)

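            # When report links are requested, attach each resource's report URL under the "Link" key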
            if (report_link == "true") and (format != "tilexl"):
                for resource in resources["output"]:
                    report_url = reverse("resource_report", kwargs={"resourceid": resource["resourceid"]})
                    export_namespace = settings.ARCHES_NAMESPACE_FOR_DATA_EXPORT.rstrip("/")
                    resource["Link"] = f"{export_namespace}{report_url}"

            if format == "geojson":

                if settings.EXPORT_DATA_FIELDS_IN_CARD_ORDER is True:
                    headers = self.return_ordered_header(graph_id, "csv")
                else:
                    headers = list(graph.node_set.filter(exportable=True).values_list("name", flat=True))

                if (report_link == "true") and ("Link" not in headers):
                    headers.append("Link")
                ret = self.to_geojson(resources["output"], headers=headers, name=graph.name)
                return ret, ""

            if format == "tilecsv":

                if settings.EXPORT_DATA_FIELDS_IN_CARD_ORDER is True:
                    headers = self.return_ordered_header(graph_id, "csv")
                else:
                    headers = list(graph.node_set.filter(exportable=True).values_list("name", flat=True))

                headers.append("resourceid")
                if (report_link == "true") and ("Link" not in headers):
                    headers.append("Link")
                ret.append(self.to_csv(resources["output"], headers=headers, name=graph.name))

            if format == "shp":

                if settings.EXPORT_DATA_FIELDS_IN_CARD_ORDER is True:
                    headers = self.return_ordered_header(graph_id, "shp")
                else:
                    headers = graph.node_set.filter(exportable=True).values("fieldname", "datatype", "name")[::1]

                headers.append({"fieldname": "resourceid", "datatype": "str"})

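                # Shapefile columns need explicit fieldnames; collect any exportable nodes that lack one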
                missing_field_names = []
                for header in headers:
                    if not header["fieldname"]:
                        missing_field_names.append(header["name"])
                        header.pop("name")
                if len(missing_field_names) > 0:
                    message = _("Shapefile are fieldnames required for the following nodes: {0}".format(", ".join(missing_field_names)))
                    logger.error(message)
                    raise (Exception(message))

                if (report_link == "true") and ({"fieldname": "Link", "datatype": "str"} not in headers):
                    headers.append({"fieldname": "Link", "datatype": "str"})
                else:
                    pass
                ret += self.to_shp(resources["output"], headers=headers, name=graph.name)

            if format == "tilexl":
                headers = graph.node_set.filter(exportable=True).values("fieldname", "datatype", "name")[::1]
                headers = graph.node_set.filter(exportable=True).values("fieldname", "datatype")[::1]
                headers.append({"fieldname": "resourceid", "datatype": "str"})
                ret += self.to_tilexl(resources["output"])

            if format == "html":
                ret += self.to_html(resources["output"], name=graph.name, graph_id=str(graph.pk))

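        # Log the completed export to SearchExportHistory for the requesting user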
        full_path = self.search_request.get_full_path()
        search_request_path = self.search_request.path if full_path is None else full_path
        search_export_info = models.SearchExportHistory(
            user=self.search_request.user, numberofinstances=len(instances), url=search_request_path
        )
        search_export_info.save()

        return ret, search_export_info