Example #1
File: tileserver.py Project: mrcnc/arches
def clean_resource_cache(tile):
    # get the tile model's bounds
    datatype_factory = DataTypeFactory()
    nodegroup = models.NodeGroup.objects.get(pk=tile.nodegroup_id)
    for node in nodegroup.node_set.all():
        datatype = datatype_factory.get_instance(node.datatype)
        if datatype.should_cache(node) and datatype.should_manage_cache(node):
            bounds = datatype.get_bounds(tile, node)
            if bounds is not None:
                zooms = range(20)
                config = TileStache.parseConfig(
                    get_tileserver_config(node.nodeid))
                layer = config.layers[str(node.nodeid)]
                mimetype, format = layer.getTypeByExtension('pbf')

                lon1, lat1, lon2, lat2 = bounds
                south, west = min(lat1, lat2), min(lon1, lon2)
                north, east = max(lat1, lat2), max(lon1, lon2)

                northwest = Location(north, west)
                southeast = Location(south, east)

                ul = layer.projection.locationCoordinate(northwest)
                lr = layer.projection.locationCoordinate(southeast)

                padding = 0
                coordinates = generateCoordinates(ul, lr, zooms, padding)

                for (offset, count, coord) in coordinates:
                    config.cache.remove(layer, coord, format)
    for key, tile_list in tile.tiles.iteritems():
        for child_tile in tile_list:
            clean_resource_cache(child_tile)
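All of these examples share the same basic pattern: construct a DataTypeFactory, resolve each node's datatype string to a datatype instance, and delegate the datatype-specific work (display values, validation, search filters, caching) to that instance. Below is a minimal sketch of that shared pattern, assuming the standard Arches import paths; print_display_values and its tile argument are illustrative placeholders, not part of any quoted file.

# Minimal sketch of the DataTypeFactory pattern shared by these examples.
# Assumes an Arches environment; `tile` is any Tile/TileModel with a
# populated `data` dict and a nodegroup_id (placeholder, not from a quoted file).
from arches.app.datatypes.datatypes import DataTypeFactory
from arches.app.models import models


def print_display_values(tile):
    datatype_factory = DataTypeFactory()
    for node in models.Node.objects.filter(nodegroup_id=tile.nodegroup_id):
        if str(node.nodeid) in tile.data:
            # Each datatype instance knows how to render its own stored values.
            datatype = datatype_factory.get_instance(node.datatype)
            print(node.name, datatype.get_display_value(tile, node))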
Example #2
    def get(self, request, resourceid=None, include_display_values=True):
        datatype_factory = DataTypeFactory()
        nodeid = request.GET.get("nodeid", None)
        search_term = request.GET.get("term", None)
        permitted_tiles = []
        perm = "read_nodegroup"
        tiles = models.TileModel.objects.filter(resourceinstance_id=resourceid)
        if nodeid is not None:
            node = models.Node.objects.get(pk=nodeid)
            tiles = tiles.filter(nodegroup=node.nodegroup)

        for tile in tiles:
            if request.user.has_perm(perm, tile.nodegroup):
                tile = Tile.objects.get(pk=tile.tileid)
                tile.filter_by_perm(request.user, perm)
                tile_dict = model_to_dict(tile)
                if include_display_values:
                    tile_dict["display_values"] = []
                    for node in models.Node.objects.filter(nodegroup=tile.nodegroup):
                        if str(node.nodeid) in tile.data:
                            datatype = datatype_factory.get_instance(node.datatype)
                            display_value = datatype.get_display_value(tile, node)
                            if search_term is not None and search_term in display_value:
                                tile_dict["display_values"].append({"value": display_value, "label": node.name, "nodeid": node.nodeid})
                            elif search_term is None:
                                tile_dict["display_values"].append({"value": display_value, "label": node.name, "nodeid": node.nodeid})

                if search_term is None:
                    permitted_tiles.append(tile_dict)
                elif len(tile_dict["display_values"]) > 0:
                    permitted_tiles.append(tile_dict)
        return JSONResponse({"tiles": permitted_tiles})
Example #3
 def append_dsl(self, search_results_object, permitted_nodegroups, include_provisional):
     querysting_params = self.request.GET.get(details["componentname"], "")
     advanced_filters = JSONDeserializer().deserialize(querysting_params)
     datatype_factory = DataTypeFactory()
     search_query = Bool()
     advanced_query = Bool()
     grouped_query = Bool()
     grouped_queries = [grouped_query]
     for index, advanced_filter in enumerate(advanced_filters):
         tile_query = Bool()
         null_query = Bool()
         for key, val in advanced_filter.items():
             if key != "op":
                 node = models.Node.objects.get(pk=key)
                 if self.request.user.has_perm("read_nodegroup", node.nodegroup):
                     datatype = datatype_factory.get_instance(node.datatype)
                     if ("op" in val and (val["op"] == "null" or val["op"] == "not_null")) or (
                         "val" in val and (val["val"] == "null" or val["val"] == "not_null")
                     ):
                         # don't use a nested query with the null/not null search
                         datatype.append_search_filters(val, node, null_query, self.request)
                     else:
                         datatype.append_search_filters(val, node, tile_query, self.request)
         nested_query = Nested(path="tiles", query=tile_query)
         if advanced_filter["op"] == "or" and index != 0:
             grouped_query = Bool()
             grouped_queries.append(grouped_query)
         grouped_query.must(nested_query)
         grouped_query.must(null_query)
     for grouped_query in grouped_queries:
         advanced_query.should(grouped_query)
     search_query.must(advanced_query)
     search_results_object["query"].add_query(search_query)
Example #4
File: resource.py Project: k-int/arches
    def get(self, request, resourceid=None, include_display_values=True):
        datatype_factory = DataTypeFactory()
        nodeid = request.GET.get('nodeid', None)
        permitted_tiles = []
        perm = 'read_nodegroup'
        tiles = models.TileModel.objects.filter(resourceinstance_id=resourceid)
        if nodeid is not None:
            node = models.Node.objects.get(pk=nodeid)
            tiles = tiles.filter(nodegroup=node.nodegroup)

        for tile in tiles:
            if request.user.has_perm(perm, tile.nodegroup):
                tile = Tile.objects.get(pk=tile.tileid)
                tile.filter_by_perm(request.user, perm)
                tile_dict = model_to_dict(tile)
                if include_display_values:
                    tile_dict['display_values'] = []
                    for node in models.Node.objects.filter(
                            nodegroup=tile.nodegroup):
                        if str(node.nodeid) in tile.data:
                            datatype = datatype_factory.get_instance(
                                node.datatype)
                            tile_dict['display_values'].append({
                                'value':
                                datatype.get_display_value(tile, node),
                                'label':
                                node.name,
                                'nodeid':
                                node.nodeid
                            })
                permitted_tiles.append(tile_dict)

        return JSONResponse({'tiles': permitted_tiles})
Example #5
    def get_primary_descriptor_from_nodes(self, resource, config):
        try:
            if 'nodegroup_id' in config and config[
                    'nodegroup_id'] != '' and config[
                        'nodegroup_id'] is not None:
                tiles = models.TileModel.objects.filter(
                    nodegroup_id=uuid.UUID(config['nodegroup_id']),
                    sortorder=0).filter(
                        resourceinstance_id=resource.resourceinstanceid)
                if len(tiles) == 0:
                    tiles = models.TileModel.objects.filter(
                        nodegroup_id=uuid.UUID(config['nodegroup_id'])).filter(
                            resourceinstance_id=resource.resourceinstanceid)
                for tile in tiles:
                    for node in models.Node.objects.filter(
                            nodegroup_id=uuid.UUID(config['nodegroup_id'])):
                        if len(tile.data.keys()) > 0:
                            data = tile.data
                        elif tile.provisionaledits is not None and len(
                                tile.provisionaledits.keys()) == 1:
                            userid = tile.provisionaledits.keys()[0]
                            data = tile.provisionaledits[userid]['value']
                        if str(node.nodeid) in data:
                            datatype_factory = DataTypeFactory()
                            datatype = datatype_factory.get_instance(
                                node.datatype)
                            value = datatype.get_display_value(tile, node)
                            config['string_template'] = config[
                                'string_template'].replace(
                                    '<%s>' % node.name, value)
        except ValueError as e:
            print(e,
                  'invalid nodegroupid participating in descriptor function.')

        return config['string_template']
Example #6
File: tile.py Project: mradamcox/arches
    def save(self, *args, **kwargs):
        request = kwargs.pop('request', None)
        index = kwargs.pop('index', True)
        self.__preSave(request)
        if self.data != {}:
            old_model = models.TileModel.objects.filter(pk=self.tileid)
            old_data = old_model[0].data if len(old_model) > 0 else None
            edit_type = 'tile create' if (old_data == None) else 'tile edit'
            try:
                user = request.user
            except:
                user = {}
            self.save_edit(user=user, edit_type=edit_type, old_value=old_data, new_value=self.data)
            for nodeid, value in self.data.iteritems():
                datatype_factory = DataTypeFactory()
                datatype = datatype_factory.get_instance(models.Node.objects.get(nodeid=nodeid).datatype)
                datatype.convert_value(self, nodeid)

        super(Tile, self).save(*args, **kwargs)
        if index and unicode(self.resourceinstance.graph_id) != unicode(settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID):
            self.index()
        for tiles in self.tiles.itervalues():
            for tile in tiles:
                tile.resourceinstance = self.resourceinstance
                tile.parenttile = self
                tile.save(*args, request=request, index=index, **kwargs)
Example #7
File: resource.py Project: fargeo/arches
    def get(self, request, resourceid=None, include_display_values=True):
        datatype_factory = DataTypeFactory()
        nodeid = request.GET.get('nodeid', None)
        permitted_tiles = []
        perm = 'read_nodegroup'
        tiles = models.TileModel.objects.filter(resourceinstance_id=resourceid)
        if nodeid is not None:
            node = models.Node.objects.get(pk=nodeid)
            tiles = tiles.filter(nodegroup=node.nodegroup)

        for tile in tiles:
            if request.user.has_perm(perm, tile.nodegroup):
                tile = Tile.objects.get(pk=tile.tileid)
                tile.filter_by_perm(request.user, perm)
                tile_dict = model_to_dict(tile)
                if include_display_values:
                    tile_dict['display_values'] = []
                    for node in models.Node.objects.filter(nodegroup=tile.nodegroup):
                        if str(node.nodeid) in tile.data:
                            datatype = datatype_factory.get_instance(node.datatype)
                            tile_dict['display_values'].append({
                                'value': datatype.get_display_value(tile, node),
                                'label': node.name,
                                'nodeid': node.nodeid
                            })
                permitted_tiles.append(tile_dict)

        return JSONResponse({'tiles': permitted_tiles})
Example #8
 def get_primary_descriptor_from_nodes(self, resource, config):
     datatype_factory = None
     try:
         if "nodegroup_id" in config and config["nodegroup_id"] != "" and config["nodegroup_id"] is not None:
             tiles = models.TileModel.objects.filter(nodegroup_id=uuid.UUID(config["nodegroup_id"]), sortorder=0).filter(
                 resourceinstance_id=resource.resourceinstanceid
             )
             if len(tiles) == 0:
                 tiles = models.TileModel.objects.filter(nodegroup_id=uuid.UUID(config["nodegroup_id"])).filter(
                     resourceinstance_id=resource.resourceinstanceid
                 )
             for tile in tiles:
                 for node in models.Node.objects.filter(nodegroup_id=uuid.UUID(config["nodegroup_id"])):
                     data = {}
                     if len(list(tile.data.keys())) > 0:
                         data = tile.data
                     elif tile.provisionaledits is not None and len(list(tile.provisionaledits.keys())) == 1:
                         userid = list(tile.provisionaledits.keys())[0]
                         data = tile.provisionaledits[userid]["value"]
                     if str(node.nodeid) in data:
                         if not datatype_factory:
                             datatype_factory = DataTypeFactory()
                         datatype = datatype_factory.get_instance(node.datatype)
                         value = datatype.get_display_value(tile, node)
                         if value is None:
                             value = ""
                         config["string_template"] = config["string_template"].replace("<%s>" % node.name, str(value))
     except ValueError as e:
         print(e, "invalid nodegroupid participating in descriptor function.")
     if config["string_template"].strip() == "":
         config["string_template"] = _("Undefined")
     return config["string_template"]
Example #9
File: tileserver.py Project: fargeo/arches
def clean_resource_cache(tile):
    # get the tile model's bounds
    datatype_factory = DataTypeFactory()
    nodegroup = models.NodeGroup.objects.get(pk=tile.nodegroup_id)
    for node in nodegroup.node_set.all():
        datatype = datatype_factory.get_instance(node.datatype)
        if datatype.should_cache(node) and datatype.should_manage_cache(node):
            bounds = datatype.get_bounds(tile, node)
            if bounds is not None:
                zooms = range(20)
                config = TileStache.parseConfig(
                    get_tileserver_config(node.nodeid))
                layer = config.layers[str(node.nodeid)]
                mimetype, format = layer.getTypeByExtension('pbf')

                lon1, lat1, lon2, lat2 = bounds
                south, west = min(lat1, lat2), min(lon1, lon2)
                north, east = max(lat1, lat2), max(lon1, lon2)

                northwest = Location(north, west)
                southeast = Location(south, east)

                ul = layer.projection.locationCoordinate(northwest)
                lr = layer.projection.locationCoordinate(southeast)

                padding = 0
                coordinates = generateCoordinates(ul, lr, zooms, padding)

                for (offset, count, coord) in coordinates:
                    config.cache.remove(layer, coord, format)
    for key, tile_list in tile.tiles.iteritems():
        for child_tile in tile_list:
            clean_resource_cache(child_tile)
Example #10
 def __init__(self):
     self.tiles = {}
     self.errors = {}
     self.resources = []
     self.use_ids = False
     self.datatype_factory = DataTypeFactory()
     self.resource_model_root_classes = set()
     self.non_unique_classes = set()
     self.graph_id_lookup = {}
     self.logger = logging.getLogger(__name__)
     for graph in models.GraphModel.objects.filter(isresource=True):
         node = models.Node.objects.get(graph_id=graph.pk, istopnode=True)
         self.graph_id_lookup[node.ontologyclass] = graph.pk
         if node.ontologyclass in self.resource_model_root_classes:
             # make a note of non-unique root classes
             self.non_unique_classes.add(node.ontologyclass)
         else:
             self.resource_model_root_classes.add(node.ontologyclass)
     self.resource_model_root_classes = self.resource_model_root_classes - self.non_unique_classes
     self.ontologyproperties = models.Edge.objects.values_list(
         'ontologyproperty', flat=True).distinct()
     self.logger.info("Initialized JsonLdReader")
     self.logger.debug("Found {0} Non-unique root classes".format(
         len(self.non_unique_classes)))
     self.logger.debug("Found {0} Resource Model Root classes".format(
         len(self.resource_model_root_classes)))
     self.logger.debug("Resource Model Root classes: {0}".format("\n".join(
         list(map(str, self.resource_model_root_classes)))))
Example #11
 def check_for_constraint_violation(self, request):
     card = models.CardModel.objects.get(nodegroup=self.nodegroup)
     constraints = models.ConstraintModel.objects.filter(card=card)
     if constraints.count() > 0:
         for constraint in constraints:
             if constraint.uniquetoallinstances is True:
                 tiles = models.TileModel.objects.filter(nodegroup=self.nodegroup)
             else:
                 tiles = models.TileModel.objects.filter(
                     Q(resourceinstance_id=self.resourceinstance.resourceinstanceid) & Q(nodegroup=self.nodegroup)
                 )
             nodes = [node for node in constraint.nodes.all()]
             for tile in tiles:
                 if str(self.tileid) != str(tile.tileid):
                     match = False
                     duplicate_values = []
                     for node in nodes:
                         datatype_factory = DataTypeFactory()
                         datatype = datatype_factory.get_instance(node.datatype)
                         nodeid = str(node.nodeid)
                         if datatype.values_match(tile.data[nodeid], self.data[nodeid]):
                             match = True
                             duplicate_values.append(datatype.get_display_value(tile, node))
                         else:
                             match = False
                             break
                     if match is True:
                         message = _(
                             "This card violates a unique constraint. \
                             The following value is already saved: "
                         )
                         raise TileValidationError(message + (", ").join(duplicate_values))
Example #12
File: tile.py Project: digimatspa/arches
 def check_for_missing_nodes(self, request):
     missing_nodes = []
     for nodeid, value in self.data.items():
         try:
             datatype_factory = DataTypeFactory()
             node = models.Node.objects.get(nodeid=nodeid)
             datatype = datatype_factory.get_instance(node.datatype)
             datatype.clean(self, nodeid)
             if request is not None:
                 if self.data[nodeid] is None and node.isrequired is True:
                     if len(node.cardxnodexwidget_set.all()) > 0:
                         missing_nodes.append(
                             node.cardxnodexwidget_set.all()[0].label)
                     else:
                         missing_nodes.append(node.name)
         except Exception as e:
             warning = _(
                 f"Error checking for missing node. Nodeid: {nodeid} with value: {value}, not in nodes. \
                 You may have a node in your business data that no longer exists in any graphs."
             )
             logger.warning(warning)
     if missing_nodes != []:
         message = _("This card requires values for the following: ")
         message += (", ").join(missing_nodes)
         raise TileValidationError(message)
Example #13
    def save(self, *args, **kwargs):
        request = kwargs.pop('request', None)
        index = kwargs.pop('index', True)
        self.__preSave(request)
        if self.data != {}:
            old_model = models.TileModel.objects.filter(pk=self.tileid)
            old_data = old_model[0].data if len(old_model) > 0 else None
            edit_type = 'tile create' if (old_data == None) else 'tile edit'
            try:
                user = request.user
            except:
                user = {}
            self.save_edit(user=user,
                           edit_type=edit_type,
                           old_value=old_data,
                           new_value=self.data)
            for nodeid, value in self.data.iteritems():
                datatype_factory = DataTypeFactory()
                datatype = datatype_factory.get_instance(
                    models.Node.objects.get(nodeid=nodeid).datatype)
                datatype.convert_value(self, nodeid)

        super(Tile, self).save(*args, **kwargs)
        if index and unicode(self.resourceinstance.graph_id) != unicode(
                settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID):
            self.index()
        for tiles in self.tiles.itervalues():
            for tile in tiles:
                tile.resourceinstance = self.resourceinstance
                tile.parenttile = self
                tile.save(*args, request=request, index=index, **kwargs)
Example #14
 def append_dsl(self, search_results_object, permitted_nodegroups,
                include_provisional):
     querysting_params = self.request.GET.get(details['componentname'], '')
     advanced_filters = JSONDeserializer().deserialize(querysting_params)
     datatype_factory = DataTypeFactory()
     search_query = Bool()
     advanced_query = Bool()
     grouped_query = Bool()
     grouped_queries = [grouped_query]
     for index, advanced_filter in enumerate(advanced_filters):
         tile_query = Bool()
         for key, val in advanced_filter.iteritems():
             if key != 'op':
                 node = models.Node.objects.get(pk=key)
                 if self.request.user.has_perm('read_nodegroup',
                                               node.nodegroup):
                     datatype = datatype_factory.get_instance(node.datatype)
                     datatype.append_search_filters(val, node, tile_query,
                                                    self.request)
         nested_query = Nested(path='tiles', query=tile_query)
         if advanced_filter['op'] == 'or' and index != 0:
             grouped_query = Bool()
             grouped_queries.append(grouped_query)
         grouped_query.must(nested_query)
     for grouped_query in grouped_queries:
         advanced_query.should(grouped_query)
     search_query.must(advanced_query)
     search_results_object['query'].add_query(search_query)
Example #15
    def get_context_data(self, **kwargs):
        context = super(MapBaseManagerView, self).get_context_data(**kwargs)
        datatype_factory = DataTypeFactory()
        geom_datatypes = [
            d.pk for d in models.DDataType.objects.filter(isgeometric=True)
        ]
        geom_nodes = models.Node.objects.filter(
            graph__isresource=True,
            graph__isactive=True,
            datatype__in=geom_datatypes).exclude(
                graph__graphid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID)
        resource_layers = []
        resource_sources = []
        for node in geom_nodes:
            if self.request.user.has_perm("read_nodegroup", node.nodegroup):
                datatype = datatype_factory.get_instance(node.datatype)
                map_source = datatype.get_map_source(node)
                if map_source is not None:
                    resource_sources.append(map_source)
                    map_layer = datatype.get_map_layer(node)
                    if map_layer is not None:
                        resource_layers.append(map_layer)

        context["geom_nodes"] = geom_nodes
        context["resource_map_layers"] = resource_layers
        context["resource_map_sources"] = resource_sources

        return context
Example #16
 def datatype_post_save_actions(self, request=None):
     for nodeid, value in self.data.items():
         datatype_factory = DataTypeFactory()
         node = models.Node.objects.get(nodeid=nodeid)
         datatype = datatype_factory.get_instance(node.datatype)
         if request is not None:
             datatype.handle_request(self, request, node)
Example #17
    def save(self, *args, **kwargs):
        request = kwargs.pop('request', None)
        index = kwargs.pop('index', True)
        self.__preSave(request)
        missing_nodes = []
        if self.data != {}:
            old_model = models.TileModel.objects.filter(pk=self.tileid)
            old_data = old_model[0].data if len(old_model) > 0 else None
            edit_type = 'tile create' if (old_data == None) else 'tile edit'
            try:
                user = request.user
            except:
                user = {}
            self.save_edit(user=user, edit_type=edit_type, old_value=old_data, new_value=self.data)
            for nodeid, value in self.data.iteritems():
                datatype_factory = DataTypeFactory()
                node = models.Node.objects.get(nodeid=nodeid)
                datatype = datatype_factory.get_instance(node.datatype)
                datatype.convert_value(self, nodeid)
                if self.data[nodeid] == None and node.isrequired == True:
                    missing_nodes.append(node.name)

        if missing_nodes != []:
            message = _('This card requires values for the following:')
            raise ValidationError(message, (', ').join(missing_nodes))

        super(Tile, self).save(*args, **kwargs)
        if index and unicode(self.resourceinstance.graph_id) != unicode(settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID):
            self.index()
        for tiles in self.tiles.itervalues():
            for tile in tiles:
                tile.resourceinstance = self.resourceinstance
                tile.parenttile = self
                tile.save(*args, request=request, index=index, **kwargs)
Example #18
    def parse_and_validate_resource(self, request, node_id):
        # currently not working

        cell_value = json.loads(request.POST.get('cell_value'))

        datatype_factory = DataTypeFactory()

        node = models.Node.objects.get(pk=node_id)
        datatype = datatype_factory.get_instance(node.datatype)

        errors = []

        # GET RID OF TRY AFTER DOMAIN VALUE REFACTOR!
        try:
            validation_errors = datatype.validate(cell_value, node=node)

            if validation_errors:
                errors.append({
                    'errors': validation_errors,
                    'node_id': node_id,
                    'cell_value': cell_value
                })

        except Exception as e:
            print(str(e))

        return JSONResponse({'errors': errors})
Example #19
    def get_primary_descriptor_from_nodes(self, resource, config):
        try:
            for tile in models.TileModel.objects.filter(
                    nodegroup_id=uuid.UUID(config['nodegroup_id']),
                    sortorder=0).filter(
                        resourceinstance_id=resource.resourceinstanceid):
                for node in models.Node.objects.filter(
                        nodegroup_id=uuid.UUID(config['nodegroup_id'])):
                    if str(node.nodeid) in tile.data:
                        value = tile.data[str(node.nodeid)]
                        if value:
                            datatype_factory = DataTypeFactory()
                            datatype = datatype_factory.get_instance(
                                node.datatype)
                            display_value = datatype.get_display_value(
                                tile, node)
                            if display_value is not None:
                                value = display_value.value
                            config['string_template'] = config[
                                'string_template'].replace(
                                    '<%s>' % node.name, value)

        except ValueError as e:
            print e, 'invalid nodegroupid'

        return config['string_template']
Example #20
File: map.py Project: fargeo/arches
    def get(self, request):
        se = SearchEngineFactory().create()
        datatype_factory = DataTypeFactory()
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_markers = models.MapMarker.objects.all()
        map_sources = models.MapSource.objects.all()
        icons = models.Icon.objects.order_by('name')
        context = self.get_context_data(
            icons=JSONSerializer().serialize(icons),
            datatypes=datatypes,
            widgets=widgets,
            map_layers=map_layers,
            map_markers=map_markers,
            map_sources=map_sources,
            datatypes_json=JSONSerializer().serialize(datatypes),
            main_script='views/map-layer-manager',
        )

        def get_resource_bounds(node):
            query = Query(se, start=0, limit=0)
            search_query = Bool()
            query.add_query(search_query)
            query.add_aggregation(GeoBoundsAgg(field='points.point', name='bounds'))
            results = query.search(index='resource', doc_type=[str(node.graph_id)])
            bounds = results['aggregations']['bounds']['bounds'] if 'bounds' in results['aggregations']['bounds'] else None
            return bounds

        context['geom_nodes_json'] = JSONSerializer().serialize(context['geom_nodes'])
        resource_layers = []
        resource_sources = []
        permissions = {}
        for node in context['geom_nodes']:
            datatype = datatype_factory.get_instance(node.datatype)
            map_layer = datatype.get_map_layer(node=node, preview=True)
            if map_layer is not None:
                count = models.TileModel.objects.filter(data__has_key=str(node.nodeid)).count()
                if count > 0:
                    map_layer['bounds'] = get_resource_bounds(node)
                else:
                    map_layer['bounds'] = None
                resource_layers.append(map_layer)
            map_source = datatype.get_map_source(node=node, preview=True)
            if map_source is not None:
                resource_sources.append(map_source)
            permissions[str(node.pk)] = {
                "users": sorted([user.email or user.username for user in get_users_for_object('read_nodegroup', node.nodegroup)]),
                "groups": sorted([group.name for group in get_groups_for_object('read_nodegroup', node.nodegroup)])
            }
        context['resource_map_layers_json'] = JSONSerializer().serialize(resource_layers)
        context['resource_map_sources_json'] = JSONSerializer().serialize(resource_sources)
        context['node_permissions'] = JSONSerializer().serialize(permissions)

        context['nav']['title'] = _('Map Layer Manager')
        context['nav']['icon'] = 'fa-server'
        context['nav']['help'] = (_('Map Layer Manager'),'help/base-help.htm')
        context['help'] = 'map-manager-help'

        return render(request, 'views/map-layer-manager.htm', context)
Example #21
File: tileserver.py Project: fargeo/arches
def seed_resource_cache():
    datatype_factory = DataTypeFactory()
    zooms = range(settings.CACHE_SEED_MAX_ZOOM + 1)
    extension = 'pbf'

    lat1, lon1, lat2, lon2 = GeoUtils().get_bounds_from_geojson(settings.CACHE_SEED_BOUNDS)
    south, west = min(lat1, lat2), min(lon1, lon2)
    north, east = max(lat1, lat2), max(lon1, lon2)

    northwest = Location(north, west)
    southeast = Location(south, east)

    padding = 0

    datatypes = [
        d.pk for d in models.DDataType.objects.filter(isgeometric=True)]
    nodes = models.Node.objects.filter(
        graph__isresource=True, datatype__in=datatypes)
    for node in nodes:
        datatype = datatype_factory.get_instance(node.datatype)
        count = models.TileModel.objects.filter(
            data__has_key=str(node.nodeid)).count()
        if datatype.should_cache(node) and count > 0:
            config = TileStache.parseConfig(get_tileserver_config(node.nodeid))
            layer = config.layers[str(node.nodeid)]
            ul = layer.projection.locationCoordinate(northwest)
            lr = layer.projection.locationCoordinate(southeast)
            coordinates = generateCoordinates(ul, lr, zooms, padding)
            for (offset, count, coord) in coordinates:
                path = '%s/%d/%d/%d.%s' % (layer.name(), coord.zoom,
                                           coord.column, coord.row, extension)

                progress = {"tile": path,
                            "offset": offset + 1,
                            "total": count}

                attempts = 3
                rendered = False

                while not rendered:
                    print '%(offset)d of %(total)d...' % progress,

                    try:
                        mimetype, content = TileStache.getTile(
                            layer, coord, extension, True)

                    except:
                        attempts -= 1
                        print 'Failed %s, will try %s more.' % (progress['tile'], ['no', 'once', 'twice'][attempts])

                        if attempts == 0:
                            print 'Failed %(zoom)d/%(column)d/%(row)d, trying next tile.\n' % coord.__dict__
                            break

                    else:
                        rendered = True
                        progress['size'] = '%dKB' % (len(content) / 1024)

                        print '%(tile)s (%(size)s)' % progress
Example #22
File: tile.py Project: digimatspa/arches
 def after_update_all(self):
     nodegroup = models.NodeGroup.objects.get(pk=self.nodegroup_id)
     datatype_factory = DataTypeFactory()
     for node in nodegroup.node_set.all():
         datatype = datatype_factory.get_instance(node.datatype)
         datatype.after_update_all()
     for tile in self.tiles:
         tile.after_update_all()
Example #23
File: tile.py Project: mrcnc/arches
 def validate(self, errors=None):
     for nodeid, value in self.data.iteritems():
         datatype_factory = DataTypeFactory()
         node = models.Node.objects.get(nodeid=nodeid)
         datatype = datatype_factory.get_instance(node.datatype)
         error = datatype.validate(value)
         if errors != None:
             errors += error
     return errors
Example #24
    def __init__(self, **kwargs):
        super(CsvWriter, self).__init__(**kwargs)
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in  Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}
        self.single_file = kwargs.pop('single_file', False)
        self.resource_export_configs = self.read_export_configs(kwargs.pop('configs', None))

        if len(self.resource_export_configs) == 0:
            raise MissingConfigException()
Example #25
File: tile.py Project: mradamcox/arches
 def after_update_all(self):
     nodegroup = models.NodeGroup.objects.get(pk=self.nodegroup_id)
     datatype_factory = DataTypeFactory()
     for node in nodegroup.node_set.all():
         datatype = datatype_factory.get_instance(node.datatype)
         datatype.after_update_all()
     for key, tile_list in self.tiles.iteritems():
         for child_tile in tile_list:
             child_tile.after_update_all()
Example #26
    def save(self, tile, request):
        if request:
            datatype_factory = DataTypeFactory()
            for node in tile.nodegroup.node_set.all():
                datatype = datatype_factory.get_instance(node.datatype)
                previously_saved_tile = Tile.objects.filter(pk=tile.tileid)
                datatype.manage_files(previously_saved_tile, tile, request, node)

        return tile
Example #27
 def __init__(self):
     super(CsvWriter, self).__init__()
     self.datatype_factory = DataTypeFactory()
     self.node_datatypes = {
         str(nodeid): datatype
         for nodeid, datatype in Node.objects.values_list(
             'nodeid', 'datatype').filter(~Q(datatype='semantic'),
                                          graph__isresource=True)
     }
Example #28
 def after_update_all(self):
     nodegroup = models.NodeGroup.objects.get(pk=self.nodegroup_id)
     datatype_factory = DataTypeFactory()
     for node in nodegroup.node_set.all():
         datatype = datatype_factory.get_instance(node.datatype)
         datatype.after_update_all()
     for key, tile_list in self.tiles.iteritems():
         for child_tile in tile_list:
             child_tile.after_update_all()
Example #29
    def validate(self, value, row_number=None, source=''):
        errors = []

        ## iterate list of values and use the concept validation on each one
        validate_concept = DataTypeFactory().get_instance('concept')
        for v in value:
            val = v.strip()
            errors += validate_concept.validate(val, row_number)
        return errors
Example #30
    def validate(self, value, row_number=None, source=''):
        errors = []

        ## iterate list of values and use the concept validation on each one
        validate_concept = DataTypeFactory().get_instance('concept')
        for v in value:
            val = v.strip()
            errors += validate_concept.validate(val, row_number)
        return errors
Example #31
    def validate(self, value, row_number=None, source="", node=None, nodeid=None):
        errors = []

        # iterate list of values and use the concept validation on each one
        if value is not None:
            validate_concept = DataTypeFactory().get_instance("concept")
            for v in value:
                val = v.strip()
                errors += validate_concept.validate(val, row_number)
        return errors
Example #32
    def save(self, tile, request):
        if request:
            datatype_factory = DataTypeFactory()
            for node in tile.nodegroup.node_set.all():
                datatype = datatype_factory.get_instance(node.datatype)
                previously_saved_tile = Tile.objects.filter(pk=tile.tileid)
                datatype.manage_files(previously_saved_tile, tile, request,
                                      node)

        return tile
Example #33
 def validate(self, errors=None):
     for nodeid, value in self.data.items():
         datatype_factory = DataTypeFactory()
         node = models.Node.objects.get(nodeid=nodeid)
         datatype = datatype_factory.get_instance(node.datatype)
         error = datatype.validate(value, node=node)
         for error_instance in error:
             if error_instance["type"] == "ERROR":
                 raise TileValidationError(_("{0}".format(error_instance["message"])))
         if errors is not None:
             errors += error
     return errors
Example #34
File: tile.py Project: msrivastava/arches
    def __init__(self, *args, **kwargs):
        """
        Init a Tile from a dictionary representation or from a model method call

        init this object by using Django query syntax, eg:
        .. code-block:: python

            Tile.objects.get(pk=some_tile_id)
            # or
            Tile.objects.filter(name=some_value_to_filter_by)

        OR, init this object with a dictionary, eg:
        .. code-block:: python

            Tile({
                name:'some name',
                tileid: '12341234-1234-1234-1324-1234123433433',
                ...
            })

        Arguments:
        args -- a dictionary of properties representing a Tile object
        kwargs -- unused

        """

        super(Tile, self).__init__(*args, **kwargs)
        # from models.TileModel
        # self.tileid
        # self.resourceinstance
        # self.parenttile
        # self.data
        # self.nodegroup
        # self.sortorder
        # end from models.TileModel
        self.tiles = []
        self.datatype_factory = DataTypeFactory()

        if args:
            if isinstance(args[0], dict):
                for key, value in args[0].items():
                    if not (key == "tiles"):
                        setattr(self, key, value)

                if self.tileid is None or self.tileid == "":
                    self.tileid = uuid.uuid4()

                if "tiles" in args[0]:
                    for tile_obj in args[0]["tiles"]:
                        tile = Tile(tile_obj)
                        tile.parenttile = self
                        self.tiles.append(tile)
Example #35
 def validate(self, errors=None):
     for nodeid, value in self.data.iteritems():
         datatype_factory = DataTypeFactory()
         node = models.Node.objects.get(nodeid=nodeid)
         datatype = datatype_factory.get_instance(node.datatype)
         error = datatype.validate(value)
         for error_instance in error:
             if error_instance['type'] == 'ERROR':
                 print(str(error_instance)+" rejected tile with pk: "+ str(self.pk))
                 raise TileValidationError(_("Your tile: {0} ".format(error_instance["message"])))
         if errors != None:
             errors += error
     return errors
Example #36
File: tile.py Project: digimatspa/arches
 def datatype_post_save_actions(self, request=None):
     userid = None
     if request is not None:
         userid = str(request.user.id)
         if hasattr(request.user, "userprofile") is not True:
             models.UserProfile.objects.create(user=request.user)
     tile_data = self.get_tile_data(userid)
     for nodeid, value in list(tile_data.items()):
         datatype_factory = DataTypeFactory()
         node = models.Node.objects.get(nodeid=nodeid)
         datatype = datatype_factory.get_instance(node.datatype)
         if request is not None:
             datatype.handle_request(self, request, node)
Example #37
File: importer.py Project: mrcnc/arches
    def import_business_data(self,
                             file_format=None,
                             business_data=None,
                             mapping=None,
                             overwrite='append',
                             bulk=False,
                             create_concepts=False,
                             create_collections=False):
        reader = None
        start = time()
        cursor = connection.cursor()

        try:
            if file_format == None:
                file_format = self.file_format
            if business_data == None:
                business_data = self.business_data
            if mapping == None:
                mapping = self.mapping
            if file_format == 'json':
                reader = ArchesFileReader()
                reader.import_business_data(business_data, mapping)
            elif file_format == 'csv' or file_format == 'shp' or file_format == 'zip':
                if mapping != None:
                    reader = CsvReader()
                    reader.import_business_data(
                        business_data=business_data,
                        mapping=mapping,
                        overwrite=overwrite,
                        bulk=bulk,
                        create_concepts=create_concepts,
                        create_collections=create_collections)
                else:
                    print '*' * 80
                    print 'ERROR: No mapping file detected. Please indicate one with the \'-c\' paramater or place one in the same directory as your business data.'
                    print '*' * 80
                    sys.exit()

            elapsed = (time() - start)
            print 'Time to import_business_data = {0}'.format(
                datetime.timedelta(seconds=elapsed))

            reader.report_errors()

        finally:
            datatype_factory = DataTypeFactory()
            datatypes = DDataType.objects.all()
            for datatype in datatypes:
                datatype_instance = datatype_factory.get_instance(
                    datatype.datatype)
                datatype_instance.after_update_all()
Example #38
    def import_business_data(self, file_format=None, business_data=None, mapping=None,
                             overwrite='append', bulk=False, create_concepts=False,
                             create_collections=False, use_multiprocessing=False):
        reader = None
        start = time()
        cursor = connection.cursor()

        try:
            if file_format == None:
                file_format = self.file_format
            if business_data == None:
                business_data = self.business_data
            if mapping == None:
                mapping = self.mapping
            if file_format == 'json':
                reader = ArchesFileReader()
                reader.import_business_data(business_data, mapping)
            elif file_format == 'jsonl':
                with open(self.file[0], 'rU') as openf:
                    lines = openf.readlines()
                    if use_multiprocessing is True:
                        pool = Pool(cpu_count())
                        pool.map(import_one_resource, lines)
                        connections.close_all()
                        reader = ArchesFileReader()
                    else:
                        reader = ArchesFileReader()
                        for line in lines:
                            archesresource = JSONDeserializer().deserialize(line)
                            reader.import_business_data({"resources": [archesresource]})
            elif file_format == 'csv' or file_format == 'shp' or file_format == 'zip':
                if mapping != None:
                    reader = CsvReader()
                    reader.import_business_data(business_data=business_data, mapping=mapping, overwrite=overwrite, bulk=bulk, create_concepts=create_concepts, create_collections=create_collections)
                else:
                    print('*'*80)
                    print('ERROR: No mapping file detected. Please indicate one with the \'-c\' paramater or place one in the same directory as your business data.')
                    print('*'*80)
                    sys.exit()

            elapsed = (time() - start)
            print('Time to import_business_data = {0}'.format(datetime.timedelta(seconds=elapsed)))

            reader.report_errors()

        finally:
            datatype_factory = DataTypeFactory()
            datatypes = DDataType.objects.all()
            for datatype in datatypes:
                datatype_instance = datatype_factory.get_instance(datatype.datatype)
                datatype_instance.after_update_all()
Example #39
    def get_primary_descriptor_from_nodes(self, resource, config):
        try:
            for tile in models.TileModel.objects.filter(nodegroup_id=uuid.UUID(config['nodegroup_id']), sortorder=0).filter(resourceinstance_id=resource.resourceinstanceid):
                for node in models.Node.objects.filter(nodegroup_id=uuid.UUID(config['nodegroup_id'])):
                    if str(node.nodeid) in tile.data:
                        datatype_factory = DataTypeFactory()
                        datatype = datatype_factory.get_instance(node.datatype)
                        value = datatype.get_display_value(tile, node)
                        config['string_template'] = config['string_template'].replace('<%s>' % node.name, value)

        except ValueError as e:
            print e, 'invalid nodegroupid'

        return config['string_template']
Example #40
File: tile.py Project: azerbini/eamena
 def save(self, *args, **kwargs):
     request = kwargs.pop('request', None)
     index = kwargs.pop('index', True)
     self.__preSave(request)
     if self.data != {}:
         for nodeid, value in self.data.iteritems():
             datatype_factory = DataTypeFactory()
             datatype = datatype_factory.get_instance(models.Node.objects.get(nodeid=nodeid).datatype)
             datatype.convert_value(self, nodeid)
     super(Tile, self).save(*args, **kwargs)
     if index:
         self.index()
     for tiles in self.tiles.itervalues():
         for tile in tiles:
             tile.resourceinstance = self.resourceinstance
             tile.parenttile = self
             tile.save(*args, request=request, index=index, **kwargs)
Example #41
File: csvfile.py Project: fargeo/arches
    def __init__(self, **kwargs):
        super(CsvWriter, self).__init__(**kwargs)
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in  Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}
        self.single_file = kwargs.pop('single_file', False)
        self.resource_export_configs = self.read_export_configs(kwargs.pop('configs', None))

        if len(self.resource_export_configs) == 0:
            raise MissingConfigException()
Example #42
File: map.py Project: azerbini/eamena
    def get(self, request):
        datatype_factory = DataTypeFactory()
        datatypes = models.DDataType.objects.all()
        widgets = models.Widget.objects.all()
        map_layers = models.MapLayer.objects.all()
        map_sources = models.MapSource.objects.all()
        icons = models.Icon.objects.order_by('name')
        context = self.get_context_data(
            icons=JSONSerializer().serialize(icons),
            datatypes=datatypes,
            widgets=widgets,
            map_layers=map_layers,
            map_sources=map_sources,
            datatypes_json=JSONSerializer().serialize(datatypes),
            main_script='views/map-layer-manager',
        )

        context['geom_nodes_json'] = JSONSerializer().serialize(context['geom_nodes'])
        resource_layers = []
        resource_sources = []
        permissions = {}
        for node in context['geom_nodes']:
            datatype = datatype_factory.get_instance(node.datatype)
            map_layer = datatype.get_map_layer(node=node, preview=True)
            if map_layer is not None:
                resource_layers.append(map_layer)
            map_source = datatype.get_map_source(node=node, preview=True)
            if map_source is not None:
                resource_sources.append(map_source)
            card = Card.objects.get(nodegroup_id=node.nodegroup_id)
            permissions[str(node.pk)] = {
                "users": card.users,
                "groups": card.groups,
            }
        context['resource_map_layers_json'] = JSONSerializer().serialize(resource_layers)
        context['resource_map_sources_json'] = JSONSerializer().serialize(resource_sources)
        context['node_permissions'] = JSONSerializer().serialize(permissions)

        context['nav']['title'] = _('Map Layer Manager')
        context['nav']['icon'] = 'fa-server'
        context['nav']['help'] = (_('Map Layer Manager'),'help/map-manager-help.htm')

        return render(request, 'views/map-layer-manager.htm', context)
Example #43
File: base.py Project: azerbini/eamena
    def get_context_data(self, **kwargs):
        datatype_factory = DataTypeFactory()
        context = super(BaseManagerView, self).get_context_data(**kwargs)
        context['graph_models'] = models.GraphModel.objects.all()
        context['graphs'] = JSONSerializer().serialize(context['graph_models'])
        if 'Resource Editor' in self.request.user.user_groups:
            context['resource_instances'] = Resource.objects.all().order_by('-createdtime')[:100]
        else:
            context['resource_instances'] = []
        context['nav'] = {
            'icon':'fa fa-chevron-circle-right',
            'title':'',
            'help':('',''),
            'menu':False,
            'search':True,
            'res_edit':False,
            'edit_history':True,
            'login':True,
            'print':False,
        }
        geom_datatypes = [d.pk for d in models.DDataType.objects.filter(isgeometric=True)]
        geom_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=geom_datatypes)
        resource_layers = []
        resource_sources = []
        for node in geom_nodes:
            # TODO: check user node level permissions here, if user does not
            # have read permissions on this node, then do not create map layer
            # or source
            datatype = datatype_factory.get_instance(node.datatype)
            map_source = datatype.get_map_source(node)
            if map_source is not None:
                resource_sources.append(map_source)
            map_layer = datatype.get_map_layer(node)
            if map_layer is not None:
                resource_layers.append(map_layer)

        context['geom_nodes'] = geom_nodes
        context['resource_map_layers'] = resource_layers
        context['resource_map_sources'] = resource_sources
        context['iiif_manifests'] = models.IIIFManifest.objects.all()

        return context
Example #44
def get_tileserver_config(layer_id):
    database = settings.DATABASES['default']
    datatype_factory = DataTypeFactory()

    try:
        node = models.Node.objects.get(pk=layer_id)
        # TODO: check user node permissions here, if  user
        # does not have read access to this node, fire an exception
        datatype = datatype_factory.get_instance(node.datatype)
        layer_config = datatype.get_layer_config(node)
    except Exception:
        layer_model = models.TileserverLayer.objects.get(name=layer_id)
        layer_config = layer_model.config

    config_dict = {
        "cache": settings.TILE_CACHE_CONFIG,
        "layers": {}
    }
    config_dict["layers"][str(layer_id)] = layer_config
    return config_dict
Example #45
File: base.py Project: mradamcox/arches
    def get_context_data(self, **kwargs):
        datatype_factory = DataTypeFactory()
        context = super(BaseManagerView, self).get_context_data(**kwargs)
        context['system_settings_graphid'] = settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID
        context['graph_models'] = models.GraphModel.objects.all().exclude(graphid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID)
        context['graphs'] = JSONSerializer().serialize(context['graph_models'])
        context['createable_resources'] = JSONSerializer().serialize(get_createable_resource_types(self.request.user))
        context['nav'] = {
            'icon':'fa fa-chevron-circle-right',
            'title':'',
            'help':('',''),
            'menu':False,
            'search':True,
            'res_edit':False,
            'login':True,
            'print':False,
        }
        context['use_semantic_relationships'] = settings.USE_SEMANTIC_RESOURCE_RELATIONSHIPS

        geom_datatypes = [d.pk for d in models.DDataType.objects.filter(isgeometric=True)]
        geom_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=geom_datatypes).exclude(graph__graphid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID)
        resource_layers = []
        resource_sources = []
        for node in geom_nodes:
            if self.request.user.has_perm('read_nodegroup', node.nodegroup):
                datatype = datatype_factory.get_instance(node.datatype)
                map_source = datatype.get_map_source(node)
                if map_source is not None:
                    resource_sources.append(map_source)
                map_layer = datatype.get_map_layer(node)
                if map_layer is not None:
                    resource_layers.append(map_layer)

        context['app_name'] = settings.APP_NAME
        context['geom_nodes'] = geom_nodes
        context['resource_map_layers'] = resource_layers
        context['resource_map_sources'] = resource_sources
        context['iiif_manifests'] = models.IIIFManifest.objects.all()

        return context
Example #46
File: base.py Project: fargeo/arches
    def get_context_data(self, **kwargs):
        context = super(MapBaseManagerView, self).get_context_data(**kwargs)
        datatype_factory = DataTypeFactory()
        geom_datatypes = [d.pk for d in models.DDataType.objects.filter(isgeometric=True)]
        geom_nodes = models.Node.objects.filter(graph__isresource=True, graph__isactive=True, datatype__in=geom_datatypes).exclude(graph__graphid=settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID)
        resource_layers = []
        resource_sources = []
        for node in geom_nodes:
            if self.request.user.has_perm('read_nodegroup', node.nodegroup):
                datatype = datatype_factory.get_instance(node.datatype)
                map_source = datatype.get_map_source(node)
                if map_source is not None:
                    resource_sources.append(map_source)
                    map_layer = datatype.get_map_layer(node)
                    if map_layer is not None:
                        resource_layers.append(map_layer)

        context['geom_nodes'] = geom_nodes
        context['resource_map_layers'] = resource_layers
        context['resource_map_sources'] = resource_sources

        return context
Example #47
File: tileserver.py Project: fargeo/arches
def get_tileserver_config(layer_id, request=None):
    database = settings.DATABASES['default']
    datatype_factory = DataTypeFactory()

    try:
        node = models.Node.objects.get(pk=layer_id)
        datatype = datatype_factory.get_instance(node.datatype)
        if request == None or request.user.has_perm('read_nodegroup', node.nodegroup):
            layer_config = datatype.get_layer_config(node)
        else:
            layer_config = datatype.get_layer_config(None)
    except Exception:
        layer_model = models.TileserverLayer.objects.get(name=layer_id)
        layer_config = layer_model.config

    tile_cache_config = settings.TILE_CACHE_CONFIG

    config_dict = {
        "cache": tile_cache_config,
        "layers": {}
    }
    config_dict["layers"][str(layer_id)] = layer_config
    return config_dict
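A short usage sketch for the function above; some_geometry_nodeid is a hypothetical Node primary key, and the optional request argument is omitted so the permission check is skipped, exactly as the code allows.

config_dict = get_tileserver_config(some_geometry_nodeid)
# config_dict has the shape {'cache': settings.TILE_CACHE_CONFIG, 'layers': {'<layer_id>': {...}}}
layer_config = config_dict['layers'][str(some_geometry_nodeid)]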
Example #48
    def import_business_data(self, file_format=None, business_data=None, mapping=None, overwrite='append', bulk=False):
        reader = None
        start = time()
        cursor = connection.cursor()

        try:
            if file_format == None:
                file_format = self.file_format
            if business_data == None:
                business_data = self.business_data
            if mapping == None:
                mapping = self.mapping
            if file_format == 'json':
                reader = ArchesFileReader()
                reader.import_business_data(business_data, mapping)
            elif file_format == 'csv':
                if mapping != None:
                    reader = CsvReader()
                    reader.import_business_data(business_data=business_data, mapping=mapping, overwrite=overwrite, bulk=bulk)
                else:
                    print '*'*80
                    print 'ERROR: No mapping file detected. Please indicate one with the \'-c\' parameter or place one in the same directory as your business data.'
                    print '*'*80
                    sys.exit()

            elapsed = (time() - start)
            print 'Time to import_business_data = {0}'.format(datetime.timedelta(seconds=elapsed))

            reader.report_errors()

        finally:
            datatype_factory = DataTypeFactory()
            datatypes = DDataType.objects.all()
            for datatype in datatypes:
                datatype_instance = datatype_factory.get_instance(datatype.datatype)
                datatype_instance.after_update_all()
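A usage sketch for the importer method above. The class name, variable names and data are assumptions for illustration; the keyword arguments mirror the signature shown.

importer = BusinessDataImporter()  # assumed importer class exposing import_business_data()
importer.import_business_data(
    file_format='csv',
    business_data=parsed_csv_rows,   # hypothetical: the parsed rows of the business data file
    mapping=parsed_mapping,          # hypothetical: the parsed .mapping JSON for the target model
    overwrite='append',
    bulk=False,
)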
Example #49
File: csvfile.py Project: azerbini/eamena
class CsvWriter(Writer):

    def __init__(self):
        super(CsvWriter, self).__init__()
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in  Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}

    def transform_value_for_export(self, datatype, value, concept_export_value_type):
        datatype_instance = self.datatype_factory.get_instance(datatype)
        value = datatype_instance.transform_export_values(value, concept_export_value_type=concept_export_value_type)
        return value

    def write_resources(self, resources, resource_export_configs=None):
        csv_records = []
        other_group_records = []
        mapping = {}
        concept_export_value_lookup = {}
        for resource_export_config in resource_export_configs:
            for node in resource_export_config['nodes']:
                if node['file_field_name'] != '':
                    mapping[node['arches_nodeid']] = node['file_field_name']
                if 'concept_export_value' in node:
                    concept_export_value_lookup[node['arches_nodeid']] = node['concept_export_value']
        csv_header = ['ResourceID'] + mapping.values()
        csvs_for_export = []

        for resource in resources:
            csv_record = {}
            other_group_record = {}
            resourceid = resource['_source']['resourceinstanceid']
            resource_graphid = resource['_source']['graph_id']
            legacyid = resource['_source']['legacyid']
            csv_record['ResourceID'] = resourceid
            other_group_record['ResourceID'] = resourceid

            for tile in resource['_source']['tiles']:
                if tile['data'] != {}:
                    for k in tile['data'].keys():
                        if tile['data'][k] != '' and k in mapping:
                            if mapping[k] not in csv_record:
                                concept_export_value_type = None
                                if k in concept_export_value_lookup:
                                    concept_export_value_type = concept_export_value_lookup[k]
                                value = self.transform_value_for_export(self.node_datatypes[k], tile['data'][k], concept_export_value_type)
                                csv_record[mapping[k]] = value
                                del tile['data'][k]
                            else:
                                # look up the export value type here as well so it is not
                                # left undefined (or carried over) from an earlier key
                                concept_export_value_type = None
                                if k in concept_export_value_lookup:
                                    concept_export_value_type = concept_export_value_lookup[k]
                                value = self.transform_value_for_export(self.node_datatypes[k], tile['data'][k], concept_export_value_type)
                                other_group_record[mapping[k]] = value
                        else:
                            del tile['data'][k]

            csv_records.append(csv_record)
            if other_group_record != {}:
                other_group_records.append(other_group_record)


        csv_name_prefix = resource_export_configs[0]['resource_model_name']
        iso_date = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        csv_name = os.path.join('{0}_{1}.{2}'.format(csv_name_prefix, iso_date, 'csv'))
        dest = StringIO()
        csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
        csvwriter.writeheader()
        csvs_for_export.append({'name':csv_name, 'outputfile': dest})
        for csv_record in csv_records:
            csvwriter.writerow({k:str(v) for k,v in csv_record.items()})

        dest = StringIO()
        csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
        csvwriter.writeheader()
        csvs_for_export.append({'name':csv_name + '_groups', 'outputfile': dest})
        for csv_record in other_group_records:
            csvwriter.writerow({k:str(v) for k,v in csv_record.items()})

        return csvs_for_export
Example #50
File: csvfile.py Project: fargeo/arches
class CsvWriter(Writer):

    def __init__(self, **kwargs):
        super(CsvWriter, self).__init__(**kwargs)
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in  Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}
        self.single_file = kwargs.pop('single_file', False)
        self.resource_export_configs = self.read_export_configs(kwargs.pop('configs', None))

        if len(self.resource_export_configs) == 0:
            raise MissingConfigException()

    def read_export_configs(self, configs):
        '''
        Reads the export configuration file or object and adds an array for records to store property data
        '''
        if configs:
            resource_export_configs = json.load(open(configs, 'r'))
            configs = [resource_export_configs]
        else:
            configs = []
            for val in GraphXMapping.objects.values('mapping'):
                configs.append(val['mapping'])

        return configs

    def transform_value_for_export(self, datatype, value, concept_export_value_type, node):
        datatype_instance = self.datatype_factory.get_instance(datatype)
        value = datatype_instance.transform_export_values(value, concept_export_value_type=concept_export_value_type, node=node)
        return value

    def write_resources(self, graph_id=None, resourceinstanceids=None):
        graph_id = self.resource_export_configs[0]['resource_model_id']
        super(CsvWriter, self).write_resources(graph_id=graph_id, resourceinstanceids=resourceinstanceids)

        csv_records = []
        other_group_records = []
        mapping = {}
        concept_export_value_lookup = {}
        for resource_export_config in self.resource_export_configs:
            for node in resource_export_config['nodes']:
                if node['file_field_name'] != '' and node['export'] == True:
                    mapping[node['arches_nodeid']] = node['file_field_name']
                if 'concept_export_value' in node:
                    concept_export_value_lookup[node['arches_nodeid']] = node['concept_export_value']
        csv_header = ['ResourceID'] + mapping.values()
        csvs_for_export = []

        for resourceinstanceid, tiles in self.resourceinstances.iteritems():
            csv_record = {}
            csv_record['ResourceID'] = resourceinstanceid
            csv_record['populated_node_groups'] = []

            tiles = sorted(tiles, key=lambda k: k.parenttile_id)
            for tile in tiles:
                other_group_record = {}
                other_group_record['ResourceID'] = resourceinstanceid
                if tile.data != {}:
                    for k in tile.data.keys():
                        if tile.data[k] != '' and k in mapping and tile.data[k] != None:
                            if mapping[k] not in csv_record and tile.nodegroup_id not in csv_record['populated_node_groups']:
                                concept_export_value_type = None
                                if k in concept_export_value_lookup:
                                    concept_export_value_type = concept_export_value_lookup[k]
                                if tile.data[k] != None:
                                    value = self.transform_value_for_export(self.node_datatypes[k], tile.data[k], concept_export_value_type, k)
                                    csv_record[mapping[k]] = value
                                del tile.data[k]
                            else:
                                concept_export_value_type = None
                                if k in concept_export_value_lookup:
                                    concept_export_value_type = concept_export_value_lookup[k]
                                value = self.transform_value_for_export(self.node_datatypes[k], tile.data[k], concept_export_value_type, k)
                                other_group_record[mapping[k]] = value
                        else:
                            del tile.data[k]

                    csv_record['populated_node_groups'].append(tile.nodegroup_id)

                if other_group_record != {'ResourceID': resourceinstanceid}:
                    other_group_records.append(other_group_record)

            if csv_record != {'ResourceID': resourceinstanceid}:
                csv_records.append(csv_record)

        iso_date = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        file_name = '{0}_{1}'.format(self.file_prefix, iso_date)
        csv_name = os.path.join('{0}.{1}'.format(file_name, 'csv'))

        if self.single_file != True:
            dest = StringIO()
            csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
            csvwriter.writeheader()
            csvs_for_export.append({'name':csv_name, 'outputfile': dest})
            for csv_record in csv_records:
                if 'populated_node_groups' in csv_record:
                    del csv_record['populated_node_groups']
                csvwriter.writerow({k:str(v) for k,v in csv_record.items()})

            dest = StringIO()
            csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
            csvwriter.writeheader()
            csvs_for_export.append({'name':csv_name.split('.')[0] + '_groups.' + csv_name.split('.')[1], 'outputfile': dest})
            for csv_record in other_group_records:
                if 'populated_node_groups' in csv_record:
                    del csv_record['populated_node_groups']
                csvwriter.writerow({k:str(v) for k,v in csv_record.items()})
        elif self.single_file == True:
            all_records = csv_records + other_group_records
            all_records = sorted(all_records, key=lambda k: k['ResourceID'])
            dest = StringIO()
            csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
            csvwriter.writeheader()
            csvs_for_export.append({'name':csv_name, 'outputfile': dest})
            for csv_record in all_records:
                if 'populated_node_groups' in csv_record:
                    del csv_record['populated_node_groups']
                csvwriter.writerow({k:str(v) for k,v in csv_record.items()})

        if self.graph_id != None:
            csvs_for_export = csvs_for_export + self.write_resource_relations(file_name=file_name)

        return csvs_for_export

    def write_resource_relations(self, file_name):
        resourceids = self.resourceinstances.keys()
        relations_file = []

        if self.graph_id != settings.SYSTEM_SETTINGS_RESOURCE_MODEL_ID:
            dest = StringIO()
            csv_header = ['resourcexid','resourceinstanceidfrom','resourceinstanceidto','relationshiptype','datestarted','dateended','notes']
            csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
            csvwriter.writeheader()
            csv_name = os.path.join('{0}.{1}'.format(file_name, 'relations'))
            relations_file.append({'name':csv_name, 'outputfile': dest})

            relations = ResourceXResource.objects.filter(Q(resourceinstanceidfrom__in=resourceids)|Q(resourceinstanceidto__in=resourceids)).values(*csv_header)
            for relation in relations:
                relation['datestarted'] = relation['datestarted'] if relation['datestarted'] != None else ''
                relation['dateended'] = relation['dateended'] if relation['dateended'] != None else ''
                relation['notes'] = relation['notes'] if relation['notes'] != None else ''
                csvwriter.writerow({k:str(v) for k,v in relation.items()})

        return relations_file
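A usage sketch for this CsvWriter; the mapping file path and resource id are hypothetical, and the base Writer is assumed to populate self.resourceinstances, self.file_prefix and self.graph_id during write_resources, as the code above relies on.

writer = CsvWriter(configs='Heritage Resource.mapping', single_file=True)
exported = writer.write_resources(resourceinstanceids=['a-resource-uuid'])
for f in exported:
    print f['name']                         # e.g. '<file_prefix>_<timestamp>.csv'
    csv_text = f['outputfile'].getvalue()   # each entry wraps a StringIO buffer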
Example #51
File: csvfile.py Project: fargeo/arches
    def __init__(self, **kwargs):
        super(TileCsvWriter, self).__init__(**kwargs)
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}
Example #52
File: csvfile.py Project: fargeo/arches
class TileCsvWriter(Writer):

    def __init__(self, **kwargs):
        super(TileCsvWriter, self).__init__(**kwargs)
        self.datatype_factory = DataTypeFactory()
        self.node_datatypes = {str(nodeid): datatype for nodeid, datatype in  Node.objects.values_list('nodeid', 'datatype').filter(~Q(datatype='semantic'), graph__isresource=True)}

    def transform_value_for_export(self, datatype, value, concept_export_value_type, node):
        datatype_instance = self.datatype_factory.get_instance(datatype)
        value = datatype_instance.transform_export_values(value, concept_export_value_type=concept_export_value_type, node=node)
        return value

    def write_resources(self, graph_id=None, resourceinstanceids=None):
        super(TileCsvWriter, self).write_resources(graph_id=graph_id, resourceinstanceids=resourceinstanceids)

        csv_records = []
        other_group_records = []
        concept_export_value_lookup = {}
        csv_header = ['ResourceID']
        mapping = {}
        nodes = Node.objects.filter(graph_id=graph_id)
        for node in nodes:
            mapping[str(node.nodeid)] = node.name
        csv_header = ['ResourceID', 'ResourceLegacyID', 'ResourceModelID', 'TileID', 'ParentTileID', 'NodeGroupID' ] + mapping.values()
        csvs_for_export = []

        for resourceinstanceid, tiles in self.resourceinstances.iteritems():
            tiles = sorted(tiles, key=lambda k: k.parenttile_id)
            for tile in tiles:
                csv_record = {}
                csv_record['ResourceID'] = resourceinstanceid
                csv_record['ResourceModelID'] = graph_id
                csv_record['TileID'] = tile.tileid
                csv_record['ParentTileID'] = str(tile.parenttile_id)
                csv_record['NodeGroupID'] = str(tile.nodegroup_id)
                for k in tile.data.keys():
                    resource_instance = ResourceInstance.objects.get(resourceinstanceid=resourceinstanceid)
                    csv_record['ResourceLegacyID'] = str(resource_instance.legacyid) if resource_instance.legacyid is not None else str(resource_instance.resourceinstanceid)
                    if tile.data[k] != '' and tile.data[k] != None:
                        concept_export_value_type = 'label'
                        if k in concept_export_value_lookup:
                            concept_export_value_type = concept_export_value_lookup[k]
                        if tile.data[k] != None:
                            value = self.transform_value_for_export(self.node_datatypes[k], tile.data[k], concept_export_value_type, k)
                            csv_record[mapping[k]] = value
                        del tile.data[k]
                    else:
                        del tile.data[k]

                if csv_record != {'ResourceID': resourceinstanceid}:
                    csv_records.append(csv_record)

        iso_date = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        file_name = '{0}_{1}'.format(self.file_prefix, iso_date)
        csv_name = os.path.join('{0}.{1}'.format(file_name, 'csv'))

        all_records = sorted(csv_records, key=lambda k: k['ResourceID'])
        dest = StringIO()
        csvwriter = csv.DictWriter(dest, delimiter=',', fieldnames=csv_header)
        csvwriter.writeheader()
        csvs_for_export.append({'name':csv_name, 'outputfile': dest})
        for csv_record in all_records:
            if 'populated_node_groups' in csv_record:
                del csv_record['populated_node_groups']
            csvwriter.writerow({k:str(v) for k,v in csv_record.items()})

        if self.graph_id != None:
            csvs_for_export = csvs_for_export

        return csvs_for_export
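A usage sketch for TileCsvWriter, which emits one CSV row per tile rather than per resource; the graph id below is a hypothetical resource model uuid.

writer = TileCsvWriter()
exported = writer.write_resources(graph_id='a-resource-model-uuid')
tile_csv = exported[0]['outputfile'].getvalue()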
Example #53
File: search.py Project: mradamcox/arches
def build_search_results_dsl(request):
    term_filter = request.GET.get('termFilter', '')
    spatial_filter = JSONDeserializer().deserialize(request.GET.get('mapFilter', '{}'))
    export = request.GET.get('export', None)
    page = 1 if request.GET.get('page') == '' else int(request.GET.get('page', 1))
    temporal_filter = JSONDeserializer().deserialize(request.GET.get('temporalFilter', '{}'))
    advanced_filters = JSONDeserializer().deserialize(request.GET.get('advanced', '[]'))
    search_buffer = None
    se = SearchEngineFactory().create()

    if export != None:
        limit = settings.SEARCH_EXPORT_ITEMS_PER_PAGE
    else:
        limit = settings.SEARCH_ITEMS_PER_PAGE

    query = Query(se, start=limit*int(page-1), limit=limit)
    nested_agg = NestedAgg(path='points', name='geo_aggs')
    nested_agg.add_aggregation(GeoHashGridAgg(field='points.point', name='grid', precision=settings.HEX_BIN_PRECISION))
    nested_agg.add_aggregation(GeoBoundsAgg(field='points.point', name='bounds'))
    query.add_aggregation(nested_agg)

    search_query = Bool()
    permitted_nodegroups = get_permitted_nodegroups(request.user)

    if term_filter != '':
        for term in JSONDeserializer().deserialize(term_filter):
            term_query = Bool()
            if term['type'] == 'term' or term['type'] == 'string':
                string_filter = Bool()
                if term['type'] == 'term':
                    string_filter.must(Match(field='strings.string', query=term['value'], type='phrase'))
                elif term['type'] == 'string':
                    string_filter.should(Match(field='strings.string', query=term['value'], type='phrase_prefix'))
                    string_filter.should(Match(field='strings.string.folded', query=term['value'], type='phrase_prefix'))

                string_filter.filter(Terms(field='strings.nodegroup_id', terms=permitted_nodegroups))
                nested_string_filter = Nested(path='strings', query=string_filter)
                if term['inverted']:
                    search_query.must_not(nested_string_filter)
                else:
                    search_query.must(nested_string_filter)
                    # need to set min_score because the query returns results with score 0 and those have to be removed, which I don't think it should be doing
                    query.min_score('0.01')
            elif term['type'] == 'concept':
                concept_ids = _get_child_concepts(term['value'])
                conceptid_filter = Bool()
                conceptid_filter.filter(Terms(field='domains.conceptid', terms=concept_ids))
                conceptid_filter.filter(Terms(field='domains.nodegroup_id', terms=permitted_nodegroups))
                nested_conceptid_filter = Nested(path='domains', query=conceptid_filter)
                if term['inverted']:
                    search_query.must_not(nested_conceptid_filter)
                else:
                    search_query.filter(nested_conceptid_filter)

    if 'features' in spatial_filter:
        if len(spatial_filter['features']) > 0:
            feature_geom = spatial_filter['features'][0]['geometry']
            feature_properties = spatial_filter['features'][0]['properties']
            buffer = {'width':0,'unit':'ft'}
            if 'buffer' in feature_properties:
                buffer = feature_properties['buffer']
            search_buffer = _buffer(feature_geom, buffer['width'], buffer['unit'])
            feature_geom = JSONDeserializer().deserialize(search_buffer.json)
            geoshape = GeoShape(field='geometries.geom.features.geometry', type=feature_geom['type'], coordinates=feature_geom['coordinates'] )

            invert_spatial_search = False
            if 'inverted' in feature_properties:
                invert_spatial_search = feature_properties['inverted']

            spatial_query = Bool()
            if invert_spatial_search == True:
                spatial_query.must_not(geoshape)
            else:
                spatial_query.filter(geoshape)

            # get the nodegroup_ids that the user has permission to search
            spatial_query.filter(Terms(field='geometries.nodegroup_id', terms=permitted_nodegroups))
            search_query.filter(Nested(path='geometries', query=spatial_query))

    if 'fromDate' in temporal_filter and 'toDate' in temporal_filter:
        now = str(datetime.utcnow())
        start_date = SortableDate(temporal_filter['fromDate'])
        end_date = SortableDate(temporal_filter['toDate'])
        date_nodeid = str(temporal_filter['dateNodeId']) if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '' else None
        query_inverted = False if 'inverted' not in temporal_filter else temporal_filter['inverted']

        temporal_query = Bool()

        if query_inverted:
            # inverted date searches need to use an OR clause and are generally more complicated to structure (can't use ES must_not)
            # eg: less than START_DATE OR greater than END_DATE
            inverted_date_query = Bool()
            inverted_date_ranges_query = Bool()

            if start_date.is_valid():
                inverted_date_query.should(Range(field='dates.date', lt=start_date.as_float()))
                inverted_date_ranges_query.should(Range(field='date_ranges.date_range', lt=start_date.as_float()))
            if end_date.is_valid():
                inverted_date_query.should(Range(field='dates.date', gt=end_date.as_float()))
                inverted_date_ranges_query.should(Range(field='date_ranges.date_range', gt=end_date.as_float()))

            date_query = Bool()
            date_query.filter(inverted_date_query)
            date_query.filter(Terms(field='dates.nodegroup_id', terms=permitted_nodegroups))
            if date_nodeid:
                date_query.filter(Term(field='dates.nodeid', term=date_nodeid))
            else:
                date_ranges_query = Bool()
                date_ranges_query.filter(inverted_date_ranges_query)
                date_ranges_query.filter(Terms(field='date_ranges.nodegroup_id', terms=permitted_nodegroups))
                temporal_query.should(Nested(path='date_ranges', query=date_ranges_query))
            temporal_query.should(Nested(path='dates', query=date_query))

        else:
            date_query = Bool()
            date_query.filter(Range(field='dates.date', gte=start_date.as_float(), lte=end_date.as_float()))
            date_query.filter(Terms(field='dates.nodegroup_id', terms=permitted_nodegroups))
            if date_nodeid:
                date_query.filter(Term(field='dates.nodeid', term=date_nodeid))
            else:
                date_ranges_query = Bool()
                date_ranges_query.filter(Range(field='date_ranges.date_range', gte=start_date.as_float(), lte=end_date.as_float(), relation='intersects'))
                date_ranges_query.filter(Terms(field='date_ranges.nodegroup_id', terms=permitted_nodegroups))
                temporal_query.should(Nested(path='date_ranges', query=date_ranges_query))
            temporal_query.should(Nested(path='dates', query=date_query))


        search_query.filter(temporal_query)
        #print search_query.dsl

    datatype_factory = DataTypeFactory()
    if len(advanced_filters) > 0:
        advanced_query = Bool()
        grouped_query = Bool()
        grouped_queries = [grouped_query]
        for index, advanced_filter in enumerate(advanced_filters):
            tile_query = Bool()
            for key, val in advanced_filter.iteritems():
                if key != 'op':
                    node = models.Node.objects.get(pk=key)
                    if request.user.has_perm('read_nodegroup', node.nodegroup):
                        datatype = datatype_factory.get_instance(node.datatype)
                        datatype.append_search_filters(val, node, tile_query, request)
            nested_query = Nested(path='tiles', query=tile_query)
            if advanced_filter['op'] == 'or' and index != 0:
                grouped_query = Bool()
                grouped_queries.append(grouped_query)
            grouped_query.must(nested_query)
        for grouped_query in grouped_queries:
            advanced_query.should(grouped_query)
        search_query.must(advanced_query)

    query.add_query(search_query)
    if search_buffer != None:
        search_buffer = search_buffer.geojson
    return {'query': query, 'search_buffer':search_buffer}
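A request-building sketch for the function above, using Django's RequestFactory in a shell or test; the user and search term are placeholders, and only the GET parameters the function actually reads are supplied.

import json
from django.test import RequestFactory

factory = RequestFactory()
request = factory.get('/search/resources', {
    'termFilter': json.dumps([{'type': 'string', 'value': 'church', 'inverted': False}]),
    'mapFilter': '{}',
    'temporalFilter': '{}',
    'advanced': '[]',
    'page': '1',
})
request.user = some_user  # hypothetical: a user whose permitted nodegroups drive the filters

result = build_search_results_dsl(request)
query = result['query']                  # arches Query object wrapping the Elasticsearch DSL
search_buffer = result['search_buffer']  # GeoJSON of the buffered spatial filter, or None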