Code Example #1
File: resources.py Project: mradamcox/afrh
def related_resources(request, resourceid):

    ## get allowed resource types based on permissions
    allowedtypes = get_allowed_types(request)
    is_anon = False
    if request.user.username == "anonymous":
        is_anon = True
    
    if request.method == 'GET':
        lang = request.GET.get('lang', settings.LANGUAGE_CODE)
        start = request.GET.get('start', 0)
        resources = get_related_resources(resourceid, lang, start=start, limit=15, allowedtypes=allowedtypes, is_anon=is_anon)
        return JSONResponse(resources, indent=4)
    
    if 'edit' in request.user.user_groups and request.method == 'DELETE':
        se = SearchEngineFactory().create()
        data = JSONDeserializer().deserialize(request.body) 
        entityid1 = data.get('entityid1')
        entityid2 = data.get('entityid2')
        resourcexid = data.get('resourcexid')
        realtionshiptype = data.get('realtionshiptype')
        resource = Resource(entityid1)
        resource.delete_resource_relationship(entityid2, realtionshiptype)
        se.delete(index='resource_relations', doc_type='all', id=resourcexid)
        return JSONResponse({ 'success': True })
Code Example #2
    def __init__(self, file=None):
        self.graphs = ""
        self.reference_data = ""
        self.business_data = ""

        if not file:
            file = settings.RESOURCE_GRAPH_LOCATIONS
        else:
            file = [file]

        for path in file:
            if os.path.exists(path):
                if isfile(join(path)):
                    with open(file[0], "rU") as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if "graph" in archesfile.keys():
                            self.graphs = archesfile["graph"]
                        if "reference_data" in archesfile.keys():
                            self.reference_data = archesfile["reference_data"]
                        if "business_data" in archesfile.keys():
                            self.business_data = archesfile["business_data"]
                else:
                    print str(file) + " is not a valid file"
            else:
                print path + " is not a valid path"
Code Example #3
File: graph.py Project: azerbini/eamena
    def delete(self, request, graphid):
        data = JSONDeserializer().deserialize(request.body)
        if data and self.action == 'delete_node':
            graph = Graph.objects.get(graphid=graphid)
            graph.delete_node(node=data.get('nodeid', None))
            return JSONResponse({})

        return HttpResponseNotFound()
Code Example #4
File: graph.py Project: pierrechoffe/arches
def delete_node(request, graphid):
    data = JSONDeserializer().deserialize(request.body)
    if data:
        if request.method == 'DELETE':
            graph = Graph.objects.get(graphid=graphid)
            graph.delete_node(node=data.get('nodeid', None))
            return JSONResponse({})

    return HttpResponseNotFound()
Code Example #5
File: graph.py Project: fargeo/arches
    def delete(self, request, graphid):
        data = JSONDeserializer().deserialize(request.body)
        if data and self.action == 'delete_node':
            try:
                graph = Graph.objects.get(graphid=graphid)
                graph.delete_node(node=data.get('nodeid', None))
                return JSONResponse({})
            except GraphValidationError as e:
                return JSONResponse({'status':'false','message':e.message, 'title':e.title}, status=500)

        return HttpResponseNotFound()
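
For reference, the JSON body these delete views deserialize just carries a nodeid; a client call might look roughly like the sketch below (the URL route, host, and ids are placeholders, not taken from the examples, and CSRF handling is ignored):

import json
import requests

graph_id = "00000000-0000-0000-0000-000000000000"  # placeholder graph id
node_id = "11111111-1111-1111-1111-111111111111"   # placeholder node id

# Hypothetical endpoint path; the real route is defined in the project's urls.py.
url = "http://localhost:8000/graph/{0}/delete_node".format(graph_id)
response = requests.delete(url, data=json.dumps({"nodeid": node_id}))
print(response.status_code)  # 200 on success, 404 when no body is sent, 500 on GraphValidationError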
Code Example #6
File: resource.py Project: fargeo/arches
    def get_documents_to_index(self, fetchTiles=True, datatype_factory=None, node_datatypes=None):
        """
        Gets all the documents necessary to index a single resource
        returns a tuple of a document and list of terms

        Keyword Arguments:
        fetchTiles -- instead of fetching the tiles from the database get them off the model itself
        datatype_factory -- reference to the DataTypeFactory instance
        node_datatypes -- a dictionary of datatypes keyed to node ids

        """

        document = JSONSerializer().serializeToPython(self)
        tiles = list(models.TileModel.objects.filter(resourceinstance=self)) if fetchTiles else self.tiles
        document['tiles'] = tiles
        document['strings'] = []
        document['dates'] = []
        document['domains'] = []
        document['geometries'] = []
        document['points'] = []
        document['numbers'] = []
        document['date_ranges'] = []
        document['provisional'] = True if sum([len(t.data) for t in tiles]) == 0 else False

        terms = []

        for tile in document['tiles']:
            for nodeid, nodevalue in tile.data.iteritems():
                datatype = node_datatypes[nodeid]
                if nodevalue != '' and nodevalue != [] and nodevalue != {} and nodevalue is not None:
                    datatype_instance = datatype_factory.get_instance(datatype)
                    datatype_instance.append_to_document(document, nodevalue, nodeid, tile)
                    node_terms = datatype_instance.get_search_terms(nodevalue, nodeid)
                    for index, term in enumerate(node_terms):
                        terms.append({'_id':unicode(nodeid)+unicode(tile.tileid)+unicode(index), '_source': {'value': term, 'nodeid': nodeid, 'nodegroupid': tile.nodegroup_id, 'tileid': tile.tileid, 'resourceinstanceid':tile.resourceinstance_id, 'provisional': False}})

            if tile.provisionaledits is not None:
                provisionaledits = JSONDeserializer().deserialize(tile.provisionaledits)
                if len(provisionaledits) > 0:
                    if document['provisional'] == False:
                        document['provisional'] = 'partial'
                    for user, edit in provisionaledits.iteritems():
                        if edit['status'] == 'review':
                            for nodeid, nodevalue in edit['value'].iteritems():
                                datatype = node_datatypes[nodeid]
                                if nodevalue != '' and nodevalue != [] and nodevalue != {} and nodevalue is not None:
                                    datatype_instance = datatype_factory.get_instance(datatype)
                                    datatype_instance.append_to_document(document, nodevalue, nodeid, tile, True)
                                    node_terms = datatype_instance.get_search_terms(nodevalue, nodeid)
                                    for index, term in enumerate(node_terms):
                                        terms.append({'_id':unicode(nodeid)+unicode(tile.tileid)+unicode(index), '_source': {'value': term, 'nodeid': nodeid, 'nodegroupid': tile.nodegroup_id, 'tileid': tile.tileid, 'resourceinstanceid':tile.resourceinstance_id, 'provisional': True}})


        return document, terms
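
A hedged sketch of how a caller might assemble the keyword arguments described in the docstring above; the import paths and the node_datatypes construction are assumptions, not shown in the example:

from arches.app.datatypes.datatypes import DataTypeFactory
from arches.app.models import models

def build_index_documents(resource):
    # Hypothetical caller: build the nodeid -> datatype lookup once, then ask the
    # resource for the search document and term entries it wants indexed.
    datatype_factory = DataTypeFactory()
    node_datatypes = {
        str(nodeid): datatype
        for nodeid, datatype in models.Node.objects.values_list("nodeid", "datatype")
    }
    return resource.get_documents_to_index(
        fetchTiles=True,
        datatype_factory=datatype_factory,
        node_datatypes=node_datatypes,
    )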
Code Example #7
File: graph.py Project: fargeo/arches
    def post(self, request, graphid=None):
        ret = {}

        try:
            if self.action == 'import_graph':
                graph_file = request.FILES.get('importedGraph').read()
                graphs = JSONDeserializer().deserialize(graph_file)['graph']
                ret = GraphImporter.import_graph(graphs)
            else:
                if graphid is not None:
                    graph = Graph.objects.get(graphid=graphid)
                data = JSONDeserializer().deserialize(request.body)

                if self.action == 'new_graph':
                    isresource = data['isresource'] if 'isresource' in data else False
                    name = _('New Resource Model') if isresource else _('New Branch')
                    author = request.user.first_name + ' ' + request.user.last_name
                    ret = Graph.new(name=name,is_resource=isresource,author=author)

                elif self.action == 'update_node':
                    graph.update_node(data)
                    ret = graph
                    graph.save()

                elif self.action == 'update_node_layer':
                    nodeid = uuid.UUID(str(data.get('nodeid')))
                    node = graph.nodes[nodeid]
                    node.config = data['config']
                    ret = graph
                    node.save()

                elif self.action == 'append_branch':
                    ret = graph.append_branch(data['property'], nodeid=data['nodeid'], graphid=data['graphid'])
                    graph.save()

                elif self.action == 'move_node':
                    ret = graph.move_node(data['nodeid'], data['property'], data['newparentnodeid'])
                    graph.save()

                elif self.action == 'clone_graph':
                    clone_data = graph.copy()
                    ret = clone_data['copy']
                    ret.save()
                    ret.copy_functions(graph, [clone_data['nodes'], clone_data['nodegroups']])
                    form_map = ret.copy_forms(graph, clone_data['cards'])
                    ret.copy_reports(graph, [form_map, clone_data['cards'], clone_data['nodes']])

            return JSONResponse(ret)
        except GraphValidationError as e:
            return JSONResponse({'status':'false','message':e.message, 'title':e.title}, status=500)
Code Example #8
File: graph.py Project: cropandsave/arches
def node(request, nodeid):
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.body)
        if data:
            node = models.Node.objects.get(nodeid=nodeid)
            nodes, edges = node.get_child_nodes_and_edges()
            collectors = [node_ for node_ in nodes if node_.is_collector()]
            node_ids = [id_node.nodeid for id_node in nodes]
            nodes = [node_ for node_ in nodes if (node_.nodegroup_id not in node_ids)]
            with transaction.atomic():
                node.name = data.get('name', '')
                node.description = data.get('description', '')
                node.istopnode = data.get('istopnode', '')
                node.crmclass = data.get('crmclass', '')
                node.datatype = data.get('datatype', '')
                node.status = data.get('status', '')
                node.validations.set(data.get('validations', []))
                new_nodegroup_id = data.get('nodegroup_id', None)
                cardinality = data.get('cardinality', 'n')
                if node.nodegroup_id != new_nodegroup_id:
                    edge = models.Edge.objects.get(rangenode_id=nodeid)
                    parent_group = edge.domainnode.nodegroup
                    new_group = parent_group
                    if new_nodegroup_id == nodeid:
                        new_group, created = models.NodeGroup.objects.get_or_create(nodegroupid=nodeid, defaults={'cardinality': 'n', 'legacygroupid': None, 'parentnodegroup': None})
                        new_group.parentnodegroup = parent_group
                        new_group.cardinality = cardinality
                        new_group.save()
                        parent_group = new_group

                    for collector in collectors:
                        collector.nodegroup.parentnodegroup = parent_group
                        collector.nodegroup.save()

                    for group_node in nodes:
                        group_node.nodegroup = new_group
                        group_node.save()

                    node.nodegroup = new_group

                node.save()
                return JSONResponse({'node': node, 'group_nodes': nodes, 'collectors': collectors, 'nodegroup': node.nodegroup})

    if request.method == 'DELETE':
        node = models.Node.objects.get(nodeid=nodeid)
        nodes, edges = node.get_child_nodes_and_edges()
        edges.append(models.Edge.objects.get(rangenode=node))
        nodes.append(node)
        with transaction.atomic():
            [edge.delete() for edge in edges]
            [node.delete() for node in nodes]
            return JSONResponse({})

    return HttpResponseNotFound()
Code Example #9
File: resources.py Project: archesproject/arches
def related_resources(request, resourceid):
    if request.method == 'GET':
        lang = request.GET.get('lang', settings.LANGUAGE_CODE)
        start = request.GET.get('start', 0)
        return JSONResponse(get_related_resources(resourceid, lang, start=start, limit=15), indent=4)
    
    if 'edit' in request.user.user_groups and request.method == 'DELETE':
        se = SearchEngineFactory().create()
        data = JSONDeserializer().deserialize(request.body) 
        entityid1 = data.get('entityid1')
        entityid2 = data.get('entityid2')
        resourcexid = data.get('resourcexid')
        realtionshiptype = data.get('realtionshiptype')
        resource = Resource(entityid1)
        resource.delete_resource_relationship(entityid2, realtionshiptype)
        se.delete(index='resource_relations', doc_type='all', id=resourcexid)
        return JSONResponse({ 'success': True })
Code Example #10
File: graph.py Project: pierrechoffe/arches
def settings(request, graphid):
    node = models.Node.objects.get(graph_id=graphid, istopnode=True)
    graph = node.graph
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.body)
        for key, value in data.get('graph').iteritems():
            setattr(graph, key, value)
        graph.save()
        node.set_relatable_resources(data.get('relatable_resource_ids'))
        node.ontologyclass = data.get('ontology_class') if graph.ontology is not None else None
        node.save()
        return JSONResponse({
            'success': True,
            'graph': graph,
            'relatable_resource_ids': [res.nodeid for res in node.get_relatable_resources()]
        })
    node_json = JSONSerializer().serialize(node)
    icons = models.Icon.objects.order_by('name')
    resource_graphs = models.GraphModel.objects.filter(Q(isresource=True), ~Q(graphid=graphid))
    resource_data = []
    relatable_resources = node.get_relatable_resources()
    for res in resource_graphs:
        if models.Node.objects.filter(graph=res, istopnode=True).count() > 0:
            node = models.Node.objects.get(graph=res, istopnode=True)
            resource_data.append({
                'id': node.nodeid,
                'graph': res,
                'is_relatable': (node in relatable_resources)
            })
    graphs = models.GraphModel.objects.all()
    ontologies = models.Ontology.objects.filter(parentontology=None)
    ontology_classes = models.OntologyClass.objects.values('source', 'ontology_id')
    return render(request, 'views/graph/graph-settings.htm', {
        'main_script': 'views/graph/graph-settings',
        'icons': JSONSerializer().serialize(icons),
        'graph': JSONSerializer().serialize(graph),
        'node_json': node_json,
        'graphs': JSONSerializer().serialize(graphs),
        'ontologies': JSONSerializer().serialize(ontologies),
        'ontology_classes': JSONSerializer().serialize(ontology_classes),
        'graphid': graphid,
        'resource_data': JSONSerializer().serialize(resource_data),
        'node_count': models.Node.objects.filter(graph=graph).count()
    })
Code Example #11
File: middleware.py Project: fargeo/arches
    def get_user_from_token(self, token):
        decoded_json = jws.verify(token, settings.JWT_KEY, algorithms=[settings.JWT_ALGORITHM])
        decoded_dict = JSONDeserializer().deserialize(decoded_json)

        username = decoded_dict.get('username', None)
        expiration = decoded_dict.get('expiration', None)

        user = None
        try:
            user = User.objects.get(username=username)
            if not user.is_active:
                raise Exception()
        except:
            raise AuthenticationFailed(_('User inactive or deleted.\n\n'))

        if int(expiration) < int(time.time()):
            raise AuthenticationFailed(_('Token Expired.\n\n'))

        return user or AnonymousUser()
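
For context, a token this middleware would accept could be produced with python-jose's jws.sign; the helper below is a hedged sketch (the helper itself and the lifetime are assumptions), reusing the same JWT_KEY and JWT_ALGORITHM settings the verification side reads:

import json
import time

from django.conf import settings
from jose import jws

def make_token(username, lifetime_seconds=3600):
    # Hypothetical helper: builds the JSON payload get_user_from_token expects
    # (a username plus a UNIX-timestamp expiration) and signs it with the same
    # key and algorithm the middleware uses for verification.
    payload = json.dumps({
        "username": username,
        "expiration": int(time.time()) + lifetime_seconds,
    })
    return jws.sign(payload, settings.JWT_KEY, algorithm=settings.JWT_ALGORITHM)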
Code Example #12
File: graph.py Project: azerbini/eamena
    def post(self, request, graphid):
        graph = Graph.objects.get(graphid=graphid)
        data = JSONDeserializer().deserialize(request.body)
        for key, value in data.get('graph').iteritems():
            if key in ['iconclass', 'name', 'author', 'description', 'isresource',
                'ontology_id', 'version',  'subtitle', 'isactive', 'mapfeaturecolor', 'mappointsize', 'maplinewidth']:
                setattr(graph, key, value)

        node = models.Node.objects.get(graph_id=graphid, istopnode=True)
        node.set_relatable_resources(data.get('relatable_resource_ids'))
        node.ontologyclass = data.get('ontology_class') if data.get('graph').get('ontology_id') is not None else None

        with transaction.atomic():
            graph.save()
            node.save()

        return JSONResponse({
            'success': True,
            'graph': graph,
            'relatable_resource_ids': [res.nodeid for res in node.get_relatable_resources()]
        })
Code Example #13
File: importer.py Project: mradamcox/arches
    def __init__(self, file=None, mapping_file=None, relations_file=None):
        self.business_data = ''
        self.mapping = None
        self.graphs = ''
        self.reference_data = ''
        self.business_data = ''
        self.file_format = ''
        self.relations = ''
        csv.field_size_limit(sys.maxint)

        if not file:
            file = settings.BUSINESS_DATA_FILES
        else:
            file = [file]

        if mapping_file == None:
            try:
                mapping_file = [file[0].split('.')[0] + '.mapping']
            except:
                print '*'*80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*'*80
                sys.exit()
        else:
            try:
                mapping_file = [mapping_file]
            except:
                print '*'*80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*'*80
                sys.exit()

        if relations_file == None:
            try:
                relations_file = [file[0].split('.')[0] + '.relations']
            except:
                pass

        for path in relations_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.relations = csv.DictReader(open(relations_file[0], 'r'))

        for path in mapping_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.mapping = json.load(open(path, 'r'))
                else:
                    self.mapping = None

        for path in file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.file_format = file[0].split('.')[-1]
                    if self.file_format == 'json':
                        with open(file[0], 'rU') as f:
                            archesfile = JSONDeserializer().deserialize(f)
                            if 'graph' in archesfile.keys():
                                self.graphs = archesfile['graph']
                            if 'reference_data' in archesfile.keys():
                                self.reference_data = archesfile['reference_data']
                            if 'business_data' in archesfile.keys():
                                self.business_data = archesfile['business_data']
                    elif self.file_format == 'csv':
                        data = unicodecsv.DictReader(open(file[0], 'r'), encoding='utf-8-sig', restkey='ADDITIONAL', restval='MISSING')
                        self.business_data = list(data)
                else:
                    print str(file) + ' is not a valid file'
            else:
                print path + ' is not a valid path'
Code Example #14
File: graph_tests.py Project: webjunkie/arches
    def test_node_update(self):
        """
        test to make sure that node groups and cards are properly managed
        when changing a nodegroup value on a node being updated

        """

        # create a graph, append the node/node type graph and confirm it has the correct
        # number of nodegroups, then remove the appended branch's group and reconfirm that
        # the proper number of groups is properly reflected in the graph

        graph = Graph.objects.get(pk=self.rootNode.graph.graphid)
        graph.append_branch('P1_is_identified_by',
                            graphid=self.NODE_NODETYPE_GRAPHID)

        node_to_update = None
        for node_id, node in graph.nodes.iteritems():
            if node.name == 'Node':
                node_to_update = JSONDeserializer().deserialize(
                    JSONSerializer().serialize(node))
            if node.name == 'Node Type':
                node_type_node = JSONDeserializer().deserialize(
                    JSONSerializer().serialize(node))

        # confirm that nulling out a child group will then make that group a part of the parent group
        node_to_update['nodegroup_id'] = None
        graph.update_node(node_to_update)
        self.assertEqual(len(graph.get_nodegroups()), 1)
        self.assertEqual(len(graph.cards), 1)
        for node in graph.nodes.itervalues():
            self.assertEqual(graph.root.nodegroup, node.nodegroup)

        graph.append_branch('P1_is_identified_by',
                            nodeid=node_type_node['nodeid'],
                            graphid=self.SINGLE_NODE_GRAPHID)
        for edge in graph.edges.itervalues():
            if str(edge.domainnode_id) == str(node_type_node['nodeid']):
                child_nodegroup_node = JSONDeserializer().deserialize(
                    JSONSerializer().serialize(edge.rangenode))

        # make a node group with a single node and confirm that that node is now not part of its parent node group
        child_nodegroup_node['nodegroup_id'] = child_nodegroup_node['nodeid']
        graph.update_node(child_nodegroup_node)
        self.assertEqual(len(graph.get_nodegroups()), 2)
        for node_id, node in graph.nodes.iteritems():
            if node_id == child_nodegroup_node['nodeid']:
                self.assertNotEqual(graph.root.nodegroup, node.nodegroup)
            else:
                self.assertEqual(graph.root.nodegroup, node.nodegroup)

        # make another node group with a node (that has a child) and confirm that that node and
        # its child are now not part of its parent node group and that both nodes are grouped together
        node_to_update['nodegroup_id'] = node_to_update['nodeid']
        graph.update_node(node_to_update)
        self.assertEqual(len(graph.get_nodegroups()), 3)
        children = graph.get_child_nodes(node_to_update['nodeid'])
        for child in children:
            if child.nodeid == child_nodegroup_node['nodeid']:
                self.assertEqual(child.nodeid, child.nodegroup_id)
            else:
                self.assertEqual(child.nodegroup_id,
                                 node_to_update['nodegroup_id'])

        # remove a node's node group and confirm that that node takes the node group of its parent
        child_nodegroup_node['nodegroup_id'] = None
        graph.update_node(child_nodegroup_node)
        self.assertEqual(len(graph.get_nodegroups()), 2)
        children = graph.get_child_nodes(node_to_update['nodeid'])
        for child in children:
            self.assertEqual(child.nodegroup_id,
                             node_to_update['nodegroup_id'])
Code Example #15
    def collect_resource_instances_for_couch(self):
        """
        Uses the data definition configs of a mobile survey object to search for
        resource instances relevant to a mobile survey. Takes a user object which
        is required for search.
        """

        query = self.datadownloadconfig["custom"]
        resource_types = self.datadownloadconfig["resources"]
        all_instances = {}
        if query in ("", None) and len(resource_types) == 0:
            logger.info("No resources or data query defined")
        else:
            resources_in_couch = set()
            resources_in_couch_by_type = {}
            for res_type in resource_types:
                resources_in_couch_by_type[res_type] = []

            db = self.couch.create_db("project_" + str(self.id))
            couch_query = {"selector": {"type": "resource"}, "fields": ["_id", "graph_id"]}
            for doc in db.find(couch_query):
                resources_in_couch.add(doc["_id"])
                resources_in_couch_by_type[doc["graph_id"]].append(doc["_id"])

            if self.datadownloadconfig["download"]:
                request = HttpRequest()
                request.user = self.lasteditedby
                request.GET["mobiledownload"] = True
                if query in ("", None):
                    if len(self.bounds.coords) == 0:
                        default_bounds = settings.DEFAULT_BOUNDS
                        default_bounds["features"][0]["properties"]["inverted"] = False
                        map_filter = json.dumps(default_bounds)
                    else:
                        map_filter = json.dumps({"type": "FeatureCollection", "features": [{"geometry": json.loads(self.bounds.json)}]})
                    try:
                        for res_type in resource_types:
                            instances = {}
                            request.GET["resource-type-filter"] = json.dumps([{"graphid": res_type, "inverted": False}])
                            request.GET["map-filter"] = map_filter
                            request.GET["paging-filter"] = "1"
                            request.GET["resourcecount"] = int(self.datadownloadconfig["count"]) - len(resources_in_couch_by_type[res_type])
                            self.append_to_instances(request, instances, res_type)
                            if len(list(instances.keys())) < request.GET["resourcecount"]:
                                request.GET["map-filter"] = "{}"
                                request.GET["resourcecount"] = request.GET["resourcecount"] - len(list(instances.keys()))
                                self.append_to_instances(request, instances, res_type)
                            for key, value in instances.items():
                                all_instances[key] = value
                    except Exception as e:
                        logger.exception(e)
                else:
                    try:
                        request.GET["resourcecount"] = int(self.datadownloadconfig["count"]) - len(resources_in_couch)
                        parsed = urllib.parse.urlparse(query)
                        urlparams = urllib.parse.parse_qs(parsed.query)
                        for k, v in urlparams.items():
                            request.GET[k] = v[0]
                        search_res_json = search.search_results(request)
                        search_res = JSONDeserializer().deserialize(search_res_json.content)
                        for hit in search_res["results"]["hits"]["hits"]:
                            all_instances[hit["_source"]["resourceinstanceid"]] = hit["_source"]
                    except KeyError:
                        print("no instances found in", search_res)

            # this effectively makes sure that resources in couch always get updated
            # even if they weren't included in the search results above (assuming self.datadownloadconfig["download"] == True)
            # if self.datadownloadconfig["download"] == False then this will always update the resources in couch
            ids = list(resources_in_couch - set(all_instances.keys()))

            if len(ids) > 0:
                se = SearchEngineFactory().create()
                query = Query(se, start=0, limit=settings.SEARCH_RESULT_LIMIT)
                ids_query = Terms(field="_id", terms=ids)
                query.add_query(ids_query)
                results = query.search(index="resources")
                if results is not None:
                    for result in results["hits"]["hits"]:
                        all_instances[result["_id"]] = result["_source"]
        return all_instances
Code Example #16
    def delete(self, request):
        json = request.body
        if json != None:
            ret = []
            data = JSONDeserializer().deserialize(json)
            resource_instance = models.ResourceInstance.objects.get(
                pk=data['resourceinstance_id'])
            is_active = resource_instance.graph.isactive

            with transaction.atomic():
                try:
                    tile = Tile.objects.get(tileid=data['tileid'])
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status':
                            'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely already deleted by another user'
                                  )
                            ]
                        },
                        status=500)
                user_is_reviewer = request.user.groups.filter(
                    name='Resource Reviewer').exists()
                if (user_is_reviewer or
                        tile.is_provisional() is True) and is_active is True:
                    if tile.filter_by_perm(request.user, 'delete_nodegroup'):
                        nodegroup = models.NodeGroup.objects.get(
                            pk=tile.nodegroup_id)
                        if tile.is_provisional() is True and len(
                                list(tile.provisionaledits.keys())) == 1:
                            provisional_editor_id = list(
                                tile.provisionaledits.keys())[0]
                            edit = tile.provisionaledits[provisional_editor_id]
                            provisional_editor = User.objects.get(
                                pk=provisional_editor_id)
                            reviewer = request.user
                            tile.delete(request=request,
                                        provisional_edit_log_details={
                                            "user":
                                            reviewer,
                                            "action":
                                            "delete edit",
                                            "edit":
                                            edit,
                                            "provisional_editor":
                                            provisional_editor
                                        })
                        else:
                            tile.delete(request=request)
                        tile.after_update_all()
                        update_system_settings_cache(tile)
                        return JSONResponse(tile)
                    else:
                        return JSONResponse(
                            {
                                'status':
                                'false',
                                'message':
                                [_('Request Failed'),
                                 _('Permission Denied')]
                            },
                            status=500)
                elif is_active is False:
                    response = {
                        'status':
                        'false',
                        'message': [
                            _('Request Failed'),
                            _('Unable to delete. Verify model status is active'
                              )
                        ]
                    }
                    return JSONResponse(response, status=500)
                else:
                    return JSONResponse(
                        {
                            'status':
                            'false',
                            'message': [
                                _('Request Failed'),
                                _('You do not have permissions to delete a tile with authoritative data.'
                                  )
                            ]
                        },
                        status=500)

        return HttpResponseNotFound()
Code Example #17
    def post(self, request, surveyid):
        data = JSONDeserializer().deserialize(request.body)
        if models.MobileSurveyModel.objects.filter(
                pk=data['id']).exists() is False:
            mobile_survey_model = models.MobileSurveyModel(
                id=surveyid,
                name=data['name'],
                createdby=self.request.user,
                lasteditedby=self.request.user)
            mobile_survey_model.save()

        mobile_survey = MobileSurvey.objects.get(pk=data['id'])
        self.update_identities(data, mobile_survey, mobile_survey.users.all(),
                               'users', User, models.MobileSurveyXUser)
        self.update_identities(data, mobile_survey, mobile_survey.groups.all(),
                               'groups', Group, models.MobileSurveyXGroup)

        mobile_survey_card_ids = set(
            [unicode(c.cardid) for c in mobile_survey.cards.all()])
        form_card_ids = set(data['cards'])
        cards_to_remove = mobile_survey_card_ids - form_card_ids
        cards_to_add = form_card_ids - mobile_survey_card_ids
        cards_to_update = mobile_survey_card_ids & form_card_ids

        for card_id in cards_to_add:
            models.MobileSurveyXCard.objects.create(
                card=models.CardModel.objects.get(cardid=card_id),
                mobile_survey=mobile_survey,
                sortorder=data['cards'].index(card_id))

        for card_id in cards_to_update:
            mobile_survey_card = models.MobileSurveyXCard.objects.filter(
                mobile_survey=mobile_survey).get(
                    card=models.CardModel.objects.get(cardid=card_id))
            mobile_survey_card.sortorder = data['cards'].index(card_id)
            mobile_survey_card.save()

        for card_id in cards_to_remove:
            models.MobileSurveyXCard.objects.filter(
                card=models.CardModel.objects.get(cardid=card_id),
                mobile_survey=mobile_survey).delete()

        # TODO Disabling the following section until we make emailing users optional
        # if mobile_survey.active != data['active']:
        # notify users in the mobile_survey that the state of the mobile_survey has changed
        # if data['active']:
        #     self.notify_mobile_survey_start(request, mobile_survey)
        # else:
        #     self.notify_mobile_survey_end(request, mobile_survey)
        mobile_survey.name = data['name']
        mobile_survey.description = data['description']
        mobile_survey.onlinebasemaps = data['onlinebasemaps']
        if data['startdate'] != '':
            mobile_survey.startdate = data['startdate']
        if data['enddate'] != '':
            mobile_survey.enddate = data['enddate']
        mobile_survey.datadownloadconfig = data['datadownloadconfig']
        mobile_survey.active = data['active']
        mobile_survey.tilecache = data['tilecache']
        polygons = []

        try:
            data['bounds'].upper()
            data['bounds'] = json.loads(data['bounds'])
        except AttributeError as e:
            print('bounds is not a string')

        if 'features' in data['bounds']:
            for feature in data['bounds']['features']:
                for coord in feature['geometry']['coordinates']:
                    polygons.append(Polygon(coord))

        elif len(polygons) == 0:
            try:
                if data['bounds']['type'] == 'MultiPolygon':
                    for poly in data['bounds']['coordinates']:
                        for coords in poly:
                            polygons.append(Polygon(coords))
            except AttributeError as e:
                print('bounds is not a geojson geometry object')

        mobile_survey.bounds = MultiPolygon(polygons)
        mobile_survey.lasteditedby = self.request.user

        try:
            connection_error = False
            with transaction.atomic():
                mobile_survey.save()
        except Exception as e:
            if connection_error is False:
                error_title = _('Unable to save survey')
                if 'strerror' in e and e.strerror == 'Connection refused' or 'Connection refused' in e:
                    error_message = _("Unable to connect to CouchDB")
                else:
                    error_message = e.message
                connection_error = JSONResponse(
                    {
                        'success': False,
                        'message': error_message,
                        'title': error_title
                    },
                    status=500)
            return connection_error

        return JSONResponse({'success': True, 'mobile_survey': mobile_survey})
Code Example #18
    def delete(self, request):
        json = request.body
        if json is not None:
            ret = []
            data = JSONDeserializer().deserialize(json)
            resource_instance = models.ResourceInstance.objects.get(
                pk=data["resourceinstance_id"])
            is_active = resource_instance.graph.isactive

            with transaction.atomic():
                try:
                    tile = Tile.objects.get(tileid=data["tileid"])
                except ObjectDoesNotExist:
                    return JSONErrorResponse(
                        _("This tile is no longer available"),
                        _("It was likely already deleted by another user"))
                user_is_reviewer = user_is_resource_reviewer(request.user)
                if (user_is_reviewer or
                        tile.is_provisional() is True) and is_active is True:
                    if tile.filter_by_perm(request.user, "delete_nodegroup"):
                        nodegroup = models.NodeGroup.objects.get(
                            pk=tile.nodegroup_id)
                        if tile.is_provisional() is True and len(
                                list(tile.provisionaledits.keys())) == 1:
                            provisional_editor_id = list(
                                tile.provisionaledits.keys())[0]
                            edit = tile.provisionaledits[provisional_editor_id]
                            provisional_editor = User.objects.get(
                                pk=provisional_editor_id)
                            reviewer = request.user
                            tile.delete(
                                request=request,
                                provisional_edit_log_details={
                                    "user": reviewer,
                                    "action": "delete edit",
                                    "edit": edit,
                                    "provisional_editor": provisional_editor,
                                },
                            )
                        else:
                            tile.delete(request=request)
                        tile.after_update_all()
                        update_system_settings_cache(tile)
                        return JSONResponse(tile)
                    else:
                        return JSONErrorResponse(_("Request Failed"),
                                                 _("Permission Denied"))
                elif is_active is False:
                    response = {
                        "status":
                        "false",
                        "message": [
                            _("Request Failed"),
                            _("Unable to delete. Verify model status is active"
                              )
                        ]
                    }
                    return JSONResponse(response, status=500)
                else:
                    return JSONErrorResponse(
                        _("Request Failed"),
                        _("You do not have permissions to delete a tile with authoritative data."
                          ))

        return HttpResponseNotFound()
Code Example #19
def concept(request, conceptid):
    f = request.GET.get("f", "json")
    mode = request.GET.get("mode", "")
    lang = request.GET.get("lang", settings.LANGUAGE_CODE)
    pretty = request.GET.get("pretty", False)

    if request.method == "GET":
        include_subconcepts = request.GET.get("include_subconcepts",
                                              "true") == "true"
        include_parentconcepts = request.GET.get("include_parentconcepts",
                                                 "true") == "true"
        include_relatedconcepts = request.GET.get("include_relatedconcepts",
                                                  "true") == "true"
        emulate_elastic_search = request.GET.get("emulate_elastic_search",
                                                 "false") == "true"
        depth_limit = request.GET.get("depth_limit", None)

        depth_limit = 1
        if not conceptid:
            return render(
                request,
                "views/rdm/concept-report.htm",
                {
                    "lang":
                    lang,
                    "concept_count":
                    models.Concept.objects.filter(nodetype="Concept").count(),
                    "collection_count":
                    models.Concept.objects.filter(
                        nodetype="Collection").count(),
                    "scheme_count":
                    models.Concept.objects.filter(
                        nodetype="ConceptScheme").count(),
                    "entitytype_count":
                    models.Concept.objects.filter(
                        nodetype="EntityType").count(),
                    "default_report":
                    True,
                },
            )

        labels = []

        concept_graph = Concept().get(
            id=conceptid,
            include_subconcepts=include_subconcepts,
            include_parentconcepts=include_parentconcepts,
            include_relatedconcepts=include_relatedconcepts,
            depth_limit=depth_limit,
            up_depth_limit=None,
            lang=lang,
            semantic=(mode == "semantic" or mode == ""),
        )

        languages = sort_languages(models.DLanguage.objects.all(), lang)

        valuetypes = models.DValueType.objects.all()
        relationtypes = models.DRelationType.objects.all()
        prefLabel = concept_graph.get_preflabel(lang=lang)
        for subconcept in concept_graph.subconcepts:
            subconcept.prefLabel = subconcept.get_preflabel(lang=lang)
        for relatedconcept in concept_graph.relatedconcepts:
            relatedconcept.prefLabel = relatedconcept.get_preflabel(lang=lang)
        for value in concept_graph.values:
            if value.category == "label":
                labels.append(value)

        if (mode == "semantic"
                or mode == "") and (concept_graph.nodetype == "Concept" or
                                    concept_graph.nodetype == "ConceptScheme"
                                    or concept_graph.nodetype == "EntityType"):
            if concept_graph.nodetype == "ConceptScheme":
                parent_relations = relationtypes.filter(category="Properties")
            else:
                parent_relations = (relationtypes.filter(
                    category="Semantic Relations").exclude(
                        relationtype="related").exclude(
                            relationtype="broader").exclude(
                                relationtype="broaderTransitive"))
            return render(
                request,
                "views/rdm/concept-report.htm",
                {
                    "lang":
                    lang,
                    "prefLabel":
                    prefLabel,
                    "labels":
                    labels,
                    "concept":
                    concept_graph,
                    "languages":
                    languages,
                    "sparql_providers":
                    get_sparql_providers(),
                    "valuetype_labels":
                    valuetypes.filter(category="label"),
                    "valuetype_notes":
                    valuetypes.filter(category="note"),
                    "valuetype_related_values":
                    valuetypes.filter(
                        category__in=["undefined", "identifiers"]),
                    "parent_relations":
                    parent_relations,
                    "related_relations":
                    relationtypes.filter(
                        Q(category="Mapping Properties")
                        | Q(relationtype="related")),
                    "concept_paths":
                    concept_graph.get_paths(lang=lang),
                    "graph_json":
                    JSONSerializer().serialize(
                        concept_graph.get_node_and_links(lang=lang)),
                    "direct_parents": [
                        parent.get_preflabel(lang=lang)
                        for parent in concept_graph.parentconcepts
                    ],
                },
            )
        elif mode == "collections":
            return render(
                request,
                "views/rdm/entitytype-report.htm",
                {
                    "lang":
                    lang,
                    "prefLabel":
                    prefLabel,
                    "labels":
                    labels,
                    "concept":
                    concept_graph,
                    "languages":
                    languages,
                    "valuetype_labels":
                    valuetypes.filter(category="label"),
                    "valuetype_notes":
                    valuetypes.filter(category="note"),
                    "valuetype_related_values":
                    valuetypes.filter(
                        category__in=["undefined", "identifiers"]),
                    "related_relations":
                    relationtypes.filter(relationtype="member"),
                    "concept_paths":
                    concept_graph.get_paths(lang=lang),
                },
            )

    if request.method == "POST":

        if len(request.FILES) > 0:
            skosfile = request.FILES.get("skosfile", None)
            imagefile = request.FILES.get("file", None)

            if imagefile:
                value = models.FileValue(
                    valueid=str(uuid.uuid4()),
                    value=request.FILES.get("file", None),
                    concept_id=conceptid,
                    valuetype_id="image",
                    language_id=settings.LANGUAGE_CODE,
                )
                value.save()
                return JSONResponse(value)

            elif skosfile:
                overwrite_options = request.POST.get("overwrite_options", None)
                staging_options = request.POST.get("staging_options", None)
                skos = SKOSReader()
                try:
                    rdf = skos.read_file(skosfile)
                    ret = skos.save_concepts_from_skos(rdf, overwrite_options,
                                                       staging_options)
                    return JSONResponse(ret)
                except Exception as e:
                    return JSONErrorResponse(
                        _('Unable to Load SKOS File'),
                        _('There was an issue saving the contents of the file to Arches. '
                          ) + str(e))

        else:
            data = JSONDeserializer().deserialize(request.body)
            if data:
                with transaction.atomic():
                    concept = Concept(data)
                    concept.save()
                    concept.index()

                    return JSONResponse(concept)

    if request.method == "DELETE":
        data = JSONDeserializer().deserialize(request.body)
        if data:
            with transaction.atomic():
                concept = Concept(data)
                delete_self = data[
                    "delete_self"] if "delete_self" in data else False
                if not (delete_self and concept.id in CORE_CONCEPTS):
                    if concept.nodetype == "Collection":
                        concept.delete(delete_self=delete_self)
                    else:
                        in_use = False
                        if delete_self:
                            check_concept = Concept().get(
                                data["id"], include_subconcepts=True)
                            in_use = check_concept.check_if_concept_in_use()
                        if "subconcepts" in data:
                            for subconcept in data["subconcepts"]:
                                if in_use == False:
                                    check_concept = Concept().get(
                                        subconcept["id"],
                                        include_subconcepts=True)
                                    in_use = check_concept.check_if_concept_in_use(
                                    )

                        if in_use == False:
                            concept.delete_index(delete_self=delete_self)
                            concept.delete(delete_self=delete_self)
                        else:
                            return JSONErrorResponse(
                                _('Unable to Delete'),
                                _('This concept or one of its subconcepts is already in use by an existing resource.'
                                  ), {"in_use": in_use})

                return JSONResponse(concept)

    return HttpResponseNotFound()
Code Example #20
    def __init__(self, file=None, mapping_file=None, relations_file=None):
        self.business_data = ""
        self.mapping = None
        self.graphs = ""
        self.reference_data = ""
        self.business_data = ""
        self.file_format = ""
        self.relations = ""
        try:
            csv.field_size_limit(sys.maxsize)
        except:
            csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))

        if not file:
            file = settings.BUSINESS_DATA_FILES
        else:
            file = [file]
        self.file = file
        if mapping_file is None:
            try:
                mapping_file_base = os.path.splitext(file[0])[0]
                mapping_file = [f"{mapping_file_base}.mapping"]
            except:
                print("*" * 80)
                print(
                    "ERROR: Mapping file is missing or improperly named. Make sure you have a "
                    "mapping file with the same basename as your business data file and the extension .mapping"
                )
                print("*" * 80)
                sys.exit()
        else:
            try:
                mapping_file = [mapping_file]
            except:
                print("*" * 80)
                print(
                    "ERROR: Mapping file is missing or improperly named. Make sure you have a "
                    "mapping file with the same basename as your business data file and the extension .mapping"
                )
                print("*" * 80)
                sys.exit()

        if relations_file is None:
            try:
                relations_file_base = os.path.splitext(file[0])[0]
                relations_file = [f"{relations_file_base}.relations"]
            except:
                pass

        for path in relations_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.relations = csv.DictReader(open(relations_file[0], "r"))

        for path in mapping_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.mapping = json.load(open(path, "r"))
                else:
                    self.mapping = None

        for path in file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.file_format = os.path.splitext(file[0])[1].strip(".")
                    if self.file_format == "json":
                        with open(file[0], "rU") as f:
                            archesfile = JSONDeserializer().deserialize(f)
                            if "graph" in list(archesfile.keys()):
                                self.graphs = archesfile["graph"]
                            if "reference_data" in list(archesfile.keys()):
                                self.reference_data = archesfile["reference_data"]
                            if "business_data" in list(archesfile.keys()):
                                self.business_data = archesfile["business_data"]
                    elif self.file_format == "csv":
                        data = csv.DictReader(open(file[0], encoding="utf-8"))
                        self.business_data = list(data)
                    elif self.file_format == "zip":
                        shp_zipfile = os.path.basename(path)
                        shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                        unzip_dir = os.path.join(os.path.dirname(path), shp_zipfile_name)
                        unzip_file(path, unzip_dir)
                        shp = [i for i in os.listdir(unzip_dir) if i.endswith(".shp")]
                        if len(shp) == 0:
                            print("*" * 80)
                            print("ERROR: There is no shapefile in this zipfile.")
                            print("*" * 80)
                            exit()
                        elif len(shp) > 1:
                            print("*" * 80)
                            print("ERROR: There are multiple shapefiles in this zipfile. Please load each individually:")
                            for s in shp:
                                print(
                                    "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]".format(
                                        os.path.join(unzip_dir, s), mapping_file[0]
                                    )
                                )
                            print("*" * 80)
                            exit()
                        shp_path = os.path.join(unzip_dir, shp[0])
                        self.business_data = self.shape_to_csv(shp_path)
                    elif self.file_format == "shp":
                        self.business_data = self.shape_to_csv(path)
                else:
                    print(str(file) + " is not a valid file")
            else:
                print(path + " is not a valid path")
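
A short usage sketch for this importer; the class name BusinessDataImporter, its import path, and the file paths are assumptions, since the example only shows the __init__ body:

from arches.app.utils.data_management.resources.importer import BusinessDataImporter

# Hypothetical call: a CSV business-data file with a sibling data/resources.mapping
# file, which __init__ resolves automatically when mapping_file is not given.
importer = BusinessDataImporter(file="data/resources.csv")
print(importer.file_format)         # "csv"
print(len(importer.business_data))  # rows read by the DictReader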
Code Example #21
def concept(request, conceptid):
    f = request.GET.get('f', 'json')
    mode = request.GET.get('mode', '')
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    pretty = request.GET.get('pretty', False)

    if request.method == 'GET':

        include_subconcepts = request.GET.get('include_subconcepts',
                                              'true') == 'true'
        include_parentconcepts = request.GET.get('include_parentconcepts',
                                                 'true') == 'true'
        include_relatedconcepts = request.GET.get('include_relatedconcepts',
                                                  'true') == 'true'
        emulate_elastic_search = request.GET.get('emulate_elastic_search',
                                                 'false') == 'true'
        depth_limit = request.GET.get('depth_limit', None)

        if f == 'html':
            depth_limit = 1
            if not conceptid:
                return render(
                    request, 'views/rdm/concept-report.htm', {
                        'lang': lang,
                        'concept_count': models.Concept.objects.filter(nodetype='Concept').count(),
                        'collection_count': models.Concept.objects.filter(nodetype='Collection').count(),
                        'scheme_count': models.Concept.objects.filter(nodetype='ConceptScheme').count(),
                        'entitytype_count': models.Concept.objects.filter(nodetype='EntityType').count(),
                        'default_report': True
                    })

        ret = []
        labels = []
        this_concept = Concept().get(id=conceptid)

        if f == 'html':
            if mode == '' and this_concept.nodetype in ('Concept', 'ConceptScheme', 'EntityType'):
                concept_graph = Concept().get(
                    id=conceptid,
                    include_subconcepts=include_subconcepts,
                    include_parentconcepts=include_parentconcepts,
                    include_relatedconcepts=include_relatedconcepts,
                    depth_limit=depth_limit,
                    up_depth_limit=None,
                    lang=lang)
            else:
                concept_graph = Concept().get(
                    id=conceptid,
                    include_subconcepts=include_subconcepts,
                    include_parentconcepts=include_parentconcepts,
                    include_relatedconcepts=include_relatedconcepts,
                    depth_limit=depth_limit,
                    up_depth_limit=None,
                    lang=lang,
                    semantic=False)

            languages = models.DLanguage.objects.all()
            valuetypes = models.DValueType.objects.all()
            relationtypes = models.DRelationType.objects.all()
            prefLabel = concept_graph.get_preflabel(lang=lang)
            for subconcept in concept_graph.subconcepts:
                subconcept.prefLabel = subconcept.get_preflabel(lang=lang)
            for relatedconcept in concept_graph.relatedconcepts:
                relatedconcept.prefLabel = relatedconcept.get_preflabel(
                    lang=lang)
            for value in concept_graph.values:
                if value.category == 'label':
                    labels.append(value)

            if mode == '' and this_concept.nodetype in ('Concept', 'ConceptScheme', 'EntityType'):
                if concept_graph.nodetype == 'ConceptScheme':
                    parent_relations = relationtypes.filter(category='Properties')
                else:
                    parent_relations = (relationtypes.filter(category='Semantic Relations')
                                        .exclude(relationtype='related')
                                        .exclude(relationtype='broader')
                                        .exclude(relationtype='broaderTransitive'))
                return render(
                    request, 'views/rdm/concept-report.htm', {
                        'lang': lang,
                        'prefLabel': prefLabel,
                        'labels': labels,
                        'concept': concept_graph,
                        'languages': languages,
                        'sparql_providers': get_sparql_providers(),
                        'valuetype_labels': valuetypes.filter(category='label'),
                        'valuetype_notes': valuetypes.filter(category='note'),
                        'valuetype_related_values': valuetypes.filter(category='undefined'),
                        'parent_relations': parent_relations,
                        'related_relations': relationtypes.filter(
                            Q(category='Mapping Properties') | Q(relationtype='related')),
                        'concept_paths': concept_graph.get_paths(lang=lang),
                        'graph_json': JSONSerializer().serialize(
                            concept_graph.get_node_and_links(lang=lang)),
                        'direct_parents': [
                            parent.get_preflabel(lang=lang)
                            for parent in concept_graph.parentconcepts
                        ]
                    })
            else:
                return render(
                    request, 'views/rdm/entitytype-report.htm', {
                        'lang': lang,
                        'prefLabel': prefLabel,
                        'labels': labels,
                        'concept': concept_graph,
                        'languages': languages,
                        'valuetype_labels': valuetypes.filter(category='label'),
                        'valuetype_notes': valuetypes.filter(category='note'),
                        'valuetype_related_values': valuetypes.filter(category='undefined'),
                        'related_relations': relationtypes.filter(relationtype='member'),
                        'concept_paths': concept_graph.get_paths(lang=lang)
                    })

        concept_graph = Concept().get(
            id=conceptid,
            include_subconcepts=include_subconcepts,
            include_parentconcepts=include_parentconcepts,
            include_relatedconcepts=include_relatedconcepts,
            depth_limit=depth_limit,
            up_depth_limit=None,
            lang=lang)

        if f == 'skos':
            include_parentconcepts = False
            include_subconcepts = True
            depth_limit = None
            skos = SKOSWriter()
            return HttpResponse(skos.write(concept_graph, format="pretty-xml"),
                                content_type="application/xml")

        if emulate_elastic_search:
            ret.append({'_type': conceptid, '_source': concept_graph})
        else:
            ret.append(concept_graph)

        if emulate_elastic_search:
            ret = {'hits': {'hits': ret}}

        return JSONResponse(ret, indent=4 if pretty else None)

    if request.method == 'POST':

        if len(request.FILES) > 0:
            skosfile = request.FILES.get('skosfile', None)
            imagefile = request.FILES.get('file', None)

            if imagefile:
                value = models.FileValue(valueid=str(uuid.uuid4()),
                                         value=request.FILES.get('file', None),
                                         conceptid_id=conceptid,
                                         valuetype_id='image',
                                         languageid_id=settings.LANGUAGE_CODE)
                value.save()
                return JSONResponse(value)

            elif skosfile:
                skos = SKOSReader()
                rdf = skos.read_file(skosfile)
                ret = skos.save_concepts_from_skos(rdf)
                return JSONResponse(ret)

        else:
            data = JSONDeserializer().deserialize(request.body)
            if data:
                with transaction.atomic():
                    concept = Concept(data)
                    concept.save()
                    concept.index()

                    return JSONResponse(concept)

    if request.method == 'DELETE':
        data = JSONDeserializer().deserialize(request.body)

        if data:
            with transaction.atomic():

                concept = Concept(data)

                delete_self = data.get('delete_self', False)
                if not (delete_self and concept.id in CORE_CONCEPTS):
                    in_use = False
                    if delete_self:
                        check_concept = Concept().get(data['id'],
                                                      include_subconcepts=True)
                        in_use = check_concept.check_if_concept_in_use()
                    if 'subconcepts' in data:
                        for subconcept in data['subconcepts']:
                            if not in_use:
                                check_concept = Concept().get(
                                    subconcept['id'], include_subconcepts=True)
                                in_use = check_concept.check_if_concept_in_use()

                    if not in_use:
                        concept.delete_index(delete_self=delete_self)
                        concept.delete(delete_self=delete_self)
                    else:
                        return JSONResponse({"in_use": in_use})

                return JSONResponse(concept)

    return HttpResponseNotFound()
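This view is driven entirely by query-string parameters (f, mode, lang and the include_* flags), so it can be exercised directly with Django's test client. A minimal sketch, assuming the 'concept' URL name used elsewhere in these examples; the concept id and parameter values are placeholders:

# A minimal sketch, assuming the 'concept' URL name and an existing concept id;
# both are placeholders, not values taken from this snippet.
from django.test import Client
from django.urls import reverse  # on older Django versions: from django.core.urlresolvers import reverse

client = Client()
response = client.get(
    reverse('concept', kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
    {'f': 'json', 'include_subconcepts': 'true', 'emulate_elastic_search': 'false', 'pretty': 'true'},
)
print(response.status_code)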
コード例 #22
    def import_business_data(
        self,
        file_format=None,
        business_data=None,
        mapping=None,
        overwrite="append",
        bulk=False,
        create_concepts=False,
        create_collections=False,
        use_multiprocessing=False,
    ):
        import arches.app.utils.task_management as task_management
        import arches.app.tasks as tasks
        reader = None
        start = time()
        cursor = connection.cursor()
        celery_worker_running = task_management.check_if_celery_available()

        try:
            if file_format is None:
                file_format = self.file_format
            if business_data is None:
                business_data = self.business_data
            if mapping is None:
                mapping = self.mapping
            if file_format == "json":
                if celery_worker_running is True:
                    res = tasks.import_resource_instances.apply_async((file_format, business_data, mapping), link_error=tasks.log_error.s())
                else:
                    reader = ArchesFileReader()
                    reader.import_business_data(business_data, mapping)
            elif file_format == "jsonl":
                with open(self.file[0], "rU") as openf:
                    lines = openf.readlines()
                    if use_multiprocessing is True:
                        pool = Pool(cpu_count())
                        pool.map(import_one_resource, lines)
                        connections.close_all()
                        reader = ArchesFileReader()
                    else:
                        reader = ArchesFileReader()
                        for line in lines:
                            archesresource = JSONDeserializer().deserialize(line)
                            reader.import_business_data({"resources": [archesresource]})
            elif file_format == "csv" or file_format == "shp" or file_format == "zip":
                if mapping is not None:
                    if celery_worker_running is True:
                        res = tasks.import_resource_instances.apply_async(
                            (file_format, business_data, mapping, overwrite, bulk, create_concepts, create_collections),
                            link_error=tasks.log_error.s(),
                        )
                    else:
                        reader = CsvReader()
                        reader.import_business_data(
                            business_data=business_data,
                            mapping=mapping,
                            overwrite=overwrite,
                            bulk=bulk,
                            create_concepts=create_concepts,
                            create_collections=create_collections,
                        )
                else:
                    print("*" * 80)
                    print(
                        f"ERROR: No mapping file detected for {self.file[0]}. Please indicate one "
                        "with the '-c' parameter or place one in the same directory as your business data."
                    )
                    print("*" * 80)
                    sys.exit()

            elapsed = time() - start
            print("Time to import_business_data = {0}".format(datetime.timedelta(seconds=elapsed)))

            if reader is not None:
                reader.report_errors()

        finally:
            datatype_factory = DataTypeFactory()
            datatypes = DDataType.objects.all()
            for datatype in datatypes:
                try:
                    datatype_instance = datatype_factory.get_instance(datatype.datatype)
                    datatype_instance.after_update_all()
                except BrokenPipeError as e:
                    logger = logging.getLogger(__name__)
                    logger.info("Celery not working: tasks unavailable during import.")
コード例 #23
ファイル: search.py プロジェクト: cvast/arches
def build_search_results_dsl(request):
    term_filter = request.GET.get('termFilter', '')
    spatial_filter = JSONDeserializer().deserialize(
        request.GET.get('mapFilter', '{}'))
    export = request.GET.get('export', None)
    page = 1 if request.GET.get('page') == '' else int(
        request.GET.get('page', 1))
    temporal_filter = JSONDeserializer().deserialize(
        request.GET.get('temporalFilter', '{}'))
    advanced_filters = JSONDeserializer().deserialize(
        request.GET.get('advanced', '[]'))
    search_buffer = None
    se = SearchEngineFactory().create()

    if export is not None:
        limit = settings.SEARCH_EXPORT_ITEMS_PER_PAGE
    else:
        limit = settings.SEARCH_ITEMS_PER_PAGE

    query = Query(se, start=limit * int(page - 1), limit=limit)
    nested_agg = NestedAgg(path='points', name='geo_aggs')
    nested_agg.add_aggregation(
        GeoHashGridAgg(field='points.point',
                       name='grid',
                       precision=settings.HEX_BIN_PRECISION))
    nested_agg.add_aggregation(
        GeoBoundsAgg(field='points.point', name='bounds'))
    query.add_aggregation(nested_agg)

    search_query = Bool()
    permitted_nodegroups = get_permitted_nodegroups(request.user)

    if term_filter != '':
        for term in JSONDeserializer().deserialize(term_filter):
            term_query = Bool()
            if term['type'] == 'term' or term['type'] == 'string':
                string_filter = Bool()
                if term['type'] == 'term':
                    string_filter.must(
                        Match(field='strings.string',
                              query=term['value'],
                              type='phrase'))
                elif term['type'] == 'string':
                    string_filter.should(
                        Match(field='strings.string',
                              query=term['value'],
                              type='phrase_prefix'))
                    string_filter.should(
                        Match(field='strings.string.folded',
                              query=term['value'],
                              type='phrase_prefix'))

                string_filter.filter(
                    Terms(field='strings.nodegroup_id',
                          terms=permitted_nodegroups))
                nested_string_filter = Nested(path='strings',
                                              query=string_filter)
                if term['inverted']:
                    search_query.must_not(nested_string_filter)
                else:
                    search_query.must(nested_string_filter)
                    # need to set min_score because the query returns results with score 0 and those have to be removed, which I don't think it should be doing
                    query.min_score('0.01')
            elif term['type'] == 'concept':
                concept_ids = _get_child_concepts(term['value'])
                conceptid_filter = Bool()
                conceptid_filter.filter(
                    Terms(field='domains.conceptid', terms=concept_ids))
                conceptid_filter.filter(
                    Terms(field='domains.nodegroup_id',
                          terms=permitted_nodegroups))
                nested_conceptid_filter = Nested(path='domains',
                                                 query=conceptid_filter)
                if term['inverted']:
                    search_query.must_not(nested_conceptid_filter)
                else:
                    search_query.filter(nested_conceptid_filter)

    if 'features' in spatial_filter:
        if len(spatial_filter['features']) > 0:
            feature_geom = spatial_filter['features'][0]['geometry']
            feature_properties = spatial_filter['features'][0]['properties']
            buffer = {'width': 0, 'unit': 'ft'}
            if 'buffer' in feature_properties:
                buffer = feature_properties['buffer']
            search_buffer = _buffer(feature_geom, buffer['width'],
                                    buffer['unit'])
            feature_geom = JSONDeserializer().deserialize(search_buffer.json)
            geoshape = GeoShape(field='geometries.geom.features.geometry',
                                type=feature_geom['type'],
                                coordinates=feature_geom['coordinates'])

            invert_spatial_search = False
            if 'inverted' in feature_properties:
                invert_spatial_search = feature_properties['inverted']

            spatial_query = Bool()
            if invert_spatial_search:
                spatial_query.must_not(geoshape)
            else:
                spatial_query.filter(geoshape)

            # get the nodegroup_ids that the user has permission to search
            spatial_query.filter(
                Terms(field='geometries.nodegroup_id',
                      terms=permitted_nodegroups))
            search_query.filter(Nested(path='geometries', query=spatial_query))

    if 'fromDate' in temporal_filter and 'toDate' in temporal_filter:
        now = str(datetime.utcnow())
        start_date = SortableDate(temporal_filter['fromDate'])
        end_date = SortableDate(temporal_filter['toDate'])
        date_nodeid = None
        if 'dateNodeId' in temporal_filter and temporal_filter['dateNodeId'] != '':
            date_nodeid = str(temporal_filter['dateNodeId'])
        query_inverted = temporal_filter.get('inverted', False)

        temporal_query = Bool()

        if query_inverted:
            # inverted date searches need to use an OR clause and are generally more complicated to structure (can't use ES must_not)
            # eg: less than START_DATE OR greater than END_DATE
            inverted_date_query = Bool()
            inverted_date_ranges_query = Bool()

            if start_date.is_valid():
                inverted_date_query.should(
                    Range(field='dates.date', lt=start_date.as_float()))
                inverted_date_ranges_query.should(
                    Range(field='date_ranges.date_range',
                          lt=start_date.as_float()))
            if end_date.is_valid():
                inverted_date_query.should(
                    Range(field='dates.date', gt=end_date.as_float()))
                inverted_date_ranges_query.should(
                    Range(field='date_ranges.date_range',
                          gt=end_date.as_float()))

            date_query = Bool()
            date_query.filter(inverted_date_query)
            date_query.filter(
                Terms(field='dates.nodegroup_id', terms=permitted_nodegroups))
            if date_nodeid:
                date_query.filter(Term(field='dates.nodeid', term=date_nodeid))
            else:
                date_ranges_query = Bool()
                date_ranges_query.filter(inverted_date_ranges_query)
                date_ranges_query.filter(
                    Terms(field='date_ranges.nodegroup_id',
                          terms=permitted_nodegroups))
                temporal_query.should(
                    Nested(path='date_ranges', query=date_ranges_query))
            temporal_query.should(Nested(path='dates', query=date_query))

        else:
            date_query = Bool()
            date_query.filter(
                Range(field='dates.date',
                      gte=start_date.as_float(),
                      lte=end_date.as_float()))
            date_query.filter(
                Terms(field='dates.nodegroup_id', terms=permitted_nodegroups))
            if date_nodeid:
                date_query.filter(Term(field='dates.nodeid', term=date_nodeid))
            else:
                date_ranges_query = Bool()
                date_ranges_query.filter(
                    Range(field='date_ranges.date_range',
                          gte=start_date.as_float(),
                          lte=end_date.as_float(),
                          relation='intersects'))
                date_ranges_query.filter(
                    Terms(field='date_ranges.nodegroup_id',
                          terms=permitted_nodegroups))
                temporal_query.should(
                    Nested(path='date_ranges', query=date_ranges_query))
            temporal_query.should(Nested(path='dates', query=date_query))

        search_query.filter(temporal_query)
        #print search_query.dsl

    datatype_factory = DataTypeFactory()
    if len(advanced_filters) > 0:
        advanced_query = Bool()
        grouped_query = Bool()
        grouped_queries = [grouped_query]
        for index, advanced_filter in enumerate(advanced_filters):
            tile_query = Bool()
            for key, val in advanced_filter.iteritems():
                if key != 'op':
                    node = models.Node.objects.get(pk=key)
                    if request.user.has_perm('read_nodegroup', node.nodegroup):
                        datatype = datatype_factory.get_instance(node.datatype)
                        datatype.append_search_filters(val, node, tile_query,
                                                       request)
            nested_query = Nested(path='tiles', query=tile_query)
            if advanced_filter['op'] == 'or' and index != 0:
                grouped_query = Bool()
                grouped_queries.append(grouped_query)
            grouped_query.must(nested_query)
        for grouped_query in grouped_queries:
            advanced_query.should(grouped_query)
        search_query.must(advanced_query)

    query.add_query(search_query)
    if search_buffer is not None:
        search_buffer = search_buffer.geojson
    return {'query': query, 'search_buffer': search_buffer}
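Each of the GET parameters deserialized above is a JSON document; based on the keys this function reads, their shapes are roughly as follows (a sketch with placeholder values, not taken from a real request):

# Illustrative shapes of the JSON-encoded GET parameters read above; every value
# is a placeholder inferred from the parsing code, not from a real request.
term_filter = [
    {'type': 'term', 'value': 'church', 'inverted': False},          # exact phrase match
    {'type': 'string', 'value': 'chu', 'inverted': False},           # prefix match
    {'type': 'concept', 'value': '<conceptid>', 'inverted': False},  # concept plus its child concepts
]
map_filter = {
    'features': [{
        'type': 'Feature',
        'geometry': {'type': 'Point', 'coordinates': [0, 0]},
        'properties': {'buffer': {'width': 10, 'unit': 'ft'}, 'inverted': False},
    }]
}
temporal_filter = {
    'fromDate': '1900-01-01',
    'toDate': '1950-01-01',
    'dateNodeId': '',   # optional: limit the search to a single date node
    'inverted': False,
}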
コード例 #24
    def post(self, request):
        data = JSONDeserializer().deserialize(request.body)

        if data['id'] is None:
            mobile_survey = MobileSurvey()
            mobile_survey.createdby = self.request.user
        else:
            mobile_survey = MobileSurvey.objects.get(pk=data['id'])
            self.update_identities(data, mobile_survey,
                                   mobile_survey.users.all(), 'users', User,
                                   models.MobileSurveyXUser)
            self.update_identities(data, mobile_survey,
                                   mobile_survey.groups.all(), 'groups', Group,
                                   models.MobileSurveyXGroup)

            mobile_survey_card_ids = set(
                [unicode(c.cardid) for c in mobile_survey.cards.all()])
            form_card_ids = set(data['cards'])
            cards_to_remove = mobile_survey_card_ids - form_card_ids
            cards_to_add = form_card_ids - mobile_survey_card_ids
            cards_to_update = mobile_survey_card_ids & form_card_ids

            for card_id in cards_to_add:
                models.MobileSurveyXCard.objects.create(
                    card=models.CardModel.objects.get(cardid=card_id),
                    mobile_survey=mobile_survey,
                    sortorder=data['cards'].index(card_id))

            for card_id in cards_to_update:
                mobile_survey_card = models.MobileSurveyXCard.objects.filter(
                    mobile_survey=mobile_survey).get(
                        card=models.CardModel.objects.get(cardid=card_id))
                mobile_survey_card.sortorder = data['cards'].index(card_id)
                mobile_survey_card.save()

            for card_id in cards_to_remove:
                models.MobileSurveyXCard.objects.filter(
                    card=models.CardModel.objects.get(cardid=card_id),
                    mobile_survey=mobile_survey).delete()

        if mobile_survey.active != data['active']:
            # notify users in the mobile_survey that the state of the mobile_survey has changed
            if data['active']:
                self.notify_mobile_survey_start(request, mobile_survey)
            else:
                self.notify_mobile_survey_end(request, mobile_survey)
        mobile_survey.name = data['name']
        mobile_survey.description = data['description']
        if data['startdate'] != '':
            mobile_survey.startdate = data['startdate']
        if data['enddate'] != '':
            mobile_survey.enddate = data['enddate']
        mobile_survey.datadownloadconfig = data['datadownloadconfig']
        mobile_survey.active = data['active']
        mobile_survey.tilecache = data['tilecache']
        polygons = []

        # 'bounds' may arrive either as a GeoJSON dict or as a JSON string;
        # only strings have .upper(), so an AttributeError means it is already a dict
        try:
            data['bounds'].upper()
            data['bounds'] = json.loads(data['bounds'])
        except AttributeError:
            pass

        if 'features' in data['bounds']:
            for feature in data['bounds']['features']:
                for coord in feature['geometry']['coordinates']:
                    polygons.append(Polygon(coord))

        mobile_survey.bounds = MultiPolygon(polygons)
        mobile_survey.lasteditedby = self.request.user
        try:
            connection_error = False
            with transaction.atomic():
                mobile_survey.save()
        except Exception as e:
            if connection_error == False:
                error_title = _('Unable to save survey')
                if e.strerror == 'Connection refused':
                    error_message = "Unable to connect to CouchDB"
                else:
                    error_message = e.message
                connection_error = JSONResponse(
                    {
                        'success': False,
                        'message': error_message,
                        'title': error_title
                    },
                    status=500)
            return connection_error

        return JSONResponse({'success': True, 'mobile_survey': mobile_survey})
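Based on the keys the handler reads, the request body is a JSON document along these lines (a sketch; every value is a placeholder):

# Illustrative shape of the JSON body this handler expects, inferred from the keys
# it reads above; all values are placeholders.
payload = {
    'id': None,                    # None creates a new survey, an existing pk updates one
    'name': 'Example survey',
    'description': 'Example description',
    'startdate': '2020-01-01',     # an empty string leaves the date unchanged
    'enddate': '2020-12-31',
    'active': True,
    'tilecache': '',
    'datadownloadconfig': {},
    'bounds': {'type': 'FeatureCollection', 'features': []},  # may also arrive as a JSON string
    'cards': [],                   # card ids in the desired sort order
    'users': [],                   # only read when updating an existing survey
    'groups': [],                  # only read when updating an existing survey
}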
コード例 #25
ファイル: graph_tests.py プロジェクト: fargeo/arches
    def test_save_and_update_dont_orphan_records_in_the_db(self):
        """
        test that the proper number of nodes, edges, nodegroups, and cards are persisted
        to the database during save and update opertaions

        """

        nodes_count_before = models.Node.objects.count()
        edges_count_before = models.Edge.objects.count()
        nodegroups_count_before = models.NodeGroup.objects.count()
        card_count_before = models.CardModel.objects.count()

        # test that data is persisted properly when creating a new graph
        graph = Graph.new(is_resource=False)

        nodes_count_after = models.Node.objects.count()
        edges_count_after = models.Edge.objects.count()
        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodes_count_after-nodes_count_before, 1)
        self.assertEqual(edges_count_after-edges_count_before, 0)
        self.assertEqual(nodegroups_count_after-nodegroups_count_before, 1)
        self.assertEqual(card_count_after-card_count_before, 1)

        # test that data is persisted properly during an append operation
        graph.append_branch('http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by', graphid=self.NODE_NODETYPE_GRAPHID)
        graph.save()

        nodes_count_after = models.Node.objects.count()
        edges_count_after = models.Edge.objects.count()
        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodes_count_after-nodes_count_before, 3)
        self.assertEqual(edges_count_after-edges_count_before, 2)
        self.assertEqual(nodegroups_count_after-nodegroups_count_before, 2)
        self.assertEqual(card_count_after-card_count_before, 2)

        # test that removing a node group by setting it to None, removes it from the db
        node_to_update = None
        for node_id, node in graph.nodes.iteritems():
            if node.name == 'Node':
                self.assertTrue(node.is_collector)
                node_to_update = JSONDeserializer().deserialize(JSONSerializer().serialize(node))

        node_to_update['nodegroup_id'] = None
        graph.update_node(node_to_update.copy())
        graph.save()

        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodegroups_count_after-nodegroups_count_before, 1)
        self.assertEqual(card_count_after-card_count_before, 1)

        # test that adding back a node group adds it back to the db
        node_to_update['nodegroup_id'] = node_to_update['nodeid']
        graph.update_node(node_to_update)
        graph.save()

        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodegroups_count_after-nodegroups_count_before, 2)
        self.assertEqual(card_count_after-card_count_before, 2)
コード例 #26
    def post(self, request):
        if self.action == "update_tile":
            json = request.POST.get("data", None)
            accepted_provisional = request.POST.get("accepted_provisional",
                                                    None)
            if accepted_provisional is not None:
                accepted_provisional_edit = JSONDeserializer().deserialize(
                    accepted_provisional)
            if json is not None:
                data = JSONDeserializer().deserialize(json)
                data["resourceinstance_id"] = data.get("resourceinstance_id", "")
                if data["resourceinstance_id"] == "":
                    data["resourceinstance_id"] = uuid.uuid4()
                try:
                    models.ResourceInstance.objects.get(
                        pk=data["resourceinstance_id"])
                except ObjectDoesNotExist:
                    try:
                        resource = Resource(
                            uuid.UUID(str(data["resourceinstance_id"])))
                    except ValueError:
                        resource = Resource()
                    graphid = models.Node.objects.filter(
                        nodegroup=data["nodegroup_id"])[0].graph_id
                    resource.graph_id = graphid
                    try:
                        resource.save(user=request.user)
                        data["resourceinstance_id"] = resource.pk
                        resource.index()
                    except ModelInactiveError as e:
                        message = _(
                            "Unable to save. Please verify the model status is active"
                        )
                        return JSONResponse(
                            {
                                "status": "false",
                                "message": [_(e.title),
                                            _(str(message))]
                            },
                            status=500)
                tile_id = data["tileid"]
                resource_instance = models.ResourceInstance.objects.get(
                    pk=data["resourceinstance_id"])
                is_active = resource_instance.graph.isactive
                if tile_id is not None and tile_id != "":
                    try:
                        old_tile = Tile.objects.get(pk=tile_id)
                    except ObjectDoesNotExist as e:
                        return self.handle_save_error(
                            e, _("This tile is no longer available"),
                            _("It was likely deleted by another user"))

                tile = Tile(data)

                if tile.filter_by_perm(
                        request.user, "write_nodegroup") and is_active is True:
                    try:
                        with transaction.atomic():
                            try:
                                if accepted_provisional is None:
                                    try:
                                        tile.save(request=request)
                                    except TileValidationError as e:
                                        resource_tiles = models.TileModel.objects.filter(
                                            resourceinstance=tile.resourceinstance)
                                        if resource_tiles.count() == 0:
                                            Resource.objects.get(pk=tile.resourceinstance_id).delete(request.user)
                                        title = _(
                                            "Unable to save. Please verify your input is valid"
                                        )
                                        return self.handle_save_error(
                                            e, tile_id, title=title)
                                    except ModelInactiveError as e:
                                        message = _(
                                            "Unable to save. Please verify the model status is active"
                                        )
                                        return JSONResponse(
                                            {
                                                "status": "false",
                                                "message": [_(e.title), _(str(message))]
                                            },
                                            status=500)
                                else:
                                    if accepted_provisional is not None:
                                        provisional_editor = User.objects.get(
                                            pk=accepted_provisional_edit["user"])
                                        prov_edit_log_details = {
                                            "user": request.user,
                                            "action": "accept edit",
                                            "edit": accepted_provisional_edit,
                                            "provisional_editor": provisional_editor,
                                        }
                                    tile.save(request=request,
                                              provisional_edit_log_details=prov_edit_log_details)

                                if tile.provisionaledits is not None and str(request.user.id) in tile.provisionaledits:
                                    tile.data = tile.provisionaledits[str(request.user.id)]["value"]

                            except Exception as e:
                                return self.handle_save_error(e, tile_id)

                            tile.after_update_all()
                            update_system_settings_cache(tile)

                    except Exception as e:
                        return self.handle_save_error(e, tile_id)

                    return JSONResponse(tile)
                elif is_active is False:
                    response = {
                        "status":
                        "false",
                        "message": [
                            _("Request Failed"),
                            _("Unable to Save. Verify model status is active")
                        ]
                    }
                    return JSONResponse(response, status=500)
                else:
                    return JSONErrorResponse(_("Request Failed"),
                                             _("Permission Denied"))

        if self.action == "reorder_tiles":
            json = request.body
            if json is not None:
                data = JSONDeserializer().deserialize(json)

                if "tiles" in data and len(data["tiles"]) > 0:
                    sortorder = 0
                    with transaction.atomic():
                        for tile in data["tiles"]:
                            t = Tile(tile)
                            if t.filter_by_perm(request.user,
                                                "write_nodegroup"):
                                t.sortorder = sortorder
                                t.save(update_fields=["sortorder"],
                                       request=request)
                                sortorder = sortorder + 1

                    return JSONResponse(data)

        if self.action == "delete_provisional_tile":
            user = request.POST.get("user", None)
            tileid = request.POST.get("tileid", None)
            users = request.POST.get("users", None)
            tile = Tile.objects.get(tileid=tileid)
            is_provisional = tile.is_provisional()

            if tileid is not None and user is not None:
                provisionaledits = self.delete_provisional_edit(
                    tile, user, request)

            elif tileid is not None and users is not None:
                users = jsonparser.loads(users)
                for user in users:
                    self.delete_provisional_edit(tile, user, request)

            if is_provisional == True:
                return JSONResponse({"result": "delete"})
            else:
                return JSONResponse({"result": "success"})

        return HttpResponseNotFound()
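For the update_tile action, the handler reads a form field named data containing a JSON-serialized tile (plus an optional accepted_provisional field). Based on the keys accessed above, that payload looks roughly like the following sketch; ids and values are placeholders, and the nested 'data' key is an assumption based on how Tile(data) is constructed:

# Illustrative shape of the 'data' form field for the update_tile action; ids and
# values are placeholders, and the nested 'data' key is an assumption inferred from
# the Tile(data) construction above.
tile_payload = {
    'tileid': '',                     # empty for a new tile, an existing tile id to update
    'resourceinstance_id': '',        # empty lets the view create a new resource instance
    'nodegroup_id': '<nodegroupid>',  # used to find the graph when creating a resource
    'data': {'<nodeid>': 'a node value'},
}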
コード例 #27
ファイル: term_filter.py プロジェクト: michaeltfisher/arches
    def append_dsl(self, search_results_object, permitted_nodegroups,
                   include_provisional):
        search_query = Bool()
        querystring_params = self.request.GET.get(details['componentname'], '')
        for term in JSONDeserializer().deserialize(querystring_params):
            if term['type'] == 'term' or term['type'] == 'string':
                string_filter = Bool()
                if term['type'] == 'term':
                    string_filter.must(
                        Match(field='strings.string',
                              query=term['value'],
                              type='phrase'))
                elif term['type'] == 'string':
                    string_filter.should(
                        Match(field='strings.string',
                              query=term['value'],
                              type='phrase_prefix'))
                    string_filter.should(
                        Match(field='strings.string.folded',
                              query=term['value'],
                              type='phrase_prefix'))

                if include_provisional is False:
                    string_filter.must_not(
                        Match(field='strings.provisional',
                              query='true',
                              type='phrase'))
                elif include_provisional == 'only provisional':
                    string_filter.must_not(
                        Match(field='strings.provisional',
                              query='false',
                              type='phrase'))

                string_filter.filter(
                    Terms(field='strings.nodegroup_id',
                          terms=permitted_nodegroups))
                nested_string_filter = Nested(path='strings',
                                              query=string_filter)
                if term['inverted']:
                    search_query.must_not(nested_string_filter)
                else:
                    search_query.must(nested_string_filter)
                    # need to set min_score because the query returns results with score 0 and those have to be removed, which I don't think it should be doing
                    search_results_object['query'].min_score('0.01')
            elif term['type'] == 'concept':
                concept_ids = _get_child_concepts(term['value'])
                conceptid_filter = Bool()
                conceptid_filter.filter(
                    Terms(field='domains.conceptid', terms=concept_ids))
                conceptid_filter.filter(
                    Terms(field='domains.nodegroup_id',
                          terms=permitted_nodegroups))

                if include_provisional is False:
                    conceptid_filter.must_not(
                        Match(field='domains.provisional',
                              query='true',
                              type='phrase'))
                elif include_provisional == 'only provisional':
                    conceptid_filter.must_not(
                        Match(field='domains.provisional',
                              query='false',
                              type='phrase'))

                nested_conceptid_filter = Nested(path='domains',
                                                 query=conceptid_filter)
                if term['inverted']:
                    search_query.must_not(nested_conceptid_filter)
                else:
                    search_query.filter(nested_conceptid_filter)

        search_results_object['query'].add_query(search_query)
コード例 #28
ファイル: resource.py プロジェクト: archesproject/arches
    def delete(self, request):
        data = JSONDeserializer().deserialize(request.body)
        self.apply_permissions(data, request.user, revert=True)
        return JSONResponse(data)
コード例 #29
ファイル: resource_test.py プロジェクト: Carreau/arches
    def setUpClass(cls):
        se = SearchEngineFactory().create()
        se.delete_index(index='terms,concepts')
        se.delete_index(index='resources')

        cls.client = Client()
        cls.client.login(username='******', password='******')

        models.ResourceInstance.objects.all().delete()
        with open(
                os.path.join(
                    'tests/fixtures/resource_graphs/Resource Test Model.json'),
                'rU') as f:
            archesfile = JSONDeserializer().deserialize(f)
        resource_graph_importer(archesfile['graph'])

        cls.search_model_graphid = 'e503a445-fa5f-11e6-afa8-14109fd34195'
        cls.search_model_cultural_period_nodeid = '7a182580-fa60-11e6-96d1-14109fd34195'
        cls.search_model_creation_date_nodeid = '1c1d05f5-fa60-11e6-887f-14109fd34195'
        cls.search_model_destruction_date_nodeid = 'e771b8a1-65fe-11e7-9163-14109fd34195'
        cls.search_model_name_nodeid = '2fe14de3-fa61-11e6-897b-14109fd34195'
        cls.search_model_sensitive_info_nodeid = '57446fae-65ff-11e7-b63a-14109fd34195'
        cls.search_model_geom_nodeid = '3ebc6785-fa61-11e6-8c85-14109fd34195'

        cls.user = User.objects.create_user('test', '*****@*****.**',
                                            'test')
        cls.user.save()
        cls.user.groups.add(Group.objects.get(name='Guest'))

        nodegroup = models.NodeGroup.objects.get(
            pk=cls.search_model_destruction_date_nodeid)
        assign_perm('no_access_to_nodegroup', cls.user, nodegroup)

        # Add a concept that defines a min and max date
        concept = {
            "id": "00000000-0000-0000-0000-000000000001",
            "legacyoid": "ARCHES",
            "nodetype": "ConceptScheme",
            "values": [],
            "subconcepts": [{
                "values": [{
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                }, {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                }, {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                }],
                "relationshiptype": "hasTopConcept",
                "nodetype": "Concept",
                "id": "",
                "legacyoid": "",
                "subconcepts": [],
                "parentconcepts": [],
                "relatedconcepts": []
            }]
        }

        post_data = JSONSerializer().serialize(concept)
        content_type = 'application/x-www-form-urlencoded'
        response = cls.client.post(
            reverse(
                'concept',
                kwargs={'conceptid': '00000000-0000-0000-0000-000000000001'}),
            post_data, content_type)
        response_json = json.loads(response.content)
        valueid = response_json['subconcepts'][0]['values'][0]['id']
        cls.conceptid = response_json['subconcepts'][0]['id']

        # Add resource with Name, Cultural Period, Creation Date and Geometry
        cls.test_resource = Resource(graph_id=cls.search_model_graphid)

        # Add Name
        tile = Tile(data={cls.search_model_name_nodeid: 'Test Name 1'},
                    nodegroup_id=cls.search_model_name_nodeid)
        cls.test_resource.tiles.append(tile)

        # Add Cultural Period
        tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                    nodegroup_id=cls.search_model_cultural_period_nodeid)
        cls.test_resource.tiles.append(tile)

        # Add Creation Date
        tile = Tile(data={cls.search_model_creation_date_nodeid: '1941-01-01'},
                    nodegroup_id=cls.search_model_creation_date_nodeid)
        cls.test_resource.tiles.append(tile)

        # Add Geometry
        cls.geom = {
            "type": "FeatureCollection",
            "features": [{
                "geometry": {
                    "type": "Point",
                    "coordinates": [0, 0]
                },
                "type": "Feature",
                "properties": {}
            }]
        }
        tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                    nodegroup_id=cls.search_model_geom_nodeid)
        cls.test_resource.tiles.append(tile)

        cls.test_resource.save()

        # add delay to allow for indexes to be updated
        time.sleep(1)
コード例 #30
    def setUpClass(cls):
        # This runs once per instantiation
        cls.loadOntology()
        cls.factory = RequestFactory()
        cls.token = "abc123"
        cls.client = Client(HTTP_AUTHORIZATION="Bearer %s" % cls.token)

        sql_str = CREATE_TOKEN_SQL.format(token=cls.token, user_id=1)
        cursor = connection.cursor()
        cursor.execute(sql_str)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/jsonld_test_thesaurus.xml")
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/jsonld_test_collections.xml")
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-thesaurus.xml")
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-collections.xml")
        ret = skos.save_concepts_from_skos(rdf)

        # Load up the models and data only once
        with open(os.path.join("tests/fixtures/jsonld_base/models/test_1_basic_object.json"), "rU") as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile["graph"])

        with open(os.path.join("tests/fixtures/jsonld_base/models/test_2_complex_object.json"), "rU") as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2["graph"])

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-thesaurus.xml")
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5098-collections.xml")
        ret = skos.save_concepts_from_skos(rdf)

        with open(os.path.join("tests/fixtures/jsonld_base/models/5098_concept_list.json"), "rU") as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile["graph"])

        management.call_command("datatype", "register", source="tests/fixtures/datatypes/color.py")
        management.call_command("datatype", "register", source="tests/fixtures/datatypes/semantic_like.py")

        with open(os.path.join("tests/fixtures/jsonld_base/models/5299-basic.json"), "rU") as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2["graph"])
        with open(os.path.join("tests/fixtures/jsonld_base/models/5299_complex.json"), "rU") as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2["graph"])

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5600-external-thesaurus.xml")
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file("tests/fixtures/jsonld_base/rdm/5600-external-collections.xml")
        ret = skos.save_concepts_from_skos(rdf)

        # Load up the models and data only once
        with open(os.path.join("tests/fixtures/jsonld_base/models/5121_false_ambiguity.json"), "rU") as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile["graph"])

        with open(os.path.join("tests/fixtures/jsonld_base/models/5121_external_model.json"), "rU") as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile["graph"])
コード例 #31
    def post(self, request):
        if self.action == 'update_tile':
            json = request.POST.get('data', None)
            accepted_provisional = request.POST.get('accepted_provisional',
                                                    None)
            if accepted_provisional is not None:
                accepted_provisional_edit = JSONDeserializer().deserialize(
                    accepted_provisional)
            if json is not None:
                data = JSONDeserializer().deserialize(json)
                data['resourceinstance_id'] = data.get('resourceinstance_id', '')
                if data['resourceinstance_id'] == '':
                    data['resourceinstance_id'] = uuid.uuid4()
                try:
                    models.ResourceInstance.objects.get(
                        pk=data['resourceinstance_id'])
                except ObjectDoesNotExist:
                    resource = Resource()
                    graphid = models.Node.objects.filter(
                        nodegroup=data['nodegroup_id'])[0].graph_id
                    resource.graph_id = graphid
                    try:
                        resource.save(user=request.user)
                        data['resourceinstance_id'] = resource.pk
                        resource.index()
                    except ModelInactiveError as e:
                        message = _(
                            'Unable to save. Please verify the model status is active'
                        )
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': [_(e.title),
                                            _(str(message))]
                            },
                            status=500)
                tile_id = data['tileid']
                resource_instance = models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
                is_active = resource_instance.graph.isactive
                if tile_id is not None and tile_id != '':
                    try:
                        old_tile = Tile.objects.get(pk=tile_id)
                    except ObjectDoesNotExist as e:
                        return self.handle_save_error(
                            e, _('This tile is no longer available'),
                            _('It was likely deleted by another user'))

                tile = Tile(data)

                if tile.filter_by_perm(
                        request.user, 'write_nodegroup') and is_active is True:
                    try:
                        with transaction.atomic():
                            try:
                                if accepted_provisional is None:
                                    try:
                                        tile.save(request=request)
                                    except TileValidationError as e:
                                        resource_tiles = models.TileModel.objects.filter(
                                            resourceinstance=tile.resourceinstance)
                                        if resource_tiles.count() == 0:
                                            Resource.objects.get(pk=tile.resourceinstance_id).delete(request.user)
                                        title = _(
                                            'Unable to save. Please verify your input is valid'
                                        )
                                        return self.handle_save_error(
                                            e, tile_id, title=title)
                                    except ModelInactiveError as e:
                                        message = _(
                                            'Unable to save. Please verify the model status is active'
                                        )
                                        return JSONResponse(
                                            {
                                                'status': 'false',
                                                'message': [_(e.title), _(str(message))]
                                            },
                                            status=500)
                                else:
                                    if accepted_provisional is not None:
                                        provisional_editor = User.objects.get(
                                            pk=accepted_provisional_edit["user"])
                                        prov_edit_log_details = {
                                            "user": request.user,
                                            "action": "accept edit",
                                            "edit": accepted_provisional_edit,
                                            "provisional_editor": provisional_editor
                                        }
                                    tile.save(request=request,
                                              provisional_edit_log_details=prov_edit_log_details)

                                if tile.provisionaledits is not None and str(
                                        request.user.id) in tile.provisionaledits:
                                    tile.data = tile.provisionaledits[str(request.user.id)]['value']

                            except Exception as e:
                                return self.handle_save_error(e, tile_id)

                            tile.after_update_all()
                            update_system_settings_cache(tile)

                    except Exception as e:
                        return self.handle_save_error(e, tile_id)

                    return JSONResponse(tile)
                elif is_active is False:
                    response = {
                        'status': 'false',
                        'message': [_('Request Failed'),
                                    _('Unable to Save. Verify model status is active')]
                    }
                    return JSONResponse(response, status=500)
                else:
                    response = {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    }
                    return JSONResponse(response, status=500)

        if self.action == 'reorder_tiles':
            json = request.body
            if json is not None:
                data = JSONDeserializer().deserialize(json)

                if 'tiles' in data and len(data['tiles']) > 0:
                    sortorder = 0
                    with transaction.atomic():
                        for tile in data['tiles']:
                            t = Tile(tile)
                            if t.filter_by_perm(request.user,
                                                'write_nodegroup'):
                                t.sortorder = sortorder
                                t.save(update_fields=['sortorder'],
                                       request=request)
                                sortorder = sortorder + 1

                    return JSONResponse(data)

        if self.action == 'delete_provisional_tile':
            user = request.POST.get('user', None)
            tileid = request.POST.get('tileid', None)
            users = request.POST.get('users', None)
            tile = Tile.objects.get(tileid=tileid)
            is_provisional = tile.is_provisional()

            if tileid is not None and user is not None:
                provisionaledits = self.delete_provisional_edit(
                    tile, user, request)

            elif tileid is not None and users is not None:
                users = jsonparser.loads(users)
                for user in users:
                    self.delete_provisional_edit(tile, user, request)

            if is_provisional == True:
                return JSONResponse({'result': 'delete'})
            else:
                return JSONResponse({'result': 'success'})

        return HttpResponseNotFound()
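For orientation, here is a minimal sketch of exercising the `update_tile` branch above with Django's test client. The endpoint path, credentials, and nodegroup id are placeholders, and a logged-in user with `write_nodegroup` permission on that nodegroup is assumed; the view reads the tile from a `data` form field.

import json

from django.test import Client

client = Client()
client.login(username='admin', password='admin')  # placeholder credentials

tile_payload = {
    'tileid': '',                   # blank: the view treats this as a new tile
    'resourceinstance_id': '',      # blank: the view creates a new resource instance
    'nodegroup_id': '20000000-0000-0000-0000-000000000002',  # placeholder nodegroup id
    'data': {},
    'parenttile_id': None,
    'sortorder': 0,
}
# '/tile' is a placeholder path for whatever URL routes to this view with action='update_tile'.
response = client.post('/tile', {'data': json.dumps(tile_payload)})
saved_tile = json.loads(response.content)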
Code example #32
0
File: graph.py Project: aprilwebster-spatial/arches
    def post(self, request, graphid=None):
        ret = {}
        try:
            if self.action == 'import_graph':
                graph_file = request.FILES.get('importedGraph').read()
                graphs = JSONDeserializer().deserialize(graph_file)['graph']
                ret = GraphImporter.import_graph(graphs)
            else:
                if graphid is not None:
                    graph = Graph.objects.get(graphid=graphid)
                data = JSONDeserializer().deserialize(request.body)

                if self.action == 'new_graph':
                    isresource = data['isresource'] if 'isresource' in data else False
                    name = _('New Resource Model') if isresource else _('New Branch')
                    author = request.user.first_name + ' ' + request.user.last_name
                    ret = Graph.new(name=name, is_resource=isresource, author=author)

                elif self.action == 'update_node':
                    updated_values = graph.update_node(data)
                    graph.save()
                    ret = JSONSerializer().serializeToPython(graph)
                    ret['updated_values'] = updated_values

                elif self.action == 'update_node_layer':
                    nodeid = uuid.UUID(str(data.get('nodeid')))
                    node = graph.nodes[nodeid]
                    node.config = data['config']
                    ret = graph
                    node.save()

                elif self.action == 'append_branch':
                    ret = graph.append_branch(data['property'], nodeid=data['nodeid'], graphid=data['graphid'])
                    ret = ret.serialize()
                    ret['nodegroups'] = graph.get_nodegroups()
                    ret['cards'] = graph.get_cards()
                    ret['widgets'] = graph.get_widgets()
                    graph.save()

                elif self.action == 'append_node':
                    ret = graph.append_node(nodeid=data['nodeid'])
                    graph.save()

                elif self.action == 'move_node':
                    ret = graph.move_node(data['nodeid'], data['property'], data['newparentnodeid'])
                    graph.save()

                elif self.action == 'export_branch':
                    clone_data = graph.copy(root=data)
                    clone_data['copy'].save()
                    ret = {
                        'success': True,
                        'graphid': clone_data['copy'].pk
                    }

                elif self.action == 'clone_graph':
                    clone_data = graph.copy()
                    ret = clone_data['copy']
                    ret.save()
                    ret.copy_functions(graph, [clone_data['nodes'], clone_data['nodegroups']])

                elif self.action == 'reorder_nodes':
                    json = request.body
                    if json is not None:
                        data = JSONDeserializer().deserialize(json)

                        if 'nodes' in data and len(data['nodes']) > 0:
                            sortorder = 0
                            with transaction.atomic():
                                for node in data['nodes']:
                                    no = models.Node.objects.get(pk=node['nodeid'])
                                    no.sortorder = sortorder
                                    no.save()
                                    sortorder = sortorder + 1
                            ret = data

            return JSONResponse(ret)
        except GraphValidationError as e:
            return JSONResponse({'status': 'false', 'success': False, 'message': e.message, 'title': e.title}, status=500)
Code example #33
0
    def load_file(self, archesjson):
        resources = []
        with open(archesjson, 'r') as f:
            resources = JSONDeserializer().deserialize(f.read())
        return resources['resources']
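For reference, the expected file shape (a JSON document with a top-level "resources" list) can be read with the standard library alone; this is only an illustrative stand-in, not the project's own loader, and the path is a placeholder.

import json

def load_resources(path):
    # Read an Arches-style business data export and return its "resources" list.
    with open(path, 'r') as f:
        return json.load(f)['resources']

resources = load_resources('business_data.json')  # placeholder path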
Code example #34
0
    def get(self, request):
        if self.action == 'get_permission_manager_data':
            identities = []
            for group in Group.objects.all():
                identities.append({
                    'name': group.name,
                    'type': 'group',
                    'id': group.pk,
                    'default_permissions': group.permissions.all()
                })
            for user in User.objects.filter(is_superuser=False):
                groups = []
                default_perms = []
                for group in user.groups.all():
                    groups.append(group.name)
                    default_perms = default_perms + list(
                        group.permissions.all())
                identities.append({
                    'name': user.email or user.username,
                    'groups': ', '.join(groups),
                    'type': 'user',
                    'id': user.pk,
                    'default_permissions': set(default_perms)
                })

            content_type = ContentType.objects.get_for_model(models.NodeGroup)
            nodegroup_permissions = Permission.objects.filter(
                content_type=content_type)
            ret = {
                'identities': identities,
                'permissions': nodegroup_permissions
            }
            return JSONResponse(ret)

        nodegroup_ids = JSONDeserializer().deserialize(
            request.GET.get('nodegroupIds'))
        identityId = request.GET.get('identityId')
        identityType = request.GET.get('identityType')

        ret = []
        if identityType == 'group':
            identity = Group.objects.get(pk=identityId)
            for nodegroup_id in nodegroup_ids:
                nodegroup = models.NodeGroup.objects.get(pk=nodegroup_id)
                perms = [{
                    'codename': codename,
                    'name': self.get_perm_name(codename).name
                } for codename in get_group_perms(identity, nodegroup)]
                ret.append({'perms': perms, 'nodegroup_id': nodegroup_id})
        else:
            identity = User.objects.get(pk=identityId)
            for nodegroup_id in nodegroup_ids:
                nodegroup = models.NodeGroup.objects.get(pk=nodegroup_id)
                perms = [{
                    'codename': codename,
                    'name': self.get_perm_name(codename).name
                } for codename in get_user_perms(identity, nodegroup)]

                # only get the group perms ("defaults") if no user defined object settings have been saved
                if len(perms) == 0:
                    perms = [{
                        'codename': codename,
                        'name': self.get_perm_name(codename).name
                    } for codename in set(get_group_perms(identity, nodegroup))]
                ret.append({'perms': perms, 'nodegroup_id': nodegroup_id})

        return JSONResponse(ret)
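A sketch of querying the second branch of this view, assuming a placeholder URL and that `nodegroupIds` is passed as a JSON-encoded array alongside an identity id and type; the route and the nodegroup UUID are assumptions here, not the project's actual values.

import json

from django.test import Client

client = Client()
# '/permission_data' and the nodegroup UUID below are placeholders.
params = {
    'nodegroupIds': json.dumps(['20000000-0000-0000-0000-000000000001']),
    'identityId': 1,
    'identityType': 'group',
}
response = client.get('/permission_data', params)
perms_by_nodegroup = json.loads(response.content)  # [{'perms': [...], 'nodegroup_id': ...}, ...]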
Code example #35
0
File: graph_tests.py Project: webjunkie/arches
    def test_save_and_update_dont_orphan_records_in_the_db(self):
        """
        test that the proper number of nodes, edges, nodegroups, and cards are persisted
        to the database during save and update opertaions

        """

        nodes_count_before = models.Node.objects.count()
        edges_count_before = models.Edge.objects.count()
        nodegroups_count_before = models.NodeGroup.objects.count()
        card_count_before = models.CardModel.objects.count()

        # test that data is persisted properly when creating a new graph
        graph = Graph.new(is_resource=False)

        nodes_count_after = models.Node.objects.count()
        edges_count_after = models.Edge.objects.count()
        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodes_count_after - nodes_count_before, 1)
        self.assertEqual(edges_count_after - edges_count_before, 0)
        self.assertEqual(nodegroups_count_after - nodegroups_count_before, 1)
        self.assertEqual(card_count_after - card_count_before, 1)

        # test that data is persisted properly during an append operation
        graph.append_branch('P1_is_identified_by',
                            graphid=self.NODE_NODETYPE_GRAPHID)
        graph.save()

        nodes_count_after = models.Node.objects.count()
        edges_count_after = models.Edge.objects.count()
        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodes_count_after - nodes_count_before, 3)
        self.assertEqual(edges_count_after - edges_count_before, 2)
        self.assertEqual(nodegroups_count_after - nodegroups_count_before, 2)
        self.assertEqual(card_count_after - card_count_before, 2)

        # test that removing a node group by setting it to None, removes it from the db
        node_to_update = None
        for node_id, node in graph.nodes.iteritems():
            if node.name == 'Node':
                self.assertTrue(node.is_collector)
                node_to_update = JSONDeserializer().deserialize(
                    JSONSerializer().serialize(node))

        node_to_update['nodegroup_id'] = None
        graph.update_node(node_to_update.copy())
        graph.save()

        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodegroups_count_after - nodegroups_count_before, 1)
        self.assertEqual(card_count_after - card_count_before, 1)

        # test that adding back a node group adds it back to the db
        node_to_update['nodegroup_id'] = node_to_update['nodeid']
        graph.update_node(node_to_update)
        graph.save()

        nodegroups_count_after = models.NodeGroup.objects.count()
        card_count_after = models.CardModel.objects.count()

        self.assertEqual(nodegroups_count_after - nodegroups_count_before, 2)
        self.assertEqual(card_count_after - card_count_before, 2)
Code example #36
0
    def get(self, request, graphid, nodeid=None):
        if self.action == 'export_graph':
            graph = get_graphs_for_export([graphid])
            graph['metadata'] = system_metadata()
            f = JSONSerializer().serialize(graph, indent=4)
            graph_name = JSONDeserializer().deserialize(f)['graph'][0]['name']

            response = HttpResponse(f, content_type='json/plain')
            response['Content-Disposition'] = 'attachment; filename="%s.json"' % graph_name
            return response
        elif self.action == 'export_mapping_file':
            files_for_export = create_mapping_configuration_file(graphid, True)
            file_name = Graph.objects.get(graphid=graphid).name

            buffer = StringIO()

            with zipfile.ZipFile(buffer, 'w', zipfile.ZIP_DEFLATED) as zip:
                for f in files_for_export:
                    f['outputfile'].seek(0)
                    zip.writestr(f['name'], f['outputfile'].read())

            zip.close()
            buffer.flush()
            zip_stream = buffer.getvalue()
            buffer.close()

            response = HttpResponse()
            response['Content-Disposition'] = 'attachment; filename=' + file_name + '.zip'
            response['Content-length'] = str(len(zip_stream))
            response['Content-Type'] = 'application/zip'
            response.write(zip_stream)
            return response

        elif self.action == 'get_domain_connections':
            res = []
            graph = Graph.objects.get(graphid=graphid)
            ontology_class = request.GET.get('ontology_class', None)
            ret = graph.get_valid_domain_ontology_classes()
            for r in ret:
                res.append({
                    'ontology_property': r['ontology_property'],
                    'ontology_classes': [c for c in r['ontology_classes']]
                })
            return JSONResponse(res)

        else:
            graph = Graph.objects.get(graphid=graphid)
            if self.action == 'get_related_nodes':
                parent_nodeid = request.GET.get('parent_nodeid', None)
                ret = graph.get_valid_ontology_classes(
                    nodeid=nodeid, parent_nodeid=parent_nodeid)

            elif self.action == 'get_valid_domain_nodes':
                if nodeid == '':
                    nodeid = None
                ret = graph.get_valid_domain_ontology_classes(nodeid=nodeid)

            return JSONResponse(ret)

        return HttpResponseNotFound()
Code example #37
0
    def __init__(self, file=None, mapping_file=None, relations_file=None):
        self.business_data = ''
        self.mapping = None
        self.graphs = ''
        self.reference_data = ''
        self.business_data = ''
        self.file_format = ''
        self.relations = ''

        if not file:
            file = settings.BUSINESS_DATA_FILES
        else:
            file = [file]

        if mapping_file == None:
            try:
                mapping_file = [file[0].split('.')[0] + '.mapping']
            except:
                print '*' * 80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*' * 80
                sys.exit()
        else:
            try:
                mapping_file = [mapping_file]
            except:
                print '*' * 80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*' * 80
                sys.exit()

        if relations_file == None:
            try:
                relations_file = [file[0].split('.')[0] + '.relations']
            except:
                pass

        for path in relations_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.relations = csv.DictReader(
                        open(relations_file[0], 'r'))

        for path in mapping_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.mapping = json.load(open(path, 'r'))
                else:
                    self.mapping = None

        for path in file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.file_format = file[0].split('.')[1]
                    if self.file_format == 'json':
                        with open(file[0], 'rU') as f:
                            archesfile = JSONDeserializer().deserialize(f)
                            if 'graph' in archesfile.keys():
                                self.graphs = archesfile['graph']
                            if 'reference_data' in archesfile.keys():
                                self.reference_data = archesfile[
                                    'reference_data']
                            if 'business_data' in archesfile.keys():
                                self.business_data = archesfile[
                                    'business_data']
                    elif self.file_format == 'csv':
                        data = unicodecsv.DictReader(open(file[0], 'r'),
                                                     encoding='utf-8-sig',
                                                     restkey='ADDITIONAL',
                                                     restval='MISSING')
                        self.business_data = list(data)
                else:
                    print str(file) + ' is not a valid file'
            else:
                print path + ' is not a valid path'
Code example #38
0
File: graph.py Project: tavitm/arches
    def post(self, request, graphid=None):
        ret = {}

        try:
            if self.action == 'import_graph':
                graph_file = request.FILES.get('importedGraph').read()
                graphs = JSONDeserializer().deserialize(graph_file)['graph']
                ret = GraphImporter.import_graph(graphs)
            else:
                if graphid is not None:
                    graph = Graph.objects.get(graphid=graphid)
                data = JSONDeserializer().deserialize(request.body)

                if self.action == 'new_graph':
                    isresource = data['isresource'] if 'isresource' in data else False
                    name = _('New Resource Model') if isresource else _('New Branch')
                    author = request.user.first_name + ' ' + request.user.last_name
                    ret = Graph.new(name=name,
                                    is_resource=isresource,
                                    author=author)

                elif self.action == 'update_node':
                    graph.update_node(data)
                    ret = graph
                    graph.save()

                elif self.action == 'update_node_layer':
                    nodeid = uuid.UUID(str(data.get('nodeid')))
                    node = graph.nodes[nodeid]
                    node.config = data['config']
                    ret = graph
                    node.save()

                elif self.action == 'append_branch':
                    ret = graph.append_branch(data['property'],
                                              nodeid=data['nodeid'],
                                              graphid=data['graphid'])
                    graph.save()

                elif self.action == 'move_node':
                    ret = graph.move_node(data['nodeid'], data['property'],
                                          data['newparentnodeid'])
                    graph.save()

                elif self.action == 'clone_graph':
                    clone_data = graph.copy()
                    ret = clone_data['copy']
                    ret.save()
                    ret.copy_functions(
                        graph, [clone_data['nodes'], clone_data['nodegroups']])
                    form_map = ret.copy_forms(graph, clone_data['cards'])
                    ret.copy_reports(
                        graph,
                        [form_map, clone_data['cards'], clone_data['nodes']])

            return JSONResponse(ret)
        except GraphValidationError as e:
            return JSONResponse(
                {
                    'status': 'false',
                    'message': e.message,
                    'title': e.title
                },
                status=500)
Code example #39
0
File: importer.py Project: fargeo/arches
    def __init__(self, file=None, mapping_file=None, relations_file=None):
        self.business_data = ''
        self.mapping = None
        self.graphs = ''
        self.reference_data = ''
        self.business_data = ''
        self.file_format = ''
        self.relations = ''
        csv.field_size_limit(sys.maxint)

        if not file:
            file = settings.BUSINESS_DATA_FILES
        else:
            file = [file]

        if mapping_file == None:
            try:
                mapping_file = [file[0].split('.')[0] + '.mapping']
            except:
                print '*'*80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*'*80
                sys.exit()
        else:
            try:
                mapping_file = [mapping_file]
            except:
                print '*'*80
                print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
                print '*'*80
                sys.exit()

        if relations_file == None:
            try:
                relations_file = [file[0].split('.')[0] + '.relations']
            except:
                pass

        for path in relations_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.relations = csv.DictReader(open(relations_file[0], 'r'))

        for path in mapping_file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.mapping = json.load(open(path, 'r'))
                else:
                    self.mapping = None

        for path in file:
            if os.path.exists(path):
                if isfile(join(path)):
                    self.file_format = file[0].split('.')[-1]
                    if self.file_format == 'json':
                        with open(file[0], 'rU') as f:
                            archesfile = JSONDeserializer().deserialize(f)
                            if 'graph' in archesfile.keys():
                                self.graphs = archesfile['graph']
                            if 'reference_data' in archesfile.keys():
                                self.reference_data = archesfile['reference_data']
                            if 'business_data' in archesfile.keys():
                                self.business_data = archesfile['business_data']
                    elif self.file_format == 'csv':
                        data = unicodecsv.DictReader(open(file[0], 'rU'), encoding='utf-8-sig', restkey='ADDITIONAL', restval='MISSING')
                        self.business_data = list(data)
                    elif self.file_format == 'zip':
                        shp_zipfile = os.path.basename(path)
                        shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                        unzip_dir = os.path.join(os.path.dirname(path),shp_zipfile_name)
                        unzip_file(path,unzip_dir)
                        shp = [i for i in os.listdir(unzip_dir) if i.endswith(".shp")]
                        if len(shp) == 0:
                            print '*'*80
                            print "ERROR: There is no shapefile in this zipfile."
                            print '*'*80
                            exit()
                        elif len(shp) > 1:
                            print '*'*80
                            print "ERROR: There are multiple shapefiles in this zipfile. Please load each individually:"
                            for s in shp:
                                print "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]".format(
                                    os.path.join(unzip_dir,s),mapping_file[0])
                            print '*'*80
                            exit()
                        shp_path = os.path.join(unzip_dir,shp[0])
                        self.business_data = self.shape_to_csv(shp_path)
                    elif self.file_format == 'shp':
                        self.business_data = self.shape_to_csv(path)
                else:
                    print str(file) + ' is not a valid file'
            else:
                print path + ' is not a valid path'
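A hedged example of constructing the importer above. The import path and class name are assumptions about where this __init__ actually lives, and the CSV path is a placeholder; a sibling example.mapping file is expected next to the data file.

# Assumed import path and class name; adjust to wherever this __init__ is defined.
from arches.app.utils.data_management.resources.importer import BusinessDataImporter

importer = BusinessDataImporter(file='data/example.csv')  # expects data/example.mapping as well
print(importer.file_format)         # 'csv'
print(len(importer.business_data))  # rows read from the CSV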
Code example #40
0
File: mobile_survey.py Project: michaelchamu/arches
    def post(self, request, surveyid):
        data = JSONDeserializer().deserialize(request.body)
        if models.MobileSurveyModel.objects.filter(pk=data["id"]).exists() is False:
            mobile_survey_model = models.MobileSurveyModel(
                id=surveyid, name=data["name"], createdby=self.request.user, lasteditedby=self.request.user
            )
            mobile_survey_model.save()

        mobile_survey = MobileSurvey.objects.get(pk=data["id"])
        self.update_identities(data, mobile_survey, mobile_survey.users.all(), "users", User, models.MobileSurveyXUser)
        self.update_identities(data, mobile_survey, mobile_survey.groups.all(), "groups", Group, models.MobileSurveyXGroup)

        mobile_survey_card_ids = {str(c.cardid) for c in mobile_survey.cards.all()}
        form_card_ids = set(data["cards"])
        cards_to_remove = mobile_survey_card_ids - form_card_ids
        cards_to_add = form_card_ids - mobile_survey_card_ids
        cards_to_update = mobile_survey_card_ids & form_card_ids

        for card_id in cards_to_add:
            models.MobileSurveyXCard.objects.create(
                card=models.CardModel.objects.get(cardid=card_id), mobile_survey=mobile_survey, sortorder=data["cards"].index(card_id)
            )

        for card_id in cards_to_update:
            mobile_survey_card = models.MobileSurveyXCard.objects.filter(mobile_survey=mobile_survey).get(
                card=models.CardModel.objects.get(cardid=card_id)
            )
            mobile_survey_card.sortorder = data["cards"].index(card_id)
            mobile_survey_card.save()

        for card_id in cards_to_remove:
            models.MobileSurveyXCard.objects.filter(card=models.CardModel.objects.get(cardid=card_id), mobile_survey=mobile_survey).delete()

        # TODO Disabling the following section until we make emailing users optional
        # if mobile_survey.active != data['active']:
        # notify users in the mobile_survey that the state of the mobile_survey has changed
        # if data['active']:
        #     self.notify_mobile_survey_start(request, mobile_survey)
        # else:
        #     self.notify_mobile_survey_end(request, mobile_survey)
        mobile_survey.name = data["name"]
        mobile_survey.description = data["description"]
        mobile_survey.onlinebasemaps = data["onlinebasemaps"]
        if data["startdate"] != "":
            mobile_survey.startdate = data["startdate"]
        if data["enddate"] != "":
            mobile_survey.enddate = data["enddate"]
        mobile_survey.datadownloadconfig = data["datadownloadconfig"]
        mobile_survey.active = data["active"]
        mobile_survey.tilecache = data["tilecache"]
        polygons = []

        # try:
        #     data['bounds'].upper()
        #     data['bounds'] = json.loads(data['bounds'])
        # except AttributeError as e:
        #     print('bounds is not a string')

        if "features" in data["bounds"]:
            for feature in data["bounds"]["features"]:
                for coord in feature["geometry"]["coordinates"]:
                    polygons.append(Polygon(coord))

        elif len(polygons) == 0:
            try:
                if data["bounds"]["type"] == "MultiPolygon":
                    for poly in data["bounds"]["coordinates"]:
                        for coords in poly:
                            polygons.append(Polygon(coords))
            except AttributeError as e:
                print("bounds is not a geojson geometry object")

        mobile_survey.bounds = MultiPolygon(polygons)
        mobile_survey.lasteditedby = self.request.user

        try:
            with transaction.atomic():
                mobile_survey.save()
        except ConnectionRefusedError as e:
            error_title = _("Unable to save collector project")
            error_message = _("Failed to connect to a CouchDB service")
            connection_error = JSONErrorResponse(error_title, error_message)
            return connection_error
        except Exception as e:
            error_title = _("Unable to save collector project")
            logger.exception(e)
            connection_error = JSONErrorResponse(error_title, e)
            return connection_error

        return JSONResponse({"success": True, "mobile_survey": mobile_survey})
Code example #41
0
    def test_mobile_survey(self):
        data = {  # note that cards and resourceid belong to datatype testing model in testing_prj/pkg
            "id": self.survey_id,
            "name": "test_project",
            "active": True,
            "createdby_id": None,
            "lasteditedby_id": None,
            "startdate": "2020-01-26",
            "enddate": "2025-03-07",
            "description": "desc here 1",
            "bounds": {
                "features": [
                    {
                        "geometry": {
                            "coordinates": [
                                [
                                    [-0.194220532242397, 51.46274256605967],
                                    [0.01817698974429, 51.46437979012592],
                                    [0.013905389416863, 51.56284140042993],
                                    [-0.191845725821395, 51.56047215557854],
                                    [-0.194220532242397, 51.46274256605967],
                                ]
                            ],
                            "type": "Polygon",
                        },
                        "properties": {},
                        "type": "Feature",
                    }
                ],
                "type": "FeatureCollection",
            },
            "tilecache": None,
            "onlinebasemaps": {"default": "mapbox://styles/mapbox/streets-v9"},
            "datadownloadconfig": {"download": False, "count": 100, "resources": ["330802c5-95bd-11e8-b7ac-acde48001122"], "custom": None},
            "users": [1],
            "groups": [],
            "cards": [
                "62b84902-95ec-11e8-86d3-acde48001122",
                "c5e3afde-95c5-11e8-a63f-acde48001122",
                "8cc075cc-95eb-11e8-bb88-acde48001122",
                "35be1c14-95ed-11e8-8db0-acde48001122",
                "c1bd336b-95bd-11e8-98d6-acde48001122",
                "4e3c81e8-95bd-11e8-b2d6-acde48001122",
                "28c343d4-95c5-11e8-9fb6-acde48001122",
                "3bd30b02-95c3-11e8-8f15-acde48001122",
                "de301d4a-95c3-11e8-b74b-acde48001122",
                "5d9d643d-95c4-11e8-848a-acde48001122",
            ],
        }
        payload = JSONSerializer().serialize(data)
        content_type = "application/x-www-form-urlencoded"
        self.c.login(username="******", password="******")
        resp = {"success": False}
        try:
            raw_resp = self.c.post(
                reverse("collector_designer",
                        kwargs={"surveyid": self.survey_id}), payload,
                content_type)
            resp = JSONDeserializer().deserialize(raw_resp.content)
        except couchdb.http.Unauthorized:
            # try again
            print("Not authorized to post to couch")
            pass

        self.assertTrue(resp["success"])

        test_survey_id = ""
        # management.call_command("mobile", operation="sync_survey", id=self.survey_id)
        couch = Couch()
        couchdbs = [dbname for dbname in couch.couch]
        for db in couchdbs:
            survey_id_from_db = db[-36:]
            if self.survey_id == survey_id_from_db:
                test_survey_id = survey_id_from_db

        self.assertTrue(self.survey_id == test_survey_id)
Code example #42
0
File: graph.py Project: aprilwebster-spatial/arches
    def post(self, request):
        data = JSONDeserializer().deserialize(request.body)
        self.apply_permissions(data)
        return JSONResponse(data)
Code example #43
0
    def test_f_big_nest_mess(self):

        data = """
{
  "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object",
  "http://www.cidoc-crm.org/cidoc-crm/P108i_was_produced_by": [
    {
      "@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
      "http://www.cidoc-crm.org/cidoc-crm/P10_falls_within": [
        {
          "@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
          "http://www.cidoc-crm.org/cidoc-crm/P14_carried_out_by": {
            "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object"
          },
          "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "asdf",
          "http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
            "@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
            "http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
              "@type": "http://www.w3.org/2001/XMLSchema#dateTime",
              "@value": "2019-12-03"
            },
            "http://www.cidoc-crm.org/cidoc-crm/P82b_end_of_the_end": {
              "@type": "http://www.w3.org/2001/XMLSchema#dateTime",
              "@value": "2019-12-05"
            },
            "http://www.cidoc-crm.org/cidoc-crm/P83_had_at_least_duration": {
              "@type": "http://www.cidoc-crm.org/cidoc-crm/E54_Dimension",
              "http://www.cidoc-crm.org/cidoc-crm/P90_has_value": 1
            }
          }
        },
        {
          "@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
          "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "second part",
          "http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
            "@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
            "http://www.cidoc-crm.org/cidoc-crm/P83_had_at_least_duration": {
              "@type": "http://www.cidoc-crm.org/cidoc-crm/E54_Dimension",
              "http://www.cidoc-crm.org/cidoc-crm/P90_has_value": 6
            }
          }
        }
      ]
    },
    {
      "@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
      "http://www.cidoc-crm.org/cidoc-crm/P10_falls_within": {
        "@type": "http://www.cidoc-crm.org/cidoc-crm/E12_Production",
        "http://www.cidoc-crm.org/cidoc-crm/P3_has_note": "bar",
        "http://www.cidoc-crm.org/cidoc-crm/P4_has_time-span": {
          "@type": "http://www.cidoc-crm.org/cidoc-crm/E52_Time-Span",
          "http://www.cidoc-crm.org/cidoc-crm/P82a_begin_of_the_begin": {
            "@type": "http://www.w3.org/2001/XMLSchema#dateTime",
            "@value": "2019-12-07"
          },
          "http://www.cidoc-crm.org/cidoc-crm/P82b_end_of_the_end": {
            "@type": "http://www.w3.org/2001/XMLSchema#dateTime",
            "@value": "2019-12-08"
          }
        }
      }
    }
  ],
  "http://www.cidoc-crm.org/cidoc-crm/P138i_has_representation": {
    "@type": "http://www.cidoc-crm.org/cidoc-crm/E36_Visual_Item",
    "http://www.cidoc-crm.org/cidoc-crm/P2_has_type": {
      "@id": "http://*****:*****@type": "http://www.cidoc-crm.org/cidoc-crm/E55_Type",
      "http://www.w3.org/2000/01/rdf-schema#label": "material a"
    }
  }
}
"""

        with open(os.path.join("tests/fixtures/jsonld_base/models/nest_test.json"), "rU") as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile["graph"])

        url = reverse(
            "resources_graphid",
            kwargs={"graphid": "9b596906-1540-11ea-b353-acde48001122", "resourceid": "c3b693cc-1542-11ea-b353-acde48001122"},
        )
        response = self.client.put(url, data=data, HTTP_AUTHORIZATION=f"Bearer {self.token}")
        self.assertEqual(response.status_code, 201)
        js = response.json()
        if type(js) == list:
            js = js[0]

        self.assertTrue("@id" in js)
        self.assertTrue(js["@id"] == "http://localhost:8000/resources/c3b693cc-1542-11ea-b353-acde48001122")
Code example #44
0
    def setUpClass(cls):
        # This runs once per instantiation
        cls.loadOntology()
        cls.factory = RequestFactory()
        cls.client = Client()

        #cls.client.login(username='******', password='******')
        #cls.user = User.objects.get(username='******')

        skos = SKOSReader()
        rdf = skos.read_file('tests/fixtures/jsonld_base/rdm/jsonld_test_thesaurus.xml')
        ret = skos.save_concepts_from_skos(rdf)

        skos = SKOSReader()
        rdf = skos.read_file('tests/fixtures/jsonld_base/rdm/jsonld_test_collections.xml')
        ret = skos.save_concepts_from_skos(rdf)

        # Load up the models and data only once
        with open(os.path.join('tests/fixtures/jsonld_base/models/test_1_basic_object.json'), 'rU') as f:
            archesfile = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile['graph'])
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_1_instance.json').import_business_data()

        with open(os.path.join('tests/fixtures/jsonld_base/models/test_2_complex_object.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph'])
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_2_instances.json').import_business_data()  

        with open(os.path.join('tests/fixtures/jsonld_base/models/5136_res_inst_plus_res_inst.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph'])
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_3_instances.json').import_business_data()  

        with open(os.path.join('tests/fixtures/jsonld_base/models/nesting_test.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph'])
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_nest_instances.json').import_business_data()  

        with open(os.path.join('tests/fixtures/jsonld_base/models/4564-person.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph'])        

        with open(os.path.join('tests/fixtures/jsonld_base/models/4564-group.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph']) 

        with open(os.path.join('tests/fixtures/jsonld_base/models/4564-referenced.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph']) 
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_4564_group.json').import_business_data()
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_4564_reference.json').import_business_data()

        management.call_command('datatype', 'register', source='tests/fixtures/datatypes/color.py')
        management.call_command('datatype', 'register', source='tests/fixtures/datatypes/semantic_like.py')

        with open(os.path.join('tests/fixtures/jsonld_base/models/5299-basic.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph']) 
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_5299_instances.json').import_business_data()        

        with open(os.path.join('tests/fixtures/jsonld_base/models/5299_complex.json'), 'rU') as f:
            archesfile2 = JSONDeserializer().deserialize(f)
        ResourceGraphImporter(archesfile2['graph']) 
        BusinessDataImporter('tests/fixtures/jsonld_base/data/test_5299_complex.json').import_business_data()       
Code example #45
0
File: tile.py Project: m453h/arches
    def post(self, request):
        if self.action == 'update_tile':
            json = request.POST.get('data', None)
            accepted_provisional = request.POST.get('accepted_provisional',
                                                    None)
            if accepted_provisional != None:
                accepted_provisional_edit = JSONDeserializer().deserialize(
                    accepted_provisional)
            if json != None:
                data = JSONDeserializer().deserialize(json)
                if data['resourceinstance_id'] == '':
                    data['resourceinstance_id'] = uuid.uuid4()
                try:
                    models.ResourceInstance.objects.get(
                        pk=data['resourceinstance_id'])
                except ObjectDoesNotExist:
                    resource = Resource()
                    graphid = models.Node.objects.filter(
                        nodegroup=data['nodegroup_id'])[0].graph_id
                    resource.graph_id = graphid
                    resource.save(user=request.user)
                    data['resourceinstance_id'] = resource.pk
                    resource.index()
                tile_id = data['tileid']
                if tile_id != None and tile_id != '':
                    try:
                        old_tile = Tile.objects.get(pk=tile_id)
                        clean_resource_cache(old_tile)
                    except ObjectDoesNotExist:
                        return JSONResponse(
                            {'status': 'false',
                             'message': [_('This tile is no longer available'),
                                         _('It was likely deleted by another user')]},
                            status=500)
                tile = Tile(data)
                if tile.filter_by_perm(request.user, 'write_nodegroup'):
                    with transaction.atomic():
                        try:
                            if accepted_provisional == None:
                                tile.save(request=request)
                            else:
                                if accepted_provisional is not None:
                                    provisional_editor = User.objects.get(
                                        pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor":
                                        provisional_editor
                                    })
                            if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                                import couchdb
                                import json as json_json
                                couch = couchdb.Server(settings.COUCHDB_URL)
                                for project in models.MobileSurveyModel.objects.all():
                                    db = couch['project_' + str(project.id)]
                                    #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                    tile_json = json_json.loads(
                                        JSONSerializer().serialize(tile))
                                    tile_json['_id'] = tile_json['tileid']
                                    for row in db.view('_all_docs',
                                                       include_docs=True):
                                        if 'tileid' in row.doc and tile_json['_id'] == row.doc['_id']:
                                            tile_json['_rev'] = row.doc['_rev']
                                            db.save(tile_json)

                            if tile.provisionaledits is not None and str(
                                    request.user.id) in tile.provisionaledits:
                                tile.data = tile.provisionaledits[str(
                                    request.user.id)]['value']

                        except ValidationError as e:
                            return JSONResponse(
                                {
                                    'status': 'false',
                                    'message': e.args
                                },
                                status=500)
                        tile.after_update_all()
                        clean_resource_cache(tile)
                        update_system_settings_cache(tile)

                    return JSONResponse(tile)
                else:
                    return JSONResponse(
                        {'status': 'false',
                         'message': [_('Request Failed'), _('Permission Denied')]},
                        status=500)

        if self.action == 'reorder_tiles':
            json = request.body
            if json != None:
                data = JSONDeserializer().deserialize(json)

                if 'tiles' in data and len(data['tiles']) > 0:
                    sortorder = 0
                    with transaction.atomic():
                        for tile in data['tiles']:
                            t = Tile(tile)
                            if t.filter_by_perm(request.user,
                                                'write_nodegroup'):
                                t.sortorder = sortorder
                                t.save(update_fields=['sortorder'],
                                       request=request)
                                sortorder = sortorder + 1

                    return JSONResponse(data)

        if self.action == 'delete_provisional_tile':
            user = request.POST.get('user', None)
            tileid = request.POST.get('tileid', None)
            users = request.POST.get('users', None)
            tile = Tile.objects.get(tileid=tileid)
            is_provisional = tile.is_provisional()

            if tileid is not None and user is not None:
                provisionaledits = self.delete_provisional_edit(
                    tile, user, reviewer=request.user)

            elif tileid is not None and users is not None:
                users = jsonparser.loads(users)
                for user in users:
                    self.delete_provisional_edit(tile,
                                                 user,
                                                 reviewer=request.user)

            if is_provisional == True:
                return JSONResponse({'result': 'delete'})
            else:
                return JSONResponse({'result': 'success'})

        return HttpResponseNotFound()
Code example #46
0
File: tile.py Project: fargeo/arches
    def post(self, request):
        if self.action == 'update_tile':
            json = request.POST.get('data', None)
            if json != None:
                data = JSONDeserializer().deserialize(json)
                try:
                    models.ResourceInstance.objects.get(pk=data['resourceinstance_id'])
                except ObjectDoesNotExist:
                    resource = Resource()
                    resource.resourceinstanceid = data['resourceinstance_id']
                    graphid = models.Node.objects.filter(nodegroup=data['nodegroup_id'])[0].graph_id
                    resource.graph_id = graphid
                    resource.save(user=request.user)
                    resource.index()
                tile_id = data['tileid']
                if tile_id != None and tile_id != '':
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                tile = Tile(data)
                if tile.filter_by_perm(request.user, 'write_nodegroup'):
                    with transaction.atomic():
                        try:
                            tile.save(request=request)
                            if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                                import couchdb
                                import json as json_json
                                couch = couchdb.Server(settings.COUCHDB_URL)
                                for project in models.MobileSurveyModel.objects.all():
                                    db = couch['project_' + str(project.id)]
                                    #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                    tile_json = json_json.loads(JSONSerializer().serialize(tile))
                                    tile_json['_id'] = tile_json['tileid']
                                    for row in db.view('_all_docs', include_docs=True):
                                        if 'tileid' in row.doc and tile_json['_id'] == row.doc['_id']:
                                            tile_json['_rev'] = row.doc['_rev']
                                            db.save(tile_json)

                        except ValidationError as e:
                            return JSONResponse({'status':'false','message':e.args}, status=500)
                        tile.after_update_all()
                        clean_resource_cache(tile)
                        update_system_settings_cache(tile)
                    return JSONResponse(tile)
                else:
                    return JSONResponse({'status':'false','message': [_('Request Failed'), _('Permission Denied')]}, status=500)

        if self.action == 'reorder_tiles':
            json = request.body
            if json != None:
                data = JSONDeserializer().deserialize(json)

                if 'tiles' in data and len(data['tiles']) > 0:
                    sortorder = 0
                    with transaction.atomic():
                        for tile in data['tiles']:
                            t = Tile(tile)
                            if t.filter_by_perm(request.user, 'write_nodegroup'):
                                t.sortorder = sortorder
                                t.save(update_fields=['sortorder'], request=request)
                                sortorder = sortorder + 1

                    return JSONResponse(data)

        if self.action == 'delete_provisional_tile':
            data = request.POST
            if 'tileid' in data:
                provisionaledits = self.delete_provisional_edit(data, request)
                return JSONResponse(provisionaledits)

            else:
                payload = data.get('payload', None)
                if payload is not None:
                    edits = jsonparser.loads(payload)
                    for edit in edits['edits']:
                        provisionaledits = self.delete_provisional_edit(edit, request)
                return JSONResponse({'result':'success'})

        return HttpResponseNotFound()