def post(self, request):
    """Kick off re-indexing of all resources for the graph ids in the request body.

    Expects a JSON body with a 'graphids' list; echoes the parsed payload back.
    """
    payload = JSONDeserializer().deserialize(request.body)
    # Re-index in place (no clear first); batch to bound memory per bulk request.
    index_resources_by_type(payload['graphids'], clear_index=False, batch_size=4000)
    return JSONResponse(payload)
def post(self, request):
    """Dispatch tile POST actions based on self.action.

    Handles three actions:
      - 'update_tile': create/update a tile (optionally accepting a provisional edit)
      - 'reorder_tiles': persist a new sort order for a list of tiles
      - 'delete_provisional_tile': remove provisional edits for one or more users
    Returns a JSONResponse, or HttpResponseNotFound for unknown actions.
    """
    if self.action == 'update_tile':
        # NOTE: 'json' shadows any module-level json import within this branch.
        json = request.POST.get('data', None)
        accepted_provisional = request.POST.get('accepted_provisional', None)
        if accepted_provisional != None:
            accepted_provisional_edit = JSONDeserializer().deserialize(
                accepted_provisional)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            # An empty resourceinstance_id means this tile belongs to a brand
            # new resource; mint an id for it.
            if data['resourceinstance_id'] == '':
                data['resourceinstance_id'] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # Resource doesn't exist yet: create it, inferring the graph
                # from the tile's nodegroup.
                resource = Resource()
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                data['resourceinstance_id'] = resource.pk
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                try:
                    # Invalidate caches for the pre-update tile state; a miss
                    # means another user deleted it out from under us.
                    old_tile = Tile.objects.get(pk=tile_id)
                    clean_resource_cache(old_tile)
                except ObjectDoesNotExist:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message': [
                                _('This tile is no longer available'),
                                _('It was likely deleted by another user')
                            ]
                        },
                        status=500)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        if accepted_provisional == None:
                            # Ordinary save path (no provisional edit accepted).
                            try:
                                tile.save(request=request)
                            except TileValidationError as e:
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            e.message,
                                            _('Unable to Save. Please verify your input is valid')
                                        ]
                                    },
                                    status=500)
                            except Exception as e:
                                message = "Unable to save. A {0} has occurred. \nArguments: {1!r}".format(
                                    type(e).__name__, e.args)
                                return JSONResponse(
                                    {
                                        'status': 'false',
                                        'message': [
                                            message,
                                            _('Please contact your system administrator')
                                        ]
                                    },
                                    status=500)
                        else:
                            # Reviewer is accepting a provisional edit; log who
                            # authored it in the edit log.
                            if accepted_provisional is not None:
                                provisional_editor = User.objects.get(
                                    pk=accepted_provisional_edit["user"])
                                tile.save(
                                    provisional_edit_log_details={
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor
                                    })
                        # HACK(review): hard-coded tile id triggers a one-off
                        # couchdb sync for mobile survey projects — looks like
                        # leftover development/debug code; confirm before keeping.
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all():
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(
                                    JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                for row in db.view('_all_docs', include_docs=True):
                                    if 'tileid' in row.doc and tile_json[
                                            '_id'] == row.doc['_id']:
                                        # Carry over the couch revision so the
                                        # save updates rather than conflicts.
                                        tile_json['_rev'] = row.doc['_rev']
                                        db.save(tile_json)
                        # Echo back the current user's own provisional data so
                        # the editor sees their pending (unreviewed) values.
                        if tile.provisionaledits is not None and str(
                                request.user.id) in tile.provisionaledits:
                            tile.data = tile.provisionaledits[str(
                                request.user.id)]['value']
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            },
                            status=500)
                    except Exception as e:
                        # Last-resort handler: log full context then surface a
                        # generic failure to the client.
                        exception_title = 'Saving tile failed'
                        exception_message = str(e)
                        if hasattr(e, 'message') and e.message:
                            exception_message += "({0})".format(e.message)
                        logger.error(exception_title + ''' [Tile id: {tile_id}] \
[Exception message: {message}] \
[Exception trace: {trace}]'''.format(
                            tile_id=tile_id,
                            message=exception_message,
                            trace=traceback.format_exc()))
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': [
                                    _(exception_title),
                                    _(str(exception_message))
                                ]
                            },
                            status=500)
                # Post-save hooks run only on the success path.
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    },
                    status=500)
    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered; the
                        # counter advances only for tiles actually saved.
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == 'delete_provisional_tile':
        user = request.POST.get('user', None)
        tileid = request.POST.get('tileid', None)
        users = request.POST.get('users', None)
        tile = Tile.objects.get(tileid=tileid)
        # Capture provisional state BEFORE deleting edits, to decide the
        # client-side outcome below.
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(
                tile, user, reviewer=request.user)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, reviewer=request.user)
        if is_provisional == True:
            # Tile was wholly provisional: removing the edits deletes it.
            return JSONResponse({'result': 'delete'})
        else:
            return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()
def append_dsl(self, search_results_object, permitted_nodegroups, include_provisional):
    """Append a temporal-filter Elasticsearch query to the search DSL.

    Reads the time filter from this component's GET parameter, then builds a
    Bool query constraining 'dates' and 'date_ranges' nested documents to the
    requested from/to range (or its inverse), restricted to nodegroups the
    user may see and filtered by provisional status.
    """
    search_query = Bool()
    querysting_params = self.request.GET.get(details["componentname"], "")
    temporal_filter = JSONDeserializer().deserialize(querysting_params)
    if "fromDate" in temporal_filter and "toDate" in temporal_filter:
        # now = str(datetime.utcnow())
        start_date = ExtendedDateFormat(temporal_filter["fromDate"])
        end_date = ExtendedDateFormat(temporal_filter["toDate"])
        # Optional: restrict the filter to a single date node.
        date_nodeid = (str(temporal_filter["dateNodeId"])
                       if "dateNodeId" in temporal_filter
                       and temporal_filter["dateNodeId"] != "" else None)
        query_inverted = False if "inverted" not in temporal_filter else temporal_filter[
            "inverted"]
        temporal_query = Bool()
        if query_inverted:
            # inverted date searches need to use an OR clause and are generally
            # more complicated to structure (can't use ES must_not)
            # eg: less than START_DATE OR greater than END_DATE
            inverted_date_query = Bool()
            inverted_date_ranges_query = Bool()
            if start_date.is_valid():
                inverted_date_query.should(
                    Range(field="dates.date", lt=start_date.lower))
                inverted_date_ranges_query.should(
                    Range(field="date_ranges.date_range", lt=start_date.lower))
            if end_date.is_valid():
                inverted_date_query.should(
                    Range(field="dates.date", gt=end_date.upper))
                inverted_date_ranges_query.should(
                    Range(field="date_ranges.date_range", gt=end_date.upper))
            date_query = Bool()
            date_query.filter(inverted_date_query)
            date_query.filter(
                Terms(field="dates.nodegroup_id", terms=permitted_nodegroups))
            # include_provisional: False -> authoritative only;
            # "only provisional" -> provisional only; otherwise both.
            if include_provisional is False:
                date_query.filter(
                    Terms(field="dates.provisional", terms=["false"]))
            elif include_provisional == "only provisional":
                date_query.filter(
                    Terms(field="dates.provisional", terms=["true"]))
            if date_nodeid:
                date_query.filter(
                    Term(field="dates.nodeid", term=date_nodeid))
            else:
                # No specific node: also match EDTF-style date ranges.
                date_ranges_query = Bool()
                date_ranges_query.filter(inverted_date_ranges_query)
                date_ranges_query.filter(
                    Terms(field="date_ranges.nodegroup_id",
                          terms=permitted_nodegroups))
                if include_provisional is False:
                    date_ranges_query.filter(
                        Terms(field="date_ranges.provisional", terms=["false"]))
                elif include_provisional == "only provisional":
                    date_ranges_query.filter(
                        Terms(field="date_ranges.provisional", terms=["true"]))
                temporal_query.should(
                    Nested(path="date_ranges", query=date_ranges_query))
            temporal_query.should(Nested(path="dates", query=date_query))
        else:
            # Non-inverted search: simple gte/lte range on the date documents.
            date_query = Bool()
            date_query.filter(
                Range(field="dates.date",
                      gte=start_date.lower,
                      lte=end_date.upper))
            date_query.filter(
                Terms(field="dates.nodegroup_id", terms=permitted_nodegroups))
            if include_provisional is False:
                date_query.filter(
                    Terms(field="dates.provisional", terms=["false"]))
            elif include_provisional == "only provisional":
                date_query.filter(
                    Terms(field="dates.provisional", terms=["true"]))
            if date_nodeid:
                date_query.filter(
                    Term(field="dates.nodeid", term=date_nodeid))
            else:
                date_ranges_query = Bool()
                # 'intersects' matches any stored range overlapping the window.
                date_ranges_query.filter(
                    Range(field="date_ranges.date_range",
                          gte=start_date.lower,
                          lte=end_date.upper,
                          relation="intersects"))
                date_ranges_query.filter(
                    Terms(field="date_ranges.nodegroup_id",
                          terms=permitted_nodegroups))
                if include_provisional is False:
                    date_ranges_query.filter(
                        Terms(field="date_ranges.provisional", terms=["false"]))
                # NOTE(review): this is a bare 'if' where every sibling branch
                # uses 'elif'; harmless (conditions are mutually exclusive) but
                # inconsistent — consider normalizing.
                if include_provisional == "only provisional":
                    date_ranges_query.filter(
                        Terms(field="date_ranges.provisional", terms=["true"]))
                temporal_query.should(
                    Nested(path="date_ranges", query=date_ranges_query))
            temporal_query.should(Nested(path="dates", query=date_query))
        search_query.filter(temporal_query)
    search_results_object["query"].add_query(search_query)
def __init__(self, file=None, mapping_file=None, relations_file=None):
    """Load business data (json/csv/zip/shp) plus mapping and relations files.

    NOTE(review): Python 2 only — uses print statements, sys.maxint, and
    unicodecsv. A Python 3 rewrite of this constructor exists elsewhere in
    this file.

    When no paths are given, files come from settings.BUSINESS_DATA_FILES;
    mapping/relations default to the data file's basename with .mapping /
    .relations extensions.
    """
    self.business_data = ''
    self.mapping = None
    self.graphs = ''
    self.reference_data = ''
    self.business_data = ''  # NOTE(review): duplicate assignment, already set above
    self.file_format = ''
    self.relations = ''
    # Raise the csv field limit so very large cells don't abort the import.
    csv.field_size_limit(sys.maxint)
    if not file:
        file = settings.BUSINESS_DATA_FILES
    else:
        file = [file]
    if mapping_file == None:
        try:
            # Default: same basename as the data file, .mapping extension.
            mapping_file = [file[0].split('.')[0] + '.mapping']
        except:
            print '*' * 80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*' * 80
            sys.exit()
    else:
        try:
            mapping_file = [mapping_file]
        except:
            print '*' * 80
            print "ERROR: Mapping file is missing or improperly named. Make sure you have mapping file with the same basename as your business data file and the extension .mapping"
            print '*' * 80
            sys.exit()
    if relations_file == None:
        try:
            relations_file = [file[0].split('.')[0] + '.relations']
        except:
            # No derivable relations path; relations stay empty.
            pass
    for path in relations_file:
        if os.path.exists(path):
            if isfile(join(path)):
                # NOTE(review): file handle is never closed (DictReader reads lazily).
                self.relations = csv.DictReader(
                    open(relations_file[0], 'r'))
    for path in mapping_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.mapping = json.load(open(path, 'r'))
            else:
                self.mapping = None
    for path in file:
        if os.path.exists(path):
            if isfile(join(path)):
                # Dispatch on file extension.
                self.file_format = file[0].split('.')[-1]
                if self.file_format == 'json':
                    with open(file[0], 'rU') as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if 'graph' in archesfile.keys():
                            self.graphs = archesfile['graph']
                        if 'reference_data' in archesfile.keys():
                            self.reference_data = archesfile[
                                'reference_data']
                        if 'business_data' in archesfile.keys():
                            self.business_data = archesfile[
                                'business_data']
                elif self.file_format == 'csv':
                    # utf-8-sig strips a BOM if present; extra/missing cells
                    # are collected under ADDITIONAL / MISSING keys.
                    data = unicodecsv.DictReader(open(file[0], 'rU'),
                                                 encoding='utf-8-sig',
                                                 restkey='ADDITIONAL',
                                                 restval='MISSING')
                    self.business_data = list(data)
                elif self.file_format == 'zip':
                    # Read zipped shapefile members into in-memory buffers.
                    zipfile = ZipFile(StringIO(open(file[0], 'r').read()))
                    filenames = [
                        y for y in sorted(zipfile.namelist())
                        for ending in ['dbf', 'prj', 'shp', 'shx']
                        if y.endswith(ending)
                    ]
                    dbf, prj, shp, shx = [
                        StringIO(zipfile.read(filename))
                        for filename in filenames
                    ]
                    shape_file = shapefile.Reader(shp=shp, shx=shx, dbf=dbf)
                    self.business_data = self.shape_to_csv(shape_file)
                elif self.file_format == 'shp':
                    self.business_data = self.shape_to_csv(
                        shapefile.Reader(file[0]))
                else:
                    print str(file) + ' is not a valid file'
        else:
            print path + ' is not a valid path'
def __init__(self, file=None, mapping_file=None, relations_file=None):
    """Load business data (json/csv/zip/shp) plus mapping and relations files.

    Python 3 version of the importer constructor. When no paths are given,
    files come from settings.BUSINESS_DATA_FILES; mapping/relations default
    to the data file's basename with .mapping / .relations extensions.
    """
    self.business_data = ""
    self.mapping = None
    self.graphs = ""
    self.reference_data = ""
    self.business_data = ""  # NOTE(review): duplicate assignment, already set above
    self.file_format = ""
    self.relations = ""
    try:
        # Raise the csv field limit so very large cells don't abort the import.
        csv.field_size_limit(sys.maxsize)
    except:
        # Some platforms reject sys.maxsize; fall back to the max C long.
        csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))
    if not file:
        file = settings.BUSINESS_DATA_FILES
    else:
        file = [file]
    self.file = file
    if mapping_file is None:
        try:
            # Default: same basename as the data file, .mapping extension.
            mapping_file_base = os.path.splitext(file[0])[0]
            mapping_file = [f"{mapping_file_base}.mapping"]
        except:
            print("*" * 80)
            print(
                "ERROR: Mapping file is missing or improperly named. Make sure you have \
mapping file with the same basename as your business data file and the extension .mapping"
            )
            print("*" * 80)
            sys.exit()
    else:
        try:
            mapping_file = [mapping_file]
        except:
            print("*" * 80)
            print(
                "ERROR: Mapping file is missing or improperly named. Make sure you have \
mapping file with the same basename as your business data file and the extension .mapping"
            )
            print("*" * 80)
            sys.exit()
    if relations_file is None:
        try:
            relations_file_base = os.path.splitext(file[0])[0]
            relations_file = [f"{relations_file_base}.relations"]
        except:
            # No derivable relations path; relations stay empty.
            pass
    for path in relations_file:
        if os.path.exists(path):
            if isfile(join(path)):
                # NOTE(review): file handle is never closed (DictReader reads lazily).
                self.relations = csv.DictReader(
                    open(relations_file[0], "r"))
    for path in mapping_file:
        if os.path.exists(path):
            if isfile(join(path)):
                self.mapping = json.load(open(path, "r"))
            else:
                self.mapping = None
    for path in file:
        if os.path.exists(path):
            if isfile(join(path)):
                # Dispatch on file extension (splitext keeps the dot; strip it).
                self.file_format = os.path.splitext(file[0])[1].strip(".")
                if self.file_format == "json":
                    with open(file[0], "rU") as f:
                        archesfile = JSONDeserializer().deserialize(f)
                        if "graph" in list(archesfile.keys()):
                            self.graphs = archesfile["graph"]
                        if "reference_data" in list(archesfile.keys()):
                            self.reference_data = archesfile[
                                "reference_data"]
                        if "business_data" in list(archesfile.keys()):
                            self.business_data = archesfile[
                                "business_data"]
                elif self.file_format == "csv":
                    data = csv.DictReader(open(file[0], encoding="utf-8"))
                    self.business_data = list(data)
                elif self.file_format == "zip":
                    # Unzip next to the archive, then require exactly one .shp.
                    shp_zipfile = os.path.basename(path)
                    shp_zipfile_name = os.path.splitext(shp_zipfile)[0]
                    unzip_dir = os.path.join(os.path.dirname(path),
                                             shp_zipfile_name)
                    unzip_file(path, unzip_dir)
                    shp = [
                        i for i in os.listdir(unzip_dir)
                        if i.endswith(".shp")
                    ]
                    if len(shp) == 0:
                        print("*" * 80)
                        print(
                            "ERROR: There is no shapefile in this zipfile."
                        )
                        print("*" * 80)
                        exit()
                    elif len(shp) > 1:
                        # Ambiguous archive: tell the user how to load each
                        # shapefile individually, then bail.
                        print("*" * 80)
                        print(
                            "ERROR: There are multiple shapefiles in this zipfile. Please load each individually:"
                        )
                        for s in shp:
                            print(
                                "\npython manage.py packages -o import_business_data -s {0} -c {1} -ow [append or overwrite]"
                                .format(os.path.join(unzip_dir, s),
                                        mapping_file[0]))
                        print("*" * 80)
                        exit()
                    shp_path = os.path.join(unzip_dir, shp[0])
                    self.business_data = self.shape_to_csv(shp_path)
                elif self.file_format == "shp":
                    self.business_data = self.shape_to_csv(path)
                else:
                    print(str(file) + " is not a valid file")
        else:
            print(path + " is not a valid path")
def delete(self, request):
    """Revert the permission assignments described in the JSON request body."""
    payload = JSONDeserializer().deserialize(request.body)
    # revert=True undoes the permissions rather than applying them.
    self.apply_permissions(payload, revert=True)
    return JSONResponse(payload)
def test_node_update(self):
    """
    test to make sure that node groups and card are properly managed
    when changing a nodegroup value on a node being updated
    """
    # create a graph, append the node/node type graph and confirm it has the correct
    # number of nodegroups then remove the appended branch's group and reconfirm that
    # the proper number of groups are properly reflected in the graph
    graph = Graph.objects.get(pk=self.rootNode.graph.graphid)
    graph.append_branch('http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by', graphid=self.NODE_NODETYPE_GRAPHID)

    node_to_update = None
    # FIX: .iteritems()/.itervalues() are Python-2-only; .items()/.values()
    # behave the same here and work on both Python 2 and 3.
    for node_id, node in graph.nodes.items():
        if node.name == 'Node':
            # round-trip through the serializer to get a plain dict copy of the node
            node_to_update = JSONDeserializer().deserialize(JSONSerializer().serialize(node))
        if node.name == 'Node Type':
            node_type_node = JSONDeserializer().deserialize(JSONSerializer().serialize(node))

    # confirm that nulling out a child group will then make that group a part of the parent group
    node_to_update['nodegroup_id'] = None
    graph.update_node(node_to_update)
    self.assertEqual(len(graph.get_nodegroups()), 1)
    self.assertEqual(len(graph.cards), 1)
    for node in graph.nodes.values():
        self.assertEqual(graph.root.nodegroup, node.nodegroup)

    graph.append_branch('http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by', nodeid=node_type_node['nodeid'], graphid=self.SINGLE_NODE_GRAPHID)
    for edge in graph.edges.values():
        if str(edge.domainnode_id) == str(node_type_node['nodeid']):
            child_nodegroup_node = JSONDeserializer().deserialize(JSONSerializer().serialize(edge.rangenode))

    # make a node group with a single node and confirm that that node is now not part of it's parent node group
    child_nodegroup_node['nodegroup_id'] = child_nodegroup_node['nodeid']
    graph.update_node(child_nodegroup_node)
    self.assertEqual(len(graph.get_nodegroups()), 2)
    for node_id, node in graph.nodes.items():
        if node_id == child_nodegroup_node['nodeid']:
            self.assertNotEqual(graph.root.nodegroup, node.nodegroup)
        else:
            self.assertEqual(graph.root.nodegroup, node.nodegroup)

    # make another node group with a node (that has a child) and confirm that that node and
    # it's child are now not part of it's parent node group and that both nodes are grouped together
    node_to_update['nodegroup_id'] = node_to_update['nodeid']
    graph.update_node(node_to_update)
    self.assertEqual(len(graph.get_nodegroups()), 3)
    children = graph.get_child_nodes(node_to_update['nodeid'])
    for child in children:
        if child.nodeid == child_nodegroup_node['nodeid']:
            self.assertEqual(child.nodeid, child.nodegroup_id)
        else:
            self.assertEqual(child.nodegroup_id, node_to_update['nodegroup_id'])

    # remove a node's node group and confirm that that node takes the node group of it's parent
    child_nodegroup_node['nodegroup_id'] = None
    graph.update_node(child_nodegroup_node)
    self.assertEqual(len(graph.get_nodegroups()), 2)
    children = graph.get_child_nodes(node_to_update['nodeid'])
    for child in children:
        self.assertEqual(child.nodegroup_id, node_to_update['nodegroup_id'])
def post(self, request):
    """Dispatch tile POST actions based on self.action (modernized version).

    Handles 'update_tile', 'reorder_tiles' and 'delete_provisional_tile';
    returns a JSONResponse, or HttpResponseNotFound for unknown actions.
    Unlike the legacy version, saves are blocked when the graph is inactive
    and error reporting is funneled through self.handle_save_error.
    """
    if self.action == "update_tile":
        # NOTE: 'json' shadows any module-level json import within this branch.
        json = request.POST.get("data", None)
        accepted_provisional = request.POST.get("accepted_provisional", None)
        if accepted_provisional is not None:
            accepted_provisional_edit = JSONDeserializer().deserialize(accepted_provisional)
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            # Normalize a missing resourceinstance_id to "", then mint one if empty.
            data["resourceinstance_id"] = "" if "resourceinstance_id" not in data else data["resourceinstance_id"]
            if data["resourceinstance_id"] == "":
                data["resourceinstance_id"] = uuid.uuid4()
            try:
                models.ResourceInstance.objects.get(pk=data["resourceinstance_id"])
            except ObjectDoesNotExist:
                # Resource doesn't exist yet: create it, inferring the graph
                # from the tile's nodegroup.
                resource = Resource()
                graphid = models.Node.objects.filter(nodegroup=data["nodegroup_id"])[0].graph_id
                resource.graph_id = graphid
                try:
                    resource.save(user=request.user)
                    data["resourceinstance_id"] = resource.pk
                    resource.index()
                except ModelInactiveError as e:
                    message = _("Unable to save. Please verify the model status is active")
                    return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)
            tile_id = data["tileid"]
            # Saves are refused below when the resource's graph is inactive.
            resource_instance = models.ResourceInstance.objects.get(pk=data["resourceinstance_id"])
            is_active = resource_instance.graph.isactive
            if tile_id is not None and tile_id != "":
                try:
                    old_tile = Tile.objects.get(pk=tile_id)
                except ObjectDoesNotExist as e:
                    # A miss means another user deleted the tile.
                    return self.handle_save_error(e, _("This tile is no longer available"), _("It was likely deleted by another user"))
            tile = Tile(data)
            if tile.filter_by_perm(request.user, "write_nodegroup") and is_active is True:
                try:
                    with transaction.atomic():
                        try:
                            if accepted_provisional is None:
                                # Ordinary save path (no provisional edit accepted).
                                try:
                                    tile.save(request=request)
                                except TileValidationError as e:
                                    # If validation failed on the very first tile of a
                                    # brand-new resource, clean up the orphan resource.
                                    resource_tiles = models.TileModel.objects.filter(resourceinstance=tile.resourceinstance)
                                    if resource_tiles.count() == 0:
                                        Resource.objects.get(pk=tile.resourceinstance_id).delete(request.user)
                                    title = _("Unable to save. Please verify your input is valid")
                                    return self.handle_save_error(e, tile_id, title=title)
                                except ModelInactiveError as e:
                                    message = _("Unable to save. Please verify the model status is active")
                                    return JSONResponse({"status": "false", "message": [_(e.title), _(str(message))]}, status=500)
                            else:
                                # Reviewer is accepting a provisional edit; record
                                # who authored it in the edit log.
                                if accepted_provisional is not None:
                                    provisional_editor = User.objects.get(pk=accepted_provisional_edit["user"])
                                    prov_edit_log_details = {
                                        "user": request.user,
                                        "action": "accept edit",
                                        "edit": accepted_provisional_edit,
                                        "provisional_editor": provisional_editor,
                                    }
                                    tile.save(request=request, provisional_edit_log_details=prov_edit_log_details)
                            # Echo back the current user's own provisional data so
                            # the editor sees their pending (unreviewed) values.
                            if tile.provisionaledits is not None and str(request.user.id) in tile.provisionaledits:
                                tile.data = tile.provisionaledits[str(request.user.id)]["value"]
                        except Exception as e:
                            return self.handle_save_error(e, tile_id)
                        tile.after_update_all()
                        update_system_settings_cache(tile)
                except Exception as e:
                    return self.handle_save_error(e, tile_id)
                return JSONResponse(tile)
            elif is_active is False:
                response = {"status": "false", "message": [_("Request Failed"), _("Unable to Save. Verify model status is active")]}
                return JSONResponse(response, status=500)
            else:
                return JSONErrorResponse(_("Request Failed"), _("Permission Denied"))
    if self.action == "reorder_tiles":
        json = request.body
        if json is not None:
            data = JSONDeserializer().deserialize(json)
            if "tiles" in data and len(data["tiles"]) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data["tiles"]:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered; the
                        # counter advances only for tiles actually saved.
                        if t.filter_by_perm(request.user, "write_nodegroup"):
                            t.sortorder = sortorder
                            t.save(update_fields=["sortorder"], request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == "delete_provisional_tile":
        user = request.POST.get("user", None)
        tileid = request.POST.get("tileid", None)
        users = request.POST.get("users", None)
        tile = Tile.objects.get(tileid=tileid)
        # Capture provisional state BEFORE deleting edits, to decide the
        # client-side outcome below.
        is_provisional = tile.is_provisional()
        if tileid is not None and user is not None:
            provisionaledits = self.delete_provisional_edit(tile, user, request)
        elif tileid is not None and users is not None:
            users = jsonparser.loads(users)
            for user in users:
                self.delete_provisional_edit(tile, user, request)
        if is_provisional == True:
            # Tile was wholly provisional: removing the edits deletes it.
            return JSONResponse({"result": "delete"})
        else:
            return JSONResponse({"result": "success"})
    return HttpResponseNotFound()
def post(self, request, graphid=None):
    """Dispatch graph-editing POST actions based on self.action.

    Actions: import_graph, new_graph, update_node, update_node_layer,
    append_branch, append_node, move_node, export_branch, clone_graph,
    reorder_nodes. Returns the action result as a JSONResponse; graph
    validation failures come back as a 500 with message/title.
    """
    ret = {}
    try:
        if self.action == 'import_graph':
            graph_file = request.FILES.get('importedGraph').read()
            graphs = JSONDeserializer().deserialize(graph_file)['graph']
            ret = GraphImporter.import_graph(graphs)
        else:
            # All remaining actions operate on an existing graph (except
            # new_graph, which ignores it).
            if graphid is not None:
                graph = Graph.objects.get(graphid=graphid)
            data = JSONDeserializer().deserialize(request.body)
            if self.action == 'new_graph':
                isresource = data['isresource'] if 'isresource' in data else False
                name = _('New Resource Model') if isresource else _('New Branch')
                author = request.user.first_name + ' ' + request.user.last_name
                ret = Graph.new(name=name, is_resource=isresource, author=author)
            elif self.action == 'update_node':
                graph.update_node(data)
                ret = graph
                graph.save()
            elif self.action == 'update_node_layer':
                nodeid = uuid.UUID(str(data.get('nodeid')))
                node = graph.nodes[nodeid]
                node.config = data['config']
                ret = graph
                node.save()
            elif self.action == 'append_branch':
                ret = graph.append_branch(data['property'], nodeid=data['nodeid'], graphid=data['graphid'])
                graph.save()
            elif self.action == 'append_node':
                ret = graph.append_node(nodeid=data['nodeid'])
                graph.save()
            elif self.action == 'move_node':
                ret = graph.move_node(data['nodeid'], data['property'], data['newparentnodeid'])
                graph.save()
            elif self.action == 'export_branch':
                # Copy the subtree rooted at the posted node into a new graph.
                clone_data = graph.copy(root=data)
                clone_data['copy'].save()
                ret = {
                    'success': True,
                    'graphid': clone_data['copy'].pk
                }
            elif self.action == 'clone_graph':
                clone_data = graph.copy()
                ret = clone_data['copy']
                ret.save()
                # Carry over functions, forms and reports using the id maps
                # produced by the copy.
                ret.copy_functions(graph, [clone_data['nodes'], clone_data['nodegroups']])
                form_map = ret.copy_forms(graph, clone_data['cards'])
                ret.copy_reports(graph, [form_map, clone_data['cards'], clone_data['nodes']])
            elif self.action == 'reorder_nodes':
                json = request.body
                if json is not None:
                    data = JSONDeserializer().deserialize(json)
                    if 'nodes' in data and len(data['nodes']) > 0:
                        sortorder = 0
                        with transaction.atomic():
                            for node in data['nodes']:
                                no = models.Node.objects.get(pk=node['nodeid'])
                                no.sortorder = sortorder
                                no.save()
                                sortorder = sortorder + 1
                        ret = data
        return JSONResponse(ret)
    except GraphValidationError as e:
        return JSONResponse({'status': 'false', 'success': False, 'message': e.message, 'title': e.title}, status=500)
def load_file(self, archesjson):
    """Read an Arches JSON file and return its 'resources' list."""
    with open(archesjson, 'r') as json_file:
        parsed = JSONDeserializer().deserialize(json_file.read())
    return parsed['resources']
def node(request, nodeid):
    """Update (POST) or delete (DELETE) a graph node and manage its nodegroup.

    POST: updates the node's fields from the JSON body and, when its
    nodegroup assignment changes, re-parents collector groups and re-groups
    descendant nodes accordingly.
    DELETE: removes the node, its children, and their connecting edges.
    """
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.body)
        if data:
            node = models.Node.objects.get(nodeid=nodeid)
            nodes, edges = node.get_child_nodes_and_edges()
            # Collectors are child nodes that head their own nodegroup.
            collectors = [node_ for node_ in nodes if node_.is_collector()]
            node_ids = [id_node.nodeid for id_node in nodes]
            # Keep only children whose group is NOT headed by another child
            # (i.e. nodes that would travel with this node's group).
            nodes = [
                node_ for node_ in nodes
                if (node_.nodegroup_id not in node_ids)
            ]
            with transaction.atomic():
                node.name = data.get('name', '')
                node.description = data.get('description', '')
                node.istopnode = data.get('istopnode', '')
                node.crmclass = data.get('crmclass', '')
                node.datatype = data.get('datatype', '')
                node.status = data.get('status', '')
                node.validations.set(data.get('validations', []))
                new_nodegroup_id = data.get('nodegroup_id', None)
                cardinality = data.get('cardinality', 'n')
                if node.nodegroup_id != new_nodegroup_id:
                    # The node's group changed: default to joining the parent
                    # node's group...
                    edge = models.Edge.objects.get(rangenode_id=nodeid)
                    parent_group = edge.domainnode.nodegroup
                    new_group = parent_group
                    if new_nodegroup_id == nodeid:
                        # ...unless the node becomes its own group head, in
                        # which case (lazily) create that group.
                        new_group, created = models.NodeGroup.objects.get_or_create(
                            nodegroupid=nodeid,
                            defaults={
                                'cardinality': 'n',
                                'legacygroupid': None,
                                'parentnodegroup': None
                            })
                        new_group.parentnodegroup = parent_group
                        new_group.cardinality = cardinality
                        new_group.save()
                        parent_group = new_group
                    # Re-parent child collector groups under the (possibly new)
                    # group, and move non-collector children into it.
                    for collector in collectors:
                        collector.nodegroup.parentnodegroup = parent_group
                        collector.nodegroup.save()
                    for group_node in nodes:
                        group_node.nodegroup = new_group
                        group_node.save()
                    node.nodegroup = new_group
                node.save()
            return JSONResponse({
                'node': node,
                'group_nodes': nodes,
                'collectors': collectors,
                'nodegroup': node.nodegroup
            })
    if request.method == 'DELETE':
        node = models.Node.objects.get(nodeid=nodeid)
        nodes, edges = node.get_child_nodes_and_edges()
        # Include the edge from the parent and the node itself in the purge.
        edges.append(models.Edge.objects.get(rangenode=node))
        nodes.append(node)
        with transaction.atomic():
            [edge.delete() for edge in edges]
            [node.delete() for node in nodes]
        return JSONResponse({})
    return HttpResponseNotFound()
def post(self, request, graphid=None):
    """Dispatch graph-editing POST actions based on self.action (newer version).

    Differs from the legacy dispatcher by serializing richer payloads for
    update_node / append_branch and by dropping form/report copying from
    clone_graph. Graph validation failures return a 500 with message/title.
    """
    ret = {}
    try:
        if self.action == "import_graph":
            graph_file = request.FILES.get("importedGraph").read()
            graphs = JSONDeserializer().deserialize(graph_file)["graph"]
            ret = GraphImporter.import_graph(graphs)
        else:
            # All remaining actions operate on an existing graph (except
            # new_graph, which ignores it).
            if graphid is not None:
                graph = Graph.objects.get(graphid=graphid)
            data = JSONDeserializer().deserialize(request.body)
            if self.action == "new_graph":
                isresource = data[
                    "isresource"] if "isresource" in data else False
                name = _("New Resource Model") if isresource else _(
                    "New Branch")
                author = request.user.first_name + " " + request.user.last_name
                ret = Graph.new(name=name,
                                is_resource=isresource,
                                author=author)
            elif self.action == "update_node":
                # Return the serialized graph plus the values the update changed.
                updated_values = graph.update_node(data)
                graph.save()
                ret = JSONSerializer().serializeToPython(graph)
                ret["updated_values"] = updated_values
            elif self.action == "update_node_layer":
                nodeid = uuid.UUID(str(data.get("nodeid")))
                node = graph.nodes[nodeid]
                node.config = data["config"]
                ret = graph
                node.save()
            elif self.action == "append_branch":
                ret = graph.append_branch(data["property"],
                                          nodeid=data["nodeid"],
                                          graphid=data["graphid"])
                # Enrich the response with the structures the client needs to
                # render the new branch without a refetch.
                ret = ret.serialize()
                ret["nodegroups"] = graph.get_nodegroups()
                ret["cards"] = graph.get_cards()
                ret["widgets"] = graph.get_widgets()
                graph.save()
            elif self.action == "append_node":
                ret = graph.append_node(nodeid=data["nodeid"])
                graph.save()
            elif self.action == "move_node":
                ret = graph.move_node(data["nodeid"], data["property"],
                                      data["newparentnodeid"])
                graph.save()
            elif self.action == "export_branch":
                # Copy the subtree rooted at the posted node into a new graph.
                clone_data = graph.copy(root=data)
                clone_data["copy"].save()
                ret = {"success": True, "graphid": clone_data["copy"].pk}
            elif self.action == "clone_graph":
                clone_data = graph.copy()
                ret = clone_data["copy"]
                ret.save()
                ret.copy_functions(
                    graph, [clone_data["nodes"], clone_data["nodegroups"]])
            elif self.action == "reorder_nodes":
                json = request.body
                if json is not None:
                    data = JSONDeserializer().deserialize(json)
                    if "nodes" in data and len(data["nodes"]) > 0:
                        sortorder = 0
                        with transaction.atomic():
                            for node in data["nodes"]:
                                no = models.Node.objects.get(
                                    pk=node["nodeid"])
                                no.sortorder = sortorder
                                no.save()
                                sortorder = sortorder + 1
                        ret = data
        return JSONResponse(ret)
    except GraphValidationError as e:
        return JSONResponse(
            {
                "status": "false",
                "success": False,
                "message": e.message,
                "title": e.title
            },
            status=500)
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """Legacy (Arches v3 style) resource CRUD view.

    GET renders the requested form; POST saves form data and redirects back;
    DELETE removes the resource, its relations and its index entries.
    NOTE(review): if both resourcetypeid and resourceid are empty, 'resource'
    is never bound and the code below raises NameError — presumably URL
    routing guarantees at least one; confirm.
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})

    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']

    form = resource.get_form(form_id)

    if request.method == 'DELETE':
        resource.delete_index()
        se = SearchEngineFactory().create()
        # FIX: locals renamed from the misspelled 'realtionship(s)'.
        relationships = resource.get_related_resources(return_entities=False)
        for relationship in relationships:
            # Remove the relation from the index before deleting the row.
            se.delete(index='resource_relations', doc_type='all', id=relationship.resourcexid)
            relationship.delete()
        resource.delete()
        return JSONResponse({'success': True})

    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        form.update(data, request.FILES)
        with transaction.atomic():
            # Existing resources are de-indexed first so the re-index below
            # reflects the saved state.
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
        return redirect('resource_manager', resourcetypeid=resourcetypeid, form_id=form_id, resourceid=resourceid)

    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))

    if request.method == 'GET':
        if form is not None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            form.load(lang)
            return render_to_response('resource-manager.htm', {
                'form': form,
                'formdata': JSONSerializer().serialize(form.data),
                'form_template': 'views/forms/' + form_id + '.htm',
                'form_id': form_id,
                'resourcetypeid': resourcetypeid,
                'resourceid': resourceid,
                'main_script': 'resource-manager',
                # NOTE(review): 'ResourceManger' spelling kept byte-for-byte —
                # templates likely match on it; fix there first if at all.
                'active_page': 'ResourceManger',
                'resource': resource,
                'resource_name': resource.get_primary_name(),
                'resource_type_name': resource.get_type_name(),
                'form_groups': resource.form_groups,
                'min_date': min_max_dates['val__min'].year if min_max_dates['val__min'] is not None else 0,
                'max_date': min_max_dates['val__max'].year if min_max_dates['val__min'] is not None else 1,
                'timefilterdata': JSONSerializer().serialize(Concept.get_time_filter_data()),
            }, context_instance=RequestContext(request))
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')
def delete(self, request):
    """Delete the IIIF manifest identified by the 'manifest' url in the body."""
    payload = JSONDeserializer().deserialize(request.body)
    manifest_url = payload.get("manifest")
    # Look the record up by its url, then remove it.
    record = models.IIIFManifest.objects.get(url=manifest_url)
    record.delete()
    return JSONResponse({"success": True})
def get(self, request):
    """Return permission-manager data, or per-nodegroup perms for one identity.

    When self.action is 'get_permission_manager_data': list all groups and
    non-superuser users with their default permissions, plus the available
    NodeGroup permissions. Otherwise: for the identity given by
    identityId/identityType, return the object-level perms on each requested
    nodegroup (falling back to group defaults for users with none).
    """
    if self.action == "get_permission_manager_data":
        identities = []
        for group in Group.objects.all():
            identities.append({
                "name": group.name,
                "type": "group",
                "id": group.pk,
                "default_permissions": group.permissions.all()
            })
        for user in User.objects.filter(is_superuser=False):
            groups = []
            default_perms = []
            # A user's default permissions are the union of their groups'.
            for group in user.groups.all():
                groups.append(group.name)
                default_perms = default_perms + list(
                    group.permissions.all())
            identities.append({
                "name": user.email or user.username,
                "groups": ", ".join(groups),
                "type": "user",
                "id": user.pk,
                "default_permissions": set(default_perms),
            })
        content_type = ContentType.objects.get_for_model(models.NodeGroup)
        nodegroup_permissions = Permission.objects.filter(
            content_type=content_type)
        ret = {
            "identities": identities,
            "permissions": nodegroup_permissions
        }
        return JSONResponse(ret)

    # Per-identity lookup path.
    nodegroup_ids = JSONDeserializer().deserialize(
        request.GET.get("nodegroupIds"))
    identityId = request.GET.get("identityId")
    identityType = request.GET.get("identityType")
    ret = []
    if identityType == "group":
        identity = Group.objects.get(pk=identityId)
        for nodegroup_id in nodegroup_ids:
            nodegroup = models.NodeGroup.objects.get(pk=nodegroup_id)
            perms = [{
                "codename": codename,
                "name": self.get_perm_name(codename).name
            } for codename in get_group_perms(identity, nodegroup)]
            ret.append({"perms": perms, "nodegroup_id": nodegroup_id})
    else:
        identity = User.objects.get(pk=identityId)
        for nodegroup_id in nodegroup_ids:
            nodegroup = models.NodeGroup.objects.get(pk=nodegroup_id)
            perms = [{
                "codename": codename,
                "name": self.get_perm_name(codename).name
            } for codename in get_user_perms(identity, nodegroup)]
            # only get the group perms ("defaults") if no user defined object settings have been saved
            if len(perms) == 0:
                perms = [{
                    "codename": codename,
                    "name": self.get_perm_name(codename).name
                } for codename in set(get_group_perms(identity, nodegroup))
                ]
            ret.append({"perms": perms, "nodegroup_id": nodegroup_id})
    return JSONResponse(ret)
def post(self, request):
    """
    Create or update a MobileSurveyModel from the JSON request body.

    Syncs users/groups/cards against the submitted lists, notifies survey
    members when the active flag flips, parses the ``bounds`` geometry into a
    MultiPolygon, then saves and returns a dict representation of the survey.
    """
    data = JSONDeserializer().deserialize(request.body)
    if data['id'] is None:
        mobile_survey = models.MobileSurveyModel()
        mobile_survey.createdby = self.request.user
    else:
        mobile_survey = models.MobileSurveyModel.objects.get(pk=data['id'])
    self.update_identities(data, mobile_survey, mobile_survey.users.all(),
                           'users', User, models.MobileSurveyXUser)
    self.update_identities(data, mobile_survey, mobile_survey.groups.all(),
                           'groups', Group, models.MobileSurveyXGroup)
    # BUGFIX: `unicode` does not exist in Python 3 (this file otherwise uses
    # Python 3 constructs); use str() to normalize card UUIDs for comparison.
    mobile_survey_card_ids = set(
        [str(c.cardid) for c in mobile_survey.cards.all()])
    form_card_ids = set(data['cards'])
    # Diff the stored card set against the submitted card set.
    cards_to_remove = mobile_survey_card_ids - form_card_ids
    cards_to_add = form_card_ids - mobile_survey_card_ids
    cards_to_update = mobile_survey_card_ids & form_card_ids
    for card_id in cards_to_add:
        models.MobileSurveyXCard.objects.create(
            card=models.CardModel.objects.get(cardid=card_id),
            mobile_survey=mobile_survey,
            # sortorder mirrors the card's position in the submitted list
            sortorder=data['cards'].index(card_id))
    for card_id in cards_to_update:
        mobile_survey_card = models.MobileSurveyXCard.objects.filter(
            mobile_survey=mobile_survey).get(
                card=models.CardModel.objects.get(cardid=card_id))
        mobile_survey_card.sortorder = data['cards'].index(card_id)
        mobile_survey_card.save()
    for card_id in cards_to_remove:
        models.MobileSurveyXCard.objects.filter(
            card=models.CardModel.objects.get(cardid=card_id),
            mobile_survey=mobile_survey).delete()
    if mobile_survey.active != data['active']:
        # notify users in the mobile_survey that the state of the mobile_survey has changed
        if data['active']:
            self.notify_mobile_survey_start(request, mobile_survey)
        else:
            self.notify_mobile_survey_end(request, mobile_survey)
    mobile_survey.name = data['name']
    mobile_survey.description = data['description']
    if data['startdate'] != '':
        mobile_survey.startdate = data['startdate']
    if data['enddate'] != '':
        mobile_survey.enddate = data['enddate']
    mobile_survey.datadownloadconfig = data['datadownloadconfig']
    mobile_survey.active = data['active']
    mobile_survey.tilecache = data['tilecache']
    polygons = []
    try:
        # .upper() raises AttributeError when bounds is already a parsed dict,
        # in which case the json.loads step is skipped.
        data['bounds'].upper()
        data['bounds'] = json.loads(data['bounds'])
    except AttributeError:
        pass
    if 'features' in data['bounds']:
        for feature in data['bounds']['features']:
            for coord in feature['geometry']['coordinates']:
                polygons.append(Polygon(coord))
    mobile_survey.bounds = MultiPolygon(polygons)
    mobile_survey.lasteditedby = self.request.user
    with transaction.atomic():
        mobile_survey.save()
    ordered_cards = models.MobileSurveyXCard.objects.filter(
        mobile_survey=mobile_survey).order_by('sortorder')
    # BUGFIX: str() instead of the undefined Python-2 unicode() here as well.
    ordered_ids = [str(mpc.card.cardid) for mpc in ordered_cards]
    mobile_survey_dict = mobile_survey.__dict__
    mobile_survey_dict['cards'] = ordered_ids
    mobile_survey_dict['users'] = [u.id for u in mobile_survey.users.all()]
    mobile_survey_dict['groups'] = [
        g.id for g in mobile_survey.groups.all()
    ]
    mobile_survey_dict['bounds'] = mobile_survey.bounds.geojson
    return JSONResponse({
        'success': True,
        'mobile_survey': mobile_survey_dict
    })
def post(self, request):
    """Apply the permission configuration posted in the request body and echo it back."""
    payload = JSONDeserializer().deserialize(request.body)
    self.apply_permissions(payload)
    return JSONResponse(payload)
def collect_resource_instances_for_couch(self):
    """
    Uses the data definition configs of a mobile survey object to search for
    resource instances relevant to a mobile survey. Takes a user object which
    is required for search.
    """
    query = self.datadownloadconfig["custom"]
    resource_types = self.datadownloadconfig["resources"]
    all_instances = {}
    if query in ("", None) and len(resource_types) == 0:
        logger.info("No resources or data query defined")
    else:
        # Build a synthetic request so the standard search view can be reused.
        request = HttpRequest()
        request.user = self.lasteditedby
        request.GET["mobiledownload"] = True
        request.GET["resourcecount"] = self.datadownloadconfig["count"]
        if query in ("", None):
            # No custom query: search per resource type within the survey bounds.
            if len(self.bounds.coords) == 0:
                default_bounds = settings.DEFAULT_BOUNDS
                default_bounds["features"][0]["properties"][
                    "inverted"] = False
                map_filter = json.dumps(default_bounds)
            else:
                map_filter = json.dumps({
                    "type": "FeatureCollection",
                    "features": [{
                        "geometry": json.loads(self.bounds.json)
                    }]
                })
            try:
                for res_type in resource_types:
                    instances = {}
                    request.GET["resource-type-filter"] = json.dumps([{
                        "graphid": res_type,
                        "inverted": False
                    }])
                    request.GET["map-filter"] = map_filter
                    request.GET["paging-filter"] = "1"
                    request.GET["resourcecount"] = self.datadownloadconfig[
                        "count"]
                    self.append_to_instances(request, instances, res_type)
                    # If the bounded search fell short of the requested count,
                    # top up with an unbounded search for the remainder.
                    if len(list(instances.keys())) < int(
                            self.datadownloadconfig["count"]):
                        request.GET["map-filter"] = "{}"
                        request.GET["resourcecount"] = int(
                            self.datadownloadconfig["count"]) - len(
                                list(instances.keys()))
                        self.append_to_instances(request, instances, res_type)
                    for key, value in instances.items():
                        all_instances[key] = value
            except Exception as e:
                logger.exception(e)
        else:
            # Custom query: replay its querystring params through the search view.
            try:
                instances = {}
                parsed = urllib.parse.urlparse(query)
                urlparams = urllib.parse.parse_qs(parsed.query)
                for k, v in urlparams.items():
                    request.GET[k] = v[0]
                search_res_json = search.search_results(request)
                search_res = JSONDeserializer().deserialize(
                    search_res_json.content)
                for hit in search_res["results"]["hits"]["hits"]:
                    instances[hit["_source"]
                              ["resourceinstanceid"]] = hit["_source"]
                for key, value in instances.items():
                    all_instances[key] = value
            except KeyError:
                # NOTE(review): if the KeyError is raised before search_res is
                # assigned, this print itself raises NameError — confirm intent.
                print("no instances found in", search_res)
    return all_instances
def append_dsl(self, search_results_object, permitted_nodegroups,
               include_provisional):
    """
    Append term/string/concept filters from the request querystring to the
    Elasticsearch query in search_results_object.

    Each term builds a nested Bool filter scoped to the caller's permitted
    nodegroups; "inverted" terms are added as must_not clauses.
    """
    search_query = Bool()
    querysting_params = self.request.GET.get(details["componentname"], "")
    for term in JSONDeserializer().deserialize(querysting_params):
        if term["type"] == "term" or term["type"] == "string":
            string_filter = Bool()
            if term["type"] == "term":
                # exact phrase match
                string_filter.must(
                    Match(field="strings.string",
                          query=term["value"],
                          type="phrase"))
            elif term["type"] == "string":
                # prefix match on both the raw and case/diacritic-folded string
                string_filter.should(
                    Match(field="strings.string",
                          query=term["value"],
                          type="phrase_prefix"))
                string_filter.should(
                    Match(field="strings.string.folded",
                          query=term["value"],
                          type="phrase_prefix"))
            # Filter provisional values in or out depending on the caller's mode.
            if include_provisional is False:
                string_filter.must_not(
                    Match(field="strings.provisional",
                          query="true",
                          type="phrase"))
            elif include_provisional == "only provisional":
                string_filter.must_not(
                    Match(field="strings.provisional",
                          query="false",
                          type="phrase"))
            string_filter.filter(
                Terms(field="strings.nodegroup_id",
                      terms=permitted_nodegroups))
            nested_string_filter = Nested(path="strings", query=string_filter)
            if term["inverted"]:
                search_query.must_not(nested_string_filter)
            else:
                search_query.must(nested_string_filter)
            # need to set min_score because the query returns results with score 0 and those have to be removed, which I don't think it should be doing
            search_results_object["query"].min_score("0.01")
        elif term["type"] == "concept":
            # Expand the concept to all of its child concepts before filtering.
            concept_ids = _get_child_concepts(term["value"])
            conceptid_filter = Bool()
            conceptid_filter.filter(
                Terms(field="domains.conceptid", terms=concept_ids))
            conceptid_filter.filter(
                Terms(field="domains.nodegroup_id",
                      terms=permitted_nodegroups))
            if include_provisional is False:
                conceptid_filter.must_not(
                    Match(field="domains.provisional",
                          query="true",
                          type="phrase"))
            elif include_provisional == "only provisional":
                conceptid_filter.must_not(
                    Match(field="domains.provisional",
                          query="false",
                          type="phrase"))
            nested_conceptid_filter = Nested(path="domains",
                                             query=conceptid_filter)
            if term["inverted"]:
                search_query.must_not(nested_conceptid_filter)
            else:
                search_query.filter(nested_conceptid_filter)
    search_results_object["query"].add_query(search_query)
def post(self, request):
    """
    Create or update a MobileSurvey from the JSON request body.

    Syncs users/groups/cards, notifies members when the active flag flips,
    parses ``bounds`` into a MultiPolygon, then saves inside a transaction.
    Returns a 500 JSONResponse with an error message when the save fails
    (e.g. the CouchDB backing service refuses the connection).
    """
    data = JSONDeserializer().deserialize(request.body)
    if data['id'] is None:
        mobile_survey = MobileSurvey()
        mobile_survey.createdby = self.request.user
    else:
        mobile_survey = MobileSurvey.objects.get(pk=data['id'])
    self.update_identities(data, mobile_survey, mobile_survey.users.all(),
                           'users', User, models.MobileSurveyXUser)
    self.update_identities(data, mobile_survey, mobile_survey.groups.all(),
                           'groups', Group, models.MobileSurveyXGroup)
    # BUGFIX: `unicode` is undefined in Python 3; use str() for card UUIDs.
    mobile_survey_card_ids = set(
        [str(c.cardid) for c in mobile_survey.cards.all()])
    form_card_ids = set(data['cards'])
    cards_to_remove = mobile_survey_card_ids - form_card_ids
    cards_to_add = form_card_ids - mobile_survey_card_ids
    cards_to_update = mobile_survey_card_ids & form_card_ids
    for card_id in cards_to_add:
        models.MobileSurveyXCard.objects.create(
            card=models.CardModel.objects.get(cardid=card_id),
            mobile_survey=mobile_survey,
            sortorder=data['cards'].index(card_id))
    for card_id in cards_to_update:
        mobile_survey_card = models.MobileSurveyXCard.objects.filter(
            mobile_survey=mobile_survey).get(
                card=models.CardModel.objects.get(cardid=card_id))
        mobile_survey_card.sortorder = data['cards'].index(card_id)
        mobile_survey_card.save()
    for card_id in cards_to_remove:
        models.MobileSurveyXCard.objects.filter(
            card=models.CardModel.objects.get(cardid=card_id),
            mobile_survey=mobile_survey).delete()
    if mobile_survey.active != data['active']:
        # notify users in the mobile_survey that the state of the mobile_survey has changed
        if data['active']:
            self.notify_mobile_survey_start(request, mobile_survey)
        else:
            self.notify_mobile_survey_end(request, mobile_survey)
    mobile_survey.name = data['name']
    mobile_survey.description = data['description']
    if data['startdate'] != '':
        mobile_survey.startdate = data['startdate']
    if data['enddate'] != '':
        mobile_survey.enddate = data['enddate']
    mobile_survey.datadownloadconfig = data['datadownloadconfig']
    mobile_survey.active = data['active']
    mobile_survey.tilecache = data['tilecache']
    polygons = []
    try:
        # .upper() raises AttributeError when bounds is already a parsed dict.
        data['bounds'].upper()
        data['bounds'] = json.loads(data['bounds'])
    except AttributeError:
        pass
    if 'features' in data['bounds']:
        for feature in data['bounds']['features']:
            for coord in feature['geometry']['coordinates']:
                polygons.append(Polygon(coord))
    mobile_survey.bounds = MultiPolygon(polygons)
    mobile_survey.lasteditedby = self.request.user
    try:
        with transaction.atomic():
            mobile_survey.save()
    except Exception as e:
        error_title = _('Unable to save survey')
        # BUGFIX: generic exceptions have no .strerror/.message in Python 3;
        # probe strerror defensively and fall back to str(e).
        if getattr(e, 'strerror', None) == 'Connection refused':
            error_message = "Unable to connect to CouchDB"
        else:
            error_message = str(e)
        return JSONResponse(
            {'success': False, 'message': error_message, 'title': error_title},
            status=500)
    return JSONResponse({'success': True, 'mobile_survey': mobile_survey})
def test_save_and_update_dont_orphan_records_in_the_db(self):
    """
    test that the proper number of nodes, edges, nodegroups, and cards are
    persisted to the database during save and update operations
    """
    nodes_count_before = models.Node.objects.count()
    edges_count_before = models.Edge.objects.count()
    nodegroups_count_before = models.NodeGroup.objects.count()
    card_count_before = models.CardModel.objects.count()
    # test that data is persisted properly when creating a new graph
    graph = Graph.new(is_resource=False)
    nodes_count_after = models.Node.objects.count()
    edges_count_after = models.Edge.objects.count()
    nodegroups_count_after = models.NodeGroup.objects.count()
    card_count_after = models.CardModel.objects.count()
    self.assertEqual(nodes_count_after - nodes_count_before, 1)
    self.assertEqual(edges_count_after - edges_count_before, 0)
    self.assertEqual(nodegroups_count_after - nodegroups_count_before, 1)
    self.assertEqual(card_count_after - card_count_before, 1)
    # test that data is persisted properly during an append operation
    graph.append_branch(
        'http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by',
        graphid=self.NODE_NODETYPE_GRAPHID)
    graph.save()
    nodes_count_after = models.Node.objects.count()
    edges_count_after = models.Edge.objects.count()
    nodegroups_count_after = models.NodeGroup.objects.count()
    card_count_after = models.CardModel.objects.count()
    self.assertEqual(nodes_count_after - nodes_count_before, 3)
    self.assertEqual(edges_count_after - edges_count_before, 2)
    self.assertEqual(nodegroups_count_after - nodegroups_count_before, 2)
    self.assertEqual(card_count_after - card_count_before, 2)
    # test that removing a node group by setting it to None, removes it from the db
    node_to_update = None
    # BUGFIX: dict.iteritems() was removed in Python 3; use items().
    for node_id, node in graph.nodes.items():
        if node.name == 'Node':
            self.assertTrue(node.is_collector)
            # round-trip through the serializer to get a plain-dict copy of the node
            node_to_update = JSONDeserializer().deserialize(
                JSONSerializer().serialize(node))
    node_to_update['nodegroup_id'] = None
    graph.update_node(node_to_update.copy())
    graph.save()
    nodegroups_count_after = models.NodeGroup.objects.count()
    card_count_after = models.CardModel.objects.count()
    self.assertEqual(nodegroups_count_after - nodegroups_count_before, 1)
    self.assertEqual(card_count_after - card_count_before, 1)
    # test that adding back a node group adds it back to the db
    node_to_update['nodegroup_id'] = node_to_update['nodeid']
    graph.update_node(node_to_update)
    graph.save()
    nodegroups_count_after = models.NodeGroup.objects.count()
    card_count_after = models.CardModel.objects.count()
    self.assertEqual(nodegroups_count_after - nodegroups_count_before, 2)
    self.assertEqual(card_count_after - card_count_before, 2)
def post(self, request, surveyid):
    """
    Create-or-update endpoint for a collector (mobile survey) project.

    Creates the MobileSurveyModel row if ``data["id"]`` doesn't exist yet,
    syncs users/groups/cards, copies scalar fields from the payload, parses
    ``bounds`` (FeatureCollection or bare MultiPolygon geometry) into a
    MultiPolygon, and saves. CouchDB connection failures are reported as a
    JSONErrorResponse rather than raised.
    """
    data = JSONDeserializer().deserialize(request.body)
    # First-time save: create the underlying model row under the URL's surveyid.
    if models.MobileSurveyModel.objects.filter(pk=data["id"]).exists() is False:
        mobile_survey_model = models.MobileSurveyModel(
            id=surveyid, name=data["name"], createdby=self.request.user, lasteditedby=self.request.user
        )
        mobile_survey_model.save()
    mobile_survey = MobileSurvey.objects.get(pk=data["id"])
    self.update_identities(data, mobile_survey, mobile_survey.users.all(), "users", User, models.MobileSurveyXUser)
    self.update_identities(data, mobile_survey, mobile_survey.groups.all(), "groups", Group, models.MobileSurveyXGroup)
    # Diff stored card ids against the submitted list to decide add/update/remove.
    mobile_survey_card_ids = {str(c.cardid) for c in mobile_survey.cards.all()}
    form_card_ids = set(data["cards"])
    cards_to_remove = mobile_survey_card_ids - form_card_ids
    cards_to_add = form_card_ids - mobile_survey_card_ids
    cards_to_update = mobile_survey_card_ids & form_card_ids
    for card_id in cards_to_add:
        models.MobileSurveyXCard.objects.create(
            card=models.CardModel.objects.get(cardid=card_id), mobile_survey=mobile_survey, sortorder=data["cards"].index(card_id)
        )
    for card_id in cards_to_update:
        mobile_survey_card = models.MobileSurveyXCard.objects.filter(mobile_survey=mobile_survey).get(
            card=models.CardModel.objects.get(cardid=card_id)
        )
        # sortorder mirrors the card's position in the submitted list
        mobile_survey_card.sortorder = data["cards"].index(card_id)
        mobile_survey_card.save()
    for card_id in cards_to_remove:
        models.MobileSurveyXCard.objects.filter(card=models.CardModel.objects.get(cardid=card_id), mobile_survey=mobile_survey).delete()
    # TODO Disabling the following section until we make emailing users optional
    # if mobile_survey.active != data['active']:
    # notify users in the mobile_survey that the state of the mobile_survey has changed
    # if data['active']:
    # self.notify_mobile_survey_start(request, mobile_survey)
    # else:
    # self.notify_mobile_survey_end(request, mobile_survey)
    mobile_survey.name = data["name"]
    mobile_survey.description = data["description"]
    mobile_survey.onlinebasemaps = data["onlinebasemaps"]
    if data["startdate"] != "":
        mobile_survey.startdate = data["startdate"]
    if data["enddate"] != "":
        mobile_survey.enddate = data["enddate"]
    mobile_survey.datadownloadconfig = data["datadownloadconfig"]
    mobile_survey.active = data["active"]
    mobile_survey.tilecache = data["tilecache"]
    polygons = []
    # try:
    # data['bounds'].upper()
    # data['bounds'] = json.loads(data['bounds'])
    # except AttributeError as e:
    # print('bounds is not a string')
    if "features" in data["bounds"]:
        # GeoJSON FeatureCollection: one Polygon per feature coordinate ring set.
        for feature in data["bounds"]["features"]:
            for coord in feature["geometry"]["coordinates"]:
                polygons.append(Polygon(coord))
    elif len(polygons) == 0:
        # Bare geometry object: accept a MultiPolygon directly.
        try:
            if data["bounds"]["type"] == "MultiPolygon":
                for poly in data["bounds"]["coordinates"]:
                    for coords in poly:
                        polygons.append(Polygon(coords))
        except AttributeError as e:
            print("bounds is not a geojson geometry object")
    mobile_survey.bounds = MultiPolygon(polygons)
    mobile_survey.lasteditedby = self.request.user
    try:
        with transaction.atomic():
            mobile_survey.save()
    except ConnectionRefusedError as e:
        # mobile_survey.save() presumably pushes to CouchDB — a refused
        # connection is surfaced as a friendly error rather than a 500 trace.
        error_title = _("Unable to save collector project")
        error_message = _("Failed to connect to a CouchDB service")
        connection_error = JSONErrorResponse(error_title, error_message)
        return connection_error
    except Exception as e:
        error_title = _("Unable to save collector project")
        logger.exception(e)
        connection_error = JSONErrorResponse(error_title, e)
        return connection_error
    return JSONResponse({"success": True, "mobile_survey": mobile_survey})
def import_business_data(
    self,
    file_format=None,
    business_data=None,
    mapping=None,
    overwrite="append",
    bulk=False,
    create_concepts=False,
    create_collections=False,
    use_multiprocessing=False,
    prevent_indexing=False,
    transaction_id=None,
):
    """
    Import business data in json/jsonl/csv/shp/zip format, dispatching to the
    appropriate reader. Falls back to self.file_format / self.business_data /
    self.mapping when the corresponding argument is None. The finally-block
    repairs ResourceXResource rows missing a target graph id and runs each
    datatype's after_update_all hook.
    """
    reader = None
    start = time()
    cursor = connection.cursor()
    try:
        if file_format is None:
            file_format = self.file_format
        if business_data is None:
            business_data = self.business_data
        if mapping is None:
            mapping = self.mapping
        if file_format == "json":
            reader = ArchesFileReader()
            reader.import_business_data(business_data, mapping=mapping, overwrite=overwrite, prevent_indexing=prevent_indexing, transaction_id=transaction_id)
        elif file_format == "jsonl":
            # NOTE(review): "rU" mode is deprecated (removed in Python 3.11) — use "r".
            with open(self.file[0], "rU") as openf:
                lines = openf.readlines()
                if use_multiprocessing is True:
                    pool = Pool(cpu_count())
                    # NOTE(review): Pool.map() takes no prevent_indexing kwarg —
                    # this call raises TypeError; likely needs functools.partial.
                    pool.map(import_one_resource, lines, prevent_indexing=prevent_indexing)
                    connections.close_all()
                    reader = ArchesFileReader()
                else:
                    reader = ArchesFileReader()
                    # One resource per JSONL line, imported individually.
                    for line in lines:
                        archesresource = JSONDeserializer().deserialize(
                            line)
                        reader.import_business_data(
                            {"resources": [archesresource]},
                            overwrite=overwrite,
                            prevent_indexing=prevent_indexing,
                            transaction_id=transaction_id,
                        )
        elif file_format == "csv" or file_format == "shp" or file_format == "zip":
            if mapping is not None:
                reader = CsvReader()
                reader.import_business_data(
                    business_data=business_data,
                    mapping=mapping,
                    overwrite=overwrite,
                    bulk=bulk,
                    create_concepts=create_concepts,
                    create_collections=create_collections,
                    prevent_indexing=prevent_indexing,
                    transaction_id=transaction_id,
                )
            else:
                print("*" * 80)
                print(
                    f"ERROR: No mapping file detected for {self.file[0]}. Please indicate one \
with the '-c' paramater or place one in the same directory as your business data."
                )
                print("*" * 80)
        elapsed = time() - start
        print("Time to import_business_data = {0}".format(
            datetime.timedelta(seconds=elapsed)))
        if reader is not None:
            reader.report_errors()
    finally:
        # cleans up the ResourceXResource table, adding any graph_id values that were unavailable during package/csv load
        for res_x_res in ResourceXResource.objects.filter(
                resourceinstanceto_graphid__isnull=True):
            # wrapping in a try allows for graceful handling of corrupted data
            try:
                res_x_res.resourceinstanceto_graphid = res_x_res.resourceinstanceidto.graph
            except:
                pass
            res_x_res.save()
        datatype_factory = DataTypeFactory()
        datatypes = DDataType.objects.all()
        for datatype in datatypes:
            try:
                datatype_instance = datatype_factory.get_instance(
                    datatype.datatype)
                datatype_instance.after_update_all()
            except BrokenPipeError as e:
                logger = logging.getLogger(__name__)
                logger.info(
                    "Celery not working: tasks unavailable during import.")
def post(self, request):
    """
    Tile endpoint dispatching on self.action:

    - 'update_tile': deserialize the posted tile, create its parent resource
      if missing, permission-check, and save within a transaction.
    - 'reorder_tiles': persist new sortorder values for a list of tiles.
    - 'delete_provisional_tile': remove provisional edits for one tile or a
      batch payload of edits.

    Returns 404 when no action matches.
    """
    if self.action == 'update_tile':
        json = request.POST.get('data', None)
        if json != None:
            data = JSONDeserializer().deserialize(json)
            try:
                models.ResourceInstance.objects.get(
                    pk=data['resourceinstance_id'])
            except ObjectDoesNotExist:
                # Parent resource doesn't exist yet — create and index it first.
                resource = Resource()
                resource.resourceinstanceid = data['resourceinstance_id']
                graphid = models.Node.objects.filter(
                    nodegroup=data['nodegroup_id'])[0].graph_id
                resource.graph_id = graphid
                resource.save(user=request.user)
                resource.index()
            tile_id = data['tileid']
            if tile_id != None and tile_id != '':
                # Existing tile: invalidate its cached resource before overwrite.
                old_tile = Tile.objects.get(pk=tile_id)
                clean_resource_cache(old_tile)
            tile = Tile(data)
            if tile.filter_by_perm(request.user, 'write_nodegroup'):
                with transaction.atomic():
                    try:
                        tile.save(request=request)
                        # NOTE(review): hard-coded tile id + inline couchdb sync
                        # looks like leftover debugging code — confirm removal.
                        if tile_id == '4345f530-aa90-48cf-b4b3-92d1185ca439':
                            import couchdb
                            import json as json_json
                            couch = couchdb.Server(settings.COUCHDB_URL)
                            for project in models.MobileSurveyModel.objects.all(
                            ):
                                db = couch['project_' + str(project.id)]
                                #tile = models.TileModel.objects.get(pk='4345f530-aa90-48cf-b4b3-92d1185ca439')
                                tile_json = json_json.loads(
                                    JSONSerializer().serialize(tile))
                                tile_json['_id'] = tile_json['tileid']
                                for row in db.view('_all_docs',
                                                   include_docs=True):
                                    if 'tileid' in row.doc and tile_json[
                                            '_id'] == row.doc['_id']:
                                        tile_json['_rev'] = row.doc['_rev']
                                        db.save(tile_json)
                    except ValidationError as e:
                        return JSONResponse(
                            {
                                'status': 'false',
                                'message': e.args
                            }, status=500)
                tile.after_update_all()
                clean_resource_cache(tile)
                update_system_settings_cache(tile)
                return JSONResponse(tile)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [_('Request Failed'), _('Permission Denied')]
                    }, status=500)
    if self.action == 'reorder_tiles':
        json = request.body
        if json != None:
            data = JSONDeserializer().deserialize(json)
            if 'tiles' in data and len(data['tiles']) > 0:
                sortorder = 0
                with transaction.atomic():
                    for tile in data['tiles']:
                        t = Tile(tile)
                        # Only tiles the user may write get renumbered.
                        if t.filter_by_perm(request.user, 'write_nodegroup'):
                            t.sortorder = sortorder
                            t.save(update_fields=['sortorder'],
                                   request=request)
                            sortorder = sortorder + 1
                return JSONResponse(data)
    if self.action == 'delete_provisional_tile':
        data = request.POST
        if 'tileid' in data:
            # Single-tile form
            provisionaledits = self.delete_provisional_edit(data, request)
            return JSONResponse(provisionaledits)
        else:
            # Batch form: a JSON payload containing a list of edits
            payload = data.get('payload', None)
            if payload is not None:
                edits = jsonparser.loads(payload)
                for edit in edits['edits']:
                    provisionaledits = self.delete_provisional_edit(
                        edit, request)
                return JSONResponse({'result': 'success'})
    return HttpResponseNotFound()
def delete(self, request):
    """
    Delete a tile identified by ``tileid`` in the JSON request body.

    Resource Reviewers may delete any tile; other users may only delete
    provisional tiles. When a reviewer deletes a tile holding exactly one
    user's provisional edits, the deletion is logged against that editor.
    Returns the deleted tile, or a 500 JSONResponse on missing tile /
    missing permission.
    """
    json = request.body
    if json != None:
        ret = []
        data = JSONDeserializer().deserialize(json)
        with transaction.atomic():
            try:
                tile = Tile.objects.get(tileid=data['tileid'])
            except ObjectDoesNotExist:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [
                            _('This tile is no longer available'),
                            _('It was likely already deleted by another user'
                              )
                        ]
                    }, status=500)
            user_is_reviewer = request.user.groups.filter(
                name='Resource Reviewer').exists()
            if user_is_reviewer or tile.is_provisional() == True:
                if tile.filter_by_perm(request.user, 'delete_nodegroup'):
                    nodegroup = models.NodeGroup.objects.get(
                        pk=tile.nodegroup_id)
                    clean_resource_cache(tile)
                    if tile.is_provisional() is True and len(
                            tile.provisionaledits.keys()) == 1:
                        # BUGFIX: dict views are not subscriptable in Python 3
                        # (.keys()[0] raises TypeError); take the sole key via iter.
                        provisional_editor_id = next(
                            iter(tile.provisionaledits))
                        edit = tile.provisionaledits[provisional_editor_id]
                        provisional_editor = User.objects.get(
                            pk=provisional_editor_id)
                        reviewer = request.user
                        tile.delete(request=request,
                                    provisional_edit_log_details={
                                        "user": reviewer,
                                        "action": "delete edit",
                                        "edit": edit,
                                        "provisional_editor":
                                        provisional_editor
                                    })
                    else:
                        tile.delete(request=request)
                    tile.after_update_all()
                    update_system_settings_cache(tile)
                    return JSONResponse(tile)
                else:
                    return JSONResponse(
                        {
                            'status': 'false',
                            'message':
                            [_('Request Failed'), _('Permission Denied')]
                        }, status=500)
            else:
                return JSONResponse(
                    {
                        'status': 'false',
                        'message': [
                            _('Request Failed'),
                            _('You do not have permissions to delete a tile with authoritative data.'
                              )
                        ]
                    }, status=500)
    return HttpResponseNotFound()
def get(self, request, graphid, nodeid=None):
    """
    Graph utility endpoint dispatching on self.action:

    - 'export_graph': serialize the graph (plus system metadata) as a JSON
      file attachment.
    - 'export_mapping_file': zip the graph's mapping configuration files and
      return them as an attachment.
    - 'get_domain_connections': list valid domain ontology classes/properties.
    - otherwise: 'get_related_nodes' / 'get_valid_domain_nodes' lookups for a
      specific node.
    """
    if self.action == "export_graph":
        graph = get_graphs_for_export([graphid])
        graph["metadata"] = system_metadata()
        f = JSONSerializer().serialize(graph, indent=4)
        # round-trip to pull the graph name out of the serialized payload
        graph_name = JSONDeserializer().deserialize(f)["graph"][0]["name"]
        response = HttpResponse(f, content_type="json/plain")
        response[
            "Content-Disposition"] = 'attachment; filename="%s.json"' % (
                graph_name)
        return response
    elif self.action == "export_mapping_file":
        files_for_export = create_mapping_configuration_file(graphid, True)
        file_name = Graph.objects.get(graphid=graphid).name
        buffer = BytesIO()
        with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zip:
            for f in files_for_export:
                f["outputfile"].seek(0)
                zip.writestr(f["name"], f["outputfile"].read())
            zip.close()
        buffer.flush()
        zip_stream = buffer.getvalue()
        buffer.close()
        response = HttpResponse()
        response[
            "Content-Disposition"] = "attachment; filename=" + file_name + ".zip"
        response["Content-length"] = str(len(zip_stream))
        response["Content-Type"] = "application/zip"
        response.write(zip_stream)
        return response
    elif self.action == "get_domain_connections":
        res = []
        graph = Graph.objects.get(graphid=graphid)
        ontology_class = request.GET.get("ontology_class", None)
        ret = graph.get_valid_domain_ontology_classes()
        for r in ret:
            res.append({
                "ontology_property": r["ontology_property"],
                "ontology_classes": [c for c in r["ontology_classes"]]
            })
        return JSONResponse(res)
    else:
        graph = Graph.objects.get(graphid=graphid)
        if self.action == "get_related_nodes":
            parent_nodeid = request.GET.get("parent_nodeid", None)
            ret = graph.get_valid_ontology_classes(
                nodeid=nodeid, parent_nodeid=parent_nodeid)
        elif self.action == "get_valid_domain_nodes":
            # empty-string nodeid from the URL means "no node"
            if nodeid == "":
                nodeid = None
            ret = graph.get_valid_domain_ontology_classes(nodeid=nodeid)
        return JSONResponse(ret)
    return HttpResponseNotFound()
def setUpClass(cls):
    """
    Build the shared search-test fixture: import the Resource Test Model
    graph, create a restricted test user, post a concept with min/max year
    notes through the concept view, and save one resource carrying name,
    cultural-period, creation-date and geometry tiles.
    """
    models.ResourceInstance.objects.all().delete()
    cls.client = Client()
    cls.client.login(username="******", password="******")
    # NOTE(review): "rU" open mode is deprecated (removed in Python 3.11).
    with open(
            os.path.join(
                "tests/fixtures/resource_graphs/Resource Test Model.json"),
            "rU") as f:
        archesfile = JSONDeserializer().deserialize(f)
        resource_graph_importer(archesfile["graph"])
    # Well-known node ids from the imported test model
    cls.search_model_graphid = "e503a445-fa5f-11e6-afa8-14109fd34195"
    cls.search_model_cultural_period_nodeid = "7a182580-fa60-11e6-96d1-14109fd34195"
    cls.search_model_creation_date_nodeid = "1c1d05f5-fa60-11e6-887f-14109fd34195"
    cls.search_model_destruction_date_nodeid = "e771b8a1-65fe-11e7-9163-14109fd34195"
    cls.search_model_name_nodeid = "2fe14de3-fa61-11e6-897b-14109fd34195"
    cls.search_model_sensitive_info_nodeid = "57446fae-65ff-11e7-b63a-14109fd34195"
    cls.search_model_geom_nodeid = "3ebc6785-fa61-11e6-8c85-14109fd34195"
    cls.user = User.objects.create_user("test", "*****@*****.**",
                                        "password")
    cls.user.groups.add(Group.objects.get(name="Guest"))
    # Deny the test user access to the destruction-date nodegroup so
    # permission filtering can be asserted in the tests.
    nodegroup = models.NodeGroup.objects.get(
        pk=cls.search_model_destruction_date_nodeid)
    assign_perm("no_access_to_nodegroup", cls.user, nodegroup)
    # Add a concept that defines a min and max date
    concept = {
        "id": "00000000-0000-0000-0000-000000000001",
        "legacyoid": "ARCHES",
        "nodetype": "ConceptScheme",
        "values": [],
        "subconcepts": [{
            "values": [
                {
                    "value": "Mock concept",
                    "language": "en-US",
                    "category": "label",
                    "type": "prefLabel",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1950",
                    "language": "en-US",
                    "category": "note",
                    "type": "min_year",
                    "id": "",
                    "conceptid": ""
                },
                {
                    "value": "1980",
                    "language": "en-US",
                    "category": "note",
                    "type": "max_year",
                    "id": "",
                    "conceptid": ""
                },
            ],
            "relationshiptype": "hasTopConcept",
            "nodetype": "Concept",
            "id": "",
            "legacyoid": "",
            "subconcepts": [],
            "parentconcepts": [],
            "relatedconcepts": [],
        }],
    }
    post_data = JSONSerializer().serialize(concept)
    content_type = "application/x-www-form-urlencoded"
    response = cls.client.post(
        reverse(
            "concept",
            kwargs={"conceptid": "00000000-0000-0000-0000-000000000001"}),
        post_data, content_type)
    response_json = json.loads(response.content)
    valueid = response_json["subconcepts"][0]["values"][0]["id"]
    cls.conceptid = response_json["subconcepts"][0]["id"]
    # Add resource with Name, Cultural Period, Creation Date and Geometry
    cls.test_resource = Resource(graph_id=cls.search_model_graphid)
    # Add Name
    tile = Tile(data={cls.search_model_name_nodeid: "Test Name 1"},
                nodegroup_id=cls.search_model_name_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Cultural Period
    tile = Tile(data={cls.search_model_cultural_period_nodeid: [valueid]},
                nodegroup_id=cls.search_model_cultural_period_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Creation Date
    tile = Tile(data={cls.search_model_creation_date_nodeid: "1941-01-01"},
                nodegroup_id=cls.search_model_creation_date_nodeid)
    cls.test_resource.tiles.append(tile)
    # Add Gometry
    cls.geom = {
        "type": "FeatureCollection",
        "features": [{
            "geometry": {
                "type": "Point",
                "coordinates": [0, 0]
            },
            "type": "Feature",
            "properties": {}
        }],
    }
    tile = Tile(data={cls.search_model_geom_nodeid: cls.geom},
                nodegroup_id=cls.search_model_geom_nodeid)
    cls.test_resource.tiles.append(tile)
    cls.test_resource.save()
    # add delay to allow for indexes to be updated
    time.sleep(1)
def post(self, request, graphid=None):
    """
    Graph-manager POST endpoint dispatching on self.action:
    import_graph, new_graph, update_node, update_node_layer, append_branch,
    append_node, move_node, export_branch, clone_graph, reorder_nodes.

    Returns the action's result as JSON; graph validation, inactive-model and
    Elasticsearch errors are translated into JSONErrorResponse objects.
    """
    ret = {}
    try:
        if self.action == "import_graph":
            graph_file = request.FILES.get("importedGraph").read()
            graphs = JSONDeserializer().deserialize(graph_file)["graph"]
            ret = GraphImporter.import_graph(graphs)
        else:
            # All remaining actions operate on an existing graph (when given)
            # and a JSON body.
            if graphid is not None:
                graph = Graph.objects.get(graphid=graphid)
            data = JSONDeserializer().deserialize(request.body)
            if self.action == "new_graph":
                isresource = data[
                    "isresource"] if "isresource" in data else False
                name = _("New Resource Model") if isresource else _(
                    "New Branch")
                author = request.user.first_name + " " + request.user.last_name
                ret = Graph.new(name=name,
                                is_resource=isresource,
                                author=author)
            elif self.action == "update_node":
                updated_values = graph.update_node(data)
                # Saving with a nodeid limits the save to that node's subtree.
                if "nodeid" in data:
                    graph.save(nodeid=data["nodeid"])
                else:
                    graph.save()
                ret = JSONSerializer().serializeToPython(graph)
                ret["updated_values"] = updated_values
                ret["default_card_name"] = graph.temp_node_name
            elif self.action == "update_node_layer":
                nodeid = uuid.UUID(str(data.get("nodeid")))
                node = graph.nodes[nodeid]
                node.config = data["config"]
                ret = graph
                node.save()
            elif self.action == "append_branch":
                ret = graph.append_branch(data["property"],
                                          nodeid=data["nodeid"],
                                          graphid=data["graphid"])
                ret = ret.serialize()
                ret["nodegroups"] = graph.get_nodegroups()
                ret["cards"] = graph.get_cards()
                ret["widgets"] = graph.get_widgets()
                graph.save()
            elif self.action == "append_node":
                ret = graph.append_node(nodeid=data["nodeid"])
                graph.save()
            elif self.action == "move_node":
                ret = graph.move_node(data["nodeid"], data["property"],
                                      data["newparentnodeid"])
                graph.save()
            elif self.action == "export_branch":
                clone_data = graph.copy(root=data)
                clone_data["copy"].save()
                ret = {"success": True, "graphid": clone_data["copy"].pk}
            elif self.action == "clone_graph":
                clone_data = graph.copy()
                ret = clone_data["copy"]
                ret.save()
                ret.copy_functions(
                    graph, [clone_data["nodes"], clone_data["nodegroups"]])
            elif self.action == "reorder_nodes":
                json = request.body
                if json is not None:
                    data = JSONDeserializer().deserialize(json)
                    if "nodes" in data and len(data["nodes"]) > 0:
                        sortorder = 0
                        with transaction.atomic():
                            # Persist the submitted ordering as sequential sortorder values.
                            for node in data["nodes"]:
                                no = models.Node.objects.get(
                                    pk=node["nodeid"])
                                no.sortorder = sortorder
                                no.save()
                                sortorder = sortorder + 1
                        ret = data
        return JSONResponse(ret)
    except GraphValidationError as e:
        return JSONErrorResponse(e.title, e.message, {"status": "Failed"})
    except ModelInactiveError as e:
        return JSONErrorResponse(e.title, e.message)
    except RequestError as e:
        return JSONErrorResponse(
            _("Elasticsearch indexing error"),
            _("""If you want to change the datatype of an existing node. Delete and then re-create the node, or export the branch then edit the datatype and re-import the branch."""
              ),
        )
def concept(request, conceptid):
    """RDM concept endpoint handling GET (report pages), POST (create/update
    concepts, upload SKOS or image files) and DELETE (remove concepts).

    GET renders an HTML concept report (or the RDM landing page when no
    conceptid is given); POST/DELETE return JSONResponses.
    """
    f = request.GET.get('f', 'json')
    mode = request.GET.get('mode', '')
    lang = request.GET.get('lang', settings.LANGUAGE_CODE)
    pretty = request.GET.get('pretty', False)
    if request.method == 'GET':
        include_subconcepts = request.GET.get('include_subconcepts',
                                              'true') == 'true'
        include_parentconcepts = request.GET.get('include_parentconcepts',
                                                 'true') == 'true'
        include_relatedconcepts = request.GET.get('include_relatedconcepts',
                                                  'true') == 'true'
        emulate_elastic_search = request.GET.get('emulate_elastic_search',
                                                 'false') == 'true'
        depth_limit = request.GET.get('depth_limit', None)
        # NOTE(review): the query-string depth_limit is immediately
        # overridden to 1 here, making the parameter above dead.
        depth_limit = 1
        if not conceptid:
            # No concept requested: render the RDM landing page with counts
            # per node type.
            return render(
                request, 'views/rdm/concept-report.htm', {
                    'lang':
                    lang,
                    'concept_count':
                    models.Concept.objects.filter(nodetype='Concept').count(),
                    'collection_count':
                    models.Concept.objects.filter(
                        nodetype='Collection').count(),
                    'scheme_count':
                    models.Concept.objects.filter(
                        nodetype='ConceptScheme').count(),
                    'entitytype_count':
                    models.Concept.objects.filter(
                        nodetype='EntityType').count(),
                    'default_report':
                    True
                })
        labels = []
        # Build the full concept graph; semantic mode is the default when no
        # mode is given.
        concept_graph = Concept().get(
            id=conceptid,
            include_subconcepts=include_subconcepts,
            include_parentconcepts=include_parentconcepts,
            include_relatedconcepts=include_relatedconcepts,
            depth_limit=depth_limit,
            up_depth_limit=None,
            lang=lang,
            semantic=(mode == 'semantic' or mode == ''))
        languages = sort_languages(models.DLanguage.objects.all(), lang)
        valuetypes = models.DValueType.objects.all()
        relationtypes = models.DRelationType.objects.all()
        prefLabel = concept_graph.get_preflabel(lang=lang)
        # Pre-resolve preferred labels so the template does not have to.
        for subconcept in concept_graph.subconcepts:
            subconcept.prefLabel = subconcept.get_preflabel(lang=lang)
        for relatedconcept in concept_graph.relatedconcepts:
            relatedconcept.prefLabel = relatedconcept.get_preflabel(lang=lang)
        for value in concept_graph.values:
            if value.category == 'label':
                labels.append(value)
        if (mode == 'semantic' or mode == '') and (
                concept_graph.nodetype == 'Concept'
                or concept_graph.nodetype == 'ConceptScheme'
                or concept_graph.nodetype == 'EntityType'):
            # Schemes may only take "Properties" relations as parents; other
            # node types take semantic relations minus the broader/related
            # ones that are handled elsewhere.
            if concept_graph.nodetype == 'ConceptScheme':
                parent_relations = relationtypes.filter(category='Properties')
            else:
                parent_relations = relationtypes.filter(
                    category='Semantic Relations').exclude(
                        relationtype='related').exclude(
                            relationtype='broader').exclude(
                                relationtype='broaderTransitive')
            return render(
                request, 'views/rdm/concept-report.htm', {
                    'lang': lang,
                    'prefLabel': prefLabel,
                    'labels': labels,
                    'concept': concept_graph,
                    'languages': languages,
                    'sparql_providers': get_sparql_providers(),
                    'valuetype_labels': valuetypes.filter(category='label'),
                    'valuetype_notes': valuetypes.filter(category='note'),
                    'valuetype_related_values': valuetypes.filter(
                        category__in=['undefined', 'identifiers']),
                    'parent_relations': parent_relations,
                    'related_relations': relationtypes.filter(
                        Q(category='Mapping Properties')
                        | Q(relationtype='related')),
                    'concept_paths': concept_graph.get_paths(lang=lang),
                    'graph_json': JSONSerializer().serialize(
                        concept_graph.get_node_and_links(lang=lang)),
                    'direct_parents': [
                        parent.get_preflabel(lang=lang)
                        for parent in concept_graph.parentconcepts
                    ]
                })
        elif mode == 'collections':
            return render(
                request, 'views/rdm/entitytype-report.htm', {
                    'lang': lang,
                    'prefLabel': prefLabel,
                    'labels': labels,
                    'concept': concept_graph,
                    'languages': languages,
                    'valuetype_labels': valuetypes.filter(category='label'),
                    'valuetype_notes': valuetypes.filter(category='note'),
                    'valuetype_related_values': valuetypes.filter(
                        category__in=['undefined', 'identifiers']),
                    'related_relations': relationtypes.filter(
                        relationtype='member'),
                    'concept_paths': concept_graph.get_paths(lang=lang)
                })
    if request.method == 'POST':
        if len(request.FILES) > 0:
            # Multipart upload: either a SKOS file or a concept image.
            skosfile = request.FILES.get('skosfile', None)
            imagefile = request.FILES.get('file', None)
            if imagefile:
                value = models.FileValue(valueid=str(uuid.uuid4()),
                                         value=request.FILES.get('file', None),
                                         concept_id=conceptid,
                                         valuetype_id='image',
                                         language_id=settings.LANGUAGE_CODE)
                value.save()
                return JSONResponse(value)
            elif skosfile:
                overwrite_options = request.POST.get('overwrite_options',
                                                     None)
                staging_options = request.POST.get('staging_options', None)
                skos = SKOSReader()
                try:
                    rdf = skos.read_file(skosfile)
                    ret = skos.save_concepts_from_skos(rdf, overwrite_options,
                                                       staging_options)
                    return JSONResponse(ret)
                except Exception as e:
                    # Broad catch: any parse/save failure is reported back to
                    # the uploader rather than producing a server error page.
                    return JSONResponse(
                        {
                            'message': {
                                'title':
                                _('Unable to Load SKOS File'),
                                'text':
                                _('There was an issue saving the contents of the file to Arches.'
                                  ) + str(e)
                            }
                        },
                        status=500)
        else:
            # Plain JSON body: create or update the concept and re-index it.
            data = JSONDeserializer().deserialize(request.body)
            if data:
                with transaction.atomic():
                    concept = Concept(data)
                    concept.save()
                    concept.index()
                return JSONResponse(concept)
    if request.method == 'DELETE':
        data = JSONDeserializer().deserialize(request.body)
        if data:
            with transaction.atomic():
                concept = Concept(data)
                delete_self = data[
                    'delete_self'] if 'delete_self' in data else False
                # Core concepts may never be deleted outright.
                if not (delete_self and concept.id in CORE_CONCEPTS):
                    if concept.nodetype == 'Collection':
                        concept.delete(delete_self=delete_self)
                    else:
                        # Refuse deletion if the concept (or any listed
                        # subconcept) is referenced by an existing resource.
                        in_use = False
                        if delete_self:
                            check_concept = Concept().get(
                                data['id'], include_subconcepts=True)
                            in_use = check_concept.check_if_concept_in_use()
                        if 'subconcepts' in data:
                            for subconcept in data['subconcepts']:
                                if in_use == False:
                                    check_concept = Concept().get(
                                        subconcept['id'],
                                        include_subconcepts=True)
                                    in_use = check_concept.check_if_concept_in_use(
                                    )
                        if in_use == False:
                            concept.delete_index(delete_self=delete_self)
                            concept.delete(delete_self=delete_self)
                        else:
                            return JSONResponse({
                                "in_use": in_use,
                                'message': {
                                    'title':
                                    _('Unable to Delete'),
                                    'text':
                                    _('This concept or one of it\'s subconcepts is already in use by an existing resource.'
                                      )
                                }
                            })
                return JSONResponse(concept)
    # NOTE(review): this returns the HttpResponseNotFound CLASS, not an
    # instance — almost certainly missing "()".
    return HttpResponseNotFound
def resource_manager(request, resourcetypeid='', form_id='default', resourceid=''):
    """Main resource editing view (Python 2 code: print statements, ``<>``).

    Handles GET (render the resource form), POST (save form data and drive
    the Draft -> Pending approval -> Published/Rejected workflow, with email
    notifications) and DELETE (remove the resource and its relations).

    Authorization is group-name based: EDIT_/PUBLISH_<resourcetypeid> grant
    access, a single OWNERSHIP_* group is required for non-staff users.
    """
    if resourceid != '':
        resource = Resource(resourceid)
    elif resourcetypeid != '':
        resource = Resource({'entitytypeid': resourcetypeid})
    if form_id == 'default':
        form_id = resource.form_groups[0]['forms'][0]['id']
    form = resource.get_form(form_id)
    # Permissions are, for now, checked only via group membership:
    # the user must be assigned a group named after the resource type.
    if (request.user.username != 'anonymous'):
        user = User.objects.get(username=request.user.username)
        user_groups = user.groups.values_list('name', flat=True)
    else:
        # NOTE(review): anonymous users leave `user` undefined; later uses of
        # `user` (POST/GET paths) would raise NameError for anonymous access.
        user_groups = []
    if (not 'EDIT_' + resourcetypeid in user_groups
            and not 'PUBLISH_' + resourcetypeid in user_groups
            and not request.user.is_staff and not request.user.is_superuser):
        raise UserNotAuthorized('User does have permission for this resource!')
    # Exactly one OWNERSHIP_* group is expected; its suffix identifies the
    # user's working group.
    group_ownerships = 0
    group_ownership = ''
    for group in user_groups:
        if group.startswith('OWNERSHIP_'):
            group_ownership = group[10:]
            group_ownerships = group_ownerships + 1
    if (group_ownerships == 0
            and (resourceid == '' or
                 (resourceid != '' and not request.user.is_staff
                  and not request.user.is_superuser))):
        raise UserNotAuthorized(
            'User does have a ownership group! Please contact Early Watercraft administrator to resolve this issue.'
        )
    if (group_ownerships > 1
            and (resourceid == '' or
                 (resourceid != '' and not request.user.is_staff
                  and not request.user.is_superuser))):
        raise UserNotAuthorized(
            'User have more than one ownership group! Please contact Early Watercraft administrator to resolve this issue.'
        )
    if request.method == 'DELETE':
        # Remove the search index, all relationship documents, then the
        # resource itself.
        resource.delete_index()
        se = SearchEngineFactory().create()
        realtionships = resource.get_related_resources(return_entities=False)
        for realtionship in realtionships:
            se.delete(index='resource_relations',
                      doc_type='all',
                      id=realtionship.resourcexid)
            realtionship.delete()
        resource.delete()
        return JSONResponse({'success': True})
    if request.method == 'POST':
        data = JSONDeserializer().deserialize(request.POST.get('formdata', {}))
        current_status = resource.get_current_status()
        if (resourceid != ''):
            current_group = resource.get_current_group()
        else:
            current_group = group_ownership
        user_can_edit_document = get_user_can_edit_document(
            current_status, current_group, user, resourcetypeid, user_groups,
            group_ownership)
        if (not user_can_edit_document):
            return HttpResponseNotFound(
                '<h1>User can not edit this document!</h1>')
        if 'action' in request.POST:
            action = request.POST.get('action')
            if action == 'ready-for-approval':
                # Workflow: Draft -> Pending approval; notify publishers.
                current_status = 'Pending approval'
                resource.set_resource_status(current_status, user)
                empty_errors_cache()
                errors = []
                actions = get_possible_actions(current_status, False, user)
                if settings.EMAIL_ENABLED:
                    resource_url = request.build_absolute_uri(
                        resolve_url('resource_manager',
                                    resourcetypeid=resourcetypeid,
                                    form_id='summary',
                                    resourceid=resourceid))
                    # Get the list of all publishers in the user's current
                    # ownership group.
                    if group_ownership <> '':
                        search_group = 'OWNERSHIP_' + group_ownership
                        current_group = Group.objects.get(name=search_group)
                        current_users = current_group.user_set.all()
                        search_group = 'PUBLISH_' + resourcetypeid
                        publisher_group = Group.objects.get(name=search_group)
                        publisher_users = publisher_group.user_set.all()
                        recipients = []
                        # Recipients: members of the ownership group who can
                        # also publish this resource type, excluding the
                        # submitting user.
                        for user1 in current_users:
                            if user1 in publisher_users:
                                if user1.username <> user.username:
                                    recipients.append(user1.first_name + ' ' +
                                                      user1.last_name + '<' +
                                                      user1.email + '>')
                        # Prepare the list of emails
                        if len(recipients) > 0:
                            resource_type_name = settings.RESOURCE_TYPE_CONFIGS(
                            )[resourcetypeid]['name']
                            status = 'Pending approval'
                            resource_name = resource.get_primary_name()
                            subject = resource_name + ' (' + resource_type_name + ') - ' + status
                            from_email = settings.EMAIL_FROM
                            text_content = 'User ' + user.first_name + ' ' + user.last_name + ' (' + user.username + ') has submitted a document ' + resource_name + ' (' + resource_type_name + ') for approval.'
                            html_content = 'User <strong>' + user.first_name + ' ' + user.last_name + ' (' + user.username + ')</strong> has submitted a document <a href="' + resource_url + '">' + resource_name + ' (' + resource_type_name + ')</a> for approval.<br>'
                            #print html_content
                            msg = EmailMultiAlternatives(
                                subject, text_content, from_email, recipients)
                            msg.attach_alternative(html_content, "text/html")
                            msg.content_subtype = "html"  # Main content is now text/html
                            # Send the email
                            connection = mail.get_connection()
                            # Manually open the connection
                            connection.open()
                            # Construct an email message that uses the connection
                            msg.send()
                            connection.close()
            else:
                if action == 'reject-approval':
                    # Workflow: Pending approval -> Approval rejected; notify
                    # the user who originally submitted the document.
                    current_status = 'Approval rejected'
                    resource.set_resource_status(current_status, user)
                    empty_errors_cache()
                    errors = []
                    actions = get_possible_actions(current_status, False, user)
                    if settings.EMAIL_ENABLED:
                        # Get the rejection reason
                        rejectedDescription = request.POST.get('description')
                        resource_url = request.build_absolute_uri(
                            resolve_url('resource_manager',
                                        resourcetypeid=resourcetypeid,
                                        form_id='summary',
                                        resourceid=resourceid))
                        # Find the user who submitted the document for review
                        # by scanning the recent edit log for the
                        # Draft -> Pending approval status change.
                        ret = []
                        current = None
                        index = -1
                        start = 0
                        limit = 3
                        recipients = []
                        if resourceid != '':
                            dates = models.EditLog.objects.filter(
                                resourceid=resourceid).values_list(
                                    'timestamp',
                                    flat=True).order_by('-timestamp').distinct(
                                        'timestamp')[start:limit]
                            for log in models.EditLog.objects.filter(
                                    resourceid=resourceid,
                                    timestamp__in=dates).values().order_by(
                                        '-timestamp', 'attributeentitytypeid'):
                                if log['attributeentitytypeid'] == 'EW_STATUS.E55' and log[
                                        'oldvalue'] == 'Draft' and log[
                                            'newvalue'] == 'Pending approval':
                                    # Do not notify the rejecting user about
                                    # their own submission.
                                    if int(log['userid']) <> user.id:
                                        print 'Sending mail...'
                                        print log['userid'] <> user.id
                                        print log['userid']
                                        print user.id
                                        recipients.append(
                                            log['user_firstname'] + ' ' +
                                            log['user_lastname'] + '<' +
                                            log['user_email'] + '>')
                        if len(recipients) > 0:
                            resource_type_name = settings.RESOURCE_TYPE_CONFIGS(
                            )[resourcetypeid]['name']
                            status = 'Approval rejected'
                            resource_name = resource.get_primary_name()
                            subject = resource_name + ' (' + resource_type_name + ') - ' + status
                            from_email = settings.EMAIL_FROM
                            text_content = 'User ' + user.first_name + ' ' + user.last_name + ' (' + user.username + ') has rejected a document ' + resource_name + ' (' + resource_type_name + '). For explanation go open document in Early Watercraft (section Validate Watercraft)'
                            html_content = 'User <strong>' + user.first_name + ' ' + user.last_name + ' (' + user.username + ')</strong> has rejected a document <a href="' + resource_url + '">' + resource_name + ' (' + resource_type_name + ')</a> with following explanation:<br>' + rejectedDescription
                            print html_content
                            msg = EmailMultiAlternatives(
                                subject, text_content, from_email, recipients)
                            msg.attach_alternative(html_content, "text/html")
                            msg.content_subtype = "html"  # Main content is now text/html
                            # Send the email
                            connection = mail.get_connection()
                            # Manually open the connection
                            connection.open()
                            # Construct an email message that uses the connection
                            msg.send()
                            connection.close()
                else:
                    if action == 'return-to-draft':
                        current_status = 'Draft'
                        resource.set_resource_status(current_status, user)
                        empty_errors_cache()
                        errors = []
                        actions = get_possible_actions(current_status, False,
                                                       user)
                    else:
                        if action == 'publish':
                            current_status = 'Published'
                            resource.set_resource_status(current_status, user)
                            empty_errors_cache()
                            errors = []
                            actions = get_possible_actions(
                                current_status, False, user)
        # Persist the submitted form data and re-index the resource.
        form.update(data, request.FILES)
        with transaction.atomic():
            # Existing resources get their index dropped first so the
            # subsequent index() call writes a fresh document.
            if resourceid != '':
                resource.delete_index()
            resource.save(user=request.user)
            resource.index()
            resourceid = resource.entityid
            print "Redirect_resource_manager"
            return redirect('resource_manager',
                            resourcetypeid=resourcetypeid,
                            form_id=form_id,
                            resourceid=resourceid)
    min_max_dates = models.Dates.objects.aggregate(Min('val'), Max('val'))
    if request.method == 'GET':
        if form != None:
            lang = request.GET.get('lang', settings.LANGUAGE_CODE)
            current_status = resource.get_current_status()
            if (resourceid != ''):
                current_group = resource.get_current_group()
            else:
                current_group = group_ownership
            print "Current status: "
            print current_status
            print "Current group: "
            print current_group
            actions = []
            user_can_edit_document = get_user_can_edit_document(
                current_status, current_group, user, resourcetypeid,
                user_groups, group_ownership)
            form.load(lang, current_group)
            # If user can not edit resource, there will be no validate and delete resource options
            if (not user_can_edit_document):
                # NOTE(review): removing items from a list while iterating it
                # can skip entries; works only because at most one/two match.
                for form_group in resource.form_groups:
                    for form1 in form_group["forms"]:
                        if (form1["id"] == 'delete-resource'
                                or form1["id"] == 'validate-resource'):
                            form_group["forms"].remove(form1)
            # If status is not Draft and user is not superuser, delete is disabled
            if (current_status <> 'Draft' and not user.is_superuser):
                for form_group in resource.form_groups:
                    for form1 in form_group["forms"]:
                        if (form1["id"] == 'delete-resource'):
                            form_group["forms"].remove(form1)
            if form_id == 'validate-resource':
                # Validation results are cached per resource so other forms
                # can show them without re-validating.
                errors = resource.validate_resource()
                cache.set('errors', errors, 1000)
                cache.set('resourceid', resourceid, 1000)
                errorsExists = False
                for error in errors:
                    print error
                    if error['type'] == 'error':
                        errorsExists = True
                        break
                actions = get_possible_actions(current_status, errorsExists,
                                               user)
            else:
                # Reuse cached validation errors only if they belong to this
                # resource.
                saved_resourceid = cache.get('resourceid')
                if (resourceid == saved_resourceid):
                    errors = cache.get('errors')
                else:
                    empty_errors_cache()
                    errors = []
            return render_to_response(
                'resource-manager.htm',
                {
                    'form': form,
                    'formdata': JSONSerializer().serialize(form.data),
                    'form_template': 'views/forms/' + form_id + '.htm',
                    'form_id': form_id,
                    'resourcetypeid': resourcetypeid,
                    'resourceid': resourceid,
                    'main_script': 'resource-manager',
                    'active_page': 'ResourceManger',
                    'resource': resource,
                    'resource_name': resource.get_primary_name(),
                    'resource_type_name': resource.get_type_name(),
                    'resource_icon': settings.RESOURCE_TYPE_CONFIGS(
                    )[resourcetypeid]["icon_class"],
                    'form_groups': resource.form_groups,
                    # NOTE(review): max_date guards on val__min (not val__max);
                    # looks like a copy/paste slip — confirm intent.
                    'min_date': min_max_dates['val__min'].year
                    if min_max_dates['val__min'] != None else 0,
                    'max_date': min_max_dates['val__max'].year
                    if min_max_dates['val__min'] != None else 1,
                    'timefilterdata': JSONSerializer().serialize(
                        Concept.get_time_filter_data()),
                    'current_status': current_status,
                    'user_groups': user_groups,
                    'errors': errors,
                    'actions': actions,
                    'user_can_edit_document': user_can_edit_document,
                    'region_coordinates': JSONSerializer().serialize(
                        settings.REGION_COORDINATES),
                    'help': settings.HELP['resource_manager']
                },
                context_instance=RequestContext(request))
        else:
            return HttpResponseNotFound('<h1>Arches form not found.</h1>')