def import_information(self, entry_data, lead, framework, data):
    """Create an Entry under *lead* from exported entry *data* and migrate its attributes.

    ``entry_data`` supplies audit fields (creator, creation time); ``data``
    supplies the content (excerpt or image) and the widget elements.
    Returns the saved Entry.
    """
    legacy_id = data['id']
    print(f'Entry info - {legacy_id}')

    new_entry = Entry(lead=lead, analysis_framework=framework)

    # An entry is either a text excerpt or an image.
    if data.get('excerpt'):
        new_entry.excerpt = data['excerpt']
        new_entry.entry_type = Entry.EXCERPT
    elif data.get('image'):
        new_entry.image = data['image']
        new_entry.entry_type = Entry.IMAGE

    new_entry.created_by = get_user(entry_data['created_by'])
    new_entry.modified_by = new_entry.created_by
    new_entry.save()

    # created_at is presumably auto-set on save, so the original timestamp is
    # forced afterwards via a queryset update() that bypasses save().
    Entry.objects.filter(id=new_entry.id).update(
        created_at=entry_data['created_at']
    )

    # Migrate the attributes.
    # TODO migrate excerpt and image widget
    for element in data['elements']:
        self.migrate_attribute(new_entry, framework, element)

    return new_entry
def AddEntry(request):
    """Save a new Entry for the logged-in user's profile.

    On a valid POST, redirects to /code/ when the entry text contains
    'CODE', otherwise to /profile/.  On GET (or invalid form) re-renders
    the entry form.  Unauthenticated users are redirected to the login
    page (previously this branch was ``pass`` and the view returned
    ``None``, which Django rejects as an invalid response).
    """
    if not request.user.is_authenticated():
        # TODO(review): confirm the project's actual login URL.
        return HttpResponseRedirect('/login/')

    if request.method == 'POST':
        form = EntryForm(request.POST)
        if form.is_valid():
            parent = request.user.get_profile()
            entry = Entry(patient=parent,
                          entry=form.cleaned_data['entry'],
                          desc=form.cleaned_data['desc'])
            entry.save()
            # Entries containing 'CODE' go to the prize-code flow.
            if form.cleaned_data['entry'].find('CODE') > -1:
                return HttpResponseRedirect('/code/')
            return HttpResponseRedirect('/profile/')
        # Invalid form: log errors and re-render with messages.
        print(form.errors)
        return render_to_response(
            'entry.html', {'form': form},
            context_instance=RequestContext(request))

    # user is not submitting the form
    form = EntryForm()
    return render_to_response('entry.html', {'form': form},
                              context_instance=RequestContext(request))
def do_import_data(request, entity_id, context):
    """Import entries for the entity *entity_id* from the YAML text *context*.

    Validates the entity and the YAML payload, lets a custom view intercept
    the import, then queues a background import Job and redirects to the
    entity's entry list.
    """
    user = User.objects.get(id=request.user.id)

    entity = Entity.objects.filter(id=entity_id, is_active=True).first()
    if not entity:
        # Fixed: this branch previously reused the YAML-parse error text
        # ("Couldn't parse uploaded file"), wrongly blaming the file when
        # the entity id was the problem.
        return HttpResponse("Failed to get entity of specified id", status=400)

    try:
        data = yaml.load(context, Loader=yaml.FullLoader)
    except yaml.parser.ParserError:
        return HttpResponse("Couldn't parse uploaded file", status=400)

    if not Entry.is_importable_data(data):
        return HttpResponse("Uploaded file has invalid data structure to import", status=400)

    if custom_view.is_custom("import_entry", entity.name):
        # import custom view
        resp = custom_view.call_custom("import_entry", entity.name, user, entity, data)

        # If custom_view returns available response this returns it to user,
        # or continues default processing.
        if resp:
            return resp

    # create job to import data to create or update entries and run it
    job = Job.new_import(user, entity, text='Preparing to import data', params=data)
    job.run()

    return HttpResponseSeeOther('/entry/%s/' % entity_id)
def _search_by_keyword(query, entity_name, per_page, page_num):
    """Return ``(hit_count, hit_values)`` for entries matching *query*.

    Matches entries whose EntryName or AttributeValue contains the query;
    *page_num* is 1-based and converted to an offset for the search backend.
    """
    offset = (page_num - 1) * per_page
    result = Entry.search_entries_for_simple(query, entity_name, per_page, offset)
    return (result['ret_count'], result['ret_values'])
def post(self):
    """Create a new in-memory entry from the request JSON.

    Responds with the full entry list on success, or 400 when the payload
    is not well-formed JSON of the expected shape.
    """
    try:
        payload = request.get_json()
        record = Entry(increment_entryId(),
                       payload.get('title'),
                       payload.get('content'),
                       get_timestamp())
        # Entries are kept as plain dicts on the class-level list.
        GetAllEntries.entries.append(json.loads(record.json()))
        return make_response(jsonify(
            {'entries': GetAllEntries.entries},
            {'message': "Entry successfully added"}), 200)
    except (ValueError, KeyError, TypeError):
        return make_response(jsonify(
            {'message': "JSON Format Error"}), 400)
def test_register_entries(self):
    """register_documents should make all three entries searchable."""
    # Nothing is searchable before documents are registered.
    hits = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(hits["ret_count"], 0)
    self.assertEqual(hits["ret_values"], [])

    register_documents(self._es, self._es._index)

    hits = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(hits["ret_count"], 3)
    self.assertTrue(
        all(hit["entity"]["id"] == self.entity.id for hit in hits["ret_values"]))
    self.assertTrue(
        all(hit["entry"]["id"] in [e.id for e in self.entries]
            for hit in hits["ret_values"]))
def test_register_entries(self):
    """Registering documents should index every entry of the entity."""
    before = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(before['ret_count'], 0)
    self.assertEqual(before['ret_values'], [])

    register_documents(self._es, self._es._index)

    after = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(after['ret_count'], 3)
    self.assertTrue(
        all(value['entity']['id'] == self.entity.id
            for value in after['ret_values']))
    self.assertTrue(
        all(value['entry']['id'] in [e.id for e in self.entries]
            for value in after['ret_values']))
def do_import_data(request, entity_id, context):
    """Import entries from an uploaded YAML document.

    *context* is the raw uploaded text; *entity_id* is used for the initial
    permission check and the redirect target.  The YAML may bundle data for
    several entities; each entity is validated (existence + writable
    permission) just before its own import Job is queued, so a failure on a
    later entity can occur after earlier jobs were already started.
    """
    user: User = request.user

    entity: Entity
    entity, error = get_obj_with_check_perm(user, Entity, entity_id, ACLType.Writable)
    if error:
        return error
    if not entity.is_active:
        return HttpResponse("Failed to get entity of specified id", status=400)

    try:
        data = yaml.load(context, Loader=yaml.FullLoader)
    except yaml.parser.ParserError:
        return HttpResponse("Couldn't parse uploaded file", status=400)
    except ValueError as e:
        return HttpResponse("Invalid value is found: %s" % e, status=400)
    except yaml.scanner.ScannerError:
        return HttpResponse("Couldn't scan uploaded file", status=400)
    except Exception as e:
        return HttpResponse("Unknown exception: %s" % e, status=500)

    if not Entry.is_importable_data(data):
        return HttpResponse(
            "Uploaded file has invalid data structure to import", status=400)

    # Top-level YAML keys are entity names; each maps to that entity's entries.
    for entity_name in data.keys():
        import_entity: Entity = Entity.objects.filter(name=entity_name, is_active=True).first()
        if not import_entity:
            return HttpResponse("Specified entity does not exist (%s)" % entity_name, status=400)
        if not user.has_permission(import_entity, ACLType.Writable):
            return HttpResponse("You don't have permission to access (%s)" % entity_name, status=400)

        import_data = data[entity_name]
        if custom_view.is_custom("import_entry", entity_name):
            # import custom view — may transform the data or veto the import
            import_data, err_msg = custom_view.call_custom(
                "import_entry", entity_name, user, import_entity, import_data)

            # If custom_view returns available response this returns it to user,
            # or continues default processing.
            if err_msg:
                return HttpResponse(err_msg, status=400)

        # create job to import data to create or update entries and run it
        job = Job.new_import(user, import_entity, text="Preparing to import data", params=import_data)
        job.run()

    return HttpResponseSeeOther("/entry/%s/" % entity_id)
def hello(request):
    # Tropo webhook handler (Python 2 syntax: ``except Exception, err``).
    # Two modes: when the Tropo session carries ``parameters`` this is an
    # outbound request and an SMS is placed; otherwise an inbound message is
    # stored as an Entry for the patient whose cell number matches the sender.
    try:
        t = Tropo()
        session = Session(request.body)
        if ('parameters' in dir(session)):
            # Outbound: place an SMS call and speak the supplied message.
            print('Message request')
            t.call(to="+" + session.parameters['to'].strip(), network="SMS")
            json = t.say(session.parameters['msg'])
            json = t.RenderJson(json)
            return HttpResponse(json)
        else:
            # Inbound: message text plus the sender's number from the session.
            msg = request.POST.get('msg', '')
            s = Session(request.body)
            cell = s.fromaddress['id']  # lookup patient with this cell #
            if cell[0] == '1':  # trim leading 1 in cell # if there
                cell = cell[1:]
            print('Cell #%s' % cell)
            p = Patient.objects.filter(
                cell=cell)  # all patients with this cell #
            if p.exists():  # if cell # found then create new entry
                if p.count() > 1:
                    print('WARNING: Multiple patients with cell # %s' % cell)
                parent = p[0]  # assume first
                entry = Entry(patient=parent, entry=msg)
                entry.save()
                # Messages containing 'CODE' trigger the prize reply.
                if msg.find('CODE') > -1:
                    json = t.say("Congratulations " + parent.name + " your code qualified you for a prize!")
                else:
                    json = t.say("Entry saved, thank you " + parent.name)
            else:
                # if cell # NOT found then notify
                json = t.say("Could not find patient with cell # " + cell)
            json = t.RenderJson(json)
            return HttpResponse(json)
    except Exception, err:
        # NOTE(review): swallows all errors and returns None (no HttpResponse).
        print('ERROR: %s\n' % str(err))
def handle(self, *args, **options):
    """Management-command entry point: create a stub Entry and tag it 'stub'."""
    stub = Entry(name=options['name'])
    stub.text = "*This is a stub entry. Add vast knowledge here.*"
    stub.clean()
    stub.save()
    # Mark the article so stub pages can be listed later.
    stub.tags.add(Tag.objects.get(slug='stub'))
    self.stdout.write("Created stub article '{}'".format(options['name']))
def get_queryset(self):
    """Search entries by the ``query`` request parameter.

    Returns an empty list when the parameter is missing or blank.
    """
    query = self.request.query_params.get("query", None)
    if not query:
        return []
    hits = Entry.search_entries_for_simple(
        query, limit=ENTRY_CONFIG.MAX_SEARCH_ENTRIES)
    return hits["ret_values"]
def hello(request):
    # Tropo webhook handler (Python 2: ``except Exception, err``).  Outbound
    # mode sends an SMS when the session has ``parameters``; inbound mode
    # records the message as an Entry for the patient matching the sender.
    try:
        t = Tropo()
        session = Session(request.body)
        if('parameters' in dir(session)):
            # Outbound: place an SMS call and speak the supplied message.
            print('Message request')
            t.call(to="+"+session.parameters['to'].strip(), network = "SMS")
            json = t.say(session.parameters['msg'])
            json = t.RenderJson(json)
            return HttpResponse(json)
        else :
            msg = request.POST.get('msg', '')
            s = Session(request.body)
            cell = s.fromaddress['id']  # lookup patient with this cell #
            if cell[0]=='1':  # trim leading 1 in cell # if there
                cell = cell[1:]
            print('Cell #%s' % cell)
            p = Patient.objects.filter(cell=cell)  # all patients with this cell #
            if p.exists():  # if cell # found then create new entry
                if p.count()>1:
                    print('WARNING: Multiple patients with cell # %s' % cell)
                parent = p[0]  # assume first
                entry = Entry(patient=parent, entry=msg)
                entry.save()
                # 'CODE' in the message triggers the prize reply.
                if msg.find('CODE')>-1:
                    json = t.say("Congratulations " + parent.name + " your code qualified you for a prize!")
                else:
                    json = t.say("Entry saved, thank you " + parent.name)
            else:
                # if cell # NOT found then notify
                json = t.say("Could not find patient with cell # " + cell)
            json = t.RenderJson(json)
            return HttpResponse(json)
    except Exception, err:
        # NOTE(review): swallows all errors and returns None (no HttpResponse).
        print('ERROR: %s\n' % str(err))
def advanced_search_result(request):
    """Render the advanced-search page for the requested entities/attributes.

    Attribute hints come either from the JSON-encoded ``attrinfo`` parameter
    or from the older ``attr[]`` list (``attrinfo`` wins when both are sent).
    With ``is_all_entities`` the target entities are derived from the
    attribute names instead of ``entity[]``.
    """
    user = User.objects.get(id=request.user.id)

    recv_entity = request.GET.getlist('entity[]')
    recv_attr = request.GET.getlist('attr[]')
    is_all_entities = request.GET.get('is_all_entities') == 'true'
    has_referral = request.GET.get('has_referral') == 'true'
    attrinfo = request.GET.get('attrinfo')
    entry_name = request.GET.get('entry_name')

    # check entity params
    if not is_all_entities:
        if not recv_entity:
            return HttpResponse("The entity[] parameters are required", status=400)
        if not all(
                [Entity.objects.filter(id=x, is_active=True).exists() for x in recv_entity]):
            return HttpResponse("Invalid entity ID is specified", status=400)

    # check attribute params
    if not recv_attr and not attrinfo:
        return HttpResponse("The attr[] or attrinfo parameters is required", status=400)

    # build hint attrs from JSON encoded params,
    # or attr[] the older param to keep backward compatibility
    # TODO deprecate attr[]
    hint_attrs = [{'name': x} for x in recv_attr]
    if attrinfo:
        try:
            hint_attrs = json.loads(attrinfo)
        except json.JSONDecodeError:
            return HttpResponse("The attrinfo parameter is not JSON", status=400)
    attr_names = [x['name'] for x in hint_attrs]

    if is_all_entities:
        # Target every entity that declares one of the hinted attributes.
        attrs = sum(
            [list(EntityAttr.objects.filter(name=x, is_active=True)) for x in attr_names], [])
        entities = list(set([x.parent_entity.id for x in attrs if x]))
    else:
        entities = recv_entity

    return render(request, 'advanced_search_result.html', {
        'hint_attrs': hint_attrs,
        'results': Entry.search_entries(user, entities, hint_attrs,
                                        CONFIG.MAXIMUM_SEARCH_RESULTS,
                                        entry_name,
                                        hint_referral=has_referral),
        'max_num': CONFIG.MAXIMUM_SEARCH_RESULTS,
        'entities': ','.join([str(x) for x in entities]),
        'has_referral': has_referral,
        'is_all_entities': is_all_entities,
        'entry_name': entry_name,
    })
def parse():
    """Parse static/txt/database.txt and create any Entry rows missing from the DB.

    Each line has the form ``country;denomination;quantity``.  Existing
    (country, denomination) rows are left untouched; only unknown pairs are
    inserted.
    """
    # Raw string avoids the invalid-escape-sequence warnings the original
    # '(\D+);(\d+);(\d+)' pattern produced.
    line_re = re.compile(r'(\D+);(\d+);(\d+)')
    with open('static/txt/database.txt', 'r') as db_file:
        for line in db_file:
            match = line_re.match(line)
            if not match:
                continue
            try:
                country = match.group(1)
                denomination = int(match.group(2))
                quantity = int(match.group(3))
                Entry.objects.get(country=country, denomination=denomination)
            except Entry.DoesNotExist:
                # Not in the database yet: add it.
                Entry(country=country,
                      denomination=denomination,
                      quantity=quantity).save()
            except Exception:
                # Best-effort import (was a bare ``except: pass``): skip rows
                # that fail for any other reason, e.g. duplicates in the DB.
                pass
def newEntry(request):
    """
    Grabs the information for the new entry, adds it to the
    database, and updates the text file.

    Python 2 view (``has_key``, ``print`` statement).  The text file at
    static/txt/database.txt mirrors the Entry table, one
    ``country;denomination;quantity`` line per row.
    """
    if request.POST.has_key('client_response'):
        # Get entry from the client
        entry = json.loads(request.POST['client_response'])
        # Validate first
        if validates(entry):
            currency = entry['currency'].title()
            denom = int(entry['denomination'])
            quantity = int(entry['quantity'])
            # Write to the database and append to text file
            try:
                # Existing row: bump quantity, then rewrite the matching line.
                entry = Entry.objects.get(country=currency, denomination=denom)
                entry.quantity += quantity
                entry.save()
                string = "%s;%s;" % (currency, denom)
                # Find line in text file and update quantity
                mylist = []
                with open('static/txt/database.txt', 'r') as myfile:
                    for line in myfile:
                        if line.startswith(string):
                            line = "%s;%s;%s\n" % (currency, denom, entry.quantity)
                            print 'found old line'
                        mylist.append(line)
                with open('static/txt/database.txt', 'w') as myfile:
                    for line in mylist:
                        myfile.write(line)
            except Entry.DoesNotExist:
                # New row: save it and append a line to the text file.
                entry = Entry(country=currency, denomination=denom, quantity=quantity)
                entry.save()
                # NOTE(review): 'r+a' is not a standard open() mode — behavior
                # is platform/Python-2 dependent; verify read+append intent.
                with open('static/txt/database.txt', 'r+a') as myfile:
                    last = myfile.readlines()[-1]
                    # Check if the file ended in a newline
                    string = '' if '\n' in last else '\n'
                    myfile.write("%s%s;%s;%s\n" % (string, currency, denom, quantity))
            organize(False)
            return HttpResponse('success')
    return HttpResponse('failure')
def test_delete_entry(self):
    """A deleted entry must disappear from search after index pruning."""
    register_documents(self._es, self._es._index)

    # Remove entry-0 from the DB, then prune its document from the index.
    victim = self.entries[0]
    Entry.objects.get(id=victim.id).delete()
    delete_unnecessary_documents(self._es, self._es._index)

    result = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(result["ret_count"], 2)
    self.assertFalse(
        any(hit["entry"]["id"] == victim.id for hit in result["ret_values"]))
def test_delete_entry(self):
    """Deleting an entry and pruning the index removes it from results."""
    register_documents(self._es, self._es._index)

    # delete entry-0 and drop its now-stale search document
    removed = self.entries[0]
    Entry.objects.filter(id=removed.id).delete()
    delete_unnecessary_documents(self._es, self._es._index)

    found = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(found['ret_count'], 2)
    remaining_ids = [value['entry']['id'] for value in found['ret_values']]
    self.assertFalse(removed.id in remaining_ids)
def AddEntry(request):
    # Python 2 era Django view (``print`` statement, ``is_authenticated()``
    # as a method, ``render_to_response``).  Saves a new Entry for the
    # user's profile; entries containing 'CODE' redirect to the prize flow.
    if request.user.is_authenticated():
        if request.method == 'POST':
            form = EntryForm(request.POST)
            if form.is_valid():
                parent = request.user.get_profile()
                entry = Entry(patient=parent,
                              entry=form.cleaned_data['entry'],
                              desc=form.cleaned_data['desc'])
                entry.save()
                if form.cleaned_data['entry'].find('CODE')>-1:
                    return HttpResponseRedirect('/code/')
                else:
                    return HttpResponseRedirect('/profile/')
            else:
                # Invalid form: log errors and re-render with messages.
                print form.errors
                return render_to_response('entry.html', {'form':form},
                                          context_instance=RequestContext(request))
        else:
            ''' user is not submitting the form '''
            form = EntryForm()
            context = {'form': form}
            return render_to_response('entry.html', context,
                                      context_instance=RequestContext(request))
    else:
        # NOTE(review): anonymous users fall through and the view returns
        # None, which Django rejects — should redirect to a login page.
        pass
def advanced_search_result(request):
    """Render advanced-search results (older variant using only attr[]/entity[]).

    With ``is_all_entities`` the entity list is derived from the entities
    that declare the requested attributes; otherwise ``entity[]`` is
    required and validated.
    """
    user = User.objects.get(id=request.user.id)

    recv_entity = request.GET.getlist('entity[]')
    recv_attr = request.GET.getlist('attr[]')
    is_all_entities = request.GET.get('is_all_entities') == 'true'
    has_referral = request.GET.get('has_referral') == 'true'

    # Parameter presence checks differ by mode.
    if not is_all_entities and (not recv_entity or not recv_attr):
        return HttpResponse("The attr[] and entity[] parameters are required", status=400)
    elif is_all_entities and not recv_attr:
        return HttpResponse("The attr[] parameters are required", status=400)

    if not is_all_entities and not all([
            Entity.objects.filter(id=x, is_active=True).exists() for x in recv_entity
    ]):
        return HttpResponse("Invalid entity ID is specified", status=400)

    if is_all_entities:
        # Every entity that declares one of the requested attributes.
        attrs = sum([
            list(EntityAttr.objects.filter(name=x, is_active=True)) for x in recv_attr
        ], [])
        entities = list(set([x.parent_entity.id for x in attrs if x]))
    else:
        entities = recv_entity

    return render(
        request, 'advanced_search_result.html', {
            'attrs': recv_attr,
            'results': Entry.search_entries(user, entities, [{
                'name': x
            } for x in recv_attr], CONFIG.MAXIMUM_SEARCH_RESULTS,
                                            hint_referral=has_referral),
            'max_num': CONFIG.MAXIMUM_SEARCH_RESULTS,
            'entities': ','.join([str(x) for x in entities]),
            'has_referral': has_referral,
            'is_all_entities': is_all_entities,
        })
def post(self, request, format=None):
    """Entry search API endpoint.

    Accepts ``entities`` (ids or names), ``attrinfo`` hints, an optional
    ``referral`` filter and ``entry_limit``, and returns the raw result of
    ``Entry.search_entries``.
    """
    user = User.objects.get(id=request.user.id)

    hint_entity = request.data.get('entities')
    hint_entry_name = request.data.get('entry_name', '')
    hint_attr = request.data.get('attrinfo')
    hint_referral = request.data.get('referral')
    entry_limit = request.data.get('entry_limit', CONFIG_ENTRY.MAX_LIST_ENTRIES)

    if (not isinstance(hint_entity, list)
            or not isinstance(hint_attr, list)
            or not isinstance(entry_limit, int)):
        return Response('The type of parameter is incorrect',
                        status=status.HTTP_400_BAD_REQUEST)

    # forbid to input large size request
    if any([len(str(x)) > CONFIG_ENTRY.MAX_QUERY_SIZE * 2 for x in hint_attr]):
        return Response("Sending parameter is too large", status=400)

    # convert hint_referral type to be eligible for search_entries method
    if hint_referral is None:
        hint_referral = False

    hint_entity_ids = []
    for hint in hint_entity:
        try:
            if Entity.objects.filter(id=hint).exists():
                hint_entity_ids.append(hint)
        except ValueError:
            # This may happen when a string value is specified in the entities parameter
            entity = Entity.objects.filter(name=hint).first()
            if entity:
                hint_entity_ids.append(entity.id)

    resp = Entry.search_entries(
        user, hint_entity_ids, hint_attr, entry_limit, **{
            'hint_referral': hint_referral,
            'entry_name': hint_entry_name,
        })

    return Response({'result': resp},
                    content_type='application/json; charset=UTF-8')
def has_permission(self, request, view):
    """Grant access when the user can see the object named in the URL kwargs.

    Checks project, lead, entry and analysis-pillar ids in that order;
    permits the request when no recognised id is present.
    """
    kwargs = view.kwargs
    project_id = kwargs.get('project_id')
    lead_id = kwargs.get('lead_id')
    entry_id = kwargs.get('entry_id')
    analysis_pillar_id = kwargs.get('analysis_pillar_id')

    if project_id:
        return Project.get_for_member(request.user).filter(id=project_id).exists()
    if lead_id:
        return Lead.get_for(request.user).filter(id=lead_id).exists()
    if entry_id:
        return Entry.get_for(request.user).filter(id=entry_id).exists()
    if analysis_pillar_id:
        return AnalysisPillar.objects.filter(
            analysis__project__projectmembership__member=request.user,
            id=analysis_pillar_id,
        ).exists()
    # No recognised object id in the URL: nothing to restrict on.
    return True
def export_search_result(self, job_id):
    """Background task: run a saved search and cache it as YAML or CSV.

    Job params carry the search hints and export style; the result stream
    is stored on the job cache, and the job is marked DONE unless canceled.
    """
    job = Job.objects.get(id=job_id)

    if not job.proceed_if_ready():
        return
    # set flag to indicate that this job starts processing
    job.update(Job.STATUS["PROCESSING"])

    user = job.user
    recv_data = json.loads(job.params)

    has_referral = recv_data.get("has_referral", False)
    referral_name = recv_data.get("referral_name")
    entry_name = recv_data.get("entry_name")

    # '' means "filter by any referral"; False disables the referral filter;
    # a non-empty referral_name narrows to that referral.
    hint_referral = "" if has_referral else False
    if referral_name:
        hint_referral = referral_name

    resp = Entry.search_entries(
        user,
        recv_data["entities"],
        recv_data["attrinfo"],
        settings.ES_CONFIG["MAXIMUM_RESULTS_NUM"],
        entry_name,
        hint_referral,
    )

    io_stream = None
    if recv_data["export_style"] == "yaml":
        io_stream = _yaml_export(job, resp["ret_values"], recv_data, has_referral)
    elif recv_data["export_style"] == "csv":
        io_stream = _csv_export(job, resp["ret_values"], recv_data, has_referral)

    if io_stream:
        job.set_cache(io_stream.getvalue())

    # update job status and save it except for the case that target job is canceled.
    if not job.is_canceled():
        job.update(Job.STATUS["DONE"])
def export_search_result(self, job_id):
    """Background task (older variant): export a saved search as YAML or CSV.

    Waits for dependent jobs, runs the search described in the job params,
    caches the serialized stream on the job, and marks it DONE unless the
    job was canceled meanwhile.
    """
    job = Job.objects.get(id=job_id)

    if not job.is_ready_to_process():
        return

    # wait dependent job is finished
    job.wait_dependent_job()

    # set flag to indicate that this job starts processing
    job.set_status(Job.STATUS['PROCESSING'])

    user = job.user
    recv_data = json.loads(job.params)

    has_referral = False
    if 'has_referral' in recv_data:
        has_referral = recv_data['has_referral']

    hint_entry_name = ''
    if 'entry_name' in recv_data and recv_data['entry_name']:
        hint_entry_name = recv_data['entry_name']

    # NOTE(review): hint_referral receives the has_referral flag directly
    # here (a bool or param value), unlike newer variants that pass a
    # referral-name string — confirm intended semantics.
    resp = Entry.search_entries(user, recv_data['entities'], recv_data['attrinfo'],
                                settings.ES_CONFIG['MAXIMUM_RESULTS_NUM'],
                                hint_referral=has_referral,
                                entry_name=hint_entry_name)

    io_stream = None
    if recv_data['export_style'] == 'yaml':
        io_stream = _yaml_export(job, resp['ret_values'], recv_data, has_referral)
    elif recv_data['export_style'] == 'csv':
        io_stream = _csv_export(job, resp['ret_values'], recv_data, has_referral)

    if io_stream:
        job.set_cache(io_stream.getvalue())

    # update job status and save it except for the case that target job is canceled.
    if not job.is_canceled():
        job.set_status(Job.STATUS['DONE'])
def test_update_entry(self):
    """Re-registering documents should reflect a renamed, re-valued entry."""
    register_documents(self._es, self._es._index)

    # Change entry-0's first attribute value and its name, then re-index.
    target = self.entries[0]
    target.attrs.first().add_value(self.user, 'new-attr-value')
    target.name = 'new-entry-name'
    target.save()
    register_documents(self._es, self._es._index)

    result = Entry.search_entries(self.user, [self.entity.id])
    self.assertEqual(result['ret_count'], 3)

    matches = [hit for hit in result['ret_values']
               if hit['entry']['id'] == target.id]
    info = matches[0]
    self.assertEqual(info['entry']['name'], 'new-entry-name')
    self.assertEqual(info['attrs']['attr']['value'], 'new-attr-value')
def export_search_result(self, job_id):
    """Background task: run a saved search and cache the YAML/CSV export.

    Single-quoted twin of the double-quoted variant above in this file;
    behavior is identical.
    """
    job = Job.objects.get(id=job_id)

    if not job.proceed_if_ready():
        return
    # set flag to indicate that this job starts processing
    job.update(Job.STATUS['PROCESSING'])

    user = job.user
    recv_data = json.loads(job.params)

    has_referral = recv_data.get('has_referral', False)
    referral_name = recv_data.get('referral_name')
    entry_name = recv_data.get('entry_name')

    # '' means "any referral"; False disables the referral filter;
    # a non-empty referral_name narrows to that referral.
    hint_referral = '' if has_referral else False
    if referral_name:
        hint_referral = referral_name

    resp = Entry.search_entries(user, recv_data['entities'], recv_data['attrinfo'],
                                settings.ES_CONFIG['MAXIMUM_RESULTS_NUM'],
                                entry_name, hint_referral)

    io_stream = None
    if recv_data['export_style'] == 'yaml':
        io_stream = _yaml_export(job, resp['ret_values'], recv_data, has_referral)
    elif recv_data['export_style'] == 'csv':
        io_stream = _csv_export(job, resp['ret_values'], recv_data, has_referral)

    if io_stream:
        job.set_cache(io_stream.getvalue())

    # update job status and save it except for the case that target job is canceled.
    if not job.is_canceled():
        job.update(Job.STATUS['DONE'])
def test_update_entry(self):
    """Search (with an attr hint) must see the renamed, re-valued entry."""
    register_documents(self._es, self._es._index)

    # update entry-0: new attribute value and a new name, then re-index
    changed = self.entries[0]
    changed.attrs.first().add_value(self.user, "new-attr-value")
    changed.name = "new-entry-name"
    changed.save()
    register_documents(self._es, self._es._index)

    found = Entry.search_entries(self.user, [self.entity.id], [{
        "name": "attr"
    }])
    self.assertEqual(found["ret_count"], 3)

    hits = [value for value in found["ret_values"]
            if value["entry"]["id"] == changed.id]
    record = hits[0]
    self.assertEqual(record["entry"]["name"], "new-entry-name")
    self.assertEqual(record["attrs"]["attr"]["value"], "new-attr-value")
def do_import_data(request, entity_id, context):
    """Import entries for *entity_id* from the uploaded YAML text *context*.

    Validates the entity and payload, allows a custom view to intercept,
    then queues a background import Job and redirects to the entry list.
    """
    entity = Entity.objects.filter(id=entity_id, is_active=True).first()
    if not entity:
        # Fixed: this branch previously reported "Couldn't parse uploaded
        # file", wrongly blaming the file when the entity id was invalid.
        return HttpResponse("Failed to get entity of specified id", status=400)

    try:
        data = yaml.load(context, Loader=yaml.FullLoader)
    except yaml.parser.ParserError:
        return HttpResponse("Couldn't parse uploaded file", status=400)
    except ValueError as e:
        return HttpResponse("Invalid value is found: %s" % e, status=400)
    except yaml.scanner.ScannerError:
        return HttpResponse("Couldn't scan uploaded file", status=400)
    except Exception as e:
        return HttpResponse("Unknown exception: %s" % e, status=500)

    if not Entry.is_importable_data(data):
        return HttpResponse(
            "Uploaded file has invalid data structure to import", status=400)

    if custom_view.is_custom("import_entry", entity.name):
        # import custom view
        resp = custom_view.call_custom("import_entry", entity.name,
                                       request.user, entity, data)

        # If custom_view returns available response this returns it to user,
        # or continues default processing.
        if resp:
            return resp

    # create job to import data to create or update entries and run it
    job = Job.new_import(request.user, entity,
                         text="Preparing to import data", params=data)
    job.run()

    return HttpResponseSeeOther("/entry/%s/" % entity_id)
def get(self, request, uuid=None, filename=None):
    # Serve a stored file by redirecting to its storage URL, but only when
    # the requester may see it: the file is public, or it is attached to a
    # lead or entry visible to the user (including entries that reference
    # the file by its absolute raw-image URL).
    queryset = File.objects.prefetch_related('lead_set')
    file = get_object_or_404(queryset, uuid=uuid)
    user = request.user
    leads_pk = file.lead_set.values_list('pk', flat=True)
    if (file.is_public
            or Lead.get_for(user).filter(pk__in=leads_pk).exists()
            or Entry.get_for(user).filter(image=file).exists()
            or Entry.get_for(user).filter(image_raw=request.build_absolute_uri(
                reverse('file', kwargs={'file_id': file.pk}),
            )).exists()
            # TODO: Add Profile
            ):
        if file.file:
            return redirect(request.build_absolute_uri(file.file.url))
        # DB row exists but the underlying file is missing.
        return response.Response({
            'error': 'File doesn\'t exists',
        }, status=status.HTTP_404_NOT_FOUND)
    return response.Response({
        'error': 'Access Forbidden, Contact Admin',
    }, status=status.HTTP_403_FORBIDDEN)
def post(self, request, format=None):
    """Advanced entry search API.

    Validates the request, resolves entity hints (ids or names) to the ids
    the user may read, delegates to ``Entry.search_entries``, then rewraps
    each attribute value under a type-keyed field name as a workaround for
    the retrieve-API data shape.
    """
    hint_entities = request.data.get("entities")
    hint_entry_name = request.data.get("entry_name", "")
    hint_attrs = request.data.get("attrinfo")
    hint_has_referral = request.data.get("has_referral", False)
    hint_referral_name = request.data.get("referral_name", "")
    is_output_all = request.data.get("is_output_all", True)
    entry_limit = request.data.get("entry_limit", self.MAX_LIST_ENTRIES)

    # hint_referral is False (no referral filter) or a referral-name string.
    hint_referral = False
    if hint_has_referral:
        hint_referral = hint_referral_name

    if (not isinstance(hint_entities, list)
            or not isinstance(hint_entry_name, str)
            or not isinstance(hint_attrs, list)
            or not isinstance(is_output_all, bool)
            or not isinstance(hint_referral, (str, bool))
            or not isinstance(entry_limit, int)):
        return Response("The type of parameter is incorrect",
                        status=status.HTTP_400_BAD_REQUEST)

    # forbid to input large size request
    if len(hint_entry_name) > self.MAX_QUERY_SIZE:
        return Response("Sending parameter is too large", status=400)

    # check attribute params
    for hint_attr in hint_attrs:
        if "name" not in hint_attr:
            return Response(
                "The name key is required for attrinfo parameter", status=400)
        if not isinstance(hint_attr["name"], str):
            return Response("Invalid value for attrinfo parameter", status=400)
        if hint_attr.get("keyword"):
            if not isinstance(hint_attr["keyword"], str):
                return Response("Invalid value for attrinfo parameter", status=400)
            # forbid to input large size request
            if len(hint_attr["keyword"]) > self.MAX_QUERY_SIZE:
                return Response("Sending parameter is too large", status=400)

    # check entities params
    if not hint_entities:
        return Response("The entities parameters are required", status=400)
    hint_entity_ids = []
    for hint_entity in hint_entities:
        entity = None
        if isinstance(hint_entity, int):
            entity = Entity.objects.filter(id=hint_entity, is_active=True).first()
        elif isinstance(hint_entity, str):
            if hint_entity.isnumeric():
                # a numeric string may name an entity either by id or by name
                entity = Entity.objects.filter(
                    Q(id=hint_entity) | Q(name=hint_entity),
                    Q(is_active=True)).first()
            else:
                entity = Entity.objects.filter(name=hint_entity, is_active=True).first()

        # unknown or unreadable entities are silently dropped
        if entity and request.user.has_permission(entity, ACLType.Readable):
            hint_entity_ids.append(entity.id)

    resp = Entry.search_entries(
        request.user,
        hint_entity_ids,
        hint_attrs,
        entry_limit,
        hint_entry_name,
        hint_referral,
        is_output_all,
    )

    # convert field values to fit entry retrieve API data type, as a workaround.
    # FIXME should be replaced with DRF serializer etc
    for entry in resp["ret_values"]:
        for name, attr in entry["attrs"].items():

            def _get_typed_value(type: int) -> str:
                # Map an attribute-type bitmask to the response field name.
                if type & AttrTypeValue["array"]:
                    if type & AttrTypeValue["string"]:
                        return "asArrayString"
                    elif type & AttrTypeValue["named"]:
                        return "asArrayNamedObject"
                    elif type & AttrTypeValue["object"]:
                        return "asArrayObject"
                    elif type & AttrTypeValue["group"]:
                        return "asArrayGroup"
                elif type & AttrTypeValue[
                        "string"] or type & AttrTypeValue["text"]:
                    return "asString"
                elif type & AttrTypeValue["named"]:
                    return "asNamedObject"
                elif type & AttrTypeValue["object"]:
                    return "asObject"
                elif type & AttrTypeValue["boolean"]:
                    return "asBoolean"
                elif type & AttrTypeValue["date"]:
                    return "asString"
                elif type & AttrTypeValue["group"]:
                    return "asGroup"
                raise ValidationError(f"unexpected type: {type}")

            entry["attrs"][name] = {
                "is_readble": attr["is_readble"],
                "type": attr["type"],
                "value": {
                    _get_typed_value(attr["type"]): attr.get("value", ""),
                },
            }

    return Response({"result": resp})
def update(self, entry: Entry, validated_data):
    """Serializer update hook: apply validated name/attribute changes to *entry*.

    Marks the entry as being edited for the duration, honors before/after
    custom views, skips attributes the user cannot write, re-indexes the
    entry in Elasticsearch, and finally runs referral-re-registration and
    change-notification jobs.
    """
    entry.set_status(Entry.STATUS_EDITING)
    user: User = self.context["request"].user

    entity_name = entry.schema.name
    if custom_view.is_custom("before_update_entry", entity_name):
        custom_view.call_custom("before_update_entry", entity_name, user,
                                validated_data, entry)

    attrs_data = validated_data.pop("attrs", [])

    # update name of Entry object. If name would be updated, the elasticsearch data of entries
    # that refers this entry also be updated by creating REGISTERED_REFERRALS task.
    job_register_referrals: Optional[Job] = None
    if "name" in validated_data and entry.name != validated_data["name"]:
        entry.name = validated_data["name"]
        entry.save(update_fields=["name"])
        job_register_referrals = Job.new_register_referrals(user, entry)

    for entity_attr in entry.schema.attrs.filter(is_active=True):
        # Lazily materialize the Attribute when the schema gained it since
        # the entry was created.
        attr: Attribute = entry.attrs.filter(schema=entity_attr, is_active=True).first()
        if not attr:
            attr = entry.add_attribute_from_base(entity_attr, user)

        # skip for unpermitted attributes
        if not user.has_permission(attr, ACLType.Writable):
            continue

        # make AttributeValue object if the value is specified
        attr_data = [x for x in attrs_data if int(x["id"]) == entity_attr.id]
        if not attr_data:
            continue

        # Check a new update value is specified, or not
        if not attr.is_updated(attr_data[0]["value"]):
            continue

        attr.add_value(user, attr_data[0]["value"])

    if custom_view.is_custom("after_update_entry", entity_name):
        custom_view.call_custom("after_update_entry", entity_name, user,
                                attrs_data, entry)

    # update entry information to Elasticsearch
    entry.register_es()

    # clear flag to specify this entry has been completed to edit
    entry.del_status(Entry.STATUS_EDITING)

    # running job of re-register referrals because of chaning entry's name
    if job_register_referrals:
        job_register_referrals.run()

    # running job to notify changing entry event
    job_notify_event: Job = Job.new_notify_update_entry(user, entry)
    job_notify_event.run()

    return entry
def advanced_search_result(request):
    """Render the advanced-search page (newest variant with referral_name).

    Attribute hints come from JSON ``attrinfo`` or the legacy ``attr[]``
    list (``attrinfo`` wins); with ``is_all_entities`` the entity set is
    derived from the hinted attribute names.  Entities the user cannot read
    are silently dropped from the search.
    """
    user = User.objects.get(id=request.user.id)

    recv_entity = request.GET.getlist('entity[]')
    recv_attr = request.GET.getlist('attr[]')
    is_all_entities = request.GET.get('is_all_entities') == 'true'
    has_referral = request.GET.get('has_referral') == 'true'
    referral_name = request.GET.get('referral_name')
    attrinfo = request.GET.get('attrinfo')
    entry_name = request.GET.get('entry_name', '')

    # forbid to input large size request
    if len(entry_name) > CONFIG_ENTRY.MAX_QUERY_SIZE:
        return HttpResponse("Sending parameter is too large", status=400)

    # check referral params
    #
    # process of converting older param for backward compatibility
    # ('' = any referral, False = no referral filter, string = that referral)
    hint_referral = '' if has_referral else False
    if referral_name:
        hint_referral = referral_name

    # check attribute params
    # The "attr" parameter guarantees backward compatibility.
    # The "atterinfo" is another parameter,
    # that has same purpose that indicates which attributes to search,
    # And "attrinfo" is prioritize than "attr".
    # TODO deprecate attr[]
    hint_attrs = [{'name': x} for x in recv_attr]
    if attrinfo:
        try:
            # build hint attrs from JSON encoded params
            hint_attrs = json.loads(attrinfo)
        except json.JSONDecodeError:
            return HttpResponse("The attrinfo parameter is not JSON", status=400)

    for hint_attr in hint_attrs:
        if 'name' not in hint_attr:
            return HttpResponse(
                "The name key is required for attrinfo parameter", status=400)
        if not isinstance(hint_attr['name'], str):
            return HttpResponse("Invalid value for attrinfo parameter", status=400)
        if 'keyword' in hint_attr:
            if not isinstance(hint_attr['keyword'], str):
                return HttpResponse("Invalid value for attrinfo parameter", status=400)
            # forbid to input large size request
            if len(hint_attr['keyword']) > CONFIG_ENTRY.MAX_QUERY_SIZE:
                return HttpResponse("Sending parameter is too large", status=400)

    # check entity params
    if is_all_entities:
        # Replace entity[] with every entity that declares a hinted attribute.
        attr_names = [x['name'] for x in hint_attrs]
        recv_entity = list(
            EntityAttr.objects.filter(
                name__in=attr_names, is_active=True,
                parent_entity__is_active=True).order_by(
                    'parent_entity__name').values_list('parent_entity__id',
                                                       flat=True).distinct())
        if not recv_entity:
            return HttpResponse("Invalid value for attribute parameter", status=400)

    if not recv_entity:
        return HttpResponse("The entity[] parameters are required", status=400)

    hint_entity_ids = []
    for entity_id in recv_entity:
        if not isinstance(entity_id, int) and not entity_id.isnumeric():
            return HttpResponse("Invalid entity ID is specified", status=400)

        entity = Entity.objects.filter(id=entity_id, is_active=True).first()
        if not entity:
            return HttpResponse("Invalid entity ID is specified", status=400)

        # unreadable entities are dropped rather than rejected
        if user.has_permission(entity, ACLType.Readable):
            hint_entity_ids.append(entity.id)

    return render(
        request, 'advanced_search_result.html', {
            'hint_attrs': hint_attrs,
            'results': Entry.search_entries(user, hint_entity_ids, hint_attrs,
                                            CONFIG.MAXIMUM_SEARCH_RESULTS,
                                            entry_name, hint_referral),
            'max_num': CONFIG.MAXIMUM_SEARCH_RESULTS,
            'entities': ','.join([str(x) for x in hint_entity_ids]),
            'has_referral': has_referral,
            'referral_name': referral_name,
            'is_all_entities': is_all_entities,
            'entry_name': entry_name,
        })
def post(self, request, format=None):
    """Search entries through the API.

    Validates the POSTed search parameters (entities, entry_name, attrinfo,
    referral, is_output_all, entry_limit), resolves readable entities for
    the requesting user and returns ``{"result": ...}`` from
    ``Entry.search_entries``.

    Returns HTTP 400 for any parameter with the wrong type, shape or size.
    """
    hint_entities = request.data.get("entities")
    hint_entry_name = request.data.get("entry_name", "")
    hint_attrs = request.data.get("attrinfo")
    hint_referral = request.data.get("referral", False)
    is_output_all = request.data.get("is_output_all", True)
    entry_limit = request.data.get("entry_limit", CONFIG_ENTRY.MAX_LIST_ENTRIES)

    if (
        not isinstance(hint_entities, list)
        or not isinstance(hint_entry_name, str)
        or not isinstance(hint_attrs, list)
        or not isinstance(is_output_all, bool)
        or not isinstance(hint_referral, (str, bool))
        or not isinstance(entry_limit, int)
    ):
        return Response(
            "The type of parameter is incorrect", status=status.HTTP_400_BAD_REQUEST
        )

    # forbid to input large size request
    if len(hint_entry_name) > CONFIG_ENTRY.MAX_QUERY_SIZE:
        return Response("Sending parameter is too large", status=status.HTTP_400_BAD_REQUEST)

    # check attribute params
    for hint_attr in hint_attrs:
        # FIX: each item must be a dict; a non-dict item (e.g. an int)
        # previously raised an unhandled TypeError (HTTP 500) at the
        # membership test instead of returning a validation error.
        if not isinstance(hint_attr, dict) or "name" not in hint_attr:
            return Response(
                "The name key is required for attrinfo parameter",
                status=status.HTTP_400_BAD_REQUEST,
            )
        if not isinstance(hint_attr["name"], str):
            return Response(
                "Invalid value for attrinfo parameter", status=status.HTTP_400_BAD_REQUEST
            )
        # NOTE: truthiness check — an empty-string keyword is deliberately
        # treated the same as no keyword
        if hint_attr.get("keyword"):
            if not isinstance(hint_attr["keyword"], str):
                return Response(
                    "Invalid value for attrinfo parameter", status=status.HTTP_400_BAD_REQUEST
                )
            # forbid to input large size request
            if len(hint_attr["keyword"]) > CONFIG_ENTRY.MAX_QUERY_SIZE:
                return Response(
                    "Sending parameter is too large", status=status.HTTP_400_BAD_REQUEST
                )

    # check entities params
    if not hint_entities:
        return Response("The entities parameters are required", status=status.HTTP_400_BAD_REQUEST)

    hint_entity_ids = []
    for hint_entity in hint_entities:
        # entities may be referenced by numeric id or by name
        entity = None
        if isinstance(hint_entity, int):
            entity = Entity.objects.filter(id=hint_entity, is_active=True).first()
        elif isinstance(hint_entity, str):
            if hint_entity.isnumeric():
                entity = Entity.objects.filter(
                    Q(id=hint_entity) | Q(name=hint_entity), Q(is_active=True)
                ).first()
            else:
                entity = Entity.objects.filter(name=hint_entity, is_active=True).first()

        # unknown or unreadable entities are silently skipped
        if entity and request.user.has_permission(entity, ACLType.Readable):
            hint_entity_ids.append(entity.id)

    resp = Entry.search_entries(
        request.user,
        hint_entity_ids,
        hint_attrs,
        entry_limit,
        hint_entry_name,
        hint_referral,
        is_output_all,
    )

    return Response({"result": resp})
def test_show_advanced_search_results_csv_escape(self):
    """Exported CSV must escape commas, quotes and newlines for every
    attribute type (single-value and array variants)."""
    user = self.admin

    # a referral entry whose name itself needs CSV escaping
    dummy_entity = Entity.objects.create(name='Dummy', created_user=user)
    dummy_entry = Entry(name='D,U"MM"Y', schema=dummy_entity, created_user=user)
    dummy_entry.save()

    # (attribute type, stored value, expected escaped CSV cell)
    CASES = [
        [AttrTypeStr, 'raison,de"tre', '"raison,de""tre"'],
        [AttrTypeObj, dummy_entry, '"D,U""MM""Y"'],
        [AttrTypeText, "1st line\r\n2nd line",
         '"1st line' + "\r\n" + '2nd line"'],
        [AttrTypeNamedObj, {"key": dummy_entry}, "\"key: D,U\"\"MM\"\"Y\""],
        [AttrTypeArrStr, ["one", "two", "three"], "\"one\nthree\ntwo\""],
        [AttrTypeArrObj, [dummy_entry], "\"D,U\"\"MM\"\"Y\""],
        [AttrTypeArrNamedObj, [{"key1": dummy_entry}],
         "\"key1: D,U\"\"MM\"\"Y\""],
    ]

    for attr_type, stored_value, expected_cell in CASES:
        # setup data — entity/attr/entry names also contain characters
        # that require escaping
        type_name = attr_type.__name__  # AttrTypeStr -> 'AttrTypeStr'
        attr_name = type_name + ',"ATTR"'

        test_entity = Entity.objects.create(name="TestEntity_" + type_name,
                                            created_user=user)

        test_entity_attr = EntityAttr.objects.create(
            name=attr_name,
            type=attr_type,
            created_user=user,
            parent_entity=test_entity)
        test_entity.attrs.add(test_entity_attr)
        test_entity.save()

        test_entry = Entry.objects.create(name=type_name + ',"ENTRY"',
                                          schema=test_entity,
                                          created_user=user)
        test_entry.save()

        test_attr = Attribute.objects.create(name=attr_name,
                                             schema=test_entity_attr,
                                             created_user=user,
                                             parent_entry=test_entry)
        test_attr.save()
        test_entry.attrs.add(test_attr)
        test_entry.save()

        test_val = None
        if attr_type.TYPE & AttrTypeValue['array'] == 0:
            # single-value types
            if attr_type == AttrTypeStr:
                test_val = AttributeValue.create(user=user, attr=test_attr,
                                                 value=stored_value)
            elif attr_type == AttrTypeObj:
                test_val = AttributeValue.create(user=user, attr=test_attr,
                                                 referral=stored_value)
            elif attr_type == AttrTypeText:
                test_val = AttributeValue.create(user=user, attr=test_attr,
                                                 value=stored_value)
            elif attr_type == AttrTypeNamedObj:
                [(k, v)] = stored_value.items()
                test_val = AttributeValue.create(user=user, attr=test_attr,
                                                 value=k, referral=v)
        else:
            # array types: a parent value collects children in data_array
            test_val = AttributeValue.create(user=user, attr=test_attr)
            test_val.set_status(AttributeValue.STATUS_DATA_ARRAY_PARENT)
            for child in stored_value:
                test_val_child = None
                if attr_type == AttrTypeArrStr:
                    test_val_child = AttributeValue.create(
                        user=user, attr=test_attr, value=child)
                elif attr_type == AttrTypeArrObj:
                    test_val_child = AttributeValue.create(
                        user=user, attr=test_attr, referral=child)
                elif attr_type == AttrTypeArrNamedObj:
                    [(k, v)] = child.items()
                    test_val_child = AttributeValue.create(
                        user=user, attr=test_attr, value=k, referral=v)
                test_val.data_array.add(test_val_child)

        test_val.save()
        test_attr.values.add(test_val)
        test_attr.save()
        test_entry.register_es()

        # export and verify escaping of header, entry name and value
        resp = self.client.post(
            reverse('dashboard:export_search_result'),
            json.dumps({
                'entities': [test_entity.id],
                'attrinfo': [{
                    'name': test_attr.name,
                    'keyword': ''
                }],
                'export_style': 'csv',
            }), 'application/json')
        self.assertEqual(resp.status_code, 200)

        content = Job.objects.last().get_cache()
        header = content.splitlines()[0]
        self.assertEqual(header, 'Name,Entity,"%s,""ATTR"""' % type_name)

        data = content.replace(header, '', 1).strip()
        self.assertEqual(
            data,
            '"%s,""ENTRY""",%s,%s' % (type_name, test_entity.name,
                                      expected_cell))
def get_filtered_entries(user, queries):
    """Get queryset of entries based on dynamic filters.

    Applies, in order: an optional project filter, four day-granularity
    date-range filters, and any per-user dynamic ``Filter`` objects
    (NUMBER and LIST types) found in ``queries``.

    Returns the filtered queryset ordered by lead creator (desc) and lead.
    """
    entries = Entry.get_for(user)

    project = queries.get('project')
    if project:
        entries = entries.filter(lead__project__id=project)

    filters = Filter.get_for(user)

    ONE_DAY = 24 * 60 * 60

    # Date-range query values arrive as day numbers and are converted to
    # datetimes. NOTE(review): the *__lt/*__gt query keys intentionally map
    # to inclusive *__lte/*__gte ORM lookups — preserved as-is.
    DATE_RANGE_LOOKUPS = (
        ('created_at__lt', 'created_at__lte'),
        ('created_at__gt', 'created_at__gte'),
        ('modified_at__lt', 'modified_at__lte'),
        ('modified_at__gt', 'modified_at__gte'),
    )
    for query_key, orm_lookup in DATE_RANGE_LOOKUPS:
        day_number = queries.get(query_key)
        if day_number:
            entries = entries.filter(
                **{orm_lookup: datetime.fromtimestamp(day_number * ONE_DAY)})

    # renamed from `filter` to avoid shadowing the builtin
    for flt in filters:
        # For each filter, see if there is a query for that filter
        # and then perform filtering based on that query.
        query = queries.get(flt.key)
        query_lt = queries.get(flt.key + '__lt')
        query_gt = queries.get(flt.key + '__gt')

        if flt.filter_type == Filter.NUMBER:
            if query:
                entries = entries.filter(
                    filterdata__filter=flt,
                    filterdata__number=query,
                )
            if query_lt:
                entries = entries.filter(
                    filterdata__filter=flt,
                    filterdata__number__lte=query_lt,
                )
            if query_gt:
                entries = entries.filter(
                    filterdata__filter=flt,
                    filterdata__number__gte=query_gt,
                )

        if flt.filter_type == Filter.LIST and query:
            # accept either a list or a comma-separated string
            if not isinstance(query, list):
                query = query.split(',')
            if len(query) > 0:
                entries = entries.filter(
                    filterdata__filter=flt,
                    filterdata__values__overlap=query,
                )

    return entries.order_by('-lead__created_by', 'lead')