def get_index(req):
    """Render the landing page: the latest published posts, the active
    carousel slides and the most recently published bulletin (if any)."""
    recent_posts = Post.objects.filter(published=True).order_by('-ctime')[:settings.INDEX_POSTS_LIMIT]
    context = {
        'latest_posts': [format_post(p) for p in recent_posts],
        'carousel_images': [],
        'bulletin': None,
    }
    for slide in Carousel.objects.get_active():
        slide_dict = model_to_dict(slide)
        # stored value is a relative path; prefix it with the media root URL
        slide_dict['image'] = settings.MEDIA_URL + str(slide_dict['image'])
        context['carousel_images'].append(slide_dict)
    try:
        newest = Bulletin.objects.order_by('-publish_date')[0]
        bulletin_dict = model_to_dict(newest)
        context['bulletin'] = {
            'src': settings.MEDIA_URL + str(bulletin_dict['src']),
            'publish_date': '{:%d-%m-%Y}'.format(bulletin_dict['publish_date']),
        }
    except IndexError:
        pass  # no bulletin published yet: leave context['bulletin'] as None
    return TemplateResponse(req, 'index.html', context)
def logging_delete(sender, instance, **kwargs):
    """Signal handler: write an audit Log row whenever an instance of a
    tracked model (one listed in ``list_of_models``) is deleted.

    The note records who deleted it (the current request user, or
    "System" when no request is active) plus a flat dump of the
    instance's fields.
    """
    if sender.__name__ not in list_of_models:
        return
    from django.forms.models import model_to_dict
    company = None
    user_str = "System"
    if 'company' in dir(instance) and instance.company:
        company = instance.company
    http_request = get_request()
    if http_request:
        # Reuse the request object we already fetched instead of calling
        # get_request() a second time.
        request_user = http_request.user
        user_str = 'User %s' % request_user
        if not company:
            if not request_user.is_authenticated():
                # if 'User AnonymousUser' == user_str:
                user_str = 'Anonymous User'
            elif request_user.profile.company:
                company = request_user.profile.company
    # Build the attribute dump from a single model_to_dict() call; the
    # original re-serialized the whole instance once per field.
    instance_dict = model_to_dict(instance)
    obj_attr = ''
    for key in instance_dict.keys():
        obj_attr += key.replace('_', ' ').upper() + ': ' + str(instance_dict[key]) + '; '
    note = 'User %s deleted %s: %s \n %s' % (user_str, sender.__name__, str(instance), obj_attr)
    Log.objects.create(company=company, note=note)
def dict_to_json_compliance(data, data_type=None):
    """Recursively convert *data* into JSON-serializable primitives.

    dates/datetimes become formatted strings; dicts and lists are walked
    recursively.  When *data_type* is a Django model class, ForeignKey id
    values are expanded into nested dicts and ManyToMany id lists into
    lists of nested dicts.  Anything else is returned unchanged.
    """
    if data_type is not None and not hasattr(data_type, '_meta'):
        data_type = None  # not a model class: ignore it
    # NOTE: datetime must be tested BEFORE date -- datetime.datetime is a
    # subclass of datetime.date, so the old date-first check formatted
    # datetimes as plain dates and silently dropped the time component.
    if isinstance(data, datetime.datetime):
        return data.strftime('%Y-%m-%d %H:%M:%S')
    if isinstance(data, datetime.date):
        return data.strftime('%Y-%m-%d')
    if isinstance(data, dict):
        new_data = {}
        for key in data.keys():
            if data_type is None:
                new_data[key] = dict_to_json_compliance(data[key], data_type)
                continue
            field = data_type._meta.get_field(key)  # look up once per key
            internal = field.get_internal_type()
            if internal == 'ForeignKey' and data[key] is not None:
                foreign_class = field.rel.to
                new_data[key] = dict_to_json_compliance(
                    model_to_dict(foreign_class.objects.get(id=data[key])))
            elif internal == 'ManyToManyField':
                foreign_class = field.rel.to
                new_data[key] = [
                    dict_to_json_compliance(model_to_dict(foreign_class.objects.get(id=item)))
                    for item in data[key]]
            else:
                new_data[key] = dict_to_json_compliance(data[key])
        return new_data
    if isinstance(data, list):
        return [dict_to_json_compliance(item, data_type) for item in data]
    return data
def results(request):
    """Export survey results as a pseudo-CSV response ('sep=;' hint
    followed by the CSV body).

    Only sessions that produced exactly one user record, exactly one
    exit record and at least three clicks are exported.
    """
    output = io.BytesIO()
    users = list(User.objects.all())
    exits = list(Exit.objects.all())
    clicks = list(Click.objects.all())
    sessions = list(Session.objects.all())
    rows = []
    for session in sessions:
        # List comprehensions instead of filter(): len() keeps working on
        # Python 3, where filter() returns a lazy iterator.
        session_users = [u for u in users if u.session_id == session.id]
        session_exits = [e for e in exits if e.session_id == session.id]
        session_clicks = [c for c in clicks if c.session_id == session.id]
        if len(session_users) == 1 and len(session_exits) == 1 and len(session_clicks) >= 3:
            demographic = model_to_dict(session_users[0])
            demographic.update(model_to_dict(session_exits[0]))
            click_dicts = [model_to_dict(c) for c in session_clicks]
            row = demographic.copy()
            row.update(get_click_data(click_dicts))
            row.pop('misc', None)  # internal field, not exported
            rows.append(row)
    if not rows:
        # Guard: the original raised IndexError on rows[0] when no
        # session qualified; return an empty (header-less) export instead.
        return HttpResponse('sep=;\n', content_type='text/plain')
    writer = csv.DictWriter(output, rows[0].keys())
    writer.writeheader()
    writer.writerows(rows)
    return HttpResponse('sep=;\n' + output.getvalue(), content_type='text/plain')
def get_submission_formsets(data=None, instance=None, readonly=False):
    # Build every formset used on the submission form.
    # ``data`` is the POST payload (None for an unbound form); ``instance``
    # is an existing submission form whose related objects seed the initial
    # data; ``readonly`` renders the formsets without blank extra rows.
    formset_classes = [
        # (prefix, formset_class, callable SubmissionForm -> initial data)
        ('measure', MeasureFormSet, lambda sf: sf.measures.filter(category='6.1')),
        ('routinemeasure', RoutineMeasureFormSet, lambda sf: sf.measures.filter(category='6.2')),
        ('nontesteduseddrug', NonTestedUsedDrugFormSet, lambda sf: sf.nontesteduseddrug_set.all()),
        ('foreignparticipatingcenter', ForeignParticipatingCenterFormSet, lambda sf: sf.foreignparticipatingcenter_set.all()),
        ('investigator', InvestigatorFormSet, lambda sf: sf.investigators.all()),
    ]
    formsets = {}
    for name, formset_cls, initial in formset_classes:
        kwargs = {'prefix': name, 'readonly': readonly, 'initial': []}
        if readonly:
            kwargs['extra'] = 0  # no blank extra rows in read-only mode
        if instance:
            # 'id' is excluded so that saving creates fresh related objects
            # rather than editing the originals.
            kwargs['initial'] = [model_to_dict(obj, exclude=('id',)) for obj in initial(instance).order_by('id')]
        formsets[name] = formset_cls(data, **kwargs)
    employees = []
    if instance:
        # Employees of all investigators are flattened into a single
        # formset; each row remembers the positional index of its
        # investigator so it can be re-attached on save.
        for index, investigator in enumerate(instance.investigators.order_by('id')):
            for employee in investigator.employees.order_by('id'):
                employee_dict = model_to_dict(employee, exclude=('id', 'investigator'))
                employee_dict['investigator_index'] = index
                employees.append(employee_dict)
    kwargs = {'prefix': 'investigatoremployee', 'readonly': readonly}
    if readonly:
        kwargs['extra'] = 0
    formsets['investigatoremployee'] = InvestigatorEmployeeFormSet(data, initial=employees or [], **kwargs)
    return formsets
def test_with_closed_sessionkind(self):
    """A proposal must be rejected while its session kind is closed,
    whether closed by an expired date window or by the explicit flag."""
    proposal = models.Proposal(
        conference=self.conference,
        title="Proposal",
        description="DESCRIPTION",
        abstract="ABSTRACT",
        speaker=self.speaker,
        kind=self.kind,
        audience_level=self.audience_level,
        duration=self.duration,
        track=self.track,
    )
    # Case 1: the submission window lies entirely in the past.
    self.kind.start_date = self.now - timedelta(2)
    self.kind.end_date = self.now - timedelta(1)
    self.kind.closed = None
    self.kind.save()
    closed_by_window = forms.ProposalSubmissionForm(data=model_to_dict(proposal))
    self.assertFalse(closed_by_window.is_valid())
    # Case 2: no window at all, but explicitly flagged as closed.
    self.kind.start_date = None
    self.kind.end_date = None
    self.kind.closed = True
    self.kind.save()
    closed_by_flag = forms.ProposalSubmissionForm(data=model_to_dict(proposal))
    self.assertFalse(closed_by_flag.is_valid(), closed_by_flag.errors)
def itemsAlumne(request, interid, alid, assigid, gid):
    """Return (as JSON) one student's grade sheet for a subject within an
    evaluation period: the teacher comment, every grade type with its
    current grade and selectable items, and whether the period is
    currently outside its editing window."""
    subject = Submateria.objects.get(id=assigid)
    grup = Grup.objects.get(id=gid)  # existence check; 404s via DoesNotExist
    period = InterAvaluacio.objects.get(id=interid)
    student = Alumne.objects.get(id=alid)
    try:
        comment = Comentari.objects.get(
            alumne=student, submateria=subject, interavaluacio=period).text
    except Comentari.DoesNotExist:
        comment = ''
    notes = []
    for grade_type in period.tipnotes.all().order_by('ordre'):
        existing = Nota.objects.filter(
            submateria=subject, alumne=student,
            tipnota=grade_type, interavaluacio=period)
        notes.append({
            'tipnota': model_to_dict(grade_type),
            'nota': model_to_dict(existing[0]) if existing else None,
            'its': [model_to_dict(item)
                    for item in ItemNota.objects.filter(grupNota=grade_type.grupNota)],
        })
    today = datetime.datetime.now().date()
    disabled = today < period.data1 or today > period.data2
    return toJson({
        'comentari': comment,
        'desactivat': disabled,
        'notes': notes,
    })
def get_bottoms(request):
    """Return (as JSON) up to three "bottom" listings, each with its
    weight and serialized items; the listings are sorted by their payload
    before being emitted as an ordered mapping."""
    def _add_listagem(bottoms, listagem):
        # Serialize one listing: its weight plus every item, with the
        # item's 'data' field coerced to a string for JSON.  This was
        # duplicated verbatim in both branches of the original.
        entry = {'peso': listagem.peso}
        bottoms["%s" % listagem.titulo] = entry
        for item in ItemBottom.objects.filter(listagem=listagem):
            item_dict = model_to_dict(item)
            item_dict['data'] = '%s' % item_dict['data']
            entry[item.pk] = item_dict

    listagens = ListagemBottom.objects.all()
    response_data = {'tamanho': 3, 'bottoms': {}}
    if len(listagens) <= 3:
        response_data['tamanho'] = len(listagens)
        for listagem in listagens:
            _add_listagem(response_data['bottoms'], listagem)
    else:
        # more than three: export only the first three, tamanho stays 3
        for listagem in listagens[:3]:
            _add_listagem(response_data['bottoms'], listagem)
    response_data['bottoms'] = OrderedDict(
        sorted(response_data['bottoms'].iteritems(), key=operator.itemgetter(1)))
    return HttpResponse(json.dumps(response_data), content_type="application/json")
def test_with_open_sessionkind(self):
    """
    Tests that a proposal can be submitted with an open sessionkind
    """
    proposal = models.Proposal(
        conference=self.conference,
        title="Proposal",
        description="DESCRIPTION",
        abstract="ABSTRACT",
        speaker=self.speaker,
        kind=self.kind,
        audience_level=self.audience_level,
        duration=self.duration,
        track=self.track,
    )

    def _assert_submittable():
        # The proposal data must validate once the terms box is ticked.
        payload = model_to_dict(proposal)
        payload['agree_to_terms'] = True
        form = forms.ProposalSubmissionForm(data=payload)
        self.assertTrue(form.is_valid(), form.errors)

    _assert_submittable()  # kind open in its default state
    now = datetime.datetime.now()
    self.kind.start_date = now - datetime.timedelta(1)
    self.kind.end_date = now + datetime.timedelta(1)
    self.kind.save()
    _assert_submittable()  # kind open via an active date window
def get_vlan_map(vlan, network_ipv4, network_ipv6):
    """Serialize a Vlan plus its IPv4/IPv6 network lists into one dict.

    Each network list becomes a list of field dicts under 'redeipv4' /
    'redeipv6'; a missing or empty list maps to None.
    """
    def _serialize_networks(networks):
        if networks is not None and len(networks) > 0:
            return [model_to_dict(net) for net in networks]
        return None

    vlan_map = model_to_dict(vlan)
    vlan_map['redeipv4'] = _serialize_networks(network_ipv4)
    vlan_map['redeipv6'] = _serialize_networks(network_ipv6)
    return vlan_map
def get_items(request, listagem):
    """Return (as JSON) the items of a listing.

    ProjetoServico items are grouped by category (optionally filtered by
    the 'unidade' or 'categoria' GET parameters); ProtocoloPublicacao
    items are keyed directly by pk.  Date-like fields are stringified so
    json.dumps can handle them.
    """
    # Fixed: the old initializer was ``items = '',`` -- an accidental
    # one-element tuple that was dead in every reachable path.
    response_data = {}
    if ProjetoServico.objects.filter(listagem=listagem):
        unidade = request.GET.get('unidade')
        categoria = request.GET.get('categoria')
        if unidade:
            items = ProjetoServico.objects.filter(
                listagem=listagem, unidade=unidade).order_by('categoria')
        elif categoria:
            items = ProjetoServico.objects.filter(
                listagem=listagem, categoria=categoria).order_by('categoria')
        else:
            items = ProjetoServico.objects.filter(listagem=listagem).order_by('categoria')
        # groupby requires input pre-sorted by the key: order_by above.
        for key, group in groupby(list(items), lambda item: item.categoria):
            response_data['%s' % key] = {}
            for item in group:
                item_dict = model_to_dict(item)
                item_dict['candidatura'] = '%s' % item_dict['candidatura']
                response_data['%s' % key][item.pk] = item_dict
    elif ProtocoloPublicacao.objects.filter(listagem=listagem):
        unidade = request.GET.get('unidade')
        if unidade:
            items = ProtocoloPublicacao.objects.filter(listagem=listagem, unidade=unidade)
        else:
            items = ProtocoloPublicacao.objects.filter(listagem=listagem)
        for item in items:
            item_dict = model_to_dict(item)
            item_dict['assinatura'] = '%s' % item_dict['assinatura']
            item_dict['termo'] = '%s' % item_dict['termo']
            response_data[item.pk] = item_dict
    return HttpResponse(json.dumps(response_data), content_type="application/json")
def testEditTicket(self):
    """Posting an edit must update the editable fields (status, comment,
    solution) but must NOT allow the subject to be changed."""
    #init dictionary with ticket data keys/fields and values to set them to
    REPLACE_DICT= {
        'status': 'open',
        'comment':'Testing stuff',
        'solution':'This is a test solution',
        'keywords':''
    }
    #init ticket data for test ticket and create test ticket in db
    ticket_data = {
        'sector':Group.objects.get(name__contains='Saperion'),
        'category': 'Problem',
        'subject':'Editing Tickets',
        'description':'Test for editing tickets'
    }
    now = timezone.now()
    t = Ticket(
        sector=ticket_data['sector'],
        category=ticket_data['category'],
        subject=ticket_data['subject'],
        description=ticket_data['description'],
        creationdatetime = now,
        status='open',
        creator='ppssystem',
        responsible_person=None,
        comment='',
        solution='',
        keywords='',
        image='')
    t.save()
    #login user
    TicketTest.login(self)
    #check if exactly one ticket is in the db for the given ticket id
    tickets = Ticket.objects.filter(subject='Editing Tickets')
    self.assertTrue(tickets.count()==1)
    ticket=tickets[0]
    ticket_dict = model_to_dict(ticket)
    # overlay the edited values on top of the current ticket state
    DATA_DICT = ticket_dict
    for key in REPLACE_DICT.keys():
        DATA_DICT[key]=REPLACE_DICT[key]
    # a subject change is expected to be IGNORED by the edit view
    DATA_DICT['subject']='An alternative subject'
    DATA_DICT['confirm']='confirm'
    DATA_DICT['image']=None
    editResponse = self.client.post("/tickets/"+str(ticket_dict['ticketid'])+"/edit/", DATA_DICT)
    # re-fetch the ticket and compare against the submitted data
    tickets = Ticket.objects.filter(ticketid=ticket_dict['ticketid'])
    ticket = tickets[0]
    ticket_dict = model_to_dict(ticket)
    self.assertFalse(ticket_dict['subject']==DATA_DICT['subject'])
    self.assertTrue(ticket_dict['status']==DATA_DICT['status'])
    self.assertTrue(ticket_dict['comment']==DATA_DICT['comment'])
    self.assertTrue(ticket_dict['solution']==DATA_DICT['solution'])
def to_dict(self, org_list):
    """Serialize this employment record for the API.

    ``org_list`` holds the organisations the requesting user may manage;
    membership there decides the ``actions`` flag.
    """
    country = model_to_dict(self.country) if self.country else ''
    # Collect auth groups in a fixed display order, skipping any group
    # that does not exist in this installation.
    all_groups = []
    for group_name in ('Users', 'User Managers', 'Project Editors', 'Admins'):
        try:
            all_groups.append(Group.objects.get(name=group_name))
        except ObjectDoesNotExist:
            continue
    user_group = model_to_dict(self.group, fields=['id', 'name']) if self.group else None
    other_groups = [model_to_dict(g, fields=['id', 'name']) for g in all_groups]
    return dict(
        id=self.pk,
        organisation_full=model_to_dict(self.organisation, fields=['id', 'name', 'long_name',]),
        user_full=model_to_dict(self.user, fields=['id', 'first_name', 'last_name', 'email',]),
        is_approved=self.is_approved,
        job_title=self.job_title,
        country_full=country,
        group=user_group,
        other_groups=other_groups,
        actions=True if self.organisation in org_list else False,
    )
def get(self, request, person):
    # Serve one person's public profile as JSON: their own fields, their
    # papers (each with an ordered author list) and three press
    # collections (all papers / first-author papers / hand-selected).
    dbentry = get_object_or_404(Person,uname=person)
    payload = model_to_dict(dbentry)
    paper_list = dbentry.paper_set.all().order_by('-sort_date')
    # One author list per paper, each author reduced to fullname and
    # sorted by the stored author order.
    author_lists = [[model_to_dict(author,fields=["fullname"]) for author in paper.authors.all().order_by('order')] for paper in paper_list]
    payload["papers"] = [model_to_dict(paper) for paper in paper_list]
    # author_lists is index-aligned with payload["papers"] (same source
    # queryset, same order), so zip them back together positionally.
    for i,paper in enumerate(payload["papers"]):
        paper["authors"] = author_lists[i]
    all_press_list = [list(map(model_to_dict,paper.press_set.all())) for paper in paper_list]
    # order=0 marks papers where this person is the first author.
    first_auth_press_list = [list(map(model_to_dict,order.paper.press_set.all())) for order in dbentry.order_set.filter(order=0)]
    selected_press_list = list(map(model_to_dict,dbentry.press_set.all()))
    payload["press"] = all_press_list
    payload["press_firstauthor"] = first_auth_press_list
    payload["press_selected"] = selected_press_list
    # project_list = dbentry.project_set.all()
    # # note that these are backward!
    # # (oops)
    # teaching_list = dbentry.courses_taken.all()
    # class_list = dbentry.courses_taught.all()
    # funding_list = dbentry.funding_set.all()
    return HttpResponse(dumps(payload,cls=DjangoJSONEncoder), content_type="application/json")
def user_to_dict(user, detail=True):
    """Serialize a user account into a plain dict for the API.

    With ``detail`` falsy, the date fields are skipped.
    """
    user_fields = ['first_name', 'last_name', 'email']
    date_fields = ['date_joined', 'moderated_at', 'verified_at', 'auth_token_expires']
    status_fields = ['is_active', 'is_rejected', 'deactivated_reason', 'accepted_policy', 'rejected_reason']
    if not detail:
        # NOTE(review): this 'fields' assignment is never read -- the
        # model_to_dict() call below always uses user_fields+status_fields.
        # The intent was presumably to restrict output for detail=False;
        # confirm before relying on status fields being hidden.
        fields = user_fields
        date_fields = []
    d = model_to_dict(user, fields=user_fields + status_fields)
    d['id'] = user.uuid  # expose the uuid, not the database pk
    for date_field in date_fields:
        val = getattr(user, date_field)
        if val:
            d[date_field] = api.utils.isoformat(getattr(user, date_field))
        else:
            d[date_field] = None
    methods = d['authentication_methods'] = []
    d['roles'] = list(user.groups.values_list("name", flat=True))
    for provider in user.auth_providers.filter():
        method_fields = ['identifier', 'active', 'affiliation']
        method = model_to_dict(provider, fields=method_fields)
        method['backend'] = provider.auth_backend
        method['metadata'] = provider.info
        # astakos accounts are identified by their e-mail address
        if provider.auth_backend == 'astakos':
            method['identifier'] = user.email
        methods.append(method)
    return d
def searchProduct(request): response = HttpResponse() response['Content-Type'] = "text/javascript" ProductIDFrompage = request.GET.get("searchtext", "") print "ProductIDFrompage", ProductIDFrompage productRestult = Product.objects.filter(outer_id__icontains=ProductIDFrompage) # data = serializers.serialize("json", productRestult) # print "tttttttttttttt", data product_list = [] for product in productRestult: product_dict = model_to_dict(product) product_dict['prod_skus'] = [] guiges = product.prod_skus.all() for guige in guiges: sku_dict = model_to_dict(guige) product_dict['prod_skus'].append(sku_dict) product_list.append(product_dict) data = json.dumps(product_list, cls=DjangoJSONEncoder) # productRestult[0].prod_skus.all() # model_to_dict(productRestult[0]) return HttpResponse(data)
def handle_get(self, request, user, *args, **kwargs):
    """Treat GET requests list all Filters.

    URL: filter/all/
    """
    try:
        self.log.info("List all Filters")
        # Commons Validations
        # User permission
        if not has_perm(user, AdminPermission.ENVIRONMENT_MANAGEMENT, AdminPermission.READ_OPERATION):
            self.log.error(
                u'User does not have permission to perform the operation.')
            return self.not_authorized()
        # Business Rules
        filters = Filter.objects.all()
        filter_list = []
        for filter_ in filters:
            filter_dict = model_to_dict(filter_)
            # attach the equipment types linked via FilterEquipType rows
            filter_dict['equip_types'] = list()
            for fil_equip_type in filter_.filterequiptype_set.all():
                filter_dict['equip_types'].append(
                    model_to_dict(fil_equip_type.equiptype))
            filter_list.append(filter_dict)
        return self.response(dumps_networkapi({'filter': filter_list}))
    except BaseException, e:
        # NOTE(review): Python 2 syntax; catches absolutely everything and
        # collapses it to error code 1 -- 'e' is never used or logged.
        return self.response_error(1)
def RequirementsForBadge(request, badge_id):
    """Return (as JSON) the requirement tree of one merit badge, three
    sub-requirement levels deep."""
    merit_badge = MeritBadge.objects.get(pk=badge_id)
    response_data = []
    for requirement in merit_badge.requirements.all():
        req_dict = model_to_dict(requirement)
        req_dict['subrequirements'] = []
        for lvl1 in requirement.subrequirements_lvl1.all():
            lvl1_dict = model_to_dict(lvl1)
            lvl1_dict['subrequirements'] = []
            for lvl2 in lvl1.subrequirements_lvl2.all():
                lvl2_dict = model_to_dict(lvl2)
                # deepest level: plain dicts, no further nesting
                lvl2_dict['subrequirements'] = [
                    model_to_dict(lvl3) for lvl3 in lvl2.subrequirements_lvl3.all()
                ]
                lvl1_dict['subrequirements'].append(lvl2_dict)
            req_dict['subrequirements'].append(lvl1_dict)
        response_data.append(req_dict)
    return HttpResponse(json.dumps(response_data), mimetype="application/json")
def inner(request, *av, **kw): print request.META user = {} if not "HTTP_AUTHORIZATION" in request.META: return Jsonify({"error": "1102", "error_message": "用户未登录, 无session。", "status": False}) else: auth = request.META["HTTP_AUTHORIZATION"] auth = auth.strip().decode("base64") user["username"], user["password"] = auth.split(":") if user["username"] == "" or user["password"] == "": return Jsonify({"error": "1102", "error_message": "用户未登录。", "status": False}) else: _session_info = UserSession.objects.filter(session_password=user["password"]) if not _session_info: return Jsonify({"error": "1102", "error_message": "用户未登录, wrong session。", "status": False}) else: _session_info = model_to_dict(_session_info[0]) print _session_info _uid = _session_info["uid"] user = User.objects.filter(uid=_uid) user = model_to_dict(user[0]) if not user: return Jsonify({"error": "1103", "error_message": "用户不存在。", "status": False}) else: del (user["loginIp"]) del (user["lastLogin"]) del (user["salt"]) del (user["password"]) del (user["register"]) request.user = user return func(request, *av, **kw)
def testSuiteQuery(request):
    # List every test suite with its items and each item's steps, three
    # levels deep, after validating the caller's login token.  Each suite
    # is serialized to its own JSON string before being returned.
    logging=getDefaultLogger()
    token=getParamFromRequest(request,'token')
    tokenUserid=tokenToUserid(token)
    userInfoName=UserInfo.objects.filter(userId=tokenUserid)
    if not userInfoName:
        return errResponse(U"登录票据验证失败,请更换登录用户!!!")
    try:
        testSuites=TestSuite.objects.all()
        # NOTE(review): 'data' is assigned but never used.
        data={}
        queryResult=list()
        for testSuite in testSuites:
            testItemRows=[]
            testItems=TestItem.objects.filter(suite_id=testSuite.test_suite_id).order_by("item_sequence")
            for testItem in testItems:
                testStepRows=[]
                testSteps=TestStep.objects.filter(item_id=testItem.test_item_id).order_by("step_sequence")
                for testStep in testSteps:
                    stepListRow=model_to_dict(testStep)
                    testStepRows.append(stepListRow)
                # nest the serialized steps under their item
                testItem2Json=model_to_dict(testItem)
                testItem2Json['test_step']=testStepRows
                itemListRow=testItem2Json
                testItemRows.append(itemListRow)
            # nest the serialized items under their suite
            testSuite2Json=model_to_dict(testSuite)
            testSuite2Json['test_item']=testItemRows
            print testSuite2Json
            queryResult.append(json.dumps(testSuite2Json,ensure_ascii=True))
        print queryResult
    except Exception as e:
        # any failure in the query/serialization pipeline is reported as
        # "suite list not found"
        logging.info(e)
        return errResponse(U"未找到Suite列表")
    return sucResponse(queryResult)
def get_score_context(score):
    """Build a template context for one score: the serialized score, its
    combined total and the serialized test it belongs to (with a
    human-readable date)."""
    related_test = Test.objects.get(id=score.test_id)
    context = model_to_dict(score)
    context['total_score'] = (
        score.verbal_score + score.math_score + score.analytic_score)
    test_dict = model_to_dict(related_test)
    test_dict['date'] = related_test.get_pretty_date()
    context['test'] = test_dict
    return context
def nodes(request):
    # AJAX endpoint: list computers seen within the last 15 seconds that
    # are projecting at least one screen, each paired with its screenshot
    # URL (or a placeholder image when no screenshot file exists on disk).
    if request.is_ajax() and request.method == 'GET':
        nodes = Computer.objects.filter(
            time__gte=datetime.now() - timedelta(seconds=15),
            screens__gt=0).annotate(
            dcount=Count('name')).order_by('wos_id')
        if not nodes:
            # nobody active any more: clear all activity state
            Activity.unset_all()
        node_list = []
        for node in nodes:
            screens = SCREEN_IMAGES
            # build the on-disk path with the platform separator so the
            # isfile() check also works on Windows
            path = screens + os.sep + str(node.wos_id) + '.png'
            if os.sep != '/':
                path = path.replace('/', os.sep)
            if os.path.isfile(path):
                node_list.append({'node':model_to_dict(node), 'img': '/static/screen_images/' + str(node.wos_id) + '.png'})
            else:
                # no screenshot captured yet: serve the generic image
                node_list.append({'node':model_to_dict(node), 'img': '/static/img/SCREEN.png'})
        return HttpResponse(json.dumps(node_list, default=swnp.utils.date_handler), mimetype='application/json')
    else:
        return HttpResponse(json.dumps({'status':'ERROR'}), mimetype='application/json')
def new_test_method(self): instance = model.create_test_instance() # This is the *expected* value, not generated the regular way. serialized = instance.as_test_serialized() deserialized = self.serializer.from_native(serialized) self.assertIsNot(deserialized, None, 'Deserialization of {!r} failed: {!r}' .format(serialized, self.serializer.errors)) # Turn them into dictionaries, so that we can compare them. deserialized = model_to_dict(deserialized) instance = model_to_dict(instance) # Now let's go through and remove some things which don't come through # serialization or deserialization and which thus can't be compared. for x in deserialized, instance: # ID is not deserialized. del x['id'] # The second field from any manual serializer in the hierarchy, # because they aren't included in the serializer. for base in model.__mro__: if base is CQRSPolymorphicModel: break assert base.__name__.startswith('Model') if base.prefix.endswith('m'): # Manual serializer del x['field_{}2'.format(base.prefix)] if base is not model: # Remove the (unserialized) foreign keys to the base also. del x['{}_ptr'.format(base.__name__.lower())] self.assertEqual(deserialized, instance)
def loadAll(self): self.allNodes = list(Node.objects.all()) self.allSlices = list(Slice.objects.all()) self.allSlivers = list(Sliver.objects.all()) self.allSites = list(Site.objects.all()) self.site_id = {} for site in self.allSites: d = model_to_dict(site) d["node_ids"] = [] d["slice_ids"] = [] self.site_id[site.id] = ensure_serializable(d) self.node_id = {} for node in self.allNodes: d = model_to_dict(node) d["sliver_ids"] = [] self.node_id[node.id] = ensure_serializable(d) self.site_id[node.site_id]["node_ids"].append(node.id) self.slice_id = {} for slice in self.allSlices: d = model_to_dict(slice) d["sliver_ids"] = [] self.slice_id[slice.id] = ensure_serializable(d) self.site_id[slice.site_id]["slice_ids"].append(site.id) print self.slice_id.keys() self.sliver_id = {} for sliver in self.allSlivers: self.sliver_id[sliver.id] = model_to_dict(sliver) self.slice_id[sliver.slice_id]["sliver_ids"].append(sliver.id) self.node_id[sliver.node_id]["sliver_ids"].append(sliver.id)
def new_test_method(self):
    """Round-trip a generated test instance through its registered
    serializer and verify the surviving fields come back unchanged."""
    instance = model.create_test_instance()
    # This is the *expected* value, not generated the regular way.
    serialized = instance.as_test_serialized()
    serializer = CQRSSerializerMeta._register[model]()
    deserialized = serializer.from_native(serialized, files=None)
    self.assertIsNot(
        deserialized, None,
        'Deserialization of {!r} failed: {!r}'.format(serialized, serializer.errors))
    # Compare plain field dicts rather than model instances.
    round_tripped = model_to_dict(deserialized)
    expected = model_to_dict(instance)
    # Drop fields that never survive the round trip: the pk plus every
    # explicitly excluded field.
    for field_dict in (round_tripped, expected):
        del field_dict['id']
        for excluded in fields_to_exclude:
            del field_dict[excluded]
    self.assertEqual(round_tripped, expected)
def get(self, request):
    """Render the user-resources home page: institute address and program
    forms (pre-filled when data exists) plus the user's contact numbers
    and secondary e-mail addresses."""
    user = request.user
    try:
        insti_address_form = InstituteAddressForm(
            initial=model_to_dict(user.insti_address))
    except (AttributeError, InstituteAddress.DoesNotExist):
        insti_address_form = InstituteAddressForm()  # nothing saved yet
    try:
        program_form = ProgramForm(initial=model_to_dict(user.program))
    except (AttributeError, Program.DoesNotExist):
        program_form = ProgramForm()
    profile = user.userprofile
    context = {
        "insti_address_form": insti_address_form,
        "program_form": program_form,
        "mobile_numbers": ContactNumber.objects.all().filter(user=user).order_by("-id"),
        "secondary_emails": SecondaryEmail.objects.all().filter(user=user).order_by("-id"),
        "gpo_email": user.email,
        "ldap_number": profile.mobile,
        "roll_number": profile.roll_number,
    }
    return render(request, "user_resources/home.html", context)
def export_orgs(request):
    """Export all library organisations as XML (default), JSON or CSV,
    selected via the 'scheme' GET parameter."""
    host = request.META.get("HTTP_HOST", "")
    prefix = "https://" if request.is_secure() else "http://"
    host = prefix + host
    scheme = request.GET.get("scheme", "xml")
    if scheme not in ["xml", "json", "csv"]:
        scheme = "xml"  # unknown scheme: fall back to XML
    orgs = list(models.Library.objects.all())
    data = u""
    if scheme == "csv" and orgs:
        # column names come from the first org's fields, plus a computed
        # 'site' column appended per row
        iodata = StringIO.StringIO()
        fieldnames = model_to_dict(orgs[0]).keys() + ["site"]
        writer = unicodecsv.DictWriter(iodata, fieldnames=fieldnames)
        writer.writeheader()
        for org in orgs:
            row = model_to_dict(org)
            row["site"] = _get_org_site(org, host=host)
            writer.writerow(row)
        data = iodata.getvalue()
    elif scheme == "json":
        data = JsonSerializer(host=host).serialize(orgs)
    else:
        data = XmlSerializer(host=host).serialize(orgs)
    return HttpResponse(data, content_type="application/" + scheme)
def handle_get(self, request, user, *args, **kwargs):
    """Treat GET requests to get a Filter by id.

    URL: filter/get/<id_filter>/
    """
    try:
        self.log.info("Get Filter by id")
        # Commons Validations
        # User permission
        if not has_perm(user, AdminPermission.ENVIRONMENT_MANAGEMENT, AdminPermission.READ_OPERATION):
            self.log.error(u"User does not have permission to perform the operation.")
            return self.not_authorized()
        if not is_valid_int_greater_zero_param(kwargs["id_filter"]):
            self.log.error(u"Parameter id_filter is invalid. Value: %s.", kwargs["id_filter"])
            raise InvalidValueError(None, "id_filter", kwargs["id_filter"])
        else:
            # Check existence
            fil = Filter().get_by_pk(kwargs["id_filter"])
            filter_dict = model_to_dict(fil)
            # attach the equipment types linked via FilterEquipType rows
            filter_dict["equip_types"] = list()
            for fil_equip_type in fil.filterequiptype_set.all():
                filter_dict["equip_types"].append(model_to_dict(fil_equip_type.equiptype))
            return self.response(dumps_networkapi({"filter": filter_dict}))
    except InvalidValueError, e:
        # 269: invalid-parameter error, echoing the name and value back
        return self.response_error(269, e.param, e.value)
def nodes(self, parameters):
    """Collect graph nodes for the given LDAP configuration: every AD
    user plus every AD group that has at least one member.

    Returns a dict with the raw model instances under 'elements' and
    their serialized dicts under 'nodes'.
    """
    graph_nodes = []
    elements = []
    user_fields = ['id', 'accountExpires', 'adminCount', 'name',
                   'isCriticalSystemObject', 'lastLogon', 'logonCount',
                   'pwdLastSet']
    for ad_user in ActiveDirectoryUser.objects.filter(ldap_configuration=parameters):
        elements.append(ad_user)
        node = model_to_dict(ad_user, user_fields)
        node['name'] = escape(node['name'])  # name is rendered in HTML
        node['node_type'] = 'user'
        graph_nodes.append(node)
    group_fields = ['id', 'cn']
    for ad_group in ActiveDirectoryGroup.objects.filter(ldap_configuration=parameters):
        node = model_to_dict(ad_group, group_fields)
        node['node_type'] = 'group'
        # groups without members are skipped entirely
        if ad_group.users.count() > 0:
            graph_nodes.append(node)
            elements.append(ad_group)
    return {'elements': elements, 'nodes': graph_nodes}
def chase_matches(obj, manager):
    """Attach a 'matches' key (list of serialized match objects) to a
    result dict, or to every dict in a list of result dicts.

    Each entry must carry an 'id' usable to re-fetch the record via
    ``manager``; any other input is returned untouched.
    """
    def _matches_for(record_id):
        # re-fetch the record and serialize its related matches
        return [model_to_dict(match)
                for match in manager.objects.get(id=record_id).matches.all()]

    # isinstance() instead of type() == -- also accepts subclasses and is
    # the idiomatic type check; the fetch expression was duplicated.
    if isinstance(obj, list):
        for entry in obj:
            entry['matches'] = _matches_for(entry['id'])
    elif isinstance(obj, dict):
        obj['matches'] = _matches_for(obj['id'])
    return obj
def get(self, request, format=None):
    """List all users: raw queryset in the template context for the HTML
    renderer, or a list of plain field dicts for any other renderer."""
    if request.accepted_renderer.format == 'html':
        return response.Response({'users': User.objects.all()})
    serialized = [model_to_dict(account) for account in User.objects.all()]
    return response.Response(serialized)
def apagar(request, hash):
    # Delete view for an s2299 "harmful agent" record.  The target id and
    # a print flag travel base64-encoded inside ``hash``.  Deletion is only
    # allowed while the parent event still has status 0 ("Cadastrado");
    # the record's previous state is written to the audit trail first.
    db_slug = 'default'
    try:
        usuario_id = request.user.id
        dict_hash = get_hash_url(hash)
        s2299_infoperant_infoagnocivo_id = int(dict_hash['id'])
        for_print = int(dict_hash['print'])
    except:
        # malformed hash or anonymous user: back to the login page
        usuario_id = False
        return redirect('login')
    usuario = get_object_or_404(Usuarios.objects.using(db_slug), excluido=False, id=usuario_id)
    pagina = ConfigPaginas.objects.using(db_slug).get(
        excluido=False, endereco='s2299_infoperant_infoagnocivo')
    permissao = ConfigPermissoes.objects.using(db_slug).get(
        excluido=False, config_paginas=pagina, config_perfis=usuario.config_perfis)
    dict_permissoes = json_to_dict(usuario.config_perfis.permissoes)
    paginas_permitidas_lista = usuario.config_perfis.paginas_permitidas
    modulos_permitidos_lista = usuario.config_perfis.modulos_permitidos
    s2299_infoperant_infoagnocivo = get_object_or_404(
        s2299infoPerAntinfoAgNocivo.objects.using(db_slug),
        excluido=False, id=s2299_infoperant_infoagnocivo_id)
    dados_evento = {}
    if s2299_infoperant_infoagnocivo_id:
        dados_evento = s2299_infoperant_infoagnocivo.evento()
        # once the event left status 0, delete/edit are locked down
        if dados_evento['status'] != 0:
            dict_permissoes['s2299_infoperant_infoagnocivo_apagar'] = 0
            dict_permissoes['s2299_infoperant_infoagnocivo_editar'] = 0
    if request.method == 'POST':
        if dados_evento['status'] == 0:
            import json
            from django.forms.models import model_to_dict
            # snapshot the record before deleting it, for the audit log
            situacao_anterior = json.dumps(
                model_to_dict(s2299_infoperant_infoagnocivo),
                indent=4, sort_keys=True, default=str)
            s2299infoPerAntinfoAgNocivo.objects.using(db_slug).filter(
                id=s2299_infoperant_infoagnocivo_id).delete()
            #s2299_infoperant_infoagnocivo_apagar_custom
            #s2299_infoperant_infoagnocivo_apagar_custom
            messages.success(request, 'Apagado com sucesso!')
            # operation type 3 = delete
            gravar_auditoria(situacao_anterior, '',
                             's2299_infoperant_infoagnocivo',
                             s2299_infoperant_infoagnocivo_id, usuario_id, 3)
        else:
            messages.error(
                request,
                'Não foi possivel apagar o evento, somente é possível apagar os eventos com status "Cadastrado"!'
            )
        # return to wherever the user came from (stored in the session)
        if request.session[
                'retorno_pagina'] == 's2299_infoperant_infoagnocivo_salvar':
            return redirect('s2299_infoperant_infoagnocivo',
                            hash=request.session['retorno_hash'])
        else:
            return redirect(request.session['retorno_pagina'],
                            hash=request.session['retorno_hash'])
    context = {
        'usuario': usuario,
        'modulos_permitidos_lista': modulos_permitidos_lista,
        'paginas_permitidas_lista': paginas_permitidas_lista,
        'permissao': permissao,
        'data': datetime.datetime.now(),
        'pagina': pagina,
        'dict_permissoes': dict_permissoes,
        'hash': hash,
    }
    return render(request, 's2299_infoperant_infoagnocivo_apagar.html', context)
def salvar(request, hash):
    """Create/edit an s2299_infoperant_infoagnocivo record (generated eSocial view).

    Decodes the record id (and optional tab/print flags) from ``hash``.
    On POST with a valid form it either updates the existing record (only
    while event status is 0) or creates a new one, writing an audit entry
    in both cases.  Otherwise it renders the edit form as HTML, PDF
    (for_print == 2) or XLS (for_print == 3), or a "permission denied"
    page when the profile lacks view rights.
    """
    db_slug = 'default'
    try:
        usuario_id = request.user.id
        dict_hash = get_hash_url(hash)
        s2299_infoperant_infoagnocivo_id = int(dict_hash['id'])
        if 'tab' not in dict_hash.keys():
            dict_hash['tab'] = ''
        for_print = int(dict_hash['print'])
    except:
        # Any failure decoding the hash sends the visitor to login.
        usuario_id = False
        return redirect('login')
    usuario = get_object_or_404(Usuarios.objects.using(db_slug),
                                excluido=False, id=usuario_id)
    pagina = ConfigPaginas.objects.using(db_slug).get(
        excluido=False, endereco='s2299_infoperant_infoagnocivo')
    permissao = ConfigPermissoes.objects.using(db_slug).get(
        excluido=False, config_paginas=pagina,
        config_perfis=usuario.config_perfis)
    if s2299_infoperant_infoagnocivo_id:
        s2299_infoperant_infoagnocivo = get_object_or_404(
            s2299infoPerAntinfoAgNocivo.objects.using(db_slug),
            excluido=False, id=s2299_infoperant_infoagnocivo_id)
    dict_permissoes = json_to_dict(usuario.config_perfis.permissoes)
    paginas_permitidas_lista = usuario.config_perfis.paginas_permitidas
    modulos_permitidos_lista = usuario.config_perfis.modulos_permitidos
    dados_evento = {}
    dados_evento['status'] = 0
    if s2299_infoperant_infoagnocivo_id:
        dados_evento = s2299_infoperant_infoagnocivo.evento()
        # Transmitted/processed events (status != 0) become read-only.
        if dados_evento['status'] != 0:
            dict_permissoes['s2299_infoperant_infoagnocivo_apagar'] = 0
            dict_permissoes['s2299_infoperant_infoagnocivo_editar'] = 0
    if permissao.permite_visualizar:
        mensagem = None
        if s2299_infoperant_infoagnocivo_id:
            s2299_infoperant_infoagnocivo_form = form_s2299_infoperant_infoagnocivo(
                request.POST or None, instance=s2299_infoperant_infoagnocivo,
                slug=db_slug)
        else:
            s2299_infoperant_infoagnocivo_form = form_s2299_infoperant_infoagnocivo(
                request.POST or None, slug=db_slug, initial={})
        if request.method == 'POST':
            if s2299_infoperant_infoagnocivo_form.is_valid():
                dados = s2299_infoperant_infoagnocivo_form.cleaned_data
                import json
                from django.forms.models import model_to_dict
                if s2299_infoperant_infoagnocivo_id:
                    if dados_evento['status'] == 0:
                        dados['modificado_por_id'] = usuario_id
                        dados['modificado_em'] = datetime.datetime.now()
                        #s2299_infoperant_infoagnocivo_campos_multiple_passo1
                        s2299infoPerAntinfoAgNocivo.objects.using(
                            db_slug).filter(
                                id=s2299_infoperant_infoagnocivo_id).update(
                                    **dados)
                        obj = s2299infoPerAntinfoAgNocivo.objects.using(
                            db_slug).get(id=s2299_infoperant_infoagnocivo_id)
                        #s2299_infoperant_infoagnocivo_editar_custom
                        #s2299_infoperant_infoagnocivo_campos_multiple_passo2
                        messages.success(request, 'Alterado com sucesso!')
                        # Audit op 2 = edit: store before/after snapshots.
                        gravar_auditoria(
                            json.dumps(
                                model_to_dict(s2299_infoperant_infoagnocivo),
                                indent=4, sort_keys=True, default=str),
                            json.dumps(model_to_dict(obj), indent=4,
                                       sort_keys=True, default=str),
                            's2299_infoperant_infoagnocivo',
                            s2299_infoperant_infoagnocivo_id, usuario_id, 2)
                    else:
                        messages.error(
                            request,
                            'Somente é possível alterar eventos com status "Cadastrado"!'
                        )
                else:
                    dados['criado_por_id'] = usuario_id
                    dados['criado_em'] = datetime.datetime.now()
                    dados['excluido'] = False
                    #s2299_infoperant_infoagnocivo_cadastrar_campos_multiple_passo1
                    obj = s2299infoPerAntinfoAgNocivo(**dados)
                    obj.save(using=db_slug)
                    #s2299_infoperant_infoagnocivo_cadastrar_custom
                    #s2299_infoperant_infoagnocivo_cadastrar_campos_multiple_passo2
                    messages.success(request, 'Cadastrado com sucesso!')
                    # Audit op 1 = create: previous state is an empty object.
                    gravar_auditoria(
                        '{}',
                        json.dumps(model_to_dict(obj), indent=4,
                                   sort_keys=True, default=str),
                        's2299_infoperant_infoagnocivo', obj.id, usuario_id, 1)
                # NOTE(review): when editing is refused (status != 0) ``obj``
                # is never bound, so the comparison below would raise — the
                # generated code assumes this path is unreachable then; verify.
                if request.session['retorno_pagina'] not in (
                        's2299_infoperant_infoagnocivo_apagar',
                        's2299_infoperant_infoagnocivo_salvar',
                        's2299_infoperant_infoagnocivo'):
                    return redirect(request.session['retorno_pagina'],
                                    hash=request.session['retorno_hash'])
                if s2299_infoperant_infoagnocivo_id != obj.id:
                    # A new record was created: rebuild the url hash so the
                    # user lands on the edit page of the new object.
                    url_hash = base64.urlsafe_b64encode(
                        '{"print": "0", "id": "%s"}' % (obj.id))
                    return redirect('s2299_infoperant_infoagnocivo_salvar',
                                    hash=url_hash)
            else:
                messages.error(request, 'Erro ao salvar!')
        s2299_infoperant_infoagnocivo_form = disabled_form_fields(
            s2299_infoperant_infoagnocivo_form, permissao.permite_editar)
        if s2299_infoperant_infoagnocivo_id:
            if dados_evento['status'] != 0:
                s2299_infoperant_infoagnocivo_form = disabled_form_fields(
                    s2299_infoperant_infoagnocivo_form, 0)
        #s2299_infoperant_infoagnocivo_campos_multiple_passo3
        # Wire every field to an AngularJS model on the page.
        for field in s2299_infoperant_infoagnocivo_form.fields.keys():
            s2299_infoperant_infoagnocivo_form.fields[field].widget.attrs[
                'ng-model'] = 's2299_infoperant_infoagnocivo_' + field
        if int(dict_hash['print']):
            s2299_infoperant_infoagnocivo_form = disabled_form_for_print(
                s2299_infoperant_infoagnocivo_form)
        #[VARIAVEIS_SECUNDARIAS_VAZIAS]
        if s2299_infoperant_infoagnocivo_id:
            # Re-fetch: the record may have been changed by the POST above.
            s2299_infoperant_infoagnocivo = get_object_or_404(
                s2299infoPerAntinfoAgNocivo.objects.using(db_slug),
                excluido=False, id=s2299_infoperant_infoagnocivo_id)
            pass
        else:
            s2299_infoperant_infoagnocivo = None
        #s2299_infoperant_infoagnocivo_salvar_custom_variaveis#
        tabelas_secundarias = []
        #[FUNCOES_ESPECIAIS_SALVAR]
        if dict_hash[
                'tab'] or 's2299_infoperant_infoagnocivo' in request.session[
                    'retorno_pagina']:
            request.session["retorno_hash"] = hash
            request.session[
                "retorno_pagina"] = 's2299_infoperant_infoagnocivo_salvar'
        controle_alteracoes = Auditoria.objects.using(db_slug).filter(
            identidade=s2299_infoperant_infoagnocivo_id,
            tabela='s2299_infoperant_infoagnocivo').all()
        context = {
            'ocorrencias': dados_evento['ocorrencias'],
            'validacao_precedencia': dados_evento['validacao_precedencia'],
            'validacoes': dados_evento['validacoes'],
            'status': dados_evento['status'],
            'controle_alteracoes': controle_alteracoes,
            's2299_infoperant_infoagnocivo': s2299_infoperant_infoagnocivo,
            's2299_infoperant_infoagnocivo_form':
            s2299_infoperant_infoagnocivo_form,
            'mensagem': mensagem,
            's2299_infoperant_infoagnocivo_id':
            int(s2299_infoperant_infoagnocivo_id),
            'usuario': usuario,
            'hash': hash,
            #[VARIAVEIS_SECUNDARIAS]
            'modulos_permitidos_lista': modulos_permitidos_lista,
            'paginas_permitidas_lista': paginas_permitidas_lista,
            'permissao': permissao,
            'data': datetime.datetime.now(),
            'pagina': pagina,
            'dict_permissoes': dict_permissoes,
            'for_print': int(dict_hash['print']),
            'tabelas_secundarias': tabelas_secundarias,
            'tab': dict_hash['tab'],
            #s2299_infoperant_infoagnocivo_salvar_custom_variaveis_context#
        }
        # for_print: 0/1 = HTML, 2 = PDF via wkhtmltopdf, 3 = XLS download.
        if for_print in (0, 1):
            return render(request, 's2299_infoperant_infoagnocivo_salvar.html',
                          context)
        elif for_print == 2:
            from wkhtmltopdf.views import PDFTemplateResponse
            response = PDFTemplateResponse(
                request=request,
                template='s2299_infoperant_infoagnocivo_salvar.html',
                filename="s2299_infoperant_infoagnocivo.pdf",
                context=context,
                show_content_in_browser=True,
                cmd_options={
                    'margin-top': 10,
                    'margin-bottom': 10,
                    'margin-right': 10,
                    'margin-left': 10,
                    'zoom': 1,
                    'dpi': 72,
                    'orientation': 'Landscape',
                    "viewport-size": "1366 x 513",
                    'javascript-delay': 1000,
                    'footer-center': '[page]/[topage]',
                    "no-stop-slow-scripts": True
                },
            )
            return response
        elif for_print == 3:
            from django.shortcuts import render_to_response
            response = render_to_response(
                's2299_infoperant_infoagnocivo_salvar.html', context)
            filename = "s2299_infoperant_infoagnocivo.xls"
            response[
                'Content-Disposition'] = 'attachment; filename=' + filename
            response[
                'Content-Type'] = 'application/vnd.ms-excel; charset=UTF-8'
            return response
    else:
        context = {
            'usuario': usuario,
            'modulos_permitidos_lista': modulos_permitidos_lista,
            'paginas_permitidas_lista': paginas_permitidas_lista,
            'permissao': permissao,
            'data': datetime.datetime.now(),
            'pagina': pagina,
            'dict_permissoes': dict_permissoes,
        }
        return render(request, 'permissao_negada.html', context)
def news_detail(request):
    """Return the detail payload for one news article (POST JSON API).

    Expects a JSON body with ``uid`` (article url key) and ``openid``
    (WeChat user id, or the string 'false' when anonymous).  Bumps the
    article's click counter, then assembles a positional JSON array:
    [header, content blocks, related news, index keywords, like status,
    like count, favourite status, top comments, liker avatars,
    total comment count].
    """
    if request.method == 'POST':
        received_body = request.body.decode('utf-8')
        received_body = json.loads(received_body)
        uid = received_body.get('uid')
        openid = received_body.get('openid')
        # print(uid)
        # print(openid)
        # Click counter: seed a random count on first view, otherwise add a
        # small random increment.
        count = NurIndex.objects.filter(uid=uid)
        for i in count:
            if i.count is None:
                clik = random.randint(20, 120)
            else:
                dic = random.randint(1, 3)
                clik = i.count + dic
            NurIndex.objects.filter(uid=uid).update(count=clik)
        # Related-content lookup: load the article record itself.
        data = {}
        result = NurNews.objects.filter(url=uid)
        data['list'] = json.loads(serializers.serialize('json', result))
        dic = data['list'][0]['fields']
        news_index = []
        index_a = dic['a']
        index_b = dic['b']
        index_c = dic['c']
        index_d = dic['d']
        index_e = dic['e']
        news_index.append(index_a)
        news_index.append(index_b)
        news_index.append(index_c)
        news_index.append(index_d)
        news_index.append(index_e)
        # Related news: match titles against the first two index keywords.
        data = {}
        result = NurIndex.objects.filter(
            Q(title__contains=index_a) | Q(title__contains=index_b))
        data['list'] = json.loads(serializers.serialize('json', result))
        index = data['list'][0:4]
        # Header (title + publication time).
        more_list = []
        more = {}
        more['title'] = dic['title']
        more['time'] = dic['pub_time']
        more_list.append(more)
        # Article content: keep the first 26 fields whose text is non-trivial
        # and does not contain the site watermark string.
        item = {}
        dic = dict_slice(dic, 0, 26)
        for i in dic:
            if len(dic[i]) > 2:
                if 'نۇر تورى تەرجىمىسى' not in dic[i]:
                    item[i] = dic[i]
        # Classify each block: 'a' = image url, 'b' = video url, 'c' = text.
        list = []
        for i in item:
            news = {}
            if '.jpg' in item[i] and 'https://' in item[i] or '.gif' in item[i]:
                news['a'] = item[i]
            elif '.mp4' in item[i]:
                news['b'] = item[i]
            else:
                news['c'] = item[i]
            # print(news)
            list.append(news)
        # News like status (feature currently disabled).
        priase = []
        # if openid == 'false':
        #     priase.append('false')
        # else:
        #     if not NewsPriase.objects.filter(url=uid, open_id=openid):
        #         priase.append('false')
        #     else:
        #         priase.append("true")
        # Like count placeholder (rebound to an int below).
        count = []
        # Favourite ("shoucang") status for this user.
        shoucang = []
        if openid == 'false':
            shoucang.append('false')
        else:
            if not Shoucang.objects.filter(url=uid, open_id=openid):
                shoucang.append('false')
            else:
                shoucang.append("true")
        # Top four verified comments, with this user's per-comment like flag.
        pinglun = []
        ping = Comment.objects.filter(url=uid,
                                      verify=1).order_by('-like_count')[0:4]
        for dic in ping:
            dic = model_to_dict(dic)
            comment_id = dic['comment_id']
            if openid == 'false':
                dic['CommentPrice'] = 'false'
                pinglun.append(dic)
            else:
                if not CommentPrice.objects.filter(comment_id=comment_id,
                                                   open_id=openid):
                    dic['CommentPrice'] = 'false'
                    pinglun.append(dic)
                else:
                    dic['CommentPrice'] = 'true'
                    pinglun.append(dic)
        # Avatars of users who liked this article.
        ava = NewsPriase.objects.filter(url=uid).order_by('-pub_time')
        avatar = json.loads(serializers.serialize('json', ava))
        # Total number of comments.
        comment = []
        count = Comment.objects.filter(url=uid).count()
        comment.append(count)
        # Final positional payload consumed by the client.
        data = [
            more_list, list, index, news_index, priase, count, shoucang,
            pinglun, avatar, comment
        ]
        # return JsonResponse(data=data,safe=False,json_dumps_params={'ensure_ascii':False},content_type='application/json')
        return JsonResponse(data=data, safe=False)
def merge_relationships(cls, merged_state, state1, state2):
    """
    Merge together the old relationships with the new.

    Copies the scenarios (measure-less ones directly), building files,
    simulations, and measures of ``state1``/``state2`` onto
    ``merged_state``, de-duplicating measures by field equality and
    rebuilding the scenario↔measure links on fresh Scenario copies.
    Returns ``merged_state``.
    """
    SimulationClass = apps.get_model('seed', 'Simulation')
    ScenarioClass = apps.get_model('seed', 'Scenario')
    PropertyMeasureClass = apps.get_model('seed', 'PropertyMeasure')

    # TODO: get some items off of this property view - labels and eventually notes

    # collect the relationships
    no_measure_scenarios = [x for x in state2.scenarios.filter(measures__isnull=True)] + \
        [x for x in state1.scenarios.filter(measures__isnull=True)]
    building_files = [x for x in state2.building_files.all()
                      ] + [x for x in state1.building_files.all()]
    simulations = [
        x for x in SimulationClass.objects.filter(
            property_state__in=[state1, state2])
    ]
    measures = [
        x for x in PropertyMeasureClass.objects.filter(
            property_state__in=[state1, state2])
    ]

    # copy in the no measure scenarios
    # (pk = None + save() is the Django idiom for cloning a row)
    for new_s in no_measure_scenarios:
        new_s.pk = None
        new_s.save()
        merged_state.scenarios.add(new_s)

    for new_bf in building_files:
        new_bf.pk = None
        new_bf.save()
        merged_state.building_files.add(new_bf)

    for new_sim in simulations:
        new_sim.pk = None
        new_sim.property_state = merged_state
        new_sim.save()

    if len(measures) > 0:
        # Compare measures on every concrete field except identity/ownership.
        measure_fields = [f.name for f in measures[0]._meta.fields]
        measure_fields.remove('id')
        measure_fields.remove('property_state')

        new_items = []

        # Create a list of scenarios and measures to reconstruct
        # {
        #     scenario_id_1: [ new_measure_id_1, new_measure_id_2 ],
        #     scenario_id_2: [ new_measure_id_2, new_measure_id_3 ],
        #     # measure ids can be repeated
        # }
        scenario_measure_map = {}
        for measure in measures:
            test_dict = model_to_dict(measure, fields=measure_fields)

            if test_dict in new_items:
                # Field-identical measure already copied; skip the duplicate.
                continue
            else:
                try:
                    new_measure = copy.deepcopy(measure)
                    new_measure.pk = None
                    new_measure.property_state = merged_state
                    new_measure.save()

                    # grab the scenario that is attached to the orig measure
                    # and create a new connection
                    for scenario in measure.scenario_set.all():
                        if scenario.pk not in scenario_measure_map.keys():
                            scenario_measure_map[scenario.pk] = []
                        scenario_measure_map[scenario.pk].append(
                            new_measure.pk)
                except IntegrityError:
                    _log.error(
                        "Measure state_id, measure_id, application_sacle, and implementation_status already exists -- skipping for now"
                    )
                new_items.append(test_dict)

        # connect back up the scenario measures
        for scenario_id, measure_list in scenario_measure_map.items():
            # create a new scenario from the old one
            scenario = ScenarioClass.objects.get(pk=scenario_id)
            scenario.pk = None
            scenario.property_state = merged_state
            scenario.save()  # save to get new id

            # get the measures
            # NOTE(review): this rebinds the outer ``measures`` list; harmless
            # here because the measure-copy loop has already finished.
            measures = PropertyMeasureClass.objects.filter(pk__in=measure_list)
            for measure in measures:
                scenario.measures.add(measure)
            scenario.save()

    return merged_state
def __init__(self, *args, **kwargs):
    """Form constructor bound to an optional document instance.

    Pops the required ``document`` keyword argument and stores it on the
    instance; when a document was supplied, pre-populates ``self.initial``
    with its field values.
    """
    document = kwargs.pop('document')
    self.document = document
    super().__init__(*args, **kwargs)
    if document:
        self.initial = model_to_dict(document)
def salvar(request, hash):
    """Create/edit an s5012_evtirrf record (generated eSocial totalizer view).

    Decodes the record id (and optional tab/print flags) from ``hash``.
    On POST with a valid form it updates the record (only while its status
    is 0) or creates a new one, writing an audit entry either way.
    Otherwise renders the edit form plus the child s5012_infocrcontrib
    list as HTML, PDF (for_print == 2) or XLS (for_print == 3), or a
    "permission denied" page when the profile lacks view rights.
    """
    from emensageriapro.settings import VERSAO_EMENSAGERIA, VERSAO_MODELO, TP_AMB
    db_slug = 'default'
    try:
        usuario_id = request.user.id
        dict_hash = get_hash_url(hash)
        s5012_evtirrf_id = int(dict_hash['id'])
        if 'tab' not in dict_hash.keys():
            dict_hash['tab'] = ''
        for_print = int(dict_hash['print'])
    except:
        # Any failure decoding the hash sends the visitor to login.
        usuario_id = False
        return redirect('login')
    usuario = get_object_or_404(Usuarios.objects.using(db_slug),
                                excluido=False, id=usuario_id)
    pagina = ConfigPaginas.objects.using(db_slug).get(excluido=False,
                                                      endereco='s5012_evtirrf')
    permissao = ConfigPermissoes.objects.using(db_slug).get(
        excluido=False, config_paginas=pagina,
        config_perfis=usuario.config_perfis)
    if s5012_evtirrf_id:
        s5012_evtirrf = get_object_or_404(s5012evtIrrf.objects.using(db_slug),
                                          excluido=False, id=s5012_evtirrf_id)
    dict_permissoes = json_to_dict(usuario.config_perfis.permissoes)
    paginas_permitidas_lista = usuario.config_perfis.paginas_permitidas
    modulos_permitidos_lista = usuario.config_perfis.modulos_permitidos
    if s5012_evtirrf_id:
        # Events already transmitted/processed become read-only.
        if s5012_evtirrf.status != 0:
            dict_permissoes['s5012_evtirrf_apagar'] = 0
            dict_permissoes['s5012_evtirrf_editar'] = 0
    if permissao.permite_visualizar:
        mensagem = None
        if s5012_evtirrf_id:
            s5012_evtirrf_form = form_s5012_evtirrf(request.POST or None,
                                                    instance=s5012_evtirrf,
                                                    slug=db_slug)
        else:
            # New record: pre-seed version/environment defaults from settings.
            s5012_evtirrf_form = form_s5012_evtirrf(
                request.POST or None, slug=db_slug,
                initial={
                    'versao': VERSAO_MODELO,
                    'processamento_codigo_resposta': 0,
                    'tpamb': TP_AMB,
                    'procemi': 1,
                    'verproc': VERSAO_EMENSAGERIA
                })
        if request.method == 'POST':
            if s5012_evtirrf_form.is_valid():
                dados = s5012_evtirrf_form.cleaned_data
                import json
                from django.forms.models import model_to_dict
                if s5012_evtirrf_id:
                    if s5012_evtirrf.status == 0:
                        dados['modificado_por_id'] = usuario_id
                        dados['modificado_em'] = datetime.datetime.now()
                        #s5012_evtirrf_campos_multiple_passo1
                        s5012evtIrrf.objects.using(db_slug).filter(
                            id=s5012_evtirrf_id).update(**dados)
                        obj = s5012evtIrrf.objects.using(db_slug).get(
                            id=s5012_evtirrf_id)
                        #s5012_evtirrf_editar_custom
                        #s5012_evtirrf_campos_multiple_passo2
                        messages.success(request, 'Alterado com sucesso!')
                        # Audit op 2 = edit: store before/after snapshots.
                        gravar_auditoria(
                            json.dumps(model_to_dict(s5012_evtirrf),
                                       indent=4, sort_keys=True, default=str),
                            json.dumps(model_to_dict(obj), indent=4,
                                       sort_keys=True, default=str),
                            's5012_evtirrf', s5012_evtirrf_id, usuario_id, 2)
                    else:
                        messages.error(
                            request,
                            'Não é possível salvar o evento, pois o mesmo não está com o status "Cadastrado"!'
                        )
                else:
                    dados['arquivo_original'] = 0
                    dados['criado_por_id'] = usuario_id
                    dados['criado_em'] = datetime.datetime.now()
                    dados['excluido'] = False
                    #s5012_evtirrf_cadastrar_campos_multiple_passo1
                    obj = s5012evtIrrf(**dados)
                    obj.save(using=db_slug)
                    #s5012_evtirrf_cadastrar_custom
                    #s5012_evtirrf_cadastrar_campos_multiple_passo2
                    # Generate the eSocial event identity for the new record.
                    identidade_evento(obj.id, db_slug)
                    messages.success(request, 'Cadastrado com sucesso!')
                    # Audit op 1 = create: previous state is an empty object.
                    gravar_auditoria(
                        '{}',
                        json.dumps(model_to_dict(obj), indent=4,
                                   sort_keys=True, default=str),
                        's5012_evtirrf', obj.id, usuario_id, 1)
                if request.session['retorno_pagina'] not in (
                        's5012_evtirrf_apagar', 's5012_evtirrf_salvar',
                        's5012_evtirrf'):
                    return redirect(request.session['retorno_pagina'],
                                    hash=request.session['retorno_hash'])
                if s5012_evtirrf_id != obj.id:
                    # New record: rebuild the url hash so the user lands on
                    # the edit page of the new object.
                    url_hash = base64.urlsafe_b64encode(
                        '{"print": "0", "id": "%s"}' % (obj.id))
                    return redirect('s5012_evtirrf_salvar', hash=url_hash)
            else:
                messages.error(request, 'Erro ao salvar!')
        s5012_evtirrf_form = disabled_form_fields(s5012_evtirrf_form,
                                                  permissao.permite_editar)
        if s5012_evtirrf_id:
            if s5012_evtirrf.status != 0:
                s5012_evtirrf_form = disabled_form_fields(
                    s5012_evtirrf_form, False)
        #s5012_evtirrf_campos_multiple_passo3
        # Wire every field to an AngularJS model on the page.
        for field in s5012_evtirrf_form.fields.keys():
            s5012_evtirrf_form.fields[field].widget.attrs[
                'ng-model'] = 's5012_evtirrf_' + field
        if int(dict_hash['print']):
            s5012_evtirrf_form = disabled_form_for_print(s5012_evtirrf_form)
        s5012_infocrcontrib_form = None
        s5012_infocrcontrib_lista = None
        if s5012_evtirrf_id:
            # Re-fetch: the record may have been changed by the POST above,
            # then build the child-table form and listing.
            s5012_evtirrf = get_object_or_404(
                s5012evtIrrf.objects.using(db_slug), excluido=False,
                id=s5012_evtirrf_id)
            s5012_infocrcontrib_form = form_s5012_infocrcontrib(
                initial={'s5012_evtirrf': s5012_evtirrf}, slug=db_slug)
            s5012_infocrcontrib_form.fields['s5012_evtirrf'].widget.attrs[
                'readonly'] = True
            s5012_infocrcontrib_lista = s5012infoCRContrib.objects.using(
                db_slug).filter(excluido=False,
                                s5012_evtirrf_id=s5012_evtirrf.id).all()
        else:
            s5012_evtirrf = None
        #s5012_evtirrf_salvar_custom_variaveis#
        tabelas_secundarias = []
        #[FUNCOES_ESPECIAIS_SALVAR]
        # Events whose code starts with S-5xxx are totalizer events
        # (generated check against the literal event name).
        if 's5012_evtirrf'[1] == '5':
            evento_totalizador = True
        else:
            evento_totalizador = False
        if not evento_totalizador:
            # Non-totalizer events: lock environment/version fields to the
            # values configured in settings.
            s5012_evtirrf_form.fields['tpamb'].widget.attrs['disabled'] = True
            s5012_evtirrf_form.fields['tpamb'].widget.attrs['readonly'] = True
            s5012_evtirrf_form.fields['tpamb'].value = TP_AMB
            s5012_evtirrf_form.fields['procemi'].widget.attrs[
                'disabled'] = True
            s5012_evtirrf_form.fields['procemi'].widget.attrs[
                'readonly'] = True
            s5012_evtirrf_form.fields['procemi'].value = 1
            s5012_evtirrf_form.fields['verproc'].widget.attrs[
                'readonly'] = True
            s5012_evtirrf_form.fields['verproc'].value = VERSAO_EMENSAGERIA
        if dict_hash['tab'] or 's5012_evtirrf' in request.session[
                'retorno_pagina']:
            request.session["retorno_hash"] = hash
            request.session["retorno_pagina"] = 's5012_evtirrf_salvar'
        controle_alteracoes = Auditoria.objects.using(db_slug).filter(
            identidade=s5012_evtirrf_id, tabela='s5012_evtirrf').all()
        context = {
            'evento_totalizador': evento_totalizador,
            'controle_alteracoes': controle_alteracoes,
            's5012_evtirrf': s5012_evtirrf,
            's5012_evtirrf_form': s5012_evtirrf_form,
            'mensagem': mensagem,
            's5012_evtirrf_id': int(s5012_evtirrf_id),
            'usuario': usuario,
            'hash': hash,
            's5012_infocrcontrib_form': s5012_infocrcontrib_form,
            's5012_infocrcontrib_lista': s5012_infocrcontrib_lista,
            'modulos_permitidos_lista': modulos_permitidos_lista,
            'paginas_permitidas_lista': paginas_permitidas_lista,
            'permissao': permissao,
            'data': datetime.datetime.now(),
            'pagina': pagina,
            'dict_permissoes': dict_permissoes,
            'for_print': int(dict_hash['print']),
            'tabelas_secundarias': tabelas_secundarias,
            'tab': dict_hash['tab'],
            #s5012_evtirrf_salvar_custom_variaveis_context#
        }
        # for_print: 0/1 = HTML, 2 = PDF via wkhtmltopdf, 3 = XLS download.
        if for_print in (0, 1):
            return render(request, 's5012_evtirrf_salvar.html', context)
        elif for_print == 2:
            from wkhtmltopdf.views import PDFTemplateResponse
            response = PDFTemplateResponse(
                request=request,
                template='s5012_evtirrf_salvar.html',
                filename="s5012_evtirrf.pdf",
                context=context,
                show_content_in_browser=True,
                cmd_options={
                    'margin-top': 10,
                    'margin-bottom': 10,
                    'margin-right': 10,
                    'margin-left': 10,
                    'zoom': 1,
                    'dpi': 72,
                    'orientation': 'Landscape',
                    "viewport-size": "1366 x 513",
                    'javascript-delay': 1000,
                    'footer-center': '[page]/[topage]',
                    "no-stop-slow-scripts": True
                },
            )
            return response
        elif for_print == 3:
            from django.shortcuts import render_to_response
            response = render_to_response('s5012_evtirrf_salvar.html', context)
            filename = "s5012_evtirrf.xls"
            response[
                'Content-Disposition'] = 'attachment; filename=' + filename
            response[
                'Content-Type'] = 'application/vnd.ms-excel; charset=UTF-8'
            return response
    else:
        context = {
            'usuario': usuario,
            'modulos_permitidos_lista': modulos_permitidos_lista,
            'paginas_permitidas_lista': paginas_permitidas_lista,
            'permissao': permissao,
            'data': datetime.datetime.now(),
            'pagina': pagina,
            'dict_permissoes': dict_permissoes,
        }
        return render(request, 'permissao_negada.html', context)
def serialize(self):
    """Return this record as a dict, with the related 'obat' serialized recursively."""
    return {**model_to_dict(self), 'obat': self.obat.serialize()}
def get_course_serializable(course, sem, school):
    """Serialize a course to a plain dict, attaching its meeting sections.

    The 'sections' key is filled from get_meeting_sections for the given
    semester and school.
    """
    serialized = model_to_dict(course)
    serialized["sections"] = get_meeting_sections(course, sem, school)
    return serialized
def serialize(self):
    """Return this record as a dict with a formatted purchase timestamp and nested obat."""
    formatted_time = str(self.waktu_pembelian.strftime("%d %b %Y %H:%M"))
    payload = model_to_dict(self)
    payload['waktu_pembelian'] = formatted_time
    payload['obat'] = self.serialize_obat()
    return payload
def get(self, request, sem_name, year):
    """
    Returns:
        **If the query parameter 'count' is present**
        Information regarding the number of friends only::

            {
                "id": Course with the most friends,
                "count": The maximum # of friends in a course,
                "total_count": the total # in all classes on timetable,
            }

        **If the query parameter course_ids is present**
        a list of dictionaries representing past classmates and current
        classmates. These are students who the authenticated user is
        friends with and who has social courses enabled.::

            [{
                "course_id":6137,
                "past_classmates":[...],
                "classmates":[...]
            }, ...]

        **Otherwise**
        a list of friends and non-friends alike who have social_all
        enabled to be displayed in the "find-friends" modal. Sorted by
        the number courses the authenticated user shares.::

            [{
                "name": "...",
                "is_friend": Whether or not the user is current user's friend,
                "profile_url": link to FB profile,
                "shared_courses": [...],
                "peer": Info about the user,
            }, ...]
    """
    if request.query_params.get('count'):
        # Mode 1: friend counts only — find the course with the most friends.
        school = request.subdomain
        student = Student.objects.get(user=request.user)
        course_ids = map(int, request.query_params.getlist('course_ids[]'))
        semester, _ = Semester.objects.get_or_create(name=sem_name, year=year)
        total_count = 0
        count = 0
        most_friend_course_id = -1
        for course_id in course_ids:
            temp_count = get_friend_count_from_course_id(
                school, student, course_id, semester)
            if temp_count > count:
                count = temp_count
                most_friend_course_id = course_id
            total_count += temp_count
        data = {
            "id": most_friend_course_id,
            "count": count,
            "total_count": total_count
        }
        return Response(data, status=status.HTTP_200_OK)
    elif request.query_params.getlist('course_ids[]'):
        # Mode 2: per-course classmates for the given course ids.
        school = request.subdomain
        student = Student.objects.get(user=request.user)
        course_ids = map(int, request.query_params.getlist('course_ids[]'))
        semester, _ = Semester.objects.get_or_create(name=sem_name, year=year)
        # user opted in to sharing courses
        course_to_classmates = {}
        if student.social_courses:
            friends = student.friends.filter(social_courses=True)
            for course_id in course_ids:
                course_to_classmates[course_id] = \
                    get_classmates_from_course_id(school, student, course_id,
                                                  semester, friends=friends)
        return Response(course_to_classmates, status=status.HTTP_200_OK)
    else:
        # Mode 3: "find friends" — peers with social_all sharing courses
        # with the student's latest personal timetable.
        school = request.subdomain
        student = Student.objects.get(user=request.user)
        semester, _ = Semester.objects.get_or_create(name=sem_name, year=year)
        current_tt = student.personaltimetable_set.filter(
            school=school, semester=semester).order_by('last_updated').last()
        if current_tt is None:
            return Response([], status=status.HTTP_200_OK)
        current_tt_courses = current_tt.courses.all()
        # The most recent TT per student with social enabled that has
        # courses in common with input student
        matching_tts = PersonalTimetable.objects.filter(
            student__social_all=True,
            courses__id__in=current_tt_courses,
            semester=semester) \
            .exclude(student=student) \
            .order_by('student', 'last_updated') \
            .distinct('student')
        friends = []
        for matching_tt in matching_tts:
            friend = matching_tt.student
            # '&' intersects the two querysets.
            sections_in_common = matching_tt.sections.all(
            ) & current_tt.sections.all()
            courses_in_common = matching_tt.courses.all() & current_tt_courses
            shared_courses = []
            for course in courses_in_common:
                shared_courses.append({
                    'course':
                    model_to_dict(course, exclude=[
                        'unstopped_description', 'description', 'credits'
                    ]),
                    # is there a section for this course that is in both
                    # timetables?
                    'in_section':
                    (sections_in_common & course.section_set.all()).exists()
                })
            friends.append({
                'peer':
                model_to_dict(friend,
                              exclude=['user', 'id', 'fbook_uid', 'friends']),
                'is_friend':
                student.friends.filter(id=friend.id).exists(),
                'shared_courses':
                shared_courses,
                'profile_url':
                'https://www.facebook.com/' + friend.fbook_uid,
                'name':
                friend.user.first_name + ' ' + friend.user.last_name,
                'large_img':
                'https://graph.facebook.com/' + friend.fbook_uid +
                '/picture?width=700&height=700'
            })
        # Most shared courses first.
        friends.sort(key=lambda friend: len(friend['shared_courses']),
                     reverse=True)
        return Response(friends, status=status.HTTP_200_OK)
def serialize(self):
    """Return this model instance's editable fields as a plain dict."""
    as_dict = model_to_dict(self)
    return as_dict
def _dict(self):
    """Return a dict of every concrete and many-to-many field value of this instance."""
    concrete_names = [f.name for f in self._meta.fields]
    m2m_names = [f.name for f in self._meta.many_to_many]
    return model_to_dict(self, fields=concrete_names + m2m_names)
def serialize_recommendation_item(recommendations):
    """Convert each recommendation's recommended_item to a dict of its fields."""
    recommended_items = (rec.recommended_item for rec in recommendations)
    return [model_to_dict(item) for item in recommended_items]
def get_push_dict(self):
    """Build the push payload: problem fields merged with the Judge-declared model fields.

    Model fields win on key collisions, mirroring the merge order.
    """
    problem_fields = {}
    self.get_problem_field(problem_fields)
    merged = dict(problem_fields)
    merged.update(model_to_dict(self, fields=self.Judge.field))
    return merged
def users_to_zerver_userprofile(
        slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int,
        timestamp: Any, domain_name: str
) -> Tuple[List[ZerverFieldsT], List[ZerverFieldsT], AddedUsersT,
           List[ZerverFieldsT], List[ZerverFieldsT]]:
    """
    Returns:
    1. zerver_userprofile, which is a list of user profile
    2. avatar_list, which is list to map avatars to zulip avatard records.json
    3. added_users, which is a dictionary to map from slack user id to zulip
       user id
    4. zerver_customprofilefield, which is a list of all custom profile fields
    5. zerver_customprofilefield_values, which is a list of user profile fields
    """
    logging.info('######### IMPORTING USERS STARTED #########\n')
    zerver_userprofile = []
    zerver_customprofilefield = []  # type: List[ZerverFieldsT]
    zerver_customprofilefield_values = []  # type: List[ZerverFieldsT]
    avatar_list = []  # type: List[ZerverFieldsT]
    added_users = {}

    # The user data we get from the slack api does not contain custom profile data
    # Hence we get it from the slack zip file
    slack_data_file_user_list = get_data_file(slack_data_dir + '/users.json')

    # To map user id with the custom profile fields of the corresponding user
    slack_user_custom_field_map = {}  # type: ZerverFieldsT
    # To store custom fields corresponding to their ids
    custom_field_map = {}  # type: ZerverFieldsT

    for user in slack_data_file_user_list:
        process_slack_custom_fields(user, slack_user_custom_field_map)

    # We have only one primary owner in slack, see link
    # https://get.slack.help/hc/en-us/articles/201912948-Owners-and-Administrators
    # This is to import the primary owner first from all the users
    user_id_count = custom_field_id_count = customprofilefield_id = 0
    primary_owner_id = user_id_count
    user_id_count += 1

    for user in users:
        slack_user_id = user['id']

        # The primary owner always gets the reserved id 0; everyone else
        # takes the next value of the running counter.
        if user.get('is_primary_owner', False):
            user_id = primary_owner_id
        else:
            user_id = user_id_count

        # email
        email = get_user_email(user, domain_name)

        # avatar
        # ref: https://chat.zulip.org/help/change-your-avatar
        avatar_url = build_avatar_url(slack_user_id, user['team_id'],
                                      user['profile']['avatar_hash'])
        build_avatar(user_id, realm_id, email, avatar_url, timestamp,
                     avatar_list)

        # check if user is the admin
        realm_admin = get_admin(user)

        # timezone
        timezone = get_user_timezone(user)

        # Check for custom profile fields
        if slack_user_id in slack_user_custom_field_map:
            # For processing the fields
            custom_field_map, customprofilefield_id = build_customprofile_field(
                zerver_customprofilefield,
                slack_user_custom_field_map[slack_user_id],
                customprofilefield_id, realm_id, custom_field_map)
            # Store the custom field values for the corresponding user
            custom_field_id_count = build_customprofilefields_values(
                custom_field_map, slack_user_custom_field_map[slack_user_id],
                user_id, custom_field_id_count,
                zerver_customprofilefield_values)

        userprofile = UserProfile(
            full_name=get_user_full_name(user),
            short_name=user['name'],
            is_active=not user['deleted'],
            id=user_id,
            email=email,
            delivery_email=email,
            avatar_source='U',
            is_bot=user.get('is_bot', False),
            pointer=-1,
            is_realm_admin=realm_admin,
            bot_type=1 if user.get('is_bot', False) else None,
            date_joined=timestamp,
            timezone=timezone,
            last_login=timestamp)
        userprofile_dict = model_to_dict(userprofile)
        # Set realm id separately as the corresponding realm is not yet a
        # Realm model instance
        userprofile_dict['realm'] = realm_id

        zerver_userprofile.append(userprofile_dict)
        added_users[slack_user_id] = user_id
        # Counter only advances for non-owners: the owner consumed id 0.
        if not user.get('is_primary_owner', False):
            user_id_count += 1

        logging.info(u"{} -> {}".format(user['name'],
                                        userprofile_dict['email']))

    process_customprofilefields(zerver_customprofilefield,
                                zerver_customprofilefield_values)
    logging.info('######### IMPORTING USERS FINISHED #########\n')
    return zerver_userprofile, avatar_list, added_users, \
        zerver_customprofilefield, zerver_customprofilefield_values
def marcartareacomoFinalizada(req):
    """Mark the task referenced by POST['id'] as finished and return it as JSON."""
    task_pk = req.POST["id"]
    task = Tarea.objects.get(pk=task_pk)
    task.estado = "Finalizada"
    task.save()
    return JsonResponse(model_to_dict(task))
def iset_export(request, iset, output_format='json'):
    """Export an InstallableSet in one of several formats.

    Args:
        request: the Django request.
        iset: installable set name or primary key.
        output_format: 'json' (default), 'csv' (set header row only),
            'csvc' (one row per content item), or 'sh' (shell template).

    Returns:
        An HttpResponse in the requested format, or 404 when the set
        cannot be resolved by name or pk.
    """
    # Resolve by name first, then fall back to primary key.
    try:
        iset = InstallableSet.objects.get(name=iset)
    except InstallableSet.DoesNotExist:
        try:
            iset = InstallableSet.objects.get(pk=iset)
        except InstallableSet.DoesNotExist:
            return HttpResponseNotFound(
                'there is no installable set by this name or ID')

    if output_format == 'csv':
        response = HttpResponse(content_type='text/csv; charset=utf-8')
        response['Content-Disposition'] = 'attachment; filename="iset.csv"'
        # FIX: exclude must be an iterable of field names. Passing the bare
        # string 'datafile' made Django perform substring membership tests
        # (f.name in 'datafile'), silently dropping any field whose name is
        # a substring such as 'data' or 'file'.
        res = model_to_dict(iset, exclude=('datafile',))
        wr = csv.DictWriter(response, fieldnames=res.keys(), restval="",
                            extrasaction='ignore', dialect='excel',
                            delimiter=";")
        wr.writeheader()
        wr.writerow(res)
        return response

    if output_format in ('csvc', 'sh', 'bash4'):
        # Flat per-item rows for the tabular/script formats.
        res = []
        for ii in iset.set_content.all():
            a = __ii_to_dict(ii)
            res.append(a)
    else:
        # Nested structure for JSON: set fields plus fully expanded content.
        res = model_to_dict(iset, exclude=('datafile',))
        res['set_content'] = []
        for ii in iset.set_content.all():
            iid = model_to_dict(ii, exclude=('datafile', 'belongs_to_set'))
            iid['how_to_install'] = [{
                'name': m.name,
                'id': m.pk,
                'target_offers': [{
                    'id': lll.id,
                    'name': lll.name
                } for lll in m.method_compatible_with.all()]
            } for m in ii.how_to_install.all()]
            lcv = ii.what_is_installed
            iid['what_is_installed'] = {
                'id': lcv.pk,
                'version': lcv.version,
                'target_logical_component_id': lcv.logical_component.id,
                'target_logical_component_name': lcv.logical_component.name
            }
            res['set_content'].append(iid)

    if output_format == 'csvc':
        # NOTE(review): res[0] raises IndexError for an empty set; current
        # behavior preserved — confirm whether an empty CSV is preferable.
        response = HttpResponse(content_type='text/csv; charset=utf-8')
        response['Content-Disposition'] = 'attachment; filename="iset.csv"'
        wr = csv.DictWriter(response, fieldnames=res[0].keys(), restval="",
                            extrasaction='ignore', dialect='excel',
                            delimiter=";")
        wr.writeheader()
        wr.writerows(res)
        return response
    elif output_format == 'sh':
        return render(request, 'scm/iset_export_sh.html', {'res': res},
                      content_type="text/plain")
    elif output_format == 'json':
        response = HttpResponse(content_type='text/json; charset=utf-8')
        json.dump(res, fp=response, ensure_ascii=False, indent=4)
        return response
    # NOTE(review): 'bash4' builds res but has no emitter branch and falls
    # through returning None — presumably unfinished; confirm intent.
def update(request, model, model_id): #Update model_id of type model
    """Update row ``model_id`` of the table named by ``model``.

    Each branch first fetches the existing row (so missing POST keys fall
    back to the current values), then rebuilds a model instance with the
    same primary key and saves it, overwriting the row.  Returns a
    JsonResponse ``{'ok': bool, 'data': dict-or-""}``.
    """
    try:
        if (model == 'users'):
            # Fetch current row so absent POST fields keep their old values.
            obj = User.objects.get(pk=model_id)
            first_name = request.POST.get('first_name', obj.first_name)
            last_name = request.POST.get('last_name', obj.last_name)
            email = request.POST.get('email', obj.email)
            password = request.POST.get('passwordHash', obj.passwordHash)
            # Rebuild with the same id so save() updates, not inserts.
            obj = User(id=model_id, first_name=first_name, last_name=last_name, email=email, passwordHash=password)
        elif (model == 'artists'):
            obj = Artist.objects.get(pk=model_id)
            name = request.POST.get('name', obj.name)
            obj = Artist(id=model_id, name=name)
        elif (model == 'records'):
            obj = Record.objects.get(pk=model_id)
            artist_id = request.POST.get('artist', obj.artist.id)
            artist = Artist.objects.get(pk=artist_id)
            name = request.POST.get('name', obj.name)
            release_date = request.POST.get('release_date', obj.release_date)
            pressing = request.POST.get('pressing', obj.pressing)
            obj = Record(id=model_id, artist=artist, name=name, release_date=release_date, pressing=pressing)
        elif (model == 'songs'):
            obj = Song.objects.get(pk=model_id)
            name = request.POST.get('name', obj.name)
            # Duration arrives as "H:M:S" text; parse then convert to a
            # timedelta for storage.
            duration = request.POST.get('duration', str(obj.duration))
            durStr = datetime.datetime.strptime(duration, "%H:%M:%S")
            dur = datetime.timedelta(hours=durStr.hour, minutes=durStr.minute, seconds=durStr.second)
            artist_id = request.POST.get('artist', obj.artist.id)
            artist = Artist.objects.get(pk=artist_id)
            record_id = request.POST.get('record', obj.record.id)
            record = Record.objects.get(pk=record_id)
            obj = Song(id=model_id, name=name, duration=dur, artist=artist, record=record)
        elif (model == 'listings'):
            obj = Listing.objects.get(pk=model_id)
            price = request.POST.get('price', obj.price)
            seller_id = request.POST.get('seller', obj.seller.id)
            seller = User.objects.get(pk=seller_id)
            buyer_id = request.POST.get('buyer', obj.buyer.id)
            buyer = User.objects.get(pk=buyer_id)
            record_id = request.POST.get('record', obj.record.id)
            record = Record.objects.get(pk=record_id)
            date_posted = request.POST.get('date_posted', obj.date_posted)
            obj = Listing(id=model_id, price=price, seller=seller, buyer=buyer, record=record, date_posted=date_posted)
        elif (model == 'genres'):
            obj = Genre.objects.get(pk=model_id)
            record_id = request.POST.get('record', obj.record.id)
            record = Record.objects.get(pk=record_id)
            name = request.POST.get('name', obj.name)
            obj = Genre(id=model_id, record=record, name=name)
        elif (model == 'authenticators'):
            obj = Authenticator.objects.get(pk=model_id)
            user_id = request.POST.get('user_id', obj.user_id)
            authenticator = request.POST.get('authenticator', obj.authenticator)
            date_created = request.POST.get('date', obj.date_created)
            # NOTE(review): unlike every other branch, no id/pk is passed
            # here — presumably save() inserts a new row (or the pk is one of
            # these fields). Confirm this is intentional.
            obj = Authenticator(user_id=user_id, authenticator=authenticator, date_created=date_created)
        obj.save()
        data = model_to_dict(obj)
        if (model == 'songs'):
            # timedelta is not JSON-serializable; stringify it.
            data['duration'] = str(data['duration'])
        result = {'ok': True, 'data': data}
    except ObjectDoesNotExist:
        # Any failed lookup (target row or referenced FK) yields ok=False.
        result = {'ok': False, 'data': ""}
    return JsonResponse(result)
def qml_style(request, layername, style_name=None):
    """Update/Retrieve QML style of a given QGIS Layer.

    :param layername: The layer name in Geonode.
    :type layername: basestring

    :param style_name: The style name recognized by QGIS Server
    :type style_name: str

    GET without ``style_name`` lists styles as JSON; GET with it returns the
    QML as an XML attachment. POST uploads a new QML style; DELETE removes
    one. Any other method returns 400.
    """
    layer = get_object_or_404(Layer, name=layername)
    if request.method == 'GET':
        # Request QML from QGIS server
        if not style_name:
            # If no style name provided, then it is a List request
            styles_obj = None
            try:
                styles_obj = style_list(layer, internal=False)
            except Exception:
                print("Failed to fetch styles")
            styles_dict = []
            if styles_obj:
                styles_dict = [model_to_dict(s) for s in styles_obj]
            # If no style returned by GetCapabilities, this is a bug in QGIS
            # Attempt to generate default style name
            if not styles_dict:
                style_url = style_get_url(layer, 'default')
                response = requests.get(style_url)
                if response.status_code == 200:
                    # Save the fetched default QML locally, register it, then
                    # re-query the style list.
                    style_url = style_add_url(layer, 'default')
                    with open(layer.qgis_layer.qml_path, 'w') as f:
                        f.write(ensure_string(response.content))
                    response = requests.get(style_url)
                    if response.status_code == 200:
                        styles_obj = style_list(layer, internal=False)
                        styles_dict = [
                            model_to_dict(s) for s in styles_obj
                        ]
            response = HttpResponse(
                json.dumps(styles_dict), content_type='application/json')
            return response
        # Return XML file of the style
        style_url = style_get_url(layer, style_name, internal=False)
        response = requests.get(style_url)
        if response.status_code == 200:
            response = HttpResponse(
                ensure_string(response.content), content_type='text/xml')
            response['Content-Disposition'] = 'attachment; filename=%s.qml' % (
                style_name, )
        else:
            # Propagate the QGIS-server error body and status as-is.
            response = HttpResponse(
                ensure_string(response.content), status=response.status_code)
        return response
    elif request.method == 'POST':
        # For people who uses API request
        if not request.user.has_perm(
                'change_resourcebase', layer.get_self_resource()):
            return HttpResponse(
                'User does not have permission to change QML style.',
                status=403)
        # Request about adding new QML style
        form = QGISLayerStyleUploadForm(request.POST, request.FILES)
        if not form.is_valid():
            return TemplateResponse(request,
                                    'qgis_server/forms/qml_style.html', {
                                        'resource': layer,
                                        'style_upload_form': form
                                    }, status=400).render()
        try:
            uploaded_qml = request.FILES['qml']
            # update qml in uploaded media folder
            # check upload session, is qml file exists?
            layerfile_set = layer.upload_session.layerfile_set
            try:
                qml_layer_file = layerfile_set.get(name='qml')
                # if it is exists, we need to delete it, because it won't be
                # managed by geonode
                qml_layer_file.delete()
            except LayerFile.DoesNotExist:
                pass
            # update qml in QGIS Layer folder
            content = uploaded_qml.read()
            qgis_layer = get_object_or_404(QGISServerLayer, layer=layer)
            with open(qgis_layer.qml_path, mode='w') as f:
                f.write(content)
            # construct URL to post new QML
            style_name = request.POST['name']
            style_title = request.POST['title']
            if not style_name:
                # Assign default name
                name_format = 'style_%Y%m%d%H%M%S'
                current_time = datetime.datetime.utcnow()
                style_name = current_time.strftime(name_format)
            # Add new style
            style_url = style_add_url(layer, style_name)
            response = requests.get(style_url)
            if not (response.status_code == 200
                    and ensure_string(response.content) == 'OK'):
                # QGIS server rejected the style; refresh the model cache and
                # report the server's message to the user.
                try:
                    style_list(layer, internal=False)
                except Exception:
                    print("Failed to fetch styles")
                return TemplateResponse(
                    request, 'qgis_server/forms/qml_style.html', {
                        'resource': layer,
                        'style_upload_form': QGISLayerStyleUploadForm(),
                        'alert': True,
                        'alert_message': ensure_string(response.content),
                        'alert_class': 'alert-danger'
                    }, status=response.status_code).render()
            # We succeeded on adding new style
            # Refresh style models
            try:
                style_list(layer, internal=False)
                qgis_style = layer.qgis_layer.styles.get(name=style_name)
                qgis_style.title = style_title
                qgis_style.save()
                alert_message = 'Successfully add style %s' % style_name
            except Exception:
                alert_message = 'Failed to fetch styles'
            return TemplateResponse(request,
                                    'qgis_server/forms/qml_style.html', {
                                        'resource': layer,
                                        'style_upload_form': form,
                                        'alert': True,
                                        'alert_class': 'alert-success',
                                        'alert_message': alert_message
                                    }, status=201).render()
        except Exception as e:
            logger.exception(e)
            return HttpResponseServerError()
    elif request.method == 'DELETE':
        # Request to delete particular QML Style
        if not style_name:
            # Style name should exists
            return HttpResponseBadRequest('Style name not provided.')
        # Handle removing tile-style cache
        try:
            style = layer.qgis_layer.styles.get(name=style_name)
            shutil.rmtree(style.style_tile_cache_path)
        except Exception:
            # Best-effort cache removal: missing style/cache is ignored.
            pass
        style_url = style_remove_url(layer, style_name)
        response = requests.get(style_url)
        if not (response.status_code == 200
                and ensure_string(response.content) == 'OK'):
            alert_message = ensure_string(response.content)
            if 'NAME is NOT an existing style.' in ensure_string(
                    response.content):
                alert_message = '%s is not an existing style' % style_name
            try:
                style_list(layer, internal=False)
            except Exception:
                print("Failed to fetch styles")
            return TemplateResponse(
                request, 'qgis_server/forms/qml_style.html', {
                    'resource': layer,
                    'style_upload_form': QGISLayerStyleUploadForm(),
                    'alert': True,
                    'alert_message': alert_message,
                    'alert_class': 'alert-danger'
                }, status=response.status_code).render()
        # Successfully removed styles
        # Handle when default style is deleted.
        # Will be handled by style_list method
        try:
            style_list(layer, internal=False)
            alert_message = 'Successfully deleted style %s' % style_name
        except Exception:
            alert_message = 'Failed to fetch styles'
        return TemplateResponse(
            request, 'qgis_server/forms/qml_style.html', {
                'resource': layer,
                'style_upload_form': QGISLayerStyleUploadForm(),
                'alert': True,
                'alert_message': alert_message,
                'alert_class': 'alert-success'
            }, status=200).render()
    # Unsupported HTTP method.
    return HttpResponseBadRequest()
def format_objects(self, objects): """ Formats the object then adds a geogig_link as necessary. """ formatted_objects = [] for obj in objects: # convert the object to a dict using the standard values. # includes other values values = self.VALUES + [ 'alternate', 'name' ] formatted_obj = model_to_dict(obj, fields=values) username = obj.owner.get_username() full_name = (obj.owner.get_full_name() or username) formatted_obj['owner__username'] = username formatted_obj['owner_name'] = full_name if obj.category: formatted_obj['category__gn_description'] = obj.category.gn_description if obj.group: formatted_obj['group'] = obj.group try: formatted_obj['group_name'] = GroupProfile.objects.get(slug=obj.group.name) except GroupProfile.DoesNotExist: formatted_obj['group_name'] = obj.group formatted_obj['keywords'] = [k.name for k in obj.keywords.all()] if obj.keywords else [] formatted_obj['regions'] = [r.name for r in obj.regions.all()] if obj.regions else [] # add the geogig link formatted_obj['geogig_link'] = obj.geogig_link # provide style information bundle = self.build_bundle(obj=obj) formatted_obj['default_style'] = self.default_style.dehydrate( bundle, for_list=True) # Add resource uri formatted_obj['resource_uri'] = self.get_resource_uri(bundle) formatted_obj['links'] = self.dehydrate_ogc_links(bundle) if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0: formatted_obj['site_url'] = settings.SITEURL # Probe Remote Services formatted_obj['store_type'] = 'dataset' formatted_obj['online'] = True if hasattr(obj, 'storeType'): formatted_obj['store_type'] = obj.storeType if obj.storeType == 'remoteStore' and hasattr(obj, 'remote_service'): if obj.remote_service: formatted_obj['online'] = (obj.remote_service.probe == 200) else: formatted_obj['online'] = False formatted_obj['gtype'] = self.dehydrate_gtype(bundle) # put the object on the response stack formatted_objects.append(formatted_obj) return formatted_objects
def post(self, request): nombre = request.POST["nombre"] cliente = Cliente() cliente.nombre = nombre cliente.save() return JsonResponse(model_to_dict(cliente))
def user_add_song(req): post_data = json.loads(req.body.decode()) playlist = Playlist.objects.easy_create(post_data) data = model_to_dict(playlist) data = json.dumps(data) return HttpResponse(data, status=200, content_type='application/json')
def posts_handler_generic(request): if (request.method == 'POST'): # TODO: ADD validation user = check_authenticate(request) if (user == None): return HttpResponse(status=403) try: author = Author.objects.get(user_id=user.id) except: return HttpResponse(status=403) post = json.loads(request.body) #TODO: whatever this is supposed to be needs to be fixed post['source'] = "http://127.0.0.1:8000/posts/fixthislater" post['origin'] = "http://127.0.0.1:8000/posts/originfixthislater" post['author_id'] = author.id created = create_post(post) created.comments = [] serializer = PostSerializer(created) json_data = JSONRenderer().render(serializer.data) response = HttpResponse(json_data, content_type='application/json') response.status = 201 response['Location'] = request.path + str(created.id) return response #return create_json_response_with_location(data, new_post.id, request.path) elif (request.method == 'GET'): # TODO: this should return all the posts that a user can see, i.e their # stream, not all posts in db posts = Post.objects.all() size = request.GET.get('size', '25') paginator = PostPagination() paginator.page_size = size posts = Post.objects.all() result_posts = paginator.paginate_queryset(posts, request) for post in result_posts: comments = Comment.objects.filter(post_id=post['id']) author = Author.objects.get(id=post['author_id']) post['comments'] = comments post['author'] = author serializer = PostSerializer(result_posts, many=True) return paginator.get_paginated_response(serializer.data) elif (request.method == 'PUT'): # TODO: VALIDATION... again body = json.loads(request.body) new_post = create_post(body) data = model_to_dict(new_post) return create_json_response_with_location(data, new_post.id, request.path)
def export(self, fields=None): _fields = ['name', 'count_limit'] if fields is None else fields return model_to_dict(self, fields=_fields)
def __init__(self, *args, **kwargs):
    """Build the page admin form, prefilling initial data from a parent
    page or a translation source, and customizing the `redirect_to` and
    `template_key` widgets."""
    ensure_completely_loaded()
    if 'initial' in kwargs:
        if 'parent' in kwargs['initial']:
            # Prefill a few form values from the parent page
            try:
                page = self.page_manager.get(
                    pk=kwargs['initial']['parent'])
                data = model_to_dict(page)
                # Drop fields the manager says must never be copied over.
                for field in self.page_manager.exclude_from_copy:
                    if field in data:
                        del data[field]
                # These are always excluded from prefilling
                for field in self.never_copy_fields:
                    if field in data:
                        del data[field]
                # Caller-supplied initial values win over copied ones.
                data.update(kwargs['initial'])
                kwargs['initial'] = data
            except self.page_model.DoesNotExist:
                pass
        elif 'translation_of' in kwargs['initial']:
            # Only if translation extension is active
            try:
                page = self.page_manager.get(
                    pk=kwargs['initial']['translation_of'])
                original = page.original_translation
                data = {
                    'translation_of': original.id,
                    'template_key': original.template_key,
                    'active': original.active,
                    'in_navigation': original.in_navigation,
                }
                if original.parent:
                    try:
                        data['parent'] = original.parent.get_translation(
                            kwargs['initial']['language']).id
                    except self.page_model.DoesNotExist:
                        # ignore this -- the translation does not exist
                        pass
                data.update(kwargs['initial'])
                kwargs['initial'] = data
            except (AttributeError, self.page_model.DoesNotExist):
                pass
    # Not required, only a nice-to-have for the `redirect_to` field
    modeladmin = kwargs.pop('modeladmin', None)
    super(PageAdminForm, self).__init__(*args, **kwargs)
    if modeladmin:
        # Note: Using `parent` is not strictly correct, but we can be
        # sure that `parent` always points to another page instance,
        # and that's good enough for us.
        self.fields['redirect_to'].widget = RedirectToWidget(
            self.page_model._meta.get_field('parent').rel,
            modeladmin.admin_site)
    if 'instance' in kwargs:
        # Rebuild the template choices, rendering a preview image in the
        # label when the template provides one.
        choices = []
        for key, template in kwargs['instance'].TEMPLATE_CHOICES:
            template = kwargs['instance']._feincms_templates[key]
            if template.preview_image:
                choices.append(
                    (template.key,
                     mark_safe(u'<img src="%s" alt="%s" /> %s' % (
                         template.preview_image,
                         template.key,
                         template.title,
                     ))))
            else:
                choices.append((template.key, template.title))
        self.fields['template_key'].choices = choices
def to_dict(self): comment = model_to_dict(self) comment['author_username'] = self.author.user.username comment['author_first_name'] = self.author.user.first_name comment['author_last_name'] = self.author.user.last_name return comment
def handle(self, *args, **options):
    """Management-command entry point: extract a light curve for a SuperBlock.

    Walks every Block/Frame of the selected SuperBlock(s), matches the
    predicted target position against detected sources, then writes ALCDEF,
    lightcurve, MPC and ADES output files plus plots and an optional GIF.
    """
    # Suppress incorrect FITSFixedWarnings
    warnings.simplefilter('ignore', FITSFixedWarning)
    self.stdout.write("==== Light curve building %s ====" % (datetime.now().strftime('%Y-%m-%d %H:%M')))
    try:
        start_super_block = SuperBlock.objects.get(tracking_number=options['supblock'])
    except SuperBlock.DoesNotExist:
        # NOTE(review): bare exit() in a management command — CommandError
        # would be more conventional here.
        self.stdout.write("Cannot find SuperBlock with Tracking Number %d" % options['supblock'])
        exit(-1)
    start_blocks = Block.objects.filter(superblock=start_super_block.id)
    start_block = start_blocks[0]
    if options['single'] is True:
        super_blocks = [start_super_block, ]
    else:
        # Include all SuperBlocks of the same body within the timespan window.
        super_blocks = SuperBlock.objects.filter(body=start_super_block.body, block_start__gte=start_super_block.block_start-timedelta(days=options['timespan']))
    obs_date = None
    if options['date']:
        if isinstance(options['date'], str):
            try:
                obs_date = datetime.strptime(options['date'], '%Y%m%d')
            except ValueError:
                raise CommandError(usage)
        else:
            obs_date = options['date']
    # Initialize lists
    times = []
    alltimes = []
    mags = []
    mag_errs = []
    zps = []
    zp_errs = []
    mpc_lines = []
    psv_lines = []
    total_frame_count = 0
    mpc_site = []
    fwhm = []
    air_mass = []
    output_file_list = []
    # build directory path / set permissions
    obj_name = sanitize_object_name(start_super_block.body.current_name())
    datadir = os.path.join(options['datadir'], obj_name)
    out_path = settings.DATA_ROOT
    data_path = ''
    rw_permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH
    if not os.path.exists(datadir) and not settings.USE_S3:
        try:
            os.makedirs(datadir)
            # Set directory permissions correctly for shared directories
            # Sets to (r)ead,(w)rite,e(x)ecute for owner & group, r-x for others
            os.chmod(datadir, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
        except:
            # NOTE(review): bare except also masks KeyboardInterrupt etc.
            msg = "Error creating output path %s" % datadir
            raise CommandError(msg)
    sb_day = start_super_block.block_start.strftime("%Y%m%d")

    # Turn telescope class into a diameter for theoretical FWHM curve
    tel_classes = start_super_block.get_telclass()
    if len(tel_classes.split(",")) > 1:
        self.stdout.write("Multiple telescope sizes found; theoretical FWHM curve will be wrong")
        tel_class = tel_classes.split(",")[0]
    else:
        tel_class = tel_classes
    try:
        # e.g. '0m4' -> 0.4 (metres); attach astropy units afterwards.
        tel_diameter = float(tel_class.replace('m', '.'))
        tel_diameter *= u.m
    except ValueError:
        self.stdout.write("Error determining telescope diameter, assuming 0.4m")
        tel_diameter = 0.4*u.m

    # Set offsets, convert from Arcsec to Radians
    ra_offset = radians(options['ra_offset'] / 3600)
    dec_offset = radians(options['dec_offset'] / 3600)
    for super_block in super_blocks:
        # Create, name, open ALCDEF file.
        if obs_date:
            alcdef_date = options['date']
        else:
            alcdef_date = super_block.block_start.strftime("%Y%m%d")
        base_name = '{}_{}_{}_{}_'.format(obj_name, super_block.get_sites().replace(',', ''), alcdef_date, super_block.tracking_number)
        alcdef_filename = base_name + 'ALCDEF.txt'
        # NOTE(review): lstrip() strips a character set, not a prefix —
        # presumably meant to strip the leading out_path; verify.
        output_file_list.append('{},{}'.format(alcdef_filename, datadir.lstrip(out_path)))
        alcdef_txt = ''
        block_list = Block.objects.filter(superblock=super_block.id)
        if obs_date:
            # Restrict to blocks observed on the requested night.
            block_list = block_list.filter(when_observed__lt=obs_date+timedelta(days=2)).filter(when_observed__gt=obs_date)
        self.stdout.write("Analyzing SuperblockBlock# %s for %s" % (super_block.tracking_number, super_block.body.current_name()))
        for block in block_list:
            block_mags = []
            block_mag_errs = []
            block_times = []
            outmag = "NONE"
            self.stdout.write("Analyzing Block# %d" % block.id)
            obs_site = block.site
            # Get all Useful frames from each block
            frames_red = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_RED_FRAMETYPE]).order_by('filter', 'midpoint')
            frames_ql = Frame.objects.filter(block=block.id, frametype__in=[Frame.BANZAI_QL_FRAMETYPE]).order_by('filter', 'midpoint')
            # Prefer the fuller of the reduced vs quick-look frame sets.
            if len(frames_red) >= len(frames_ql):
                frames_all_zp = frames_red
            else:
                frames_all_zp = frames_ql
            frames = frames_all_zp.filter(zeropoint__isnull=False)
            self.stdout.write("Found %d frames (of %d total) for Block# %d with good ZPs" % (frames.count(), frames_all_zp.count(), block.id))
            self.stdout.write("Searching within %.1f arcseconds and +/-%.2f delta magnitudes" % (options['boxwidth'], options['deltamag']))
            total_frame_count += frames.count()
            frame_data = []
            if frames_all_zp.count() != 0:
                elements = model_to_dict(block.body)
                filter_list = []
                for frame in frames_all_zp:
                    # get predicted position and magnitude of target during time of each frame
                    emp_line = compute_ephem(frame.midpoint, elements, frame.sitecode)
                    ra = S.sla_dranrm(emp_line['ra'] + ra_offset)
                    dec = copysign(S.sla_drange(emp_line['dec'] + dec_offset), emp_line['dec'] + dec_offset)
                    mag_estimate = emp_line['mag']
                    (ra_string, dec_string) = radec2strings(ra, dec, ' ')
                    # Find list of frame sources within search region of predicted coordinates
                    sources = search_box(frame, ra, dec, options['boxwidth'])
                    midpoint_string = frame.midpoint.strftime('%Y-%m-%d %H:%M:%S')
                    self.stdout.write("%s %s %s V=%.1f %s (%d) %s" % (midpoint_string, ra_string, dec_string, mag_estimate, frame.sitecode, len(sources), frame.filename))
                    best_source = None
                    # Find source most likely to be target (Could Use Some Work)
                    if len(sources) != 0 and frame.zeropoint is not None:
                        if len(sources) == 1:
                            best_source = sources[0]
                        elif len(sources) > 1:
                            # If more than 1 source, pick closest within deltamag
                            min_sep = options['boxwidth'] * options['boxwidth']
                            for source in sources:
                                sep = S.sla_dsep(ra, dec, radians(source.obs_ra), radians(source.obs_dec))
                                sep = degrees(sep) * 3600.0
                                src_ra_string, src_dec_string = radec2strings(radians(source.obs_ra), radians(source.obs_dec))
                                # Compare against the previous accepted mag when
                                # available, otherwise the ephemeris estimate.
                                if len(block_mags) > 0:
                                    delta_mag = abs(block_mags[-1] - source.obs_mag)
                                else:
                                    delta_mag = abs(mag_estimate - source.obs_mag)
                                self.stdout.write("%s %s %s %s %.1f %.1f-%.1f %.1f" % (
                                    ra_string, dec_string,
                                    src_ra_string, src_dec_string,
                                    sep, mag_estimate, source.obs_mag,
                                    delta_mag))
                                if sep < min_sep and delta_mag <= options['deltamag']:
                                    min_sep = sep
                                    best_source = source
                        # Save target source and add to output files.
                        if best_source and best_source.obs_mag > 0.0 and abs(mag_estimate - best_source.obs_mag) <= 3 * options['deltamag']:
                            block_times.append(frame.midpoint)
                            mpc_line, psv_line = self.make_source_measurement(block.body, frame, best_source, persist=options['persist'])
                            mpc_lines.append(mpc_line)
                            psv_lines.append(psv_line)
                            block_mags.append(best_source.obs_mag)
                            block_mag_errs.append(best_source.err_obs_mag)
                            filter_list.append(frame.ALCDEF_filter_format())
                    # We append these even if we don't have a matching source or zeropoint
                    # so we can plot conditions for all frames
                    zps.append(frame.zeropoint)
                    zp_errs.append(frame.zeropoint_err)
                    frame_data.append({'ra': ra, 'dec': dec, 'mag': mag_estimate, 'bw': options['boxwidth'], 'dm': options['deltamag'], 'best_source': best_source})
                    alltimes.append(frame.midpoint)
                    fwhm.append(frame.fwhm)
                    azimuth, altitude = moon_alt_az(frame.midpoint, ra, dec, *get_sitepos(frame.sitecode)[1:])
                    zenith_distance = radians(90) - altitude
                    air_mass.append(S.sla_airmas(zenith_distance))
                    obs_site = frame.sitecode
                    catalog = frame.photometric_catalog
                    if catalog == 'GAIA-DR2':
                        outmag = 'GG'
                    elif catalog == 'UCAC4':
                        outmag = 'SR'
                    if obs_site not in mpc_site:
                        mpc_site.append(obs_site)
                if len(block_times) > 1:
                    # Emit one ALCDEF section per filter used in this block.
                    filter_set = list(set(filter_list))
                    for filt in filter_set:
                        mag_set = [m for m, f in zip(block_mags, filter_list) if f == filt]
                        time_set = [t for t, f in zip(block_times, filter_list) if f == filt]
                        error_set = [e for e, f in zip(block_mag_errs, filter_list) if f == filt]
                        alcdef_txt += self.output_alcdef(block, obs_site, time_set, mag_set, error_set, filt, outmag)
                mags += block_mags
                mag_errs += block_mag_errs
                times += block_times
                # Create gif of fits files used for LC extraction
                data_path = make_data_dir(out_path, model_to_dict(frames_all_zp[0]))
                frames_list = [os.path.join(data_path, f.filename) for f in frames_all_zp]
                if not options['nogif']:
                    movie_file = make_gif(frames_list, sort=False, init_fr=100, center=3, out_path=data_path, plot_source=True, target_data=frame_data, show_reticle=True, progress=True)
                    if "WARNING" not in movie_file:
                        # Add write permissions to movie file
                        try:
                            os.chmod(movie_file, rw_permissions)
                        except PermissionError:
                            pass
                        # Create DataProduct
                        save_dataproduct(obj=block, filepath=movie_file, filetype=DataProduct.FRAME_GIF, force=options['overwrite'])
                        output_file_list.append('{},{}'.format(movie_file, data_path.lstrip(out_path)))
                        self.stdout.write("New gif created: {}".format(movie_file))
                    else:
                        # make_gif signalled a problem; just report it.
                        self.stdout.write(movie_file)
        # Persist the accumulated ALCDEF text for this SuperBlock.
        save_dataproduct(obj=super_block, filepath=None, filetype=DataProduct.ALCDEF_TXT, filename=alcdef_filename, content=alcdef_txt, force=options['overwrite'])
    self.stdout.write("Found matches in %d of %d frames" % (len(times), total_frame_count))
    if not settings.USE_S3:
        # Write light curve data out in similar format to Make_lc.csh
        i = 0
        # NOTE(review): these files stay open (and empty) when no matches were
        # found — they are only closed inside the branch below.
        lightcurve_file = open(os.path.join(datadir, base_name + 'lightcurve_data.txt'), 'w')
        mpc_file = open(os.path.join(datadir, base_name + 'mpc_positions.txt'), 'w')
        psv_file = open(os.path.join(datadir, base_name + 'ades_positions.psv'), 'w')
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_data.txt'), datadir.lstrip(out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'mpc_positions.txt'), datadir.lstrip(out_path)))
        output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'ades_positions.psv'), datadir.lstrip(out_path)))
        # Calculate integer part of JD for first frame and use this as a
        # constant in case of wrapover to the next day
        if len(times) > 0 and len(mags) > 0:
            mjd_offset = int(datetime2mjd_utc(times[0]))
            for time in times:
                time_jd = datetime2mjd_utc(time)
                time_jd_truncated = time_jd - mjd_offset
                if i == 0:
                    # Header lines only once, before the first data row.
                    lightcurve_file.write('#Object: %s\n' % start_super_block.body.current_name())
                    lightcurve_file.write("#MJD-%.1f Mag. Mag. error\n" % mjd_offset)
                lightcurve_file.write("%7.5lf %6.3lf %5.3lf\n" % (time_jd_truncated, mags[i], mag_errs[i]))
                i += 1
            lightcurve_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'lightcurve_data.txt'), rw_permissions)
            except PermissionError:
                pass
            # Write out MPC1992 80 column file
            for mpc_line in mpc_lines:
                mpc_file.write(mpc_line + '\n')
            mpc_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'mpc_positions.txt'), rw_permissions)
            except PermissionError:
                pass
            # Write out ADES Pipe Separated Value file
            for psv_line in psv_lines:
                psv_file.write(psv_line + '\n')
            psv_file.close()
            try:
                os.chmod(os.path.join(datadir, base_name + 'ades_positions.psv'), rw_permissions)
            except PermissionError:
                pass
            # Create Default Plot Title
            if options['title'] is None:
                sites = ', '.join(mpc_site)
                try:
                    # for single dates and short site lists, put everything on single line.
                    if options['timespan'] < 1 and len(sites) <= 13:
                        plot_title = '%s from %s (%s) on %s' % (start_super_block.body.current_name(),
                                                                start_block.site.upper(),
                                                                sites,
                                                                start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = ''
                    # for lc covering multiple nights, reformat title
                    elif options['timespan'] < 1:
                        plot_title = '%s from %s to %s' % (start_block.body.current_name(),
                                                           (start_super_block.block_end - timedelta(
                                                               days=options['timespan'])).strftime("%Y-%m-%d"),
                                                           start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                    # for single night LC using many sites, put sites on 2nd line.
                    else:
                        plot_title = '%s from %s on %s' % (start_super_block.body.current_name(),
                                                           start_block.site.upper(),
                                                           start_super_block.block_end.strftime("%Y-%m-%d"))
                        subtitle = 'Sites: ' + sites
                except TypeError:
                    plot_title = 'LC for %s' % (start_super_block.body.current_name())
                    subtitle = ''
            else:
                plot_title = options['title']
                subtitle = ''
            # Make plots
            if not settings.USE_S3:
                self.plot_timeseries(times, alltimes, mags, mag_errs, zps, zp_errs, fwhm, air_mass, title=plot_title, sub_title=subtitle, datadir=datadir, filename=base_name, diameter=tel_diameter)
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve_cond.png'), datadir.lstrip(out_path)))
                output_file_list.append('{},{}'.format(os.path.join(datadir, base_name + 'lightcurve.png'), datadir.lstrip(out_path)))
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve_cond.png'), rw_permissions)
                except PermissionError:
                    pass
                try:
                    os.chmod(os.path.join(datadir, base_name + 'lightcurve.png'), rw_permissions)
                except PermissionError:
                    pass
        else:
            self.stdout.write("No sources matched.")
    # Write the manifest of everything produced this run (data_path is only
    # set once at least one block had frames).
    if data_path:
        with open(os.path.join(data_path, base_name + 'lc_file_list.txt'), 'w') as outfut_file_file:
            outfut_file_file.write('# == Files created by Lightcurve Extraction for {} on {} ==\n'.format(obj_name, sb_day))
            for output_file in output_file_list:
                outfut_file_file.write(output_file)
                outfut_file_file.write('\n')
        self.stdout.write(f"New lc file list created: {os.path.join(data_path, base_name + 'lc_file_list.txt')}")
        try:
            os.chmod(os.path.join(data_path, base_name + 'lc_file_list.txt'), rw_permissions)
        except PermissionError:
            pass
def to_dict(self): return model_to_dict(self)
def export(self, fields=None): _fields = ['mail_name', 'email'] if fields is None else fields return model_to_dict(self, fields=_fields)