def __row_to_contract(self, row):
    """Build a Contract from one spreadsheet row, or return None.

    Rows missing a labor category, contract year, or base-year rate are
    treated as non-data rows (headers, blanks) and skipped.

    NOTE(review): `row` is indexed both by column name (row['Labor Category'])
    and by position (row[2:6]), so it is presumably a hybrid row object
    (e.g. a csv/xlrd wrapper) -- confirm against the caller.
    """
    # Skip rows that lack the minimum required fields.
    if not (row['Labor Category'] and row['Contract Year'] and row['Year 1/base']):
        return None
    contract = Contract()
    contract.idv_piid = row['CONTRACT .']
    # Collapse embedded newlines so the category is a single line of text.
    contract.labor_category = row['Labor Category'].strip().replace('\n', ' ')
    contract.vendor_name = row['COMPANY NAME']
    contract.education_level = contract.get_education_code(row['Education'])
    contract.schedule = row['Schedule']
    contract.business_size = row['Bus Size']
    contract.contract_year = row['Contract Year']
    contract.sin = row['SIN NUMBER']
    contract.hourly_rate_year1 = contract.normalize_rate(str(row['Year 1/base']))
    contract.contractor_site = row['Location']
    # Start/end dates are optional; parse only when present.
    if row['Begin Date']:
        contract.contract_start = datetime.strptime(row['Begin Date'], '%m/%d/%Y').date()
    if row['End Date']:
        contract.contract_end = datetime.strptime(row['End Date'], '%m/%d/%Y').date()
    # Non-numeric experience values default to 0.
    contract.min_years_experience = int(row['MinExpAct']) if row['MinExpAct'].isdigit() else 0
    # Positional columns 2-5 hold the year-2..year-5 rates; empty cells skipped.
    for count, rate in enumerate(row[2:6]):
        if rate:
            setattr(contract, 'hourly_rate_year{}'.format(count + 2), contract.normalize_rate(str(rate)))
    self.__generate_contract_rate_years(row, contract)
    return contract
def edit(request, organization, project_id, record_id):
    """Update the start/end times of an existing TimeRecord.

    Times arrive as naive "%Y-%m-%dT%H:%M" strings and are localized to
    the editing user's configured timezone before being stored. Only the
    record's owner may edit it.
    """
    user_setting, _ = Setting.objects.get_or_create(user=request.user)
    local_tz = pytz.timezone(str(user_setting.timezone))
    # Fall back to the POSTed record id when it is not part of the URL.
    entry = get_object_or_404(TimeRecord, id=record_id or request.POST['record_id'])
    if entry.user != request.user:
        return HttpResponseForbidden()
    form = TimeRecordForm(request.POST)
    naive_start = datetime.strptime(form.data['start_time'], "%Y-%m-%dT%H:%M")
    entry.start_time = local_tz.localize(naive_start, is_dst=None)
    if form.data['end_time']:
        naive_end = datetime.strptime(form.data['end_time'], "%Y-%m-%dT%H:%M")
        entry.end_time = local_tz.localize(naive_end, is_dst=None)
    else:
        # An empty end time marks the record as still running.
        entry.end_time = None
    entry.save()
    return redirect('tracker:project/timetable', organization=organization, project_id=project_id)
def test_output_complete(self):
    """A fully-populated stop message must serialize to the reference XML."""
    message = Kv15Stopmessage(dataownercode='HTM', user=self.user)
    tz = get_default_timezone()
    window_start = make_aware(
        datetime.strptime("2013-11-16T14:09:35.161617", "%Y-%m-%dT%H:%M:%S.%f"), tz)
    window_end = make_aware(
        datetime.strptime("2013-11-17T03:00:00", "%Y-%m-%dT%H:%M:%S"), tz)
    field_values = {
        'messagecodedate': datetime.strptime("2013-11-16", "%Y-%m-%d").date(),
        'messagecodenumber': 10,
        'messagestarttime': window_start,
        'messageendtime': window_end,
        'messagecontent': "Bla!",
        'messagepriority': MESSAGEPRIORITY[1][0],
        'messagetype': MESSAGETYPE[1][0],
        'messagedurationtype': MESSAGEDURATIONTYPE[1][0],
        'reasontype': REASONTYPE[1][0],
        'subreasontype': SUBREASONTYPE[1][0],
        'reasoncontent': "Uitleg oorzaak",
        'effecttype': EFFECTTYPE[1][0],
        'subeffecttype': SUBEFFECTTYPE[1][0],
        'effectcontent': "Uitleg gevolg",
        'measuretype': MEASURETYPE[1][0],
        'submeasuretype': SUBMEASURETYPE[1][0],
        'measurecontent': "Uitleg aanpassing",
        'advicetype': ADVICETYPE[1][0],
        'subadvicetype': SUBADVICETYPE[1][0],
        'advicecontent': "Uitleg advies",
    }
    for field, value in field_values.items():
        setattr(message, field, value)
    message.save()
    Kv15MessageStop(stopmessage=message, stop=self.haltes[0]).save()
    self.assertXmlEqual(message.to_xml(),
                        self.getCompareXML('openebs/tests/output/message_complete.xml'))
def create(self, validated_data):
    """Create a Contract from the serializer's raw payload.

    The contract number is one past the current maximum (starting at 1
    on an empty table), and the status defaults to a "Новый договор"
    ContractStatus, created on first use.
    """
    inbox = Inbox.objects.get(pk=json.loads(self.initial_data['inbox'])['id'])
    type_work = TypeWork.objects.get(pk=json.loads(self.initial_data['type_work'])['id'])
    start = datetime.strptime(self.initial_data['date'], "%Y-%m-%d")
    finish = datetime.strptime(self.initial_data['end_date'], "%Y-%m-%d")
    # Max('num') is None on an empty table, which makes the + 1 raise TypeError.
    try:
        next_num = Contract.objects.all().aggregate(Max('num'))['num__max'] + 1
    except TypeError:
        next_num = 1
    default_status_name = "Новый договор"
    try:
        status = ContractStatus.objects.get(name=default_status_name)
    except ContractStatus.DoesNotExist:
        status = ContractStatus.objects.create(name=default_status_name)
    return Contract.objects.create(
        num=next_num,
        date=start.date(),
        end_date=finish.date(),
        type_work=type_work,
        cost=self.initial_data['cost'],
        external_num=self.initial_data['external_num'],
        inbox=inbox,
        customer=inbox.customer,
        status=status,
    )
def inquiry(request):
    """Return execution records filtered by business, user, script and a
    "start - end" date range, rendered as a table-body HTML fragment.

    Any failure is reported back as {"result": false, "message": <error>}.
    """
    try:
        biz_id = request.POST.get("biz_id")
        username = request.POST.get("username")
        script_id = request.POST.get("script_id")
        time_range = request.POST.get("time")
        records = Doinfo.objects.all().filter(
            businessname=int(biz_id)
        ).filter(username=username).filter(script_id=int(script_id))
        raw_start, raw_end = time_range.split("-")
        # The picker sends slash-separated dates; normalize and pin to
        # the bounds of each day.
        start_text = raw_start.strip().replace("/", "-") + " 00:00:00"
        end_text = raw_end.strip().replace("/", "-") + " 23:59:00"
        start_at = datetime.strptime(start_text, "%Y-%m-%d %H:%M:%S")
        end_at = datetime.strptime(end_text, "%Y-%m-%d %H:%M:%S")
        records = records.filter(starttime__range=(start_at, end_at))
        rows = [info.to_dict() for info in records]
        table_data = render_to_string("home_application/record_tbody.html",
                                      {"doinfos": rows})
        result, message = True, "success"
    except Exception as e:
        # Best-effort endpoint: surface the error text instead of a 500.
        table_data = []
        result, message = False, str(e)
    return JsonResponse({
        "result": result,
        "message": message,
        "data": table_data
    })
def filterOrder(formF):
    """Return FoodOrders matching the filter form's waiter and date range.

    A waiter value of 'W' means "all waiters". An order matches the date
    range when it starts on/after the begin date and either has no end
    date or ends on/before the end date.
    """
    all_orders = FoodOrder.objects.all()
    selected_waiter = formF.data['selectWaiter']
    # Waiter filter ('W' = no filtering).
    if selected_waiter == 'W':
        candidates = list(all_orders)
    else:
        candidates = [o for o in all_orders if o.waiter.username == selected_waiter]
    # Date-range filter.
    begin = datetime.strptime(formF.data['selectBegin'], '%Y-%m-%d')
    end = datetime.strptime(formF.data['selectEnd'], '%Y-%m-%d')
    return [
        o for o in candidates
        if o.startDate.date() >= begin.date()
        and (o.endDate is None or o.endDate.date() <= end.date())
    ]
def event(request):
    """Serve todos as FullCalendar-style event JSON.

    When both `start` and `end` are supplied they bound the deadline
    range (a bare date is padded with a midnight time); otherwise all
    todos are returned.
    """
    def parse_bound(raw):
        # Accept both date-only and full date-time strings.
        if 'T' not in raw:
            raw += 'T0:0:0'
        return datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S')

    if request.GET.get('start') and request.GET.get('end'):
        lower = parse_bound(request.GET.get('start'))
        upper = parse_bound(request.GET.get('end'))
        todos = Todo.objects.filter(deadline__range=[lower, upper])
    else:
        todos = Todo.objects.all()

    events = []
    for todo in todos:
        deadline = todo.deadline.strftime('%Y-%m-%dT%H:%M:%S')
        # Done items are greyed out; otherwise own todos get a distinct color.
        if todo.done:
            color, text_color = '#eee', '#bbb'
        else:
            color = '#6cf' if todo.user == request.user else '#cdf'
            text_color = '#000'
        events.append({
            'title': todo.title,
            'start': deadline,
            'end': deadline,
            'color': color,
            'textColor': text_color,
            'url': resolve_url('todo:todo-detail', todo.pk),
            'detail': todo.detail,
        })
    return HttpResponse(json.dumps(events))
def update_schedule(request, iataCode=None):
    """Refresh stored flights for an airport chosen in the update form.

    On a valid POST, fetches the airport's schedule and get_or_creates a
    Flight per entry; otherwise (GET or invalid form) re-renders the form.
    """
    form = UpdateAirportForm(request.POST)
    # Offer every known airport, ordered by IATA code.
    choices = []
    for airport in Airport.objects.all().order_by('iataCode'):
        choices.append((airport.iataCode, airport.name))
    form.airport.choices = choices
    if request.method == 'POST' and form.validate():
        flights = get_schedule(form.airport.data)
        for flight in flights:
            new_flight = {
                'origin': Airport.objects.get(pk=flight['origin']),
                'destination': Airport.objects.get(pk=flight['destination']),
                'flightNum': flight['flightNum'],
                'fromDate': datetime.strptime(flight['fromDate'], "%B %d, %Y"),
                'toDate': datetime.strptime(flight['toDate'], "%B %d, %Y"),
                # BUG FIX: the departure ('skd') and arrival ('ska') times
                # were previously parsed from each other's source field.
                'skd': datetime.strptime(flight['skd'], "%I:%M%p"),
                'ska': datetime.strptime(flight['ska'], "%I:%M%p")
            }
            Flight.objects.get_or_create(**new_flight)
        return redirect('/schedule')
    else:
        return TemplateResponse(request, 'schedule/schedule_update_schedule.html', {
            'form': form,
        })
def get_all_statistic(self, request):
    """Return order statistics, optionally bounded by "from"/"to" dates.

    POST requests must supply "from" (%d-%m-%Y); "to" is optional and,
    when absent, only the lower bound applies. Non-POST requests report
    on every order.
    """
    if request.method == "POST":
        date_from = datetime.strptime(request.data["from"], "%d-%m-%Y")
        orders = Orders.objects.filter(date_created_order__gte=date_from)
        try:
            date_to = datetime.strptime(request.data["to"], "%d-%m-%Y")
            # BUG FIX: the upper-bound filter result was previously
            # discarded -- QuerySet.filter() returns a new queryset and
            # does not modify `orders` in place.
            orders = orders.filter(date_created_order__lte=date_to)
        except MultiValueDictKeyError:
            # "to" missing: keep only the open-ended lower bound.
            pass
    else:
        orders = Orders.objects.all()
    result = []
    for order in orders:
        result.append({
            "order_id": order.id,
            "product": order.product,
            "date_order": order.date_created_order.strftime("%d-%m-%Y %H:%M:%S"),
            "status": order.status,
            "paid": order.paid,
            "amount_to_pay": amount_to_pay(float(order.product.discount),
                                           float(order.product.price))
        })
    serializer = StatisticSerializer(result, many=True)
    return Response(serializer.data)
def from_elasticsearch_date(value):
    """Parse an Elasticsearch ISO-8601 timestamp string into a datetime.

    Tries the fractional-seconds form first and falls back to whole
    seconds; a value matching neither raises ValueError.
    """
    fractional_fmt = '%Y-%m-%dT%H:%M:%S.%f'
    whole_fmt = '%Y-%m-%dT%H:%M:%S'
    try:
        parsed = datetime.strptime(value, fractional_fmt)
    except ValueError:
        parsed = datetime.strptime(value, whole_fmt)
    return parsed
def filter_match(qs, request):
    """Narrow a match queryset by optional club, season and date GET params.

    `club` matches either the home or away team; `date` is a
    "dd/mm/YYYY - dd/mm/YYYY" pair expanded to whole-day bounds. Results
    come back newest first.
    """
    qs = qs.order_by("-start_time")
    club = request.GET.get('club', None)
    if club:
        club_id = int(club)
        qs = qs.filter(Q(home_team=club_id) | Q(away_team=club_id))
    season = request.GET.get('season', None)
    if season:
        qs = qs.filter(league__season=int(season))
    date_span = request.GET.get('date', None)
    if date_span:
        parts = date_span.split(" - ")
        fmt = "%d/%m/%Y %H:%M:%S"
        lower = datetime.strptime(parts[0] + " 00:00:00", fmt)
        upper = datetime.strptime(parts[1] + " 23:59:59", fmt)
        qs = qs.filter(start_time__gte=lower).filter(start_time__lte=upper)
    return qs
def clean(self):
    """Validate an RN shift: required fields, ordering against business
    hours, and a lunch time falling inside the shift.

    All rule violations are collected and raised together as a single
    ValidationError joined with ' & '.
    """
    cleaned_data = super(RNForm, self).clean()
    team = cleaned_data.get("Team")
    shift_start = cleaned_data.get("StartTime")
    # Fetched for parity with the form fields, but not validated here.
    lunch_start = cleaned_data.get("LunchTime")
    lunch_duration = cleaned_data.get("LunchDuration")
    shift_end = cleaned_data.get("EndTime")
    # Business hours come from user-editable settings as "HH:MM" strings.
    opening = datetime.strptime(UserSettings.get("OpenTime"), "%H:%M").time()
    closing = datetime.strptime(UserSettings.get("CloseTime"), "%H:%M").time()
    if not team or not shift_start or not shift_end:
        raise forms.ValidationError('Please fill out all of the fields')
    problems = []
    if shift_start >= shift_end:
        problems.append('RNs cannot start after EndTime')
    if shift_start < opening:
        problems.append('A RN can not start before the company opens at ' + opening.strftime("%I:%M"))
    if shift_end > closing:
        problems.append('A RN must leave before the company closes at ' + closing.strftime("%I:%M"))
    if not lunch_start or (lunch_start and (lunch_start < shift_start or lunch_start > shift_end)):
        problems.append('RNs need a valid lunch start time')
    if problems:
        raise forms.ValidationError(' & '.join(problems))
    return self.cleaned_data
def duration(self):
    """Return (whole_days, leftover_minutes, leftover_hours) of the booking.

    NOTE: relies on str(self.dates_range) looking like
    '(YYYY-mm-dd HH:MM ... YYYY-mm-dd HH:MM)' with the end date starting
    at character offset 22. Minutes and hours describe only the sub-day
    remainder, each rounded up.
    """
    fmt = '%Y-%m-%d %H:%M'
    text = str(self.dates_range)
    begin = datetime.strptime(text[1:17], fmt)
    finish = datetime.strptime(text[22:38], fmt)
    delta = finish - begin
    leftover_minutes = ceil(delta.seconds / 60)
    leftover_hours = ceil((delta.seconds / 60) / 60)
    return delta.days, leftover_minutes, leftover_hours
def get_context_data(self, **kwargs):
    """Extend the PDF context with jobs registered in the GET date range."""
    fmt = "%m/%d/%Y"
    raw_start = self.request.GET.get('date_start')
    raw_end = self.request.GET.get('date_end')
    span_start = datetime.strptime(raw_start, fmt).date()
    span_end = datetime.strptime(raw_end, fmt).date()
    return super(JobsStatePdf, self).get_context_data(
        pagesize="Letter",
        job_obj=Job,
        jobs=Job.objects.filter(register_at__range=(span_start, span_end)),
        **kwargs)
def get_context_data(self, **kwargs):
    """Extend the PDF context with profiles registered in the GET date range."""
    fmt = "%m/%d/%Y"
    raw_start = self.request.GET.get('date_start')
    raw_end = self.request.GET.get('date_end')
    span_start = datetime.strptime(raw_start, fmt).date()
    span_end = datetime.strptime(raw_end, fmt).date()
    return super(EmployeesPdf, self).get_context_data(
        pagesize="Letter",
        profile_obj=Profile,
        profiles=Profile.objects.filter(register_at__range=(span_start, span_end)),
        **kwargs)
def price(self):
    """Compute the rental price for this booking's date range.

    Pricing: 0.05 per minute of the sub-day remainder, plus a flat 72
    per whole day. NOTE: relies on str(self.dates_range) looking like
    '(YYYY-mm-dd HH:MM ... YYYY-mm-dd HH:MM)' with the end date starting
    at character offset 22.
    """
    fmt = '%Y-%m-%d %H:%M'
    text = str(self.dates_range)
    begin = datetime.strptime(text[1:17], fmt)
    finish = datetime.strptime(text[22:38], fmt)
    delta = finish - begin
    minute_part = delta.seconds / 60
    whole_days = delta.days
    if whole_days == 0:
        return round(minute_part * 0.05, 2)
    return round(whole_days * 72 + minute_part * 0.05, 2)
def list(self, params):
    """Return one page of daily child reports plus the unpaged match count.

    Supported filters (AND-combined): child name substring, report id,
    creation date (expanded to near-whole-day bounds), parent telephone /
    identity number / email, and child registration number. Paging and
    ordering come from setSortLimitParameters.
    """
    sortLimitParams = self.setSortLimitParameters(params)
    filterObj = Q()
    if params.get('searchName'):
        filterObj = filterObj & Q(child__names__icontains=params.get('searchName'))
    if params.get('searchReportId'):
        # BUG FIX: previously read the non-existent 'searchCReportId' key,
        # so the id filter silently compared against None.
        filterObj = filterObj & Q(id=params.get('searchReportId'))
    if params.get('searchDateCreated'):
        # NOTE(review): the lower bound starts at 00:00:59, mirroring the
        # other list() endpoints -- confirm whether 00:00:00 was intended.
        filterObj = filterObj & Q(
            date_created__gte=datetime.strptime(params.get('searchDateCreated') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(
            date_created__lte=datetime.strptime(params.get('searchDateCreated') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchTelephone'):
        filterObj = filterObj & Q(child__parent__telephone=params.get('searchTelephone'))
    if params.get('searchIDNO'):
        filterObj = filterObj & Q(child__parent__identity_number=params.get('searchIDNO'))
    if params.get('searchEmail'):
        filterObj = filterObj & Q(child__parent__email=params.get('searchEmail'))
    if params.get('searchRegno'):
        filterObj = filterObj & Q(child__regno=params.get('searchRegno'))
    result = DailyChildReport.objects.filter(filterObj).order_by(
        sortLimitParams['dir'] + sortLimitParams['sort']
    )[sortLimitParams['start']:sortLimitParams['limit']]
    count = DailyChildReport.objects.filter(filterObj).count()
    records = []
    for item in result:
        record = {}
        record['id'] = item.id
        record['date_created'] = item.date_created.isoformat()
        record['day'] = item.day.isoformat()
        record['parents'] = [
            {"names": pt.names, "telephone": pt.telephone, "relationship": pt.relationship,
             "id": pt.id, "email": pt.email}
            for pt in item.child.parents]
        record['activities'] = [{"name": act.name, "category": act.category,
                                 "price": act.unit_price, "id": act.id}
                                for act in item.activities]
        record['components'] = [{"name": rp.name, "id": rp.id}
                                for rp in item.repas.components]
        record['repas'] = {"id": item.repas.id, "price": item.repas.unit_price}
        record['day_price'] = item.day_price
        record['accueillante'] = {"names": item.accueillante.names,
                                  "telephone": item.accueillante.telephone,
                                  "id": item.accueillante.id,
                                  "email": item.accueillante.email}
        record['gender'] = item.child.gender.encode('utf-8')
        record['names'] = item.child.names
        record['group'] = item.child.group
        records.append(record)
    return {'totalCount': count, 'records': records}
def update_plane(flight):
    """Sync a Plane row with an in-air flight record and mark it airborne."""
    plane = Plane.objects.get(identifier=flight['plane'])
    plane.speed = float(flight['speed'])
    plane.landing_airport = Airport.objects.get(name=flight['destination'])
    plane.takeoff_airport = Airport.objects.get(name=flight['origin'])
    time_format = '%Y-%m-%d %H:%M'
    plane.takeoff_time = datetime.strptime(flight['takeoff_time'], time_format)
    plane.landing_time = datetime.strptime(flight['landing_time'], time_format)
    plane.current_state = "In Air"
    # An airborne plane occupies neither a runway nor a gate.
    plane.runway = None
    plane.gate = None
    plane.save()
def list(self, params):
    """Return one page of EmailSchedule rows plus the unpaged match count.

    Filters (all optional, AND-combined): recipient / sender / subject /
    body substrings; sent and delivery dates expanded to near-whole-day
    bounds; relay flag; explicit id list; owning event id.
    """
    sortLimitParams = self.setSortLimitParameters(params)
    filterObj = Q()
    if params.get('searchToEmail'):
        filterObj = filterObj & Q(to_email__icontains = params.get('searchToEmail'));
    if params.get('searchDateSent'):
        # NOTE(review): the lower bound is 00:00:59, not midnight, which
        # skips the first 59 seconds of the day -- confirm whether intended.
        filterObj = filterObj & Q(sent_at__gte=datetime.strptime(params.get('searchDateSent') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(sent_at__lte=datetime.strptime(params.get('searchDateSent') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchSender'):
        filterObj = filterObj & Q(from_email__icontains = params.get('searchSender'));
    if params.get('searchSubject'):
        filterObj = filterObj & Q(subject__icontains = params.get('searchSubject'));
    if params.get('searchMessage'):
        filterObj = filterObj & Q(message_body__icontains = params.get('searchMessage'));
    if params.get('searchDateDelivered'):
        filterObj = filterObj & Q(delivery_date__gte=datetime.strptime(params.get('searchDateDelivered') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(delivery_date__lte=datetime.strptime(params.get('searchDateDelivered') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchRelay'):
        filterObj = filterObj & Q(scheduled_for_relay=1 if 'true' == params.get('searchRelay') else 0);
    if params.get('searchIds'):
        # SECURITY: eval() of a request parameter executes arbitrary code on
        # untrusted input -- should be ast.literal_eval / json.loads.
        filterObj = filterObj & Q(id__in=eval(params.get('searchIds')))
    if params.get('searchEventId'):
        filterObj = filterObj & Q(event__id=params.get('searchEventId'))
    result = EmailSchedule.objects.filter(filterObj).order_by(sortLimitParams['dir'] + sortLimitParams['sort'])[sortLimitParams['start']: sortLimitParams['limit']]
    count = EmailSchedule.objects.filter(filterObj).count()
    records = []
    for item in result:
        record = {}
        record['id'] = item.id
        record['to_email'] = item.to_email
        record['scheduled_for_relay'] = item.scheduled_for_relay
        # Timestamps may be unset until the mail is actually sent/delivered.
        if item.sent_at:
            record['sent_at'] = item.sent_at.isoformat()
        record['from_email'] = item.from_email
        record['subject'] = item.subject
        record['message_body'] = item.message_body
        if item.delivery_date:
            record['delivery_date'] = item.delivery_date.isoformat()
        record['date_created'] = item.date_created.isoformat()
        if item.event_id:
            # Resolve the owning event's display name for the grid.
            record['event'] = Event.objects.get(pk=item.event_id).name
        records.append(record)
    return {'totalCount': count, 'records': records}
def schedule_report(request):
    """Render an ingredient/formula report for goals scheduled on one line.

    Query params: l = manufacturing-line shortname (required), s / e =
    optional start / end dates (%Y-%m-%d). Any validation failure
    redirects to the "error" page with a message.
    """
    line_shortname = request.GET.get("l", "")
    start = request.GET.get("s", "")
    end = request.GET.get("e", "")
    logger.info("Line: %s, start: %s, end: %s", line_shortname, start, end)
    qs = ManufacturingLine.objects.filter(shortname__iexact=line_shortname)
    if not qs.exists():
        messages.error(request, f"Invalid manufacturing line name: '{line_shortname}'")
        return redirect("error")
    query = Q(line=qs[0])
    try:
        if start:
            start = timezone.make_aware(datetime.strptime(start, "%Y-%m-%d"))
            query &= Q(start_time__gte=start)
        if end:
            end = timezone.make_aware(datetime.strptime(end, "%Y-%m-%d"))
            # Schedules with no end time yet are still considered in range.
            query &= Q(end_time__lte=end) | Q(end_time__isnull=True)
    except ValueError as e:
        messages.error(request, f"Invalid date: {str(e)}")
        return redirect("error")
    schedules = GoalSchedule.objects.filter(query)
    if schedules.count() == 0:
        messages.error(
            request,
            "Unable to generate report: no goal was scheduled on "
            f"'{line_shortname}' between the specified timespan.",
        )
        return redirect("error")
    # Collect the formulas and goals behind the scheduled items.
    formula_ids = set(schedules.values_list("goal_item__sku__formula", flat=True))
    ingredients = FormulaIngredient.objects.filter(formula_id__in=formula_ids)
    goal_ids = set(schedules.values_list("goal_item__goal", flat=True))
    goal_objs = Goal.objects.filter(pk__in=goal_ids)
    # Accumulate per-ingredient totals across all goals, keyed/sorted by pk.
    result = SortedDefaultDict(lambda: Decimal(0.0), key=operator.attrgetter("pk"))
    for goal in goal_objs:
        _calculate_report(goal, result=result)
    return render(
        request,
        template_name="meals/goal/schedule_report.html",
        context={
            "time": timezone.now(),
            "line": qs[0],
            "start": start,
            "end": end,
            "activities": schedules,
            "formulas": ingredients,
            "ingredients": result.items(),
            "show_formulas": True,
            "show_ingredients": True,
        },
    )
def list(self, params):
    """Return one page of CrecheParent rows plus the unpaged match count.

    Filters (AND-combined): name substring, parent id, creation date
    (expanded to near-whole-day bounds), telephone, identity document
    and email.
    """
    sortLimitParams = self.setSortLimitParameters(params)
    filterObj = Q()
    if params.get('searchName'):
        filterObj = filterObj & Q(
            names__icontains=params.get('searchName'))
    if params.get('searchParentId'):
        # BUG FIX: previously read the non-existent 'searchCParentId' key,
        # so the id filter always compared against None.
        filterObj = filterObj & Q(id=params.get('searchParentId'))
    if params.get('searchDateCreated'):
        # NOTE(review): lower bound is 00:00:59, mirroring the other list()
        # endpoints -- confirm whether 00:00:00 was intended.
        filterObj = filterObj & Q(date_created__gte=datetime.strptime(
            params.get('searchDateCreated') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(date_created__lte=datetime.strptime(
            params.get('searchDateCreated') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchTelephone'):
        filterObj = filterObj & Q(telephone=params.get('searchTelephone'))
    if params.get('searchIDNO'):
        filterObj = filterObj & Q(
            identity_document=params.get('searchIDNO'))
    if params.get('searchEmail'):
        filterObj = filterObj & Q(email=params.get('searchEmail'))
    result = CrecheParent.objects.filter(filterObj).order_by(
        sortLimitParams['dir'] + sortLimitParams['sort']
    )[sortLimitParams['start']:sortLimitParams['limit']]
    count = CrecheParent.objects.filter(filterObj).count()
    records = []
    for item in result:
        record = {}
        record['id'] = item.id
        record['telephone'] = item.telephone.encode('utf-8')
        record['id_number'] = item.identity_document.encode('utf-8')
        record['date_created'] = item.date_created.isoformat()
        record['children'] = [{
            "names": ch.names,
            "regno": ch.regno,
            "id": ch.id
        } for ch in item.children.all()]
        record['address'] = item.full_address.encode('utf-8')
        record['email'] = item.email.encode('utf-8')
        record['names'] = item.names
        record['relationship'] = item.relationship.encode('utf-8')
        records.append(record)
    return {'totalCount': count, 'records': records}
def test_output_delete(self):
    """A message with two stops must serialize to the delete reference XML."""
    message = Kv15Stopmessage(dataownercode='HTM', user=self.user)
    message.messagecodedate = datetime.strptime("2013-11-16", "%Y-%m-%d").date()
    message.messagecodenumber = 11
    tz = get_default_timezone()
    message.messagestarttime = make_aware(
        datetime.strptime("2013-11-16T14:09:35.161617", "%Y-%m-%dT%H:%M:%S.%f"), tz)
    message.messageendtime = make_aware(
        datetime.strptime("2013-11-17T03:00:00", "%Y-%m-%dT%H:%M:%S"), tz)
    message.messagecontent = "Bla!"
    message.save()
    # Attach the first two test stops to the message.
    for halte in self.haltes[:2]:
        Kv15MessageStop(stopmessage=message, stop=halte).save()
    self.assertXmlEqual(message.to_xml_delete(),
                        self.getCompareXML('openebs/tests/output/delete.xml'))
def list(self, params):
    """Return one page of Bill rows plus the unpaged match count.

    Filters (AND-combined): bill number substring, bill id, generation
    date (expanded to near-whole-day bounds), billing month/year, and
    owning child / parent ids.
    """
    sortLimitParams = self.setSortLimitParameters(params)
    filterObj = Q()
    if params.get('searchRef'):
        filterObj = filterObj & Q(
            bill_no__icontains=params.get('searchRef'))
    if params.get('searchBillId'):
        filterObj = filterObj & Q(id=params.get('searchBillId'))
    if params.get('searchDateGenerated'):
        # NOTE(review): lower bound is 00:00:59, mirroring the other list()
        # endpoints -- confirm whether 00:00:00 was intended.
        filterObj = filterObj & Q(date_time__gte=datetime.strptime(
            params.get('searchDateGenerated') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(date_time__lte=datetime.strptime(
            params.get('searchDateGenerated') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchMonth'):
        filterObj = filterObj & Q(month=params.get('searchMonth'))
    if params.get('searchYear'):
        filterObj = filterObj & Q(year=params.get('searchYear'))
    if params.get('searchChildId'):
        # BUG FIX: previously read the misspelled 'searchChildid' key, so
        # the child filter always compared against None.
        filterObj = filterObj & Q(child__id=params.get('searchChildId'))
    if params.get('searchParentId'):
        filterObj = filterObj & Q(
            child__parent__id=params.get('searchParentId'))
    result = Bill.objects.filter(filterObj).order_by(
        sortLimitParams['dir'] + sortLimitParams['sort']
    )[sortLimitParams['start']:sortLimitParams['limit']]
    count = Bill.objects.filter(filterObj).count()
    records = []
    for item in result:
        record = {}
        record['id'] = item.id
        record['bill_no'] = item.bill_no.encode('utf-8')
        # NOTE(review): the filter above targets `date_time` but the value
        # exposed here comes from `date_generated` -- confirm field names.
        record['date_time'] = item.date_generated.isoformat()
        # (Removed a dead `record['year'] = item.name.encode('utf-8')`
        # assignment that was immediately overwritten by item.year below.)
        record['child_id'] = item.child.id
        record['child_parent_id'] = item.child.parent.id
        record['month'] = item.month
        record['year'] = item.year
        record['parent_name'] = item.child.parent.names
        record['child_name'] = item.child.names
        records.append(record)
    return {'totalCount': count, 'records': records}
def list(self, params):
    """Return one page of Event rows plus the unpaged match count.

    Filters (AND-combined): name substring, event-type id, generation
    date (expanded to near-whole-day bounds), processed flag, entity
    reference id, explicit id list. Each record also carries the number
    of emails scheduled for the event.
    """
    sortLimitParams = self.setSortLimitParameters(params)
    filterObj = Q()
    if params.get('searchName'):
        filterObj = filterObj & Q(name__icontains=params.get('searchName'))
    if params.get('searchEventTypeId'):
        filterObj = filterObj & Q(
            event_type__id=params.get('searchEventTypeId'))
    if params.get('searchDateGenerated'):
        # NOTE(review): lower bound is 00:00:59, not midnight, which skips
        # the first 59 seconds of the day -- confirm whether intended.
        filterObj = filterObj & Q(date_generated__gte=datetime.strptime(
            params.get('searchDateGenerated') + ' 00:00:59', '%Y-%m-%d %H:%M:%S'))
        filterObj = filterObj & Q(date_generated__lte=datetime.strptime(
            params.get('searchDateGenerated') + ' 23:59:59', '%Y-%m-%d %H:%M:%S'))
    if params.get('searchProcessed'):
        filterObj = filterObj & Q(
            processed=1 if 'true' == params.get('searchProcessed') else 0)
    if params.get('searchRefId'):
        filterObj = filterObj & Q(
            entity_reference_id=params.get('searchRefId'))
    if params.get('searchIds'):
        # SECURITY: eval() of a request parameter executes arbitrary code on
        # untrusted input -- should be ast.literal_eval / json.loads.
        filterObj = filterObj & Q(id__in=eval(params.get('searchIds')))
    result = Event.objects.filter(filterObj).order_by(
        sortLimitParams['dir'] + sortLimitParams['sort']
    )[sortLimitParams['start']:sortLimitParams['limit']]
    count = Event.objects.filter(filterObj).count()
    records = []
    for item in result:
        record = {}
        record['id'] = item.id
        record['event_type'] = item.event_type.name.encode('utf-8')
        record['date_generated'] = item.date_generated.isoformat()
        record['name'] = item.name.encode('utf-8')
        record['entity_reference_id'] = item.entity_reference_id
        record['processed'] = item.processed
        #get the recipients for this event
        record['recipient_count'] = EmailSchedule.objects.filter(
            event__id=item.id).count()
        records.append(record)
    return {'totalCount': count, 'records': records}
def get_data(self, exclude_urls=()):
    """Fetch vacancies from the source feed and keep keyword matches.

    Entries whose URL is in `exclude_urls`, or whose description and
    title both lack the configured keywords, are dropped.
    """
    response = requests.get(self.url, headers=self.request_headers)
    parsed = anymarkup.parse(response.text)
    keywords = self.configuration.keywords
    collected = []
    for vacancy in parsed['source']['vacancies']['vacancy']:
        if vacancy['url'] in exclude_urls:
            continue
        matches = (self.words_in_string(keywords, vacancy['description'])
                   or self.words_in_string(keywords, vacancy['job-name']))
        if not matches:
            continue
        collected.append({
            'url': vacancy['url'],
            # Only the first 19 characters of the timestamp are parsed --
            # presumably anything after that is a zone/fraction suffix.
            'source_datetime': datetime.strptime(vacancy['creation-date'][:19], self.time_format),
            'text': vacancy['description'],
            'title': vacancy['job-name'],
        })
    return collected
def get_rest_gate(request):
    """REST hook for gate events.

    A POST carrying 'arrive_at_time' books the plane onto a gate (a 240
    status flags a double-booking); without it, the plane is marked as
    arrived and its runway is released.
    """
    status = 200
    if request.method == "POST":
        payload = json.loads(request.body)
        plane = Plane.objects.get(identifier=payload['plane'])
        if 'arrive_at_time' in payload:
            plane.current_state = "Docking"
            plane.gate = Gate.objects.get(identifier=payload['gate'])
            plane.arrival_time = datetime.strptime(payload['arrive_at_time'], '%Y-%m-%d %H:%M')
            plane.save()
            clashing = Plane.objects.filter(gate=plane.gate, arrival_time=plane.arrival_time)
            if clashing.count() > 1:
                # Two planes on one gate at one time: flag every participant.
                status = 240
                for other in clashing:
                    post_response_headers(other.identifier, error_messages['gate'])
        else:
            plane.current_state = "Arrived"
            plane.runway = None  # free runway
            plane.arrival_time = None
            plane.save()
    return render(request, 'REST/index.html', {'errors': errors_cache}, status=status)
def get_rest_runway(request):
    """REST hook for runway events.

    A POST carrying 'arrive_at_time' queues the plane for a runway (a 240
    status flags a double-booking); without it, the plane starts taxiing,
    its gate is released and its takeoff bookkeeping is cleared.
    """
    status = 200
    if request.method == "POST":
        payload = json.loads(request.body)
        plane = Plane.objects.get(identifier=payload['plane'])
        if 'arrive_at_time' in payload:
            plane.current_state = "Needs Runway"
            plane.runway = Runway.objects.get(identifier=payload['runway'])
            plane.arrival_time = datetime.strptime(payload['arrive_at_time'], '%Y-%m-%d %H:%M')
            plane.save()
            clashing = Plane.objects.filter(runway=plane.runway, arrival_time=plane.arrival_time)
            if clashing.count() > 1:
                # Two planes on one runway at one time: flag every participant.
                status = 240
                for other in clashing:
                    post_response_headers(other.identifier, error_messages['runway'])
        else:
            plane.gate = None  # free gate
            plane.current_state = "Taxi"
            plane.takeoff_airport = None
            plane.takeoff_time = None
            plane.landing_time = None
            plane.save()
    return render(request, 'REST/index.html', {'errors': errors_cache}, status=status)
def index(request):
    """Rango landing page: top categories/pages plus a session visit counter.

    The visit count only increments when the last recorded visit is at
    least a day old; the timestamp cookie is refreshed whenever the count
    changes or no timestamp exists yet.
    """
    context_dict = {
        'categories': Category.objects.order_by('-likes')[:5],
        'pages': Page.objects.order_by('-views')[:5],
    }
    visits = request.session.get('visits')
    if not visits:
        visits = 1
    refresh_stamp = False
    last_visit = request.session.get('last_visit')
    if last_visit:
        # Drop the ".%f" microsecond suffix produced by str(datetime.now()).
        previous = datetime.strptime(last_visit[:-7], "%Y-%m-%d %H:%M:%S")
        if (datetime.now() - previous).days > 0:
            visits = visits + 1
            refresh_stamp = True
    else:
        # No timestamp yet for this session: start the clock now.
        refresh_stamp = True
    if refresh_stamp:
        request.session['last_visit'] = str(datetime.now())
        request.session['visits'] = visits
    context_dict['visits'] = visits
    return render(request, 'rango/index.html', context_dict)
def setUp(self):
    """Create the admin/test users and a fixed reference date."""
    self.user_admin, self.password_admin = create_user_admin()
    self.user_test, self.password_test = create_user_test()
    # 24 May 2010, parsed from the compact ddmmYYYY form.
    self.date = datetime.strptime('24052010', "%d%m%Y").date()
def get_data(self):
    """Collect RSS entries, filtered by stop words and keywords.

    Skip rule (note the operator precedence -- `and` binds tighter than
    `or`): an entry is skipped when its text contains a stop word, OR
    when BOTH its text and its title lack the configured keywords.
    """
    rss = self.get_rss()
    data_list = []
    for entry in rss['entries']:
        url = entry.get('link', '')
        text = entry.get('summary', '')
        title = entry.get('title', '')
        # Prefer the feed's pre-parsed struct_time; fall back to the raw
        # published string.
        pub_date = entry.get('published_parsed') or entry.get(
            'published', None)
        if self.words_in_string(self.configuration.stop_words, text) or \
                not self.words_in_string(self.configuration.keywords, text) and \
                not self.words_in_string(self.configuration.keywords, title):
            continue
        try:
            source_datetime = datetime.fromtimestamp(time.mktime(pub_date))
        except TypeError:
            # pub_date was a string (or None): parse with the configured
            # format. NOTE(review): this branch yields a date, while the
            # happy path yields a datetime -- confirm consumers accept both.
            source_datetime = datetime.strptime(
                pub_date, self.configuration.time_format).date()
        data_list.append({
            'url': url,
            'source_datetime': source_datetime,
            'text': self.normalize_text(text),
            'title': title
        })
    return data_list
def test_decoder_creates_contract_object(self):
    """Decoded contracts carry education level, end dates and categories."""
    contracts = CSVContractDecoder(self.csv_file.name).decode()
    first, second = contracts[0], contracts[1]
    self.assertEqual(first.education_level, 'HS')
    expected_end = datetime.strptime('1/8/2019', '%m/%d/%Y').date()
    self.assertEqual(first.contract_end, expected_end)
    # A row without an end date yields None.
    self.assertIsNone(second.contract_end)
    self.assertEqual(second.labor_category, 'Analyst/Consultant I')
def verify_proxy(request):
    """Validate a POSTed VOMS proxy via rimrock and store it for the user.

    Returns True when `voms-proxy-info` succeeds: the proxy's remaining
    lifetime is parsed out of the command output, any previous proxy for
    the user is replaced, and the new one is saved. Returns False on an
    HTTP error from the rimrock service.

    (Python 2 module -- note the print statements.)
    """
    proxy_form = UserProxyForm(request.POST)
    newproxy = proxy_form.save(commit=False)
    command = 'voms-proxy-info'
    r = send_rimrock_command(command, newproxy.proxy)
    try:
        r.raise_for_status()
        newproxy.user = request.user
        # NOTE(review): the pattern matches a literal backslash-n ('\\n'),
        # not a newline -- presumably the service returns escaped output;
        # confirm against a live response.
        pattern = re.compile(r'timeleft\s+?:\s+?(.*?)\\n')
        print r.content
        timeleft = pattern.search(r.text).group(1)
        # NOTE(review): %H caps at 23 -- a proxy valid for 24h or more makes
        # this strptime raise ValueError.
        d = datetime.strptime(timeleft, "%H:%M:%S")
        newproxy.valid_until = timezone.now() + timedelta(hours=d.hour, minutes=d.minute)
        # Keep at most one stored proxy per user.
        try:
            oldproxy = UserProxy.objects.get(user=request.user)
            oldproxy.delete()
            print "Proxy deleted"
        except UserProxy.DoesNotExist:
            print "No previous proxy"
        newproxy.save()
        return True
    except requests.exceptions.HTTPError:
        return False
def from_native(self, data):
    """Parse a RapidPro timestamp, tolerating a trailing '.<fraction>'.

    The fractional-seconds part (if any) is stripped before parsing with
    RAPID_PRO_TIME_FORMAT; any failure falls back to the parent
    DateTimeField parser.
    """
    try:
        # Everything before the first '.' is the whole-seconds timestamp.
        time_sans_microseconds = data.split('.')[0]
        return datetime.strptime(time_sans_microseconds, RAPID_PRO_TIME_FORMAT)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt; narrowed to Exception.
        return super(fields.DateTimeField, self).from_native(data)
def boetes(request, page=1):
    """Render the paginated fines ("boetes") page, honouring GET filters.

    Supported filters: housemate (id), fine_amount (minimum count) and
    final_date (dd-mm-yyyy upper bound on creation time).
    """
    # Apply filter if any
    filters = dict()
    filters_str = ['housemate', 'fine_amount', 'final_date']
    for filt in filters_str:
        filters[filt] = request.GET.get(filt, 0)

    # get list of active users sorted by move-in date
    active_users = User.objects.filter(is_active=True)
    active_housemates = Housemate.objects.filter(user__id__in=active_users).order_by('movein_date')
    select_housemates = active_housemates.exclude(display_name='Admin').exclude(display_name='Huis')

    boetes = Boete.objects
    try:
        if int(filters['housemate']):
            active_housemates = select_housemates.filter(id=int(filters['housemate']))
        if int(filters['housemate']):
            boetes = boetes.filter(boete_user_id=int(filters['housemate']))
        if filters['fine_amount']:
            boetes = boetes.filter(boete_count__gte=int(filters['fine_amount']))
        if filters['final_date']:
            date = datetime.strptime(filters['final_date'], "%d-%m-%Y").date()
            boetes = boetes.filter(created_time__lte=date)
    except Exception as e:
        # Best-effort filtering: malformed filter values are logged and ignored.
        print(e)
        pass

    # get paginated list of fines
    boetes_list = Paginator(boetes.order_by('-created_time'), 10)

    # get lists of users with open fines
    log_boetes = active_housemates.filter(Q(boetes_open__gt=0), user__id__in=active_users).order_by('-boetes_open')
    num_boetes = list(log_boetes.filter(boetes_open__gt=0).aggregate(Sum('boetes_open')).values())[0]
    turfed_boetes_rwijn = list(log_boetes.filter(boetes_geturfd_rwijn__gt=0)
                               .aggregate(Sum('boetes_geturfd_rwijn')).values())[0]
    turfed_boetes_wwijn = list(log_boetes.filter(boetes_geturfd_wwijn__gt=0)
                               .aggregate(Sum('boetes_geturfd_wwijn')).values())[0]

    # ensure page number is valid
    try:
        table_list = boetes_list.page(page)
    except EmptyPage:
        table_list = boetes_list.page(1)
        page = 1

    # build context object
    context = {
        'breadcrumbs': request.get_full_path()[1:-1].split('/'),
        'housemates': select_housemates,
        'log_boetes': log_boetes,
        'num_boetes': num_boetes,
        'turfed_boetes_rwijn': turfed_boetes_rwijn,
        'turfed_boetes_wwijn': turfed_boetes_wwijn,
        'table_list': table_list,
        'pages': str(boetes_list.num_pages),
        'page_num': page,
        'filters': filters
    }

    return render(request, 'bierlijst/boete/boetes.html', context)
def flights_filtered_by_date(request):
    """Return flights whose starting_time falls on the date given in the
    request's ``date`` GET parameter (YYYY-MM-DD)."""
    date_format = '%Y-%m-%d'
    requested_day = datetime.strptime(request.GET["date"], date_format)
    day_after = requested_day + timedelta(days=1)
    window = [requested_day.strftime(date_format), day_after.strftime(date_format)]
    return Flight.objects.filter(starting_time__range=window)
class Service(Base):
    """Dates that a route is active.

    One row per GTFS service: which weekdays the service runs plus the
    overall start/end date range.
    """
    feed = models.ForeignKey('Feed')
    service_id = models.CharField(
        max_length=255, db_index=True,
        help_text="Unique identifier for service dates.")
    monday = models.BooleanField(
        default=True, help_text="Is the route active on Monday?")
    tuesday = models.BooleanField(
        default=True, help_text="Is the route active on Tuesday?")
    wednesday = models.BooleanField(
        default=True, help_text="Is the route active on Wednesday?")
    thursday = models.BooleanField(
        default=True, help_text="Is the route active on Thursday?")
    friday = models.BooleanField(
        default=True, help_text="Is the route active on Friday?")
    saturday = models.BooleanField(
        default=True, help_text="Is the route active on Saturday?")
    sunday = models.BooleanField(
        default=True, help_text="Is the route active on Sunday?")
    # NOTE(review): both defaults are datetime objects evaluated once at
    # import time, used as open-ended sentinel bounds for a DateField —
    # confirm Django coerces them as intended.
    start_date = models.DateField(
        default=datetime.strptime('18991231', '%Y%m%d'))
    end_date = models.DateField(
        default=datetime.strptime('21000101', '%Y%m%d'))

    def __unicode__(self):
        return u"%d-%s" % (self.feed.id, self.service_id)

    class Meta:
        db_table = 'multigtfs_service'
        app_label = 'multigtfs'

    # For Base import/export
    _column_map = (('service_id', 'service_id'), ('monday', 'monday'),
                   ('tuesday', 'tuesday'), ('wednesday', 'wednesday'),
                   ('thursday', 'thursday'), ('friday', 'friday'),
                   ('saturday', 'saturday'), ('sunday', 'sunday'),
                   ('start_date', 'start_date'), ('end_date', 'end_date'))
    _sort_order = ('start_date', 'end_date')
    # support commonly out-of-date GTFS feed data
    # {'old csv name': 'django field name'}
    _legacy_format = {
        'service_name': 'service_id',
    }
def show_log(request, page=1):
    """Render the paginated beer-turf log, honouring GET filters.

    Supported filters: housemate (id), beer_amount (minimum count),
    final_date (dd-mm-yyyy upper bound), and the aggregate_days /
    aggregate_hours grouping toggles.
    """
    active_users = User.objects.filter(is_active=True)
    active_housemates = Housemate.objects.filter(user__id__in=active_users).order_by('movein_date')
    select_housemates = active_housemates.exclude(display_name='Admin')

    # Apply filter if any
    filters = dict()
    filters_str = ['housemate', 'beer_amount', 'aggregate_days', 'aggregate_hours', 'final_date']
    for filt in filters_str:
        filters[filt] = request.GET.get(filt, 0)

    beer_logs = Turf.objects.order_by('-turf_time')
    try:
        if int(filters['housemate']):
            beer_logs = beer_logs.filter(turf_user_id=int(filters['housemate']))
        if filters['beer_amount']:
            beer_logs = beer_logs.filter(turf_count__gte=int(filters['beer_amount']))
        if filters['final_date']:
            date = datetime.strptime(filters['final_date'], "%d-%m-%Y").date()
            beer_logs = beer_logs.filter(turf_time__lte=date)
        if filters['aggregate_days'] == "on":
            # Collapse entries to one row per day / type / recipient.
            beer_logs = beer_logs.extra(select={'turf_time': 'date( turf_time )'}) \
                .values('turf_time', 'turf_type', 'turf_to') \
                .annotate(turf_count=Sum('turf_count')) \
                .order_by('-turf_time')
        elif filters['aggregate_hours'] == "on":
            # https://stackoverflow.com/questions/30465013/django-group-by-hour
            beer_logs = beer_logs \
                .extra(select={'turf_time': 'date( turf_time )'}) \
                .extra({"hour": "date_part(\'hour\', \"turf_time\")"}) \
                .values('turf_time', 'hour', 'turf_type', 'turf_to') \
                .annotate(turf_count=Sum('turf_count')) \
                .order_by('-turf_time', '-hour')
    except Exception as e:
        # Best-effort filtering: malformed filter values are logged and ignored.
        print(e)
        pass

    # get list of turfed items
    turf_list = Paginator(beer_logs, 25)

    # ensure page number is valid
    try:
        table_list = turf_list.page(page)
    except EmptyPage:
        table_list = turf_list.page(1)
        page = 1

    # build context object
    context = {
        'breadcrumbs': request.get_full_path()[1:-1].split('/'),
        'housemates': select_housemates,
        'filters': filters,
        'table_list': table_list,
        'pages': str(turf_list.num_pages),
        'page_num': page
    }

    return render(request, 'bierlijst/log/log.html', context)
def setUp(self):
    """Create the admin and regular users plus the fixtures the tests use."""
    self.password = '******' * 10
    self.date = datetime.strptime('24052010', "%d%m%Y").date()
    self.user_admin = self.create_user(
        '*****@*****.**', password=settings.DJANGO_PYCOIN_ADMIN_PASS)
    self.user1 = self.create_user('*****@*****.**')
    self.digitalwork_user1 = self.create_digitalwork(self.user1)
    self.thumbnail_user1 = self.create_thumbnail(self.user1)
def file_handler(xml_file):
    """Parse an operator billing XML file into saved DataRecord rows.

    Runs inside a single manual transaction; returns the list of created
    DataRecord instances.  USSD entries are skipped.
    """
    # Disable autocommit for the duration; restored at the end.
    # NOTE(review): an exception mid-way leaves autocommit off — confirm
    # callers handle/rollback.
    auto_commit_status = transaction.get_autocommit()
    transaction.set_autocommit(False)

    value = xml_file.read()
    tree = etree.fromstring(value)

    # Operator lookup table keyed by operator code.
    operators = {}
    for item in Operator.objects.all():
        operators[item.code] = item

    result = []
    for item in tree.xpath('//td/c'):
        # Helper bound to the current <c> element.
        get_value = lambda xpath: item.xpath(xpath)[0].text
        data_record = DataRecord()
        data_record.time = datetime.strptime(get_value('./d'), FORMAT_TIME)
        data_type = get_value('./s')
        data_type = data_type if data_type is not None else ''
        duration = get_value('./du')
        if 'INTERNET' in get_value('./n'):
            data_record.data_type = INTERNET
            data_record.duration = duration[:-2]  # Remove 'Kb' from string
        else:
            # Default operator 'U' unless a known code appears in the name.
            data_record.operator = operators.get('U')
            source = get_value('./n')
            for operator_code in operators.keys():
                if operator_code in source:
                    data_record.operator = operators.get(operator_code)
                    break
            data_record.number = re.findall(NUMBER_PATTERN, get_value('./n'))[0]
            data_record.direction = INCOMING if '<--' in get_value('./n') else OUTGOING
            if 'sms' in data_type:
                data_record.data_type = SMS
                data_record.duration = duration
            elif 'ussd' in data_type:
                # USSD entries are not stored at all.
                continue
            else:
                data_record.data_type = CALL
                # Duration string parsed as H:M:S; stored as total seconds.
                # NOTE(review): hours are ignored here — confirm durations
                # never exceed 59 minutes.
                call_duration = datetime.strptime(duration, DURATION_TIME)
                data_record.duration = call_duration.minute * 60 + call_duration.second
        data_record.save()
        result.append(data_record)

    transaction.commit()
    transaction.set_autocommit(auto_commit_status)
    return result
def test_output_update(self):
    """Updating a message emits a delete for the old dossier plus the new XML."""
    tz = get_default_timezone()
    m1 = Kv15Stopmessage(dataownercode='HTM', user=self.user)
    m1.messagecodedate = datetime.strptime("2013-11-16", "%Y-%m-%d").date()
    m1.messagecodenumber = 5012
    m1.messagestarttime = make_aware(
        datetime.strptime("2013-11-16T14:09:35.161617", "%Y-%m-%dT%H:%M:%S.%f"), tz)
    m1.messageendtime = make_aware(
        datetime.strptime("2013-11-17T03:00:00", "%Y-%m-%dT%H:%M:%S"), tz)
    m1.messagecontent = "Bla!"
    m1.save()
    for halte in (self.haltes[0], self.haltes[1]):
        Kv15MessageStop(stopmessage=m1, stop=halte).save()
    initial = m1.messagecodenumber
    m1.messagecontent = "Dit bericht is geupdate!"
    m1.save()
    # This gets done by our form/model when we use the normal view, need to
    # fake this here and repeat it
    Kv15MessageStop(stopmessage=m1, stop=self.haltes[0]).save()
    xml = "<DOSSIER>%s</DOSSIER>" % (m1.to_xml_delete(initial) + m1.to_xml())
    self.assertXmlEqual(xml, self.getCompareXML('openebs/tests/output/update.xml'))
def decode_request(self, request, time):
    """Decode per-switch stats from *request* and persist them with *time*.

    Entries for unknown switches (no Switch row with that dpid) are dropped.
    """
    decoded = []
    for dpid, stats in request.items():
        LOG.info("Stats recieved from : " + dpid)
        # Ignore stats from switches that are not in the database.
        if not Switch.objects.filter(pk=dpid).exists():
            continue
        for stat in stats:
            entry = {"dpid": dpid}
            # Try flow decoding first; fall back to data decoding.
            if self.decode_flow(entry, stat) is None:
                self.decode_data(entry, stat)
            decoded.append(entry)
    self.save_stat(decoded, datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%f"))
def alocarBolsa(request, self):
    """Assign a scholarship ("bolsa") to a student from a POSTed admin form.

    Validates dates (dd/mm/yyyy), that the scholarship is unassigned and
    that the student holds none, then saves and redirects back to the
    appropriate admin changelist (masters vs doctorate, from ``nivel``).
    NOTE(review): unusual signature — ``self`` comes second; confirm how
    this is wired into the admin.
    """
    if request.method == 'POST':
        idAluno = request.POST['idAluno']
        idBolsa = request.POST['idBolsa']
        nivel = request.POST['nivel']
        if nivel == 'M':
            urlReverse = 'admin:ppgi_alunomestradoesperabolsa_changelist'
        else:
            urlReverse = 'admin:ppgi_alunodoutoradoesperabolsa_changelist'
        try:
            dataInicioBD = datetime.strptime(request.POST['dataInicio'], '%d/%m/%Y')
            dataFimBD = datetime.strptime(request.POST['dataFim'], '%d/%m/%Y')
            bolsa = Bolsa.objects.get(id=idBolsa)
            # NOTE(review): the dataInicioBD==''/dataFimBD=='' comparisons can
            # never be true (strptime returns datetimes or raises) — dead checks.
            if idAluno == '' or idBolsa == '' or dataInicioBD == '' or dataFimBD == '':
                messages.error(request, 'Preencha os campos corretamente.')
            elif bolsa.aluno != None:
                messages.error(request, 'Esta bolsa já foi alocada.')
            elif Bolsa.objects.filter(aluno_id=idAluno).exists():
                messages.error(request, 'Este(a) aluno(a) já possui Bolsa.')
            else:
                bolsa.aluno = Aluno.objects.get(id=idAluno)
                bolsa.datainicio = dataInicioBD
                bolsa.datafim = dataFimBD
                bolsa.save()
                messages.success(request, 'Bolsa alocada com sucesso.')
            # Redirect regardless of which message branch ran.
            return redirect(urlReverse)
        except ValueError:
            # Raised by strptime on malformed dates.
            messages.error(request, 'Preencha os campos corretamente.')
            return redirect(urlReverse)
    # NOTE(review): non-POST requests fall through and return None — confirm
    # this view is only ever reached via POST.
def user_edit(request):
    """Profile edit view: handles password (disabled), picture and detail updates.

    Only users in the 'users' group may access it.  Which branch runs is
    decided by sentinel keys in the POSTed data.
    """
    if not request.user.group.name == 'users':
        raise Http404
    if request.method == 'POST':
        # File uploads arrive in request.FILES, plain fields in request.POST.
        p = request.FILES if 'picture' in request.FILES else request.POST
        if 'current-password' in p:
            # Password change is currently disabled (dead string literal kept
            # as-is below).
            '''if request.user.check_password(p['current-password']):
                User.objects(id = request.user.id).update_one(
                    set__password = make_password(p['new-password'])
                )
                return HttpResponse('ok')
            else:
                return HttpResponse('error')'''
            pass
        elif 'picture' in p:
            # Resize the uploaded image to 200x200 JPEG and store it.
            file = StringIO()
            img = Image.open(StringIO(p['picture'].read()))
            img = img.resize((200, 200))
            img.save(file, 'JPEG', quality = 90)
            file.seek(0)
            path = default_storage.save('users/{0}.jpg'.format(request.user.username), ContentFile(file.read()))
            User.objects(id = request.user.id).update_one(
                set__picture = '/media/{0}'.format(path)
            )
        elif 'email' in p:
            # Bulk update of profile details (mongoengine-style update_one).
            User.objects(id = request.user.id).update_one(
                set__first_name = p['first-name'],
                set__last_name = p['last-name'],
                set__email = p['email'],
                set__birth_date = datetime.strptime(p['birth-date'], '%d/%m/%Y') if p['birth-date'] else None,
                set__gender = p['gender'],
                set__about = p['about'],
                set__contact_info__website = p['contact-info-website'],
                set__contact_info__country = p['contact-info-country'],
                set__contact_info__state = p['contact-info-state'],
                set__contact_info__city = p['contact-info-city']
            )
        # NOTE(review): unreachable in practice — the group check above
        # already dereferenced request.user.group.name; confirm placement.
        if not request.user.group:
            User.objects(id = request.user.id).update_one(
                set__group = Group.objects.get(name = 'users')
            )
        return HttpResponseRedirect('/user/{0}'.format(request.user.username))
    context = Context({'page_title': 'Editar Perfil', 'current_page': 'users'});
    return render(request, 'user_edit.html', context)
def __init__(self, node):
    """ Extract information from the supplied XML node. """
    # (report textbox id, attribute name[, default]) specs understood by
    # set_attr_from_xml.
    field_specs = [
        ['textbox4', 'accreditation'],
        ['textbox13', 'name'],
        ['textbox5', 'scheme'],
        ['textbox19', 'capacity', 0],
        ['textbox12', 'country'],
        ['textbox15', 'technology'],
        ['textbox31', 'output'],
        ['textbox18', 'period'],
        ['textbox21', 'certs', 0],
        ['textbox24', 'start_no'],
        ['textbox27', 'finish_no'],
        ['textbox37', 'factor', 0],
        ['textbox30', 'issue_dt'],
        ['textbox33', 'status'],
        ['textbox36', 'status_dt'],
        ['textbox39', 'current_holder'],
        ['textbox45', 'reg_no'],
    ]
    for spec in field_specs:
        self.set_attr_from_xml(node, spec)

    # Coerce the numeric fields.
    self.factor = float(self.factor)
    self.certs = int(self.certs) or 0
    self.capacity = float(self.capacity) or 0

    # Both date stamps always carry a midnight time component.
    self.issue_dt = datetime.strptime(self.issue_dt, '%Y-%m-%dT00:00:00')
    self.status_dt = datetime.strptime(self.status_dt, '%Y-%m-%dT00:00:00')

    # Periods that start on day 01 are collapsed to a month-year label.
    if self.period.startswith("01"):
        period_start = datetime.strptime(self.period[:10], '%d/%m/%Y')
        self.period = period_start.strftime("%b-%Y")
def filtreDeRutes(request):
    """Route-filter view.

    POST: serialises the filter form into a JSON query string and redirects
    to the search URL.  GET: applies the ``q`` JSON filters to Post objects
    and renders the paginated result list.
    """
    form = FiltreRutaForm()
    if request.method == 'POST':
        # Build a dict with the POSTed parameters
        q_str = request.POST.copy()
        q_str.pop('csrfmiddlewaretoken')  # drop the CSRF token from the data
        q = json.dumps(q_str)
        url_next = reverse('posts:buscarRuta', kwargs={})
        return HttpResponseRedirect(url_next+"?q="+q)
    else:
        q_str = request.GET.get('q',None)
        if q_str:
            q = json.loads(q_str)
            p = Q()
            # Possible keys: ['titol', 'data', 'dificultat', 'categoria', 'administrador']
            if 'titol' in q and q['titol']:
                p &= Q(titol = q['titol'])
            if 'data' in q and q['data']:
                d = datetime.strptime(q['data'], '%d/%m/%Y')
                p &= Q(data = d)
            if 'dificultat' in q and q['dificultat']:
                # '0' is the "any difficulty" placeholder option.
                if q['dificultat'] != '0':
                    p &= Q(dificultat = q['dificultat'])
            if 'categoria' in q and q['categoria']:
                n = int(q['categoria'])
                p &= Q(categoria = n)
            if 'administrador' in q and q['administrador']:
                n = int(q['administrador'])
                p &= Q(administrador = n)
            llista_rutes = Post.objects.filter( p ).order_by('-data')
        else:
            llista_rutes = Post.objects.none()
        page = request.GET.get('page')
        rutes = paginaitor_plus(page, llista_rutes, 2)
        # Prepend the "any" choice to the difficulty widget.
        form.fields['dificultat'].widget.choices = [ (0,'------') ] + list(form.fields['dificultat'].widget.choices)
        return render(request, 'posts/filtreDeRutes.html', {'form':form, 'rutes':rutes, 'q':q_str})
def get_context_data(self, **kwargs):
    """Build the insult page context: the day's record, its insult and the
    previous day's date."""
    context = super(InsultPageView, self).get_context_data(**kwargs)
    # This is not quite right, as currently we are not getting the user's
    # timezone. Once the frontend is converted to angular, we can get the
    # client date from the frontend.
    requested = self.request.GET.get('date')
    if requested:
        date = datetime.strptime(requested, '%Y-%m-%d').date()
    else:
        date = localtime(now()).date()
    # We need an insult in case we need to create the day. We could use a
    # try/except to only get the insult if there is not a day, but this is
    # cleaner and should not slow anything down.
    day, created = models.Day.objects.get_or_create(
        pk=date, defaults={'insult': get_insult(date)})
    context['insult'] = day.insult
    context['day'] = day
    context['yesterday'] = day.date - timedelta(days=1)
    return context
def detallProfessorHorari(request, pk, detall='all'):
    """Show a professor's timetable (own lessons plus guard duties) for one day.

    The day comes from the ``data`` GET parameter (YYYY-MM-DD); when absent
    or malformed, today is used.
    """
    credentials = tools.getImpersonateUser(request)
    (user, l4) = credentials

    data_txt = request.GET.get('data', '')
    try:
        data = datetime.strptime(data_txt, r"%Y-%m-%d").date()
    except ValueError:
        # Fixed: was ``datetime.today()`` (a datetime); use a date so the
        # fallback matches the type produced by the happy path above and by
        # the 'avui' context value below.
        data = datetime.today().date()

    professor = get_object_or_404(Professor, pk=pk)
    tutoria = professor.tutor_set.filter(professor=professor)

    # Lessons from the professor's own timetable plus guard duties, same day.
    qHorari = Q(horari__professor=professor, dia_impartir=data)
    qGuardies = Q(professor_guardia=professor, dia_impartir=data)
    imparticions = Impartir.objects.filter(qHorari | qGuardies).order_by('horari__hora')

    table = HorariProfessorTable(imparticions)
    RequestConfig(request).configure(table)
    return render(
        request,
        'mostraInfoProfessorCercat.html',
        {'table': table,
         'professor': professor,
         'tutoria': tutoria,
         'dia': data,
         'lendema': (data + timedelta(days=+1)).strftime(r'%Y-%m-%d'),
         'avui': datetime.today().date().strftime(r'%Y-%m-%d'),
         'diaabans': (data + timedelta(days=-1)).strftime(r'%Y-%m-%d'),
         })
def _format_date(self, dt):
    """Re-format a two-digit-year date string (m/d/yy) as mm/dd/YYYY.

    Returns None when *dt* does not match the expected format.
    """
    try:
        parsed = datetime.strptime(dt, '%m/%d/%y')
    except ValueError:
        return None
    return parsed.strftime('%m/%d/%Y')
def setUp(self):
    """Prepare a fixed date and its timestamp representation."""
    parsed = datetime.strptime("2015-08-27", "%Y-%m-%d")
    self.my_date = parsed.date()
    self.my_date_in_timestamp = datetime_to_timestamp(self.my_date)
def ajax_details_playlist(request):
    """AJAX: render the details of the POSTed YouTube playlist.

    Verifies the YouTube master and API token before calling the API;
    returns 403 when either check fails.
    """
    playlist_id = request.POST['playlist_id']
    master = f.get_youtube_master()
    check = f.check_youtube_master(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    check = f.check_api_token(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    else:
        credential = check['value']
    youtube = f.build_youtube(credential)
    playlist_response = f.playlist_details(youtube=youtube, playlist_id=playlist_id)
    '''
    Sample response:
    playlist_response = {
        "items": [
            {
                "id": "PLFp2-gAWp2eVjZYkT502Xd0tMHFD0YjP9",
                "snippet": {
                    "publishedAt": "2015-07-10T23:57:32.000Z",
                    "channelId": "UC8iUi9DiP_Nr6uAuo7W74XQ",
                    "title": "Test spamweb",
                    "description": "",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/JhGkt6PQQ8E/default.jpg", "width": 120, "height": 90},
                        "medium": {"url": "https://i.ytimg.com/vi/JhGkt6PQQ8E/mqdefault.jpg", "width": 320, "height": 180},
                        "high": {"url": "https://i.ytimg.com/vi/JhGkt6PQQ8E/hqdefault.jpg", "width": 480, "height": 360},
                        "standard": {"url": "https://i.ytimg.com/vi/JhGkt6PQQ8E/sddefault.jpg", "width": 640, "height": 480}
                    },
                    "channelTitle": "SpamWeb",
                    "localized": {"title": "Test spamweb", "description": ""}
                },
                "contentDetails": {"itemCount": 4},
                "player": {"embedHtml": "<iframe type='text/html' src='http://www.youtube.com/embed/videoseries?list=PLFp2-gAWp2eVjZYkT502Xd0tMHFD0YjP9' width='640' height='360' frameborder='0' allowfullscreen='true'/>"}
            }
        ]
    }
    '''
    playlist = playlist_response["items"][0]
    # Fixed: '%f' instead of a literal '000' so timestamps with non-zero
    # milliseconds also parse (the old format raised ValueError for them).
    playlist["snippet"]["publishedAt"] = datetime.strptime(
        playlist["snippet"]["publishedAt"], '%Y-%m-%dT%H:%M:%S.%fZ')
    context = {
        'playlist': playlist,
    }
    return render(request, 'spamusic/main-content.html', context)
def test_operator_can_add_vars_to_window(self):
    """End-to-end Selenium flow: an operator logs in, adds a device, a
    variable and a window, attaches a mimic and verifies the rendered
    window shows the variable with a fresh timestamp."""
    # A operator go to add var page
    self.browser.get('%s/vars/add/' % self.server_url)
    # A login form with a message is shown
    # TODO: self.check_notification_message("Please, login as an operator to access to this page", 'warning')
    # Add user with permission to add var
    credentials = {'username': '******', 'password': '******'}
    self.create_user_with_permission(permissions='add_var', **credentials)
    # Operator type his credential and proceed to log-in
    input_username = self.browser.find_element_by_id('id_username')
    input_username.send_keys(credentials['username'])
    input_password = self.browser.find_element_by_id('id_password')
    input_password.send_keys(credentials['password'])
    btn_submit = self.browser.find_element_by_css_selector('button.btn-primary')
    btn_submit.click()
    # Check operator menu
    menu = self.get_menu_item()
    # self.assertEqual(len(menu.find_elements_by_tag_name('li')), 12)
    self.goto_menu_item(("Add", "Var"))
    # Operator have more options to customize the scada:
    # Menus:
    # * Mimics -> Add -> Window
    # * Mimics -> Add -> Device
    # * Mimics -> Add -> Var
    # * Mimics -> Add -> Mimic
    # * Mimics -> Manage -> Windows
    # * Mimics -> Manage -> Devices
    # * Mimics -> Manage -> Vars
    # * Mimics -> Manage -> Mimic
    # * Mimics -> Windows -> Window 1 Title
    # * Mimics -> Windows -> Window 2 Title
    # * ...
    # * Mimics -> Windows -> Window n Title
    # * History -> Add -> Chart
    # * History -> Manage -> Charts
    # Since there is not devices to attach a variable it is redirected to add a device
    # He notes Add Device page
    self.check_page_title_and_header(title="Add Device", header="Add Device")
    # He notice breadcrumbs (devices > add new)
    self.check_breadcrumbs((("Devices", '/devices/'), ("Add new",)))
    # He notes enter device first notification
    self.check_notification_message("Please, add a device first", 'info')
    # Enter device data
    device_name = 'Router'
    input_name = self.browser.find_element_by_id('id_name')
    # TODO: self.assertEqual(input_name.get_attribute('placeholder'), 'Name of the Device')
    input_name.send_keys(device_name)
    input_name = self.browser.find_element_by_id('id_address')
    input_name.send_keys('1234')
    # Submit form to add device
    btn_submit = self.browser.find_element_by_css_selector('button.btn-primary')
    btn_submit.click()
    # He notes Device list page
    self.check_page_title_and_header(title="Devices", header="Devices")
    # He notice breadcrumbs (devices)
    self.check_breadcrumbs((("Devices",),))
    # He notice the added device confirmation message
    self.check_notification_message("Device was added")
    # Operator goes to add var page
    self.goto_menu_item(("Add", "Var"))
    self.check_page_title_and_header(title="Add Variable", header="Add Variable")
    # He notice breadcrumbs (vars > add new)
    self.check_breadcrumbs((("Variables", '/vars/'), ("Add new",)))
    # Enter variable data
    var_name = 'Low Battery'
    input_var_name = self.browser.find_element_by_id('id_name')
    # TODO: self.assertEqual(input_name.get_attribute('placeholder'), 'Name of the variable')
    input_var_name.send_keys(var_name)
    # Select device
    select_var_device = self.browser.find_element_by_id('id_device')
    select_var_device.send_keys(Keys.ARROW_DOWN)
    # Specify a value
    input_var_value = self.browser.find_element_by_id('id_value')
    input_var_value.send_keys('1.0')
    # Submit form to add var
    btn_submit = self.browser.find_element_by_css_selector('button.btn-primary')
    btn_submit.click()
    # It is redirected to var list
    self.check_page_title_and_header(title="Variables", header="Variables")
    # He notice breadcrumbs (vars)
    self.check_breadcrumbs((("Variables",),))
    # Confirmation message is shown
    self.check_notification_message("Variable was added")
    # In the list appears new var added
    table = self.browser.find_element_by_class_name('table')
    rows = table.find_elements_by_tag_name('tr')
    self.assertTrue(
        any(var_name in row.text for row in rows)
    )
    # Add new added variable to window
    # Since a new device with a variable was added, automatically new mimic with var was created
    # So create new window
    self.goto_menu_item(("Add", "Window"))
    self.check_page_title_and_header(title="Add Window", header="Add Window")
    # He notice breadcrumbs (windows > add new)
    self.check_breadcrumbs((("Windows", '/windows/'), ("Add new",)))
    # Enter window data
    window_title = 'Main window'
    input_title = self.browser.find_element_by_id('id_title')
    # TODO: self.assertEqual(input_name.get_attribute('placeholder'), 'Title of the window')
    input_title.send_keys(window_title)
    # Submit form to add window
    btn_submit = self.browser.find_element_by_css_selector('button.btn-primary')
    btn_submit.click()
    # Now he is in windows list page
    self.check_page_title_and_header(title="Windows", header="Windows")
    # He notice breadcrumbs
    self.check_breadcrumbs((("Windows",),))
    # So click in manage mimics button of a first
    # FIXME: This flow should be change in favor of a wizard
    button_manage_mimic = self.browser.find_elements_by_class_name('manage-mimics')[0]
    button_manage_mimic.click()
    # Now he is in manage mimics for the window page
    self.check_page_title_and_header(title="Manage Mimics", header="Manage Mimics")
    # He notice breadcrumbs (windows > Window.Title > Mimics)
    self.check_breadcrumbs((("Windows", '/windows/'), (window_title, '/windows/main-window/'), ("Mimics",),))
    # Add mimic to window
    mimic_name = "Router"
    input_mimic_name = self.browser.find_element_by_id('id_name')
    input_mimic_name.send_keys(mimic_name)
    # Specify var
    select_mimic_vars = self.browser.find_element_by_id('id_vars')
    select_mimic_vars.send_keys(Keys.ARROW_DOWN)
    # Left other mimic field with it default values
    # TODO: Enter position values and check them in window details page
    # Submit form to add mimic to window
    btn_submit = self.browser.find_element_by_css_selector('button.btn-primary')
    btn_submit.click()
    # TODO: Add mimic from device (use name and vars from device)
    # Confirmation message is shown
    self.check_notification_message("Mimic was added")
    # Go to window details page (using breadcrumbs)
    button_view = self.browser.find_elements_by_link_text(window_title)[0]
    button_view.click()
    # Now he is details window page
    self.check_page_title_and_header(title=window_title, header=window_title)
    # He notice breadcrumbs (windows > Window.Title)
    self.check_breadcrumbs((("Windows", '/windows/'), (window_title,),))
    # Then mimic for device with new variable is shown
    mimic_name_html = self.browser.find_elements_by_css_selector('.mimic .name')[0].text
    self.assertIn(mimic_name, mimic_name_html)
    # A variable value indicator and variable's name is shown
    var_item = self.browser.find_elements_by_css_selector('.mimic .var')[0]
    self.assertIn(var_name, var_item.text)
    self.assertEqual("1.0", var_item.find_element_by_class_name('value').text)
    # Last update timestamp is shown in page
    last_update_text = self.browser.find_element_by_css_selector('#last_updated_notificaion .value').text
    # Timestamp is close to now
    # TODO: Use human friendly format and/or django settings date and times format
    last_update_date = datetime.strptime(last_update_text, '%Y-%m-%d @ %H:%M:%S')
    now = datetime.now()
    # FIXME: date time must be tz aware
    self.assertAlmostEqual((now - last_update_date).total_seconds(), 0, delta=5)
def ajax_rechercher_videos(request):
    """AJAX: search YouTube for the POSTed query ``q`` and render results.

    Verifies the YouTube master and API token before calling the API;
    returns 403 when either check fails.
    """
    q = request.POST['q']
    master = f.get_youtube_master()
    check = f.check_youtube_master(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    check = f.check_api_token(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    else:
        credential = check['value']
    youtube = f.build_youtube(credential)
    video_list = f.search_video_list(youtube=youtube, q=q)
    '''
    Sample response:
    video_list = {
        "nextPageToken": "CAUQAA",
        "items": [
            {
                "id": {"kind": "youtube#video", "videoId": "lki1y2wa820"},
                "snippet": {
                    "publishedAt": "2014-08-29T19:00:03.000Z",
                    "channelId": "UC5nc_ZtjKW1htCVZVRxlQAQ",
                    "title": "'Taking You Higher Pt. 3' (Progressive House Mix)",
                    "description": "The new 'Taking You Higher' is finally here! Any support is truly appreciated. Download on iTunes... http://bit.ly/YJBeIg Listen on Spotify... http://spoti.fi/1Cazg2Y ...",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/lki1y2wa820/default.jpg"},
                        "medium": {"url": "https://i.ytimg.com/vi/lki1y2wa820/mqdefault.jpg"},
                        "high": {"url": "https://i.ytimg.com/vi/lki1y2wa820/hqdefault.jpg"}
                    },
                    "channelTitle": "MrSuicideSheep",
                    "liveBroadcastContent": "none"
                }
            },
            {
                "id": {"kind": "youtube#video", "videoId": "N2mVfpDHr9k"},
                "snippet": {
                    "publishedAt": "2012-03-09T15:14:44.000Z",
                    "channelId": "UC5nc_ZtjKW1htCVZVRxlQAQ",
                    "title": "'Peaceful Solitude' Mix",
                    "description": "Yipeee another mix :D As usual the tracks were chosen by myself and this time mixed by Aaron Static. Go give him your love. When I uploaded 'Burning ...",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/N2mVfpDHr9k/default.jpg"},
                        "medium": {"url": "https://i.ytimg.com/vi/N2mVfpDHr9k/mqdefault.jpg"},
                        "high": {"url": "https://i.ytimg.com/vi/N2mVfpDHr9k/hqdefault.jpg"}
                    },
                    "channelTitle": "MrSuicideSheep",
                    "liveBroadcastContent": "none"
                }
            },
            {
                "id": {"kind": "youtube#video", "videoId": "heJBwBUStXU"},
                "snippet": {
                    "publishedAt": "2013-07-19T16:30:15.000Z",
                    "channelId": "UC5nc_ZtjKW1htCVZVRxlQAQ",
                    "title": "'Taking You Higher Pt. 2' (Progressive House Mix)",
                    "description": "'Taking You Higher' Pt. 3 Support here... http://bit.ly/YJBeIg So a year after 'Taking You Higher' Rameses B and I decided to put out another summery ...",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/heJBwBUStXU/default.jpg"},
                        "medium": {"url": "https://i.ytimg.com/vi/heJBwBUStXU/mqdefault.jpg"},
                        "high": {"url": "https://i.ytimg.com/vi/heJBwBUStXU/hqdefault.jpg"}
                    },
                    "channelTitle": "MrSuicideSheep",
                    "liveBroadcastContent": "none"
                }
            },
            {
                "id": {"kind": "youtube#video", "videoId": "waYpEQAYf3g"},
                "snippet": {
                    "publishedAt": "2015-06-19T17:28:39.000Z",
                    "channelId": "UC5nc_ZtjKW1htCVZVRxlQAQ",
                    "title": "Taking You Deeper (Deep House Mix)",
                    "description": "First deep house mix! \"This mix represents the greatest adventure in life. We all begin in the same place: open, excited, and slightly uncertain. The road is filled ...",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/waYpEQAYf3g/default.jpg"},
                        "medium": {"url": "https://i.ytimg.com/vi/waYpEQAYf3g/mqdefault.jpg"},
                        "high": {"url": "https://i.ytimg.com/vi/waYpEQAYf3g/hqdefault.jpg"}
                    },
                    "channelTitle": "MrSuicideSheep",
                    "liveBroadcastContent": "none"
                }
            },
            {
                "id": {"kind": "youtube#video", "videoId": "2td5Nj23vns"},
                "snippet": {
                    "publishedAt": "2015-01-01T19:01:08.000Z",
                    "channelId": "UC5nc_ZtjKW1htCVZVRxlQAQ",
                    "title": "'Dawn' Pt. 2 (An Ambient Mix)",
                    "description": "Hey everyone, after exactly 2 years I've finally managed to bring you the next instalment of the ambient mix. I really hope you guys enjoy it! This mix is supposed ...",
                    "thumbnails": {
                        "default": {"url": "https://i.ytimg.com/vi/2td5Nj23vns/default.jpg"},
                        "medium": {"url": "https://i.ytimg.com/vi/2td5Nj23vns/mqdefault.jpg"},
                        "high": {"url": "https://i.ytimg.com/vi/2td5Nj23vns/hqdefault.jpg"}
                    },
                    "channelTitle": "MrSuicideSheep",
                    "liveBroadcastContent": "none"
                }
            }
        ]
    }
    '''
    # Fixed: '%f' instead of a literal '000' so timestamps with non-zero
    # milliseconds also parse (the old format raised ValueError for them).
    for video in video_list["items"]:
        video["snippet"]["publishedAt"] = datetime.strptime(
            video["snippet"]["publishedAt"], '%Y-%m-%dT%H:%M:%S.%fZ')
    context = {
        'video_list': video_list,
        'q': q,
    }
    return render(request, 'spamusic/yt-tab-search-results.html', context)
def ajax_playlistitems(request):
    """Fetch every item of a YouTube playlist and render them as HTML.

    Expects ``playlist_id`` in POST. Validates the configured YouTube
    master account and its API token before calling the API; returns
    403 Forbidden when either check fails.
    """
    playlist_id = request.POST['playlist_id']

    master = f.get_youtube_master()
    check = f.check_youtube_master(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()

    check = f.check_api_token(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    else:
        credential = check['value']

    youtube = f.build_youtube(credential)
    playlist_items = f.playlist_items_list_all(youtube=youtube, playlist_id=playlist_id)
    # playlist_items is a raw YouTube Data API response of the shape
    # {"items": [{"id": ..., "snippet": {"publishedAt": "<ISO-8601>",
    #             "title": ..., "thumbnails": {...}, "resourceId": {...}}}, ...]}

    # Convert the publishedAt strings into datetime objects for the template.
    # Use %f for the fractional seconds: the previous format hard-coded
    # '.000Z' and raised ValueError for any timestamp whose milliseconds
    # were not exactly zero ('%f' still accepts '.000').
    for item in playlist_items["items"]:
        item["snippet"]["publishedAt"] = datetime.strptime(
            item["snippet"]["publishedAt"], '%Y-%m-%dT%H:%M:%S.%fZ')

    context = {
        'playlist_items': playlist_items,
    }
    return render(request, 'spamusic/playlist-videos.html', context)
def ajax_add_video_to_playlist(request):
    """Add one video to a YouTube playlist and render the new item as HTML.

    Expects ``playlist_id`` and ``video_id`` in POST. Validates the
    configured YouTube master account and its API token before calling
    the API; returns 403 Forbidden when either check fails. On success
    the action is recorded through ``add_log``.
    """
    playlist_id = request.POST['playlist_id']
    video_id = request.POST['video_id']

    master = f.get_youtube_master()
    check = f.check_youtube_master(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()

    check = f.check_api_token(request=request, master=master)
    if check['status'] is False:
        return HttpResponseForbidden()
    else:
        credential = check['value']

    youtube = f.build_youtube(credential)
    playlist_item = f.add_video_to_playlist(youtube=youtube, playlist_id=playlist_id, video_id=video_id)
    # playlist_item is a raw "youtube#playlistItem" API resource; the
    # template needs snippet.publishedAt as a datetime, not a string.
    # Use %f for the fractional seconds: the previous format hard-coded
    # '.000Z' and raised ValueError for any timestamp whose milliseconds
    # were not exactly zero ('%f' still accepts '.000').
    playlist_item["snippet"]["publishedAt"] = datetime.strptime(
        playlist_item["snippet"]["publishedAt"], '%Y-%m-%dT%H:%M:%S.%fZ')

    add_log(text="%s a ajouté une nouvelle vidéo : %s" % (request.user.username, playlist_item["snippet"]["title"]),
            app="spamusic", log_type="spamusic_add_video", user=request.user)

    context = {
        'playlist_item': playlist_item,
    }
    return render(request, 'spamusic/new-video.html', context)
def detallAlumneHorari(request, pk, detall='all'):
    """Render a student's timetable for one day as a table.

    For the day given in the ``data`` GET parameter (``YYYY-MM-DD``,
    defaulting to today) the view builds one row per timetable slot,
    combining:

      * lessons where the student has an attendance-control record
        (``horari_alumne`` column, including the attendance state), and
      * the remaining active lessons of the student's group that have no
        control record yet (``horari_grup`` column).

    Extra details are only shown to users belonging to specific groups.
    """
    credentials = tools.getImpersonateUser(request)
    (user, l4) = credentials

    # Only members of these groups may see the detailed columns.
    grups_poden_veure_detalls = [u"sortides", u"consergeria", u"direcció", ]
    mostra_detalls = user.groups.filter(name__in=grups_poden_veure_detalls).exists()

    data_txt = request.GET.get('data', '')
    try:
        data = datetime.strptime(data_txt, r"%Y-%m-%d").date()
    except ValueError:
        # BUGFIX: fall back to a date (not a naive datetime) so `data` has
        # the same type on both paths — it feeds the Q filter, the `dia`
        # template variable and the day-navigation arithmetic below.
        data = datetime.today().date()

    qAvui = Q(impartir__dia_impartir=data)
    alumne = get_object_or_404(Alumne, pk=pk)
    controlOnEslAlumneAvui = alumne.controlassistencia_set.filter(qAvui)
    grup = alumne.grup

    # Active slots of the group for the day, minus those slots where the
    # student already has an attendance record.
    horesDelGrupAvui = {x for x in grup.horari_set.filter(qAvui)
                        .filter(es_actiu=True)}
    horesDeAlumneAvui = {c.impartir.horari for c in controlOnEslAlumneAvui}
    horesRestants = horesDelGrupAvui - horesDeAlumneAvui

    aules = []
    # Rows built from the student's own attendance records; records sharing
    # the same slot ('hora') are merged into one multi-line cell.
    for c in controlOnEslAlumneAvui:
        noHaDeSerAlAula = c.nohadeseralaula_set.all()
        missatgeNoHaDeSerAlAula = ", ".join([n.get_motiu_display() for n in noHaDeSerAlAula])
        estat = c.estat.nom_estat if hasattr(c.estat, 'nom_estat') else ''
        horanova = True
        for aula in aules:
            if c.impartir.horari.hora == aula['hora']:
                aula['horari_alumne'] = aula['horari_alumne'] + u'\n' + \
                    c.impartir.get_nom_aula + u' ' + \
                    c.impartir.horari.professor.get_full_name() + u' ' + \
                    c.impartir.horari.assignatura.nom_assignatura + \
                    u' (' + estat + u')'
                horanova = False
        if horanova:
            novaaula = {'horari_alumne': c.impartir.get_nom_aula + ' ' +
                                         c.impartir.horari.professor.get_full_name() + u' ' +
                                         c.impartir.horari.assignatura.nom_assignatura +
                                         u' (' + estat + u')',
                        'hora': c.impartir.horari.hora,
                        'hora_inici': c.impartir.horari.hora.hora_inici,
                        'es_horari_grup': False,
                        'es_hora_actual': (c.impartir.horari.hora.hora_inici <= datetime.now().time() <= c.impartir.horari.hora.hora_fi),
                        'missatge_no_ha_de_ser_a_laula': missatgeNoHaDeSerAlAula,
                        'no_ha_de_ser_a_laula': True if noHaDeSerAlAula else False,
                        'horari_grup': ''
                        }
            aules.append(novaaula)

    # Rows for the remaining group slots (no attendance record yet); again
    # merged per slot when a row for that 'hora' already exists.
    for horari in horesRestants:
        horanova = True
        for aula in aules:
            if horari.hora == aula['hora']:
                aula['horari_grup'] = (aula['horari_grup'] + u'\n' +
                                       horari.nom_aula + u' ' +
                                       unicode(horari.professor) + u' ' +
                                       unicode(horari.assignatura))
                horanova = False
        if horanova:
            novaaula = {'horari_alumne': '',
                        'hora': horari.hora,
                        'hora_inici': horari.hora.hora_inici,
                        'es_horari_grup': True,
                        'es_hora_actual': (horari.hora.hora_inici <= datetime.now().time() <= horari.hora.hora_fi),
                        'no_ha_de_ser_a_laula': '',
                        'horari_grup': (horari.nom_aula + u' ' +
                                        unicode(horari.professor) + u' ' +
                                        unicode(horari.assignatura)),
                        }
            aules.append(novaaula)

    aules_sorted = sorted(aules, key=lambda x: x['hora_inici'])
    table = HorariAlumneTable(aules_sorted)
    table.order_by = 'hora_inici'
    RequestConfig(request).configure(table)
    return render(
        request,
        'mostraInfoAlumneCercat.html',
        {'table': table,
         'alumne': alumne,
         'dia': data,
         'mostra_detalls': mostra_detalls,
         'lendema': (data + timedelta(days=+1)).strftime(r'%Y-%m-%d'),
         'avui': datetime.today().date().strftime(r'%Y-%m-%d'),
         'diaabans': (data + timedelta(days=-1)).strftime(r'%Y-%m-%d'),
         },
    )
def update_local_db_based_on_record(eox_record, create_missing=False):
    """
    update a database record based on a record provided by the Cisco EoX API
    :param eox_record: JSON data from the Cisco EoX API
    :param create_missing: set to True, if the product should be created if it doesn't exist in the local database
    :return: result dictionary with the keys "PID", "blacklist", "updated",
             "created" and "message" describing what happened to the record
    """
    pid = eox_record["EOLProductID"]
    # running result; mutated along the way and returned at every exit point
    result_record = {"PID": pid, "blacklist": False, "updated": False, "created": False, "message": None}
    if create_missing:
        product, created = Product.objects.get_or_create(product_id=pid)
        if created:
            logger.debug("Product '%s' was not in database and is created" % pid)
            product.product_id = pid
            product.description = eox_record["ProductIDDescription"]
            # it is a Cisco API and the vendors are read-only within the database
            product.vendor = Vendor.objects.get(name="Cisco Systems")
            result_record["created"] = True
    else:
        try:
            # `created` must be defined here as well: the except handler at the
            # bottom uses it to decide whether a partially created row is rolled back
            created = False
            product = Product.objects.get(product_id=pid)
        except Exception:
            # product unknown and creation not requested -> nothing to update
            logger.info("product not found in database: %s" % pid, exc_info=True)
            result_record["created"] = False
            return result_record

    # update the lifecycle information
    try:
        update = True
        if product.eox_update_time_stamp is None:
            # never synced before -> always update
            logger.debug("Update product %s because of missing timestamps" % pid)
            result_record["updated"] = True
        else:
            # skip the update when the API record is not newer than the local copy
            date_format = convert_time_format(eox_record["UpdatedTimeStamp"]["dateFormat"])
            updated_time_stamp = datetime.strptime(eox_record["UpdatedTimeStamp"]["value"], date_format).date()
            if product.eox_update_time_stamp >= updated_time_stamp:
                logger.debug(
                    "update of product not required: %s >= %s " % (product.eox_update_time_stamp, updated_time_stamp)
                )
                result_record["updated"] = False
                result_record["message"] = "update suppressed (data not modified)"
                update = False
            else:
                logger.debug("Product %s update required" % pid)
                result_record["updated"] = True

        if update:
            # save datetime values from Cisco EoX API record
            value_map = {
                # <API value> : <class attribute>
                "UpdatedTimeStamp": "eox_update_time_stamp",
                "EndOfSaleDate": "end_of_sale_date",
                "LastDateOfSupport": "end_of_support_date",
                "EOXExternalAnnouncementDate": "eol_ext_announcement_date",
                "EndOfSWMaintenanceReleases": "end_of_sw_maintenance_date",
                "EndOfRoutineFailureAnalysisDate": "end_of_routine_failure_analysis",
                "EndOfServiceContractRenewal": "end_of_service_contract_renewal",
                "EndOfSvcAttachDate": "end_of_new_service_attachment_date",
                "EndOfSecurityVulSupportDate": "end_of_sec_vuln_supp_date",
            }
            for key in value_map.keys():
                if eox_record.get(key, None):
                    value = eox_record[key].get("value", None)
                    # a single blank apparently marks an unset date in the
                    # API response — skip it instead of failing to parse
                    if value != " ":
                        setattr(
                            product,
                            value_map[key],
                            datetime.strptime(
                                value, convert_time_format(eox_record[key].get("dateFormat", "%Y-%m-%d"))
                            ).date(),
                        )

            # save string values from Cisco EoX API record
            if "LinkToProductBulletinURL" in eox_record.keys():
                raw_data = eox_record.get("LinkToProductBulletinURL", "")
                # the API may return a comma-separated list of URLs; keep the first one
                product.eol_reference_url = raw_data if "," not in raw_data else raw_data.split(",")[0].strip()

                # the bulletin number is only meaningful together with a reference URL
                if ("ProductBulletinNumber" in eox_record.keys()) and (product.eol_reference_url != ""):
                    product.eol_reference_number = eox_record.get("ProductBulletinNumber", "EoL bulletin")

            # persist atomically and record a revision entry for auditing
            with transaction.atomic(), reversion.create_revision():
                product.save()
                reversion.set_comment("Updated by the Cisco EoX API crawler")

    except Exception as ex:
        if created:
            # remove the new (incomplete) entry from the database
            product.delete()
        logger.error("update of product '%s' failed." % pid, exc_info=True)
        logger.debug("DataSet with exception\n%s" % json.dumps(eox_record, indent=4))
        result_record["message"] = "Update failed: %s" % str(ex)
        return result_record

    # save migration information if defined
    if "EOXMigrationDetails" in eox_record:
        migration_details = eox_record["EOXMigrationDetails"]
        product_migration_source, created = ProductMigrationSource.objects.get_or_create(
            name="Cisco EoX Migration option"
        )
        if created:
            product_migration_source.description = "Migration option suggested by the Cisco EoX API."
            product_migration_source.save()

        if "MigrationOption" in migration_details:
            # only a single migration option per migration source is allowed
            pmo, _ = ProductMigrationOption.objects.get_or_create(
                product=product, migration_source=product_migration_source
            )
            if migration_details["MigrationOption"] == "Enter PID(s)":
                # product replacement available, add replacement PID
                pmo.replacement_product_id = migration_details["MigrationProductId"].strip()
                pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"])
            elif (
                migration_details["MigrationOption"] == "See Migration Section"
                or migration_details["MigrationOption"] == "Enter Product Name(s)"
            ):
                # complex product migration, only add comment
                mig_strat = migration_details["MigrationStrategy"].strip()
                pmo.comment = mig_strat if mig_strat != "" else migration_details["MigrationProductName"].strip()
                pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"])
            else:
                # no replacement available, only add comment
                pmo.comment = migration_details["MigrationOption"].strip()
                # some data separated by blank
                pmo.migration_product_info_url = clean_api_url_response(migration_details["MigrationProductInfoURL"])

            # add message if only a single entry was saved
            if pmo.migration_product_info_url != migration_details["MigrationProductInfoURL"].strip():
                result_record["message"] = "multiple URL values received, only the first one is saved"

            pmo.save()

    return result_record