def test_override_decorator(self):
    """timezone.override() used as a decorator must apply its timezone for
    the duration of the decorated call and restore the prior state after."""
    default = timezone.get_default_timezone()

    @timezone.override(EAT)
    def func_tz_eat():
        # Inside the call, the overridden timezone is current.
        self.assertIs(EAT, timezone.get_current_timezone())

    @timezone.override(None)
    def func_tz_none():
        # override(None) deactivates any timezone, falling back to default.
        self.assertIs(default, timezone.get_current_timezone())

    try:
        # With ICT explicitly activated, each decorated call must still see
        # its own override, and ICT must be restored after the call returns.
        timezone.activate(ICT)
        func_tz_eat()
        self.assertIs(ICT, timezone.get_current_timezone())
        func_tz_none()
        self.assertIs(ICT, timezone.get_current_timezone())
        # With no active timezone, the default must be restored after calls.
        timezone.deactivate()
        func_tz_eat()
        self.assertIs(default, timezone.get_current_timezone())
        func_tz_none()
        self.assertIs(default, timezone.get_current_timezone())
    finally:
        # Never leak an activated timezone into other tests.
        timezone.deactivate()
def coerce_times(start, end, date):
    """Combine a YYYY-MM-DD *date* with HH:MM *start* and *end* strings and
    return the pair as timezone-aware datetimes in the current timezone."""
    tz = timezone.get_current_timezone()

    def _aware(clock):
        # Parse "<date> <clock>" as a naive datetime, then localize it.
        naive = datetime.datetime.strptime("%s %s" % (date, clock), "%Y-%m-%d %H:%M")
        return timezone.make_aware(naive, tz)

    return _aware(start), _aware(end)
def grouped_totals(entries):
    """Yield ``(week_start, week_totals, [(day, day_summary), ...])`` tuples
    for the given entries queryset, grouped by PostgreSQL DATE_TRUNC week.

    ``week_totals`` comes from get_hours() over the weekly aggregate rows;
    each day carries its daily_summary() of the daily aggregate rows.
    """
    select = {
        "day": {"date": """DATE_TRUNC('day', end_time)"""},
        "week": {"date": """DATE_TRUNC('week', end_time)"""},
    }
    weekly = entries.extra(select=select["week"]).values('date', 'billable')
    weekly = weekly.annotate(hours=Sum('hours')).order_by('date')
    daily = entries.extra(select=select["day"]).values('date', 'project__name', 'billable')
    daily = daily.annotate(hours=Sum('hours')).order_by('date', 'project__name')

    weeks = {}
    for week, week_entries in groupby(weekly, lambda x: x['date']):
        try:
            if timezone.is_naive(week):
                week = timezone.make_aware(week, timezone.get_current_timezone())
        except AttributeError:
            # ``week`` is a plain date (no utcoffset attribute).
            # BUG FIX: the original passed a tzinfo as the second argument of
            # datetime.combine(), which requires a time -- promote the date
            # to an aware midnight instead.
            week = timezone.make_aware(
                datetime.datetime.combine(week, datetime.time.min),
                timezone.get_current_timezone())
        weeks[week] = get_hours(week_entries)

    days = []
    last_week = None
    for day, day_entries in groupby(daily, lambda x: x['date']):
        week = get_week_start(day)
        if last_week and week > last_week:
            yield last_week, weeks.get(last_week, {}), days
            days = []
        days.append((day, daily_summary(day_entries)))
        last_week = week
    # BUG FIX: guard the trailing yield -- with no daily rows the original
    # referenced ``week``, which could be unbound or a stale value left over
    # from the weekly loop.
    if last_week is not None:
        yield last_week, weeks.get(last_week, {}), days
def test_lookup_date_with_use_tz(self): d = datetime.date(2014, 3, 12) # The following is equivalent to UTC 2014-03-12 18:34:23.24000. dt1 = datetime.datetime( 2014, 3, 12, 10, 22, 23, 240000, tzinfo=timezone.get_current_timezone() ) # The following is equivalent to UTC 2014-03-13 05:34:23.24000. dt2 = datetime.datetime( 2014, 3, 12, 21, 22, 23, 240000, tzinfo=timezone.get_current_timezone() ) t = datetime.time(21, 22, 23, 240000) m1 = DateTimeModel.objects.create(d=d, dt=dt1, t=t) m2 = DateTimeModel.objects.create(d=d, dt=dt2, t=t) # In Vancouver, we expect both results. self.assertQuerysetEqual( DateTimeModel.objects.filter(dt__date=d), [repr(m1), repr(m2)], ordered=False ) with self.settings(TIME_ZONE='UTC'): # But in UTC, the __date only matches one of them. self.assertQuerysetEqual( DateTimeModel.objects.filter(dt__date=d), [repr(m1)] )
def handle(self, *args, **options): print 'Start.' start_time = timezone.datetime(2014, 9, 01, 0, 0, 0, 0, tzinfo=timezone.get_current_timezone()) end_time = timezone.datetime(2015, 8, 31, 23, 59, 0, 0, tzinfo=timezone.get_current_timezone()) range_qs = Facebook_Status.objects.filter(published__range=(start_time, end_time)) # Shkifut print 'doing shkifut' STR_SEARCH_TERMS = [u'שקיפות', ] q_objects = [Q(content__icontains=x) for x in STR_SEARCH_TERMS] joined_q_objects = q_objects[0] # | q_objects[1] | q_objects[2] self.calculate_and_save_data_for_keyword(range_qs, joined_q_objects, file_name='shkifut') # Hasadna print 'doing shkifut' STR_SEARCH_TERMS = [u'כנסת פתוחה', u'לידע ציבורי', u'מפתח התקציב', u'תקציב פתוח', u'התקציב הפתוח'] q_objects = [Q(content__icontains=x) for x in STR_SEARCH_TERMS] joined_q_objects = q_objects[0] | q_objects[1] | q_objects[2] | q_objects[3] | q_objects[4] self.calculate_and_save_data_for_keyword(range_qs, joined_q_objects, file_name='hasadna_terms') # Democracy print 'doing Democracy' STR_SEARCH_TERMS = [u'דמוקרטיה', ] q_objects = [Q(content__icontains=x) for x in STR_SEARCH_TERMS] joined_q_objects = q_objects[0] self.calculate_and_save_data_for_keyword(range_qs, joined_q_objects, file_name='democracy') # all print 'doing all' joined_q_objects = Q() self.calculate_and_save_data_for_keyword(range_qs, joined_q_objects, file_name='total') print 'Done.'
def get_context_data(self, **kwargs):
    """Build the template context for a single day of the schedule:
    localized sessions, their locations, requested timeslots, lunchtime,
    and the current time."""
    context = super(SingleDayView, self).get_context_data(**kwargs)
    session_list = self.get_queryset()
    # Shift each session's start into the active timezone for display.
    for sess in list(session_list):
        sess.start_time = sess.start_time.astimezone(timezone.get_current_timezone())
    locations = Location.objects.with_sessions().filter(
        event=self.event,
        sessions__in=context['session_list']
    ).distinct()
    # Lunchtime is inferred from the first session titled "lunch...", if any.
    try:
        lunchtime = self.get_queryset().filter(
            title__istartswith='lunch')[0].start_time.astimezone(timezone.get_current_timezone())
    except IndexError:
        lunchtime = None
    # Optional ?timeslots=<time>,<time>,... query parameter.
    timeslots = self.request.GET.get('timeslots', '').split(',')
    timeslots = [dateparse(time).time() for time in timeslots]
    context['session_list'] = session_list
    context['event'] = self.event
    context['locations'] = locations
    context['timeslots'] = timeslots
    context['lunchtime'] = lunchtime
    context['now'] = timezone.now().astimezone(timezone.get_current_timezone())
    # Sessions started after this moment are still "current".
    context['now_minus_session_length'] = context['now'] - context['event'].session_length
    return context
def get_context_data(self, **kwargs):
    """Build the context for the current-timeslot view: number the start
    times shared by more than one session and find the slot number of the
    first listed session."""
    context = super(CurrentTimeslotView, self).get_context_data(**kwargs)
    # Lunchtime is inferred from the first session titled "lunch...", if any.
    try:
        lunchtime = self.get_queryset().filter(
            title__istartswith='lunch')[0].start_time.astimezone(timezone.get_current_timezone())
    except IndexError:
        lunchtime = None
    # Start times held by more than one session form the numbered timeslots.
    timeslots = Session.objects.today_or_first_for_event(self.event
        ).values('start_time'
        ).annotate(sessions_in_timeslot=Count('start_time')
        ).filter(sessions_in_timeslot__gt=1
        )
    # Map localized start time -> 1-based slot number.
    timeslots = {
        value['start_time'].astimezone(timezone.get_current_timezone()): key + 1
        for (key, value) in enumerate(timeslots)
    }
    try:
        context['session_num'] = timeslots.get(
            context['session_list'][0].start_time.astimezone(timezone.get_current_timezone()))
    except IndexError:
        # No sessions at all.
        context['session_num'] = None
    context['event'] = self.event
    context['timeslots'] = timeslots
    context['lunchtime'] = lunchtime
    context['now'] = timezone.now().astimezone(timezone.get_current_timezone())
    return context
def parse_duration(begin, value, errors):
    """Parse a relative-duration token such as "2y", "3m", "10d", "4h", "1w"
    and return ``begin`` shifted by that amount.

    Year/month results are rebuilt as aware datetimes in the current
    timezone; day/hour/week results use plain timedelta arithmetic.  On any
    parse failure a translated message is appended to ``errors`` and None is
    returned.
    """
    if len(value) > 1:
        try:
            i = int(value[:-1])
            unit = value[-1]
            if unit == 'y':
                return timezone.make_aware(
                    datetime(begin.year + i, begin.month, begin.day,
                             begin.hour, begin.minute, begin.second),
                    timezone.get_current_timezone())
            if unit == 'm':
                # BUG FIX: the original while-loop mutated ``i`` as it
                # carried years, yielding wrong results (e.g. Nov + 3m gave
                # December two years later).  divmod on a zero-based month
                # performs the carry correctly.
                years, month0 = divmod(begin.month - 1 + i, 12)
                # Note: a day past the target month's end (e.g. Jan 31 + 1m)
                # raises ValueError and falls through to the error message,
                # as before.
                return timezone.make_aware(
                    datetime(begin.year + years, month0 + 1, begin.day,
                             begin.hour, begin.minute, begin.second),
                    timezone.get_current_timezone())
            if unit == 'd':
                return begin + timedelta(days=i)
            if unit == 'h':
                return begin + timedelta(hours=i)
            if unit == 'w':
                return begin + timedelta(weeks=i)
        except ValueError:
            pass
    errors.append(_("Unable to parse duration: {}").format(value))
    return None
def submit_file(self, contest, pi_short_name, localtime=None, siotime=None,
                magickey="", file_size=1024, file_name='submission.cpp'):
    """POST a dummy submission file to the oisubmit endpoint for *contest*
    and return the test client's response.

    ``localtime``/``siotime`` may be datetimes (rendered in the active
    timezone) or preformatted strings; ``localtime`` defaults to now (UTC),
    ``siotime`` to the empty string.
    """
    def _render(moment):
        # Datetimes are formatted in the current timezone; strings pass through.
        if isinstance(moment, datetime):
            return moment.astimezone(get_current_timezone()).strftime("%Y-%m-%d %H:%M:%S")
        return moment

    if localtime is None:
        localtime = datetime.now(utc)
    post_data = {
        'localtime': _render(localtime),
        'siotime': "" if siotime is None else _render(siotime),
        'magickey': magickey or str(OISUBMIT_MAGICKEY),
        'problem_shortname': pi_short_name,
        'pi_id': str(0),
        'file': ContentFile('a' * file_size, name=file_name),
    }
    url = reverse('oisubmit', kwargs={'contest_id': contest.id})
    return self.client.post(url, post_data)
def test_make_show(self, wipe=True):
    # this may seem overly thorough, but it has already found bugs that
    # would otherwise have been missed:
    # Walk backwards hour by hour over a full leap year's worth of start
    # times and check every generated show's invariants.
    for hours in range(366*24, 0, -1):
        if wipe:
            Show.objects.all().delete()
        # NOTE(review): .replace(tzinfo=...) on the aware timezone.now()
        # relabels the UTC wall-clock time as local time instead of
        # converting it -- confirm this shift is intentional.
        starter = (
            timezone.now()
            .replace(tzinfo=timezone.get_current_timezone())
            - datetime.timedelta(hours=hours)
        )
        show = Show.at(starter)
        showtime = show.showtime.astimezone(
            timezone.get_current_timezone())
        # Every show must start Saturday 21:00:00.000000 local time.
        self.assertEqual(showtime.hour, 21)
        self.assertEqual(showtime.minute, 0)
        self.assertEqual(showtime.second, 0)
        self.assertEqual(showtime.microsecond, 0)
        self.assertEqual(showtime.weekday(), 5)
        # Shows last exactly two hours and must end after ``starter``.
        self.assertEqual(show.end - show.showtime,
                         datetime.timedelta(hours=2))
        self.assertGreater(show.end, starter)
def __init__(self, **kwargs):
    """Initialize report options from keyword arguments.

    When constructed from a form, the effective options arrive under the
    "initial" key; otherwise the kwargs themselves are the options.
    Recognized options: date_range, shop, request, start_date, end_date.
    """
    if kwargs.get("initial"):
        self.options = kwargs["initial"]
    else:
        self.options = kwargs
    self.start_date = kwargs.get("start_date", None)
    self.end_date = kwargs.get("end_date", None)
    # An explicit "date_range" option overrides start/end dates.
    if self.options.get("date_range"):
        self.start_date, self.end_date = parse_date_range(self.options["date_range"])
    if self.options.get("shop"):
        self.shop = Shop.objects.get(pk=self.options["shop"])
    else:
        self.shop = None
    # Fall back to (almost) the full representable datetime range; the
    # one-day margins keep timezone conversion from overflowing at the
    # datetime.min/max extremes.
    if self.start_date is None:
        self.start_date = make_aware(datetime.min + timedelta(days=1), get_current_timezone())
    if self.end_date is None:
        self.end_date = make_aware(datetime.max - timedelta(days=1), get_current_timezone())
    if self.options.get("request"):
        self.request = self.options["request"]
    self.rendered = False
def anomalyCntPeerJson(request):
    """JSON endpoint: histogram of anomaly peer counts.

    Optional query parameters pick the time window: ``days=N``, ``hours=N``,
    or ``start``/``end`` (epoch seconds).  With no parameters all anomalies
    are counted.  Returns ``{i: count}`` where an anomaly with P peers
    increments every bucket 0..P (cumulative counts).
    """
    # BUG FIX: ``request_dict`` was unbound (NameError) when the URL carried
    # no query string; default it to an empty mapping.
    request_dict = {}
    url = request.get_full_path()
    if '?' in url:
        # NOTE: the raw query string is parsed by hand, mirroring the
        # original; request.GET would be the idiomatic Django equivalent.
        params = url.split('?')[1]
        request_dict = urllib.parse.parse_qs(params)
    if "days" in request_dict:
        num_days = int(request_dict["days"][0])
        end_time = timezone.now()
        start_time = end_time - timedelta(days=num_days)
        anomalies = Anomaly.objects.filter(timestamp__range=[start_time, end_time])
    elif "hours" in request_dict:
        num_hours = int(request_dict["hours"][0])
        end_time = timezone.now()
        start_time = end_time - timedelta(hours=num_hours)
        anomalies = Anomaly.objects.filter(timestamp__range=[start_time, end_time])
    elif "start" in request_dict and "end" in request_dict:
        # Epoch seconds are read as UTC, then made aware in the current zone.
        start_time = timezone.make_aware(
            datetime.utcfromtimestamp(int(request_dict["start"][0])),
            timezone.get_current_timezone())
        end_time = timezone.make_aware(
            datetime.utcfromtimestamp(int(request_dict["end"][0])),
            timezone.get_current_timezone())
        anomalies = Anomaly.objects.filter(timestamp__range=[start_time, end_time])
    else:
        anomalies = Anomaly.objects.all()
    anomalyPeerCnt = {}
    for anomaly in anomalies:
        anomaly_peer_num = len(json.loads(anomaly.peers))
        for i in range(anomaly_peer_num + 1):
            # dict.get replaces the original membership test on .keys().
            anomalyPeerCnt[i] = anomalyPeerCnt.get(i, 0) + 1
    rsp = JsonResponse(anomalyPeerCnt, safe=False)
    # Allow cross-origin consumption of this feed.
    rsp["Access-Control-Allow-Origin"] = "*"
    return rsp
def make_aware_timezone(date, format=None):
    """Return *date* as an aware datetime in the current timezone.

    Strings are first parsed with *format* (strptime); datetime values are
    localized as-is.
    """
    # NOTE(review): ``unicode`` only exists on Python 2 -- this block is
    # Python 2 code.
    if isinstance(date, str) or isinstance(date, unicode):
        naive_datatime = timezone.datetime.strptime(date, format)
        res = timezone.make_aware(naive_datatime, timezone.get_current_timezone())
    else:
        res = timezone.make_aware(date, timezone.get_current_timezone())
    return res
def __init__(self, session, **kwargs):
    """Email thread that notifies a presenter their session was approved and
    scheduled; composes the subject and body from session details."""
    super(SessionApprovedEmailThread, self).__init__(session, **kwargs)
    # Displayed times are localized to the active timezone.
    # NOTE(review): '%a %D' relies on platform strftime supporting %D.
    details = dict(start_time=self.session.start_time.astimezone(
                       timezone.get_current_timezone()
                   ).strftime('%I:%M %p'),
                   start_date=self.session.start_time.astimezone(
                       timezone.get_current_timezone()
                   ).strftime('%a %D'),
                   location=self.session.location.name,
                   site=Site.objects.get_current().domain,
                   schedule_url=self.session.get_absolute_url())
    self.subject = '[TCamp] {start_date}, {start_time} @ {location} -- Your Session is scheduled!'.format(**details)
    self.body = ("Your session has been approved and scheduled for:"
                 "\n\n"
                 "{start_date}, {start_time} in {location}."
                 "\n\n"
                 "Please see the registration desk 10 minutes before "
                 "your scheduled time slot if you need an "
                 "adapter to connect your laptop to a VGA projector, "
                 "or ask the wall crew if you have logistical or timing "
                 "questions."
                 "\n\n"
                 "Your session's permalink page (http://{site}{schedule_url}) has an "
                 "etherpad on it for collaborative notetaking. To help keep "
                 "a good record of the discussion that goes on during your talk, "
                 "you may want to mention to the group that it's available."
                 "\n\n"
                 "Also, note that since it's published, your proposal can no longer be "
                 "edited. Please see the wall crew with logistical or timing "
                 "questions. ").format(**details)
def calculate_ptAge_now(bd):
    """Return the patient's age in whole years as of now.

    ``bd`` is the birth datetime; a naive value is localized to the current
    timezone before comparison.
    """
    # BUG FIX: timezone.now() is already timezone-aware; the original called
    # make_aware() on it, which raises ValueError.  Convert to local time
    # instead.
    now = timezone.localtime(timezone.now())
    if timezone.is_naive(bd):
        bd = make_aware(bd, timezone.get_current_timezone())
    # BUG FIX: the original never computed anything and always returned 0.
    # Standard birthday arithmetic: subtract one year if the birthday has
    # not yet occurred this year.
    age = now.year - bd.year - ((now.month, now.day) < (bd.month, bd.day))
    return age
def get_query(self):
    """Build ORM filter/exclude kwargs for this report from its FilterField
    rows.  (Python 2 code: ``except Exception, e`` syntax.)

    NOTE(review): this excerpt ends immediately after the filter loop; the
    queryset assembly and return statement presumably follow outside this
    chunk -- confirm against the full file.
    """
    report = self
    model_class = report.root_model.model_class()
    message = ""
    objects = model_class.objects.all()
    # Filters
    # NOTE: group all the filters together into one in order to avoid
    # unnecessary joins
    filters = {}
    excludes = {}
    for filter_field in report.filterfield_set.all():
        try:
            # exclude properties from standard ORM filtering
            if '[property]' in filter_field.field_verbose:
                continue
            if '[custom' in filter_field.field_verbose:
                continue
            # Dotted relation path plus field name, e.g. "author__name".
            filter_string = str(filter_field.path + filter_field.field)
            if filter_field.filter_type:
                filter_string += '__' + filter_field.filter_type
            # Check for special types such as isnull
            if filter_field.filter_type == "isnull" and filter_field.filter_value == "0":
                filter_ = {filter_string: False}
            elif filter_field.filter_type == "in":
                filter_ = {filter_string: filter_field.filter_value.split(',')}
            else:
                # All filter values are stored as strings, but may need to be converted
                if '[Date' in filter_field.field_verbose:
                    filter_value = parser.parse(filter_field.filter_value)
                    if settings.USE_TZ:
                        filter_value = timezone.make_aware(
                            filter_value,
                            timezone.get_current_timezone()
                        )
                    if filter_field.filter_type == 'range':
                        filter_value = [filter_value, parser.parse(filter_field.filter_value2)]
                        if settings.USE_TZ:
                            filter_value[1] = timezone.make_aware(
                                filter_value[1],
                                timezone.get_current_timezone()
                            )
                else:
                    filter_value = filter_field.filter_value
                    if filter_field.filter_type == 'range':
                        filter_value = [filter_value, filter_field.filter_value2]
                filter_ = {filter_string: filter_value}
            if not filter_field.exclude:
                filters.update(filter_)
            else:
                excludes.update(filter_)
        except Exception, e:
            # Per-field errors are swallowed and accumulated into a
            # human-readable message rather than aborting the report.
            message += "Filter Error on %s. If you are using the report builder then " % filter_field.field_verbose
            message += "you found a bug! "
            message += "If you made this in admin, then you probably did something wrong."
def testMonthlyTotal(self):
    """Month-truncated project totals: with the same hours logged every day,
    each monthly bucket must equal days_logged * hours_per_day per user."""
    start = timezone.make_aware(
        datetime.datetime(2011, 1, 1),
        timezone.get_current_timezone(),
    )
    end = timezone.make_aware(
        datetime.datetime(2011, 3, 1),
        timezone.get_current_timezone(),
    )
    trunc = 'month'
    # Randomized fixture: identical day count and per-day hours each month.
    last_day = randint(5, 10)
    worked1 = randint(1, 3)
    worked2 = randint(1, 3)
    for month in xrange(1, 7):  # Python 2 code (xrange)
        for day in xrange(1, last_day + 1):
            day = timezone.make_aware(
                datetime.datetime(2011, month, day),
                timezone.get_current_timezone(),
            )
            self.log_time(start=day, delta=(worked1, 0), user=self.user)
            self.log_time(start=day, delta=(worked2, 0), user=self.user2)
    date_headers = utils.generate_dates(start, end, trunc)
    pj_totals = self.get_project_totals(date_headers, trunc)
    # Each user's monthly total is (days in fixture) * (hours per day).
    for hour in pj_totals[0][0]:
        self.assertEqual(hour, last_day * worked1)
    for hour in pj_totals[0][1]:
        self.assertEqual(hour, last_day * worked2)
def weekly_time_spent_records(self):
    """Group the user's time-spent records into calendar weeks
    (Monday..Sunday), newest week first.

    Returns a list of dicts with keys 'start_date', 'end_date', 'records'.
    """
    weekly_records = []
    records = self.user.issues_time_spent_records.all()
    if records:
        # NOTE(review): .last() is taken as the earliest record, which
        # assumes the queryset's default ordering is newest-first -- confirm
        # against the model's Meta.ordering.
        earliest_record = records.last()
        monday_of_first_week = earliest_record.time_start.date() - \
            timezone.timedelta(days=earliest_record.time_start.weekday())
        # Walk forward one week at a time up to (and including) this week.
        for i in range(0, (timezone.now().date() - monday_of_first_week).days + 1, 7):
            start_date = monday_of_first_week + timezone.timedelta(days=i)
            end_date = start_date + timezone.timedelta(days=6)
            # Bound the week by aware midnight / end-of-day datetimes.
            week_records = records.filter(
                time_start__gte=timezone.make_aware(
                    datetime.datetime.combine(
                        start_date, timezone.datetime.min.time()
                    ), timezone.get_current_timezone()
                ),
                time_start__lte=timezone.make_aware(
                    datetime.datetime.combine(
                        end_date, timezone.datetime.max.time()
                    ), timezone.get_current_timezone()
                )
            )
            week = {}
            week['start_date'] = start_date
            week['end_date'] = end_date
            week['records'] = week_records
            weekly_records.append(week)
        # Newest week first.
        weekly_records.reverse()
    return weekly_records
def process_time(component, default_tz_str=None):
    '''
    Attempts to return a timezone-aware version of the component's datetime,
    trying in this order:
    - If the time string ends in Z, it's UTC
    - If TZID is specified in the component
    - If a default_tz_str is specified
    If these steps fail, the default timezone from the settings is used.
    '''
    dt = component.dt
    try:
        # if its UTC, we'll already have an aware timezone
        if dt.tzinfo:
            return dt
    except AttributeError:
        # TODO: once allday is supported, use it
        # if no timezone attribute, it must be a regular date object. give it a time of midnight
        dt = datetime.datetime.combine(dt, datetime.time())
    # otherwise, we need to find a timezone and localize to it
    tz_str = component.params.get('TZID', default_tz_str)
    if tz_str is None:
        # TODO: log unavailable timezone message
        return timezone.make_aware(dt, timezone.get_current_timezone())
    try:
        return timezone.make_aware(dt, pytz.timezone(tz_str))
    except pytz.exceptions.UnknownTimeZoneError:
        # TODO: log unknown timezone message
        return timezone.make_aware(dt, timezone.get_current_timezone())
def testBillableNonBillable(self):
    """Selecting the 'billable'/'non_billable' columns must produce the same
    totals as filtering with an explicit billable-project Q object."""
    start = timezone.make_aware(
        datetime.datetime(2011, 1, 1),
        timezone.get_current_timezone(),
    )
    day2 = timezone.make_aware(
        datetime.datetime(2011, 1, 2),
        timezone.get_current_timezone(),
    )
    end = timezone.make_aware(
        datetime.datetime(2011, 1, 3),
        timezone.get_current_timezone(),
    )
    self.log_daily(start, day2, end)
    trunc = 'day'
    billableQ = Q(project__type__billable=True)
    non_billableQ = Q(project__type__billable=False)
    date_headers = utils.generate_dates(start, end, trunc)
    # Column-based selection vs. explicit Q filtering, for both flags.
    pj_billable = self.get_project_totals(date_headers, trunc, Q(), 'billable')
    pj_billable_q = self.get_project_totals(date_headers, trunc, billableQ, 'total')
    pj_non_billable = self.get_project_totals(date_headers, trunc, Q(), 'non_billable')
    pj_non_billable_q = self.get_project_totals(date_headers, trunc, non_billableQ, 'total')
    self.assertEqual(list(pj_billable), list(pj_billable_q))
    self.assertEqual(list(pj_non_billable), list(pj_non_billable_q))
def bereken_datum_laatstbijgewerkt():
    """Return the most recent ``updated_at`` among TRIVIAL-priority
    MaandSlaAantallen rows, never earlier than 2015-01-01.
    (Dutch: "compute last-updated date".)"""
    # NOTE(review): attaching get_current_timezone() via tzinfo= misbehaves
    # with pytz zones (LMT offset) -- confirm the configured timezone
    # implementation, or that the 2015-01-01 floor need not be exact.
    datum_laatstbijgewerkt = datetime(2015, 1, 1, tzinfo=timezone.get_current_timezone())
    for rij in MaandSlaAantallen.objects.filter(prioriteit=TRIVIAL):
        updated_at = rij.updated_at
        # Keep the latest timestamp seen so far.
        if updated_at >= datum_laatstbijgewerkt:
            datum_laatstbijgewerkt = updated_at
    return datum_laatstbijgewerkt.astimezone(timezone.get_current_timezone())
def venue_event_feed(request, pk):
    """AJAX feed of a venue's appointment occurrences between the ``start``
    and ``end`` epoch-second GET parameters, serialized as
    fullcalendar-style JSON.  Raises 404 for any non-AJAX/incomplete
    request."""
    venue = get_object_or_404(Venue, pk=pk)
    if request.is_ajax() and request.method == 'GET':
        if 'start' in request.GET and 'end' in request.GET:
            # Window bounds arrive as epoch seconds; localize to current tz.
            fro = timezone.make_aware(
                datetime.fromtimestamp(float(request.GET['start'])),
                timezone.get_current_timezone())
            to = timezone.make_aware(
                datetime.fromtimestamp(float(request.GET['end'])),
                timezone.get_current_timezone())
            # Only events tied to an appointment of this customer and venue.
            period = Period(Event.objects.exclude(appointment=None).filter(
                appointment__customer=request.user.userprofile.customer).filter(
                appointment__venue=venue), fro, to)
            # Serialize each occurrence whose event still has an appointment.
            data = [{'id': x.event.appointment_set.first().pk,
                     'title': "{}".format(x.event.appointment_set.first().venue_display_name),
                     'userId': [x.event.appointment_set.first().venue.pk],
                     'start': x.start.isoformat(),
                     'end': x.end.isoformat(),
                     'clientId': x.event.appointment_set.first().clientId,
                     'status': x.event.appointment_set.first().status,
                     'tag': getattr(x.event.appointment_set.first().tag, 'html_name', ""),
                     'body': x.event.description
                     } for x in period.get_occurrences()
                    if x.event.appointment_set.first()]
            return HttpResponse(json.dumps(data), content_type="application/json")
    # if all fails
    raise Http404
def add(request):
    """Create a Schedule row from POSTed form fields and return a JSON
    result ({'res': bool[, 'msg': ...]})."""
    # Renamed from ``id`` to avoid shadowing the builtin.
    schedule_id = request.POST.get('id', None)
    start_time = request.POST.get('start_date', None)
    end_time = request.POST.get('end_date', None)
    remarks = request.POST.get('remarks')
    # Any non-empty value means "public"; missing/empty means private.
    public = bool(request.POST.get('public'))
    if not schedule_id or not start_time or not end_time:
        return render_to_response_json({'res': False})
    ap = Schedule()
    ap.id = schedule_id
    ap.user = request.user
    tz = get_current_timezone()
    ap.start_time = make_aware(datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S'), tz)
    ap.end_time = make_aware(datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S'), tz)
    ap.remarks = remarks
    ap.public = public
    try:
        ap.save()
    except Exception:
        # BUG FIX: was a bare ``except:`` which also swallowed SystemExit
        # and KeyboardInterrupt.
        return render_to_response_json({'res': False, 'msg': '保存失败'})
    return render_to_response_json({'res': True})
def filterLeadsBySource(request, id):
    """Paginated JSON list of a company's leads filtered by analytics source
    and a created-date window (epoch-second GET parameters).

    NOTE(review): ``local_start_date``/``local_end_date`` are only bound
    when the matching GET parameter is present, yet both are always used in
    the query below -- a missing parameter raises NameError (surfaced as the
    generic error JSON).
    """
    user_id = request.user.id
    company_id = request.user.company_id
    start_date = request.GET.get('start_date')
    end_date = request.GET.get('end_date')
    source = request.GET.get('source')
    query_type = request.GET.get('query_type')
    page_number = int(request.GET.get('page_number'))
    items_per_page = int(request.GET.get('per_page'))
    offset = (page_number - 1) * items_per_page
    # ``localize`` is the pytz API for attaching a zone to a naive datetime.
    if start_date is not None:
        local_start_date_naive = datetime.fromtimestamp(float(start_date))
        local_start_date = get_current_timezone().localize(local_start_date_naive, is_dst=None)
    if end_date is not None:
        local_end_date_naive = datetime.fromtimestamp(float(end_date))
        local_end_date = get_current_timezone().localize(local_end_date_naive, is_dst=None)
    utc_current_date = datetime.utcnow()
    #print 'filter start us ' + str(local_start_date) + ' and edn is ' + str(local_end_date)
    try:
        # MongoEngine-style query built from dotted field paths.
        start_date_field_qry = 'leads__hspt__properties__createdate__gte'
        end_date_field_qry = 'leads__hspt__properties__createdate__lte'
        source_field_qry = 'leads__hspt__properties__hs_analytics_source'
        company_field_qry = 'company_id'
        querydict = {company_field_qry: company_id,
                     source_field_qry: source,
                     start_date_field_qry: local_start_date,
                     end_date_field_qry: local_end_date}
        total = Lead.objects(**querydict).count()
        leads = Lead.objects(**querydict).skip(offset).limit(items_per_page)
        serializer = LeadSerializer(leads, many=True)
        return JsonResponse({'count': total, 'results': serializer.data})
    except Exception as e:
        # Any failure is reported to the caller as an error payload.
        return JsonResponse({'Error': str(e)})
def crearventaProducto(request):
    """Sales view (Spanish domain model): on POST 'create' turn a pre-sale
    (PreVenta) into a definitive invoice (Venta); on 'preview' save edits
    back onto the pre-sale; on 'cancel' discard it; otherwise render the
    sale form."""
    if request.method == 'POST' and 'create' in request.POST:
        # Consume the next invoice number from the active billing resolution.
        _resolucionFactura = ResolucionFacturacion.objects.filter(Activo=True)[0]
        _base = _resolucionFactura.Base
        _numero = _resolucionFactura.Consecutivo_Inicial + _resolucionFactura.Secuencia
        _resolucionFactura.Secuencia = _resolucionFactura.Secuencia + 1
        _resolucionFactura.save()
        _numeroFactura = _base + str(_numero)
        _empleado = Empleado.objects.get(Documento=request.POST['selEmpleado'])
        _tz = get_current_timezone()
        # NOTE(review): the date is parsed naive; the localized variant is
        # commented out, so Fecha is stored without timezone info.
        #_fecha = _tz.localize(datetime.strptime(request.POST['txtFechaFactura'], '%d/%m/%Y'))
        _fecha = datetime.strptime(request.POST['txtFechaFactura'], '%d/%m/%Y')
        _cliente = Cliente.objects.get(Documento=request.POST['selCliente'])
        _observaciones = request.POST['txtObservaciones']
        _impuesto = request.POST['txtImpuesto']
        _total = request.POST['txtTotal']
        _formaPago = FormaPago.objects.get(id=request.POST['selFormaPago'])
        _factura = Venta.objects.create(Numero_Factura_Venta=_numeroFactura,
                                        Fecha=_fecha,
                                        Valor_Total_Compra=_total,
                                        Forma_Pago=_formaPago,
                                        Observaciones=_observaciones,
                                        Empleado=_empleado,
                                        Cliente=_cliente,
                                        Impuesto=_impuesto)
        # Move every pre-sale detail line onto the new invoice, then remove
        # the pre-sale.
        _preventa = PreVenta.objects.get(id=request.POST['_id'])
        _preDetallesVenta = PreDetalleVenta.objects.filter(PreVenta=_preventa)
        for _predetalleVenta in _preDetallesVenta:
            _detalleVenta = DetalleVenta.objects.create(
                Descuento=_predetalleVenta.Descuento,
                Cantidad=_predetalleVenta.Cantidad,
                Observaciones=_predetalleVenta.Observaciones,
                Producto=_predetalleVenta.Producto,
                Numero_Factura=_factura,
                Valor_Unitario=_predetalleVenta.Valor_Unitario,
                SubTotal=_predetalleVenta.SubTotal)
            _predetalleVenta.delete()
        _preventa.delete()
        context = {
            'factura': _factura,
            'empresa': '899999034-1',
        }
        return render(request, 'web/ventaPost.html', context)
    elif request.method == 'POST' and 'preview' in request.POST:
        _empleado = Empleado.objects.get(Documento=request.POST['selEmpleado'])
        _tz = get_current_timezone()
        #_fecha = _tz.localize(datetime.strptime(request.POST['txtFechaFactura'], '%d/%m/%Y'))
        _fecha = datetime.strptime(request.POST['txtFechaFactura'], '%d/%m/%Y')
        _cliente = Cliente.objects.get(Documento=request.POST['selCliente'])
        _observaciones = request.POST['txtObservaciones']
        _impuesto = request.POST['txtImpuesto']
        _total = request.POST['txtTotal']
        _formaPago = FormaPago.objects.get(id=request.POST['selFormaPago'])
        # Persist the edited values back onto the pre-sale.
        _preventa = PreVenta.objects.get(id=request.POST['_id'])
        _preventa.Fecha = _fecha
        _preventa.Valor_Total_Compra = _total
        _preventa.Forma_Pago = _formaPago
        _preventa.Observaciones = _observaciones
        _preventa.Empleado = _empleado
        _preventa.Cliente = _cliente
        _preventa.Impuesto = _impuesto
        _preventa.save()
        return redirect('/web/')
    elif request.method == 'POST' and 'cancel' in request.POST:
        # Discard the pre-sale and all of its detail lines.
        _preventa = PreVenta.objects.get(id=request.POST['_id'])
        PreDetalleVenta.objects.filter(PreVenta=_preventa).delete()
        _preventa.delete()
        return redirect('/web/')
    else:
        # GET: render the sale form with selector data.
        empleados_list = Empleado.objects.all()
        clientes_list = Cliente.objects.all()
        forma_pago_list = FormaPago.objects.all()
        #pre_Venta = PreVenta.objects.create(Valor_Total_Compra=0)
        context = {
            'empleados_list': empleados_list,
            'clientes_list': clientes_list,
            'forma_pago_list': forma_pago_list}
        return render(request, 'web/venta.html', context)
def _fetch_meta():
    """Fetch daily OHLCV history for every Stock from Yahoo's CSV table
    service and append rows newer than each stock's latest stored record.
    (Python 2 code: uses httplib and reload().)"""
    # Sentinel "earliest possible" timestamp for stocks with no rows yet.
    mintime = timezone.make_aware(datetime.min, timezone.get_current_timezone())
    conn = httplib.HTTPConnection("table.finance.yahoo.com", timeout=10)
    stocks = Stock.objects.all()
    for stock in stocks:
        id = str(stock.stock_id)
        # Yahoo symbol: strip the 2-char market prefix; "sz" stays, anything
        # else maps to "ss".
        code = "%s.%s" % (id[2:], "sz" if id[0:2] == "sz" else "ss")
        # Each stock gets its own dynamically created model class/table.
        metaclass = create_stock_metaclass(id)
        create_stock_metatable(metaclass)
        count = metaclass.objects.count()
        latest = metaclass.objects.order_by("-time")[0].time if count > 0 else mintime
        content = getpage(conn, "/table.csv?s=%s" % code)
        if content is not None:
            # Skip the CSV header row.
            srecords = content.split('\n')[1:]
            records = [x.split(',') for x in srecords]
            wstocks = []
            for record in records:
                # Expected columns: date, open, high, low, close, vol, adj.
                if len(record) != 7:
                    continue
                time = datetime.strptime(record[0], "%Y-%m-%d")
                time = timezone.make_aware(time, timezone.get_current_timezone())
                # Only append rows newer than what is already stored.
                if time > latest:
                    m = metaclass(time=time, open=float(record[1]),
                                  high=float(record[2]), low=float(record[3]),
                                  close=float(record[4]), vol=int(record[5]),
                                  adj=float(record[6]))
                    wstocks.append(m)
            if len(wstocks) > 0:
                metaclass.objects.bulk_create(wstocks)
    conn.close()
    # New per-stock tables may add URLs; rebuild the URL conf and its caches.
    reload(import_module(settings.ROOT_URLCONF))
    clear_url_caches()
def get_today_time_range():
    """Return ``(start, end)`` timezone-aware datetimes spanning the current
    local day (00:00:00.000000 .. 23:59:59.999999)."""
    tz = timezone.get_current_timezone()
    now = datetime.now()
    # BUG FIX: the original attached the zone with .replace(tzinfo=...),
    # which yields wrong (LMT) offsets for pytz timezones; make_aware()
    # localizes correctly.
    startTime = timezone.make_aware(datetime.combine(now, time.min), tz)
    # BUG FIX: microseconds were 99999 -- one digit short of the last
    # representable instant of the day.
    endTime = timezone.make_aware(
        datetime.combine(now, time(23, 59, 59, 999999)), tz)
    return startTime, endTime
def test_timer(self):
    """Waiver-period helpers must classify known weekday/hour combinations.
    (Python 2 test: print statements.)"""
    print ''
    print '------------------- Testing timer functions: --------------------------'
    # monday
    t_2_w = datetime.datetime(year=2015, month=11, day=2, hour=14)
    # tuesday
    t_1_w = datetime.datetime(year=2015, month=11, day=3, hour=7)
    t_1_w = timezone.make_aware(t_1_w, timezone.get_current_timezone())
    t_2_w = timezone.make_aware(t_2_w, timezone.get_current_timezone())
    # Trailing comma keeps the label and " OK" on the same output line.
    print '%40s' % ('Monday 14:00 is waiver period 2:'),
    self.assertTrue( util.is_2_waiver_period(t = t_2_w) )
    print ' OK'
    print '%40s' % ('Tuesday 07:00 is waiver period 1:'),
    self.assertTrue( util.is_1_waiver_period(t = t_1_w) )
    print ' OK'
    # wednesday
    t_2_w = datetime.datetime(year=2015, month=11, day=4, hour=14)
    t_2_w = timezone.make_aware(t_2_w, timezone.get_current_timezone())
    print '%40s' % ('Wednesday 14:00 is waiver period 2:'),
    self.assertTrue( util.is_2_waiver_period(t = t_2_w) )
    print ' OK'
def get_calendar(self, start, end, target_date, end_date):
    """Return the room's slot calendar annotated with a status per block:
    'reserved' (covered by an event), 'searched' (inside the requested
    start/end window), else 'available'.

    ``start``/``end`` are "HH:MM" strings; ``target_date``/``end_date``
    bound the events considered.
    """
    calendar = self.get_raw_calendar()
    search_start = start.replace(':', '')
    search_end = end.replace(':', '')
    events = self.event_set.filter(room=self, start_ts__gte=target_date,
                                   end_ts__lte=end_date)
    # BUG FIX: removed a stray debugging ``print`` statement.
    # BUG FIX: the original reset every block per event, so a later event's
    # 'available' branch clobbered an earlier event's 'reserved' marks.
    # Default all blocks once, then mark reservations additively.
    for block in calendar:
        block['status'] = 'available'
    for event in events:
        start_wtz = timezone.make_naive(event.start_ts, timezone.get_current_timezone())
        end_wtz = timezone.make_naive(event.end_ts, timezone.get_current_timezone())
        starts = int(start_wtz.strftime('%H%M'))
        ends = int(end_wtz.strftime('%H%M'))
        for block in calendar:
            # ``block_id`` renamed from ``id`` (shadowed the builtin).
            block_id = int(block['mil_hour'] + block['minutes'])
            if starts <= block_id <= ends:
                block['status'] = 'reserved'
    # The searched window overrides both other states.
    for block in calendar:
        block_id = int(block['mil_hour'] + block['minutes'])
        if int(search_start) <= block_id <= int(search_end):
            block['status'] = 'searched'
    return calendar
def update(request):
    """Update an existing Schedule row (owner only) from POSTed form fields
    and return a JSON result ({'res': bool[, 'msg': ...]})."""
    # Renamed from ``id`` to avoid shadowing the builtin.
    schedule_id = request.POST.get('id', None)
    start_time = request.POST.get('start_date', None)
    end_time = request.POST.get('end_date', None)
    remarks = request.POST.get('remarks')
    # Only the literal string 'false' means private; anything else is public.
    public = request.POST.get('public') != 'false'
    if not schedule_id or not start_time or not end_time:
        return render_to_response_json({'res': False})
    try:
        obj = Schedule.objects.get(pk=schedule_id)
    except Exception:
        # BUG FIX: was a bare ``except:``; Exception still covers both
        # DoesNotExist and malformed-pk errors without trapping SystemExit.
        return render_to_response_json({'res': False, 'msg': '参数有误'})
    # Only the owner may modify the record.
    if obj.user != request.user:
        data = {'res': False, 'msg': '您没有权限修改该记录'}
        return render_to_response_json(data)
    tz = get_current_timezone()
    obj.start_time = make_aware(datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S'), tz)
    obj.end_time = make_aware(datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S'), tz)
    obj.remarks = remarks
    obj.public = public
    try:
        obj.save()
    except Exception:
        # BUG FIX: was a bare ``except:``.
        return render_to_response_json({'res': False, 'msg': '保存失败'})
    return render_to_response_json({'res': True})
def keywords():
    """Create various keywords for testing different options."""
    # keywords:
    # NOTE(review): Keyword.objects.create() already saves each instance, so
    # every subsequent .save() in this fixture is redundant.
    test = Keyword.objects.create(
        keyword="test",
        description="This is an active test keyword with custom response",
        custom_response="Test custom response with %name%",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1970 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
        deactivate_time=timezone.make_aware(
            datetime.strptime('Jun 1 2400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test.save()
    # active with custom response
    test2 = Keyword.objects.create(
        keyword="2test",
        description="This is an active test keyword with no custom response",
        custom_response="",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1970 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
        deactivate_time=timezone.make_aware(
            datetime.strptime('Jun 1 2400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test2.save()
    # active with custom response
    test_expired = Keyword.objects.create(
        keyword="expired_test",
        description="This is an expired test keyword with no custom response",
        custom_response="",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1970 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
        deactivate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1975 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test_expired.save()
    # not yet active with custom response
    test_early = Keyword.objects.create(
        keyword="early_test",
        description="This is a not yet active test keyword "
                    "with no custom response",
        custom_response="",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 2400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
        deactivate_time=timezone.make_aware(
            datetime.strptime('Jun 1 2400 1:35PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test_early.save()
    test_no_end = Keyword.objects.create(
        keyword="test_no_end",
        description="This has no end",
        custom_response="Will always reply",
        activate_time=timezone.make_aware(
            datetime.strptime(
                'Jun 1 1400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test_no_end.save()
    # test deactivated response
    test_deac_resp_fail = Keyword.objects.create(
        keyword="test_cust_endf",
        description="This has a diff reply",
        custom_response="Hi!",
        deactivated_response="Too slow, Joe!",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    test_deac_resp = Keyword.objects.create(
        keyword="test_cust_end",
        description="This has a diff reply",
        custom_response="Just in time!",
        deactivated_response="Too slow, Joe!",
        deactivate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1400 2:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 1400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()))
    # NOTE(review): likely a copy/paste slip -- this saves test_no_end again
    # instead of test_deac_resp/test_deac_resp_fail (harmless, since
    # create() already saved them).
    test_no_end.save()
    test_early_with_response = Keyword.objects.create(
        keyword="early_test2",
        description="This is a not yet active test keyword"
                    "with a custom response",
        too_early_response="This is far too early",
        activate_time=timezone.make_aware(
            datetime.strptime('Jun 1 2400 1:33PM', '%b %d %Y %I:%M%p'),
            get_current_timezone()),
    )
    # NOTE(review): saves test_early, not test_early_with_response (also
    # harmless -- see above).
    test_early.save()
    test_do_not_reply = Keyword.objects.create(
        keyword='donotreply',
        disable_all_replies=True,
    )
    test_do_not_reply.save()
    keywords = {
        'test': test,
        'test2': test2,
        'test_expired': test_expired,
        'test_early': test_early,
        'test_no_end': test_no_end,
        'test_deac_resp': test_deac_resp,
        'test_deac_resp_fail': test_deac_resp_fail,
        'test_early_with_response': test_early_with_response,
        'test_do_not_reply': test_do_not_reply,
    }
    return keywords
Article.objects.create( title='Markdown 与代码高亮测试', body=pathlib.Path(BASE_DIR).joinpath('utils', 'md.sample').read_text(encoding='utf-8'), category=Category.objects.create(name='Markdown测试'), author=user, ) print('create some faked posts published within the past year') fake = faker.Faker() # English for _ in range(100): tags = Tag.objects.order_by('?') tag1 = tags.first() tag2 = tags.last() cate = Category.objects.order_by('?').first() created_time = fake.date_time_between(start_date='-1y', end_date="now", tzinfo=timezone.get_current_timezone()) post = Article.objects.create( title=fake.sentence().rstrip('.'), body='\n\n'.join(fake.paragraphs(10)), create_time=created_time, category=cate, author=user, ) post.tags.add(tag1, tag2) post.save() fake = faker.Faker('zh_CN') for _ in range(100): # Chinese tags = Tag.objects.order_by('?') tag1 = tags.first() tag2 = tags.last()
def timestamp_to_datetime(timestamp):
    """Convert a POSIX timestamp to an aware datetime in the active timezone."""
    return datetime.fromtimestamp(timestamp, get_current_timezone())
def get_query(self):
    """Build and return the QuerySet for this report.

    Starts from all objects of the report's root model, then applies the
    report's FilterField rows (grouped into single .filter()/.exclude()
    calls), aggregates, and optional DISTINCT.

    NOTE(review): filter errors are swallowed per-field and accumulated into
    a local ``message`` string that is never returned or logged from here --
    confirm whether callers expect it.
    """
    report = self
    model_class = report.root_model.model_class()
    message = ""
    objects = model_class.objects.all()

    # Filters
    # NOTE: group all the filters together into one in order to avoid
    # unnecessary joins
    filters = {}
    excludes = {}
    for filter_field in report.filterfield_set.all():
        try:
            # exclude properties and custom fields from standard ORM filtering
            if '[property]' in filter_field.field_verbose:
                continue
            if '[custom' in filter_field.field_verbose:
                continue
            # Build the ORM lookup key, e.g. "path__field__filtertype".
            filter_string = str(filter_field.path + filter_field.field)
            if filter_field.filter_type:
                filter_string += '__' + filter_field.filter_type
            # Check for special types such as isnull
            if filter_field.filter_type == "isnull" and filter_field.filter_value == "0":
                filter_ = {filter_string: False}
            elif filter_field.filter_type == "in":
                filter_ = {filter_string: filter_field.filter_value.split(',')}
            else:
                # All filter values are stored as strings, but may need to be converted
                if '[Date' in filter_field.field_verbose:
                    filter_value = parser.parse(filter_field.filter_value)
                    if settings.USE_TZ:
                        filter_value = timezone.make_aware(
                            filter_value, timezone.get_current_timezone()
                        )
                    if filter_field.filter_type == 'range':
                        # Range uses [value, value2], both made aware under USE_TZ.
                        filter_value = [filter_value, parser.parse(filter_field.filter_value2)]
                        if settings.USE_TZ:
                            filter_value[1] = timezone.make_aware(
                                filter_value[1], timezone.get_current_timezone()
                            )
                else:
                    filter_value = filter_field.filter_value
                    if filter_field.filter_type == 'range':
                        filter_value = [filter_value, filter_field.filter_value2]
                filter_ = {filter_string: filter_value}
            if not filter_field.exclude:
                filters.update(filter_)
            else:
                excludes.update(filter_)
        except Exception:
            import sys
            e = sys.exc_info()[1]
            message += "Filter Error on %s. If you are using the report builder then " % filter_field.field_verbose
            message += "you found a bug! "
            message += "If you made this in admin, then you probably did something wrong."
    if filters:
        objects = objects.filter(**filters)
    if excludes:
        objects = objects.exclude(**excludes)

    # Aggregates
    objects = self.add_aggregates(objects)

    # Distinct
    if report.distinct:
        objects = objects.distinct()
    return objects
def update(request, instance, validated_data, widget):
    """Stamp today's date (in the active timezone) into validated_data under the widget's name."""
    today = datetime.now(tz=get_current_timezone()).date()
    validated_data[widget['name']] = today
def get_date_posted(self, obj):
    """Return ``obj.date_posted`` localized to the active timezone, ISO-8601 formatted.

    NOTE(review): ``template_localtime``'s ``use_tz`` parameter is documented
    as a boolean flag; a tzinfo object is passed here and only acts as a
    truthy value (conversion still targets the *current* timezone, not this
    object). Confirm intent -- ``timezone.localtime(obj.date_posted)`` may be
    what was meant.
    """
    user_local_time = timezone.template_localtime(
        obj.date_posted, use_tz=timezone.get_current_timezone())
    return user_local_time.isoformat()
def setUp(self):
    """Load the shared fixtures: user #1, question 'question-1', and a fixed aware date."""
    tz = timezone.get_current_timezone()
    self.current_date = datetime.datetime(2020, 10, 21, tzinfo=tz)
    self.user = User.objects.get(id=1)
    self.question = Question.objects.get(slug='question-1')
def metadata_post_save(instance, *args, **kwargs):
    """Post-save signal handler that fixes up dataset/resource metadata.

    Ensures a UUID (custom handler or uuid1), a valid bbox, an owner, a
    license, thumbnail/detail URLs and a CSW insert date stamped in the
    active timezone; then mirrors those fields onto the ResourceBase row,
    auto-assigns intersecting Regions (best-effort), and finally refreshes
    the catalogue record.
    """
    logger.debug("handling UUID In pre_save_dataset")
    # Optional project-specific UUID handler, only for Dataset instances.
    if isinstance(instance, Dataset) and hasattr(
            settings, 'LAYER_UUID_HANDLER') and settings.LAYER_UUID_HANDLER != '':
        logger.debug("using custom uuid handler In pre_save_dataset")
        from ..layers.utils import get_uuid_handler
        _uuid = get_uuid_handler()(instance).create_uuid()
        if _uuid != instance.uuid:
            instance.uuid = _uuid
            Dataset.objects.filter(id=instance.id).update(uuid=_uuid)

    # Fixup bbox: default to the whole world when missing.
    if instance.bbox_polygon is None:
        instance.set_bbox_polygon((-180, -90, 180, 90), 'EPSG:4326')
    instance.set_bounds_from_bbox(instance.bbox_polygon, instance.srid or instance.bbox_polygon.srid)

    # Set a default user for accountstream to work correctly.
    if instance.owner is None:
        instance.owner = get_valid_user()

    if not instance.uuid:
        instance.uuid = str(uuid.uuid1())

    # set default License if not specified
    if instance.license is None:
        license = License.objects.filter(name="Not Specified")
        if license and len(license) > 0:
            instance.license = license[0]

    instance.thumbnail_url = instance.get_thumbnail_url()
    instance.detail_url = instance.get_absolute_url()
    # CSW insert date is stamped in the currently active timezone.
    instance.csw_insert_date = datetime.datetime.now(
        timezone.get_current_timezone())
    instance.set_missing_info()

    # Mirror the derived fields onto the base resource row (bypasses save()).
    ResourceBase.objects.filter(id=instance.id).update(
        uuid=instance.uuid,
        srid=instance.srid,
        alternate=instance.alternate,
        bbox_polygon=instance.bbox_polygon,
        thumbnail_url=instance.get_thumbnail_url(),
        detail_url=instance.get_absolute_url(),
        csw_insert_date=datetime.datetime.now(timezone.get_current_timezone()))

    try:
        # Auto-assign regions only when none are set yet.
        if not instance.regions or instance.regions.count() == 0:
            srid1, wkt1 = instance.geographic_bounding_box.split(";")
            srid1 = re.findall(r'\d+', srid1)
            poly1 = GEOSGeometry(wkt1, srid=int(srid1[0]))
            poly1.transform(4326)

            queryset = Region.objects.all().order_by('name')
            global_regions = []
            regions_to_add = []
            for region in queryset:
                try:
                    srid2, wkt2 = region.geographic_bounding_box.split(";")
                    srid2 = re.findall(r'\d+', srid2)
                    poly2 = GEOSGeometry(wkt2, srid=int(srid2[0]))
                    poly2.transform(4326)
                    if poly2.intersection(poly1):
                        regions_to_add.append(region)
                    # Remember top-level regions as a fallback.
                    if region.level == 0 and region.parent is None:
                        global_regions.append(region)
                except Exception:
                    tb = traceback.format_exc()
                    if tb:
                        logger.debug(tb)
            if regions_to_add or global_regions:
                # Cap at 30 specific regions; otherwise fall back to globals.
                if regions_to_add and len(regions_to_add) > 0 and len(
                        regions_to_add) <= 30:
                    instance.regions.add(*regions_to_add)
                else:
                    instance.regions.add(*global_regions)
    except Exception:
        tb = traceback.format_exc()
        if tb:
            logger.debug(tb)
    finally:
        # refresh catalogue metadata records
        from ..catalogue.models import catalogue_post_save
        catalogue_post_save(instance=instance, sender=instance.__class__)
def timestamp_to_datetime(timestamp, replace_tz=True):
    """Convert a POSIX timestamp to a datetime in the system's local wall time.

    :param timestamp: epoch seconds (anything acceptable to int()).
    :param replace_tz: when True, attach the currently active Django timezone.
    :return: naive datetime when replace_tz is False, aware otherwise.
    """
    dt = datetime.datetime.fromtimestamp(int(timestamp))
    if replace_tz:
        # Bug fix: the original used dt.replace(tzinfo=...), which for pytz
        # timezones attaches the zone's solar-time (LMT) offset instead of
        # the correct UTC offset. make_aware() localizes properly for both
        # pytz and zoneinfo timezones.
        dt = timezone.make_aware(dt, timezone.get_current_timezone())
    return dt
class Application(object):
    """
    Basic application class.

    Application combined by set of methods, decorated with @view.
    Each method accepts requests and returns reply.
    (Legacy Python 2 code: uses unicode/basestring/iteritems/im_self.)
    """
    __metaclass__ = ApplicationBase
    title = "APPLICATION TITLE"
    icon = "icon_application"
    glyph = "file"
    extra_permissions = []  # List of additional permissions, not related with views
    implied_permissions = {}  # permission -> list of implied permissions
    Form = NOCForm  # Shortcut for form class
    config = settings.config
    # Active timezone captured once at class-definition (import) time.
    TZ = get_current_timezone()
    METRICS = []

    def __init__(self, site):
        # Derive module/app identity from the dotted module path
        # ("noc.<module>.apps.<app>...").
        self.site = site
        parts = self.__class__.__module__.split(".")
        self.module = parts[1]
        self.app = parts[3]
        self.module_title = __import__("noc.%s" % self.module, {}, {},
                                       ["MODULE_NAME"]).MODULE_NAME
        self.app_id = "%s.%s" % (self.module, self.app)
        self.menu_url = None  # Set by site.autodiscover()
        self.logger = logging.getLogger(self.app_id)
        metrics = []
        self.metrics = MetricsHub(
            (_daemon.metrics if _daemon else "noc.") + "apps.%s." % self.app_id,
            *(metrics + self.METRICS))

    @classmethod
    def add_to_class(cls, name, value):
        """Attach *value* to the class, honoring contribute_to_class hooks."""
        if hasattr(value, "contribute_to_class"):
            value.contribute_to_class(cls, name)
        else:
            setattr(cls, name, value)

    def set_app(self, app):
        # Hook for subclasses; intentionally a no-op here.
        pass

    @classmethod
    def add_view(cls, name, func, url, access, url_name=None, menu=None,
                 method=None, validate=None, api=False):
        """Register *func* as a named @view on the class and the global site."""
        # Decorate function to clear attributes
        f = functools.partial(func)
        f.im_self = func.im_self
        f.__name__ = func.__name__
        # Add to class
        cls.add_to_class(
            name,
            view(url=url, access=access, url_name=url_name, menu=menu,
                 method=method, validate=validate, api=api)(f))
        site.add_contributor(cls, func.im_self)

    @property
    def js_app_class(self):
        # Default desktop JS class; subclasses override for rich UIs.
        return "NOC.main.desktop.IFramePanel"

    def get_launch_info(self, request):
        """
        Return desktop launch information
        """
        from noc.main.models import Permission
        user = request.user
        ps = self.get_app_id().replace(".", ":") + ":"
        lps = len(ps)
        # Prefer the session-cached permission list when available.
        if "PERMISSIONS" in request.session:
            perms = request.session["PERMISSIONS"]
        else:
            perms = Permission.get_effective_permissions(user)
        # Keep only this app's permissions, stripped of the app prefix.
        perms = [p[lps:] for p in perms if p.startswith(ps)]
        return {
            "class": self.js_app_class,
            "title": unicode(self.title),
            "params": {
                "url": self.menu_url,
                "permissions": perms,
                "app_id": self.app_id
            }
        }

    @classmethod
    def get_app_id(cls):
        """
        Returns application id
        """
        parts = cls.__module__.split(".")
        return "%s.%s" % (parts[1], parts[3])

    @property
    def base_url(self):
        """
        Application's base URL
        """
        return "/%s/%s/" % (self.module, self.app)

    def reverse(self, url, *args, **kwargs):
        """
        Reverse URL name to URL
        """
        return self.site.reverse(url, *args, **kwargs)

    def message_user(self, request, message):
        """
        Send a message to user
        """
        messages.info(request, unicode(message))

    def get_template_path(self, template):
        """
        Return path to named template
        """
        if isinstance(template, basestring):
            template = [template]
        r = []
        # Search order: app templates, module templates, global templates.
        for t in template:
            r += [
                os.path.join(self.module, "apps", self.app, "templates", t),
                os.path.join(self.module, "templates", t),
                os.path.join("templates", t)
            ]
        return r

    def get_object_or_404(self, *args, **kwargs):
        """
        Shortcut to get_object_or_404
        """
        if hasattr(args[0], "_fields"):
            # Document (MongoEngine-style model)
            r = args[0].objects.filter(**kwargs).first()
            if not r:
                raise Http404("No %s matching given query" % args[0])
            return r
        else:
            # Django model
            return get_object_or_404(*args, **kwargs)

    def render(self, request, template, dict={}, **kwargs):
        """
        Render template within context
        (NOTE(review): mutable default argument ``dict={}`` -- kept as-is.)
        """
        return render_to_response(self.get_template_path(template),
                                  dict if dict else kwargs,
                                  context_instance=RequestContext(
                                      request, {"app": self}))

    def render_template(self, template, dict={}, **kwargs):
        """
        Render template to string
        """
        tp = self.get_template_path(template)
        return loader.render_to_string(tp, dict or kwargs)

    def render_response(self, data, content_type="text/plain"):
        """
        Render arbitrary Content-Type response
        """
        return HttpResponse(data, content_type=content_type)

    def render_plain_text(self, text, mimetype="text/plain"):
        """
        Render plain/text response
        """
        return HttpResponse(text, mimetype=mimetype)

    def render_json(self, obj, status=200):
        """
        Create serialized JSON-encoded response
        """
        return HttpResponse(json_encode(obj), mimetype="text/json",
                            status=status)

    def render_success(self, request, subject=None, text=None):
        """
        Render "success" page
        """
        return self.site.views.main.message.success(request, subject=subject,
                                                    text=text)

    def render_failure(self, request, subject=None, text=None):
        """
        Render "failure" page
        """
        return self.site.views.main.message.failure(request, subject=subject,
                                                    text=text)

    def render_wait(self, request, subject=None, text=None, url=None,
                    timeout=5, progress=None):
        """
        Render wait page
        """
        return self.site.views.main.message.wait(request, subject=subject,
                                                 text=text, timeout=timeout,
                                                 url=url, progress=progress)

    def render_static(self, request, path, document_root=None):
        """Serve a static file below document_root (defaults to the app's)."""
        document_root = document_root or self.document_root
        return serve_static(request, path, document_root=document_root)

    def response_redirect(self, url, *args, **kwargs):
        """
        Redirect to URL
        """
        # A ":" marks a named URL that must be reversed first.
        if ":" in url:
            url = self.reverse(url, *args, **kwargs)
        return HttpResponseRedirect(url)

    def response_redirect_to_referrer(self, request, back_url=None):
        """
        Redirect to referrer page
        """
        if back_url is None:
            back_url = self.base_url
        return self.response_redirect(
            request.META.get("HTTP_REFERER", back_url))

    def response_redirect_to_object(self, object):
        """
        Redirect to object: {{base.url}}/{{object.id}}/
        """
        return self.response_redirect("%s%d/" % (self.base_url, object.id))

    def response_forbidden(self, text=None):
        """
        Render Forbidden response
        """
        return HttpResponseForbidden(text)

    def response_not_found(self, text=None):
        """
        Render Not Found response
        """
        return HttpResponseNotFound(text)

    def response_bad_request(self, text=None):
        """
        Render 400 Bad Request
        :param text:
        :return:
        """
        return HttpResponse(text, status=400)

    def response_accepted(self, location=None):
        """
        Render 202 Accepted
        :param location:
        :return:
        """
        r = HttpResponse("", status=202)
        if location:
            r["Location"] = location
        return r

    def close_popup(self, request):
        """
        Render javascript closing popup window
        """
        return self.render(request, "close_popup.html")

    def html_escape(self, s):
        """
        Escape HTML
        """
        return escape(s)

    ##
    ## Logging
    ##
    def debug(self, message):
        self.logger.debug(message)

    def error(self, message):
        self.logger.error(message)

    def cursor(self):
        """
        Returns db cursor
        """
        return connection.cursor()

    def execute(self, sql, args=[]):
        """
        Execute SQL query
        (NOTE(review): mutable default argument ``args=[]`` -- kept as-is.)
        """
        cursor = self.cursor()
        cursor.execute(sql, args)
        return cursor.fetchall()

    def lookup(self, request, func):
        """
        AJAX lookup wrapper
        @todo: Remove
        """
        result = []
        if request.GET and "q" in request.GET:
            q = request.GET["q"]
            if len(q) > 2:  # Ignore requests shorter than 3 letters
                result = list(func(q))
        return self.render_plain_text("\n".join(result))

    def lookup_json(self, request, func, id_field="id", name_field="name"):
        """
        Ajax lookup wrapper, returns JSON list of hashes
        """
        result = []
        if request.GET and "q" in request.GET:
            q = request.GET["q"]
            for r in func(q):
                result += [{id_field: r, name_field: r}]
        return self.render_json(result)

    def get_views(self):
        """
        Iterator returning application views
        """
        # A view is any attribute carrying a "url" attribute (set by @view).
        for n in [v for v in dir(self) if hasattr(getattr(self, v), "url")]:
            yield getattr(self, n)

    def get_permissions(self):
        """
        Return a set of permissions, used by application
        """
        prefix = self.get_app_id().replace(".", ":")
        p = set(["%s:launch" % prefix])
        # View permissions from HasPerm
        for view in self.get_views():
            if isinstance(view.access, HasPerm):
                p.add(view.access.get_permission(self))
        # mrt_config permissions
        for mrt in self.mrt_config:
            c = self.mrt_config[mrt]
            if "access" in c:
                if isinstance(c["access"], HasPerm):
                    p.add(c["access"].get_permission(self))
                elif isinstance(c["access"], basestring):
                    p.add("%s:%s" % (prefix, c["access"]))
        # extra_permissions
        if callable(self.extra_permissions):
            extra = self.extra_permissions()
        else:
            extra = self.extra_permissions
        for e in extra:
            p.add(HasPerm(e).get_permission(self))
        return p

    def user_access_list(self, user):
        """
        Return a list of user access entries
        """
        return []

    def group_access_list(self, group):
        """
        Return a list of group access entries
        """
        return []

    def user_access_change_url(self, user):
        """
        Return an URL to change user access
        """
        return None

    def group_access_change_url(self, group):
        """
        Return an URL to change group access
        """
        return None

    def customize_form(self, form, table, search=False):
        """
        Add custom fields to django form class
        """
        from noc.main.models import CustomField
        l = []
        for f in CustomField.table_fields(table):
            if f.is_hidden:
                continue
            if f.type == "str":
                if search and f.is_filtered:
                    ff = forms.ChoiceField(required=False, label=f.label,
                                           choices=[("", "---")] + f.get_choices())
                elif f.enum_group:
                    ff = forms.ChoiceField(required=False, label=f.label,
                                           choices=[("", "---")] + f.get_enums())
                else:
                    ml = f.max_length if f.max_length else 256
                    ff = forms.CharField(required=False, label=f.label,
                                         max_length=ml)
            elif f.type == "int":
                ff = forms.IntegerField(required=False, label=f.label)
            elif f.type == "bool":
                ff = forms.BooleanField(required=False, label=f.label)
            elif f.type == "date":
                ff = forms.DateField(required=False, label=f.label)
            elif f.type == "datetime":
                ff = forms.DateTimeField(required=False, label=f.label)
            else:
                raise ValueError("Invalid field type: '%s'" % f.type)
            l += [(str(f.name), ff)]
        form.base_fields.update(SortedDict(l))
        return form

    def apply_custom_fields(self, o, v, table):
        """
        Apply custom fields to form
        :param o: Object
        :param v: values dict
        :param table: table
        :return:
        """
        from noc.main.models import CustomField
        for f in CustomField.table_fields(table):
            n = str(f.name)
            if n in v:
                setattr(o, n, v[n])
        return o

    def apply_custom_initial(self, o, v, table):
        """
        Copy non-empty custom-field values from *o* into initial data *v*.
        :param o: Object
        :param v: Initial data
        :param table: table
        :return:
        """
        from noc.main.models import CustomField
        for f in CustomField.table_fields(table):
            n = str(f.name)
            if n not in v:
                x = getattr(o, n)
                if x:
                    v[n] = x
        return o

    def form_errors(self, form):
        """
        with self.form_errors(form):
            object.save()
        :param form:
        :return:
        """
        return FormErrorsContext(form)

    def to_json(self, v):
        """
        Convert custom types to json string
        :param v:
        :return:
        """
        if v is None:
            return None
        elif isinstance(v, datetime.datetime):
            # Localize naive datetimes into the class-level timezone.
            return self.TZ.localize(v).isoformat()
        else:
            raise Exception("Invalid to_json type")

    def check_mrt_access(self, request, name):
        """Return True when the request's user may run MRT *name*."""
        mc = self.mrt_config[name]
        if "access" not in mc:
            return True
        access = mc["access"]
        # Normalize bool/string access specs into access objects.
        if type(access) == bool:
            access = Permit() if access else Deny()
        elif isinstance(access, basestring):
            access = HasPerm(access)
        else:
            access = access
        return access.check(self, request.user)

    @view(url="^mrt/(?P<name>[^/]+)/$", method=["POST"], access=True, api=True)
    def api_run_mrt(self, request, name):
        """Launch a configured map/reduce task; returns the task id."""
        from noc.sa.models import ReduceTask, ManagedObjectSelector
        # Check MRT configured
        if name not in self.mrt_config:
            return self.response_not_found("MRT %s is not found" % name)
        # Check MRT access
        if not self.check_mrt_access(request, name):
            return self.response_forbidden("Forbidden")
        #
        data = json_decode(request.raw_post_data)
        if "selector" not in data:
            return self.response_bad_request("'selector' is missed")
        # Run MRT
        mc = self.mrt_config[name]
        map_params = data.get("map_params", {})
        map_params = dict((str(k), v) for k, v in map_params.iteritems())
        objects = ManagedObjectSelector.resolve_expression(data["selector"])
        task = ReduceTask.create_task(objects, "pyrule:mrt_result", {},
                                      mc["map_script"], map_params,
                                      mc.get("timeout", 0))
        if mc["map_script"] == "commands" and settings.LOG_MRT_COMMAND:
            # Log commands
            now = datetime.datetime.now()
            safe_append(
                os.path.join(settings.LOG_MRT_COMMAND, "commands",
                             "%04d" % now.year, "%02d" % now.month,
                             "%02d.log" % now.day),
                "%s\nDate: %s\nObjects: %s\nUser: %s\nCommands:\n%s\n" % (
                    "-" * 72, now.isoformat(),
                    ",".join(str(o) for o in objects),
                    request.user.username,
                    " " + "\n".join(map_params["commands"]).replace("\n", "\n ")))
        return task.id

    @view(url="^mrt/(?P<name>[^/]+)/(?P<task>\d+)/$", method=["GET"],
          access=True, api=True)
    def api_get_mrt_result(self, request, name, task):
        """Poll a running MRT; returns progress or the final result."""
        from noc.sa.models import ReduceTask, ManagedObjectSelector
        # Check MRT configured
        if name not in self.mrt_config:
            return self.response_not_found("MRT %s is not found" % name)
        # Check MRT access
        if not self.check_mrt_access(request, name):
            return self.response_forbidden("Forbidden")
        #
        t = self.get_object_or_404(ReduceTask, id=int(task))
        try:
            r = t.get_result(block=False)
        except ReduceTask.NotReady:
            # Not ready
            completed = t.maptask_set.filter(status__in=("C", "F")).count()
            total = t.maptask_set.count()
            return {
                "ready": False,
                "progress": int(completed * 100 / total),
                "max_timeout": (t.stop_time - datetime.datetime.now()).seconds,
                "result": None
            }
        # Return result
        return {"ready": True, "progress": 100, "max_timeout": 0, "result": r}

    @view(url="^launch_info/$", method=["GET"], access="launch", api=True)
    def api_launch_info(self, request):
        return self.get_launch_info(request)

    # name -> {access: ..., map_script: ..., timeout: ...}
    mrt_config = {}
def getCurrentTimeZonInfo():
    """Return the tzinfo of the currently active timezone.

    (The typo in the name is part of the public interface and is kept.)
    """
    current_tz = timezone.get_current_timezone()
    return current_tz
def makeAware(date_str):
    """Attach the active timezone to a naive datetime and return the result.

    Bug fix: the original body referenced undefined names (``date_object``
    and ``me``), discarded the ``replace`` result, and raised NameError on
    every call.

    NOTE(review): the parameter name suggests a string, but the original
    ``.replace(tzinfo=...)`` usage implies a datetime object -- confirm with
    callers.
    """
    return date_str.replace(tzinfo=timezone.get_current_timezone())
def handle_timezone(value, is_dst=None):
    """Normalize a datetime against settings.USE_TZ.

    With USE_TZ on, naive values become aware in the current timezone
    (is_dst disambiguates DST transitions); with USE_TZ off, aware values
    are stripped via UTC. Anything else passes through unchanged.
    """
    use_tz = settings.USE_TZ
    if use_tz and timezone.is_naive(value):
        return timezone.make_aware(value, timezone.get_current_timezone(), is_dst)
    if timezone.is_aware(value) and not use_tz:
        return timezone.make_naive(value, timezone.utc)
    return value
def parse_date_timezone(datetimeValue):
    """Parse a datetime string (e.g. '2020-04-20 06:00:00') into an aware
    datetime in the active timezone, for accurate searching."""
    naive_dt = parse_datetime(datetimeValue)
    datetimeValue = get_current_timezone().localize(naive_dt)
    return datetimeValue
def convert_to_localtime(utctime):
    """Label `utctime` as UTC, convert it to the active timezone, and format it."""
    as_utc = utctime.replace(tzinfo=pytz.UTC)
    local_dt = as_utc.astimezone(timezone.get_current_timezone())
    return local_dt.strftime('%Y-%m-%d %H:%M:%S')
def addComicFromMetadata(self, md):
    """Create an Issue record from archive metadata *md* using Comic Vine data.

    Returns True on success, False when the CV id or CV response is missing.
    NOTE(review): the IntegrityError path returns None (not False), and
    ``publisher_obj`` is only bound when md.publisher is not None but is read
    unconditionally below -- confirm both are intended.
    """
    if not md.isEmpty:
        # Let's get the issue Comic Vine id from the archive's metadata
        # If it's not there we'll skip the issue.
        cvID = self.getIssueCVID(md)
        if cvID is None:
            issue_name = md.series + ' #' + md.number
            self.logger.info(
                f'No Comic Vine ID for: {issue_name}... skipping.')
            return False
        # let's get the issue info from CV.
        issue_response = self.getIssue(cvID)
        if issue_response is None:
            return False
        # Get or create the Publisher.
        if md.publisher is not None:
            publisher_obj = self.getPublisher(md.publisher, issue_response)
        # Get or create the series and if a publisher is available set it.
        series_obj = self.getSeries(issue_response)
        if publisher_obj:
            series_obj.publisher = publisher_obj
            series_obj.save()
        # Ugh, deal with the timezone: stamp the file mod time as aware.
        current_timezone = timezone.get_current_timezone()
        tz = timezone.make_aware(md.mod_ts, current_timezone)
        pub_date = self.createPubDate(md.day, md.month, md.year)
        fixed_number = IssueString(md.issue).asString(pad=3)
        issue_slug = self.createIssueSlug(pub_date, fixed_number, series_obj.name)
        try:
            # Create the issue
            issue_obj = Issue.objects.create(
                file=md.path,
                name=str(md.title),
                slug=issue_slug,
                number=fixed_number,
                date=pub_date,
                page_count=md.page_count,
                cvurl=md.webLink,
                cvid=int(cvID),
                mod_ts=tz,
                series=series_obj,
            )
        except IntegrityError as e:
            self.logger.error(f'Attempting to create issue in db - {e}')
            self.logger.info(f'Skipping: {md.path}')
            return
        # Set the issue image & short description.
        res = self.setIssueDetail(cvID, issue_response)
        if res:
            self.logger.info(f"Added: {issue_obj}")
        else:
            self.logger.warning(
                f'No detail information was saved for {issue_obj}')
        # Add the storyarc.
        self.addIssueStoryArcs(
            issue_obj.cvid, issue_response['results']['story_arc_credits'])
        # Add the creators
        self.addIssueCredits(issue_obj.cvid,
                             issue_response['results']['person_credits'])
        return True
def set_timezone(date):
    """Make `date` aware in the active timezone (is_dst=False resolves
    ambiguous DST times); None is passed through unchanged."""
    if date is None:
        return date
    current_tz = get_current_timezone()
    return make_aware(date, current_tz, is_dst=False)
def edit_course(request, pk):
    """Course edit view.

    POST: updates the course fields, rebuilds assignments from POST keys
    shaped ``<homework>___<field>`` (deadline fields parsed as localized
    datetimes), then redirects back to the referrer.
    GET: assembles per-student grades, per-grader first-round grades, and
    per-homework group grade tables for the edit_course.html template.
    """
    if request.method == "POST":
        course_obj = CourseModel.objects.filter(pk=pk).update(
            course_function=request.POST['course_function'],
            course_name=request.POST['course_name'],
            grading_rubric=request.POST['grading_rubric'],
            appeal_role=request.POST['appeal_role'],
            course_grading_type=request.POST['course_grading_type'],
            course_group_type=request.POST['course_group_type'])
        # after the course have been updated now save the assignments
        assignments = {}
        for d in request.POST:
            data = request.POST[d].strip()
            d = d.split("___")
            if len(d) > 1:
                # Deadline fields arrive as 'MM/DD/YYYY HH:MM AM' strings;
                # localize them into the active timezone.
                if d[1] in ('grade_deadline', 'homework_deadline'):
                    tz = get_current_timezone()
                    data = tz.localize(
                        datetime.strptime(data, '%m/%d/%Y %H:%M %p'))
                if d[0] in assignments.keys():
                    assignments[d[0]][d[1]] = data
                else:
                    assignments[d[0]] = {}
                    assignments[d[0]]['homework_name'] = d[0]
                    assignments[d[0]][d[1]] = data
        print(assignments)
        for c in assignments:
            CourseHomeWorkModel.objects.update_or_create(
                course=CourseModel.objects.get(pk=pk),
                homework_name=c,
                defaults=assignments[c])
        return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
    course = CourseModel.objects.filter(pk=pk).first()
    homework = CourseHomeWorkModel.objects.filter(
        course=pk).order_by('homework_name')
    enrolled_student = StudentCourseModel.objects.filter(
        course=pk).select_related('user')
    all_grades = {}
    group_grades = {}
    first_grader_grading = {}
    # first fill the dummy first_grader_grading with None value
    for u in enrolled_student:
        user_id_e = u.user.id
        user_obj_e = u.user
        if user_id_e not in first_grader_grading.keys():
            first_grader_grading[user_id_e] = {}
            first_grader_grading[user_id_e]['name'] = user_obj_e.name
            first_grader_grading[user_id_e]['grade'] = [None] * len(homework)
    for h in homework:
        homework_group = HomeworkGroup.objects.filter(
            course=course, homework=h).order_by('group_name')
        #
        for g in homework_group:
            # first check for instructor override grade
            if g.is_override:
                grade = g.grade
            elif g.appeal_done_count == g.total_member:
                # All members appealed: use the appeal grade instead.
                appeal_grade, appeal_explanation = return_appeal_grade_explanation_1(
                    g.group)
                grade = appeal_grade
            else:
                grade = g.grade
            # this is for the all_grades
            for members in HomeworkGroupMember.objects.filter(
                    group=g).select_related('user'):
                user_id = members.user.id
                users_obj = members.user
                if user_id not in all_grades.keys():
                    all_grades[user_id] = {}
                    all_grades[user_id]['name'] = users_obj.name
                    all_grades[user_id]['grade'] = []
                    all_grades[user_id]['grade'].append(grade)
                else:
                    all_grades[user_id]['grade'].append(grade)
            # this is for the first grader grading
            for grader_user in GroupCombinationModel.objects.filter(
                    group=g).select_related('grader_user'):
                grader_user_id = grader_user.grader_user.id
                grader_user_obj = grader_user.grader_user
                # first find in the appeal_group: average peer-evaluation grade
                total_grade = PeerEvaluationModel.objects.filter(
                    group=g, peer_grader=grader_user_obj).aggregate(Avg('grade'))
                if total_grade['grade__avg'] is not None:
                    total_grade = round(total_grade['grade__avg'])
                else:
                    # now check does this user have even done the grading
                    grading_done = HomeworkGroupGrade.objects.filter(
                        group=g, grader=grader_user_obj)
                    if grading_done.exists():
                        total_grade = grading_done.first()
                        total_grade = total_grade.grade
                    else:
                        total_grade = None
                # NOTE(review): assumes homework_name is a 1-based integer
                # string used as a positional index -- confirm.
                first_grader_grading[grader_user_id]['grade'][
                    int(h.homework_name) - 1] = total_grade
        # first fetch the assignment name array
        group_grades[h.id] = {}
        group_grades[h.id]['title'] = h.assignment_title
        group_grades[h.id]['value'] = []
        for group in homework_group:
            grade_explanation = return_grade_explanation(group.group)
            appeal_grade_explanation = return_appeal_grade_explanation(
                group.group)
            instructor_grade, instructor_comment = return_instructor_grade_comment(
                group.group)
            temp = {}
            temp['id'] = group.group
            temp['group'] = return_member_name(group.group)[0]
            temp['grader'] = grade_explanation[2]
            temp['grade'] = grade_explanation[0]
            temp['explanation'] = grade_explanation[1]
            temp['appeal_grader'] = appeal_grade_explanation[2]
            temp['appeal_grade'] = appeal_grade_explanation[0]
            temp['appeal_explanation'] = appeal_grade_explanation[1]
            temp['deadline_miss'] = group.deadline_miss
            temp['file'] = group.attachment
            temp['updated_at'] = group.updated_at
            temp['group_id'] = group.group_name
            temp['instructor_grade'] = instructor_grade
            temp['instructor_comment'] = instructor_comment
            group_grades[h.id]['value'].append(temp)
    # print group_grades
    group_grades_sort = collections.OrderedDict(sorted(group_grades.items()))
    # now sum the all_grades (append the average as a trailing element)
    for c in all_grades:
        t_all_grader = [x for x in all_grades[c]['grade'] if x is not None]
        if t_all_grader:
            sum_grade = sum(t_all_grader) / len(t_all_grader)
            all_grades[c]['grade'].append(sum_grade)
        else:
            all_grades[c]['grade'].append(None)
    # now sum the all first grading grades
    for c in first_grader_grading:
        t_all_grader = [
            x for x in first_grader_grading[c]['grade'] if x is not None
        ]
        if t_all_grader:
            sum_grade = sum(t_all_grader) / len(t_all_grader)
            first_grader_grading[c]['grade'].append(sum_grade)
        else:
            first_grader_grading[c]['grade'].append(None)
    # print first_grader_grading
    return render(
        request, 'edit_course.html', {
            'course': course,
            'homework': homework,
            'enrolled_student': enrolled_student.order_by("user__name"),
            'course_pk': pk,
            'all_grades': all_grades,
            'group_grades': group_grades_sort,
            'first_grader_grading': first_grader_grading,
            'ta': TA.objects.filter(course=course).first()
        })
def get_context_data(self, **kwargs):
    """Build the stats-page context: filter form, selected graph type, and the
    graph data for the requested period (defaults to the last 1 day starting
    at local midnight in the active timezone).
    """
    form = forms.StatsFilterForm(self.request.GET or None)
    # Defaults when no GET parameters are supplied.
    graph_type = forms.GRAPH_TYPE_LINEAR
    linear_type = forms.LINEAR_CALLS
    linear_c_type = forms.LINEAR_C_ANONS
    sunlight_type = forms.SUNLIGHT_BROWSERS
    days = 1
    day = datetime.date.today()
    day_start = datetime.datetime(day.year, day.month, day.day,
                                  tzinfo=timezone.get_current_timezone())
    graph = None
    if self.request.GET:
        if ('graph_type' in self.request.GET) and \
                self.request.GET['graph_type']:
            graph_type = int(self.request.GET['graph_type'])
        if ('linear_type' in self.request.GET) and \
                self.request.GET['linear_type']:
            linear_type = int(self.request.GET['linear_type'])
        if ('linear_c_type' in self.request.GET) and \
                self.request.GET['linear_c_type']:
            linear_c_type = int(self.request.GET['linear_c_type'])
        if ('sunlight_type' in self.request.GET) and \
                self.request.GET['sunlight_type']:
            sunlight_type = int(self.request.GET['sunlight_type'])
        if ('period' in self.request.GET) and self.request.GET['period']:
            days = int(self.request.GET['period'])
    # Window start: `days` back from today's midnight.
    day_start = day_start - datetime.timedelta(days=days)
    if graph_type == forms.GRAPH_TYPE_LINEAR:
        if linear_type == forms.LINEAR_CALLS:
            graph = graphs.collapsed_time_period_graph(
                day_start, days, 'calls_count')
        elif linear_type == forms.LINEAR_ERRORS:
            graph = graphs.collapsed_time_period_graph(
                day_start, days, 'exceptions_count')
        elif linear_type == forms.LINEAR_ANONS:
            graph = graphs.collapsed_time_period_graph(
                day_start, days, 'anon_calls_count')
        elif linear_type == forms.LINEAR_MOBILES:
            graph = graphs.collapsed_time_period_graph(
                day_start, days, 'mobiles_count')
        elif linear_type == forms.LINEAR_EXEC_TIME:
            # Times are stored in milliseconds; multi=0.001 converts to seconds.
            graph = graphs.collapsed_time_period_graph(day_start, days,
                                                       'exec_time',
                                                       multi=0.001)
        elif linear_type == forms.LINEAR_DB_CALLS:
            graph = graphs.collapsed_time_period_graph(
                day_start, days, 'db_count')
        elif linear_type == forms.LINEAR_DB_EXEC:
            graph = graphs.collapsed_time_period_graph(day_start, days,
                                                       'db_time',
                                                       multi=0.001)
        elif linear_type == forms.LINEAR_TEMPLATE:
            graph = graphs.collapsed_time_period_graph(day_start, days,
                                                       'template_time',
                                                       multi=0.001)
    elif graph_type == forms.GRAPH_TYPE_SUNLIGHT:
        if sunlight_type == forms.SUNLIGHT_BROWSERS:
            graph = graphs.collapsed_sunlignt_graph(
                day_start, days, 'browsers')
            graph['name'] = _("Browsers")
        elif sunlight_type == forms.SUNLIGHT_OSES:
            graph = graphs.collapsed_sunlignt_graph(
                day_start, days, 'oses')
            graph['name'] = _("Operating systems")
        elif sunlight_type == forms.SUNLIGHT_MOBILES:
            graph = graphs.collapsed_sunlignt_graph(
                day_start, days, 'devices')
            graph['name'] = _("Mobile devices")
        elif sunlight_type == forms.SUNLIGHT_MOD_CALLS:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="calls")
            graph['name'] = _("Calls")
        elif sunlight_type == forms.SUNLIGHT_MOD_ANONS:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="anons")
            graph['name'] = _("Anonymous")
        elif sunlight_type == forms.SUNLIGHT_MOD_MOBILES:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="mobiles")
            graph['name'] = _("Mobiles")
        elif sunlight_type == forms.SUNLIGHT_MOD_EXCEPT:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="exceptions")
            graph['name'] = _("Exceptions")
        elif sunlight_type == forms.SUNLIGHT_MOD_EXEC:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="exec")
            graph['name'] = _("Exec time")
        elif sunlight_type == forms.SUNLIGHT_MOD_DB_COUNT:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="db_count")
            graph['name'] = _("DB queries")
        elif sunlight_type == forms.SUNLIGHT_MOD_DB_TIME:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="db_time")
            graph['name'] = _("DB time")
        elif sunlight_type == forms.SUNLIGHT_MOD_TEMPLATE:
            graph = graphs.collapsed_sunlignt_graph(day_start, days,
                                                    'modules',
                                                    subfield="render")
            graph['name'] = _("Template rendering")
    elif graph_type == forms.GRAPH_TYPE_LINEAR_C:
        if linear_c_type == forms.LINEAR_C_ANONS:
            graph = graphs.collapsed_sum_graph(
                day_start, days, ['calls_count', 'anon_calls_count'],
                [_("All"), _("Anonymous")])
        elif linear_c_type == forms.LINEAR_C_MOBILES:
            graph = graphs.collapsed_sum_graph(
                day_start, days, ['calls_count', 'mobiles_count'],
                [_("All"), _("Mobile devices")])
        elif linear_c_type == forms.LINEAR_C_EXEC:
            # NOTE(review): `logic_time` is a bare (unquoted) name unlike the
            # sibling string field names -- confirm it is a module-level
            # constant/callable and not a missing-quotes bug.
            graph = graphs.collapsed_sum_graph(
                day_start, days, [logic_time, 'db_time', 'template_time'],
                [_("Logic"), _("DB"), _("Rendering")],
                multi=0.001, incremental=True)
        elif linear_c_type == forms.LINEAR_C_ERRORS:
            graph = graphs.collapsed_sum_graph(
                day_start, days, ['calls_count', 'exceptions_count'],
                [_("All"), _("Exceptions")])
    kwargs['form'] = form
    kwargs['graph_type'] = graph_type
    kwargs['graph'] = graph
    return kwargs
from django.db import models
from datetime import datetime, timedelta
from email.utils import parsedate
from django.utils import timezone
import os
import socket
import settings

# Captured once at import time; a later timezone.activate() in a request
# will NOT be reflected here. NOTE(review): confirm this is intentional.
current_timezone = timezone.get_current_timezone()


def parse_datetime(string):
    """Parse an RFC 2822 date string into a datetime.

    Aware (tagged with the import-time current timezone) when USE_TZ is on,
    naive otherwise. NOTE(review): parsedate() returns None for unparseable
    input, which would raise TypeError here -- confirm callers pre-validate.
    """
    if settings.USE_TZ:
        return datetime(*(parsedate(string)[:6]), tzinfo=current_timezone)
    else:
        return datetime(*(parsedate(string)[:6]))


class TwitterAPICredentials(models.Model):
    """
    Credentials for accessing the Twitter Streaming API.
    """
    # Row creation timestamp, set automatically on insert.
    created_at = models.DateTimeField(auto_now_add=True)
    name = models.CharField(max_length=250)
    email = models.EmailField()
    api_key = models.CharField(max_length=250)
    api_secret = models.CharField(max_length=250)
def convert_to_localtime(utctime):
    """Interpret *utctime* as UTC, convert it to the active timezone and
    render it with Django's DATETIME_FORMAT.
    """
    local_dt = utctime.replace(tzinfo=pytz.UTC).astimezone(
        timezone.get_current_timezone())
    return formats.date_format(local_dt, 'DATETIME_FORMAT')
def dynamic_activity_data(request):
    """Large database activity widget helper.

    Serves AJAX requests for the activity widget: given an optional date
    range (``range_start``/``range_end`` as YYYY-MM-DD) and a paging
    ``direction`` (``next``/``prev``/``same``), computes a new 30-day window
    and returns per-day creation counts for several review models.
    """
    direction = request.GET.get('direction')
    range_end = request.GET.get('range_end')
    range_start = request.GET.get('range_start')
    days_total = 30  # size of the sliding window, in days

    # Convert the date from the request.
    #
    # This takes the date from the request in YYYY-MM-DD format and
    # converts into a format suitable for QuerySet later on.
    if range_end:
        range_end = datetime.datetime.fromtimestamp(
            time.mktime(time.strptime(range_end, "%Y-%m-%d")))

    if range_start:
        range_start = datetime.datetime.fromtimestamp(
            time.mktime(time.strptime(range_start, "%Y-%m-%d")))

    # Page the window forward/backward, keep it, or (default) show the last
    # 30 days ending tomorrow.
    if direction == "next" and range_end:
        new_range_start = range_end
        new_range_end = \
            new_range_start + datetime.timedelta(days=days_total)
    elif direction == "prev" and range_start:
        new_range_start = range_start - datetime.timedelta(days=days_total)
        new_range_end = range_start
    elif direction == "same" and range_start and range_end:
        new_range_start = range_start
        new_range_end = range_end
    else:
        new_range_end = datetime.datetime.now() + datetime.timedelta(days=1)
        new_range_start = new_range_end - datetime.timedelta(days=days_total)

    # Make both endpoints timezone-aware so the __range query below compares
    # against aware model timestamps.
    current_tz = timezone.get_current_timezone()
    new_range_start = timezone.make_aware(new_range_start, current_tz)
    new_range_end = timezone.make_aware(new_range_end, current_tz)

    # Echoed back to the client so it can request the adjacent window.
    response_data = {
        "range_start": new_range_start.strftime("%Y-%m-%d"),
        "range_end": new_range_end.strftime("%Y-%m-%d")
    }

    def large_stats_data(range_start, range_end):
        # Gather per-day counts for each tracked model within the window.
        def get_objects(model_name, timestamp_field, date_field):
            """Perform timestamp based queries.

            This method receives a dynamic model name and performs a
            filter query. Later the results are grouped by day and
            prepared for the charting library.
            """
            args = '%s__range' % timestamp_field
            q = model_name.objects.filter(**{
                args: (range_start, range_end)
            })
            # Truncate the timestamp to its date via raw SQL (extra()).
            q = q.extra({timestamp_field: date_field})
            q = q.values(timestamp_field)
            q = q.annotate(created_count=Count('pk'))
            q = q.order_by(timestamp_field)

            data = []

            for obj in q:
                # [epoch-milliseconds, count] pairs for the charting lib.
                data.append([
                    time.mktime(time.strptime(
                        six.text_type(obj[timestamp_field]),
                        "%Y-%m-%d")) * 1000,
                    obj['created_count']
                ])

            return data

        comment_array = get_objects(Comment, "timestamp", "date(timestamp)")
        change_desc_array = get_objects(ChangeDescription, "timestamp",
                                        "date(timestamp)")
        review_array = get_objects(Review, "timestamp", "date(timestamp)")
        rr_array = get_objects(ReviewRequest, "time_added",
                               "date(time_added)")

        return {
            'change_descriptions': change_desc_array,
            'comments': comment_array,
            'reviews': review_array,
            'review_requests': rr_array
        }

    stats_data = large_stats_data(new_range_start, new_range_end)

    return {
        "range": response_data,
        "activity_data": stats_data
    }
def _convert_date(self, d):
    """Turn a date into an aware datetime at midnight in the active timezone."""
    midnight = datetime.combine(d, time(hour=0, minute=0))
    return timezone.make_aware(midnight, timezone.get_current_timezone())
def datetime_combine(date, time):
    """Timezone aware version of `datetime.datetime.combine`"""
    naive = datetime.datetime.combine(date, time)
    return make_aware(naive, get_current_timezone())
def dashboard(request):
    """Render the main dashboard.

    Superusers see global engagement/finding counts; other users see only
    the findings they reported / engagements they lead.  Builds severity
    totals, a 7-month by-month trend and punchcard data for the template.
    """
    now = timezone.now()
    seven_days_ago = now - timedelta(days=7)
    if request.user.is_superuser:
        # Global view: all active engagements and all verified findings.
        engagement_count = Engagement.objects.filter(active=True).count()
        finding_count = Finding.objects.filter(
            verified=True,
            mitigated=None,
            date__range=[seven_days_ago, now]).count()
        mitigated_count = Finding.objects.filter(
            mitigated__range=[seven_days_ago, now]).count()
        accepted_count = len([
            finding for ra in Risk_Acceptance.objects.filter(
                reporter=request.user, created__range=[seven_days_ago, now])
            for finding in ra.accepted_findings.all()
        ])

        # forever counts
        findings = Finding.objects.filter(verified=True)
    else:
        # Per-user view: scope engagements/findings to the requesting user.
        engagement_count = Engagement.objects.filter(lead=request.user,
                                                     active=True).count()
        finding_count = Finding.objects.filter(
            reporter=request.user,
            verified=True,
            mitigated=None,
            date__range=[seven_days_ago, now]).count()
        mitigated_count = Finding.objects.filter(
            mitigated_by=request.user,
            mitigated__range=[seven_days_ago, now]).count()
        accepted_count = len([
            finding for ra in Risk_Acceptance.objects.filter(
                reporter=request.user, created__range=[seven_days_ago, now])
            for finding in ra.accepted_findings.all()
        ])

        # forever counts
        findings = Finding.objects.filter(reporter=request.user,
                                          verified=True)

    # Tally findings by severity; unset severities are skipped.
    sev_counts = {'Critical': 0, 'High': 0, 'Medium': 0, 'Low': 0, 'Info': 0}

    for finding in findings:
        if finding.severity:
            sev_counts[finding.severity] += 1

    by_month = list()

    # Current month plus the previous six.
    dates_to_use = [
        now,
        now - relativedelta(months=1),
        now - relativedelta(months=2),
        now - relativedelta(months=3),
        now - relativedelta(months=4),
        now - relativedelta(months=5),
        now - relativedelta(months=6)
    ]
    for date_to_use in dates_to_use:
        # a..e map to Critical..Info counts for the charting library.
        sourcedata = {
            'y': date_to_use.strftime("%Y-%m"),
            'a': 0,
            'b': 0,
            'c': 0,
            'd': 0,
            'e': 0
        }

        # NOTE(review): this query filters by reporter=request.user even for
        # superusers, unlike the branches above — confirm intentional.
        # NOTE(review): attaching get_current_timezone() via tzinfo= yields
        # a wrong (LMT) offset if it is a pytz zone — confirm / consider
        # timezone.make_aware.
        for finding in Finding.objects.filter(
                reporter=request.user,
                verified=True,
                date__range=[
                    datetime(date_to_use.year,
                             date_to_use.month,
                             1,
                             tzinfo=timezone.get_current_timezone()),
                    datetime(date_to_use.year,
                             date_to_use.month,
                             monthrange(date_to_use.year,
                                        date_to_use.month)[1],
                             tzinfo=timezone.get_current_timezone())
                ]):
            if finding.severity == 'Critical':
                sourcedata['a'] += 1
            elif finding.severity == 'High':
                sourcedata['b'] += 1
            elif finding.severity == 'Medium':
                sourcedata['c'] += 1
            elif finding.severity == 'Low':
                sourcedata['d'] += 1
            elif finding.severity == 'Info':
                sourcedata['e'] += 1
        by_month.append(sourcedata)

    start_date = now - timedelta(days=180)

    # Approximate the number of weeks in the last 180 days (4.33 weeks/month).
    r = relativedelta(now, start_date)
    weeks_between = int(
        ceil((((r.years * 12) + r.months) * 4.33) + (r.days / 7)))
    if weeks_between <= 0:
        weeks_between += 2

    punchcard, ticks, highest_count = get_punchcard_data(
        findings, weeks_between, start_date)
    add_breadcrumb(request=request, clear=True)

    return render(
        request, 'dojo/dashboard.html', {
            'engagement_count': engagement_count,
            'finding_count': finding_count,
            'mitigated_count': mitigated_count,
            'accepted_count': accepted_count,
            'critical': sev_counts['Critical'],
            'high': sev_counts['High'],
            'medium': sev_counts['Medium'],
            'low': sev_counts['Low'],
            'info': sev_counts['Info'],
            'by_month': by_month,
            'punchcard': punchcard,
            'ticks': ticks,
            'highest_count': highest_count
        })
def __str__(self):
    """Human-readable label: the meeting's date in the active timezone."""
    local_dt = self.datetime.astimezone(timezone.get_current_timezone())
    return "Meeting For %s" % local_dt.date()
def _get_to_datetime(self):
    """Return the cleaned ``to_datetime`` converted to the active timezone."""
    active_tz = timezone.get_current_timezone()
    return self.cleaned_data['to_datetime'].astimezone(active_tz)
import sys
from pathlib import Path

from django.core.management.base import BaseCommand
from django.utils import timezone
from django.utils.dateparse import parse_datetime

from demographic import models
from abstract import models as abstract_models
from account.models import UserGroups

# Three levels above this file (management/commands/... -> app root).
# Note: the original Path(str(Path(__file__))) double-wrap is equivalent
# to Path(__file__).
BASE_DIR = Path(__file__).parents[3]
DATA_DIR = BASE_DIR / 'data'

# Captured at import time; acceptable for a one-shot management command.
server_tz = timezone.get_current_timezone()


class Command(BaseCommand):
    """Populates the gender, state and career stage tables."""

    def handle(self, *args, **options):
        """Create the default user groups and one State row per line of
        data/states.txt (blank lines ignored); idempotent via get_or_create.
        """
        sys.stdout.write("Creating user groups.\n")
        UserGroups.create_groups()

        sys.stdout.write("Creating states.\n")
        with open(str(DATA_DIR / 'states.txt')) as fp:
            states = [x.strip() for x in fp.readlines() if x.strip()]
        for state in states:
            model, created = models.State.objects.get_or_create(text=state)
            if created:
                sys.stdout.write("\tCreated state {}.\n".format(state))
            else:
                sys.stdout.write("\t{} already exists.\n".format(state))
def create_fixtures():
    """Build test-fixture tuples for maps, users, people, layers and
    documents.

    Layers get monotonically older timestamps (60-day steps back from now)
    so date-ordering tests have distinct values.  Returns
    ``(map_data, user_data, people_data, layer_data, document_data)``.
    """
    biota = TopicCategory.objects.get(identifier='biota')
    location = TopicCategory.objects.get(identifier='location')
    elevation = TopicCategory.objects.get(identifier='elevation')
    farming = TopicCategory.objects.get(identifier='farming')

    world_extent = [-180, 180, -90, 90]

    map_data = [
        ('SDI Default Map', 'SDI default map abstract',
         ('populartag', ), world_extent, biota),
        ('ipsum lorem', 'common ipsum lorem',
         ('populartag', 'maptagunique'), world_extent, biota),
        ('lorem1 ipsum1', 'common abstract1',
         ('populartag', ), world_extent, biota),
        ('ipsum foo', 'common bar lorem',
         ('populartag', ), world_extent, location),
        ('map one', 'common this is a unique thing',
         ('populartag', ), [0, 1, 0, 1], location),
        ('quux', 'common double thing',
         ('populartag', ), [0, 5, 0, 5], location),
        ('morx', 'common thing double',
         ('populartag', ), [0, 10, 0, 10], elevation),
        ('titledupe something else ', 'whatever common',
         ('populartag', ), [0, 10, 0, 10], elevation),
        ('something titledupe else ', 'bar common',
         ('populartag', ), [0, 50, 0, 50], elevation),
        ('map metadata true', 'map metadata true',
         ('populartag', ), [0, 22, 0, 22], farming),
    ]

    user_data = [
        ('bobby', 'bob', 'bobby', ''),
        ('norman', 'norman', 'norman', ''),
        ('user1', 'pass', 'uniquefirst', 'foo'),
        ('user2', 'pass', 'foo', 'uniquelast'),
        ('unique_username', 'pass', 'foo', 'uniquelast'),
        ('jblaze', 'pass', 'johnny', 'blaze'),
        ('foo', 'pass', 'bar', 'baz'),
    ]

    people_data = [
        ('this contains all my interesting profile information', ),
        ('some other information goes here', ),
    ]

    now = datetime.now(timezone.get_current_timezone())
    step = timedelta(days=60)

    def _test_dates():
        # Yields now - step, now - 2*step, ... — an endless stream of
        # steadily older timestamps.  (The previous version wrapped this in
        # a factory whose inner function shadowed the builtin `callable`.)
        current = now
        while True:
            current = current - step
            yield current

    _date_iter = _test_dates()

    def next_date():
        # Pull the next (older) timestamp for a layer fixture.
        return next(_date_iter)

    layer_data = [
        ('CA', 'abstract1', 'CA', 'geonode:CA', world_extent,
         next_date(), ('populartag', 'here'), elevation),
        ('layer2', 'abstract2', 'layer2', 'geonode:layer2', world_extent,
         next_date(), ('populartag', ), elevation),
        ('uniquetitle', 'something here', 'mylayer', 'geonode:mylayer',
         world_extent, next_date(), ('populartag', ), elevation),
        ('common blar', 'lorem ipsum', 'foo', 'geonode:foo', world_extent,
         next_date(), ('populartag', 'layertagunique'), location),
        ('common double it', 'whatever', 'whatever', 'geonode:whatever',
         [0, 1, 0, 1], next_date(), ('populartag', ), location),
        ('common double time', 'else', 'fooey', 'geonode:fooey',
         [0, 5, 0, 5], next_date(), ('populartag', ), location),
        ('common bar', 'uniqueabstract', 'quux', 'geonode:quux',
         [0, 10, 0, 10], next_date(), ('populartag', ), biota),
        ('common morx', 'lorem ipsum', 'fleem', 'geonode:fleem',
         [0, 50, 0, 50], next_date(), ('populartag', ), biota),
        ('layer metadata true', 'lorem ipsum', 'fleem',
         'geonode:metadatatrue', [0, 22, 0, 22], next_date(),
         ('populartag', ), farming)
    ]

    document_data = [
        ('lorem ipsum', 'common lorem ipsum',
         ('populartag', ), world_extent, biota),
        ('ipsum lorem', 'common ipsum lorem',
         ('populartag', 'doctagunique'), world_extent, biota),
        ('lorem1 ipsum1', 'common abstract1',
         ('populartag', ), world_extent, biota),
        ('ipsum foo', 'common bar lorem',
         ('populartag', ), world_extent, location),
        ('doc one', 'common this is a unique thing',
         ('populartag', ), [0, 1, 0, 1], location),
        ('quux', 'common double thing',
         ('populartag', ), [0, 5, 0, 5], location),
        ('morx', 'common thing double',
         ('populartag', ), [0, 10, 0, 10], elevation),
        ('titledupe something else ', 'whatever common',
         ('populartag', ), [0, 10, 0, 10], elevation),
        ('something titledupe else ', 'bar common',
         ('populartag', ), [0, 50, 0, 50], elevation),
        ('doc metadata true', 'doc metadata true',
         ('populartag', ), [0, 22, 0, 22], farming)
    ]

    return map_data, user_data, people_data, layer_data, document_data
def get_entry(self, key, href):
    """Fetch a single Madek media entry at *href* and mirror it locally.

    Pulls the entry's meta-data, creates/updates the local Entry, downloads
    its image (if not present), then attaches licenses, authors, keywords
    and a year-precision date.  Returns the saved Entry instance.
    """
    # get single entry
    entry_request = requests.get(self.base + href, auth=self.auth)
    entry = entry_request.json()
    self.stdout.write(self.style.SUCCESS('Madek: %s' % entry.get('id')))

    # the updated entry as dict
    new_entry = {
        "category": self.category
    }

    # get meta data
    meta_data = requests.get(self.base + href + '/meta-data/',
                             auth=self.auth)
    meta_data = meta_data.json()

    author_objs = []
    license_objs = []
    keywords_objs = []

    # Iterate over meta-data.  The loop variable was renamed from `key`
    # (it shadowed the `key` parameter) and `id` (it shadowed the builtin).
    for m in meta_data.get('meta-data'):
        meta_key = m.get('meta_key_id')
        meta_id = m.get('id')
        if meta_key == 'madek_core:title':
            title = requests.get(self.base + '/api/meta-data/' + meta_id,
                                 auth=self.auth).json().get('value')
            new_entry['title'] = title
        elif meta_key == 'madek_core:subtitle':
            subtitle = requests.get(self.base + '/api/meta-data/' + meta_id,
                                    auth=self.auth).json().get('value')
            new_entry['subtitle'] = subtitle
        elif meta_key == 'madek_core:portrayed_object_date':
            portrayed_object_date = requests.get(
                self.base + '/api/meta-data/' + meta_id,
                auth=self.auth).json().get('value')
            new_entry['portrayed_object_date'] = portrayed_object_date
        elif meta_key == 'madek_core:authors':
            authors = requests.get(
                self.base + '/api/meta-data/' + meta_id,
                auth=self.auth).json().get('_json-roa')['collection']
            author_objs = self.get_authors(authors['relations'])
        elif meta_key == 'copyright:license':
            licenses = requests.get(
                self.base + '/api/meta-data/' + meta_id,
                auth=self.auth).json().get('_json-roa')['collection']
            # will be set at the end of this function
            license_objs = self.get_licenses(licenses['relations'])
        elif meta_key == 'copyright:source':
            source = requests.get(self.base + '/api/meta-data/' + meta_id,
                                  auth=self.auth).json().get('value')
            new_entry['source'] = source
        elif meta_key == 'copyright:copyright_usage':
            # Usage info is intentionally ignored.
            pass
        elif meta_key == 'madek_core:copyright_notice':
            copyright_notice = requests.get(
                self.base + '/api/meta-data/' + meta_id,
                auth=self.auth).json().get('value')
            new_entry['copyright_notice'] = copyright_notice
        elif meta_key == 'madek_core:keywords':
            keywords = requests.get(
                self.base + '/api/meta-data/' + meta_id,
                auth=self.auth).json().get('_json-roa')['collection']
            keywords_objs = self.get_keywords(keywords['relations'])

    # create or update entry
    obj, created = Entry.objects.update_or_create(
        remote_uuid=entry.get('id'), defaults=new_entry)

    # save image
    image_href = entry['_json-roa']['relations']['media-file'].get('href')
    image_request = requests.get(self.base + image_href, auth=self.auth)
    image = image_request.json()
    image_data_request = requests.get(
        self.base + image['_json-roa']['relations']['data-stream']['href'],
        stream=True,
        auth=self.auth)
    if not obj.image:
        if image_data_request.status_code == 200:
            f = tempfile.NamedTemporaryFile(delete=False)
            with open(f.name, 'wb') as f:
                image_data_request.raw.decode_content = True
                shutil.copyfileobj(image_data_request.raw, f)
            f.close()
            # save entry.  Fix: the image must be re-opened in binary mode
            # ('rb'); text mode ('r') corrupts/fails on binary data under
            # Python 3.
            with open(f.name, 'rb') as f:
                self.stdout.write(self.style.SUCCESS(image.get('filename')))
                obj.image.save(image.get('filename'), File(f), save=True)
                # save original image name
                obj.image_name = image.get('filename')
            f.close()
            os.unlink(f.name)

    # set licenses, authors, keywords
    self.stdout.write(self.style.SUCCESS('Entry: %s' % obj))
    obj.license.set(license_objs)
    obj.author.set(author_objs)
    for keyword in keywords_objs:
        obj.tags.add(keyword)

    # set date: extract a leading 4-digit year, if present.
    # Fix: raw string avoids the invalid-escape-sequence warning for \d.
    m = re.findall(r'^(\d{4})', str(obj.portrayed_object_date))
    if m:
        date = datetime.strptime(m[0], '%Y')
        date = timezone.make_aware(date, timezone.get_current_timezone())
        obj.date = date
        obj.date_accuracy = 3  # year-level accuracy

    # save the entry
    obj.save()
    return obj