def smart_datetime(*args, **kwargs):
    if settings.USE_TZ:
        # Return an aware datetime in the default timezone. Note that with a
        # pytz default timezone, passing it as tzinfo= skips DST handling;
        # timezone.make_aware() is the safer construction.
        if 'tzinfo' not in kwargs:
            kwargs['tzinfo'] = timezone.get_default_timezone()
        return datetime(*args, **kwargs).astimezone(
            timezone.get_default_timezone())
    return datetime(*args, **kwargs)
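# Illustrative usage of smart_datetime (not from the original source; assumes
# a configured Django settings module with USE_TZ = True and a zoneinfo-based
# default timezone, where the tzinfo= construction is safe):
#
#   dt = smart_datetime(2020, 6, 1, 12, 30)
#   # -> 2020-06-01 12:30:00+02:00 for a Europe/Berlin default timezone
#
# With USE_TZ = False the same call returns naive datetime(2020, 6, 1, 12, 30).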
def filter_with_date_range(self, queryset):
    """Filter results that fall within a (possibly open-ended) date range."""
    # Nothing to do if we don't have a date field.
    if not self.date_range_field_name:
        return queryset

    # After the start date.
    if self.start_date:
        start_datetime = timezone.make_aware(
            datetime.combine(self.start_date, time(0, 0)),
            timezone.get_default_timezone())
        filter_kwargs = {
            "%s__gt" % self.date_range_field_name: start_datetime,
        }
        queryset = queryset.filter(**filter_kwargs)

    # Before the end of the end date.
    if self.end_date:
        end_of_end_date = datetime.combine(
            self.end_date, time(hour=23, minute=59, second=59)
        )
        end_datetime = timezone.make_aware(end_of_end_date,
                                           timezone.get_default_timezone())
        filter_kwargs = {
            "%s__lt" % self.date_range_field_name: end_datetime,
        }
        queryset = queryset.filter(**filter_kwargs)

    return queryset
def get_periods(self):
    month = bool(re.findall(match_m, self.name_template))
    if month:
        start = datetime.datetime(
            self.current_period.year, self.current_period.month, 1,
            0, 0, 0, tzinfo=get_default_timezone()
        )
        if self.current_period.month == 12:
            end = datetime.datetime(
                self.current_period.year, 12, 31, 0, 0, 0,
                tzinfo=get_default_timezone()
            )
        else:
            end = datetime.datetime(
                self.current_period.year, self.current_period.month + 1, 1,
                0, 0, 0, tzinfo=get_default_timezone()
            ) - datetime.timedelta(days=1)
    else:
        start = datetime.datetime(
            self.current_period.year, 1, 1, 0, 0, 0,
            tzinfo=get_default_timezone()
        )
        end = datetime.datetime(
            self.current_period.year, 12, 31, 0, 0, 0,
            tzinfo=get_default_timezone()
        )
    return start, end
def humanize_time_span(first, second):
    """
    Returns a humanized string for an event start and end time.

    TODO: use user timezone
    """
    tz = timezone.get_default_timezone()
    first = tz.normalize(first)
    second = tz.normalize(second)
    if first.day == second.day:
        if (first.hour < 12 and second.hour < 12) or \
                (first.hour >= 12 and second.hour >= 12):
            return "from %s to %s." % (
                first.strftime('%I:%M'),
                second.strftime('%I:%M %p').lower()
            )
        return "from %s to %s." % (
            first.strftime('%I:%M %p').lower(),
            second.strftime('%I:%M %p').lower()
        )
    return "from %s the %s to %s the %s." % (
        first.strftime('%I:%M %p').lower(),
        ordinal(int(first.strftime('%d'))),
        second.strftime('%I:%M %p').lower(),
        ordinal(int(second.strftime('%d')))
    )
def test_reschedule_with_exclude(self):
    """Test reschedule with exclude time."""
    job = Job.objects.get(pk=1)
    job.reschedule_interval_type = 'HOUR'
    job.save()

    Run.objects.filter(pk=1).update(
        enqueue_dts=timezone.now(),
        schedule_dts=timezone.make_aware(
            datetime(2032, 1, 1, 11, 59),
            timezone.get_default_timezone()),
        return_dts=timezone.make_aware(
            datetime(2032, 1, 1, 11, 59),
            timezone.get_default_timezone()),
    )

    RescheduleExclude.objects.create(
        job=job,
        start_time=time(12, 00),
        end_time=time(13, 00),
    )

    job.reschedule()
    self.assertEqual(2, Run.objects.filter(job_id=1).count())

    runs = Run.objects.filter(job_id=1)
    self.assertEqual(
        timezone.make_aware(
            datetime(2032, 1, 1, 13, 59),
            timezone.get_default_timezone()
        ),
        runs[1].schedule_dts
    )
def test_17_stocktransactions(self):
    """Simple stock transaction test"""
    order = self.create_order()
    product = self.create_product()
    Product = product.__class__

    period = Period.objects.create(
        name='Period 1',
        start=timezone.now(),
    )
    # Create a period which has been superseded by Period 1
    Period.objects.create(
        name='Period 0',
        start=timezone.make_aware(datetime(2000, 1, 1, 0, 0),
                                  timezone.get_default_timezone()),
    )
    # Create a period in the far future
    Period.objects.create(
        name='Period 2',
        start=timezone.make_aware(datetime(2030, 1, 1, 0, 0),
                                  timezone.get_default_timezone()),
    )

    s = StockTransaction.objects.create(
        product=product,
        type=StockTransaction.INITIAL,
        change=10,
    )
    self.assertEqual(s.period, period)
    self.assertEqual(Product.objects.get(pk=product.id).items_in_stock, 10)

    StockTransaction.objects.create(
        product=product,
        type=StockTransaction.CORRECTION,
        change=-3,
    )
    self.assertEqual(StockTransaction.objects.items_in_stock(product), 7)

    StockTransaction.objects.create(
        product=product,
        type=StockTransaction.SALE,
        change=-2,
    )
    StockTransaction.objects.create(
        product=product,
        type=StockTransaction.PURCHASE,
        change=4,
    )
    StockTransaction.objects.open_new_period(name='Something')

    transaction = StockTransaction.objects.filter(product=product)[0]
    self.assertEqual(transaction.type, StockTransaction.INITIAL)
    self.assertEqual(transaction.change, 9)
    self.assertEqual(transaction.period.name, 'Something')
def _update_feed(feed):
    logging.info("Update started.")
    old_limit = timezone.make_aware(
        datetime.now(), timezone.get_default_timezone()
    ) - timedelta(seconds=feed.update_interval)
    if feed.last_updated is not None and old_limit < feed.last_updated:
        logging.info("Update not needed.")
        #return  # Commented out for testing purposes.

    parsed_feed = feedparser.parse(feed.url)
    if parsed_feed.get('status') != 200:
        logging.warning("Feed '{url}' errored with message '{msg}'".format(
            url=feed.url, msg=str(parsed_feed['bozo_exception']).strip()))

    for entry in parsed_feed['entries']:
        published = dateutil.parser.parse(entry['published'])
        if feed.last_pubdate is None or published > feed.last_pubdate:
            logging.info('Update added new post: ' + entry['guid'])
            feed.last_pubdate = timezone.make_aware(
                published, timezone.get_default_timezone())
            for callback in _get_callbacks(feed):
                try:
                    # Encode into a separate variable so subsequent callbacks
                    # still see the original entry dict.
                    payload = jsonpickle.encode(entry, unpicklable=False)
                    urllib2.urlopen(callback.url, data=payload)
                except Exception as err:
                    logging.error("ERROR: " + str(err))

    feed.last_updated = timezone.make_aware(
        datetime.now(), timezone.get_default_timezone())
    feed.save()
def updated_date_range(self):
    """
    Filter by a date range for last_updated.

    Options should contain date_range_from and date_range_to, each of
    which are simply date objects.
    """
    options = self.options
    date_from = datetime.combine(options['date_range_from'], time(0, 0))
    date_from = tz.make_aware(date_from, tz.get_default_timezone())
    date_from = date_from.astimezone(tz.utc)
    date_to = datetime.combine(options['date_range_to'], time(23, 59, 59, 99))
    date_to = tz.make_aware(date_to, tz.get_default_timezone())
    date_to = date_to.astimezone(tz.utc)
    if self.model._meta.object_name != 'RecordMetadata':
        prefix = 'record_metadata__'
    else:
        prefix = ''
    filter = [
        {
            '{}record_last_updated_gmt__gte'.format(prefix): date_from,
            '{}record_last_updated_gmt__lte'.format(prefix): date_to
        },
        {
            '{}deletion_date_gmt__gte'.format(prefix): date_from,
            '{}deletion_date_gmt__lte'.format(prefix): date_to,
        }
    ]
    order_by = ['{}record_last_updated_gmt'.format(prefix)]
    return {'filter': filter, 'order_by': order_by}
def handle(self, *args, **kwargs):
    with open(log_file_name, 'a') as log:
        #now = datetime.now()
        now = timezone.make_aware(datetime.now(),
                                  timezone.get_default_timezone())
        # A single strftime is enough here; the original nested
        # now.strftime(now.strftime(...)) call was redundant.
        start_str = now.strftime("%m/%W/%Y - %H:%M:%S")
        log.write("[*] Start: " + start_str + "\n")
        print start_str
        for screen_name in screen_names:
            try:
                print
                print "[*] Polling " + screen_name
                print
                find_reply_chains(screen_name)
            except TwitterError as e:
                print "[!] Rate Limit exceeded"
                print e
                print "[!] Sleeping for 15 minutes"
                wake = now + timedelta(seconds=60 * 15)
                wake_str = wake.strftime("%m/%W/%Y - %H:%M:%S")
                print "[*] Will wake at " + wake_str
                time.sleep(60 * 15)
                print "[*] Resuming..."
                print
        now = timezone.make_aware(datetime.now(),
                                  timezone.get_default_timezone())
        end_str = now.strftime("%m/%W/%Y - %H:%M:%S")
        log.write("End: " + end_str + "\n")
        print "[*] End: " + end_str
def event_browser(request):
    """Return the daily process main overview page."""
    if not request.user.is_authenticated():
        return redirect("login")
    elif not has_permission(request.user, "cnto_view_events"):
        return redirect("manage")

    context = {}

    event_data = {}
    for event in Event.objects.all():
        stats = Attendance.get_stats_for_event(event)
        start_dt = event.start_dt
        end_dt = event.end_dt
        start_key = start_dt.astimezone(
            timezone.get_default_timezone()).strftime("%Y-%m-%d %H:%M")
        event_data[start_key] = {
            "title": "\n%s minutes\n%.2f %% attendance\n%s players" % (
                event.duration_minutes,
                stats["average_attendance"] * 100.0,
                stats["player_count"]),
            "end_dt_string": end_dt.astimezone(
                timezone.get_default_timezone()).strftime("%Y-%m-%d %H:%M"),
            "css_class_name": event.event_type.css_class_name,
        }

    context["event_data"] = json.dumps(event_data)
    context["groups"] = MemberGroup.objects.all()
    context["warning_count"] = MemberWarning.objects.filter(
        acknowledged=False).count()

    return render(request, 'cnto/event/browser.html', context)
def history(self, request, pk=None):
    instance = self.get_object()
    user = request.query_params.get('user', None)  # Default to all users
    start = request.query_params.get('start', None)  # In YYYY-MM-DD format
    if start:
        start = datetime.datetime.strptime(start, '%Y-%m-%d')
    else:
        start = datetime.datetime.min
    start = timezone.make_aware(start, timezone.get_default_timezone())
    end = request.query_params.get('end', None)  # In YYYY-MM-DD format
    if end:
        end = datetime.datetime.strptime(end, '%Y-%m-%d')
    else:
        end = datetime.datetime.today()
    end = timezone.make_aware(end, timezone.get_default_timezone())
    history = []
    for v, version in enumerate(Version.objects.get_for_object(instance).reverse()):
        if (user is not None and version.revision.user.username != user) \
                or version.revision.date_created < start \
                or version.revision.date_created > end:
            continue
        history.append({
            'version': v,
            'created': version.revision.date_created.strftime('%Y-%m-%d %H:%M:%S'),
            'user': version.revision.user.username,
            'data': version.field_dict,
        })
    return Response(history, status=200)
def edit_profile(request, id):
    if request.method == 'GET':
        if request.user.is_authenticated():
            now = timezone.make_aware(datetime.now(),
                                      timezone.get_default_timezone())
            now = formats.date_format(now, "SHORT_DATETIME_FORMAT")
            data = User.objects.get(pk=id)
            dv = {'first_name': data.first_name,
                  'last_name': data.last_name,
                  'email': data.email}
            form = EditProfileForm(dv)
            return render(request, 'userprofile_edit.html',
                          {'form': form,
                           'timezones': pytz.common_timezones,
                           'date': str(now)})
        else:
            return redirect('/authen/')
    elif request.method == 'POST':
        now = timezone.make_aware(datetime.now(),
                                  timezone.get_default_timezone())
        now = formats.date_format(now, "SHORT_DATETIME_FORMAT")
        data = User.objects.get(pk=id)
        if request.POST.get('updateuser'):
            user = request.user
            tz_name = request.POST['timezone']
            form = EditProfileForm(request.POST)
            UserProfile.objects.filter(pk=id).update(user_timezone=tz_name)
            request.session['django_timezone'] = tz_name
            if form.is_valid():
                user.first_name = request.POST['first_name']
                user.last_name = request.POST['last_name']
                user.email = request.POST['email']
                user.save()
            return redirect('/userprofile/')
        elif request.POST.get('canceledituser'):
            return redirect('/userprofile/')
def updated_date_range(self):
    """
    Filter by a date range for last_updated.

    Options should contain date_range_from and date_range_to, each of
    which are simply date objects. Options *may* contain `is_deletion`,
    which is a boolean that indicates whether or not this requires
    "last deleted" rather than "last updated".
    """
    options = self.options
    date_from = datetime.combine(options['date_range_from'], time(0, 0))
    date_from = tz.make_aware(date_from, tz.get_default_timezone())
    date_from = date_from.astimezone(tz.utc)
    date_to = datetime.combine(options['date_range_to'], time(23, 59, 59, 99))
    date_to = tz.make_aware(date_to, tz.get_default_timezone())
    date_to = date_to.astimezone(tz.utc)
    if self.model._meta.object_name != 'RecordMetadata':
        prefix = 'record_metadata__'
    else:
        prefix = ''
    if options.get('is_deletion', False):
        filter = [{
            '{}deletion_date_gmt__gte'.format(prefix): date_from,
            '{}deletion_date_gmt__lte'.format(prefix): date_to,
        }]
        order_by = ['{}deletion_date_gmt'.format(prefix)]
    else:
        filter = [{
            '{}record_last_updated_gmt__gte'.format(prefix): date_from,
            '{}record_last_updated_gmt__lte'.format(prefix): date_to
        }]
        order_by = ['{}record_last_updated_gmt'.format(prefix)]
    return {'filter': filter, 'order_by': order_by}
def weekly(request, from_date=None, run_update_before_chase=True):
    data = None
    form = WeeklyForm()
    # print('request', request)
    if request.method == 'POST':  # and 'from_date' in request.POST.keys():
        report_start_date = datetime(
            *(int(request.POST['from_date_' + z]) for z in ['year', 'month', 'day']),
            tzinfo=timezone.get_default_timezone())
        # form = WeeklyForm(request.POST)
        # if form.is_valid():
        #     clean_data = form.cleaned_data
        #     if clean_data['from_date']:
        #         report_start_date = datetime(
        #             *(getattr(clean_data['from_date'], z) for z in ['year', 'month', 'day']),
        #             tzinfo=timezone.get_default_timezone())
    elif from_date:
        match = re.match(r'(?P<year>\d\d\d\d).(?P<month>\d\d).(?P<day>\d\d)',
                         from_date)
        print('matches', match.groups())
        from_date_details = {k: int(v) for k, v in match.groupdict().items()}
        report_start_date = datetime(
            *(from_date_details[z] for z in ['year', 'month', 'day']),
            tzinfo=timezone.get_default_timezone())
        # report = WeeklyReport(report_start_date)
        # data = report.action()
    else:
        report_start_date = None
    print('report_start_date', report_start_date)
    if report_start_date:
        if run_update_before_chase:
            print('running update prior to selecting cases')
            update_results = case_collector.true_update(
                target_start_time=timezone.now() - timedelta(days=8),
                target_end_time=timezone.now())
        report = WeeklyReport(report_start_date)
        data = report.action()
    result = render(request, 'results.html',
                    {'title': 'Weekly Report', 'data': data, 'form': form})
    return result
def _human_date(self):
    """
    Returns the appropriate date based on whether the event has started,
    ended, or is going on.
    """
    # Capture a single "now" so the three comparisons stay consistent.
    now = datetime.datetime.now(tz=timezone.get_default_timezone())
    if self.start_date > now:
        return self.start_date
    elif self.start_date < now and self.end_date > now:
        return "Going on now!"
    elif self.end_date < now:
        return self.end_date
def is_offset_greater(time_string_offset):
    time1 = str(timezone.now())[:19]
    offset_time = time_string_offset[:19]  # to remove extra, unnecessary numbers
    offset_time_formatted = datetime.datetime.strptime(offset_time,
                                                       "%Y-%m-%d %H:%M:%S")
    offset_time_tz_aware = timezone.make_aware(offset_time_formatted,
                                               timezone.get_default_timezone())
    now_time_formatted = datetime.datetime.strptime(time1, "%Y-%m-%d %H:%M:%S")
    now_time_tz_aware = timezone.make_aware(now_time_formatted,
                                            timezone.get_default_timezone())
    return now_time_tz_aware > offset_time_tz_aware
def is_offset_is_greater(time_string_offset):
    time_now = str(timezone.now())[:19]
    offset_time = time_string_offset[:19]
    offset_time_formatted = datetime.datetime.strptime(offset_time,
                                                       "%Y-%m-%d %H:%M:%S")
    offset_time_tz_aware = timezone.make_aware(offset_time_formatted,
                                               timezone.get_default_timezone())
    now_time_formatted = datetime.datetime.strptime(time_now,
                                                    "%Y-%m-%d %H:%M:%S")
    now_time_tz_aware = timezone.make_aware(now_time_formatted,
                                            timezone.get_default_timezone())
    return now_time_tz_aware > offset_time_tz_aware
def waiver_key(waiver):
    (team, pickup) = waiver
    if not pickup:
        if team.name == "Nick":
            return timezone.make_aware(datetime(year=2001, month=1, day=1),
                                       timezone.get_default_timezone())
        return timezone.make_aware(datetime(year=2000, month=1, day=1),
                                   timezone.get_default_timezone())
    else:
        return pickup.date
def _get_job_tablebody(self, job):
    body = [str(job.id),
            job.created_time.astimezone(
                get_default_timezone()).strftime('%a %d %b %Y %H:%M'),
            job.description]
    if job.status == Job.ERROR:
        return body + [job.status + ' ?']
    # COMPLETED and all other statuses render the same way.
    return body + [job.status]
def calculate_start_and_end_dt_for_cycle(month_dt):
    start_dt = timezone.make_aware(
        datetime(month_dt.year, month_dt.month, 1, 0, 0),
        timezone.get_default_timezone())
    # month + 1 overflows for December, so roll over to January of the
    # next year to keep the cycle end valid.
    if month_dt.month == 12:
        end_year, cycle_end_month_number = month_dt.year + 1, 1
    else:
        end_year, cycle_end_month_number = month_dt.year, month_dt.month + 1
    end_dt = timezone.make_aware(
        datetime(end_year, cycle_end_month_number,
                 calendar.monthrange(end_year, cycle_end_month_number)[1],
                 23, 59),
        timezone.get_default_timezone())
    return start_dt, end_dt
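# Hypothetical calls for calculate_start_and_end_dt_for_cycle (example values,
# not from the source), showing the cycle spanning two months and the December
# rollover handled above:
#
#   calculate_start_and_end_dt_for_cycle(datetime(2021, 3, 15))
#   # -> start 2021-03-01 00:00, end 2021-04-30 23:59 (default timezone)
#   calculate_start_and_end_dt_for_cycle(datetime(2021, 12, 5))
#   # -> start 2021-12-01 00:00, end 2022-01-31 23:59 (default timezone)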
def _update_payment_dates(self, payment):
    last_payment = payment.member.get_last_payment()
    if last_payment:
        payment.valid_until = (last_payment.valid_until
                               + timedelta(days=payment.type.duration))
    else:
        payment.valid_until = (
            datetime.now(tz=timezone.get_default_timezone())
            + timedelta(days=payment.type.duration))
    payment.date = datetime.now(tz=timezone.get_default_timezone())
    payment.save()
def crawlblog(self, blog):
    # Feedergrabber returns ([(link, title, date)], [errors]).
    # We're ignoring the errors returned for right now.
    crawled, errors = feedergrabber27.feedergrabber(blog.feed_url)

    if crawled:
        for link, title, date in crawled:
            date = timezone.make_aware(date, timezone.get_default_timezone())
            now = timezone.make_aware(datetime.datetime.now(),
                                      timezone.get_default_timezone())
            title = cleantitle(title)

            # Create the post instance if it doesn't already exist.
            post, created = Post.objects.get_or_create(
                blog=blog,
                url=link,
                defaults={
                    'title': title,
                    'date_updated': date,
                }
            )

            if created:
                print "Created '%s' from blog '%s'" % (title, blog.feed_url)
                # Only post to zulip if the post was created recently
                # so that new accounts don't spam zulip with their entire post list.
                if (now - date) < max_zulip_age:
                    post_page = ROOT_URL + 'post/' + Post.objects.get(url=link).slug
                    send_message_zulip(user=blog.user, link=post_page, title=title)
                # Subscribe the author to comment updates.
                # subscription, created = Comment_Subscription.objects.get_or_create(
                #     user=blog.user,
                #     post=post,
                # )

            # If new info, update the posts.
            if not created:
                # print ".",
                updated = False
                if date != post.date_updated:
                    post.date_updated = date
                    updated = True
                if title != post.title:
                    post.title = title
                    updated = True
                if updated:
                    print "Updated %s in %s." % (title, blog.feed_url)
                    post.save()
    else:
        log.debug(str(errors))
def month_view(
    request,
    year,
    month,
    template='swingtime/monthly_view.html',
    queryset=None
):
    '''
    Render a traditional calendar grid view with temporal navigation variables.

    Context parameters:

    ``today``
        the current datetime.datetime value

    ``calendar``
        a list of rows containing (day, items) cells, where day is the day of
        the month integer and items is a (potentially empty) list of occurrence
        for the day

    ``this_month``
        a datetime.datetime representing the first day of the month

    ``next_month``
        this_month + 1 month

    ``last_month``
        this_month - 1 month
    '''
    year, month = int(year), int(month)
    cal = calendar.monthcalendar(year, month)
    dtstart = timezone.make_aware(datetime(year, month, 1),
                                  timezone.get_default_timezone())
    last_day = max(cal[-1])
    dtend = timezone.make_aware(datetime(year, month, last_day),
                                timezone.get_default_timezone())

    # TODO Whether to include those occurrences that started in the previous
    # month but end in this month?
    queryset = queryset._clone() if queryset is not None \
        else Occurrence.objects.select_related()
    occurrences = queryset.filter(start_time__year=year,
                                  start_time__month=month)

    def start_day(o):
        return o.start_time.day

    by_day = dict((dt, list(o))
                  for dt, o in itertools.groupby(occurrences, start_day))
    data = {
        'today': timezone.now(),
        'calendar': [[(d, by_day.get(d, [])) for d in row] for row in cal],
        'this_month': dtstart,
        'next_month': dtstart + timedelta(days=+last_day),
        'last_month': dtstart + timedelta(days=-1),
    }

    return render(request, template, data)
def update_sun():
    global sunset_minus, sunset, sunrise, sunrise_plus, last_update
    sunset_minus = city.sun(
        datetime.datetime.now() - datetime.timedelta(days=1))["sunset"].astimezone(
            timezone.get_default_timezone()).replace(tzinfo=None)
    sunset = city.sun(datetime.datetime.now())["sunset"].astimezone(
        timezone.get_default_timezone()).replace(tzinfo=None)
    sunrise = city.sun(datetime.datetime.now())["sunrise"].astimezone(
        timezone.get_default_timezone()).replace(tzinfo=None)
    sunrise_plus = city.sun(
        datetime.datetime.now() + datetime.timedelta(days=1))["sunrise"].astimezone(
            timezone.get_default_timezone()).replace(tzinfo=None)
    last_update = datetime.datetime.now()
def shift_to_dict(shift):
    return {
        'id': shift.pk,
        'role_id': shift.role_id,
        'start_time': shift.start_time.astimezone(timezone.get_default_timezone()),
        'shift_minutes': shift.shift_minutes,
        'end_time': shift.end_time.astimezone(timezone.get_default_timezone()),
        'is_empty': False,
        'columns': shift.shift_minutes,
        'open_slot_count': shift.open_slot_count,
    }
def get_first_and_last(year, month):
    """Returns two datetimes: first day and last day of given year & month."""
    ym_first = make_aware(
        datetime.datetime(year, month, 1),
        get_default_timezone()
    )
    ym_last = make_aware(
        datetime.datetime(year, month, monthrange(year, month)[1],
                          23, 59, 59, 1000000 - 1),
        get_default_timezone()
    )
    return ym_first, ym_last
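# Hypothetical call for get_first_and_last (example values, not from the source):
#
#   first, last = get_first_and_last(2021, 2)
#   # first -> 2021-02-01 00:00:00 in the default timezone
#   # last  -> 2021-02-28 23:59:59.999999 in the default timezone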
def process_request(self, request):
    if request.path != "/feb-26-2015/":
        start_dt = datetime(2015, 2, 26, 6,
                            tzinfo=timezone.get_default_timezone())
        end_dt = datetime(2015, 2, 26, 23, 59, 59,
                          tzinfo=timezone.get_default_timezone())
        if start_dt <= timezone.now() <= end_dt:
            return HttpResponseRedirect("/feb-26-2015/")
        elif timezone.now() < start_dt:
            messages.error(request, self.msg_content)
            return None
        else:
            return None
def get_queryset(self):
    """Return the :class:`Emergencia` entries attended today."""
    inicio = timezone.make_aware(
        datetime.combine(date.today(), time.min),
        timezone.get_default_timezone())
    fin = timezone.make_aware(
        datetime.combine(date.today(), time.max),
        timezone.get_default_timezone())
    #return Emergencia.objects.filter(created__range=(inicio, fin))
    return Emergencia.objects.order_by('-created')
def test_string_representation(self):
    start = timezone.make_aware(
        datetime.datetime(2015, 2, 25, 17, 24),
        timezone.get_default_timezone()
    )
    end = timezone.make_aware(
        datetime.datetime(2015, 3, 18, 18, 12),
        timezone.get_default_timezone()
    )
    derog = G(Derogation, start_dt=start, end_dt=end,
              mode='H', zones=[F(num=2), F(num=3)])
    assert str(derog) == "25/02-17:24->18/03-18:12 H Z2-Z3"
def fetch_tweets():
    api = Api()
    date_format = '%a %b %d %H:%M:%S +0000 %Y'
    for user in TwitterUser.objects.all():
        timeline = api.fetch_timeline_by_screen_name(screen_name=user.screen_name)
        # `status` is a decoded status dict; avoid naming it `json`, which
        # shadows the module.
        for status in timeline:
            tweet_id = status['id']
            tweet, created = Tweet.objects.get_or_create(user=user,
                                                         tweet_id=tweet_id)

            # Do we have a retweet?
            if 'retweeted_status' in status:
                tweet.retweeted = True

                # Process the retweeter.
                retweet_user = status['retweeted_status']['user']
                tweet.retweeter_profile_image_url = retweet_user['profile_image_url']
                tweet.retweeter_screen_name = retweet_user['screen_name']
                tweet.retweeter_name = retweet_user['name']

                # Process the retweet.
                retweet = status['retweeted_status']
                tweet.retweeted_status_id = retweet['id']
                tweet.retweeted_status_id_str = retweet['id_str']
                tweet.retweeted_status_text = retweet['text']
                tweet.retweeted_status_source = retweet['source']

                # Process the date of the original retweet. `timezone` here is
                # presumably the local UTC offset in seconds (time.timezone),
                # not django.utils.timezone.
                d = datetime.strptime(retweet['created_at'], date_format)
                d -= timedelta(seconds=timezone)
                tweet.retweeted_status_created_at = make_aware(
                    d, get_default_timezone())

            tweet.user = user
            tweet.tweet_id = status['id']
            tweet.tweet_id_str = status['id_str']
            tweet.source = status['source']

            # Store reply metadata.
            tweet.in_reply_to_user_id = status['in_reply_to_user_id']
            tweet.in_reply_to_user_id_str = status['in_reply_to_user_id_str']
            tweet.in_reply_to_status_id = status['in_reply_to_status_id']
            tweet.in_reply_to_status_id_str = status['in_reply_to_status_id_str']

            # Urlize and linkify hashtags and usernames.
            tweet.text = urlize(status['text'])
            tweet.text = re_usernames.sub(replace_usernames, tweet.text)
            tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)

            # Process the date.
            d = datetime.strptime(status['created_at'], date_format)
            d -= timedelta(seconds=timezone)
            tweet.created_at = make_aware(d, get_default_timezone())
            tweet.save()
# Timezone support with fallback.
try:
    from django.utils.timezone import (now, get_default_timezone,
                                       make_aware as django_make_aware)
except ImportError:
    from datetime import datetime
    now = datetime.now
    make_aware = lambda v: v
else:
    make_aware = lambda v: django_make_aware(v, get_default_timezone())
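# A minimal sketch exercising the shim above (illustrative only): on Django
# with USE_TZ the naive value gains the default timezone; on the
# plain-datetime fallback it is returned unchanged.
if __name__ == "__main__":
    from datetime import datetime as _datetime
    value = make_aware(_datetime(2014, 7, 1, 9, 0))
    print(value)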
def was_published_recently(self):
    return self.pub_date >= timezone.make_aware(
        datetime.datetime.now(),
        timezone.get_default_timezone()) - datetime.timedelta(hours=6)
def test_tz_midnight(self):
    dt = tz_midnight(date.today())
    self.assertEqual(dt.hour, 0)
    self.assertEqual(dt.minute, 0)
    self.assertEqual(dt.second, 0)
    self.assertEqual(dt.tzinfo, timezone.get_default_timezone())
def normalize(dt):
    return get_default_timezone().normalize(dt)
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse_lazy
from django.core.validators import (MinValueValidator, MaxValueValidator,
                                    RegexValidator)
from django.db import models
from django.utils import timezone
from django.utils.translation import string_concat

from localflavor.us.models import PhoneNumberField, USStateField

from ws.fields import OptionalOneToOneField
import ws.utils.dates as dateutils

pytz_timezone = timezone.get_default_timezone()

alphanum = RegexValidator(r'^[a-zA-Z0-9 ]*$',
                          "Only alphanumeric characters and spaces allowed")


class SingletonModel(models.Model):
    class Meta:
        abstract = True

    def save(self, *args, **kwargs):
        self.pk = 1
        super(SingletonModel, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        pass
def day_now():
    t = timezone.make_aware(datetime.datetime.now(),
                            timezone.get_default_timezone())
    return Days.objects.filter(Q(start_time__lte=t) | Q(id=1)).latest('id').id
def test_handle_dst_ending(self):
    dst_ending_date = datetime.datetime(2017, 2, 18, 23, 59, 59, 999999)
    handled = handle_timezone(dst_ending_date, False)
    self.assertEqual(
        handled,
        get_default_timezone().localize(dst_ending_date, False))
def default_timezone(cls):
    return get_default_timezone() if settings.USE_TZ else None
def test_datetime_with_local_tzinfo(self):
    ltz = get_default_timezone()
    dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
    self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
    self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))),
                     dt.replace(tzinfo=None))
def test_activity_export(self):
    tz = timezone.get_default_timezone()
    office = OfficeFactory(name='Budapest')

    section_health = SectionFactory(name='Health')
    section_education = SectionFactory(name='Education')

    location_ABC = LocationFactory(name='Location ABC')
    location_345 = LocationFactory(name='Location 345')
    location_111 = LocationFactory(name='Location 111')

    partnership_A1 = InterventionFactory(title='Partnership A1')
    partner = partnership_A1.agreement.partner
    partner.name = 'Partner A'
    partner.save()

    partnership_A2 = InterventionFactory(title='Partnership A2')
    agreement = partnership_A2.agreement
    agreement.partner = partner
    agreement.save()

    partnership_B3 = InterventionFactory(title='Partnership B3')
    partner = partnership_B3.agreement.partner
    partner.name = 'Partner B'
    partner.save()

    partnership_C1 = InterventionFactory(title='Partnership C1')
    partner = partnership_C1.agreement.partner
    partner.name = 'Partner C'
    partner.save()

    # Some results
    result_A11 = ResultFactory(name='Result A11')
    result_A21 = ResultFactory(name='Result A21')

    # Set up travels
    user_joe_smith = UserFactory(first_name='Joe', last_name='Smith')
    user_alice_carter = UserFactory(first_name='Alice', last_name='Carter')
    user_lenox_lewis = UserFactory(first_name='Lenox', last_name='Lewis')
    travel_1 = TravelFactory(reference_number='2016/1000',
                             traveler=user_joe_smith,
                             office=office,
                             section=section_health,
                             start_date=datetime(2017, 11, 8, tzinfo=tz),
                             end_date=datetime(2017, 11, 14, tzinfo=tz))
    supervisor = UserFactory()
    travel_2 = TravelFactory(reference_number='2016/1211',
                             supervisor=supervisor,
                             traveler=user_alice_carter,
                             office=office,
                             section=section_education,
                             start_date=datetime(2017, 11, 8, tzinfo=tz),
                             end_date=datetime(2017, 11, 14, tzinfo=tz))

    # Do some cleanup
    TravelActivity.objects.all().delete()

    # Create the activities finally
    activity_1 = TravelActivityFactory(
        travel_type=TravelType.PROGRAMME_MONITORING,
        date=datetime(2016, 12, 3, tzinfo=UTC),
        result=result_A11,
        primary_traveler=user_joe_smith)
    activity_1.travels.add(travel_1)
    activity_1.locations.set([location_ABC, location_345])
    activity_1.partner = partnership_A1.agreement.partner
    activity_1.partnership = partnership_A1
    activity_1.save()

    activity_2 = TravelActivityFactory(
        travel_type=TravelType.PROGRAMME_MONITORING,
        date=datetime(2016, 12, 4, tzinfo=UTC),
        result=result_A21,
        primary_traveler=user_lenox_lewis)
    activity_2.travels.add(travel_1)
    activity_2.locations.set([location_111])
    activity_2.partner = partnership_A2.agreement.partner
    activity_2.partnership = partnership_A2
    activity_2.save()

    activity_3 = TravelActivityFactory(
        travel_type=TravelType.MEETING,
        date=datetime(2016, 12, 3, tzinfo=UTC),
        result=None,
        primary_traveler=user_joe_smith)
    activity_3.travels.add(travel_1)
    activity_3.locations.set([location_ABC])
    activity_3.partner = partnership_B3.agreement.partner
    activity_3.partnership = partnership_B3
    activity_3.save()

    activity_4 = TravelActivityFactory(
        travel_type=TravelType.SPOT_CHECK,
        date=datetime(2016, 12, 6, tzinfo=UTC),
        result=None,
        primary_traveler=user_alice_carter)
    activity_4.travels.add(travel_2)
    activity_4.locations.set([location_111, location_345])
    activity_4.partner = partnership_C1.agreement.partner
    activity_4.partnership = partnership_C1
    activity_4.save()

    with self.assertNumQueries(6):
        response = self.forced_auth_req(
            'get', reverse('t2f:travels:list:activity_export'),
            user=self.unicef_staff)
    export_csv = csv.reader(StringIO(response.content.decode('utf-8')))
    rows = [r for r in export_csv]

    self.assertEqual(len(rows), 5)

    # Check the header row
    self.assertEqual(rows[0],
                     ['reference_number', 'traveler', 'office', 'section',
                      'status', 'trip_type', 'partner', 'partnership',
                      'results', 'locations', 'start_date', 'end_date',
                      'is_secondary_traveler', 'primary_traveler_name'])

    self.assertEqual(rows[1],
                     ['2016/1000', 'Joe Smith', 'Budapest', 'Health',
                      'planned', 'Programmatic Visit', 'Partner A',
                      'Partnership A1', 'Result A11',
                      'Location 345, Location ABC',
                      '08-Nov-2017', '14-Nov-2017', '', ''])

    self.assertEqual(rows[2],
                     ['2016/1000', 'Joe Smith', 'Budapest', 'Health',
                      'planned', 'Programmatic Visit', 'Partner A',
                      'Partnership A2', 'Result A21', 'Location 111',
                      '08-Nov-2017', '14-Nov-2017', 'YES', 'Lenox Lewis'])

    self.assertEqual(rows[3],
                     ['2016/1000', 'Joe Smith', 'Budapest', 'Health',
                      'planned', 'Meeting', 'Partner B', 'Partnership B3',
                      '', 'Location ABC',
                      '08-Nov-2017', '14-Nov-2017', '', ''])

    self.assertEqual(rows[4],
                     ['2016/1211', 'Alice Carter', 'Budapest', 'Education',
                      'planned', 'Spot Check', 'Partner C', 'Partnership C1',
                      '', 'Location 111, Location 345',
                      '08-Nov-2017', '14-Nov-2017', '', ''])
from __future__ import unicode_literals

import datetime
from xml.dom import minidom

from django.contrib.sites.models import Site
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils import timezone
from django.utils.feedgenerator import rfc2822_date, rfc3339_date

from .models import Article, Entry

TZ = timezone.get_default_timezone()


class FeedTestCase(TestCase):

    @classmethod
    def setUpTestData(cls):
        cls.e1 = Entry.objects.create(
            title='My first entry',
            updated=datetime.datetime(1980, 1, 1, 12, 30),
            published=datetime.datetime(1986, 9, 25, 20, 15, 00)
        )
        cls.e2 = Entry.objects.create(
            title='My second entry',
            updated=datetime.datetime(2008, 1, 2, 12, 30),
            published=datetime.datetime(2006, 3, 17, 18, 0)
        )
        cls.e3 = Entry.objects.create(
from functools import reduce

from django.conf import settings
from django.db import models
from django.utils.timezone import get_default_timezone
from django.utils.translation import gettext_lazy as _

from model_utils.models import TimeStampedModel

from sample_shop.orders.models import AbstractItem

tz = get_default_timezone()

from . import emails as cart_email

# class Order(TimeStampedModel):
#     completed = models.BooleanField(_('Completed order'), default=False)
#     name = models.CharField(_('Name'), max_length=100, default='')
#     email = models.EmailField(_('Email'), default='')
#     phone = models.CharField(_('Phone'), max_length=30, default='')
#     city = models.CharField(_('City'), max_length=100, default='', blank=True)
#     comment = models.TextField(_('Comment'), default='', blank=True)
#     layout = models.FileField(_('Layout'), upload_to='layout', blank=True, default='')
#     user = models.ForeignKey(
#         settings.AUTH_USER_MODEL,
#         on_delete=models.CASCADE,
#         related_name='orders',
#         blank=True,
#         null=True,
#         verbose_name=_('User'),
#     )
#
#     class Meta:
#         verbose_name = _('Order')
def run(self):
    """
    Request new tweets from the Twitter API.
    """
    urls = {
        QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/"
                          "user_timeline.json?screen_name=%s"
                          "&include_rts=true" % self.value.lstrip("@")),
        QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json"
                          "?list_id=%s&include_rts=true" %
                          self.value.encode("utf-8")),
        QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json"
                           "?q=%s" % quote(self.value.encode("utf-8")),
    }
    try:
        url = urls[self.type]
    except KeyError:
        raise TwitterQueryException("Invalid query type: %s" % self.type)

    settings.use_editable()
    auth_settings = (settings.TWITTER_CONSUMER_KEY,
                     settings.TWITTER_CONSUMER_SECRET,
                     settings.TWITTER_ACCESS_TOKEN_KEY,
                     settings.TWITTER_ACCESS_TOKEN_SECRET)
    if not all(auth_settings):
        raise TwitterQueryException("Twitter OAuth settings missing")

    try:
        tweets = requests.get(url, auth=OAuth1(*auth_settings)).json()
    except Exception as e:
        raise TwitterQueryException("Error retrieving: %s" % e)

    try:
        raise TwitterQueryException(tweets["errors"][0]["message"])
    except (IndexError, KeyError, TypeError):
        pass

    if self.type == "search":
        tweets = tweets["statuses"]

    for tweet_json in tweets:
        remote_id = str(tweet_json["id"])
        tweet, created = self.tweets.get_or_create(remote_id=remote_id)
        if not created:
            continue
        if "retweeted_status" in tweet_json:
            user = tweet_json['user']
            tweet.retweeter_user_name = user["screen_name"]
            tweet.retweeter_full_name = user["name"]
            tweet.retweeter_profile_image_url = user["profile_image_url"]
            tweet_json = tweet_json["retweeted_status"]
        if self.type == QUERY_TYPE_SEARCH:
            tweet.user_name = tweet_json['user']['screen_name']
            tweet.full_name = tweet_json['user']['name']
            tweet.profile_image_url = tweet_json['user']["profile_image_url"]
            date_format = "%a %b %d %H:%M:%S +0000 %Y"
        else:
            user = tweet_json["user"]
            tweet.user_name = user["screen_name"]
            tweet.full_name = user["name"]
            tweet.profile_image_url = user["profile_image_url"]
            date_format = "%a %b %d %H:%M:%S +0000 %Y"
        tweet.text = urlize(tweet_json["text"])
        tweet.text = re_usernames.sub(replace_usernames, tweet.text)
        tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)
        if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False):
            chars = [ch for ch in tweet.text if ord(ch) < 0x800]
            tweet.text = ''.join(chars)
        # `timezone` here is presumably the local UTC offset in seconds
        # (time.timezone), not django.utils.timezone.
        d = datetime.strptime(tweet_json["created_at"], date_format)
        d -= timedelta(seconds=timezone)
        tweet.created_at = make_aware(d, get_default_timezone())
        tweet.save()
    self.interested = False
    self.save()
def utc_to_local(dt):
    # Change from the UTC timezone to the current seahub timezone.
    tz = timezone.get_default_timezone()
    utc = dt.replace(tzinfo=timezone.utc)
    local = timezone.make_naive(utc, tz)
    return local
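# Example round-trip for utc_to_local, assuming the server's default timezone
# is UTC+8 (values are illustrative, not from the source):
#
#   utc_to_local(datetime(2019, 1, 1, 0, 0))  # -> naive 2019-01-01 08:00:00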
def test_use_tz(self):
    self.assertEqual(self.widget.render(self.datetime), "13.08.2012 18:00:00")
    aware_dt = timezone.make_aware(self.datetime,
                                   timezone.get_default_timezone())
    self.assertEqual(self.widget.clean("13.08.2012 18:00:00"), aware_dt)
def statistics(request):
    total = Machine.objects.all().count()

    status_ping = Machine.objects.filter(
        Q(status_ipv4=Machine.StatusIP.REACHABLE) |
        Q(status_ipv4=Machine.StatusIP.CONFIRMED) |
        Q(status_ipv6=Machine.StatusIP.REACHABLE) |
        Q(status_ipv6=Machine.StatusIP.CONFIRMED)).count()
    status_ssh = Machine.objects.filter(status_ssh=True).count()
    status_login = Machine.objects.filter(status_login=True).count()
    status_abuild = Machine.objects.filter(status_abuild=True).count()

    check_ping = Machine.objects.filter(
        check_connectivity__gte=Machine.Connectivity.PING).count()
    check_ssh = Machine.objects.filter(
        check_connectivity__gte=Machine.Connectivity.SSH).count()
    check_login = Machine.objects.filter(
        check_connectivity__gte=Machine.Connectivity.ALL).count()
    check_abuild = Machine.objects.filter(check_abuild=True).count()

    released_reservations = ReservationHistory.objects.filter(
        reserved_until__gt=timezone.make_aware(
            datetime.datetime.today() - datetime.timedelta(days=2),
            timezone.get_default_timezone()),
        reserved_until__lte=timezone.make_aware(
            datetime.datetime.today(),
            timezone.get_default_timezone()))

    reserved_machines = Machine.objects.filter(
        reserved_at__gt=timezone.make_aware(
            datetime.datetime.today() - datetime.timedelta(days=2),
            timezone.get_default_timezone()),
        reserved_at__lte=timezone.make_aware(
            datetime.datetime.today(),
            timezone.get_default_timezone()))

    matrix = [[], [], [], []]
    for architecture in Architecture.objects.all():
        matrix[0].append(architecture.machine_set.count())
        matrix[1].append(
            architecture.machine_set.filter(reserved_by=None).count())
        matrix[2].append(
            architecture.machine_set.filter(status_login=True).count())
        infinite = timezone.datetime.combine(datetime.date.max,
                                             timezone.datetime.min.time())
        infinite = timezone.make_aware(infinite, timezone.utc)
        matrix[3].append(
            architecture.machine_set.filter(reserved_until=infinite).count())

    matrix[0].append(sum(matrix[0]))
    matrix[1].append(sum(matrix[1]))
    matrix[2].append(sum(matrix[2]))
    matrix[3].append(sum(matrix[3]))

    data = {
        'total': total,
        'matrix': matrix,
        'status': {
            'labels': ['Ping', 'SSH', 'Login', 'ABuild'],
            'values1': [check_ping, check_ssh, check_login, check_abuild],
            'values2': [status_ping, status_ssh, status_login, status_abuild],
            'max': total if total % 100 == 0 else total - (total % 100) + 100
        },
        'domains': {
            'labels': list(Domain.objects.all().values_list('name', flat=True)),
            'values': [domain.machine_set.count()
                       for domain in Domain.objects.all()]
        },
        'released_reservations': released_reservations,
        'reserved_machines': reserved_machines
    }

    return render(
        request,
        'machines/statistics.html',
        {
            'architectures': Architecture.objects.all(),
            'data': data,
            'title': 'Statistics'
        })
def item_pubdate(self, item):
    if isinstance(item, Document):
        return item.created
    return make_aware(dateutil.parser.parse(item[2]["created"]),
                      get_default_timezone())
def _get_default_timezone():
    return timezone.get_default_timezone()
def test_handle_dst_starting(self):
    dst_starting_date = datetime.datetime(2017, 10, 15, 0, 0, 0, 0)
    handled = handle_timezone(dst_starting_date, True)
    self.assertEqual(
        handled,
        get_default_timezone().localize(dst_starting_date, True))
def handle(self, *args, **options):
    user_name = options.get('user')
    user = None
    try:
        user = User.objects.get(username=user_name)
    except User.DoesNotExist:
        logger.error('The user %s did not exist.', user_name,
                     extra={'options': options})
        sys.exit()

    org = None
    try:
        org = Organism.objects.get(scientific_name=options.get('organism'))
    except Organism.DoesNotExist:
        logger.error('The organism %s did not exist.', options.get('organism'),
                     extra={'options': options})
        sys.exit()

    accepted_evcodes = None
    if options.get('evcodes'):
        accepted_evcodes = set(options.get('evcodes').split(','))

    gene_ontology = go()
    remote = options.get('remote') is not None
    obo_location = GO_OBO_URL if remote else options.get('obo')
    loaded_obo = gene_ontology.load_obo(obo_location,
                                        remote_location=remote, timeout=5)
    if not loaded_obo:
        logger.error("Couldn't load OBO file %s with remote equal to %s.",
                     obo_location, remote)
        sys.exit()

    annot_zip_fh = None
    annot_fh = None
    if remote:
        annot_zip_fh = urllib2.urlopen(GO_ASSOC_FTP + '.'.join(
            (GO_ASSOC_PREFIX, GO_NAMES[org.scientific_name],
             GO_ASSOC_SUFFIX)), timeout=5)
    else:
        annot_zip_fh = open(options.get('annot'))
    annot_fh = gzip.GzipFile(fileobj=io.BytesIO(annot_zip_fh.read()))
    annot_zip_fh.close()

    annots = []
    load_pairs = {}
    pubs = set()

    for line in annot_fh:
        if line.startswith('!'):
            continue
        toks = line.strip().split('\t')
        (xrdb, xrid, details, goid, ref, ev, date) = (
            toks[0], toks[1], toks[3], toks[4], toks[5], toks[6], toks[13])

        if options.get('tair'):
            import re
            tair_regex = re.compile('AT[0-9MC]G[0-9][0-9][0-9][0-9][0-9]')
            first_alias = toks[10].split('|')[0]
            if tair_regex.match(toks[2]):
                xrid = toks[2]
            elif tair_regex.match(toks[9]):
                xrid = toks[9]
            elif tair_regex.match(first_alias):
                xrid = first_alias

        if options.get('only_wb') and (toks[0] != 'WB'):
            continue
        if details == 'NOT':
            continue
        if accepted_evcodes is not None and not (ev in accepted_evcodes):
            continue
        if options.get('leading') is not None:
            xrid = xrid.split(':')[1]

        try:
            load_pairs[xrdb].append(xrid)
        except KeyError:
            load_pairs[xrdb] = [xrid, ]

        refs = ref.split('|')
        for ref_item in refs:
            if ref_item.startswith('PMID:'):
                pubs.add(ref_item.split(':')[1])
            else:
                logger.info("Unknown publication key %s", ref_item)
        annots.append((xrdb, xrid, goid, ref, date))

    xref_cache = {}
    if options.get('pseudomonas'):
        logger.info('Pseudomonas entered')
        for (xrdb, xrids) in load_pairs.iteritems():
            gene_objs = Gene.objects.filter(systematic_name__in=xrids)
            logger.info("Mapped %s Pseudomonas genes from the database using "
                        "gene systematic name.", gene_objs.count())
            for gene_obj in gene_objs:
                xref_cache[(xrdb, gene_obj.systematic_name)] = gene_obj
    else:
        for (xrdb, xrids) in load_pairs.iteritems():
            if xrdb in DB_REMAP:
                xrdb = DB_REMAP[xrdb]
            try:
                xrdb_obj = CrossRefDB.objects.get(name=xrdb)
            except CrossRefDB.DoesNotExist:
                logger.warning("Couldn't find the cross reference DB %s.", xrdb)
                continue
            xrid_objs = CrossRef.objects.filter(
                crossrefdb=xrdb_obj).filter(xrid__in=xrids)
            logger.info("Mapped %s cross references from %s",
                        xrid_objs.count(), xrdb)
            for xrid_obj in xrid_objs:
                xref_cache[(xrdb, xrid_obj.xrid)] = xrid_obj.gene

    load_pmids(pubs)
    pub_cache = {}
    pub_values = Publication.objects.filter(pmid__in=pubs).only(
        'id', 'pmid').values()
    for pub in pub_values:
        pub_cache[pub['pmid']] = pub['id']

    for annot in annots:
        (xrdb, xrid, goid, ref, date) = annot
        if xrdb in DB_REMAP:
            xrdb = DB_REMAP[xrdb]
        try:
            gene = xref_cache[(xrdb, xrid)]
        except KeyError:
            logger.debug("Couldn't find xrid %s in xrdb %s.", xrid, xrdb)
            logger.info("Couldn't find xrid %s in xrdb %s.", xrid, xrdb)
            continue

        refs = ref.split('|')
        pub = None
        for ref_item in refs:
            if ref_item.startswith('PMID:'):
                try:
                    pub = pub_cache[int(ref_item.split(':')[1])]
                except KeyError:
                    pub = None

        gene_ontology.add_annotation(go_id=goid, gid=gene.pk, ref=pub,
                                     date=date, direct=True)

    gene_ontology.populated = True  # mark annotated
    gene_ontology.propagate()  # propagate annotations

    evlist = list(accepted_evcodes)
    for (term_id, term) in gene_ontology.go_terms.iteritems():
        if term.annotations:
            # Make the first 50 chars into a slug.
            slug = slugify(' '.join(
                (term.go_id, org.scientific_name, term.full_name)))[:50]
            namespace = GO_NAMESPACE_MAP[term.get_namespace()]
            go_id = term.go_id.split(':')[1]

            # Construct the title.
            title = 'GO' + '-' + namespace + '-' + go_id + ':' + term.full_name

            # Construct the abstract; write the evidence codes as a string.
            evclause = ''
            if len(evlist):
                evclause = ' Only annotations with evidence coded as '
                if len(evlist) == 1:
                    evclause = evclause + evlist[0]
                else:
                    evclause = evclause + ', '.join(
                        evlist[:-1]) + ' or ' + evlist[-1]
                evclause = evclause + ' are included.'

            if term.description:
                description = (term.description +
                               ' Annotations are propagated through '
                               'transitive closure as recommended by the GO '
                               'Consortium.' + evclause)
            else:
                logger.info("No description on term %s", term)

            # Get the geneset.
            changed = False
            try:
                gs_obj = Geneset.objects.get(slug=slug, creator=user)
                changed = False  # flag to know if we need to call save

                # All these genesets should be public.
                if not gs_obj.public:
                    gs_obj.public = True
                    changed = True
                if gs_obj.title != title:
                    gs_obj.title = title
                    changed = True
                if gs_obj.abstract != description:
                    gs_obj.abstract = description
                    changed = True
            except Geneset.DoesNotExist:
                gs_obj = Geneset(title=title, slug=slug, creator=user,
                                 organism=org, public=True,
                                 abstract=description)
                changed = True

            # If anything changed, save.
            if changed:
                gs_obj.save()

            if options.get('initial'):
                # Disable the commit field's auto_now_add, which allows us to
                # set a prior annotation date.
                commit_date = Version._meta.get_field_by_name('commit_date')[0]
                commit_date.auto_now_add = False
                logger.info('Initial load. Need to construct versions of %s '
                            'from annotation date.', term.go_id)
                date_annots = {}
                for annotation in term.annotations:
                    date = timezone.make_aware(
                        datetime.strptime(annotation.date, '%Y%m%d'),
                        timezone.get_default_timezone())
                    try:
                        date_annots[date].append(annotation)
                    except KeyError:
                        date_annots[date] = [annotation, ]

                annots_as_of_date = set()
                prior_annots = set()
                prior_version = None
                for (date, annots) in sorted(date_annots.iteritems()):
                    annots_as_of_date.update(
                        [(annotation.gid, annotation.ref)
                         for annotation in annots])
                    # If nothing changed, continue.
                    if (annots_as_of_date == prior_annots):
                        continue
                    v_obj = Version(geneset=gs_obj, creator=user,
                                    parent=prior_version, commit_date=date)
                    v_obj.description = (
                        "Added " + str(len(annots)) + " annotations from GO "
                        "based on the dates provided in the GO annotation "
                        "file.")
                    v_obj.annotations = annots_as_of_date
                    v_obj.save()
                    prior_version = v_obj
                    prior_annots = annots_as_of_date.copy()

                # Re-enable auto_now_add.
                commit_date.auto_now_add = True
            else:
                # Load annotations.
                most_recent_versions = Version.objects.filter(
                    geneset=gs_obj).order_by('-commit_date')[:1]
                annots = set([(annotation.gid, annotation.ref)
                              for annotation in term.annotations])
                description = ''
                most_recent_version = None
                if most_recent_versions:
                    most_recent_version = most_recent_versions[0]
                    if (most_recent_version.commit_date > timezone.now()):
                        logger.error('Version from the future: %s.',
                                     most_recent_version)
                    new = annots - most_recent_version.annotations
                    removed = most_recent_version.annotations - annots
                    if (new or removed):
                        description = (description + 'Added ' + str(len(new)) +
                                       ' and removed ' + str(len(removed)) +
                                       ' annotations from GO.')
                else:
                    description = ('Created with ' + str(len(annots)) +
                                   ' annotations from GO.')

                if description:
                    v_obj = Version(geneset=gs_obj, creator=user,
                                    parent=most_recent_version,
                                    commit_date=timezone.now())
                    v_obj.description = description
                    v_obj.annotations = annots
                    v_obj.save()
def get_field_value(self, instance, field_name):
    now_dt = timezone.now()
    if timezone.is_aware(now_dt):
        now_dt = timezone.make_naive(now_dt, timezone.get_default_timezone())
    return now_dt
def tzone_convert(date_t, tz):
    assert isinstance(date_t, datetime)
    date_t = timezone.make_aware(date_t, timezone.get_default_timezone(),
                                 is_dst=False)
    return timezone.make_naive(date_t, tz)
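# Illustrative call for tzone_convert (the zone name is an assumption, not
# from the source):
#
#   tzone_convert(datetime(2018, 6, 10, 12, 0), pytz.timezone('US/Eastern'))
#
# interprets the naive input in the default timezone and returns the naive
# wall-clock time in US/Eastern.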
def humanized_date(self):
    to_tz = timezone.get_default_timezone()
    current_time = datetime.now().astimezone(to_tz)
    return humanize.naturaltime(current_time - self.date_added)
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _

from enum import Enum

from guardian.shortcuts import get_users_with_perms
from select2 import fields as select2_fields

from serviceform.tasks.models import Task

from typing import Tuple, Set, Optional, Sequence, Iterator, Iterable, TYPE_CHECKING

from .email import EmailTemplate
from .mixins import CopyMixin
from .participation import QuestionAnswer
from .people import Participant, ResponsibilityPerson
from .. import emails, utils
from ..utils import ColorStr

if TYPE_CHECKING:
    from .participation import ParticipationActivity, ParticipationActivityChoice

local_tz = timezone.get_default_timezone()
logger = logging.getLogger(__name__)


def imported_symbols_anchor():
    # Reference otherwise-unused imports so linters don't flag them.
    print(RGBColorField, settings, GenericRelation, Prefetch,
          render_to_string, reverse, format_html, get_users_with_perms,
          select2_fields, Task, emails, CopyMixin, Participant,
          ResponsibilityPerson, EmailTemplate, QuestionAnswer,
          ParticipationActivity, ParticipationActivityChoice, datetime, Enum,
          string, Tuple, Set, Optional, Sequence, Iterator, Iterable, _,
          cached_property, models, utils, ColorStr)
def reimport_scan(self, scan, scan_type, test, active=True, verified=True,
                  tags=None, minimum_severity=None, user=None,
                  endpoints_to_add=None, scan_date=None, version=None,
                  branch_tag=None, build_id=None, commit_hash=None,
                  push_to_jira=None, close_old_findings=True, group_by=None,
                  sonarqube_config=None):

    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')

    user = user or get_current_user()

    now = timezone.now()
    # Retain weird existing logic to use the current time for the provided
    # scan date.
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time,
                                             timezone.get_default_timezone())

    if sonarqube_config:  # if there is no sonarqube_config, just use the original
        if sonarqube_config.product != test.engagement.product:
            raise ValidationError(
                '"sonarqube_config" has to be from same product as "test"')

        if test.sonarqube_config != sonarqube_config:  # update of sonarqube_config
            test.sonarqube_config = sonarqube_config
            test.save()

    logger.debug('REIMPORT_SCAN: Parse findings')
    parsed_findings = importer_utils.parse_findings(scan, test, active,
                                                    verified, scan_type)

    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
        self.process_parsed_findings(test, parsed_findings, scan_type, user,
                                     active, verified,
                                     minimum_severity=minimum_severity,
                                     endpoints_to_add=endpoints_to_add,
                                     push_to_jira=push_to_jira,
                                     group_by=group_by, now=now)

    closed_findings = []
    if close_old_findings:
        logger.debug('REIMPORT_SCAN: Closing findings no longer present in '
                     'scan report')
        closed_findings = self.close_old_findings(test, findings_to_mitigate,
                                                  scan_date_time, user=user,
                                                  push_to_jira=push_to_jira)

    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag,
                                     build_id, commit_hash, now,
                                     scan_date_time)

    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.REIMPORT_TYPE, active, verified, tags,
            minimum_severity, endpoints_to_add, version, branch_tag, build_id,
            commit_hash, push_to_jira, close_old_findings, test, new_findings,
            closed_findings, reactivated_findings)

    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = (len(closed_findings) + len(reactivated_findings) +
                     len(new_findings))
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count,
            new_findings=new_findings,
            findings_mitigated=closed_findings,
            findings_reactivated=reactivated_findings,
            findings_untouched=untouched_findings)

    logger.debug('REIMPORT_SCAN: Done')

    return (test, updated_count, len(new_findings), len(closed_findings),
            len(reactivated_findings), len(untouched_findings))
def reimport_scan(self, scan, scan_type, test, active=True, verified=True,
                  tags=None, minimum_severity=None, user=None,
                  endpoints_to_add=None, scan_date=None, version=None,
                  branch_tag=None, build_id=None, commit_hash=None,
                  push_to_jira=None, close_old_findings=True, group_by=None,
                  api_scan_configuration=None, service=None):

    logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')

    user = user or get_current_user()

    now = timezone.now()
    # Retain weird existing logic to use the current time for the provided
    # scan date.
    scan_date_time = datetime.datetime.combine(scan_date, timezone.now().time())
    if settings.USE_TZ:
        scan_date_time = timezone.make_aware(scan_date_time,
                                             timezone.get_default_timezone())

    if api_scan_configuration:
        if api_scan_configuration.product != test.engagement.product:
            raise ValidationError(
                'API Scan Configuration has to be from same product as the Test')
        if test.api_scan_configuration != api_scan_configuration:
            test.api_scan_configuration = api_scan_configuration
            test.save()

    # Check whether the parser that handles the scan_type manages tests.
    parser = get_parser(scan_type)
    if hasattr(parser, 'get_tests'):
        logger.debug('REIMPORT_SCAN parser v2: Create parse findings')
        tests = parser.get_tests(scan_type, scan)
        # For now we only consider the first test in the list and artificially
        # aggregate all findings of all tests. This is the same as the old
        # behavior, as the current import/reimport implementation doesn't
        # handle the case when there is more than one test.
        parsed_findings = []
        for test_raw in tests:
            parsed_findings.extend(test_raw.findings)
    else:
        logger.debug('REIMPORT_SCAN: Parse findings')
        parsed_findings = parser.get_findings(scan, test)

    logger.debug('REIMPORT_SCAN: Processing findings')
    new_findings = []
    reactivated_findings = []
    findings_to_mitigate = []
    untouched_findings = []
    if settings.ASYNC_FINDING_IMPORT:
        chunk_list = importer_utils.chunk_list(parsed_findings)
        results_list = []
        # First kick off all the workers.
        for findings_list in chunk_list:
            result = self.process_parsed_findings(
                test, findings_list, scan_type, user, active, verified,
                minimum_severity=minimum_severity,
                endpoints_to_add=endpoints_to_add,
                push_to_jira=push_to_jira,
                group_by=group_by, now=now, service=service, sync=False)
            # Since we don't want to wait for the task right now, save the id
            # so we can check on the task later.
            results_list += [result]
        # After all tasks have been started, time to pull the results.
        logger.debug('REIMPORT_SCAN: Collecting Findings')
        for results in results_list:
            (serial_new_findings, serial_reactivated_findings,
             serial_findings_to_mitigate,
             serial_untouched_findings) = results.get()
            new_findings += [
                next(serializers.deserialize("json", finding)).object
                for finding in serial_new_findings]
            reactivated_findings += [
                next(serializers.deserialize("json", finding)).object
                for finding in serial_reactivated_findings]
            findings_to_mitigate += [
                next(serializers.deserialize("json", finding)).object
                for finding in serial_findings_to_mitigate]
            untouched_findings += [
                next(serializers.deserialize("json", finding)).object
                for finding in serial_untouched_findings]
        logger.debug('REIMPORT_SCAN: All Findings Collected')
        # Indicate that the test is not complete yet as endpoints will still
        # be rolling in.
        test.percent_complete = 50
        test.save()
        importer_utils.update_test_progress(test)
    else:
        new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
            self.process_parsed_findings(
                test, parsed_findings, scan_type, user, active, verified,
                minimum_severity=minimum_severity,
                endpoints_to_add=endpoints_to_add,
                push_to_jira=push_to_jira,
                group_by=group_by, now=now, service=service, sync=True)

    closed_findings = []
    if close_old_findings:
        logger.debug('REIMPORT_SCAN: Closing findings no longer present in '
                     'scan report')
        closed_findings = self.close_old_findings(test, findings_to_mitigate,
                                                  scan_date_time, user=user,
                                                  push_to_jira=push_to_jira)

    logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
    importer_utils.update_timestamps(test, scan_date, version, branch_tag,
                                     build_id, commit_hash, now,
                                     scan_date_time)

    if settings.TRACK_IMPORT_HISTORY:
        logger.debug('REIMPORT_SCAN: Updating Import History')
        importer_utils.update_import_history(
            Test_Import.REIMPORT_TYPE, active, verified, tags,
            minimum_severity, endpoints_to_add, version, branch_tag, build_id,
            commit_hash, push_to_jira, close_old_findings, test, new_findings,
            closed_findings, reactivated_findings)

    logger.debug('REIMPORT_SCAN: Generating notifications')
    updated_count = (len(closed_findings) + len(reactivated_findings) +
                     len(new_findings))
    if updated_count > 0:
        notifications_helper.notify_scan_added(
            test, updated_count,
            new_findings=new_findings,
            findings_mitigated=closed_findings,
            findings_reactivated=reactivated_findings,
            findings_untouched=untouched_findings)

    logger.debug('REIMPORT_SCAN: Done')

    return (test, updated_count, len(new_findings), len(closed_findings),
            len(reactivated_findings), len(untouched_findings))
def localize(dt):
    return get_default_timezone().localize(dt)
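# Usage note (pytz semantics, example values are illustrative): localize()
# picks the correct UTC offset for the given wall-clock time, unlike
# datetime.replace(tzinfo=...), which attaches the zone's base offset:
#
#   localize(datetime(2017, 7, 1, 12, 0))  # -> aware, DST-correct offset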
def simulate(start_date=None, end_date=None):
    models.Order.objects.all().delete()
    models.Quote.objects.all().delete()
    models.DayReport.objects.all().delete()
    models.DayHistory.objects.all().delete()

    for db_account in models.Account.objects.all():
        db_account.save()
        for db_stock in models.Stock.objects.filter(account=db_account):
            db_stock.count = 0
            db_stock.save()

    if start_date is None:
        first_quote = models.SimHistory.objects.all().order_by('date')[0]
    else:
        try:
            first_quote = models.SimHistory.objects.filter(
                date__gte=start_date).order_by('date')[0]
        except IndexError:
            return False
    cur_dt = timezone.datetime(year=first_quote.date.year,
                               month=first_quote.date.month,
                               day=first_quote.date.day,
                               hour=9, minute=31, second=0,
                               tzinfo=timezone.get_default_timezone())

    if end_date is None:
        last_quote = models.SimHistory.objects.all().order_by('-date')[0]
    else:
        try:
            last_quote = models.SimHistory.objects.filter(
                date__lte=end_date).order_by('-date')[0]
        except IndexError:
            return False
    last_dt = timezone.datetime(year=last_quote.date.year,
                                month=last_quote.date.month,
                                day=last_quote.date.day,
                                hour=23, minute=59, second=59,
                                tzinfo=timezone.get_default_timezone())

    day_delta = timezone.timedelta(1)

    simclient.reset_sim_config()
    client = simclient.Client()
    client.login(cur_dt)
    while cur_dt <= last_dt:
        logger.info('running sim: ' + str(cur_dt))
        client.update(cur_dt)
        run(dt=cur_dt, client=client)
        load_history_sim(cur_dt.date())
        cur_dt += day_delta
    client.logout()
    return True
def set_to_now():
    # Note: attaching the default timezone to a UTC wall-clock time is only
    # correct when the default timezone is UTC; timezone.now() would be the
    # safer choice otherwise.
    return datetime.utcnow().replace(tzinfo=get_default_timezone())
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application

application = get_wsgi_application()

import datetime

from django.conf import settings
from django.utils import timezone

from cloto.models import ServerInfo
from cloto.log import logger
from cloto import environment_controller

runningfrom = datetime.datetime.now(tz=timezone.get_default_timezone())

# Creating initial data
s = ServerInfo(id=1, owner=settings.OWNER, version=settings.VERSION,
               runningfrom=runningfrom, doc=settings.API_INFO_URL)
s.save()

# Starting environments Controller
controller = environment_controller.environment_controller()
if not controller.is_started():
    controller.start_manager()
logger.info("SERVER STARTED")