def timezone_support_compare(new_value, old_value, timezone_to_set=pytz.UTC):
    if not (isinstance(new_value, datetime.datetime) and isinstance(old_value, datetime.datetime)):
        return raw_compare(new_value, old_value)

    db_value_is_aware = timezone.is_aware(old_value)
    in_memory_value_is_aware = timezone.is_aware(new_value)

    if db_value_is_aware == in_memory_value_is_aware:
        return raw_compare(new_value, old_value)

    if db_value_is_aware:
        # If the db value is aware, settings.USE_TZ=True, so we need to convert the in-memory one.
        warnings.warn("DateTimeField received a naive datetime (%s)"
                      " while time zone support is active." % new_value,
                      RuntimeWarning)
        new_value = timezone.make_aware(new_value, timezone_to_set).astimezone(pytz.utc)
    else:
        # The db is not timezone aware, but the value we are passing for comparison is aware.
        warnings.warn("Time zone support is not active (settings.USE_TZ=False), "
                      "and you passed a time zone aware value (%s)."
                      " Converting database value before comparison." % new_value,
                      RuntimeWarning)
        old_value = timezone.make_aware(old_value, pytz.utc).astimezone(timezone_to_set)

    return raw_compare(new_value, old_value)

def __unicode__(self):
    entrada = timezone.localtime(self.entrada) if timezone.is_aware(self.entrada) else self.entrada
    if self.saida:
        saida = timezone.localtime(self.saida) if timezone.is_aware(self.saida) else self.saida
        return u'%s - de %s a %s' % (self.membro, entrada, saida)
    else:
        return u'%s - %s' % (self.membro, entrada)

def parse_publish(self, date, timezone=None):
    """Parses a datetime string into a datetime instance."""
    if type(date) is datetime.date:
        date = datetime.datetime.combine(date, datetime.datetime.min.time())
    if not isinstance(date, datetime.datetime):
        date = parse(date)
    if not is_aware(date) and timezone:
        date = date.replace(tzinfo=tz.gettz(timezone))
    if not is_aware(date):
        try:
            # Django>1.7
            date = make_aware(date)
        except TypeError:
            # Django<1.8
            date = make_aware(date, utc)
    if timezone and date.tzinfo != tz.gettz(timezone):
        date = date.replace(tzinfo=tz.gettz(timezone))
    return date

def test_seconds_to_expiry(self):
    "Test that it handles naive and tz-aware times"
    with self.settings(USE_TZ=False):
        at = AccessToken(expires_on=TOMORROW)
        expires_at = datetime.combine(at.expires_on, time.min)
        self.assertTrue(is_naive(expires_at))
        self.assertTrue(is_naive(now()))
        self.assertEqual(
            at.seconds_to_expiry,
            int((expires_at - now()).total_seconds())
        )
    with self.settings(USE_TZ=True):
        at = AccessToken(expires_on=TOMORROW)
        expires_at = make_aware(
            datetime.combine(at.expires_on, time.min),
            get_current_timezone()
        )
        self.assertTrue(is_aware(expires_at))
        self.assertTrue(is_aware(now()))
        self.assertEqual(
            at.seconds_to_expiry,
            int((expires_at - now()).total_seconds())
        )

def backwards(self, orm):
    "Write your backwards methods here."
    for s in orm['sleep.Sleep'].objects.all():
        if timezone.is_aware(s.start_time):
            s.start_time = timezone.make_naive(s.start_time, t)
        if timezone.is_aware(s.end_time):
            # Fixed copy-paste bug: the original converted start_time here,
            # silently overwriting end_time with the start value.
            s.end_time = timezone.make_naive(s.end_time, t)
        s.save()

def _match_aware(cls, old, new):
    tz = timezone.get_default_timezone()
    if timezone.is_naive(old) and timezone.is_aware(new):
        return (old, timezone.make_naive(new, tz))
    elif timezone.is_aware(old) and timezone.is_naive(new):
        return (timezone.make_naive(old, tz), new)
    else:
        return (old, new)

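# A minimal usage sketch for _match_aware() above (added for illustration, not
# part of the original source). It assumes a configured Django settings module
# and that the classmethod lives on some holder class, here hypothetically Cls.
import datetime
import pytz
from django.utils import timezone

naive = datetime.datetime(2021, 6, 1, 12, 0)
aware = timezone.make_aware(naive, pytz.UTC)
old, new = Cls._match_aware(naive, aware)
# Both values now share the same awareness, so ordering comparisons such as
# old < new can no longer raise TypeError for mixing naive and aware datetimes.
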
def test_make_naive_use_tz_false(settings):
    """Tests datetimes are left intact if `USE_TZ` is not in effect."""
    settings.USE_TZ = False
    datetime_object = datetime(2016, 1, 2, 21, 52, 25, tzinfo=pytz.utc)
    assert timezone.is_aware(datetime_object)
    result = make_naive(datetime_object)
    # With USE_TZ=False the helper is a no-op, so the value stays aware
    # (the original variable name `naive_datetime` was misleading).
    assert timezone.is_aware(result)

def enforce_timezone(self, value):
    """
    When `self.default_timezone` is `None`, always return naive datetimes.
    When `self.default_timezone` is not `None`, always return aware datetimes.
    """
    if (self.default_timezone is not None) and not timezone.is_aware(value):
        return timezone.make_aware(value, self.default_timezone)
    elif (self.default_timezone is None) and timezone.is_aware(value):
        return timezone.make_naive(value, timezone.UTC())
    return value

def test_guess_with_datetime(self):
    generator = self.instance.guess_format(models.DateTimeField())
    with django_setting('USE_TZ', True):
        value = generator(datetime.now())
        self.assertTrue(timezone.is_aware(value))
    with django_setting('USE_TZ', False):
        value = generator(datetime.now())
        self.assertFalse(timezone.is_aware(value))

def make_aware(dt, tz):
    if dt is None:
        return
    if settings.USE_TZ:
        if timezone.is_aware(dt):
            return dt
        return timezone.make_aware(dt, tz)
    if timezone.is_aware(dt):
        return timezone.make_naive(dt)
    return dt

def timestamp(obj):
    if isinstance(obj, datetime.datetime):
        if timezone.is_aware(obj):
            return obj.astimezone(tz.tzutc()).strftime('%Y-%m-%d %H:%M:%SZ')
        else:
            return obj.strftime('%Y-%m-%d %H:%M:%SZ')
    if isinstance(obj, datetime.date):
        return str(obj)
    if timezone.is_aware(obj):
        obj = timezone.make_naive(obj, timezone.get_current_timezone())
    return time.mktime(obj.timetuple())

def make_aware(dt, tz):
    if settings.USE_TZ:
        if timezone.is_aware(dt):
            return dt
        else:
            return timezone.make_aware(dt, tz)
    else:
        if timezone.is_aware(dt):
            return timezone.make_naive(dt)
        else:
            return dt

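# Hedged usage sketch for the make_aware() helper above (added here, not part
# of the original source); it assumes a configured Django settings module.
import datetime
import pytz
from django.conf import settings
from django.utils import timezone

dt = datetime.datetime(2020, 1, 1, 12, 0)
value = make_aware(dt, pytz.UTC)
# With USE_TZ=True the helper returns an aware datetime; with USE_TZ=False it
# strips awareness instead, so the result always matches the active setting.
assert timezone.is_aware(value) == settings.USE_TZ
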
def __init__(self, start, end, available=False, source=None):
    if not timezone.is_aware(start):
        start = timezone.make_aware(start)
    if not timezone.is_aware(end):
        end = timezone.make_aware(end)
    self.start = start
    self.end = end
    self.available = available
    self.source = source

def normalize_event_data(self, data):
    # First we pull out our top-level (non-data attr) kwargs
    if not data.get('level') or data['level'] not in LOG_LEVELS_DICT:
        data['level'] = logging.ERROR
    if not data.get('logger'):
        data['logger'] = settings.DEFAULT_LOGGER_NAME

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    if not data.get('culprit'):
        data['culprit'] = ''

    # We must convert date to local time so Django doesn't mess it up
    # based on TIME_ZONE
    if dj_settings.TIME_ZONE:
        if not timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=timezone.utc)
    elif timezone.is_aware(timestamp):
        timestamp = timestamp.replace(tzinfo=None)
    data['timestamp'] = timestamp

    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex

    data.setdefault('message', None)
    data.setdefault('time_spent', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('platform', None)

    tags = data.get('tags')
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = tags.items()
    else:
        tags = list(tags)
    data['tags'] = tags

    if 'sentry.interfaces.Exception' in data:
        if 'values' not in data['sentry.interfaces.Exception']:
            data['sentry.interfaces.Exception'] = {'values': [data['sentry.interfaces.Exception']]}

        # convert stacktrace + exception into expanded exception
        if 'sentry.interfaces.Stacktrace' in data:
            data['sentry.interfaces.Exception']['values'][0]['stacktrace'] = data.pop('sentry.interfaces.Stacktrace')

    return data

def between(self, after, before, **kwargs):
    if timezone.is_aware(after):
        after = timezone.make_naive(after)
    if timezone.is_aware(before):
        before = timezone.make_naive(before)
    for dt in self._between(after, before, **kwargs):
        try:
            yield timezone.make_aware(dt)
        except NonExistentTimeError:
            pass

def argsHelper(self, args=None,
               start=datetime.datetime(2011, 1, 2),
               end=datetime.datetime(2011, 1, 4)):
    # Avoid the original mutable default argument (args={}): a shared dict
    # would leak state between calls.
    if args is None:
        args = {}
    if not timezone.is_aware(start):
        start = timezone.make_aware(start, timezone.get_current_timezone())
    if not timezone.is_aware(end):
        end = timezone.make_aware(end, timezone.get_current_timezone())
    args.update({
        'from_date': start.strftime('%m/%d/%Y'),
        'to_date': end.strftime('%m/%d/%Y'),
        'export': True,
    })
    return args

def bulk_entries(self, start=datetime.datetime(2011, 1, 2),
                 end=datetime.datetime(2011, 1, 4)):
    if not timezone.is_aware(start):
        start = timezone.make_aware(start, timezone.get_current_timezone())
    if not timezone.is_aware(end):
        end = timezone.make_aware(end, timezone.get_current_timezone())
    dates = utils.generate_dates(start, end, 'day')
    projects = [self.p1, self.p2, self.p2, self.p4, self.p5, self.sick]
    self.make_entries(projects=projects, dates=dates, user=self.user, hours=2)
    self.make_entries(projects=projects, dates=dates, user=self.user2, hours=1)

def test_guess_format_timezone(self):
    test_names = ('something_at', 'something_At', 'gameUpdated_At', 'game_created_at')
    with django_setting('USE_TZ', True):
        for name in test_names:
            value = self.instance.guess_format(name)(datetime.now())
            self.assertTrue(timezone.is_aware(value))
    with django_setting('USE_TZ', False):
        for name in test_names:
            value = self.instance.guess_format(name)(datetime.now())
            self.assertFalse(timezone.is_aware(value))

def enforce_timezone(cls, value):
    """
    When `self.default_timezone` is `None`, always return naive datetimes.
    When `self.default_timezone` is not `None`, always return aware datetimes.
    """
    field_timezone = cls.default_timezone()
    if (field_timezone is not None) and not is_aware(value):
        return make_aware(value, field_timezone)
    elif (field_timezone is None) and is_aware(value):
        return make_naive(value, utc)
    return value

def test_make_dt_aware_with_pytz(self):
    local_tz = timezone.get_current_timezone()

    now = datetime.datetime.now()
    date = DateRangeFilter.make_dt_aware(now, local_tz)
    self.assertEqual(date.tzinfo.zone, local_tz.zone)
    self.assertTrue(timezone.is_aware(date))

    now = timezone.now()
    date = DateRangeFilter.make_dt_aware(now, local_tz)
    self.assertEqual(date.tzinfo.zone, local_tz.zone)
    self.assertTrue(timezone.is_aware(date))

def datesafe_eq(obj1, obj2):
    """
    If two objects are dates, but don't both have the same timezone awareness
    status, compare them in a timezone-safe way. Otherwise, compare them with
    regular equality.
    """
    if isinstance(obj1, datetime) and not timezone.is_aware(obj1):
        obj1 = timezone.make_aware(obj1, pytz.UTC)
    if isinstance(obj2, datetime) and not timezone.is_aware(obj2):
        obj2 = timezone.make_aware(obj2, pytz.UTC)
    return obj1 == obj2

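# A brief usage sketch for datesafe_eq() (added here, not in the original).
# Naive values are assumed to be UTC, so a naive and an aware representation
# of the same instant compare equal, where a plain == between mixed-awareness
# datetimes would simply be False in Python 3.
import datetime
import pytz

naive = datetime.datetime(2020, 5, 1, 10, 30)
aware = pytz.UTC.localize(datetime.datetime(2020, 5, 1, 10, 30))
assert datesafe_eq(naive, aware)
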
def test_storage_mtime(self):
    obj = self.storage.bucket.Object.return_value
    obj.last_modified = datetime.now(utc)

    name = 'file.txt'
    self.assertFalse(
        is_aware(self.storage.modified_time(name)),
        'Naive datetime object expected from modified_time()'
    )
    self.assertTrue(
        is_aware(self.storage.get_modified_time(name)),
        'Aware datetime object expected from get_modified_time()'
    )

def strptime(self, value, format):
    if format == self.ISO_8601:
        parsed = parse_datetime(value)
        if parsed is None:
            # Continue with other formats if it doesn't match
            raise ValueError
        # Handle timezone awareness. Copied from:
        # https://github.com/tomchristie/django-rest-framework/blob/3.2.0/rest_framework/fields.py#L965-L969
        if settings.USE_TZ and not timezone.is_aware(parsed):
            return timezone.make_aware(parsed, self.default_timezone)
        elif not settings.USE_TZ and timezone.is_aware(parsed):
            return timezone.make_naive(parsed, timezone.UTC())
        return parsed
    return super(IsoDateTimeField, self).strptime(value, format)

def from_native(self, value):
    if value in validators.EMPTY_VALUES:
        return None

    if isinstance(value, datetime.datetime):
        if timezone and settings.USE_TZ and timezone.is_aware(value):
            # Convert aware datetimes to the default time zone
            # before casting them to dates (#17742).
            default_timezone = timezone.get_default_timezone()
            value = timezone.make_naive(value, default_timezone)
        return value.date()
    if isinstance(value, datetime.date):
        return value

    for format in self.input_formats:
        if format.lower() == ISO_8601:
            try:
                parsed = parse_date(value)
            except (ValueError, TypeError):
                pass
            else:
                if parsed is not None:
                    return parsed
        else:
            try:
                parsed = datetime.datetime.strptime(value, format)
            except (ValueError, TypeError):
                pass
            else:
                return parsed.date()

    msg = self.error_messages["invalid"] % readable_date_formats(self.input_formats)
    raise ValidationError(msg)

def make_local(self, date_time):
    """Convert aware datetime to local datetime."""
    if timezone.is_aware(date_time):
        return timezone.localtime(date_time)
    return date_time

def adapt_datetimefield_value(self, value):
    """
    Transform a datetime value to an object compatible with what is expected
    by the backend driver for datetime columns.

    If a naive datetime is passed, it is assumed to be in UTC. Normally
    Django's models.DateTimeField makes sure that, if USE_TZ is True, the
    passed datetime is timezone aware.
    """
    if value is None:
        return None

    # Expression values are adapted by the database.
    if hasattr(value, 'resolve_expression'):
        return value

    # cx_Oracle doesn't support tz-aware datetimes
    if timezone.is_aware(value):
        if settings.USE_TZ:
            value = timezone.make_naive(value, self.connection.timezone)
        else:
            raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.")

    return Oracle_datetime.from_datetime(value)

def convert_datetimefield_value(self, value, expression, connection):
    if value is not None:
        if not isinstance(value, datetime.datetime):
            value = parse_datetime(value)
        if settings.USE_TZ and not timezone.is_aware(value):
            value = timezone.make_aware(value, self.connection.timezone)
    return value

def get_local_day_min(dt=None, year=None, month=None, day=None, utc=True):
    """Convert the given time to local midnight (00:00), then to UTC.

    :param dt: source datetime (defaults to now)
    :param utc: if True, return the value expressed in UTC
    :return: an aware datetime
    """
    if dt is None:
        tm = timezone.now()
    elif timezone.is_aware(dt):
        tm = timezone.make_naive(dt)
    else:
        tm = dt
    if year is not None:
        tm = tm.replace(year=year)
    if month is not None:
        tm = tm.replace(month=month)
    if day is not None:
        tm = tm.replace(day=day)
    tm = datetime.datetime.combine(tm, datetime.time.min)
    # The combined tm has no tzinfo at this point.
    tm = timezone.make_aware(tm)
    if utc:
        tm = timezone.make_naive(tm, timezone.utc)
        return tm.replace(tzinfo=timezone.utc)
    else:
        return tm

def _test_file_time_getter_tz_handling_on(self, getter):
    # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
    # is UTC+1 and has no DST change. We can set the Django TZ to something
    # else so that UTC, Django's TIME_ZONE, and the system timezone are all
    # different.
    now_in_algiers = timezone.make_aware(datetime.now())

    with timezone.override(timezone.get_fixed_timezone(-300)):
        # At this point the system TZ is +1 and the Django TZ
        # is -5. The following will be aware in UTC.
        now = timezone.now()
        self.assertFalse(self.storage.exists('test.file.tz.on'))

        f = ContentFile('custom contents')
        f_name = self.storage.save('test.file.tz.on', f)
        self.addCleanup(self.storage.delete, f_name)
        dt = getter(f_name)
        # dt should be aware, in UTC
        self.assertTrue(timezone.is_aware(dt))
        self.assertEqual(now.tzname(), dt.tzname())

        # The three timezones are indeed distinct.
        naive_now = datetime.now()
        algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
        django_offset = timezone.get_current_timezone().utcoffset(naive_now)
        utc_offset = timezone.utc.utcoffset(naive_now)
        self.assertGreater(algiers_offset, utc_offset)
        self.assertLess(django_offset, utc_offset)

        # dt and now should be the same effective time.
        self.assertLess(abs(dt - now), timedelta(seconds=2))

def save_submission(xform, xml, media_files, new_uuid, submitted_by, status,
                    date_created_override):
    if not date_created_override:
        date_created_override = get_submission_date_from_xml(xml)

    instance = _get_instance(xml, new_uuid, submitted_by, status, xform)

    for f in media_files:
        Attachment.objects.get_or_create(
            instance=instance, media_file=f, mimetype=f.content_type)

    # override date created if required
    if date_created_override:
        if not timezone.is_aware(date_created_override):
            # default to utc?
            date_created_override = timezone.make_aware(
                date_created_override, timezone.utc)
        instance.date_created = date_created_override
        instance.save()

    if instance.xform is not None:
        pi, created = ParsedInstance.objects.get_or_create(instance=instance)
        if not created:
            pi.save(async=False)

    return instance

def normalize(self, request_env=None):
    request_env = request_env or {}
    data = self.data
    errors = data['errors'] = []

    # Before validating with a schema, attempt to cast values to their desired types
    # so that the schema doesn't have to take every type variation into account.
    text = six.text_type
    fp_types = six.string_types + six.integer_types + (float, )

    def to_values(v):
        return {'values': v} if v and isinstance(v, (tuple, list)) else v

    casts = {
        'environment': lambda v: text(v) if v is not None else v,
        'fingerprint': lambda v: list(map(text, v)) if isinstance(v, list) and all(isinstance(f, fp_types) for f in v) else v,
        'release': lambda v: text(v) if v is not None else v,
        'dist': lambda v: text(v).strip() if v is not None else v,
        'time_spent': lambda v: int(v) if v is not None else v,
        'tags': lambda v: [(text(v_k).replace(' ', '-').strip(), text(v_v).strip()) for (v_k, v_v) in dict(v).items()],
        'timestamp': lambda v: process_timestamp(v),
        'platform': lambda v: v if v in VALID_PLATFORMS else 'other',
        'sentry.interfaces.Message': lambda v: v if isinstance(v, dict) else {'message': v},

        # These can be sent as lists and need to be converted to {'values': [...]}
        'exception': to_values,
        'sentry.interfaces.Exception': to_values,
        'breadcrumbs': to_values,
        'sentry.interfaces.Breadcrumbs': to_values,
        'threads': to_values,
        'sentry.interfaces.Threads': to_values,
    }

    for c in casts:
        if c in data:
            try:
                data[c] = casts[c](data[c])
            except Exception as e:
                errors.append({
                    'type': EventError.INVALID_DATA,
                    'name': c,
                    'value': data[c]
                })
                del data[c]

    # raw 'message' is coerced to the Message interface, as it's used for pure
    # indexing of searchable strings. If both a raw 'message' and a Message
    # interface exist, try and add the former as the 'formatted' attribute of
    # the latter.
    # See GH-3248
    msg_str = data.pop('message', None)
    if msg_str:
        msg_if = data.setdefault('sentry.interfaces.Message', {'message': msg_str})
        if msg_if.get('message') != msg_str:
            msg_if.setdefault('formatted', msg_str)

    # Fill in ip addresses marked as {{auto}}
    client_ip = request_env.get('client_ip')
    if client_ip:
        if get_path(data, ['sentry.interfaces.Http', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['sentry.interfaces.Http']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['request', 'env', 'REMOTE_ADDR']) == '{{auto}}':
            data['request']['env']['REMOTE_ADDR'] = client_ip

        if get_path(data, ['sentry.interfaces.User', 'ip_address']) == '{{auto}}':
            data['sentry.interfaces.User']['ip_address'] = client_ip

        if get_path(data, ['user', 'ip_address']) == '{{auto}}':
            data['user']['ip_address'] = client_ip

    # Validate main event body and tags against schema
    is_valid, event_errors = validate_and_default_interface(data, 'event')
    errors.extend(event_errors)
    if 'tags' in data:
        is_valid, tag_errors = validate_and_default_interface(data['tags'], 'tags', name='tags')
        errors.extend(tag_errors)

    # Validate interfaces
    for k in list(iter(data)):
        if k in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(k)

        if not value:
            self.logger.debug('Ignored empty interface value: %s', k)
            continue

        try:
            interface = get_interface(k)
        except ValueError:
            self.logger.debug('Ignored unknown attribute: %s', k)
            errors.append({'type': EventError.INVALID_ATTRIBUTE, 'name': k})
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception as e:
            log = self.logger.debug if isinstance(
                e, InterfaceValidationError) else self.logger.error
            log('Discarded invalid value for interface: %s (%r)', k, value, exc_info=True)
            errors.append({'type': EventError.INVALID_DATA, 'name': k, 'value': value})

    # Additional data coercion and defaulting
    level = data.get('level') or DEFAULT_LOG_LEVEL
    if isinstance(level, int) or (isinstance(level, six.string_types) and level.isdigit()):
        level = LOG_LEVELS.get(int(level), DEFAULT_LOG_LEVEL)
    data['level'] = LOG_LEVELS_MAP.get(level, LOG_LEVELS_MAP[DEFAULT_LOG_LEVEL])

    if data.get('dist') and not data.get('release'):
        data['dist'] = None

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # TODO (alex) can this all be replaced by utcnow?
    # it looks like the only time that this would even be hit is when timestamp
    # is not defined, as the earlier process_timestamp already converts existing
    # timestamps to floats.
    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = float(timezone.now().strftime('%s'))

    data.setdefault('checksum', None)
    data.setdefault('culprit', None)
    data.setdefault('dist', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('fingerprint', None)
    data.setdefault('logger', DEFAULT_LOGGER_NAME)
    data.setdefault('platform', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('tags', [])
    data.setdefault('transaction', None)

    # Fix case where legacy apps pass 'environment' as a tag
    # instead of a top level key.
    # TODO (alex) save() just reinserts the environment into the tags
    if not data.get('environment'):
        tagsdict = dict(data['tags'])
        if 'environment' in tagsdict:
            data['environment'] = tagsdict['environment']
            del tagsdict['environment']
            data['tags'] = tagsdict.items()

    # the SDKs currently do not describe event types, and we must infer
    # them from available attributes
    data['type'] = eventtypes.infer(data).key
    data['version'] = self.version

    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    # If there is no User ip_address, update it either from the Http interface
    # or the client_ip of the request.
    auth = request_env.get('auth')
    is_public = auth and auth.is_public
    add_ip_platforms = ('javascript', 'cocoa', 'objc')

    http_ip = data.get('sentry.interfaces.Http', {}).get('env', {}).get('REMOTE_ADDR')
    if http_ip:
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', http_ip)
    elif client_ip and (is_public or data.get('platform') in add_ip_platforms):
        data.setdefault('sentry.interfaces.User', {}).setdefault('ip_address', client_ip)

    if client_ip and data.get('sdk'):
        data['sdk']['client_ip'] = client_ip

    # Trim values
    data['logger'] = trim(data['logger'].strip(), 64)
    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['transaction']:
        data['transaction'] = trim(data['transaction'], MAX_CULPRIT_LENGTH)

    return data

def model_from_serializable_data(model, data, check_fks=True, strict_fks=False):
    pk_field = model._meta.pk
    # If model is a child via multitable inheritance, use parent's pk
    while pk_field.remote_field and pk_field.remote_field.parent_link:
        pk_field = pk_field.remote_field.model._meta.pk

    kwargs = {pk_field.attname: data['pk']}
    for field_name, field_value in data.items():
        try:
            field = model._meta.get_field(field_name)
        except FieldDoesNotExist:
            continue

        # Filter out reverse relations
        if isinstance(field, ForeignObjectRel):
            continue

        if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
            related_objects = field.remote_field.model._default_manager.filter(pk__in=field_value)
            kwargs[field.attname] = list(related_objects)

        elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
            if field_value is None:
                kwargs[field.attname] = None
            else:
                clean_value = field.remote_field.model._meta.get_field(
                    field.remote_field.field_name).to_python(field_value)
                kwargs[field.attname] = clean_value
                if check_fks:
                    try:
                        field.remote_field.model._default_manager.get(
                            **{field.remote_field.field_name: clean_value})
                    except field.remote_field.model.DoesNotExist:
                        if field.remote_field.on_delete == models.DO_NOTHING:
                            pass
                        elif field.remote_field.on_delete == models.CASCADE:
                            if strict_fks:
                                return None
                            else:
                                kwargs[field.attname] = None
                        elif field.remote_field.on_delete == models.SET_NULL:
                            kwargs[field.attname] = None
                        else:
                            raise Exception(
                                "can't currently handle on_delete types other than CASCADE, SET_NULL and DO_NOTHING"
                            )
        else:
            value = field.to_python(field_value)

            # Make sure datetimes are converted to localtime
            if isinstance(field, models.DateTimeField) and settings.USE_TZ and value is not None:
                default_timezone = timezone.get_default_timezone()
                if timezone.is_aware(value):
                    value = timezone.localtime(value, default_timezone)
                else:
                    value = timezone.make_aware(value, default_timezone)

            kwargs[field.name] = value

    obj = model(**kwargs)

    if data['pk'] is not None:
        # Set state to indicate that this object has come from the database, so that
        # ModelForm validation doesn't try to enforce a uniqueness check on the primary key
        obj._state.adding = False

    return obj

def fields_differ(self, f1, f2):
    if (isinstance(f1, datetime.datetime) and isinstance(f2, datetime.datetime)
            and (timezone.is_aware(f1) != timezone.is_aware(f2))):
        return True
    else:
        return (f1 != f2)

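# Illustrative sketch (not in the original source) of why fields_differ()
# treats mixed awareness as a difference: equality between naive and aware
# datetimes is always False in Python 3, and ordering them raises TypeError,
# so flagging the mismatch up front avoids a misleading comparison result.
import datetime
import pytz

naive = datetime.datetime(2020, 1, 1)
aware = pytz.UTC.localize(datetime.datetime(2020, 1, 1))
# A plain equality check silently reports them as different values:
assert (naive == aware) is False
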
def date_hierarchy(cl):
    """
    Display the date hierarchy for date drill-down functionality.
    """
    if cl.date_hierarchy:
        field_name = cl.date_hierarchy
        field = get_fields_from_path(cl.model, field_name)[-1]
        if isinstance(field, models.DateTimeField):
            dates_or_datetimes = 'datetimes'
            qs_kwargs = {'is_dst': True}
        else:
            dates_or_datetimes = 'dates'
            qs_kwargs = {}
        year_field = '%s__year' % field_name
        month_field = '%s__month' % field_name
        day_field = '%s__day' % field_name
        field_generic = '%s__' % field_name
        year_lookup = cl.params.get(year_field)
        month_lookup = cl.params.get(month_field)
        day_lookup = cl.params.get(day_field)

        def link(filters):
            return cl.get_query_string(filters, [field_generic])

        if not (year_lookup or month_lookup or day_lookup):
            # select appropriate start level
            date_range = cl.queryset.aggregate(first=models.Min(field_name),
                                               last=models.Max(field_name))
            if date_range['first'] and date_range['last']:
                if dates_or_datetimes == 'datetimes':
                    date_range = {
                        k: timezone.localtime(v) if timezone.is_aware(v) else v
                        for k, v in date_range.items()
                    }
                if date_range['first'].year == date_range['last'].year:
                    year_lookup = date_range['first'].year
                    if date_range['first'].month == date_range['last'].month:
                        month_lookup = date_range['first'].month

        if year_lookup and month_lookup and day_lookup:
            day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup, month_field: month_lookup}),
                    'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
                },
                'choices': [
                    {'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}
                ]
            }
        elif year_lookup and month_lookup:
            days = getattr(cl.queryset, dates_or_datetimes)(field_name, 'day', **qs_kwargs)
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup}),
                    'title': str(year_lookup)
                },
                'choices': [{
                    'link': link({
                        year_field: year_lookup,
                        month_field: month_lookup,
                        day_field: day.day
                    }),
                    'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
                } for day in days]
            }
        elif year_lookup:
            months = getattr(cl.queryset, dates_or_datetimes)(field_name, 'month', **qs_kwargs)
            return {
                'show': True,
                'back': {
                    'link': link({}),
                    'title': _('All dates')
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month.month}),
                    'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
                } for month in months]
            }
        else:
            years = getattr(cl.queryset, dates_or_datetimes)(field_name, 'year', **qs_kwargs)
            return {
                'show': True,
                'back': None,
                'choices': [{
                    'link': link({year_field: str(year.year)}),
                    'title': str(year.year),
                } for year in years]
            }

def update_unit(self, unit, request, user=None):
    """Updates backend file and unit."""
    if user is None:
        user = request.user
    # Save with lock acquired
    with self.subproject.repository_lock():
        src = unit.get_source_plurals()[0]
        add = False

        pounit, add = self.store.find_unit(unit.context, src)

        # Bail out if we have not found anything
        if pounit is None or pounit.is_obsolete():
            return False, None

        # Check for changes
        if ((not add or unit.target == '') and
                unit.target == pounit.get_target() and
                unit.fuzzy == pounit.is_fuzzy()):
            return False, pounit

        # Store translations
        if unit.is_plural():
            pounit.set_target(unit.get_target_plurals())
        else:
            pounit.set_target(unit.target)

        # Update fuzzy flag
        pounit.mark_fuzzy(unit.fuzzy)

        # Optionally add unit to translation file
        if add:
            self.store.add_unit(pounit)

        # We need to update backend now
        author = get_author_name(user)

        # Update po file header
        now = timezone.now()
        if not timezone.is_aware(now):
            now = timezone.make_aware(now, timezone.utc)

        # Prepare headers to update
        headers = {
            'add': True,
            'last_translator': author,
            'plural_forms': self.language.get_plural_form(),
            'language': self.language_code,
            'PO_Revision_Date': now.strftime('%Y-%m-%d %H:%M%z'),
        }

        # Optionally store language team with link to website
        if self.subproject.project.set_translation_team:
            headers['language_team'] = '%s <%s>' % (
                self.language.name,
                get_site_url(self.get_absolute_url()),
            )

        # Optionally store email for reporting bugs in source
        report_source_bugs = self.subproject.report_source_bugs
        if report_source_bugs != '':
            headers['report_msgid_bugs_to'] = report_source_bugs

        # Update generic headers
        self.store.update_header(**headers)

        # commit possible previous changes (by other author)
        self.commit_pending(request, author)
        # save translation changes
        self.store.save()
        # commit VCS repo if needed
        self.git_commit(request, author, timezone.now(), sync=True)

    return True, pounit

def from_kwargs(self, project, **kwargs):
    # TODO: this function is way too damn long and needs refactored
    # the inner imports also suck so let's try to move it away from
    # the objects manager
    from sentry.models import Event, Project

    project = Project.objects.get_from_cache(pk=project)

    # First we pull out our top-level (non-data attr) kwargs
    event_id = kwargs.pop('event_id', None)
    message = kwargs.pop('message', None)
    culprit = kwargs.pop('culprit', None)
    level = kwargs.pop('level', None) or logging.ERROR
    time_spent = kwargs.pop('time_spent', None)
    logger_name = kwargs.pop('logger', None) or settings.DEFAULT_LOGGER_NAME
    server_name = kwargs.pop('server_name', None)
    site = kwargs.pop('site', None)
    date = kwargs.pop('timestamp', None) or timezone.now()
    checksum = kwargs.pop('checksum', None)
    tags = kwargs.pop('tags', [])

    # full support for dict syntax
    if isinstance(tags, dict):
        tags = tags.items()

    # We must convert date to local time so Django doesn't mess it up
    # based on TIME_ZONE
    if dj_settings.TIME_ZONE:
        if not timezone.is_aware(date):
            date = date.replace(tzinfo=timezone.utc)
    elif timezone.is_aware(date):
        date = date.replace(tzinfo=None)

    data = kwargs
    kwargs = {
        'level': level,
        'message': message,
    }

    event = Event(
        project=project,
        event_id=event_id,
        culprit=culprit or '',
        logger=logger_name,
        data=data,
        server_name=server_name,
        site=site,
        time_spent=time_spent,
        datetime=date,
        **kwargs
    )

    # Calculate the checksum from the first highest scoring interface
    if not checksum:
        checksum = get_checksum_from_event(event)

    event.checksum = checksum

    group_kwargs = kwargs.copy()
    group_kwargs.update({
        'last_seen': date,
        'first_seen': date,
        'time_spent_total': time_spent or 0,
        'time_spent_count': time_spent and 1 or 0,
    })

    views = self._get_views(event)

    try:
        group, is_new, is_sample = self._create_group(event, tags=tags, **group_kwargs)
    except Exception, exc:
        # TODO: should we mail admins when there are failures?
        try:
            logger.exception(u'Unable to process log entry: %s', exc)
        except Exception, exc:
            warnings.warn(u'Unable to process log entry: %s', exc)

def iterate_list(self, form_data):
    cl = self.event.checkin_lists.get(pk=form_data['list'])

    questions = list(Question.objects.filter(event=self.event, id__in=form_data['questions']))

    qs = self._get_queryset(cl, form_data)

    name_scheme = PERSON_NAME_SCHEMES[self.event.settings.name_scheme]
    headers = [
        _('Order code'),
        _('Attendee name'),
    ]
    if len(name_scheme['fields']) > 1:
        for k, label, w in name_scheme['fields']:
            headers.append(_('Attendee name: {part}').format(part=label))
    headers += [
        _('Product'), _('Price'), _('Checked in'), _('Checked out'),
        _('Automatically checked in')
    ]
    if not cl.include_pending:
        qs = qs.filter(order__status=Order.STATUS_PAID)
    else:
        qs = qs.filter(order__status__in=(Order.STATUS_PAID, Order.STATUS_PENDING))
        headers.append(_('Paid'))

    if form_data['secrets']:
        headers.append(_('Secret'))

    headers.append(_('E-mail'))
    headers.append(_('Phone number'))

    if self.event.has_subevents:
        headers.append(pgettext('subevent', 'Date'))
        headers.append(_('Start date'))
        headers.append(_('End date'))

    for q in questions:
        headers.append(str(q.question))

    headers.append(_('Company'))
    headers.append(_('Voucher code'))
    headers.append(_('Order date'))
    headers.append(_('Order time'))
    headers.append(_('Requires special attention'))
    headers.append(_('Comment'))
    headers.append(_('Seat ID'))
    headers.append(_('Seat name'))
    headers.append(_('Seat zone'))
    headers.append(_('Seat row'))
    headers.append(_('Seat number'))
    headers += [
        _('Address'),
        _('ZIP code'),
        _('City'),
        _('Country'),
        pgettext('address', 'State'),
    ]
    yield headers

    yield self.ProgressSetTotal(total=qs.count())
    for op in qs:
        try:
            ia = op.order.invoice_address
        except InvoiceAddress.DoesNotExist:
            ia = InvoiceAddress()

        last_checked_in = None
        if isinstance(op.last_checked_in, str):  # SQLite
            last_checked_in = dateutil.parser.parse(op.last_checked_in)
        elif op.last_checked_in:
            last_checked_in = op.last_checked_in
        if last_checked_in and not is_aware(last_checked_in):
            last_checked_in = make_aware(last_checked_in, UTC)

        last_checked_out = None
        if isinstance(op.last_checked_out, str):  # SQLite
            last_checked_out = dateutil.parser.parse(op.last_checked_out)
        elif op.last_checked_out:
            last_checked_out = op.last_checked_out
        if last_checked_out and not is_aware(last_checked_out):
            last_checked_out = make_aware(last_checked_out, UTC)

        row = [
            op.order.code,
            op.attendee_name or (op.addon_to.attendee_name if op.addon_to else '') or ia.name,
        ]
        if len(name_scheme['fields']) > 1:
            for k, label, w in name_scheme['fields']:
                row.append(
                    (op.attendee_name_parts or
                     (op.addon_to.attendee_name_parts if op.addon_to else {}) or
                     ia.name_parts).get(k, '')
                )
        row += [
            str(op.item) + (" – " + str(op.variation.value) if op.variation else ""),
            op.price,
            date_format(last_checked_in.astimezone(self.event.timezone),
                        'SHORT_DATETIME_FORMAT') if last_checked_in else '',
            date_format(last_checked_out.astimezone(self.event.timezone),
                        'SHORT_DATETIME_FORMAT') if last_checked_out else '',
            _('Yes') if op.auto_checked_in else _('No'),
        ]
        if cl.include_pending:
            row.append(_('Yes') if op.order.status == Order.STATUS_PAID else _('No'))
        if form_data['secrets']:
            row.append(op.secret)
        row.append(op.attendee_email or
                   (op.addon_to.attendee_email if op.addon_to else '') or
                   op.order.email or '')
        row.append(str(op.order.phone) if op.order.phone else '')
        if self.event.has_subevents:
            row.append(str(op.subevent.name))
            row.append(date_format(op.subevent.date_from.astimezone(self.event.timezone),
                                   'SHORT_DATETIME_FORMAT'))
            if op.subevent.date_to:
                row.append(date_format(op.subevent.date_to.astimezone(self.event.timezone),
                                       'SHORT_DATETIME_FORMAT'))
            else:
                row.append('')

        acache = {}
        if op.addon_to:
            for a in op.addon_to.answers.all():
                # We do not want to localize Date, Time and Datetime question answers, as those can lead
                # to difficulties parsing the data (for example 2019-02-01 may become Février, 2019 01 in French).
                if a.question.type in Question.UNLOCALIZED_TYPES:
                    acache[a.question_id] = a.answer
                else:
                    acache[a.question_id] = str(a)
        for a in op.answers.all():
            # We do not want to localize Date, Time and Datetime question answers, as those can lead
            # to difficulties parsing the data (for example 2019-02-01 may become Février, 2019 01 in French).
            if a.question.type in Question.UNLOCALIZED_TYPES:
                acache[a.question_id] = a.answer
            else:
                acache[a.question_id] = str(a)
        for q in questions:
            row.append(acache.get(q.pk, ''))

        row.append(op.company or ia.company)
        row.append(op.voucher.code if op.voucher else "")
        row.append(op.order.datetime.astimezone(self.event.timezone).strftime('%Y-%m-%d'))
        row.append(op.order.datetime.astimezone(self.event.timezone).strftime('%H:%M:%S'))
        row.append(_('Yes') if op.order.checkin_attention or op.item.checkin_attention else _('No'))
        row.append(op.order.comment or "")

        if op.seat:
            row += [
                op.seat.seat_guid,
                str(op.seat),
                op.seat.zone_name,
                op.seat.row_name,
                op.seat.seat_number,
            ]
        else:
            row += ['', '', '', '', '']

        row += [
            op.street or '',
            op.zipcode or '',
            op.city or '',
            op.country if op.country else '',
            op.state or '',
        ]
        yield row

def normalize_event_data(self, data):
    # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
    # First we pull out our top-level (non-data attr) kwargs
    if not isinstance(data.get('level'), (six.string_types, int)):
        data['level'] = logging.ERROR
    elif data['level'] not in LOG_LEVELS:
        data['level'] = logging.ERROR

    if not data.get('logger'):
        data['logger'] = DEFAULT_LOGGER_NAME
    else:
        data['logger'] = trim(data['logger'], 64)

    if data.get('platform'):
        data['platform'] = trim(data['platform'], 64)

    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = timezone.now()

    # We must convert date to local time so Django doesn't mess it up
    # based on TIME_ZONE
    if settings.TIME_ZONE:
        if not timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=timezone.utc)
    elif timezone.is_aware(timestamp):
        timestamp = timestamp.replace(tzinfo=None)
    data['timestamp'] = timestamp

    if not data.get('event_id'):
        data['event_id'] = uuid.uuid4().hex

    data.setdefault('message', None)
    data.setdefault('culprit', None)
    data.setdefault('time_spent', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('platform', None)
    data.setdefault('extra', {})

    tags = data.get('tags')
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = tags.items()
    # prevent [tag, tag, tag] (invalid) syntax
    elif not all(len(t) == 2 for t in tags):
        tags = []
    else:
        tags = list(tags)
    data['tags'] = tags

    if not isinstance(data['extra'], dict):
        # throw it away
        data['extra'] = {}

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    # TODO(dcramer): find a better place for this logic
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        # default the culprit to the url
        if not data['culprit']:
            data['culprit'] = data['sentry.interfaces.Http']['url']

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['message']:
        data['message'] = trim(data['message'], 2048)

    return data

def U(self):
    "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
    if isinstance(self.data, datetime.datetime) and is_aware(self.data):
        return int(calendar.timegm(self.data.utctimetuple()))
    else:
        return int(time.mktime(self.data.timetuple()))

def normalize(self):
    # TODO(dcramer): store http.env.REMOTE_ADDR as user.ip
    # First we pull out our top-level (non-data attr) kwargs
    data = self.data

    if not isinstance(data.get('level'), (six.string_types, int)):
        data['level'] = logging.ERROR
    elif data['level'] not in LOG_LEVELS:
        data['level'] = logging.ERROR

    if not data.get('logger'):
        data['logger'] = DEFAULT_LOGGER_NAME
    else:
        logger = trim(data['logger'].strip(), 64)
        if TagKey.is_valid_key(logger):
            data['logger'] = logger
        else:
            data['logger'] = DEFAULT_LOGGER_NAME

    if data.get('platform'):
        data['platform'] = trim(data['platform'], 64)

    current_timestamp = timezone.now()
    timestamp = data.get('timestamp')
    if not timestamp:
        timestamp = current_timestamp

    if isinstance(timestamp, datetime):
        # We must convert date to local time so Django doesn't mess it up
        # based on TIME_ZONE
        if settings.TIME_ZONE:
            if not timezone.is_aware(timestamp):
                timestamp = timestamp.replace(tzinfo=timezone.utc)
        elif timezone.is_aware(timestamp):
            timestamp = timestamp.replace(tzinfo=None)
        timestamp = float(timestamp.strftime('%s'))

    data['timestamp'] = timestamp
    data['received'] = current_timestamp

    if not data.get('event_id'):
        data['event_id'] = uuid4().hex

    data.setdefault('message', '')
    data.setdefault('culprit', None)
    data.setdefault('time_spent', None)
    data.setdefault('server_name', None)
    data.setdefault('site', None)
    data.setdefault('checksum', None)
    data.setdefault('fingerprint', None)
    data.setdefault('platform', None)
    data.setdefault('environment', None)
    data.setdefault('extra', {})
    data.setdefault('errors', [])

    tags = data.get('tags')
    if not tags:
        tags = []
    # full support for dict syntax
    elif isinstance(tags, dict):
        tags = tags.items()
    # prevent [tag, tag, tag] (invalid) syntax
    elif not all(len(t) == 2 for t in tags):
        tags = []
    else:
        tags = list(tags)

    data['tags'] = []
    for key, value in tags:
        key = six.text_type(key).strip()
        value = six.text_type(value).strip()
        if not (key and value):
            continue
        data['tags'].append((key, value))

    if not isinstance(data['extra'], dict):
        # throw it away
        data['extra'] = {}

    trim_dict(data['extra'], max_size=settings.SENTRY_MAX_EXTRA_VARIABLE_SIZE)

    # TODO(dcramer): more of validate data needs stuffed into the manager
    for key in data.keys():
        if key in CLIENT_RESERVED_ATTRS:
            continue

        value = data.pop(key)

        try:
            interface = get_interface(key)()
        except ValueError:
            continue

        try:
            inst = interface.to_python(value)
            data[inst.get_path()] = inst.to_json()
        except Exception:
            pass

    data['version'] = self.version

    # TODO(dcramer): find a better place for this logic
    exception = data.get('sentry.interfaces.Exception')
    stacktrace = data.get('sentry.interfaces.Stacktrace')
    if exception and len(exception['values']) == 1 and stacktrace:
        exception['values'][0]['stacktrace'] = stacktrace
        del data['sentry.interfaces.Stacktrace']

    if 'sentry.interfaces.Http' in data:
        try:
            ip_address = validate_ip(
                data['sentry.interfaces.Http'].get('env', {}).get('REMOTE_ADDR'),
                required=False,
            )
        except ValueError:
            ip_address = None

        if ip_address:
            data.setdefault('sentry.interfaces.User', {})
            data['sentry.interfaces.User'].setdefault('ip_address', ip_address)

    if data['time_spent']:
        data['time_spent'] = int(data['time_spent'])

    if data['culprit']:
        data['culprit'] = trim(data['culprit'], MAX_CULPRIT_LENGTH)

    if data['message']:
        data['message'] = trim(data['message'], settings.SENTRY_MAX_MESSAGE_LENGTH)

    return data

def autosave_js(self, request, object_id, extra_context=None):
    try:
        object_id = int(unquote(object_id))
    except ValueError:
        return HttpResponse(status=404, content_type='application/x-javascript')

    obj = None
    updated = None

    # Raise exception if the admin doesn't have a 'autosave_last_modified_field' property
    if not self.autosave_last_modified_field:
        cls_name = f"{self.__module__}.{self.__class__.__name__}"
        raise ImproperlyConfigured(
            f"Autosave is not configured correctly. {cls_name} "
            "is missing property 'autosave_last_modified_field', which "
            "should be set to the model's last updated datetime field.")

    # Raise exception if self.autosave_last_modified_field is not set
    try:
        self.model._meta.get_field(self.autosave_last_modified_field)
    except FieldDoesNotExist:
        raise

    prefix = self.app_model_label
    if not object_id:
        autosave_url = reverse(f"admin:{prefix}_add")
        add_log_entries = LogEntry.objects.filter(
            user=request.user,
            content_type=ContentType.objects.get_for_model(self.model),
            action_flag=ADDITION)
        try:
            updated = add_log_entries[0].action_time
        except IndexError:
            pass
    else:
        autosave_url = reverse(f"admin:{prefix}_change", args=[str(object_id)])
        try:
            obj = self.get_object(request, object_id)
        except (ValueError, self.model.DoesNotExist):
            # Fixed: the original referenced an undefined `opts` here.
            name = force_text(self.model._meta.verbose_name)
            key = escape(object_id)
            raise Http404(_(f'{name} object with primary key {key} does not exist.'))
        else:
            updated = getattr(obj, self.autosave_last_modified_field, None)

    # Make sure date modified time doesn't predate Unix-time.
    if updated:
        if timezone.is_aware(updated):
            updated = timezone.make_naive(updated)
        # I'm pretty confident they didn't do any Django autosaving in 1969.
        updated = max(updated, datetime(year=1970, month=1, day=1))

    if obj and not self.has_change_permission(request, obj):
        raise PermissionDenied
    elif not obj and not self.has_add_permission(request):
        raise PermissionDenied

    js_vars = {
        'autosave_url': autosave_url,
        'is_add_view': not object_id,
        'server_time_epoch': time.mktime(datetime.now().timetuple()),
        'last_updated_epoch': time.mktime(updated.timetuple()) if updated else None,
        'is_recovered_autosave': bool(request.GET.get('is_recovered')),
    }

    response_js = textwrap.dedent("""
        var DjangoAutosave = (typeof window.DjangoAutosave != 'undefined') ? DjangoAutosave : {{}};
        DjangoAutosave.config = (function() {{
            var config = {config_data};
            config.client_time_epoch = Math.round((new Date()).getTime()/1000, 0);
            config.client_time_offset = config.client_time_epoch - config.server_time_epoch;
            return config;
        }})();
    """).strip().format(config_data=json.dumps(js_vars, indent=4, sort_keys=True))

    return HttpResponse(response_js, content_type='application/x-javascript')

def format_datetime(self, data):
    # translate to time in local timezone
    if timezone.is_aware(data):
        data = timezone.localtime(data)
    return data.strftime("%Y-%m-%d %H:%M:%S %z")

def date(datetime, arg=None):
    from django.template.defaultfilters import date
    from django.utils import timezone
    if not timezone.is_aware(datetime):
        datetime = datetime.replace(tzinfo=timezone.utc)
    return date(datetime, arg)

def _value(self):
    date = self.data
    if settings.USE_TZ and isinstance(date, datetime.datetime) and timezone.is_aware(date):
        self.data = timezone.localtime(date)
    return super(DateTimeField, self)._value()

def testHourly(self):
    Job.objects.all().delete()

    job = Job.objects.create(
        name='test',
        raw_command='ls',
        frequency=c.HOURLY,
        enabled=True,
        force_run=True,
        log_stdout=True,
        log_stderr=True,
    )
    self.assertEqual(job.logs.all().count(), 0)
    self.assertTrue(job.next_run)

    next_run0 = job.next_run.astimezone(pytz.utc)
    print('next_run0:', next_run0)
    self.assertTrue(timezone.is_aware(next_run0))
    self.assertEqual(next_run0.tzname(), 'UTC')

    # Initial next_run should be one hour from now.
    td = next_run0 - timezone.now().astimezone(pytz.utc)
    print('td:', td)
    result = abs(td.total_seconds() - 3600)
    self.assertTrue(result <= 5, result)

    call_command('cron', update_heartbeat=0, sync=1)
    print('stdout:', job.logs.all()[0].stdout)
    print('stderr:', job.logs.all()[0].stderr)
    self.assertEqual(job.logs.all()[0].success, True)

    Job.objects.update()
    job = Job.objects.get(id=job.id)
    self.assertEqual(job.enabled, True)
    self.assertEqual(job.force_run, False)
    self.assertTrue(job.next_run)
    self.assertEqual(job.logs.all().count(), 1)

    next_run1 = job.next_run.astimezone(pytz.utc)
    print('next_run1:', next_run1)
    print('now:', timezone.now().astimezone(pytz.utc))
    self.assertTrue(timezone.is_aware(next_run1))
    # All datetimes get normalized to UTC in the database.
    self.assertEqual(next_run1.tzname(), 'UTC')

    # Force run should have left the next_run unchanged.
    td = (next_run1 - next_run0)  # .total_seconds()
    print('td:', td)
    self.assertEqual(td.total_seconds(), 0)

    job.next_run = timezone.now() - timedelta(seconds=3600)
    job.save()
    self.assertEqual(job.is_due(), True)

    call_command('cron', update_heartbeat=0, sync=1)

    # The job should have been automatically scheduled to run an hour later.
    Job.objects.update()
    job = Job.objects.get(id=job.id)
    self.assertEqual(job.logs.all().count(), 2)

    next_run2 = job.next_run.astimezone(pytz.utc)
    print('next_run0:', next_run0)
    print('next_run2:', next_run2)
    #self.assertTrue(td.total_seconds())
    td2 = (next_run2 - timezone.now().astimezone(pytz.utc))
    print('td2:', td2)
    self.assertTrue(abs(td2.total_seconds() - 3600) <= 5)

def get_aware_datetime(date_str):
    ret = parse_datetime(date_str)
    if not is_aware(ret):
        ret = make_aware(ret)
    return ret

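# A short usage sketch for get_aware_datetime() (added for illustration, not
# in the original). parse_datetime() preserves an explicit UTC offset, and
# only offset-less strings get the current timezone attached by make_aware()
# (the one-argument form requires Django >= 1.9, as the snippet itself assumes).
from django.utils.timezone import is_aware

assert is_aware(get_aware_datetime('2020-01-01T12:00:00+02:00'))  # offset kept
assert is_aware(get_aware_datetime('2020-01-01T12:00:00'))  # local tz attached
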
def wrapper(self, *args, **kwargs):
    recipient = kwargs.pop('email_address', None)
    source = ''
    destination = ''
    vcluster = ''
    try:
        source = join_host_and_port(kwargs.get('s3_host'), kwargs.get('s3_port'))
    except Exception:
        pass
    try:
        destination = join_host_and_port(settings.SX_CONF.get('cluster'),
                                         settings.SX_CONF.get('port'))
    except Exception:
        pass
    try:
        vcluster = Cluster.objects.get(pk=kwargs['cluster_pk']).name
    except Exception:
        pass
    try:
        returned = func(self, *args, **kwargs)
    except Exception as e:
        reason = ': '.join([type(e).__name__, str(e)])
        if hasattr(e, 'imported_buckets'):
            imported = e.imported_buckets
        else:
            imported = []
        if hasattr(e, 'skipped_buckets'):
            skipped = e.skipped_buckets
        else:
            skipped = []
        if recipient:
            finish_time = timezone.now()
            if timezone.is_aware(finish_time):
                finish_time = timezone.localtime(finish_time)
            send_status_email(recipients=recipient, succeeded=False,
                              uuid=self.request.id, time=finish_time,
                              imported=imported, skipped=skipped,
                              reason=reason, source=source,
                              destination=destination, vcluster=vcluster)
        raise
    else:
        imported = returned.get('imported_buckets')
        skipped = returned.get('skipped_buckets')
        if recipient:
            finish_time = timezone.now()
            if timezone.is_aware(finish_time):
                finish_time = timezone.localtime(finish_time)
            send_status_email(recipients=recipient, succeeded=True,
                              uuid=self.request.id, time=finish_time,
                              imported=imported, skipped=skipped,
                              source=source, destination=destination,
                              vcluster=vcluster)
        return returned

def astimezone(dt: datetime.datetime, tzinfo: pytz.tzinfo.DstTzInfo):
    assert dj_timezone.is_aware(dt)
    return tzinfo.normalize(dt.astimezone(tzinfo))

def datetime_to_str(datetime, format=DATETIME_FORMAT):
    if is_aware(datetime):
        datetime = to_naive_datetime(datetime)
    return datetime.strftime(format)

def get_running_lock_date(self):
    date = self.cache.get(self.lock_name)
    if date and not timezone.is_aware(date):
        tz = timezone.get_current_timezone()
        date = timezone.make_aware(date, tz)
    return date

def make_naive(dt, tz):
    if timezone.is_aware(dt):
        return timezone.make_naive(dt, tz)
    return dt

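# Hedged usage sketch for the make_naive() wrapper above (added here, not from
# the original source); assumes a configured Django settings module.
import datetime
import pytz
from django.utils import timezone

aware = timezone.make_aware(datetime.datetime(2020, 3, 1, 9, 0), pytz.UTC)
naive = make_naive(aware, pytz.UTC)
assert not timezone.is_aware(naive)
# Already-naive values pass through unchanged:
assert make_naive(naive, pytz.UTC) is naive
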
def format_date(date):
    return (localtime(date) if is_aware(date) else date) \
        .strftime('%Y-%m-%d %H:%M:%S')

def save_submission(
    request: 'rest_framework.request.Request',
    xform: XForm,
    xml: str,
    media_files: list,
    new_uuid: str,
    status: str,
    date_created_override: datetime,
) -> Instance:
    if not date_created_override:
        date_created_override = get_submission_date_from_xml(xml)

    # We have to save the `Instance` to the database before we can associate
    # any `Attachment`s with it, but we are inside a transaction and saving
    # attachments is slow! Usually creating an `Instance` updates the
    # submission count of the parent `XForm` automatically via a `post_save`
    # signal, but that takes a lock on `logger_xform` that persists until the
    # end of the transaction. We must avoid doing that until all attachments
    # are saved, and we are as close as possible to the end of the
    # transaction. See https://github.com/kobotoolbox/kobocat/issues/490.
    #
    # `_get_instance(..., defer_counting=True)` skips incrementing the
    # submission counters and returns an `Instance` with a `defer_counting`
    # attribute set to `True` *if* a new instance was created. We are
    # responsible for calling `update_xform_submission_count()` if the
    # returned `Instance` has `defer_counting = True`.
    instance = _get_instance(request, xml, new_uuid, status, xform,
                             defer_counting=True)

    save_attachments(instance, media_files)

    # override date created if required
    if date_created_override:
        if not timezone.is_aware(date_created_override):
            # default to utc?
            date_created_override = timezone.make_aware(
                date_created_override, timezone.utc)
        instance.date_created = date_created_override
        instance.save()

    if instance.xform is not None:
        pi, created = ParsedInstance.objects.get_or_create(instance=instance)
        if not created:
            pi.save(asynchronous=False)

    # Now that the slow tasks are complete and we are (hopefully!) close to
    # the end of the transaction, update the submission count if the
    # `Instance` was newly created
    if getattr(instance, 'defer_counting', False):
        # Remove the Python-only attribute
        del instance.defer_counting
        update_xform_submission_count(instance=instance, created=True)
        update_user_submissions_counter(instance=instance, created=True)

    return instance

def date_hierarchy(cl):
    """
    Display the date hierarchy for date drill-down functionality.
    """
    if cl.date_hierarchy:
        field_name = cl.date_hierarchy
        field = get_fields_from_path(cl.model, field_name)[-1]
        if isinstance(field, models.DateTimeField):
            dates_or_datetimes = "datetimes"
            qs_kwargs = {"is_dst": True}
        else:
            dates_or_datetimes = "dates"
            qs_kwargs = {}
        year_field = "%s__year" % field_name
        month_field = "%s__month" % field_name
        day_field = "%s__day" % field_name
        field_generic = "%s__" % field_name
        year_lookup = cl.params.get(year_field)
        month_lookup = cl.params.get(month_field)
        day_lookup = cl.params.get(day_field)

        def link(filters):
            return cl.get_query_string(filters, [field_generic])

        if not (year_lookup or month_lookup or day_lookup):
            # select appropriate start level
            date_range = cl.queryset.aggregate(first=models.Min(field_name),
                                               last=models.Max(field_name))
            if date_range["first"] and date_range["last"]:
                if dates_or_datetimes == "datetimes":
                    date_range = {
                        k: timezone.localtime(v) if timezone.is_aware(v) else v
                        for k, v in date_range.items()
                    }
                if date_range["first"].year == date_range["last"].year:
                    year_lookup = date_range["first"].year
                    if date_range["first"].month == date_range["last"].month:
                        month_lookup = date_range["first"].month

        if year_lookup and month_lookup and day_lookup:
            day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
            return {
                "show": True,
                "back": {
                    "link": link({
                        year_field: year_lookup,
                        month_field: month_lookup
                    }),
                    "title": capfirst(formats.date_format(day, "YEAR_MONTH_FORMAT")),
                },
                "choices": [{
                    "title": capfirst(formats.date_format(day, "MONTH_DAY_FORMAT"))
                }],
            }
        elif year_lookup and month_lookup:
            days = getattr(cl.queryset, dates_or_datetimes)(field_name, "day", **qs_kwargs)
            return {
                "show": True,
                "back": {
                    "link": link({year_field: year_lookup}),
                    "title": str(year_lookup),
                },
                "choices": [{
                    "link": link({
                        year_field: year_lookup,
                        month_field: month_lookup,
                        day_field: day.day,
                    }),
                    "title": capfirst(formats.date_format(day, "MONTH_DAY_FORMAT")),
                } for day in days],
            }
        elif year_lookup:
            months = getattr(cl.queryset, dates_or_datetimes)(field_name, "month", **qs_kwargs)
            return {
                "show": True,
                "back": {
                    "link": link({}),
                    "title": _("All dates")
                },
                "choices": [{
                    "link": link({
                        year_field: year_lookup,
                        month_field: month.month
                    }),
                    "title": capfirst(formats.date_format(month, "YEAR_MONTH_FORMAT")),
                } for month in months],
            }
        else:
            years = getattr(cl.queryset, dates_or_datetimes)(field_name, "year", **qs_kwargs)
            return {
                "show": True,
                "back": None,
                "choices": [{
                    "link": link({year_field: str(year.year)}),
                    "title": str(year.year),
                } for year in years],
            }

def update_units(self, author):
    """Update backend file and unit."""
    updated = False
    for unit in self.unit_set.filter(pending=True):
        src = unit.get_source_plurals()[0]
        add = False

        pounit, add = self.store.find_unit(unit.context, src)

        unit.pending = False

        # Bail out if we have not found anything
        if pounit is None or pounit.is_obsolete():
            self.log_error('message %s disappeared!', unit)
            unit.save(backend=True, update_fields=['pending'])
            continue

        # Check for changes
        if ((not add or unit.target == '') and
                unit.target == pounit.get_target() and
                unit.fuzzy == pounit.is_fuzzy()):
            unit.save(backend=True, update_fields=['pending'])
            continue

        updated = True

        # Optionally add unit to translation file.
        # This has to be done prior to setting the target, as some formats
        # generate content based on the target language.
        if add:
            self.store.add_unit(pounit)

        # Store translations
        if unit.is_plural():
            pounit.set_target(unit.get_target_plurals())
        else:
            pounit.set_target(unit.target)

        # Update fuzzy flag
        pounit.mark_fuzzy(unit.fuzzy)

        # Update comments as they might have been changed (e.g., fuzzy flag
        # removed)
        translated = pounit.is_translated()
        flags = pounit.get_flags()
        if translated != unit.translated or flags != unit.flags:
            unit.translated = translated
            unit.flags = flags
        unit.save(backend=True, update_fields=['translated', 'flags', 'pending'])

    # Did we do any updates?
    if not updated:
        return

    # Update po file header
    now = timezone.now()
    if not timezone.is_aware(now):
        now = timezone.make_aware(now, timezone.utc)

    # Prepare headers to update
    headers = {
        'add': True,
        'last_translator': author,
        'plural_forms': self.language.get_plural_form(),
        'language': self.language_code,
        'PO_Revision_Date': now.strftime('%Y-%m-%d %H:%M%z'),
    }

    # Optionally store language team with link to website
    if self.subproject.project.set_translation_team:
        headers['language_team'] = '{0} <{1}>'.format(
            self.language.name, get_site_url(self.get_absolute_url()))

    # Optionally store email for reporting bugs in source
    report_source_bugs = self.subproject.report_source_bugs
    if report_source_bugs != '':
        headers['report_msgid_bugs_to'] = report_source_bugs

    # Update generic headers
    self.store.update_header(**headers)

    # save translation changes
    self.store.save()

    # Update stats (the translated flag might have changed)
    self.update_stats()

def make_naive(value):
    if getattr(settings, 'USE_TZ', False) and timezone.is_aware(value):
        tz = timezone.get_default_timezone()
        value = timezone.make_naive(value, tz)
    return value

def get_field_value(self, instance, field_name):
    now_dt = timezone.now()
    if timezone.is_aware(now_dt):
        now_dt = timezone.make_naive(now_dt)
    return now_dt

def naturaltime(value):
    """
    For date and time values, show how many seconds, minutes, or hours ago
    they occurred compared to the current timestamp, and return the
    representing string.
    """
    if not isinstance(value, date):  # datetime is a subclass of date
        return value

    now = datetime.now(utc if is_aware(value) else None)
    if value < now:
        delta = now - value
        if delta.days != 0:
            # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
            return _('%(delta)s ago') % {
                'delta': defaultfilters.timesince(value, now, time_strings={
                    # Translators: 'naturaltime-past' strings will be included in
                    # '%(delta)s ago'
                    'year': npgettext_lazy('naturaltime-past', '%d year', '%d years'),
                    'month': npgettext_lazy('naturaltime-past', '%d month', '%d months'),
                    'week': npgettext_lazy('naturaltime-past', '%d week', '%d weeks'),
                    'day': npgettext_lazy('naturaltime-past', '%d day', '%d days'),
                    'hour': npgettext_lazy('naturaltime-past', '%d hour', '%d hours'),
                    'minute': npgettext_lazy('naturaltime-past', '%d minute', '%d minutes'),
                })
            }
        elif delta.seconds == 0:
            return _('now')
        elif delta.seconds < 60:
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'a second ago', '%(count)s seconds ago', delta.seconds
            ) % {'count': delta.seconds}
        elif delta.seconds // 60 < 60:
            count = delta.seconds // 60
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'a minute ago', '%(count)s minutes ago', count
            ) % {'count': count}
        else:
            count = delta.seconds // 60 // 60
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'an hour ago', '%(count)s hours ago', count
            ) % {'count': count}
    else:
        delta = value - now
        if delta.days != 0:
            # Translators: delta will contain a string like '2 months' or '1 month, 2 weeks'
            return _('%(delta)s from now') % {
                'delta': defaultfilters.timeuntil(value, now, time_strings={
                    # Translators: 'naturaltime-future' strings will be included in
                    # '%(delta)s from now'
                    'year': npgettext_lazy('naturaltime-future', '%d year', '%d years'),
                    'month': npgettext_lazy('naturaltime-future', '%d month', '%d months'),
                    'week': npgettext_lazy('naturaltime-future', '%d week', '%d weeks'),
                    'day': npgettext_lazy('naturaltime-future', '%d day', '%d days'),
                    'hour': npgettext_lazy('naturaltime-future', '%d hour', '%d hours'),
                    'minute': npgettext_lazy('naturaltime-future', '%d minute', '%d minutes'),
                })
            }
        elif delta.seconds == 0:
            return _('now')
        elif delta.seconds < 60:
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'a second from now', '%(count)s seconds from now', delta.seconds
            ) % {'count': delta.seconds}
        elif delta.seconds // 60 < 60:
            count = delta.seconds // 60
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'a minute from now', '%(count)s minutes from now', count
            ) % {'count': count}
        else:
            count = delta.seconds // 60 // 60
            return ngettext(
                # Translators: please keep a non-breaking space (U+00A0)
                # between count and time unit.
                'an hour from now', '%(count)s hours from now', count
            ) % {'count': count}

def update_units(self, units, store, author_name, author_id):
    """Update backend file and unit."""
    updated = False
    for unit in units:
        # We reuse the queryset, so pending units might reappear here
        if not unit.pending:
            continue

        # Skip changes by other authors
        change_author = unit.get_last_content_change()[0]
        if change_author.id != author_id:
            continue

        # Remove pending flag
        unit.pending = False

        try:
            pounit, add = store.find_unit(unit.context, unit.source)
        except UnitNotFound:
            # Bail out if we have not found anything
            report_error(cause="String disappeared")
            self.log_error("disappeared string: %s", unit)
            continue

        # Check for changes
        if (
            (not add or unit.target == "")
            and unit.target == pounit.target
            and unit.approved == pounit.is_approved(unit.approved)
            and unit.fuzzy == pounit.is_fuzzy()
        ):
            continue

        updated = True

        # Optionally add unit to translation file.
        # This has to be done prior to setting the target, as some formats
        # generate content based on the target language.
        if add:
            store.add_unit(pounit.unit)

        # Store translations
        if unit.is_plural:
            pounit.set_target(unit.get_target_plurals())
        else:
            pounit.set_target(unit.target)

        # Update fuzzy/approved flag
        pounit.mark_fuzzy(unit.state == STATE_FUZZY)
        pounit.mark_approved(unit.state == STATE_APPROVED)

        # Update comments as they might have been changed by state changes
        state = unit.get_unit_state(pounit, "")
        flags = pounit.flags
        if state != unit.state or flags != unit.flags:
            unit.state = state
            unit.flags = flags
        unit.save(
            update_fields=["state", "flags", "pending"],
            same_content=True,
        )

    # Did we do any updates?
    if not updated:
        return

    # Update po file header
    now = timezone.now()
    if not timezone.is_aware(now):
        now = timezone.make_aware(now, timezone.utc)

    # Prepare headers to update
    headers = {
        "add": True,
        "last_translator": author_name,
        "plural_forms": self.plural.plural_form,
        "language": self.language_code,
        "PO_Revision_Date": now.strftime("%Y-%m-%d %H:%M%z"),
    }

    # Optionally store language team with link to website
    if self.component.project.set_language_team:
        headers["language_team"] = "{} <{}>".format(
            self.language.name, get_site_url(self.get_absolute_url())
        )

    # Optionally store email for reporting bugs in source
    report_source_bugs = self.component.report_source_bugs
    if report_source_bugs:
        headers["report_msgid_bugs_to"] = report_source_bugs

    # Update generic headers
    store.update_header(**headers)

    # save translation changes
    store.save()

def serialize_datetime(t, format='%Y-%m-%d %H:%M:%S'):
    if not t:
        return None
    if timezone.is_aware(t):
        t = timezone.make_naive(t)
    return t.strftime(format)

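# A brief usage sketch for serialize_datetime() (illustrative, not in the
# original source); assumes Django settings are configured. Note the
# one-argument make_naive() used above requires Django >= 1.9.
from django.utils import timezone

print(serialize_datetime(timezone.now()))  # e.g. '2020-01-01 12:00:00'
print(serialize_datetime(None))            # None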