def get_grid_content(request):
    """
    Get the configuration of the specified table from tables.json,
    then return the data according to the table's getters.

    :param request: POST with 'grid-type' (a key into ``tables``) and an
        optional 'extras' field holding a JSON-encoded dict of extra
        arguments for the getters
    :return: rows produced by ``get_attrs`` for the selected objects
    """
    today = datetime.date.today()
    now = timezone.now()
    tz_offset = request.session['detected_tz']
    tz = offset_to_timezone(tz_offset)

    extras = DotMap(today=today, now=now, user=request.user, tz=tz)

    grid_type = request.POST['grid-type']
    # The default must be the JSON *string* '{}' — passing a dict to
    # json.loads raises TypeError when 'extras' is missing from the POST.
    extra_args = json.loads(request.POST.get('extras', '{}'))
    for key, value in extra_args.items():
        extras[key] = value

    table = tables[grid_type]
    klass = table['class']
    # Renamed from `filter` to avoid shadowing the builtin.
    row_filter = table['filter']

    if row_filter is None:
        objs = klass.objects.all()
    else:
        objs = row_filter(extras)

    rows = get_attrs(objs, table, extras)
    return rows
def save_history(request):
    """
    Save a copy of all ExtraAttrValue (labels, notes, ...) in a HistoryEntry.

    :param request: POST must specify 'comment' (stored with this copy),
        'database' (database id) and 'type' ('labels' or 'segmentation')
    :return: the bulk-serialised row for the created HistoryEntry
    :version: 4
    """
    version = 4
    user = request.user

    comment = get_or_error(request.POST, 'comment')
    database_id = get_or_error(request.POST, 'database')
    backup_type = get_or_error(request.POST, 'type')

    database = get_or_error(Database, dict(id=database_id))
    assert_permission(user, database, DatabasePermission.VIEW)
    assert_values(backup_type, ['labels', 'segmentation'])

    meta = dict(database=database_id, user=user.id, time=timezone.now(), version=version, note=comment,
                type=backup_type)

    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_BZIP2, False) as zip_file:
        # default=str: meta contains a datetime (time), which json cannot
        # serialise natively — without it json.dumps raises TypeError.
        zip_file.writestr('meta.json', json.dumps(meta, default=str))
        zip_file.writestr('root.extraattrvalue.json', 'here for checking purpose')

        if backup_type == 'labels':
            save_label_history(database, user, zip_file)
        else:
            save_segmentation_history(database, user, zip_file)

    binary_content = zip_buffer.getvalue()

    he = HistoryEntry.objects.create(user=user, time=timezone.now(), database=database, version=version,
                                     note=comment, type=backup_type)
    filename = he.filename
    filepath = history_path(filename)
    ensure_parent_folder_exists(filepath)

    with open(filepath, 'wb') as f:
        f.write(binary_content)

    tz_offset = request.session['detected_tz']
    tz = offset_to_timezone(tz_offset)

    _, rows = bulk_get_history_entries([he], DotMap(user=user, database=database_id, tz=tz))
    return rows[0]
def test_examples(self):
    """Check each (offset-hours, expected-timezones) example pair."""
    # Python < 3.4 compatibility: subTest is unavailable there
    if not hasattr(self, 'subTest'):
        self.skipTest('No subTest support')

    for js_offset_hours, expected_tzs in self.example_offsets_timezones:
        with self.subTest(hour=js_offset_hours):
            js_offset_minutes = js_offset_hours * 60
            # Probe both start-of-year and mid-year to cover DST differences
            new_year_tz = offset_to_timezone(js_offset_minutes, datetime(2018, 1, 1, 0, 0, 0))
            mid_year_tz = offset_to_timezone(js_offset_minutes, datetime(2018, 7, 1, 0, 0, 0))
            self.assertEqual(expected_tzs, (str(new_year_tz), str(mid_year_tz)))
def set_timezone(request):
    """Store the client's reported UTC offset in the session and respond with the matched timezone name."""
    offset = request.data['offset']
    request.session['detected_tz'] = offset
    matched = offset_to_timezone(offset)
    return Response({'tz': str(matched)})
def test_fuzzy(self):
    """Test the fuzzy matching of timezones"""
    # -10 minutes is not an exact offset of any zone; expect the nearest match
    matched = offset_to_timezone(-10, now=self.winter)
    self.assertEqual('Europe/London', str(matched))
def test_tokyo(self):
    """A -9h JS offset in summer resolves to Tokyo."""
    matched = offset_to_timezone(-9 * 60, now=self.summer)
    self.assertEqual('Asia/Tokyo', str(matched))
def test_new_york_summer(self):
    """A +4h JS offset in summer resolves to New York (EDT)."""
    matched = offset_to_timezone(4 * 60, now=self.summer)
    self.assertEqual('America/New_York', str(matched))
def test_london_summer(self):
    """A -1h JS offset in summer resolves to London (BST)."""
    matched = offset_to_timezone(-60, now=self.summer)
    self.assertEqual('Europe/London', str(matched))
def test_london_winter(self):
    """A zero JS offset in winter resolves to London (GMT)."""
    matched = offset_to_timezone(0, now=self.winter)
    self.assertEqual('Europe/London', str(matched))
def get_timezone(tz):
    """Resolve *tz* to a timezone object via offset_to_timezone; if *tz* is
    not a numeric offset (offset_to_timezone raises TypeError), return it
    unchanged, assuming it is already a timezone."""
    try:
        return offset_to_timezone(tz)
    except TypeError:
        return tz