def test_round_trip_conversion(self):
    """Verify datetime <-> microseconds-since-epoch conversion is lossless."""
    # UTC-aware datetimes covering boundary microsecond values.
    base = datetime.datetime(2015, 1, 1, 15, 0, 0)
    datetime_values = [
        date_utils.add_utc_tz(base.replace(microsecond=usec))
        for usec in (500, 0, 999999)
    ]

    # Expected integer representation: whole seconds since epoch scaled to
    # microseconds, plus the sub-second microsecond component.
    microsecond_values = [
        int(calendar.timegm(value.timetuple()) * 1000000) + value.time().microsecond
        for value in datetime_values
    ]

    field = ComplexDateTimeField()

    # datetime -> microseconds since epoch
    for dt, expected_us in zip(datetime_values, microsecond_values):
        actual_us = field._datetime_to_microseconds_since_epoch(value=dt)
        self.assertEqual(actual_us, expected_us)
        # The microsecond component must survive as the trailing digits.
        self.assertTrue(str(actual_us).endswith(str(dt.time().microsecond)))

    # microseconds since epoch -> datetime
    for us, expected_dt in zip(microsecond_values, datetime_values):
        self.assertEqual(
            field._microseconds_since_epoch_to_datetime(data=us), expected_dt
        )
def test_round_trip_conversion(self):
    """Ensure datetime -> microseconds-since-epoch -> datetime is lossless."""
    # Cover the microsecond boundary values: mid-range, minimum and maximum.
    datetime_values = [
        datetime.datetime(2015, 1, 1, 15, 0, 0).replace(microsecond=500),
        datetime.datetime(2015, 1, 1, 15, 0, 0).replace(microsecond=0),
        datetime.datetime(2015, 1, 1, 15, 0, 0).replace(microsecond=999999)
    ]
    # The field operates on timezone-aware (UTC) datetimes.
    datetime_values = [
        date_utils.add_utc_tz(datetime_values[0]),
        date_utils.add_utc_tz(datetime_values[1]),
        date_utils.add_utc_tz(datetime_values[2])
    ]
    microsecond_values = []

    # Calculate microsecond values
    for value in datetime_values:
        seconds = calendar.timegm(value.timetuple())
        microseconds_reminder = value.time().microsecond
        result = int(seconds * 1000000) + microseconds_reminder
        microsecond_values.append(result)

    field = ComplexDateTimeField()

    # datetime to us
    for index, value in enumerate(datetime_values):
        actual_value = field._datetime_to_microseconds_since_epoch(value=value)
        expected_value = microsecond_values[index]
        expected_microseconds = value.time().microsecond
        self.assertEqual(actual_value, expected_value)
        # Trailing digits of the integer encode the microsecond component.
        self.assertTrue(str(actual_value).endswith(str(expected_microseconds)))

    # us to datetime
    for index, value in enumerate(microsecond_values):
        actual_value = field._microseconds_since_epoch_to_datetime(data=value)
        expected_value = datetime_values[index]
        self.assertEqual(actual_value, expected_value)
def test_format_sec_truncated(self):
    """Formatting with usec=False must truncate (not round) sub-second data.

    A fixed non-zero microsecond component is used so the truncation is
    guaranteed to lose precision: with a bare utcnow() the test would flake
    whenever the clock happened to land exactly on microsecond == 0, because
    dt2 would then equal dt1 and assertLess would fail.
    """
    dt1 = date.add_utc_tz(datetime.datetime.utcnow().replace(microsecond=123456))
    dt2 = isotime.parse(isotime.format(dt1, usec=False))
    dt3 = datetime.datetime(dt1.year, dt1.month, dt1.day, dt1.hour, dt1.minute, dt1.second)
    # Truncated round-trip loses the microseconds, so it compares smaller...
    self.assertLess(dt2, dt1)
    # ...and equals a manually second-truncated copy of dt1.
    self.assertEqual(dt2, date.add_utc_tz(dt3))
def test_query_exclude_fields(self):
    """Fields listed in ``exclude_fields`` must come back as None."""
    ts = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))

    # Seed five documents with identical timestamp / category.
    for index in range(5):
        self.access.add_or_update(
            FakeModelDB(name="test-2-%s" % (index), timestamp=ts, category="type1")
        )

    # No exclusions - every field is populated.
    model_dbs = FakeModel.query()
    self.assertEqual(model_dbs[0].name, "test-2-0")
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, "type1")

    # Excluding "name" nulls it out while the rest survive.
    model_dbs = FakeModel.query(exclude_fields=["name"])
    self.assertTrue(model_dbs[0].id)
    self.assertIsNone(model_dbs[0].name)
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, "type1")

    # Excluding two fields.
    model_dbs = FakeModel.query(exclude_fields=["name", "timestamp"])
    self.assertTrue(model_dbs[0].id)
    self.assertIsNone(model_dbs[0].name)
    self.assertIsNone(model_dbs[0].timestamp)
    self.assertEqual(model_dbs[0].category, "type1")

    # Excluding everything but the implicit id.
    model_dbs = FakeModel.query(exclude_fields=["name", "timestamp", "category"])
    self.assertTrue(model_dbs[0].id)
    self.assertIsNone(model_dbs[0].name)
    self.assertIsNone(model_dbs[0].timestamp)
    self.assertIsNone(model_dbs[0].category)
def test_query_exclude_fields(self):
    """Fields listed in exclude_fields should be returned as None."""
    count = 5
    ts = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))

    # Seed `count` documents sharing the same timestamp and category.
    for i in range(count):
        category = 'type1'
        obj = FakeModelDB(name='test-2-%s' % (i), timestamp=ts, category=category)
        self.access.add_or_update(obj)

    # No exclusions - all fields populated.
    model_dbs = FakeModel.query()
    self.assertEqual(model_dbs[0].name, 'test-2-0')
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, 'type1')

    # Excluded field is None; the others survive (id is always present).
    model_dbs = FakeModel.query(exclude_fields=['name'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, 'type1')

    model_dbs = FakeModel.query(exclude_fields=['name', 'timestamp'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, 'type1')

    # All user fields excluded - only id remains.
    model_dbs = FakeModel.query(
        exclude_fields=['name', 'timestamp', 'category'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, None)
def test_format(self):
    """Exercise isotime.format() across usec/offset combinations for
    datetime objects, pre-formatted strings and unix epoch input."""
    dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))

    dt_str_usec_offset = '2000-01-01T12:00:00.000000+00:00'
    dt_str_usec = '2000-01-01T12:00:00.000000Z'
    dt_str_offset = '2000-01-01T12:00:00+00:00'
    dt_str = '2000-01-01T12:00:00Z'
    dt_unicode = u'2000-01-01T12:00:00Z'

    # datetime object: all four usec/offset combinations.
    cases = [
        (True, True, dt_str_usec_offset),
        (True, False, dt_str_usec),
        (False, True, dt_str_offset),
        (False, False, dt_str),
    ]
    for usec, offset, expected in cases:
        self.assertEqual(isotime.format(dt, usec=usec, offset=offset), expected)

    # Already-formatted strings pass through unchanged.
    self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
    self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)

    # unix timestamp (epoch)
    epoch = 1557390483
    self.assertEqual(isotime.format(epoch, usec=True, offset=True),
                     '2019-05-09T08:28:03.000000+00:00')
    self.assertEqual(isotime.format(epoch, usec=False, offset=False),
                     '2019-05-09T08:28:03Z')
    self.assertEqual(isotime.format(epoch, usec=False, offset=True),
                     '2019-05-09T08:28:03+00:00')
def test_query_only_fields(self):
    """``only_fields`` should restrict returned fields (id always included)."""
    ts = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))

    # Seed five documents with identical timestamp / category.
    for index in range(5):
        obj = FakeModelDB(name="test-%s" % (index), timestamp=ts, category="type1")
        self.access.add_or_update(obj)

    # Unrestricted query returns fully populated documents.
    model_dbs = FakeModel.query()
    self.assertEqual(model_dbs[0].name, "test-0")
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, "type1")

    # only id
    model_dbs = FakeModel.query(only_fields=["id"])
    self.assertTrue(model_dbs[0].id)
    self.assertIsNone(model_dbs[0].name)
    self.assertIsNone(model_dbs[0].timestamp)
    self.assertIsNone(model_dbs[0].category)

    # only name - note: id is always included
    model_dbs = FakeModel.query(only_fields=["name"])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, "test-0")
    self.assertIsNone(model_dbs[0].timestamp)
    self.assertIsNone(model_dbs[0].category)
def test_query_exclude_fields(self):
    """Fields named in exclude_fields should be returned as None."""
    count = 5
    ts = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))

    # Seed `count` documents sharing one timestamp and category.
    for i in range(count):
        category = 'type1'
        obj = FakeModelDB(name='test-2-%s' % (i), timestamp=ts, category=category)
        self.access.add_or_update(obj)

    # Baseline: no exclusions, everything populated.
    model_dbs = FakeModel.query()
    self.assertEqual(model_dbs[0].name, 'test-2-0')
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, 'type1')

    # Each excluded field comes back as None; id is always retained.
    model_dbs = FakeModel.query(exclude_fields=['name'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, 'type1')

    model_dbs = FakeModel.query(exclude_fields=['name', 'timestamp'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, 'type1')

    model_dbs = FakeModel.query(exclude_fields=['name', 'timestamp', 'category'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, None)
def test_query_only_fields(self):
    """only_fields should restrict the returned fields (id is implicit)."""
    count = 5
    ts = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))

    # Seed `count` documents sharing one timestamp and category.
    for i in range(count):
        category = 'type1'
        obj = FakeModelDB(name='test-%s' % (i), timestamp=ts, category=category)
        self.access.add_or_update(obj)

    # Baseline: unrestricted query returns every field.
    model_dbs = FakeModel.query()
    self.assertEqual(model_dbs[0].name, 'test-0')
    self.assertEqual(model_dbs[0].timestamp, ts)
    self.assertEqual(model_dbs[0].category, 'type1')

    # only id
    model_dbs = FakeModel.query(only_fields=['id'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, None)
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, None)

    # only name - note: id is always included
    model_dbs = FakeModel.query(only_fields=['name'])
    self.assertTrue(model_dbs[0].id)
    self.assertEqual(model_dbs[0].name, 'test-0')
    self.assertEqual(model_dbs[0].timestamp, None)
    self.assertEqual(model_dbs[0].category, None)
def test_token_post_set_ttl(self):
    """A client-supplied ttl must bound the returned token expiry."""
    before_request = date_utils.add_utc_tz(date_utils.get_datetime_utc_now())
    response = self.app.post_json(TOKEN_V1_PATH, {'ttl': 60}, expect_errors=False)
    # Upper bound computed after the request; the expiry must fall below it.
    upper_bound = date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=60)
    self.assertEqual(response.status_int, 201)
    actual_expiry = isotime.parse(response.json['expiry'])
    # Expiry lies strictly inside (request time, request time + ttl).
    self.assertLess(before_request, actual_expiry)
    self.assertLess(actual_expiry, upper_bound)
def test_get_marker_from_db(self):
    """A marker stored as an ISO string must be read back as a UTC datetime."""
    marker_dt = date_utils.get_datetime_utc_now() - datetime.timedelta(minutes=5)
    DumperMarker.add_or_update(
        DumperMarkerDB(
            marker=isotime.format(marker_dt, offset=False),
            updated_at=date_utils.get_datetime_utc_now(),
        )
    )
    exporter = ExecutionsExporter(None, None)
    self.assertEqual(
        exporter._get_export_marker_from_db(), date_utils.add_utc_tz(marker_dt)
    )
def setUpClass(cls):
    """Seed ``num_records`` fake action executions with sequential start
    timestamps so the filter tests have predictable data to query.
    """
    super(TestActionExecutionFilters, cls).setUpClass()

    # All records start one second apart from this base timestamp.
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100

    cls.refs = {}
    cls.start_timestamps = []

    # Two execution shapes: a "chain" workflow and a simple "local" action.
    cls.fake_types = [
        {
            "trigger": copy.deepcopy(fixture.ARTIFACTS["trigger"]),
            "trigger_type": copy.deepcopy(fixture.ARTIFACTS["trigger_type"]),
            "trigger_instance": copy.deepcopy(
                fixture.ARTIFACTS["trigger_instance"]
            ),
            "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]),
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]),
            "liveaction": copy.deepcopy(
                fixture.ARTIFACTS["liveactions"]["workflow"]
            ),
            "context": copy.deepcopy(fixture.ARTIFACTS["context"]),
            "children": [],
        },
        {
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]),
        },
    ]

    def assign_parent(child):
        # Attach the child to a randomly chosen, already-inserted "chain"
        # execution and persist the updated parent.
        candidates = [v for k, v in cls.refs.items() if v.action["name"] == "chain"]
        if candidates:
            parent = random.choice(candidates)
            child["parent"] = str(parent.id)
            parent.children.append(child["id"])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data["id"] = obj_id
        data["start_timestamp"] = isotime.format(timestamp, offset=False)
        data["end_timestamp"] = isotime.format(timestamp, offset=False)
        data["status"] = data["liveaction"]["status"]
        data["result"] = data["liveaction"]["result"]
        # Roughly half of the "local" executions get a "chain" parent.
        if fake_type["action"]["name"] == "local" and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def test_create_token_ttl_capped(self):
    """A requested TTL above the configured maximum must be capped."""
    # TTL deliberately larger than the configured cap.
    ttl = cfg.CONF.auth.token_ttl + 10
    expected_expiry = date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    expected_expiry = date_utils.add_utc_tz(expected_expiry)
    # NOTE(review): a literal 10 is passed instead of the oversized `ttl`
    # computed above - presumably the intent was create_token(USERNAME, ttl)
    # so the cap is actually exercised; confirm against access.create_token.
    token = access.create_token(USERNAME, 10)
    self.assertTrue(token is not None)
    self.assertTrue(token.token is not None)
    self.assertEqual(token.user, USERNAME)
    # Expiry must stay below the (uncapped) requested window.
    self.assertLess(isotime.parse(token.expiry), expected_expiry)
def test_create_token_ttl_capped(self):
    """A requested TTL above the configured maximum must be capped."""
    # TTL deliberately larger than the configured cap.
    ttl = cfg.CONF.auth.token_ttl + 10
    expected_expiry = date_utils.get_datetime_utc_now(
    ) + datetime.timedelta(seconds=ttl)
    expected_expiry = date_utils.add_utc_tz(expected_expiry)
    # NOTE(review): a literal 10 is passed instead of the oversized `ttl`
    # computed above - presumably the intent was create_token(USERNAME, ttl)
    # so the cap is actually exercised; confirm against access.create_token.
    token = access.create_token(USERNAME, 10)
    self.assertTrue(token is not None)
    self.assertTrue(token.token is not None)
    self.assertEqual(token.user, USERNAME)
    # Expiry must stay below the (uncapped) requested window.
    self.assertLess(isotime.parse(token.expiry), expected_expiry)
def test_create_token_ttl_ok(self):
    """A token created with an allowed TTL expires within that window."""
    ttl = 10
    token = access.create_token(USERNAME, 10)
    self.assertIsNotNone(token)
    self.assertIsNotNone(token.token)
    self.assertEqual(token.user, USERNAME)
    # Deadline computed after creation, so the expiry must fall below it.
    deadline = date_utils.add_utc_tz(
        date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    )
    self.assertLess(isotime.parse(token.expiry), deadline)
def _test_token_post(self, path=TOKEN_V1_PATH):
    """Issue a default token request and validate payload and expiry window."""
    ttl = cfg.CONF.auth.token_ttl
    issued_after = date_utils.get_datetime_utc_now()
    response = self.app.post_json(path, {}, expect_errors=False)
    deadline = date_utils.add_utc_tz(
        date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    )
    self.assertEqual(response.status_int, 201)
    self.assertIsNotNone(response.json['token'])
    self.assertEqual(response.json['user'], USERNAME)
    # Expiry must fall strictly inside (request time, request time + ttl).
    actual_expiry = isotime.parse(response.json['expiry'])
    self.assertLess(issued_after, actual_expiry)
    self.assertLess(actual_expiry, deadline)
def setUpClass(cls):
    """Seed ``num_records`` fake action executions with sequential start
    timestamps so the filter tests have predictable data to query.
    """
    super(TestActionExecutionFilters, cls).setUpClass()

    # All records start one second apart from this base timestamp.
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100

    cls.refs = {}
    cls.start_timestamps = []

    # Two execution shapes: a "chain" workflow and a simple "local" action.
    cls.fake_types = [
        {
            "trigger": copy.deepcopy(fixture.ARTIFACTS["trigger"]),
            "trigger_type": copy.deepcopy(fixture.ARTIFACTS["trigger_type"]),
            "trigger_instance": copy.deepcopy(fixture.ARTIFACTS["trigger_instance"]),
            "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]),
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["workflow"]),
            "context": copy.deepcopy(fixture.ARTIFACTS["context"]),
            "children": [],
        },
        {
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]),
        },
    ]

    def assign_parent(child):
        # Attach the child to a randomly chosen, already-inserted "chain"
        # execution and persist the updated parent.
        # NOTE: dict.iteritems() is Python 2-only and was removed in
        # Python 3 - use items() instead.
        candidates = [v for k, v in cls.refs.items() if v.action["name"] == "chain"]
        if candidates:
            parent = random.choice(candidates)
            child["parent"] = str(parent.id)
            parent.children.append(child["id"])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data["id"] = obj_id
        data["start_timestamp"] = isotime.format(timestamp, offset=False)
        data["end_timestamp"] = isotime.format(timestamp, offset=False)
        data["status"] = data["liveaction"]["status"]
        data["result"] = data["liveaction"]["result"]
        # Roughly half of the "local" executions get a "chain" parent.
        if fake_type["action"]["name"] == "local" and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def setUpClass(cls):
    """Seed num_records fake action executions with sequential start
    timestamps so the filter tests have predictable data to query.
    """
    super(TestActionExecutionFilters, cls).setUpClass()

    # All records start one second apart from this base timestamp.
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100

    cls.refs = {}
    cls.start_timestamps = []

    # Two execution shapes: a 'chain' workflow and a simple 'local' action.
    cls.fake_types = [
        {
            'trigger': copy.deepcopy(fixture.ARTIFACTS['trigger']),
            'trigger_type': copy.deepcopy(fixture.ARTIFACTS['trigger_type']),
            'trigger_instance': copy.deepcopy(fixture.ARTIFACTS['trigger_instance']),
            'rule': copy.deepcopy(fixture.ARTIFACTS['rule']),
            'action': copy.deepcopy(fixture.ARTIFACTS['actions']['chain']),
            'runner': copy.deepcopy(fixture.ARTIFACTS['runners']['action-chain']),
            'liveaction': copy.deepcopy(fixture.ARTIFACTS['liveactions']['workflow']),
            'context': copy.deepcopy(fixture.ARTIFACTS['context']),
            'children': []
        },
        {
            'action': copy.deepcopy(fixture.ARTIFACTS['actions']['local']),
            'runner': copy.deepcopy(fixture.ARTIFACTS['runners']['run-local']),
            'liveaction': copy.deepcopy(fixture.ARTIFACTS['liveactions']['task1'])
        }
    ]

    def assign_parent(child):
        # Attach the child to a randomly chosen, already-inserted 'chain'
        # execution and persist the updated parent.
        candidates = [v for k, v in cls.refs.items() if v.action['name'] == 'chain']
        if candidates:
            parent = random.choice(candidates)
            child['parent'] = str(parent.id)
            parent.children.append(child['id'])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data['id'] = obj_id
        data['start_timestamp'] = isotime.format(timestamp, offset=False)
        data['end_timestamp'] = isotime.format(timestamp, offset=False)
        data['status'] = data['liveaction']['status']
        data['result'] = data['liveaction']['result']
        # Roughly half of the 'local' executions get a 'chain' parent.
        if fake_type['action']['name'] == 'local' and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def _test_token_post(self):
    """Issue a default token request and validate payload and expiry window."""
    ttl = cfg.CONF.auth.token_ttl
    issued_after = date_utils.get_datetime_utc_now()
    response = self.app.post_json('/tokens', {}, expect_errors=False)
    deadline = date_utils.add_utc_tz(
        date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    )
    self.assertEqual(response.status_int, 201)
    self.assertIsNotNone(response.json['token'])
    self.assertEqual(response.json['user'], USERNAME)
    # Expiry must fall strictly inside (request time, request time + ttl).
    actual_expiry = isotime.parse(response.json['expiry'])
    self.assertLess(issued_after, actual_expiry)
    self.assertLess(actual_expiry, deadline)
def test_format(self):
    """Exercise isotime.format() usec/offset combinations; pre-formatted
    strings should pass through unchanged."""
    dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
    dt_str_usec_offset = '2000-01-01T12:00:00.000000+00:00'
    dt_str_usec = '2000-01-01T12:00:00.000000Z'
    dt_str_offset = '2000-01-01T12:00:00+00:00'
    dt_str = '2000-01-01T12:00:00Z'
    dt_unicode = u'2000-01-01T12:00:00Z'

    # All four usec/offset combinations for a datetime object.
    self.assertEqual(isotime.format(dt, usec=True, offset=True), dt_str_usec_offset)
    self.assertEqual(isotime.format(dt, usec=True, offset=False), dt_str_usec)
    self.assertEqual(isotime.format(dt, usec=False, offset=True), dt_str_offset)
    self.assertEqual(isotime.format(dt, usec=False, offset=False), dt_str)

    # String input (bytes and unicode) is passed through unchanged.
    self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
    self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)
def _microseconds_since_epoch_to_datetime(self, data):
    """
    Convert a number representation to a `datetime` object (the object you
    will manipulate). This is the reverse function of
    `_convert_from_datetime`.

    :param data: Number of microseconds since the epoch.
    :type data: ``int``
    """
    # Split into whole seconds and the sub-second microsecond remainder.
    whole_seconds, leftover_usec = divmod(data, SECOND_TO_MICROSECONDS)
    naive = datetime.datetime.utcfromtimestamp(whole_seconds)
    return date_utils.add_utc_tz(naive.replace(microsecond=leftover_usec))
def _microseconds_since_epoch_to_datetime(self, data):
    """
    Convert a number representation to a `datetime` object (the object you
    will manipulate). This is the reverse function of
    `_convert_from_datetime`.

    :param data: Number of microseconds since the epoch.
    :type data: ``int``
    """
    # Whole seconds give the base (naive, UTC) datetime...
    result = datetime.datetime.utcfromtimestamp(data // SECOND_TO_MICROSECONDS)
    # ...and the remainder restores the sub-second microsecond component.
    microseconds_reminder = (data % SECOND_TO_MICROSECONDS)
    result = result.replace(microsecond=microseconds_reminder)
    result = date_utils.add_utc_tz(result)
    return result
def test_parse(self):
    """All supported ISO 8601 variants must parse to the same UTC datetime."""
    expected = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))
    # Space and 'T' separators, with and without microseconds, and every
    # accepted UTC offset spelling.
    variants = [
        '2000-01-01 12:00:00Z',
        '2000-01-01 12:00:00+00',
        '2000-01-01 12:00:00+0000',
        '2000-01-01 12:00:00+00:00',
        '2000-01-01 12:00:00.000000Z',
        '2000-01-01 12:00:00.000000+00',
        '2000-01-01 12:00:00.000000+0000',
        '2000-01-01 12:00:00.000000+00:00',
        '2000-01-01T12:00:00Z',
        '2000-01-01T12:00:00+00:00',
        '2000-01-01T12:00:00.000000Z',
        '2000-01-01T12:00:00.000000+00:00',
        '2000-01-01T12:00:00.000Z',
    ]
    for text in variants:
        self.assertEqual(isotime.parse(text), expected)
def _test_token_post(self, path=TOKEN_V1_PATH):
    """POST a default token request and validate payload and expiry window.

    :param path: API endpoint to POST to.
    :return: The webtest response so callers can make further assertions.
    """
    ttl = cfg.CONF.auth.token_ttl
    timestamp = date_utils.get_datetime_utc_now()
    response = self.app.post_json(path, {}, expect_errors=False)
    # Upper bound computed after the request completed.
    expected_expiry = date_utils.get_datetime_utc_now() + datetime.timedelta(
        seconds=ttl
    )
    expected_expiry = date_utils.add_utc_tz(expected_expiry)
    self.assertEqual(response.status_int, 201)
    self.assertIsNotNone(response.json["token"])
    self.assertEqual(response.json["user"], USERNAME)
    # Expiry must fall strictly inside (request time, request time + ttl).
    actual_expiry = isotime.parse(response.json["expiry"])
    self.assertLess(timestamp, actual_expiry)
    self.assertLess(actual_expiry, expected_expiry)
    return response
def test_create_token_service_token_can_use_arbitrary_ttl(self):
    """Service tokens may use arbitrarily large TTLs; user tokens may not."""
    ttl = 10000 * 24 * 24

    # Service token should support arbitrary TTL
    token = access.create_token(USERNAME, ttl=ttl, service=True)
    deadline = date_utils.add_utc_tz(
        date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    )
    self.assertTrue(token is not None)
    self.assertEqual(token.user, USERNAME)
    self.assertLess(isotime.parse(token.expiry), deadline)

    # Non service token should throw on TTL which is too large
    self.assertRaises(
        TTLTooLargeException,
        access.create_token,
        USERNAME,
        ttl=ttl,
        service=False,
    )
def test_datetime_range(self):
    """Range query syntax ``start..end`` must honor ordering direction."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # One document per second for a minute.
    for offset in range(60):
        self.access.add_or_update(
            FakeModelDB(
                name=uuid.uuid4().hex,
                timestamp=base + datetime.timedelta(seconds=offset),
            )
        )

    # Ascending window: seconds 10..19 -> ten documents, oldest first.
    objs = self.access.query(timestamp="2014-12-25T00:00:10Z..2014-12-25T00:00:19Z")
    self.assertEqual(len(objs), 10)
    self.assertLess(objs[0].timestamp, objs[9].timestamp)

    # Reversed bounds -> same documents, newest first.
    objs = self.access.query(timestamp="2014-12-25T00:00:19Z..2014-12-25T00:00:10Z")
    self.assertEqual(len(objs), 10)
    self.assertLess(objs[9].timestamp, objs[0].timestamp)
def test_datetime_range(self):
    """Range query syntax start..end should honor the ordering direction."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # Seed one document per second for a minute.
    for i in range(60):
        timestamp = base + datetime.timedelta(seconds=i)
        obj = FakeModelDB(name=uuid.uuid4().hex, timestamp=timestamp)
        self.access.add_or_update(obj)

    # Ascending window: seconds 10..19 -> ten documents, oldest first.
    dt_range = '2014-12-25T00:00:10Z..2014-12-25T00:00:19Z'
    objs = self.access.query(timestamp=dt_range)
    self.assertEqual(len(objs), 10)
    self.assertLess(objs[0].timestamp, objs[9].timestamp)

    # Reversed bounds -> same documents, newest first.
    dt_range = '2014-12-25T00:00:19Z..2014-12-25T00:00:10Z'
    objs = self.access.query(timestamp=dt_range)
    self.assertEqual(len(objs), 10)
    self.assertLess(objs[9].timestamp, objs[0].timestamp)
def test_datetime_range(self):
    """start_timestamp range queries must match executions inside the window."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # One execution per second for a minute.
    for offset in range(60):
        doc = copy.deepcopy(self.fake_history_subtasks[0])
        doc['id'] = str(bson.ObjectId())
        doc['start_timestamp'] = isotime.format(
            base + datetime.timedelta(seconds=offset)
        )
        api_obj = ActionExecutionAPI(**doc)
        ActionExecution.add_or_update(ActionExecutionAPI.to_model(api_obj))

    # Both range directions cover the same ten-second window.
    for dt_range in ('2014-12-25T00:00:10Z..2014-12-25T00:00:19Z',
                     '2014-12-25T00:00:19Z..2014-12-25T00:00:10Z'):
        objs = ActionExecution.query(start_timestamp=dt_range)
        self.assertEqual(len(objs), 10)
def test_sort_multiple(self):
    """Multi-key sort: category ascending, then timestamp ascending.

    Odd seed indexes get 'type1', even indexes get 'type2', so after
    sorting the first half is 'type1' (timestamps ascending) followed
    by 'type2'.
    """
    count = 60
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    for i in range(count):
        category = 'type1' if i % 2 else 'type2'
        timestamp = base + datetime.timedelta(seconds=i)
        obj = FakeModelDB(name=uuid.uuid4().hex, timestamp=timestamp,
                          category=category)
        self.access.add_or_update(obj)

    objs = self.access.query(order_by=['category', 'timestamp'])
    self.assertEqual(len(objs), count)
    for i in range(count):
        category = 'type1' if i < count / 2 else 'type2'
        self.assertEqual(objs[i].category, category)

    # Use integer (floor) division for indexing: under Python 3 `count / 2`
    # yields a float, and indexing a result set with a float raises
    # TypeError.
    half = count // 2
    # Timestamps ascend within the first category...
    self.assertLess(objs[0].timestamp, objs[half - 1].timestamp)
    # ...and the second category restarts at an earlier timestamp.
    self.assertLess(objs[half].timestamp, objs[half - 1].timestamp)
    self.assertLess(objs[half].timestamp, objs[count - 1].timestamp)
def test_get_(self, mock_get):
    """__get__ passes through None/datetime and decodes microsecond ints."""
    field = ComplexDateTimeField()

    # No value set
    mock_get.return_value = None
    self.assertIsNone(field.__get__(instance=None, owner=None))

    # Already a datetime
    now = date_utils.get_datetime_utc_now()
    mock_get.return_value = now
    self.assertEqual(field.__get__(instance=None, owner=None), now)

    # Microseconds
    dt = date_utils.add_utc_tz(
        datetime.datetime(2015, 1, 1, 15, 0, 0).replace(microsecond=500)
    )
    mock_get.return_value = field._datetime_to_microseconds_since_epoch(value=dt)
    self.assertEqual(field.__get__(instance=None, owner=None), dt)
def test_create_token_service_token_can_use_arbitrary_ttl(self): ttl = (10000 * 24 * 24) # Service token should support arbitrary TTL token = access.create_token(USERNAME, ttl=ttl, service=True) expected_expiry = date_utils.get_datetime_utc_now( ) + datetime.timedelta(seconds=ttl) expected_expiry = date_utils.add_utc_tz(expected_expiry) self.assertTrue(token is not None) self.assertEqual(token.user, USERNAME) self.assertLess(isotime.parse(token.expiry), expected_expiry) # Non service token should throw on TTL which is too large self.assertRaises(TTLTooLargeException, access.create_token, USERNAME, ttl=ttl, service=False)
def test_sort_by_start_timestamp(self):
    """order_by start_timestamp sorts ascending; -start_timestamp descending."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # One execution per second for a minute.
    for offset in range(60):
        doc = copy.deepcopy(self.fake_history_subtasks[0])
        doc["id"] = str(bson.ObjectId())
        doc["start_timestamp"] = isotime.format(
            base + datetime.timedelta(seconds=offset)
        )
        model = ActionExecutionAPI.to_model(ActionExecutionAPI(**doc))
        ActionExecution.add_or_update(model)

    window = "2014-12-25T00:00:10Z..2014-12-25T00:00:19Z"
    ascending = ActionExecution.query(
        start_timestamp=window, order_by=["start_timestamp"]
    )
    self.assertLess(ascending[0]["start_timestamp"], ascending[9]["start_timestamp"])

    window = "2014-12-25T00:00:19Z..2014-12-25T00:00:10Z"
    descending = ActionExecution.query(
        start_timestamp=window, order_by=["-start_timestamp"]
    )
    self.assertLess(descending[9]["start_timestamp"], descending[0]["start_timestamp"])
def test_add_utc_tz(self):
    """add_utc_tz must attach a UTC tzinfo to a naive datetime."""
    naive = datetime.datetime.utcnow()
    self.assertIsNone(naive.tzinfo)

    aware = date_utils.add_utc_tz(naive)
    self.assertIsNotNone(aware.tzinfo)
    self.assertEqual(aware.tzinfo.tzname(None), 'UTC')
def test_format_tz_aware(self):
    """Formatting a tz-aware datetime with usec=True must round-trip exactly."""
    dt1 = date.add_utc_tz(datetime.datetime.utcnow())
    dt2 = isotime.parse(isotime.format(dt1, usec=True))
    self.assertEqual(dt2, dt1)
def test_format_tz_naive(self):
    """A naive datetime must round-trip to its UTC-aware equivalent."""
    naive = datetime.datetime.utcnow()
    round_tripped = isotime.parse(isotime.format(naive, usec=True))
    self.assertEqual(round_tripped, date.add_utc_tz(naive))