def test_refresh(self):
    """Refreshing an app's IARC cert must call SearchCerts with the store
    credentials and update descriptors, interactives and rating class."""
    setup_mock_response('SearchCerts')
    cert_uuid = UUID('adb3261b-c657-4fd2-a057-bc9f85310b80')
    app = app_factory()
    IARCCert.objects.create(app=app, cert_id=cert_uuid.get_hex())

    refresh(app)

    eq_(len(responses.calls), 1)
    request = responses.calls[0].request
    eq_(request.headers.get('StorePassword'),
        settings.IARC_V2_STORE_PASSWORD)
    eq_(request.headers.get('StoreID'), settings.IARC_V2_STORE_ID)
    eq_(json.loads(request.body), {'CertID': unicode(cert_uuid)})

    # Compare with mock data. Force reload using .objects.get in order to
    # properly reset the related objects caching.
    app = Webapp.objects.get(pk=app.pk)
    self.assertSetEqual(
        app.rating_descriptors.to_keys(),
        ['has_classind_lang',
         'has_generic_parental_guidance_recommended',
         'has_pegi_parental_guidance_recommended'])
    self.assertSetEqual(
        app.rating_interactives.to_keys(),
        ['has_shares_location', 'has_digital_purchases',
         'has_users_interact'])
    eq_(app.content_ratings.all()[0].get_rating_class(), CLASSIND_12)
def device_info(self):
    """Return a dict mapping filesystem UUID -> DeviceInfo for every btrfs
    filesystem blkid reports, recording label frequencies as a side effect.
    """
    info = {}
    label_counts = Counter()
    cmd = 'blkid -s LABEL -s UUID -t TYPE=btrfs'.split()
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    for line in proc.stdout:
        dev, label, fs_uuid = BLKID_RE.match(line).groups()
        fs_uuid = UUID(hex=fs_uuid.decode('ascii'))
        dev = fsdecode(dev)
        if label is not None:
            try:
                label = label.decode('ascii')
            except UnicodeDecodeError:
                # Don't try to guess.
                pass
        if fs_uuid in info:
            # btrfs raid: several devices share a single filesystem UUID.
            assert info[fs_uuid].label == label
            info[fs_uuid].devices.append(dev)
        else:
            label_counts[label] += 1
            info[fs_uuid] = DeviceInfo(label, [dev])
    rc = proc.wait()
    # 2 means there is no btrfs filesystem
    if rc not in (0, 2):
        raise subprocess.CalledProcessError(rc, cmd)
    self._label_occurs = dict(label_counts)
    return info
def __init__(self, id, *a, **k): if isinstance(id, UUID): id = str(id) try: UUID.__init__(self, id, *a, **k) except Exception, e: print "error initializing contact id: ", a, k raise e
def uuid4_to_bytes(uuid4): """Convert a given uuid4 to the right number of bytes. - If it is a fully random uuid4, return the 16 bytes - If it looks like a manually build uuid4, remove the fake bytes """ bytes_array = UUID(uuid4).bytes is_built_uuid4 = bytes_array[6] in (b"\x40", ord(b"\x40")) and bytes_array[8] in (b"\x80", ord(b"\x80")) if is_built_uuid4: bytes_array = b"".join((bytes_array[:6], bytes_array[7:8], bytes_array[9:16])) return bytes_array.rstrip(b"\x00").ljust(9, b"\x00")
class key(object):
    """Thin wrapper around uuid.UUID with string-based equality."""

    def __init__(self, uuid=NULL_KEY):
        if not isinstance(uuid, UUID):
            self.uuid = UUID(uuid)
        else:
            self.uuid = uuid

    def __str__(self):
        # urn is 'urn:uuid:xxxxxxxx-...'; the third colon field is the hex.
        return self.uuid.urn.split(':')[2]

    def __repr__(self):
        return "key('%s')" % self.__str__()

    def __hash__(self):
        return self.uuid.__hash__()

    def __eq__(self, other):
        return str(self) == str(other)

    def __ne__(self, other):
        return not (self == other)

    def __nonzero__(self):
        return (self.uuid.int != 0)

    @staticmethod
    def random():
        """Return a new key wrapping a freshly generated random UUID.

        Fix: uuid.UUID has no ``uuid4`` attribute -- ``UUID.uuid4()``
        raised AttributeError.  The module-level ``uuid.uuid4()``
        function must be used instead.
        """
        from uuid import uuid4
        return key(uuid4())
def get_data_by_hex_uuid_or_404(model, hex_uuid, kind=''):
    """Get instance data by uuid and kind. Raise 404 Not Found if there
    is no data. This requires model has a `bin_uuid` column.

    :param model: a string, model name in rio.models
    :param hex_uuid: a hex uuid string in 24-bytes human-readable
        representation.
    :return: a dict.
    """
    # NOTE(review): stdlib uuid.UUID exposes ``bytes`` as a property, not a
    # ``get_bytes()`` method -- presumably a compat UUID class is in play;
    # confirm against the module's imports.
    bin_uuid = UUID(hex_uuid).get_bytes()
    instance = get_instance_by_bin_uuid(model, bin_uuid)
    if instance:
        return ins2dict(instance, kind)
    return abort(404)
def _is_cluster_volume(cluster_id, ebs_volume):
    """
    Helper function to check if given volume belongs to given cluster.

    :param UUID cluster_id: UUID of Flocker cluster to check for
        membership.
    :param boto3.resources.factory.ec2.Volume ebs_volume: EBS volume to
        check for input cluster membership.

    :return bool: True if input volume belongs to input Flocker cluster.
        False otherwise.
    """
    tags = ebs_volume.tags
    if tags is None:
        # Untagged volumes cannot belong to any cluster.
        return False
    cluster_tag_values = [
        tag["Value"] for tag in tags if tag["Key"] == CLUSTER_ID_LABEL
    ]
    if not cluster_tag_values:
        return False
    return UUID(cluster_tag_values.pop()) == cluster_id
def __get__(self, instance, owner):
    """Generates a new UUID if this attribute is None."""
    # NOTE(review): stdlib uuid.UUID has no ``search`` -- this presumably
    # resolves the descriptor's attribute name on a custom UUID class;
    # confirm against that class's definition.
    if self.name is None:
        self.name = UUID.search(instance, owner, self)
    if self.name is None:
        raise AttributeError(
            "Cannot find any property named %s in: %s" % (self.name, owner))
    value = instance.__dict__.get(self.name, None)
    if value is None:
        # Lazily create and store a fresh random UUID on first access.
        value = uuid.uuid4()
        self.__set__(instance, value)
    return value
def clean_cert_id(self):
    """Validate the submitted IARC certificate id.

    Returns None for the local-development magic value '0', otherwise the
    canonical dash-separated string form of the UUID.  Raises
    ValidationError for unparsable values or certs already attached to a
    different app.
    """
    cert_id = self.cleaned_data['cert_id']
    if settings.DEBUG and cert_id == '0':
        # For local developement without IARC server, accept '0' as a
        # special value which will generate a rating locally.
        return None
    try:
        value = UUID(cert_id)
    except ValueError as e:
        raise forms.ValidationError(e.message)
    # Check for existence using the hexadecimal value without a separator.
    duplicates = IARCCert.objects.filter(
        cert_id=value.get_hex()).exclude(app=self.app)
    if duplicates.exists():
        raise forms.ValidationError(
            _('This IARC certificate is already being used for another '
              'app. Please create a new IARC Ratings Certificate.'))
    # Return as string separated by dashes.
    return unicode(value)
def set_guid(self, value):
    """Sets the GUID for the file. It must be a :class:`uuid.UUID`
    instance.

    The 16 little-endian bytes are split across the four project-id
    header fields.

    Fixes: the docstring was previously a dead string expression at the
    *end* of the function (invisible to help()/introspection), and
    ``UUID.get_bytes_le(value)`` called a method that does not exist on
    the stdlib :class:`uuid.UUID` -- the ``bytes_le`` property is the
    documented accessor.
    """
    raw_bytes = value.bytes_le
    self.reader.set_raw_header_property("proj_id_1", raw_bytes[0:4])
    self.reader.set_raw_header_property("proj_id_2", raw_bytes[4:6])
    self.reader.set_raw_header_property("proj_id_3", raw_bytes[6:8])
    self.reader.set_raw_header_property("proj_id_4", raw_bytes[8:16])
def do_GET(self):
    """Serve search results for the orgId query parameter as UTF-8 text."""
    try:
        events = self.server.events
        if events and events.endupdate and not events.endupdate.isSet():
            # A long-running update is in progress; ask clients to retry.
            self.send_error(503, "Waiting for long run update...")
            return
        # Renamed from "id" to avoid shadowing the builtin.
        match = search("orgId=([^&]*)", self.path)
        if not match:
            self.send_error(400, "Bad request: %s" % self.path)
            return
        org_id = UUID(match.group(1))
        self.send_response(200)
        self.send_header('Content-type', 'text/plain')
        self.send_header('Content-Encoding', 'utf-8')
        self.end_headers()
        for record in self.server.idx.search(org_id.int):
            self.wfile.write(record.encode('utf-8'))
    except:
        # Report the failure to the client, then let it propagate.
        self.send_error(500, "Unexpected error: %s" % str(exc_info()[1]))
        raise
def device_info(self):
    """Return a dict mapping filesystem UUID -> DeviceInfo for every btrfs
    filesystem blkid reports, recording label frequencies as a side effect.

    Fix: blkid exits with status 2 when no btrfs filesystem exists, which
    made ``check_output`` raise ``CalledProcessError``; treat that case as
    "no devices", consistent with the Popen-based variant of this method.
    """
    di = {}
    lbls = Counter()
    cmd = 'blkid -s LABEL -s UUID -t TYPE=btrfs'.split()
    try:
        output = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as err:
        # 2 means there is no btrfs filesystem.
        if err.returncode != 2:
            raise
        output = err.output or b''
    for line in output.splitlines():
        dev, label, uuid = BLKID_RE.match(line).groups()
        uuid = UUID(hex=uuid.decode('ascii'))
        dev = fsdecode(dev)
        if label is not None:
            try:
                label = label.decode('ascii')
            except UnicodeDecodeError:
                # Don't try to guess.
                pass
        if uuid in di:
            # btrfs raid
            assert di[uuid].label == label
            di[uuid].devices.append(dev)
        else:
            lbls[label] += 1
            di[uuid] = DeviceInfo(label, [dev])
    self._label_occurs = dict(lbls)
    return di
def task_id():
    """Return a deterministic placeholder task id (the UUID whose
    integer value is 1)."""
    return UUID('00000000-0000-0000-0000-000000000001')
def _get_correlation_id(self):
    """Return the message correlation id, converting a Java UUID (JUUID)
    into a Python uuid.UUID when necessary."""
    correlation_id = self.impl.getCorrelationId()
    if isinstance(correlation_id, JUUID):
        correlation_id = UUID(correlation_id.toString())
    return correlation_id
def random():
    """Return a new key wrapping a freshly generated random UUID.

    Fix: uuid.UUID has no ``uuid4`` attribute, so ``UUID.uuid4()`` raised
    AttributeError; the module-level ``uuid.uuid4()`` function is the
    correct generator.
    """
    from uuid import uuid4
    return key(uuid4())
) from wacryptolib.encryption import SUPPORTED_ENCRYPTION_ALGOS from wacryptolib.escrow import ( EscrowApi, generate_asymmetric_keypair_for_storage, generate_free_keypair_for_least_provisioned_key_type, ) from wacryptolib.exceptions import DecryptionError, ConfigurationError from wacryptolib.jsonrpc_client import JsonRpcProxy, status_slugs_response_error_handler from wacryptolib.key_generation import generate_asymmetric_keypair from wacryptolib.key_storage import DummyKeyStorage, FilesystemKeyStorage, FilesystemKeyStoragePool, DummyKeyStoragePool from wacryptolib.utilities import load_from_json_bytes, dump_to_json_bytes, generate_uuid0 from wacryptolib.utilities import dump_to_json_file, load_from_json_file ENFORCED_UID1 = UUID("0e8e861e-f0f7-e54b-18ea-34798d5daaaa") ENFORCED_UID2 = UUID("65dbbe4f-0bd5-4083-a274-3c76efeebbbb") VOID_CONTAINER_CONF_REGARDING_DATA_ENCRYPTION_STRATA = dict(data_encryption_strata=[]) # Forbidden VOID_CONTAINER_CONF_REGARDING_KEY_ENCRYPTION_STRATA = dict( # Forbidden data_encryption_strata=[ dict( data_encryption_algo="AES_CBC", key_encryption_strata=[], data_signatures=[ dict(message_digest_algo="SHA256", signature_algo="DSA_DSS", signature_escrow=LOCAL_ESCROW_MARKER) ], ) ] )
def test_encoding(self):
    """A known UUID must encode to its expected ShortUUID string."""
    shortener = ShortUUID()
    source_uuid = UUID("{3b1f8b40-222c-4a6e-b77e-779d5a94e21c}")
    self.assertEqual(shortener.encode(source_uuid), "CXc85b4rqinB7s5J52TRYb")
def request_possible_routes_for_cargo(self, tracking_id: str) -> List[dict]:
    """Return candidate routes for the cargo as itinerary dicts."""
    candidates = self.booking_application.request_possible_routes_for_cargo(
        UUID(tracking_id))
    return [self.dict_from_itinerary(candidate) for candidate in candidates]
def from_string(cls, value: str) -> SubscriptionIdentifier:
    """Parse a "<partition>/<uuid>" string into a SubscriptionIdentifier."""
    partition_part, uuid_part = value.split("/")
    return cls(PartitionId(int(partition_part)), UUID(uuid_part))
('int_check', 1, 1), ('int_check', 1.9, 1), ('int_check', '1', 1), ('int_check', '1.9', ValidationError), ('int_check', b'1', 1), ('int_check', 12, 12), ('int_check', '12', 12), ('int_check', b'12', 12), ('float_check', 1, 1.0), ('float_check', 1.0, 1.0), ('float_check', '1.0', 1.0), ('float_check', '1', 1.0), ('float_check', b'1.0', 1.0), ('float_check', b'1', 1.0), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8'), UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', b'ebcdab58-6eb8-46fb-a190-d07a33e9eac8', UUID('ebcdab58-6eb8-46fb-a190-d07a33e9eac8')), ('uuid_check', 'ebcdab58-6eb8-46fb-a190-', ValidationError), ('uuid_check', 123, ValidationError), ('decimal_check', 42.24, Decimal('42.24')), ('decimal_check', '42.24', Decimal('42.24')), ('decimal_check', b'42.24', Decimal('42.24')), ('decimal_check', ' 42.24 ', Decimal('42.24')), ('decimal_check', Decimal('42.24'), Decimal('42.24')), ('decimal_check', 'not a valid decimal', ValidationError), ('decimal_check', 'NaN', ValidationError), ]) def test_default_validators(field, value, result):
(1.23, float, 1.23), ("1.23", float, 1.23), (True, bool, True), ("1", bool, True), ("", bool, False), (None, int, None), ("a", str, "a"), ("YWJjAA==", bytes, b"abc\0"), (1, str, "1"), ("1.23", Decimal, Decimal("1.23")), ("(1+2j)", complex, complex("(1+2j)")), ({"a": 1}, frozendict, frozendict(a=1)), ( "abf4ddeb-fb9c-44c5-b865-012ba7787469", UUID, UUID("abf4ddeb-fb9c-44c5-b865-012ba7787469"), ), ({"a": 1, "b": "2"}, SimpleNamedTuple, SimpleNamedTuple(a="1", b=2)), ({"a": 1, "b": "2"}, SimpleDataclass, SimpleDataclass(a="1", b=2)), ({"a": 1, "b": "2"}, DataclassWithMethod, DataclassWithMethod(a="1", b=2)), ( {"val": {"a": 1, "b": "2"}}, ComplexNamedTuple, ComplexNamedTuple(val=SimpleNamedTuple(a="1", b=2)), ), ( {"val": {"a": 1, "b": "2"}}, ComplexDataclass, ComplexDataclass(val=SimpleDataclass(a="1", b=2)), ), (
import time from django.test import TestCase, Client, tag from django.contrib.staticfiles.testing import StaticLiveServerTestCase from selenium import webdriver from uuid import UUID from users.models import User """ --------------------------------------------------- Helper functions --------------------------------------------------- """ # Methods for randomly generating fields for User instances random_uuid = lambda rd: UUID(int=rd.getrandbits(128)).hex random_username = lambda rd: f"meepy-{random_uuid(rd)[:10]}" random_email = lambda rd: f"meepy-{random_uuid(rd)[:10]}@colorado.edu" random_password = lambda rd: random_uuid(rd) random_docker_image = ( lambda rd: "https://hub.docker.com/r/meepy/{random_uuid(rd)[:10]}:{random_uuid(rd)[:10]}" ) # Generate all of the data for a new user, consisting of a username, # an email, and a password. def create_random_user(rd): username = random_username(rd) email = random_email(rd) password = random_password(rd)
def delete_subscription(*, dataset: Dataset, partition: int, key: str):
    """Delete the subscription identified by *key* in the given dataset
    partition and acknowledge with a 202."""
    deleter = SubscriptionDeleter(dataset, PartitionId(partition))
    deleter.delete(UUID(key))
    return "ok", 202, {"Content-Type": "text/plain"}
from uuid import UUID
from types import SimpleNamespace
import datetime

from odc.dscache.tools import group_by_nothing


def test_group_by_nothing():
    """group_by_nothing should accept datasets across CRSes and an optional
    time tolerance without grouping anything together."""
    datasets = [
        SimpleNamespace(
            id=UUID(int=0x1_000_000 + i),
            crs="epsg:3857",
            center_time=datetime.datetime(2020, 1, i),
        )
        for i in range(1, 4)
    ]
    datasets.append(
        SimpleNamespace(
            id=UUID(int=0x10),
            crs="epsg:3577",
            center_time=datetime.datetime(2020, 1, 1),
        )
    )
    print(datasets)

    grouped = group_by_nothing(datasets)
    print(grouped)

    grouped = group_by_nothing(datasets, datetime.timedelta(seconds=-200))
    print(grouped)

    for dataset_uuid in grouped.uuid.data:
        print(dataset_uuid)
class TestOvnNorth(object): MAC_ADDRESS = '01:00:00:00:00:11' DEVICE_ID = 'device-id-123456' NIC_NAME = 'port_name' NETWORK_NAME = 'test_net' NETWORK_ID10 = UUID(int=10) NETWORK_ID11 = UUID(int=11) NETWORK_NAME10 = 'name10' NETWORK_NAME11 = 'name11' PORT_ID01 = UUID(int=1) PORT_ID02 = UUID(int=2) PORT_NAME01 = 'port1' PORT_NAME02 = 'port2' PORT_1 = OvnPortRow(PORT_ID01, addresses=MAC_ADDRESS, external_ids={ PortMapper.OVN_NIC_NAME: PORT_NAME01, PortMapper.OVN_DEVICE_ID: str(PORT_ID01), PortMapper.OVN_DEVICE_OWNER: PortMapper.DEVICE_OWNER_OVIRT, }) PORT_2 = OvnPortRow(PORT_ID02, addresses=MAC_ADDRESS, external_ids={ PortMapper.OVN_NIC_NAME: PORT_NAME02, PortMapper.OVN_DEVICE_ID: str(PORT_ID02), PortMapper.OVN_DEVICE_OWNER: PortMapper.DEVICE_OWNER_OVIRT, }) SUBNET_ID101 = UUID(int=101) SUBNET_ID102 = UUID(int=102) SUBNET_101 = OvnSubnetRow(SUBNET_ID101, network_id=str(NETWORK_ID10)) SUBNET_102 = OvnSubnetRow(SUBNET_ID102) NETWORK_10 = OvnNetworkRow(NETWORK_ID10, NETWORK_NAME10) NETWORK_11 = OvnNetworkRow(NETWORK_ID11, NETWORK_NAME11, ports=[PORT_1, PORT_2]) ROUTER_ID20 = UUID(int=20) ROUTER_NAME20 = 'router20' ROUTER_20 = OvnRouterRow(ROUTER_ID20, ROUTER_NAME20) ports = [PORT_1, PORT_2] networks = [NETWORK_10, NETWORK_11] subnets = [SUBNET_101, SUBNET_102] def assert_networks_equal(self, actual, network_row): assert actual['id'] == str(network_row.uuid) assert actual['name'] == network_row.name assert actual['tenant_id'] == tenant_id() def assert_port_equal(self, actual, port_row, network_id): assert actual['id'] == str(port_row.uuid) assert actual['network_id'] == network_id assert actual['name'] == port_row.external_ids[PortMapper.OVN_NIC_NAME] device_id = port_row.external_ids[PortMapper.OVN_DEVICE_ID] assert actual['device_id'] == device_id assert actual['security_groups'] == [] assert actual['port_security_enabled'] is False assert actual['tenant_id'] == tenant_id() assert actual['fixed_ips'] == [] 
@mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsListCommand', autospec=False) def test_get_networks(self, mock_ls_list, mock_connection): mock_ls_list.return_value.execute.return_value = TestOvnNorth.networks ovn_north = OvnNorth() result = ovn_north.list_networks() assert len(result) == 2 self.assert_networks_equal(result[0], TestOvnNorth.NETWORK_10) self.assert_networks_equal(result[1], TestOvnNorth.NETWORK_11) assert mock_ls_list.call_count == 1 assert mock_ls_list.return_value.execute.call_count == 1 @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand', autospec=False) def test_get_network(self, mock_ls_get, mock_connection): mock_ls_get.return_value.execute.return_value = ( TestOvnNorth.NETWORK_10) ovn_north = OvnNorth() result = ovn_north.get_network(str(TestOvnNorth.NETWORK_ID10)) self.assert_networks_equal(result, TestOvnNorth.NETWORK_10) assert mock_ls_get.call_count == 1 assert mock_ls_get.return_value.execute.call_count == 1 assert mock_ls_get.mock_calls[0] == mock.call( ovn_north.idl, str(TestOvnNorth.NETWORK_ID10)) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsAddCommand', autospec=False) def test_add_network(self, mock_add_command, mock_connection): mock_add_command.return_value.execute.return_value = ( TestOvnNorth.NETWORK_10) ovn_north = OvnNorth() rest_data = { NetworkMapper.REST_NETWORK_NAME: TestOvnNorth.NETWORK_NAME10 } result = ovn_north.add_network(rest_data) self.assert_networks_equal(result, TestOvnNorth.NETWORK_10) assert mock_add_command.call_count == 1 assert mock_add_command.mock_calls[0] == mock.call( ovn_north.idl, TestOvnNorth.NETWORK_NAME10, False) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: OvnNetworkRow(TestOvnNorth.NETWORK_ID10, TestOvnNorth .NETWORK_NAME10)) @mock.patch('ovsdbapp.backend.ovs_idl.command.DbSetCommand', autospec=False) def test_update_network(self, mock_set_command, mock_connection): ovn_north = OvnNorth() rest_data = { 
NetworkMapper.REST_NETWORK_NAME: TestOvnNorth.NETWORK_NAME10 } result = ovn_north.update_network(rest_data, TestOvnNorth.NETWORK_ID10) self.assert_networks_equal(result, TestOvnNorth.NETWORK_10) assert mock_set_command.call_count == 1 assert mock_set_command.mock_calls[0] == mock.call( ovn_north.idl, OvnNorth.TABLE_LS, TestOvnNorth.NETWORK_ID10, (NetworkMapper.REST_NETWORK_NAME, TestOvnNorth.NETWORK_NAME10)) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: TestOvnNorth.NETWORK_10) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsDelCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: []) def test_delete_network(self, mock_del_command, mock_connection): ovn_north = OvnNorth() ovn_north.delete_network(TestOvnNorth.NETWORK_ID10) assert mock_del_command.call_count == 1 expected_del_call = mock.call(ovn_north.idl, TestOvnNorth.NETWORK_ID10, False) assert mock_del_command.mock_calls[0] == expected_del_call @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LsListCommand.execute', lambda x: TestOvnNorth.networks) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LspGetCommand.execute', lambda x: TestOvnNorth.PORT_1) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 
'execute', lambda x: []) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LspAddCommand', autospec=False) @mock.patch('ovsdbapp.backend.ovs_idl.command.DbSetCommand', autospec=False) def test_add_port(self, mock_db_set, mock_add_command, mock_connection): mock_add_command.return_value.execute.return_value = ( TestOvnNorth.PORT_1) ovn_north = OvnNorth() rest_data = { PortMapper.REST_PORT_NAME: TestOvnNorth.PORT_NAME01, PortMapper.REST_PORT_NETWORK_ID: str(TestOvnNorth.NETWORK_ID10), PortMapper.REST_PORT_DEVICE_ID: TestOvnNorth.DEVICE_ID, PortMapper.REST_PORT_DEVICE_OWNER: PortMapper.DEVICE_OWNER_OVIRT, PortMapper.REST_PORT_ADMIN_STATE_UP: True, PortMapper.REST_PORT_MAC_ADDRESS: TestOvnNorth.MAC_ADDRESS } result = ovn_north.add_port(rest_data) # ID11 because this network has the port in TestOvnNorth.networks self.assert_port_equal(result, TestOvnNorth.PORT_1, str(TestOvnNorth.NETWORK_ID11)) assert mock_add_command.call_count == 1 mock_add_command.assert_called_with(ovn_north.idl, str(TestOvnNorth.NETWORK_ID10), TestOvnNorth.PORT_NAME01, None, None, False) assert mock_db_set.call_count == 3 assert mock_db_set.mock_calls[0] == mock.call( ovn_north.idl, OvnNorth.TABLE_LSP, str(TestOvnNorth.PORT_ID01), (OvnNorth.ROW_LSP_NAME, str(TestOvnNorth.PORT_ID01))) assert mock_db_set.mock_calls[2] == mock.call( ovn_north.idl, OvnNorth.TABLE_LSP, TestOvnNorth.PORT_ID01, (OvnNorth.ROW_LSP_EXTERNAL_IDS, { PortMapper.OVN_DEVICE_ID: TestOvnNorth.DEVICE_ID }), (OvnNorth.ROW_LSP_EXTERNAL_IDS, { PortMapper.OVN_NIC_NAME: TestOvnNorth.PORT_NAME01 }), (OvnNorth.ROW_LSP_EXTERNAL_IDS, { PortMapper.OVN_DEVICE_OWNER: PortMapper.DEVICE_OWNER_OVIRT }), (OvnNorth.ROW_LSP_ENABLED, True)) assert mock_db_set.mock_calls[4] == mock.call( ovn_north.idl, OvnNorth.TABLE_LSP, TestOvnNorth.PORT_ID01, (OvnNorth.ROW_LSP_ADDRESSES, [TestOvnNorth.MAC_ADDRESS]), ) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LsListCommand.execute', lambda x: TestOvnNorth.networks) @mock.patch( 
'ovsdbapp.schema.ovn_northbound.commands.LspListCommand.execute', lambda x: TestOvnNorth.ports) def test_list_ports(self, mock_connection): ovn_north = OvnNorth() ports = ovn_north.list_ports() assert len(ports) == 2 self.assert_port_equal(ports[0], TestOvnNorth.PORT_1, str(TestOvnNorth.NETWORK_ID11)) self.assert_port_equal(ports[1], TestOvnNorth.PORT_2, str(TestOvnNorth.NETWORK_ID11)) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LspDelCommand', autospec=False) def test_delete_port(self, mock_del_command, mock_connection): ovn_north = OvnNorth() ovn_north.delete_port(TestOvnNorth.PORT_ID01) assert mock_del_command.call_count == 1 expected_del_call = mock.call(ovn_north.idl, TestOvnNorth.PORT_ID01, None, False) assert mock_del_command.mock_calls[0] == expected_del_call @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: TestOvnNorth.subnets) def test_list_subnets(self, mock_connection): ovn_north = OvnNorth() result = ovn_north.list_subnets() assert len(result) == 2 assert result[0]['id'] == str(TestOvnNorth.SUBNET_ID101) assert result[0]['network_id'] == str(TestOvnNorth.NETWORK_ID10) assert result[0]['tenant_id'] == tenant_id() @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsGetCommand.' 'execute', lambda x: TestOvnNorth.SUBNET_101) def test_get_subnet(self, mock_connection): ovn_north = OvnNorth() result = ovn_north.get_subnet(TestOvnNorth.SUBNET_ID101) assert result['id'] == str(TestOvnNorth.SUBNET_ID101) assert result['network_id'] == str(TestOvnNorth.NETWORK_ID10) gateway_ip = TestOvnNorth.SUBNET_101.options['router'] assert result['gateway_ip'] == gateway_ip @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsDelCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsGetCommand.' 
'execute', lambda x: TestOvnNorth.SUBNET_101) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: TestOvnNorth.NETWORK_10) def test_delete_subnet(self, mock_del_command, mock_connection): ovn_north = OvnNorth() ovn_north.delete_subnet(TestOvnNorth.SUBNET_ID101) assert mock_del_command.call_count == 1 expected_del_call = mock.call( ovn_north.idl, TestOvnNorth.SUBNET_ID101, ) assert mock_del_command.mock_calls[0] == expected_del_call @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: []) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: TestOvnNorth.NETWORK_10) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsGetCommand.' 'execute', lambda x: TestOvnNorth.SUBNET_102) @mock.patch('ovsdbapp.backend.ovs_idl.command.DbSetCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsAddCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsSetOptionsCommand', autospec=False) def test_add_subnet(self, mock_setoptions_command, mock_add_command, mock_dbset_command, mock_connection): add_execute = mock_add_command.return_value.execute add_execute.return_value = TestOvnNorth.SUBNET_102 subnet_cidr = '1.1.1.0/24' ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_CIDR: subnet_cidr, SubnetMapper.REST_SUBNET_NETWORK_ID: str(TestOvnNorth.NETWORK_ID10), SubnetMapper.REST_SUBNET_DNS_NAMESERVERS: ['1.1.1.1'], SubnetMapper.REST_SUBNET_GATEWAY_IP: '1.1.1.0', } result = ovn_north.add_subnet(rest_data) assert result['id'] == str(TestOvnNorth.SUBNET_ID102) assert mock_dbset_command.call_count == 1 assert mock_add_command.call_count == 1 assert mock_setoptions_command.call_count == 1 expected_dbset_call = mock.call( ovn_north.idl, OvnNorth.TABLE_LS, str(TestOvnNorth.NETWORK_ID10), (OvnNorth.ROW_LS_OTHER_CONFIG, { 
NetworkMapper.OVN_SUBNET: subnet_cidr }), ) assert mock_dbset_command.mock_calls[0] == expected_dbset_call expected_add_call = mock.call(ovn_north.idl, subnet_cidr, ovirt_name='subnet_name', ovirt_network_id=str( TestOvnNorth.NETWORK_ID10)) assert mock_add_command.mock_calls[0] == expected_add_call expected_options_call = mock.call(ovn_north.idl, TestOvnNorth.SUBNET_ID102, dns_server='1.1.1.1', lease_time=dhcp_lease_time(), router='1.1.1.0', server_id='1.1.1.0', server_mac=dhcp_server_mac(), mtu=dhcp_mtu()) assert mock_setoptions_command.mock_calls[0] == expected_options_call @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: []) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: TestOvnNorth.NETWORK_10) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsGetCommand.' 'execute', lambda x: TestOvnNorth.SUBNET_102) @mock.patch('ovsdbapp.backend.ovs_idl.command.DbSetCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsAddCommand', autospec=False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsSetOptionsCommand', autospec=False) def test_add_subnet_no_dns(self, mock_setoptions_command, mock_add_command, mock_dbset_command, mock_connection): add_execute = mock_add_command.return_value.execute add_execute.return_value = TestOvnNorth.SUBNET_102 subnet_cidr = '1.1.1.0/24' ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_CIDR: subnet_cidr, SubnetMapper.REST_SUBNET_NETWORK_ID: str(TestOvnNorth.NETWORK_ID10), SubnetMapper.REST_SUBNET_DNS_NAMESERVERS: [], SubnetMapper.REST_SUBNET_GATEWAY_IP: '1.1.1.0', } result = ovn_north.add_subnet(rest_data) assert result['id'] == str(TestOvnNorth.SUBNET_ID102) assert mock_dbset_command.call_count == 1 assert mock_add_command.call_count == 1 assert mock_setoptions_command.call_count == 1 """ TODO: This test causes Jenkins to get 
stuck. Commenting out until the issue is solved. @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LsListCommand.execute', lambda x: TestOvnNorth.networks ) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LspGetCommand.execute', lambda x: TestOvnNorth.PORT_1 ) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: [] ) @mock.patch( 'ovsdbapp.backend.ovs_idl.command.DbSetCommand', autospec=False ) def test_update_subnet(self, mock_db_set, mock_connection): ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_ENABLE_DHCP: True, SubnetMapper.REST_SUBNET_NETWORK_ID: TestOvnNorth.NETWORK_ID10, SubnetMapper.REST_SUBNET_DNS_NAMESERVERS: ['8.8.8.8'], SubnetMapper.REST_SUBNET_GATEWAY_IP: '172.16.0.254', SubnetMapper.REST_SUBNET_IP_VERSION: 4, SubnetMapper.REST_SUBNET_CIDR: '172.16.0.0/24' } ovn_north.update_subnet(rest_data, TestOvnNorth.SUBNET_ID101) assert mock_db_set.call_count == 2 assert mock_db_set.mock_calls[0] == mock.call( ovn_north.idl, OvnNorth.TABLE_LS, TestOvnNorth.NETWORK_ID10, ( OvnNorth.ROW_LS_OTHER_CONFIG, {NetworkMapper.OVN_SUBNET: rest_data[SubnetMapper.REST_SUBNET_CIDR]} ) ) assert mock_db_set.mock_calls[2] == mock.call( ovn_north.idl, OvnNorth.TABLE_DHCP_Options, TestOvnNorth.SUBNET_ID101, ( OvnNorth.ROW_DHCP_OPTIONS, {SubnetMapper.OVN_DHCP_SERVER_ID: rest_data[SubnetMapper.REST_SUBNET_CIDR].split('/', 1)[0]} ), ( OvnNorth.ROW_DHCP_CIDR, rest_data[SubnetMapper.REST_SUBNET_CIDR] ), ( OvnNorth.ROW_DHCP_EXTERNAL_IDS, {SubnetMapper.OVN_NAME: rest_data[SubnetMapper.REST_SUBNET_NAME]} ), ( OvnNorth.ROW_DHCP_EXTERNAL_IDS, {SubnetMapper.OVN_NETWORK_ID: rest_data[SubnetMapper.REST_SUBNET_NETWORK_ID]} ), ( OvnNorth.ROW_DHCP_OPTIONS, {SubnetMapper.OVN_GATEWAY: rest_data[SubnetMapper.REST_SUBNET_GATEWAY_IP]} ), ( OvnNorth.ROW_DHCP_OPTIONS, {SubnetMapper.OVN_DNS_SERVER: rest_data[SubnetMapper.REST_SUBNET_DNS_NAMESERVERS][0]} ), ( OvnNorth.ROW_DHCP_OPTIONS, 
{SubnetMapper.OVN_DHCP_LEASE_TIME: dhcp_lease_time()} ), ( OvnNorth.ROW_DHCP_OPTIONS, {SubnetMapper.OVN_DHCP_SERVER_MAC: dhcp_server_mac()} ) ) """ @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: OvnNetworkRow(TestOvnNorth.NETWORK_ID10, TestOvnNorth .NETWORK_NAME10)) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.DhcpOptionsListCommand.' 'execute', lambda x: TestOvnNorth.subnets) def test_subnet_add_duplicate_network(self, mock_connection): ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_CIDR: '1.1.1.0/24', SubnetMapper.REST_SUBNET_NETWORK_ID: str(TestOvnNorth.NETWORK_ID10), SubnetMapper.REST_SUBNET_GATEWAY_IP: '1.1.1.0', } with pytest.raises(SubnetConfigError): ovn_north.add_subnet(rest_data) def test_subnet_dhcp_enabled_false(self, mock_connection): ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_CIDR: '1.1.1.0/24', SubnetMapper.REST_SUBNET_NETWORK_ID: '', SubnetMapper.REST_SUBNET_DNS_NAMESERVERS: ['1.1.1.1'], SubnetMapper.REST_SUBNET_GATEWAY_IP: '1.1.1.0', SubnetMapper.REST_SUBNET_ENABLE_DHCP: False } with pytest.raises(UnsupportedDataValueError): ovn_north.add_subnet(rest_data) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: None) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LsGetCommand.execute', lambda x: TestOvnNorth.NETWORK_10) def test_subnet_add_invalid_network(self, mock_connection): ovn_north = OvnNorth() rest_data = { SubnetMapper.REST_SUBNET_NAME: 'subnet_name', SubnetMapper.REST_SUBNET_CIDR: '1.1.1.0/24', SubnetMapper.REST_SUBNET_NETWORK_ID: 7, SubnetMapper.REST_SUBNET_DNS_NAMESERVERS: ['1.1.1.1'], SubnetMapper.REST_SUBNET_GATEWAY_IP: '1.1.1.0', } with pytest.raises(SubnetConfigError): ovn_north.add_subnet(rest_data) def test_port_admin_state_up_none_enabled_none(self, mock_connection): self._port_admin_state(mock_connection, None, None, 
False) def test_port_admin_state_up_true_enabled_none(self, mock_connection): self._port_admin_state(mock_connection, [True], None, True) def test_port_admin_state_up_false_enabled_none(self, mock_connection): self._port_admin_state(mock_connection, [False], None, False) def test_port_admin_state_up_none_enabled_true(self, mock_connection): self._port_admin_state(mock_connection, None, [True], False) def test_port_admin_state_up_true_enabled_true(self, mock_connection): self._port_admin_state(mock_connection, [True], [True], True) def test_port_admin_state_up_false_enabled_true(self, mock_connection): self._port_admin_state(mock_connection, [False], [True], False) @mock.patch( 'ovsdbapp.schema.ovn_northbound.commands.LsListCommand', ) @mock.patch('ovsdbapp.schema.ovn_northbound.commands.LspGetCommand') def _port_admin_state(self, mock_connection, is_up, is_enabled, result, mock_lsp_get, mock_ls_list): port_row = OvnPortRow(TestOvnNorth.PORT_ID01, external_ids={ PortMapper.OVN_NIC_NAME: TestOvnNorth.PORT_NAME01, PortMapper.OVN_DEVICE_ID: str(TestOvnNorth.PORT_ID01), PortMapper.OVN_DEVICE_OWNER: PortMapper.DEVICE_OWNER_OVIRT, }) port_row.up = is_up port_row.enabled = is_enabled mock_lsp_get.return_value.execute.return_value = port_row mock_ls_list.return_value.execute.return_value = [ OvnNetworkRow(TestOvnNorth.NETWORK_ID11, ports=[port_row]) ] ovn_north = OvnNorth() port = ovn_north.get_port(TestOvnNorth.PORT_ID01) assert port[PortMapper.REST_PORT_ADMIN_STATE_UP] == result @mock.patch( 'ovsdbapp.schema.ovn_northbound.impl_idl.OvnNbApiIdlImpl.lookup', ) def test_get_router(self, mock_lookup, mock_connection): mock_lookup.return_value = TestOvnNorth.ROUTER_20 ovn_north = OvnNorth() result = ovn_north.get_router(str(TestOvnNorth.ROUTER_ID20)) assert result['id'] == str(TestOvnNorth.ROUTER_ID20) assert result['name'] == str(TestOvnNorth.ROUTER_NAME20) assert mock_lookup.call_args == mock.call( OvnNorth.TABLE_LR, str(TestOvnNorth.ROUTER_ID20))
def change_destination(self, tracking_id: str, destination: str) -> None:
    """Change the cargo's destination to the Location named *destination*."""
    new_destination = Location[destination]
    self.booking_application.change_destination(
        UUID(tracking_id), new_destination)
def validate_token(token: str) -> bool:
    """Return True when *token* is a well-formed version-4 UUID string.

    Fix: ``UUID(token, version=4)`` *sets* the version and variant bits
    rather than checking them, so any syntactically valid UUID string of
    any version was accepted (and ``bool(UUID(...))`` is always True).
    Parse first, then verify the version field.
    """
    try:
        return UUID(token).version == 4
    except ValueError:
        # Not parseable as a UUID at all.
        return False
def test_decoding(self):
    """The ShortUUID string must decode back to the original UUID."""
    expected = UUID("{3b1f8b40-222c-4a6e-b77e-779d5a94e21c}")
    self.assertEqual(decode("CXc85b4rqinB7s5J52TRYb"), expected)
SERV_STATELESS_PROGRAMMABLE_SWITCH, VIDEO_CODEC_COPY, VIDEO_CODEC_H264_OMX, ) from openpeerpower.components.homekit.img_util import TurboJPEGSingleton from openpeerpower.components.homekit.type_cameras import Camera from openpeerpower.components.homekit.type_switches import Switch from openpeerpower.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON from openpeerpower.exceptions import OpenPeerPowerError from openpeerpower.setup import async_setup_component from .common import mock_turbo_jpeg MOCK_START_STREAM_TLV = "ARUCAQEBEDMD1QMXzEaatnKSQ2pxovYCNAEBAAIJAQECAgECAwEAAwsBAgAFAgLQAgMBHgQXAQFjAgQ768/RAwIrAQQEAAAAPwUCYgUDLAEBAwIMAQEBAgEAAwECBAEUAxYBAW4CBCzq28sDAhgABAQAAKBABgENBAEA" MOCK_END_POINTS_TLV = "ARAzA9UDF8xGmrZykkNqcaL2AgEAAxoBAQACDTE5Mi4xNjguMjA4LjUDAi7IBAKkxwQlAQEAAhDN0+Y0tZ4jzoO0ske9UsjpAw6D76oVXnoi7DbawIG4CwUlAQEAAhCyGcROB8P7vFRDzNF2xrK1Aw6NdcLugju9yCfkWVSaVAYEDoAsAAcEpxV8AA==" MOCK_START_STREAM_SESSION_UUID = UUID("3303d503-17cc-469a-b672-92436a71a2f6") PID_THAT_WILL_NEVER_BE_ALIVE = 2147483647 async def _async_start_streaming(opp, acc): """Start streaming a camera.""" acc.set_selected_stream_configuration(MOCK_START_STREAM_TLV) await acc.run() await opp.async_block_till_done() async def _async_setup_endpoints(opp, acc): """Set camera endpoints.""" acc.set_endpoints(MOCK_END_POINTS_TLV) await acc.run()
def test_group_query_includes_recording_events(self):
    """Actors of a unique_group trend should carry matched session recordings.

    Only events with a ``$session_id`` that has a stored recording may
    appear under ``matched_recordings``.
    """
    GroupTypeMapping.objects.create(team=self.team,
                                    group_type="organization",
                                    group_type_index=0)
    create_group(team_id=self.team.pk, group_type_index=0,
                 group_key="bla", properties={})
    # Recording exists for session "s1".
    _create_session_recording_event(
        self.team.pk,
        "u1",
        "s1",
        timestamp=timezone.now(),
    )
    # This pageview has no $session_id, so it cannot match the recording.
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now(),
        properties={"$group_0": "bla"},
    )
    # This pageview is tied to s1/w1 and should be the single matched event.
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now() + relativedelta(hours=2),
        properties={
            "$session_id": "s1",
            "$window_id": "w1",
            "$group_0": "bla"
        },
        uuid="b06e5a5e-e001-4293-af81-ac73e194569d",
    )
    event = {
        "id": "pageview",
        "name": "pageview",
        "type": "events",
        "order": 0,
        "math": "unique_group",
        "math_group_type_index": 0,
    }
    filter = Filter(
        data={
            "date_from": "2021-01-21T00:00:00Z",
            "date_to": "2021-01-22T00:00:00Z",
            "events": [event],
            "include_recordings": "true",
        })
    entity = Entity(event)
    _, serialized_actors = ClickhouseTrendsActors(self.team, entity,
                                                  filter).get_actors()
    self.assertCountEqual(
        serialized_actors[0].get("matched_recordings", []),
        [{
            "session_id": "s1",
            "events": [{
                "window_id": "w1",
                "timestamp": timezone.now() + relativedelta(hours=2),
                "uuid": UUID("b06e5a5e-e001-4293-af81-ac73e194569d"),
            }],
        }],
    )
def to_python(self, value):
    """Coerce *value* to a uuid.UUID, passing existing UUIDs through."""
    if isinstance(value, UUID):
        return value
    return UUID(value)
def __init__(self, uuid=NULL_KEY):
    """Store *uuid*, converting non-UUID values via the UUID constructor."""
    self.uuid = uuid if isinstance(uuid, UUID) else UUID(uuid)
def __init__(self, *a, **k):
    """Initialise as a UUID, logging the constructor args on failure.

    Fixes: ``except Exception, e`` is Python-2-only syntax and ``raise e``
    discards the original traceback; use ``as``-style handling with a bare
    ``raise`` (works on both Python 2.6+ and 3) so the failure point is
    preserved for debugging.
    """
    try:
        UUID.__init__(self, *a, **k)
    except Exception:
        print("error initializing contact id: ", a, k)
        raise
def token(request):
    """OAuth2 token endpoint.

    Authenticates the requesting client via HTTP Basic authentication and
    then issues an access token according to the requested grant type
    (authorization_code, password, client_credentials or refresh_token).
    Errors are reported as JSON bodies with RFC 6749 error codes.
    """
    class GrantError(Exception):
        # Internal signal carrying the OAuth error code for the response.
        def __init__(self, msg):
            super().__init__(msg)
            self.msg = msg

    # Authenticate Client by Basic Access Authentication
    try:
        auth = request.META['HTTP_AUTHORIZATION']
        (auth_scheme, auth_param) = auth.split(' ', 1)
        assert auth_scheme.lower() == 'basic'
        auth_param = b64decode(auth_param.encode('ascii')).decode('ascii')
        (client_id, client_secret) = auth_param.split(':', 1)
        client_id = UUID(hex=client_id)
        client_secret = UUID(hex=client_secret)
        client = Client.objects.get(id=client_id)
        assert client.secret == client_secret
    except (KeyError, ValueError, AssertionError, Client.DoesNotExist):
        response = JsonResponse({'error': 'invalid_client'}, status=401)
        response['WWW-Authenticate'] = 'Basic realm="Django OAuth2 Client"'
        return response

    # Grant an Access Token by Different Grant Type
    try:
        grant_type = request.POST['grant_type']
        if grant_type == 'authorization_code':
            code = request.POST['code']
            redirect_uri = request.POST['redirect_uri']
            if client.get_grant_type_display() != 'authorization_code':
                raise GrantError('unauthorized_client')
            try:
                code = UUID(hex=code)
                code = Code.objects.get(id=code)
                assert not code.is_expired()
                assert code.client == client
                assert code.redirect_uri == redirect_uri
            except (ValueError, Code.DoesNotExist, AssertionError):
                raise GrantError('invalid_grant')
            access_token = code.get_access_token()
        elif grant_type == 'password':
            username = request.POST['username']
            password = request.POST['password']
            if client.get_grant_type_display() != 'password':
                raise GrantError('unauthorized_client')
            user = authenticate(username=username, password=password)
            if user is None or not user.is_active:
                raise GrantError('invalid_grant')
            access_token = AccessToken(client=client, user=user)
            access_token.save()
        elif grant_type == 'client_credentials':
            if client.get_grant_type_display() != 'client_credentials':
                raise GrantError('unauthorized_client')
            access_token = AccessToken(client=client, user=None)
            access_token.save()
        elif grant_type == 'refresh_token':
            refresh_token = request.POST['refresh_token']
            if client.get_grant_type_display() != 'authorization_code':
                raise GrantError('unauthorized_client')
            try:
                refresh_token = UUID(hex=refresh_token)
                access_token = AccessToken.objects.get(
                    refresh_id=refresh_token)
                assert access_token.client == client
                assert not access_token.is_refresh_expired()
            # BUG FIX: a malformed refresh token (ValueError from UUID) or a
            # failed ownership/expiry assertion previously escaped as an
            # unhandled 500; catch them like the authorization_code branch
            # does and answer with invalid_grant.
            except (ValueError, AccessToken.DoesNotExist, AssertionError):
                raise GrantError('invalid_grant')
            access_token = access_token.refresh()
        else:
            return JsonResponse({'error': 'unsupported_grant_type'},
                                status=400)
    except KeyError:
        # A required POST parameter was missing.
        return JsonResponse({'error': 'invalid_request'}, status=400)
    except GrantError as e:
        return JsonResponse({'error': e.msg}, status=400)

    return JsonResponse({
        'access_token': str(access_token),
        'token_type': 'bearer',
        'expires_in': access_token.client.access_token_expires_in,
        'refresh_token': access_token.get_refresh_token(),
    })
def test_queryStringFromExpression(self):
    """Compound match expressions should serialise to OpenDirectory query
    strings, AND as ``(&...)`` and OR as ``(|...)``, with a UUID value
    rendered in its canonical dashed form."""
    # CompoundExpressions
    expression = CompoundExpression([
        MatchExpression(self.service.fieldName.uid,
                        u"a", matchType=MatchType.contains),
        MatchExpression(self.service.fieldName.guid,
                        UUID(int=0), matchType=MatchType.contains),
        MatchExpression(self.service.fieldName.shortNames,
                        u"c", matchType=MatchType.contains),
        MatchExpression(self.service.fieldName.emailAddresses,
                        u"d", matchType=MatchType.startsWith),
        MatchExpression(self.service.fieldName.fullNames,
                        u"e", matchType=MatchType.equals),
    ], Operand.AND)
    queryString, recordTypes = \
        self.service._queryStringAndRecordTypesFromExpression(
            expression)
    # All record types are searched.
    self.assertEquals(
        recordTypes,
        set([
            u"dsRecTypeStandard:Users",
            u"dsRecTypeStandard:Groups",
            u"dsRecTypeStandard:Places",
            u"dsRecTypeStandard:Resources",
        ]))
    # contains => *x*, startsWith => x*, equals => x.
    self.assertEquals(
        queryString,
        (u"(&(dsAttrTypeStandard:GeneratedUID=*a*)"
         u"(dsAttrTypeStandard:GeneratedUID="
         u"*00000000-0000-0000-0000-000000000000*)"
         u"(dsAttrTypeStandard:RecordName=*c*)"
         u"(dsAttrTypeStandard:EMailAddress=d*)"
         u"(dsAttrTypeStandard:RealName=e))"))
    # Same again with an OR operand.
    expression = CompoundExpression([
        MatchExpression(self.service.fieldName.shortNames,
                        u"a", matchType=MatchType.contains),
        MatchExpression(self.service.fieldName.emailAddresses,
                        u"b", matchType=MatchType.startsWith),
        MatchExpression(self.service.fieldName.fullNames,
                        u"c", matchType=MatchType.equals),
    ], Operand.OR)
    queryString, recordTypes = \
        self.service._queryStringAndRecordTypesFromExpression(
            expression)
    self.assertEquals(
        recordTypes,
        set([
            u"dsRecTypeStandard:Users",
            u"dsRecTypeStandard:Groups",
            u"dsRecTypeStandard:Places",
            u"dsRecTypeStandard:Resources",
        ]))
    self.assertEquals(
        queryString,
        (u"(|(dsAttrTypeStandard:RecordName=*a*)"
         u"(dsAttrTypeStandard:EMailAddress=b*)"
         u"(dsAttrTypeStandard:RealName=c))"))
def _get_id(self):
    """Return the message id, converting a Java UUID to a Python UUID."""
    message_id = self.impl.getMessageId()
    if isinstance(message_id, JUUID):
        return UUID(message_id.toString())
    return message_id
# Variable-state flag values -- presumably from the UEFI variable store
# format (NOTE(review): confirm against the EDK II headers).
VAR_IN_DELETED_TRANSITION = 0xfe
VAR_HEADER_VALID_ONLY = 0x7f
# Combined states are derived by AND-ing the individual flag values.
VAR_ADDED_TRANSITION = VAR_ADDED & VAR_IN_DELETED_TRANSITION
VAR_DELETED_TRANSITION = VAR_ADDED & VAR_DELETED & VAR_IN_DELETED_TRANSITION

# Well-known GUID of the EFI global variable namespace.
GLOBAL_VARIABLE_GUID = "8be4df61-93ca-11d2-aa0d-00e098032b8c"

# construct declaration: an EFI GUID viewed both as structured fields and
# as 16 raw bytes; "str" renders the raw bytes as a little-endian UUID.
EfiGuid = Union(0,
    "efiguid" / Struct(
        "data1" / Hex(Int32ul),
        "data2" / Hex(Int16ul),
        "data3" / Hex(Int16ul),
        "data4" / Array(8, Hex(Int8ul)),
    ),
    "raw" / Bytes(16),
    "str" / Computed(lambda ctx: str(UUID(bytes_le=ctx.raw))),
)

# construct declaration: EFI_TIME layout, all little-endian.
EfiTime = Struct(
    "year" / Int16ul,
    "month" / Int8ul,
    "day" / Int8ul,
    "hour" / Int8ul,
    "min" / Int8ul,
    "sec" / Int8ul,
    "_pad1" / Int8ul,  # padding
    "nanosec" / Int32ul,
    "tz" / Int16ul,
    "daylight" / Int8ul,
    "_pad2" / Int8ul,  # padding
)
class EnrollmentTestMixin(CacheIsolationTestCase):
    """ Test data and helper functions """
    ENABLED_CACHES = ['default']
    organization_key = 'test'
    program_uuid = UUID('dddddddd-5f48-493d-9910-84e1d36c657f')
    curriculum_uuid_a = UUID('aaaaaaaa-bd26-4370-94b8-b4063858210b')

    @classmethod
    def setUpClass(cls):
        """ Set up test data """
        super(EnrollmentTestMixin, cls).setUpClass()
        # Catalog organization + program owned by it.
        catalog_org = CatalogOrganizationFactory.create(
            key=cls.organization_key)
        cls.program = ProgramFactory.create(
            uuid=cls.program_uuid,
            authoring_organizations=[catalog_org])
        # Platform organization with a SAML provider attached.
        organization = OrganizationFactory.create(
            short_name=cls.organization_key)
        SAMLProviderConfigFactory.create(organization=organization)
        # One course with a single course run.
        catalog_course_id_str = 'course-v1:edX+ToyX'
        course_run_id_str = '{}+Toy_Course'.format(catalog_course_id_str)
        cls.course_id = CourseKey.from_string(course_run_id_str)
        CourseOverviewFactory(id=cls.course_id)
        course_run = CourseRunFactory(key=course_run_id_str)
        cls.course = CourseFactory(key=catalog_course_id_str,
                                   course_runs=[course_run])
        cls.student_1 = UserFactory(username='******')
        cls.student_2 = UserFactory(username='******')

    def setUp(self):
        super(EnrollmentTestMixin, self).setUp()
        # Prime the program cache so views don't hit the catalog service.
        cache.set(PROGRAM_CACHE_KEY_TPL.format(uuid=self.program_uuid),
                  self.program, None)

    def create_program_enrollment(self, external_user_key, user=False):
        """
        Creates and returns a ProgramEnrollment for the given external_user_key and
        user if specified.
        """
        program_enrollment = ProgramEnrollmentFactory.create(
            external_user_key=external_user_key,
            program_uuid=self.program_uuid,
        )
        # ``user=False`` is a sentinel meaning "leave the factory default";
        # any other value (including None) is assigned explicitly.
        if user is not False:
            program_enrollment.user = user
            program_enrollment.save()
        return program_enrollment

    def create_program_course_enrollment(self, program_enrollment,
                                         course_status=CourseStatuses.ACTIVE):
        """
        Creates and returns a ProgramCourseEnrollment for the given program_enrollment and
        self.course_key, creating a CourseEnrollment if the program enrollment has a user
        """
        course_enrollment = None
        if program_enrollment.user:
            course_enrollment = CourseEnrollmentFactory.create(
                course_id=self.course_id,
                user=program_enrollment.user,
                mode=CourseMode.MASTERS)
            # The platform enrollment is active only when the program-course
            # status says so.
            course_enrollment.is_active = \
                course_status == CourseStatuses.ACTIVE
            course_enrollment.save()
        return ProgramCourseEnrollmentFactory.create(
            program_enrollment=program_enrollment,
            course_key=self.course_id,
            course_enrollment=course_enrollment,
            status=course_status,
        )

    def create_program_and_course_enrollments(
            self, external_user_key, user=False,
            course_status=CourseStatuses.ACTIVE):
        """ Convenience wrapper: create a program enrollment and a matching
        program-course enrollment in one call. """
        program_enrollment = self.create_program_enrollment(
            external_user_key, user)
        return self.create_program_course_enrollment(
            program_enrollment, course_status=course_status)
def assertUUID(string):
    """Assert that *string* parses as a UUID.

    The UUID constructor raises ValueError for malformed input, which is
    the assertion failure; nothing is returned on success.
    """
    UUID(hex=string)
def test_person_query_includes_recording_events(self):
    """Actor results should list, per person, only the events that belong
    to a session with a stored recording."""
    _create_person(team_id=self.team.pk, distinct_ids=["u1"],
                   properties={"email": "bla"})
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now())  # No $session_id, so not included
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now(),
        properties={
            "$session_id": "s2",
            "$window_id": "w2"
        },
    )  # No associated recording, so not included
    # Recording exists only for session "s1".
    _create_session_recording_event(
        self.team.pk,
        "u1",
        "s1",
        timestamp=timezone.now(),
    )
    # Two events in s1/w1 -- both should be surfaced for the recording.
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now() + relativedelta(hours=2),
        properties={
            "$session_id": "s1",
            "$window_id": "w1"
        },
        uuid="b06e5a5e-e001-4293-af81-ac73e194569d",
    )
    _create_event(
        event="pageview",
        distinct_id="u1",
        team=self.team,
        timestamp=timezone.now() + relativedelta(hours=3),
        properties={
            "$session_id": "s1",
            "$window_id": "w1"
        },
        uuid="206e5a5e-e001-4293-af81-ac73e194569d",
    )
    event = {
        "id": "pageview",
        "name": "pageview",
        "type": "events",
        "order": 0,
    }
    filter = Filter(
        data={
            "date_from": "2021-01-21T00:00:00Z",
            "date_to": "2021-01-22T00:00:00Z",
            "events": [event],
            "include_recordings": "true",
        })
    entity = Entity(event)
    _, serialized_actors = ClickhouseTrendsActors(self.team, entity,
                                                  filter).get_actors()
    self.assertEqual(len(serialized_actors), 1)
    self.assertEqual(len(serialized_actors[0]["matched_recordings"]), 1)
    self.assertEqual(
        serialized_actors[0]["matched_recordings"][0]["session_id"], "s1")
    self.assertCountEqual(
        serialized_actors[0]["matched_recordings"][0]["events"],
        [
            {
                "window_id": "w1",
                "timestamp": timezone.now() + relativedelta(hours=3),
                "uuid": UUID("206e5a5e-e001-4293-af81-ac73e194569d"),
            },
            {
                "window_id": "w1",
                "timestamp": timezone.now() + relativedelta(hours=2),
                "uuid": UUID("b06e5a5e-e001-4293-af81-ac73e194569d"),
            },
        ],
    )
class BundleDefinitionLocatorTests(TestCase):
    """
    Tests for :class:`.BundleDefinitionLocator`
    """
    @ddt.data(
        'bundle-olx:4b33677f-7eb7-4376-8752-024ce057d7e8:5:html:html/introduction/definition.xml',
        'bundle-olx:22825172-cde7-4fbd-ac03-a45b631e8e65:studio_draft:video:video/v1/definition.xml',
    )
    def test_roundtrip_from_string(self, key):
        # String -> key -> string must be lossless.
        def_key = DefinitionKey.from_string(key)
        serialized = text_type(def_key)
        self.assertEqual(key, serialized)

    @ddt.data(
        {
            "bundle_uuid": "4b33677f-7eb7-4376-8752-024ce057d7e8",  # string but will be converted to UUID automatically
            "block_type": "video",
            "olx_path": "video/vid_001/definition.xml",
            "bundle_version": 15,
        },
        {
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "video",
            "olx_path": "video/vid_001/definition.xml",
            "draft_name": "studio_draft",
        },
        {
            # Non-ASCII path component must survive serialisation.
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "video",
            "olx_path": "video/θήτα/definition.xml",
            "draft_name": "studio_draft",
        },
    )
    def test_roundtrip_from_key(self, key_args):
        # Key -> string -> key must be lossless.
        key = BundleDefinitionLocator(**key_args)
        serialized = text_type(key)
        deserialized = DefinitionKey.from_string(serialized)
        self.assertEqual(key, deserialized)

    @ddt.data(
        {
            "bundle_uuid": "not-a-valid-uuid",
            "block_type": "video",
            "olx_path": "video/vid_001/definition.xml",
            "bundle_version": 15,
        },
        {
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "video",
            "olx_path": "video/vid_001/definition.xml",
            # Missing bundle_version or draft_name
        },
        {
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "video",
            "olx_path": "video/vid_001/definition.xml",
            # Both bundle_version and draft_name:
            "bundle_version": 15,
            "draft_name": "studio_draft",
        },
        {
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "colon:in:type",
            "olx_path": "video/vid_001/definition.xml",
            "draft_name": "studio_draft",
        },
        {
            "bundle_uuid": UUID("4b33677f-7eb7-4376-8752-024ce057d7e8"),
            "block_type": "video",
            "olx_path": "https://www.example.com",  # not a valid OLX path
            "draft_name": "studio_draft",
        },
    )
    def test_invalid_args(self, key_args):
        # Every malformed argument set above must be rejected.
        with self.assertRaises((InvalidKeyError, TypeError, ValueError)):
            BundleDefinitionLocator(**key_args)
def __repr__(self):
    """Wrap the base UUID repr so the concrete type shows up in logs."""
    return "<GroupId: {0}>".format(UUID.__repr__(self))
class Task:
    # Output product this task contributes to.
    product: OutputProduct
    # Spatial tile index and its geometry.
    tile_index: TileIdx_xy
    geobox: GeoBox
    # Temporal extent covered by this task.
    time_range: DateTimeRange
    # Input datasets feeding the computation.
    datasets: Tuple[Dataset, ...] = field(repr=False)
    # Deterministic output dataset id; UUID(int=0) means "derive in
    # __post_init__ from product/lineage/time/tile".
    uuid: UUID = UUID(int=0)
    short_time: str = field(init=False, repr=False)
    source: Optional[WorkTokenInterface] = field(init=True, repr=False,
                                                 default=None)

    def __post_init__(self):
        """Derive the short time label and, if needed, the output UUID."""
        self.short_time = self.time_range.short
        if self.uuid.int == 0:
            self.uuid = odc_uuid(
                self.product.name,
                self.product.version,
                sources=self._lineage(),
                time=self.short_time,
                tile=self.tile_index,
            )

    @property
    def location(self) -> str:
        """
        Product relative location for this task
        """
        return self.product.region_code(self.tile_index, "/") + "/" + \
            self.short_time

    def _lineage(self) -> Tuple[UUID, ...]:
        # Ids of all input datasets, used for lineage and uuid derivation.
        return tuple(ds.id for ds in self.datasets)

    def _prefix(self, relative_to: str = "dataset") -> str:
        # Common "<product>_<region>_<time>" stem for every output file;
        # prefixed with more path components depending on `relative_to`.
        product = self.product
        region_code = product.region_code(self.tile_index)
        file_prefix = f"{product.short_name}_{region_code}_{self.short_time}"
        if relative_to == "dataset":
            return file_prefix
        elif relative_to == "product":
            return self.location + "/" + file_prefix
        else:
            return product.location + "/" + self.location + "/" + file_prefix

    def paths(
        self, relative_to: str = "dataset", ext: str = EXT_TIFF
    ) -> Dict[str, str]:
        """
        Compute dictionary mapping band name to paths.
        :param relative_to: dataset|product|absolute
        """
        prefix = self._prefix(relative_to)
        return {band: f"{prefix}_{band}.{ext}"
                for band in self.product.measurements}

    def metadata_path(self, relative_to: str = "dataset",
                      ext: str = "yaml") -> str:
        """
        Compute path for metadata file.
        :param relative_to: dataset|product|absolute
        """
        return self._prefix(relative_to) + "." + ext

    def aux_path(self, name: str, relative_to: str = "dataset",
                 ext: str = EXT_TIFF):
        """
        Compute path for some auxilary file.
        :param relative_to: dataset|product|absolute
        :param name: "band"
        :param ext: File extension, defaults to tif
        """
        prefix = self._prefix(relative_to)
        return f"{prefix}_{name}.{ext}"

    def render_metadata(
        self, ext: str = EXT_TIFF, processing_dt: Optional[datetime] = None
    ) -> Dict[str, Any]:
        """
        Put together STAC metadata document for the output of this task.
        """
        if processing_dt is None:
            processing_dt = datetime.utcnow()

        product = self.product
        geobox = self.geobox
        region_code = product.region_code(self.tile_index)
        inputs = list(map(str, self._lineage()))

        # STAC/ODC properties: time bounds, processing time, lineage.
        properties: Dict[str, Any] = deepcopy(product.properties)
        properties["dtr:start_datetime"] = format_datetime(
            self.time_range.start)
        properties["dtr:end_datetime"] = format_datetime(self.time_range.end)
        properties["odc:processing_datetime"] = format_datetime(
            processing_dt, timespec="seconds"
        )
        properties["odc:region_code"] = region_code
        properties["odc:lineage"] = dict(inputs=inputs)
        properties["odc:product"] = product.name

        # Footprint in WGS84 for the STAC geometry/bbox.
        geobox_wgs84 = geobox.extent.to_crs(
            "epsg:4326", resolution=math.inf, wrapdateline=True
        )
        bbox = geobox_wgs84.boundingbox

        item = pystac.Item(
            id=str(self.uuid),
            geometry=geobox_wgs84.json,
            bbox=[bbox.left, bbox.bottom, bbox.right, bbox.top],
            datetime=self.time_range.start.replace(tzinfo=timezone.utc),
            properties=properties,
        )

        # Enable the Projection extension
        item.ext.enable("projection")
        item.ext.projection.epsg = geobox.crs.epsg

        # Add all the assets
        for band, path in self.paths(ext=ext).items():
            asset = pystac.Asset(
                href=path,
                media_type="image/tiff; application=geotiff",
                roles=["data"],
                title=band,
            )
            item.add_asset(band, asset)
            item.ext.projection.set_transform(geobox.transform, asset=asset)
            item.ext.projection.set_shape(geobox.shape, asset=asset)

        # Add links
        item.links.append(
            pystac.Link(
                rel="product_overview",
                media_type="application/json",
                target=product.href,
            )
        )
        item.links.append(
            pystac.Link(
                rel="self",
                media_type="application/json",
                target=self.metadata_path("absolute", ext="json"),
            )
        )

        return item.to_dict()
def __repr__(self):
    """Wrap the base UUID repr so the concrete type shows up in logs."""
    return "<ContactId: {0}>".format(UUID.__repr__(self))
def to_python(self, value):
    """Convert *value* to a UUID; answer NotFound() for unparseable text.

    Existing UUID instances are passed through untouched; strings are
    stripped of surrounding whitespace before parsing.
    """
    if isinstance(value, UUID):
        return value
    try:
        return UUID(value.strip())
    except ValueError:
        return NotFound()
def get_urn(key):
    """Generate a unique identifier from the key.

    Hashes *key*, interprets the hex digest as a UUID, and returns the
    final component of its URN form (the text after ``urn:uuid:``).
    Assumes ``hashfunc`` yields a 32-char hex digest (e.g. md5) -- a
    longer digest would make the UUID constructor raise ValueError.
    """
    # BUG FIX: stdlib uuid.UUID has no get_urn() method; the accessor is
    # the .urn property, so the old call raised AttributeError.
    urn = UUID(hashfunc(key).hexdigest()).urn
    return urn.split(':')[-1]
def read_uuid(data, writer_schema=None, reader_schema=None):
    """Decode an Avro string value into a uuid.UUID (schemas are unused)."""
    return UUID(hex=data)
def test_template_sub_uuid_generator():
    """The ${uuid()} directive must expand to a parseable UUID string."""
    rendered = template_sub("${uuid()}", {})
    assert UUID(rendered, version=4)
('A', CharObject), ('Z', CharObject), ('⅓', CharObject), ('á', CharObject), ('ы', CharObject), ('カ', CharObject), ('Ø', CharObject), ('ß', CharObject), # string ('This is a test string', None), ('Кириллица', None), ('Little Mary had a lamb', String), # UUID (UUID('12345678123456789876543298765432'), None), (UUID('74274274274274274274274274274274'), UUIDObject), (uuid4(), None), # decimal (long internal representation in Java) (decimal.Decimal('-234.567'), None), (decimal.Decimal('200.0'), None), (decimal.Decimal('123.456'), DecimalObject), (decimal.Decimal('1.0'), None), (decimal.Decimal('0.02'), None), # decimal (BigInteger internal representation in Java) (decimal.Decimal('12345671234567123.45671234567'), None), (decimal.Decimal('-845678456.7845678456784567845'), None), # date and time
def valid_uuid4(uuid_string):
    """Return True if *uuid_string* parses as a UUID, False otherwise.

    NOTE: ``version=4`` does not verify that the input IS a v4 UUID --
    the constructor overwrites the version/variant bits, so any
    well-formed UUID string is accepted.
    """
    try:
        UUID(uuid_string, version=4)
    except (ValueError, AttributeError, TypeError):
        # Non-string input (None, int, ...) raises AttributeError/TypeError
        # from the constructor; treat it as invalid instead of propagating.
        return False
    return True
def read_configuration(self) -> 'JobLocation':
    """
    Read configuration file from container subvolume
    :return: Corresponding location

    Parses the single-section INI file stored next to the subvolume,
    populates this location's attributes from it, and returns the
    JobLocation describing the other end of the job (or None).
    """
    # Read configuration file
    out = self.exec_check_output('cat "%s"' % self.configuration_filename)
    file = out.decode().splitlines()
    corresponding_location = None
    parser = ConfigParser()
    parser.read_file(file)
    # The file is expected to contain exactly one section.
    section = parser.sections()[0]

    # Section name implies location type
    if section == JobLocation.TYPE_SOURCE:
        location_type = JobLocation.TYPE_SOURCE
    elif section == JobLocation.TYPE_DESTINATION:
        location_type = JobLocation.TYPE_DESTINATION
    else:
        raise ValueError('invalid section name/location type [%s]' % section)

    # Parse config string values
    location_uuid = parser.get(section, self.__KEY_UUID, fallback=None)
    source = parser.get(section, self.__KEY_SOURCE, fallback=None)
    source_container = parser.get(section, self.__KEY_SOURCE_CONTAINER,
                                  fallback=None)
    destination = parser.get(section, self.__KEY_DESTINATION, fallback=None)

    # Keep has been renamed to retention.
    # Supporting the old name for backward compatibility.
    retention = parser.get(section, self.__KEY_RETENTION, fallback=None)
    if not retention:
        retention = parser.get(section, self.__KEY_KEEP, fallback=None)

    # Convert to instances where applicable
    location_uuid = UUID(location_uuid) if location_uuid else None
    source = parse.urlsplit(source) if source else None
    source_container = source_container if source_container else None
    destination = parse.urlsplit(destination) if destination else None
    retention = RetentionExpression(retention) if retention else None
    # Boolean flags default to False when missing from the file.
    compress = True if distutils.util.strtobool(
        parser.get(section, self.__KEY_COMPRESS, fallback='False')) \
        else False
    identical_filesystem = True if distutils.util.strtobool(
        parser.get(section, self.__KEY_IDENT_FS, fallback='False')) \
        else False

    if location_type == JobLocation.TYPE_SOURCE:
        # Amend url/container relpath from current path for source locations
        # if container relative path was not provided
        if not self.container_subvolume_relpath:
            source_container = os.path.basename(
                self.container_subvolume_path.rstrip(os.path.sep))
            # Rebase this location's url one directory up, keeping the
            # container directory name as the relative path.
            source = parse.SplitResult(scheme=self.url.scheme,
                                       netloc=self.url.netloc,
                                       path=os.path.abspath(
                                           os.path.join(
                                               self.url.path,
                                               os.path.pardir)),
                                       query=self.url.query,
                                       fragment=None)
            self.url = source
            self.container_subvolume_relpath = source_container
        if destination:
            corresponding_location = JobLocation(
                destination,
                location_type=JobLocation.TYPE_DESTINATION)
    elif location_type == JobLocation.TYPE_DESTINATION:
        if source:
            corresponding_location = JobLocation(
                source,
                location_type=JobLocation.TYPE_SOURCE,
                container_subvolume_relpath=source_container)

    # Commit parsed values onto this instance.
    self.location_type = location_type
    self.uuid = location_uuid
    self.retention = retention
    self.compress = compress
    self.identical_filesystem = identical_filesystem
    return corresponding_location