def test_remember_lease_differently(self):
    """Remembering the same lease twice must update, not duplicate, the stored records."""
    bundle = TransactionBundle(self.relayed_solicit_message, received_over_multicast=False)
    bundle.response = reply_message

    duid_option = bundle.request.get_option_of_type(ClientIdOption)
    na_option = bundle.response.get_option_of_type(IANAOption)
    address_option = na_option.get_option_of_type(IAAddressOption)
    pd_option = bundle.response.get_option_of_type(IAPDOption)
    prefix_option = pd_option.get_option_of_type(IAPrefixOption)

    # Collect every remote-id seen in the incoming relay chain
    expected_remote_ids = {
        "{}:{}".format(option.enterprise_number, normalise_hex(option.remote_id))
        for relay_message in bundle.incoming_relay_messages
        for option in relay_message.get_options_of_type(RemoteIdOption)
    }

    with TemporaryDirectory() as tmp_dir_name:
        store = LeasequerySqliteStore(os.path.join(tmp_dir_name, 'lq.sqlite'))
        store.worker_init([])

        # Store the very same lease twice
        store.remember_lease(bundle)
        store.remember_lease(bundle)

        # Check that the data ended up in the database
        db = sqlite3.connect(store.sqlite_filename)
        db.row_factory = sqlite3.Row

        client_rows = list(db.execute("SELECT * FROM clients"))
        self.assertEqual(len(client_rows), 1)
        client = client_rows[0]
        client_key = client['id']
        self.assertEqual(client['client_id'], normalise_hex(duid_option.duid.save()))
        self.assertEqual(client['link_address'], bundle.link_address.exploded)
        self.assertAlmostEqual(client['last_interaction'], time.time(), delta=5)

        address_rows = list(db.execute("SELECT * FROM addresses"))
        self.assertEqual(len(address_rows), 1)
        address_row = address_rows[0]
        self.assertEqual(address_row['client_fk'], client_key)
        self.assertEqual(address_row['address'], address_option.address.exploded)
        self.assertAlmostEqual(address_row['preferred_lifetime_end'],
                               time.time() + address_option.preferred_lifetime, delta=5)
        self.assertAlmostEqual(address_row['valid_lifetime_end'],
                               time.time() + address_option.valid_lifetime, delta=5)
        self.assertEqual(address_row['options'], b'')

        prefix_rows = list(db.execute("SELECT * FROM prefixes"))
        self.assertEqual(len(prefix_rows), 1)
        prefix_row = prefix_rows[0]
        self.assertEqual(prefix_row['client_fk'], client_key)
        self.assertEqual(prefix_row['first_address'], prefix_option.prefix[0].exploded)
        self.assertEqual(prefix_row['last_address'], prefix_option.prefix[-1].exploded)
        # NOTE(review): these compare against the *address* option's lifetimes, exactly as
        # before — presumably the fixture uses identical values for address and prefix;
        # confirm whether ia_prefix lifetimes were intended here
        self.assertAlmostEqual(prefix_row['preferred_lifetime_end'],
                               time.time() + address_option.preferred_lifetime, delta=5)
        self.assertAlmostEqual(prefix_row['valid_lifetime_end'],
                               time.time() + address_option.valid_lifetime, delta=5)
        self.assertEqual(prefix_row['options'], b'')

        remote_id_rows = list(db.execute("SELECT * FROM remote_ids"))
        self.assertEqual(len(remote_id_rows), len(expected_remote_ids))
        self.assertSetEqual({row['remote_id'] for row in remote_id_rows}, expected_remote_ids)

        relay_id_rows = list(db.execute("SELECT * FROM relay_ids"))
        self.assertEqual(len(relay_id_rows), 1)
def test_hex_with_colons(self):
    """normalise_hex(..., include_colons=True) must insert a colon between every byte."""
    cases = [
        ('', ''),
        ('1a2b3c', '1a:2b:3c'),
        ('1a:2b:3c', '1a:2b:3c'),
        ('1a:2b3c', '1a:2b:3c'),
        ('1a2b:3c', '1a:2b:3c'),
    ]
    for value, expected in cases:
        self.assertEqual(normalise_hex(value, include_colons=True), expected)
def determine_local_duid() -> LinkLayerDUID:
    """
    Calculate our own DUID based on one of our MAC addresses

    :return: The server DUID
    """
    for interface_name in netifaces.interfaces():
        entries = netifaces.ifaddresses(interface_name).get(netifaces.AF_LINK, [])
        candidates = [entry['addr'] for entry in entries if entry.get('addr')]

        for candidate in candidates:
            try:
                # Build a DUID from this address
                mac = bytes.fromhex(normalise_hex(candidate))

                # If it is not 6 bytes long then it is not an ethernet MAC address,
                # and all-zeroes is just a fake
                if len(mac) != 6 or mac == b'\x00\x00\x00\x00\x00\x00':
                    continue

                # Assume it's ethernet, build a DUID
                duid = LinkLayerDUID(hardware_type=1, link_layer_address=mac)

                logger.debug("Using server DUID based on {} link address: {}".format(interface_name, candidate))

                return duid
            except ValueError:
                # Try the next one
                continue

    # We didn't find a useful server DUID
    raise ValueError("Cannot find a usable server DUID")
def to_python(self, value):
    """Normalise a submitted value to a plain hex string; None passes through unchanged."""
    if value is None:
        return None

    # If it's a string, it should be hex-encoded data
    try:
        return normalise_hex(value)
    except ValueError:
        raise ValidationError("Value is not a valid hex-string")
def hex_bytes(value: str) -> bytes:
    """
    A sequence of bytes provided as a hexadecimal string.

    :param value: The hexadecimal string
    :return: The corresponding bytes
    """
    return bytes.fromhex(normalise_hex(value))
def hex_as_ascii(value):
    """Try to render hex-encoded data as ASCII text; fall back to colon-separated hex."""
    try:
        as_text = codecs.decode(value.encode('ascii'), 'hex').decode('ascii')
        if all(character in acceptable_characters for character in as_text):
            return True, as_text
    except UnicodeDecodeError:
        # Decoded bytes are not ASCII text; fall through to the hex representation
        pass

    return False, normalise_hex(value, include_colons=True)
def test_query_by_remote_id_on_wrong_link(self):
    """A remote-id query with a non-matching link address must return no leases."""
    bundle = TransactionBundle(self.relayed_solicit_message, received_over_multicast=False)
    bundle.response = reply_message

    # Test every remote-id
    for relay_message in bundle.incoming_relay_messages:
        for option in relay_message.get_options_of_type(RemoteIdOption):
            label = "{}:{}".format(option.enterprise_number, normalise_hex(option.remote_id))
            with self.subTest(msg=label):
                query = LQQueryOption(QUERY_BY_REMOTE_ID,
                                      link_address=IPv6Address('3ffe::'),
                                      options=[option])
                self.query_empty(bundle, query)
def display_link_layer_address(self) -> Union[ElementDataRepresentation, bytes]:
    """
    Nicer representation of link-layer address if we know the hardware type

    :return: Representation of link-layer address
    """
    if self.hardware_type != 1:
        # Unknown hardware type: show the raw bytes
        return self.link_layer_address

    formatted = normalise_hex(self.link_layer_address, include_colons=True)
    return ElementDataRepresentation(formatted)
def test_hex(self):
    """normalise_hex must strip colons and accept both str and bytes input."""
    cases = [
        ('', ''),
        ('1a2b3c', '1a2b3c'),
        ('1a:2b:3c', '1a2b3c'),
        ('1a:2b3c', '1a2b3c'),
        ('1a2b:3c', '1a2b3c'),
        (bytes.fromhex('1a2b3c'), '1a2b3c'),
    ]
    for value, expected in cases:
        self.assertEqual(normalise_hex(value), expected)
def compress(self, data_list):
    """Combine the (as_ascii, value) widget pair into one normalised hex string."""
    as_ascii, value = data_list
    if value in self.empty_values:
        return ''
    if isinstance(value, str):
        value = value.strip()

    if as_ascii:
        # Interpret the input as ASCII text and hex-encode it
        try:
            return codecs.encode(value.encode('ascii'), 'hex').decode('ascii')
        except UnicodeEncodeError:
            raise ValidationError("Value is not a valid ASCII value")

    # Interpret the input as hexadecimal and normalise it
    try:
        return normalise_hex(value)
    except ValueError:
        raise ValidationError("Value is not a valid hexadecimal value")
def compress(self, data_list):
    """Combine the (as_ascii, value) widget pair into one normalised hex string."""
    as_ascii, raw = data_list
    if raw in self.empty_values:
        return ''

    value = raw.strip() if isinstance(raw, str) else raw

    if as_ascii:
        # The user typed ASCII text: store it hex-encoded
        try:
            value = codecs.encode(value.encode('ascii'), 'hex').decode('ascii')
        except UnicodeEncodeError:
            raise ValidationError("Value is not a valid ASCII value")
    else:
        # The user typed hex: normalise it
        try:
            value = normalise_hex(value)
        except ValueError:
            raise ValidationError("Value is not a valid hexadecimal value")

    return value
def get_db_prep_value(self, value, connection, prepared=False):
    """Normalise the hex string before handing it to the database layer; None passes through."""
    prepared_value = super().get_db_prep_value(value, connection, prepared)
    if prepared_value is None:
        return None
    return normalise_hex(prepared_value)
def _normalise_row_id(row_id: str) -> str:
    """
    Validate and normalise the id column of one CSV row into its canonical
    '<type>:<value>' form.

    :param row_id: The raw id value from the CSV file
    :return: The normalised id
    :raises ValueError: If the id type is unsupported or the value is malformed
    """
    if row_id.startswith('duid:'):
        # Round-trip through the DUID parser to get the canonical serialisation
        duid_hex = row_id.split(':', 1)[1]
        duid_bytes = codecs.decode(duid_hex, 'hex')
        length, duid = DUID.parse(duid_bytes, length=len(duid_bytes))
        duid_hex = codecs.encode(duid.save(), 'hex').decode('ascii')
        return 'duid:{}'.format(duid_hex)

    if row_id.startswith('interface-id:'):
        interface_id_hex = normalise_hex(row_id.split(':', 1)[1])
        interface_id = codecs.decode(interface_id_hex, 'hex')
        interface_id_hex = codecs.encode(interface_id, 'hex').decode('ascii')
        return 'interface-id:{}'.format(interface_id_hex)

    if row_id.startswith('interface-id-str:'):
        # ASCII form: store it hex-encoded under the canonical prefix
        interface_id = row_id.split(':', 1)[1]
        interface_id_hex = codecs.encode(interface_id.encode('ascii'), 'hex').decode('ascii')
        return 'interface-id:{}'.format(interface_id_hex)

    if row_id.startswith('remote-id:') or row_id.startswith('remote-id-str:'):
        remote_id_data = row_id.split(':', 1)[1]
        try:
            enterprise_id, remote_id = remote_id_data.split(':', 1)
            enterprise_id = int(enterprise_id)
            if row_id.startswith('remote-id:'):
                remote_id = codecs.decode(normalise_hex(remote_id), 'hex')
            else:
                remote_id = remote_id.encode('ascii')
            return 'remote-id:{}:{}'.format(enterprise_id,
                                            codecs.encode(remote_id, 'hex').decode('ascii'))
        except ValueError:
            # BUG FIX: the example in the message was missing its closing quote
            raise ValueError("Remote-ID must be formatted as 'remote-id:<enterprise>:<remote-id-hex>', "
                             "for example: 'remote-id:9:0123456789abcdef'")

    if row_id.startswith('subscriber-id:'):
        subscriber_id_hex = normalise_hex(row_id.split(':', 1)[1])
        subscriber_id = codecs.decode(subscriber_id_hex, 'hex')
        subscriber_id_hex = codecs.encode(subscriber_id, 'hex').decode('ascii')
        return 'subscriber-id:{}'.format(subscriber_id_hex)

    if row_id.startswith('subscriber-id-str:'):
        subscriber_id = row_id.split(':', 1)[1]
        subscriber_id_hex = codecs.encode(subscriber_id.encode('ascii'), 'hex').decode('ascii')
        return 'subscriber-id:{}'.format(subscriber_id_hex)

    if row_id.startswith('linklayer-id:') or row_id.startswith('linklayer-id-str:'):
        linklayer_id_data = row_id.split(':', 1)[1]
        try:
            linklayer_type, linklayer_id = linklayer_id_data.split(':', 1)
            linklayer_type = int(linklayer_type)
            if row_id.startswith('linklayer-id:'):
                linklayer_id = codecs.decode(normalise_hex(linklayer_id), 'hex')
            else:
                linklayer_id = linklayer_id.encode('ascii')
            return 'linklayer-id:{}:{}'.format(linklayer_type,
                                               codecs.encode(linklayer_id, 'hex').decode('ascii'))
        except ValueError:
            raise ValueError("LinkLayer-ID must be formatted as 'linklayer-id:<type>:<address-hex>', "
                             "for example: 'linklayer-id:1:002436ef1d89")

    # BUG FIX: the concatenated message fragments were missing separating spaces
    # ("interface-id-str,remote-id", "andlinklayer-id-str")
    raise ValueError("Unsupported ID type, supported types: duid, interface-id, interface-id-str, "
                     "remote-id, remote-id-str, subscriber-id, subscriber-id-str, linklayer-id and "
                     "linklayer-id-str")


def parse_csv_file(csv_filename: str) -> List[Tuple[str, Assignment]]:
    """
    Read the assignments from the file specified in the configuration

    :param csv_filename: The filename of the CSV file
    :return: A list of identifiers and their assignment
    """
    logger.debug("Loading assignments from {}".format(csv_filename))

    with open(csv_filename) as csv_file:
        # Auto-detect the CSV dialect
        sniffer = csv.Sniffer()
        sample = csv_file.read(10240)
        dialect = sniffer.sniff(sample)

        # Restart and parse
        csv_file.seek(0)
        reader = csv.DictReader(csv_file, dialect=dialect)

        # First line is column headings
        for row in reader:
            try:
                address_str = row['address'].strip()
                address = address_str and IPv6Address(address_str) or None

                prefix_str = row['prefix'].strip()
                prefix = prefix_str and IPv6Network(prefix_str) or None

                # Validate and normalise id input
                row_id = _normalise_row_id(row['id'])

                # Store the normalised id
                logger.debug("Loaded assignment for {}".format(row_id))
                yield row_id, Assignment(address=address, prefix=prefix)
            except KeyError:
                raise ValueError("Assignment CSV must have columns 'id', 'address' and 'prefix'")
            except ValueError as e:
                # BUG FIX: log the filename, not the repr of the open file object
                logger.error("Ignoring {} line {} with invalid value: {}".format(csv_filename,
                                                                                reader.line_num, e))
def test_bad_hex(self):
    """normalise_hex must reject strings that are not valid hex."""
    for bad_value in ('1a2:b3c', 'Something'):
        with self.assertRaisesRegex(ValueError, 'not valid hex'):
            normalise_hex(bad_value)
def test_hex(self):
    """normalise_hex must strip colons wherever they appear in the string."""
    expectations = {
        '': '',
        '1a2b3c': '1a2b3c',
        '1a:2b:3c': '1a2b3c',
        '1a:2b3c': '1a2b3c',
        '1a2b:3c': '1a2b3c',
    }
    for value, expected in expectations.items():
        self.assertEqual(normalise_hex(value), expected)
def parse_csv_file(csv_filename: str) -> List[Tuple[str, Assignment]]:
    """
    Read the assignments from the file specified in the configuration

    :param csv_filename: The filename of the CSV file
    :return: A list of identifiers and their assignment
    """
    logger.debug("Loading assignments from {}".format(csv_filename))

    with open(csv_filename) as csv_file:
        # Auto-detect the CSV dialect
        sniffer = csv.Sniffer()
        sample = csv_file.read(10240)
        dialect = sniffer.sniff(sample)

        # Restart and parse
        csv_file.seek(0)
        reader = csv.DictReader(csv_file, dialect=dialect)

        # First line is column headings
        for row in reader:
            try:
                address_str = row['address'].strip()
                address = address_str and IPv6Address(address_str) or None

                prefix_str = row['prefix'].strip()
                prefix = prefix_str and IPv6Network(prefix_str) or None

                # Validate and normalise id input
                row_id = row['id']
                if row_id.startswith('duid:'):
                    # Round-trip through the DUID parser to get the canonical serialisation
                    duid_hex = row_id.split(':', 1)[1]
                    duid_bytes = codecs.decode(duid_hex, 'hex')
                    length, duid = DUID.parse(duid_bytes, length=len(duid_bytes))
                    duid_hex = codecs.encode(duid.save(), 'hex').decode('ascii')
                    row_id = 'duid:{}'.format(duid_hex)
                elif row_id.startswith('interface-id:'):
                    interface_id_hex = row_id.split(':', 1)[1]
                    interface_id_hex = normalise_hex(interface_id_hex)
                    interface_id = codecs.decode(interface_id_hex, 'hex')
                    interface_id_hex = codecs.encode(interface_id, 'hex').decode('ascii')
                    # BUG FIX: the normalised prefix was 'interface_id:' (underscore), which
                    # does not match the accepted 'interface-id:' input prefix
                    row_id = 'interface-id:{}'.format(interface_id_hex)
                elif row_id.startswith('interface-id-str:'):
                    interface_id = row_id.split(':', 1)[1]
                    interface_id_hex = codecs.encode(interface_id.encode('ascii'), 'hex').decode('ascii')
                    row_id = 'interface-id:{}'.format(interface_id_hex)
                elif row_id.startswith('remote-id:') or row_id.startswith('remote-id-str:'):
                    remote_id_data = row_id.split(':', 1)[1]
                    try:
                        enterprise_id, remote_id = remote_id_data.split(':', 1)
                        enterprise_id = int(enterprise_id)
                        if row_id.startswith('remote-id:'):
                            remote_id = normalise_hex(remote_id)
                            remote_id = codecs.decode(remote_id, 'hex')
                        else:
                            remote_id = remote_id.encode('ascii')
                        row_id = 'remote-id:{}:{}'.format(enterprise_id,
                                                          codecs.encode(remote_id, 'hex').decode('ascii'))
                    except ValueError:
                        # BUG FIX: the example in the message was missing its closing quote
                        raise ValueError("Remote-ID must be formatted as "
                                         "'remote-id:<enterprise>:<remote-id-hex>', "
                                         "for example: 'remote-id:9:0123456789abcdef'")
                else:
                    # BUG FIX: the concatenated fragments "followed" "by" were missing a space
                    raise ValueError("The id must start with duid: or interface-id: followed by a hex-encoded "
                                     "value, interface-id-str: followed by an ascii string, remote-id: followed by "
                                     "an enterprise-id, a colon and a hex-encoded value or remote-id-str: followed "
                                     "by an enterprise-id, a colon and an ascii string")

                # Store the normalised id
                logger.debug("Loaded assignment for {}".format(row_id))
                yield row_id, Assignment(address=address, prefix=prefix)
            except KeyError:
                raise ValueError("Assignment CSV must have columns 'id', 'address' and 'prefix'")
            except ValueError as e:
                # BUG FIX: log the filename, not the repr of the open file object
                logger.error("Ignoring {} line {} with invalid value: {}".format(csv_filename,
                                                                                reader.line_num, e))