def test_sha256(self):
    self.assertEqual(
        sha256(b''),
        b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o\xb9$'\xaeA"
        b"\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U")
    self.assertEqual(len(sha256(b'')), 32)
    self.assertEqual(len(sha256(b'asdf')), 32)
def test_transform_key(self):
    self.assertEqual(
        transform_key(sha256(b'a'), sha256(b'b'), 1),
        b'"$\xe6\x83\xb7\xbf\xa9|\x82W\x01J\xce=\xaa\x8d{\x18\x99|0\x1f'
        b'\xbbLT4"F\x83\xd0\xc8\xf9')
    self.assertEqual(
        transform_key(sha256(b'a'), sha256(b'b'), 2000),
        b'@\xe5Y\x98\xf7\x97$\x0b\x91!\xbefX\xe8\xb6\xbb\t\xefX>\xb3E\x85'
        b'\xedz\x15\x9c\x96\x03K\x8a\xa1')
def test_aes_cbc_decrypt(self):
    self.assertEqual(
        aes_cbc_decrypt(b'datamustbe16byte', sha256(b'b'), b'ivmustbe16bytesl'),
        b'x]\xb5\xa6\xe3\x10\xf4\x88\x91_\x03\xc6\xb9\xfb`)')
    self.assertEqual(
        aes_cbc_decrypt(b'datamustbe16byte', sha256(b'c'), b'ivmustbe16bytesl'),
        b'\x06\x91 \xd9\\\xd8\x14\xa0\xdc\xd7\x82\xa0\x92\xfb\xe8l')
def test_twofish_cbc_decrypt(self):
    self.assertEqual(
        twofish_cbc_encrypt(b'datamustbe16byte', sha256(b'b'),
                            b'ivmustbe16bytesl'),
        b'\xd4^&`\xe1j\xfd{\xeb\x04{\x90f\xed\xebi')
    self.assertEqual(
        twofish_cbc_encrypt(b'datamustbe16byte', sha256(b'c'),
                            b'ivmustbe16bytesl'),
        b'\xb3\x86\xbe\x0ficZW\x92P\xeb\x17\xa8\xa8\xac\xe8')
    self.assertEqual(
        twofish_cbc_decrypt(b'K\x07q\xad\xd0\x8d\xb9\x0f\x15\xef\x87\x089\xc3\x83\x9c',
                            sha256(b'd'), b'ivmustbe16bytesl'),
        b'datamustbe16byte')
def _make_master_key(self):
    """
    Make the master key by (1) combining the credentials to create
    a composite hash, (2) transforming the hash using the transform seed
    for a specific number of rounds and (3) finally hashing the result in
    combination with the master seed.
    """
    super(KDB4File, self)._make_master_key()
    composite = sha256(b''.join(self.keys))
    tkey = transform_key(composite,
                         self.header.TransformSeed,
                         self.header.TransformRounds)
    self.master_key = sha256(self.header.MasterSeed + tkey)
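# A minimal sketch of the transform step described in the docstring above,
# assuming the standard KDBX AES key transform; transform_key_sketch
# (hashlib + PyCryptodome) is an illustration, not the library's own
# transform_key helper: AES-ECB-encrypt the composite hash with the transform
# seed for the configured number of rounds, then hash the result once more.
import hashlib
from Crypto.Cipher import AES


def transform_key_sketch(composite, transform_seed, rounds):
    cipher = AES.new(transform_seed, AES.MODE_ECB)
    key = composite
    for _ in range(rounds):
        key = cipher.encrypt(key)      # 32-byte key = two AES blocks in ECB
    return hashlib.sha256(key).digest()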
def _write_header(self, stream):
    """Serialize the header fields from self.header into a byte stream, prefix
    with file signature and version before writing header and out-buffer
    to `stream`.

    Note that `stream` is flushed, but not closed!
    """
    header = self._header()

    # write header to stream
    stream.write(header)

    headerHash = base64.b64encode(sha256(header))
    self.obj_root.Meta.HeaderHash = headerHash

    # create HeaderHash if it does not exist
    if len(self.obj_root.Meta.xpath("HeaderHash")) < 1:
        etree.SubElement(self.obj_root.Meta, "HeaderHash")

    # reload out_buffer because we just changed the HeaderHash
    self.protect()
    self.out_buffer = io.BytesIO(self.pretty_print())

    # zip or not according to header setting
    if self.header.CompressionFlags == 1:
        self._zip()

    self._encrypt()

    # write encrypted block to stream
    stream.write(self.out_buffer)
    stream.flush()
def __init__(self, unprotect=True):
    self._salsa_buffer = bytearray()
    self.salsa = Salsa20(sha256(self.header.ProtectedStreamKey),
                         KDB4_SALSA20_IV)

    self.in_buffer.seek(0)
    self.tree = objectify.parse(self.in_buffer)
    objectify.deannotate(self.tree, pytype=True, cleanup_namespaces=True)
    self.obj_root = self.tree.getroot()

    if unprotect:
        self.unprotect()
def __init__(self, unprotect=True):
    self._salsa_buffer = bytearray()
    self.salsa = Salsa20(sha256(self.header.ProtectedStreamKey),
                         KDB4_SALSA20_IV)

    self.in_buffer.seek(0)
    self.tree = objectify.parse(self.in_buffer)
    self.obj_root = self.tree.getroot()

    if unprotect:
        self.unprotect()
def _decrypt(self, stream):
    super(KDB3File, self)._decrypt(stream)

    data = aes_cbc_decrypt(stream.read(), self.master_key,
                           self.header.EncryptionIV)
    data = unpad(data)

    if self.header.ContentHash == sha256(data):
        # put data in bytes io
        self.in_buffer = io.BytesIO(data)
        # set successful decryption flag
        self.opened = True
    else:
        raise IOError('Master key invalid.')
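# The unpad() helper used above is assumed here to strip PKCS#7-style CBC
# padding from the decrypted plaintext; unpad_sketch is a minimal illustration
# of such a helper (an assumption about its behaviour, not the library's
# implementation; Python 3 bytes indexing):
def unpad_sketch(data):
    pad_len = data[-1]        # last byte encodes the number of padding bytes
    return data[:-pad_len]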
def _make_master_key(self):
    """
    Make the master key by (1) combining the credentials to create
    a composite hash, (2) transforming the hash using the transform seed
    for a specific number of rounds and (3) finally hashing the result in
    combination with the master seed.
    """
    super(KDB3File, self)._make_master_key()
    # print "masterkey:", ''.join(self.keys).encode('hex')
    # composite = sha256(''.join(self.keys))
    # TODO python-keepass does not support keyfiles, there seems to be a
    #  different way to hash those keys in kdb3
    composite = self.keys[0]
    tkey = transform_key(composite,
                         self.header.MasterSeed2,
                         self.header.KeyEncRounds)
    self.master_key = sha256(self.header.MasterSeed + tkey)
def load_plain_keyfile(filename):
    """
    A "plain" keyfile is a file containing only the key.
    Any other file (JPEG, MP3, ...) can also be used as keyfile.
    """
    with open(filename, 'rb') as f:
        key = f.read()
        # if the length is 32 bytes we assume it is the key
        if len(key) == 32:
            return key
        # if the length is 64 bytes we assume the key is hex encoded
        if len(key) == 64:
            return codecs.decode(key, 'hex')
        # anything else may be a file to hash for the key
        return sha256(key)
    raise IOError('Could not read keyfile.')
def _write_header(self, stream):
    """Serialize the header fields from self.header into a byte stream, prefix
    with file signature and version before writing header and out-buffer
    to `stream`.

    Note that `stream` is flushed, but not closed!
    """
    # serialize header to stream
    header = bytearray()
    # write file signature
    header.extend(struct.pack('<II', *KDB4_SIGNATURE))
    # and version
    header.extend(struct.pack('<hh', 0, 3))

    field_ids = list(self.header.keys())
    field_ids.sort()
    field_ids.reverse()  # field_id 0 must be last
    for field_id in field_ids:
        value = self.header.b[field_id]
        length = len(value)
        header.extend(struct.pack('<b', field_id))
        header.extend(struct.pack('<h', length))
        header.extend(struct.pack('{}s'.format(length), value))

    # write header to stream
    stream.write(header)

    headerHash = base64.b64encode(sha256(header))
    self.obj_root.Meta.HeaderHash = headerHash

    # create HeaderHash if it does not exist
    if len(self.obj_root.Meta.xpath("HeaderHash")) < 1:
        etree.SubElement(self.obj_root.Meta, "HeaderHash")

    # reload out_buffer because we just changed the HeaderHash
    self.protect()
    self.out_buffer = io.BytesIO(self.pretty_print())

    # zip or not according to header setting
    if self.header.CompressionFlags == 1:
        self._zip()

    self._encrypt()

    # write encrypted block to stream
    stream.write(self.out_buffer)
    stream.flush()
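# Worked example of the header field layout emitted above (illustrative values,
# not taken from a real database): each field is a 1-byte field id, a 2-byte
# little-endian length, then the raw value bytes.
import struct

field_id, value = 4, b'\x00' * 32      # e.g. a 32-byte seed value
packed = struct.pack('<b', field_id) + struct.pack('<h', len(value)) + value
assert packed[:3] == b'\x04\x20\x00'   # id 4, length 32 (0x0020), value follows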
def load_plain_keyfile(filename):
    """
    A "plain" keyfile is a file containing only the key.
    Any other file (JPEG, MP3, ...) can also be used as keyfile.
    """
    with open(filename, 'rb') as f:
        key = f.read()
        # if the length is 32 bytes we assume it is the key
        if len(key) == 32:
            return key
        # if the length is 64 bytes and hexadecimal characters, we assume the
        # key is hex encoded
        try:
            int(key, 16)
            if len(key) == 64:
                return codecs.decode(key, 'hex')
        except ValueError:
            pass
        # anything else may be a file to hash for the key
        return sha256(key)
    raise IOError('Could not read keyfile.')
def _decrypt(self, stream):
    super(KDB3File, self)._decrypt(stream)

    if self.header.encryption_flags[self.header.Flags - 1] == 'AES':
        data = aes_cbc_decrypt(stream.read(), self.master_key,
                               self.header.EncryptionIV)
        data = unpad(data)
    elif self.header.encryption_flags[self.header.Flags - 1] == 'Twofish':
        data = twofish_cbc_decrypt(stream.read(), self.master_key,
                                   self.header.EncryptionIV)
        data = unpad(data)
    else:
        raise IOError('Unsupported encryption type: %s' %
                      self.header.encryption_flags.get(self.header['Flags'] - 1,
                                                       self.header['Flags'] - 1))

    if self.header.ContentHash == sha256(data):
        # put data in bytes io
        self.in_buffer = io.BytesIO(data)
        # set successful decryption flag
        self.opened = True
    else:
        raise IOError('Master key invalid.')
def convert_kdb3_to_kxml4(kdb3):
    "Convert given KDB3 to xml in v4 format."
    doc4 = lxml.etree.fromstring(u"""\
<KeePassFile>
  <Meta>
    <Generator>libkeepass (python)</Generator>
    <DatabaseName></DatabaseName>
    <DatabaseNameChanged></DatabaseNameChanged>
    <MemoryProtection>
      <ProtectTitle>False</ProtectTitle>
      <ProtectUserName>False</ProtectUserName>
      <ProtectPassword>True</ProtectPassword>
      <ProtectURL>False</ProtectURL>
      <ProtectNotes>False</ProtectNotes>
    </MemoryProtection>
  </Meta>
  <Root>
  </Root>
</KeePassFile>""")
    doc4.find('.//DatabaseName').text = 'converted'
    root = doc4.find('Root')

    group_id_map = {}
    root_group = {
        'group_id': 0,
        'title': 'Root',
        'icon': 48,
        'created': datetime.datetime(2999, 12, 28, 23, 59, 59),
        'modified': datetime.datetime(2999, 12, 28, 23, 59, 59),
        'accessed': datetime.datetime(2999, 12, 28, 23, 59, 59),
        'expires': datetime.datetime(2999, 12, 28, 23, 59, 59),
        'expanded': True,
        'level': -1,
    }
    for group in [root_group] + kdb3.groups:
        group = group.copy()
        if group['level'] == 0:
            group['groups'] = 0
        # Create the group uuid from the 32-bit unique group id. All group ids
        # within a file should be unique; however, it is possible that two
        # out-of-sync files have different groups with the same group id,
        # although this should be sufficiently improbable.
        # Use the first half of a cryptographic sha256 hash to uniquely map the
        # 32-bit (4-byte) group id into the 16-byte UUID space (a worked
        # example of this mapping follows the function).
        group['uuid'] = base64.b64encode(
            sha256(struct.pack("<L", group['group_id']))[:16]).decode('ascii')
        group_id_map[group['group_id']] = group['uuid']

        group['expire_valid'] = (group['expires'] != datetime.datetime(
            2999, 12, 28, 23, 59, 59))
        group['expanded'] = str(bool(group['expanded']))
        group['enable_auto_type'] = 'null'
        group['enable_searching'] = 'null'
        for k in ('uuid', 'title'):
            group[k] = escape(group[k])
        if group['title'] == 'Backup':
            # by default we don't want to search the Backup group, since these
            # were deleted entries
            group['enable_auto_type'] = 'False'
            group['enable_searching'] = 'False'
        groupEl = lxml.etree.fromstring(u"""\
<Group>
  <UUID>{uuid}</UUID>
  <Name>{title}</Name>
  <IconID>{icon}</IconID>
  <Times>
    <CreationTime>{created:%Y-%m-%dT%H:%M:%S}Z</CreationTime>
    <LastModificationTime>{modified:%Y-%m-%dT%H:%M:%S}Z</LastModificationTime>
    <LastAccessTime>{accessed:%Y-%m-%dT%H:%M:%S}Z</LastAccessTime>
    <ExpiryTime>{expires:%Y-%m-%dT%H:%M:%S}Z</ExpiryTime>
    <Expires>{expire_valid}</Expires>
  </Times>
  <IsExpanded>{expanded}</IsExpanded>
  <EnableAutoType>{enable_auto_type}</EnableAutoType>
  <EnableSearching>{enable_searching}</EnableSearching>
</Group>""".format(**group))
        # FIXME: We assume the v3 timestamps are in UTC, but this is almost
        # certainly not the case. Perhaps we should allow the user to specify.
        # Did the old KeePassX always use UTC anyway? Need to check.
        if 'groups' in group:
            # This is a sub-group
            g_parent_uuid = group_id_map[group['groups']]
            root.find(".//Group[UUID='%s']" % g_parent_uuid).append(groupEl)
        else:
            root.append(groupEl)

    for entry in kdb3.entries:
        entry = entry.copy()
        entry['uuid'] = escape(
            base64.b64encode(codecs.decode(entry['id'], 'hex')).decode('ascii'))
        entry['expire_valid'] = (entry['expires'] != datetime.datetime(
            2999, 12, 28, 23, 59, 59))
        entryEl = lxml.etree.fromstring(u"""\
<Entry>
  <UUID>{uuid}</UUID>
  <IconID>{icon}</IconID>
  <Times>
    <CreationTime>{created:%Y-%m-%dT%H:%M:%S}Z</CreationTime>
    <LastModificationTime>{modified:%Y-%m-%dT%H:%M:%S}Z</LastModificationTime>
    <LastAccessTime>{accessed:%Y-%m-%dT%H:%M:%S}Z</LastAccessTime>
    <ExpiryTime>{expires:%Y-%m-%dT%H:%M:%S}Z</ExpiryTime>
    <Expires>{expire_valid}</Expires>
  </Times>
</Entry>""".format(**entry))
        for k4 in ('Title', 'URL', 'UserName', 'Password', 'Notes'):
            k3 = k4.lower()
            stringEl = lxml.etree.fromstring(u"""\
<String>
  <Key>{key}</Key>
  <Value>{value}</Value>
</String>""".format(key=k4, value=escape(entry[k3])))
            entryEl.append(stringEl)
        if 'bin_desc' in entry and entry['bin_desc']:
            raise ValueError("Unexpected bin_desc '%s'. (%r)"
                             % (entry['bin_desc'], entry.get('binary', '')))
        g_parent_uuid = group_id_map[entry['group_id']]
        root.find(".//Group[UUID='%s']" % g_parent_uuid).append(entryEl)

    return doc4
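# Worked example of the group-id to UUID mapping used in the function above
# (group id 1 is an arbitrary illustration): pack the 32-bit id little-endian,
# SHA-256 it, keep the first 16 bytes and base64-encode them for the <UUID>
# element.
import base64
import hashlib
import struct

gid = 1
uuid_b64 = base64.b64encode(
    hashlib.sha256(struct.pack("<L", gid)).digest()[:16]).decode('ascii')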
def add_credentials(self, **credentials):
    if 'password' in credentials:
        self.add_key_hash(sha256(credentials['password'].encode('utf-8')))
    if 'keyfile' in credentials:
        self.add_key_hash(load_keyfile(credentials['keyfile']))
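# Hedged usage sketch tying add_credentials to the master-key derivation shown
# earlier in this section (kdb is an illustrative, already-constructed
# KDBFile-like object, not a documented entry point):
# kdb.add_credentials(password='secret', keyfile='image.jpg')
# kdb._make_master_key()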
def _reset_salsa(self):
    """Clear the salsa buffer and reset algorithm."""
    self._salsa_buffer = bytearray()
    self.salsa = Salsa20.new(sha256(self.header.ProtectedStreamKey),
                             KDB4_SALSA20_IV)
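# Minimal sketch of what this Salsa20 stream is typically used for, assuming
# PyCryptodome's Salsa20 and a fixed KeePass 2.x nonce (both assumptions, not
# the library's own classes): in-memory protected values are base64-encoded
# ciphertext, and a single keystream covers the whole document, so a fresh
# stream like this only decrypts the first protected value correctly; in
# practice the same stream object is advanced across all values in order.
import base64
import hashlib
from Crypto.Cipher import Salsa20 as PySalsa20

KDB4_SALSA20_IV = b'\xe8\x30\x09\x4b\x97\x20\x5d\x2a'   # assumed fixed nonce


def unprotect_value_sketch(protected_stream_key, protected_b64):
    stream = PySalsa20.new(key=hashlib.sha256(protected_stream_key).digest(),
                           nonce=KDB4_SALSA20_IV)
    return stream.decrypt(base64.b64decode(protected_b64))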