def rsa_public_from_der_certificate(certificate):
    """Extract the RSA public key from an X.509 certificate.

    Tries the scapy ``X509Cert`` attribute path first; on failure falls back
    to scanning the raw DER for the rsaEncryption OID.  Python 2 only
    (``basestring``, byte-valued ``str`` literals).

    :param certificate: either a scapy X509Cert object or raw DER bytes
    :return: an ``RSA`` key object holding the certificate's public key
    :raises ValueError: if no rsaEncryption OID is found in the DER fallback
    """
    # Extract subject_public_key_info field from X.509 certificate (see RFC3280)
    try:
        # try to extract pubkey from scapy.layers.x509 X509Cert type in case
        # der_certificate is of type X509Cert
        # Note: der_certificate may not be of type X509Cert if it wasn't
        # received completely, in that case, we'll try to extract it anyway
        # using the old method.
        # TODO: get rid of the old method and always expect X509Cert obj ?
        return RSA.importKey(str(certificate.tbsCertificate.subjectPublicKeyInfo))
    except AttributeError:
        pass
    # Fallback method, may pot. allow to extract pubkey from incomplete der streams
    cert = DerSequence()
    cert.decode(certificate)
    tbs_certificate = DerSequence()
    tbs_certificate.decode(cert[0])  # first DER SEQUENCE
    # search for pubkey OID: rsaEncryption: "1.2.840.113549.1.1.1"
    # hex: 06 09 2A 86 48 86 F7 0D 01 01 01
    subject_public_key_info = None
    for seq in tbs_certificate:
        if not isinstance(seq, basestring):
            continue  # skip numerics and non sequence stuff
        # NOTE(review): keeps scanning after a hit, so the LAST matching
        # sequence wins — presumably intentional; confirm against callers.
        if "\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01" in seq:
            subject_public_key_info = seq
    if subject_public_key_info is None:
        raise ValueError("could not find OID rsaEncryption 1.2.840.113549.1.1.1 in certificate")
    # Initialize RSA key
    return RSA.importKey(subject_public_key_info)
def from_key(cls, key, config, private_key=None):
    """Build an initialized identity from serialized RSA key material.

    :param key: serialized public key accepted by ``RSA.importKey``
    :param config: configuration object forwarded to the constructor
    :param private_key: optional serialized private key; ``None`` keeps the
        private half of the keypair empty
    :return: a new instance with its keypair set and state INITIALIZED
    """
    new_identity = cls(None, None, config)
    public = RSA.importKey(key)
    private = RSA.importKey(private_key) if private_key else None
    new_identity._keypair = (public, private)
    new_identity.state = IdentityState.INITIALIZED
    return new_identity
def testExportKey14(self):
    """DER envelope, PKCS#8, PKCS#8 encryption: an exported encrypted key
    must re-import to the same n, e and d components."""
    original = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv])
    encrypted_der = original.export_key('DER', 'test', pkcs=8)
    restored = RSA.importKey(encrypted_der, 'test')
    # Compare the private-key components one by one.
    for component in ('n', 'e', 'd'):
        self.assertEqual(getattr(original, component), getattr(restored, component))
def testExportKey12(self):
    """PEM envelope, PKCS#8, old PEM encryption: export and re-import the
    encrypted key — the components must match."""
    key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv])
    outkey = key.export_key('PEM', 'test', pkcs=8)
    # failUnless is deprecated (removed in Python 3.12); assertIn states the
    # substring check directly instead of find(...) != -1.
    self.assertIn('4,ENCRYPTED', tostr(outkey))
    self.assertIn('BEGIN PRIVATE KEY', tostr(outkey))
    inkey = RSA.importKey(outkey, 'test')
    self.assertEqual(key.n, inkey.n)
    self.assertEqual(key.e, inkey.e)
    self.assertEqual(key.d, inkey.d)
def testExportKey13(self):
    """PEM envelope, PKCS#8, PKCS#8 encryption: export and re-import the
    encrypted key — the components must match."""
    key = RSA.construct([self.n, self.e, self.d, self.p, self.q, self.pInv])
    outkey = key.export_key('PEM', 'test', pkcs=8,
                            protection='PBKDF2WithHMAC-SHA1AndDES-EDE3-CBC')
    # failUnless is deprecated (removed in Python 3.12); assertNotIn/assertIn
    # replace the find(...) == -1 / != -1 idiom.
    self.assertNotIn('4,ENCRYPTED', tostr(outkey))
    self.assertIn('BEGIN ENCRYPTED PRIVATE KEY', tostr(outkey))
    inkey = RSA.importKey(outkey, 'test')
    self.assertEqual(key.n, inkey.n)
    self.assertEqual(key.e, inkey.e)
    self.assertEqual(key.d, inkey.d)
def _load_key(self):
    """Populate the app private key and Alipay public key attributes.

    Each key is taken from the corresponding *_string attribute when set,
    otherwise read from the configured file path.
    """
    # App private key: inline string takes precedence over the file.
    private_pem = self._app_private_key_string
    if not private_pem:
        with open(self._app_private_key_path) as key_file:
            private_pem = key_file.read()
    self._app_private_key = RSA.importKey(private_pem)

    # Alipay public key: same precedence rule.
    public_pem = self._alipay_public_key_string
    if not public_pem:
        with open(self._alipay_public_key_path) as key_file:
            public_pem = key_file.read()
    self._alipay_public_key = RSA.importKey(public_pem)
def test_get(self):
    """The JWKS endpoint serves serialized keys for the current signing key
    plus every expired signing key, in that order."""
    current_key = RSA.generate(2048).exportKey('PEM')
    expired_keys = [RSA.generate(2048).exportKey('PEM'),
                    RSA.generate(2048).exportKey('PEM')]
    all_keys = [current_key] + expired_keys
    with override_settings(JWT_PRIVATE_SIGNING_KEY=current_key,
                           JWT_EXPIRED_PRIVATE_SIGNING_KEYS=expired_keys):
        response = self.client.get(reverse('jwks'))
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content)
        wanted = {'keys': [views.JwksView.serialize_rsa_key(k) for k in all_keys]}
        self.assertEqual(payload, wanted)
def sign_transaction(self, sender, recipient, amount):
    """Sign sender+recipient+amount with this wallet's private key.

    :return: the PKCS#1 v1.5 signature as an ASCII hex string
    """
    private = RSA.importKey(binascii.unhexlify(self.private_key))
    signer = PKCS1_v1_5.new(private)
    # The signed message is the plain concatenation of the three fields.
    message = str(sender) + str(recipient) + str(amount)
    digest = SHA256.new(message.encode('utf8'))
    return binascii.hexlify(signer.sign(digest)).decode('ascii')
def generate_jwks(self, mode):
    """Return this entity's JWKS as a JSON string, shaped by behavior flags.

    Behaviors (from ``self.behavior_type``):
      * ``rotenc``    — mint fresh RSA/EC encryption keys (kid includes a
        timestamp) and append the current signing keys.
      * ``nokid1jwk`` — return a single signing key matching the requested
        algorithm, with its ``kid`` stripped.
      * otherwise     — serialize every key in the keyjar.

    :param mode: dict; only ``mode["sign_alg"]`` is read (nokid1jwk branch)
    :raises Exception: nokid1jwk requested but no matching signing key found
    """
    if "rotenc" in self.behavior_type:  # Rollover encryption keys
        rsa_key = RSAKey(kid="rotated_rsa_{}".format(time.time()),
                         use="enc").load_key(RSA.generate(2048))
        ec_key = ECKey(kid="rotated_ec_{}".format(time.time()),
                       use="enc").load_key(P256)
        keys = [rsa_key.serialize(private=True),
                ec_key.serialize(private=True)]
        new_keys = {"keys": keys}
        #self.do_key_rollover(new_keys, "%d")
        # Keep publishing the unchanged signing keys alongside the new enc keys.
        signing_keys = [k.to_dict() for k in self.keyjar.get_signing_key()]
        new_keys["keys"].extend(signing_keys)
        return json.dumps(new_keys)
    elif "nokid1jwk" in self.behavior_type:
        alg = mode["sign_alg"]
        if not alg:
            alg = "RS256"
        keys = [k.to_dict() for kb in self.keyjar[""] for k in list(kb.keys())]
        for key in keys:
            # kty prefix match, e.g. "RS256"[:2] == "RS" matches kty "RSA".
            if key["use"] == "sig" and key["kty"].startswith(alg[:2]):
                key.pop("kid", None)
                jwk = dict(keys=[key])
                return json.dumps(jwk)
        raise Exception(
            "Did not find sig {} key for nokid1jwk test ".format(alg))
    else:  # Return all keys
        keys = [k.to_dict() for kb in self.keyjar[""] for k in list(kb.keys())]
        jwks = dict(keys=keys)
        return json.dumps(jwks)
def __call__(self):
    """Perform a key rollover for the entity.

    Marks the key identified by ``op_args["old_kid"]`` inactive, generates a
    replacement (RSA or EC per ``op_args["new_key"]``), gives it
    ``op_args["new_kid"]``, and rewrites the JWKS file at
    ``op_args["jwks_path"]`` with only the still-active keys.

    :raises Exception: if the requested key type is neither RSA nor EC
    """
    keyjar = self.conv.entity.keyjar
    # Keep a copy so the rollover can be inspected/undone later.
    self.conv.entity.original_keyjar = keyjar.copy()

    # invalidate the old key
    old_kid = self.op_args["old_kid"]
    old_key = keyjar.get_key_by_kid(old_kid)
    old_key.inactive_since = time.time()

    # setup new key
    key_spec = self.op_args["new_key"]
    typ = key_spec["type"].upper()
    if typ == "RSA":
        kb = KeyBundle(keytype=typ, keyusage=key_spec["use"])
        kb.append(RSAKey(use=key_spec["use"]).load_key(
            RSA.generate(key_spec["bits"])))
    elif typ == "EC":
        kb = ec_init(key_spec)
    else:
        raise Exception('Wrong key type')

    # add new key to keyjar with
    # the requested kid (bundle holds exactly the one new key).
    list(kb.keys())[0].kid = self.op_args["new_kid"]
    keyjar.add_kb("", kb)

    # make jwks and update file — publish only keys not marked inactive.
    keys = []
    for kb in keyjar[""]:
        keys.extend(
            [k.to_dict() for k in list(kb.keys()) if not k.inactive_since])
    jwks = dict(keys=keys)
    with open(self.op_args["jwks_path"], "w") as f:
        f.write(json.dumps(jwks))
def setUp(self):
    """Load the Wycheproof RSA PKCS#1 v1.5 signature test vectors.

    Parses ``rsa_signature_test.json`` and builds ``self.tv``, a list of
    simple objects carrying the key, hash module, message, signature and
    expected validity of each test case.
    """
    comps = "Cryptodome.SelfTest.Signature.test_vectors.wycheproof".split(".")
    with open(pycryptodome_filename(comps, "rsa_signature_test.json"), "rt") as file_in:
        tv_tree = json.load(file_in)

    class TestVector(object):
        pass

    # Map JSON hash names to modules.  Raising ValueError replaces the
    # original `assert False`, which silently passes under `python -O`.
    hash_modules = {"SHA-256": SHA256, "SHA-224": SHA224, "SHA-1": SHA1}
    self.tv = []
    for group in tv_tree['testGroups']:
        key = RSA.import_key(group['keyPem'])
        hash_name = group['sha']
        if hash_name not in hash_modules:
            raise ValueError("Unsupported hash in test vectors: %s" % hash_name)
        hash_module = hash_modules[hash_name]
        assert group['type'] == "RSASigVer"
        for test in group['tests']:
            tv = TestVector()
            tv.id = test['tcId']
            tv.comment = test['comment']
            # msg/sig are hex-encoded in the JSON file.
            for attr in 'msg', 'sig':
                setattr(tv, attr, unhexlify(test[attr]))
            tv.key = key
            tv.hash_module = hash_module
            tv.valid = test['result'] != "invalid"
            tv.warning = test['result'] == "acceptable"
            self.tv.append(tv)
def importKey(self, key, passphrase):
    """Import an RSA key protected by *passphrase*, storing it on self.

    :raises ValueError: with a user-facing message when the passphrase is wrong
    """
    processed = self.getProcessedPassphrase(passphrase)
    try:
        imported = RSA.importKey(key, processed)
    except ValueError:
        # Re-raise with a message suitable for display to the user.
        raise ValueError(u"Wrong current password")
    self.key = imported
    return imported
def setUp(self):
    """Load a fixed 2048-bit RSA private key fixture.

    The PEM below is an unencrypted PKCS#8 test key; ``self.rsa`` holds the
    imported key object for use by the test methods.
    """
    self.pem_priv_key = """-----BEGIN PRIVATE KEY-----
MIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQDDLrmt4lKRpm6P
2blptwJsa1EBuxuuAayLjwNqKGvm5c1CAUEa/NtEpUMM8WYKRDwxzakUIGI/BdP3
NOEMphcs5+OekgJLhzoSdtAIrXPy8JIidENZE6FzCJ2b6fHU5O4hoNvv1Bx5yoZr
HVaWJIZMRRocJJ0Nf9oMaU8IE6m6OdBzQHEwcnL2/a8Q3VxstHufzjILmaZD9WL+
6AESlQMKZPNQ+Xd7d4nvnVkY4ZV46tA+KvADGuotgovQwG+uiyQoGRrQUms21vHF
zIvd3G9OCiyCTCHSyfsE3g7tks33NZ8O8gF8xa9OmU9TQPwwAyUr6JQXz0CW77o7
Cr9LpHuNAgMBAAECggEBAJRbMbtfqc8XqDYjEfGur2Lld19Pb0yl7RbvD3NjYhDR
X2DqPyhaRfg5fWubGSp4jyBz6C5qJwMsVN80DFNm83qoj7T52lC6aoOaV6og3V8t
SIZzxLUyXKdpRxM5kR13HSHmeQYkPbi9HcrRM/1PqdzTMXNuyQl3wq9oZDAJchsf
fmoh080htkaxhEb1bMXa2Lj7j2OIkHOsQeIu6BdbxIKRPIT+zrcklE6ocW8fTWAS
Qi3IZ1FYLL+fs6TTxjx0VkC8QLaxWxY0pqTiwS7ndZiZKc3l3ARuvRk8buP+X3Jg
BD86FQ18OXZC9boMbDbzv2cOLtdkq5pS3lJE4F9gjYECgYEA69ukU2pNWot2OPwK
PuPwAXWNrvnvFzQgIc0qOiCmgKJU6wqunlop4Bx5XmetHExVyJVBEhaHoDr0F3Rs
gt8IclKDsWGXoVcgfu3llMimiZ05hOf/XtcGTCZwZenMQ30cFh4ZRuUu7WCZ9tqO
28P8jCXB3IcaRpRnNvVvmCr5NXECgYEA09nUzRW993SlohceRW2C9fT9HZ4BaPWO
5wVlnoo5mlUfAyzl+AGT/WlKmrn/1gAHIznQJ8ZIABQvPaBXhvkANXZP5Ie0lObw
jA7qFuKt7yV4GGlDnU1MOLh+acABMQBGSx8BJDaomH7glTiPEPTZjoP6wfAsd1uv
Knjt7jH2ad0CgYEAx9ghknRd+rx0fbBBVix4riPW20324ihOmZVnlD0aF6B0Z3tz
ncUz+irmQ7GBIpsjjIO60QK6BHAvZrhFQVaNp6B26ZORkSlr5WDZyImDYtMPa6fP
36I+OcPQNOo3I3Acnjj+ne2PJ59Ula92oIudr3pGmv72qpsQIacw2TSAWGECgYEA
sdNAN+HPMn68ZaGoLDjvW8uIB6tQnay5hhvWn8yA65YV0RGH+7Q/Z9BQ6i3EnPor
A5uMqUZbu4011jHYJpiuXzHvf/GVWAO92KLQReOCgqHd/Aen1MtEdrwOiG+90Ebd
ukLNL3ud61tc4oS2OlJ8p48LFm2mtY3FLA6UEYPoxhUCgYEAtsfWIGnBh7XC+HwI
2higSgN92VpJHSPOyOi0aG/u5AEQ+fsCUIi3KakxzvmiGMAEvWItkKyz2Gu8smtn
2HVsGxI5UW7aLw9s3qe8kyMSfUk6pGamVhJUQmDr77+5zEzykPBxwGwDwdeR43CR
xVgf/Neb/avXgIgi6drj8dp1fWA=
-----END PRIVATE KEY-----
"""
    self.rsa = RSA.importKey(self.pem_priv_key)
def create_and_store_rsa_key_pair(name="pyoidc", path=".", size=2048):
    """
    Generate an RSA key pair and (optionally) store it on disk.

    :param name: Name of the key file; falsy name skips writing files
    :param path: Path to where the key files are stored
    :param size: RSA key size
    :return: RSA key
    """
    rsa_key = RSA.generate(size)

    # Create the target directory; Python 2 lacks exist_ok.
    if sys.version_info[0] > 2:
        os.makedirs(path, exist_ok=True)
    else:
        try:
            os.makedirs(path)
        except OSError:
            # assume this is because it already exists
            pass

    if name:
        private_path = os.path.join(path, name)
        with open(private_path, 'wb') as key_file:
            key_file.write(rsa_key.exportKey('PEM'))
        public_path = os.path.join(path, '{}.pub'.format(name))
        with open(public_path, 'wb') as key_file:
            key_file.write(rsa_key.publickey().exportKey('PEM'))

    return rsa_key
def encryptDataWithPubKey(data, pubKeyFile=RSA_pubKeyFile, outFile=None):
    """Hybrid-encrypt *data*: AES-EAX with a random session key, the session
    key itself wrapped with the recipient's RSA public key (PKCS1-OAEP).

    :param data: plaintext bytes
    :param pubKeyFile: path to the recipient's RSA public key (PEM)
    :param outFile: if given, write [wrapped key | nonce | tag | ciphertext]
    """
    from Cryptodome.PublicKey import RSA
    from Cryptodome.Random import get_random_bytes
    from Cryptodome.Cipher import AES, PKCS1_OAEP

    with open(pubKeyFile) as key_file:
        recipient_key = RSA.import_key(key_file.read())
    session_key = get_random_bytes(16)

    # Encrypt the session key with the public RSA key
    cipher_rsa = PKCS1_OAEP.new(recipient_key)

    # Encrypt the data with the AES session key
    cipher_aes = AES.new(session_key, AES.MODE_EAX)
    ciphertext, tag = cipher_aes.encrypt_and_digest(data)
    print()
    print(ciphertext)
    print()
    print(tag)
    print()

    if outFile:
        # BUG FIX: the nonce lives on the cipher object (cipher_aes.nonce),
        # not on the ciphertext bytes — `ciphertext.nonce` raised
        # AttributeError.  Also close the file deterministically.
        with open(outFile, "wb") as file_out:
            file_out.write(cipher_rsa.encrypt(session_key))
            for chunk in (cipher_aes.nonce, tag, ciphertext):
                file_out.write(chunk)
def testImportKey12(self):
    """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM"""
    der_blob = asn1.DerSequence([17, 3]).encode()
    pem_blob = der2pem(der_blob)
    imported = RSA.importKey(pem_blob)
    # Modulus and exponent must round-trip through the PEM wrapping.
    self.assertEqual(17, imported.n)
    self.assertEqual(3, imported.e)
def read(self):
    """Load the identity's RSA keypair from disk.

    :raises IdentityStateError: when a load is already in progress
    :raises IdentityNotInitialized: when either key file is missing; the
        state is reset to UNINITIALIZED in that case
    """
    if self.state == IdentityState.INITIALIZING:
        raise IdentityStateError()

    both_exist = (os.path.exists(self.id_rsa_path)
                  and os.path.exists(self.id_rsa_pub_path))
    if not both_exist:
        self.state = IdentityState.UNINITIALIZED
        raise IdentityNotInitialized()

    with open(self.id_rsa_pub_path, "rb") as handle:
        loaded_public = RSA.importKey(handle.read())
    with open(self.id_rsa_path, "rb") as handle:
        loaded_private = RSA.importKey(handle.read())
    self._keypair = (loaded_public, loaded_private)
    self.state = IdentityState.INITIALIZED
def testVerify1(self):
    """Each stored vector's ciphertext must decrypt (with the sentinel) to
    the expected plaintext."""
    for vector in self._testData:
        # Build the key from the serialized form in the vector.
        rsa_key = RSA.importKey(vector[0])
        decryptor = PKCS.new(rsa_key)
        recovered = decryptor.decrypt(t2b(vector[2]), "---")
        self.assertEqual(recovered, b(vector[1]))
def test_import_key_windows_cr_lf(self):
    """A PEM key rewritten with Windows CRLF line endings must import to the
    same key components."""
    crlf_pem = "\r\n".join(self.rsaKeyPEM.splitlines())
    imported = RSA.importKey(crlf_pem)
    for component in ('n', 'e', 'd', 'p', 'q'):
        self.assertEqual(getattr(imported, component), getattr(self, component))
def handle(self, *args, **options):
    """Create a 2048-bit RSA key, persist it as an RSAKey row, and report
    the resulting kid (or the failure) on stdout."""
    try:
        key = RSA.generate(2048)
        rsakey = RSAKey(key=key.exportKey('PEM').decode('utf8'))
        rsakey.save()
        self.stdout.write(u'RSA key successfully created with kid: {0}'.format(rsakey.kid))
    except Exception as e:
        # Fixed message grammar ("Something goes wrong" -> "went wrong").
        self.stdout.write('Something went wrong: {0}'.format(e))
def test_dj_usage():
    """Sign a payload as an RS256 compact JWS and check the parsed header
    advertises that algorithm."""
    pem_text = open(full_path("./size2048.key"), 'r').read()
    rsa_key = RSA.importKey(pem_text)
    message = "Please take a moment to register today"
    # kid is the md5 of the PEM text, matching how the key was provisioned.
    signing_keys = [RSAKey(key=rsa_key, kid=md5(pem_text.encode('utf-8')).hexdigest())]
    compact = JWS(message, alg='RS256').sign_compact(signing_keys)
    parsed = factory(compact)
    assert parsed.jwt.headers['alg'] == 'RS256'
def test_import_rsa_key():
    """Importing the fixture PEM yields an RsaKey whose JWK dict carries the
    expected members and the known modulus/exponent values."""
    _ckey = RSA.importKey(open(full_path(KEY), 'r').read())
    assert isinstance(_ckey, RsaKey)
    djwk = jwk_wrap(_ckey).to_dict()
    print(djwk)
    # Private key, so p/q/d appear alongside the public members.
    assert _eq(djwk.keys(), ["kty", "e", "n", "p", "q", "d"])
    assert djwk[
        "n"] == '5zbNbHIYIkGGJ3RGdRKkYmF4gOorv5eDuUKTVtuu3VvxrpOWvwnFV-NY0LgqkQSMMyVzodJE3SUuwQTUHPXXY5784vnkFqzPRx6bHgPxKz7XfwQjEBTafQTMmOeYI8wFIOIHY5i0RWR-gxDbh_D5TXuUqScOOqR47vSpIbUH-nc'
    assert djwk['e'] == 'AQAB'
def testImportKey1(self):
    """Verify import of RSAPrivateKey DER SEQUENCE"""
    key = RSA.importKey(self.rsaKeyDER)
    # failUnless is deprecated (removed in Python 3.12); use assertTrue.
    self.assertTrue(key.has_private())
    self.assertEqual(key.n, self.n)
    self.assertEqual(key.e, self.e)
    self.assertEqual(key.d, self.d)
    self.assertEqual(key.p, self.p)
    self.assertEqual(key.q, self.q)
def testImportKey10(self):
    """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with
    PEM"""
    key = RSA.importKey(self.rsaKeyPEM8)
    # failUnless is deprecated (removed in Python 3.12); use assertTrue.
    self.assertTrue(key.has_private())
    self.assertEqual(key.n, self.n)
    self.assertEqual(key.e, self.e)
    self.assertEqual(key.d, self.d)
    self.assertEqual(key.p, self.p)
    self.assertEqual(key.q, self.q)
def testImportKey3bytes(self):
    """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte
    string"""
    key = RSA.importKey(b(self.rsaKeyPEM))
    # assertTrue states the intent directly instead of assertEqual(..., True).
    self.assertTrue(key.has_private())
    self.assertEqual(key.n, self.n)
    self.assertEqual(key.e, self.e)
    self.assertEqual(key.d, self.d)
    self.assertEqual(key.p, self.p)
    self.assertEqual(key.q, self.q)
def create_managed_rsa_key(self, length):
    """
    Create an RSA key with a given length.

    Basically the same as oidc_provider.creatersakey but with configurable
    key length; the new key is also registered as a ManagedRSAKey.
    """
    generated = RSA.generate(length)
    stored_key = RSAKey.objects.create(key=generated.exportKey('PEM').decode('utf8'))
    ManagedRSAKey.objects.create(rsakey=stored_key, created_at=timezone.now())
    self.stdout.write('Created new key of length {0} with id: {1}'.format(length, stored_key))
def get_rsa_b64_from_der(public_key_der: bytes) -> bytes:
    """Get base64 encoded RSA from public key DER sequence.

    Layout before encoding: [len(e) as one byte][e bytes][n bytes].
    """
    rsa_key = RSA.importKey(public_key_der)
    modulus_bytes = Cryptodome.Util.number.long_to_bytes(rsa_key.n)
    exponent_bytes = Cryptodome.Util.number.long_to_bytes(rsa_key.e)
    # One-byte length prefix for the exponent, then exponent, then modulus.
    blob = bytearray([len(exponent_bytes)])
    blob += exponent_bytes
    blob += modulus_bytes
    return base64.b64encode(blob)
def der2rsa(der):
    """Extract the RSA public key from a DER-encoded X.509 certificate.

    :param der: raw DER bytes of a certificate
    :return: an ``RSA`` key object built from subjectPublicKeyInfo
    """
    # Extract subjectPublicKeyInfo field from X.509 certificate (see RFC3280)
    cert = DerSequence()
    cert.decode(der)
    tbs_certificate = DerSequence()
    tbs_certificate.decode(cert[0])
    # NOTE(review): index 6 presumably assumes a fixed tbsCertificate layout
    # (version, serial, sigalg, issuer, validity, subject, SPKI) — this breaks
    # for certificates without the optional version field; confirm inputs.
    subject_public_key_info = tbs_certificate[6]
    # Initialize RSA key
    return RSA.importKey(subject_public_key_info)
def testDecrypt1(self):
    """Verify decryption using all test vectors."""
    for row in self._testData:
        # Build the key from the hex-encoded n, e, d components (Python 2:
        # `long` is the arbitrary-precision integer constructor here).
        components = [long(rws(row[0][name]), 16) for name in ('n', 'e', 'd')]
        rsa_key = RSA.construct(components)
        # The real test: decrypt with the vector's parameters.
        decryptor = PKCS.new(rsa_key, row[4])
        plaintext = decryptor.decrypt(t2b(row[2]))
        self.assertEqual(plaintext, t2b(row[1]))
def testImportKey8(self):
    """Verify import of encrypted PrivateKeyInfo DER SEQUENCE"""
    for t in self.rsaKeyEncryptedPEM:
        key = RSA.importKey(t[1], t[0])
        # failUnless is deprecated (removed in Python 3.12); use assertTrue.
        self.assertTrue(key.has_private())
        self.assertEqual(key.n, self.n)
        self.assertEqual(key.e, self.e)
        self.assertEqual(key.d, self.d)
        self.assertEqual(key.p, self.p)
        self.assertEqual(key.q, self.q)
def __load_rsa_keys(self):
    """Restore the persisted RSA key from the MSL data directory."""
    raw_key = self.load_file(
        msl_data_path=self.kodi_helper.msl_data_path,
        filename='rsa_key.bin')
    self.rsa_key = RSA.importKey(raw_key)
def receive(ext):
    """Reassemble messages from packet files in SENDER_DIR and write each
    complete, verified message to RECEIVER_DIR.

    :param ext: file extension (without dot) selecting which packet files
        to read, e.g. ``"txt"`` or ``"png"``
    """
    print('Waiting for message')
    print('Reading public key from {}'.format(CONFIG_DIR))
    print('Using configuration: file_ext = {}'.format(ext))
    with open(os.path.join(CONFIG_DIR, 'public_key'), 'r') as f:
        public_key = RSA.import_key(f.read())
    if not os.path.exists(RECEIVER_DIR):
        # BUG FIX: the original format string had no {} placeholder, so the
        # directory name was silently dropped from the message.
        print('Creating directory: {}'.format(RECEIVER_DIR))
        os.makedirs(RECEIVER_DIR)
    messages_dict = dict()  # type: Dict[str, MessageBuilder]
    print('Looking for files in {}'.format(SENDER_DIR))
    for filename in os.listdir(SENDER_DIR):
        _, file_ext = os.path.splitext(filename)
        if file_ext[1:] != ext:
            print(' Skipping file with extension {}: {}'.format(
                file_ext, filename))
            continue
        print(' Reading packet from file: {}'.format(filename))
        filepath = os.path.join(SENDER_DIR, filename)
        try:
            if file_ext == '.txt':
                content = extract_content_from_text_file(filepath)
            elif file_ext == '.png':
                content = extract_content_from_image(filepath)
            else:
                # BUG FIX: for any other extension `content` stayed unbound
                # and Packet.deserialize raised NameError; skip explicitly.
                print(' No parser for extension {}, skipping file {}'.format(
                    file_ext, filename))
                continue
        except Exception as ex:
            print(' Error parsing contents, skipping file {}'.format(
                filename))
            continue
        packet = Packet.deserialize(content)
        if packet.message_hash not in messages_dict:
            messages_dict[packet.message_hash] = MessageBuilder(
                packet.message_hash, packet.message_signature,
                packet.total_chunks, public_key)
        messages_dict[packet.message_hash].add_packet(packet)
    print('Looking for complete messages from the packets received')
    for message_hash, builder in messages_dict.items():
        if not builder.is_complete():
            print(
                "Skipping message '{}' because not all packets have been received"
                .format(message_hash))
            continue
        message = builder.build()
        outfile = os.path.join(RECEIVER_DIR, message_hash)
        print("Writing message '{}' to {}".format(message_hash, outfile))
        with open(outfile, 'w') as f:
            f.write(message.message)
def verify_transaction(transaction):
    """Check the transaction's PKCS#1 v1.5 signature.

    The sender field doubles as the hex-encoded public key; the signed
    message is the concatenation sender+recipient+amount.

    :return: True when the signature verifies, False otherwise
    """
    sender_key = RSA.importKey(binascii.unhexlify(transaction.sender))
    checker = PKCS1_v1_5.new(sender_key)
    signed_text = (str(transaction.sender) + str(transaction.recipient) +
                   str(transaction.amount)).encode('utf8')
    digest = SHA256.new(signed_text)
    return checker.verify(digest, binascii.unhexlify(transaction.signature))
from lti_consumer.lti_1p3.constants import LTI_1P3_CONTEXT_TYPE from lti_consumer.lti_1p3.consumer import LtiConsumer1p3, LtiAdvantageConsumer from lti_consumer.lti_1p3.ags import LtiAgs from lti_consumer.lti_1p3 import exceptions # Variables required for testing and verification ISS = "http://test-platform.example/" OIDC_URL = "http://test-platform/oidc" LAUNCH_URL = "http://test-platform/launch" CLIENT_ID = "1" DEPLOYMENT_ID = "1" NONCE = "1234" STATE = "ABCD" # Consider storing a fixed key RSA_KEY_ID = "1" RSA_KEY = RSA.generate(2048).export_key('PEM') # Test classes @ddt.ddt class TestLti1p3Consumer(TestCase): """ Unit tests for LtiConsumer1p3 """ def setUp(self): super(TestLti1p3Consumer, self).setUp() # Set up consumer self.lti_consumer = LtiConsumer1p3( iss=ISS, lti_oidc_url=OIDC_URL,
def merge(self, data, peer_ip, c, size_bypass=False, wait=False, revert=False):
    """
    Checks and merge the tx list in out mempool
    :param data:
    :param peer_ip:
    :param c:
    :param size_bypass: if True, will merge whatever the mempool size is
    :param wait: if True, will wait until the main db_lock is free. if False, will just drop.
    :param revert: if True, we are reverting tx from digest_block, so main lock is on. Don't bother, process without lock.
    :return:
    """
    global REFUSE_OLDER_THAN
    # Easy cases of empty or invalid data
    if not data:
        return "Mempool from {} was empty".format(peer_ip)
    mempool_result = []
    if data == '*':
        raise ValueError("Connection lost")
    try:
        # Peers that misbehaved recently are frozen for a while; their merges
        # are ignored until the freeze expires.
        if self.peers_sent[peer_ip] > time.time(
        ) and peer_ip != '127.0.0.1':
            self.app_log.warning(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            mempool_result.append(
                "Mempool ignoring merge from frozen {}".format(peer_ip))
            return mempool_result
    except:
        # unknown peer
        pass
    if not essentials.is_sequence(data):
        if peer_ip != '127.0.0.1':
            with self.peers_lock:
                self.peers_sent[peer_ip] = time.time() + 10 * 60
            self.app_log.warning(
                "Freezing mempool from {} for 10 min - Bad TX format".
                format(peer_ip))
        mempool_result.append("Bad TX Format")
        return mempool_result
    if not revert:
        while self.db_lock.locked():
            # prevent transactions which are just being digested from being added to mempool
            if not wait:
                # not reverting, but not waiting, bye
                # By default, we don't wait.
                mempool_result.append("Locked ledger, dropping txs")
                return mempool_result
            self.app_log.warning(
                "Waiting for block digestion to finish before merging mempool"
            )
            time.sleep(1)
    # if reverting, don't bother with main lock, go on.
    # Let's really dig
    mempool_result.append(
        "Mempool merging started from {}".format(peer_ip))
    # Single time reference here for the whole merge.
    time_now = time.time()
    # calculate current mempool size before adding txs
    mempool_size = self.size()
    # TODO: we check main ledger db is not locked before beginning, but we don't lock? ok, see comment in node.py. since it's called from a lock, it would deadlock.
    # merge mempool
    # while self.lock.locked():
    #    time.sleep(1)
    with self.lock:
        try:
            block_list = data
            if not isinstance(
                    block_list[0], list
            ):  # convert to list of lists if only one tx and not handled
                block_list = [block_list]
            for transaction in block_list:
                if size_bypass or self.space_left_for_tx(
                        transaction, mempool_size):
                    # all transactions in the mempool need to be cycled to check for special cases,
                    # therefore no while/break loop here
                    mempool_timestamp = '%.2f' % (quantize_two(
                        transaction[0]))
                    mempool_timestamp_float = float(
                        transaction[0])  # limit Decimal where not needed
                    mempool_address = str(transaction[1])[:56]
                    mempool_recipient = str(transaction[2])[:56]
                    mempool_amount = '%.8f' % (quantize_eight(
                        transaction[3]))  # convert scientific notation
                    mempool_amount_float = float(transaction[3])
                    mempool_signature_enc = str(transaction[4])[:684]
                    mempool_public_key_hashed = str(transaction[5])[:1068]
                    if "b'" == mempool_public_key_hashed[:2]:
                        mempool_public_key_hashed = transaction[5][2:1070]
                    mempool_operation = str(transaction[6])[:30]
                    mempool_openfield = str(transaction[7])[:100000]
                    # Begin with the easy tests that do not require cpu or disk access
                    if mempool_amount_float < 0:
                        mempool_result.append(
                            "Mempool: Negative balance spend attempt")
                        continue
                    if not essentials.address_validate(mempool_address):
                        mempool_result.append(
                            "Mempool: Invalid address {}".format(
                                mempool_address))
                        continue
                    if not essentials.address_validate(mempool_recipient):
                        mempool_result.append(
                            "Mempool: Invalid recipient {}".format(
                                mempool_recipient))
                        continue
                    if mempool_timestamp_float > time_now:
                        mempool_result.append(
                            "Mempool: Future transaction rejected {}s".
                            format(mempool_timestamp_float - time_now))
                        continue
                    if mempool_timestamp_float < time_now - REFUSE_OLDER_THAN:
                        # don't accept old txs, mempool needs to be harsher than ledger
                        mempool_result.append(
                            "Mempool: Too old a transaction")
                        continue
                    # Then more cpu heavy tests
                    hashed_address = hashlib.sha224(
                        base64.b64decode(
                            mempool_public_key_hashed)).hexdigest()
                    if mempool_address != hashed_address:
                        mempool_result.append(
                            "Mempool: Attempt to spend from a wrong address {} instead of {}"
                            .format(mempool_address, hashed_address))
                        continue
                    # Crypto tests - more cpu hungry
                    try:
                        essentials.validate_pem(mempool_public_key_hashed)
                    except ValueError as e:
                        # NOTE(review): no `continue` here — a PEM validation
                        # failure is recorded but the tx is still processed;
                        # confirm whether that is intentional.
                        mempool_result.append(
                            "Mempool: Public key does not validate: {}".
                            format(e))
                    # recheck sig
                    try:
                        mempool_public_key = RSA.importKey(
                            base64.b64decode(mempool_public_key_hashed))
                        mempool_signature_dec = base64.b64decode(
                            mempool_signature_enc)
                        verifier = PKCS1_v1_5.new(mempool_public_key)
                        tx_signed = (mempool_timestamp, mempool_address,
                                     mempool_recipient, mempool_amount,
                                     mempool_operation, mempool_openfield)
                        my_hash = SHA.new(str(tx_signed).encode("utf-8"))
                        if not verifier.verify(my_hash,
                                               mempool_signature_dec):
                            mempool_result.append(
                                "Mempool: Wrong signature ({}) for data {} in mempool insert attempt"
                                .format(mempool_signature_enc, tx_signed))
                            continue
                    except Exception as e:
                        mempool_result.append(
                            "Mempool: Unexpected error checking sig: {}".
                            format(e))
                        continue
                    # Only now, process the tests requiring db access
                    mempool_in = self.sig_check(mempool_signature_enc)
                    # Temp: get last block for HF reason
                    essentials.execute_param_c(
                        c,
                        "SELECT block_height FROM transactions WHERE 1 ORDER by block_height DESC limit ?",
                        (1, ), self.app_log)
                    last_block = c.fetchone()[0]
                    # reject transactions which are already in the ledger
                    # TODO: not clean, will need to have ledger as a module too.
                    # TODO: need better txid index, this is very sloooooooow
                    essentials.execute_param_c(
                        c,
                        "SELECT timestamp FROM transactions WHERE signature = ?",
                        (mempool_signature_enc, ), self.app_log)
                    ledger_in = bool(c.fetchone())
                    # remove from mempool if it's in both ledger and mempool already
                    if mempool_in and ledger_in:
                        try:
                            # Do not lock, we already have the lock for the whole merge.
                            self.execute(SQL_DELETE_TX,
                                         (mempool_signature_enc, ))
                            self.commit()
                            mempool_result.append(
                                "Mempool: Transaction deleted from our mempool"
                            )
                        except:  # experimental try and except
                            mempool_result.append(
                                "Mempool: Transaction was not present in the pool anymore"
                            )
                        continue
                    if ledger_in:
                        mempool_result.append(
                            "That transaction is already in our ledger")
                        # Can be a syncing node. Do not request mempool from this peer until FREEZE_MIN min
                        # ledger_in is the ts of the tx in ledger. if it's recent, maybe the peer is just one block late.
                        # give him 3 minute margin.
                        if (peer_ip != '127.0.0.1') and (
                                ledger_in < time_now - 60 * 3):
                            with self.peers_lock:
                                self.peers_sent[peer_ip] = time.time(
                                ) + FREEZE_MIN * 60
                            self.app_log.warning(
                                "Freezing mempool from {} for {} min.".
                                format(peer_ip, FREEZE_MIN))
                        # Here, we point blank stop processing the batch from this host since it's outdated.
                        # Update: Do not, since it blocks further valid tx - case has been found in real use.
                        # return mempool_result
                        continue
                    # Already there, just ignore then
                    if mempool_in:
                        mempool_result.append(
                            "That transaction is already in our mempool")
                        continue
                    # Here we covered the basics, the current tx is conform and signed. Now let's check balance.
                    # verify balance
                    mempool_result.append(
                        "Mempool: Received address: {}".format(
                            mempool_address))
                    # include mempool fees
                    result = self.fetchall(
                        "SELECT amount, openfield, operation FROM transactions WHERE address = ?",
                        (mempool_address, ))
                    debit_mempool = 0
                    if result:
                        for x in result:
                            debit_tx = quantize_eight(x[0])
                            fee = quantize_eight(
                                essentials.fee_calculate(
                                    x[1], x[2], last_block))
                            debit_mempool = quantize_eight(debit_mempool +
                                                           debit_tx + fee)
                    credit = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        credit = quantize_eight(credit) + quantize_eight(
                            entry[0])
                    debit_ledger = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT amount FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        debit_ledger = quantize_eight(
                            debit_ledger) + quantize_eight(entry[0])
                    debit = debit_ledger + debit_mempool
                    fees = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT fee FROM transactions WHERE address = ?",
                            (mempool_address, ), self.app_log):
                        fees = quantize_eight(fees) + quantize_eight(
                            entry[0])
                    rewards = 0
                    for entry in essentials.execute_param_c(
                            c,
                            "SELECT sum(reward) FROM transactions WHERE recipient = ?",
                            (mempool_address, ), self.app_log):
                        rewards = quantize_eight(rewards) + quantize_eight(
                            entry[0])
                    balance = quantize_eight(
                        credit - debit - fees + rewards -
                        quantize_eight(mempool_amount))
                    balance_pre = quantize_eight(credit - debit_ledger -
                                                 fees + rewards)
                    fee = essentials.fee_calculate(mempool_openfield,
                                                   mempool_operation,
                                                   last_block)
                    if quantize_eight(mempool_amount) > quantize_eight(
                            balance_pre
                    ):  #mp amount is already included in "balance" var! also, that tx might already be in the mempool
                        mempool_result.append(
                            "Mempool: Sending more than owned")
                        continue
                    if quantize_eight(balance) - quantize_eight(fee) < 0:
                        mempool_result.append(
                            "Mempool: Cannot afford to pay fees")
                        continue
                    # Pfew! we can finally insert into mempool - all is str, type converted and enforced above
                    self.execute(
                        "INSERT INTO transactions VALUES (?,?,?,?,?,?,?,?,?)",
                        (mempool_timestamp, mempool_address,
                         mempool_recipient, mempool_amount,
                         mempool_signature_enc, mempool_public_key_hashed,
                         mempool_operation, mempool_openfield,
                         int(time_now)))
                    mempool_result.append(
                        "Mempool updated with a received transaction from {}"
                        .format(peer_ip))
                    mempool_result.append("Success")
                    self.commit(
                    )  # Save (commit) the changes to mempool db
                    mempool_size += sys.getsizeof(
                        str(transaction)) / 1000000.0
                else:
                    mempool_result.append(
                        "Local mempool is already full for this tx type, skipping merging"
                    )
                    # self.app_log.warning("Local mempool is already full for this tx type, skipping merging")
            # TEMP
            # print("Mempool insert", mempool_result)
            return mempool_result
            # TODO: Here maybe commit() on c to release the write lock?
        except Exception as e:
            self.app_log.warning("Mempool: Error processing: {} {}".format(
                data, e))
            if self.config.debug_conf == 1:
                raise
        return mempool_result
def encrypt(self, msg):
    """RSA-OAEP encrypt *msg* with the stored public key and return the
    ciphertext bytes."""
    rsa_key = RSA.importKey(self.publickey)
    return PKCS1_OAEP.new(rsa_key).encrypt(msg)
def __generate_ssh_key_pair(self):
    """Create a fresh 2048-bit RSA pair: PEM private key and OpenSSH-format
    public key."""
    keypair = RSA.generate(2048)
    return {
        "pub_key": keypair.publickey().exportKey("OpenSSH"),
        "priv_key": keypair.exportKey("PEM"),
    }
def import_key(extern_key):
    """Thin wrapper: parse *extern_key* (PEM/DER/OpenSSH) into an RSA key
    object via RSA.importKey."""
    parsed_key = RSA.importKey(extern_key)
    return parsed_key
def key_generator(secret_phrase, menuEncoded):
    """Generate a fresh 2048-bit RSA key pair and delegate to the exporter."""
    fresh_pair = RSA.generate(2048)
    return key_exporter(secret_phrase, fresh_pair, menuEncoded)
def key_importer(secret_phrase, menuEncoded):
    """Restore the server key pair from disk and delegate to the signer.

    Reads the passphrase-protected private key (DER) and the public key
    (PEM).  Files are now opened via context managers so the handles are
    closed deterministically (the originals were left open).
    """
    with open("Serverprivatekey.der", "rb") as private_file:
        prikey_bytes = private_file.read()
    restored_keypair = RSA.import_key(prikey_bytes, passphrase=secret_phrase)
    with open("Serverpublickey.pem", "r") as public_file:
        restored_pubkey = RSA.import_key(public_file.read())
    return signer(restored_keypair, restored_pubkey, menuEncoded)
def generate_key_pair(bits=2048):
    """Return (private_pem, public_openssh) strings for a new RSA key.

    :param bits: modulus size in bits (default 2048)
    """
    keypair = RSA.generate(bits)
    pem_private = keypair.exportKey('PEM').decode('utf-8')
    ssh_public = keypair.publickey().exportKey('OpenSSH').decode('utf-8')
    return pem_private, ssh_public
def process_connection(conn, ip_addr, MAX_BUFFER_SIZE):
    """Serve one client connection.

    Two commands are handled: GET_MENU (send the AES-encrypted menu file,
    with the AES key wrapped via RSA-OAEP) and END_DAY (receive encrypted
    closing data, verify its RSA signature, and save it to disk).

    :param conn: connected socket
    :param ip_addr: client IP, used in the saved result filename
    :param MAX_BUFFER_SIZE: recv/read chunk size in bytes
    """
    blk_count = 0
    net_bytes = conn.recv(MAX_BUFFER_SIZE)
    while net_bytes != b'':
        if blk_count == 0:
            # The command is carried in the first 15 bytes of the first block.
            usr_cmd = net_bytes[0:15].decode("utf8").rstrip()
            if cmd_GET_MENU in usr_cmd:
                src_file = open(default_menu, "rb")
                while True:
                    read_bytes = src_file.read(MAX_BUFFER_SIZE)
                    if read_bytes == b'':
                        break
                    # generating AES key to encrypt menu
                    print(f'{"":-^40}')
                    print(f'{"Generating AES key...": ^40}')
                    key = get_random_bytes(keysize)
                    print(f'{"Done!": ^40}')
                    print(f'{"":-^40}')
                    print(f'{"Encrypting data...": ^40}')
                    cipher = AES.new(key, AES.MODE_ECB)
                    encrypted_menu = cipher.encrypt(pad(
                        read_bytes, BLOCK_SIZE))
                    print(f'{"Done!": ^40}')
                    print(f'{"":-^40}')
                    # import client public key to encrypt AES key
                    print(f'{"Encrypting AES key...": ^40}')
                    time.sleep(10)
                    pubkey_bytes = open("../deployment-files/AESpublickey.pem", "r").read()
                    pubkey = RSA.import_key(pubkey_bytes)
                    cipher = PKCS1_OAEP.new(pubkey)
                    encrypted_key = cipher.encrypt(key)
                    print(f'{"Done!": ^40}')
                    print(f'{"":-^40}')
                    conn.send(encrypted_key)
                    time.sleep(3)
                    conn.send(encrypted_menu)  # sending menu_today.txt data to client
                    src_file.close()
                    print(f'{"Processed SEND menu": ^40}')
                    print(f'{"":-^40}')
                    # NOTE(review): returns after the first chunk — presumably
                    # the menu fits in one MAX_BUFFER_SIZE read; confirm.
                    return
            elif cmd_END_DAY in usr_cmd:
                # generating RSA key pair
                print(f'{"":-^40}')
                print(f'{"Generating RSA key pair...": ^40}')
                aes_rsa_keypair = RSA.generate(2048)
                print(f'{"Done!": ^40}')
                print(f'{"":-^40}')
                # exporting public key for AES encryption
                print(f'{"Exporting the public key...": ^40}')
                AESpubkey = aes_rsa_keypair.publickey().exportKey()
                try:
                    open("../deployment-files/AESpublickey2.pem", "wb").write(AESpubkey)
                    print(f'{"Done!": ^40}')
                except:
                    print(f'{"Oops! Failed to export the public key": ^40}')
                    print(f'{"":-^40}')
                    sys.exit(-1)
                # receive key and closing info
                time.sleep(3)
                closing_key = conn.recv(8192)
                time.sleep(3)
                closing_info = conn.recv(4096)
                # decrypting key and closing info
                closing_cipher = PKCS1_OAEP.new(aes_rsa_keypair)
                decrypt_closekey = closing_cipher.decrypt(closing_key)
                endday_cipher = AES.new(decrypt_closekey, AES.MODE_ECB)
                decrypted_closing = unpad(endday_cipher.decrypt(closing_info),
                                          BLOCK_SIZE)
                time.sleep(7)
                # importing public key for digital signature
                print(f'{"":-^40}')
                print(f'{"Getting public key...": ^40}')
                DSpubkey_bytes = open("../deployment-files/DSpublickey.pem", "r").read()
                DSpubkey = RSA.import_key(DSpubkey_bytes)
                print(f'{"Done!": ^40}')
                print(f'{"":-^40}')
                print(f'{"Verifying the Signature...": ^40}')
                print(f'{"":-^40}')
                verifier = pkcs1_15.new(DSpubkey)
                try:
                    time.sleep(3)
                    signature = conn.recv(9126)
                    # generating digest
                    digest = SHA256.new(decrypted_closing)
                    # verifying signature
                    verifier.verify(digest, signature)
                    print(f'{"The signature is valid!": ^40}')
                    print(f'{"":-^40}')
                    now = datetime.datetime.now()
                    filename = "../source-files/results/" + default_save_base + ip_addr + "-" + now.strftime(
                        "%Y-%m-%d_%H%M")
                    dest_file = open(filename, "wb")
                    dest_file.write(decrypted_closing)
                except:
                    print(f'{"Oh no! The signature is invalid!": ^40}')
                    print(f'{"":-^40}')
            blk_count += 1
        else:
            net_bytes = conn.recv(MAX_BUFFER_SIZE)
    # NOTE(review): dest_file is only bound when an END_DAY signature
    # verified — this line raises NameError/UnboundLocalError otherwise;
    # confirm intended control flow.
    dest_file.close()
    print(f'{"Processed CLOSING": ^40}')
    print(f'{"":-^40}')
def test_decode_works_1(self):
    """Round-trip a freshly generated RSA key through jsonpickle."""
    original = RSA.generate(2048)
    restored = jsonpickle.decode(jsonpickle.encode(original))
    self.assertEqual(original, restored)
async def validate(self, match, sig_ptrs: SignaturePtrs) -> bool:
    """Validate a packet's signature against the key node this checker guards.

    Returns True only when the packet is signed, its KeyLocator resolves to
    ``self.key`` in the name tree, the key data can be fetched, and the
    SHA256-with-RSA or SHA256-with-ECDSA signature verifies over the covered
    parts. Every failure path logs a reason and returns False.

    :param match: name-tree match object for the packet being validated.
    :param sig_ptrs: parsed signature pointers of the packet.
    :return: True when the signature verifies, False otherwise.
    """
    # Check key name
    if sig_ptrs.signature_info is None or sig_ptrs.signature_info.key_locator is None:
        logging.info(f'{Name.to_str(match.name)} => Not signed')
        return False
    key_name = sig_ptrs.signature_info.key_locator.name
    if not key_name:
        logging.info(f'{Name.to_str(match.name)} => Not signed')
        return False
    key_match = match.root.match(key_name)
    if key_match.node is not self.key:
        logging.info(
            f'{Name.to_str(match.name)} => The key name {Name.to_str(key_name)} mismatch'
        )
        return False
    # Optional caller-supplied constraint between data env and key env
    if self.subject_to and not self.subject_to(match.env, key_match.env):
        logging.info(
            f'{Name.to_str(match.name)} => The key name {Name.to_str(key_name)} mismatch'
        )
        return False
    # Get key_bits
    try:
        key_bits, _ = await key_match.need(must_be_fresh=True, can_be_prefix=True)
    except (NetworkError, InterestNack, InterestTimeout) as e:
        logging.info(
            f'{Name.to_str(match.name)} => Unable to fetch the key {Name.to_str(key_name)} due to {e}'
        )
        return False
    except ValidationFailure:
        logging.info(
            f'{Name.to_str(match.name)} => The key {Name.to_str(key_name)} cannot be verified'
        )
        return False
    # Import key
    sig_type = sig_ptrs.signature_info.signature_type
    key_bits = bytes(key_bits)
    try:
        if sig_type == SignatureType.SHA256_WITH_RSA:
            pub_key = RSA.import_key(key_bits)
            verifier = pkcs1_15.new(pub_key)
        elif sig_type == SignatureType.SHA256_WITH_ECDSA:
            pub_key = ECC.import_key(key_bits)
            verifier = DSS.new(pub_key, 'fips-186-3', 'der')
        else:
            logging.info(
                f'{Name.to_str(match.name)} => Unrecognized signature type {sig_type}'
            )
            return False
    except (ValueError, IndexError, TypeError):
        # key bits could not be parsed as a key of the claimed type
        logging.info(
            f'{Name.to_str(match.name)} => The key {Name.to_str(key_name)} is malformed'
        )
        return False
    # Verify signature over all covered parts of the packet
    h = SHA256.new()
    for content in sig_ptrs.signature_covered_part:
        h.update(content)
    try:
        verifier.verify(h, bytes(sig_ptrs.signature_value_buf))
    except ValueError:
        logging.info(
            f'{Name.to_str(match.name)} => Unable to verify the signature'
        )
        return False
    logging.debug(f'{Name.to_str(match.name)} => Verification passed')
    return True
def new_keys(keysize):
    """Generate a fresh RSA key pair of *keysize* bits.

    :param keysize: modulus size in bits.
    :return: (public_key, private_key) tuple.
    """
    rng = Random.new().read
    private_key = RSA.generate(keysize, rng)
    return private_key.publickey(), private_key
def parse_pem(cls, pem: str) -> "RsaKey":
    """Build an RsaKey from a PEM-encoded public key.

    :param pem: text with a "-----BEGIN PUBLIC KEY-----" header.
    :return: instance carrying the modulus and public exponent.
    """
    parsed = RSA.import_key(pem)
    return cls(parsed.n, parsed.e)
from Cryptodome.Cipher import AES, PKCS1_OAEP

"""
https://www.pycryptodome.org/en/latest/src/examples.html
"""

'''
Generate an RSA key
The following code generates a new RSA key pair (secret) and saves it into a
file, protected by a password. We use the scrypt key derivation function to
thwart dictionary attacks. At the end, the code prints our RSA public key in
ASCII/PEM format:
'''
secret_code = "Unguessable"
key = RSA.generate(2048)
# Private key is exported encrypted (PKCS#8 + scrypt KDF + AES-128-CBC),
# so the file on disk is useless without the passphrase.
encrypted_key = key.export_key(passphrase=secret_code, pkcs=8,
                               protection="scryptAndAES128-CBC")
# Fix: use a context manager so the file handle is flushed and closed even
# if a later statement raises (the original closed it only at the end).
with open("rsa_key.bin", "wb") as file_out:
    file_out.write(encrypted_key)
print(key.publickey().export_key())
'''
The following code reads the private RSA key back in, and then prints again
the public key:
'''
secret_code = "Unguessable"
# NOTE: Get server's certificate # sslSocket.getpeercert return DER certificate # but we are using PEM certificate here, so need to convert it server_cert = ssl.DER_cert_to_PEM_cert(conn.getpeercert(True)) print('server\'s certificate: \n', server_cert) #cert is the encrypted certificate int this format -----BEGIN -----END x509 = crypto.load_certificate(crypto.FILETYPE_PEM, server_cert) public_key = crypto.dump_publickey(crypto.FILETYPE_PEM, x509.get_pubkey()) print('public key', public_key) # NOTE: cipher session key will be used message = b'To be encrypted' print('cipher session text', message) h = SHA.new(message) # NOTE: RSA encryption with server's public key key = RSA.importKey(public_key) cipher = PKCS1_v1_5.new(key) ciphertext = cipher.encrypt(message+h.digest()) print('ciphertext', ciphertext) conn.send(ciphertext) data = conn.recv(4096) print('received data', data) conn.close()
def decrypt(self, cipher):
    """Decrypt *cipher* with this object's RSA private key.

    :param cipher: RSA-OAEP ciphertext bytes.
    :return: recovered plaintext decoded as str.
    """
    rsa_key = RSA.importKey(self.privatekey)
    plaintext = PKCS1_OAEP.new(rsa_key).decrypt(cipher)
    return plaintext.decode()
def __init__(self, key: bytes):
    """Import an RSA key and prepare an OAEP cipher around it.

    :param key: RSA key material (PEM/DER bytes).
    """
    rsa_key = RSA.import_key(key)
    self._key = rsa_key
    self._rsa = PKCS1_OAEP.new(rsa_key)
def private_to_public_key(pk_file):
    """Derive the PEM-encoded public key from a private-key file.

    :param pk_file: path to a file containing an RSA private key.
    :return: public key as PEM bytes.
    """
    # Fix: the original opened the file without ever closing it (leaked
    # handle); 'with' guarantees closure even if importKey raises.
    with open(pk_file, 'r') as f:
        pk = RSA.importKey(f.read())
    return pk.publickey().exportKey('PEM')
def import_rsa_key_from_file(filename, passphrase=None):
    """Read *filename* and import its contents as an RSA key.

    :param filename: path to a PEM/DER key file.
    :param passphrase: optional passphrase for encrypted keys.
    :return: imported RSA key object.
    """
    # Fix: dropped the dead `content = None` pre-initialization - the `with`
    # block always binds `content` before it is used.
    with open(filename, 'r') as f:
        content = f.read()
    return RSA.importKey(content, passphrase=passphrase)
# Ask the user for credentials via the start dialog; abort when cancelled.
if start_dialog.ok_pressed:
    client_name = start_dialog.client_name.text()
    client_passwd = start_dialog.client_passwd.text()
    # NOTE(review): logging the password in clear text is a security risk.
    client_log.debug(f'Using USERNAME = {client_name}, PASSWD = {client_passwd}.')
else:
    sys.exit(0)

# Write the startup parameters to the log.
client_log.info(
    f'Запущен клиент с параметрами: адрес сервера: {server_address} , порт: {server_port}, имя пользователя: {client_name}')

# Load the RSA key pair from file; generate a new pair when the file is missing.
dir_path = os.getcwd()
key_file = os.path.join(dir_path, f'{client_name}.key')
if not os.path.exists(key_file):
    keys = RSA.generate(2048, os.urandom)
    with open(key_file, 'wb') as key:
        key.write(keys.export_key())
else:
    with open(key_file, 'rb') as key:
        keys = RSA.import_key(key.read())

# !!!keys.publickey().export_key()
client_log.debug("Keys sucsessfully loaded.")

# Create the client database object.
database = ClientDatabase(client_name)

# Create the transport object and start the transport thread.
try:
    transport = ClientTransport(
        server_port,
        server_address,
        # NOTE(review): chunk is truncated here - the remaining
        # ClientTransport arguments are outside this view.
def test_sign_message(self):
    """sign_message must return the known-good signature for the fixture key."""
    rsa_key = RSA.importKey(PRIVKEY_DATA)
    # Patch key loading so no real file is read from /keydir.
    with patch("salt.crypt.get_rsa_key", return_value=rsa_key):
        produced = salt.crypt.sign_message("/keydir/keyname.pem", MSG)
        self.assertEqual(SIG, produced)
import binascii
import unittest

from Cryptodome.Hash import SHA256
from Cryptodome.PublicKey import RSA
from Cryptodome.Signature import PKCS1_v1_5

from base.transaction import Transaction

# Fixture key pairs loaded once at import time.
# NOTE(review): the .pem paths are relative to the current working directory,
# so these tests only run from the directory holding the key files.
genesisPublicKey = RSA.import_key(open('genesis_public.pem', 'r').read())
genesisPrivateKey = RSA.import_key(open('genesis_private.pem', 'r').read())
alicePublicKey = RSA.import_key(open('alice_public.pem', 'r').read())
alicePrivateKey = RSA.import_key(open('alice_private.pem', 'r').read())
bobPublicKey = RSA.import_key(open('bob_public.pem', 'r').read())
bobPrivateKey = RSA.import_key(open('bob_private.pem', 'r').read())


def sign(privateKey, tx, index):
    # Takes msg and sk and outputs signature for msg
    # (PKCS#1 v1.5 signature over the SHA-256 of the tx input at *index*)
    hashMsg = SHA256.new(tx.getRawDataToSign(index))
    signer = PKCS1_v1_5.new(privateKey)
    signature = signer.sign(hashMsg)
    return signature


def verify(publicKey, signature, msg):
    # Takes msg public key and signature and returns boolean
    # (signature is hex-encoded; unhexlify restores the raw bytes)
    hashMsg = SHA256.new(msg)
    verifier = PKCS1_v1_5.new(publicKey)
    try:
        verifier.verify(hashMsg, binascii.unhexlify(signature))
        # NOTE(review): chunk truncated here - the except handler and the
        # boolean returns of verify() are outside this view.
def prepare_shared_folder_add(params, folders):
    """Build the list of API requests needed to create the given folders,
    making the leaf of each path a shared folder and granting the requested
    user/team permissions.

    :param params: session object carrying folder/team caches, data key, user.
    :param folders: folder descriptors with .path, .permissions and the
        default manage/edit/share flags.
    :return: list of 'folder_add' / 'shared_folder_update' request dicts.
    """
    # Index existing folders by md5("<lowercased name>|<parent uid>") so path
    # components can be matched without walking the tree each time.
    folder_hash = {}
    for f_uid in params.folder_cache:
        fol = params.folder_cache[f_uid]
        h = hashlib.md5()
        hs = '{0}|{1}'.format((fol.name or '').lower(), fol.parent_uid or '')
        h.update(hs.encode())
        shared_folder_key = None
        if fol.type in {
                BaseFolderNode.SharedFolderType,
                BaseFolderNode.SharedFolderFolderType
        }:
            sf_uid = fol.shared_folder_uid if fol.type == BaseFolderNode.SharedFolderFolderType else fol.uid
            if sf_uid in params.shared_folder_cache:
                shared_folder_key = params.shared_folder_cache[sf_uid][
                    'shared_folder_key_unencrypted']
        folder_hash[h.hexdigest()] = f_uid, fol.type, shared_folder_key
    # public keys
    # Collect the e-mail addresses of all non-self permission grantees, then
    # fetch their public keys in one 'public_keys' call.
    emails = {}
    for fol in folders:
        if fol.permissions:
            for perm in fol.permissions:
                if perm.name not in emails:
                    _, email = parseaddr(perm.name)
                    if email:
                        if email != params.user:
                            emails[email.lower()] = None
    if emails:
        request = {'command': "public_keys", 'key_owners': list(emails.keys())}
        try:
            rs = api.communicate(params, request)
            if 'public_keys' in rs:
                for pk in rs['public_keys']:
                    if 'public_key' in pk:
                        emails[pk['key_owner']] = pk['public_key']
        except Exception as e:
            # best-effort: grantees without a resolvable key are skipped later
            logging.debug(e)
    shared_folder_add = []
    for fol in folders:
        skip_folder = False
        parent_uid = ''
        parent_type = ''
        parent_key = None
        comps = list(path_components(fol.path))
        for i in range(len(comps)):
            comp = comps[i]
            h = hashlib.md5()
            hs = '{0}|{1}'.format(comp.lower(), parent_uid)
            h.update(hs.encode())
            digest = h.hexdigest()
            is_last = False
            if i == len(comps) - 1:
                is_last = True
            if digest not in folder_hash:
                # Path component does not exist yet - create it. Only the
                # final component becomes a shared folder.
                folder_uid = api.generate_record_uid()
                request = {'command': 'folder_add', 'folder_uid': folder_uid}
                folder_type = 'shared_folder' if is_last else 'user_folder'
                request['folder_type'] = folder_type
                encryption_key = params.data_key
                folder_key = os.urandom(32)
                request['key'] = api.encrypt_aes(folder_key, encryption_key)
                if parent_uid:
                    request['parent_uid'] = parent_uid
                if folder_type == 'shared_folder':
                    request['name'] = api.encrypt_aes(comp.encode('utf-8'), folder_key)
                data = {'name': comp}
                request['data'] = api.encrypt_aes(
                    json.dumps(data).encode('utf-8'), folder_key)
                shared_folder_add.append(request)
                parent_uid = folder_uid
                parent_type = folder_type
                parent_key = folder_key
                folder_hash[
                    digest] = folder_uid, folder_type, folder_key if folder_type == 'shared_folder' else None
            else:
                # Component already exists - reuse it, but bail out when its
                # type conflicts with what this position in the path requires.
                parent_uid, parent_type, parent_key = folder_hash[digest]
                if is_last:
                    skip_folder = parent_type != 'shared_folder'
                else:
                    skip_folder = parent_type != 'user_folder'
                if skip_folder:
                    break
        if not skip_folder and parent_type == 'shared_folder':
            # Grant permissions on the (existing or just-created) shared folder.
            request = {
                'command': 'shared_folder_update',
                'operation': 'update',
                'pt': 'Commander',
                'shared_folder_uid': parent_uid,
                'force_update': True,
                'default_manage_users': fol.manage_users,
                'default_manage_records': fol.manage_records,
                'default_can_edit': fol.can_edit,
                'default_can_share': fol.can_share
            }
            if fol.permissions:
                for perm in fol.permissions:
                    # A grantee is a team when its uid is in the team cache,
                    # or when its name is not parseable as an e-mail address.
                    is_team = False
                    if perm.uid and params.team_cache:
                        is_team = perm.uid in params.team_cache
                    else:
                        _, email = parseaddr(perm.name)
                        if not email:
                            is_team = True
                            if is_team:
                                # resolve the team uid by (case-insensitive) name
                                perm.uid = None
                                for team in params.team_cache:
                                    if team['name'].lower() == perm.name.lower():
                                        perm.uid = team['team_uid']
                                        break
                    if is_team:
                        if perm.uid and perm.uid in params.team_cache:
                            if 'add_teams' not in request:
                                request['add_teams'] = []
                            team = params.team_cache[perm.uid]
                            request['add_teams'].append({
                                'team_uid': perm.uid,
                                'manage_users': perm.manage_users,
                                'manage_records': perm.manage_records,
                                'shared_folder_key':
                                    api.encrypt_aes(parent_key, team['team_key_unencrypted'])
                            })
                    else:
                        if 'add_users' not in request:
                            request['add_users'] = []
                        email = perm.name.lower()
                        if email == params.user.lower():
                            # own account: wrap the folder key with the data key
                            request['add_users'].append({
                                'username': email,
                                'manage_users': perm.manage_users,
                                'manage_records': perm.manage_records,
                                'shared_folder_key':
                                    api.encrypt_aes(parent_key, params.data_key)
                            })
                        elif email in emails:
                            public_key = emails[email]
                            if public_key:
                                try:
                                    # key arrives base64url without padding; '=='
                                    # pads it enough for any length
                                    rsa_key = RSA.importKey(
                                        base64.urlsafe_b64decode(public_key + '=='))
                                    request['add_users'].append({
                                        'username': email,
                                        'manage_users': perm.manage_users,
                                        'manage_records': perm.manage_records,
                                        'shared_folder_key':
                                            api.encrypt_rsa(parent_key, rsa_key)
                                    })
                                except:
                                    # NOTE(review): bare except silently drops
                                    # grantees with malformed keys.
                                    pass
            shared_folder_add.append(request)
    return shared_folder_add
from .params import RestApiContext
from .error import KeeperApiError, CommunicationError
from . import APIRequest_pb2 as proto
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import AES, PKCS1_v1_5

# Client version string sent with API requests.
CLIENT_VERSION = 'c14.0.0'

# Pinned server RSA public keys, keyed by server key id. The values are
# base64url-encoded DER; keys are decoded once at import time.
# NOTE(review): `base64` is used here but its import is not visible in this
# chunk - presumably imported earlier in the file; confirm.
SERVER_PUBLIC_KEYS = {
    1: RSA.importKey(
        base64.urlsafe_b64decode(
            'MIIBCgKCAQEA9Z_CZzxiNUz8-npqI4V10-zW3AL7-M4UQDdd_17759Xzm0MOEfH' +
            'OOsOgZxxNK1DEsbyCTCE05fd3Hz1mn1uGjXvm5HnN2mL_3TOVxyLU6VwH9EDInn' +
            'j4DNMFifs69il3KlviT3llRgPCcjF4xrF8d4SR0_N3eqS1f9CBJPNEKEH-am5Xb' +
            '_FqAlOUoXkILF0UYxA_jNLoWBSq-1W58e4xDI0p0GuP0lN8f97HBtfB7ijbtF-V' +
            'xIXtxRy-4jA49zK-CQrGmWqIm5DzZcBvUtVGZ3UXd6LeMXMJOifvuCneGC2T2uB' +
            '6G2g5yD54-onmKIETyNX0LtpR1MsZmKLgru5ugwIDAQAB')),
    2: RSA.importKey(
        base64.urlsafe_b64decode(
            'MIIBCgKCAQEAkOpym7xC3sSysw5DAidLoVF7JUgnvXejbieDWmEiD-DQOKxzfQq' +
            'YHoFfeeix__bx3wMW3I8cAc8zwZ1JO8hyB2ON732JE2Zp301GAUMnAK_rBhQWmY' +
            'KP_-uXSKeTJPiuaW9PVG0oRJ4MEdS-t1vIA4eDPhI1EexHaY3P2wHKoV8twcGvd' +
            'WUZB5gxEpMbx5CuvEXptnXEJlxKou3TZu9uwJIo0pgqVLUgRpW1RSRipgutpUsl' +
            'BnQ72Bdbsry0KKVTlcPsudAnnWUtsMJNgmyQbESPm-aVv-GzdVUFvWKpKkAxDpN' +
            'ArPMf0xt8VL2frw2LDe5_n9IMFogUiSYt156_mQIDAQAB')),
    3: RSA.importKey(
        base64.urlsafe_b64decode(
            # NOTE(review): chunk truncated here - the base64 payload of key 3
            # (and any further keys) is outside this view.
# Magic byte signatures used to recognize firmware/boot image headers.
RKFW_MAGIC = b"RKFW"
RKFW_BOOT_MAGIC = b"BOOT"
ANDROID_BOOT_MAGIC = b"ANDROID!"

# secrets
# secret used to decrypt system update files (not app updates)
SYSTEM_SECRET = unhexlify("8704bc739081954c06411f6d8e531c37")
# used to encrypt console's DNA (serial #) for generating update requests
REQUEST_SECRET = unhexlify("9d7a196d7c461eb558ce9d2a29bc5d08")
# RockChip RC4 key
RKFW_KEY = unhexlify("7c4e0304550509072d2c7b38170d1711")

# this is used to verify system updates
# NOTE(review): `assert` is stripped under `python -O`; raising an explicit
# exception would keep the check in optimized runs.
assert isfile(PUBLIC_KEY_FILE), "Public key doesn't exist"
with open(PUBLIC_KEY_FILE, "r") as f:
    RSA_PUB_KEY = RSA.import_key(f.read())
RSA_PUB_BITS = RSA_PUB_KEY.size_in_bits()
RSA_PUB_BYTES = RSA_PUB_KEY.size_in_bytes()

# this is used to sign the console's DNA (serial #) for update requests
assert isfile(PRIVATE_KEY_FILE), "Private key doesn't exist"
with open(PRIVATE_KEY_FILE, "r") as f:
    RSA_PRV_KEY = RSA.import_key(f.read())
RSA_PRV_BITS = RSA_PRV_KEY.size_in_bits()
RSA_PRV_BYTES = RSA_PRV_KEY.size_in_bytes()

# enums
class RKFW_ChipID(IntEnum):
    # RockChip SoC identifiers as they appear in the RKFW header.
    # NOTE(review): more members may follow beyond this chunk - confirm.
    RK3066 = 0x60
    RK3188 = 0x70
# NOTE(review): this chunk starts mid-function - the `def` line of the
# enclosing encrypt_private_key(...) is outside this view.
    encryptor = PKCS1_OAEP.new(private_key)
    encrypted_msg = encryptor.encrypt(a_message)
    print(encrypted_msg)
    # base64 so the ciphertext survives text transports
    encoded_encrypted_msg = base64.b64encode(encrypted_msg)
    print(encoded_encrypted_msg)
    return encoded_encrypted_msg


def decrypt_public_key(encoded_encrypted_msg, public_key):
    # Base64-decode then OAEP-decrypt the message.
    # NOTE(review): OAEP decryption normally requires a *private* key; the
    # demo below passes `pub` here and `priv` to the "encrypt" helper, so the
    # names and roles appear swapped - verify this actually runs.
    encryptor = PKCS1_OAEP.new(public_key)
    decoded_encrypted_msg = base64.b64decode(encoded_encrypted_msg)
    print(decoded_encrypted_msg)
    decoded_decrypted_msg = encryptor.decrypt(decoded_encrypted_msg)
    print(decoded_decrypted_msg)
    #return decoded_decrypted_msg


# Demo driver: load both keys and round-trip a short message.
pub = RSA.import_key(pub_pem.strip())
priv = RSA.import_key(priv_pem.strip())
encryptor = PKCS1_OAEP.new(pub)
decryptor = PKCS1_OAEP.new(priv)
res = encrypt_private_key("hello".encode('utf-8'), priv)
print(res)
res = decrypt_public_key(res, pub).decode('utf-8')
print(res)
exit(0)
#encryptor = pkcs1_15.new(priv)
#decryptor = pkcs1_15.new(pub)
def test_encode_works_1(self):
    """jsonpickle must encode a freshly generated RSA key without raising."""
    rsa_key = RSA.generate(2048)
    encoded = jsonpickle.encode(rsa_key)