def print_contents(contents: list, parents: list = None, line_pre_txt=''):
    """Recursively print a tree of WBH items to stdout.

    Directories are printed with their size/item-count/ID and then recursed
    into; files are printed with their size/chunk-count/ID.

    Args:
        contents: list of WBHItem entries to print.
        parents: path components of the ancestors; ``None`` (default) means
            the tree root.  (Fix: was a mutable default argument ``[]``,
            which is shared across calls.)
        line_pre_txt: text prefixed to every printed line.
    """
    if parents is None:
        parents = []
    depth_space = len(parents) * 2 * ' '
    item: WBHItem
    for item in contents:
        if item.is_dir:
            msg = f"{line_pre_txt}{depth_space}D {os.path.join(*parents, item.filename)}"
            msg += ", Size: {}".format(sizeof_fmt(item.size))
            if hasattr(item, 'items_count'):
                msg += ", Items: {}".format(item.items_count)
            msg += ", ID: {}".format(item.id)
            print(msg)
            _parents = list(parents)
            _parents.append(item.filename)
            # Children may live under `children` or `items` depending on the
            # item's origin — check both, preferring `children`.
            if hasattr(item, 'children'):
                print_contents(item.children, parents=_parents, line_pre_txt=line_pre_txt)
            elif hasattr(item, 'items'):
                print_contents(item.items, parents=_parents, line_pre_txt=line_pre_txt)
        else:
            msg = f"{line_pre_txt}{depth_space}📄 {os.path.join(*parents, item.filename)}"
            msg += ", Size: {}".format(sizeof_fmt(item.size))
            if hasattr(item, 'chunks_count'):
                msg += ", Chunks: {}".format(item.chunks_count)
            msg += ", ID: {}".format(item.id)
            print(msg)
def dl_progress_folder_update(self, wrote_size: int, total_size: int):
    """Update the folder download progress bar with bytes written so far.

    Args:
        wrote_size: number of bytes downloaded/written so far.
        total_size: total expected bytes; an empty folder (0) is shown
            as 100% instead of raising ZeroDivisionError (fix).
    """
    # Fix: guard against division by zero for empty folders.
    percentage = int((wrote_size * 100) / total_size) if total_size else 100
    self.dl_progress_folder.setValue(percentage)
    self.dl_progress_folder.setFormat("{}/{} {}%".format(
        sizeof_fmt(wrote_size, 1), sizeof_fmt(total_size, 1), percentage))
    self.dl_progress_folder.repaint()
    QCoreApplication.processEvents(
    )  # force process events because of GUI delay
def explorer_load_folder(self, blackhole_id, item_id):
    """Fill the explorer table with the children of the given folder item.

    Builds one row per child (directories first column '__DIR', files show
    their extension), installs fresh model/proxy objects on the table, wires
    the selection-changed handler, and auto-sizes the columns.
    """
    self.explorer_table.setProperty('blackhole_id', blackhole_id)
    self.explorer_data = []
    if not client.Database:
        return
    ts_fmt = "%Y-%m-%d %H:%M:%S"
    itm: WBHDbItems
    for itm in client.Database.get_items_by_parent_id(
            blackhole_id=blackhole_id, items_parent=item_id):
        if itm.is_dir:
            kind_col = '__DIR'
            contain_col = "{} items".format(itm.items_count)
        else:
            kind_col = os.path.splitext(itm.filename)[1]
            contain_col = "{} parts".format(itm.chunks_count)
        self.explorer_data.append([
            kind_col,
            itm.filename,
            sizeof_fmt(itm.size),
            itm.id,
            itm.uploaded_at.strftime(ts_fmt),
            contain_col,
            itm.created_at.strftime(ts_fmt),
            itm.modified_at.strftime(ts_fmt),
        ])
    self.explorer_model = ExplorerTableModel(
        data=self.explorer_data,
        header=[
            ' ', 'Name', 'Size', 'ID', 'Uploaded', 'Contain', 'Created',
            'Modified'
        ])
    self.explorer_proxy_model = ExplorerTableProxyModel(self)
    self.explorer_proxy_model.setSourceModel(self.explorer_model)
    self.explorer_table.setModel(self.explorer_proxy_model)
    selection = self.explorer_table.selectionModel()
    selection.currentChanged.connect(self.on_explorer_table_current_changed)
    for col in range(len(self.explorer_model.header)):
        self.explorer_table.resizeColumnToContents(col)
def recalculate_blackhole_size(self, bh_id):
    """Recompute a blackhole's total size as the sum of its items' sizes
    and persist it.

    Args:
        bh_id: database id of the blackhole to recalculate.

    Errors are logged, not raised.
    """
    session = None
    try:
        session = self.Session()
        bh: WBHDbBlackHoles = session.query(WBHDbBlackHoles) \
            .options(lazyload(WBHDbBlackHoles.items)) \
            .filter_by(id=bh_id) \
            .first()
        # Sum in one pass instead of per-item attribute writes.
        bh.size = sum(itm.size for itm in bh.items)
        session.commit()
        self.logger.debug("Blackhole `{}` size recalculated: {}".format(
            bh.name, sizeof_fmt(bh.size)))
    except Exception as e:
        self.logger.error(
            " ERROR: could not recalculate blackhole by id of {}:\n {}".
            format(bh_id, str(e)))
    finally:
        # Fix: the session was previously never closed (connection leak).
        if session is not None:
            session.close()
def explorer_load_blackholes(self):
    """Load the root view: one explorer-table row per blackhole.

    Resets the address bar, rebuilds the table model/proxy, resizes the
    columns, and pushes the ROOT entry onto the address bar.
    """
    self.explorer_table.setProperty('blackhole_id', None)
    self.explorer_data = []
    # # clear address bar
    self.addressbar_clear()
    if client.Database:
        bh: WBHDbBlackHoles
        self.explorer_data = [
            ['__BH', bh.name, sizeof_fmt(bh.size), bh.id]
            for bh in client.Database.get_blackholes()
        ]
        self.explorer_model = ExplorerTableModel(
            data=self.explorer_data,
            header=[' ', 'Blackhole', 'Total Size', 'ID'])
        self.explorer_proxy_model = ExplorerTableProxyModel(self)
        self.explorer_proxy_model.setSourceModel(self.explorer_model)
        self.explorer_table.setModel(self.explorer_proxy_model)
        for col_idx in range(len(self.explorer_model.header)):
            self.explorer_table.resizeColumnToContents(col_idx)
        self.addressbar_add(name="ROOT", db_id=-2, blackhole_id=None)
import os  # Fix: os.path is used below but was not imported in this chunk
import time

from common.helper import create_random_content_file, sizeof_fmt
from common.helper import get_checksum_sha256_file

# Config
test_filename = "checksum_file.tmp"
test_filepath = os.path.join(os.path.split(__file__)[0], test_filename)
test_file_size = 50 * 1024 * 1024  # 50MB

# Create Test file
start_t = time.process_time()
create_random_content_file(test_filepath, test_file_size)
elapsed_t = time.process_time() - start_t
# Fix: '{:06f}' zero-fills to width 6 (a typo); '{:.6f}' means 6 decimals.
print("Create a {} test file in {:.6f} secs...".format(sizeof_fmt(test_file_size),
                                                       elapsed_t))

# Each entry is [repeat_count, block_size] — how many times to checksum the
# test file and with what read block size.
sha256_tests = [
    [10, 4 * 1024, ],
    [10, 8 * 1024, ],
    [10, 16 * 1024, ],
    [10, 16 * 1024 * 1024, ],
    [10, 48 * 1024 * 1024, ],
]

# ======== Checksum - SHA256 ========
for tst in sha256_tests:
    start_t = time.process_time()
    for i in range(tst[0]):
        get_checksum_sha256_file(filepath=test_filepath, block_size=tst[1])
    elapsed_t = time.process_time() - start_t
def download_file(self, item_id, blackhole_id, save_to, db_item: WBHDbItems = None,
                  ask_rewrite: bool = True, use_msg_box: bool = True):
    """Download one stored file chunk-by-chunk from the BlackHole into `save_to`.

    For every chunk: download (retrying up to max_download_retry times),
    optionally decrypt (ChaCha20Poly1305, prompting once for a password),
    verify the chunk checksum, append to the output file, then delete the
    temporary chunk file.  After all chunks, the whole file's SHA256
    checksum is verified when one is recorded.

    Args:
        item_id: database id of the item to download.
        blackhole_id: database id of the blackhole holding the item.
        save_to: destination path for the assembled file.
        db_item: pre-fetched DB item; looked up by id when None.
        ask_rewrite: when True (and use_msg_box), ask before overwriting
            an existing file at save_to.
        use_msg_box: show GUI message boxes for confirmation and results.

    Returns:
        True on verified success; None when the user declines overwrite;
        False on any error (also when the item has no SHA256 checksum —
        NOTE(review): the success path is only reached inside the SHA256
        branch, so checksum-less items fall through to `return False`;
        confirm whether that is intended).
    """
    try:
        is_error = 0
        wrote_size = 0
        # check if exist and ask for rewrite
        if ask_rewrite and use_msg_box:
            if os.path.exists(save_to):
                if self.ask_for_rewrite(save_to) == QMessageBox.No:
                    return
        else:
            ask_rewrite = False
        # Get item from Database if did not presented
        if db_item is None:
            db_item = client.Database.get_item_by_id(
                blackhole_id=blackhole_id, item_id=item_id)
        # update progressbar to set initial text
        self.dl_progress_update(0, db_item.size)
        # Open file to write
        with open(save_to, 'wb') as item_f:
            chunk: WBHDbChunks
            for chunk in db_item.chunks:
                # Download chunk
                chunk_filepath = os.path.join(client.tempdir, chunk.filename)
                download_retry = 0
                while True:
                    if client.TelegramBot.get_chunk(chunk, chunk_filepath):
                        with open(chunk_filepath, 'rb') as chunk_f:
                            chunk_data = chunk_f.read()
                            client.logger_client.debug(
                                "Read {} from chunk#{}".format(
                                    sizeof_fmt(len(chunk_data)), chunk.index))
                            if chunk.encryption == EncryptionType.ChaCha20Poly1305.value:
                                client.logger_client.debug(
                                    "Decrypting {} ...".format(
                                        EncryptionType.ChaCha20Poly1305.name))
                                # Extract key and nonce from encryption_data
                                # (hex strings joined by the literal 'O').
                                chunk_key_hex, chunk_nonce_hex = chunk.encryption_data.split('O')
                                # ask for password if never asked
                                if client.password is None:
                                    ip_dialog = InputPasswordDialog(
                                        EncryptionType(chunk.encryption))
                                    if ip_dialog.window.result() == QDialog.DialogCode.Rejected:
                                        # User didn't entered password, CANCEL
                                        # NOTE(review): this break only leaves
                                        # the retry loop; the for-loop then
                                        # continues with the next chunk —
                                        # confirm whether a full abort was
                                        # intended here.
                                        client.logger_client.warning("Aborted by user.")
                                        is_error = 2
                                        break
                                    else:
                                        client.password = ip_dialog.encryption_pass
                                    QCoreApplication.processEvents(
                                    )  # force process events because of GUI delay
                                # Decrypt chunk
                                chunk_data = chacha20poly1305_decrypt_data(
                                    data=chunk_data,
                                    secret=client.password.encode(),
                                    key=bytes.fromhex(chunk_key_hex),
                                    nonce=bytes.fromhex(chunk_nonce_hex))
                            # Matching Checksums
                            if chunk.checksum_type == ChecksumType.NONE.value:
                                client.logger_client.debug(
                                    "There is no checksum for chunk#{}".format(chunk.index))
                            if chunk.checksum_type == ChecksumType.SHA256.value:
                                chunk_checksum = get_checksum_sha256(chunk_data)
                                if chunk_checksum == chunk.checksum:
                                    client.logger_client.debug(
                                        "{} checksum for chunk#{} matched."
                                        .format(
                                            ChecksumType(chunk.checksum_type).name,
                                            chunk.index))
                                else:
                                    # Mismatch aborts the whole download via
                                    # the outer except handler.
                                    raise Exception(
                                        "ERROR: {} checksum for chunk#{} mismatched."
                                        .format(
                                            ChecksumType(chunk.checksum_type).name,
                                            chunk.index))
                            # Write to file
                            item_f.write(chunk_data)
                            client.logger_client.debug(
                                "Wrote {} to file `{}`".format(
                                    sizeof_fmt(len(chunk_data)),
                                    os.path.split(save_to)[1]))
                            wrote_size += len(chunk_data)
                            self.dl_progress_update(wrote_size, db_item.size)
                        # Chunk handled — leave the retry loop.
                        break
                    elif download_retry < client.client['max_download_retry']:
                        # Retry to download. Timeout happens a lot while downloading
                        download_retry += 1
                        client.logger_client.warning(
                            "Retrying to download chunk#{} by name of `{}` from BlackHole ({}/{})..."
                            .format(chunk.index, chunk.filename, download_retry,
                                    client.client['max_download_retry']))
                    else:
                        raise Exception(
                            "Could not download chunk#{} by name of `{}` from BlackHole after {} retries."
                            .format(chunk.index, chunk.filename, download_retry))
                # Remove chunk file if exist
                if os.path.exists(chunk_filepath):
                    os.remove(chunk_filepath)
        if is_error == 0:
            # Match file checksum
            if db_item.checksum_type == ChecksumType.SHA256.value:
                db_item_checksum = get_checksum_sha256_file(filepath=save_to)
                if db_item_checksum == db_item.checksum:
                    client.logger_client.debug(
                        "{} checksum for `{}` matched.".format(
                            ChecksumType(db_item.checksum_type).name,
                            db_item.filename))
                    # File Downloaded Correctly
                    if use_msg_box:
                        msg_box = QMessageBox()
                        msg_box.information(
                            self.window, 'Download',
                            "File successfully downloaded:\n`{}`".format(save_to))
                    return True
                else:
                    raise Exception(
                        "Mismatch checksum for `{}`".format(db_item.filename))
    except cryptography.exceptions.InvalidTag:
        # Wrong password ⇒ AEAD tag verification failed; clear the cached
        # password so the user is prompted again next time.
        client.logger_client.error("Incorrect password.")
        client.password = None
        if use_msg_box:
            msg_box = QMessageBox()
            msg_box.critical(self.window, 'Error', "Password is incorrect.")
    except Exception as e:
        client.logger_client.error(
            "Can not download file by id `{}`\n\n{}".format(item_id, str(e)))
        if use_msg_box:
            msg_box = QMessageBox()
            msg_box.critical(
                self.window, 'Error',
                "Can not download file by id `{}`\n\n{}".format(item_id, str(e)))
    return False
def restore_pb_clicked(self):
    """Restore the client database from a 'database code' backup string.

    Flow: decode+decompress `self.db_code`; the first 32 bytes are the
    ChaCha20Poly1305 key, the next 12 the nonce, the rest the encrypted
    payload.  The decrypted payload is a '^'-separated list of chunk
    descriptors ('encryption;encryption_data;checksum_type;checksum;file_id').
    Each chunk file is downloaded via Telegram, decrypted, checksum-verified
    and appended to a fresh temp DB file.  On full success, existing DB
    backups are rotated (keep_db_backup deep) and the new DB replaces the
    current one.

    Returns:
        True on success (dialog is closed as Accepted); False otherwise.
    """
    is_error = False
    try:
        secret_b = self.password_le.text().encode()
        bd_data = decompress_bytes_to_string_b64zlib(self.db_code)
        # print(len(bd_data))
        # print(bd_data)
        # Layout of the decoded blob: key(32) | nonce(12) | ciphertext.
        key = bd_data[:32]
        # print(key)
        nonce = bd_data[32:44]
        # print(nonce)
        encrypted_data = bd_data[44:]
        self.log_info("Decrypting code...")
        raw_db_backup_data = chacha20poly1305_decrypt_data(
            data=encrypted_data, secret=secret_b, key=key, nonce=nonce)
        self.log_info("Done", color="green")
        db_code_chunks = raw_db_backup_data.decode().split('^')
        self.log_info("Found {} chunks".format(len(db_code_chunks)))
        # NOTE(review): db_c_raw_parts appears unused below — confirm.
        db_c_raw_parts = []
        db_c_i = 0
        # Prepare new db file path
        new_db_filepath = os.path.join(
            client.tempdir,
            "wbh_{}.db".format(datetime.today().strftime('%Y%m%d%H%M%S')))
        # Open db file to write on
        with open(new_db_filepath, 'wb') as new_db_f:
            for db_c in db_code_chunks:
                # Extract chunk data from string
                self.log_info(
                    "Extracting data from chunk {} ...".format(db_c_i))
                db_c_parts = db_c.split(';')
                encryption_type: EncryptionType = EncryptionType[db_c_parts[0]]
                encryption_data = db_c_parts[1]
                checksum_type: ChecksumType = ChecksumType[db_c_parts[2]]
                checksum = db_c_parts[3]
                file_id = db_c_parts[4]
                self.log_info("Done", color="green")
                self.log_info("Chunk {} encrypted with {}".format(
                    db_c_i, encryption_type.name))
                if encryption_type == EncryptionType.ChaCha20Poly1305:
                    # Extract key and nonce for decryption
                    # (hex strings joined by the literal 'O').
                    dc_c_key_hex, db_c_nonce_hex = encryption_data.split('O')
                    # Prepare chunk file path
                    db_c_filepath = os.path.join(client.tempdir,
                                                 "chunk_{}".format(db_c_i))
                    # Download chunk file
                    self.log_info(
                        "Downloading chunk {} ...".format(db_c_i))
                    db_c_file = client.TelegramBot.get_file_by_id(
                        file_id=file_id, path_to_save=db_c_filepath)
                    if db_c_file:
                        # if file downloaded
                        self.log_info("Done", color="green")
                        with open(db_c_filepath, 'rb') as db_c_f:
                            # Read whole chunk
                            db_c_data = db_c_f.read()
                            self.log_info("Chunk {} is {}".format(
                                db_c_i, sizeof_fmt(len(db_c_data))))
                            # Decrypt data
                            self.log_info(
                                "Decrypting chunk {} ...".format(db_c_i))
                            db_c_raw = chacha20poly1305_decrypt_data(
                                data=db_c_data,
                                secret=secret_b,
                                key=bytes.fromhex(dc_c_key_hex),
                                nonce=bytes.fromhex(db_c_nonce_hex))
                            self.log_info("Done", color="green")
                            if checksum_type == ChecksumType.SHA256:
                                db_c_raw_checksum = get_checksum_sha256(db_c_raw)
                                if db_c_raw_checksum == checksum:
                                    # compare checksum — only verified data is
                                    # written to the new DB file.
                                    new_db_f.write(db_c_raw)
                                    self.log_info(
                                        "Chunk {} decrypted successfully.".
                                        format(db_c_i))
                                else:
                                    # Mismatch checksum
                                    self.log_info(
                                        "Error! Mismatch checksum. "
                                        "check database code and password again.",
                                        "red")
                                    is_error = True
                                    break
                            else:
                                # Unsupported checksum
                                self.log_info(
                                    "Error! ChecksumType is not supported.",
                                    "red")
                                is_error = True
                                break
                    else:
                        # Fail to get file
                        self.log_info("Failed", color="red")
                        self.log_info(
                            "Error! Could not download chunk file.", "red")
                        is_error = True
                        break
                else:
                    # Unsupported Encryption
                    self.log_info(
                        "Error! EncryptionType is not supported.", "red")
                    is_error = True
                    break
                db_c_i += 1
        # check if there was any error
        if not is_error:
            self.log_info("New database downloaded completely.")
            # Backup last db: rotate .backup-1 .. .backup-N, newest first,
            # dropping the oldest.
            for bi in reversed(range(1, client.client['keep_db_backup'] + 1)):
                backup_to = "{}.backup-{}".format(
                    client.client['db_filepath'], bi)
                # Remove oldest backup if exist
                if bi == client.client['keep_db_backup']:
                    if os.path.exists(backup_to):
                        os.remove(backup_to)
                if bi > 1:
                    # On newest backup
                    backup_from = "{}.backup-{}".format(
                        client.client['db_filepath'], bi - 1)
                else:
                    backup_from = client.client['db_filepath']
                # move backup if exit
                if os.path.exists(backup_from):
                    shutil.move(backup_from, backup_to)
            # Replace downloaded db with current client db
            shutil.move(new_db_filepath, client.client['db_filepath'])
            time.sleep(1)
            self.window.done(QDialog.DialogCode.Accepted)
            return True
        else:
            self.log_info("Failed to restore db file completely.", "red")
    except Exception as e:
        self.log_info(
            "Error! Can not restore. "
            "check database code and password again.\n{}"
            .format(str(e)), "red")
    return False
from common.helper import chacha20poly1305_encrypt_file, create_random_content_file, get_checksum_sha256_file, \ sizeof_fmt, chacha20poly1305_decrypt_file # Config test_filename = "checksum_file.tmp" test_filepath = os.path.join(os.path.split(__file__)[0], test_filename) test_file_size = 48 * 1024 * 1024 # 48MB password_provided = "8mwHncKVXalaBAIe" # This is input in the form of a string password = password_provided.encode() # Convert to type bytes # Create Test file start_t = time.process_time() create_random_content_file(test_filepath, test_file_size) elapsed_t = time.process_time() - start_t print("Create a {} test file in {:06f} secs...".format( sizeof_fmt(test_file_size), elapsed_t)) test_file_checksum = get_checksum_sha256_file(test_filename) # ==== hazmat/Fernet ==== from cryptography.fernet import Fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC encrypted_test_filepath = test_filename + ".hazmat-fernet" decrypted_test_filepath = test_filename + ".hazmat-fernet.decrypt" # salt = b'salt_' # CHANGE THIS - recommend using a key from os.urandom(16), must be of type bytes salt = os.urandom(16) kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt,
def send_file(self, item_wbhi: WBHItem, blackhole: WBHBlackHole, chunk_size: int, temp_dir: str,
              encryption_type: EncryptionType = EncryptionType.NONE,
              encryption_secret: str = None, delay_between_chunks=0) -> bool:
    """Split a local file into chunks and send each chunk to the BlackHole.

    Resume-aware: if `item_wbhi.chunks` already contains N chunks, the
    source file is seeked to N*chunk_size and sending continues from
    chunk N.  Each chunk is checksummed (SHA256, computed BEFORE
    encryption), optionally encrypted with ChaCha20Poly1305, written to a
    temp file under `temp_dir`, and sent via `send_chunk_file`.

    Args:
        item_wbhi: the queue item describing the file; its `chunks` list
            is appended to in place as chunks are sent.
        blackhole: destination blackhole.
        chunk_size: bytes per chunk.
        temp_dir: directory for temporary chunk files.
        encryption_type: NONE or ChaCha20Poly1305.
        encryption_secret: passphrase; required when encryption is enabled
            (NOTE(review): not validated — a None secret with encryption
            enabled would raise AttributeError; confirm callers guarantee it).
        delay_between_chunks: seconds to sleep between chunk uploads.

    Returns:
        True if all chunks sent successfully, False if any chunk failed.
    """
    is_all_successful = True
    if item_wbhi.chunks is None:
        # New list if there is no chunk yet
        item_wbhi.chunks = []
    # Prepare original filename
    org_fullpath = os.path.join(*item_wbhi.parents, item_wbhi.filename)
    # Resume point: number of chunks already recorded.
    chunk_i = len(item_wbhi.chunks)
    self.logger.debug("Sending file `{}` in chunks of {}"
                      .format(org_fullpath, sizeof_fmt(chunk_size)))
    try:
        # Open Original File
        with open(item_wbhi.full_path, 'rb') as org_file:
            # Seek to the start position of last existing chunk if exist.
            org_file.seek(chunk_i * chunk_size)
            while True:
                # Read a chunk
                chunk_bytes = org_file.read(chunk_size)
                if chunk_bytes:
                    # get checksum before encryption
                    checksum = get_checksum_sha256(chunk_bytes)
                    # Check Encryption
                    encryption_data = None
                    if encryption_type == EncryptionType.ChaCha20Poly1305:
                        # Encrypt chunk data
                        self.logger.debug("Encrypting chunk using ChaCha20Poly1305 ...")
                        chunk_bytes, key, nonce = chacha20poly1305_encrypt_data(
                            data=chunk_bytes,
                            secret=encryption_secret.encode())
                        # Key and nonce stored as hex joined by literal 'O'.
                        encryption_data = '{}O{}'.format(key.hex(), nonce.hex())
                    # Timestamped unique chunk filename with 4-digit index.
                    chunk_filename = "WBHTF{}.p{:04d}".format(
                        datetime.today().strftime('%Y%m%d%H%M%S%f'), chunk_i)
                    chunk = WBHChunk(size=len(chunk_bytes),
                                     filename=chunk_filename,
                                     index=chunk_i,
                                     org_filename=os.path.split(item_wbhi.full_path)[1],
                                     org_fullpath=os.path.join(temp_dir, chunk_filename),
                                     org_size=os.fstat(org_file.fileno()).st_size,
                                     msg_id=None,
                                     state=QueueState.UPLOADING,
                                     checksum=checksum,
                                     checksum_type=ChecksumType.SHA256,
                                     encryption=encryption_type,
                                     encryption_data=encryption_data,
                                     parent_qid=item_wbhi.parent_qid,
                                     parent_db_id=item_wbhi.db_id)
                    self.logger.debug(" Read {}".format(sizeof_fmt(chunk.size)))
                    try:
                        # Open chunk file to write
                        with open(chunk.org_fullpath, 'wb') as chunk_file_w:
                            # write to chunk file
                            chunk_file_w.write(chunk_bytes)
                        self.logger.debug(" Wrote {} to `{}` file"
                                          .format(sizeof_fmt(len(chunk_bytes)), chunk.filename))
                        self.logger.debug(f" Sending `{chunk.filename}` file to BlackHole")
                        # Send chunk file to blackhole
                        self.send_chunk_file(chunk=chunk, blackhole=blackhole)
                        # Add to chunks list
                        item_wbhi.chunks.append(chunk)
                        if delay_between_chunks > 0:
                            self.logger.debug(f"Rest for {delay_between_chunks} secs...")
                            time.sleep(delay_between_chunks)
                    except Exception as e:
                        # One failed chunk does not stop the remaining chunks.
                        is_all_successful = False
                        self.logger.error(
                            f" ERROR: Could not send chunk#{chunk_i} `{chunk_filename}` to BlackHole: {str(e)}")
                else:
                    # EOF reached.
                    break
                chunk_i += 1
    except Exception as e:
        is_all_successful = False
        self.logger.error(f" ERROR: Could not send `{item_wbhi.full_path}` to BlackHole: {str(e)}")
    return is_all_successful