def main():
    """Preprocessing driver for Saber11 test data.

    Reads a JSON parameter file (path in ``sys.argv[1]``), validates the
    expected folder layout, unzips the raw inputs, optionally prunes and
    regenerates "subloque" forms, applies filters to the .dat files,
    merges them via a Perl script and distributes the outputs into
    calibration/scoring folders.  An optional second CLI argument
    (``sys.argv[2]``) names an SNP update CSV that switches the run into
    update mode (``flagUpdate``).

    NOTE(review): this source was recovered from a whitespace-mangled
    file; the indentation below is a reconstruction.  The nesting of the
    ``flagUpdate`` sections in particular should be confirmed against
    the original.  The code is Python 2 (``dict(dic.items() + con.items())``
    relies on ``dict.items()`` returning lists).
    """
    try:
        if not os.path.exists(sys.argv[1]):
            sys.exit("O_O No existe el archivo de parametros: ./" + sys.argv[1])
        with open(sys.argv[1], 'r') as f:
            dic = json.load(f)
            f.close()  # redundant inside `with`, kept as-is
        # Normalize paths to forward slashes (Windows-friendly).
        dic['main_dir'] = os.getcwd().replace('\\', '/')
        dic['copy_file'] = os.getcwd().replace('\\', '/') + '/' + dic['copy_file']
        dic['Filters'] = ['EstPresente', 'Copia', 'Omision']
        # Logging file: start fresh on every run.
        if os.path.exists(dic['main_dir'] + '/preprocessing.log'):
            os.remove(dic['main_dir'] + '/preprocessing.log')
        logging.basicConfig(
            filename=dic['main_dir'] + '/preprocessing.log',
            level=logging.INFO,
            format='%(asctime)s %(message)s',
            datefmt='%m/%d/%Y %I:%M:%S %p')
        # Validate the optional SNP-update file (second CLI argument).
        if len(sys.argv) == 3:
            flagUpdate = True
            if not os.path.exists(dic['main_dir'] + '/input/' + sys.argv[2]):
                logging.info("ADVERTENCIA .... No existe el archivo (" + sys.argv[2] + ") No se filtraran los .dat")
            else:
                # NOTE(review): pdSNP is only bound on this branch; later
                # code checks `'pdSNP' in locals()` before using it.
                pdSNP = pd.read_csv(
                    dic['main_dir'] + '/input/' + sys.argv[2], dtype="str")
        else:
            flagUpdate = False
        print('REVISANDO PARAMETROS...')
        # Required folder layout and copy file.
        if not os.path.exists(dic['main_dir'] + '/src/'):
            sys.exit("FALTA CARPETA SRC")
        if not os.path.exists(dic['main_dir'] + '/src/bin/'):
            sys.exit(
                "CARPETA BIN PARA BILOG NO EXISTE, COPIAR CARPETA EN 'src/'")
        if not os.path.exists(dic['main_dir'] + '/input'):
            sys.exit(
                "CARPETA 'input' NO EXISTE --> CREAR CARPETA INPUT CON .zip Y ARCHIVO DE COPIA"
            )
        if not os.path.isfile(dic['copy_file']):
            sys.exit(
                "ARCHIVO DE COPIA NO ENCONTRADO --> REVISAR ARCHIVO DE PARAMETROS 'src/parameters.json' --> parametro 'copy_file'"
            )
        # 'Pruebas' and 'Codigos' must share exactly the same keys.
        isMissing = [
            prueba for prueba in dic['Pruebas'].keys()
            if not prueba in dic['Codigos'].keys()
        ]
        if len(isMissing) > 0:
            sys.exit(
                "PARAMETROS INCORRECTOS DE JUNTURAS FALTA AGREGAR: ---->\n" +
                ', '.join(isMissing) +
                "\n-------------------------------------------------------" +
                "\ncambiar en 'src/parameters.json' --> parametro 'Codigos'")
        isMissing = [
            prueba for prueba in dic['Codigos'].keys()
            if not prueba in dic['Pruebas'].keys()
        ]
        if len(isMissing) > 0:
            sys.exit(
                "PARAMETROS INCORRECTOS DE FORMAS FALTA AGREGAR: ---->\n" +
                ', '.join(isMissing) +
                "\n-------------------------------------------------------" +
                "\ncambiar en 'src/parameters.json' --> parametro 'Codigos'")
        logging.info(
            '#######################################################################'
        )
        logging.info('COMENZO: ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        logging.info('Corriendo Preprocessing_stage.py')
        logging.info(
            '#######################################################################'
        )
        # Sub-block configuration: disable the feature when the list is
        # missing or empty.
        if dic['subloque']:
            if ("subloques" not in dic.keys()):
                logging.info(
                    'NO DEFINIO SUBLOQUES, no se correran sub-bloques...')
                dic['subloques'] = []
                dic['subloque'] = False
            if len(dic['subloques']) == 0:
                logging.info(
                    'NO DEFINIO SUBLOQUES, no se correran sub-bloques...')
                dic['subloque'] = False
        else:
            logging.info(
                'NO DEFINIO BANDERA SUBLOQUE, no se correran sub-bloques...')
        # Wipe previous run artifacts before regenerating them.
        logging.info('Limpiando carpeta principal')
        if os.path.exists(dic['main_dir'] + '/input/Descargas'):
            shutil.rmtree(
                dic['main_dir'] + '/input/Descargas', ignore_errors=True)
        if os.path.exists(dic['main_dir'] + '/output'):
            shutil.rmtree(dic['main_dir'] + '/output', ignore_errors=True)
        if os.path.exists(dic['main_dir'] + '/doc'):
            shutil.rmtree(dic['main_dir'] + '/doc', ignore_errors=True)
        # NOTE(review): presumably waits for the OS/antivirus to release
        # the deleted folders on Windows — confirm the 15 s is needed.
        time.sleep(15)
        logging.info('\tLeyendo archivo de parametros: ' + sys.argv[1])
        zipfiles = []
        logging.info(
            "ARCHIVOS .ZIP ENCONTRADOS en 'input'--> ALGUNA INCONSISTENCIA EN LOS ARCHIVOS --> REVISAR CARPETA INPUT Y EJECUTAR NUEVAMENTE EL PROCESO"
        )
        for file in os.listdir(dic['main_dir'] + '/input/'):
            if file.endswith('.zip'):
                zipfiles.append(file)
        if len(zipfiles) == 0:
            sys.exit(
                "NINGUN ARCHIVO ZIP ENCONTRADO -- > COPIAR ARCHIVOS ZIP EN 'input/'"
            )
        else:
            logging.info(str(zipfiles))
        logging.info(sep)
        logging.info('REVISANDO SCRIPTS NECESARIOS...')
        dirs = os.listdir(dic['main_dir'] + '/src')
        if not 'JuntarModulosSaber11.pl' in dirs:
            logging.info("FALTA SCRIPT 'src/JuntarModulosSaber11.pl'")
            sys.exit("FALTA SCRIPT 'src/JuntarModulosSaber11.pl'")
        # Output folder skeleton: per-group folders under calibration
        # and scoring.
        groups = {
            '01_Estudiantes': None,
            '02_NoEstudiantes': None,
            '03_Discapacitados': None
        }
        folder_structure = {
            'input': None,
            'output': {
                'calibracion': groups,
                'calificacion': groups
            },
            'doc': None
        }
        logging.info(sep)
        logging.info('CREANDO ESTRUCTURA DE CARPETAS EN: ' + dic['main_dir'] +
                     '/' + str(folder_structure) + '...')
        utils.make_dirs_from_dict(folder_structure, dic['main_dir'] + '/')
        logging.info('\tTerminado...')
        logging.info(sep)
        logging.info('EXTRAYENDO ARCHIVOS...')
        utils.extract_files(dic['main_dir'] + '/')
        logging.info('\tTerminado...')
        logging.info(sep)
        if dic['subloque']:
            # Remove files belonging to sub-blocks that were not requested.
            logging.info('ELIMINANDO SUBLOQUES 0...')
            for root, dirs, files in os.walk(dic["main_dir"]):
                for file in files:
                    pattern = re.compile(r".*sblq(.*)\.|\-.*", re.I)
                    matcher = pattern.match(file)
                    if not matcher == None and not matcher.groups()[0].split(
                            '-')[0] in dic['subloques']:
                        print('ARCHIVO A ELIMINAR..', file)
                        os.remove(os.path.join(root, file))
            logging.info('\tTerminado...')
            logging.info(sep)
            # Generate one folder per sub-block form from each .con file.
            logging.info('GENERANDO FORMAS DE SUBLOQUES...')
            print('Generando formas de subloques')
            pattern = re.compile(r'.*sblq(.*).con', re.I)
            ext = dic['out_f'][:]
            ext.remove('.con')
            for root, dirs, files in os.walk(dic["main_dir"]):
                for file in files:
                    matcher = pattern.search(file)
                    if not matcher == None:
                        newPath = root + '_' + matcher.groups()[0]
                        if not os.path.exists(newPath):
                            print('NEW_FOLDER: ' + newPath)
                            ensure_dir_exists(newPath + '/salidas')
                            params = {
                                'main_path': root + '/',
                                'con_file': file.replace('.con', '')
                            }
                            utils.filterISELECT(params, newPath)
            logging.info('\tTerminado...')
            logging.info(sep)
            logging.info('ADICIONANDO SUBLOQUES A DICCIONARIO...')
            print('Adicionando subloques a diccionario...')
            utils.add_subloques(dic)
            logging.info(dic['Pruebas'])
            logging.info('\tTerminado...')
            logging.info(sep)
            # Delete the original (pre-split) sub-block files.
            logging.info('ARCHIVOS DE SUBLOQUES...')
            print('Eliminando archivos de subloques')
            for root, dirs, files in os.walk(dic["main_dir"]):
                for file in files:
                    if re.match("(.*)sblq(.*)", file):
                        print('ARCHIVO A ELIMINAR..', file)
                        os.remove(os.path.join(root, file))
            logging.info('\tTerminado...')
            logging.info(sep)
        # Apply the configured filters to every folder holding a .con file.
        logging.info('APLICANDO FILTROS A ARCHIVOS .DAT...')
        mpath = ''
        for root, dirs, files in os.walk(dic["main_dir"]):
            for file in files:
                if file.endswith(".con"):
                    flagFiltro = True
                    print('CONFILE ENCONTRADO: ' + file)
                    path = (os.path.join(root, file)).replace('\\', '/').split('/')
                    mpath = ('/').join((os.path.join(root, file)).replace(
                        '\\', '/').split('/')[:-1]) + '/'
                    if dic['subloque']:
                        # Drop whole folders that match no requested sub-block.
                        indSubl = [
                            bloque in file for bloque in dic['subloques']
                        ]
                        print(indSubl)
                        if not any(indSubl):
                            flagFiltro = False
                            shutil.rmtree(root)
                    if flagFiltro:
                        confile = path[-2]
                        dic['con_file'] = path[-2]
                        logging.info('\tAplicando filtros a : ' + confile)
                        dic['filtered_data'] = ''
                        dic['main_path'] = mpath
                        dic['count_log'] = dic['main_path'] + '/registro.log'
                        con = utils.create_dict_from_con(dic)
                        # NOTE(review): Python-2-only dict merge (items()
                        # returns lists); in Python 3 use {**dic, **con}.
                        params = dict(dic.items() + con.items())
                        params['id_form'] = confile
                        params['aplicacion'] = con['DATA'].split('-')[0][0:7]
                        logging.info('\t\tAplicacion: ' + params['aplicacion'])
                        f_g = {}
                        utils.set_f_g(params['Pruebas'].copy(), confile, f_g)
                        if not f_g == {}:
                            params['curr_group'] = f_g[confile]
                            logging.info('\t\tGrupo: ' + params['curr_group'])
                            if not flagUpdate:
                                filtrado.apply_filters(params)
                        mpath = mpath.replace(confile, '')
                        logging.info('\t\tTerminado...')
        logging.info(sep)
        if not flagUpdate:
            # Build configuracion.txt from the folders found under Descargas.
            logging.info('CREANDO ARCHIVO DE CONFIGURACION.TXT...')
            f = []
            for root, dirs, files in os.walk(dic['main_dir'] +
                                             '/input/Descargas/'):
                for dir in dirs:
                    f.append(dir)
            config_file = []
            utils.create_config_file(dic['Pruebas'], dic['Codigos'],
                                     dic['Pruebas'].keys(), '', config_file,
                                     [], f)
            np.savetxt(
                dic['main_dir'] + '/output/configuracion.txt',
                config_file,
                delimiter=",",
                fmt="%s")
            logging.info('\t\tTerminado...')
        logging.info(sep)
        # Merge the .dat files with the external Perl script.
        logging.info('JUNTANDO ARCHIVOS DAT (JUNTAR.pl)...')
        os.chdir(mpath)  # Change to forms_path
        p = subprocess.Popen([
            'perl', dic['main_dir'] + '/src/JuntarModulosSaber11.pl', '-com',
            '-dat', '-conf', dic['main_dir'] + '/output/configuracion.txt'
        ])
        p.communicate()
        logging.info('\t\tTerminado...')
        logging.info(sep)
        ext = dic['out_f']
        logging.info(
            'MOVIENDO ARCHIVOS A SUS RESPECTIVAS CARPETAS DE SALIDA...')
        config = []
        logging.info(
            'SACANDO CARPETAS DE JUNTURA...(Leyendo archivo de configuracion.txt)'
        )
        with open(dic['main_dir'] + '/output/configuracion.txt') as f:
            config = f.readlines()
            f.close()  # redundant inside `with`, kept as-is
        J = []  # Forms already present in the JUNTAS folder
        for line in config:
            if line.startswith("PRUEBA"):
                J.append(line.split()[2])
        logging.info('FORMAS DE JUNTURA: ' + str(J))
        # Copy calibration outputs per group folder.
        dirs = os.listdir(os.getcwd())
        for d in dirs:
            if not d in J and os.path.isdir(os.getcwd() + '/' + d):
                logging.info(sep)
                logging.info('PATH DE INPUT - OUTPUT PARA CARPETA: ' + d)
                if d == "JUNTAS":
                    f_g = {'JUNTAS': '01_Estudiantes'}
                else:
                    f_g = {}
                    utils.set_f_g(dic['Pruebas'].copy(), d, f_g)
                if not f_g == {}:
                    path_output = dic[
                        'main_dir'] + '/output/calibracion/' + f_g[
                            d] + '/' + d
                    ensure_dir_exists(path_output)
                    if d == 'JUNTAS':
                        # JUNTAS is copied wholesale.
                        path_input = os.getcwd() + '/' + d
                        logging.info('PATH INPUT: ' + path_input)
                        logging.info('PATH OUTPUT: ' + path_output)
                        utils.copytree(path_input, path_output)
                    else:
                        # Copy only files whose name contains one of the
                        # configured output extensions.
                        for root, dirs, files in os.walk(os.getcwd() + '/' + d):
                            for file in files:
                                for ex in dic['out_f']:
                                    if ex in file:
                                        path_input = os.path.join(
                                            root, file).replace('\\', '/')
                                        logging.info('PATH INPUT: ' + path_input)
                                        logging.info('PATH OUTPUT: ' +
                                                     path_output + '/' + file)
                                        shutil.copyfile(
                                            path_input,
                                            path_output + '/' + file)
                    if not d == 'JUNTAS':
                        ensure_dir_exists(path_output + '/salidas')
                        logging.info('SALIDAS: ' + path_output + '/salidas')
        logging.info(sep)
        logging.info(
            'MOVIENDO ARCHIVOS DE CALIFICACION A SUS RESPECTIVAS CARPETAS...')
        # In non-update mode the scoring data lives in .O files that are
        # renamed back to .DAT on copy.
        ext = []
        for e in dic['out_f']:
            if e.endswith('.DAT') and not flagUpdate:
                ext.append(e.replace('.DAT', '.O'))
            else:
                ext.append(e)
        removePaths = []
        for root, dirs, files in os.walk(os.getcwd()):
            for d in dirs:
                if not d == 'JUNTAS':
                    f_g = {}
                    utils.set_f_g(dic['Pruebas'].copy(), d, f_g)
                    if not f_g == {}:
                        path_output = dic[
                            'main_dir'] + '/output/calificacion/' + f_g[
                                d] + '/' + d
                        ensure_dir_exists(path_output)
                        if not os.path.exists(path_output + '/salidas'):
                            os.makedirs(path_output + '/salidas')
                        for file in os.listdir(os.path.join(root, d)):
                            for e in ext:
                                if e in file:
                                    path_input = os.path.join(root, d) + '/' + file
                                    if file.endswith('.O'):
                                        output = path_output + '/' + file.replace(
                                            '.O', '.DAT')
                                    else:
                                        output = path_output + '/' + file
                                    if not os.path.exists(output):
                                        shutil.copyfile(path_input, output)
                                        if flagUpdate and file.endswith(
                                                '.DAT'):
                                            if ('pdSNP' in locals()):
                                                # Keep only people present in
                                                # the SNP update file; empty
                                                # results mark the folder for
                                                # removal.
                                                nUpdate = filtroActualizacion(
                                                    output, pdSNP,
                                                    dic['id_len'])
                                                if nUpdate == 0:
                                                    removePaths.append(
                                                        path_output)
                                                logging.info(
                                                    'FILTRANDO NUEVAS PERSONAS PARA CALIFICAR: '
                                                    + file)
                                            else:
                                                statFile = os.stat(path_input)
                                                if str(statFile.
                                                       st_size) == '0':
                                                    removePaths.append(
                                                        path_output)
                                                logging.info(
                                                    'SE CALIFICARAN TODAS LAS PERSONAS: '
                                                    + file)
        # Remove scoring folders that ended up empty after the update filter.
        if flagUpdate:
            logging.info(sep)
            logging.info('ELIMANDO CARPETAS EN BLANCO DE LA CALIFICACION...')
            for path in removePaths:
                logging.info('Eliminando: ' + path)
                shutil.rmtree(path)
            logging.info(sep)
        logging.info('\tTerminado...')
        logging.info(sep)
        logging.info('TERMINO: ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    except Exception as e:
        # NOTE(review): logging.error() returns None, so this prints
        # "None" to stdout; the traceback itself goes to the log file.
        print(logging.error(traceback.format_exc()))
def extract_backup(commands):
    """Mount a VeraCrypt backup volume and unpack its full contents.

    Resolves the volume either by backup name or by an explicit path,
    validates password and destination, mounts the volume, rebuilds the
    list of live (non-deleted) entries from the volume's metadata.json,
    extracts everything into ``commands.path_to_save`` and dismounts.
    Returns a colored status/error message string.
    """
    # Drive letter: user-supplied one, or the default when drive V is busy.
    drive = commands.virtual_drive if commands.virtual_drive else DEFAULT_VIRTUAL_DRIVE
    if not drive.endswith(':'):
        drive += ':'

    # Locate the volume: backup name takes precedence over explicit path.
    if commands.name:
        volume = utils.find_path_to_volume_by_backup_name(
            commands.name, program_directory)
    elif commands.volume:
        volume = commands.volume
    else:
        return Font.YELLOW + '[!] Введите имя бэкапа или путь к тому бэкапа для обновления'

    # Password is mandatory and must meet the minimum length.
    if not commands.password:
        return Font.YELLOW + '[!] Пароль не найден'
    if len(commands.password) < MIN_PASSWORD_LENGTH:
        return Font.YELLOW + '[!] Пароль слишком короткий. Минимум 25 символов'

    if not commands.path_to_save:
        return Font.YELLOW + '[!] Укажите путь для разархивирования бэкапа'

    if utils.volume_is_mount(drive):
        print(Font.YELLOW + '[!] Том уже смонтирован или диск с таким именем уже существует')
    elif not utils.mount_veracrypt_volume(DEFAULT_VERACRYPT_PATH, volume,
                                          commands.password, drive):
        return Font.YELLOW + '[!] Возникла ошибка при монтировании тома'

    # The mounted drive must actually contain the backup layout.
    if not utils.is_backup_drive(drive):
        return Font.YELLOW + '[i] Диск не является бэкапом'

    print(Font.YELLOW + '[i] Загрузка метаданных...')
    try:
        backup_metadata = utils.load_metadata_from_json(
            os.path.join(drive, 'metadata.json'))
    except utils.CastomException as exc:
        return exc

    files_metadata = backup_metadata['metadata']
    backup_name = backup_metadata['backup_name']
    common_path = backup_metadata['common_path']
    destination = commands.path_to_save

    # Build (archive member ufn, relative target) pairs for every entry
    # that has not been deleted from the backup.
    entries = []
    for name, meta in files_metadata.items():
        if meta['is_deleted']:
            continue
        # The actual archive member may live one or two parent levels up.
        if meta['has_parent']:
            parent = files_metadata[meta['parent']]
            ufn = files_metadata[parent['parent']]['ufn'] if parent['has_parent'] else parent['ufn']
        else:
            ufn = meta['ufn']
        if meta['is_dir']:
            entries.append((ufn, meta['parent_dir_name']))
        else:
            entries.append((ufn, name[len(common_path):]))

    print(Font.YELLOW + '[i] Начало извлечения!')
    utils.extract_files(drive, backup_name, entries, destination, Font)
    # Drop the helper "ignore" folder created during extraction.
    shutil.rmtree(os.path.join(destination, 'ignore'))
    print(Font.YELLOW + '[i] Извлечение файлов окончено!')

    print(Font.YELLOW + '[i] Начало размонтирования тома...')
    utils.dismount_veracrypt_volume(DEFAULT_VERACRYPT_PATH, drive)
    return Font.CYAN + '[>] Бэкап успешно распакован'
def main():
    """Preprocessing driver (IRTPro variant), Python 2.

    Reads a JSON parameter file (path in ``sys.argv[1]``), and — when the
    ``execute`` flag is 0 — builds the folder skeleton, extracts the raw
    inputs, runs the WS2IRTPro.pl Perl script, filters the generated CSVs,
    distributes the analysis folders into calibration/scoring outputs,
    converts CSVs to SSIG, then persists ``execute = 1`` so a rerun is a
    no-op until the flag is reset.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source.  This function is Python 2 only (``print`` statements and
    ``dict(dic.items() + con.items())``).
    """
    try:
        with open(sys.argv[1], 'r') as f:
            dic = json.load(f)
            f.close()  # redundant inside `with`, kept as-is
        logging.basicConfig(filename=dic['main_dir']+'preprocessing.log', level=logging.INFO, format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
        logging.info('#######################################################################')
        logging.info('COMENZO: '+datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        logging.info('Corriendo Preprocessing_stage.py')
        logging.info('#######################################################################')
        # execute == 0 means the pipeline has not run yet for these params.
        if dic['execute'] == 0:
            logging.info('\tLeyendo archivo de parametros: '+sys.argv[1])
            # Folder skeleton: four population groups, each with
            # calibration and scoring subfolders on the output side.
            cal = {'calibracion':None,'calificacion':None}
            groups_output = {'01_MuestraEspecial': cal, '02_Estudiantes': cal, '03_NoEstudiantes':cal,'04_Discapacitados':cal}
            groups_input = {'01_MuestraEspecial': None, '02_Estudiantes': None, '03_NoEstudiantes':None,'04_Discapacitados':None}
            folder_structure = {'input': groups_input, 'output': groups_output, 'src': None, 'doc':None}
            logging.info('CREANDO ESTRUCTURA DE CARPETAS EN: '+dic['main_dir']+str(folder_structure)+'...')
            utils.make_dirs_from_dict(folder_structure,dic['main_dir'])
            logging.info('\tTERMINADO...')
            logging.info('EXTRAYENDO ARCHIVOS...')
            utils.extract_files(dic['main_dir'],dic['forms_path'],dic['id_forms'],dic['id_groups'])
            logging.info('\tTERMINADO...')
            # Run the external Perl script that produces the IRTPro
            # analysis folders and CSV files.
            logging.info('CREANDO CARPETAS CON ANALISIS IRTPRO Y ARCHIVOS CSV (WS2IRTPro.pl)...')
            os.chdir(dic["main_dir"] + "input")
            # NOTE(review): `if(1==1)` looks like a leftover debug toggle.
            if(1==1):
                p = subprocess.Popen(["perl", dic["main_dir"]+"src/WS2IRTPro.pl","-irt","-csv","-trad","-cal","-sco"])
                p.communicate()
            logging.info('\tTERMINADO...')
            # Apply filters to every folder that contains a .con file.
            logging.info('APLICANDO FILTROS A ARCHIVOS CSV...')
            for root, dirs, files in os.walk(dic["main_dir"]):
                for file in files:
                    if file.endswith(".con"):
                        path = (os.path.join(root, file)).replace('\\','/').split('/')
                        mpath = ('/').join((os.path.join(root, file)).replace('\\','/').split('/')[:-1])+'/'
                        confile = path[-2]
                        dic['con_file'] = path[-2]
                        logging.info('\tAplicando filtros a : '+path[-2].replace('.con','')+'...')
                        dic['filtered_data']=''
                        dic['analysis']=['TRA','CAL','SCO']
                        dic['main_path'] = mpath
                        con = utils.create_dict_from_con(dic)
                        # NOTE(review): Python-2-only dict merge.
                        params = dict(dic.items() + con.items())
                        # Normalize the PSELECT wildcard suffix ?1 -> 01.
                        if params['PSELECT'][-2:] == '?1':
                            logging.info('\t\tCambiando valor de PSELECT: ?1 -> 01..')
                            params['PSELECT'] = params['PSELECT'][0:-2]+'01'
                        filtrado.apply_filters(params)
            logging.info('\tTERMINADO...')
            # Move TRA/CAL analyses to calibration and SCO to scoring.
            logging.info('MOVIENDO ARCHIVOS A SUS RESPECTIVAS CARPETAS DE SALIDA...')
            for root, dirs, files in os.walk(params["main_dir"]):
                for file in files:
                    if file.endswith(".con"):
                        path = (os.path.join(root, file)).replace('\\','/').split('/')
                        mpath = ('/').join((os.path.join(root, file)).replace('\\','/').split('/')[:-1])+'/'
                        confile = path[-2]
                        params['con_file'] = path[-2]
                        for analysis in params['analysis']:
                            if not analysis == "SCO":
                                logging.info('\t\tMoviendo analisis: '+analysis+ ' a carpeta calibracion')
                                output = mpath.replace('input','output').replace(params['con_file'],'calibracion/'+params['con_file']+'/'+analysis)
                            else:
                                logging.info('\t\tMoviendo analisis: '+analysis+ ' a carpeta calificacion')
                                output = mpath.replace('input','output').replace(params['con_file'],'calificacion/'+params['con_file']+'/'+analysis)
                            shutil.move(mpath+analysis,output)
            logging.info('\tTERMINADO...')
            # Convert every output CSV into an SSIG file.
            logging.info('CREANDO ARCHIVOS SSIG EN CADA GRUPO...')
            for root, dirs, files in os.walk(dic['main_dir']+'output'):
                for file in files:
                    if file.endswith(".csv"):
                        csv = ("/").join((os.path.join(root, file).replace("\\","/").split("/")))
                        logging.info('\tCreando archivo ssig para grupo: '+csv+'...')
                        utils.create_ssig_file(dic["main_dir"]+'src/ascii2ssig.bat',dic["main_dir"]+'src/ASCII2SSIG64.exe',csv,dic["#_items"])
            logging.info('\tTERMINADO...')
            # Persist execute = 1 so the next invocation is a no-op.
            logging.info('GUARDANDO PARAMETROS --> EXECUTE = 1')
            dic['execute'] = 1
            with open(dic['main_dir']+'src/parameters.json', 'w') as f:
                json.dump(dic, f)
                f.close()  # redundant inside `with`, kept as-is
        else:
            print 'EL PROCESO YA SE EJECUTO ANTERIORMENTE'
            logging.info('EL PROCESO YA SE EJECUTO ANTERIORMENTE')
            logging.info('PARA VOLVER A EJECUTAR --> CAMBIAR PARAMETRO EXECUTE --> 0')
        logging.info('TERMINO: '+datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    except Exception as e:
        # NOTE(review): logging.error() returns None, so this prints
        # "None" to stdout; the traceback itself goes to the log file.
        print logging.error(traceback.format_exc())
def get_file_from_backup(commands):
    """Extract a single file from a VeraCrypt-backed backup volume.

    The file may be selected by its unique archive name (ufn), by its
    original filename, or by its SHA hash; the first metadata entry
    that matches wins.  Returns a colored status/error message string.

    Fixes over the previous revision:
    * ``extract_files`` was called unqualified in the filename and hash
      branches (NameError at runtime); all calls now go through
      ``utils.extract_files``, matching the ufn branch and
      ``extract_backup``.
    * the metadata loop variable shadowed the ``filename`` filter taken
      from ``commands.filename``, so the by-name lookup compared each
      metadata key against its own entry instead of the requested name.
    """
    if commands.virtual_drive:
        # If drive V is already taken, mount on the user-supplied letter.
        virtual_drive = commands.virtual_drive
    else:
        virtual_drive = DEFAULT_VIRTUAL_DRIVE
    if not virtual_drive.endswith(':'):
        virtual_drive = virtual_drive + ':'

    if commands.name:
        # NOTE(review): extract_backup() passes program_directory as a
        # second argument to this helper — confirm the expected signature.
        volume = utils.find_path_to_volume_by_backup_name(commands.name)
    elif commands.volume:
        volume = commands.volume
    else:
        return Font.YELLOW + '[!] Введите имя бэкапа или путь к тому бэкапа для обновления'

    # Password is mandatory and must meet the minimum length.
    if commands.password:
        if len(commands.password) < MIN_PASSWORD_LENGTH:
            return Font.YELLOW + '[!] Пароль слишком короткий. Минимум 25 символов'
    else:
        return Font.YELLOW + '[!] Пароль не найден'

    if (not commands.unique_filename) and (not commands.filename) and (
            not commands.shahash):
        return Font.YELLOW + '[i] Укажите уникальное имя файла, его хэш или уникальное имя в архиве(ufn)'
    if not commands.path_to_save:
        return Font.YELLOW + '[i] Укажите куда нужно распаковать файл'

    if utils.volume_is_mount(virtual_drive):
        print(Font.YELLOW + '[!] Том уже смонтирован или диск с таким именем уже существует')
    else:
        # Mount the volume.
        if not utils.mount_veracrypt_volume(DEFAULT_VERACRYPT_PATH, volume,
                                            commands.password, virtual_drive):
            return Font.YELLOW + '[!] Возникла ошибка при монтировании тома'

    # The mounted drive must contain the expected backup layout.
    if not utils.is_backup_drive(virtual_drive):
        return Font.YELLOW + '[i] Диск не является бэкапом'

    print(Font.YELLOW + '[i] Загрузка метаданных...')
    try:
        backup_metadata = utils.load_metadata_from_json(
            os.path.join(virtual_drive, 'metadata.json'))
    except utils.CastomException as exc:
        return exc

    files_metadata = backup_metadata['metadata']
    backup_name = backup_metadata['backup_name']

    # Normalize the three possible selectors; unset ones stay None.
    if commands.unique_filename:
        file_ufn = commands.unique_filename.lower()
    else:
        file_ufn = None
    if commands.shahash:
        file_hash = commands.shahash.lower()
    else:
        file_hash = None
    if commands.filename:
        filename = commands.filename
    else:
        filename = None

    # BUGFIX: iterate with a dedicated variable so the `filename` filter
    # from the command line is not clobbered by the loop.
    for meta_key in files_metadata:
        file = files_metadata[meta_key]
        if file_ufn and file_ufn == file['ufn']:
            print(Font.CYAN + '[i] Имя файла: %s\tразмер: %s' %
                  (file['name'], utils.normilize_size(file['st_size'])))
            if file['has_parent']:
                file_ufn = files_metadata[file['parent']]['ufn']
            utils.extract_files(virtual_drive, backup_name, [file_ufn],
                                commands.path_to_save, Font)
            # Rename the extracted file back to its original name.
            os.rename(os.path.join(commands.path_to_save, file_ufn),
                      os.path.join(commands.path_to_save, file['name']))
            # A ufn is unique, so at most one entry can match — stop here.
            break
        elif filename and filename == file['name']:
            print(Font.CYAN + '[i] Размер: %s\t ufn: %s' %
                  (utils.normilize_size(file['st_size']), file['ufn']))
            # BUGFIX: was a bare `extract_files` call (NameError).
            utils.extract_files(virtual_drive, backup_name, [file['ufn']],
                                os.path.join(commands.path_to_save,
                                             file['name']), Font)
            # Stop after the first hit: several entries may share a name
            # and the file must only be unpacked once.
            break
        elif file_hash and file_hash == file['hash']:
            print(Font.CYAN + '[i] Имя файла: %s\tразмер: %s\t ufn: %s' %
                  (file['name'], utils.normilize_size(file['st_size']),
                   file['ufn']))
            # BUGFIX: was a bare `extract_files` call (NameError).
            utils.extract_files(virtual_drive, backup_name, [file['ufn']],
                                os.path.join(commands.path_to_save,
                                             file['name']), Font)
            break
    else:
        return Font.YELLOW + '[i] Файл не найден'

    print(Font.YELLOW + '[i] Начало размонтирования тома...')
    utils.dismount_veracrypt_volume(DEFAULT_VERACRYPT_PATH, virtual_drive)
    return Font.CYAN + '[>] Файл успешно разархивирован'