def extractFollowup(user_batches=False):
    """Export the 'followup' table to Excel.

    When *user_batches* is True, export only the rows assigned to the
    current user to '<user> batches.xlsx'; otherwise export the whole
    table to 'followup.xlsx' and then build the extended follow-up
    workbook from it.
    """
    import os, time
    import pandas as pd
    from fup.utils.dbwrap import sql2df
    from fup.utils.jsoninfo import configInfo, sessionInfo
    from fup.helpers.files import extendRowsFollowup

    config = configInfo()
    xlpath = config['path_to_excels_exported_from_database']
    df = sql2df('followup')

    if user_batches == True:
        session = sessionInfo()
        user_working = session['current_user_working']
        df_batchesinWork = df[df['Responsible'] == user_working]
        save_path = os.path.join(xlpath, '{} batches.xlsx'.format(user_working))
        df_batchesinWork.to_excel(save_path, index=False)
    else:
        save_path = os.path.join(xlpath, 'followup.xlsx')
        df.to_excel(save_path, index=False)
        # Wait up to ~40s for the export to land on disk, then build the
        # extended follow-up exactly once.  BUGFIX: the original loop had no
        # break, so extendRowsFollowup() was re-run on every remaining
        # iteration once the file existed.
        for _ in range(20):
            if os.path.isfile(save_path):
                extendRowsFollowup()
                break
            time.sleep(2)
def renameAssgnDir(dictInfo):
    """Rename an assigned batch folder when its responsible user changes.

    *dictInfo* must contain 'Responsible' (the new user) and 'BatchID'.
    Folder naming convention is "<user>-<rest>"; only the user part is
    swapped.  Returns True on success, False when the folder is missing,
    ambiguous, or the rename fails.
    """
    import os, re
    from fup.utils.jsoninfo import configInfo

    newuser = dictInfo["Responsible"]
    bid = dictInfo["BatchID"]
    config = configInfo()
    dir_assigned = config['path_to_batches_assigned']
    dir_assignedli = os.listdir(dir_assigned)

    # Exactly one folder must match the batch id, otherwise bail out.
    dirtoRename = [d for d in dir_assignedli if re.search(bid, d)]
    if len(dirtoRename) != 1:
        return False

    originalDirName = dirtoRename[0]
    dtail = originalDirName.split('-')[1]
    newBatchdirName = newuser + '-' + dtail
    src = os.path.join(dir_assigned, originalDirName)
    dst = os.path.join(dir_assigned, newBatchdirName)
    try:
        os.rename(src, dst)
    except OSError:
        # BUGFIX: narrowed from a bare except; also added the missing space
        # after "rename" in the message.
        print('Cannot rename {} to {}'.format(src, dst))
        return False
    return True
def unassignedtoPrepfiles():
    """Mirror batch folders from UNASSIGNED into PREPARED FILES."""
    import re, os, shutil
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import copytree

    config = configInfo()
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    prepfilespath = os.path.abspath(config['path_to_batches_prepfiles'])

    for folder in os.listdir(unassignedpath):
        src = os.path.join(unassignedpath, folder)
        # Skip the helper text file that lives next to the batch folders.
        if re.search("Please check these Batches.txt", src):
            continue
        dst = os.path.join(prepfilespath, folder)
        try:
            os.mkdir(dst)
            copytree(src, dst)
        except:
            # Destination already exists: best-effort copy of any files that
            # were added to the batch since the last sync.
            src_filesli = os.listdir(src)
            dst_fileli = os.listdir(dst)
            if len(src_filesli) > len(dst_fileli):
                for srcFile in src_filesli:
                    s = os.path.join(src, srcFile)
                    d = os.path.join(dst, srcFile)
                    try:
                        shutil.copy2(s, d)
                    except:
                        pass
def moveDirName(dirName, cfg_olddir, cfg_newdir):
    """Move a batch folder from the config path *cfg_olddir* to
    *cfg_newdir* and delete the source; return True/False."""
    import os, re, shutil
    from fup.utils.commun import copytree, deletetree
    from fup.utils.jsoninfo import configInfo

    try:
        config = configInfo()
        olddir = config[cfg_olddir]
        newdir = config[cfg_newdir]
        # First directory whose name matches dirName (usually a batch id).
        matching = [d for d in os.listdir(olddir) if re.search(dirName, d)]
        pathOld = os.path.join(olddir, matching[0])
        pathNew = os.path.join(newdir, matching[0])
        copytree(pathOld, pathNew, symlinks=False, ignore=None)
        deletetree(pathOld)
        return True
    except:
        # No match (IndexError) or the copy/delete failed.
        return False
def createAssignedDirFiles(unassignedBatch):
    """Create the assigned folder for a batch: copy the FE macro, copy the
    original files into an 'OP FILE' subfolder, prefix the macro name with
    the batch id, then delete the prep-files source folder."""
    import os, re, shutil
    from fup.utils.jsoninfo import configInfo, sessionInfo
    from fup.utils.commun import getfilespath_from, deletetree

    config = configInfo()
    session = sessionInfo()
    user = session['current_user_working']
    if re.search('@', user):
        user = user.split('@')[0]

    # changed from unassigned path to prepfiles path
    dir_unassigned = config["path_to_batches_prepfiles"]
    dir_assigned = config['path_to_batches_assigned']
    dir_frontend = config['path_to_frontend']

    # Pick the (non-BETA) FE macro from the frontend folder.
    macro_candidates = [f for f in os.listdir(dir_frontend) if re.search('.xlsm', f)]
    dir_feFile = [f for f in macro_candidates if not re.search('BETA', f.upper())]

    try:
        for biddir in os.listdir(dir_unassigned):
            if re.search(unassignedBatch, biddir):
                # Source (prep files) and destination (assigned) folders.
                opfile_dirunassigned = os.path.join(dir_unassigned, biddir)
                opfile_dirassigned = os.path.join(dir_assigned, str(user + '-' + biddir))
                os.mkdir(opfile_dirassigned)
                # Copy the FE macro into the freshly created folder.
                fepathfile = os.path.join(dir_frontend, dir_feFile[0])
                shutil.copy2(fepathfile, opfile_dirassigned)
                # Create 'OP FILE' and copy the originals (minus Thumbs.db).
                opfilepath = os.path.join(opfile_dirassigned, 'OP FILE')
                os.mkdir(opfilepath)
                org_files = [f for f in getfilespath_from(opfile_dirunassigned)
                             if not re.search('Thumbs.db', f)]
                for file in org_files:
                    shutil.copy2(file, opfilepath)
                # Prefix the FE macro name with the batch id.
                filesinassigned = os.listdir(opfile_dirassigned)
                fenameold = [f for f in filesinassigned if re.search('.xlsm', f)][0]
                fefileold = os.path.join(opfile_dirassigned, fenameold)
                fenamenew = unassignedBatch + '-' + fenameold
                fefilenew = os.path.join(opfile_dirassigned, fenamenew)
                os.rename(fefileold, fefilenew)
                # Remove the source folder from prep files.
                deletetree(os.path.join(dir_unassigned, biddir))
        return True
    except Exception as e:
        print("GOT: ", e)
        return False
def mergeDirBatches(batchidstrli):
    """Merge several assigned batch folders into one new folder and delete
    the originals.  Returns a dict with merge info, or False on failure."""
    import os, re, shutil, time
    from fup.utils.commun import copytree, generateID, listifyString
    from fup.utils.jsoninfo import configInfo, sessionInfo

    config = configInfo()
    session = sessionInfo()
    try:
        user = session['current_user_working']
        if re.search('@', user):
            user = user.split('@')[0]
        batchidli = listifyString(batchidstrli)

        dir_assigned = config['path_to_batches_assigned']
        dirs_assigned = os.listdir(dir_assigned)

        # Make a dir for the merged batches.
        mergedID = generateID()
        mergeddirpath = os.path.join(
            dir_assigned, '{}-___ A___ BID_{}'.format(user, mergedID))
        os.mkdir(mergeddirpath)

        # Resolve each batch id to its folder name in ASSIGNED.
        dirstomergeli = []
        for batchid in batchidli:
            dir_bidAssigned = [d for d in dirs_assigned if re.search(batchid, d)]
            dirstomergeli.append(dir_bidAssigned[0])

        # Copy each old batch into the merged folder, then delete it,
        # retrying until any files held open by users are closed.
        for folderName in dirstomergeli:
            assignedPathOld = os.path.join(dir_assigned, folderName)
            copytree(assignedPathOld, mergeddirpath, symlinks=False, ignore=None)
            files_not_deleted = True
            while files_not_deleted:
                try:
                    shutil.rmtree(assignedPathOld)  # delete folders
                    files_not_deleted = False
                except:
                    print("Please close file(s) open in folder {}".format(
                        assignedPathOld))
                    time.sleep(2)

        mergedInfodict = {
            'mergedID': mergedID,
            'mergeddirpath': mergeddirpath,
            'batchidli': batchidli
        }
        return mergedInfodict
    except Exception as e:
        print('mergeDirBatches/helpers Got :', e)
        return False
def connection():
    """Open (creating it if missing) the SQLite database named in config.

    Returns a sqlite3.Connection on success, or False when the database
    cannot be opened.
    """
    import sqlite3
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    dbNamePath = config["path_to_database"]
    try:
        # Local renamed from 'connection' so it no longer shadows this
        # function's own name; except narrowed from bare to sqlite3.Error.
        conn = sqlite3.connect(dbNamePath)
        return conn
    except sqlite3.Error:
        return False
def extractFileshistory():
    """Export the 'fileshistory' table to Excel and open it for review."""
    import os
    import pandas as pd
    from fup.utils.dbwrap import sql2df
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import xllook

    config = configInfo()
    export_dir = config['path_to_excels_exported_from_database']
    history_df = sql2df('fileshistory')
    save_path = os.path.join(export_dir, 'fileshistory.xlsx')
    history_df.to_excel(save_path, index=False)
    xllook(save_path, 'A1:E1', close=False)
def generateID():
    """Return a new id: a timestamped UUID ("big" id) or a custom short id,
    depending on the config flags."""
    from fup.utils.jsoninfo import configInfo
    import uuid
    from datetime import datetime

    config = configInfo()
    genbigID = config["generateBigID"].strip().upper()
    gencustomID = config["generateCustomID"].strip().upper()
    lencustomID = int(config["customIDlentgh"].strip())

    if genbigID == "YES":
        # e.g. "2020-01-31-23-59-<uuid4>"
        return datetime.now().strftime('%Y-%m-%d-%H-%M') + '-' + str(uuid.uuid4())
    if gencustomID == "YES":
        return generateCustomID(lencustomID)
    return generateCustomID(6)
def extendRowsFollowup():
    """Expand followup.xlsx so batches listing several original files get
    one row per file, then save a dated 'DO NOT IMPORT' workbook."""
    import os
    import pandas as pd
    pd.options.mode.chained_assignment = None  # default='warn'
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import current_date, listifyString, xllook

    config = configInfo()
    xlpath = config['path_to_excels_exported_from_database']
    xlfilepath = os.path.join(xlpath, 'followup.xlsx')
    fupdf = pd.read_excel(xlfilepath)

    # Build one single-row df per (batch, file) for multi-file batches.
    orgfilesdfsli = []
    bidtodel = []
    for i, cell in enumerate(fupdf["OriginalFilesName"].tolist()):
        cellli = listifyString(str(cell))
        if len(cellli) > 1:
            bid = fupdf.loc[i, "BatchID"]
            bidtodel.append(bid)
            for j, orgfile in enumerate(cellli):
                fup_bid = fupdf[fupdf['BatchID'] == bid]
                fup_bid.loc[i, "OriginalFilesName"] = orgfile
                fidli = listifyString(fup_bid.loc[i, "FilesID"])
                fup_bid.loc[i, "FilesID"] = fidli[j]
                orgfilesdfsli.append(fup_bid)

    # Drop the original multi-file rows and append their expansions.
    orgfilesdf = pd.concat(orgfilesdfsli)
    fupdf = fupdf[~fupdf["BatchID"].str.contains('|'.join(bidtodel), na=False)]
    extended_fup = pd.concat([fupdf, orgfilesdf])
    extended_fup.reset_index(drop=True, inplace=True)

    extfilepath = os.path.join(
        xlpath,
        "followup {} DO NOT IMPORT THIS IN DATABASE.xlsx".format(current_date()))
    extended_fup.to_excel(extfilepath, index=False)
    xllook(extfilepath, 'A1:W1', close=False)
def checkNew():
    """Check if folder from new contain only files"""
    import os
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    newFilesPath = os.path.abspath(config["path_to_new_opfiles"])
    newFiles = os.listdir(newFilesPath)

    # Collect every NEW subfolder that itself contains a directory.
    dirli = []
    for folder in newFiles:
        if folder == "Keep only files in these folders.txt":
            continue
        folderPath = os.path.join(newFilesPath, folder)
        for file in os.listdir(folderPath):
            if os.path.isdir(os.path.join(folderPath, file)):
                dirli.append(folderPath)
    dirli = list(set(dirli))

    # Recreate the report file from scratch.
    errtxt_path = os.path.join(newFilesPath,
                               "Keep only files in these folders.txt")
    try:
        os.remove(errtxt_path)
    except:
        pass

    if len(dirli) > 0:
        errtxt = open(errtxt_path, "a")
        errtxt.write(
            "\n\nPlease keep ONLY FILES in the bellow mentioned folders:\n\n\n\n")
        for d in dirli:
            folder = str(d).split("0 NEW")[-1]
            errtxt.write(str(folder + "\n\n"))
        errtxt.close()
        return "Please keep ONLY FILES in NEW folder! Check file 'Keep only files in these folders.txt' to correct the issue."
def importFollowup():
    """Validate followup.xlsx against the db schema and import it.

    Returns True when imported, otherwise a message listing the columns
    the workbook must contain.
    """
    import os
    import pandas as pd
    from fup.utils.dbwrap import tb_cols_placeholder, df2sql
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    followup_columns = list(tb_cols_placeholder('followup')['columns'])
    # BUGFIX: use os.path.join instead of string concatenation so the path
    # is correct whether or not the configured folder ends with a separator.
    xlfile = os.path.join(config['path_to_excels_to_be_imported_in_database'],
                          'followup.xlsx')
    df = pd.read_excel(xlfile)

    xlfollowupColsli = df.columns.tolist()
    cols_diff = list(set(followup_columns).difference(set(xlfollowupColsli)))
    if len(cols_diff) == 0:
        df2sql(df, 'followup')
        return True
    colsneeded = ', '.join(followup_columns)
    response = "The followup.xlsx must contain theese columns: " + colsneeded
    return response
def delDirsnotindb():
    """Delete UNASSIGNED batch folders whose batch id no longer exists in
    the database."""
    import os
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import deletetree
    from fup.helpers.batch import batchExists

    config = configInfo()
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])

    # Map batch id -> folder name for every folder missing from the db.
    todelDirs = {}
    for batchNameFolder in os.listdir(unassignedpath):
        bid = batchNameFolder.split('BID_')[-1].replace('_', '')
        if batchNameFolder == '_info.txt':
            continue
        if not batchExists(bid):
            todelDirs[bid] = batchNameFolder

    for kbid, vdirName in todelDirs.items():
        deletetree(os.path.join(unassignedpath, vdirName))
def createSplitDirs(splitFactor, batchid):
    """Duplicate an assigned batch folder *splitFactor* times under fresh
    batch ids, delete the original, and return a dict describing the split
    (or False when the batch/folder cannot be found)."""
    import os, re, shutil
    from fup.utils.commun import copytree, deletetree, generateID
    from fup.utils.jsoninfo import configInfo
    from fup.helpers.batch import batchExists

    config = configInfo()
    if batchExists(batchid) == False:
        return False

    # Get path to ASSIGNED and filter by BID.
    dir_assigned = config['path_to_batches_assigned']
    dir_bidAssigned = [d for d in os.listdir(dir_assigned)
                       if re.search(batchid, d)]
    if len(dir_bidAssigned) == 0:
        return False

    # Copy the main directory once per split.
    oldDirName = dir_bidAssigned[0]
    assignedPathOld = os.path.join(dir_assigned, oldDirName)
    # Folder name minus the trailing "BID_..." token ("<resp> <op> <ac>").
    respNameAC = ' '.join(oldDirName.split(' ')[0:-1])

    newSplitpathsd = {}
    idli = []
    for _ in range(splitFactor):
        newid = generateID()
        newdirName = respNameAC + ' BID_' + newid
        assignedPathNew = os.path.join(dir_assigned, newdirName)
        copytree(assignedPathOld, assignedPathNew, symlinks=False, ignore=None)
        newSplitpathsd[newdirName] = assignedPathNew
        idli.append(newid)

    newSplitpathsd['newids'] = ', '.join(idli)
    newSplitpathsd['oldid'] = batchid
    deletetree(assignedPathOld)
    return newSplitpathsd
def updateBatchOptions(batchlink=''):
    """Build the dict of batch-update options for the UI, filtered by the
    current user's rights (user / proofreader / admin)."""
    import re
    from fup.utils.commun import listifyString
    from fup.utils.jsoninfo import configInfo, sessionInfo
    from fup.helpers.user import get_usersdict
    from fup.helpers.batch import viewBatches
    from fup.helpers.batch import batchInfo

    config = configInfo()
    update_options_responsible = listifyString(
        config['batch_status_options_responsible'])
    update_options_proofreader = listifyString(
        config['batch_status_options_proofreader'])
    update_options_overall = listifyString(
        config['batch_status_options_overall'])
    aircraft = listifyString(config['aircraft'])
    split_batch_factor = listifyString(config['split_batch_factor'])

    # Only "first.last"-style accounts are assignable.
    allusers = get_usersdict(True)
    allusers = [user for user in allusers if re.search('\.', user)]
    print("users: ", allusers)

    session = sessionInfo()
    current_user_rights = session['current_user_rights']

    infoBatch = batchInfo(batchlink)
    try:
        infoBatch["Operator"]
    except:
        # Batch lookup failed: fall back to an empty info dict.
        infoBatch = {
            'Operator': '',
            'Aircraft': '',
            'OverallStatus': '',
            'AddedDate': '',
            'StartDate': '',
            'ImportedDateISAIM': ''
        }

    update_batch_dict = {
        "responsibleStatus": update_options_responsible,
        "proofreaderStatus": update_options_proofreader,
        "overallStatus": update_options_overall,
        "aircraft": aircraft,
        "splitBatch": split_batch_factor,
        "allusers": allusers,
        "batchlink": batchlink,
        "userBatches": viewBatches(user_batches=True),
        "disableCommentResponsible": '',
        "disableCommentProofreader": '',
        "disableCheckbox": '',
        "infoBatch": infoBatch
    }

    if current_user_rights == 'user':
        update_batch_dict["proofreaderStatus"] = ['You cannot change this']
        update_batch_dict["allusers"] = ['You cannot change this']
        update_batch_dict["overallStatus"] = ['You cannot change this']
        update_batch_dict["disableCommentProofreader"] = "disabled"
        update_batch_dict["disableCheckbox"] = "disabled"
        return update_batch_dict
    elif current_user_rights == 'proofreader':
        update_batch_dict["responsibleStatus"] = ['You cannot change this']
        update_batch_dict["disableCommentResponsible"] = "disabled"
        return update_batch_dict
    elif current_user_rights == 'admin':
        return update_batch_dict
def originalFilesPaths(infoDict, auto=False):
    """Return paths to original files in the NEW folder.

    auto=True: scan every "OP AC" folder and return a dict keyed by
    'OP AC' with the files plus the folder's root path.  auto=False:
    return the file paths for the Operator/Aircraft in *infoDict*, or an
    error message string when they cannot be resolved.
    """
    import os, re
    from fup.utils.commun import getDirs
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    newFilesPath = os.path.abspath(config["path_to_new_opfiles"])
    orgdirli = os.listdir(newFilesPath)

    if auto:
        orgdirs = getDirs([os.path.join(newFilesPath, adir)
                           for adir in orgdirli])
        dirsdict = {}
        for path in orgdirs:
            try:
                # Folder name convention: "<OP> <AC>", AC prefixed with 'A'.
                op = path.split('\\')[-1].split(' ')[0].strip()
                ac = str(path.split('\\')[-1].split(' ')[1].strip())
                if not re.search('A', ac):
                    ac = 'A' + ac
                opac = op + ' ' + ac
                infoDict['Operator'] = op
                infoDict['Aircraft'] = ac
                filespath = originalFilesPaths(infoDict, auto=False)  # recursive
                dirsdict[opac] = {'files': filespath, 'rootpath': path}
            except Exception as e:
                # Folder name has no "OP AC" structure: skip it.
                pass
        return dirsdict
    else:
        # Get original files paths to the new files added to batch.
        try:
            orgdirli = [p for p in orgdirli
                        if re.search(infoDict['Operator'], p)]
            orgdirli = [p for p in orgdirli
                        if re.search(infoDict['Aircraft'], p)
                        or re.search(infoDict['Aircraft'][1:], p)]
        except:
            response = "Can't collect Operator and Aircraft info.."
            return response

        if len(orgdirli) == 1:
            orgdir = orgdirli[0]
        else:
            response = "Operator '{}' with Aircraft '{}' was not found in NEW folder or possible duplicate!".format(
                infoDict['Operator'], infoDict['Aircraft'])
            return response

        orgpath = os.path.join(newFilesPath, orgdir)
        filespath = [os.path.join(orgpath, filepath)
                     for filepath in os.listdir(orgpath)]
        return filespath
def checkNew():
    """Check if folder from new contain only files and no duplicates"""
    import os
    from fup.utils.jsoninfo import configInfo
    from fup.helpers.files import checklifor_dups

    config = configInfo()
    newFilesPath = os.path.abspath(config["path_to_new_opfiles"])
    newFiles = os.listdir(newFilesPath)

    # Normalise every folder name to "OP AC" (AC without 'A') and look for
    # duplicates such as "TCX 300" vs "TCX A300".
    opacli = []
    for folderName in newFiles:
        nameli = [f.strip() for f in folderName.split(' ')]
        op = str(nameli[0])
        ac = str(nameli[1].replace('A', ''))
        opacli.append(str(op + ' ' + ac))
    dupsli = checklifor_dups(opacli)

    errtxt_path = os.path.join(newFilesPath,
                               "Keep only files in these folders.txt")
    try:
        os.remove(errtxt_path)
    except:
        pass

    if len(dupsli) > 0:
        errtxt = open(errtxt_path, "a")
        errtxt.write(
            "\n\nPlease delete duplicate Operator + AC mentioned below:\n\n\n")
        for folder in dupsli:
            errtxt.write(str(folder + "\n\n"))
        errtxt.write(
            "\n\n\nFound {} folders/files which are not conform.\nRead the guideline for info about to proceeed next."
            .format(len(dupsli)))
        errtxt.close()
        msg = str(
            "Please don't keep DUPLICATES in NEW folder (Like: TCX 300/TCX A300)! Check file 'Keep only files in these folders.txt' to correct the issue. Found {} folders not ok"
            .format(len(dupsli)))
        return msg

    # Verify each "OP AC" folder is a directory containing only files.
    dirli = []
    for folder in newFiles:
        folderPath = os.path.join(newFilesPath, folder)
        if not os.path.isdir(folderPath):
            dirli.append(folder)
            continue
        if not len(folder.split(' ')) == 2:
            dirli.append(folder)
            continue
        for file in os.listdir(folderPath):
            filePath = os.path.join(folderPath, file)
            if file == "Thumbs.db":
                # Windows thumbnail cache: remove it silently if possible.
                try:
                    os.remove(filePath)
                except:
                    msg = str(
                        "Please delete manually this {}".format(filePath))
                    return msg, '', ''
            if os.path.isdir(filePath):
                dirli.append(folderPath)
    dirli = list(set(dirli))

    if len(dirli) > 0:
        errtxt = open(errtxt_path, "a")
        errtxt.write(
            "\n\nFolders must contain ONLY FILES\nThe shoulnd't be any files in 0 NEW\nMove/process bellow mentioned folders/files:\n\n\n\n")
        for d in dirli:
            folder = str(d).split("0 NEW")[-1].replace('\\', '')
            errtxt.write(str(folder + "\n\n"))
        errtxt.write(
            "\n\n\nFound {} folders/files which are not conform.\nRead the guideline for info about to proceeed next."
            .format(len(dirli)))
        errtxt.close()
        msg = "Please keep ONLY FILES in NEW folder! Check file 'Keep only files in these folders.txt' to correct the issue. Found {} folders not ok".format(
            len(dirli))
        return str(msg)
def autoNewDirs():
    """Create/refresh UNASSIGNED batch folders from the NEW folder.

    Returns (infoDictli, auto, tobinli) on success, or an error-message
    triple when validation or copying fails.
    """
    # Do a check before generating batches.
    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])
    filesinfodict = originalFilesPaths(infoDict={}, auto=True)
    newdirsNames = list(filesinfodict.keys())

    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unsdict = {}
    for d in os.listdir(unassignedpath):
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d
    unassigneddirNames = list(unsdict.keys())
    # "OP AC" names present both in NEW and in UNASSIGNED.
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        # Similar to uploadFilesCreateBatch, but without a flask file object.
        print("\nProcessing {}..".format(opac))
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            # A folder for this OP/AC already exists: append to it.
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)
            existingBatchID = batchNameFolder.split('BID_')[-1].replace('_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({'source': vdict['rootpath'],
                        'destination': bindir_batch})

        filesnameli = []
        fileIDli = []
        for filepath in vdict['files']:
            if auto:
                # Refuse files that were already added to a batch before.
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = filepath.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = filepath.split('\\')[-1].replace(',', ' ')
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)
            try:
                print("Copying this file:\n{}\nto this path:\n{}".format(
                    filepath, save_path))
                shutil.copy2(filepath, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Got: {}".format(
                    str(e))
                return str(errmsg), str(e), str(e)

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': ', '.join(filesnameli),
            'OriginalFilesPath': path,
            'FilesID': ', '.join(fileIDli),
            'AddedDate': current_date()
        }
        infoDictli.append(infoaddDict)

    print("Info about:\ninfoDictli:\n{}\n, auto:\n{}\n, tobinli:\n{}\n".format(
        infoDictli, auto, tobinli))
    return infoDictli, auto, tobinli
def autoNewDirs():
    """Create/refresh UNASSIGNED batch folders from the NEW folder and
    return (infoDictli, auto, tobinli)."""
    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])
    filesinfodict = originalFilesPaths(infoDict={}, auto=True)
    newdirsNames = list(filesinfodict.keys())

    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unsdict = {}
    for d in os.listdir(unassignedpath):
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d
    unassigneddirNames = list(unsdict.keys())
    # "OP AC" names present both in NEW and in UNASSIGNED.
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        # Similar to uploadFilesCreateBatch, but without a flask file object.
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            # A folder for this OP/AC already exists: append to it.
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)
            existingBatchID = batchNameFolder.split('BID_')[-1].replace('_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({'source': vdict['rootpath'],
                        'destination': bindir_batch})

        filesnameli = []
        fileIDli = []
        for file in vdict['files']:
            if auto:
                # Refuse files that were already added to a batch before.
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)
            filesnameli.append(filename)
            fileIDli.append(fileid)
            try:
                shutil.copy2(file, save_path)
            except Exception as e:
                return str(e), str(e), str(e)

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': ', '.join(filesnameli),
            'OriginalFilesPath': path,
            'FilesID': ', '.join(fileIDli),
            'AddedDate': current_date()
        }
        infoDictli.append(infoaddDict)

    return infoDictli, auto, tobinli
def applyUpdateBatchChanges():
    """Collect the update-batch form, validate it with checkupdate(), and
    dispatch to the matching action: merge, add files, split, or a plain
    database update.  Returns a flask redirect."""
    batchChangesdict = {}
    config = configInfo()
    batchChangesdict['BatchID'] = request.form['batchid']
    batchid = batchChangesdict['BatchID']
    batchChangesdict['ResponsibleStatus'] = str(
        request.form['responsibleStatus']).replace("**", "")
    batchChangesdict['ProofreaderStatus'] = str(
        request.form['proofreaderStatus']).replace("**", "")
    batchChangesdict['OverallStatus'] = request.form['overallStatus']
    batchChangesdict['Aircraft'] = request.form['aircraft']
    batchChangesdict['Responsible'] = request.form['reAssignBatch']
    # Optional form fields: fall back to '' (or skip) when absent.
    try:
        batchChangesdict['splitBatchFactor'] = request.form['splitBatch']
        splitFactor = batchChangesdict['splitBatchFactor']
    except:
        pass
    try:
        fileobli = request.files.getlist("files2upload")
        batchChangesdict['filestoupload'] = len(fileobli)
    except:
        pass
    try:
        batchChangesdict['EstimatedTaskNbr'] = request.form['aproxtasknr']
    except:
        batchChangesdict['EstimatedTaskNbr'] = ''
    try:
        batchChangesdict['EstimatedFdgNbr'] = request.form['aproxfdgnr']
    except:
        batchChangesdict['EstimatedFdgNbr'] = ''
    try:
        batchChangesdict['comments'] = request.form['comments']
    except:
        batchChangesdict['comments'] = ''

    updateResponse = checkupdate(batchChangesdict)
    print('updateResponse: ', updateResponse)
    if updateResponse != False:
        if updateResponse == 'merge':
            # Deal with Merge Batches.
            batches = str(batchid)
            return redirect(
                url_for('updatebatch.applyMergeBatches', batches=batches))
        elif updateResponse == 'add':
            batchStatus = batchInfo(batchid)
            batchStatus = batchStatus['OverallStatus'][0]
            # Deal with adding more files to one batch.
            if batchStatus != False:
                if batchStatus != "UNASSIGNED":
                    bidDirAssigned = os.path.abspath(
                        config['path_to_batches_assigned'])
                    assginedDirsli = os.listdir(bidDirAssigned)
                    assignedDir = [
                        folderName for folderName in assginedDirsli
                        if re.search(batchid, folderName)
                    ][0]
                    path = os.path.join(bidDirAssigned, assignedDir)
                    filesnameli = []
                    pathsli = []
                    fileIDli = []
                    for fileob in fileobli:
                        filename = secure_filename(fileob.filename)
                        fileid = generateID()
                        newFileName = 'FID_' + fileid + ' ' + filename
                        save_path = os.path.join(path, newFileName)
                        fileob.save(save_path)
                        # Check if file was added before.
                        fileinfo = getfileSizeMtime(save_path)
                        fileinfo['FileID'], fileinfo['FileName'] = fileid, filename
                        fileinfo['AddedInBatch'] = batchid
                        responseFileInfo = checkFileInfo(fileinfo)
                        if responseFileInfo != True:
                            os.remove(save_path)
                            errormessage = responseFileInfo
                            return redirect(
                                url_for('comm.showFailedPage',
                                        errormessage=errormessage))
                        else:
                            sql_insertDict('fileshistory', fileinfo)
                            filesnameli.append(filename)
                            pathsli.append(path)
                            fileIDli.append(fileid)
                    orgfilesname = ', '.join(filesnameli)
                    newfilespath = ', '.join(pathsli)
                    filesId = ', '.join(fileIDli)
                    if appendNewFilesToBatch(batchid, orgfilesname,
                                             newfilespath, filesId) == True:
                        return redirect(url_for('comm.showSuccessPage'))
                    else:
                        errormessage = "Changes not saved into the database!"
                        return redirect(
                            url_for('comm.showFailedPage',
                                    errormessage=errormessage))
                elif batchStatus == "UNASSIGNED":
                    errormessage = "Barch is UNASSIGNED! You can add new files using this method only if this batch is ASSIGNED!"
                    return redirect(
                        url_for('comm.showFailedPage',
                                errormessage=errormessage))
        elif updateResponse == 'split':
            # Deal with the splitBatch.
            splitFactor_Batch = str(splitFactor) + '_' + str(batchid)
            return redirect(
                url_for('updatebatch.applySplitBatches',
                        splitFactor_Batch=splitFactor_Batch))
        elif updateResponse == 'update':
            # Just update the batch in the database.
            if updateBatchinFollowup(batchChangesdict):
                return redirect(url_for('comm.showSuccessPage'))
            else:
                errormessage = str(
                    "Moving BID_{} folders failed or DCS info not found! Check docs for more info.."
                    .format(batchid))
                return redirect(
                    url_for('comm.showFailedPage', errormessage=errormessage))
    else:
        print(updateResponse)
        errormessage = "Only one change can be applyed for options with '*' sign! Reset to defaults by clicking '| Update Batches' title"
        return redirect(
            url_for('comm.showFailedPage', errormessage=errormessage))
def moveDirsforUpdate(infoBatchdict):
    """Move a batch folder between stage directories according to the
    status change carried in *infoBatchdict*, updating the database where
    the target stage requires it.  Returns True/False."""
    import os, re
    from fup.helpers.batchdirs import moveDirName
    from fup.utils.jsoninfo import configInfo
    from fup.helpers.files import dcsinfo
    from fup.utils.dbwrap import sql_updateDict
    from fup.utils.commun import current_date

    # e.g. infoBatchdict = {'ResponsibleStatus': 'TO BE CHECKED', 'BatchID': 'VqUSKc'}
    batchid = infoBatchdict['BatchID']
    config = configInfo()

    for kcol, val in infoBatchdict.items():
        if kcol == 'ResponsibleStatus' and val == 'TO BE CHECKED':
            ok = moveDirName(batchid, "path_to_batches_assigned",
                             "path_to_batches_tobechecked")
            if ok:
                return True
            print("[info]Batch {} not found in ASSIGNED folder".format(batchid))
            return False
        elif kcol == 'ProofreaderStatus' and val == 'REWORK':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_assigned")
            if ok:
                return True
            print("[info]Batch {} not found in TO BE CHECKED folder".format(
                batchid))
            return False
        elif kcol == 'ProofreaderStatus' and val == 'STANDBY':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_instandby")
            if ok:
                return True
            print("[info]Batch {} not found in TO BE CHECKED folder".format(
                batchid))
            return False
        elif kcol == 'ProofreaderStatus' and val == 'UNRECORDABLE':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_unrecordable")
            if ok:
                return True
            print("[info]Batch {} not found in TO BE CHECKED folder".format(
                batchid))
            return False
        elif kcol == 'ProofreaderStatus' and val == 'TO BE IMPORTED':
            try:
                # Locate the batch's DCS xml and push its info to the db
                # before moving the folder.
                dcspath = config['path_to_dcs_info']
                dcsli = os.listdir(dcspath)
                dcsli = [f for f in dcsli if re.search(batchid, f)]
                dcsli = [f for f in dcsli if re.search('DCS', f)]
                dcsli = [f for f in dcsli if re.search('.xml', f)]
                dcsfilepath = os.path.join(dcspath, dcsli[0])
                dcsdictinfo = dcsinfo(dcsfilepath)
                dcsdictinfo['BatchID'] = batchid
                if sql_updateDict('followup', dcsdictinfo, 'BatchID') == False:
                    print("Cannot update the DCS information to database!")
                    return False
                ok = moveDirName(batchid, "path_to_batches_tobechecked",
                                 "path_to_batches_tbimported")
                if ok:
                    return True
                print("Batch {} not found in TO BE CHECKED folder".format(
                    batchid))
                return False
            except:
                print('Cannot open/read the DCS xml file!')
                return False
        elif kcol == 'ProofreaderStatus' and val == 'FINISHED':
            ok = moveDirName(batchid, "path_to_batches_tbimported",
                             "path_to_batches_finished")
            if ok:
                # Stamp the import date and close the batch in the db.
                cdate = current_date()
                importedDatedict = {
                    'BatchID': batchid,
                    'ImportedDateISAIM': cdate,
                    'ResponsibleStatus': 'FINISHED'
                }
                sql_updateDict('followup', importedDatedict, 'BatchID')
            else:
                print("Batch {} not found in TO BE IMPORTED folder".format(
                    batchid))
                return False
def autoNewDirs():
    """Create/refresh UNASSIGNED batch folders from the NEW folder.

    Returns (infoDictli, auto, tobinli) on success, or an error-message
    triple when validation or copying fails.
    """
    # Do a check before generating batches.
    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])
    filesinfodict = originalFilesPaths(infoDict={}, auto=True)
    newdirsNames = list(filesinfodict.keys())

    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unsdict = {}
    for d in os.listdir(unassignedpath):
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d
    unassigneddirNames = list(unsdict.keys())
    # "OP AC" names present both in NEW and in UNASSIGNED.
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        # Similar to uploadFilesCreateBatch, but without a flask file object.
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            # A folder for this OP/AC already exists: append to it.
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)
            existingBatchID = batchNameFolder.split('BID_')[-1].replace('_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({'source': vdict['rootpath'],
                        'destination': bindir_batch})

        filesnameli = []
        fileIDli = []
        for file in vdict['files']:
            if auto:
                # Refuse files that were already added to a batch before.
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)
            try:
                shutil.copy2(file, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Please delete from UNASSIGNED all new created folders"
                print(errmsg)
                return str(errmsg), str(e), str(e)

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': ', '.join(filesnameli),
            'OriginalFilesPath': path,
            'FilesID': ', '.join(fileIDli),
            'AddedDate': current_date()
        }
        infoDictli.append(infoaddDict)

    print("yuhuuu infoDictli, auto, tobinli", infoDictli, auto, tobinli)
    return infoDictli, auto, tobinli