def addBatch(infoaddDict, auto):
    """Register a new batch: validate and record its files, then insert the
    batch row into the 'followup' table.

    Returns whatever sql_insertDict returns on success, or the error
    response from saveFilesInfo on failure (False / message string).
    """
    from fup.utils.dbwrap import sql_insertDict
    from fup.helpers.files import saveFilesInfo
    from fup.utils.commun import deletetree  # , cleanPath

    # A freshly added batch always starts out unassigned.
    for field in ('Responsible', 'Proofreader', 'ResponsibleStatus',
                  'ProofreaderStatus', 'OverallStatus'):
        infoaddDict[field] = 'UNASSIGNED'

    response = saveFilesInfo(infoaddDict, auto)
    if response == True:
        return sql_insertDict('followup', infoaddDict)

    # Validation failed. For auto-created batches (the default processing
    # path) drop the staged directory; for manual additions keep the files.
    if auto:
        deletetree(infoaddDict['OriginalFilesPath'])
    return response
def splitBatches(splitFactor_batchid):
    """Split one batch into several new ones.

    splitFactor_batchid has the form '<factor>_<BatchID>'. Creates the split
    directories, clones the original row once per new BatchID (without the
    per-batch estimate columns), removes the original row, and returns the
    new BatchID string — or False on any failure.
    """
    from fup.helpers.batchdirs import createSplitDirs
    from fup.helpers.batch import batchInfo
    from fup.utils.commun import current_date, listifyString
    from fup.utils.dbwrap import sql_insertDict, sql_deleteRow

    parts = splitFactor_batchid.split('_')
    splitFactor = int(parts[0])
    batchid = parts[1]

    oldBatchinfo = batchInfo(batchid)
    infodirs = createSplitDirs(splitFactor, batchid)
    if infodirs == False:
        return False

    prevBatchID = infodirs['oldid']
    newBatchID = infodirs['newids']

    # Append a split note to the accumulated changes log.
    previousLog = ''.join(str(entry) for entry in oldBatchinfo['ChangesLog'])
    oldBatchinfo['ChangesLog'] = previousLog + ', Batch "{}" was splited in batches: "{}", on {}'.format(prevBatchID, newBatchID, current_date())

    # Insert one clone of the original row per new BatchID.
    for newbid in listifyString(newBatchID):
        oldBatchinfo['BatchID'] = newbid
        oldBatchinfo.pop('EstimatedTaskNbr', None)
        oldBatchinfo.pop('EstimatedFdgNbr', None)
        if sql_insertDict('followup', oldBatchinfo) == False:
            return False

    if sql_deleteRow('followup', 'BatchID', batchid) == False:
        return False
    return newBatchID
def mergeBatches(batchidstrli):
    """Merge several batches (comma-separated BatchID string) into one.

    Merges the batch directories on disk via mergeDirBatches, combines the
    database rows of the source batches into a single row keyed by the new
    merged BatchID, inserts it into 'followup', and deletes the source rows.

    Returns the merged BatchID string on success, False on any failure.
    """
    from fup.helpers.batchdirs import mergeDirBatches
    from fup.utils.commun import current_date, listifyString
    from fup.helpers.batch import batchInfo
    # NOTE: sql_updateDict was imported here before but never used — removed.
    from fup.utils.dbwrap import sql_insertDict, sql_deleteRow
    import pandas as pd

    mergedInfodict = mergeDirBatches(batchidstrli)
    bidli = listifyString(batchidstrli)

    if isinstance(mergedInfodict, dict):
        prevInfodict = {}
        for batch in mergedInfodict['batchidli']:
            previnfo = batchInfo(batch)
            # Per-batch estimates do not carry over into the merged row.
            previnfo.pop('EstimatedTaskNbr', None)
            previnfo.pop('EstimatedFdgNbr', None)
            prevInfodict[batch] = previnfo
    else:
        print('Cannot merge dirs!')
        return False

    # Gather the previous rows in one DataFrame, then back to a dict of lists.
    dfli = [pd.DataFrame.from_dict(prevInfodict[bid]) for bid in prevInfodict.keys()]
    dfall = pd.concat(dfli, axis=0)
    prevInfoDictAll = dfall.to_dict('list')
    prevInfoDictAll['BatchID'] = mergedInfodict['mergedID']

    infolog = 'Batch merged from "{}" on {}'.format(batchidstrli, current_date())
    prevlog = [str(l) for l in prevInfoDictAll['ChangesLog']]
    # De-duplicate previous log entries before appending the merge note.
    prevInfoDictAll['ChangesLog'] = ', '.join(list(set(prevlog))) + ', ' + infolog
    prevInfoDictAll['AddedDate'] = current_date()

    if sql_insertDict('followup', prevInfoDictAll) == False:
        return False
    for bid in bidli:
        if sql_deleteRow('followup', 'BatchID', bid) == False:
            return False
    return mergedInfodict['mergedID']
def saveFilesInfo(infoDict, auto):
    """Collect size/mtime/ID info for every original file of a batch and
    record it in the 'fileshistory' table.

    infoDict must provide 'OriginalFilesPath' and 'BatchID'.

    Returns:
        True on success;
        an error-message string when originalFilesPaths fails or a file
        fails checkFileInfo (the staged batch directory is deleted then);
        False when a database insert fails.
    """
    import os
    # NOTE: previously also imported pandas, sql2df and updateDBforNewFiles,
    # none of which were used — removed.
    from fup.helpers.files import getfileSizeMtime, matchOriginalinNew, getFileId, originalFilesPaths, checkFileInfo
    from fup.utils.dbwrap import sql_insertDict
    from fup.utils.commun import deletetree

    path = infoDict['OriginalFilesPath']
    newfiles = os.listdir(path)
    orgfilespath = originalFilesPaths(infoDict)
    if isinstance(orgfilespath, str):
        # originalFilesPaths returned an error message instead of a path list.
        return orgfilespath

    # Paths are split on '\\' — assumes Windows-style separators, consistent
    # with the rest of this module.
    orgfiles = [p.split('\\')[-1] for p in orgfilespath]
    matchedFiles = matchOriginalinNew(orgfiles, newfiles)

    for filepath in orgfilespath:
        fileinfo = getfileSizeMtime(filepath)
        fileinfo['FileID'], fileinfo['FileName'] = getFileId(filepath, matchedFiles)
        fileinfo['AddedInBatch'] = infoDict['BatchID']
        responseFileInfo = checkFileInfo(fileinfo)
        if responseFileInfo != True:
            # File invalid (e.g. already registered): discard the whole
            # staged batch directory and report the reason to the caller.
            deletetree(path)
            return responseFileInfo
        # Auto batches skip the history insert here — presumably recorded
        # later in the pipeline; TODO confirm against callers.
        if not auto:
            if sql_insertDict('fileshistory', fileinfo) == False:
                return False
    return True
def updateDBforNewFiles():
    """Detect files newly dropped into existing batch folders and sync the
    'followup' and 'fileshistory' tables accordingly.

    Works off the 'OriginalFilesPath' column: when new files are added to an
    existing batch directory, a temporary BatchID row appears in 'followup'.
    This merges each temporary row into the original batch row, deletes the
    temporary rows, and registers the new files in 'fileshistory'.

    Returns True on success, or an error-message string on failure.
    """
    import os, re
    import pandas as pd
    from fup.utils.dbwrap import sql_insertDict, sql_updateDict, get_dftable, sql_deleteRow
    from fup.helpers.batch import batchInfo
    from fup.helpers.files import getfileSizeMtime
    from fup.utils.commun import list_duplicates

    # --- Update 'followup' with the new files added to each batch ---
    followupdf = get_dftable('followup')
    orgpaths = followupdf['OriginalFilesPath'].tolist()
    orgpaths_nodups = list(set(orgpaths))

    # Map: original BatchID -> trimmed info of the temporary rows created
    # when new files were added (rows whose BatchID differs from the BID
    # encoded in the directory name).
    newtempbid = {}
    for opath in orgpaths_nodups:
        # NOTE(review): assumes Windows path separators and a directory name
        # containing 'BID_<id>' — confirm on other platforms.
        bid = opath.split("\\")[-1].split('BID_')[-1].strip()
        followupdf_bid = followupdf[followupdf['OriginalFilesPath'].str.contains('|'.join([bid]), na=False)]
        bids = followupdf_bid["BatchID"].tolist()
        bidtodelli = [b for b in bids if b != bid]
        tempd = {}
        for biddel in bidtodelli:
            infobatch_previous = batchInfo(biddel)
            if infobatch_previous != False:
                # Keep only the columns that need to be merged.
                for k in list(infobatch_previous.keys()):
                    if k not in ['OriginalFilesName', 'FilesID', 'ChangesLog', 'BatchID']:
                        infobatch_previous.pop(k, None)
                tempd["prevInfo"] = infobatch_previous
            # (dead commented-out code removed: built a "BatchID not in
            # database" response string here)
        newtempbid[bid] = tempd

    # Map: BatchID -> trimmed info of the already-existing batch row.
    # A duplicated OriginalFilesPath indicates a batch that acquired a
    # temporary sibling row.
    orgpaths_dups = list_duplicates(orgpaths)
    existingbid = {}
    for opath in orgpaths_dups:
        tempd = {}
        bid = opath.split("\\")[-1].split('BID_')[-1].strip()
        infobatch_previous = batchInfo(bid)
        if infobatch_previous != False:
            for k in list(infobatch_previous.keys()):
                if k not in ['OriginalFilesName', 'FilesID', 'ChangesLog', 'BatchID']:
                    infobatch_previous.pop(k, None)
            tempd["prevInfo"] = infobatch_previous
        # (dead commented-out code removed: same "not in database" response)
        existingbid[bid] = tempd

    # Merge each temporary row into its matching original row, then delete
    # the temporary BatchIDs from the database.
    tempbidtodel = []
    for bidorg, dorg in existingbid.items():
        for bidtemp, dtemp in newtempbid.items():
            if bidorg == bidtemp:
                # Build DataFrames from the two info dicts and concatenate.
                dforg = pd.DataFrame.from_dict(dorg['prevInfo'])
                dftemp = pd.DataFrame.from_dict(dtemp['prevInfo'])
                todelli = dftemp['BatchID'].tolist()
                for b in todelli:
                    tempbidtodel.append(b)
                bidtodelli = list(set(tempbidtodel))
                dfconcat = pd.concat([dforg, dftemp], axis=0)
                dfdict = dfconcat.to_dict('list')
                # Create the combined dict used to update 'followup'
                # (comma-joined string per column).
                joineddict = {}
                for kcol, vrow in dfdict.items():
                    if kcol == "BatchID":
                        # Drop temporary IDs; keep only the original BatchID.
                        vrow = list(set(vrow).difference(set(bidtodelli)))
                    try:
                        li = list(set(filter(None, vrow)))
                        vrow = ', '.join(li)
                    except:
                        # Column values that cannot be joined stay as-is.
                        pass
                    joineddict[kcol] = vrow
                if sql_updateDict('followup', joineddict, 'BatchID') == False:
                    updatefup_failed = "Update in followup failed for BID_{} file {}..".format(joineddict['BatchID'], joineddict['OriginalFilesName'])
                    return updatefup_failed
                # Delete the now-merged temporary BatchID rows.
                for bid in bidtodelli:
                    if sql_deleteRow('followup', 'BatchID', bid):
                        pass
                    else:
                        return "Please delete from database {}".format(str(bidtodelli))

    # --- Update the 'fileshistory' table with the new files ---
    fileshistorydf = get_dftable('fileshistory')
    fileInfoli = []
    for fpath in orgpaths_nodups:
        fileInfo = {}
        bid = fpath.split("\\")[-1].split('BID_')[-1].strip()
        fhistdf_filtered = fileshistorydf[fileshistorydf["AddedInBatch"] == bid]
        fids = fhistdf_filtered["FileID"].tolist()
        files = os.listdir(fpath)
        fidorgli = []
        for file in files:
            # On-disk names look like 'FID_<id> <original name>'.
            fidorg = file.split(' ')[0].split('_')[-1]
            fidorgli.append(fidorg)
        # Symmetric difference: IDs on disk but not in history (and vice
        # versa), e.g. [1,2] vs [1,2,3] => [3].
        newfid = list(set(fids).symmetric_difference(set(fidorgli)))
        newfilepathli = []
        for fid in newfid:
            for file in files:
                if fid == file.split(' ')[0].split('_')[-1]:
                    newfilepath = os.path.join(fpath, file)
                    newfilepathli.append(newfilepath)
        for newfilepath in newfilepathli:
            fileSpec = getfileSizeMtime(newfilepath)
            # Strip the 'FID_<id>' prefix to recover the original file name.
            fileName = ' '.join(newfilepath.split('\\')[-1].split(' ')[1:])
            # NOTE(review): 'FileID' is set to the whole newfid list, not the
            # single ID of this file — looks suspicious; confirm intended.
            fileInfo = {
                'FileID': newfid,
                'AddedInBatch': [bid],
                'ModificationDate': [fileSpec['ModificationDate']],
                'FileName': [fileName],
                'FileSizeBytes': [fileSpec['FileSizeBytes']]
            }
            fileInfoli.append(fileInfo)

    for finfodict in fileInfoli:
        if sql_insertDict('fileshistory', finfodict) == False:
            return "Please update manually in fileshistory {}".format(str(finfodict))
    return True
def applyUpdateBatchChanges():
    """Flask view: apply the batch changes submitted from the update form.

    Reads the posted form fields, asks checkupdate() which single action was
    requested ('merge', 'add', 'split', 'update', or False on conflicting
    input), performs it, and redirects to a success/failed page or to the
    dedicated merge/split endpoints.

    NOTE(review): relies on module-level names not visible in this chunk
    (request, redirect, url_for, configInfo, checkupdate, batchInfo,
    secure_filename, generateID, getfileSizeMtime, checkFileInfo,
    sql_insertDict, appendNewFilesToBatch, updateBatchinFollowup, os, re).
    """
    batchChangesdict = {}
    config = configInfo()
    batchChangesdict['BatchID'] = request.form['batchid']
    batchid = batchChangesdict['BatchID']
    # '**' marks the default option in the dropdowns — strip it before saving.
    batchChangesdict['ResponsibleStatus'] = str(request.form['responsibleStatus']).replace("**", "")
    batchChangesdict['ProofreaderStatus'] = str(request.form['proofreaderStatus']).replace("**", "")
    batchChangesdict['OverallStatus'] = request.form['overallStatus']
    batchChangesdict['Aircraft'] = request.form['aircraft']
    batchChangesdict['Responsible'] = request.form['reAssignBatch']
    # Optional form fields: each is absent unless the user used that control.
    # NOTE(review): splitFactor / fileobli stay unbound when their field is
    # missing — later branches assume checkupdate() only returns
    # 'split'/'add' when the field was present; confirm.
    try:
        batchChangesdict['splitBatchFactor'] = request.form['splitBatch']
        splitFactor = batchChangesdict['splitBatchFactor']
    except:
        pass
    try:
        fileobli = request.files.getlist("files2upload")
        batchChangesdict['filestoupload'] = len(fileobli)
    except:
        pass
    try:
        batchChangesdict['EstimatedTaskNbr'] = request.form['aproxtasknr']
    except:
        batchChangesdict['EstimatedTaskNbr'] = ''
        pass
    try:
        batchChangesdict['EstimatedFdgNbr'] = request.form['aproxfdgnr']
    except:
        batchChangesdict['EstimatedFdgNbr'] = ''
        pass
    try:
        batchChangesdict['comments'] = request.form['comments']
    except:
        batchChangesdict['comments'] = ''
        pass

    updateResponse = checkupdate(batchChangesdict)
    print('updateResponse: ', updateResponse)
    if updateResponse != False:
        if updateResponse == 'merge':
            # Deal with merging batches: delegate to the merge endpoint.
            batches = str(batchid)
            return redirect(url_for('updatebatch.applyMergeBatches', batches=batches))
        elif updateResponse == 'add':
            batchStatus = batchInfo(batchid)
            batchStatus = batchStatus['OverallStatus'][0]
            # Deal with adding more files to an existing (assigned) batch.
            # NOTE(review): the `batchStatus != False` guard runs after
            # batchStatus was already indexed above — a False return from
            # batchInfo would raise before reaching it; confirm intended.
            if batchStatus != False:
                if batchStatus != "UNASSIGNED":
                    bidDirAssigned = os.path.abspath(config['path_to_batches_assigned'])
                    assginedDirsli = os.listdir(bidDirAssigned)
                    # Locate the assigned folder whose name contains batchid.
                    assignedDir = [folderName for folderName in assginedDirsli if re.search(batchid, folderName)][0]
                    path = os.path.join(bidDirAssigned, assignedDir)
                    filesnameli = []
                    pathsli = []
                    fileIDli = []
                    for fileob in fileobli:
                        filename = secure_filename(fileob.filename)
                        fileid = generateID()
                        # Saved on disk as 'FID_<id> <original name>'.
                        newFileName = 'FID_' + fileid + ' ' + filename
                        save_path = os.path.join(path, newFileName)
                        fileob.save(save_path)
                        # Check if this file was added before.
                        fileinfo = getfileSizeMtime(save_path)
                        fileinfo['FileID'], fileinfo['FileName'] = fileid, filename
                        fileinfo['AddedInBatch'] = batchid
                        responseFileInfo = checkFileInfo(fileinfo)
                        if responseFileInfo != True:
                            # Duplicate/invalid file: undo the save, report.
                            os.remove(save_path)
                            errormessage = responseFileInfo
                            return redirect(url_for('comm.showFailedPage', errormessage=errormessage))
                        else:
                            sql_insertDict('fileshistory', fileinfo)
                            filesnameli.append(filename)
                            pathsli.append(path)
                            fileIDli.append(fileid)
                    orgfilesname = ', '.join(filesnameli)
                    newfilespath = ', '.join(pathsli)
                    filesId = ', '.join(fileIDli)
                    if appendNewFilesToBatch(batchid, orgfilesname, newfilespath, filesId) == True:
                        return redirect(url_for('comm.showSuccessPage'))
                    else:
                        errormessage = "Changes not saved into the database!"
                        return redirect(url_for('comm.showFailedPage', errormessage=errormessage))
                elif batchStatus == "UNASSIGNED":
                    errormessage = "Barch is UNASSIGNED! You can add new files using this method only if this batch is ASSIGNED!"
                    return redirect(url_for('comm.showFailedPage', errormessage=errormessage))
        elif updateResponse == 'split':
            # Deal with splitting the batch: delegate to the split endpoint.
            splitFactor_Batch = str(splitFactor) + '_' + str(batchid)
            return redirect(url_for('updatebatch.applySplitBatches', splitFactor_Batch=splitFactor_Batch))
        elif updateResponse == 'update':
            # Just update the batch row in the database.
            if updateBatchinFollowup(batchChangesdict):
                return redirect(url_for('comm.showSuccessPage'))
            else:
                errormessage = str("Moving BID_{} folders failed or DCS info not found! Check docs for more info..".format(batchid))
                return redirect(url_for('comm.showFailedPage', errormessage=errormessage))
    else:
        # Conflicting options selected (more than one '*' action).
        print(updateResponse)
        errormessage = "Only one change can be applyed for options with '*' sign! Reset to defaults by clicking '| Update Batches' title"
        return redirect(url_for('comm.showFailedPage', errormessage=errormessage))