Example #1
def saveFilesInfo(infoDict, auto):
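    #Collect size/modification info for each original file, attach its FileID and BatchID, validate it and (when auto is False) record it in the fileshistory table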
    import os
    import pandas
    from fup.helpers.files import getfileSizeMtime, matchOriginalinNew, getFileId, originalFilesPaths, checkFileInfo, updateDBforNewFiles
    from fup.utils.dbwrap import sql_insertDict, sql2df
    from fup.utils.commun import deletetree

    path = infoDict['OriginalFilesPath']
    #print("yuhuu ",path)

    newfiles = os.listdir(path)

    orgfilespath = originalFilesPaths(infoDict)
    if isinstance(orgfilespath, str):
        return orgfilespath  #response

    orgfiles = [path.split('\\')[-1] for path in orgfilespath]

    matchedFiles = matchOriginalinNew(orgfiles, newfiles)

    for filepath in orgfilespath:
        fileinfo = getfileSizeMtime(filepath)
        fileinfo['FileID'], fileinfo['FileName'] = getFileId(
            filepath, matchedFiles)
        fileinfo['AddedInBatch'] = infoDict['BatchID']
        responseFileInfo = checkFileInfo(fileinfo)
        #print(filepath)
        if responseFileInfo != True:
            deletetree(path)
            return responseFileInfo

        if not auto:
            #only the manual flow records the file in fileshistory here
            if sql_insertDict('fileshistory', fileinfo) == False:
                return False

    return True
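
A minimal usage sketch (not part of the original code): saveFilesInfo expects an infoDict carrying at least 'OriginalFilesPath' and 'BatchID', and returns True on success, an error string from the file checks, or False if the database insert fails. The values below are placeholders.

# Hypothetical call; the BatchID format and folder layout are assumptions
infoDict = {
    'BatchID': 'A1B2C3',
    'OriginalFilesPath': r'C:\batches\unassigned\OperatorX A320 BID_A1B2C3',
}
response = saveFilesInfo(infoDict, auto=False)
if response is not True:
    print("saveFilesInfo failed:", response)  # error string or False
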
Example #2
def updateDBforNewFiles():
    #Verify if new files were added to an existing batch; if so, update the db
    import os, re
    import pandas as pd
    from fup.utils.dbwrap import sql_insertDict, sql_updateDict, get_dftable, sql_deleteRow
    from fup.helpers.batch import batchInfo
    from fup.helpers.files import getfileSizeMtime
    from fup.utils.commun import list_duplicates

    #Update followup with the new file added to the batch

    followupdf = get_dftable('followup')
    orgpaths = followupdf['OriginalFilesPath'].tolist()
    orgpaths_nodups = list(set(orgpaths))

    newtempbid = {}
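    #For each unique original-files folder, gather the reduced batch info (names, FilesID, ChangesLog, BatchID) of the temporary BatchIDs that point to that folder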
    for opath in orgpaths_nodups:
        bid = opath.split("\\")[-1].split('BID_')[-1].strip()

        followupdf_bid = followupdf[
            followupdf['OriginalFilesPath'].str.contains('|'.join([bid]),
                                                         na=False)]

        bids = followupdf_bid["BatchID"].tolist()
        bidtodelli = [b for b in bids if b != bid]

        tempd = {}
        for biddel in bidtodelli:
            infobatch_previous = batchInfo(biddel)
            if infobatch_previous != False:
                for k in list(infobatch_previous.keys()):
                    if k not in [
                            'OriginalFilesName', 'FilesID', 'ChangesLog',
                            'BatchID'
                    ]:
                        infobatch_previous.pop(k, None)
                tempd["prevInfo"] = infobatch_previous
            # else:
            #     response_notfound = "BatchID {} is not in database! Please delete from unassigned folder {}!".format(existingBatchID, existingBatchID)
            #     tempd["prevInfo"] = response_notfound
            #     #return response_notfound, response_notfound, response_notfound

            newtempbid[bid] = tempd

    orgpaths_dups = list_duplicates(orgpaths)

    existingbid = {}
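    #Same reduced info, but for the existing BatchID of every original-files folder that appears more than once in followup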
    for opath in orgpaths_dups:
        tempd = {}
        bid = opath.split("\\")[-1].split('BID_')[-1].strip()

        infobatch_previous = batchInfo(bid)
        if infobatch_previous != False:
            for k in list(infobatch_previous.keys()):
                if k not in [
                        'OriginalFilesName', 'FilesID', 'ChangesLog', 'BatchID'
                ]:
                    infobatch_previous.pop(k, None)
            #print('OK ',infobatch_previous)
            tempd["prevInfo"] = infobatch_previous
        # else:
        #     response_notfound = "BatchID {} is not in database! Please delete from unassigned folder {}!".format(existingBatchID, existingBatchID)
        #     #print('NOK ',response_notfound)
        #     tempd["prevInfo"] = response_notfound
        #     #return response_notfound, response_notfound, response_notfound

        existingbid[bid] = tempd

    tempbidtodel = []
    for bidorg, dorg in existingbid.items():
        for bidtemp, dtemp in newtempbid.items():
            if bidorg == bidtemp:
                #make df from dict
                dforg = pd.DataFrame.from_dict(dorg['prevInfo'])
                dftemp = pd.DataFrame.from_dict(dtemp['prevInfo'])

                todelli = dftemp['BatchID'].tolist()
                for b in todelli:
                    tempbidtodel.append(b)

                bidtodelli = list(set(tempbidtodel))

                dfconcat = pd.concat([dforg, dftemp], axis=0)
                dfdict = dfconcat.to_dict('list')

                #Create dict to update followup
                joineddict = {}
                for kcol, vrow in dfdict.items():
                    if kcol == "BatchID":
                        vrow = list(set(vrow).difference(set(bidtodelli)))
                    try:
                        li = list(set(filter(None, vrow)))
                        vrow = ', '.join(li)
                    except:
                        pass

                    joineddict[kcol] = vrow

                if sql_updateDict('followup', joineddict, 'BatchID') == False:
                    updatefup_failed = "Update in followup failed for BID_{} file {}..".format(
                        joineddict['BatchID'], joineddict['OriginalFilesName'])
                    #print(updatefup_failed)
                    return updatefup_failed
                #Delete new temp bid from db
                for bid in bidtodelli:
                    if sql_deleteRow('followup', 'BatchID', bid):
                        pass
                    else:
                        #print("NOK")
                        return "Please delete from database {}".format(
                            str(bidtodelli))

    #Update fileshistory table in db

    fileshistorydf = get_dftable('fileshistory')

    fileInfoli = []
    for fpath in orgpaths_nodups:
        fileInfo = {}
        bid = fpath.split("\\")[-1].split('BID_')[-1].strip()
        fhistdf_filtered = fileshistorydf[fileshistorydf["AddedInBatch"] ==
                                          bid]
        fids = fhistdf_filtered["FileID"].tolist()
        files = os.listdir(fpath)

        fidorgli = []
        for file in files:
            fidorg = file.split(' ')[0].split('_')[-1]
            fidorgli.append(fidorg)

        newfid = list(set(fids).symmetric_difference(set(
            fidorgli)))  # difference of/from 2 lists [1,2] and [1,2,3] => [3]

        #print(newfid)

        newfilepathli = []
        for fid in newfid:
            for file in files:
                if fid == file.split(' ')[0].split('_')[-1]:
                    #print(fid, file)
                    newfilepath = os.path.join(fpath, file)
                    newfilepathli.append(newfilepath)

            for newfilepath in newfilepathli:
                fileSpec = getfileSizeMtime(newfilepath)
                fileName = ' '.join(newfilepath.split('\\')[-1].split(' ')[1:])
                fileInfo = {
                    'FileID': newfid,
                    'AddedInBatch': [bid],
                    'ModificationDate': [fileSpec['ModificationDate']],
                    'FileName': [fileName],
                    'FileSizeBytes': [fileSpec['FileSizeBytes']]
                }

                fileInfoli.append(fileInfo)

    for finfodict in fileInfoli:
        if sql_insertDict('fileshistory', finfodict) == False:
            return "Please update manually in fileshistory {}".format(
                str(finfodict))
            #print("update manually")

    #print("return True")
    return True
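
The detection of newly added files above relies on the 'FID_<id> <original name>' naming convention used when files are copied into a batch folder, together with a symmetric_difference against the FileIDs already stored in fileshistory. A standalone illustration with made-up IDs:

# Illustration only; the IDs and file names are invented
fids = ['001', '002']                                      # FileIDs already in fileshistory for this batch
files = ['FID_001 report.pdf', 'FID_002 data.xlsx', 'FID_003 notes.txt']
fidorgli = [f.split(' ')[0].split('_')[-1] for f in files]
newfid = list(set(fids).symmetric_difference(set(fidorgli)))
print(newfid)                                              # ['003'] -> only the file not yet recorded
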
Example #3
def autoNewDirs():

    #do a check before generating batches

    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime, checkFileInfo
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    #print('filesinfodict', str(filesinfodict).encode("utf-8"))

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        print("\nProcessing {}..".format(opac))
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []

        for filepath in vdict['files']:
            if auto:
                fileinfo = getfileSizeMtime(filepath)
                #print("autoNewDirs: getfileSizeMtime, fileinfo> ", fileinfo)
                fileinfo["FileName"] = filepath.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = filepath.split('\\')[-1].replace(',', ' ')
            #print("autoNewDirs: filename> ", filename, file)
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            try:
                print("Copying this file:\n{}\nto this path:\n{}".format(
                    filepath, save_path))
                shutil.copy2(filepath, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Got: {}".format(
                    str(e))
                return str(errmsg), str(e), str(e)

        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)

    print("Info about:\ninfoDictli:\n{}\n, auto:\n{}\n, tobinli:\n{}\n".format(
        infoDictli, auto, tobinli))
    return infoDictli, auto, tobinli
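
A hedged sketch of how a caller might consume the three return values; inserting each dict into the followup table and archiving the source folders are assumptions about the surrounding workflow, not something shown in this example.

import shutil
from fup.utils.dbwrap import sql_insertDict

infoDictli, auto, tobinli = autoNewDirs()
if isinstance(infoDictli, str):
    print("autoNewDirs failed:", infoDictli)      # error string from checkNew/checkFileInfo/copy
else:
    for infoaddDict in infoDictli:
        sql_insertDict('followup', infoaddDict)   # assumed destination table
    for move in tobinli:
        shutil.move(move['source'], move['destination'])  # assumed archiving of the source dirs
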
Example #4
def applyUpdateBatchChanges():
    batchChangesdict = {}
    config = configInfo()
    batchChangesdict['BatchID'] = request.form['batchid']
    batchid = batchChangesdict['BatchID']

    #print('applyUpdateBatchChanges: ', batchid)

    batchChangesdict['ResponsibleStatus'] = str(
        request.form['responsibleStatus']).replace("**", "")
    batchChangesdict['ProofreaderStatus'] = str(
        request.form['proofreaderStatus']).replace("**", "")
    batchChangesdict['OverallStatus'] = request.form['overallStatus']
    batchChangesdict['Aircraft'] = request.form['aircraft']
    batchChangesdict['Responsible'] = request.form['reAssignBatch']
    try:
        batchChangesdict['splitBatchFactor'] = request.form['splitBatch']
        splitFactor = batchChangesdict['splitBatchFactor']
    except:
        pass
    try:
        fileobli = request.files.getlist("files2upload")
        batchChangesdict['filestoupload'] = len(fileobli)
    except:
        pass

    try:
        batchChangesdict['EstimatedTaskNbr'] = request.form['aproxtasknr']
    except:
        batchChangesdict['EstimatedTaskNbr'] = ''
        pass

    try:
        batchChangesdict['EstimatedFdgNbr'] = request.form['aproxfdgnr']
    except:
        batchChangesdict['EstimatedFdgNbr'] = ''
        pass

    try:
        batchChangesdict['comments'] = request.form['comments']
    except:
        batchChangesdict['comments'] = ''
        pass

    updateResponse = checkupdate(batchChangesdict)
    print('updateResponse: ', updateResponse)

    if updateResponse != False:
        if updateResponse == 'merge':
            #Deal with Merge Batches
            batches = str(batchid)
            return redirect(
                url_for('updatebatch.applyMergeBatches', batches=batches))
        elif updateResponse == 'add':
            batchStatus = batchInfo(batchid)
            #Deal with adding more files to an existing batch
            if batchStatus != False:
                batchStatus = batchStatus['OverallStatus'][0]
                if batchStatus != "UNASSIGNED":
                    bidDirAssigned = os.path.abspath(
                        config['path_to_batches_assigned'])
                    assginedDirsli = os.listdir(bidDirAssigned)
                    assignedDir = [
                        folderName for folderName in assginedDirsli
                        if re.search(batchid, folderName)
                    ][0]
                    path = os.path.join(bidDirAssigned, assignedDir)

                    filesnameli = []
                    pathsli = []
                    fileIDli = []
                    for fileob in fileobli:
                        filename = secure_filename(fileob.filename)
                        fileid = generateID()
                        newFileName = 'FID_' + fileid + ' ' + filename
                        save_path = os.path.join(path, newFileName)
                        fileob.save(save_path)
                        #Check if file was added before
                        fileinfo = getfileSizeMtime(save_path)
                        fileinfo['FileID'], fileinfo[
                            'FileName'] = fileid, filename
                        fileinfo['AddedInBatch'] = batchid
                        responseFileInfo = checkFileInfo(fileinfo)
                        if responseFileInfo != True:
                            os.remove(save_path)
                            errormessage = responseFileInfo
                            return redirect(
                                url_for('comm.showFailedPage',
                                        errormessage=errormessage))
                        else:
                            sql_insertDict('fileshistory', fileinfo)

                        filesnameli.append(filename)
                        pathsli.append(path)
                        fileIDli.append(fileid)

                    orgfilesname = ', '.join(filesnameli)
                    newfilespath = ', '.join(pathsli)
                    filesId = ', '.join(fileIDli)
                    if appendNewFilesToBatch(batchid, orgfilesname,
                                             newfilespath, filesId) == True:
                        return redirect(url_for('comm.showSuccessPage'))
                    else:
                        errormessage = "Changes not saved into the database!"
                        return redirect(
                            url_for('comm.showFailedPage',
                                    errormessage=errormessage))

                elif batchStatus == "UNASSIGNED":
                    errormessage = "Barch is UNASSIGNED! You can add new files using this method only if this batch is ASSIGNED!"
                    return redirect(
                        url_for('comm.showFailedPage',
                                errormessage=errormessage))

        elif updateResponse == 'split':
            #Deal with the splitBatch
            splitFactor_Batch = str(splitFactor) + '_' + str(batchid)
            return redirect(
                url_for('updatebatch.applySplitBatches',
                        splitFactor_Batch=splitFactor_Batch))

        elif updateResponse == 'update':
            #Just update the batch in the database
            if updateBatchinFollowup(batchChangesdict):
                return redirect(url_for('comm.showSuccessPage'))
            else:
                errormessage = str(
                    "Moving BID_{} folders failed or DCS info not found! Check docs for more info.."
                    .format(batchid))
                return redirect(
                    url_for('comm.showFailedPage', errormessage=errormessage))
    else:
        print(updateResponse)
        errormessage = "Only one change can be applyed for options with  '*'  sign! Reset to defaults by clicking '| Update Batches' title"
        return redirect(
            url_for('comm.showFailedPage', errormessage=errormessage))
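
For reference, a hedged sketch of the form fields this view reads. The route URL, the blueprint registration and the field values are assumptions; only the field names come from the code above.

# Hypothetical request via Flask's test client; 'app' is the Flask application object
data = {
    'batchid': 'A1B2C3',
    'responsibleStatus': 'ASSIGNED**',
    'proofreaderStatus': 'ASSIGNED**',
    'overallStatus': 'ASSIGNED',
    'aircraft': 'A320',
    'reAssignBatch': 'john.doe',
    'aproxtasknr': '10',
    'aproxfdgnr': '2',
    'comments': 'updated after review',
}
with app.test_client() as client:
    client.post('/updatebatch/apply', data=data)  # assumed URL rule
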
Example #5
def autoNewDirs():
    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime, checkFileInfo
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []
        for file in vdict['files']:
            if auto:
                #print("yuhuu file",file)
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            filesnameli.append(filename)
            fileIDli.append(fileid)

            try:
                shutil.copy2(file, save_path)
            except Exception as e:
                return str(e), str(e), str(e)

        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)
        #print(infoaddDict)

    return infoDictli, auto, tobinli
Example #6
def autoNewDirs():

    #do a check before generating batches

    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime, checkFileInfo
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []
        # errtxt_path = os.path.join(unassignedpath, "ERRORS_ifEmptyOK.txt")
        # try:
        #     os.remove(errtxt_path)
        # except:
        #     pass
        # errtxt = open(errtxt_path, "a")
        for file in vdict['files']:
            if auto:
                #print("yuhuu file",file)
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            try:
                shutil.copy2(file, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Please delete from UNASSIGNED all new created folders"
                #errmsg = "\n\nERROR:\n{} \n\nPlease check if all files are in\n{}\nfor\n{}\nPlease move the files needed manually!\n\n".format(str(e), save_path, path)
                print(errmsg)
                #errtxt.write(errmsg)
                return str(errmsg), str(e), str(e)

        #errtxt.close()

        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)
        #print(infoaddDict)

    print("yuhuuu infoDictli, auto, tobinli", infoDictli, auto, tobinli)
    return infoDictli, auto, tobinli