def appendNewFilesToBatch(batchID, orgfilesname, newfilespath, filesId):
    from fup.utils.dbwrap import sql_updateDict
    from fup.utils.commun import listifyString, uniquelist, current_date, cleanPath
    from fup.helpers.batch import batchInfo

    colstoChange = ['OriginalFilesName', 'OriginalFilesPath', 'FilesID', 'ChangesLog', 'BatchID']
    infodict_previous = batchInfo(batchID)

    changeInfodict = {}
    for kcol, val in infodict_previous.items():
        if kcol in colstoChange:
            if kcol == 'ChangesLog':
                changeInfodict[kcol] = val[0] + ', New files added on {}'.format(current_date())

            elif kcol == 'OriginalFilesName':
                changeInfodict[kcol] = val[0] + ',\n' + orgfilesname

            elif kcol == 'OriginalFilesPath':
                #keep the cleaned batch folder path when the new files were dropped into it
                if newfilespath in uniquelist(listifyString(val[0])):
                    changeInfodict[kcol] = cleanPath(newfilespath)

            elif kcol == 'FilesID':
                changeInfodict[kcol] = val[0] + ',\n' + filesId

            elif kcol == 'BatchID':
                changeInfodict[kcol] = batchID

    #sql_updateDict returns False on failure
    return sql_updateDict('followup', changeInfodict, 'BatchID') != False
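
# Hedged usage sketch (not part of the original module): shows how
# appendNewFilesToBatch() might be called after new files were dropped into an
# existing batch folder. All literal IDs, names and the path below are
# hypothetical placeholders.
def _example_append_new_files():
    ok = appendNewFilesToBatch(
        batchID='aB12Cd',                     # hypothetical existing BatchID
        orgfilesname='report_2.csv',          # hypothetical new file name(s), comma separated
        newfilespath=r'D:\fup\UNASSIGNED\OP AC BID_aB12Cd',  # hypothetical batch folder
        filesId='Xy34Zw')                     # hypothetical FID for the new file(s)
    print('followup row updated:', ok)
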
def mergeBatches(batchidstrli):
    from fup.helpers.batchdirs import mergeDirBatches
    from fup.utils.commun import current_date, listifyString
    from fup.helpers.batch import batchInfo
    from fup.utils.dbwrap import sql_insertDict, sql_deleteRow
    import pandas as pd

    mergedInfodict = mergeDirBatches(batchidstrli)

    bidli = listifyString(batchidstrli)

    if isinstance(mergedInfodict, dict):
        prevInfodict = {}
        for batch in mergedInfodict['batchidli']:
            previnfo = batchInfo(batch)
            previnfo.pop('EstimatedTaskNbr', None)
            previnfo.pop('EstimatedFdgNbr', None)
            prevInfodict[batch] = previnfo
    else:
        print('Cannot merge dirs!')
        return False

    #gather the previous batch info into one DataFrame, then back into a dict of lists
    dfli = []
    for bid in prevInfodict.keys():
        df = pd.DataFrame.from_dict(prevInfodict[bid])  #make df from dict
        dfli.append(df)

    dfall = pd.concat(dfli, axis=0)
    prevInfoDictAll = dfall.to_dict('list')

    prevInfoDictAll['BatchID'] = mergedInfodict['mergedID']
    infolog = 'Batch merged from "{}" on {}'.format(batchidstrli,
                                                    current_date())
    prevlog = [str(l) for l in prevInfoDictAll['ChangesLog']]
    prevInfoDictAll['ChangesLog'] = ', '.join(list(
        set(prevlog))) + ', ' + infolog
    prevInfoDictAll['AddedDate'] = current_date()

    if sql_insertDict('followup', prevInfoDictAll) == False:
        return False

    for bid in bidli:
        if sql_deleteRow('followup', 'BatchID', bid) == False:
            return False

    return mergedInfodict['mergedID']
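
# Hedged usage sketch (not part of the original module): mergeBatches() takes a
# comma-separated string of BatchIDs, merges their folders via mergeDirBatches()
# and collapses their followup rows into one. The IDs below are hypothetical.
def _example_merge_batches():
    merged_id = mergeBatches('aB12Cd, Ef34Gh')
    if merged_id is False:
        print('merge failed')
    else:
        print('new merged BatchID:', merged_id)
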
def verifyStatus(infoBatchdict):
    #print('verifyStatus: ',infoBatchdict)
    try:
        try:  #if user: ResponsibleStatus is present ('TO BE CHECKED' or an assigned status)
            if infoBatchdict['ResponsibleStatus'] == 'TO BE CHECKED':
                infoBatchdict['OverallStatus'] = 'TO BE CHECKED'
                return infoBatchdict
            elif infoBatchdict['ResponsibleStatus'] != 'UNASSIGNED':
                infoBatchdict['OverallStatus'] = 'ONGOING'
                return infoBatchdict
        except KeyError:  #if proofreader: no ResponsibleStatus key in the dict
            #"TO BE IMPORTED, FINISHED, REWORK, STANDBY, UNRECORDABLE"
            if infoBatchdict['ProofreaderStatus'] == 'REWORK':
                infoBatchdict['OverallStatus'] = 'REWORK'
                infoBatchdict['ResponsibleStatus'] = 'REWORK'
                return infoBatchdict
            elif infoBatchdict['ProofreaderStatus'] == 'STANDBY':

                from fup.helpers.batch import batchInfo
                from fup.utils.commun import current_date
                from fup.utils.dbwrap import sql_updateDict

                prevInfoDictAll = batchInfo(infoBatchdict['BatchID'])
                prevlog = [str(l) for l in prevInfoDictAll['ChangesLog']]
                infolog = str("SET to STANDBY on {}".format(current_date()))
                log = ', '.join(list(set(prevlog))) + ',\n' + infolog
                upd = {
                    "ChangesLog": log,
                    "StartDate": "-",
                    "BatchID": infoBatchdict['BatchID']
                }
                sql_updateDict('followup', upd, "BatchID")
                infoBatchdict['OverallStatus'] = 'STANDBY'
                infoBatchdict['ResponsibleStatus'] = 'STANDBY'
                return infoBatchdict
            elif infoBatchdict['ProofreaderStatus'] == 'UNRECORDABLE':
                infoBatchdict['OverallStatus'] = 'UNRECORDABLE'
                infoBatchdict['ResponsibleStatus'] = 'UNRECORDABLE'
                return infoBatchdict
            elif infoBatchdict['ProofreaderStatus'] == 'FINISHED':
                infoBatchdict['OverallStatus'] = 'FINISHED'
                infoBatchdict['ResponsibleStatus'] = 'FINISHED'
                return infoBatchdict
            elif infoBatchdict['ProofreaderStatus'] == 'TO BE IMPORTED':
                infoBatchdict['OverallStatus'] = 'TO BE IMPORTED'
                return infoBatchdict
    except Exception:
        #on any unexpected error, hand the dict back unchanged
        return infoBatchdict
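
# Hedged usage sketch (not part of the original module): verifyStatus() infers
# OverallStatus from whichever status key is present. The dicts below are
# hypothetical minimal inputs.
def _example_verify_status():
    # responsible (user) side: ResponsibleStatus is present
    print(verifyStatus({'BatchID': 'aB12Cd', 'ResponsibleStatus': 'TO BE CHECKED'}))
    # proofreader side: no ResponsibleStatus key, so the KeyError branch is taken
    print(verifyStatus({'BatchID': 'aB12Cd', 'ProofreaderStatus': 'REWORK'}))
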
def resetStartDate(infodict):
    from fup.helpers.batch import batchInfo
    from fup.utils.commun import current_date
    from fup.utils.dbwrap import sql_updateDict

    bid = infodict["BatchID"]#"eyi0hT"
    newStatus = infodict["OverallStatus"]

    prevInfoDictAll = batchInfo(bid)

    standby = prevInfoDictAll['OverallStatus'][0] == 'STANDBY'
    startdate = prevInfoDictAll['StartDate'][0] == '-'
    if standby and startdate and newStatus != "STANDBY":
        date = current_date()
        sql_updateDict('followup', {"StartDate": date, "BatchID": bid}, "BatchID")
def extendRowsFollowup():
    import os
    import pandas as pd
    pd.options.mode.chained_assignment = None  # default='warn'
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import current_date, listifyString, xllook

    config = configInfo()
    xlpath = config['path_to_excels_exported_from_database']
    xlfilepath = os.path.join(xlpath, 'followup.xlsx')

    #xllook(xlfilepath, 'A1:W1', close=True)

    fupdf = pd.read_excel(xlfilepath)

    #Append to a list of dfs, bids that have more than one file
    orgfilesdfsli = []
    bidtodel = []
    for i, cell in enumerate(fupdf["OriginalFilesName"].tolist()):
        cellli = listifyString(str(cell))
        if len(cellli) > 1:
            bid = fupdf.loc[i, "BatchID"]
            bidtodel.append(bid)
            for j, orgfile in enumerate(cellli):
                #print(orgfile, bid)
                fup_bid = fupdf[fupdf['BatchID'] == bid]
                fup_bid.loc[i, "OriginalFilesName"] = orgfile
                fidli = listifyString(fup_bid.loc[i, "FilesID"])
                fup_bid.loc[i, "FilesID"] = fidli[j]
                orgfilesdfsli.append(fup_bid)

    if bidtodel:
        #Make one df from the per-file dfs created above
        orgfilesdf = pd.concat(orgfilesdfsli)

        #Remove from the original df the batches that have more than one file
        fupdf = fupdf[~fupdf["BatchID"].isin(bidtodel)]

        extended_fup = pd.concat([fupdf, orgfilesdf])
    else:
        #nothing to expand
        extended_fup = fupdf
    extended_fup.reset_index(drop=True, inplace=True)

    extfilepath = os.path.join(
        xlpath, "followup {} DO NOT IMPORT THIS IN DATABASE.xlsx".format(
            current_date()))
    extended_fup.to_excel(extfilepath, index=False)

    xllook(extfilepath, 'A1:W1', close=False)
def splitBatches(splitFactor_batchid):
    from fup.helpers.batchdirs import createSplitDirs
    from fup.helpers.batch import batchInfo
    from fup.utils.commun import current_date, listifyString
    from fup.utils.dbwrap import sql_insertDict, sql_deleteRow

    splitBatchidli = splitFactor_batchid.split('_')
    splitFactor, batchid = int(splitBatchidli[0]), splitBatchidli[1]
    oldBatchinfo = batchInfo(batchid)

    infodirs = createSplitDirs(splitFactor, batchid)
    if infodirs != False:
        prevBatchID = infodirs['oldid']
        newBatchID = infodirs['newids']
    else:
        return False

    prevloginfo = [str(l) for l in oldBatchinfo['ChangesLog']]
    loginfo = ''.join(
        prevloginfo
    ) + ', Batch "{}" was split into batches: "{}", on {}'.format(
        prevBatchID, newBatchID, current_date())
    oldBatchinfo['ChangesLog'] = loginfo

    newBIDli = listifyString(newBatchID)
    for bid in newBIDli:
        oldBatchinfo['BatchID'] = bid
        oldBatchinfo.pop('EstimatedTaskNbr', None)
        oldBatchinfo.pop('EstimatedFdgNbr', None)
        if sql_insertDict('followup', oldBatchinfo) == False:
            return False

    if sql_deleteRow('followup', 'BatchID', batchid) == False:
        return False

    return newBatchID
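
# Hedged usage sketch (not part of the original module): splitBatches() expects
# a "<splitFactor>_<BatchID>" string, e.g. splitting one batch into three. The
# ID below is hypothetical.
def _example_split_batches():
    new_ids = splitBatches('3_aB12Cd')
    if new_ids is False:
        print('split failed')
    else:
        print('new BatchIDs:', new_ids)
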
def autoNewDirs():
    #do a sanity check on the NEW folder before generating batches
    #(checkNew() and checkFileInfo() are expected to be in scope in this module)
    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    #print('filesinfodict', str(filesinfodict).encode("utf-8"))

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        print("\nProcessing {}..".format(opac))
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []

        for filepath in vdict['files']:
            if auto:
                fileinfo = getfileSizeMtime(filepath)
                #print("autoNewDirs: getfileSizeMtime, fileinfo> ", fileinfo)
                fileinfo["FileName"] = filepath.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = filepath.split('\\')[-1].replace(',', ' ')
            #print("autoNewDirs: filename> ", filename, file)
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            try:
                print("Copying this file:\n{}\nto this path:\n{}".format(
                    filepath, save_path))
                shutil.copy2(filepath, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Got: {}".format(
                    str(e))
                return str(errmsg), str(e), str(e)

        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)

    print("Info about:\ninfoDictli:\n{}\n, auto:\n{}\n, tobinli:\n{}\n".format(
        infoDictli, auto, tobinli))
    return infoDictli, auto, tobinli
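
# Hedged usage sketch (not part of the original module): autoNewDirs() returns
# three values; on success the first is a list of batch-info dicts, on failure
# it is an error string (the same convention checkNew() uses above). tobinli
# lists source/destination pairs to be archived to the bin folder.
def _example_auto_new_dirs():
    infoDictli, auto, tobinli = autoNewDirs()
    if isinstance(infoDictli, str):
        print('autoNewDirs failed:', infoDictli)
        return
    for info in infoDictli:
        print(info['BatchID'], info['Operator'], info['Aircraft'])
    print('matched existing unassigned folders:', auto)
    print('to bin:', tobinli)
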
def assignBatchtoUser(batchID, assignedtoProofreader):
    from fup.utils.jsoninfo import sessionInfo
    from fup.utils.commun import current_date
    from fup.helpers.batch import getUnassignedBatch, batchExists
    from fup.helpers.batchdirs import createAssignedDirFiles
    from fup.models.batch import checkOverallStatus
    from fup.utils.dbwrap import sql_updateDict
    from fup.helpers.user import getuserProofreader

    if checkOverallStatus() == True:

        date = current_date()
        userinfo = sessionInfo()
        responsible_user = userinfo["current_user_working"]
        defaultProofreader = getuserProofreader(responsible_user)

        tableName = 'followup'

        updatedict = {
            "BatchID": batchID,
            "Responsible": responsible_user,
            "ResponsibleStatus": "ASSIGNED",
            "Proofreader": defaultProofreader,
            "ProofreaderStatus": "ASSIGNED",
            "ChangesLog": ''
        }
        if defaultProofreader == "UNASSIGNED":
            updatedict["ProofreaderStatus"] = "UNASSIGNED"

        updatedict_fallback = {
            "BatchID": batchID,
            "Responsible": "UNASSIGNED",
            "ResponsibleStatus": "UNASSIGNED",
            "Proofreader": "UNASSIGNED",
            "ProofreaderStatus": "UNASSIGNED",
            "ChangesLog": ''
        }

        colIDName = "BatchID"

        #print(updatedict)
        if (batchID == '' or batchExists(batchID)) and assignedtoProofreader == True:
            unassignedBatch = getUnassignedBatch(batchID, 'ProofreaderStatus')
            updatedict["BatchID"] = unassignedBatch
            updatedict_fallback["BatchID"] = unassignedBatch
        elif (batchID == '' or batchExists(batchID)) and assignedtoProofreader == False:
            unassignedBatch = getUnassignedBatch(batchID, 'ResponsibleStatus')
            updatedict["BatchID"] = unassignedBatch
            updatedict_fallback["BatchID"] = unassignedBatch

        if assignedtoProofreader == True:
            updatedict.pop("Responsible", None)
            updatedict.pop("ResponsibleStatus", None)
            updatedict.pop("ChangesLog", None)
            if sql_updateDict(tableName, updatedict, colIDName) == True:
                checkOverallStatus()
                return True
            else:
                print('1fallback', tableName, updatedict, colIDName)
                sql_updateDict(tableName, updatedict_fallback, colIDName)
                return False

        elif assignedtoProofreader == False:
            loginfo = "ASSIGNED to {} on {}".format(responsible_user, date)
            updatedict["ChangesLog"] = loginfo
            updatedict["StartDate"] = date
            if sql_updateDict(tableName, updatedict, colIDName) == True \
                    and createAssignedDirFiles(updatedict["BatchID"]) == True:
                checkOverallStatus()
                return True
            else:
                print('2fallback', tableName, updatedict, colIDName)
                sql_updateDict(tableName, updatedict_fallback, colIDName)
                return False
    else:
        return False
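
# Hedged usage sketch (not part of the original module): assignBatchtoUser()
# assigns either a specific batch or, with an empty batchID, the next
# unassigned one; assignedtoProofreader switches between the proofreader and
# responsible flows. The ID below is hypothetical.
def _example_assign_batch():
    # assign a specific batch to the current responsible user
    print(assignBatchtoUser('aB12Cd', assignedtoProofreader=False))
    # give the next unassigned batch to its proofreader
    print(assignBatchtoUser('', assignedtoProofreader=True))
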
def moveDirsforUpdate(infoBatchdict):
    import os, re
    from fup.helpers.batchdirs import moveDirName
    from fup.utils.jsoninfo import configInfo
    from fup.helpers.files import dcsinfo
    from fup.utils.dbwrap import sql_updateDict
    from fup.utils.commun import current_date

    #infoBatchdict = {'ResponsibleStatus': 'TO BE CHECKED', 'BatchID': 'VqUSKc'}
    batchid = infoBatchdict['BatchID']
    config = configInfo()

    for kcol, val in infoBatchdict.items():
        #print(kcol, val)
        if kcol == 'ResponsibleStatus' and val == 'TO BE CHECKED':
            ok = moveDirName(batchid, "path_to_batches_assigned",
                             "path_to_batches_tobechecked")
            if ok:
                return True
            else:
                print("[info]Batch {} not found in ASSIGNED folder".format(
                    batchid))
                return False

        elif kcol == 'ProofreaderStatus' and val == 'REWORK':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_assigned")
            if ok:
                return True
            else:
                print(
                    "[info]Batch {} not found in TO BE CHECKED folder".format(
                        batchid))
                return False

        elif kcol == 'ProofreaderStatus' and val == 'STANDBY':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_instandby")
            if ok:
                return True
            else:
                print(
                    "[info]Batch {} not found in TO BE CHECKED folder".format(
                        batchid))
                return False

        elif kcol == 'ProofreaderStatus' and val == 'UNRECORDABLE':
            ok = moveDirName(batchid, "path_to_batches_tobechecked",
                             "path_to_batches_unrecordable")
            if ok:
                return True
            else:
                print(
                    "[info]Batch {} not found in TO BE CHECKED folder".format(
                        batchid))
                return False

        elif kcol == 'ProofreaderStatus' and val == 'TO BE IMPORTED':
            try:
                dcspath = config['path_to_dcs_info']
                dcsli = os.listdir(dcspath)
                dcsli = [f for f in dcsli if re.search(batchid, f)]
                dcsli = [f for f in dcsli if re.search('DCS', f)]
                dcsli = [f for f in dcsli if re.search('.xml', f)]

                dcsfilepath = os.path.join(dcspath, dcsli[0])
                dcsdictinfo = dcsinfo(dcsfilepath)
                dcsdictinfo['BatchID'] = batchid

                if sql_updateDict('followup', dcsdictinfo, 'BatchID') == False:
                    print("Cannot update the DCS information to database!")
                    return False

                ok = moveDirName(batchid, "path_to_batches_tobechecked",
                                 "path_to_batches_tbimported")
                if ok:
                    return True
                else:
                    print("Batch {} not found in TO BE CHECKED folder".format(
                        batchid))
                    return False
            except Exception as e:
                print('Cannot open/read the DCS xml file! Got: {}'.format(e))
                return False

        elif kcol == 'ProofreaderStatus' and val == 'FINISHED':
            ok = moveDirName(batchid, "path_to_batches_tbimported",
                             "path_to_batches_finished")
            if ok:
                cdate = current_date()
                importedDatedict = {
                    'BatchID': batchid,
                    'ImportedDateISAIM': cdate,
                    'ResponsibleStatus': 'FINISHED'
                }
                sql_updateDict('followup', importedDatedict, 'BatchID')
                return True
            else:
                print("Batch {} not found in TO BE IMPORTED folder".format(
                    batchid))
                return False
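
# Hedged usage sketch (not part of the original module): moveDirsforUpdate()
# moves a batch folder between the status directories according to the status
# change contained in the dict. The input mirrors the commented example inside
# the function; the ID is hypothetical.
def _example_move_dirs():
    ok = moveDirsforUpdate({'BatchID': 'VqUSKc', 'ResponsibleStatus': 'TO BE CHECKED'})
    print('moved to TO BE CHECKED folder:', ok)
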
def autoNewDirs():
    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []
        for file in vdict['files']:
            if auto:
                #print("yuhuu file",file)
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            filesnameli.append(filename)
            fileIDli.append(fileid)

            try:
                shutil.copy2(file, save_path)
            except Exception as e:
                return str(e), str(e), str(e)

        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)
        #print(infoaddDict)

    return infoDictli, auto, tobinli
def autoNewDirs():
    #do a sanity check on the NEW folder before generating batches
    resultCheckNew = checkNew()
    if isinstance(resultCheckNew, str):
        return resultCheckNew, '', ''

    import os, shutil
    from fup.helpers.files import originalFilesPaths, getfileSizeMtime
    from fup.utils.commun import generateID, current_date
    from fup.utils.jsoninfo import configInfo

    config = configInfo()
    bindir = os.path.abspath(config["path_to_bin"])

    filesinfodict = originalFilesPaths(infoDict={}, auto=True)

    newdirsNames = list(filesinfodict.keys())
    unassignedpath = os.path.abspath(config['path_to_batches_unassigned'])
    unassigneddirli = os.listdir(unassignedpath)

    unsdict = {}
    for d in unassigneddirli:
        commName = d.split('BID_')[0].strip()
        unsdict[commName] = d

    unassigneddirNames = list(unsdict.keys())
    communliBatch = list(set(newdirsNames).intersection(unassigneddirNames))

    auto = False
    infoDictli = []
    tobinli = []
    for opac, vdict in filesinfodict.items():
        #similar to uploadFilesCreateBatch, but without flask file object
        batchID = generateID()
        operator = opac.split(' ')[0]
        aircraft = opac.split(' ')[1]
        bindir_batch = os.path.join(bindir, batchID)

        if opac not in communliBatch:
            batchNameFolder = operator + ' ' + aircraft + ' BID_' + batchID
            path = os.path.join(unassignedpath, batchNameFolder)
            os.mkdir(path)
        else:
            auto = True
            communOpAc = list(set([opac]).intersection(communliBatch))
            batchNameFolder = unsdict[communOpAc[0]]
            path = os.path.join(unassignedpath, batchNameFolder)

            existingBatchID = batchNameFolder.split('BID_')[-1].replace(
                '_', '')
            bindir_batch = os.path.join(bindir, existingBatchID)

        tobinli.append({
            'source': vdict['rootpath'],
            'destination': bindir_batch
        })

        filesnameli = []
        fileIDli = []
        for file in vdict['files']:
            if auto:
                #print("yuhuu file",file)
                filepath = file
                fileinfo = getfileSizeMtime(filepath)
                fileinfo["FileName"] = file.split("\\")[-1]
                responseFileInfo = checkFileInfo(fileinfo)
                if responseFileInfo != True:
                    return responseFileInfo, auto, auto
            filename = file.split('\\')[-1]
            fileid = generateID()
            newFileName = 'FID_' + fileid + ' ' + filename
            save_path = os.path.join(path, newFileName)

            try:
                shutil.copy2(file, save_path)
                filesnameli.append(filename)
                fileIDli.append(fileid)
            except Exception as e:
                errmsg = "Make sure that all folders from NEW contains ONLY FILES! Please delete from UNASSIGNED all new created folders"
                #errmsg = "\n\nERROR:\n{} \n\nPlease check if all files are in\n{}\nfor\n{}\nPlease move the files needed manually!\n\n".format(str(e), save_path, path)
                print(errmsg)
                #errtxt.write(errmsg)
                return str(errmsg), str(e), str(e)


        orgfilesname = ', '.join(filesnameli)
        orgfilespath = path
        filesId = ', '.join(fileIDli)
        addedDate = current_date()

        infoaddDict = {
            'BatchID': batchID,
            'Aircraft': aircraft,
            'Operator': operator,
            'OriginalFilesName': orgfilesname,
            'OriginalFilesPath': orgfilespath,
            'FilesID': filesId,
            'AddedDate': addedDate
        }

        infoDictli.append(infoaddDict)
        #print(infoaddDict)

    print("yuhuuu infoDictli, auto, tobinli", infoDictli, auto, tobinli)
    return infoDictli, auto, tobinli