def splitBatches(splitFactor_batchid):
    from fup.helpers.batchdirs import createSplitDirs
    from fup.helpers.batch import batchInfo
    from fup.utils.commun import current_date, listifyString
    from fup.utils.dbwrap import sql_insertDict, sql_deleteRow

    splitBatchidli = splitFactor_batchid.split('_')
    splitFactor, batchid = int(splitBatchidli[0]), splitBatchidli[1]
    oldBatchinfo = batchInfo(batchid)

    infodirs = createSplitDirs(splitFactor, batchid)
    if infodirs != False:
        prevBatchID = infodirs['oldid']
        newBatchID = infodirs['newids']
    else:
        return False

    prevloginfo = [str(l) for l in oldBatchinfo['ChangesLog']]
    loginfo = ''.join(prevloginfo) + ', Batch "{}" was split into batches: "{}", on {}'.format(prevBatchID, newBatchID, current_date())
    oldBatchinfo['ChangesLog'] = loginfo

    newBIDli = listifyString(newBatchID)
    for bid in newBIDli:
        oldBatchinfo['BatchID'] = bid
        oldBatchinfo.pop('EstimatedTaskNbr', None)
        oldBatchinfo.pop('EstimatedFdgNbr', None) 
        if sql_insertDict('followup', oldBatchinfo) == False:
            return False

    if sql_deleteRow('followup', 'BatchID', batchid) == False:
        return False

    return newBatchID
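
# Usage sketch (not part of the original source): splitBatches expects a single
# "<splitFactor>_<batchID>" string and returns the comma-separated new batch IDs,
# or False on failure. 'BID12345' below is a made-up placeholder ID.
def _example_splitBatches():
    new_ids = splitBatches('2_BID12345')
    if new_ids is False:
        print('Split failed')
    else:
        print('Batch split into:', new_ids)
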
def appendNewFilesToBatch(batchID, orgfilesname, newfilespath, filesId):
    from fup.utils.dbwrap import sql_updateDict
    from fup.utils.commun import listifyString, uniquelist
    from fup.helpers.batch import batchInfo
    from fup.utils.commun import current_date, cleanPath

    colstoChange = ['OriginalFilesName', 'OriginalFilesPath', 'FilesID', 'ChangesLog', 'BatchID']
    infodict_previous = batchInfo(batchID)

    changeInfodict = {}
    for kcol, val in infodict_previous.items():
        if kcol in colstoChange:
            if kcol == 'ChangesLog':
                changeInfodict[kcol] = val[0] + ', New files added on {}'.format(current_date())

            elif kcol == 'OriginalFilesName':
                changeInfodict[kcol] = val[0] + ',\n' + orgfilesname

            elif kcol == 'OriginalFilesPath':
                #print("newfilespath", val[0], ' yuhuu ',newfilespath)
                if newfilespath in uniquelist(listifyString(val[0])):
                    changeInfodict[kcol] = cleanPath(newfilespath)

            elif kcol == 'FilesID':
                changeInfodict[kcol] = val[0] + ',\n' + filesId

            elif kcol == 'BatchID':
                changeInfodict[kcol] = batchID

    if sql_updateDict('followup', changeInfodict, 'BatchID') == False:
        return False
    else:
        return True
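
# Usage sketch (not part of the original source): appendNewFilesToBatch appends new
# file names and file IDs to an existing followup row. All argument values below are
# made-up placeholders; the comma-separated format is assumed from the listifyString usage.
def _example_appendNewFilesToBatch():
    ok = appendNewFilesToBatch(
        batchID='BID12345',
        orgfilesname='report_a.pdf, report_b.pdf',
        newfilespath=r'\\server\share\new_files',
        filesId='F001, F002')
    print('Files appended' if ok else 'Update failed')
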
def extendRowsFollowup():
    import os
    import pandas as pd
    pd.options.mode.chained_assignment = None  # default='warn'
    from fup.utils.jsoninfo import configInfo
    from fup.utils.commun import current_date, listifyString, xllook

    config = configInfo()
    xlpath = config['path_to_excels_exported_from_database']
    xlfilepath = os.path.join(xlpath, 'followup.xlsx')

    #xllook(xlfilepath, 'A1:W1', close=True)

    fupdf = pd.read_excel(xlfilepath)

    #Append to a list of dfs, bids that have more than one file
    orgfilesdfsli = []
    bidtodel = []
    for i, cell in enumerate(fupdf["OriginalFilesName"].tolist()):
        cellli = listifyString(str(cell))
        if len(cellli) > 1:
            bid = fupdf.loc[i, "BatchID"]
            bidtodel.append(bid)
            for j, orgfile in enumerate(cellli):
                #print(orgfile, bid)
                fup_bid = fupdf[fupdf['BatchID'] == bid].copy()  #copy so edits do not touch fupdf
                fup_bid.loc[i, "OriginalFilesName"] = orgfile
                fidli = listifyString(fup_bid.loc[i, "FilesID"])
                fup_bid.loc[i, "FilesID"] = fidli[j]
                orgfilesdfsli.append(fup_bid)

    #Make one df from the df list built above (skip if no batch had more than one file)
    if orgfilesdfsli:
        orgfilesdf = pd.concat(orgfilesdfsli)

        #Remove from df the batches that have more than one file (exact BatchID match)
        fupdf = fupdf[~fupdf["BatchID"].isin(bidtodel)]

        fupdf = pd.concat([fupdf, orgfilesdf])

    extended_fup = fupdf.reset_index(drop=True)

    extfilepath = os.path.join(
        xlpath, "followup {} DO NOT IMPORT THIS IN DATABASE.xlsx".format(
            current_date()))
    extended_fup.to_excel(extfilepath, index=False)

    xllook(extfilepath, 'A1:W1', close=False)
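
# Usage sketch (not part of the original source): extendRowsFollowup takes no
# arguments; it reads followup.xlsx from the configured export folder, expands
# batches with several files into one row per file and writes a dated
# "DO NOT IMPORT" copy next to the original export.
def _example_extendRowsFollowup():
    extendRowsFollowup()
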
def mergeDirBatches(batchidstrli):
    import os, re, shutil, time
    from fup.utils.commun import copytree, generateID, listifyString
    from fup.utils.jsoninfo import configInfo, sessionInfo
    config = configInfo()
    session = sessionInfo()
    try:
        user = session['current_user_working']
        if re.search('@', user):
            user = user.split('@')[0]

        batchidli = listifyString(batchidstrli)

        dir_assigned = config['path_to_batches_assigned']
        dirs_assigned = os.listdir(dir_assigned)

        #Make a dir for the merged batches
        mergedID = generateID()
        mergeddirpath = os.path.join(
            dir_assigned, '{}-___ A___ BID_{}'.format(user, mergedID))
        os.mkdir(mergeddirpath)

        #Get names of the folders from Assigned folder by checking the BID
        dirstomergeli = []
        for batchid in batchidli:
            dir_bidAssigned = [
                d for d in dirs_assigned if re.search(batchid, d)
            ]
            dirstomergeli.append(dir_bidAssigned[0])

        #Copy contents of the old batches into the new created merged folder
        for folderName in dirstomergeli:
            assignedPathOld = os.path.join(dir_assigned, folderName)
            src, dst = assignedPathOld, mergeddirpath
            copytree(src, dst, symlinks=False, ignore=None)
            files_not_deleted = True
            while files_not_deleted:
                try:
                    shutil.rmtree(assignedPathOld)  #delete folders
                    files_not_deleted = False
                except OSError:  #folder still locked by open file(s)
                    print("Please close file(s) open in folder {}".format(
                        assignedPathOld))
                    time.sleep(2)

        mergedInfodict = {
            'mergedID': mergedID,
            'mergeddirpath': mergeddirpath,
            'batchidli': batchidli
        }
        return mergedInfodict
    except Exception as e:
        print('mergeDirBatches/helpers Got :', e)
        return False
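
# Usage sketch (not part of the original source): mergeDirBatches takes a
# comma-separated string of batch IDs, moves their assigned folders into one merged
# folder and returns a dict with 'mergedID', 'mergeddirpath' and 'batchidli'
# (or False on error). The IDs below are made-up placeholders.
def _example_mergeDirBatches():
    info = mergeDirBatches('BID12345, BID67890')
    if info:
        print('Merged into', info['mergeddirpath'])
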
def mergeBatches(batchidstrli):
    from fup.helpers.batchdirs import mergeDirBatches
    from fup.utils.commun import current_date, listifyString
    from fup.helpers.batch import batchInfo
    from fup.utils.dbwrap import sql_insertDict, sql_updateDict, sql_deleteRow
    import pandas as pd

    mergedInfodict = mergeDirBatches(batchidstrli)

    bidli = listifyString(batchidstrli)

    if isinstance(mergedInfodict, dict):
        prevInfodict = {}
        for batch in mergedInfodict['batchidli']:
            previnfo = batchInfo(batch)
            previnfo.pop('EstimatedTaskNbr', None)
            previnfo.pop('EstimatedFdgNbr', None)
            prevInfodict[batch] = previnfo
    else:
        print('Cannot merge dirs!')
        return False

    #gather prev info in a df then a dict
    dfli = []
    for bid in prevInfodict.keys():
        df = pd.DataFrame.from_dict(prevInfodict[bid])  #make df from dict
        dfli.append(df)

    dfall = pd.concat(dfli, axis=0)
    prevInfoDictAll = dfall.to_dict('list')

    prevInfoDictAll['BatchID'] = mergedInfodict['mergedID']
    infolog = 'Batch merged from "{}" on {}'.format(batchidstrli,
                                                    current_date())
    prevlog = [str(l) for l in prevInfoDictAll['ChangesLog']]
    prevInfoDictAll['ChangesLog'] = ', '.join(list(
        set(prevlog))) + ', ' + infolog
    prevInfoDictAll['AddedDate'] = current_date()

    if sql_insertDict('followup', prevInfoDictAll) == False:
        return False

    for bid in bidli:
        if sql_deleteRow('followup', 'BatchID', bid) == False:
            return False

    return mergedInfodict['mergedID']
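
# Usage sketch (not part of the original source): mergeBatches merges both the
# assigned folders (via mergeDirBatches) and the followup rows of the given batches,
# returning the new merged batch ID or False. The IDs below are made-up placeholders.
def _example_mergeBatches():
    merged_id = mergeBatches('BID12345, BID67890')
    if merged_id:
        print('New batch ID:', merged_id)
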
def cleanBatchPath(batchstr):
    from fup.utils.commun import cleanPath, listifyString
    from fup.helpers.batch import batchInfo
    from fup.utils.dbwrap import sql_updateDict

    batches = listifyString(batchstr)

    for batch in batches:
        infoBatch = batchInfo(batch)

        #Prepare dict for sql_updateDict func (in future maybe move this to sql_updateDict func)
        prepinfoBatch = {}
        for k, v in infoBatch.items():
            if isinstance(v, list):
                val = v[0]
                if val == 'None' or val == None:
                    val = ''
                prepinfoBatch[k] = val
            else:
                prepinfoBatch[k] = v

        prepinfoBatch['OriginalFilesPath'] = cleanPath(prepinfoBatch['OriginalFilesPath'])
        sql_updateDict('followup', prepinfoBatch, 'BatchID')
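
# Usage sketch (not part of the original source): cleanBatchPath normalises the
# OriginalFilesPath column for one or more batches passed as a comma-separated
# string of IDs (made-up placeholder IDs below).
def _example_cleanBatchPath():
    cleanBatchPath('BID12345, BID67890')
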
def updateBatchOptions(batchlink=''):
    #get batch update options depending on the user type (user/responsible, admin or proofreader)
    import re
    from fup.utils.commun import listifyString
    from fup.utils.jsoninfo import configInfo, sessionInfo
    from fup.helpers.user import get_usersdict
    from fup.helpers.batch import viewBatches
    from fup.helpers.batch import batchInfo

    #print('updateBatchOptions: ', batchlink)

    config = configInfo()
    update_options_responsible = listifyString(
        config['batch_status_options_responsible'])
    update_options_proofreader = listifyString(
        config['batch_status_options_proofreader'])
    update_options_overall = listifyString(
        config['batch_status_options_overall'])
    aircraft = listifyString(config['aircraft'])
    split_batch_factor = listifyString(config['split_batch_factor'])
    allusers = get_usersdict(True)
    allusers = [user for user in allusers if re.search(r'\.', user)]
    print("users: ", allusers)

    session = sessionInfo()
    current_user_rights = session['current_user_rights']

    infoBatch = batchInfo(batchlink)
    #batchInfo can return False or a dict without the expected keys; fall back to empty fields
    if not isinstance(infoBatch, dict) or "Operator" not in infoBatch:
        infoBatch = {
            'Operator': '',
            'Aircraft': '',
            'OverallStatus': '',
            'AddedDate': '',
            'StartDate': '',
            'ImportedDateISAIM': ''
        }

    update_batch_dict = {
        "responsibleStatus": update_options_responsible,
        "proofreaderStatus": update_options_proofreader,
        "overallStatus": update_options_overall,
        "aircraft": aircraft,
        "splitBatch": split_batch_factor,
        "allusers": allusers,
        "batchlink": batchlink,
        "userBatches": viewBatches(user_batches=True),
        "disableCommentResponsible": '',
        "disableCommentProofreader": '',
        "disableCheckbox": '',
        "infoBatch": infoBatch
    }

    if current_user_rights == 'user':
        update_batch_dict["proofreaderStatus"] = ['You cannot change this']
        update_batch_dict["allusers"] = ['You cannot change this']
        update_batch_dict["overallStatus"] = ['You cannot change this']
        update_batch_dict["disableCommentProofreader"] = "disabled"
        update_batch_dict["disableCheckbox"] = "disabled"
        return update_batch_dict
    elif current_user_rights == 'proofreader':
        update_batch_dict["responsibleStatus"] = ['You cannot change this']
        update_batch_dict["disableCommentResponsible"] = "disabled"
        return update_batch_dict
    elif current_user_rights == 'admin':
        return update_batch_dict
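
# Usage sketch (not part of the original source): updateBatchOptions builds the dict
# used to fill the batch update form; some fields are disabled or replaced with
# 'You cannot change this' depending on the current user's rights. The batch ID
# below is a made-up placeholder.
def _example_updateBatchOptions():
    options = updateBatchOptions('BID12345')
    if options:
        print(options['overallStatus'], options['responsibleStatus'])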