class Queries:
    """Contains static variables with sql queries for this app tables.

    Fix: this docstring was originally placed *after* the class attributes,
    so it was a plain string statement and never became ``__doc__``.
    """

    # Shared helpers/config — these run once, when the class body executes.
    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])

    # Query for creating the followup table in db
    sql_create_table_followup = """CREATE TABLE IF NOT EXISTS `followup`
    (`BatchID` TEXT, `Aircraft` TEXT, `Operator` TEXT, `OriginalFilesName` TEXT,
    `OriginalFilesPath` TEXT, `FilesID` TEXT, `AddedDate` DATE, `StartDate` DATE,
    `Responsible` TEXT, `Proofreader` TEXT, `ResponsibleStatus` TEXT,
    `ProofreaderStatus` TEXT, `ResponsibleComment` TEXT, `ProofreaderComment` TEXT,
    `OverallStatus` TEXT, `EstimatedTaskNbr` INTEGER, `EstimatedFdgNbr` INTEGER,
    `TotalRowsNbr` INTEGER, `MPDTaskRowsNbr` INTEGER, `OperatorRowsNbr` INTEGER,
    `FindingsRowsNbr` INTEGER, `ChangesLog` TEXT, `ImportedDateISAIM` DATE);
    """

    # Query for creating the fileshistory table in db
    sql_create_table_fileshistory = """CREATE TABLE IF NOT EXISTS `fileshistory`
    (`FileName` TEXT, `AddedInBatch` TEXT, `FileSize` INTEGER,
    `ModificationDate` DATE);
    """

    # Used for creation of user table
    sql_create_table_users = """CREATE TABLE IF NOT EXISTS `users`
    (`User` TEXT, `Password` TEXT, `Rights` TEXT, `Proofreader` TEXT,
    PRIMARY KEY(`User`));"""

    def create_followup_db(self):
        """Create the db with the default tables."""
        for query in (self.sql_create_table_users,
                      self.sql_create_table_followup,
                      self.sql_create_table_fileshistory):
            self.db.execute_query(query)
        # Block until the sqlite file actually shows up on disk (max 60s).
        self.func.wait_file_on_disk(self.conf["path_to_database"], timeout=60)
class App:
    """Flask application factory / runner for this app."""

    # Shared helper/config — created once, when the class body executes.
    func = Commun()
    conf = func.config_info()

    def create_app(self):
        """Create the app from Blueprints.

        Returns a ``Flask`` instance with the user, commun and batches
        blueprints registered. Blueprints are imported inside the method —
        presumably to avoid circular imports; confirm.
        """
        app = Flask(__name__)
        from dc.views.users import user_bp
        from dc.views.commun import com_bp
        from dc.views.batches import batches_bp
        app.register_blueprint(user_bp)
        app.register_blueprint(com_bp)
        app.register_blueprint(batches_bp)
        return app

    def run_server(self, run_in_browser=True, webview_name="App",
                   width=1024, height=720):
        """Run in browser or in pywebview browser.

        :param run_in_browser: True -> serve with Flask's dev server on
            127.0.0.1 using the port from config; False -> start the server
            on a daemon thread and open a pywebview window pointed at it.
        :param webview_name: window title for the pywebview window.
        :param width: pywebview window width in pixels.
        :param height: pywebview window height in pixels.
        """
        app = self.create_app()
        if run_in_browser:
            port_nbr = int(self.conf["port"])
            # NOTE(review): debug=True is hard-coded — confirm this is not
            # intended for production use.
            app.run(host='127.0.0.1', port=port_nbr, debug=True)
        else:
            def start_server():
                # Flask defaults (127.0.0.1:5000) unless configured elsewhere.
                app.run()
            # NOTE(review): the __main__ guard inside a method means this
            # branch does nothing when run_server is called from an imported
            # module — confirm that is intentional.
            if __name__ == '__main__':
                t = threading.Thread(target=start_server)
                t.daemon = True
                t.start()
                webview.create_window(webview_name,
                                      "http://127.0.0.1:{}/".format(
                                          self.conf["port"]),
                                      width=width, height=height)
                sys.exit()
import pytest
from dc.utils.commun import Commun

# Shared Commun helper instance for the tests in this module.
com_funcs = Commun()
# NOTE(review): bare expression below has no effect — looks like a leftover
# or truncated test stub; confirm whether assertions are missing here.
com_funcs
from flask import Blueprint
from flask import render_template, request, redirect, url_for

batches_bp = Blueprint('batches_bp', __name__)

from dc.models.batches import Batches
from dc.utils.commun import Commun

batch = Batches()
func = Commun()


@batches_bp.route("/generate-batch")
def generateBatch():
    """Create a new batch from the '0 NEW' folder and redirect to the
    success page, or to the failure page with the error message."""
    try:
        bid_created = batch.add_batch()
        # Fix: user-facing typo "succesfully" -> "successfully".
        message = "Batch '{}' successfully created!".format(bid_created)
        return redirect(
            url_for('com_bp.showSuccessPagewithMessage', message=message))
    except Exception as err:
        # Log the full traceback, but show the user only the short message.
        errmsg = func.write_traceback(err)
        print(errmsg)
        return redirect(url_for('com_bp.showFailedPage',
                                errormessage=str(err)))


@batches_bp.route('/view-batches')
def viewBatches():
    """Render the followup table (newest batch first)."""
    context = batch.followup_reversed()
    return render_template('view_batches.html', context=context)
class User:
    """User management backed by the `users` table, plus app-level
    export/import helpers."""

    # Shared helpers/config — created once, when the class body executes.
    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])

    def add_user(self, username, useremail, userpassword, userrights,
                 defaultproofreader):
        """Adds a user to the database.

        NOTE(review): the password is stored as given (plaintext?) —
        consider hashing; confirm against Dbwrap usage.
        """
        user_row = {
            "User": username,
            "Email": useremail,
            "Password": userpassword,
            "Rights": userrights,
            "Proofreader": defaultproofreader}
        return self.db.create_row("users", user_row)

    def remove_user(self, username):
        """Delete user from users table."""
        return self.db.delete_row("users", {"User": username})

    def get_all_users(self, asdict=True):
        """Get user table from database in dictionary format or dataframe
        format."""
        table = self.db.get_table("users", asdict=asdict)
        return table

    def verify_user(self, username, userpassword):
        """Check if user is in the database and the password matches.

        Returns the session dict on success, None otherwise (implicit).
        """
        user_data = self.db.select_row("users", "User", username)
        # Fix: the original only checked that a password row existed and
        # never compared it to the supplied password (login bypass).
        if (len(user_data["User"]) == 1 and len(user_data["Password"]) == 1
                and user_data["Password"][0] == userpassword):
            user_session = {"User": user_data["User"][0],
                            "Email": user_data["Email"][0],
                            "Password": user_data["Password"][0],
                            "Rights": user_data["Rights"][0]}
            self.func.write_json(user_session, "session.json")
            return user_session

    def session_info(self):
        """Get info from session.json file."""
        session_data = self.func.read_json("session.json")
        return session_data

    def get_users_by_rights(self, rights):
        """Get users list by specified rights from users table."""
        users_df = self.get_all_users(asdict=False)
        users_df = users_df[users_df["Rights"] == rights]
        users_df = users_df.to_dict("list")
        usersli = users_df["User"]
        return usersli

    def get_proofreaders(self):
        """Get proofreaders list."""
        return self.get_users_by_rights(rights="proofreader")

    def get_users(self):
        """Get users list."""
        return self.get_users_by_rights(rights="user")

    def get_admins(self):
        """Get admins list."""
        # Fix: previously queried rights="user" (copy-paste from get_users),
        # which returned regular users instead of admins.
        return self.get_users_by_rights(rights="admin")

    def get_settings(self):
        """Get app settings."""
        data = self.func.read_json("config.json")
        return data

    def context_disable(self):
        """Get a dict needed to find out if html element needs to be
        disabled."""
        session = self.session_info()
        context = {}
        if session["Rights"] == "user":
            context["disabled"] = "disabled"
        else:
            context["disabled"] = ""
        return context

    def export_table(self, table_name):
        """Export followup from db to an xlsx file."""
        export_path = self.conf["path_to_excels_exported_from_database"]
        df = self.db.read_table(table_name)
        save_path = os.path.join(export_path, "{}.xlsx".format(table_name))
        df.to_excel(save_path, index=False)

    def import_table(self, table_name):
        """Import table into db, replace table if exists."""
        import_path = self.conf["path_to_excels_to_be_imported_in_database"]
        followup_file_path = os.path.join(import_path,
                                          "{}.xlsx".format(table_name))
        fupdf = pd.read_excel(followup_file_path)
        self.db.insert_table(fupdf, table_name)

    def extend_rows_followup(self):
        """Extend followup by splitting each multi-file batch into one row
        per file, then save and open the result."""
        xlpath = self.conf['path_to_excels_exported_from_database']
        xlfilepath = os.path.join(xlpath, 'followup.xlsx')
        #xllook(xlfilepath, 'A1:W1', close=True)
        fupdf = pd.read_excel(xlfilepath)
        # Append to a list of dfs, bids that have more than one file
        orgfilesdfsli = []
        bidtodel = []
        for i, cell in enumerate(fupdf["OriginalFilesName"].tolist()):
            cellli = self.func.listify_string(str(cell))
            if len(cellli) > 1:
                bid = fupdf.loc[i, "BatchID"]
                bidtodel.append(bid)
                for j, orgfile in enumerate(cellli):
                    # One copy of the batch row per original file.
                    # NOTE(review): .loc assignment on a filtered copy —
                    # pandas SettingWithCopyWarning territory; works because
                    # the filtered frame keeps original index i, but fragile.
                    fup_bid = fupdf[fupdf['BatchID'] == bid]
                    fup_bid.loc[i, "OriginalFilesName"] = orgfile
                    fidli = self.func.listify_string(fup_bid.loc[i, "FilesID"])
                    fup_bid.loc[i, "FilesID"] = fidli[j]
                    orgfilesdfsli.append(fup_bid)
        #Make one df from df list created up
        orgfilesdf = pd.concat(orgfilesdfsli)
        #Remove from df batches that have more than one file
        fupdf = fupdf[~fupdf["BatchID"].str.contains('|'.join(bidtodel),
                                                     na=False)]
        extended_fup = pd.concat([fupdf, orgfilesdf])
        extended_fup.reset_index(drop=True, inplace=True)
        extfilepath = os.path.join(
            xlpath, "followup {} DO NOT IMPORT THIS IN DATABASE.xlsx".format(
                self.func.current_date()))
        extended_fup.to_excel(extfilepath, index=False)
        self.func.xl_look(extfilepath, 'A1:W1', close=False)
class Batches:
    """Batch lifecycle: prepare new operator files, record them in the
    followup and fileshistory tables."""

    # Shared helpers/config — created once, when the class body executes.
    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])
    folder = Folders()
    user = User()

    def process_files_paths(self, filesInfodict):
        """Extract files names, set and id for each file path.

        Returns two comma-joined strings: original names and "<id> <name>"
        new names, in matching order.
        """
        id_lentgh = int(self.conf["IDlentgh"])
        # NOTE(review): new_opfiles_path is never used in this method.
        new_opfiles_path = self.conf["path_to_new_opfiles"]
        paths_to_files = list(filesInfodict.keys())
        original_files_names = []
        new_files_names = []
        for file_path in paths_to_files:
            fid = self.func.generate_id(id_lentgh)
            file_name = self.func.get_file_name(file_path)
            original_files_names.append(file_name)
            # NOTE(review): os.path.join with a single argument is a no-op;
            # this is effectively just the formatted string.
            new_file_name = os.path.join("{} {}".format(fid, file_name))
            new_files_names.append(new_file_name)
        files = ", ".join(original_files_names)
        new_files = ", ".join(new_files_names)
        return files, new_files

    def get_paths_for_unassigned_prepared(self, bid_info):
        """Create the paths to move the folders from new to unassigned and
        prepared."""
        org_path = bid_info["OriginalFilesPath"]
        opfolder = "{} {}".format(bid_info["Operator"], bid_info["Aircraft"])
        new_path = os.path.join(org_path, opfolder)
        # Batch folder name convention: "<Operator> <Aircraft> _<BatchID>".
        bid_opfolder = "{} _{}".format(opfolder, bid_info["BatchID"])
        unassigned_path = os.path.join(self.conf["path_to_batches_unassigned"],
                                       bid_opfolder)
        prepared_path = os.path.join(self.conf["path_to_batches_prepfiles"],
                                     bid_opfolder)
        return new_path, unassigned_path, prepared_path, bid_opfolder

    def add_id_to_prepfiles(self, bid_info, bid_opfolder):
        """Add the new files names to files in prepared/unassigned files."""
        org_files = self.func.listify_string(bid_info["OriginalFilesName"])
        new_files = self.func.listify_string(bid_info["FilesID"])
        prep_path = self.conf["path_to_batches_prepfiles"]
        unassg_path = self.conf["path_to_batches_unassigned"]
        prep_bid_path = os.path.join(prep_path, bid_opfolder)
        unassg_bid_path = os.path.join(unassg_path, bid_opfolder)
        for org, new in zip(org_files, new_files):
            #Paths for prepared files
            preporg_path = os.path.join(prep_bid_path, org)
            prepnew_path = os.path.join(prep_bid_path, new)
            #Paths for unassigned files
            unassgorg_path = os.path.join(unassg_bid_path, org)
            unassgnew_path = os.path.join(unassg_bid_path, new)
            # "Move" to a new name inside the same dir == rename in place.
            self.func.move_folder(preporg_path, prepnew_path)
            self.func.move_folder(unassgorg_path, unassgnew_path)

    def copy_files_to_unassigned_prepared_dirs(self, bid_info):
        """Copy files from new folder to unassigned and prepared path
        folder."""
        new_path, unassigned_path, prepared_path, bid_opfolder = self.get_paths_for_unassigned_prepared(
            bid_info)
        #Raise error if a dir is found in new operator files
        self.func.accept_only_files(new_path)
        #Copy from new to unassigned
        self.func.copy_dirs(new_path, unassigned_path)
        #Copy from unassigned to prepared
        self.func.copy_dirs(unassigned_path, prepared_path)
        #Rename files from prepared folder
        self.add_id_to_prepfiles(bid_info, bid_opfolder)

    def check_file_history(self, file_info):
        """Check if an operator file was added before based on name, size
        and modification date.

        Raises Exception naming the earlier batch when a duplicate is found.
        """
        # Stream the table in chunks to keep memory bounded.
        dfgen, conn = self.db.read_table("fileshistory", chunk_size=50000)
        for df in dfgen:
            df_name = df[df["FileName"] == file_info["FileName"]]
            if df_name.shape[0] > 0:
                df_size = df[df["FileSize"] == int(file_info["FileSize"])]
                df_date = df[df["ModificationDate"] ==
                             file_info["ModificationDate"]]
                if df_size.shape[0] > 0 and df_date.shape[0] > 0:
                    # Close before raising so the connection is not leaked.
                    conn.close()
                    raise Exception(
                        "File '{}' was added before in batch '{}'".format(
                            df_name["FileName"].tolist()[0],
                            df_name["AddedInBatch"].tolist()[0]))
        conn.close()

    def prepare_fileshistory_info(self, bid_info, files_info):
        """Get rows to insert in fileshistory and check if was previously
        added."""
        rows = []
        for file_path, info in files_info["FilesInfo"].items():
            file_name = self.func.get_file_name(file_path)
            file_info = {
                "FileName": file_name,
                "AddedInBatch": bid_info["BatchID"],
                "FileSize": info["FileSize"],
                "ModificationDate": info["ModificationDate"]
            }
            # Raises if this exact file (name+size+mtime) was seen before.
            self.check_file_history(file_info)
            rows.append(file_info)
        return rows

    def add_batch(self):
        """Add batch to database.

        Gathers new-files info, copies the files through the folder
        pipeline, then inserts the followup and fileshistory rows.
        Returns the new batch id.
        """
        bid = self.func.generate_id(int(self.conf["IDlentgh"]))
        files_info = self.folder.new_opfiles_info()
        files_names, new_files_names = self.process_files_paths(
            files_info["FilesInfo"])
        userdata = self.user.session_info()
        batch_info_followup = {
            "BatchID": bid,
            "Aircraft": files_info["Aircraft"],
            "Operator": files_info["Operator"],
            "OriginalFilesName": files_names,
            "OriginalFilesPath": self.conf["path_to_new_opfiles"],
            "FilesID": new_files_names,
            "AddedDate": self.func.current_date(),
            "Responsible": "UNASSIGNED",
            "Proofreader": "UNASSIGNED",
            "ResponsibleStatus": "UNASSIGNED",
            "ProofreaderStatus": "UNASSIGNED",
            "OverallStatus": "UNASSIGNED",
            "ChangesLog": "Batch added by {},".format(userdata["User"])
        }
        batch_info_fileshistory = self.prepare_fileshistory_info(
            batch_info_followup, files_info)
        #Data for db is prepared now copy the files.
        self.copy_files_to_unassigned_prepared_dirs(batch_info_followup)
        #Now the files are copied, insert data to database
        #Insert the batch
        self.db.create_row("followup", batch_info_followup)
        #Insert the file history
        for file_history in batch_info_fileshistory:
            self.db.create_row("fileshistory", file_history)
        return bid

    def followup_reversed(self):
        """Get followup table from db reverse it and return it as dict."""
        df = self.db.read_table("followup")
        # Newest rows first: reverse the index order before dict conversion.
        df_dict = df.reindex(index=df.index[::-1]).to_dict('list')
        return df_dict

    def get_batch(self, bid=""):
        """Get batch id if specified."""
        bid_data = self.db.select_row("followup", "BatchID", bid)
        return bid_data

    def bid_options(self):
        """Batch status options from config.json file."""
        batch_opt = {
            "status_user": self.func.listify_string(
                self.conf["batch_status_options_responsible"]),
            "status_proofreader": self.func.listify_string(
                self.conf["batch_status_options_proofreader"]),
            "status_overall": self.func.listify_string(
                self.conf["batch_status_options_overall"]),
            "aircrafts": self.func.listify_string(self.conf["aircrafts"]),
            "split_batch_factor":
                self.func.listify_string(self.conf["split_batch_factor"])
        }
        # Merge in the currently-selected batch info from batch.json.
        bid_data = self.func.read_json("batch.json")
        batch_opt.update(bid_data)
        return batch_opt
class Batches:
    """Batch lifecycle: prepare new operator files, record them in the
    followup/fileshistory tables and drive the status workflow."""

    # Shared helpers/config — created once, when the class body executes.
    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])
    folder = Folders()
    user = User()

    def process_files_paths(self, filesInfodict):
        """Extract files names, set and id for each file path.

        Returns two comma-joined strings: original names and "<id> <name>"
        new names, in matching order.
        """
        id_lentgh = int(self.conf["IDlentgh"])
        new_opfiles_path = self.conf["path_to_new_opfiles"]
        paths_to_files = list(filesInfodict.keys())
        original_files_names = []
        new_files_names = []
        for file_path in paths_to_files:
            fid = self.func.generate_id(id_lentgh)
            file_name = self.func.get_file_name(file_path)
            original_files_names.append(file_name)
            # Fix: dropped a pointless single-argument os.path.join — it
            # returned its argument unchanged.
            new_file_name = "{} {}".format(fid, file_name)
            new_files_names.append(new_file_name)
        files = ", ".join(original_files_names)
        new_files = ", ".join(new_files_names)
        return files, new_files

    def get_paths_for_unassigned_prepared(self, bid_info):
        """Create the paths to move the folders from new to unassigned and
        prepared."""
        org_path = bid_info["OriginalFilesPath"]
        opfolder = "{} {}".format(bid_info["Operator"], bid_info["Aircraft"])
        new_path = os.path.join(org_path, opfolder)
        # Batch folder name convention: "<Operator> <Aircraft> _<BatchID>".
        bid_opfolder = "{} _{}".format(opfolder, bid_info["BatchID"])
        unassigned_path = os.path.join(self.conf["path_to_batches_unassigned"],
                                       bid_opfolder)
        prepared_path = os.path.join(self.conf["path_to_batches_prepfiles"],
                                     bid_opfolder)
        return new_path, unassigned_path, prepared_path, bid_opfolder

    def add_id_to_prepfiles(self, bid_info, bid_opfolder):
        """Add the new files names to files in prepared/unassigned files."""
        org_files = self.func.listify_string(bid_info["OriginalFilesName"])
        new_files = self.func.listify_string(bid_info["FilesID"])
        prep_path = self.conf["path_to_batches_prepfiles"]
        unassg_path = self.conf["path_to_batches_unassigned"]
        prep_bid_path = os.path.join(prep_path, bid_opfolder)
        unassg_bid_path = os.path.join(unassg_path, bid_opfolder)
        for org, new in zip(org_files, new_files):
            #Paths for prepared files
            preporg_path = os.path.join(prep_bid_path, org)
            prepnew_path = os.path.join(prep_bid_path, new)
            #Paths for unassigned files
            unassgorg_path = os.path.join(unassg_bid_path, org)
            unassgnew_path = os.path.join(unassg_bid_path, new)
            # "Move" to a new name inside the same dir == rename in place.
            self.func.move_folder(preporg_path, prepnew_path)
            self.func.move_folder(unassgorg_path, unassgnew_path)

    def copy_files_to_unassigned_prepared_dirs(self, bid_info):
        """Copy files from new folder to unassigned and prepared path
        folder."""
        new_path, unassigned_path, prepared_path, bid_opfolder = \
            self.get_paths_for_unassigned_prepared(bid_info)
        #Raise error if a dir is found in new operator files
        self.func.accept_only_files(new_path)
        #Copy from new to unassigned
        self.func.copy_dirs(new_path, unassigned_path)
        #Copy from unassigned to prepared
        self.func.copy_dirs(unassigned_path, prepared_path)
        #Rename files from prepared folder
        self.add_id_to_prepfiles(bid_info, bid_opfolder)

    def check_file_history(self, file_info):
        """Check if an operator file was added before based on name, size
        and modification date.

        Raises Exception naming the earlier batch when a duplicate is found.
        """
        # Stream the table in chunks to keep memory bounded.
        dfgen, conn = self.db.read_table("fileshistory", chunk_size=50000)
        for df in dfgen:
            df_name = df[df["FileName"] == file_info["FileName"]]
            if df_name.shape[0] > 0:
                df_size = df[df["FileSize"] == int(file_info["FileSize"])]
                df_date = df[df["ModificationDate"] ==
                             file_info["ModificationDate"]]
                if df_size.shape[0] > 0 and df_date.shape[0] > 0:
                    # Close before raising so the connection is not leaked.
                    conn.close()
                    raise Exception(
                        "File '{}' was added before in batch '{}'".format(
                            df_name["FileName"].tolist()[0],
                            df_name["AddedInBatch"].tolist()[0]))
        conn.close()

    def prepare_fileshistory_info(self, bid_info, files_info):
        """Get rows to insert in fileshistory and check if was previously
        added."""
        rows = []
        for file_path, info in files_info["FilesInfo"].items():
            file_name = self.func.get_file_name(file_path)
            file_info = {"FileName": file_name,
                         "AddedInBatch": bid_info["BatchID"],
                         "FileSize": info["FileSize"],
                         "ModificationDate": info["ModificationDate"]}
            # Raises if this exact file (name+size+mtime) was seen before.
            self.check_file_history(file_info)
            rows.append(file_info)
        return rows

    def add_batch(self):
        """Add batch to database.

        Gathers new-files info, copies the files through the folder
        pipeline, then inserts the followup and fileshistory rows.
        Returns the new batch id.
        """
        bid = self.func.generate_id(int(self.conf["IDlentgh"]))
        files_info = self.folder.new_opfiles_info()
        files_names, new_files_names = self.process_files_paths(
            files_info["FilesInfo"])
        userdata = self.user.session_info()
        batch_info_followup = {
            "BatchID": bid,
            "Aircraft": files_info["Aircraft"],
            "Operator": files_info["Operator"],
            "OriginalFilesName": files_names,
            "OriginalFilesPath": self.conf["path_to_new_opfiles"],
            "FilesID": new_files_names,
            "AddedDate": self.func.current_date(),
            "Responsible": "UNASSIGNED",
            "Proofreader": "UNASSIGNED",
            "ResponsibleStatus": "",
            "ProofreaderStatus": "",
            "OverallStatus": "UNASSIGNED",
            "ChangesLog": "Batch added by {},".format(userdata["User"])}
        batch_info_fileshistory = self.prepare_fileshistory_info(
            batch_info_followup, files_info)
        #Data for db is prepared now copy the files.
        self.copy_files_to_unassigned_prepared_dirs(batch_info_followup)
        #Now the files are copied, insert data to database
        #Insert the batch
        self.db.create_row("followup", batch_info_followup)
        #Insert the file history
        for file_history in batch_info_fileshistory:
            self.db.create_row("fileshistory", file_history)
        return bid

    def followup_reversed(self):
        """Get followup table from db reverse it and return it as dict."""
        df = self.db.read_table("followup")
        # Newest rows first: reverse the index order before dict conversion.
        df_dict = df.reindex(index=df.index[::-1]).to_dict('list')
        return df_dict

    def followup_for_responsible(self):
        """Get followup table from db reverse it filter it for current user
        and return it as dict."""
        userdata = self.user.session_info()
        df = self.db.select_row("followup", "Responsible", userdata["User"],
                                asdict=False)
        df_dict = df.reindex(index=df.index[::-1]).to_dict('list')
        return df_dict

    def get_batch(self, bid=""):
        """Get batch id if specified.

        Flattens the single-row select result to a plain column->value dict.
        """
        bid_data = self.db.select_row("followup", "BatchID", bid)
        data = {}
        for col, val in bid_data.items():
            data[col] = val[0]
        return data

    def bid_options(self):
        """Batch status options from config.json file."""
        batch_opt = {
            "status_user": self.func.listify_string(
                self.conf["batch_status_options_responsible"]),
            "status_proofreader": self.func.listify_string(
                self.conf["batch_status_options_proofreader"]),
            "status_overall": self.func.listify_string(
                self.conf["batch_status_options_overall"]),
            "aircrafts": self.func.listify_string(self.conf["aircrafts"]),
            "split_batch_factor": self.func.listify_string(
                self.conf["split_batch_factor"]),
        }
        # Merge in current batch info, html-disable flag and user lists.
        bid_data = self.func.read_json("batch.json")
        batch_opt.update(bid_data)
        batch_opt.update(self.user.context_disable())
        batch_opt.update({"users": self.user.get_users(),
                          "proofreaders": self.user.get_proofreaders()})
        return batch_opt

    def set_default_proofreader(self, data):
        """Update with the default proofreader for the given responsible if
        needed."""
        # Idiom fix: "key in data" instead of "key in list(data.keys())".
        if "Responsible" in data:
            user_data = self.db.select_row("users", "User",
                                           data["Responsible"])
            data["Proofreader"] = user_data["Proofreader"][0]
            data["ResponsibleStatus"] = ""
            data["ProofreaderStatus"] = ""
            data["OverallStatus"] = "ONGOING"
            data["StartDate"] = self.func.current_date()
            bid_info = self.func.read_json("batch.json")
            data["ChangesLog"] = bid_info["ChangesLog"] + \
                "\nAssigned to {} on {},".format(data["Responsible"],
                                                 data["StartDate"])
            return data
        else:
            return data

    def clear_start_date(self, data):
        """If Status is STANDBY or UNRECORDABLE then clear start date."""
        if "ProofreaderStatus" in data:
            if data["ProofreaderStatus"] == "UNRECORDABLE" or \
                    data["ProofreaderStatus"] == "STANDBY":
                data["StartDate"] = ""
                data["ImportedDateISAIM"] = ""
                data["TotalRowsNbr"] = ""
                data["MPDTaskRowsNbr"] = ""
                data["OperatorRowsNbr"] = ""
                data["FindingsRowsNbr"] = ""
                data["EstimatedTaskNbr"] = ""
                data["EstimatedFdgNbr"] = ""
        return data

    def dcs_info(self, dcspath):
        """Get xml info from the extracted xml from FE.

        Returns a dict with the row counters summed from the <sum> nodes.
        """
        # Fix: open the file in a context manager — the original leaked the
        # open file handle.
        with open(dcspath) as file:
            tree = etree.parse(file)
        sumAll = tree.xpath('//sum')
        totalRows = sum([int(s.text) for s in sumAll])
        sumMpd = tree.xpath('//mpdTask//sum')
        mpdtask = sum([int(s.text) for s in sumMpd])
        sumOp = tree.xpath('//opeTask//sum')
        optask = sum([int(s.text) for s in sumOp])
        sumFindings = tree.xpath("//finding[@activated='true']//sum")
        findings = sum([int(s.text) for s in sumFindings])
        info_dcs = {"TotalRowsNbr": totalRows,
                    "MPDTaskRowsNbr": mpdtask,
                    "OperatorRowsNbr": optask,
                    "FindingsRowsNbr": findings}
        return info_dcs

    def update_ifstatus_finished(self, data):
        """If Status is FINISHED update ImportedDateISAIM column."""
        if "ProofreaderStatus" in data:
            if data["ProofreaderStatus"] == "FINISHED":
                data["ImportedDateISAIM"] = self.func.current_date()
        return data

    def update_ifstatus_toimport(self, data):
        """If ProofreaderStatus is To import then look for dcs info and
        update data."""
        if "ProofreaderStatus" in data:
            if data["ProofreaderStatus"] == "TO BE IMPORTED":
                dcs_files = os.listdir(self.conf["path_to_dcs_info"])
                dcs_file = [f for f in dcs_files if data["BatchID"] in f]
                if len(dcs_file) == 1:
                    dcs_file_path = os.path.join(
                        self.conf["path_to_dcs_info"], dcs_file[0])
                    dcs_result = self.dcs_info(dcs_file_path)
                    data.update(dcs_result)
                    return data
                else:
                    raise Exception(
                        "DCS file for batch '{}' not found!".format(
                            data["BatchID"]))
            else:
                return data
        return data

    def process_status_batch_form(self, data):
        """Process dict from, form received from update_status page.

        Normalizes the form data, applies status side effects, moves the
        batch folder through the workflow dirs and updates the db row.
        """
        data = self.func.remove_null(data)
        data = self.set_default_proofreader(data)
        data = self.clear_start_date(data)
        data = self.update_ifstatus_toimport(data)
        data = self.update_ifstatus_finished(data)
        #Move if needed the folders
        self.folder.move_prepfile_in_assigned(data)      #assign
        self.folder.move_assigned_in_tbchecked(data)     #to be checked
        self.folder.move_tobchecked_in_tbimported(data)  #to be imported
        self.folder.move_tbimported_in_finished(data)    #finished
        self.folder.move_tbchecked_in_assigned(data)     #rework
        self.folder.move_tobchecked_in_standby(data)     #standby
        self.folder.move_tobchecked_in_unrecordable(data)  #unrecordable
        return self.db.update_row("followup", data, "BatchID")
class Folders: """Creation/manipulation of folders for this app""" cwd = os.getcwd() func = Commun() def create_needed_dirs(self): """Create the working directories if doesn't exist""" root_path = os.path.join(self.cwd, 'DC BATCHES IN WORK') sub_folders = [ '0 NEW', '1 UNASSIGNED', '2 PREPARED FILES', '3 ASSIGNED', '4 TO BE CHECKED', '5 TO BE IMPORTED', '6 FINISHED', '7 IN STANDBY', '8 UNRECORDABLE' ] extraDirs = [ root_path, 'excels exported', 'excels to be imported', 'FUDB', 'bin' ] for extradir in extraDirs: try: os.mkdir(extradir) except: pass for adir in sub_folders: subdir = os.path.join(root_path, adir) try: os.mkdir(subdir) except: pass def defaultconfig(self): """"Create default config.json file""" config_data = { "path_to_database": "FUDB/FOLLOWUP.DB", "path_to_frontend": "FUDB/", "path_to_dcs_info": "FUDB/", "path_to_bin": "bin/", "path_to_excels_exported_from_database": "excels exported/", "path_to_excels_to_be_imported_in_database": "excels to be imported/", "path_to_new_opfiles": "DC BATCHES IN WORK/0 NEW/", "path_to_batches_unassigned": "DC BATCHES IN WORK/1 UNASSIGNED/", "path_to_batches_prepfiles": "DC BATCHES IN WORK/2 PREPARED FILES/", "path_to_batches_assigned": "DC BATCHES IN WORK/3 ASSIGNED/", "path_to_batches_tobechecked": "DC BATCHES IN WORK/4 TO BE CHECKED/", "path_to_batches_tbimported": "DC BATCHES IN WORK/5 TO BE IMPORTED/", "path_to_batches_finished": "DC BATCHES IN WORK/6 FINISHED/", "path_to_batches_instandby": "DC BATCHES IN WORK/7 IN STANDBY/", "path_to_batches_unrecordable": "DC BATCHES IN WORK/8 UNRECORDABLE/", "batch_status_options_responsible": "PREP. 
OP FILE, IMPORTATION & SPLIT FILE, RELIABILITY & DATA UPGRADE, CHECK OP FILE, CHECK SPLIT FILE, CHECK FRONT END, **TO BE CHECKED", "batch_status_options_proofreader": "OP FILE OK, SPLIT FILE OK, FRONT END OK, **TO BE IMPORTED, **FINISHED, **REWORK, **STANDBY, **UNRECORDABLE", "batch_status_options_overall": "ONGOING, STANDBY, FINISHED, UNRECORDABLE", "aircrafts": "A300, A300-600, A310, A320, A330, A340, A350, A380", "split_batch_factor": "2, 3, 4, 5, 6, 7, 8, 9", "IDlentgh": "6", "port": "5000" } if not os.path.isfile(os.path.join(self.cwd, "config.json")): self.func.write_json(config_data, self.cwd, fname="config.json") def make_default_dirs(self): """Create default dirs and config.json if needed""" self.defaultconfig() self.create_needed_dirs() def db_exists(self): """Check if db exists in the path""" conf = self.func.config_info() return self.func.file_exists(conf["path_to_database"]) def operator_aircraft_info(self, apath): """Get operator and aircraft info from the path given""" opfolder_path = apath.split("0 NEW")[-1] opfolder = opfolder_path.replace("/", "") opfolder = opfolder.replace("\\", "") opfolder = opfolder.split(" ") operator = opfolder[0].strip() aircraft = opfolder[1].strip() return operator, aircraft def get_files_info(self, files_paths): """Get files size and modification date in a dict""" files_info = {} for file_path in files_paths: file_info = self.func.get_file_size_mtime(file_path) files_info[file_path] = file_info return files_info def new_opfiles_info(self): """Get info from new opfiles folders, files""" conf = self.func.config_info() new_folders = self.func.get_folders(conf["path_to_new_opfiles"]) if len(new_folders) > 1 and len(new_folders) != 0: raise Exception("Only one folder must be in '0 NEW' folder!") else: folder = new_folders[0] op, ac = self.operator_aircraft_info(folder) files = self.func.get_files(folder) files_info = self.get_files_info(files) opfiles_info = { "Path": folder, "Operator": op, "Aircraft": ac, "FilesInfo": 
files_info } return opfiles_info def bid_folder_name(self): """Ex: TCX 320 _WOVteA - remake the folder name of the batch""" bid_info = self.func.read_json("batch.json") folder_name = "{} {} _{}".format(bid_info["Operator"], bid_info["Aircraft"], bid_info["BatchID"]) return folder_name def move_prepfile_in_assigned(self, data): """Move folder from prepared files to assigned folder""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "Responsible" in list(data.keys()): prep_files = os.listdir(conf["path_to_batches_prepfiles"]) if folder_name in prep_files: #Move folder from prepared to assgned src = os.path.join(conf["path_to_batches_prepfiles"], folder_name) dst = os.path.join(conf["path_to_batches_assigned"], folder_name) self.func.move_folder(src, dst) #Copy front end macro to assigned dir dir_feli = os.listdir(conf["path_to_frontend"]) dir_feli = [f for f in dir_feli if f.endswith('.xlsm')] feFile = [f for f in dir_feli if 'BETA' not in f.upper()][0] fe_macro_path = os.path.join(conf["path_to_frontend"], feFile) fe_newpath = os.path.join(dst, "_{} {}".format(folder_name, feFile)) self.func.copy_file(fe_macro_path, fe_newpath) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '3 ASSIGNED'!".format( folder_name)) else: raise Exception( "Folder {} not found in '2 PREPARED FILES'!".format( folder_name)) def move_assigned_in_tbchecked(self, data): """If ResponsibleStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ResponsibleStatus" in list(data.keys()): if data["ResponsibleStatus"] == "TO BE CHECKED": files = os.listdir(conf["path_to_batches_assigned"]) if folder_name in files: src = os.path.join(conf["path_to_batches_assigned"], folder_name) dst = os.path.join(conf["path_to_batches_tobechecked"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '4 TO BE CHECKED'!".format( 
folder_name)) else: raise Exception( "Folder {} not found in '3 ASSIGNED'!".format( folder_name)) def move_tbchecked_in_assigned(self, data): """If ProofreaderStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ProofreaderStatus" in list(data.keys()): if data["ProofreaderStatus"] == "REWORK": files = os.listdir(conf["path_to_batches_tobechecked"]) if folder_name in files: src = os.path.join(conf["path_to_batches_tobechecked"], folder_name) dst = os.path.join(conf["path_to_batches_assigned"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '3 ASSIGNED'!".format( folder_name)) else: raise Exception( "Folder {} not found in '4 TO BE CHECKED'!".format( folder_name)) def move_tobchecked_in_tbimported(self, data): """If ProofreaderStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ProofreaderStatus" in list(data.keys()): if data["ProofreaderStatus"] == "TO BE IMPORTED": files = os.listdir(conf["path_to_batches_tobechecked"]) if folder_name in files: src = os.path.join(conf["path_to_batches_tobechecked"], folder_name) dst = os.path.join(conf["path_to_batches_tbimported"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '5 TO BE IMPORTED'!". 
format(folder_name)) else: raise Exception( "Folder {} not found in '4 TO BE CHECKED'!".format( folder_name)) def move_tbimported_in_finished(self, data): """If ProofreaderStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ProofreaderStatus" in list(data.keys()): if data["ProofreaderStatus"] == "FINISHED": files = os.listdir(conf["path_to_batches_tbimported"]) if folder_name in files: src = os.path.join(conf["path_to_batches_tbimported"], folder_name) dst = os.path.join(conf["path_to_batches_finished"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '6 FINISHED'!".format( folder_name)) else: raise Exception( "Folder {} not found in '5 TO BE IMPORTED'!".format( folder_name)) def move_tobchecked_in_standby(self, data): """If ProofreaderStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ProofreaderStatus" in list(data.keys()): if data["ProofreaderStatus"] == "STANDBY": files = os.listdir(conf["path_to_batches_tobechecked"]) if folder_name in files: src = os.path.join(conf["path_to_batches_tobechecked"], folder_name) dst = os.path.join(conf["path_to_batches_instandby"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '7 IN STANDBY'!".format( folder_name)) else: raise Exception( "Folder {} not found in '4 TO BE CHECKED'!".format( folder_name)) def move_tobchecked_in_unrecordable(self, data): """If ProofreaderStatus is X then move folder from src to dst""" conf = self.func.config_info() folder_name = self.bid_folder_name() if "ProofreaderStatus" in list(data.keys()): if data["ProofreaderStatus"] == "UNRECORDABLE": files = os.listdir(conf["path_to_batches_tobechecked"]) if folder_name in files: src = os.path.join(conf["path_to_batches_tobechecked"], folder_name) dst = 
os.path.join(conf["path_to_batches_unrecordable"], folder_name) self.func.move_folder(src, dst) if not self.func.folder_exists(dst): raise Exception( "Folder {} not moved in '8 UNRECORDABLE'!".format( folder_name)) else: raise Exception( "Folder {} not found in '4 TO BE CHECKED'!".format( folder_name))
class User:
    """User management: CRUD on the `users` table, session handling, and
    followup-table import/export."""

    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])

    def add_user(self, username, userpassword, userrights,
                 defaultproofreader):
        """Adds a user to the database"""
        user_row = {
            "User": username,
            "Password": userpassword,
            "Rights": userrights,
            "Proofreader": defaultproofreader
        }
        return self.db.create_row("users", user_row)

    def remove_user(self, username):
        """Delete user from users table"""
        return self.db.delete_row("users", "User", username)

    def get_all_users(self, asdict=True):
        """Get user table from database in dictionary format or dataframe
        format"""
        table = self.db.get_table("users", asdict=asdict)
        return table

    def verify_user(self, username, userpassword):
        """Check the credentials against the database.

        On success writes session.json and returns the session dict;
        implicitly returns None when the user is unknown or the password
        does not match.
        """
        user_data = self.db.select_row("users", "User", username)
        if len(user_data["User"]) == 1 and len(user_data["Password"]) == 1:
            # BUG FIX: the supplied password was previously never compared,
            # so any existing username could open a session.
            # NOTE(review): passwords appear to be stored in plain text --
            # confirm whether a hash comparison is expected here.
            if user_data["Password"][0] != userpassword:
                return None
            user_session = {
                "User": user_data["User"][0],
                "Password": user_data["Password"][0],
                "Rights": user_data["Rights"][0]
            }
            self.func.write_json(user_session, "session.json")
            return user_session

    def session_info(self):
        """Get info from session.json file"""
        session_data = self.func.read_json("session.json")
        return session_data

    def get_users_by_rights(self, rights):
        """Get users list by specified rights from users table"""
        users_df = self.get_all_users(asdict=False)
        users_df = users_df[users_df["Rights"] == rights]
        users_df = users_df.to_dict("list")
        usersli = users_df["User"]
        return usersli

    def get_proofreaders(self):
        """Get proofreaders list"""
        return self.get_users_by_rights(rights="proofreader")

    def get_users(self):
        """Get users list"""
        return self.get_users_by_rights(rights="user")

    def get_admins(self):
        """Get admins list"""
        # BUG FIX: previously queried rights="user", which returned the
        # regular-user list instead of the administrators.
        return self.get_users_by_rights(rights="admin")

    def get_settings(self):
        """Get app settings"""
        data = self.func.read_json("config.json")
        return data

    def export_table(self, table_name):
        """Export a table from the db to an xlsx file in the configured
        export folder."""
        export_path = self.conf["path_to_excels_exported_from_database"]
        df = self.db.read_table(table_name)
        save_path = os.path.join(export_path, "{}.xlsx".format(table_name))
        df.to_excel(save_path, index=False)

    def import_table(self, table_name):
        """Import table into db, replace table if exists"""
        import_path = self.conf["path_to_excels_to_be_imported_in_database"]
        followup_file_path = os.path.join(import_path,
                                          "{}.xlsx".format(table_name))
        fupdf = pd.read_excel(followup_file_path)
        self.db.insert_table(fupdf, table_name)
class Folders:
    """Creation/manipulation of folders for this app.

    NOTE(review): a second `class Folders` with different defaultconfig keys
    ("IDlentgh", "port" instead of "generateBigID"/"customIDlentgh") exists
    later in this source; the Batches code reads conf["IDlentgh"]/conf["port"],
    so confirm which of the two definitions is the live one.
    """

    cwd = os.getcwd()
    func = Commun()

    def create_needed_dirs(self):
        """Create the working directories if they don't exist"""
        root_path = os.path.join(self.cwd, 'DC BATCHES IN WORK')
        sub_folders = ['0 NEW', '1 UNASSIGNED', '2 PREPARED FILES',
                       '3 ASSIGNED', '4 TO BE CHECKED', '5 TO BE IMPORTED',
                       '6 FINISHED', '7 IN STANDBY', '8 UNRECORDABLE']
        extraDirs = [root_path, 'excels exported', 'excels to be imported',
                     'FUDB', 'bin']
        # Best-effort creation: an already-existing dir is fine. OSError is
        # narrower than the previous bare `except`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        for extradir in extraDirs:
            try:
                os.mkdir(extradir)
            except OSError:
                pass
        for adir in sub_folders:
            subdir = os.path.join(root_path, adir)
            try:
                os.mkdir(subdir)
            except OSError:
                pass

    def defaultconfig(self):
        """Create default config.json file"""
        # (fixed the `""""` quadruple-quote docstring typo above)
        config_data = {
            "path_to_database": "FUDB/FOLLOWUP.DB",
            "path_to_frontend": "FUDB/",
            "path_to_dcs_info": "FUDB/",
            "path_to_bin": "bin/",
            "path_to_excels_exported_from_database": "excels exported/",
            "path_to_excels_to_be_imported_in_database": "excels to be imported/",
            "path_to_new_opfiles": "DC BATCHES IN WORK/0 NEW/",
            "path_to_batches_unassigned": "DC BATCHES IN WORK/1 UNASSIGNED/",
            "path_to_batches_prepfiles": "DC BATCHES IN WORK/2 PREPARED FILES/",
            "path_to_batches_assigned": "DC BATCHES IN WORK/3 ASSIGNED/",
            "path_to_batches_tobechecked": "DC BATCHES IN WORK/4 TO BE CHECKED/",
            "path_to_batches_tbimported": "DC BATCHES IN WORK/5 TO BE IMPORTED/",
            "path_to_batches_finished": "DC BATCHES IN WORK/6 FINISHED/",
            "path_to_batches_instandby": "DC BATCHES IN WORK/7 IN STANDBY/",
            "path_to_batches_unrecordable": "DC BATCHES IN WORK/8 UNRECORDABLE/",
            "batch_status_options_responsible": "PREP. OP FILE, IMPORTATION & SPLIT FILE, RELIABILITY & DATA UPGRADE, CHECK OP FILE, CHECK SPLIT FILE, CHECK FRONT END, **TO BE CHECKED",
            "batch_status_options_proofreader": "OP FILE OK, SPLIT FILE OK, FRONT END OK, **TO BE IMPORTED, **FINISHED, **REWORK, **STANDBY, **UNRECORDABLE",
            "batch_status_options_overall": "ONGOING, STANDBY, FINISHED, UNRECORDABLE",
            "aircraft": "A300, A300-600, A310, A320, A330, A340, A350, A380",
            "split_batch_factor": "2, 3, 4, 5, 6, 7, 8, 9",
            "generateBigID": "NO",
            "generateCustomID": "YES",
            "customIDlentgh": "6"
        }
        # Never overwrite an existing config.json.
        if not os.path.isfile(os.path.join(self.cwd, "config.json")):
            self.func.write_json(config_data, self.cwd, fname="config.json")

    def make_default_dirs(self):
        """Create default dirs and config.json if needed"""
        self.defaultconfig()
        self.create_needed_dirs()

    def db_exists(self):
        """Check if db exists in the path"""
        conf = self.func.config_info()
        return self.func.file_exists(conf["path_to_database"])
from dc.models.queries import Queries from dc.utils.commun import Commun from dc.utils.dbwrap import Dbwrap q = Queries() func = Commun() conf = func.config_info() db = Dbwrap(conf["path_to_database"]) class Batches:
class Batches:
    """Batch lifecycle: creation from new operator files, status updates,
    folder moves, notification mails, and deletion."""

    func = Commun()
    conf = func.config_info()
    db = Dbwrap(conf["path_to_database"])
    folder = Folders()
    user = User()
    # NOTE(review): MAIL_SERVER/MAIL_PORT/MAIL_PASSWORD are not written by
    # Folders.defaultconfig -- confirm they are added to config.json elsewhere.
    mail = Mail(conf["MAIL_SERVER"], conf["MAIL_PORT"])

    def process_files_paths(self, filesInfodict):
        """Extract files names and set an id for each file path.

        Returns two comma-joined strings: the original file names and the
        id-prefixed file names.
        """
        id_lentgh = int(self.conf["IDlentgh"])
        paths_to_files = list(filesInfodict.keys())
        original_files_names = []
        new_files_names = []
        for file_path in paths_to_files:
            fid = self.func.generate_id(id_lentgh)
            file_name = self.func.get_file_name(file_path)
            original_files_names.append(file_name)
            # FIX: os.path.join() with a single argument was a no-op wrapper.
            new_file_name = "{} {}".format(fid, file_name)
            new_files_names.append(new_file_name)
        files = ", ".join(original_files_names)
        new_files = ", ".join(new_files_names)
        return files, new_files

    def get_paths_for_unassigned_prepared(self, bid_info):
        """Create the paths to move the folders from new to unassigned and
        prepared"""
        org_path = bid_info["OriginalFilesPath"]
        opfolder = "{} {}".format(bid_info["Operator"], bid_info["Aircraft"])
        new_path = os.path.join(org_path, opfolder)
        bid_opfolder = "{} _{}".format(opfolder, bid_info["BatchID"])
        unassigned_path = os.path.join(self.conf["path_to_batches_unassigned"],
                                       bid_opfolder)
        prepared_path = os.path.join(self.conf["path_to_batches_prepfiles"],
                                     bid_opfolder)
        return new_path, unassigned_path, prepared_path, bid_opfolder

    def add_id_to_prepfiles(self, bid_info, bid_opfolder):
        """Add the new files names to files in prepared/unassigned files"""
        org_files = self.func.listify_string(bid_info["OriginalFilesName"])
        new_files = self.func.listify_string(bid_info["FilesID"])
        prep_path = self.conf["path_to_batches_prepfiles"]
        unassg_path = self.conf["path_to_batches_unassigned"]
        prep_bid_path = os.path.join(prep_path, bid_opfolder)
        unassg_bid_path = os.path.join(unassg_path, bid_opfolder)
        for org, new in zip(org_files, new_files):
            # Paths for prepared files
            preporg_path = os.path.join(prep_bid_path, org)
            prepnew_path = os.path.join(prep_bid_path, new)
            # Paths for unassigned files
            unassgorg_path = os.path.join(unassg_bid_path, org)
            unassgnew_path = os.path.join(unassg_bid_path, new)
            self.func.move_folder(preporg_path, prepnew_path)
            self.func.move_folder(unassgorg_path, unassgnew_path)

    def copy_files_to_unassigned_prepared_dirs(self, bid_info):
        """Copy files from new folder to unassigned and prepared path
        folder"""
        new_path, unassigned_path, prepared_path, bid_opfolder = self.get_paths_for_unassigned_prepared(
            bid_info)
        # Raise error if a dir is found in new operator files
        self.func.accept_only_files(new_path)
        # Copy from new to unassigned
        self.func.copy_dirs(new_path, unassigned_path)
        # Copy from unassigned to prepared
        self.func.copy_dirs(unassigned_path, prepared_path)
        # Rename files from prepared folder
        self.add_id_to_prepfiles(bid_info, bid_opfolder)

    def check_file_history(self, file_info):
        """Check if an operator file was added before based on name, size
        and modification date.

        Raises Exception naming the previous batch when a duplicate is found.
        """
        dfgen, conn = self.db.read_table("fileshistory", chunk_size=50000)
        # FIX: close the connection even if an unexpected error escapes the
        # loop (previously only the success and duplicate paths closed it).
        try:
            for df in dfgen:
                same_name = df[df["FileName"] == file_info["FileName"]]
                if same_name.shape[0] == 0:
                    continue
                # BUG FIX: size and date were previously filtered on the whole
                # chunk independently of the name match, so an unrelated row
                # with the same size/date triggered a false duplicate.
                match = same_name[
                    (same_name["FileSize"] == int(file_info["FileSize"]))
                    & (same_name["ModificationDate"]
                       == file_info["ModificationDate"])]
                if match.shape[0] > 0:
                    raise Exception(
                        "File '{}' was added before in batch '{}'".format(
                            match["FileName"].tolist()[0],
                            match["AddedInBatch"].tolist()[0]))
        finally:
            conn.close()

    def prepare_fileshistory_info(self, bid_info, files_info):
        """Get rows to insert in fileshistory and check if was previously
        added"""
        rows = []
        for file_path, info in files_info["FilesInfo"].items():
            file_name = self.func.get_file_name(file_path)
            file_info = {
                "FileName": file_name,
                "AddedInBatch": bid_info["BatchID"],
                "FileSize": info["FileSize"],
                "ModificationDate": info["ModificationDate"]
            }
            self.check_file_history(file_info)
            rows.append(file_info)
        return rows

    def add_batch(self):
        """Add batch to database.

        Builds the followup row and the fileshistory rows, copies the
        operator files, then inserts everything. Returns the new batch id.
        """
        bid = self.func.generate_id(int(self.conf["IDlentgh"]))
        files_info = self.folder.new_opfiles_info()
        files_names, new_files_names = self.process_files_paths(
            files_info["FilesInfo"])
        userdata = self.user.session_info()
        batch_info_followup = {
            "BatchID": bid,
            "Aircraft": files_info["Aircraft"],
            "Operator": files_info["Operator"],
            "OriginalFilesName": files_names,
            "OriginalFilesPath": self.conf["path_to_new_opfiles"],
            "FilesID": new_files_names,
            "AddedDate": self.func.current_date(),
            "Responsible": "UNASSIGNED",
            "Proofreader": "UNASSIGNED",
            "ResponsibleStatus": "",
            "ProofreaderStatus": "",
            "OverallStatus": "UNASSIGNED",
            "ChangesLog": "Batch added by {},".format(userdata["User"])
        }
        # Raises before any copy/insert if a file was already added before.
        batch_info_fileshistory = self.prepare_fileshistory_info(
            batch_info_followup, files_info)
        # Data for db is prepared, now copy the files.
        self.copy_files_to_unassigned_prepared_dirs(batch_info_followup)
        # Files are copied, now insert the data into the database.
        self.db.create_row("followup", batch_info_followup)
        for file_history in batch_info_fileshistory:
            self.db.create_row("fileshistory", file_history)
        return bid

    def followup_reversed(self):
        """Get followup table from db reverse it and return it as dict"""
        df = self.db.read_table("followup")
        df_dict = df.reindex(index=df.index[::-1]).to_dict('list')
        return df_dict

    def followup_for_responsible(self):
        """Get followup table from db reverse it filter it for current user
        and return it as dict"""
        userdata = self.user.session_info()
        df = self.db.select_row("followup", "Responsible", userdata["User"],
                                asdict=False)
        df_dict = df.reindex(index=df.index[::-1]).to_dict('list')
        return df_dict

    def fileshistory_table(self):
        """Get file history table as dict by default"""
        return self.db.get_table("fileshistory")

    def get_batch(self, bid=""):
        """Get batch id if specified.

        NOTE(review): assumes select_row returns one-element columns for an
        existing bid -- an unknown bid would raise IndexError here; confirm
        callers validate the id first.
        """
        bid_data = self.db.select_row("followup", "BatchID", bid)
        data = {}
        for col, val in bid_data.items():
            data[col] = val[0]
        return data

    def bid_options(self, get_followup=False, get_filehistory=False):
        """Batch status options from config.json file"""
        batch_opt = {
            "status_user":
            self.func.listify_string(
                self.conf["batch_status_options_responsible"]),
            "status_proofreader":
            self.func.listify_string(
                self.conf["batch_status_options_proofreader"]),
            "status_overall":
            self.func.listify_string(
                self.conf["batch_status_options_overall"]),
        }
        bid_data = self.func.read_json("batch.json")
        batch_opt.update(bid_data)
        # NOTE(review): context_disable is not defined on the User class
        # shown in this file -- confirm which User implementation is live.
        batch_opt.update(self.user.context_disable())
        batch_opt.update({
            "users": self.user.get_users(),
            "proofreaders": self.user.get_proofreaders()
        })
        if get_followup:
            fup_data = self.followup_reversed()
            batch_opt.update(fup_data)
        if get_filehistory:
            hist_data = self.fileshistory_table()
            batch_opt.update(hist_data)
        return batch_opt

    def data_mail_for_send_mail(self, newdata):
        """Send email between proofreader and responsible.

        Compares newdata against the previous state (batch.json) and mails
        the counterpart about the first changed column of interest.
        """
        session = self.user.session_info()
        olddata = self.func.read_json("batch.json")
        if olddata["Responsible"] != "UNASSIGNED" and olddata[
                "Proofreader"] != "UNASSIGNED":
            user_responsible = self.db.select_row("users", "User",
                                                  olddata["Responsible"])
            user_proofreader = self.db.select_row("users", "User",
                                                  olddata["Proofreader"])
        else:
            user_responsible = self.db.select_row("users", "User",
                                                  newdata["Responsible"])
            user_proofreader = self.db.select_row("users", "User",
                                                  newdata["Proofreader"])
        responsible_mail = user_responsible["Email"][0]
        proofreader_mail = user_proofreader["Email"][0]
        # NOTE(review): verify_user stores only User/Password/Rights in
        # session.json, yet "Email" is read here -- confirm where the session
        # gains its Email key.
        sender_email = session["Email"]
        if self.conf["MAIL_PASSWORD"] == "":
            sender_password = session["Password"]
        else:
            sender_password = self.conf["MAIL_PASSWORD"]
        cols_of_interest = [
            "Responsible", "Proofreader", "ResponsibleStatus",
            "ProofreaderStatus", "ResponsibleComment", "ProofreaderComment"
        ]
        mail_receiver, update_subject, update_message = "", "", ""
        # Notify about the first changed column only (mirrors the original
        # break-on-first-change behavior).
        for col in cols_of_interest:
            if col not in newdata or olddata[col] == newdata[col]:
                continue
            update_subject = "{} {} {} UPDATE".format(olddata["Operator"],
                                                      olddata["Aircraft"],
                                                      olddata["BatchID"])
            if col == "Responsible":
                mail_receiver = responsible_mail
                update_message = "Hi there,\n\n\nYou({}) are now the responsible for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            elif col == "Proofreader":
                # NOTE(review): this notifies the responsible, not the new
                # proofreader -- kept as-is, confirm it is intentional.
                mail_receiver = responsible_mail
                update_message = "Hi there,\n\n\n{} is now the proofreader for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            elif col == "ResponsibleStatus":
                mail_receiver = proofreader_mail
                update_message = "Hi there,\n\n\nResponsible changed status to '{}' for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            elif col == "ProofreaderStatus":
                mail_receiver = responsible_mail
                update_message = "Hi there,\n\n\nProofreader changed status to '{}' for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            elif col == "ResponsibleComment":
                mail_receiver = proofreader_mail
                update_message = "Hi there,\n\n\nResponsible said '{}' for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            elif col == "ProofreaderComment":
                mail_receiver = responsible_mail
                update_message = "Hi there,\n\n\nProofreader said '{}' for batch {}\n\n\n\n\n\n\n\nThis is an automatic message sent by the Followup.".format(
                    newdata[col], olddata["BatchID"])
            break
        # SECURITY FIX: removed the debug prints of the assembled mail data --
        # they wrote the sender's password to stdout/logs.
        if update_subject != "":
            try:
                self.mail.send_mail(sender_email, sender_password,
                                    mail_receiver, update_subject,
                                    update_message)
            except Exception as err:
                errmsg = self.func.write_traceback(err)
                print(errmsg)
                raise Exception(
                    "Email cannot be sent! Please send email to {} and inform him/her of what you did!"
                    .format(mail_receiver))

    def set_default_proofreader(self, data):
        """Update with the default proofreader for the given responsible if
        needed"""
        if "Responsible" in list(data.keys()):
            user_data = self.db.select_row("users", "User",
                                           data["Responsible"])
            data["Proofreader"] = user_data["Proofreader"][0]
            data["ResponsibleStatus"] = ""
            data["ProofreaderStatus"] = ""
            data["OverallStatus"] = "ONGOING"
            data["StartDate"] = self.func.current_date()
            bid_info = self.func.read_json("batch.json")
            data["ChangesLog"] = bid_info[
                "ChangesLog"] + "\nAssigned to {} on {},".format(
                    data["Responsible"], data["StartDate"])
            return data
        else:
            return data

    def clear_start_date(self, data):
        """If Status is STANDBY or UNRECORDABLE then clear start date and
        all the count/date columns, and mirror the status into
        OverallStatus."""
        if "ProofreaderStatus" in list(data.keys()):
            if data["ProofreaderStatus"] in ("UNRECORDABLE", "STANDBY"):
                for col in ("StartDate", "ImportedDateISAIM", "TotalRowsNbr",
                            "MPDTaskRowsNbr", "OperatorRowsNbr",
                            "FindingsRowsNbr", "EstimatedTaskNbr",
                            "EstimatedFdgNbr"):
                    data[col] = ""
                data["OverallStatus"] = data["ProofreaderStatus"]
        return data

    def dcs_info(self, dcspath):
        """Get xml info from the extracted xml from FE.

        Returns the row-count columns summed from the <sum> elements.
        """
        # FIX: the file handle was opened and never closed.
        with open(dcspath) as fh:
            tree = etree.parse(fh)
        totalRows = sum(int(s.text) for s in tree.xpath('//sum'))
        mpdtask = sum(int(s.text) for s in tree.xpath('//mpdTask//sum'))
        optask = sum(int(s.text) for s in tree.xpath('//opeTask//sum'))
        findings = sum(
            int(s.text)
            for s in tree.xpath("//finding[@activated='true']//sum"))
        info_dcs = {
            "TotalRowsNbr": totalRows,
            "MPDTaskRowsNbr": mpdtask,
            "OperatorRowsNbr": optask,
            "FindingsRowsNbr": findings
        }
        return info_dcs

    def update_ifstatus_finished(self, data):
        """If Status is FINISHED update ImportedDateISAIM column"""
        if data.get("ProofreaderStatus") == "FINISHED":
            data["ImportedDateISAIM"] = self.func.current_date()
            data["OverallStatus"] = "FINISHED"
        return data

    def update_ifstatus_toimport(self, data):
        """If ProofreaderStatus is TO BE IMPORTED then look for dcs info and
        update data"""
        if data.get("ProofreaderStatus") != "TO BE IMPORTED":
            return data
        dcs_files = os.listdir(self.conf["path_to_dcs_info"])
        dcs_file = [f for f in dcs_files if data["BatchID"] in f]
        # Exactly one matching DCS file is required (zero or several both
        # raise, as before).
        if len(dcs_file) != 1:
            raise Exception("DCS file for batch '{}' not found!".format(
                data["BatchID"]))
        dcs_file_path = os.path.join(self.conf["path_to_dcs_info"],
                                     dcs_file[0])
        dcs_result = self.dcs_info(dcs_file_path)
        data.update(dcs_result)
        return data

    def alternate_status(self, data):
        """Set to empty the status of the proof or responsible when someone
        changed the status.

        Order matters: the first branch may set ResponsibleStatus to "",
        which the second branch then deliberately ignores via the != ""
        guard.
        """
        if "ProofreaderStatus" in list(data.keys()):
            data["ResponsibleStatus"] = ""
            data["OverallStatus"] = data["ProofreaderStatus"]
        if "ResponsibleStatus" in list(data.keys()):
            if data["ResponsibleStatus"] != "":
                data["ProofreaderStatus"] = ""
                data["OverallStatus"] = data["ResponsibleStatus"]
        return data

    def process_status_batch_form(self, data):
        """Process dict from form received from update_status page.

        Returns (indb, mailsent).
        """
        data = self.func.remove_null(data)
        # Presumably only the BatchID survives remove_null when the form was
        # otherwise empty -- nothing to update then.
        if len(data) == 1:
            raise Exception("Not enough data to process!")
        data = self.set_default_proofreader(data)
        data = self.clear_start_date(data)
        data = self.update_ifstatus_toimport(data)
        data = self.update_ifstatus_finished(data)
        data = self.alternate_status(data)
        # Move if needed the folders
        self.folder.move_prepfile_in_assigned(data)  # assign
        self.folder.move_assigned_in_tbchecked(data)  # to be checked
        self.folder.move_tobchecked_in_tbimported(data)  # to be imported
        self.folder.move_tbimported_in_finished(data)  # finished
        self.folder.move_tbchecked_in_assigned(data)  # rework
        self.folder.move_tobchecked_in_standby(data)  # standby
        self.folder.move_tobchecked_in_unrecordable(data)  # unrecordable
        # Update database with data
        indb = self.db.update_row("followup", data, "BatchID")
        # Mail failures are non-fatal; report the outcome to the caller.
        try:
            self.data_mail_for_send_mail(data)
            mailsent = True
        except Exception:
            mailsent = False
        return indb, mailsent

    def delete_batch_or_file(self, data):
        """Delete batch along with the files attached to it or a file from
        fileshistory"""
        if data["BatchID"] == "" and data["FileToDelete"] == "":
            fup = self.db.delete_row("followup",
                                     {"BatchID": data["DefaultBatchID"]})
            hist = self.db.delete_row(
                "fileshistory", {"AddedInBatch": data["DefaultBatchID"]})
            if fup and hist:
                return True
            if not fup:
                raise Exception("Changes not saved in followup!")
            if not hist:
                raise Exception("Changes not saved in fileshistory!")
        if len(data["BatchID"]) > 0:
            fup = self.db.delete_row("followup", {"BatchID": data["BatchID"]})
            hist = self.db.delete_row("fileshistory",
                                      {"AddedInBatch": data["BatchID"]})
            if fup and hist:
                return True
            if not fup:
                raise Exception("Changes not saved in followup!")
            if not hist:
                raise Exception("Changes not saved in fileshistory!")
        if len(data["FileToDelete"]) > 0:
            valli = data["FileToDelete"].split(
                "//")  #Ex: ym3SGI//EIS-SM2K-SM180731.XML//189209
            colrow_dict = {
                "AddedInBatch": valli[0],
                "FileName": valli[1],
                "FileSize": valli[2]
            }
            if not self.db.delete_row("fileshistory", colrow_dict):
                raise Exception("Changes not saved in fileshistory!")
class Folders:
    """Creation/manipulation of folders for this app"""

    cwd = os.getcwd()
    func = Commun()
    conf = func.config_info()

    def create_needed_dirs(self):
        """Create the working directories if they don't exist"""
        root_path = os.path.join(self.cwd, 'DC BATCHES IN WORK')
        sub_folders = ['0 NEW', '1 UNASSIGNED', '2 PREPARED FILES',
                       '3 ASSIGNED', '4 TO BE CHECKED', '5 TO BE IMPORTED',
                       '6 FINISHED', '7 IN STANDBY', '8 UNRECORDABLE']
        extraDirs = [root_path, 'excels exported', 'excels to be imported',
                     'FUDB', 'bin']
        # Best-effort creation: an already-existing dir is fine. OSError is
        # narrower than the previous bare `except`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        for extradir in extraDirs:
            try:
                os.mkdir(extradir)
            except OSError:
                pass
        for adir in sub_folders:
            subdir = os.path.join(root_path, adir)
            try:
                os.mkdir(subdir)
            except OSError:
                pass

    def defaultconfig(self):
        """Create default config.json file"""
        # (fixed the `""""` quadruple-quote docstring typo above)
        config_data = {
            "path_to_database": "FUDB/FOLLOWUP.DB",
            "path_to_frontend": "FUDB/",
            "path_to_dcs_info": "FUDB/",
            "path_to_bin": "bin/",
            "path_to_excels_exported_from_database": "excels exported/",
            "path_to_excels_to_be_imported_in_database": "excels to be imported/",
            "path_to_new_opfiles": "DC BATCHES IN WORK/0 NEW/",
            "path_to_batches_unassigned": "DC BATCHES IN WORK/1 UNASSIGNED/",
            "path_to_batches_prepfiles": "DC BATCHES IN WORK/2 PREPARED FILES/",
            "path_to_batches_assigned": "DC BATCHES IN WORK/3 ASSIGNED/",
            "path_to_batches_tobechecked": "DC BATCHES IN WORK/4 TO BE CHECKED/",
            "path_to_batches_tbimported": "DC BATCHES IN WORK/5 TO BE IMPORTED/",
            "path_to_batches_finished": "DC BATCHES IN WORK/6 FINISHED/",
            "path_to_batches_instandby": "DC BATCHES IN WORK/7 IN STANDBY/",
            "path_to_batches_unrecordable": "DC BATCHES IN WORK/8 UNRECORDABLE/",
            "batch_status_options_responsible": "PREP. OP FILE, IMPORTATION & SPLIT FILE, RELIABILITY & DATA UPGRADE, CHECK OP FILE, CHECK SPLIT FILE, CHECK FRONT END, **TO BE CHECKED",
            "batch_status_options_proofreader": "OP FILE OK, SPLIT FILE OK, FRONT END OK, **TO BE IMPORTED, **FINISHED, **REWORK, **STANDBY, **UNRECORDABLE",
            "batch_status_options_overall": "ONGOING, STANDBY, FINISHED, UNRECORDABLE",
            "aircraft": "A300, A300-600, A310, A320, A330, A340, A350, A380",
            "split_batch_factor": "2, 3, 4, 5, 6, 7, 8, 9",
            "IDlentgh": "6",
            "port": "5000"
        }
        # Never overwrite an existing config.json.
        if not os.path.isfile(os.path.join(self.cwd, "config.json")):
            self.func.write_json(config_data, self.cwd, fname="config.json")

    def make_default_dirs(self):
        """Create default dirs and config.json if needed"""
        self.defaultconfig()
        self.create_needed_dirs()

    def db_exists(self):
        """Check if db exists in the path"""
        return self.func.file_exists(self.conf["path_to_database"])

    def operator_aircraft_info(self, apath):
        """Get operator and aircraft info from the path given.

        NOTE(review): assumes the folder is named "<OPERATOR> <AIRCRAFT>"
        with no space inside the operator name -- confirm the naming
        convention.
        """
        opfolder_path = apath.split("0 NEW")[-1]
        opfolder = opfolder_path.replace("/", "")
        opfolder = opfolder.replace("\\", "")
        opfolder = opfolder.split(" ")
        operator = opfolder[0].strip()
        aircraft = opfolder[1].strip()
        return operator, aircraft

    def get_files_info(self, files_paths):
        """Get files size and modification date in a dict"""
        files_info = {}
        for file_path in files_paths:
            file_info = self.func.get_file_size_mtime(file_path)
            files_info[file_path] = file_info
        return files_info

    def new_opfiles_info(self):
        """Get info from new opfiles folders, files"""
        new_folders = self.func.get_folders(self.conf["path_to_new_opfiles"])
        # BUG FIX: the previous condition `> 1 and != 0` let an empty list
        # fall through to new_folders[0] and crash with IndexError; exactly
        # one folder is required.
        if len(new_folders) != 1:
            raise Exception("Only one folder must be in '0 NEW' folder!")
        folder = new_folders[0]
        op, ac = self.operator_aircraft_info(folder)
        files = self.func.get_files(folder)
        files_info = self.get_files_info(files)
        opfiles_info = {
            "Path": folder,
            "Operator": op,
            "Aircraft": ac,
            "FilesInfo": files_info
        }
        return opfiles_info