def ReadColwithColumnName(self, TableName, ColumnName, DBName=None):
    """Return all values of ColumnName from TableName, ordered by the table's ID column.

    Args:
        TableName: table to read; its ID column is assumed to be named ID_<TableName>.
        ColumnName: column whose values are returned.
        DBName: optional database name forwarded to ConnectDB.

    Returns:
        List of column values sorted by row ID, or None if an SQLError occurred
        (the error is logged and swallowed, matching the other DB helpers here).
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        # dictionary=True makes each fetched row a dict keyed by column name.
        cursor = DBConnection.cursor(dictionary=True)
        # NOTE(review): table/column names are interpolated straight into the SQL
        # string -- never expose this to untrusted input (injection risk).
        cursor.execute("SELECT " + str(ColumnName) + ", " + "ID_" + str(TableName) + " FROM " + str(TableName))
        values = []
        ids = []
        for row in cursor:
            values.append(row[str(ColumnName)])
            ids.append(row["ID_" + str(TableName)])
        # Sort the values by their row ID (same effect as the original nested
        # sortList helper, without the extra function and builtin shadowing).
        return [value for _, value in sorted(zip(ids, values))]
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB/cursor() may have failed before these names were bound;
        # the original raised UnboundLocalError here, masking the real error.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def CheckInputDict(self, inputDict):
    """Filter the planned-input dictionary down to entries inside all configured bounds.

    Args:
        inputDict: dict with keys 'Df', 'Kf', 'RI_Real', 'RI_Imag', 'Np', 'MP',
            'dp', 'wL', 'chance' mapping to parallel arrays.

    Returns:
        dict with the same array keys (minus 'MP') restricted to the indexes that
        pass every bound check, plus 'chanceSum' (sum of the surviving chances).
    """
    try:
        outputDict = {}
        # Indexes where each parameter individually falls inside its bound.
        arrDf_Index = FN.getGoodIndexes(Array=inputDict['Df'], Bound=self._arrDf_Bound)
        arrKf_Index = FN.getGoodIndexes(Array=inputDict['Kf'], Bound=self._arrKf_Bound)
        arrRI_Real_Index = FN.getGoodIndexes(Array=inputDict['RI_Real'], Bound=self._arrRI_Real_Bound)
        arrRI_Imag_Index = FN.getGoodIndexes(Array=inputDict['RI_Imag'], Bound=self._arrRI_Imag_Bound)
        arrNp_Index = FN.getGoodIndexes(Array=inputDict['Np'], Bound=self._arrNp_Bound)
        arrMonomerParameter_Index = FN.getGoodIndexes(Array=inputDict['MP'], Bound=self._arrMonomerParameter_Bound)
        # Keep only indexes valid for every parameter simultaneously.
        arrPossible_Indexes = FN.findCommonIndex(arrDf_Index, arrKf_Index, arrRI_Real_Index, arrRI_Imag_Index, arrNp_Index, arrMonomerParameter_Index)
        for key in ('Df', 'Kf', 'RI_Real', 'RI_Imag', 'Np', 'dp', 'wL', 'chance'):
            outputDict[key] = FN.getPossibleArray(Array=inputDict[key], Indexes=arrPossible_Indexes)
        # Built-in sum; the original shadowed the `sum` builtin with a manual loop.
        outputDict['chanceSum'] = sum(outputDict['chance'])
        # TODO: add more derived keys if needed.
        logging.info("Boundary issued.")
        return outputDict
    except Exception as e:
        logging.exception(e)
        raise
def ReadAllRowsfromTable(self, TableName, DBName=None):
    """Fetch every row of TableName.

    Returns:
        (columnNames, rows): list of column names plus the raw fetchall() rows,
        or None if an SQLError occurred (logged and swallowed).
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        # NOTE(review): table name interpolated into SQL -- injection risk if
        # TableName ever comes from untrusted input.
        cursor.execute("SELECT * FROM " + str(TableName))
        Row = cursor.fetchall()
        # cursor.description yields one tuple per column; item 0 is its name.
        columnName = [cd[0] for cd in cursor.description]
        return columnName, Row
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound
        # (the original could raise UnboundLocalError here, masking the error).
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def ReadRowfromTablewithID(self, TableName, ID=1, DBName=None):
    """Fetch one row of TableName by its ID and return it as a column->value dict.

    Args:
        TableName: table to read; the ID column is assumed named ID_<TableName>.
        ID: value of the ID column to match (parameterized, not interpolated).
        DBName: optional database name forwarded to ConnectDB.

    Returns:
        dict mapping column name to value for the first matching row, or None
        on SQLError. Raises IndexError (logged, re-raised) if no row matches.
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        cursor.execute(
            "SELECT * FROM " + str(TableName) + " WHERE " + "ID_" + str(TableName) + "=%s", (ID, ))
        Row = cursor.fetchall()
        tableDict = {}
        # Pair each column name (description item 0) with the first row's values.
        for i, cd in enumerate(cursor.description):
            tableDict[cd[0]] = Row[0][i]
        return tableDict
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def ReportVariable(name, value):
    """Report a single name/value pair to both the log and stdout."""
    try:
        message = f'{name}: {value}'
        logging.info(message)
        print(message)
    except Exception as exc:
        logging.exception(exc)
        raise
def ReportDictionary(dictionary):
    """Report every key/value pair of a dictionary to both the log and stdout."""
    try:
        for key, value in dictionary.items():
            line = f'{key}: {value}'
            logging.info(line)
            print(line)
    except Exception as exc:
        logging.exception(exc)
        raise
def DropTable(self, TableName, DBName=None):
    """Drop TableName if it exists and commit.

    NOTE(review): the table name is interpolated into the SQL string -- do not
    expose this to untrusted input (injection risk).
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        cursor.execute(f"DROP TABLE IF EXISTS {TableName}")
        DBConnection.commit()
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def DeleteAllRowsfromTable(self, TableName, DBName=None):
    """Delete every row of TableName (the table itself is kept) and commit.

    NOTE(review): the table name is interpolated into the SQL string -- do not
    expose this to untrusted input (injection risk).
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        cursor.execute("DELETE FROM " + str(TableName))
        DBConnection.commit()
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def InsertArrayIntoTable(self, TableName, NameArray, Array, giveID=False, DBName=None):
    """Bulk-insert rows into TableName.

    Args:
        NameArray: iterable of column names, in the same order as each row tuple.
        Array: sequence of row tuples handed to cursor.executemany().
        giveID: when True, return cursor.lastrowid after the insert.
        DBName: optional database name forwarded to ConnectDB.

    Returns:
        lastrowid when giveID is True, otherwise None. On SQLError the error is
        logged, the transaction rolled back, and None returned.
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        # Build " (c1, c2, ...) " and " (%s, %s, ...)" with str.join rather than
        # the original append-then-slice loops. Values go through %s placeholders;
        # table/column NAMES are still interpolated (injection risk if untrusted).
        names = [str(key) for key in NameArray]
        columns = " (" + ", ".join(names) + ") "
        placeholders = " (" + ", ".join(["%s"] * len(names)) + ")"
        query = ("INSERT INTO " + str(TableName) + columns + "VALUES" + placeholders)
        cursor.executemany(query, Array)
        # NOTE(review): after executemany, MySQL's lastrowid is the id of the
        # FIRST inserted row of the batch -- confirm callers expect that.
        ID = cursor.lastrowid
        DBConnection.commit()
        if giveID:
            return ID
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def DropDB(self, DBName=None):
    """Drop the database resolved by CheckDBName, if it exists, and commit.

    Don't expose this publicly: the database name is interpolated into the SQL
    string (SQL injection danger), as the original comment warned.
    """
    serverConnection = None
    cursor = None
    try:
        DBnameToConnect = self.CheckDBName(DBName=DBName)
        serverConnection = self.ConnectServer()
        cursor = serverConnection.cursor()
        cursor.execute(f"DROP DATABASE IF EXISTS {DBnameToConnect}")
        serverConnection.commit()
    except SQLError as error:
        if serverConnection is not None:
            serverConnection.rollback()
        logging.exception(f"Connection to the server failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: CheckDBName/ConnectServer may have failed before these names
        # were bound (the original raised UnboundLocalError here).
        if serverConnection is not None and serverConnection.is_connected():
            if cursor is not None:
                cursor.close()
            serverConnection.close()
            logging.info("MySQL connection is closed")
def ReadRowwithColumnNameandValue(self, TableName, ColumnName, Value, isRowCount=True, DBName=None):
    """Fetch the first row of TableName where ColumnName equals Value.

    Args:
        TableName, ColumnName: interpolated into the SQL (injection risk if
            untrusted); Value goes through a %s placeholder.
        isRowCount: when True, also return the number of matching rows.

    Returns:
        (tableDict, rowCount) when isRowCount is True, else tableDict alone.
        tableDict is empty when no row matched. None on SQLError (logged).
    """
    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        cursor.execute(
            "SELECT * FROM " + str(TableName) + " WHERE " + str(ColumnName) + "=%s", (Value, ))
        Rows = cursor.fetchall()
        RowCount = cursor.rowcount
        tableDict = {}
        if RowCount:
            # Pair each column name with the FIRST matching row's values.
            for i, cd in enumerate(cursor.description):
                tableDict[cd[0]] = Rows[0][i]
        if isRowCount:
            return tableDict, RowCount
        else:
            return tableDict
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound.
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")
def __init__(self, DBInfo):
    """Load the raw T-matrix table ('Raw_V1') from the database and cache views of it.

    Caches, as private attributes: the coordinate column names (columns 1-5),
    the column-7 and column-8 coefficient columns (SCT/ABS caches), the
    coordinate data (columns 1-5), and the unique entries of that data.
    """
    try:
        logging.info("T-Matrix method started.")
        mainTableName = 'Raw_V1'
        database = MySQLManagement(DBInfo)
        headerRow, fullRows = database.ReadAllRowsfromTable(TableName=mainTableName)
        # Header names for the five coordinate columns.
        self.__TMatrix_DB_Main_Column_Name = GF.SelectColumnsList(
            columnIndex=[1, 2, 3, 4, 5], list=headerRow, dimension=1)
        # Column 7 feeds the scattering-coefficient cache, column 8 the absorption one.
        self.__TMatrix_DB_Main_SCT_Coeff_Full = GF.SelectColumnsList(columnIndex=[7], list=fullRows)
        self.__TMatrix_DB_Main_ABS_Coeff_Full = GF.SelectColumnsList(columnIndex=[8], list=fullRows)
        # Coordinate data itself, plus its unique entries for interpolation lookups.
        self.__TMatrix_DB_Main_Data_Full = GF.SelectColumnsList(
            columnIndex=[1, 2, 3, 4, 5], list=fullRows)
        self.__TMatrix_DB_Main_Unique_Values = FN.uniqueEntry(self.__TMatrix_DB_Main_Data_Full)
    except Exception as exc:
        logging.exception(exc)
        raise
def RDGCalc(self, RDG_Planned_Input):
    """Run the RDG-FA calculation for every planned input row.

    Args:
        RDG_Planned_Input: sequence of rows whose first seven entries are
            (Df, kf, R_RI, I_RI, wavelength_nm, dp, Np), in that order.

    Returns:
        (RDG_Planned_Input, RDG_Planned_Output) where each output entry is
        [absorption, scattering] as returned by self.FSAC_RDG for that row.
    """
    try:
        logging.info("RDG calculation started.")
        RDG_Planned_Output = []
        # Iterate rows directly instead of the original range(len(...)) indexing.
        for row in RDG_Planned_Input:
            rDG_ABS, rDG_SCA = self.FSAC_RDG(
                Df=row[0],
                kf=row[1],
                R_RI=row[2],
                I_RL=row[3],
                WaveL_nm=row[4],
                dp=row[5],
                Np=row[6])
            RDG_Planned_Output.append([rDG_ABS, rDG_SCA])
        logging.info("RDG calculation finished.")
        return RDG_Planned_Input, RDG_Planned_Output
    except Exception as e:
        logging.exception(e)
        raise
def Calc(self):
    """Run the planned T-matrix and/or RDG calculations for every dm in the plan.

    Returns:
        (dictOutputT1, dictOutputR1): per-dm output dicts; a dict stays empty
        when the corresponding engine is disabled via its Active flag.
    """
    try:
        # Instantiate only the engines that are enabled.
        if self.__TmatrixActive:
            tmEngine = TMatrixCalculation(DBInfo=self.__info)
        if self.__RDGActive:
            rdgEngine = RDGCalculation()
        dictInputT1 = {}
        dictOutputT1 = {}
        dictInputR1 = {}
        dictOutputR1 = {}
        for dm in self.__calcDict:
            plan = self.__calcDict[dm]
            if self.__TmatrixActive:
                dictInputT1[dm], dictOutputT1[dm] = tmEngine.TMatrixCalc(TMatrix_Planned_Input=plan)
            if self.__RDGActive:
                dictInputR1[dm], dictOutputR1[dm] = rdgEngine.RDGCalc(RDG_Planned_Input=plan)
            logging.info(f"Calculation for dm:{dm} was finished.")
        return dictOutputT1, dictOutputR1
    except Exception as exc:
        logging.exception(exc)
        raise
def TMatrixCalc(self, TMatrix_Planned_Input, thread=3):
    """Interpolate T-matrix coefficients for the planned inputs using a thread pool.

    The input list is chopped into `thread` chunks, each chunk is run through
    self.TMatrixInterpolator (pre-bound to the cached DB arrays via partial),
    and the per-chunk results are flattened back into two parallel lists.

    Args:
        TMatrix_Planned_Input: list of planned input rows.
        thread: number of chunks the input is divided into (also the number of
            parallel map tasks).

    Returns:
        (inputs, outputs): flattened input rows and, per row, a two-element list
        of the interpolator's first and second result arrays.
    """
    try:
        logging.info("T-Matrix calculation started.")
        division = thread
        TMatrix_Planned_Input_Chopped = GF.DivideArray(numberOfDivisions=division, List=TMatrix_Planned_Input)
        # Bind the cached DB arrays once so executor.map only has to pass the chunk.
        func = partial(self.TMatrixInterpolator, self.__TMatrix_DB_Main_Data_Full,
                       self.__TMatrix_DB_Main_Unique_Values,
                       self.__TMatrix_DB_Main_ABS_Coeff_Full,
                       self.__TMatrix_DB_Main_SCT_Coeff_Full)
        arrInTMatrix = []
        arrOutTMatrix = []
        with ThreadPoolExecutor() as executor:
            # zip keeps each chunk paired with its own result; map preserves order.
            for inputs, outputs in zip(TMatrix_Planned_Input_Chopped,
                                       executor.map(func, TMatrix_Planned_Input_Chopped)):
                arrInTMatrix.append(inputs)
                arrOutTMatrix.append(outputs)
        ####################################
        # Flatten the chunked results back into flat, order-preserving lists.
        self.__TMatrix_Interpolation_Input = []
        self.__TMatrix_Interpolation_Output = []
        for i in range(division):
            for j in range(len(arrInTMatrix[i])):
                self.__TMatrix_Interpolation_Input.append(arrInTMatrix[i][j])
            # Each chunk result appears to hold two parallel arrays (indices 0 and 1)
            # that are re-paired per row here -- presumably ABS and SCT; TODO confirm
            # against TMatrixInterpolator's return shape.
            for j in range(len(arrOutTMatrix[i][0])):
                arrT = []
                arrT.append(arrOutTMatrix[i][0][j])
                arrT.append(arrOutTMatrix[i][1][j])
                self.__TMatrix_Interpolation_Output.append(arrT)
        logging.info("T-Matrix calculation finished.")
        return self.__TMatrix_Interpolation_Input, self.__TMatrix_Interpolation_Output
    except Exception as e:
        logging.exception(e)
        raise
def ShowDBs(self):
    """Print the name of every database visible on the connected server."""
    serverConnection = None
    cursor = None
    try:
        serverConnection = self.ConnectServer()
        cursor = serverConnection.cursor()
        cursor.execute("SHOW DATABASES")
        print(
            "======================== Databases ========================")
        for x in cursor:
            print(x[0])
        print("================================================")
    except SQLError as error:
        if serverConnection is not None:
            serverConnection.rollback()
        logging.exception(f"Connection to the server failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectServer may have failed before these names were bound
        # (the original raised UnboundLocalError here, masking the real error).
        if serverConnection is not None and serverConnection.is_connected():
            if cursor is not None:
                cursor.close()
            serverConnection.close()
            logging.info("MySQL connection is closed")
# Graphing figures to compare experiment data with RDG-FA and T-matrix using simulated data Version = 0.2 # Nov 2019 import ConfigReaderModule as CPM from ConfigReaderModule import logging import RevExperimentGraphModule as REGM if __name__ == "__main__": CPM.ReadLogConfig() logging.info("Graph App Started.") FF_Info = CPM.ReadConfigToDict(sectionName="FilesFoldersInfo") # AGG_Info = CPM.ReadConfigToDict(sectionName="AggregateDetails", convertParseTo='float', hasComment=True) logging.info("config retrieved.") ############################################## G1 = REGM.GraphTools(folderInfo=FF_Info) G1.RExperiment_RDG_TMatrixComparisonGraphs() ################################################################################################################ logging.info("Graph App finished.") A = 51
import ConfigReaderModule as CP
import DBMethods as DB
from ConfigReaderModule import logging
import GeneralFunctions as GF

if __name__ == "__main__":
    # Initialize logging and read DB/file-system configuration sections.
    CP.ReadLogConfig()
    DB_Info = CP.ReadConfigToDict(sectionName="DatabaseInfo")
    FF_Info = CP.ReadConfigToDict(sectionName="FilesFoldersInfo")
    logging.info("Import Started!")
    appDirectory = GF.GetRootDirectory()
    ##############################################################################
    # Create the database (dump/load/summary/reinit helpers kept for manual use).
    DB.createDB(INFO=DB_Info)
    # DB.dumpDB(INFO=DB_Info, FileAddress=GF.GetAddressTo(appDirectory, FF_Info['FOLDER_NAME_DATABASE'], FileName=GF.getDateandTimeUTC(), Extension="sql.gz"))
    # DB.loadDB(INFO=DB_Info, FileAddress=GF.FindLatestFile(GF.GetFilesNameAddressInFolder(GF.GetAddressTo(appDirectory, FF_Info['FOLDER_NAME_DATABASE']), Extension="sql.gz")))
    # DB.showAllTablesInDBSummary(DB_Info)
    # DB.reinitializeDB(DB_Info)
    ##############################################################################
    # Paths to the three BC-database source files (1.8, 2.3, 2.8 variants).
    Address1p8 = GF.GetAddressTo(main=appDirectory, folderName=FF_Info['FOLDER_NAME_BC_DATABASE'], fileName=FF_Info['FILE_NAME_BC_DATABASE_1p8'], extension="DAT")
    Address2p3 = GF.GetAddressTo(main=appDirectory, folderName=FF_Info['FOLDER_NAME_BC_DATABASE'], fileName=FF_Info['FILE_NAME_BC_DATABASE_2p3'], extension="DAT")
    Address2p8 = GF.GetAddressTo(main=appDirectory, folderName=FF_Info['FOLDER_NAME_BC_DATABASE'], fileName=FF_Info['FILE_NAME_BC_DATABASE_2p8'], extension="DAT")
def CreateTable(self, tableName, dictInputHeader, withHash=True, HashType="SHA1", withUnique=True, dictOutputHeader=None, outputTableNameExt="Out", withCreated_UpdatedAt=True, withShadowTable=True, shadowTableNameExt="Shadow", DBEngine="InnoDB", DBName=None):
    """Create up to three related tables from column-spec dicts.

    - <tableName>: built from dictInputHeader; hash/unique/timestamps per flags.
    - <tableName>_<outputTableNameExt>: built from dictOutputHeader (if given),
      with an MLink column referencing the input table; no hash/unique.
    - <tableName>_<shadowTableNameExt>: shadow copy of the input schema with
      hash+unique always on and no created/updated timestamps.

    Each column spec maps column name -> {'dataType', 'notNull', 'inHash',
    'inUnique'}. Names are interpolated into DDL strings -- do not feed
    untrusted input. The original code repeated the ~40-line query builder
    three times; it is factored into one parameterized helper below.
    """

    def _buildCreateQuery(columnSpec, fullTableName, idColumn, useHash, useUnique, useCreatedUpdated, usePrimary, useForeign):
        # Assemble one CREATE TABLE IF NOT EXISTS statement from a column spec.
        colSQL = ""
        hashCols = ""
        uniqueCols = ""
        for key, item in columnSpec.items():
            colSQL += f"{key} {item['dataType']}"
            if item['notNull'] == 1:
                colSQL += " NOT NULL , "
            elif item['notNull'] == 0:
                colSQL += " , "
            if useHash and item['inHash'] == 1:
                hashCols += f"{key} , "
            if useUnique and item['inUnique'] == 1:
                uniqueCols += f"{key} , "
        body = colSQL
        if useHash:
            # Generated column hashing the concatenation of the flagged columns.
            body += f"Hash varchar(255) AS ({HashType} (CONCAT({hashCols[:-2]}))),"
        if useCreatedUpdated:
            body += "created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, "
            body += "updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, "
        if useUnique:
            if useHash:
                uniqueCols += "Hash, "
            body += f"UNIQUE({uniqueCols[:-2]}), "
        if usePrimary:
            body += f"PRIMARY KEY ({idColumn}), "
        if useForeign:
            # Link column pointing back at the main input table's ID.
            body += f"MLink INT NOT NULL REFERENCES {tableName}(ID_{tableName}), "
        return f"CREATE TABLE IF NOT EXISTS {fullTableName} ( {idColumn} INT NOT NULL AUTO_INCREMENT , {body[:-2]}) ENGINE={DBEngine}"

    DBConnection = None
    cursor = None
    try:
        DBConnection = self.ConnectDB(DBName=DBName)
        cursor = DBConnection.cursor()
        if dictInputHeader:
            # Main input table: hash/unique/timestamps controlled by caller flags.
            cursor.execute(_buildCreateQuery(dictInputHeader, tableName, f"ID_{tableName}",
                                             withHash, withUnique, withCreated_UpdatedAt, True, False))
            DBConnection.commit()
        if dictOutputHeader:
            # Output table: no hash/unique, foreign link back to the input table.
            outName = f"{tableName}_{outputTableNameExt}"
            cursor.execute(_buildCreateQuery(dictOutputHeader, outName, f"ID_{outName}",
                                             False, False, withCreated_UpdatedAt, True, True))
            DBConnection.commit()
        if withShadowTable:
            # Shadow table: same columns as input, hash+unique forced on, no timestamps.
            shadowName = f"{tableName}_{shadowTableNameExt}"
            cursor.execute(_buildCreateQuery(dictInputHeader, shadowName, f"ID_{shadowName}",
                                             True, True, False, True, False))
            DBConnection.commit()
    except SQLError as error:
        if DBConnection is not None:
            DBConnection.rollback()
        logging.exception(f"Connection to the database failed: {error}")
    except Exception as e:
        logging.exception(e)
        raise
    finally:
        # Guard: ConnectDB may have failed before DBConnection/cursor were bound
        # (the original raised UnboundLocalError here, masking the real error).
        if DBConnection is not None and DBConnection.is_connected():
            if cursor is not None:
                cursor.close()
            DBConnection.close()
            logging.info("MySQL connection is closed")