def tokenizeQuery(self, psQuery=''):
    """Tokenize one SQL statement with the pyparsing grammar in self.sStmt.

    psQuery: raw SQL text.
    Returns the runTests() result tuple with the leading success flag
    stripped, or an empty tuple when parsing fails (DB001 is reported).
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    sBase = HDR.OS.path.basename(__file__)
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (sBase, ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (sBase, psQuery))
    tTokens = self.sStmt.runTests(psQuery, parseAll=True, fullDump=True,
                                  printResults=False)
    if tTokens[0]:
        # Element 0 is the overall success flag; drop it before returning.
        tTokens = tTokens[1:]
        HDR.goDebug.write(psMsg='[%s] - [%s] Tokenized Statement [%s]'
                          % (sBase, ROUTINE, str(tTokens)))
    else:
        HDR.printException('DB001', 'Error in SQL Syntax.' + str(tTokens[1:]))
        tTokens = ()
    HDR.goDebug.write(psMsg='[%s] - Returning from [%s]' % (sBase, ROUTINE))
    return tTokens
def processQuery(self, pdQuery=None):
    """Create the directory backing a new table (CREATE TABLE handler).

    pdQuery: parsed query dict; reads pdQuery['TABLE'].
    Returns True when the table directory was created, False otherwise
    (DB005 is reported when the table already exists).
    Raises Exception when the directory cannot be created.
    """
    # None default instead of a shared mutable {} default.
    pdQuery = {} if pdQuery is None else pdQuery
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]'
                      % (HDR.OS.path.basename(__file__), ROUTINE))
    iFlag = False
    # Tables live as directories under the current (database) directory.
    sTablePath = HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'])
    if HDR.OS.path.exists(sTablePath):
        HDR.printException(
            'DB005',
            'Cannot create %s. Already Exists. Drop it First before recreation.'
            % (pdQuery['TABLE']))
    else:
        try:
            HDR.OS.makedirs(sTablePath)
        except Exception as e:
            # Chain the original OS error so the root cause is preserved.
            raise Exception('Cannot Create Table. Critical!!!. Aborting') from e
        else:
            print('Table [%s] Created' % (pdQuery['TABLE']))
            iFlag = True
    return iFlag
def createDatabase(self, plDBNames=None):
    """Create a data directory under <install>/data for each database name.

    plDBNames: list of database names to create.
    Names whose directory already exists are skipped with DB004; a failed
    makedirs raises Exception (chained to the OS error). Returns None.
    """
    # None default instead of a shared mutable [] default.
    plDBNames = [] if plDBNames is None else plDBNames
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]'
                      % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]'
                      % (HDR.OS.path.basename(__file__), str(plDBNames)))
    # Iterate directly instead of a manual while/index loop; the previous
    # outer try/except re-raised as Exception(e), losing the traceback.
    for sDBName in plDBNames:
        sDBPath = HDR.OS.path.join(HDR.gsInstallPath, 'data', sDBName)
        if HDR.OS.path.exists(sDBPath):
            HDR.printException(
                'DB004',
                'Cannot create %s. Already Exists. Drop it First before recreation.'
                % (sDBName))
        else:
            try:
                HDR.OS.makedirs(sDBPath)
            except Exception as e:
                raise Exception('Cannot Create Database. Critical!!!. Aborting') from e
            else:
                print('Database [%s] Created' % (sDBName))
    return
def do_SHOW(self, psQuery):
    """Dispatch a SHOW statement to the DATABASE or TABLE handler.

    psQuery: remainder of the statement after the SHOW keyword.
    SHOW TABLE requires a database to be in use (self.bUSEFlag);
    anything else reports DB001. Returns None.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]'
                      % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]'
                      % (HDR.OS.path.basename(__file__), psQuery))
    # Fix: log the same 8-char prefix that is compared below (was psQuery[:9]).
    HDR.goDebug.write(psMsg='[%s] - psQuery [:8] [%s]'
                      % (HDR.OS.path.basename(__file__), psQuery[:8]))
    if psQuery[:8] == 'DATABASE':
        self.proc_SHOWDB(psQuery)
    elif psQuery[:5] == 'TABLE':
        if self.bUSEFlag:
            # SHOWTBL_SQL is a sibling module; re-prepend the SHOW keyword.
            self.SHOWTBL_SQL = SHOWTBL_SQL.startProcedure()
            self.SHOWTBL_SQL.executeQuery('SHOW ' + psQuery)
        else:
            print('Create and/or Select Database First to use.')
    else:
        HDR.printException('DB001', 'Error in SQL Syntax.')
    HDR.goDebug.write(psMsg='[%s] - Returning from [%s]'
                      % (HDR.OS.path.basename(__file__), ROUTINE))
    return
def processQuery(self, pdQuery={}):
    """Execute an UPDATE statement against one table.

    pdQuery keys read: 'TABLE', 'COLUMN', 'VALUE', optional 'WHERE_COND'.
    Loads the column layout from the catalog, then rewrites the target
    column for every row (no WHERE) or only for matching rows (single
    WHERE condition). Enforces NOT NULL (DB008) and primary-key
    uniqueness via the column index (DB009). Returns None.

    NOTE(review): pdQuery={} is a mutable default argument; the dict is
    not mutated here, but a None default would be safer.
    NOTE(review): block reconstructed from whitespace-mangled source —
    nesting mirrors the parallel SELECT handler; confirm against VCS.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    # Working state: sVar*/sOp/sVal* hold one parsed WHERE triple,
    # lTmp2 accumulates every cell value of the table, lFlat/lFlat2 the
    # flattened per-row record being written back.
    sTmp = ''
    sVar1 = ''
    sOp = ''
    sVar2 = ''
    sVal1 = ''
    sVal2 = ''
    oOpFunc = None
    lTmp = []
    lTmp2 = []
    lHeaders = []
    lFlat = []
    lFlat2 = []
    iCtr1 = 0
    iCtr2 = 0
    iNumCol = 0
    iNumRow = 0
    iRecordSize = 0
    bFlag = False
    if not HDR.OS.path.exists(HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'])):
        HDR.printException('DB010', 'Table [%s] does not exist' % pdQuery['TABLE'])
        return
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict()
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> Value)
    odTableValue = HDR.COLLECTIONS.OrderedDict()
    odTableValueConstraint = HDR.COLLECTIONS.OrderedDict()
    odTableValueNullable = HDR.COLLECTIONS.OrderedDict()
    # Catalog reader: resolves this table's columns from davisbase_columns.
    self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE']),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
    lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLE'])
    # The catalog stream is a flat list: 8 fields per catalog row.
    # Field 4 = column name, 5 = data type, 7 = nullable, 8 = constraint.
    for sItem in lTmp:
        iCtr1 += 1
        iCtr2 += 1
        if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iCtr1 = 0
        if (iCtr1 == 4):
            sTmp = sItem.split('|')[0]
            # Add key to dictionary
            odTableStructure[sTmp.decode(encoding='utf-8')] = ''
            odTableValue[sTmp.decode(encoding='utf-8')] = ''
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
            lHeaders.append(sTmp.decode(encoding='utf-8'))
        elif (iCtr1 == 5):
            # Add value corresponding to the key
            odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iNumCol += 1
        elif (iCtr1 == 7):
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
    # Reset the variables
    lTmp = []
    # Check for constraints here
    if odTableValueNullable[pdQuery['COLUMN']] == 'N':
        if pdQuery['VALUE'].lower() == 'null':
            HDR.printException('DB008', 'Field [%s] is Not Nullable. Cannot Nullify the same' % (str(pdQuery['COLUMN'])))
            return
    # Read every existing row of the table ('~'-separated cells).
    self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE']),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'))
    for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
        iNumRow += 1
        lTmp = sItem.split('~')
        for sItem2 in lTmp:
            lTmp2.append(str(sItem2))
    iCtr1 = 0
    # Update to the File
    self.joBPTTBLUpdateNormal = bPlusTree.javaBPTTblInsert(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl' + '.update'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.goDebug, HDR.goBPTPD)
    # If no WHERE_CONDITION is given, update all rows
    if not 'WHERE_COND' in pdQuery.keys():
        lTmp = []
        # One pass of the inner for-loop consumes exactly one row's cells.
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            # PK columns must stay unique: probe the column index first.
            if pdQuery['COLUMN'] != 'ROWID' and odTableValueConstraint[pdQuery['COLUMN']] == 'PK':
                oTmp1 = bPlusTree.javaBPTTblInsert(
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['COLUMN'] + '.idx'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx' + '.update'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                    HDR.goDebug, HDR.goBPTPD)
                if (oTmp1.IdxFind(pdQuery['VALUE']) == 1):
                    HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(pdQuery['COLUMN']), str(odTableValueConstraint[pdQuery['COLUMN']])))
                    return
            odTableValue[pdQuery['COLUMN']] = pdQuery['VALUE']
            lFlat = []
            lFlat2 = []
            iCtr2 = 0
            iRecordSize = 0
            # Flatten the query here
            for sKey, sValue in odTableValue.items():
                iCtr2 += 1
                # Handle for DATETIME and DATE here
                if sKey == pdQuery['COLUMN']:
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        sValue = HDR.convertDT(0, sValue)
                    # NULL cells are tagged by prefixing the data type with 'null_'.
                    if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                       (odTableStructure[sKey][:4] != 'null'):
                        odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                else:
                    if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                       (odTableStructure[sKey][:4] != 'null'):
                        odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                lFlat.append([str(iCtr2), sKey, odTableStructure[sKey].decode(encoding='utf-8'), sValue])
            # Update into table here
            for item in lFlat:
                item.append(HDR.dDataTypeSerialCode[item[2]])
                # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
                if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
                    item.append(str(len(item[3])))
                    iRecordSize = iRecordSize + len(item[3])
                else:
                    item.append(HDR.dDataTypeLength[item[2]])
                    iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
                lFlat2.append(item)
            if (lFlat2):
                self.updateToIndex(odTableValue[pdQuery['COLUMN']], pdQuery['VALUE'], odTableValue, odTableStructure, pdQuery)
                self.joBPTTBLUpdateNormal.update(lFlat2, len(lFlat2), iRecordSize)
                bFlag = True
    else:
        lTmp = []
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            # Only a single WHERE condition is supported.
            if (len(pdQuery['WHERE_COND']) == 1):
                iCtr2 = 0
                while (iCtr2 < len(pdQuery['WHERE_COND'])):
                    # WHERE triple: column, operator, comparison value.
                    sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sOp = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    oOpFunc = HDR.dOpFunc[sOp]
                    if (odTableValue[sVar1].lower() != 'null'):
                        sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                    else:
                        sVal1 = odTableValue[sVar1].lower()
                    if (sVar2.lower() != 'null'):
                        sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                    else:
                        sVal2 = sVar2.lower()
                    if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                        sVal2 = HDR.convertDT(0, str(sVal2))
                        if (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = sVal2[:10]
                        if (sVal2 != 'null'):
                            # Recast sVal2 Here
                            sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                    bResult = oOpFunc(sVal1, sVal2)
                # Reset the variable
                iCtr2 = 0
            if bResult == True:
                # Row matched the WHERE condition: apply the same PK check
                # and rewrite as the no-WHERE path above.
                if pdQuery['COLUMN'] != 'ROWID' and odTableValueConstraint[pdQuery['COLUMN']] == 'PK':
                    oTmp1 = bPlusTree.javaBPTTblInsert(
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['COLUMN'] + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx' + '.update'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                        HDR.goDebug, HDR.goBPTPD)
                    if (oTmp1.IdxFind(pdQuery['VALUE']) == 1):
                        HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(pdQuery['COLUMN']), str(odTableValueConstraint[pdQuery['COLUMN']])))
                        return
                odTableValue[pdQuery['COLUMN']] = pdQuery['VALUE']
                lFlat = []
                lFlat2 = []
                iCtr2 = 0
                iRecordSize = 0
                # Flatten the query here
                for sKey, sValue in odTableValue.items():
                    iCtr2 += 1
                    HDR.goDebug.write(psMsg='[%s] - Where Condition sKey [%s], sValue [%s]' \
                                      % (HDR.OS.path.basename(__file__), sKey, sValue))
                    # Handle for DATETIME and DATE here
                    if sKey == pdQuery['COLUMN']:
                        HDR.goDebug.write(psMsg='[%s] - If part sKey = pdQuery [COLUMN], Constraint [%s]' \
                                          % (HDR.OS.path.basename(__file__), odTableStructure[sKey]))
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            sValue = HDR.convertDT(0, sValue)
                        HDR.goDebug.write(psMsg='[%s] - If sValue [%s]' % (HDR.OS.path.basename(__file__), sValue))
                        if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                           (odTableStructure[sKey][:4] != 'null'):
                            odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                    else:
                        HDR.goDebug.write(psMsg='[%s] - Else part sKey = pdQuery [COLUMN], Constraint [%s]' \
                                          % (HDR.OS.path.basename(__file__), odTableStructure[sKey]))
                        HDR.goDebug.write(psMsg='[%s] - Else sValue [%s]' % (HDR.OS.path.basename(__file__), sValue))
                        if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                           (odTableStructure[sKey][:4] != 'null'):
                            odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                    lFlat.append([str(iCtr2), sKey, odTableStructure[sKey].decode(encoding='utf-8'), sValue])
                # Update into table here
                for item in lFlat:
                    item.append(HDR.dDataTypeSerialCode[item[2]])
                    # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
                    if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
                        item.append(str(len(item[3])))
                        iRecordSize = iRecordSize + len(item[3])
                    else:
                        item.append(HDR.dDataTypeLength[item[2]])
                        iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
                    lFlat2.append(item)
                if (lFlat2):
                    self.updateToIndex(odTableValue[pdQuery['COLUMN']], pdQuery['VALUE'], odTableValue, odTableStructure, pdQuery)
                    self.joBPTTBLUpdateNormal.update(lFlat2, len(lFlat2), iRecordSize)
                    bFlag = True
    # Flush the staged updates only if at least one row was rewritten.
    if bFlag == True:
        self.joBPTTBLUpdateNormal.write()
    return
def processQuery(self, pdQuery={}):
    """Execute a SELECT statement and print the result as a psql-style table.

    pdQuery keys read: 'TABLES', 'COLUMNS' ('*' or comma-separated list),
    optional 'WHERE_COND' (single condition).
    Three branches share the same shape: (1) catalog table
    davisbase_columns, (2) catalog table davisbase_tables, (3) a normal
    user table under the current database directory. Each branch loads
    column metadata from the catalog, reads all rows, applies the
    optional WHERE filter, projects columns, and prints via TABULATE.
    Returns None.

    NOTE(review): pdQuery={} is a mutable default argument; not mutated
    here, but a None default would be safer.
    NOTE(review): block reconstructed from whitespace-mangled source —
    nesting inferred from the repeated branch pattern; confirm against VCS.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    sTmp = ''
    lTmp = []
    lTmp2 = []
    lHeaders = []
    iCtr1 = 0
    iCtr2 = 0
    iNumCol = 0
    iNumRow = 0
    bResult = False
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict()
    odTableValue = HDR.COLLECTIONS.OrderedDict()
    odTableValueConstraint = HDR.COLLECTIONS.OrderedDict()
    odTableValueNullable = HDR.COLLECTIONS.OrderedDict()
    # Special handling for viewing catalog tables - START ---->
    if pdQuery['TABLES'].lower() == 'davisbase_columns':
        self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
        # Catalog stream: 8 fields per catalog row; field 4 = column name,
        # 5 = data type, 7 = nullable, 8 = constraint.
        for sItem in lTmp:
            iCtr1 += 1
            iCtr2 += 1
            if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iCtr1 = 0
            if (iCtr1 == 4):
                sTmp = sItem.split('|')[0]
                # Add key to dictionary
                odTableStructure[sTmp.decode(encoding='utf-8')] = ''
                odTableValue[sTmp.decode(encoding='utf-8')] = ''
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
                lHeaders.append(sTmp.decode(encoding='utf-8'))
            elif (iCtr1 == 5):
                # Add value corresponding to the key
                odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iNumCol += 1
            elif (iCtr1 == 7):
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
        # Reset the variable
        lTmp = []
        # Read every row of the catalog columns table ('~'-separated cells).
        self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
            iNumRow += 1
            lTmp = sItem.split('~')
            for sItem2 in lTmp:
                lTmp2.append(str(sItem2))
        # Reset the variable
        iCtr1 = 0
        lTmp = []
        if 'WHERE_COND' in pdQuery.keys():
            iNumRow = 0  # Recounted: only matching rows are kept.
            while (iCtr1 < len(lTmp2)):
                # Consume one row's cells into the column->value map.
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                if (len(pdQuery['WHERE_COND']) == 1):
                    iCtr2 = 0
                    while (iCtr2 < len(pdQuery['WHERE_COND'])):
                        # WHERE triple: column, operator, comparison value.
                        sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sOp = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        oOpFunc = HDR.dOpFunc[sOp]
                        if (odTableValue[sVar1].lower() != 'null'):
                            sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                        else:
                            sVal1 = odTableValue[sVar1].lower()
                        if (sVar2.lower() != 'null'):
                            sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                        else:
                            sVal2 = sVar2.lower()
                        if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = HDR.convertDT(0, str(sVal2))
                            if (odTableStructure[sVar1] == 'DATE'):
                                sVal2 = sVal2[:10]
                            if (sVal2.lower() != 'null'):
                                # Recast sVal2 Here
                                sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                        bResult = oOpFunc(sVal1, sVal2)
                    # Reset the variable
                    iCtr2 = 0
                if bResult == True:
                    iNumRow += 1
                    for sKey, sValue in odTableValue.items():
                        # Render stored DATETIME/DATE values back to text.
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                            if (odTableStructure[sKey] == 'DATE'):
                                odTableValue[sKey] = odTableValue[sKey][:10]
                        lTmp.append(str(odTableValue[sKey]))
        else:
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                for sKey, sValue in odTableValue.items():
                    lTmp.append(str(odTableValue[sKey]))
        lTmp2 = lTmp
        if pdQuery['COLUMNS'] == '*':
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        else:
            # Project only the requested columns.
            lHeaders = pdQuery['COLUMNS'].split(',')
            iNumCol = len(lHeaders)
            iCtr = 0
            lTmp = []
            while (iCtr < len(lTmp2)):
                for sKey, sValue in odTableStructure.items():
                    odTableStructure[sKey] = lTmp2[iCtr]
                    iCtr += 1
                for sKey in lHeaders:
                    if sKey in odTableStructure.keys():
                        lTmp.append(str(odTableStructure[sKey]))
            lTmp2 = lTmp
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
        return
    elif pdQuery['TABLES'].lower() == 'davisbase_tables':
        # Same flow as above, but the row data comes from davisbase_tables.tbl.
        self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
        for sItem in lTmp:
            iCtr1 += 1
            iCtr2 += 1
            if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iCtr1 = 0
            if (iCtr1 == 4):
                sTmp = sItem.split('|')[0]
                # Add key to dictionary
                odTableStructure[sTmp.decode(encoding='utf-8')] = ''
                odTableValue[sTmp.decode(encoding='utf-8')] = ''
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
                lHeaders.append(sTmp.decode(encoding='utf-8'))
            elif (iCtr1 == 5):
                # Add value corresponding to the key
                odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iNumCol += 1
            elif (iCtr1 == 7):
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
        # Reset the variable
        lTmp = []
        self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_tables.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_tables.tbl'))
        for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
            iNumRow += 1
            lTmp = sItem.split('~')
            for sItem2 in lTmp:
                lTmp2.append(str(sItem2))
        # Reset the variable
        iCtr1 = 0
        lTmp = []
        if 'WHERE_COND' in pdQuery.keys():
            iNumRow = 0
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                if (len(pdQuery['WHERE_COND']) == 1):
                    iCtr2 = 0
                    while (iCtr2 < len(pdQuery['WHERE_COND'])):
                        sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sOp = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        oOpFunc = HDR.dOpFunc[sOp]
                        if (odTableValue[sVar1].lower() != 'null'):
                            sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                        else:
                            sVal1 = odTableValue[sVar1].lower()
                        if (sVar2.lower() != 'null'):
                            sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                        else:
                            sVal2 = sVar2.lower()
                        if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = HDR.convertDT(0, str(sVal2))
                            if (odTableStructure[sVar1] == 'DATE'):
                                sVal2 = sVal2[:10]
                            if (sVal2.lower() != 'null'):
                                # Recast sVal2 Here
                                sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                        bResult = oOpFunc(sVal1, sVal2)
                    # Reset the variable
                    iCtr2 = 0
                if bResult == True:
                    iNumRow += 1
                    for sKey, sValue in odTableValue.items():
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                            if (odTableStructure[sKey] == 'DATE'):
                                odTableValue[sKey] = odTableValue[sKey][:10]
                        lTmp.append(str(odTableValue[sKey]))
        else:
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                for sKey, sValue in odTableValue.items():
                    lTmp.append(str(odTableValue[sKey]))
        lTmp2 = lTmp
        if pdQuery['COLUMNS'] == '*':
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        else:
            lHeaders = pdQuery['COLUMNS'].split(',')
            iNumCol = len(lHeaders)
            iCtr = 0
            lTmp = []
            while (iCtr < len(lTmp2)):
                for sKey, sValue in odTableStructure.items():
                    odTableStructure[sKey] = lTmp2[iCtr]
                    iCtr += 1
                for sKey in lHeaders:
                    if sKey in odTableStructure.keys():
                        lTmp.append(str(odTableStructure[sKey]))
            lTmp2 = lTmp
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
        return
    # End of Special handling for viewing Catalog Tables
    if not HDR.OS.path.exists(HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'])):
        HDR.printException('DB010', 'Table [%s] does not exist' % pdQuery['TABLES'])
        return
    # Normal user table: structure from the catalog, rows from the table file.
    self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES']),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
    lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
    for sItem in lTmp:
        iCtr1 += 1
        iCtr2 += 1
        if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iCtr1 = 0
        if (iCtr1 == 4):
            sTmp = sItem.split('|')[0]
            # Add key to dictionary
            odTableStructure[sTmp.decode(encoding='utf-8')] = ''
            odTableValue[sTmp.decode(encoding='utf-8')] = ''
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
            lHeaders.append(sTmp.decode(encoding='utf-8'))
        elif (iCtr1 == 5):
            # Add value corresponding to the key
            odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iNumCol += 1
        elif (iCtr1 == 7):
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
    # Reset the variable
    lTmp = []
    self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES']),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES'] + '.tbl'))
    for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
        iNumRow += 1
        lTmp = sItem.split('~')
        for sItem2 in lTmp:
            lTmp2.append(str(sItem2))
    # Reset the variable
    iCtr1 = 0
    lTmp = []
    if 'WHERE_COND' in pdQuery.keys():
        iNumRow = 0
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            if (len(pdQuery['WHERE_COND']) == 1):
                iCtr2 = 0
                while (iCtr2 < len(pdQuery['WHERE_COND'])):
                    sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sOp = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    oOpFunc = HDR.dOpFunc[sOp]
                    if (odTableValue[sVar1].lower() != 'null'):
                        sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                    else:
                        sVal1 = odTableValue[sVar1].lower()
                    if (sVar2.lower() != 'null'):
                        sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                    else:
                        sVal2 = sVar2.lower()
                    if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                        sVal2 = HDR.convertDT(0, str(sVal2))
                        if (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = sVal2[:10]
                        if (sVal2.lower() != 'null'):
                            # Recast sVal2 Here
                            sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                    bResult = oOpFunc(sVal1, sVal2)
                # Reset the variable
                iCtr2 = 0
            if bResult == True:
                iNumRow += 1
                for sKey, sValue in odTableValue.items():
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                    lTmp.append(str(odTableValue[sKey]))
    else:
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
                if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                    odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                    if (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = odTableValue[sKey][:10]
            for sKey, sValue in odTableValue.items():
                lTmp.append(str(odTableValue[sKey]))
    lTmp2 = lTmp
    if pdQuery['COLUMNS'] == '*':
        # Group List on Per-Row Basis
        lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
        HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
    else:
        lHeaders = pdQuery['COLUMNS'].split(',')
        iNumCol = len(lHeaders)
        iCtr = 0
        lTmp = []
        while (iCtr < len(lTmp2)):
            for sKey, sValue in odTableStructure.items():
                odTableStructure[sKey] = lTmp2[iCtr]
                iCtr += 1
            for sKey in lHeaders:
                if sKey in odTableStructure.keys():
                    lTmp.append(str(odTableStructure[sKey]))
        lTmp2 = lTmp
        # Group List on Per-Row Basis
        lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
        HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
    print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
    return
def insertIntoTable(self, pdTableStructure, pdTableValue, pdTableValueNullable,
                    pdTableValueConstraint, pdQuery={}):
    """Serialize one row and append it to the table file.

    pdTableStructure: column name -> data type (ordered).
    pdTableValue: column name -> value to insert; ROWID is set here.
    pdTableValueNullable: column name -> 'N' when NOT NULL.
    pdTableValueConstraint: column name -> 'PK' for primary keys.
    pdQuery: parsed query dict; reads pdQuery['TABLE'].
    Allocates the next ROWID from the table's .config, enforces NOT NULL
    (DB008) and PK uniqueness (DB009), writes the record plus its column
    indexes, and persists the new last ROWID. Returns None.
    Raises Exception when the stored last ROWID is negative (corruption).

    NOTE(review): pdQuery={} is a mutable default argument; not mutated
    here, but a None default would be safer.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    lFlat = []
    lFlat2 = []
    iRowID = 0
    iCtr = 0
    iRecordSize = 0
    bFlag = False  # Set True when any constraint fails; blocks the insert.
    # Format the data to insert into table
    # Get the Last Row ID
    iRowID = int(
        HDR.pickle(
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.config'),
            1, 'LAST_ROW_ID_NORMAL_TABLES', 0))
    if iRowID < 0:
        raise Exception('Database Table Corrupted.. Critical. Aborting!!!')
    else:
        iRowID += 1
    pdTableValue['ROWID'] = str(iRowID)
    # Flatten the query here
    for sKey, sValue in pdTableValue.items():
        iCtr += 1
        # Handle for DATETIME and DATE here
        if (pdTableStructure[sKey] == 'DATETIME') or (pdTableStructure[sKey] == 'DATE'):
            sValue = HDR.convertDT(0, sValue)
        # NULL cells are tagged by prefixing the data type with 'null_'.
        if (sValue.lower() == 'null') and (pdTableStructure[sKey] != 'TEXT'):
            pdTableStructure[sKey] = sValue.lower() + '_' + pdTableStructure[sKey]
        lFlat.append([str(iCtr), sKey, pdTableStructure[sKey].decode(encoding='utf-8'), sValue])
    # Insert into table here
    for item in lFlat:
        item.append(HDR.dDataTypeSerialCode[item[2]])
        # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
        if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
            item.append(str(len(item[3])))
            iRecordSize = iRecordSize + len(item[3])
        else:
            item.append(HDR.dDataTypeLength[item[2]])
            iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
        lFlat2.append(item)
    if lFlat2:
        # Check for Constraint and Nullables here. If everything passes, then insert.
        # If any condition fails, then do not insert
        for sKey, sValue in pdTableValueNullable.items():
            if pdTableValueNullable[sKey] == 'N':
                if pdTableValue[sKey].lower() == 'null':
                    HDR.printException('DB008', 'Field [%s] is Not Nullable. Cannot Nullify the same' % (str(sKey)))
                    bFlag = True
        if bFlag == False:
            # Probe each PK column's index for a duplicate value.
            for sKey, sValue in pdTableValue.items():
                if sKey != 'ROWID' and pdTableValueConstraint[sKey] == 'PK':
                    self.joBPTTBLInsertNormal = bPlusTree.javaBPTTblInsert(
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], sKey + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx' + '.insert'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx'),
                        HDR.goDebug, HDR.goBPTPD)
                    if (self.joBPTTBLInsertNormal.IdxFind(sValue) == 1):
                        HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(sKey), str(pdTableValueConstraint[sKey])))
                        bFlag = True
                        break
        if bFlag == False:
            # All constraints passed: append the record and its indexes.
            self.joBPTTBLInsertNormal = bPlusTree.javaBPTTblInsert(
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['TABLE'] + '.tbl'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl' + '.insert'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
                HDR.goDebug, HDR.goBPTPD)
            self.joBPTTBLInsertNormal.insert(lFlat2, len(lFlat2), iRecordSize)
            self.joBPTTBLInsertNormal.write()
            # Write the indexes for every column here
            self.writeIndex(pdTableValue, pdTableStructure, pdQuery)
            lFlat2 = []
            iRecordSize = 0
            # Write the last ROW ID to the config file if everything is successful
            HDR.pickle(
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.config'),
                0, 'LAST_ROW_ID_NORMAL_TABLES', iRowID)
    return
def processQuery(self, pdQuery={}):
    """Execute an INSERT statement: parse values and delegate to insertIntoTable.

    pdQuery keys read: 'TABLE', 'VALUES' (comma-separated value list).
    Loads the column layout from the catalog, maps the supplied values to
    columns in order (ROWID excluded — it is generated internally), and
    calls insertIntoTable. Reports DB006 when the table does not exist and
    DB007 on a column-count mismatch. Returns None.

    NOTE(review): pdQuery={} is a mutable default argument; not mutated
    here, but a None default would be safer.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict()
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> Value)
    odTableValue = HDR.COLLECTIONS.OrderedDict()
    odTableValueNullable = HDR.COLLECTIONS.OrderedDict()
    odTableValueConstraint = HDR.COLLECTIONS.OrderedDict()
    sTmp = ''
    lTmp = []
    iNumCol = 0
    iCtr1 = 0
    iCtr2 = 0
    HDR.goDebug.write(psMsg='[%s] - HDR.OS.path.join (HDR.OS.getcwd (), pdQuery [TABLE]) = [%s]' \
                      % (HDR.OS.path.basename(__file__), HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'])))
    if not HDR.OS.path.exists(HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'])):
        HDR.printException('DB006', 'Cannot find %s. No such Table exists.' % (pdQuery['TABLE']))
    else:
        # Hidden per-table metadata file is '.<table>'.
        sTmp = '.' + pdQuery['TABLE']
        self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], sTmp),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        # Get the Table structure
        lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLE'])
        # Catalog stream: 8 fields per catalog row; field 4 = column name,
        # 5 = data type, 7 = nullable, 8 = constraint.
        for sItem in lTmp:
            iCtr1 += 1
            iCtr2 += 1
            if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iCtr1 = 0
            if (iCtr1 == 4):
                sTmp = sItem.split('|')[0]
                # Add key to dictionary
                odTableStructure[sTmp.decode(encoding='utf-8')] = ''
                odTableValue[sTmp.decode(encoding='utf-8')] = ''
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
            elif (iCtr1 == 5):
                # Add value corresponding to the key
                odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iNumCol += 1
            elif (iCtr1 == 7):
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
        iCtr1 = 0
        lTmp = pdQuery['VALUES'].split(',')
        # NOTE(review): presumably compensates for a single-column (ROWID-only)
        # table so the (iNumCol - 1) comparison below stays valid — confirm.
        if (iNumCol == 1):
            iNumCol += 1
        if (len(lTmp) == (iNumCol - 1)):  # Exclude ROWID as its internally populated
            for sKey, sValue in odTableStructure.items():
                if sKey != 'ROWID':  # Do not populate ROWID Column
                    odTableValue[sKey] = lTmp[iCtr1]
                    iCtr1 += 1
            self.insertIntoTable(odTableStructure, odTableValue, odTableValueNullable, odTableValueConstraint, pdQuery)
        else:
            HDR.printException('DB007', 'Cannot Insert. Table structure and Insert Column Count Mismatch')
    return
def interpretQuery(self, ptTokens=()):
    """Validate CREATE TABLE tokens and build the query dictionary.

    ptTokens: tuple produced by tokenizeQuery(); the parse-results dict is
    expected at ptTokens[0][0][1].
    Returns a dict with keys 'OPERATION', 'COLUMN_DEFINES' and 'TABLE',
    or an empty dict when a data type (DB002) or constraint (DB003) is
    invalid.
    Raises Exception when ptTokens is empty or an unknown keyword appears.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]'
                      % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]'
                      % (HDR.OS.path.basename(__file__), str(ptTokens)))
    dQuery = {}
    lValidDT = ['NULL', 'TINYINT', 'SMALLINT', 'INT', 'BIGINT', 'REAL',
                'DOUBLE', 'DATETIME', 'DATE', 'TEXT']
    lValidConstraints = ['PRIMARY KEY', 'NOT NULL', 'DEFAULT']
    # The old try/except re-raised as Exception(e), losing the traceback;
    # exceptions now propagate directly.
    if not ptTokens:
        raise Exception('Query Tokens are Empty. Critical!!!. Cannot Interpret')
    dTmp = ptTokens[0][0][1]
    for sKey, Value in dTmp.items():
        if sKey == 'OPERATION':
            dQuery[sKey] = str(Value)
        elif sKey == 'COLUMN_DEFINES':
            # Validate the COLUMN Definitions here
            for dColDef in Value:
                for sSubKey, SubValue in dColDef.items():
                    if sSubKey == 'DATA_TYPE':
                        if str(SubValue) not in lValidDT:
                            HDR.printException('DB002', 'Unexpected Data Type in the Syntax')
                            # Fix: the old bare `return` after `dQuery = {}`
                            # returned None; return the emptied dict instead.
                            dQuery = {}
                            return dQuery
                        HDR.goDebug.write(psMsg='[%s] - [%s] DataType [%s] Valid'
                                          % (HDR.OS.path.basename(__file__), ROUTINE, str(SubValue)))
                    elif sSubKey == 'CONSTRAINT':
                        if str(SubValue) not in lValidConstraints:
                            HDR.printException('DB003', 'Unexpected Constraint in the Syntax')
                            dQuery = {}
                            return dQuery
                        HDR.goDebug.write(psMsg='[%s] - [%s] Constraint [%s] Valid'
                                          % (HDR.OS.path.basename(__file__), ROUTINE, str(SubValue)))
            dQuery[sKey] = Value
        elif sKey == 'TABLE':
            dQuery[sKey] = ','.join(Value)
        else:
            raise Exception('Unexpected Keyword in the Syntax. Critical!!!')
    return dQuery