def updateToIndex(self, psOldKey, psNewKey, pdTableValue, pdTableStructure, pdQuery):
    """Replace the index entry for the updated column.

    Deletes the .idx entry keyed by psOldKey, then re-inserts a flattened
    two-field record (new column value, ROWID) keyed by psNewKey.

    psOldKey         -- key currently stored in the column's index
    psNewKey         -- replacement key to insert
    pdTableValue     -- OrderedDict COLUMN_NAME -> current row value (ROWID is read)
    pdTableStructure -- OrderedDict COLUMN_NAME -> DATA_TYPE
    pdQuery          -- parsed query dict; 'TABLE' and 'COLUMN' keys are used
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(psOldKey)))
    # NOTE(review): first path lacks the '.idx' suffix the other two carry —
    # confirm javaBPTTblSQLProcessing expects a base name there.
    oTmp = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN']),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'))
    oTmp.deleteTableValuesFromIndex(psOldKey, pdTableStructure[pdQuery['COLUMN']])
    # Flatten the Record with new key for Insertion
    lFlat = []
    lFlat2 = []
    sValue = ''
    iCtr = 0
    iRecordSize = 0
    iCtr += 1
    # Handle for DATETIME and DATE here
    # NOTE(review): sValue is always '' here and is never used below (the
    # record inserts psNewKey) — this branch looks vestigial; confirm whether
    # psNewKey was meant to be converted instead.
    if (pdTableStructure[pdQuery['COLUMN']] == 'DATETIME') or (pdTableStructure[pdQuery['COLUMN']] == 'DATE'):
        sValue = HDR.convertDT(0, sValue)
    # Index record field 1: the indexed column's new value
    lFlat.append([
        str(iCtr), pdQuery['COLUMN'].decode(encoding='utf-8'),
        pdTableStructure[pdQuery['COLUMN']].decode(encoding='utf-8'), psNewKey
    ])
    iCtr += 1
    # Index record field 2: the owning row's ROWID
    lFlat.append([
        str(iCtr), 'ROWID', pdTableStructure['ROWID'].decode(encoding='utf-8'),
        pdTableValue['ROWID']
    ])
    HDR.goDebug.write(psMsg='[%s] - Update Index lFlat [%s]' % (HDR.OS.path.basename(__file__), lFlat))
    # Insert into table here: append serial code and byte length to each field
    for item in lFlat:
        HDR.goDebug.write(psMsg='[%s] - Update Index item [%s]' % (HDR.OS.path.basename(__file__), item))
        item.append(HDR.dDataTypeSerialCode[item[2]])
        # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
        if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
            item.append(str(len(item[3])))
            iRecordSize = iRecordSize + len(item[3])
        else:
            item.append(HDR.dDataTypeLength[item[2]])
            iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
        lFlat2.append(item)
        HDR.goDebug.write(psMsg='[%s] - Update Index lFlat2 appended [%s]' % (HDR.OS.path.basename(__file__), lFlat2))
    # Update the File
    oTmpWrite = bPlusTree.javaBPTTblInsert(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['COLUMN'] + '.idx'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx' + '.update'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
        HDR.goDebug, HDR.goBPTPD)
    oTmpWrite.insertIndex(lFlat2, len(lFlat2), iRecordSize, str(psNewKey))
    oTmpWrite.writeIndex()
    return
def processQuery(self, pdQuery={}):
    """Execute an UPDATE query described by pdQuery.

    Loads the table structure from the catalog, enforces NOT NULL and
    PK-uniqueness constraints, then rewrites the matching rows (all rows
    when no 'WHERE_COND' key is present) in the table's .tbl file and
    refreshes the updated column's .idx file via updateToIndex().

    pdQuery keys used: 'TABLE', 'COLUMN', 'VALUE', optional 'WHERE_COND'.

    Bug fix: the old index key is now captured in sOldKey BEFORE
    odTableValue is overwritten with the new value.  Previously
    updateToIndex() received the already-overwritten (new) value as
    psOldKey, so the stale index entry was never deleted.  bResult is
    also initialized so a malformed WHERE_COND cannot raise NameError.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    sTmp = ''
    sVar1 = ''
    sOp = ''
    sVar2 = ''
    sVal1 = ''
    sVal2 = ''
    sOldKey = ''
    oOpFunc = None
    lTmp = []
    lTmp2 = []
    lHeaders = []
    lFlat = []
    lFlat2 = []
    iCtr1 = 0
    iCtr2 = 0
    iNumCol = 0
    iNumRow = 0
    iRecordSize = 0
    bFlag = False
    # Fix: initialize so a row is skipped (not a NameError) when the WHERE
    # clause does not evaluate.
    bResult = False
    if not HDR.OS.path.exists(HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'])):
        HDR.printException('DB010', 'Table [%s] does not exist' % pdQuery['TABLE'])
        return
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict()
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> Value)
    odTableValue = HDR.COLLECTIONS.OrderedDict()
    odTableValueConstraint = HDR.COLLECTIONS.OrderedDict()
    odTableValueNullable = HDR.COLLECTIONS.OrderedDict()
    self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE']),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
    lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLE'])
    # Walk the flat catalog field list; every 8 fields describe one column.
    # Field 4 = COLUMN_NAME, 5 = DATA_TYPE, 7 = IS_NULLABLE, 8 = constraint.
    for sItem in lTmp:
        iCtr1 += 1
        iCtr2 += 1
        if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iCtr1 = 0
        if (iCtr1 == 4):
            sTmp = sItem.split('|')[0]
            # Add key to dictionary
            odTableStructure[sTmp.decode(encoding='utf-8')] = ''
            odTableValue[sTmp.decode(encoding='utf-8')] = ''
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
            lHeaders.append(sTmp.decode(encoding='utf-8'))
        elif (iCtr1 == 5):
            # Add value corresponding to the key
            odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iNumCol += 1
        elif (iCtr1 == 7):
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
    # Reset the variables
    lTmp = []
    # Check for constraints here
    if odTableValueNullable[pdQuery['COLUMN']] == 'N':
        if pdQuery['VALUE'].lower() == 'null':
            HDR.printException('DB008', 'Field [%s] is Not Nullable. Cannot Nullify the same' % (str(pdQuery['COLUMN'])))
            return
    self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE']),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'))
    # Flatten every '~'-delimited record into one long field list lTmp2
    for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
        iNumRow += 1
        lTmp = sItem.split('~')
        for sItem2 in lTmp:
            lTmp2.append(str(sItem2))
    iCtr1 = 0
    # Update to the File
    self.joBPTTBLUpdateNormal = bPlusTree.javaBPTTblInsert(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl' + '.update'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
        HDR.goDebug, HDR.goBPTPD)
    # If no WHERE_CONDITION is given, update all rows
    if not 'WHERE_COND' in pdQuery.keys():
        lTmp = []
        while (iCtr1 < len(lTmp2)):
            # Load the next row into odTableValue (one field per column)
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            # PK uniqueness check before applying the new value
            if pdQuery['COLUMN'] != 'ROWID' and odTableValueConstraint[pdQuery['COLUMN']] == 'PK':
                oTmp1 = bPlusTree.javaBPTTblInsert(
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['COLUMN'] + '.idx'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx' + '.update'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                    HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                    HDR.goDebug, HDR.goBPTPD)
                if (oTmp1.IdxFind(pdQuery['VALUE']) == 1):
                    HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(pdQuery['COLUMN']), str(odTableValueConstraint[pdQuery['COLUMN']])))
                    return
            # Fix: remember the key currently stored in the index BEFORE
            # clobbering it with the new value.
            sOldKey = odTableValue[pdQuery['COLUMN']]
            odTableValue[pdQuery['COLUMN']] = pdQuery['VALUE']
            lFlat = []
            lFlat2 = []
            iCtr2 = 0
            iRecordSize = 0
            # Flatten the query here
            for sKey, sValue in odTableValue.items():
                iCtr2 += 1
                # Handle for DATETIME and DATE here
                if sKey == pdQuery['COLUMN']:
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        sValue = HDR.convertDT(0, sValue)
                    # NULL in a non-TEXT column is encoded by prefixing the type (e.g. 'null_INT')
                    if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                       (odTableStructure[sKey][:4] != 'null'):
                        odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                else:
                    if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                       (odTableStructure[sKey][:4] != 'null'):
                        odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                lFlat.append([
                    str(iCtr2), sKey,
                    odTableStructure[sKey].decode(encoding='utf-8'), sValue
                ])
            # Update into table here
            for item in lFlat:
                item.append(HDR.dDataTypeSerialCode[item[2]])
                # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
                if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
                    item.append(str(len(item[3])))
                    iRecordSize = iRecordSize + len(item[3])
                else:
                    item.append(HDR.dDataTypeLength[item[2]])
                    iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
                lFlat2.append(item)
            if (lFlat2):
                # Fix: pass the saved old key (previously this passed
                # odTableValue[pdQuery['COLUMN']], already overwritten above).
                self.updateToIndex(sOldKey, pdQuery['VALUE'], odTableValue,
                                   odTableStructure, pdQuery)
                self.joBPTTBLUpdateNormal.update(lFlat2, len(lFlat2), iRecordSize)
                bFlag = True
    else:
        lTmp = []
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            # Evaluate the (single) WHERE condition against this row
            if (len(pdQuery['WHERE_COND']) == 1):
                iCtr2 = 0
                while (iCtr2 < len(pdQuery['WHERE_COND'])):
                    sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sOp = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    oOpFunc = HDR.dOpFunc[sOp]
                    if (odTableValue[sVar1].lower() != 'null'):
                        sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                    else:
                        sVal1 = odTableValue[sVar1].lower()
                    if (sVar2.lower() != 'null'):
                        sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                    else:
                        sVal2 = sVar2.lower()
                    if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                        sVal2 = HDR.convertDT(0, str(sVal2))
                        if (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = sVal2[:10]
                        if (sVal2 != 'null'):
                            # Recast sVal2 Here
                            sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                    bResult = oOpFunc(sVal1, sVal2)
                # Reset the variable
                iCtr2 = 0
            if bResult == True:
                if pdQuery['COLUMN'] != 'ROWID' and odTableValueConstraint[pdQuery['COLUMN']] == 'PK':
                    oTmp1 = bPlusTree.javaBPTTblInsert(
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['COLUMN'] + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx' + '.update'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['COLUMN'] + '.idx'),
                        HDR.goDebug, HDR.goBPTPD)
                    if (oTmp1.IdxFind(pdQuery['VALUE']) == 1):
                        HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(pdQuery['COLUMN']), str(odTableValueConstraint[pdQuery['COLUMN']])))
                        return
                # Fix: capture the old index key before overwriting (see above)
                sOldKey = odTableValue[pdQuery['COLUMN']]
                odTableValue[pdQuery['COLUMN']] = pdQuery['VALUE']
                lFlat = []
                lFlat2 = []
                iCtr2 = 0
                iRecordSize = 0
                # Flatten the query here
                for sKey, sValue in odTableValue.items():
                    iCtr2 += 1
                    HDR.goDebug.write(psMsg='[%s] - Where Condition sKey [%s], sValue [%s]' \
                        % (HDR.OS.path.basename(__file__), sKey, sValue))
                    # Handle for DATETIME and DATE here
                    if sKey == pdQuery['COLUMN']:
                        HDR.goDebug.write(psMsg='[%s] - If part sKey = pdQuery [COLUMN], Constraint [%s]' \
                            % (HDR.OS.path.basename(__file__), odTableStructure[sKey]))
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            sValue = HDR.convertDT(0, sValue)
                        HDR.goDebug.write(psMsg='[%s] - If sValue [%s]' % (HDR.OS.path.basename(__file__), sValue))
                        if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                           (odTableStructure[sKey][:4] != 'null'):
                            odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                    else:
                        HDR.goDebug.write(psMsg='[%s] - Else part sKey = pdQuery [COLUMN], Constraint [%s]' \
                            % (HDR.OS.path.basename(__file__), odTableStructure[sKey]))
                        HDR.goDebug.write(psMsg='[%s] - Else sValue [%s]' % (HDR.OS.path.basename(__file__), sValue))
                        if (sValue.lower() == 'null') and (odTableStructure[sKey] != 'TEXT') and \
                           (odTableStructure[sKey][:4] != 'null'):
                            odTableStructure[sKey] = sValue.lower() + '_' + odTableStructure[sKey]
                    lFlat.append([
                        str(iCtr2), sKey,
                        odTableStructure[sKey].decode(encoding='utf-8'), sValue
                    ])
                # Update into table here
                for item in lFlat:
                    item.append(HDR.dDataTypeSerialCode[item[2]])
                    # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
                    if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
                        item.append(str(len(item[3])))
                        iRecordSize = iRecordSize + len(item[3])
                    else:
                        item.append(HDR.dDataTypeLength[item[2]])
                        iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
                    lFlat2.append(item)
                if (lFlat2):
                    # Fix: pass the saved old key here as well
                    self.updateToIndex(sOldKey, pdQuery['VALUE'], odTableValue,
                                       odTableStructure, pdQuery)
                    self.joBPTTBLUpdateNormal.update(lFlat2, len(lFlat2), iRecordSize)
                    bFlag = True
    if bFlag == True:
        self.joBPTTBLUpdateNormal.write()
    return
def processQuery(self, pdQuery={}):
    """Execute a SELECT query described by pdQuery and print the result set
    as a psql-style table via tabulate.

    Three branches with near-identical logic (NOTE(review): the branches
    are copy-paste duplicates differing only in file paths — candidates
    for extraction into a shared helper):
      1. SELECT from catalog table davisbase_columns
      2. SELECT from catalog table davisbase_tables
      3. SELECT from a normal user table (must exist under cwd)

    pdQuery keys used: 'TABLES', 'COLUMNS' ('*' or comma-separated list),
    optional 'WHERE_COND'.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    sTmp = ''
    lTmp = []
    lTmp2 = []
    lHeaders = []
    iCtr1 = 0
    iCtr2 = 0
    iNumCol = 0
    iNumRow = 0
    bResult = False
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict()
    odTableValue = HDR.COLLECTIONS.OrderedDict()
    odTableValueConstraint = HDR.COLLECTIONS.OrderedDict()
    odTableValueNullable = HDR.COLLECTIONS.OrderedDict()
    # Special handling for viewing catalog tables - START ---->
    if pdQuery['TABLES'].lower() == 'davisbase_columns':
        self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
        # Catalog fields come 8 per column: field 4 = name, 5 = type,
        # 7 = nullable flag, 8 = constraint.
        for sItem in lTmp:
            iCtr1 += 1
            iCtr2 += 1
            if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iCtr1 = 0
            if (iCtr1 == 4):
                sTmp = sItem.split('|')[0]
                # Add key to dictionary
                odTableStructure[sTmp.decode(encoding='utf-8')] = ''
                odTableValue[sTmp.decode(encoding='utf-8')] = ''
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
                lHeaders.append(sTmp.decode(encoding='utf-8'))
            elif (iCtr1 == 5):
                # Add value corresponding to the key
                odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iNumCol += 1
            elif (iCtr1 == 7):
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
        # Reset the variable
        lTmp = []
        self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        # Flatten every '~'-delimited record into one long field list lTmp2
        for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
            iNumRow += 1
            lTmp = sItem.split('~')
            for sItem2 in lTmp:
                lTmp2.append(str(sItem2))
        # Reset the variable
        iCtr1 = 0
        lTmp = []
        if 'WHERE_COND' in pdQuery.keys():
            iNumRow = 0
            while (iCtr1 < len(lTmp2)):
                # Load the next row into odTableValue
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                # Evaluate the (single) WHERE condition against this row
                if (len(pdQuery['WHERE_COND']) == 1):
                    iCtr2 = 0
                    while (iCtr2 < len(pdQuery['WHERE_COND'])):
                        sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sOp = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        oOpFunc = HDR.dOpFunc[sOp]
                        if (odTableValue[sVar1].lower() != 'null'):
                            sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                        else:
                            sVal1 = odTableValue[sVar1].lower()
                        if (sVar2.lower() != 'null'):
                            sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                        else:
                            sVal2 = sVar2.lower()
                        if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = HDR.convertDT(0, str(sVal2))
                            if (odTableStructure[sVar1] == 'DATE'):
                                sVal2 = sVal2[:10]
                            if (sVal2.lower() != 'null'):
                                # Recast sVal2 Here
                                sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                        bResult = oOpFunc(sVal1, sVal2)
                    # Reset the variable
                    iCtr2 = 0
                if bResult == True:
                    iNumRow += 1
                    # Emit the row, converting stored dates to display form
                    for sKey, sValue in odTableValue.items():
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                            if (odTableStructure[sKey] == 'DATE'):
                                odTableValue[sKey] = odTableValue[sKey][:10]
                        lTmp.append(str(odTableValue[sKey]))
        else:
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                for sKey, sValue in odTableValue.items():
                    lTmp.append(str(odTableValue[sKey]))
        lTmp2 = lTmp
        if pdQuery['COLUMNS'] == '*':
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        else:
            # Project only the requested columns
            lHeaders = pdQuery['COLUMNS'].split(',')
            iNumCol = len(lHeaders)
            iCtr = 0
            lTmp = []
            while (iCtr < len(lTmp2)):
                for sKey, sValue in odTableStructure.items():
                    odTableStructure[sKey] = lTmp2[iCtr]
                    iCtr += 1
                for sKey in lHeaders:
                    if sKey in odTableStructure.keys():
                        lTmp.append(str(odTableStructure[sKey]))
            lTmp2 = lTmp
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
        return
    elif pdQuery['TABLES'].lower() == 'davisbase_tables':
        self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
        lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
        for sItem in lTmp:
            iCtr1 += 1
            iCtr2 += 1
            if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iCtr1 = 0
            if (iCtr1 == 4):
                sTmp = sItem.split('|')[0]
                # Add key to dictionary
                odTableStructure[sTmp.decode(encoding='utf-8')] = ''
                odTableValue[sTmp.decode(encoding='utf-8')] = ''
                odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
                lHeaders.append(sTmp.decode(encoding='utf-8'))
            elif (iCtr1 == 5):
                # Add value corresponding to the key
                odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
                iNumCol += 1
            elif (iCtr1 == 7):
                odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
        # Reset the variable
        lTmp = []
        self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.' + pdQuery['TABLES']),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_tables.tbl'),
            HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_tables.tbl'))
        for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
            iNumRow += 1
            lTmp = sItem.split('~')
            for sItem2 in lTmp:
                lTmp2.append(str(sItem2))
        # Reset the variable
        iCtr1 = 0
        lTmp = []
        if 'WHERE_COND' in pdQuery.keys():
            iNumRow = 0
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                if (len(pdQuery['WHERE_COND']) == 1):
                    iCtr2 = 0
                    while (iCtr2 < len(pdQuery['WHERE_COND'])):
                        sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sOp = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                        iCtr2 += 1
                        oOpFunc = HDR.dOpFunc[sOp]
                        if (odTableValue[sVar1].lower() != 'null'):
                            sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                        else:
                            sVal1 = odTableValue[sVar1].lower()
                        if (sVar2.lower() != 'null'):
                            sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                        else:
                            sVal2 = sVar2.lower()
                        if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = HDR.convertDT(0, str(sVal2))
                            if (odTableStructure[sVar1] == 'DATE'):
                                sVal2 = sVal2[:10]
                            if (sVal2.lower() != 'null'):
                                # Recast sVal2 Here
                                sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                        bResult = oOpFunc(sVal1, sVal2)
                    # Reset the variable
                    iCtr2 = 0
                if bResult == True:
                    iNumRow += 1
                    for sKey, sValue in odTableValue.items():
                        if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                            if (odTableStructure[sKey] == 'DATE'):
                                odTableValue[sKey] = odTableValue[sKey][:10]
                        lTmp.append(str(odTableValue[sKey]))
        else:
            while (iCtr1 < len(lTmp2)):
                for sKey, sValue in odTableValue.items():
                    odTableValue[sKey] = lTmp2[iCtr1]
                    iCtr1 += 1
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                for sKey, sValue in odTableValue.items():
                    lTmp.append(str(odTableValue[sKey]))
        lTmp2 = lTmp
        if pdQuery['COLUMNS'] == '*':
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        else:
            lHeaders = pdQuery['COLUMNS'].split(',')
            iNumCol = len(lHeaders)
            iCtr = 0
            lTmp = []
            while (iCtr < len(lTmp2)):
                for sKey, sValue in odTableStructure.items():
                    odTableStructure[sKey] = lTmp2[iCtr]
                    iCtr += 1
                for sKey in lHeaders:
                    if sKey in odTableStructure.keys():
                        lTmp.append(str(odTableStructure[sKey]))
            lTmp2 = lTmp
            # Group List on Per-Row Basis
            lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
            HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
        print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
        return
    # End of Special handling for viewing Catalog Tables
    if not HDR.OS.path.exists(HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'])):
        HDR.printException('DB010', 'Table [%s] does not exist' % pdQuery['TABLES'])
        return
    self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES']),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
        HDR.OS.path.join(HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
    lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog(pdQuery['TABLES'])
    for sItem in lTmp:
        iCtr1 += 1
        iCtr2 += 1
        if ((iCtr2 % 8) == 0):  # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iCtr1 = 0
        if (iCtr1 == 4):
            sTmp = sItem.split('|')[0]
            # Add key to dictionary
            odTableStructure[sTmp.decode(encoding='utf-8')] = ''
            odTableValue[sTmp.decode(encoding='utf-8')] = ''
            odTableValueConstraint[sTmp.decode(encoding='utf-8')] = ''
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = ''
            lHeaders.append(sTmp.decode(encoding='utf-8'))
        elif (iCtr1 == 5):
            # Add value corresponding to the key
            odTableStructure[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
            iNumCol += 1
        elif (iCtr1 == 7):
            odTableValueNullable[sTmp.decode(encoding='utf-8')] = sItem.split('|')[0]
    # Reset the variable
    lTmp = []
    self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing(
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES']),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES'] + '.tbl'),
        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLES'], '.' + pdQuery['TABLES'] + '.tbl'))
    for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal():
        iNumRow += 1
        lTmp = sItem.split('~')
        for sItem2 in lTmp:
            lTmp2.append(str(sItem2))
    # Reset the variable
    iCtr1 = 0
    lTmp = []
    if 'WHERE_COND' in pdQuery.keys():
        iNumRow = 0
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
            if (len(pdQuery['WHERE_COND']) == 1):
                iCtr2 = 0
                while (iCtr2 < len(pdQuery['WHERE_COND'])):
                    sVar1 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sOp = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    sVar2 = pdQuery['WHERE_COND'][0][iCtr2]
                    iCtr2 += 1
                    oOpFunc = HDR.dOpFunc[sOp]
                    if (odTableValue[sVar1].lower() != 'null'):
                        sVal1 = HDR.castValue(odTableValue[sVar1], odTableStructure[sVar1])
                    else:
                        sVal1 = odTableValue[sVar1].lower()
                    if (sVar2.lower() != 'null'):
                        sVal2 = HDR.castValue(sVar2, odTableStructure[sVar1])  # Try to cast it to same data type as Column
                    else:
                        sVal2 = sVar2.lower()
                    if (odTableStructure[sVar1] == 'DATETIME') or (odTableStructure[sVar1] == 'DATE'):
                        sVal2 = HDR.convertDT(0, str(sVal2))
                        if (odTableStructure[sVar1] == 'DATE'):
                            sVal2 = sVal2[:10]
                        if (sVal2.lower() != 'null'):
                            # Recast sVal2 Here
                            sVal2 = HDR.castValue(sVal2, odTableStructure[sVar1])
                    bResult = oOpFunc(sVal1, sVal2)
                # Reset the variable
                iCtr2 = 0
            if bResult == True:
                iNumRow += 1
                for sKey, sValue in odTableValue.items():
                    if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                        if (odTableStructure[sKey] == 'DATE'):
                            odTableValue[sKey] = odTableValue[sKey][:10]
                    lTmp.append(str(odTableValue[sKey]))
    else:
        while (iCtr1 < len(lTmp2)):
            for sKey, sValue in odTableValue.items():
                odTableValue[sKey] = lTmp2[iCtr1]
                iCtr1 += 1
                if (odTableStructure[sKey] == 'DATETIME') or (odTableStructure[sKey] == 'DATE'):
                    odTableValue[sKey] = HDR.convertDT(1, str(odTableValue[sKey]))
                    if (odTableStructure[sKey] == 'DATE'):
                        odTableValue[sKey] = odTableValue[sKey][:10]
            for sKey, sValue in odTableValue.items():
                lTmp.append(str(odTableValue[sKey]))
    lTmp2 = lTmp
    if pdQuery['COLUMNS'] == '*':
        # Group List on Per-Row Basis
        lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
        HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
    else:
        lHeaders = pdQuery['COLUMNS'].split(',')
        iNumCol = len(lHeaders)
        iCtr = 0
        lTmp = []
        while (iCtr < len(lTmp2)):
            for sKey, sValue in odTableStructure.items():
                odTableStructure[sKey] = lTmp2[iCtr]
                iCtr += 1
            for sKey in lHeaders:
                if sKey in odTableStructure.keys():
                    lTmp.append(str(odTableStructure[sKey]))
        lTmp2 = lTmp
        # Group List on Per-Row Basis
        lTmp = (HDR.grouper(lTmp2, piRow=iNumRow, piColumn=iNumCol))
        HDR.goDebug.write(psMsg='[%s] - Files New List [%s]' % (HDR.OS.path.basename(__file__), lTmp))
    print('\n' + HDR.TABULATE.tabulate(lTmp, headers=lHeaders, tablefmt='psql') + '\n')
    return
def writeIndex(self, pdTableValue={}, pdTableStructure={}, pdQuery={}):
    """Write/refresh a .idx entry for every non-ROWID column of one row.

    For each column, the record inserted into <column>.idx is the pair
    (column value, ROWID), serialized with the same serial-code/length
    scheme as table records.

    pdTableValue     -- OrderedDict COLUMN_NAME -> value of the row
    pdTableStructure -- OrderedDict COLUMN_NAME -> DATA_TYPE
    pdQuery          -- parsed query dict; 'TABLE' key is used
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    for sColumn, sCellValue in pdTableValue.items():
        # ROWID is the key of the .tbl file itself; it gets no separate index
        if sColumn == 'ROWID':
            continue
        self.joBPTTBLInsertNormal = bPlusTree.javaBPTTblInsert(
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], sColumn + '.idx'),
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sColumn + '.idx' + '.insert'),
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sColumn + '.idx'),
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sColumn + '.idx'),
            HDR.goDebug, HDR.goBPTPD)
        # Normalize date-like values to the internal representation first
        if (pdTableStructure[sColumn] == 'DATETIME') or (pdTableStructure[sColumn] == 'DATE'):
            sCellValue = HDR.convertDT(0, sCellValue)
        # Two-field index record: the column value, then the owning ROWID
        lEntries = [
            ['1', sColumn, pdTableStructure[sColumn].decode(encoding='utf-8'), sCellValue],
            ['2', 'ROWID', pdTableStructure['ROWID'].decode(encoding='utf-8'), pdTableValue['ROWID']],
        ]
        lSerialized = []
        iSize = 0
        # Append serial code and byte length to each field; TEXT ('0x0C')
        # uses the actual string length, everything else a fixed length.
        for lEntry in lEntries:
            lEntry.append(HDR.dDataTypeSerialCode[lEntry[2]])
            if HDR.dDataTypeSerialCode[lEntry[2]] == '0x0C':
                lEntry.append(str(len(lEntry[3])))
                iSize += len(lEntry[3])
            else:
                lEntry.append(HDR.dDataTypeLength[lEntry[2]])
                iSize += int(HDR.dDataTypeLength[lEntry[2]])
            lSerialized.append(lEntry)
        self.joBPTTBLInsertNormal.insertIndex(lSerialized, len(lSerialized), iSize, str(sCellValue))
        self.joBPTTBLInsertNormal.writeIndex()
    return
def insertIntoTable(self, pdTableStructure, pdTableValue, pdTableValueNullable, pdTableValueConstraint, pdQuery={}):
    """Insert one row into pdQuery['TABLE'].

    Steps: read the last ROWID from the table's .config pickle and bump
    it, flatten/serialize the row, enforce NOT NULL and PK-uniqueness
    constraints, write the row to the .tbl file, refresh all column
    indexes via writeIndex(), and persist the new last-ROWID.

    pdTableStructure       -- OrderedDict COLUMN_NAME -> DATA_TYPE
    pdTableValue           -- OrderedDict COLUMN_NAME -> value to insert
                              (mutated: 'ROWID' is set here)
    pdTableValueNullable   -- OrderedDict COLUMN_NAME -> 'N' when NOT NULL
    pdTableValueConstraint -- OrderedDict COLUMN_NAME -> constraint ('PK', ...)
    pdQuery                -- parsed query dict; 'TABLE' key is used
    Raises Exception when the stored last ROWID is negative (corruption).
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write(psMsg='[%s] - Inside [%s]' % (HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write(psMsg='[%s] - Syntax [%s]' % (HDR.OS.path.basename(__file__), str(pdQuery)))
    lFlat = []
    lFlat2 = []
    iRowID = 0
    iCtr = 0
    iRecordSize = 0
    bFlag = False
    # Format the data to insert into table
    # Get the Last Row ID
    iRowID = int(
        HDR.pickle(
            HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.config'),
            1, 'LAST_ROW_ID_NORMAL_TABLES', 0))
    if iRowID < 0:
        raise Exception('Database Table Corrupted.. Critical. Aborting!!!')
    else:
        iRowID += 1
        pdTableValue['ROWID'] = str(iRowID)
    # Flatten the query here
    for sKey, sValue in pdTableValue.items():
        iCtr += 1
        # Handle for DATETIME and DATE here
        if (pdTableStructure[sKey] == 'DATETIME') or (pdTableStructure[sKey] == 'DATE'):
            sValue = HDR.convertDT(0, sValue)
        # NULL in a non-TEXT column is encoded by prefixing the type (e.g. 'null_INT')
        if (sValue.lower() == 'null') and (pdTableStructure[sKey] != 'TEXT'):
            pdTableStructure[sKey] = sValue.lower() + '_' + pdTableStructure[sKey]
        lFlat.append([
            str(iCtr), sKey,
            pdTableStructure[sKey].decode(encoding='utf-8'), sValue
        ])
    # Insert into table here: append serial code and byte length per field
    for item in lFlat:
        item.append(HDR.dDataTypeSerialCode[item[2]])
        # For DATA_TYPE as TEXT, calculate the length of the actual text and append to list
        if (HDR.dDataTypeSerialCode[item[2]] == '0x0C'):
            item.append(str(len(item[3])))
            iRecordSize = iRecordSize + len(item[3])
        else:
            item.append(HDR.dDataTypeLength[item[2]])
            iRecordSize = iRecordSize + int(HDR.dDataTypeLength[item[2]])
        lFlat2.append(item)
    if lFlat2:
        # Check for Constraint and Nullables here. If everything passes, then insert.
        # If any condition fails, then do not insert
        for sKey, sValue in pdTableValueNullable.items():
            if pdTableValueNullable[sKey] == 'N':
                if pdTableValue[sKey].lower() == 'null':
                    HDR.printException('DB008', 'Field [%s] is Not Nullable. Cannot Nullify the same' % (str(sKey)))
                    bFlag = True
        if bFlag == False:
            # PK-uniqueness: probe each PK column's index for the new value
            for sKey, sValue in pdTableValue.items():
                if sKey != 'ROWID' and pdTableValueConstraint[sKey] == 'PK':
                    self.joBPTTBLInsertNormal = bPlusTree.javaBPTTblInsert(
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], sKey + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx' + '.insert'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx'),
                        HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + sKey + '.idx'),
                        HDR.goDebug, HDR.goBPTPD)
                    if (self.joBPTTBLInsertNormal.IdxFind(sValue) == 1):
                        HDR.printException('DB009', 'Field [%s] is a [%s]. Cannot have duplicate values for this field' % (str(sKey), str(pdTableValueConstraint[sKey])))
                        bFlag = True
                        break
        if bFlag == False:
            self.joBPTTBLInsertNormal = bPlusTree.javaBPTTblInsert(
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], pdQuery['TABLE'] + '.tbl'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl' + '.insert'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.' + pdQuery['TABLE'] + '.tbl'),
                HDR.goDebug, HDR.goBPTPD)
            self.joBPTTBLInsertNormal.insert(lFlat2, len(lFlat2), iRecordSize)
            self.joBPTTBLInsertNormal.write()
            # Write the indexes for every column here
            self.writeIndex(pdTableValue, pdTableStructure, pdQuery)
            lFlat2 = []
            iRecordSize = 0
            # Write the last ROW ID to the config file if everything is successful
            # NOTE(review): placed in the success path per the comment above —
            # confirm the new ROWID should not be persisted on a rejected insert.
            HDR.pickle(
                HDR.OS.path.join(HDR.OS.getcwd(), pdQuery['TABLE'], '.config'),
                0, 'LAST_ROW_ID_NORMAL_TABLES', iRowID)
    return
def processQuery (self, pdQuery={}):
    """Process a DELETE query against a normal (user) table.

    Reads the table's column layout from the catalog, scans every row of the
    table's .tbl file, and removes matching rows both from each column's index
    (via self.deleteFromIndex) and from the table file itself.  If pdQuery has
    no 'WHERE_COND' key, every row is deleted; otherwise only rows satisfying
    the single supported WHERE condition are deleted (only
    len(WHERE_COND) == 1 is handled — anything else deletes nothing).

    pdQuery -- parsed query dict; reads 'TABLE' and optionally 'WHERE_COND',
               whose first element appears to be a flat [column, operator,
               value] triple — TODO confirm against the parser.
               NOTE(review): mutable default argument — never mutated here,
               but a None sentinel is the conventional form.

    Returns None.
    """
    ROUTINE = HDR.SYS._getframe().f_code.co_name
    HDR.goDebug.write (psMsg='[%s] - Inside [%s]' %(HDR.OS.path.basename(__file__), ROUTINE))
    HDR.goDebug.write (psMsg='[%s] - Syntax [%s]' %(HDR.OS.path.basename(__file__), str (pdQuery)))
    sTmp = ''
    sVar1 = ''      # WHERE column name
    sOp = ''        # WHERE operator token
    sVar2 = ''      # WHERE literal (right-hand side, as parsed text)
    sVal1 = ''      # row value cast to the column's type
    sVal2 = ''      # WHERE literal cast to the column's type
    oOpFunc = None  # comparison callable looked up from HDR.dOpFunc
    lTmp = []
    lTmp2 = []      # every field of every row, flattened in column order
    lHeaders = []   # column names in declaration order (collected but unused below)
    iCtr1 = 0
    iCtr2 = 0
    iNumCol = 0     # column count (collected but unused below)
    iNumRow = 0     # row count (collected but unused below)
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> DATA_TYPE (Value))
    odTableStructure = HDR.COLLECTIONS.OrderedDict ()
    # Create a Ordered Dictionary (COLUMN_NAME (Key) -> Value)
    odTableValue = HDR.COLLECTIONS.OrderedDict ()
    self.joBPTTBLProcessCtg = bPlusTree.javaBPTTblSQLProcessing (HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE']),
                                                                 HDR.OS.path.join (HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'),
                                                                 HDR.OS.path.join (HDR.gsInstallPath, 'data', 'catalog', '.davisbase_columns.tbl'))
    lTmp = self.joBPTTBLProcessCtg.getTableStructureFromCatalog (pdQuery ['TABLE']);
    # The catalog emits a fixed 8-field record per column; judging by the
    # counters, field 4 carries the column name and field 5 its data type,
    # each encoded as '<value>|...' — TODO confirm against davisbase_columns.
    for sItem in lTmp:
        iCtr1 += 1
        iCtr2 += 1
        if ((iCtr2 % 8) == 0): # 8 -> Fixed Num of colums in Catalog Column Table (davisbase_columns.tbl)
            iCtr1 = 0
        if (iCtr1 == 4):
            sTmp = sItem.split ('|') [0]
            # Add key to dictionary
            odTableStructure [sTmp.decode (encoding='utf-8')] = ''
            odTableValue [sTmp.decode (encoding='utf-8')] = ''
            lHeaders.append (sTmp.decode (encoding='utf-8'))
        elif (iCtr1 == 5):
            # Add value corresponding to the key
            odTableStructure [sTmp.decode (encoding='utf-8')] = sItem.split ('|') [0]
            iNumCol += 1
    # Reset the variable
    lTmp = []
    self.joBPTTBLProcessNormal = bPlusTree.javaBPTTblSQLProcessing (HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE']),
                                                                    HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE'] + '.tbl'),
                                                                    HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE'] + '.tbl'))
    # Flatten every row into lTmp2; fields within a row are '~'-delimited.
    for sItem in self.joBPTTBLProcessNormal.getTableValuesFromNormal ():
        iNumRow += 1
        lTmp = sItem.split ('~')
        for sItem2 in lTmp:
            lTmp2.append (str (sItem2))
    HDR.goDebug.write (psMsg='[%s] - Delimited Records in lTmp2 = [%s]' %(HDR.OS.path.basename(__file__), lTmp2))
    # If no WHERE_CONDITION is given, delete all rows
    if not 'WHERE_COND' in pdQuery.keys ():
        iCtr = 0
        lTmp = []
        while (iCtr < len (lTmp2)):
            # Re-hydrate one row into odTableValue, consuming one flattened
            # field per column, and drop each non-ROWID field from its index.
            for sKey, sValue in odTableValue.items ():
                odTableValue [sKey] = lTmp2 [iCtr]
                # ROWID is the Key in the .tbl File, which is deleted later
                if sKey != 'ROWID':
                    self.deleteFromIndex (sKey, odTableValue [sKey], odTableStructure [sKey], pdQuery)
                iCtr += 1
            lTmp.append (str (odTableValue ['ROWID']))
        HDR.goDebug.write (psMsg='[%s] - Records in New List [%s]' %(HDR.OS.path.basename(__file__), lTmp))
        self.joBPTTBLProcessNormal.deleteTableValuesFromNormal (lTmp)
    else:
        iCtr = 0
        lTmp = []
        while (iCtr < len (lTmp2)):
            # Re-hydrate one row into odTableValue, then test it against the
            # WHERE condition before deleting.
            for sKey, sValue in odTableValue.items ():
                odTableValue[sKey] = lTmp2 [iCtr]
                iCtr += 1
            if (len (pdQuery ['WHERE_COND']) == 1):
                iCtr2 = 0
                # WHERE_COND[0] holds [column, operator, literal]; the loop
                # consumes one such triple (it exits after the first pass
                # because iCtr2 advances by 3 against a length of 1).
                while (iCtr2 < len (pdQuery ['WHERE_COND'])):
                    sVar1 = pdQuery ['WHERE_COND'][0] [iCtr2]
                    iCtr2 += 1
                    sOp = pdQuery ['WHERE_COND'][0] [iCtr2]
                    iCtr2 += 1
                    sVar2 = pdQuery ['WHERE_COND'][0] [iCtr2]
                    iCtr2 += 1
                    oOpFunc = HDR.dOpFunc [sOp]
                    # Cast both sides to the column's type unless they are the
                    # literal string 'null'.
                    if (odTableValue [sVar1].lower () != 'null'):
                        sVal1 = HDR.castValue (odTableValue [sVar1], odTableStructure [sVar1])
                    else:
                        sVal1 = odTableValue [sVar1].lower ()
                    if (sVar2.lower () != 'null'):
                        sVal2 = HDR.castValue (sVar2, odTableStructure [sVar1]) # Try to cast it to same data type as Column
                    else:
                        sVal2 = sVar2.lower ()
                    if (odTableStructure [sVar1] == 'DATETIME') or (odTableStructure [sVar1] == 'DATE'):
                        sVal2 = HDR.convertDT (0, str (sVal2))
                        if (odTableStructure [sVar1] == 'DATE'):
                            sVal2 = sVal2 [:10]   # presumably truncates to the YYYY-MM-DD part — TODO confirm convertDT's format
                        if (sVal2.lower () != 'null'):
                            # Recast sVal2 Here
                            sVal2 = HDR.castValue (sVal2, odTableStructure [sVar1])
                    bResult = oOpFunc (sVal1, sVal2)
                    if bResult == True:
                        # Row matches: remove every non-ROWID field from its
                        # index and remember the ROWID for table deletion.
                        for sKey, sValue in odTableValue.items ():
                            # ROWID is the Key in the .tbl File, which is deleted later
                            if sKey != 'ROWID':
                                self.deleteFromIndex (sKey, sValue, odTableStructure [sKey], pdQuery)
                        lTmp.append (str (odTableValue ['ROWID']))
        if (lTmp):
            HDR.goDebug.write (psMsg='[%s] - Final Records in New List [%s]' %(HDR.OS.path.basename(__file__), lTmp))
            self.joBPTTBLProcessNormal.deleteTableValuesFromNormal (lTmp)
    # Update the File
    self.joBPTTblWrite = bPlusTree.javaBPTTblInsert (HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], pdQuery ['TABLE'] + '.tbl'),
                                                     HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE'] + '.tbl'),
                                                     HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE'] + '.tbl'),
                                                     HDR.OS.path.join (HDR.OS.getcwd (), pdQuery ['TABLE'], '.' + pdQuery ['TABLE'] + '.tbl'),
                                                     HDR.goDebug, HDR.goBPTPD)
    self.joBPTTblWrite.write ()
    return