def export_sql_dump(self, db_name, query):
    """Copy this database's SQL dump file into the output folder.

    The dump named ``<db_name>_SQLDUMP.sql`` is read via FileOps and
    rewritten under the output directory.  ``query`` is currently unused;
    the commented-out original intent was to parse an export location
    out of the query text.
    """
    file_ops = FileOps()
    # NOTE(review): destination is a hard-coded developer path — confirm
    # whether the target should instead come from the query (see the
    # abandoned re.findall(r"\'(.*?)\'", query) approach).
    output_dir = "/Users/yash/database_5408_project_integration/output"
    dump_name = db_name + "_SQLDUMP.sql"
    destination = os.path.join(output_dir, dump_name)
    dump_contents = file_ops.filereader(dump_name)
    file_ops.filewriter(destination, dump_contents)
def print_relationships(self, file_name, datatype_dict_object):
    """Export an ER-diagram report listing each table's relationships.

    For every table in ``datatype_dict_object['Tables']`` the table name
    and its "Relationship" column (rendered as a psql-style table) are
    written to ``file_name`` inside the output directory.
    """
    writer = FileOps()
    # NOTE(review): output directory is a hard-coded developer path.
    target = os.path.join(
        "/Users/yash/database_5408_project_integration/output", file_name)
    # First write truncates/creates the file; all later writes append.
    writer.filewriter(target, "\n=========ER Diagram==========\n")
    writer.filewriterAppend(target, "\nRelationships between Tables\n")
    for table in datatype_dict_object['Tables']:
        writer.filewriterAppend(
            target,
            "\nTable Name: " + table['Table_name'].capitalize() + "\n")
        # Selecting columns=["Relationship"] keeps only that key from the
        # per-table schema dict.
        relationship_frame = pd.DataFrame(
            table['Table_columns'][0], columns=["Relationship"])
        writer.filewriterAppend(
            target,
            tabulate(relationship_frame, headers='keys', tablefmt='psql'))
def __init__(self):
    """Create the FileOps helper this instance uses for all file I/O."""
    self.fileopsobj = FileOps()
class CreatQuery:
    """Parses a CREATE TABLE statement and persists the table's schema.

    The schema is stored twice as JSON: a row-template blob in
    ``<db>_Tables.txt`` (one default row per table) and a data-dictionary
    blob in ``<db>_Tables_Datatypes.txt`` (column names, types, nullability,
    auto-increment, key flags and relationships).  The raw query text is
    also appended to the database's SQL dump file.
    """

    def __init__(self):
        # File I/O helper shared by create_table.
        self.fileopsobj = FileOps()

    def strip_text(self, text):
        # Collapse runs of spaces and trim leading/trailing whitespace.
        return re.sub(' +', ' ', text.strip())

    def create_table(self, username, dbname, query, logger, fname):
        """Execute a CREATE TABLE query against the flat-file database.

        username: user issuing the query (used for the lock check).
        dbname:   database name prefix for the storage files.
        query:    full SQL text, expected to end in ``);``.
        logger:   logger instance (currently unused in this method).
        fname:    None to write the live files; any other value writes the
                  ``*_copy`` files instead (transaction-style shadow copy).
        Returns the copy-mode flag (True when writing to the copy files).
        """
        # Result of the lock check is never read — presumably called for a
        # side effect inside LockStatus; verify.  TODO confirm.
        check_lock = LockStatus().checklock(username)
        #create db copy
        src_fname = dbname + "_Tables.txt"
        dest_dname = dbname + "_Tables_copy.txt"
        if fname == None:
            filename = src_fname
            dtname = dbname + "_Tables_Datatypes.txt"
            dumpname = dbname + "_SQLDUMP.sql"
            status = False
        else:
            filename = dest_dname
            dtname = dbname + "_Tables_Datatypes_copy.txt"
            dumpname = dbname + "_SQLDUMP_copy.sql"
            status = True
        flower_bracket_start = '{'
        flower_bracket_end = '}'
        # check user privileges later add a method
        # Table name = text between "table" and the first "(".
        table_name = self.strip_text(
            re.findall(r'table(.*?)\(', query.lower())[0])
        # Everything between the outer parens, up to the terminating ");".
        query_tablelevel = re.findall(r'\((.*?)\);', query.lower())[0]
        table_columns = re.split(",", self.strip_text(query_tablelevel))
        table_columns_list = []
        table_data_type = []
        num_of_foreign_keys = len(re.findall(r'foreign key', query.lower()))
        primary_key = 'none'
        foreign_key = []
        foreign_key_table_name = []
        foreign_key_column = []
        # Each comma-separated item is either "<col> <type> ..." or a
        # standalone "primary key (col)" / "foreign key (col) references
        # tbl (col)" clause; the first two space-split tokens of those
        # clauses ("primary"/"foreign" and "key") are removed again below.
        for x in table_columns:
            table_columns_list.append(re.split(" ", x)[0])
            table_data_type.append(re.split(" ", x)[1])
            try:
                # Succeeds only for the "primary key (col)" clause;
                # ordinary columns raise IndexError on [1].
                primary_key = re.sub("[^A-Za-z0-9_]", "",
                                     re.split("primary key", x)[1])
            except:
                try:
                    # Succeeds only for "foreign key (col) references ...".
                    foreign_key.append(
                        re.sub("[^A-Za-z0-9_]", "",
                               re.findall(r'foreign key(.*?)references',
                                          x)[0]))
                except:
                    # Ordinary column definition — nothing key-related.
                    continue
                try:
                    # Referenced table and column come from the text after
                    # "references": "<table> <column>" once punctuation is
                    # stripped.
                    foreign_key_table_name.append(
                        re.split(
                            " ",
                            self.strip_text(
                                re.sub("[^A-Za-z0-9_]", " ",
                                       re.findall(r'references(.*?)\)',
                                                  x)[0])))[0])
                    foreign_key_column.append(
                        re.split(
                            " ",
                            self.strip_text(
                                re.sub("[^A-Za-z0-9_]", " ",
                                       re.findall(r'references(.*?)\)',
                                                  x)[0])))[1])
                except:
                    continue
        # Drop the tokens contributed by the key clauses themselves.
        table_columns_list.remove('primary')
        table_data_type.remove('key')
        for i in range(0, num_of_foreign_keys):
            table_columns_list.remove('foreign')
            table_data_type.remove('key')
        num_of_table_columns = len(table_columns_list)
        # From here on the schema JSON is assembled by string concatenation
        # and parsed back with json.loads below.
        table_columns_autoincrement_string = ""
        table_columns_autoincrement_string += '"Auto Increment":['
        for i in range(0, len(table_columns_list)):
            # NOTE(review): indexes table_columns (pre-removal, includes the
            # key clauses) with a post-removal index — only safe because the
            # flags of the trailing clause entries are never reached; verify.
            if "auto_increment" in table_columns[i]:
                table_columns_autoincrement_string += '"Yes"'
            else:
                table_columns_autoincrement_string += '"No"'
            if (i != num_of_table_columns - 1):
                table_columns_autoincrement_string += ','
        table_columns_autoincrement_string += ']'
        # Default row template: auto-increment columns start at 1, all
        # others at the sentinel "defnull".
        table_columns_string = ""
        for i in range(0, num_of_table_columns):
            if "auto_increment" in table_columns[i]:
                table_columns_string += '"' + table_columns_list[i] + '":1'
            else:
                table_columns_string += '"' + table_columns_list[
                    i] + '":"defnull"'
            if (i != num_of_table_columns - 1):
                table_columns_string += ','
        table_columns_nullable_string = ""
        table_columns_nullable_string += '"Nullable":['
        for i in range(0, len(table_columns_list)):
            if "not null" in table_columns[i]:
                table_columns_nullable_string += '"No"'
            else:
                table_columns_nullable_string += '"Yes"'
            if (i != num_of_table_columns - 1):
                table_columns_nullable_string += ','
        table_columns_nullable_string += ']'
        table_columns_name_string = ""
        table_columns_primary_key_string = ""
        table_columns_foreign_key_string = ""
        table_columns_name_string += '"Name":['
        table_columns_primary_key_string = '"Primary Key":['
        for i in range(0, num_of_table_columns):
            table_columns_name_string += '"' + table_columns_list[i] + '"'
            # NOTE(review): when no primary key clause exists, only commas
            # are appended here, producing invalid JSON ("Primary Key":[,,])
            # that json.loads below would reject — presumably every CREATE
            # is expected to declare a primary key; confirm.
            if (primary_key != "none"):
                if (primary_key == table_columns_list[i]):
                    table_columns_primary_key_string += '"Yes"'
                else:
                    table_columns_primary_key_string += '"No"'
            if (i != num_of_table_columns - 1):
                table_columns_name_string += ','
                table_columns_primary_key_string += ','
        table_columns_name_string += ']'
        table_columns_primary_key_string += ']'
        table_columns_foreign_key_string = '"Foreign Key":['
        table_columns_relationship_string = '"Relationship":['
        for i in range(0, num_of_table_columns):
            if (table_columns_list[i] in foreign_key):
                table_columns_foreign_key_string += '"Yes"'
            else:
                table_columns_foreign_key_string += '"No"'
            # Human-readable description for each foreign-key column.
            for j in range(0, num_of_foreign_keys):
                if (foreign_key[j] == table_columns_list[i]):
                    table_columns_relationship_string += '"Foreign Key: ' + foreign_key[
                        j] + ' in Table:' + table_name + ' References Primary Key: ' + foreign_key_column[
                            j] + ' in Table: ' + foreign_key_table_name[j] + '"'
                    if (j != num_of_foreign_keys - 1):
                        table_columns_relationship_string += ','
            if (i != num_of_table_columns - 1):
                table_columns_foreign_key_string += ','
        table_columns_relationship_string += ']'
        table_columns_foreign_key_string += ']'
        table_data_type_columns_string = ""
        table_data_type_columns_string += '"Data Type":['
        num_of_table_data_type_columns = len(table_data_type)
        for i in range(0, num_of_table_data_type_columns):
            table_data_type_columns_string += '"' + table_data_type[i] + '"'
            # NOTE(review): compares against num_of_table_columns, not
            # num_of_table_data_type_columns — harmless only because both
            # lists shrink in lockstep above and stay equal in length.
            if (i != num_of_table_columns - 1):
                table_data_type_columns_string += ','
        table_data_type_columns_string += ']'
        table_datatype_string = table_columns_name_string + "," + table_data_type_columns_string + "," + table_columns_nullable_string + "," + table_columns_autoincrement_string + "," + table_columns_primary_key_string + "," + table_columns_foreign_key_string + "," + table_columns_relationship_string
        final_table_columns_string = flower_bracket_start + table_columns_string + flower_bracket_end
        final_table_datatype_string = flower_bracket_start + table_datatype_string + flower_bracket_end
        my_table_json_string = flower_bracket_start + '"Table_name":"' + table_name + '","Table_columns":[' + final_table_columns_string + ']' + flower_bracket_end
        my_table_data_type_json_string = flower_bracket_start + '"Table_name":"' + table_name + '","Table_columns":[' + final_table_datatype_string + ']' + flower_bracket_end
        # Round-trip through json.loads to validate the hand-built strings.
        usertable_dict_obj = json.loads(my_table_json_string)
        usertable_datatype_dict_obj = json.loads(
            my_table_data_type_json_string)
        # my_temp_dict = usertable_dict_obj['Tables'][0]['Table_columns'][0]
        # table_name_dict = usertable_dict_obj['Tables'][0]['Table_name']
        # latest_obj = usertable_dict_obj['Tables'][0]['Table_columns'].append(my_temp_dict)
        # Merge the new table into the stored row-template file, rejecting
        # duplicates by name.
        f2 = json.loads(self.fileopsobj.filereader(filename))
        table_exists = False
        for k, v in f2.items():
            if (k == "Tables"):
                if (len(v) == 0):
                    v.append(json.loads(my_table_json_string))
                    table_exists = True
                    print("Table added to Tables!!")
                else:
                    for tables in v:
                        for k1, v1 in tables.items():
                            if (k1 == "Table_name"):
                                if (v1 == table_name):
                                    table_exists = True
                                    print("Error!!! Table already exists")
        if (table_exists == False):
            for k, v in f2.items():
                if (k == "Tables"):
                    v.append(json.loads(my_table_json_string))
                    print("Table added to Tables!!")
        self.fileopsobj.filewriter(filename, json.dumps(f2))
        # Same merge logic for the data-dictionary file.
        f4 = json.loads(self.fileopsobj.filereader(dtname))
        table_dt_exists = False
        for k, v in f4.items():
            if (k == "Tables"):
                if (len(v) == 0):
                    v.append(json.loads(my_table_data_type_json_string))
                    table_dt_exists = True
                    print("Data Dictionary added!!")
                else:
                    for tables in v:
                        for k1, v1 in tables.items():
                            if (k1 == "Table_name"):
                                if (v1 == table_name):
                                    table_dt_exists = True
                                    print("Error!!! Table already exists")
        if (table_dt_exists == False):
            for k, v in f4.items():
                if (k == "Tables"):
                    v.append(json.loads(my_table_data_type_json_string))
                    print("Data Dictionary added!!")
        self.fileopsobj.filewriter(dtname, json.dumps(f4))
        # Record the raw statement in the SQL dump for export.
        self.fileopsobj.filewriterAppend(dumpname, query + '\n')
        return status
class InsertQuery:
    """Parses an INSERT statement and appends a row to the stored table JSON.

    Two syntaxes are supported: ``insert into t values (...)`` (one paren
    group) and ``insert into t (cols) values (...)`` (two paren groups).
    Rows live in ``<db>_Tables.txt``; column metadata (names, nullability,
    auto-increment flags) comes from ``<db>_Tables_Datatypes.txt``.
    """

    def __init__(self):
        # File I/O helper shared by insert_row.
        self.fileObj = FileOps()

    def strip_text(self, text):
        # Collapse runs of spaces and trim leading/trailing whitespace.
        return re.sub(' +', ' ', text.strip())

    def insert_row(self, username, dbname, query, logger, fname):
        """Execute an INSERT query against the flat-file database.

        username: user issuing the query (used for the lock check).
        dbname:   database name prefix for the storage files.
        query:    full SQL text of the insert.
        logger:   logger instance (currently unused in this method).
        fname:    None to write the live files; any other value targets the
                  ``*_copy`` files instead (transaction-style shadow copy).
        Returns the copy-mode flag (True when writing to the copy files).
        """
        # Result of the lock check is never read — presumably called for a
        # side effect inside LockStatus; verify.  TODO confirm.
        check_lock = LockStatus().checklock(username)
        #create db copy
        src_fname = dbname + "_Tables.txt"
        dest_dname = dbname + "_Tables_copy.txt"
        if fname == None:
            filename = src_fname
            dtname = dbname + "_Tables_Datatypes.txt"
            status = False
        else:
            filename = dest_dname
            dtname = dbname + "_Tables_Datatypes_copy.txt"
            status = True
        # Table name = first word after "into", before the first "(".
        table_name = re.split(
            " ",
            self.strip_text(re.findall(r'into(.*?)\(', query.lower())[0]))[0]
        # One paren group => bare VALUES list; two => column list + values.
        query_tablelevel = re.findall(r'\((.*?)\)', query.lower())
        if (len(query_tablelevel) == 1):
            table_columns_values_list = re.split(
                ",",
                re.sub("[^A-Za-z0-9_, ]", "",
                       self.strip_text(query_tablelevel[0])))
        else:
            table_columns_headers_list = re.split(
                ",",
                re.sub("[^A-Za-z0-9_, ]", "",
                       self.strip_text(query_tablelevel[0])))
            table_columns_values_list = re.split(
                ",",
                re.sub("[^A-Za-z0-9_, ]", "",
                       self.strip_text(query_tablelevel[1])))
        if len(re.findall(r'\((.*?)\)', query.lower())) == 1:
            # DIRECT INSERT with out columns indication
            print("Inserted rows in Table")
            f1 = json.loads(self.fileObj.filereader(filename))
            f2 = json.loads(self.fileObj.filereader(dtname))
            # Pull this table's column metadata out of the data dictionary.
            auto_increment_list = []
            original_table_col_list = []
            for i in f2['Tables']:
                if (i['Table_name'] == table_name):
                    auto_increment_list = i['Table_columns'][0][
                        'Auto Increment']
                    original_table_col_list = i['Table_columns'][0]['Name']
                    nullable_original_list = i['Table_columns'][0]['Nullable']
            # Columns that must receive a value: NOT NULL and not
            # auto-incremented.
            null_check_list = []
            for z in range(0, len(original_table_col_list)):
                if (nullable_original_list[z] == "No"
                        and auto_increment_list[z] != "Yes"):
                    null_check_list.append(original_table_col_list[z])
            is_null_error = False
            for x in range(0, len(table_columns_values_list)):
                # NOTE(review): tests membership of "null" in the whole
                # values list regardless of position x — looks like it was
                # meant to check table_columns_values_list[x]; confirm.
                if (nullable_original_list[x] == "Yes"
                        and "null" in table_columns_values_list):
                    is_null_error = True
            if (not (is_null_error)):
                is_this_table_flag = False
                for k, v in f1.items():
                    if (k == "Tables"):
                        for t in v:
                            for k1, v1 in t.items():
                                if (k1 == "Table_name" and v1 == table_name):
                                    # Flag trick: relies on dict insertion
                                    # order so the NEXT item is this table's
                                    # "Table_columns" rows list.
                                    is_this_table_flag = True
                                    continue
                                if (is_this_table_flag):
                                    # reset flag once entered
                                    is_this_table_flag = False
                                    # change default values first i.e first row
                                    if (len(v1) == 1
                                            and "defnull" in v1[0].values()):
                                        if (len(v1[0]) == len(
                                                table_columns_values_list)):
                                            i = 0
                                            # NOTE: rebinds v1 to each row
                                            # dict while iterating the list.
                                            for v1 in v1:
                                                for k2, v2 in v1.items():
                                                    if (i < len(
                                                            table_columns_values_list
                                                    )):
                                                        v1[k2] = table_columns_values_list[
                                                            i].capitalize()
                                                        i += 1
                                    else:
                                        # Append a copy of the last row and
                                        # overwrite it column by column in
                                        # declaration order.
                                        temp_obj = v1[len(v1) - 1].copy()
                                        v1.append(temp_obj)
                                        i = 0
                                        for k2, v2 in v1[len(v1) - 1].items():
                                            if (i < len(
                                                    table_columns_values_list)
                                                ):
                                                v1[len(v1) - 1][
                                                    k2] = table_columns_values_list[
                                                        i].capitalize()
                                                i += 1
                self.fileObj.filewriter(filename, json.dumps(f1))
            else:
                print("Null values not allowed for the columns: " +
                      str(null_check_list) + " in Table: " +
                      table_name.capitalize())
                print("Please re-enter your query.")
        elif len(re.findall(r'\((.*?)\)', query.lower())) == 2:
            # insert values in specific columns
            print("Inserted rows in Table")
            f1 = json.loads(self.fileObj.filereader(filename))
            f2 = json.loads(self.fileObj.filereader(dtname))
            is_this_table_flag = False
            # Pull this table's column metadata out of the data dictionary.
            auto_increment_list = []
            original_table_col_list = []
            for i in f2['Tables']:
                if (i['Table_name'] == table_name):
                    auto_increment_list = i['Table_columns'][0][
                        'Auto Increment']
                    original_table_col_list = i['Table_columns'][0]['Name']
                    nullable_original_list = i['Table_columns'][0]['Nullable']
            # Columns that must receive a value: NOT NULL and not
            # auto-incremented.
            null_check_list = []
            for z in range(0, len(original_table_col_list)):
                if (nullable_original_list[z] == "No"
                        and auto_increment_list[z] != "Yes"):
                    null_check_list.append(original_table_col_list[z])
            # Every mandatory column must appear in the column list.
            is_null_error = False
            for y in null_check_list:
                if y not in table_columns_headers_list:
                    is_null_error = True
            if (not (is_null_error)):
                for k, v in f1.items():
                    if (k == "Tables"):
                        for t in v:
                            for k1, v1 in t.items():
                                if (k1 == "Table_name" and v1 == table_name):
                                    # Flag trick: next dict item is the rows
                                    # list (depends on insertion order).
                                    is_this_table_flag = True
                                    continue
                                if (is_this_table_flag):
                                    # reset flag once entered
                                    is_this_table_flag = False
                                    # change default values first i.e first row
                                    if (len(v1) == 1
                                            and "defnull" in v1[0].values()):
                                        i = 0
                                        # NOTE: rebinds v1 to each row dict
                                        # while iterating the list.
                                        for v1 in v1:
                                            for k2, v2 in v1.items():
                                                if (i < len(
                                                        table_columns_values_list
                                                )):
                                                    if (k2 ==
                                                            table_columns_headers_list[
                                                                i]):
                                                        v1[k2] = table_columns_values_list[
                                                            i].capitalize()
                                                        i += 1
                                                        continue
                                                # NOTE(review): compares the
                                                # string "1", but the auto-
                                                # increment default is the
                                                # JSON number 1 — confirm
                                                # which type is stored.
                                                if (v2 == "1"):
                                                    continue
                                                else:
                                                    v1[k2] = "null"
                                    else:
                                        # Append a copy of the last row, then
                                        # fill named columns with values,
                                        # bump auto-increment columns, and
                                        # null out the rest.
                                        temp_obj = v1[len(v1) - 1].copy()
                                        v1.append(temp_obj)
                                        i = 0
                                        j = 0
                                        for k2, v2 in v1[len(v1) - 1].items():
                                            if (i < len(
                                                    table_columns_headers_list)
                                                ):
                                                if (k2 ==
                                                        table_columns_headers_list[
                                                            i]):
                                                    v1[len(v1) - 1][
                                                        k2] = table_columns_values_list[
                                                            i].capitalize()
                                                    i += 1
                                                    continue
                                            if (k2 == original_table_col_list[j]
                                                    and auto_increment_list[j]
                                                    == "Yes"):
                                                v1[len(v1) - 1][k2] = int(
                                                    v1[len(v1) - 2][k2]) + 1
                                                j += 1
                                            else:
                                                v1[len(v1) - 1][k2] = "null"
                                                j += 1
                self.fileObj.filewriter(filename, json.dumps(f1))
            else:
                print("Null values not allowed for the columns: " +
                      str(null_check_list) + " in Table: " +
                      table_name.capitalize())
                print("Please re-enter your query.")
        else:
            print("ERROR IN INSERT QUERY!!!")
            print("Please re-enter your query.")
        return status
def parse_query(self, username, dbname, query, logger, fname=None):
    """Dispatch a user query to the matching handler.

    Checks the user's permissions for the query's first word, then routes
    to select/delete/drop/create/insert/update/truncate/show or one of the
    three ``export ...`` commands.  Handlers that return a truthy status
    (copy/transaction mode) cause an early return; otherwise control goes
    back to the login prompt via ``self.login_status``.

    username: user issuing the query.
    dbname:   database name prefix for the storage files.
    query:    raw query text (lower-cased here before dispatch).
    logger:   logger used for audit/info/error messages.
    fname:    None for live execution; non-None selects copy-file mode,
              passed through to the handlers.
    """
    logger.info("Query sent by the user {}, is {}".format(username, query))
    start_time = time.time()
    query = query.lower()
    words = query.split(' ')
    check_permissions = self.check_permissions(username)
    if words[0] in check_permissions:
        if words[0].lower() == 'select':
            #select parsing
            try:
                self.parse_select(username, dbname, query, logger, fname,
                                  start_time)
            except Exception:
                print(
                    "Error in your Select query!!! Please check syntax!!")
                logger.error(
                    "Error in your Select query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'delete':
            #delete parsing
            try:
                self.parse_delete(username, dbname, query, logger, fname,
                                  start_time)
            except Exception:
                print(
                    "Error in your Delete query!!! Please check syntax!!")
                logger.error(
                    "Error in your Delete query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'drop':
            #drop table
            try:
                self.parse_drop(username, dbname, query, logger, fname,
                                start_time)
            except Exception:
                print("Error in your drop query!!! Please check syntax!!")
                logger.error(
                    "Error in your drop query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'create':
            crtObj = CreatQuery()
            try:
                status = crtObj.create_table(username, dbname, query,
                                             logger, fname)
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your Create query!!! Please check syntax!!")
                # Fixed copy-paste bug: this previously logged the *drop*
                # error message for a failed create.
                logger.error(
                    "Error in your Create query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'insert':
            insertObj = InsertQuery()
            try:
                status = insertObj.insert_row(username, dbname, query,
                                              logger, fname)
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your Insert query!!! Please check syntax!!")
                logger.error(
                    "Error in your Insert query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'update':
            updateObj = Update()
            try:
                status = updateObj.update_row(username, dbname, query,
                                              logger, fname)
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your update query!!! Please check syntax!!")
                logger.error(
                    "Error in your update query!!! Please check syntax!!")
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'truncate':
            truncateObj = Truncate()
            try:
                status = truncateObj.truncate_table(
                    username, dbname, query, logger, fname)
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your truncate query!!! Please check syntax!!"
                )
                logger.error(
                    "Error in your truncate query!!! Please check syntax!!"
                )
                self.login_status(username, dbname, logger, start_time)
        elif words[0].lower() == 'show':
            try:
                displayObj = Display()
                fileopobj = FileOps()
                f1 = fileopobj.filereader(dbname + "_Tables.txt")
                usertable_dict_obj = json.loads(f1)
                status = displayObj.print_tables(usertable_dict_obj)
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your query!!! Please check syntax!! Show Tables;"
                )
                logger.error(
                    "Error in your query!!! Please check syntax!! Show Tables;"
                )
                self.login_status(username, dbname, logger, start_time)
        # len(words) guards added: a bare "export" used to raise an
        # uncaught IndexError on words[1]/words[2].
        elif len(words) > 2 and (words[0].lower() == 'export'
                                 and words[1].lower() == 'data') and (
                                     words[2].lower() == 'dictionary'
                                     or words[2].lower() == 'dictionary;'):
            try:
                displayObj = Display()
                fileopobj = FileOps()
                f1 = fileopobj.filereader(dbname + "_Tables_Datatypes.txt")
                usertable_datatype_dict_obj = json.loads(f1)
                status = displayObj.print_datadictionary(
                    "DataDictionary.txt", usertable_datatype_dict_obj)
                print(
                    "Data Dictionary exported. Check your output folder.")
                logger.info(
                    "Data Dictionary exported. Check your output folder.")
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your query!!! Please check syntax!! export data dictionary;"
                )
                logger.error(
                    "Error in your query!!! Please check syntax!! export data dictionary;"
                )
                self.login_status(username, dbname, logger, start_time)
        elif len(words) > 1 and (words[0].lower() == 'export') and (
                words[1].lower() == 'erd' or words[1].lower() == 'erd;'):
            try:
                displayObj = Display()
                fileopobj = FileOps()
                f1 = fileopobj.filereader(dbname + "_Tables_Datatypes.txt")
                usertable_datatype_dict_obj = json.loads(f1)
                status = displayObj.print_relationships(
                    "ERD.txt", usertable_datatype_dict_obj)
                print("ERD exported. Check your output folder.")
                logger.info("ERD exported. Check your output folder.")
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your query!!! Please check syntax!! export erd;"
                )
                logger.error(
                    "Error in your query!!! Please check syntax!! export erd;"
                )
                self.login_status(username, dbname, logger, start_time)
        elif len(words) > 2 and (words[0].lower() == 'export'
                                 and words[1].lower() == 'sql') and (
                                     words[2].lower() == 'dump'
                                     or words[2].lower() == 'dump;'):
            try:
                sqldumpObj = Export_SQLDUMP()
                status = sqldumpObj.export_sql_dump(dbname, query)
                print("SQL Dump exported. Check your output folder.")
                logger.info("SQL Dump exported. Check your output folder.")
                if status:
                    return
                else:
                    self.login_status(username, dbname, logger, start_time)
            except Exception:
                print(
                    "Error in your query!!! Please check syntax!! export sql dump;"
                )
                logger.error(
                    "Error in your query!!! Please check syntax!! export sql dump;"
                )
                self.login_status(username, dbname, logger, start_time)
        else:
            print("Invalid query!!! Please check syntax!!")
            logger.error("Invalid query!!! Please check syntax!!")
            self.login_status(username, dbname, logger, start_time)
    else:
        print("no permissions granted")
        self.login_status(username, dbname, logger, start_time)