def __init__(self):
    """Wire up the helper objects and open the default DB connection."""
    # Config access plus dump/restore/download collaborators.
    self.HandleConfig = HandleConfig()
    self.Download = Download()
    self.MysqlDump = MysqlDump()
    self.MysqlRestore = MysqlRestore()
    # Shared connection to the currently configured server.
    self.ConnDB = ConnDB()
    self.conn = self.ConnDB.conndb()
def __init__(self):
    """Create helper objects and load paths/names from config.ini."""
    config = HandleConfig()
    self.HandleConfig = config
    self.ConnDB = ConnDB()
    self.MysqlDump = MysqlDump()
    # Clean-up SQL reference file and the importer's display name.
    self.cleansql = config.handle_config('g', 'referencefile', 'cleanexcel')
    self.nickname = config.handle_config('g', 'excelimporter', 'nickname')
def __init__(self):
    """Read the active server's MySQL connection settings from config.ini."""
    self.HandleConfig = HandleConfig()
    # Name of the currently active server section.
    self.server = self.HandleConfig.handle_config('g', 'global', 'server')
    get = self.HandleConfig.handle_config
    self.host = get('g', self.server, 'host')
    self.user = get('g', self.server, 'user')
    self.passwd = get('g', self.server, 'password')
    self.port = int(get('g', self.server, 'port'))
class AddComment:
    """Compose a JIRA comment for the current work and copy it to the clipboard."""

    def __init__(self):
        self.HandleConfig = HandleConfig()

    def main(self, currentwork):
        """Ask for a role and person, build the matching comment, copy via pyperclip.

        Returns None (and copies nothing) when the dialog is cancelled.
        """
        worktype = self.HandleConfig.handle_config('g', currentwork, 'worktype')
        database = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        jiradb = self.HandleConfig.handle_config('g', currentwork, 'jiraname')
        # NOTE(review): read with the platform default encoding — confirm
        # persons.txt is ASCII/ANSI before pinning an explicit encoding.
        with open(r'reference_files\persons.txt', "r") as f:
            personlist = f.readlines()
        layout = [
            [sg.Text('Role:')],
            [sg.Radio('Importer', 'R0', key='I', default=True),
             sg.Radio('Tester on aws', 'R0', key='T0'),
             sg.Radio('Tester on production', 'R0', key='T1')],
            [sg.Text('Person:')],
            [sg.Combo(personlist, key='P', default_value='Kun Li')],
            [sg.Submit(), sg.Cancel()],
        ]
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return
        # Fix: final `else` ensures `role` is always bound; the previous
        # `elif values['T1']` left it undefined if no radio value matched.
        if values['I']:
            role = "Importer"
        elif values['T0']:
            role = "Tester on aws"
        else:
            role = "Tester on production"
        person = values['P'].strip()
        if role == "Importer":
            if person == "Kun Li":
                comment = 'Hi Kun Li,\n\n' \
                    'The attached file [^{0}.zip] contains all the scripts for this {1}.\n' \
                    'You can apply the scripts to /{2}/.\nData Import Tag: general.\n\n' \
                    'Thanks.\nXiaobo'.format(jiradb, worktype, database)
            else:
                comment = "Hi {0},\n\n" \
                    "Can you please test this {1}?" \
                    "\nhttps://neonuat.com:8443/np/clients/{2}/login.jsp\n\nThanks.\nXiaobo".format(person, worktype, database)
        elif role == "Tester on aws":
            comment = "Hi {0},\n\nAll of the issues have been fixed and test passed" \
                "\nPlease upload the scripts.".format(person)
        else:
            comment = "Test passed on production!"
        pyperclip.copy(comment)
def __init__(self):
    """Resolve application paths from config.ini and open the default DB connection."""
    # Directory that contains the running script/executable.
    self.realpath = os.path.dirname(os.path.realpath(sys.argv[0]))
    self.HandleConfig = HandleConfig()
    self.ConnDB = ConnDB()
    self.conn = self.ConnDB.conndb()
    cfg = self.HandleConfig.handle_config
    # Reference files live next to the executable; directories come from config.
    self.img = self.realpath + "\\" + cfg("g", "referencefile", "img")
    self.cleansql = self.realpath + "\\" + cfg("g", "referencefile", "cleanexcel")
    self.default_dir = cfg("g", "global", "default_excels_dictionary")
def __init__(self, values):
    """Keep the UI values and open a MySQL connection built from them."""
    self.values = values
    self.ConnDB = ConnDB(values)
    self.HandleConfig = HandleConfig()
    self.conn_db = self.ConnDB.conndb(
        host=values['host'],
        user=values['user'],
        passwd=values['passwd'],
        port=int(values['port']),
        db=values['dbname'],
        charset='utf8',
    )
def __init__(self, values):
    """Open a MySQL connection from the UI values and snapshot the session sql_mode."""
    self.values = values
    self.HandleConfig = HandleConfig()
    self.ConnDB = ConnDB(values)
    self.conn_db = self.ConnDB.conndb(
        host=values['host'],
        port=int(values['port']),
        user=values['user'],
        passwd=values['passwd'],
        db=values['dbname'],
        charset='utf8',
    )
    # Remember the server session's sql_mode as reported by MySQL.
    rows = self.ConnDB.exec(self.conn_db, 'SELECT @@SESSION.sql_mode').fetchall()
    self.sql_mode = rows[0][0]
class RefreshGit:
    """Pull the latest changes for the configured git repository."""

    def __init__(self):
        self.HandleConfig = HandleConfig()

    def main(self):
        """Run `git pull` in the configured repo and show the output in a popup."""
        repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        pull_output = git.Repo(repo_path).git.pull()
        sg.Popup('{0}'.format(pull_output))
class GenerateCMD:
    """Dialog that builds a server-side dump/restore shell command and copies it."""

    def __init__(self):
        self.HandleConfig = HandleConfig()

    def main(self, currentwork):
        """Offer command choices for the current work's database; copy the result.

        Returns (None, None, None, None) when the dialog is cancelled.
        """
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        k0 = 'dbdump'
        k1 = 'dbrestore'
        k2 = 'dataImportRestore'
        k3 = 'dataImportRunScript'
        k4 = 'dbremove'
        # Candidate target database names derived from the work's db.
        db0 = '{}_bak'.format(dbname)
        db1 = '{}_test'.format(dbname)
        db2 = '{}_test1'.format(dbname)
        db3 = '{}_test2'.format(dbname)
        layout = [
            [sg.Radio(k0, 'R0', key=k0), sg.Text(' New Database:')],
            [sg.Radio(k1, 'R0', key=k1), sg.Text(' '),
             sg.Combo([db0, db1, db2, db3], key='db_new', size=(30, 5))],
            [sg.Radio(k2, 'R0', key=k2, default=True), sg.Text(' '),
             sg.Checkbox('ssh [email protected]', key='ssh')],
            [sg.Radio(k3, 'R0', key=k3)],
            [sg.Radio(k4, 'R0', key=k4)],
            [sg.Submit(tooltip='Click to submit this form'), sg.Cancel()],
        ]
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return (None, None, None, None)
        # Exactly one radio is set; the last truthy key wins, as before.
        command = None
        for key in (k0, k1, k2, k3, k4):
            if values[key]:
                command = key
        cmd = '/home/neon/leiwu/bin/' + command + '.sh ' + dbname + ' ' + values['db_new']
        if values[k3]:
            # The run-script command takes its arguments in reverse order.
            cmd = '/home/neon/leiwu/bin/' + command + '.sh ' + values['db_new'] + ' ' + dbname
        if values['ssh']:
            cmd = 'ssh [email protected]\n\n' + cmd
        pyperclip.copy(cmd.strip())
class Setting:
    """Edit/load config.ini and switch the active database server."""

    def __init__(self):
        self.realpath = os.path.split(os.path.realpath(sys.argv[0]))[0]
        self.configini = self.realpath + '\\config.ini'
        self.HandleConfig = HandleConfig()

    def edit_config(self):
        """Open config.ini with the OS-associated program."""
        os.popen(self.configini)

    def load_config(self):
        """Pick an existing config file and copy it over config.ini."""
        layout = [
            [sg.InputText('', key='e'), sg.FileBrowse()],
            [sg.Submit(tooltip='Click to submit this form'), sg.Cancel()],
        ]
        window = sg.Window(title='', layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return
        shutil.copyfile(values['e'], self.configini)

    def swich_server(self, event):
        """Toggle between 'localhost' and 'cd188'; update the navicat path too."""
        new_server = 'cd188' if event == 'localhost' else 'localhost'
        self.HandleConfig.handle_config('s', 'global', 'server', new_server)
        # The navicat script path follows the chosen server.
        navicat_script_path = self.HandleConfig.handle_config(
            'g', new_server, 'navicat_script_path')
        self.HandleConfig.handle_config(
            's', 'defaultpath', 'navicat_script_path', navicat_script_path)
def __init__(self, values):
    """Select the destination-DB adapter based on values['dbtype'].

    Re-exports the chosen adapter's ConnDB/conn_db handles; only the MySQL
    adapter additionally exposes a session sql_mode.
    """
    self.values = values
    self.HandleConfig = HandleConfig()
    self.FromExcels = FromExcels(values)
    adapters = {
        'MySQL': ToMySQL,
        'Oracle': ToOracle,
        'SQL Server': ToSqlserver,
    }
    dbtype = values['dbtype']
    if dbtype in adapters:
        self.ToDB = adapters[dbtype](values)
        self.ConnDB = self.ToDB.ConnDB
        self.conn_db = self.ToDB.conn_db
        if dbtype == 'MySQL':
            self.sql_mode = self.ToDB.sql_mode
class DbToDownload:
    """Queue the current work's instance in the db_to_download list on the AWS host."""

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, currentwork):
        """Insert the instance into db_list_to_download, handling duplicates.

        A duplicate row (IntegrityError) means the instance is already queued:
        - still pending (importFlag is null): nothing more to do;
        - downloaded today per db_download_log: tell the user to go dump it;
        - otherwise re-arm the existing row for another download.
        """
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        sql = "insert into db_to_download.db_list_to_download(instanceName,department) select '{0}','data' from dual".format(dbname)
        conn_db = self.ConnDB.conndb(server='awshost')
        try:
            try:
                self.ConnDB.exec(conn_db, sql)
                sg.Popup('\n DB to download Complete! \n', title=currentwork)
            except pymysql.err.IntegrityError:
                # Row already exists: inspect its current state.
                sql = "select 1 from db_to_download.db_list_to_download where instanceName = '{0}' and importFlag is null".format(dbname)
                ret = self.ConnDB.exec(conn_db, sql)
                result = ret.fetchall()
                if result:
                    # Fix: 'Wainning' typo in the user-facing message.
                    sg.Popup('\n DB to download Complete! \nWarning: This instance may db_to_download by others.', title=currentwork)
                    return
                else:
                    sql = "select 1 from db_to_download.db_download_log where instanceName = '{0}' and DATE(completeTime) = '{1}'".format(dbname, date.today())
                    ret = self.ConnDB.exec(conn_db, sql)
                    result = ret.fetchall()
                    if len(result) == 0:
                        # Re-arm the existing row so it is downloaded again.
                        sql = "update db_to_download.db_list_to_download set deleteTime = now(), downloadTime = now(), importFlag = null, department = 'data' where instanceName = '{0}'".format(dbname)
                        self.ConnDB.exec(conn_db, sql)
                        sg.Popup('\n DB to download Complete! \n', title=currentwork)
                    else:
                        sg.Popup('\n {0} downloaded, you can go to mysqldump \nWarning: This instance may db_to_download by others.'.format(dbname), title=currentwork)
                        return
        except Exception:
            # Fix: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; still reports any DB error to the user.
            sg.popup_error('\n DB to download Error! \n', title=currentwork)
        finally:
            conn_db.close()
class Setting:
    """Persist UI preference changes (db type, language, data source) to config.ini."""

    def __init__(self):
        self.realpath = os.path.split(os.path.realpath(sys.argv[0]))[0]
        self.configini = self.realpath + '\\config.ini'
        self.HandleConfig = HandleConfig()

    def db_type(self, dbtype):
        """Remember the selected database type."""
        self.HandleConfig.handle_config('s', 'dbinfo', 'dbtype', dbtype)

    def switch_langage(self, language):
        """Remember the selected UI language."""
        self.HandleConfig.handle_config('s', 'general', 'language', language)

    def data_source(self, source):
        """Store the data-source mode: 'D' for a directory choice, 'F' otherwise."""
        mode = 'D' if source in ('选择目录', 'Directory') else 'F'
        self.HandleConfig.handle_config('s', 'general', 'source', mode)
class File:
    """Open work/git folders and maintain the per-work navicat script file."""

    def __init__(self):
        self.HandleConfig = HandleConfig()

    def open_work_dir(self, currentwork):
        """Open the current work's JIRA folder."""
        os.startfile(self.HandleConfig.handle_config('g', currentwork, 'jirapath'))

    def open_git_dir(self):
        """Open the git repository folder."""
        os.startfile(self.HandleConfig.handle_config('g', 'defaultpath', 'git_repo_path'))

    def open_tmp(self, currentwork):
        """Open the work's script\\tmp.txt with its associated program."""
        jirapath = self.HandleConfig.handle_config('g', currentwork, 'jirapath')
        os.popen(jirapath + 'script\\tmp.txt')

    def append_templete_scripts(self, currentwork):
        """Append the four template SQL scripts to the work's navicat script."""
        import shutil
        cfg = self.HandleConfig.handle_config
        git_dir = cfg('g', 'defaultpath', 'git_repo_path')
        navicat_script_path = cfg('g', 'defaultpath', 'navicat_script_path')
        dbname = cfg('g', currentwork, 'dbname')
        navicat_script = navicat_script_path + dbname + '\\{}.sql'.format(dbname)
        gittemplatepath = git_dir + 'templates\\'
        scripts = [
            gittemplatepath + name
            for name in (
                'basic_account_info_extra_data_script.sql',
                'transaction_script.sql',
                'gais.sql',
                'custom_field.sql',
            )
        ]
        # Separate each appended script with a blank line.
        with open(navicat_script, 'a', encoding='utf8') as fa:
            for script in scripts:
                with open(script, 'r', encoding='utf8') as fr:
                    fa.writelines(['\n\n'])
                    shutil.copyfileobj(fr, fa)
class MysqlRestore:
    """Restore/run-script driver for the current work's MySQL database.

    `main` has three modes selected by `advanced`:
      0 -- unpack the downloaded gzip backup and restore it plus the
           configuration scripts (used by ConfigScript);
      1 -- interactive restore (local, or dump-and-upload to AWS hotfix);
      2 -- interactive "run script" mode: pick scripts, concatenate them into
           tmp.txt, apply, then show smoke-test results / upload.
    """

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.MysqlDump = MysqlDump()
        self.ConnDB = ConnDB()
        self.conn = self.ConnDB.conndb()

    def main(self, currentwork, advanced=1):
        """Restore or run scripts against the work's database; see class docstring.

        Returns None normally, or 0 when applying a SQL file fails.
        """
        # Paths and flags for the current work item, all from config.ini.
        jirapath = self.HandleConfig.handle_config('g', currentwork, 'jirapath')
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        merge = self.HandleConfig.handle_config('g', currentwork, 'merge')
        navicat_script_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'navicat_script_path')
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        gittemplatepath = git_repo_path + 'templates\\'
        scriptspath = jirapath + 'script\\'
        scripts_bak_path = jirapath + 'db_backup\\'
        # tmp.txt accumulates the concatenation of all selected script files.
        tmpscript = scriptspath + 'tmp.txt'
        script = navicat_script_path + dbname + '\\{}.sql'.format(dbname)
        backupsql = scriptspath + '{0}_after.sql'.format(dbname)
        excel_sql_file = scriptspath + '{0}.sql'.format(dbname)
        # truncate tmp.txt first
        with open(tmpscript, 'w', encoding='utf8') as (fw):
            fw.truncate()
        files = []
        drop = 1
        backup_files = [backupsql]
        msg = ''
        if not advanced:
            # restore database in config
            # Unpack the gzip backup next to it, then restore backup + excel SQL
            # followed by configuration/function/smoke-test scripts.
            backupgz = scripts_bak_path + '{}_backup.sql.gz'.format(dbname)
            backupsql = scripts_bak_path + '{0}_backup.sql'.format(dbname)
            with gzip.open(backupgz, 'rb') as (f_in):
                with open(backupsql, 'wb') as (f_out):
                    shutil.copyfileobj(f_in, f_out)
            backup_files = [backupsql, scriptspath + '{0}.sql'.format(dbname)]
            files = [
                scriptspath + 'configration.sql',
                gitscriptpath + 'functionAndProcedure.sql',
                gitscriptpath + 'smokeTestV2.sql'
            ]
            # get b4_size
            # Record the compressed backup size (MB) back into config.ini.
            b4_size = os.path.getsize(backupgz) / 1024 / 1024
            b4_size = '{:.0f}'.format(b4_size)
            self.HandleConfig.handle_config('s', currentwork, 'b4_size', b4_size)
        elif advanced == 1:
            # restore database
            msg = 'Restore Database Complete!'
            layout = [[sg.Button('RestoreToLocalHost')],
                      [sg.Button('RestoreToAWSHotfix')]]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
            if event is None:
                return
            elif event == 'RestoreToLocalHost':
                # Fall through: the generic restore below handles the local case.
                pass
            elif event == 'RestoreToAWSHotfix':
                # Dump to .sql.gz, then push it to the AWS host via WinSCP.
                self.MysqlDump.main(currentwork, gz=True)
                winscppath = self.HandleConfig.handle_config(
                    'g', 'defaultpath', 'winscppath')
                db_backup = self.HandleConfig.handle_config(
                    'g', currentwork,
                    'jirapath') + 'db_backup\\{0}.sql.gz'.format(dbname)
                cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/fulldata/" "exit"'.format(
                    winscppath, db_backup)
                os.system(cmd)
                sg.popup('Restore To AWS Hotfix Complete')
                return
        elif advanced == 2:
            # restore database in run script
            msg = 'Runscript Database Complete!'
            # Checkbox keys double as the script-file paths they select.
            layout = [
                [
                    sg.Checkbox('AllScriptWithoutSmokeTest',
                                key='AllScriptWithoutSmokeTest')
                ],
                [sg.Checkbox('ExcelSqlFile', key=excel_sql_file)],
                [sg.Checkbox('TemplateScript', key=script)],
                [sg.Checkbox('TemplateSmokeTest', key='TemplateSmokeTest')],
                [
                    sg.Checkbox('TemplateScriptAfter',
                                key=navicat_script_path + dbname +
                                '\\{}_after_template.sql'.format(dbname))
                ],
                [
                    sg.Checkbox('AccountMerge',
                                key=gitscriptpath + 'accountMerge.sql')
                ],
                [
                    sg.Checkbox('AccountMergeAfter',
                                key=navicat_script_path + dbname +
                                '\\{}_after_merge.sql'.format(dbname))
                ],
                [
                    sg.Checkbox('dataClean',
                                key=gitscriptpath + 'fullDataClean.sql')
                ],
                [
                    sg.Checkbox('SmokeTest',
                                key=gitscriptpath + 'smokeTestV2.sql')
                ],
                [
                    # NOTE(review): .format(dbname) is a no-op here — the label
                    # has no placeholder; kept as-is.
                    sg.Checkbox(
                        'All Script and Restore To AWS Hotfix'.format(dbname),
                        key='AllScript',
                        default=True,
                        text_color='blue')
                ],
                [
                    sg.Checkbox('Drop database if exists {}'.format(dbname),
                                key='drop',
                                default=True,
                                text_color='blue')
                ],
                [sg.Submit(), sg.Cancel()]
            ]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
            if event in (None, 'Cancel'):
                return
            if values['drop']:
                drop = 1
            else:
                drop = 0
            backup_files = []
            if values['AllScript'] or values['AllScriptWithoutSmokeTest']:
                # The full pipeline, in fixed order; prune merge/smoke pieces
                # according to the work's merge flag and the chosen variant.
                files = [
                    script,
                    navicat_script_path + dbname +
                    '\\{}_after_template.sql'.format(dbname),
                    gitscriptpath + 'accountMerge.sql',
                    navicat_script_path + dbname +
                    '\\{}_after_merge.sql'.format(dbname),
                    gitscriptpath + 'fullDataClean.sql',
                    gitscriptpath + 'smokeTestV2.sql',
                    gitscriptpath + 'clear.sql'
                ]
                if merge == 'False':
                    files.remove(gitscriptpath + 'accountMerge.sql')
                if values['AllScriptWithoutSmokeTest']:
                    files.remove(gitscriptpath + 'smokeTestV2.sql')
                # "All" variants cannot be combined with individual selections.
                for k, v in values.items():
                    if v:
                        if k != 'AllScript' and k != 'AllScriptWithoutSmokeTest' and k != 'drop':
                            sg.PopupError('Confict Selection!')
                            return
            else:
                backup_files = []
                # Individual selections: each truthy checkbox key is a file path,
                # except the special TemplateSmokeTest/drop keys.
                for k, v in values.items():
                    if v:
                        if k == 'TemplateSmokeTest':
                            files = files + [
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_account.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_custom_field.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_gais.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_transaction.sql'
                            ]
                        elif k == 'drop':
                            continue
                        else:
                            files.append(k)
        # Concatenate every selected script into tmp.txt; the restore below
        # applies the backup files first, then the concatenated scripts.
        sqlfiles = backup_files
        if len(files) > 0:
            for f in files:
                with open(f, 'r', encoding='utf8') as (fa):
                    with open(tmpscript, 'a+', encoding='utf8') as (fw):
                        fw.writelines(['\n'])
                        shutil.copyfileobj(fa, fw)
            sqlfiles = sqlfiles + [tmpscript]
        #sqlfiles = sqlfiles + files
        if drop:
            sql = 'drop database if exists `{}`;\ncreate database `{}`;'.format(
                dbname, dbname)
            self.ConnDB.exec(self.conn, sql)
            print('\n\n{}'.format(sql))
        for sqlfile in sqlfiles:
            if not os.path.exists(sqlfile):
                continue
            ret = self.ConnDB.cmd(dbname, 'mysql', sqlfile)
            if ret == 0:
                continue
            else:
                sg.popup_error('Error!')
                return 0
        if advanced == 2 and values['AllScript']:
            # Show smoke results (if any); on OK, dump and upload to AWS hotfix.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT * FROM smoke;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'errorMsg', 'Error Count After', 'Error Count Pre',
                            'Pass Flag'
                        ],
                                 col_widths=[60, 15, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                    [
                        sg.Text(
                            'Press OK to continue to dump database and upload to winscp.'
                        )
                    ],
                    [sg.OK(), sg.Cancel()]
                ]
                window = sg.Window(title=currentwork, layout=layout)
                event, values = window.read()
                window.close()
                if event in ('Cancel', None):
                    return
            self.MysqlDump.main(currentwork, gz=True)
            winscppath = self.HandleConfig.handle_config(
                'g', 'defaultpath', 'winscppath')
            db_backup = self.HandleConfig.handle_config(
                'g', currentwork,
                'jirapath') + 'db_backup\\{0}.sql.gz'.format(dbname)
            cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/fulldata/" "exit"'.format(
                winscppath, db_backup)
            os.system(cmd)
        elif advanced == 2 and values['TemplateSmokeTest']:
            # Show failed template smoke tests, with a copyable follow-up query.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT templateTableName, errorMsg, errorCount, passFlag FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'templateTableName', 'errorMsg', 'Error Count',
                            'Pass Flag'
                        ],
                                 col_widths=[30, 100, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                    [
                        sg.Text(
                            'SELECT * FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
                        ),
                        sg.Button('Copy')
                    ]
                ]
                window1 = sg.Window(title=currentwork, layout=layout)
                event1, values1 = window1.read()
                window1.close()
                if event1 is None:
                    return
                if event1 == 'Copy':
                    pyperclip.copy(
                        'SELECT * FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
                    )
        elif advanced == 2 and values[gitscriptpath + 'smokeTestV2.sql']:
            # Plain smoke-test run: just display the results table.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT * FROM smoke;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'errorMsg', 'Error Count After', 'Error Count Pre',
                            'Pass Flag'
                        ],
                                 col_widths=[100, 15, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                ]
                window1 = sg.Window(title=currentwork, layout=layout)
                event1, values = window1.read()
                window1.close()
        if advanced:
            sg.Popup(msg, title=currentwork)
def __init__(self):
    """Create the DB-connection helper and the config reader."""
    self.ConnDB = ConnDB()
    self.HandleConfig = HandleConfig()
def __init__(self):
    """Locate config.ini next to the running script and load the config helper."""
    script_dir = os.path.split(os.path.realpath(sys.argv[0]))[0]
    self.realpath = script_dir
    self.configini = script_dir + '\\config.ini'
    self.HandleConfig = HandleConfig()
class ConfigScript:
    """Edit the work's configration.sql through a generated form.

    Parses the '# Custom Conigurations Start/End' section of the local
    configration.sql into a dialog (combos for values with options, text
    inputs otherwise), writes the edited section back over a fresh copy of
    the git template, then triggers download/restore/dump for the work.
    """

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.MysqlRestore = MysqlRestore()
        self.MysqlDump = MysqlDump()
        self.Download = Download()
        self.ConnDB = ConnDB()
        self.conn = self.ConnDB.conndb()

    def main(self, currentwork):
        """Show the config form for `currentwork` and apply the chosen settings."""
        jirapath = self.HandleConfig.handle_config('g', currentwork, 'jirapath')
        jiraname = self.HandleConfig.handle_config('g', currentwork, 'jiraname')
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        configration_sql = gitscriptpath + 'configration.sql'
        configration_sql_new = jirapath + 'script\\configration.sql'
        # read local configration.sql
        with open(configration_sql_new, 'r', encoding='utf8') as (fa):
            lines = fa.readlines()
        # Only the custom section between the Start/End markers is editable.
        idx_s = lines.index('# Custom Conigurations Start\n')
        idx_e = lines.index('# Custom Conigurations End\n')
        config_lines = lines[idx_s + 1:idx_e]
        layout = []
        option_dict = defaultdict()
        idx = 0
        frame = []
        title = ''
        titles = defaultdict()
        title_idx = 0
        for line in config_lines:
            if re.match('^#Flag ', line):
                # A '#Flag <name>' line starts a new frame; flush the previous one.
                if frame:
                    lay = [
                        sg.Frame(layout=frame, title=title, title_color='blue')
                    ]
                    layout.append(lay)
                title = line.split('#Flag')[-1].strip()
                frame = []
                # titles is keyed by the running INSERT-line counter so the
                # flag comments can be re-emitted at the right positions later.
                titles[title_idx] = '#Flag ' + title + '\n'
            elif re.match('^INSERT INTO z_newcreate_data_import_config', line,
                          re.IGNORECASE):
                # Each INSERT line looks like
                #   INSERT INTO ...(taskName,taskValue) VALUES('key','value');#('opt1','opt2')...
                # s[2] holds key/value; s[3:] holds dropdown options if present.
                s = line.split('(')
                k = s[2].split(',')[0].replace("'", '').strip()
                d = s[2].split(',')[1].replace("'", '').replace(
                    ')', '').replace(';', '').replace('#', '').strip()
                option_dict[k] = ''
                if len(s) > 3:
                    # dropdown
                    options = s[3:]
                    # Keep the raw option text so it can be written back verbatim.
                    option_dict[k] = '#(' + '('.join(options)
                    for i in range(len(options)):
                        options[i] = options[i].replace("'", '').replace(
                            ')', '').replace('\n', '').replace(',', '')
                    lay = [
                        sg.Text(k),
                        sg.Combo(options, default_value=d, key=k)
                    ]
                else:
                    # oneline text
                    # Repeated 'Legacy Account Custom Field Name' keys are made
                    # unique by appending a counter.
                    if re.match('^Legacy Account Custom Field Name', k,
                                re.IGNORECASE):
                        k = 'Legacy Account Custom Field Name' + str(idx)
                        idx += 1
                        option_dict[k] = ''
                    lay = [sg.Text(k), sg.InputText(d, key=k)]
                frame.append(lay)
                title_idx += 1
        # Flush the final frame.
        if frame:
            lay = [sg.Frame(layout=frame, title=title, title_color='blue')]
            layout.append(lay)
        layout.append(
            [sg.Submit(), sg.Cancel(), sg.Button('Git Configration.sql')])
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return
        # copy configration.sql from git repo
        # relace new config into configration.sql
        shutil.copy(configration_sql, configration_sql_new)
        if event == 'Git Configration.sql':
            # User only wanted the pristine template copied; stop here.
            return
        li = []
        sql_pre = 'INSERT INTO z_newcreate_data_import_config(taskName,taskValue) VALUES('
        i = 0
        print(titles)
        for key, value in values.items():
            key_true = key
            # Strip the uniquifying counter added above before writing back.
            if re.match('^Legacy Account Custom Field Name', key,
                        re.IGNORECASE):
                key_true = 'Legacy Account Custom Field Name'
            if key == 'Work Start Date':
                if value == '':
                    value = str(date.today())
            # Rebuild the INSERT line, re-attaching any saved option comment.
            sql = sql_pre + "'" + key_true + "'," + "'" + value + "');" + option_dict[
                key] + '\n'
            if i in titles.keys():
                li.append(titles[i])
            li.append(sql)
            i += 1
        # Splice the edited custom section into the fresh template copy.
        with open(configration_sql, 'r', encoding='utf8') as (fa):
            lines = fa.readlines()
        idx_s = lines.index('# Custom Conigurations Start\n')
        idx_e = lines.index('# Custom Conigurations End\n')
        lines_new = lines[:idx_s + 1] + li + lines[idx_e:]
        with open(configration_sql_new, 'w', encoding='utf8') as (fw):
            fw.writelines(lines_new)
        # The merge flag for this work follows the de-dupe rule choice.
        merge = 'True'
        if values['Account De-dupe Rule'] == 'No De-dupe':
            merge = 'False'
        self.HandleConfig.handle_config('s', jiraname, 'merge', merge)
        # download database
        if values['Pull Database From AWS'] == 'Yes':
            self.Download.main(currentwork)
        # restore
        self.MysqlRestore.main(currentwork, advanced=0)
        # dump to dbname_after
        self.MysqlDump.main(currentwork, after=1, op='mysqldump')
        sg.Popup('Config Complete!', title=currentwork)
class ViewTable:
    """Show row counts for every imported table of the current work's database."""

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, currentwork):
        """(Re)build z_newcreate_neon_data_stats_report and display it in a dialog.

        The report table is dropped and re-created on every run; the final
        SELECT orders tables by row count, descending.
        """
        database = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        conn = self.ConnDB.conndb(database)
        # One INSERT per table of interest; ConnDB.exec splits on ';'.
        sql = """DROP table if exists z_newcreate_neon_data_stats_report;
create table z_newcreate_neon_data_stats_report(tableName varchar(100),tableRows INT);
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'account',count(1) from account;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'user',count(1) from user;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'donation',count(1) from donation;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'membership_listing',count(1) from membership_listing;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'event_registration',count(1) from event_registration;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'event_attendee',count(1) from event_attendee;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'shopping_cart_items',count(1) from shopping_cart_items;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'payment',count(1) from payment;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'account_custom_data',count(1) from account_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'donation_custom_data',count(1) from donation_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'membership_listing_custom_data',count(1) from membership_listing_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'event_registration_custom_data',count(1) from event_registration_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'event_attendee_custom_data',count(1) from event_attendee_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'user_custom_data',count(1) from user_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'company_custom_data',count(1) from company_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'contact_activity_custom_data',count(1) from contact_activity_custom_data;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'note',count(1) from note;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'address',count(1) from address;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'relation',count(1) from relation;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'company_contact',count(1) from company_contact;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'household_contact',count(1) from household_contact;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'contact_activity',count(1) from contact_activity;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'proposal',count(1) from proposal;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'solicitation',count(1) from solicitation;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'invitation',count(1) from invitation;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'material_tracking',count(1) from material_tracking;
insert into z_newcreate_neon_data_stats_report(tableName,tableRows) select 'soft_credit',count(1) from soft_credit;
select * from z_newcreate_neon_data_stats_report order by tableRows desc"""
        ret = self.ConnDB.exec(conn, sql)
        result = ret.fetchall()
        if len(result) > 0:
            layout = [[
                sg.Table(result, ['tableName', 'tableRows'],
                         col_widths=[25, 10],
                         auto_size_columns=False,
                         justification="left",
                         size=(35, 30))
            ]]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
class ConnDB:
    """MySQL helper: pymysql connections plus mysql/mysqldump shell wrappers.

    Default credentials come from the currently active server section of
    config.ini.
    """

    def __init__(self):
        self.HandleConfig = HandleConfig()
        # Active server section name, then its credentials.
        self.server = self.HandleConfig.handle_config("g", "global", "server")
        self.host = self.HandleConfig.handle_config('g', self.server, 'host')
        self.user = self.HandleConfig.handle_config('g', self.server, 'user')
        self.passwd = self.HandleConfig.handle_config('g', self.server,
                                                      'password')
        self.port = int(
            self.HandleConfig.handle_config('g', self.server, 'port'))

    # return a connection
    def conndb(self, db=None, server=None, charset='utf8'):
        """Return a pymysql connection.

        db      -- optional schema to select on connect.
        server  -- optional config.ini section overriding the default server.
        charset -- connection character set (default 'utf8').
        """
        if server:
            host = self.HandleConfig.handle_config('g', server, 'host')
            user = self.HandleConfig.handle_config('g', server, 'user')
            passwd = self.HandleConfig.handle_config('g', server, 'password')
            port = int(self.HandleConfig.handle_config('g', server, 'port'))
        else:
            host = self.host
            user = self.user
            passwd = self.passwd
            port = self.port
        conn = pymysql.connect(host=host,
                               user=user,
                               passwd=passwd,
                               port=port,
                               charset=charset,
                               database=db)
        return conn

    # execute sql
    def exec(self, conn, sql, kill=True, datalist=None):
        """Execute `sql` (or executemany with `datalist`) on `conn`; return the cursor.

        kill -- when True and the target is the shared 192.168.1.188 server,
                first KILL other sessions on the same schema.
        `sql` is split on ';' and the pieces run one by one; the transaction
        is committed before returning.  `datalist` defaults to None (fix for
        the mutable-default `[]`); truthiness semantics are unchanged.
        """
        cur = conn.cursor()
        host = conn.host
        # kill db process first
        # Fix: honour the `kill` flag — it was previously ignored, so even
        # kill=False calls killed other sessions.
        if kill and host == '192.168.1.188':
            pid = conn.thread_id()
            database = conn.db
            if database:
                database = database.decode('utf8')
            killsql = "SELECT CONCAT('kill ',id) FROM information_schema.`PROCESSLIST` WHERE DB = '{0}' and id <> {1}".format(
                database, pid)
            cur.execute(killsql)
            # Fix: iterate the fetched rows directly; the old manual index
            # desynced from the loop whenever a KILL failed (`continue`
            # skipped the increment) and mutated the list mid-iteration.
            for row in cur.fetchall():
                try:
                    cur.execute(row[0])
                except Exception:
                    # The session may already be gone; keep killing the rest.
                    continue
        if datalist:
            cur.executemany(sql, datalist)
        else:
            for s in sql.split(";"):
                if s != "":
                    cur.execute(s)
        conn.commit()
        cur.close()
        return cur

    # execute cmd
    # return 0 if success else 1
    def cmd(self, dbname, op, sqlfile, tablename=''):
        """Run mysql/mysqldump via the shell for `dbname` and `sqlfile`.

        op -- 'mysql' (apply sqlfile), 'mysqldump' (dump with routines, -R),
              or 'mysqldump-no-r' (dump a single `tablename`, no routines).
        Returns the shell exit status (0 on success).
        """
        cmd_statement = ''
        if op == "mysql":
            cmd_statement = "{0} -u{1} -p{2} -h{3} -P{4} {5} --default-character-set=utf8 < \"{6}\"".format(
                op, self.user, self.passwd, self.host, self.port, dbname,
                sqlfile)
        elif op == "mysqldump":
            cmd_statement = "{0} -u{1} -p{2} -h{3} -P{4} {5} -R > \"{6}\"".format(
                op, self.user, self.passwd, self.host, self.port, dbname,
                sqlfile)
        elif op == "mysqldump-no-r":
            cmd_statement = "mysqldump -u{0} -p{1} -h{2} -P{3} {4} {5} > \"{6}\"".format(
                self.user, self.passwd, self.host, self.port, dbname,
                tablename, sqlfile)
        # NOTE(review): the password appears on the command line (visible in the
        # process list) — consider --defaults-extra-file on shared hosts.
        print('\n\n' + cmd_statement)
        ret = os.system(cmd_statement)
        return ret
class ImportExcel:
    """PySimpleGUI-driven batch importer: loads every Excel/CSV file in a
    chosen directory into one MySQL database, one table per file/sheet."""

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, values):
        """Import all Excel/CSV files described by the GUI form dict `values`
        (file_dir, csv_encoding, host/port/user/passwd/dbname, na_values,
        and the 'redb' re-create-database flag)."""
        # save the recent input
        self.HandleConfig.handle_config("s", "file", "file_dir",
                                        values['file_dir'])
        self.HandleConfig.handle_config("s", "file", "csv_encoding",
                                        values['csv_encoding'])
        self.HandleConfig.handle_config("s", "dbinfo", "host", values['host'])
        self.HandleConfig.handle_config("s", "dbinfo", "port", values['port'])
        self.HandleConfig.handle_config("s", "dbinfo", "user", values['user'])
        self.HandleConfig.handle_config("s", "dbinfo", "passwd",
                                        values['passwd'])
        self.HandleConfig.handle_config("s", "dbinfo", "dbname",
                                        values['dbname'])
        self.HandleConfig.handle_config("s", "file", "na_values",
                                        values['na_values'])
        self.db = values['dbname']
        # NOTE(review): this passes host/port/user/passwd kwargs; the ConnDB
        # defined earlier in this file takes (db, server, charset) — this
        # class presumably pairs with a different ConnDB variant; verify.
        self.conn = self.ConnDB.conndb(host=values['host'],
                                       port=int(values['port']),
                                       user=values['user'],
                                       passwd=values['passwd'],
                                       charset='utf8')
        self.file_dir = values['file_dir']
        na_values = values['na_values'].split(',')
        excelcsvs = self.get_excel()
        if not excelcsvs:
            sg.Popup('No Excel/CSV files!')
            return
        # write log
        log_file = self.file_dir + "\\log.txt"
        if os.path.isfile(log_file):
            os.remove(log_file)
        # create database
        if values['redb']:
            print("Bengin to Re-Create Database")
            sql = "drop database if exists `{0}`;create database `{0}`".format(
                self.db, self.db)
            self.ConnDB.exec(self.conn, sql)
            print('\n\n{}'.format(sql))
        self.conn_db = self.ConnDB.conndb(host=values['host'],
                                          port=int(values['port']),
                                          user=values['user'],
                                          passwd=values['passwd'],
                                          db=self.db,
                                          charset='utf8')
        # remember the session sql_mode so the per-table
        # `set SESSION sql_mode = ""` workarounds below can restore it
        self.sql_mode = self.ConnDB.exec(
            self.conn_db, 'SELECT @@SESSION.sql_mode').fetchall()[0][0]
        longexcelcsvs = defaultdict()   # original name -> shortened table name
        long_num = 0                    # counter used to uniquify cut-off names
        num = 0                         # sheets/files attempted
        num_s = 0                       # sheets imported successfully
        print("\n\nBegin to import...\n")
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                # get csv dataset
                if re.fullmatch(r"^.*?\.csv$", excelcsv, flags=re.IGNORECASE):
                    datasets = defaultdict()
                    csv = self.file_dir + "\\" + excelcsv
                    # Determining the encoding of a CSV file
                    # http://pandaproject.net/docs/determining-the-encoding-of-a-csv-file.html
                    if values['csv_encoding']:
                        csv_encoding = values['csv_encoding']
                    # NOTE(review): the user-chosen encoding above is
                    # unconditionally overwritten here — looks like a
                    # missing `else`; confirm intent.
                    csv_encoding = 'utf8'
                    try:
                        dataset = pd.read_csv(csv,
                                              encoding=csv_encoding,
                                              dtype=str,
                                              na_values=na_values,
                                              keep_default_na=False,
                                              header=0,
                                              engine='c')
                    except UnicodeDecodeError:
                        try:
                            dataset = pd.read_csv(csv,
                                                  encoding='ansi',
                                                  dtype=str,
                                                  na_values=na_values,
                                                  keep_default_na=False,
                                                  header=0,
                                                  engine='c')
                        except UnicodeDecodeError:
                            try:
                                dataset = pd.read_csv(csv,
                                                      encoding='utf-16',
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                            except UnicodeDecodeError:
                                # last resort: sniff the encoding (first line
                                # only for very large files)
                                with open(csv, 'rb') as f:
                                    bytes = f.read()
                                if len(bytes) > 100000:
                                    with open(csv, 'rb') as f:
                                        bytes = f.readline()
                                encode = chardet.detect(bytes)['encoding']
                                if encode == 'ascii':
                                    encode = 'ansi'  #ansi is a super charset of ascii
                                dataset = pd.read_csv(csv,
                                                      encoding=encode,
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                    datasets['sheet1'] = dataset
                # get excel dataset(include sheets)
                if re.fullmatch(r"^.*?\.xlsx?$", excelcsv,
                                flags=re.IGNORECASE):
                    isexcel = 1
                    excel = self.file_dir + "\\" + excelcsv
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_values=na_values,
                                             keep_default_na=False,
                                             header=0,
                                             sheet_name=None)
                # one sheet/csv is one table
                for k, v in datasets.items():
                    created_table = None
                    try:
                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        # rename table name if excel have more than one sheets
                        if isexcel == 1 and len(datasets) > 1:
                            tablename = origin_tablename + '_' + re.sub(
                                r"[^\w]+", "_", sheet_name,
                                flags=re.IGNORECASE)
                            self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        # cut off table name (MySQL identifier limit is 64)
                        if len(tablename.encode("utf8")) > 64:
                            if self.is_Chinese(tablename):
                                tablename = "{0}_".format(
                                    long_num) + tablename[:20]
                            else:
                                tablename = "{0}_".format(
                                    long_num) + tablename[:60]
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, "a", encoding="utf8") as fw:
                                fw.write(
                                    "table name cut off: {0}, tablename: {1}\n"
                                    .format(self.excel_name, tablename))
                        col_maxlen, dataset = self.read_data(dataset)
                        if dataset.empty:
                            raise EmptyError("Empty")
                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)
                        try:
                            self.insert_data(dataset, tablename)
                        except pymysql.err.InternalError as reason:
                            # MySQL error code, e.g. '1366' bad value,
                            # '1118' row too large
                            reason_num_0 = str(reason).split(',')[0].strip('(')
                            if reason_num_0 == '1366':
                                # retry with strict sql_mode disabled
                                try:
                                    sql_1 = 'truncate table `{0}`.`{1}`;'.format(
                                        self.db, tablename)
                                    self.ConnDB.exec(self.conn, sql_1)
                                    self.ConnDB.exec(
                                        self.conn, 'set SESSION sql_mode = ""')
                                    self.insert_data(dataset, tablename)
                                    self.ConnDB.exec(
                                        self.conn,
                                        'set SESSION sql_mode = "{}"'.format(
                                            self.sql_mode))
                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ',')[0].strip('(')
                                    if reason_num_1 == '1118':
                                        # rebuild the table with TEXT columns
                                        sql = re.sub('varchar\\(\\d+\\)',
                                                     'text', created_sql)
                                        sql_1 = 'drop table if exists `{0}`.`{1}`;'.format(
                                            self.db, tablename)
                                        self.ConnDB.exec(self.conn, sql_1)
                                        self.ConnDB.exec(self.conn, sql)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = ""')
                                        self.insert_data(dataset, tablename)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = "{}"'.
                                            format(self.sql_mode))
                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))
                            elif reason_num_0 == '1118':
                                # row too large: recreate with TEXT columns
                                sql = re.sub('varchar\\(\\d+\\)', 'text',
                                             created_sql)
                                sql_0 = 'drop table if exists `{0}`.`{1}`;'.format(
                                    self.db, tablename) + sql
                                self.ConnDB.exec(self.conn, sql_0)
                                self.insert_data(dataset, tablename)
                            else:
                                raise pymysql.err.InternalError(str(reason))
                    except Exception as reason:
                        # log the failed sheet and drop its half-made table
                        print('Failed: {}'.format(self.excel_name))
                        with open(log_file, 'a') as (fw):
                            fw.write('sheet name: {0}, error: {1}\n'.format(
                                self.excel_name, str(reason)))
                        if created_table:
                            sql = 'drop table if exists `{0}`.`{1}`'.format(
                                self.db, created_table)
                            self.ConnDB.exec(self.conn, sql)
                        continue
                    else:
                        print('Imported: {}'.format(self.excel_name))
                        num_s += 1
                    finally:
                        # always count the attempt and restore sql_mode
                        num += 1
                        self.ConnDB.exec(
                            self.conn, 'set SESSION sql_mode = "{}"'.format(
                                self.sql_mode))
            except Exception as reason:
                # whole file failed (unreadable, bad encoding, ...)
                print("Failed: {}".format(excelcsv))
                with open(log_file, "a") as fw:
                    fw.write("file name: {0}, error: {1}\n".format(
                        self.excel_name, str(reason)))
                num += 1
                continue
        print('\nTotal: {}, Imported: {}\n'.format(num, num_s))
        self.conn.close()
        self.conn_db.close()
        if os.path.isfile(log_file):
            os.popen(log_file)
            sg.Popup("You have logs , see file '{}' \n\ncheck it first".format(
                log_file))
        if num_s == 0:
            sg.Popup("No imported tables!")
            return
        sg.Popup("Done!")

    def is_Chinese(self, word):
        """True if `word` contains any CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    def get_excel(self):
        # a function to get excel/csv files under the dictionary
        # returns {file name: sanitized table name}
        excels = os.listdir(self.file_dir)
        excelcsvs = defaultdict()
        for excel in excels:
            excel_dir = self.file_dir + "\\" + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    r"^.*?\.(xls|xlsx|csv)$", excel, flags=re.IGNORECASE):
                tablename = re.sub(r"\.(xls|xlsx|csv)$",
                                   '',
                                   excel.lower(),
                                   flags=re.IGNORECASE)
                # replace all character not \w to "_"
                tablename = re.sub(r"[^\w]+",
                                   "_",
                                   tablename,
                                   flags=re.IGNORECASE)
                excelcsvs[excel] = tablename
        return excelcsvs

    def read_data(self, dataset):
        """Normalize one sheet: strip cells, drop blank rows, repair column
        names (auto-detect a real header row, fill blanks, truncate, dedupe)
        and return (per-column max length dict, cleaned dataset)."""
        dataset = dataset.fillna(value="")
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        f = lambda x: len(x)
        df1 = dataset.applymap(f)
        f = lambda x: max(x)
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=['c'])
        # rows whose longest cell is empty are blank rows — drop them
        indexs = df3.loc[(df3['c'] == 0)].index
        dataset.drop(indexs, inplace=True)
        # deal with columns
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        low_col = [col.lower() for col in self.columns]
        s = len(low_col)  # NOTE(review): unused
        # if every column is pandas' auto 'Unnamed: N', the real header is
        # the first data row — promote it
        recol = 1
        for col in low_col:
            if 'unnamed: ' not in col:
                recol = 0
                break
        if recol:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)
            low_col = [col.lower() for col in self.columns]
        self.columns = [str(col).strip() for col in self.columns]
        # fix blank col name
        f = lambda x: "unnamed" if x == "" else x
        self.columns = [f(col) for col in self.columns]

        # cut off col
        def f(x):
            if len(x.encode("utf8")) <= 63:
                x = x
            elif self.is_Chinese(x):
                x = x[:20].strip()
            else:
                x = x[:62].strip()
            return x

        self.columns = [f(col) for col in self.columns]
        # fix duplicate column name (case-insensitive; suffix 1, 2, ...)
        while 1:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1
                idx += 1
            if c == 0:
                break
        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()
        f = lambda x: max(x)
        df1.columns = self.columns
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()
        # empty strings become SQL NULL on insert
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)
        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """Create `tablename` with column widths sized from col_maxlen;
        returns (tablename, create-sql) for later error recovery."""
        sql = "create table {0}(".format(tablename)
        for col, maxLen in col_maxlen.items():
            colType = "varchar(255)"
            if maxLen > 255:
                colType = "TEXT"
            if maxLen > 65535:
                colType = "MEDIUMTEXT"
            if maxLen > 16777215:
                colType = "LONGTEXT"
            sql = sql + "`{0}` {1} default null,".format(col, colType)
        sql = sql[:-1] + ")"
        try:
            self.ConnDB.exec(self.conn_db, sql)
        except:
            # e.g. row size too large: retry with every varchar as text
            sql = re.sub(r"varchar\(\d+\)", "text", sql)
            self.ConnDB.exec(self.conn_db, sql)
        return tablename, sql

    def insert_data(self, dataset, tablename):
        """Bulk-insert the cleaned dataset into `db`.`tablename`."""
        dataset = np.array(dataset)
        datalist = dataset.tolist()
        cols = '`,`'.join(self.columns)
        l = len(self.columns)
        v = '%s,' * l
        v = v[:-1]
        sql = 'insert into `%s`.`%s`(%s) values(' % (self.db, tablename,
                                                     '`' + cols + '`')
        sql = sql + '%s)' % v
        self.ConnDB.exec(self.conn, sql, datalist=datalist)
class ImportExcel:
    """easygui-driven batch importer (older variant): prompts for a database
    name and a directory, imports every Excel/CSV there, then optionally
    runs a cleanup SQL script."""

    def __init__(self):
        self.realpath = os.path.split(os.path.realpath(sys.argv[0]))[0]
        # get configuration from 'config.ini'
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()
        self.conn = self.ConnDB.conndb()
        self.img = self.realpath + "\\" + self.HandleConfig.handle_config(
            "g", "referencefile", "img")
        self.cleansql = self.realpath + "\\" + self.HandleConfig.handle_config(
            "g", "referencefile", "cleanexcel")
        self.default_dir = self.HandleConfig.handle_config(
            "g", "global", "default_excels_dictionary")

    def main(self):
        """Prompt for db name + directory, import everything, then offer to
        run the cleanup script."""
        # enter a database you will import excel to
        self.db = easygui.enterbox(msg="Enter your database name:")
        if not self.db:
            return
        self.db = self.db.lower().strip()
        # choose excel file dictionary
        self.importexcelpath = easygui.diropenbox(
            msg="Choose your excels dictionary:", default=self.default_dir)
        if not self.importexcelpath:
            return
        excelcsvs = self.get_excel()
        if not excelcsvs:
            easygui.msgbox("No excels can import!")
            return
        # anything failed excel to import will be wrote in it
        log_file = self.importexcelpath + "\\log.txt"
        if os.path.isfile(log_file):
            os.remove(log_file)
        # create database
        try:
            sql = "create database `{0}`;".format(self.db)
            self.ConnDB.exec(self.conn, sql)
        except pymysql.err.ProgrammingError:
            conti = easygui.ynbox(
                msg="Database {0} exists, drop it first?".format(self.db))
            if conti:
                print("Dropping database...")
                sql = "drop database if exists `{0}`;create database `{0}`".format(
                    self.db, self.db)
                self.ConnDB.exec(self.conn, sql)
            # NOTE(review): declining the drop still falls through and
            # imports into the existing database — confirm that is intended
        print("Created database {}".format(self.db))
        self.conn_db = self.ConnDB.conndb(self.db)
        longexcelcsvs = defaultdict()   # original name -> shortened table name
        long_num = 0                    # uniquifier for cut-off table names
        num = 0                         # sheets attempted
        num_s = 0                       # sheets imported
        print("Begin to import...\n")
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                # get csv dataset
                if re.fullmatch(r"^.*?\.csv$", excelcsv, flags=re.IGNORECASE):
                    datasets = defaultdict()
                    csv = self.importexcelpath + "\\" + excelcsv
                    # sniff the encoding from (at most) the first 1MB
                    with open(csv, 'rb') as f:
                        bytes = f.read(1000000)
                    encode = chardet.detect(bytes)['encoding']
                    if encode == 'ascii':
                        encode = 'ansi'  # ansi is a super charset of ascii
                    dataset = pd.read_csv(csv,
                                          encoding=encode,
                                          dtype=str,
                                          na_filter=False,
                                          header=0,
                                          engine="c")
                    datasets['sheet1'] = dataset
                # get excel dataset(include sheets)
                if re.fullmatch(r"^.*?\.xlsx?$", excelcsv,
                                flags=re.IGNORECASE):
                    isexcel = 1
                    excel = self.importexcelpath + "\\" + excelcsv
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_filter=False,
                                             header=0,
                                             sheet_name=None)
                # one sheet/csv is one table
                for k, v in datasets.items():
                    created_table = None
                    try:
                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        # rename table name if excel have more than one sheets
                        if isexcel == 1 and len(datasets) > 1:
                            tablename = origin_tablename + '_' + re.sub(
                                r"[^\w]+", "_", sheet_name,
                                flags=re.IGNORECASE)
                            self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        # cut off table name (MySQL identifier limit is 64)
                        if len(tablename.encode("utf8")) > 64:
                            if self.is_Chinese(tablename):
                                tablename = "{0}_".format(
                                    long_num) + tablename[:20]
                            else:
                                tablename = "{0}_".format(
                                    long_num) + tablename[:60]
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, "a", encoding="utf8") as fw:
                                fw.write(
                                    "extra long excel: {0}, tablename: {1}\n".
                                    format(self.excel_name, tablename))
                        col_maxlen, dataset = self.read_data(dataset)
                        if dataset.empty:
                            raise EmptyError("Empty")
                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)
                        try:
                            self.insert_data(dataset, tablename)
                        except Exception as reason:
                            # leading MySQL error code: '1366' bad value,
                            # '1118' row too large
                            reason_num_0 = str(reason).split(",")[0].strip("(")
                            if reason_num_0 == "1366":
                                # retry after widening charset to utf8mb4
                                try:
                                    sql_0 = "alter table {0} convert to character set utf8mb4 collate utf8mb4_bin".format(
                                        created_table)
                                    self.ConnDB.exec(self.conn_db, sql_0)
                                    self.insert_data(dataset,
                                                     tablename,
                                                     charset="utf8mb4")
                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ",")[0].strip("(")
                                    if reason_num_1 == "1118":
                                        # rebuild with TEXT columns, then
                                        # convert + retry as utf8mb4
                                        sql = re.sub(r"varchar\(\d+\)", "text",
                                                     created_sql)
                                        sql_1 = "drop table if exists {0};".format(
                                            tablename)
                                        # NOTE(review): prints the earlier
                                        # alter statement — looks like it was
                                        # meant to print sql_1; verify
                                        print(sql_0)
                                        self.ConnDB.exec(self.conn_db, sql_1)
                                        self.ConnDB.exec(self.conn_db, sql)
                                        sql_0 = "alter table {0} convert to character set utf8mb4 collate utf8mb4_bin".format(
                                            created_table)
                                        self.ConnDB.exec(self.conn_db, sql_0)
                                        self.insert_data(dataset,
                                                         tablename,
                                                         charset="utf8mb4")
                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))
                            elif reason_num_0 == "1118":
                                # row too large: recreate with TEXT columns
                                sql = re.sub(r"varchar\(\d+\)", "text",
                                             created_sql)
                                sql_0 = "drop table if exists {0};".format(
                                    tablename) + sql
                                self.ConnDB.exec(self.conn_db, sql_0)
                                self.insert_data(dataset, tablename)
                            else:
                                raise pymysql.err.InternalError(str(reason))
                    except Exception as reason:
                        # log the failed sheet and drop its half-made table
                        print("Failed: {}".format(self.excel_name))
                        with open(log_file, "a", encoding="utf8") as fw:
                            fw.write(
                                "excel sheet name: {0}, error: {1}\n".format(
                                    self.excel_name, str(reason)))
                        if created_table:
                            sql = "drop table if exists {0}".format(
                                created_table)
                            self.ConnDB.exec(self.conn_db, sql)
                        continue
                    else:
                        print("Imported: {}".format(self.excel_name))
                        num_s += 1
                    finally:
                        num += 1
            except Exception as reason:
                # whole file failed (unreadable, bad encoding, ...)
                print("Failed: {}".format(excelcsv))
                with open(log_file, "a", encoding="utf8") as fw:
                    fw.write("excel file name: {0}, error: {1}\n".format(
                        self.excel_name, str(reason)))
                num += 1
                continue
        print("\nTotal: {}, Imported: {}\n".format(num, num_s))
        self.conn.close()
        self.conn_db.close()
        conti = 1
        if os.path.isfile(log_file):
            os.popen(log_file)
            easygui.msgbox(
                "You have logs , see file '{}' \n\ncheck it first".format(
                    log_file))
        if num_s == 0:
            easygui.msgbox("No imported tables!")
            return
        conti = easygui.ccbox(msg="Clean database {} now?".format(self.db))
        if conti:
            self.clean_data()

    def is_Chinese(self, word):
        """True if `word` contains any CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    def get_excel(self):
        # a function to get excel/csv file under the dictionary
        # returns {file name: sanitized table name}
        excels = os.listdir(self.importexcelpath)
        excelcsvs = defaultdict()
        for excel in excels:
            excel_dir = self.importexcelpath + "\\" + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    r"^.*?\.(xls|xlsx|csv)$", excel, flags=re.IGNORECASE):
                tablename = re.sub(r"\.(xls|xlsx|csv)$",
                                   '',
                                   excel.lower(),
                                   flags=re.IGNORECASE)
                # replace all character not \w to "_"
                tablename = re.sub(r"[^\w]+",
                                   "_",
                                   tablename,
                                   flags=re.IGNORECASE)
                excelcsvs[excel] = tablename
        return excelcsvs

    def read_data(self, dataset):
        """Normalize one sheet: repair column names (strip, % -> _,
        truncate, dedupe), strip cells, drop blank rows; returns
        (per-column max byte length dict, cleaned dataset)."""
        # str col
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        # replace % to _
        self.columns = [
            str(col).strip().replace('%', '_') for col in self.columns
        ]

        # cut off col
        def f(x):
            if len(x.encode("utf8")) <= 63:
                x = x
            elif self.is_Chinese(x):
                x = x[:20].strip()
            else:
                x = x[:62].strip()
            return x

        self.columns = [f(col) for col in self.columns]
        # fix duplicate column name (case-insensitive; suffix 1, 2, ...)
        while True:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1
                idx += 1
            if c == 0:
                break
        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()
        # deal with data
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        f = lambda x: len(x.encode("utf8"))
        df1 = dataset.applymap(f)
        f = lambda x: max(x)
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=["c"])
        # rows whose longest cell is empty are blank rows — drop them
        indexs = df3.loc[df3["c"] == 0].index
        dataset.drop(indexs, inplace=True)
        # empty strings become SQL NULL on insert
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)
        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """Create `tablename` with column widths sized from col_maxlen;
        returns (tablename, create-sql) for later error recovery."""
        sql = "create table {0}(".format(tablename)
        for col, maxLen in col_maxlen.items():
            colType = "varchar(255)"
            if maxLen > 255:
                colType = "TEXT"
            if maxLen > 65535:
                colType = "MEDIUMTEXT"
            if maxLen > 16777215:
                colType = "LONGTEXT"
            sql = sql + "`{0}` {1} default null,".format(col, colType)
        sql = sql[:-1] + ")"
        try:
            self.ConnDB.exec(self.conn_db, sql)
        except:
            # e.g. row size too large: retry with every varchar as text
            sql = re.sub(r"varchar\(\d+\)", "text", sql)
            self.ConnDB.exec(self.conn_db, sql)
        return tablename, sql

    def insert_data(self, dataset, tablename, charset='utf8'):
        """Bulk-insert the cleaned dataset; charset='utf8mb4' opens a
        dedicated connection for the retry path."""
        # insert
        dataset = np.array(dataset)  # dataframe to ndarray
        datalist = dataset.tolist()  # ndarray to list
        cols = "`,`".join(self.columns)
        l = len(self.columns)
        v = "%s," * l
        v = v[:-1]
        sql = "insert into `%s`(%s) values(" % (tablename, "`" + cols + "`")
        sql = sql + "%s)" % v
        if charset == "utf8mb4":
            conn = self.ConnDB.conndb(db=self.db, charset=charset)
            cur = conn.cursor()
            cur.executemany(sql, datalist)
            conn.commit()
            cur.close()
            conn.close()
        else:
            cur = self.conn_db.cursor()
            cur.executemany(sql, datalist)
            self.conn_db.commit()
            cur.close()
            #conn.close()

    def clean_data(self):
        """Run the configured cleanup SQL file against the new database."""
        print('Begin to clean data...\n')
        file = self.cleansql
        ret = self.ConnDB.cmd(self.db, "mysql", file)
        if ret == 0:
            easygui.msgbox(msg="Import Over", image=self.img)
        else:
            easygui.exceptionbox("Clean Data Failed")
def generate_layout(): # get the work that the program is working on works = [ work for work in HandleConfig.handle_config()['worklist'].values() ] # the setting of tool # addvanced options menu_def = [ [ '&File', [ '&Open Work Dir', '&Open Git Dir', '&Open tmp.txt', '&Append Templete Scripts' ] ], ['&Setting', ['&Edit Config', '&Load Config']], ['&Tools', ['&Clean Sql', '&Custom Field']], [ '&Advanced', ['&Import Specific Excel', '&Add Comment', '&View Table'] ], ] # the templete layout tab_layouts = [] for work in works: try: dbname = HandleConfig.handle_config("g", work, "dbname") except: dbname = '' tab_layout = [ #[sg.Text('',size=(12,1))], [ sg.B( button_text='Import Excel', size=(15, 3), ), sg.B(button_text='Config Import', size=(15, 3)), sg.B(button_text='Restore Database', size=(15, 3)), sg.B(button_text='Run Script', size=(15, 3)), sg.B(button_text='Complete', size=(15, 3)), ], [ sg.B(button_text='DB To Download', size=(15, 3)), sg.B(button_text='AWS Command List', size=(15, 3)) ], [ sg.Frame('Work Information', [ [ sg.Text('Database:', size=(7, 1)), sg.Input('{}'.format(dbname), disabled=True, key=work + '_db', size=(25, 1)), ], [ sg.Text('Jira url: ', size=(7, 1)), sg.Input('https://neoncrm.atlassian.net/browse/{}'. format(work), disabled=True, size=(50, 1)) ], [ sg.Text('Test url: ', size=(7, 1)), sg.Input( 'https://neonuat.com:8443/np/clients/{}_test/login.jsp' .format(dbname), disabled=True, size=(70, 1)) ], ], size=(1000, 5000)) ] ] tab_layouts.append(sg.Tab(work, tab_layout)) layout = [ [sg.Menu(menu_def)], [ sg.Button('New Work'), sg.Button('Git Pull'), sg.Text(' ' * 118), sg.Button('{}'.format( HandleConfig.handle_config('g', 'global', 'server'))) ], [ sg.TabGroup([tab_layouts], selected_background_color='red', key='tabgroup') ], ] return layout
class ConnDB(): def __init__(self): self.HandleConfig = HandleConfig() self.server = self.HandleConfig.handle_config("g", "global", "use_server") self.host = self.HandleConfig.handle_config('g', self.server, 'host') self.user = self.HandleConfig.handle_config('g', self.server, 'user') self.passwd = self.HandleConfig.handle_config('g', self.server, 'password') self.port = int( self.HandleConfig.handle_config('g', self.server, 'port')) def conndb(self, db=None, charset='utf8'): self.db = db conn = pymysql.connect(host=self.host, user=self.user, passwd=self.passwd, port=self.port, charset=charset, database=db) return conn # execute sql def exec(self, conn, sql, kill=False, COMMAND=None): #conn = self.conndb() cur = conn.cursor() database = self.db if kill: killsql = "SELECT CONCAT('kill ',id) FROM information_schema.`PROCESSLIST` WHERE DB = '{}'".format( database) if COMMAND: killsql = "SELECT CONCAT('kill ',id) FROM information_schema.`PROCESSLIST` WHERE DB = '{}' OR COMMAND = 'Sleep'".format( database) cur.execute(killsql) killids = cur.fetchall() killids = list(killids) idx = 0 for killid in killids: killids[idx] = (list(killid))[0] killidsql = killids[idx] cur.execute(killidsql) idx = idx + 1 for s in sql.split(";"): if s != "": cur.execute(s) conn.commit() cur.close() return cur # exec cmd def cmd(self, db, op, file): if op == "mysql": cmd_statement = "{0} -u{1} -p{2} -h{3} -P{4} {5} --default-character-set=utf8 < \"{6}\"".format( op, self.user, self.passwd, self.host, self.port, db, file) print(cmd_statement) ret = os.system(cmd_statement) return ret
class ImportExcel:
    """Work-item-aware importer: loads the Excel/CSV files belonging to a
    Jira work item into a fresh database, cleans the data, optionally runs
    a per-work script, then dumps the result via MysqlDump."""

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()
        self.MysqlDump = MysqlDump()
        self.cleansql = self.HandleConfig.handle_config(
            'g', 'referencefile', 'cleanexcel')
        self.nickname = self.HandleConfig.handle_config(
            'g', 'excelimporter', 'nickname')

    def main(self, currentwork, advanced=0):
        """Import all excels for `currentwork`.

        advanced: when truthy, a file picker chooses specific files instead
        of using the work item's default excel directory, and a per-work
        script is run afterwards.
        """
        self.dbname = self.HandleConfig.handle_config('g', currentwork,
                                                      'dbname')
        na_values = self.HandleConfig.handle_config('g', 'excelimporter',
                                                    'na_values').split(',')
        na_values = [i.strip() for i in na_values]
        if advanced:
            # pick individual files; derive the directory from the first one
            layout = [[sg.InputText('', key='e'), sg.FilesBrowse()],
                      [
                          sg.Submit(tooltip='Click to submit this form'),
                          sg.Cancel()
                      ]]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
            if event in (None, 'Cancel'):
                return
            files = values['e']
            files_list = files.split(';')
            files = []
            for f in files_list:
                files.append(f.split('/')[-1])
            self.importexcelpath = os.path.dirname(files_list[0]).replace(
                '/', '\\') + '\\'
            excelcsvs = self.get_excel(excels=files)
        else:
            self.importexcelpath = self.HandleConfig.handle_config(
                'g', currentwork, 'jirapath') + 'excel\\'
            excelcsvs = self.get_excel()
        if not excelcsvs:
            sg.Popup('No excels can import!')
            return
        log_file = self.importexcelpath + '\\log.txt'
        if os.path.isfile(log_file):
            os.remove(log_file)
        # always start from a fresh database for this work item
        sql = 'drop database if exists `{0}`;\ncreate database `{0}`'.format(
            self.dbname, self.dbname)
        conn_db = self.ConnDB.conndb()
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()
        print('\n\n{}'.format(sql))
        self.conn = self.ConnDB.conndb(self.dbname)
        # remember the session sql_mode so the per-table
        # `set SESSION sql_mode = ""` workarounds below can restore it
        self.sql_mode = self.ConnDB.exec(
            self.conn, 'SELECT @@SESSION.sql_mode').fetchall()[0][0]
        longexcelcsvs = defaultdict()   # original name -> shortened table name
        long_num = 0                    # uniquifier for cut-off table names
        num = 0                         # sheets attempted
        num_s = 0                       # sheets imported
        print('\n\nBegin to import...\n')
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                if re.fullmatch('^.*?\\.csv$', excelcsv,
                                flags=(re.IGNORECASE)):
                    datasets = defaultdict()
                    csv = self.importexcelpath + '\\' + excelcsv
                    # Determining the encoding of a CSV file
                    # http://pandaproject.net/docs/determining-the-encoding-of-a-csv-file.html
                    try:
                        dataset = pd.read_csv(csv,
                                              encoding='utf-8',
                                              dtype=str,
                                              na_values=na_values,
                                              keep_default_na=False,
                                              header=0,
                                              engine='c')
                    except UnicodeDecodeError:
                        try:
                            dataset = pd.read_csv(csv,
                                                  encoding='ansi',
                                                  dtype=str,
                                                  na_values=na_values,
                                                  keep_default_na=False,
                                                  header=0,
                                                  engine='c')
                        except UnicodeDecodeError:
                            try:
                                dataset = pd.read_csv(csv,
                                                      encoding='utf-16',
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                            except UnicodeDecodeError:
                                # last resort: sniff the encoding (first line
                                # only for very large files)
                                with open(csv, 'rb') as f:
                                    bytes = f.read()
                                if len(bytes) > 100000:
                                    with open(csv, 'rb') as f:
                                        bytes = f.readline()
                                encode = chardet.detect(bytes)['encoding']
                                if encode == 'ascii':
                                    encode = 'ansi'  #ansi is a super charset of ascii
                                # NOTE(review): this fallback uses
                                # na_filter=False, unlike the attempts above
                                # which honor na_values — confirm intent
                                dataset = pd.read_csv(csv,
                                                      encoding=encode,
                                                      dtype=str,
                                                      na_filter=False,
                                                      header=0,
                                                      engine="c")
                    datasets['sheet1'] = dataset
                if re.fullmatch('^.*?\\.xlsx?$', excelcsv,
                                flags=(re.IGNORECASE)):
                    isexcel = 1
                    excel = self.importexcelpath + '\\' + excelcsv
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_values=na_values,
                                             keep_default_na=False,
                                             header=0,
                                             sheet_name=None)
                # one sheet/csv is one table
                for k, v in datasets.items():
                    created_table = None
                    try:
                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        # rename table if the workbook has several sheets
                        if isexcel == 1:
                            if len(datasets) > 1:
                                tablename = origin_tablename + '_' + re.sub(
                                    '[^0-9a-z]+',
                                    '_',
                                    sheet_name,
                                    flags=(re.IGNORECASE))
                                self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        # cut off over-long table names, uniquified by suffix
                        if len(tablename) > 55:
                            tablename = tablename[:51] + '_{0}'.format(
                                long_num)
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, 'a') as (fw):
                                fw.write(
                                    'extra long excel: {0}, tablename: {1}\n'.
                                    format(self.excel_name, tablename))
                        col_maxlen, dataset = self.read_data(dataset)
                        if dataset.empty:
                            raise EmptyError('Empty')
                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)
                        try:
                            self.insert_data(dataset, tablename)
                        except pymysql.err.InternalError as reason:
                            # leading MySQL error code: '1366' bad value,
                            # '1118' row too large
                            reason_num_0 = str(reason).split(',')[0].strip('(')
                            if reason_num_0 == '1366':
                                # retry with strict sql_mode disabled
                                try:
                                    sql_1 = 'truncate table `{0}`.`{1}`;'.format(
                                        self.dbname, tablename)
                                    self.ConnDB.exec(self.conn, sql_1)
                                    self.ConnDB.exec(
                                        self.conn, 'set SESSION sql_mode = ""')
                                    self.insert_data(dataset, tablename)
                                    self.ConnDB.exec(
                                        self.conn,
                                        'set SESSION sql_mode = "{}"'.format(
                                            self.sql_mode))
                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ',')[0].strip('(')
                                    if reason_num_1 == '1118':
                                        # rebuild the table with TEXT columns
                                        sql = re.sub('varchar\\(\\d+\\)',
                                                     'text', created_sql)
                                        sql_1 = 'drop table if exists `{0}`.`{1}`;'.format(
                                            self.dbname, tablename)
                                        self.ConnDB.exec(self.conn, sql_1)
                                        self.ConnDB.exec(self.conn, sql)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = ""')
                                        self.insert_data(dataset, tablename)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = "{}"'.
                                            format(self.sql_mode))
                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))
                            elif reason_num_0 == '1118':
                                # row too large: recreate with TEXT columns
                                sql = re.sub('varchar\\(\\d+\\)', 'text',
                                             created_sql)
                                sql_0 = 'drop table if exists `{0}`.`{1}`;'.format(
                                    self.dbname, tablename) + sql
                                self.ConnDB.exec(self.conn, sql_0)
                                self.insert_data(dataset, tablename)
                            else:
                                raise pymysql.err.InternalError(str(reason))
                    except Exception as reason:
                        # log the failed sheet and drop its half-made table
                        print('Failed: {}'.format(self.excel_name))
                        with open(log_file, 'a') as (fw):
                            fw.write(
                                'excel sheet name: {0}, error: {1}\n'.format(
                                    self.excel_name, str(reason)))
                        if created_table:
                            sql = 'drop table if exists `{0}`.`{1}`'.format(
                                self.dbname, created_table)
                            self.ConnDB.exec(self.conn, sql)
                        continue
                    else:
                        print('Imported: {}'.format(self.excel_name))
                        num_s += 1
                    finally:
                        # always count the attempt and restore sql_mode
                        num += 1
                        self.ConnDB.exec(
                            self.conn, 'set SESSION sql_mode = "{}"'.format(
                                self.sql_mode))
            except Exception as reason:
                # whole file failed (unreadable, bad encoding, ...)
                print('Failed: {}'.format(excelcsv))
                with open(log_file, 'a') as (fw):
                    fw.write('excel file name: {0}, error: {1}\n'.format(
                        self.excel_name, str(reason)))
                num += 1
                continue
        print('\nTotal: {}, Imported: {}\n'.format(num, num_s))
        if os.path.isfile(log_file):
            os.popen(log_file)
            ret = sg.Popup(
                'You have logs. Check them first!\n\nPress OK to continue to dump database if you have checked.',
                title=currentwork)
            if ret is None:
                return
        if num_s == 0:
            sg.Popup('No imported tables!', title=currentwork)
            return
        self.clean_data()
        if advanced:
            # run this work item's own script, if present
            sqlfile = self.HandleConfig.handle_config(
                'g', currentwork, 'jirapath') + 'script\\{0}.sql'.format(
                    self.dbname)
            if os.path.exists(sqlfile):
                self.ConnDB.cmd(self.dbname, 'mysql', sqlfile)
        self.MysqlDump.main(currentwork)
        sg.Popup('Import Excel Complete!', title=currentwork)

    def get_excel(self, excels=None):
        """Map excel/csv file names to 'z_excel_<nickname>_...' table names;
        `excels` limits the scan to the given file names."""
        if not excels:
            excels = os.listdir(self.importexcelpath)
        excelcsvs = defaultdict()
        for excel in excels:
            excel_dir = self.importexcelpath + '\\' + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    '^.*?\\.(xls|xlsx|csv)$', excel, flags=re.IGNORECASE):
                tablename = re.sub('\\.(xls|xlsx|csv)$', '', (excel.lower()),
                                   flags=re.IGNORECASE)
                tablename = 'z_excel_' + self.nickname + '_' + re.sub(
                    '[^0-9a-z]+', '_', tablename, flags=re.IGNORECASE)
                excelcsvs[excel] = tablename
        return excelcsvs

    def read_data(self, dataset):
        """Normalize one sheet: strip cells, drop blank rows, promote a real
        header row when needed, repair column names; returns (per-column max
        length dict, cleaned dataset)."""
        dataset = dataset.fillna(value="")
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        f = lambda x: len(x)
        df1 = dataset.applymap(f)
        f = lambda x: max(x)
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=['c'])
        # rows whose longest cell is empty are blank rows — drop them
        indexs = df3.loc[(df3['c'] == 0)].index
        dataset.drop(indexs, inplace=True)
        # deal with columns
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        low_col = [col.lower() for col in self.columns]
        s = len(low_col)
        # pandas names a trailing headerless column 'Unnamed: N' — treat the
        # first data row as the real header in that case
        if 'unnamed: {}'.format(s - 1) in low_col:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)
            low_col = [col.lower() for col in self.columns]
        # an 'ignore' header marks another bogus header row — promote again
        if 'ignore' in low_col:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)
        self.columns = [str(col).strip() for col in self.columns]
        # fix blank col name
        f = lambda x: "unnamed" if x == "" else x
        self.columns = [f(col) for col in self.columns]
        f = lambda x: x if len(x) <= 63 else x[:62].strip()
        self.columns = [f(col) for col in self.columns]
        # fix duplicate column name (case-insensitive; suffix 1, 2, ...)
        while 1:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1
                idx += 1
            if c == 0:
                break
        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()
        f = lambda x: max(x)
        df1.columns = self.columns
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()
        # empty strings become SQL NULL on insert
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)
        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """Create `dbname`.`tablename` with column widths sized from
        col_maxlen; returns (tablename, create-sql) for error recovery."""
        sql = 'create table `{0}`.`{1}`('.format(self.dbname, tablename)
        for col, maxLen in col_maxlen.items():
            colType = 'varchar(255)'
            if maxLen > 255:
                colType = 'TEXT'
            if maxLen > 65535:
                colType = 'MEDIUMTEXT'
            if maxLen > 16777215:
                colType = 'LONGTEXT'
            sql = sql + '`{0}` {1} default null,'.format(col, colType)
        sql = sql[:-1] + ')'
        try:
            self.ConnDB.exec(self.conn, sql)
        except pymysql.InternalError:
            # e.g. row size too large: retry with every varchar as text
            sql = re.sub('varchar\\(\\d+\\)', 'text', sql)
            self.ConnDB.exec(self.conn, sql)
        return (tablename, sql)

    def insert_data(self, dataset, tablename):
        """Bulk-insert the cleaned dataset into `dbname`.`tablename`."""
        dataset = np.array(dataset)
        datalist = dataset.tolist()
        cols = '`,`'.join(self.columns)
        l = len(self.columns)
        v = '%s,' * l
        v = v[:-1]
        sql = 'insert into `%s`.`%s`(%s) values(' % (self.dbname, tablename,
                                                     '`' + cols + '`')
        sql = sql + '%s)' % v
        self.ConnDB.exec(self.conn, sql, datalist=datalist)

    def clean_data(self):
        """Run the configured cleanup SQL file against the new database."""
        print('Begin to clean data...\n')
        file = self.cleansql
        ret = self.ConnDB.cmd(self.dbname, 'mysql', file)
        if ret == 0:
            print('Succeed: Clean data\n')
        else:
            sg.Popup('Clean Data Failed')
""" A python tool for batch importing excel/csv files into mysql database. Author: ryjfgjl Date: 2020-01-05 """ Version = "2.0" # import GUI model import PySimpleGUI as sg import traceback import sys from common.handleconfig import HandleConfig sg.ChangeLookAndFeel('dark') HandleConfig = HandleConfig() # database connection host = HandleConfig.handle_config("g", "dbinfo", "host") port = HandleConfig.handle_config("g", "dbinfo", "port") user = HandleConfig.handle_config("g", "dbinfo", "user") passwd = HandleConfig.handle_config("g", "dbinfo", "passwd") dbname = HandleConfig.handle_config("g", "dbinfo", "dbname") # file information file_dir = HandleConfig.handle_config("g", "file", "file_dir") csv_encoding = HandleConfig.handle_config("g", "file", "csv_encoding") na_values = HandleConfig.handle_config("g", "file", "na_values") def exception_format(): """
class Gui:
    """Builds the PySimpleGUI window layout for the excel-to-database importer.

    Two parallel layouts exist (English / Chinese) selected by the persisted
    'language' default; both share the same widget keys so the event loop is
    language-agnostic.
    """

    def __init__(self):
        # Config accessor used to read the persisted defaults shown in the GUI.
        self.HandleConfig = HandleConfig()

    def ret_bool(self, source):
        """Return True when the data source is a directory ('D'), else False.

        Used to toggle visibility between the directory-chooser and the
        file-chooser widget pairs.
        """
        return source == 'D'

    def generate_layout(self):
        """Build and return the tabbed window layout from saved defaults."""
        default_values = self.HandleConfig.get_defaults()
        if default_values['language'] == 'English':
            # English gui
            # menu
            menu_def = [
                ['&Language', ['&中文', '&English']],
                ['&Database', ['&MySQL', '&Oracle', '&SQL Server']],
                ['&Data Source', ['&Directory', '&Files']],
                ['&Help', ['&About']],
            ]
            # general tab: source chooser, connection info, import mode, log
            layout_general = [
                [sg.Menu(menu_def)],
                [sg.Text('Excel', size=(12, 1), text_color='red')],
                [
                    sg.Input('{}'.format(default_values['file_dir']),
                             key='file_dir',
                             size=(42, 1),
                             visible=self.ret_bool(default_values['source'])),
                    sg.FolderBrowse(
                        initial_folder='{}'.format(default_values['file_dir']),
                        button_text='Choose Directory',
                        visible=self.ret_bool(default_values['source'])),
                    sg.Input('{}'.format(default_values['files']),
                             key='files',
                             size=(45, 1),
                             visible=not self.ret_bool(default_values['source'])),
                    sg.FilesBrowse(
                        button_text='Choose Files',
                        visible=not self.ret_bool(default_values['source'])),
                ],
                [
                    sg.Text('{}'.format(default_values['dbtype']),
                            text_color='red',
                            key='dbtype'),
                    sg.Text('Connection', text_color='red'),
                ],
                [
                    sg.Text('Host:', size=(5, 1)),
                    sg.Input('{}'.format(default_values['host']),
                             key='host', size=(15, 1)),
                    sg.Text(' ' * 11),
                    sg.Text('Port:', size=(7, 1)),
                    sg.Input('{}'.format(default_values['port']),
                             key='port', size=(15, 1)),
                ],
                # NOTE(review): this row arrived corrupted in the source
                # ("sg.Text('User:'******..."); reconstructed by analogy with
                # the intact Chinese layout below (same keys and sizes).
                [
                    sg.Text('User:', size=(5, 1)),
                    sg.Input('{}'.format(default_values['user']),
                             key='user', size=(15, 1)),
                    sg.Text(' ' * 11),
                    sg.Text('Password:', size=(7, 1)),
                    sg.Input('{}'.format(default_values['passwd']),
                             key='passwd', size=(15, 1)),
                ],
                [
                    sg.Text('Database:', size=(7, 1)),
                    sg.Input('{}'.format(default_values['dbname']),
                             key='dbname', size=(48, 1)),
                    sg.Text(' ' * 1),
                ],
                [
                    sg.Text('Mode:', text_color='red'),
                    sg.Text(' ' * 10),
                    sg.Radio('Overwrite', group_id='mode', key='mode1',
                             default=default_values['mode1']),
                    sg.Text(' ' * 10),
                    sg.Radio('Append', group_id='mode', key='mode2',
                             default=default_values['mode2']),
                ],
                [sg.Button('Start', size=(52, 1), key='start')],
                [sg.MLine(key='output', size=(58, 10), auto_refresh=True)],
            ]
            # advanced tab: encoding, NULL mapping, table naming, pre/post SQL
            layout_advanced = [
                [
                    sg.Text('CSV Encoding:', size=(12, 1)),
                    sg.Combo(['AUTO', 'UTF-8', 'ANSI', 'GBK'],
                             default_value=default_values['csv_encoding'],
                             key='csv_encoding',
                             size=(10, 1))
                ],
                [
                    sg.Text('Replace To NULL:', size=(15, 1)),
                    sg.Input('{}'.format(default_values['na_values']),
                             key='na_values', size=(40, 1)),
                ],
                [
                    sg.Text('Append all data to one exists table:', size=(25, 1)),
                    sg.Input(default_values['tname'], key='tname', size=(25, 1)),
                ],
                [
                    sg.Text('Add Table Prefix:', size=(13, 1)),
                    sg.Input(default_values['prefix'], key='prefix', size=(10, 1)),
                    sg.Checkbox('Add a column is table name',
                                key='add_tname',
                                size=(22, 1),
                                default=default_values['add_tname']),
                ],
                [
                    sg.Text('The column on row:', size=(15, 1)),
                    sg.Input(default_values['header'], key='header', size=(10, 1)),
                    sg.Text('', size=(3, 1)),
                    sg.Checkbox('Include Sub Directories',
                                key='loop_subdir',
                                size=(18, 1),
                                default=default_values['loop_subdir']),
                ],
                [
                    sg.Checkbox('Skip Blank Rows',
                                key='del_blank_lines',
                                size=(15, 1),
                                default=default_values['del_blank_lines']),
                    sg.Checkbox('Trim Spaces', key='trim', size=(12, 1),
                                default=default_values['trim']),
                    sg.Checkbox('Skip Blank Sheets',
                                key='skip_blank_sheet',
                                size=(12, 1),
                                default=default_values['skip_blank_sheet']),
                ],
                [
                    sg.Text('Run sql before starting:', size=(17, 1)),
                    sg.Input('{}'.format(default_values['sql_b4']),
                             key='sql_b4', size=(32, 1)),
                    sg.FileBrowse(
                        initial_folder='{}'.format(default_values['sql_b4']),
                        button_text=' 选择 ')
                ],
                [
                    # fixed label typo: "comleting" -> "completing"
                    sg.Text('Run sql after completing:', size=(17, 1)),
                    sg.Input('{}'.format(default_values['sql_after']),
                             key='sql_after', size=(32, 1)),
                    sg.FileBrowse(
                        initial_folder='{}'.format(default_values['sql_after']),
                        button_text=' 选择 ')
                ],
            ]
            tab_layouts = [
                sg.Tab('General', layout_general),
                sg.Tab('Advanced', layout_advanced)
            ]
            layout = [
                [
                    sg.TabGroup([tab_layouts],
                                selected_background_color='red',
                                key='tabgroup')
                ],
            ]
        else:
            # Chinese gui
            # menu
            menu_def = [
                ['&语言', ['&中文', '&English']],
                ['&数据库', ['&MySQL', '&Oracle', '&SQL Server']],
                ['&数据源', ['&选择目录', '&选择文件']],
                ['&帮助', ['&关于']],
            ]
            # general tab
            layout_general = [
                [sg.Menu(menu_def)],
                [sg.Text('Excel 文件', size=(12, 1), text_color='red')],
                [
                    sg.Input('{}'.format(default_values['file_dir']),
                             key='file_dir',
                             size=(50, 1),
                             visible=self.ret_bool(default_values['source'])),
                    sg.FolderBrowse(
                        initial_folder='{}'.format(default_values['file_dir']),
                        button_text='选择目录',
                        visible=self.ret_bool(default_values['source'])),
                    sg.Input('{}'.format(default_values['files']),
                             key='files',
                             size=(50, 1),
                             visible=not self.ret_bool(default_values['source'])),
                    sg.FilesBrowse(
                        button_text='选择文件',
                        visible=not self.ret_bool(default_values['source'])),
                ],
                [
                    sg.Text('{}'.format(default_values['dbtype']),
                            text_color='red',
                            key='dbtype'),
                    sg.Text('连接', text_color='red'),
                ],
                [
                    sg.Text('主机:', size=(5, 1)),
                    sg.Input('{}'.format(default_values['host']),
                             key='host', size=(15, 1)),
                    sg.Text(' ' * 11),
                    sg.Text('端口:', size=(7, 1)),
                    sg.Input('{}'.format(default_values['port']),
                             key='port', size=(15, 1)),
                ],
                [
                    sg.Text('用户:', size=(5, 1)),
                    sg.Input('{}'.format(default_values['user']),
                             key='user', size=(15, 1)),
                    sg.Text(' ' * 11),
                    sg.Text('密码:', size=(7, 1)),
                    sg.Input('{}'.format(default_values['passwd']),
                             key='passwd', size=(15, 1)),
                ],
                [
                    sg.Text('数据库:', size=(5, 1)),
                    sg.Input('{}'.format(default_values['dbname']),
                             key='dbname', size=(50, 1)),
                    sg.Text(' ' * 1),
                ],
                [
                    sg.Text('模 式:', text_color='red'),
                    sg.Text(' ' * 6),
                    sg.Radio('覆 盖', group_id='mode', key='mode1',
                             default=default_values['mode1']),
                    sg.Text(' ' * 15),
                    sg.Radio('追 加', group_id='mode', key='mode2',
                             default=default_values['mode2']),
                ],
                [sg.Button('开 始', size=(52, 1), key='start')],
                [sg.MLine(key='output', size=(58, 10), auto_refresh=True)],
            ]
            # advanced tab
            layout_advanced = [
                [
                    sg.Text('CSV文件编码:', size=(12, 1)),
                    sg.Combo(['AUTO', 'UTF-8', 'ANSI', 'GBK'],
                             default_value=default_values['csv_encoding'],
                             key='csv_encoding',
                             size=(10, 1))
                ],
                [
                    sg.Text('将这些值替换为null:', size=(15, 1)),
                    sg.Input('{}'.format(default_values['na_values']),
                             key='na_values', size=(40, 1)),
                ],
                [
                    sg.Text('为创建的表名添加前缀:', size=(18, 1)),
                    sg.Input(default_values['prefix'], key='prefix', size=(20, 1)),
                ],
                [
                    sg.Text('将数据追加到已存在的表(追加模式有效):', size=(34, 1)),
                    sg.Input(default_values['tname'], key='tname', size=(20, 1)),
                ],
                [
                    sg.Text('指定列名所在行数:', size=(18, 1)),
                    sg.Input(default_values['header'], key='header', size=(10, 1)),
                    sg.Checkbox('添加一列值为表名',
                                key='add_tname',
                                size=(15, 1),
                                default=default_values['add_tname']),
                ],
                [
                    sg.Checkbox('删除空行', key='del_blank_lines', size=(7, 1),
                                default=default_values['del_blank_lines']),
                    sg.Checkbox('去除字符前后空格', key='trim', size=(14, 1),
                                default=default_values['trim']),
                    sg.Checkbox('跳过空表', key='skip_blank_sheet', size=(6, 1),
                                default=default_values['skip_blank_sheet']),
                    sg.Checkbox('遍历子目录', key='loop_subdir', size=(9, 1),
                                default=default_values['loop_subdir']),
                ],
                [
                    sg.Text('导入开始前运行sql:', size=(15, 1)),
                    sg.Input('{}'.format(default_values['sql_b4']),
                             key='sql_b4', size=(32, 1)),
                    sg.FileBrowse(
                        initial_folder='{}'.format(default_values['sql_b4']),
                        button_text=' 选择 ')
                ],
                [
                    sg.Text('导入结束后运行sql:', size=(15, 1)),
                    sg.Input('{}'.format(default_values['sql_after']),
                             key='sql_after', size=(32, 1)),
                    sg.FileBrowse(
                        initial_folder='{}'.format(default_values['sql_after']),
                        button_text=' 选择 ')
                ],
            ]
            tab_layouts = [
                sg.Tab('常规', layout_general),
                sg.Tab('高级', layout_advanced)
            ]
            layout = [
                [
                    sg.TabGroup([tab_layouts],
                                selected_background_color='red',
                                key='tabgroup')
                ],
            ]
        return layout
def __init__(self):
    """Wire up the collaborators this command needs.

    Creates the config accessor, the mysqldump helper, and the DB helper,
    then opens one database connection up front for reuse by the methods.
    """
    self.HandleConfig = HandleConfig()
    self.MysqlDump = MysqlDump()
    self.ConnDB = ConnDB()
    # ConnDB must exist before the connection can be opened from it.
    self.conn = self.ConnDB.conndb()
def __init__(self):
    """Keep a config accessor around for later option lookups."""
    self.HandleConfig = HandleConfig()
class DeleteWork:
    """Finish and retire a data-import work item.

    Concatenates the work's SQL scripts into one txt file, zips the
    deliverables, optionally uploads the zip via WinSCP, then (unless the
    user chose 'Just Zip') parks the navicat scripts in a temp folder,
    stamps the release date in the tracking table, and removes the work
    from the tool's configuration.
    """

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, currentwork):
        """Run the retire workflow for *currentwork*; returns 0 on early exit."""
        # Confirm with the user. 'Just Zip' stops after the zip/upload step;
        # 'OK but not upload zip' performs everything except the WinSCP upload.
        # (fixed user-facing typo: "Warnning" -> "Warning")
        layout = [
            [
                sg.Text(
                    'Warning!\n\nThis option will remove the work from the tool.\nDo you want to continue?'
                )
            ],
            [
                sg.OK(),
                sg.Button('Just Zip'),
                sg.Button('OK but not upload zip')
            ],
        ]
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event is None:
            return 0

        # Resolve all paths for this work from the config.
        navicat_script_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'navicat_script_path')
        jirapath = self.HandleConfig.handle_config('g', currentwork, 'jirapath')
        scriptspath = jirapath + 'script\\'
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        sqlfile = scriptspath + '{0}.sql'.format(dbname)
        txtfile = scriptspath + '{0}.txt'.format(dbname)
        merge = self.HandleConfig.handle_config('g', currentwork, 'merge')
        script = navicat_script_path + dbname + '\\{}.sql'.format(dbname)
        script_after_merge = navicat_script_path + dbname + '\\{}_after_merge.sql'.format(
            dbname)
        script_after_template = navicat_script_path + dbname + '\\{}_after_template.sql'.format(
            dbname)

        # Script order matters: configuration first, shared functions, the
        # main script, template/merge fix-ups, then the cleanup scripts.
        files = [
            scriptspath + 'configration.sql',
            gitscriptpath + 'functionAndProcedure.sql',
            script,
            navicat_script_path + dbname + '\\{}_after_template.sql'.format(dbname),
            gitscriptpath + 'accountMerge.sql',
            script_after_merge,
            gitscriptpath + 'fullDataClean.sql',
            gitscriptpath + 'clear.sql',
        ]
        if merge == 'False':
            # merge flag is stored as a string in the config file
            files.remove(gitscriptpath + 'accountMerge.sql')

        # Concatenate all scripts into one txt file. Opening with 'w' truncates,
        # so the writer is held open across the copy loop instead of reopening
        # the output in append mode for every input (same resulting file).
        with open(txtfile, 'w', encoding='utf8') as fw:
            for f in files:
                with open(f, 'r', encoding='utf8') as fa:
                    shutil.copyfileobj(fa, fw)

        # generate zipfile
        sqlfile_zip = scriptspath + '{0}.zip'.format(currentwork)
        if os.path.isfile(sqlfile_zip):
            os.remove(sqlfile_zip)
        with zipfile.ZipFile(sqlfile_zip, 'w', zipfile.ZIP_DEFLATED) as zfile:
            if os.path.isfile(sqlfile):
                zfile.write(sqlfile, '{0}.sql'.format(dbname))
            zfile.write(txtfile, '{0}.txt'.format(dbname))

        # upload zipfile
        if event != 'OK but not upload zip':
            winscppath = self.HandleConfig.handle_config(
                'g', 'defaultpath', 'winscppath')
            cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/script/" "exit"'.format(
                winscppath, sqlfile_zip)
            os.system(cmd)
        if event == 'Just Zip':
            sg.Popup('Complete!', title=currentwork)
            return 0

        # remove work: park the navicat scripts in the jira temp folder,
        # never overwriting a file already parked there
        script_temp = jirapath + 'temp\\{}.sql'.format(dbname)
        script_after_merge_temp = jirapath + 'temp\\{}_after_merge.sql'.format(
            dbname)
        script_after_template_temp = jirapath + 'temp\\{}_after_template.sql'.format(
            dbname)
        if (not os.path.isfile(script_temp)) and os.path.isfile(script):
            shutil.move(script, script_temp)
        if (not os.path.isfile(script_after_merge_temp)
                ) and os.path.isfile(script_after_merge):
            shutil.move(script_after_merge, script_after_merge_temp)
        if (not os.path.isfile(script_after_template_temp)
                ) and os.path.isfile(script_after_template):
            shutil.move(script_after_template, script_after_template_temp)

        # Stamp today's date as the release date in the tracking table.
        conn_db = self.ConnDB.conndb(server='awshost')
        sql = "update db_to_download.data_import_tracking set releaseDate = '{0}' where jiraId = '{1}' and releaseDate is null".format(
            date.today(), currentwork)
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()

        # Drop the work's own section and its worklist entry, then point
        # 'currentwork' at the most recent remaining work (or blank).
        self.HandleConfig.handle_config(
            'rs', self.HandleConfig.handle_config('g', 'worklist', currentwork))
        self.HandleConfig.handle_config('ro', 'worklist', key=currentwork)
        works = list(self.HandleConfig.handle_config()['worklist'].values())
        currentwork = ''
        if works:
            currentwork = works[-1]
        self.HandleConfig.handle_config('s', 'global', 'currentwork',
                                        currentwork)

        if event != 'OK but not upload zip':
            # Hand the remote run command to the user via the clipboard.
            cmd = '/home/neon/leiwu/bin/dataImportRunScript.sh {0}_test {0}'.format(
                dbname)
            sg.Popup('Complete!\n\n{0} has been copied.'.format(cmd),
                     title=currentwork)
            pyperclip.copy(cmd)
        else:
            sg.Popup('Complete!', title=currentwork)