Exemplo n.º 1
0
class DbToDownload:
    """Register the current work's database in the shared download queue.

    Inserts the configured instance name into
    ``db_to_download.db_list_to_download`` on the 'awshost' server so a
    downstream job can pick it up; on duplicates it inspects the queue and
    today's download log to decide whether to re-queue or just notify.
    """

    def __init__(self):
        # Project helpers defined elsewhere: config reader and DB connector.
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, currentwork):
        """Queue the database configured for *currentwork* for download.

        Pops a GUI message describing the outcome; the connection is always
        closed, even on error.
        """
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        # NOTE(review): dbname comes from local config, but parameterized
        # queries would still be safer than string formatting here.
        sql = "insert into db_to_download.db_list_to_download(instanceName,department) select '{0}','data' from dual".format(dbname)
        conn_db = self.ConnDB.conndb(server='awshost')
        try:
            try:
                self.ConnDB.exec(conn_db, sql)
                sg.Popup('\n DB to download   Complete!         \n', title=currentwork)
            except pymysql.err.IntegrityError:
                # Duplicate key: the instance is already in the queue table.
                sql = "select 1 from db_to_download.db_list_to_download where instanceName = '{0}' and importFlag is null".format(dbname)
                ret = self.ConnDB.exec(conn_db, sql)
                result = ret.fetchall()
                if result:
                    # Still pending (importFlag is null) -- someone else queued it.
                    sg.Popup('\n  DB to download  Complete!         \nWarning: This instance may db_to_download by others.', title=currentwork)
                    return
                else:
                    sql = "select 1 from db_to_download.db_download_log where instanceName = '{0}' and DATE(completeTime) = '{1}'".format(dbname, date.today())
                    ret = self.ConnDB.exec(conn_db, sql)
                    result = ret.fetchall()
                    if len(result) == 0:
                        # Not downloaded today: reset the queue row so it runs again.
                        sql = "update db_to_download.db_list_to_download set deleteTime = now(), downloadTime = now(), importFlag = null, department = 'data' where instanceName = '{0}'".format(dbname)
                        self.ConnDB.exec(conn_db, sql)
                        sg.Popup('\n  DB to download  Complete!         \n', title=currentwork)
                    else:
                        sg.Popup('\n  {0} downloaded, you can go to mysqldump   \nWarning: This instance may db_to_download by others.'.format(dbname), title=currentwork)
                        return
            except Exception:
                # Was a bare `except:`; keep the best-effort popup but stop
                # swallowing SystemExit/KeyboardInterrupt.
                sg.popup_error('\n  DB to download  Error!         \n', title=currentwork)

        finally:
            conn_db.close()
Exemplo n.º 2
0
class ImportExcel:
    """Import every Excel/CSV file in a user-chosen directory into a
    (re)created MySQL database, one table per csv file / excel sheet.

    Failures are appended to ``log.txt`` inside the chosen directory; after a
    successful import an optional clean-up SQL script can be run.
    """

    def __init__(self):
        # Directory of the running script; resource paths are built from it.
        self.realpath = os.path.split(os.path.realpath(sys.argv[0]))[0]
        # get configuration from 'config.ini'
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()
        self.conn = self.ConnDB.conndb()
        # Image shown on the final "Import Over" message box.
        self.img = self.realpath + "\\" + self.HandleConfig.handle_config(
            "g", "referencefile", "img")
        # Clean-up SQL script executed by clean_data().
        self.cleansql = self.realpath + "\\" + self.HandleConfig.handle_config(
            "g", "referencefile", "cleanexcel")
        self.default_dir = self.HandleConfig.handle_config(
            "g", "global", "default_excels_dictionary")

    def main(self):
        """Drive the whole import: ask for a database name and a directory,
        (re)create the database, import every sheet/csv as a table, report
        totals, and optionally run the clean-up script."""
        # enter a database you will import excel to
        self.db = easygui.enterbox(msg="Enter your database name:")
        if not self.db:
            return
        self.db = self.db.lower().strip()

        # choose the directory holding the excel/csv files
        self.importexcelpath = easygui.diropenbox(
            msg="Choose your excels dictionary:", default=self.default_dir)
        if not self.importexcelpath:
            return

        excelcsvs = self.get_excel()
        if not excelcsvs:
            easygui.msgbox("No excels can import!")
            return

        # failed imports are appended to this file; start from a clean slate
        log_file = self.importexcelpath + "\\log.txt"
        if os.path.isfile(log_file):
            os.remove(log_file)

        # create database
        try:
            sql = "create database `{0}`;".format(self.db)
            self.ConnDB.exec(self.conn, sql)

        except pymysql.err.ProgrammingError:
            # Database already exists: offer to drop and recreate it; if the
            # user declines, tables are imported into the existing database.
            conti = easygui.ynbox(
                msg="Database {0} exists, drop it first?".format(self.db))
            if conti:
                print("Dropping database...")
                sql = "drop database if exists `{0}`;create database `{0}`".format(
                    self.db, self.db)
                self.ConnDB.exec(self.conn, sql)

        print("Created database {}".format(self.db))
        self.conn_db = self.ConnDB.conndb(self.db)

        longexcelcsvs = defaultdict()  # over-long source name -> shortened table name
        long_num = 0  # counter used to prefix shortened table names
        num = 0  # sheets/csvs processed
        num_s = 0  # sheets/csvs imported successfully

        print("Begin to import...\n")
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                # get csv dataset
                if re.fullmatch(r"^.*?\.csv$", excelcsv, flags=re.IGNORECASE):
                    datasets = defaultdict()
                    csv = self.importexcelpath + "\\" + excelcsv
                    # Sniff the encoding from the first ~1MB of the file.
                    with open(csv, 'rb') as f:
                        bytes = f.read(1000000)
                    encode = chardet.detect(bytes)['encoding']
                    if encode == 'ascii':
                        encode = 'ansi'  # ansi is a super charset of ascii
                    dataset = pd.read_csv(csv,
                                          encoding=encode,
                                          dtype=str,
                                          na_filter=False,
                                          header=0,
                                          engine="c")
                    datasets['sheet1'] = dataset

                # get excel dataset(include sheets)
                if re.fullmatch(r"^.*?\.xlsx?$", excelcsv,
                                flags=re.IGNORECASE):
                    isexcel = 1
                    excel = self.importexcelpath + "\\" + excelcsv
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_filter=False,
                                             header=0,
                                             sheet_name=None)

                # one sheet/csv is one table
                for k, v in datasets.items():
                    created_table = None
                    try:
                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        # rename table name if excel have more than one sheets
                        if isexcel == 1 and len(datasets) > 1:
                            tablename = origin_tablename + '_' + re.sub(
                                r"[^\w]+",
                                "_",
                                sheet_name,
                                flags=re.IGNORECASE)
                            self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        # cut off table name (MySQL identifiers max out at 64)
                        if len(tablename.encode("utf8")) > 64:
                            if self.is_Chinese(tablename):
                                tablename = "{0}_".format(
                                    long_num) + tablename[:20]
                            else:
                                tablename = "{0}_".format(
                                    long_num) + tablename[:60]
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, "a", encoding="utf8") as fw:
                                fw.write(
                                    "extra long excel: {0}, tablename: {1}\n".
                                    format(self.excel_name, tablename))

                        col_maxlen, dataset = self.read_data(dataset)

                        if dataset.empty:
                            raise EmptyError("Empty")
                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)

                        try:
                            self.insert_data(dataset, tablename)
                        except Exception as reason:
                            # pymysql error text looks like "(1366, '...')";
                            # pull out the numeric error code.
                            reason_num_0 = str(reason).split(",")[0].strip("(")
                            if reason_num_0 == "1366":
                                # 1366 = incorrect string value: retry as utf8mb4.
                                try:
                                    sql_0 = "alter table {0} convert to character set utf8mb4 collate utf8mb4_bin".format(
                                        created_table)
                                    self.ConnDB.exec(self.conn_db, sql_0)
                                    self.insert_data(dataset,
                                                     tablename,
                                                     charset="utf8mb4")

                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ",")[0].strip("(")
                                    if reason_num_1 == "1118":
                                        # 1118 = row too large: recreate the
                                        # table with text columns and retry.
                                        sql = re.sub(r"varchar\(\d+\)", "text",
                                                     created_sql)
                                        sql_1 = "drop table if exists {0};".format(
                                            tablename)
                                        # NOTE(review): prints the ALTER built in the
                                        # outer handler; looks like leftover debugging.
                                        print(sql_0)
                                        self.ConnDB.exec(self.conn_db, sql_1)
                                        self.ConnDB.exec(self.conn_db, sql)

                                        sql_0 = "alter table {0} convert to character set utf8mb4 collate utf8mb4_bin".format(
                                            created_table)
                                        self.ConnDB.exec(self.conn_db, sql_0)
                                        self.insert_data(dataset,
                                                         tablename,
                                                         charset="utf8mb4")
                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))

                            elif reason_num_0 == "1118":
                                # Row too large: recreate with text columns.
                                sql = re.sub(r"varchar\(\d+\)", "text",
                                             created_sql)
                                sql_0 = "drop table if exists {0};".format(
                                    tablename) + sql
                                self.ConnDB.exec(self.conn_db, sql_0)
                                self.insert_data(dataset, tablename)

                            else:
                                raise pymysql.err.InternalError(str(reason))

                    except Exception as reason:
                        # Any failure on this sheet: log it, drop the partial
                        # table, and carry on with the next sheet.
                        print("Failed: {}".format(self.excel_name))

                        with open(log_file, "a", encoding="utf8") as fw:
                            fw.write(
                                "excel sheet name: {0}, error: {1}\n".format(
                                    self.excel_name, str(reason)))

                        if created_table:
                            sql = "drop table if exists {0}".format(
                                created_table)
                            self.ConnDB.exec(self.conn_db, sql)

                        continue

                    else:
                        print("Imported: {}".format(self.excel_name))
                        num_s += 1

                    finally:
                        num += 1

            except Exception as reason:
                # File-level failure (unreadable file, bad encoding, ...).
                print("Failed: {}".format(excelcsv))
                with open(log_file, "a", encoding="utf8") as fw:
                    fw.write("excel file name: {0}, error: {1}\n".format(
                        self.excel_name, str(reason)))
                num += 1
                continue

        print("\nTotal: {}, Imported: {}\n".format(num, num_s))
        self.conn.close()
        self.conn_db.close()

        conti = 1
        if os.path.isfile(log_file):
            # NOTE(review): os.popen runs the path as a shell command, which on
            # Windows opens the log with its associated app -- confirm intent.
            os.popen(log_file)
            easygui.msgbox(
                "You have logs , see file '{}' \n\ncheck it first".format(
                    log_file))
            if num_s == 0:
                easygui.msgbox("No imported tables!")
                return
            conti = easygui.ccbox(msg="Clean database {} now?".format(self.db))

        if conti:
            self.clean_data()

    def is_Chinese(self, word):
        """Return True if *word* contains at least one CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    def get_excel(self):
        # a function to get excel/csv files under the chosen directory
        """Return {file name: sanitized table name} for every .xls/.xlsx/.csv
        file directly inside self.importexcelpath."""
        excels = os.listdir(self.importexcelpath)
        excelcsvs = defaultdict()

        for excel in excels:
            excel_dir = self.importexcelpath + "\\" + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    r"^.*?\.(xls|xlsx|csv)$", excel, flags=re.IGNORECASE):
                tablename = re.sub(r"\.(xls|xlsx|csv)$",
                                   '',
                                   excel.lower(),
                                   flags=re.IGNORECASE)
                # replace all character not \w to "_"
                tablename = re.sub(r"[^\w]+",
                                   "_",
                                   tablename,
                                   flags=re.IGNORECASE)
                excelcsvs[excel] = tablename
        return excelcsvs

    def read_data(self, dataset):
        """Normalize *dataset* and return ``(col_maxlen, dataset)`` where
        col_maxlen maps each (deduplicated) column name to the max cell
        length in utf8 bytes; fully blank rows are dropped and empty strings
        become None."""
        # str col
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        # replace % to _
        self.columns = [
            str(col).strip().replace('%', '_') for col in self.columns
        ]

        # cut off col (column identifiers also have a 64-byte-ish limit)
        def f(x):
            if len(x.encode("utf8")) <= 63:
                x = x
            elif self.is_Chinese(x):
                x = x[:20].strip()
            else:
                x = x[:62].strip()
            return x

        self.columns = [f(col) for col in self.columns]

        # fix duplicate column name by appending a running number; repeat
        # until a full pass produces no new collisions
        while True:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1
                idx += 1
            if c == 0:
                break

        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()

        # deal with data
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        # per-cell utf8 byte length, then per-column maximum
        f = lambda x: len(x.encode("utf8"))
        df1 = dataset.applymap(f)
        f = lambda x: max(x)
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()

        # drop rows whose longest cell is empty (i.e. completely blank rows)
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=["c"])
        indexs = df3.loc[df3["c"] == 0].index
        dataset.drop(indexs, inplace=True)

        # empty strings become NULLs on insert
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)

        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """Create *tablename* with one column per key of *col_maxlen*, sized
        by max utf8 byte length; falls back to all-text columns on failure.
        Returns ``(tablename, create_sql)``."""
        sql = "create table {0}(".format(tablename)
        for col, maxLen in col_maxlen.items():
            colType = "varchar(255)"
            if maxLen > 255:
                colType = "TEXT"
            if maxLen > 65535:
                colType = "MEDIUMTEXT"
            if maxLen > 16777215:
                colType = "LONGTEXT"

            sql = sql + "`{0}` {1} default null,".format(col, colType)

        sql = sql[:-1] + ")"

        try:
            self.ConnDB.exec(self.conn_db, sql)
        except:
            # e.g. row size too large -- retry with text columns only
            sql = re.sub(r"varchar\(\d+\)", "text", sql)
            self.ConnDB.exec(self.conn_db, sql)

        return tablename, sql

    def insert_data(self, dataset, tablename, charset='utf8'):
        """Bulk-insert *dataset* into *tablename* via executemany; when
        charset is "utf8mb4" a fresh utf8mb4 connection is opened instead of
        reusing the shared one."""
        # insert
        dataset = np.array(dataset)  # dataframe to ndarray
        datalist = dataset.tolist()  # ndarray to list
        cols = "`,`".join(self.columns)
        l = len(self.columns)
        v = "%s," * l
        v = v[:-1]
        sql = "insert into `%s`(%s) values(" % (tablename, "`" + cols + "`")
        sql = sql + "%s)" % v

        if charset == "utf8mb4":
            conn = self.ConnDB.conndb(db=self.db, charset=charset)
            cur = conn.cursor()
            cur.executemany(sql, datalist)
            conn.commit()
            cur.close()
            conn.close()
        else:
            cur = self.conn_db.cursor()
            cur.executemany(sql, datalist)
            self.conn_db.commit()
            cur.close()
            #conn.close()

    def clean_data(self):
        """Run the configured clean-up SQL file through the mysql command-line
        client and report success or failure in a message box."""
        print('Begin to clean data...\n')
        file = self.cleansql
        ret = self.ConnDB.cmd(self.db, "mysql", file)
        if ret == 0:
            easygui.msgbox(msg="Import Over", image=self.img)
        else:
            easygui.exceptionbox("Clean Data Failed")
Exemplo n.º 3
0
class ViewTable:
    """Show per-table row counts for the current work's database in a GUI table.

    Rebuilds a stats table (``z_newcreate_neon_data_stats_report``) from a
    fixed list of Neon tables instead of 50+ copy-pasted INSERT statements.
    """

    # Tables counted in the stats report, in insert order.
    REPORT_TABLES = (
        'account', 'user', 'donation', 'membership_listing',
        'event_registration', 'event_attendee', 'shopping_cart_items',
        'payment', 'account_custom_data', 'donation_custom_data',
        'membership_listing_custom_data', 'event_registration_custom_data',
        'event_attendee_custom_data', 'user_custom_data',
        'company_custom_data', 'contact_activity_custom_data', 'note',
        'address', 'relation', 'company_contact', 'household_contact',
        'contact_activity', 'proposal', 'solicitation', 'invitation',
        'material_tracking', 'soft_credit',
    )

    def __init__(self):
        # Project helpers defined elsewhere: config reader and DB connector.
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    @staticmethod
    def _build_stats_sql():
        """Build the multi-statement SQL that (re)creates and fills the stats
        table, ending with a SELECT whose result set is displayed."""
        statements = [
            "DROP table if exists z_newcreate_neon_data_stats_report;",
            "create table z_newcreate_neon_data_stats_report"
            "(tableName varchar(100),tableRows INT);",
        ]
        for table in ViewTable.REPORT_TABLES:
            statements.append(
                "insert into z_newcreate_neon_data_stats_report(tableName,tableRows)"
                "\n  select '{0}',count(1) from {0};".format(table))
        statements.append(
            "select * from z_newcreate_neon_data_stats_report"
            " order by tableRows desc")
        return "\n".join(statements)

    def main(self, currentwork):
        """Run the stats report against *currentwork*'s configured database and
        display the (tableName, tableRows) result set in a window."""
        database = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        conn = self.ConnDB.conndb(database)
        ret = self.ConnDB.exec(conn, self._build_stats_sql())
        result = ret.fetchall()
        if len(result) > 0:
            layout = [[
                sg.Table(result, ['tableName', 'tableRows'],
                         col_widths=[25, 10],
                         auto_size_columns=False,
                         justification="left",
                         size=(35, 30))
            ]]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
0
class ToMySQL:
    """Create tables and bulk-insert pandas datasets into a MySQL database.

    *values* is the GUI form dict (host/port/user/passwd/dbname plus the
    ``add_tname`` and ``mode1`` flags).
    """

    def __init__(self, values):
        self.values = values
        self.ConnDB = ConnDB(values)
        self.HandleConfig = HandleConfig()
        self.conn_db = self.ConnDB.conndb(host=values['host'],
                                          port=int(values['port']),
                                          user=values['user'],
                                          passwd=values['passwd'],
                                          db=values['dbname'],
                                          charset='utf8')
        # Remember the session sql_mode so it can be restored after the
        # temporary `sql_mode = ""` workaround used during inserts.
        self.sql_mode = self.ConnDB.exec(
            self.conn_db, 'SELECT @@SESSION.sql_mode').fetchall()[0][0]

    def is_Chinese(self, word):
        """Return True if *word* contains at least one CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    # create table
    def create_table(self, col_maxlen, tablename):
        """Drop and recreate *tablename* with one column per key of
        *col_maxlen*, sized by max byte length; falls back first to all-text
        columns, then to the MyISAM engine. Returns ``(tablename, sql)``."""
        sql = "drop table if exists {0};create table {0}(".format(tablename)
        for col, maxLen in col_maxlen.items():
            colType = "varchar(255)"
            if type(maxLen) == int:
                if maxLen > 255:
                    colType = "TEXT"
                if maxLen > 65535:
                    colType = "MEDIUMTEXT"
                if maxLen > 16777215:
                    colType = "LONGTEXT"

            sql = sql + "`{0}` {1} default null,".format(col, colType)

        if self.values['add_tname']:
            # Optional provenance column holding the source table name.
            sql = sql + "`table_name` varchar(255) default '{0}',".format(
                tablename)
        sql = sql[:-1] + ")"

        try:
            self.ConnDB.exec(self.conn_db, sql)
        except Exception:  # was a bare except; keep best-effort fallbacks
            try:
                # e.g. row size too large -- retry with text columns only
                sql = re.sub(r"varchar\(\d+\)", "text", sql)
                self.ConnDB.exec(self.conn_db, sql)
            except Exception:
                # last resort: MyISAM has laxer row-size limits
                sql = sql + ' engine=myisam'
                self.ConnDB.exec(self.conn_db, sql)
        return tablename, sql

    # insert into
    def insert_data(self, dataset, tablename, created_sql):
        """Insert *dataset* into *tablename*, keeping only columns that exist
        in the target table.  With ``values['mode1']`` set, data errors are
        retried: 1366 via ``sql_mode=""`` (after truncating partial rows),
        1118 by recreating the table with text columns."""
        if dataset.empty:
            return
        sql = "select column_name from information_schema.`COLUMNS` " \
              "where table_schema = '{0}' and table_name = '{1}'".format(self.values['dbname'], tablename)
        columns = self.ConnDB.exec(self.conn_db, sql).fetchall()
        exists_columns = []
        for column in columns:
            if column[0] in dataset.columns:
                exists_columns.append(column[0])
        dataset = dataset[exists_columns]
        columns = dataset.columns
        dataset = np.array(dataset)
        datalist = dataset.tolist()
        cols = '`,`'.join(columns)
        l = len(columns)
        v = '%s,' * l
        v = v[:-1]

        sql = 'insert into `%s`(%s) values(' % (tablename, '`' + cols + '`')
        sql = sql + '%s)' % v
        try:
            self.ConnDB.exec(self.conn_db, sql, datalist=datalist)
        except pymysql.err.InternalError as reason:
            if self.values['mode1']:
                reason_num_0 = str(reason).split(',')[0].strip('(')
                # utf8mb4
                if reason_num_0 == '1366':
                    try:
                        # Discard any partially inserted rows, then retry the
                        # whole insert under the permissive empty sql_mode.
                        # BUG FIX: was '{1}' with a single positional argument,
                        # which raised IndexError instead of truncating.
                        sql_1 = 'truncate table `{0}`;'.format(tablename)
                        self.ConnDB.exec(self.conn_db, sql_1)
                        self.ConnDB.exec(self.conn_db,
                                         'set SESSION sql_mode = ""')
                        self.ConnDB.exec(self.conn_db, sql, datalist=datalist)
                        self.ConnDB.exec(
                            self.conn_db, 'set SESSION sql_mode = "{}"'.format(
                                self.sql_mode))

                    except pymysql.err.InternalError as reason:
                        reason_num_1 = str(reason).split(',')[0].strip('(')
                        # 1118 error (row too large): rebuild with text columns
                        if reason_num_1 == '1118':
                            sql_0 = re.sub('varchar\\(\\d+\\)', 'text',
                                           created_sql)
                            sql_1 = 'drop table if exists `{0}`.`{1}`;'.format(
                                self.values['dbname'], tablename)
                            self.ConnDB.exec(self.conn_db, sql_1)
                            self.ConnDB.exec(self.conn_db, sql_0)

                            self.ConnDB.exec(self.conn_db,
                                             'set SESSION sql_mode = ""')
                            self.ConnDB.exec(self.conn_db,
                                             sql,
                                             datalist=datalist)
                            self.ConnDB.exec(
                                self.conn_db,
                                'set SESSION sql_mode = "{}"'.format(
                                    self.sql_mode))

                        else:
                            raise pymysql.err.InternalError(str(reason))

                elif reason_num_0 == '1118':
                    # Row too large: recreate with text columns and retry once.
                    sql_1 = re.sub('varchar\\(\\d+\\)', 'text', created_sql)
                    sql_0 = 'drop table if exists `{0}`.`{1}`;'.format(
                        self.values['dbname'], tablename) + sql_1
                    self.ConnDB.exec(self.conn_db, sql_0)
                    self.ConnDB.exec(self.conn_db, sql, datalist=datalist)
                else:
                    raise pymysql.err.InternalError(str(reason))
            else:
                raise pymysql.err.InternalError(str(reason))
Exemplo n.º 5
0
class MysqlRestore:
    """GUI-driven workflow that (re)builds a MySQL database for the current
    JIRA work item.

    ``main`` behaves differently depending on ``advanced``:

    * ``0`` - unpack ``<db>_backup.sql.gz`` and replay it together with the
      standard configuration / function / smoke-test scripts.
    * ``1`` - plain local restore, or dump the database and upload it to the
      AWS hotfix host via WinSCP.
    * ``2`` - let the user tick individual SQL script files to run,
      optionally dropping/recreating the database first, then show
      smoke-test result tables.
    """

    def __init__(self):
        # Project helpers: config accessor, mysqldump wrapper, DB connector.
        self.HandleConfig = HandleConfig()
        self.MysqlDump = MysqlDump()
        self.ConnDB = ConnDB()
        # Default connection (no explicit server) used for drop/create.
        self.conn = self.ConnDB.conndb()

    def main(self, currentwork, advanced=1):
        """Run the restore workflow for config section *currentwork*.

        ``advanced`` selects the mode (see class docstring).  Returns
        ``None`` on cancel/normal completion and ``0`` when running a SQL
        file failed.
        """
        # Resolve all paths from the per-work-item and default config sections.
        # NOTE: all path handling below assumes Windows separators ('\\').
        jirapath = self.HandleConfig.handle_config('g', currentwork,
                                                   'jirapath')
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        merge = self.HandleConfig.handle_config('g', currentwork, 'merge')
        navicat_script_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'navicat_script_path')

        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        gittemplatepath = git_repo_path + 'templates\\'
        scriptspath = jirapath + 'script\\'
        scripts_bak_path = jirapath + 'db_backup\\'
        tmpscript = scriptspath + 'tmp.txt'
        script = navicat_script_path + dbname + '\\{}.sql'.format(dbname)
        backupsql = scriptspath + '{0}_after.sql'.format(dbname)
        excel_sql_file = scriptspath + '{0}.sql'.format(dbname)

        # truncate tmp.txt first
        # (opening with mode 'w' already truncates; the explicit truncate()
        # is a harmless no-op kept from the original.)
        with open(tmpscript, 'w', encoding='utf8') as (fw):
            fw.truncate()

        files = []
        # drop == 1 means "drop and recreate the database before running".
        drop = 1
        backup_files = [backupsql]
        msg = ''

        if not advanced:
            # restore database in config
            # Mode 0: decompress the gzipped backup next to it, then replay
            # backup + per-db script + standard config/function/smoke scripts.
            backupgz = scripts_bak_path + '{}_backup.sql.gz'.format(dbname)
            backupsql = scripts_bak_path + '{0}_backup.sql'.format(dbname)
            with gzip.open(backupgz, 'rb') as (f_in):
                with open(backupsql, 'wb') as (f_out):
                    shutil.copyfileobj(f_in, f_out)
            backup_files = [backupsql, scriptspath + '{0}.sql'.format(dbname)]
            files = [
                scriptspath + 'configration.sql',
                gitscriptpath + 'functionAndProcedure.sql',
                gitscriptpath + 'smokeTestV2.sql'
            ]

            # get b4_size
            # Record the compressed backup size (MB, rounded) in the config.
            b4_size = os.path.getsize(backupgz) / 1024 / 1024
            b4_size = '{:.0f}'.format(b4_size)
            self.HandleConfig.handle_config('s', currentwork, 'b4_size',
                                            b4_size)

        elif advanced == 1:
            # restore database
            # Mode 1: ask whether to restore locally or dump-and-upload to AWS.
            msg = 'Restore Database Complete!'
            layout = [[sg.Button('RestoreToLocalHost')],
                      [sg.Button('RestoreToAWSHotfix')]]

            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()

            if event is None:
                # Window closed without a choice.
                return

            elif event == 'RestoreToLocalHost':
                # Fall through: restore with the default backup_files below.
                pass

            elif event == 'RestoreToAWSHotfix':
                # Dump the database to a .sql.gz and push it to the AWS host
                # through WinSCP's command-line client, then stop here.
                self.MysqlDump.main(currentwork, gz=True)
                winscppath = self.HandleConfig.handle_config(
                    'g', 'defaultpath', 'winscppath')
                db_backup = self.HandleConfig.handle_config(
                    'g', currentwork,
                    'jirapath') + 'db_backup\\{0}.sql.gz'.format(dbname)
                cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/fulldata/" "exit"'.format(
                    winscppath, db_backup)
                os.system(cmd)
                sg.popup('Restore To AWS Hotfix Complete')
                return

        elif advanced == 2:
            # restore database in run script
            # Mode 2: checkbox dialog; most checkbox *keys* are the full path
            # of the SQL file to run, so checked keys can be used directly as
            # file names further down.
            msg = 'Runscript Database Complete!'

            layout = [
                [
                    sg.Checkbox('AllScriptWithoutSmokeTest',
                                key='AllScriptWithoutSmokeTest')
                ], [sg.Checkbox('ExcelSqlFile', key=excel_sql_file)],
                [sg.Checkbox('TemplateScript', key=script)],
                [sg.Checkbox('TemplateSmokeTest', key='TemplateSmokeTest')],
                [
                    sg.Checkbox('TemplateScriptAfter',
                                key=navicat_script_path + dbname +
                                '\\{}_after_template.sql'.format(dbname))
                ],
                [
                    sg.Checkbox('AccountMerge',
                                key=gitscriptpath + 'accountMerge.sql')
                ],
                [
                    sg.Checkbox('AccountMergeAfter',
                                key=navicat_script_path + dbname +
                                '\\{}_after_merge.sql'.format(dbname))
                ],
                [
                    sg.Checkbox('dataClean',
                                key=gitscriptpath + 'fullDataClean.sql')
                ],
                [
                    sg.Checkbox('SmokeTest',
                                key=gitscriptpath + 'smokeTestV2.sql')
                ],
                [
                    # NOTE: the .format(dbname) below is a no-op (the label
                    # has no placeholder); kept byte-identical on purpose.
                    sg.Checkbox(
                        'All Script and Restore To AWS Hotfix'.format(dbname),
                        key='AllScript',
                        default=True,
                        text_color='blue')
                ],
                [
                    sg.Checkbox('Drop database if exists {}'.format(dbname),
                                key='drop',
                                default=True,
                                text_color='blue')
                ], [sg.Submit(), sg.Cancel()]
            ]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()

            if event in (None, 'Cancel'):
                return

            if values['drop']:
                drop = 1
            else:
                # Keeping the existing database: the *_after.sql backup must
                # not be replayed either.
                drop = 0
                backup_files = []

            if values['AllScript'] or values['AllScriptWithoutSmokeTest']:
                # Fixed full pipeline, in execution order.
                files = [
                    script, navicat_script_path + dbname +
                    '\\{}_after_template.sql'.format(dbname),
                    gitscriptpath + 'accountMerge.sql', navicat_script_path +
                    dbname + '\\{}_after_merge.sql'.format(dbname),
                    gitscriptpath + 'fullDataClean.sql',
                    gitscriptpath + 'smokeTestV2.sql',
                    gitscriptpath + 'clear.sql'
                ]
                # Config values are stored as strings, hence the 'False' test.
                if merge == 'False':
                    files.remove(gitscriptpath + 'accountMerge.sql')
                if values['AllScriptWithoutSmokeTest']:
                    files.remove(gitscriptpath + 'smokeTestV2.sql')

                # "All script" excludes any individually ticked script.
                for k, v in values.items():
                    if v:
                        if k != 'AllScript' and k != 'AllScriptWithoutSmokeTest' and k != 'drop':
                            sg.PopupError('Confict Selection!')
                            return

            else:
                # Individual selection: run exactly the checked files and
                # skip the *_after.sql backup replay.
                backup_files = []
                for k, v in values.items():
                    if v:
                        if k == 'TemplateSmokeTest':
                            # Expands to the four template smoke-test files.
                            files = files + [
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_account.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_custom_field.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_gais.sql',
                                gittemplatepath +
                                'template_smoke_test\\smoke_test_for_transaction.sql'
                            ]
                        elif k == 'drop':
                            continue
                        else:
                            # Checkbox key *is* the script file path.
                            files.append(k)

        # Concatenate the selected script files into tmp.txt so mysql runs
        # them as a single file, after any backup files.
        sqlfiles = backup_files
        if len(files) > 0:
            for f in files:
                with open(f, 'r', encoding='utf8') as (fa):
                    with open(tmpscript, 'a+', encoding='utf8') as (fw):
                        fw.writelines(['\n'])
                        shutil.copyfileobj(fa, fw)
            sqlfiles = sqlfiles + [tmpscript]
            #sqlfiles = sqlfiles + files

        if drop:
            sql = 'drop database if exists `{}`;\ncreate database `{}`;'.format(
                dbname, dbname)
            self.ConnDB.exec(self.conn, sql)
            print('\n\n{}'.format(sql))

        # Run every SQL file through the mysql client; stop on first failure.
        for sqlfile in sqlfiles:
            if not os.path.exists(sqlfile):
                continue
            ret = self.ConnDB.cmd(dbname, 'mysql', sqlfile)
            if ret == 0:
                continue
            else:
                sg.popup_error('Error!')
                return 0

        # Post-processing: the branches below can only trigger for
        # advanced == 2, where `values` is guaranteed to exist.
        if advanced == 2 and values['AllScript']:
            # Show the smoke-test summary, then (unless cancelled) dump the
            # database and upload it to the AWS hotfix host via WinSCP.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT * FROM smoke;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'errorMsg', 'Error Count After', 'Error Count Pre',
                            'Pass Flag'
                        ],
                                 col_widths=[60, 15, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                    [
                        sg.Text(
                            'Press OK to continue to dump database and upload to winscp.'
                        )
                    ], [sg.OK(), sg.Cancel()]
                ]
                window = sg.Window(title=currentwork, layout=layout)
                event, values = window.read()
                window.close()
                if event in ('Cancel', None):
                    return
            self.MysqlDump.main(currentwork, gz=True)
            winscppath = self.HandleConfig.handle_config(
                'g', 'defaultpath', 'winscppath')
            db_backup = self.HandleConfig.handle_config(
                'g', currentwork,
                'jirapath') + 'db_backup\\{0}.sql.gz'.format(dbname)
            cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/fulldata/" "exit"'.format(
                winscppath, db_backup)
            os.system(cmd)
        elif advanced == 2 and values['TemplateSmokeTest']:
            # Show failing template smoke-test rows; offer to copy the query.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT templateTableName, errorMsg, errorCount, passFlag FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'templateTableName', 'errorMsg', 'Error Count',
                            'Pass Flag'
                        ],
                                 col_widths=[30, 100, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                    [
                        sg.Text(
                            'SELECT * FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
                        ),
                        sg.Button('Copy')
                    ]
                ]
                window1 = sg.Window(title=currentwork, layout=layout)
                event1, values1 = window1.read()
                window1.close()
                if event1 is None:
                    return
                if event1 == 'Copy':
                    # Put the diagnostic query on the clipboard.
                    pyperclip.copy(
                        'SELECT * FROM smoke_test_report_for_template WHERE passFlag<>"Pass" ORDER BY passFlag;'
                    )
        elif advanced == 2 and values[gitscriptpath + 'smokeTestV2.sql']:
            # Show the smoke-test summary for an individually selected run.
            conn = self.ConnDB.conndb(dbname)
            sql = 'SELECT * FROM smoke;'
            ret = self.ConnDB.exec(conn, sql)
            result = ret.fetchall()
            if len(result) > 0:
                layout = [
                    [
                        sg.Table(result, [
                            'errorMsg', 'Error Count After', 'Error Count Pre',
                            'Pass Flag'
                        ],
                                 col_widths=[100, 15, 15, 15],
                                 auto_size_columns=False,
                                 justification="left")
                    ],
                ]
                window1 = sg.Window(title=currentwork, layout=layout)
                # NOTE(review): this rebinds `values` (shadowing the checkbox
                # dict) — harmless because `values` is not used afterwards.
                event1, values = window1.read()
                window1.close()

        if advanced:
            sg.Popup(msg, title=currentwork)
class ImportExcel:
    """Import every Excel/CSV file from a directory into MySQL.

    Each CSV file, and each sheet of each Excel workbook, becomes one table.
    Table and column names are lower-cased, sanitised (non-word characters
    replaced by ``_``), de-duplicated and length-capped.  Charset errors
    (MySQL 1366) and row-size errors (MySQL 1118) are recovered by relaxing
    ``sql_mode`` and/or widening varchar columns to TEXT.  Failures are
    appended to ``log.txt`` inside the source directory.
    """

    def __init__(self):
        # Project helpers: config accessor and DB connector.
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, values):
        """Run the import described by the GUI ``values`` dict.

        Expected keys: ``file_dir``, ``csv_encoding``, ``host``, ``port``,
        ``user``, ``passwd``, ``dbname``, ``na_values`` and ``redb``.
        """
        # save the recent input
        self.HandleConfig.handle_config("s", "file", "file_dir",
                                        values['file_dir'])
        self.HandleConfig.handle_config("s", "file", "csv_encoding",
                                        values['csv_encoding'])
        self.HandleConfig.handle_config("s", "dbinfo", "host", values['host'])
        self.HandleConfig.handle_config("s", "dbinfo", "port", values['port'])
        self.HandleConfig.handle_config("s", "dbinfo", "user", values['user'])
        self.HandleConfig.handle_config("s", "dbinfo", "passwd",
                                        values['passwd'])
        self.HandleConfig.handle_config("s", "dbinfo", "dbname",
                                        values['dbname'])
        self.HandleConfig.handle_config("s", "file", "na_values",
                                        values['na_values'])

        self.db = values['dbname']
        # Server-level connection (no default schema) for DDL and inserts.
        self.conn = self.ConnDB.conndb(host=values['host'],
                                       port=int(values['port']),
                                       user=values['user'],
                                       passwd=values['passwd'],
                                       charset='utf8')
        self.file_dir = values['file_dir']
        na_values = values['na_values'].split(',')

        excelcsvs = self.get_excel()
        if not excelcsvs:
            sg.Popup('No Excel/CSV files!')
            return

        # write log
        log_file = self.file_dir + "\\log.txt"
        if os.path.isfile(log_file):
            os.remove(log_file)

        # create database
        if values['redb']:
            print("Bengin to Re-Create Database")
            sql = "drop database if exists `{0}`;create database `{0}`".format(
                self.db)
            self.ConnDB.exec(self.conn, sql)
            print('\n\n{}'.format(sql))
        # Schema-bound connection used for CREATE TABLE statements.
        self.conn_db = self.ConnDB.conndb(host=values['host'],
                                          port=int(values['port']),
                                          user=values['user'],
                                          passwd=values['passwd'],
                                          db=self.db,
                                          charset='utf8')
        # Remember the session sql_mode so it can be restored after the
        # temporary `sql_mode = ""` relaxations used for error recovery.
        self.sql_mode = self.ConnDB.exec(
            self.conn_db, 'SELECT @@SESSION.sql_mode').fetchall()[0][0]

        longexcelcsvs = defaultdict()
        long_num = 0        # counter used to keep truncated table names unique
        num = 0             # sheets/files attempted
        num_s = 0           # sheets/files imported successfully

        print("\n\nBegin to import...\n")
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                # get csv dataset
                if re.fullmatch(r"^.*?\.csv$", excelcsv, flags=re.IGNORECASE):
                    datasets = defaultdict()
                    csv = self.file_dir + "\\" + excelcsv
                    # Determining the encoding of a CSV file
                    # http://pandaproject.net/docs/determining-the-encoding-of-a-csv-file.html
                    # BUG FIX: default to utf8 *first*, then honor the
                    # user-supplied encoding.  Previously the unconditional
                    # `csv_encoding = 'utf8'` came last and always clobbered
                    # the user's choice.
                    csv_encoding = 'utf8'
                    if values['csv_encoding']:
                        csv_encoding = values['csv_encoding']
                    # Fallback chain: requested encoding -> ansi -> utf-16 ->
                    # chardet-detected encoding.
                    try:
                        dataset = pd.read_csv(csv,
                                              encoding=csv_encoding,
                                              dtype=str,
                                              na_values=na_values,
                                              keep_default_na=False,
                                              header=0,
                                              engine='c')
                    except UnicodeDecodeError:
                        try:
                            dataset = pd.read_csv(csv,
                                                  encoding='ansi',
                                                  dtype=str,
                                                  na_values=na_values,
                                                  keep_default_na=False,
                                                  header=0,
                                                  engine='c')
                        except UnicodeDecodeError:
                            try:
                                dataset = pd.read_csv(csv,
                                                      encoding='utf-16',
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                            except UnicodeDecodeError:
                                # Sniff the encoding; for big files only the
                                # first line is sampled to keep this fast.
                                with open(csv, 'rb') as f:
                                    bytes = f.read()
                                    if len(bytes) > 100000:
                                        with open(csv, 'rb') as f:
                                            bytes = f.readline()
                                encode = chardet.detect(bytes)['encoding']
                                if encode == 'ascii':
                                    encode = 'ansi'  #ansi is a super charset of ascii
                                dataset = pd.read_csv(csv,
                                                      encoding=encode,
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                    datasets['sheet1'] = dataset

                # get excel dataset(include sheets)
                if re.fullmatch(r"^.*?\.xlsx?$", excelcsv,
                                flags=re.IGNORECASE):
                    isexcel = 1
                    excel = self.file_dir + "\\" + excelcsv
                    # sheet_name=None loads *all* sheets as {name: DataFrame}.
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_values=na_values,
                                             keep_default_na=False,
                                             header=0,
                                             sheet_name=None)

                # one sheet/csv is one table
                for k, v in datasets.items():
                    created_table = None
                    try:
                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        # rename table name if excel have more than one sheets
                        if isexcel == 1 and len(datasets) > 1:
                            tablename = origin_tablename + '_' + re.sub(
                                r"[^\w]+",
                                "_",
                                sheet_name,
                                flags=re.IGNORECASE)
                            self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        # cut off table name
                        # MySQL identifiers are limited to 64 chars; Chinese
                        # names are cut shorter because of multi-byte utf8.
                        if len(tablename.encode("utf8")) > 64:
                            if self.is_Chinese(tablename):
                                tablename = "{0}_".format(
                                    long_num) + tablename[:20]
                            else:
                                tablename = "{0}_".format(
                                    long_num) + tablename[:60]
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, "a", encoding="utf8") as fw:
                                fw.write(
                                    "table name cut off: {0}, tablename: {1}\n"
                                    .format(self.excel_name, tablename))

                        col_maxlen, dataset = self.read_data(dataset)

                        if dataset.empty:
                            raise EmptyError("Empty")

                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)

                        try:
                            self.insert_data(dataset, tablename)

                        except pymysql.err.InternalError as reason:

                            # pymysql formats errors as "(errno, 'msg')";
                            # pull out the numeric code.
                            reason_num_0 = str(reason).split(',')[0].strip('(')

                            if reason_num_0 == '1366':
                                # 1366 = incorrect string value (charset):
                                # retry the whole insert with sql_mode = ""
                                # so MySQL degrades the error to a warning.
                                try:
                                    sql_1 = 'truncate table `{0}`.`{1}`;'.format(
                                        self.db, tablename)
                                    self.ConnDB.exec(self.conn, sql_1)
                                    self.ConnDB.exec(
                                        self.conn, 'set SESSION sql_mode = ""')
                                    self.insert_data(dataset, tablename)
                                    self.ConnDB.exec(
                                        self.conn,
                                        'set SESSION sql_mode = "{}"'.format(
                                            self.sql_mode))

                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ',')[0].strip('(')
                                    if reason_num_1 == '1118':
                                        # 1118 = row size too large: recreate
                                        # the table with TEXT columns, then
                                        # insert again under relaxed sql_mode.
                                        sql = re.sub('varchar\\(\\d+\\)',
                                                     'text', created_sql)
                                        sql_1 = 'drop table if exists `{0}`.`{1}`;'.format(
                                            self.db, tablename)
                                        self.ConnDB.exec(self.conn, sql_1)
                                        self.ConnDB.exec(self.conn, sql)

                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = ""')
                                        self.insert_data(dataset, tablename)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = "{}"'.
                                            format(self.sql_mode))

                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))

                            elif reason_num_0 == '1118':
                                # Row too large on the first attempt: recreate
                                # with TEXT columns and retry once.
                                sql = re.sub('varchar\\(\\d+\\)', 'text',
                                             created_sql)
                                sql_0 = 'drop table if exists `{0}`.`{1}`;'.format(
                                    self.db, tablename) + sql
                                self.ConnDB.exec(self.conn, sql_0)
                                self.insert_data(dataset, tablename)
                            else:
                                raise pymysql.err.InternalError(str(reason))

                    except Exception as reason:
                        # Per-sheet failure: log it, drop the half-created
                        # table, and carry on with the next sheet.
                        print('Failed: {}'.format(self.excel_name))
                        with open(log_file, 'a') as (fw):
                            fw.write('sheet name: {0}, error: {1}\n'.format(
                                self.excel_name, str(reason)))
                        if created_table:
                            sql = 'drop table if exists `{0}`.`{1}`'.format(
                                self.db, created_table)
                            self.ConnDB.exec(self.conn, sql)
                        continue

                    else:
                        print('Imported: {}'.format(self.excel_name))
                        num_s += 1
                    finally:
                        num += 1
                        # Always restore the original sql_mode, even after an
                        # error mid-recovery.
                        # NOTE(review): sql_mode was read from conn_db but is
                        # reset on conn — presumably both hit the same server;
                        # verify.
                        self.ConnDB.exec(
                            self.conn, 'set SESSION sql_mode = "{}"'.format(
                                self.sql_mode))

            except Exception as reason:
                # Per-file failure (unreadable file, bad encoding, ...).
                print("Failed: {}".format(excelcsv))
                with open(log_file, "a") as fw:
                    fw.write("file name: {0}, error: {1}\n".format(
                        self.excel_name, str(reason)))
                num += 1
                continue

        print('\nTotal: {}, Imported: {}\n'.format(num, num_s))
        self.conn.close()
        self.conn_db.close()

        # Surface the log (opens it via the OS) and summarise the run.
        if os.path.isfile(log_file):
            os.popen(log_file)
            sg.Popup("You have logs , see file '{}' \n\ncheck it first".format(
                log_file))
            if num_s == 0:
                sg.Popup("No imported tables!")
                return
        sg.Popup("Done!")

    def is_Chinese(self, word):
        """Return True if *word* contains at least one CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    def get_excel(self):
        # a function to get excel/csv files under the dictionary
        """Return {filename: sanitised_tablename} for every .xls/.xlsx/.csv
        file directly inside ``self.file_dir``."""
        excels = os.listdir(self.file_dir)
        excelcsvs = defaultdict()
        for excel in excels:
            excel_dir = self.file_dir + "\\" + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    r"^.*?\.(xls|xlsx|csv)$", excel, flags=re.IGNORECASE):
                tablename = re.sub(r"\.(xls|xlsx|csv)$",
                                   '',
                                   excel.lower(),
                                   flags=re.IGNORECASE)
                # replace all character not \w to "_"
                tablename = re.sub(r"[^\w]+",
                                   "_",
                                   tablename,
                                   flags=re.IGNORECASE)
                excelcsvs[excel] = tablename
        return excelcsvs

    def read_data(self, dataset):
        """Clean *dataset* and compute per-column maximum value lengths.

        Drops all-empty rows, repairs headers (unnamed / blank / duplicate /
        over-long column names), converts empty strings back to None, and
        returns ``(col_maxlen, dataset)`` where ``col_maxlen`` maps column
        name -> longest cell length (used to pick MySQL column types).
        """
        dataset = dataset.fillna(value="")
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        # df1 holds the string length of every cell.
        f = lambda x: len(x)
        df1 = dataset.applymap(f)
        # Drop rows whose every cell is empty (row max length == 0).
        f = lambda x: max(x)
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=['c'])
        indexs = df3.loc[(df3['c'] == 0)].index
        dataset.drop(indexs, inplace=True)

        # deal with columns
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        low_col = [col.lower() for col in self.columns]
        # If *every* header is a pandas auto-name ("Unnamed: N"), the real
        # header row is the first data row: promote it.
        recol = 1
        for col in low_col:
            if 'unnamed: ' not in col:
                recol = 0
                break

        if recol:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)
            low_col = [col.lower() for col in self.columns]

        self.columns = [str(col).strip() for col in self.columns]
        # fix blank col name
        f = lambda x: "unnamed" if x == "" else x
        self.columns = [f(col) for col in self.columns]

        # cut off col
        # MySQL column identifiers are limited to 64 chars; Chinese names are
        # cut shorter because of multi-byte utf8.
        def f(x):
            if len(x.encode("utf8")) <= 63:
                x = x
            elif self.is_Chinese(x):
                x = x[:20].strip()
            else:
                x = x[:62].strip()
            return x

        self.columns = [f(col) for col in self.columns]

        # fix duplicate column name
        # Repeatedly suffix later duplicates with a counter until no
        # case-insensitive duplicates remain.
        while 1:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1

                idx += 1

            if c == 0:
                break

        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()
        # col_maxlen: longest cell per column (computed over the pre-drop df1).
        f = lambda x: max(x)
        df1.columns = self.columns
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()
        # Empty strings become SQL NULLs on insert.
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)

        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """CREATE TABLE *tablename* with column types sized from *col_maxlen*.

        Returns ``(tablename, sql)`` so the caller can drop/recreate on error.
        """
        sql = "create table {0}(".format(tablename)
        for col, maxLen in col_maxlen.items():
            # Pick the narrowest type that fits the longest observed value.
            colType = "varchar(255)"
            if maxLen > 255:
                colType = "TEXT"
            if maxLen > 65535:
                colType = "MEDIUMTEXT"
            if maxLen > 16777215:
                colType = "LONGTEXT"

            sql = sql + "`{0}` {1} default null,".format(col, colType)

        sql = sql[:-1] + ")"

        try:
            self.ConnDB.exec(self.conn_db, sql)
        except Exception:
            # e.g. row size too large: degrade every varchar to TEXT and retry.
            # (Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed.)
            sql = re.sub(r"varchar\(\d+\)", "text", sql)
            self.ConnDB.exec(self.conn_db, sql)

        return tablename, sql

    def insert_data(self, dataset, tablename):
        """Bulk-insert every row of *dataset* into ``self.db``.*tablename*
        using a parameterised INSERT (values passed via ``datalist``)."""
        dataset = np.array(dataset)
        datalist = dataset.tolist()
        cols = '`,`'.join(self.columns)
        l = len(self.columns)
        v = '%s,' * l
        v = v[:-1]

        sql = 'insert into `%s`.`%s`(%s) values(' % (self.db, tablename,
                                                     '`' + cols + '`')
        sql = sql + '%s)' % v
        self.ConnDB.exec(self.conn, sql, datalist=datalist)
Exemplo n.º 7
0
class ConfigScript:
    """GUI editor for the custom section of a work's configration.sql.

    Presents the lines between the '# Custom Conigurations Start' and
    '# Custom Conigurations End' markers as a PySimpleGUI form, splices the
    edited values back on top of the git-repo template, then optionally
    pulls, restores and dumps the work's database.
    """
    def __init__(self):
        # Collaborators: config accessor, DB restore/dump helpers, AWS
        # downloader, and a live MySQL connection.
        self.HandleConfig = HandleConfig()
        self.MysqlRestore = MysqlRestore()
        self.MysqlDump = MysqlDump()
        self.Download = Download()
        self.ConnDB = ConnDB()
        self.conn = self.ConnDB.conndb()

    def main(self, currentwork):
        """Run the config form for `currentwork` and apply the result.

        Returns early (None) when the user cancels, or right after copying
        the template when 'Git Configration.sql' is pressed.
        """
        jirapath = self.HandleConfig.handle_config('g', currentwork,
                                                   'jirapath')
        jiraname = self.HandleConfig.handle_config('g', currentwork,
                                                   'jiraname')
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        configration_sql = gitscriptpath + 'configration.sql'
        configration_sql_new = jirapath + 'script\\configration.sql'
        # read local configration.sql
        with open(configration_sql_new, 'r', encoding='utf8') as (fa):
            lines = fa.readlines()
        # Slice out the editable section between the marker lines.
        # NOTE: the markers must match byte-for-byte (including the
        # 'Conigurations' typo), otherwise .index() raises ValueError.
        idx_s = lines.index('# Custom Conigurations Start\n')
        idx_e = lines.index('# Custom Conigurations End\n')
        config_lines = lines[idx_s + 1:idx_e]
        layout = []
        option_dict = defaultdict()  # form key -> trailing '#(...)' option text
        idx = 0
        frame = []
        title = ''
        titles = defaultdict()  # form-row index -> '#Flag <title>' line to re-emit
        title_idx = 0
        for line in config_lines:
            if re.match('^#Flag ', line):
                # A '#Flag ' line starts a new titled frame in the form.
                if frame:
                    lay = [
                        sg.Frame(layout=frame, title=title, title_color='blue')
                    ]
                    layout.append(lay)
                title = line.split('#Flag')[-1].strip()
                frame = []
                titles[title_idx] = '#Flag ' + title + '\n'

            elif re.match('^INSERT INTO z_newcreate_data_import_config', line,
                          re.IGNORECASE):
                # Parse "INSERT ...(taskName,taskValue) VALUES('k','v')#(opts)"
                # into key k, default d and optional dropdown options.
                s = line.split('(')
                k = s[2].split(',')[0].replace("'", '').strip()
                d = s[2].split(',')[1].replace("'", '').replace(
                    ')', '').replace(';', '').replace('#', '').strip()
                option_dict[k] = ''

                if len(s) > 3:
                    # dropdown
                    options = s[3:]
                    option_dict[k] = '#(' + '('.join(options)
                    for i in range(len(options)):
                        options[i] = options[i].replace("'", '').replace(
                            ')', '').replace('\n', '').replace(',', '')
                    lay = [
                        sg.Text(k),
                        sg.Combo(options, default_value=d, key=k)
                    ]
                else:
                    # oneline text
                    if re.match('^Legacy Account Custom Field Name', k,
                                re.IGNORECASE):
                        # Suffix a counter so repeated keys stay unique keys
                        # in the PySimpleGUI values dict.
                        k = 'Legacy Account Custom Field Name' + str(idx)
                        idx += 1
                    option_dict[k] = ''
                    lay = [sg.Text(k), sg.InputText(d, key=k)]
                frame.append(lay)
                title_idx += 1
        if frame:
            lay = [sg.Frame(layout=frame, title=title, title_color='blue')]
            layout.append(lay)

        layout.append(
            [sg.Submit(),
             sg.Cancel(),
             sg.Button('Git Configration.sql')])
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return

        # copy configration.sql from git repo
        # replace new config into configration.sql
        shutil.copy(configration_sql, configration_sql_new)
        if event == 'Git Configration.sql':
            return
        li = []
        sql_pre = 'INSERT INTO z_newcreate_data_import_config(taskName,taskValue) VALUES('
        i = 0
        print(titles)  # NOTE(review): leftover debug output — consider removing
        for key, value in values.items():
            key_true = key
            if re.match('^Legacy Account Custom Field Name', key,
                        re.IGNORECASE):
                # Strip the uniquifying counter added when the form was built.
                key_true = 'Legacy Account Custom Field Name'
            if key == 'Work Start Date':
                if value == '':
                    value = str(date.today())
            # Rebuild the INSERT line, re-appending any saved dropdown options.
            sql = sql_pre + "'" + key_true + "'," + "'" + value + "');" + option_dict[
                key] + '\n'
            if i in titles.keys():
                li.append(titles[i])
            li.append(sql)

            i += 1
        # Splice the rebuilt lines into the fresh template between the markers.
        with open(configration_sql, 'r', encoding='utf8') as (fa):
            lines = fa.readlines()
        idx_s = lines.index('# Custom Conigurations Start\n')
        idx_e = lines.index('# Custom Conigurations End\n')
        lines_new = lines[:idx_s + 1] + li + lines[idx_e:]
        with open(configration_sql_new, 'w', encoding='utf8') as (fw):
            fw.writelines(lines_new)
        merge = 'True'
        if values['Account De-dupe Rule'] == 'No De-dupe':
            merge = 'False'
        self.HandleConfig.handle_config('s', jiraname, 'merge', merge)

        # download database
        if values['Pull Database From AWS'] == 'Yes':
            self.Download.main(currentwork)
        # restore
        self.MysqlRestore.main(currentwork, advanced=0)
        # dump to dbname_after
        self.MysqlDump.main(currentwork, after=1, op='mysqldump')

        sg.Popup('Config Complete!', title=currentwork)
Exemplo n.º 8
0
class DeleteWork:
    """Finish a work item: bundle its SQL scripts into a zip, optionally
    upload it via WinSCP, then retire the work from config and tracking.
    """
    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self, currentwork):
        """Zip/upload the scripts for `currentwork` and remove the work.

        The confirmation dialog offers three paths:
          * OK                      - zip, upload, and delete the work
          * 'Just Zip'              - zip and upload only
          * 'OK but not upload zip' - zip and delete, skipping the upload
        Returns 0 when cancelled or when only zipping.
        """
        layout = [[
            sg.Text(
                'Warnning!\n\nThis option will remove the work from the tool.\nDo you want to continue?'
            )
        ], [
            sg.OK(),
            sg.Button('Just Zip'),
            sg.Button('OK but not upload zip')
        ]]
        window = sg.Window(title=currentwork, layout=layout)
        event, values = window.read()
        window.close()
        if event is None:
            return 0

        navicat_script_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'navicat_script_path')
        jirapath = self.HandleConfig.handle_config('g', currentwork,
                                                   'jirapath')
        scriptspath = jirapath + 'script\\'
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        sqlfile = scriptspath + '{0}.sql'.format(dbname)
        txtfile = scriptspath + '{0}.txt'.format(dbname)
        merge = self.HandleConfig.handle_config('g', currentwork, 'merge')

        script = navicat_script_path + dbname + '\\{}.sql'.format(dbname)
        script_after_merge = navicat_script_path + dbname + '\\{}_after_merge.sql'.format(
            dbname)
        script_after_template = navicat_script_path + dbname + '\\{}_after_template.sql'.format(
            dbname)
        # Concatenate the scripts into <dbname>.txt in execution order;
        # accountMerge.sql is dropped when the work's merge flag is 'False'.
        files = [
            scriptspath + 'configration.sql',
            gitscriptpath + 'functionAndProcedure.sql', script,
            navicat_script_path + dbname +
            '\\{}_after_template.sql'.format(dbname),
            gitscriptpath + 'accountMerge.sql', script_after_merge,
            gitscriptpath + 'fullDataClean.sql', gitscriptpath + 'clear.sql'
        ]
        if merge == 'False':
            files.remove(gitscriptpath + 'accountMerge.sql')
        with open(txtfile, 'w', encoding='utf8') as (fw):
            fw.truncate()
        for f in files:
            with open(f, 'r', encoding='utf8') as (fa):
                with open(txtfile, 'a+', encoding='utf8') as (fw):
                    shutil.copyfileobj(fa, fw)

        # generate zipfile
        sqlfile_zip = scriptspath + '{0}.zip'.format(currentwork)
        if os.path.isfile(sqlfile_zip):
            os.remove(sqlfile_zip)
        zfile = zipfile.ZipFile(sqlfile_zip, 'w', zipfile.ZIP_DEFLATED)
        if os.path.isfile(sqlfile):
            zfile.write(sqlfile, '{0}.sql'.format(dbname))
        zfile.write(txtfile, '{0}.txt'.format(dbname))
        zfile.close()

        # upload zipfile
        if event != 'OK but not upload zip':
            winscppath = self.HandleConfig.handle_config(
                'g', 'defaultpath', 'winscppath')
            cmd = '{0}WinSCP.com /command "open aws188" "put {1} /home/neon/leiwu/dataimport/script/" "exit"'.format(
                winscppath, sqlfile_zip)
            os.system(cmd)

        if event == 'Just Zip':
            sg.Popup('Complete!', title=currentwork)
            return 0
        # remove work
        # Move the navicat scripts aside into the jira temp folder (only if
        # not already backed up) before dropping the work from config.
        script_temp = jirapath + 'temp\\{}.sql'.format(dbname)
        script_after_merge_temp = jirapath + 'temp\\{}_after_merge.sql'.format(
            dbname)
        script_after_template_temp = jirapath + 'temp\\{}_after_template.sql'.format(
            dbname)
        if (not os.path.isfile(script_temp)) and os.path.isfile(script):
            shutil.move(script, script_temp)
        if (not os.path.isfile(script_after_merge_temp)
            ) and os.path.isfile(script_after_merge):
            shutil.move(script_after_merge, script_after_merge_temp)
        if (not os.path.isfile(script_after_template_temp)
            ) and os.path.isfile(script_after_template):
            shutil.move(script_after_template, script_after_template_temp)

        # Mark the work released in the AWS tracking table.
        conn_db = self.ConnDB.conndb(server='awshost')
        sql = "update db_to_download.data_import_tracking set releaseDate = '{0}' where jiraId = '{1}' and releaseDate is null".format(
            date.today(), currentwork)
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()

        # Drop the work's config section and switch currentwork to the most
        # recently added remaining work (or '' when none are left).
        self.HandleConfig.handle_config(
            'rs', self.HandleConfig.handle_config('g', 'worklist',
                                                  currentwork))
        self.HandleConfig.handle_config('ro', 'worklist', key=currentwork)
        works = [
            work
            for work in self.HandleConfig.handle_config()['worklist'].values()
        ]
        currentwork = ''
        if works:
            currentwork = works[-1]
        self.HandleConfig.handle_config('s', 'global', 'currentwork',
                                        currentwork)

        if event != 'OK but not upload zip':
            # Copy the server-side run command to the clipboard for pasting.
            cmd = '/home/neon/leiwu/bin/dataImportRunScript.sh {0}_test {0}'.format(
                dbname)
            sg.Popup('Complete!\n\n{0} has been copied.'.format(cmd),
                     title=currentwork)
            pyperclip.copy(cmd)
        else:
            sg.Popup('Complete!', title=currentwork)
Exemplo n.º 9
0
class ImportExcel:
    """Import Excel/CSV files from a work's excel folder into MySQL.

    Each file (and each sheet of multi-sheet workbooks) becomes a table
    named z_excel_<nickname>_<filename>; failures are appended to log.txt
    in the excel folder, then data is cleaned and the DB is dumped.
    """
    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()
        self.MysqlDump = MysqlDump()
        # cleansql: SQL script run after import; nickname: table-name prefix.
        self.cleansql = self.HandleConfig.handle_config(
            'g', 'referencefile', 'cleanexcel')
        self.nickname = self.HandleConfig.handle_config(
            'g', 'excelimporter', 'nickname')

    def main(self, currentwork, advanced=0):
        """Recreate the work's database and import all Excel/CSV files.

        :param currentwork: work id; resolves dbname and paths from config.
        :param advanced: when truthy, prompt for explicit files and replay
                         an existing <dbname>.sql script after cleaning.
        """
        self.dbname = self.HandleConfig.handle_config('g', currentwork,
                                                      'dbname')
        # Strings treated as NA when reading (comma-separated in config).
        na_values = self.HandleConfig.handle_config('g', 'excelimporter',
                                                    'na_values').split(',')
        na_values = [i.strip() for i in na_values]
        if advanced:
            # Let the user pick individual files instead of a whole folder.
            layout = [[sg.InputText('', key='e'),
                       sg.FilesBrowse()],
                      [
                          sg.Submit(tooltip='Click to submit this form'),
                          sg.Cancel()
                      ]]
            window = sg.Window(title=currentwork, layout=layout)
            event, values = window.read()
            window.close()
            if event in (None, 'Cancel'):
                return
            files = values['e']
            files_list = files.split(';')
            files = []
            for f in files_list:
                files.append(f.split('/')[-1])
            self.importexcelpath = os.path.dirname(files_list[0]).replace(
                '/', '\\') + '\\'
            excelcsvs = self.get_excel(excels=files)
        else:
            self.importexcelpath = self.HandleConfig.handle_config(
                'g', currentwork, 'jirapath') + 'excel\\'
            excelcsvs = self.get_excel()

        if not excelcsvs:
            sg.Popup('No excels can import!')
            return
        log_file = self.importexcelpath + '\\log.txt'
        if os.path.isfile(log_file):
            os.remove(log_file)

        # Start from a clean target database.
        sql = 'drop database if exists `{0}`;\ncreate database `{0}`'.format(
            self.dbname, self.dbname)
        conn_db = self.ConnDB.conndb()
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()
        print('\n\n{}'.format(sql))
        self.conn = self.ConnDB.conndb(self.dbname)
        # Remember the session sql_mode so it can be restored after the
        # permissive-mode retries below.
        self.sql_mode = self.ConnDB.exec(
            self.conn, 'SELECT @@SESSION.sql_mode').fetchall()[0][0]

        longexcelcsvs = defaultdict()  # files whose table name was truncated
        long_num = 0
        num = 0    # sheets attempted
        num_s = 0  # sheets imported successfully

        print('\n\nBegin to import...\n')
        for excelcsv, origin_tablename in excelcsvs.items():
            self.excel_name = excelcsv
            try:
                isexcel = 0
                if re.fullmatch('^.*?\\.csv$', excelcsv,
                                flags=(re.IGNORECASE)):
                    datasets = defaultdict()
                    csv = self.importexcelpath + '\\' + excelcsv

                    # Determining the encoding of a CSV file
                    # http://pandaproject.net/docs/determining-the-encoding-of-a-csv-file.html
                    # Try utf-8, ansi, utf-16 in turn; as a last resort sniff
                    # the bytes with chardet.
                    try:
                        dataset = pd.read_csv(csv,
                                              encoding='utf-8',
                                              dtype=str,
                                              na_values=na_values,
                                              keep_default_na=False,
                                              header=0,
                                              engine='c')
                    except UnicodeDecodeError:
                        try:
                            dataset = pd.read_csv(csv,
                                                  encoding='ansi',
                                                  dtype=str,
                                                  na_values=na_values,
                                                  keep_default_na=False,
                                                  header=0,
                                                  engine='c')
                        except UnicodeDecodeError:
                            try:
                                dataset = pd.read_csv(csv,
                                                      encoding='utf-16',
                                                      dtype=str,
                                                      na_values=na_values,
                                                      keep_default_na=False,
                                                      header=0,
                                                      engine='c')
                            except UnicodeDecodeError:
                                with open(csv, 'rb') as f:
                                    bytes = f.read()
                                    if len(bytes) > 100000:
                                        with open(csv, 'rb') as f:
                                            bytes = f.readline()
                                encode = chardet.detect(bytes)['encoding']
                                if encode == 'ascii':
                                    encode = 'ansi'  #ansi is a super charset of ascii
                                dataset = pd.read_csv(csv,
                                                      encoding=encode,
                                                      dtype=str,
                                                      na_filter=False,
                                                      header=0,
                                                      engine="c")
                    datasets['sheet1'] = dataset
                if re.fullmatch('^.*?\\.xlsx?$',
                                excelcsv,
                                flags=(re.IGNORECASE)):
                    isexcel = 1
                    excel = self.importexcelpath + '\\' + excelcsv
                    # sheet_name=None reads every sheet into a dict.
                    datasets = pd.read_excel(excel,
                                             dtype=str,
                                             na_values=na_values,
                                             keep_default_na=False,
                                             header=0,
                                             sheet_name=None)

                for k, v in datasets.items():
                    created_table = None
                    try:

                        sheet_name = k
                        dataset = v
                        tablename = origin_tablename
                        self.excel_name = excelcsv
                        if isexcel == 1:
                            if len(datasets) > 1:
                                # Multi-sheet workbook: suffix the sheet name.
                                tablename = origin_tablename + '_' + re.sub(
                                    '[^0-9a-z]+',
                                    '_',
                                    sheet_name,
                                    flags=(re.IGNORECASE))
                                self.excel_name = excelcsv + '.' + sheet_name
                        tablename = tablename.lower()
                        if len(tablename) > 55:
                            # Truncate over-long names and log the mapping.
                            tablename = tablename[:51] + '_{0}'.format(
                                long_num)
                            long_num += 1
                            longexcelcsvs[excelcsv] = tablename
                            with open(log_file, 'a') as (fw):
                                fw.write(
                                    'extra long excel: {0}, tablename: {1}\n'.
                                    format(self.excel_name, tablename))
                        col_maxlen, dataset = self.read_data(dataset)
                        if dataset.empty:
                            raise EmptyError('Empty')
                        created_table, created_sql = self.create_table(
                            col_maxlen, tablename)
                        try:
                            self.insert_data(dataset, tablename)

                        except pymysql.err.InternalError as reason:

                            # MySQL error code, e.g. '1366' (bad value for
                            # column) or '1118' (row size too large).
                            reason_num_0 = str(reason).split(',')[0].strip('(')

                            if reason_num_0 == '1366':
                                # Retry with sql_mode='' so MySQL coerces bad
                                # values instead of rejecting the insert.
                                try:
                                    sql_1 = 'truncate table `{0}`.`{1}`;'.format(
                                        self.dbname, tablename)
                                    self.ConnDB.exec(self.conn, sql_1)
                                    self.ConnDB.exec(
                                        self.conn, 'set SESSION sql_mode = ""')
                                    self.insert_data(dataset, tablename)
                                    self.ConnDB.exec(
                                        self.conn,
                                        'set SESSION sql_mode = "{}"'.format(
                                            self.sql_mode))

                                except pymysql.err.InternalError as reason:
                                    reason_num_1 = str(reason).split(
                                        ',')[0].strip('(')
                                    if reason_num_1 == '1118':
                                        # Row still too large: rebuild the
                                        # table with text columns and retry.
                                        sql = re.sub('varchar\\(\\d+\\)',
                                                     'text', created_sql)
                                        sql_1 = 'drop table if exists `{0}`.`{1}`;'.format(
                                            self.dbname, tablename)
                                        self.ConnDB.exec(self.conn, sql_1)
                                        self.ConnDB.exec(self.conn, sql)

                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = ""')
                                        self.insert_data(dataset, tablename)
                                        self.ConnDB.exec(
                                            self.conn,
                                            'set SESSION sql_mode = "{}"'.
                                            format(self.sql_mode))

                                    else:
                                        raise pymysql.err.InternalError(
                                            str(reason))

                            elif reason_num_0 == '1118':
                                # Row size too large: swap varchars for text.
                                sql = re.sub('varchar\\(\\d+\\)', 'text',
                                             created_sql)
                                sql_0 = 'drop table if exists `{0}`.`{1}`;'.format(
                                    self.dbname, tablename) + sql
                                self.ConnDB.exec(self.conn, sql_0)
                                self.insert_data(dataset, tablename)
                            else:
                                raise pymysql.err.InternalError(str(reason))

                    except Exception as reason:
                        # Per-sheet failure: log it, drop the half-created
                        # table, and keep importing the remaining sheets.
                        print('Failed: {}'.format(self.excel_name))
                        with open(log_file, 'a') as (fw):
                            fw.write(
                                'excel sheet name: {0}, error: {1}\n'.format(
                                    self.excel_name, str(reason)))
                        if created_table:
                            sql = 'drop table if exists `{0}`.`{1}`'.format(
                                self.dbname, created_table)
                            self.ConnDB.exec(self.conn, sql)
                        continue

                    else:
                        print('Imported: {}'.format(self.excel_name))
                        num_s += 1
                    finally:
                        num += 1
                        # Always restore the original sql_mode for the session.
                        self.ConnDB.exec(
                            self.conn, 'set SESSION sql_mode = "{}"'.format(
                                self.sql_mode))

            except Exception as reason:
                # Per-file failure (unreadable file, bad encoding, ...).
                print('Failed: {}'.format(excelcsv))
                with open(log_file, 'a') as (fw):
                    fw.write('excel file name: {0}, error: {1}\n'.format(
                        self.excel_name, str(reason)))
                num += 1
                continue

        print('\nTotal: {}, Imported: {}\n'.format(num, num_s))

        if os.path.isfile(log_file):
            os.popen(log_file)
            ret = sg.Popup(
                'You have logs. Check them first!\n\nPress OK to continue to dump database if you have checked.',
                title=currentwork)
            if ret is None:
                return
        if num_s == 0:
            sg.Popup('No imported tables!', title=currentwork)
            return

        self.clean_data()
        if advanced:
            # Replay the work's existing script on top of the fresh import.
            sqlfile = self.HandleConfig.handle_config(
                'g', currentwork, 'jirapath') + 'script\\{0}.sql'.format(
                    self.dbname)
            if os.path.exists(sqlfile):
                self.ConnDB.cmd(self.dbname, 'mysql', sqlfile)

        self.MysqlDump.main(currentwork)
        sg.Popup('Import Excel Complete!', title=currentwork)

    def get_excel(self, excels=None):
        """Map importable file names to sanitized table names.

        :param excels: optional explicit file list; defaults to the contents
                       of self.importexcelpath.
        :return: dict of {file name: z_excel_<nickname>_<sanitized name>}
        """
        if not excels:
            excels = os.listdir(self.importexcelpath)
        excelcsvs = defaultdict()
        for excel in excels:
            excel_dir = self.importexcelpath + '\\' + excel
            if os.path.isfile(excel_dir) and re.fullmatch(
                    '^.*?\\.(xls|xlsx|csv)$', excel, flags=re.IGNORECASE):
                tablename = re.sub('\\.(xls|xlsx|csv)$',
                                   '', (excel.lower()),
                                   flags=re.IGNORECASE)
                tablename = 'z_excel_' + self.nickname + '_' + re.sub(
                    '[^0-9a-z]+', '_', tablename, flags=re.IGNORECASE)
                excelcsvs[excel] = tablename

        return excelcsvs

    def read_data(self, dataset):
        """Normalize a DataFrame and compute per-column max lengths.

        Drops all-empty rows, promotes a data row to header when the sheet
        looks headerless ('unnamed: N' columns) or contains an 'ignore'
        column, de-duplicates column names, and replaces '' with None.
        Sets self.columns as a side effect (used by insert_data).

        :return: (dict of column -> max value length, cleaned DataFrame)
        """
        dataset = dataset.fillna(value="")
        f = lambda x: str(x).strip()
        dataset = dataset.applymap(f)
        f = lambda x: len(x)
        df1 = dataset.applymap(f)
        f = lambda x: max(x)

        # Drop rows whose every cell is empty (row max length == 0).
        df3 = df1.apply(f, axis=1)
        df3 = pd.DataFrame(df3, columns=['c'])
        indexs = df3.loc[(df3['c'] == 0)].index
        dataset.drop(indexs, inplace=True)

        # deal with columns
        dataset.columns = [str(col) for col in dataset.columns]
        self.columns = dataset.columns
        low_col = [col.lower() for col in self.columns]
        s = len(low_col)
        # Headerless sheet heuristic: pandas named the last column
        # 'unnamed: N', so treat the first data row as the real header.
        if 'unnamed: {}'.format(s - 1) in low_col:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)
            low_col = [col.lower() for col in self.columns]

        # An 'ignore' column likewise means the header is in the first row.
        if 'ignore' in low_col:
            self.columns = dataset[0:1]
            self.columns = np.array(self.columns)
            self.columns = self.columns.tolist()[0]
            dataset.columns = self.columns
            dataset.drop(dataset[:1].index, inplace=True)

        self.columns = [str(col).strip() for col in self.columns]
        # fix blank col name
        f = lambda x: "unnamed" if x == "" else x
        self.columns = [f(col) for col in self.columns]

        # MySQL identifier limit is 64 chars; leave room for dedupe suffixes.
        f = lambda x: x if len(x) <= 63 else x[:62].strip()
        self.columns = [f(col) for col in self.columns]
        # fix duplicate column name
        # Repeatedly suffix a counter onto case-insensitive duplicates until
        # a full pass makes no change.
        while 1:
            low_col = [col.lower() for col in self.columns]
            idx = 0
            odx = 0
            c = 0
            for i in self.columns:
                jdx = 0
                n = 1
                if idx == len(self.columns):
                    continue
                for j in low_col[idx + 1:]:
                    odx = idx + 1 + jdx
                    if j == i.lower():
                        self.columns[odx] = j + str(n)
                        n += 1
                        c += 1
                    jdx += 1

                idx += 1

            if c == 0:
                break

        dataset.columns = self.columns
        self.columns = np.array(self.columns)
        self.columns = self.columns.tolist()

        # Per-column max content length drives column typing in create_table.
        f = lambda x: max(x)
        df1.columns = self.columns
        df2 = df1.apply(f, axis=0)
        col_maxlen = df2.to_dict()

        # Store empty strings as SQL NULL.
        f = lambda x: None if x == "" else x
        dataset = dataset.applymap(f)

        return col_maxlen, dataset

    def create_table(self, col_maxlen, tablename):
        """Create `tablename` in self.dbname, typing columns from col_maxlen.

        varchar(255) up to 255 chars, then TEXT/MEDIUMTEXT/LONGTEXT; on
        InternalError (e.g. row size too large) retries with all-text columns.

        :return: (tablename, the CREATE TABLE statement that succeeded)
        """
        sql = 'create table `{0}`.`{1}`('.format(self.dbname, tablename)
        for col, maxLen in col_maxlen.items():
            colType = 'varchar(255)'
            if maxLen > 255:
                colType = 'TEXT'
            if maxLen > 65535:
                colType = 'MEDIUMTEXT'
            if maxLen > 16777215:
                colType = 'LONGTEXT'
            sql = sql + '`{0}` {1} default null,'.format(col, colType)

        # Swap the trailing comma for the closing parenthesis.
        sql = sql[:-1] + ')'
        try:
            self.ConnDB.exec(self.conn, sql)
        except pymysql.InternalError:
            sql = re.sub('varchar\\(\\d+\\)', 'text', sql)
            self.ConnDB.exec(self.conn, sql)

        return (tablename, sql)

    def insert_data(self, dataset, tablename):
        """Bulk-insert `dataset` into self.dbname.`tablename`.

        Columns come from self.columns (set by read_data).
        """
        dataset = np.array(dataset)
        datalist = dataset.tolist()
        cols = '`,`'.join(self.columns)
        l = len(self.columns)
        v = '%s,' * l
        v = v[:-1]

        sql = 'insert into `%s`.`%s`(%s) values(' % (self.dbname, tablename,
                                                     '`' + cols + '`')
        sql = sql + '%s)' % v
        self.ConnDB.exec(self.conn, sql, datalist=datalist)

    def clean_data(self):
        """Run the configured clean-up SQL script against self.dbname."""
        print('Begin to clean data...\n')
        file = self.cleansql
        ret = self.ConnDB.cmd(self.dbname, 'mysql', file)
        if ret == 0:
            print('Succeed: Clean data\n')
        else:
            sg.Popup('Clean Data Failed')
Exemplo n.º 10
0
class ToSqlserver:
    """Load tabular data into a SQL Server database through ConnDB.

    NOTE(review): the generated SQL mixes dialects — SYSOBJECTS/SYSCOLUMNS
    are SQL Server, CLOB and :n bind parameters look Oracle-style, and the
    add_tname branch uses MySQL backticks.  Presumably ConnDB normalizes or
    targets one specific engine; verify against its implementation.
    """

    def __init__(self, values):
        # `values` carries connection settings and options from the GUI form.
        self.values = values
        self.ConnDB = ConnDB(values)
        self.HandleConfig = HandleConfig()
        self.conn_db = self.ConnDB.conndb(host=values['host'],
                                          port=int(values['port']),
                                          user=values['user'],
                                          passwd=values['passwd'],
                                          db=values['dbname'],
                                          charset='utf8')

    def is_Chinese(self, word):
        """Return True if `word` contains at least one CJK unified ideograph."""
        for ch in word:
            if '\u4e00' <= ch <= '\u9fff':
                return True
        return False

    # create table
    def create_table(self, col_maxlen, tablename):
        """Drop `tablename` if it exists, then recreate it from col_maxlen.

        Integer max lengths are multiplied by 6 (worst-case UTF-8 bytes per
        char); columns exceeding 4000 bytes become CLOB.  Non-integer or
        zero lengths fall back to varchar(255).

        :param col_maxlen: dict mapping column name -> max value length
        :param tablename: target table name
        :return: (tablename, the executed CREATE TABLE statement)
        """
        sql = "select 1 from SYSOBJECTS where XTYPE = 'U' and NAME = '{}'".format(
            tablename)
        cnt = self.ConnDB.exec(self.conn_db, sql).fetchall()
        if len(cnt) > 0:
            sql = 'drop table "{}"'.format(tablename)
            self.ConnDB.exec(self.conn_db, sql)
        sql = "create table \"{0}\"(".format(tablename)
        for col, maxLen in col_maxlen.items():
            colType = "varchar(255)"
            if type(maxLen) == int:
                if maxLen * 6 > 4000:
                    colType = "CLOB"
                elif maxLen > 0:
                    colType = "varchar({})".format(maxLen * 6)

            sql = sql + "\"{0}\" {1} default null,".format(col, colType)
        if self.values['add_tname']:
            # Optional provenance column recording the source table name.
            # NOTE(review): backticks are MySQL quoting — likely should be
            # double quotes like every other identifier here; confirm.
            sql = sql + "`table_name` varchar(255) default '{0}',".format(
                tablename)
        sql = sql[:-1] + ")"

        self.ConnDB.exec(self.conn_db, sql)
        return tablename, sql

    # insert into
    def insert_data(self, dataset, tablename, created_sql=None):
        """Insert the rows of `dataset` into `tablename`.

        Only columns that exist both in the target table (per SYSCOLUMNS)
        and in the DataFrame are inserted; does nothing for an empty frame.
        """
        if dataset.empty:
            return
        # Bug fix: the format string was "OBJECT_ID('{0}}')" — the doubled
        # '}}' rendered a literal '}' into the name, so the lookup never
        # matched the table.
        sql = "SELECT NAME FROM SYSCOLUMNS WHERE ID=OBJECT_ID('{0}')".format(
            tablename)
        columns = self.ConnDB.exec(self.conn_db, sql).fetchall()
        exists_columns = []
        for column in columns:
            if column[0] in dataset.columns:
                exists_columns.append(column[0])
        dataset = dataset[exists_columns]
        columns = dataset.columns
        dataset = np.array(dataset)
        datalist = dataset.tolist()

        cols = '","'.join(columns)
        l = len(columns)
        v = ''
        for i in range(l):
            v = v + ':{},'.format(i + 1)
        v = v[:-1]

        sql = 'insert into "%s"(%s) values(' % (tablename, '"' + cols + '"')
        sql = sql + '%s)' % v

        self.ConnDB.exec(self.conn_db, sql, datalist=datalist)
Exemplo n.º 11
0
class NewWork:
    """Scaffold a new data-import work item.

    Creates the dated folder tree, seeds script files from templates,
    registers the work in config.ini, (re)creates the local database,
    creates a desktop shortcut and inserts tracking rows.
    """

    def __init__(self):
        self.HandleConfig = HandleConfig()
        self.DbToDownload = DbToDownload()
        self.ConnDB = ConnDB()

    def main(self):
        """Collect work info from the user and set everything up.

        Returns 0 when the user cancels the input dialog; otherwise
        returns None after all setup steps complete.
        """
        workpath = self.HandleConfig.handle_config('g', 'defaultpath',
                                                   'workpath')
        nickname = self.HandleConfig.handle_config('g', 'excelimporter',
                                                   'nickname')
        emptytxt = self.HandleConfig.handle_config('g', 'referencefile',
                                                   'emptytxt')
        git_repo_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'git_repo_path')
        navicat_script_path = self.HandleConfig.handle_config(
            'g', 'defaultpath', 'navicat_script_path')
        desktoppath = self.HandleConfig.handle_config('g', 'defaultpath',
                                                      'desktoppath')

        # user input
        jiraname, dbname, worktype, cost, duedate = self.input_workinfo(
            workpath)
        if not worktype:
            return 0

        # create directory tree <work>\<year>\<month>\<day>\<worktype>\<jira>\
        today = date.today()
        jirapath = (workpath + str(today.year) + '\\' + str(today.month) +
                    '\\' + str(today.day) + '\\' +
                    worktype.replace(' ', '_') + '\\' + jiraname + '\\')
        scriptspath = jirapath + 'script\\'
        scriptsbakpath = jirapath + 'db_backup\\'
        xlspath = jirapath + 'excel\\'
        queriespath = jirapath + 'temp\\'
        # exist_ok=True replaces the original chain of existence checks
        # and also creates every intermediate directory in one call.
        for path in (scriptspath, scriptsbakpath, xlspath, queriespath):
            os.makedirs(path, exist_ok=True)
        script = scriptspath + '{}.txt'.format(dbname)
        if not os.path.isfile(script):
            shutil.copyfile(emptytxt, script)
        tmpscript = scriptspath + 'tmp.txt'
        if not os.path.isfile(tmpscript):
            shutil.copyfile(emptytxt, tmpscript)

        # copy configration.sql from git repo
        gitscriptpath = git_repo_path + 'dataImportScript\\script\\'
        configration_sql = gitscriptpath + 'configration.sql'
        configration_sql_new = jirapath + 'script\\configration.sql'
        if not os.path.isfile(configration_sql_new):
            shutil.copy(configration_sql, configration_sql_new)

        # write into config.ini
        # set as current work
        self.HandleConfig.handle_config('a', jiraname)
        self.HandleConfig.handle_config('s', jiraname, 'dbname', dbname)
        self.HandleConfig.handle_config('s', jiraname, 'jiraname', jiraname)
        self.HandleConfig.handle_config('s', jiraname, 'jirapath', jirapath)
        self.HandleConfig.handle_config('s', jiraname, 'worktype', worktype)
        self.HandleConfig.handle_config('s', jiraname, 'merge', 'True')
        self.HandleConfig.handle_config('s', 'worklist', jiraname, jiraname)
        self.HandleConfig.handle_config('s', 'global', 'currentwork', jiraname)

        # (re)create the local database
        # BUG FIX: format() was passed dbname twice for one placeholder.
        sql = 'drop database if exists `{0}`;create database `{0}`'.format(
            dbname)
        conn_db = self.ConnDB.conndb()
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()

        navicat_db_path = navicat_script_path + dbname + '\\'
        os.makedirs(navicat_db_path, exist_ok=True)
        # Seed each navicat script from the empty template when missing.
        for name in ('{}.sql', '{}_after_template.sql', '{}_after_merge.sql'):
            target = navicat_db_path + name.format(dbname)
            if not os.path.isfile(target):
                shutil.copyfile(emptytxt, target)

        # create shortcut
        self.set_shortcut(jirapath, desktoppath, jiraname)

        # insert into data_import_tracking
        sql = "insert into db_to_download.data_import_tracking(jiraId,dataImportType,dataImportCost,productionInstance,developer,startDate,dueDate) " \
              "select '{0}','{1}','{2}','{3}','{4}','{5}','{6}' from dual"\
            .format(jiraname, worktype, cost, dbname, nickname, date.today(), duedate)
        conn_db = self.ConnDB.conndb(server='awshost')
        self.ConnDB.exec(conn_db, sql)
        conn_db.close()

        # Add instance to db_list_to_download
        currentwork = self.HandleConfig.handle_config("g", "global",
                                                      "currentwork")
        self.DbToDownload.main(currentwork)

        sg.Popup('\n  New Work  Complete!         \n', title=currentwork)

    def input_workinfo(self, workpath):
        """Prompt for Jira name, db name, work type, cost and due date.

        Returns a 5-tuple (jiraname, dbname, worktype, cost, duedate),
        or (None, None, None, None, None) when the user cancels.
        Invalid input re-opens the dialog and returns the retry's result.
        """
        layout = [[
            sg.Frame(layout=[[
                sg.Radio('First Import', 'R0', default=True, key='first'),
                sg.Radio('Second Import', 'R0', key='second'),
                sg.Radio('Fix Import', 'R0', key='fix')
            ]],
                     title='Work Type',
                     title_color='red')
        ], [sg.Text('Jira Name:')], [sg.InputText(key='jiraname')],
                  [sg.Text('Database Name:')], [sg.InputText(key='dbname')],
                  [sg.Text('Cost:'),
                   sg.Text(' ' * 20),
                   sg.Text('Due Date:')],
                  [
                      sg.InputText(key='cost', size=(10, 1)),
                      sg.Text(' ' * 10),
                      sg.InputText(key='duedate', size=(10, 1))
                  ],
                  [
                      sg.Submit(tooltip='Click to submit this form'),
                      sg.Cancel()
                  ]]
        window = sg.Window('', layout=layout)
        event, values = window.read()
        window.close()
        if event in (None, 'Cancel'):
            return None, None, None, None, None

        jiraname = values['jiraname'].strip().upper()
        if '/' in jiraname:
            jiraname = jiraname.split('/')[-1]
        dbname = values['dbname'].strip().lower()
        cost = values['cost'].strip()
        duedate = values['duedate'].strip()
        worktype = 'First Import'
        if values['second']:
            worktype = 'Second Import'
        elif values['fix']:
            worktype = 'Fix Import'

        # jiraname and dbname can not be blank
        if dbname == '' or jiraname == '':
            sg.Popup('Jira Name or Database Name can not be blank!')
            # BUG FIX: the original discarded the retry's result and
            # returned None; propagate the re-entered values instead.
            return self.input_workinfo(workpath)

        # BUG FIX: re.match only anchors at position 0 (e.g. 'ab-c'
        # slipped through); re.search scans the whole name.
        if re.search('[^0-9a-z]', dbname):
            sg.Popup('Database Name only can be 0-9a-z')
            return self.input_workinfo(workpath)

        return jiraname, dbname, worktype, cost, duedate

    def set_shortcut(self, jirapath, desktop_path, jiraname):
        """Create a Windows .lnk shortcut to *jirapath* on the desktop."""
        filename = jirapath.strip('\\')
        lnkname = desktop_path + jiraname + r".lnk"
        shortcut = pythoncom.CoCreateInstance(shell.CLSID_ShellLink, None,
                                              pythoncom.CLSCTX_INPROC_SERVER,
                                              shell.IID_IShellLink)
        shortcut.SetPath(filename)
        # Defensive: guarantee the .lnk extension before persisting.
        if os.path.splitext(lnkname)[-1] != '.lnk':
            lnkname += ".lnk"
        shortcut.QueryInterface(pythoncom.IID_IPersistFile).Save(lnkname, 0)
# Exemplo n.º 12 (Example no. 12) — scraper artifact separating pasted snippets
# 0
class MysqlDump:
    def __init__(self):
        # Wire up the project helpers used by main(): config reader/writer
        # and database connection/command helper.
        self.HandleConfig = HandleConfig()
        self.ConnDB = ConnDB()

    def main(self,
             currentwork,
             advanced=0,
             gz=False,
             after=0,
             op='mysqldump-no-r'):
        jirapath = self.HandleConfig.handle_config('g', currentwork,
                                                   'jirapath')
        dbname = self.HandleConfig.handle_config('g', currentwork, 'dbname')
        sqlname = '{0}.sql'.format(dbname)
        sqlfile = jirapath + 'script\\' + sqlname
        if after:
            sqlfile = jirapath + 'script\\' + '{0}_after.sql'.format(dbname)
        if not op:
            op = 'mysqldump-no-r'
        tablesname = ''

        if advanced:
            layout = [
                [sg.Text('Database Name: {0}'.format(dbname))],
                [sg.Text('Tables Name:'),
                 sg.InputText(key='tablesname')],
                [sg.Text('File Name:'),
                 sg.InputText(key='filename')],
                [sg.Submit(tooltip='Click to submit this form'),
                 sg.Cancel()]
            ]
            window = sg.Window('', layout=layout)
            event, values = window.read()
            window.close()
            if event in (None, 'Cancel'):
                return
            tablesname = values['tablesname']
            filename = values['filename']
            if filename:
                sqlfile = jirapath + 'script\\' + filename

        if gz:

            sql = """select TABLE_NAME from information_schema.TABLES t where TABLE_SCHEMA =database() and TABLE_NAME like 'z\_%' and TABLE_NAME NOT like 'z\_backup%' and TABLE_NAME NOT REGEXP '^z_excel_.*_20[0-9]{6}$' and TABLE_NAME NOT REGEXP '^z_newcreate_.*_20[0-9]{6}$' AND table_name NOT IN('z_newcreate_sequence_number_table','z_newcreate_revert_group_concat_bak','z_newcreate_extra_campaign','z_newcreate_extra_fund','z_newcreate_extra_purpose','z_newcreate_extra_tender','z_newcreate_extra_event','z_newcreate_extra_event_category_cd','z_newcreate_extra_membership_term','z_newcreate_extra_fundraiser','z_newcreate_all_custom_data_smoke_test', 'z_newcreate_all_custom_data_options_smoke_test', 'z_newcreate_wxm_membership_email_condition_smoke_test', 'z_newcreate_wxm_membershipEmailCondition_sep_smoke_test', 'z_newcreate_wxm_membershipLetterCondi_smoke_test', 'z_newcreate_wxm_membershipLetterCondi_sep_smoke_test', 'z_newcreate_wxm_shopping_cart_items_smoke_test', 'z_newcreate_prepare_statement_to_proceed_smoke_test', 'z_newcreate_wxm_may_be_drop_down_smoke_test', 'z_newcreate_cutoff_columns_smoke_test', 'z_newcreate_wxm_donation_smoke_test', 'z_newcreate_wxm_credit_card_id_smoke_test', 'z_newcreate_max_id_b4import_for_system_tables', 'z_newcreate_prepare_statement_to_proceed', 'z_newcreate_match_multiple_company', 'z_smoke_test_newcreate_past_duplicate_custom_field', 'z_newcreate_current_duplicate_custom_field_smoke_test', 'z_newcreate_check_table_name_column','z_newcreate_all_onelinetext_custom_field');"""
            conn = self.ConnDB.conndb(dbname)
            result = self.ConnDB.exec(conn, sql)
            result = ['drop table if exists ' + t[0] for t in result]
            sql = ';'.join(result)
            self.ConnDB.exec(conn, sql)

            sqlfile = jirapath + 'db_backup\\' + sqlname
            sqlfile_gzip = jirapath + '\\db_backup\\{0}.sql.gz'.format(dbname)
            op = 'mysqldump'
            ret = self.ConnDB.cmd(dbname, op, sqlfile)
            if ret == 0:
                with open(sqlfile, 'rb') as (f_in):
                    with gzip.open(sqlfile_gzip, 'wb') as (f_out):
                        shutil.copyfileobj(f_in, f_out)
                os.remove(sqlfile)

            # get after_size
            after_size = os.path.getsize(sqlfile_gzip) / 1024 / 1024
            after_size = int('{:.0f}'.format(after_size))
            try:
                b4_size = int(
                    self.HandleConfig.handle_config('g', currentwork,
                                                    'b4_size'))
                if b4_size < 100 and after_size >= 100:
                    sql = "DROP TABLE IF EXISTS z_newcreate_table_row;CREATE TABLE z_newcreate_table_row SELECT 'account' tableName, (SELECT count(*) FROM account) tableRow " \
                          "UNION ALL SELECT 'donation' tableName, (SELECT count(*) FROM donation) tableRow UNION ALL SELECT 'event_registration' tableName, (SELECT count(*) FROM event_registration) tableRow " \
                          "UNION ALL SELECT 'event_attendee' tableName, (SELECT count(*) FROM event_attendee) tableRow UNION ALL SELECT 'membership_listing' tableName, (SELECT count(*) FROM membership_listing) tableRow " \
                          "UNION ALL SELECT 'payment' tableName, (SELECT count(*) FROM payment) tableRow;"
                    self.ConnDB.exec(conn, sql)
                    sg.Popup(
                        'The size of database is more than 100M! See z_newcreate_table_row',
                        title=currentwork)
            except:
                pass

        else:
            ret = self.ConnDB.cmd(dbname, op, sqlfile, tablesname)
            if ret == 0:
                with open(sqlfile, 'a') as (fa):
                    fa.write('COMMIT;')
            if advanced:
                sg.Popup('Complete!')