def execute_sql(self, command='', database=''):
    is_ok = True
    table_head_list = list()
    data_list = list()
    try:
        cursor = self.mysql_conn.cursor()
        if database != '':
            cursor.execute('USE ' + database)
        rowcount = cursor.execute(command)
        if rowcount > 0:
            if cursor.description is not None:
                # cursor.description holds one tuple per result column;
                # element 0 of each tuple is the column name.
                table_head_list = [desc[0] for desc in cursor.description]
                for data in cursor.fetchall():
                    if len(table_head_list) == len(data):
                        data_list.append(list(data))
                    else:
                        is_ok = False
                        table_head_list.clear()
                        data_list.clear()
                        break
        self.mysql_conn.commit()
        cursor.close()
    except Exception as err:
        is_ok = False
        table_head_list.clear()
        data_list.clear()
        write_log(command)
        write_log(err)
    return is_ok, table_head_list, data_list
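
# A minimal usage sketch for execute_sql. The wrapper class name
# MySQLHelper and the table name build_info are assumptions for
# illustration; only the constructor signature and the return shape
# (is_ok, headers, rows) come from the code in this section.
client = MySQLHelper('******', '******', '10.134.104.40', 3306, 'main_project')
if client.mysql_is_alive:
    is_ok, head_list, rows = client.execute_sql('SELECT * FROM build_info')
    if is_ok:
        for row in rows:
            print(dict(zip(head_list, row)))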
def sftp_rmdir(self, dir_path=''):
    # 'rm -rf' over SSH also removes non-empty directories, which
    # SFTP's own rmdir cannot do.
    stdin, stdout, stderr = self.ssh_conn.exec_command('rm -rf ' + dir_path)
    if len(stderr.read()) == 0:
        write_log("sftp remove dir " + dir_path + " succeeded")
        return True
    else:
        write_log("sftp remove dir " + dir_path + " failed")
        return False
def __init__(self, ip, port, user, password):
    self.sftp_is_alive = False
    try:
        # Reuse the parent class's SSH connection to open an SFTP channel.
        super().__init__(ip, port, user, password)
        self.sftp_conn = self.ssh_conn.open_sftp()
        self.sftp_is_alive = True
    except Exception as err:
        write_log(err)
def exe_sys_cmd_get_echo_v3(os_cmd=''):
    # Requires Python 3: subprocess.getstatusoutput returns
    # (exitcode, combined stdout/stderr text).
    cmd_echo = ''
    echo_error_code = -1
    if os_cmd != '':
        write_log(os_cmd)
        echo_error_code, cmd_echo = subprocess.getstatusoutput(os_cmd)
        write_log(cmd_echo)
    return cmd_echo, echo_error_code
def exe_sys_cmd_v3(os_cmd=''):
    # Requires Python 3. subprocess.call returns the command's exit
    # code; zero means success.
    is_ok = False
    if os_cmd != '':
        write_log(os_cmd)
        echo_error_code = subprocess.call(os_cmd, shell=True)
        if echo_error_code == 0:
            is_ok = True
    return is_ok
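
# How the two Python 3 helpers differ: exe_sys_cmd_v3 only reports
# success or failure, while exe_sys_cmd_get_echo_v3 also captures the
# combined stdout/stderr text. The directory path is illustrative.
if exe_sys_cmd_v3('mkdir -p /tmp/deploy_test'):
    echo, error_code = exe_sys_cmd_get_echo_v3('ls -l /tmp/deploy_test')
    print(error_code, echo)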
def __init__(self, ip, port, user, password):
    self.ssh_conn = paramiko.SSHClient()
    # Auto-accept unknown host keys: convenient for lab machines, but
    # it skips host-key verification.
    self.ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    self.ssh_is_alive = False
    try:
        self.ssh_conn.connect(ip, port, user, password)
        self.ssh_is_alive = True
    except Exception as err:
        write_log(err)
Example #7
def parse_text(text_file='./file.text', encoding='utf8'):
    text_list = list()
    if os.path.exists(text_file):
        with open(text_file, 'r', encoding=encoding, errors='ignore') as f_hd:
            text_list = f_hd.readlines()
    else:
        write_log("failed. non-existent, please check " + text_file)
    return text_list
Example #8
def parse_xml_rude(xml_file='./file.xml'):
    # "Rude" parse: return the raw lines instead of building an XML tree.
    xml_list = list()
    if os.path.exists(xml_file):
        with open(xml_file, 'r') as f_hd:
            xml_list = f_hd.readlines()
    else:
        write_log("failed. non-existent, please check " + xml_file)
    return xml_list
def exe_sys_cmd_get_echo(os_cmd=''):
    cmd_echo = ''
    if os_cmd != '':
        write_log(os_cmd)
        # os.popen captures stdout only; the exit status returned by
        # close() is discarded here.
        cmd_pipe = os.popen(os_cmd)
        cmd_echo = cmd_pipe.read()
        cmd_pipe.close()
        write_log(cmd_echo)
    return cmd_echo
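
# On Python 3.7+, subprocess.run offers the same capture plus the exit
# code that os.popen discards above. A sketch of an equivalent helper;
# the name exe_sys_cmd_get_echo_run is hypothetical, not part of the
# original module.
import subprocess

def exe_sys_cmd_get_echo_run(os_cmd=''):
    result = subprocess.run(os_cmd, shell=True, capture_output=True, text=True)
    return result.stdout, result.returncode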
def __init__(self, user, password, ip, port, database):
    self.mysql_is_alive = False
    try:
        self.mysql_conn = pymysql.connect(user=user,
                                          password=password,
                                          host=ip,
                                          port=int(port),
                                          database=database)
        self.mysql_is_alive = True
    except Exception as err:
        write_log(err)
Example #11
def modify_job_ini(config_modify_list=None, local_path=''):
    # Avoid a mutable default argument; fall back to an empty list.
    config_modify_list = config_modify_list or []
    is_ok = False
    if not os.path.exists(local_path + '/job.ini'):
        return is_ok
    for config_modify_item in config_modify_list:
        if config_modify_item['config_type'] == 'ini':
            is_ok = modify_ini(config_modify_item['modContent'],
                               local_path + '/job.ini')
            if not is_ok:
                write_log("modify ini file failed. " + local_path + '/job.ini')
    return is_ok
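
# Shape of a config_modify_list entry, inferred from the lookups above:
# each item needs a 'config_type' and a 'modContent' key. The nested
# {section: {key: value}} layout of modContent is an assumption based
# on how modify_ini is called; the path and values are illustrative,
# and the call returns False unless local_path/job.ini already exists.
config_modify_list = [
    {
        'config_type': 'ini',
        'modContent': {'job': {'build_tag': '20200101'}},
    },
]
modify_job_ini(config_modify_list, local_path='/tmp/deploy')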
def processor(build_tag='', step=''):
    # gen job_config in control server
    mysql_info = {'mysql_user': '******',
                  'mysql_password': '******',
                  'mysql_ip': '10.134.104.40',
                  'mysql_port': 3306,
                  'mysql_database': 'main_project'}
    if step == 'gen_job_config':
        if build_tag != '':
            if not (gen_job_config_main(mysql_info, build_tag) and
                    gen_job_config_module_conf_mod(mysql_info, build_tag) and
                    gen_job_config_module_data_mod(mysql_info, build_tag) and
                    gen_job_config_module_start_script(mysql_info, build_tag)):
                sys.exit(1)
        sys.exit(0)
    # build step by step in test server
    conf_main, conf_module_mod, conf_data_mod = get_job_config_file_name(
        os.path.join(sys.path[0], 'conf/'), build_tag)
    deploy_info = parse_json(conf_main)
    if step not in deploy_info['deploy_info']['deploy_step'] or \
            deploy_info['deploy_info']['deploy_step'][step] == '0':
        write_log('step ' + step + ' is not enabled in deploy_step, skip')
        sys.exit(0)
    local_path = ''
    for server_info in deploy_info['offline_server_info']:
        if check_is_local_server(server_info['addr']):
            # local_path = server_info['deploy_path'] + '/' + deploy_info['service_info']['service_name'] + '/'
            local_path = server_info['deploy_path'] + '/'
            break
    if local_path == '':
        sys.exit(1)
    if not exe_sys_cmd_v3('mkdir -p ' + local_path):
        sys.exit(1)
    env_prepare()
    if step == 'is_establish':
        if not module_install(deploy_info, conf_module_mod, local_path):
            sys.exit(2)
    elif step == 'is_config':
        if not module_configure(deploy_info, conf_module_mod, local_path)[0]:
            sys.exit(3)
    elif step == 'is_data':
        if not module_data_modify(deploy_info, conf_data_mod):
            sys.exit(4)
    elif step == 'is_start':
        if not module_start(deploy_info, local_path, build_tag):
            sys.exit(5)
    elif step == 'is_dailybuild':
        from proc.package_deploy_dailybuild import gen_dailybuild
        is_ok, conf_name, dailybuild_file = gen_dailybuild(deploy_info, local_path)
        if not is_ok:
            sys.exit(5)
        else:
            print(dailybuild_file)
Example #13
def parse_QO_config(ABMF_file, split_point):
    ABMF_list = list()
    ABMF_partial_fragment = _tree()
    if not os.path.exists(ABMF_file):
        write_log("failed. non-existent, please check " + ABMF_file)
        return ABMF_list, ABMF_partial_fragment
    f_hd = open(ABMF_file, 'r')
    if len(split_point) == 0:
        ABMF_list = f_hd.readlines()
        f_hd.close()
        return ABMF_list, ABMF_partial_fragment
    is_find = False
    sub_content = ''
    sub_key = ''
    split_point_index = 0
    for line in f_hd:
        # hard-coded marker for vrqo_QOConfig modification
        if line.find('# rule : map.index blk') != -1:
            ABMF_list.append('#---divide_line---\n')
        if not is_find:
            # Look for the start marker of any split point; the index of
            # the matching split point is the number of misses so far.
            for check_point in split_point:
                if line.find(check_point["start"]) != -1:
                    sub_content = line
                    is_find = True
                    break
                split_point_index += 1
            if is_find:
                continue
            else:
                split_point_index = 0
        if is_find:
            # Inside a fragment: watch for the end marker.
            if line.find(split_point[split_point_index]["end"]) != -1:
                sub_content = sub_content + line
                ABMF_partial_fragment[split_point[split_point_index]
                                      ["start"]][sub_key] = sub_content
                # Leave a placeholder so the fragment can be re-inserted
                # into the line list later.
                ABMF_list.append('PlaceHolder__' + '--'.join(
                    [split_point[split_point_index]["start"], sub_key]))
                split_point_index = 0
                is_find = False
                continue
            # Still inside the fragment: accumulate the line and remember
            # the key line's value.
            sub_content = sub_content + line
            if line.find(split_point[split_point_index]["key"]) != -1:
                sub_key = trim_head_tail(line.split('=')[1])
            continue
        ABMF_list.append(line)
    f_hd.close()
    return ABMF_list, ABMF_partial_fragment
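
# parse_QO_config and parse_ini both rely on a _tree() helper that is
# not shown in this section. A common implementation, assumed here from
# the nested ini_dict[section][param] indexing, is the autovivifying
# defaultdict:
from collections import defaultdict

def _tree():
    # Nested dict that creates missing levels on first access, so
    # tree[a][b] = value works without pre-initializing tree[a].
    return defaultdict(_tree)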
Example #14
def parse_json(json_file='./file.json'):
    json_dict = dict()
    if os.path.exists(json_file) and os.path.splitext(json_file)[1] == '.json':
        with open(json_file, 'r') as f_hd:
            try:
                json_dict = json.load(f_hd)
            except ValueError:
                # json.JSONDecodeError is a subclass of ValueError.
                json_dict = dict()
                write_log("failed. parse json file error, please check " +
                          json_file)
    else:
        write_log("failed. non-existent or non-json-format, please check " +
                  json_file)
        #sys.exit(1)
    return json_dict
Example #15
def parse_ini(ini_file='./file.ini'):
    ini_dict = _tree()
    if os.path.exists(ini_file):
        parser = ConfigParser()
        try:
            parser.read(ini_file)
        except Exception:
            write_log("failed. parse ini file error, please check " + ini_file)
            return ini_dict
        for section in parser.sections():
            for param in parser.options(section):
                ini_dict[section][param] = parser[section][param]
    else:
        write_log("failed. non-existent or non-ini-format, please check " +
                  ini_file)
    return ini_dict
Example #16
def write_ini(data=dict(), ini_file='./file.ini'):
    parser = ConfigParser()
    try:
        parser.read_dict(data)
    except Exception:
        write_log("failed. parse ini data error, please check " + str(data))
        return False
    try:
        with open(ini_file, 'w') as f_hd:
            parser.write(f_hd)
    except Exception:
        return False
    return True
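
# Round-trip sketch for the two ini helpers; the path and values are
# illustrative.
if write_ini({'job': {'build_tag': '20200101'}}, '/tmp/job.ini'):
    loaded = parse_ini('/tmp/job.ini')
    print(loaded['job']['build_tag'])  # -> '20200101'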
def exe_ssh_cmd_get_echo(self, os_cmd=''):
    cmd_echo = ''
    cmd_echo_err = ''
    if os_cmd != '':
        write_log(os_cmd)
        stdin, stdout, stderr = self.ssh_conn.exec_command(os_cmd)
        # Read stderr once: a second read() on the exhausted stream
        # would return an empty byte string.
        err_bytes = stderr.read()
        if len(err_bytes) == 0:
            cmd_echo = bytes.decode(stdout.read())
            write_log(cmd_echo)
        else:
            cmd_echo_err = bytes.decode(err_bytes)
            write_log(cmd_echo_err)
    return cmd_echo, cmd_echo_err
Example #18
def gen_cnv_report(cnv_data_file, cu, cl, genelist, outfile, source):
    """
    If required, run an Oncomine CNV report to get that data from the VCF file
    as well.
    """
    cnv_data = []
    parsed_cnv_results = []
    if source == 'oca':
        cmd = (os.path.join(scripts_dir, 'get_cnvs.pl'), '--cu', cu, '--cl',
               cl, cnv_data_file)
        parsed_cnv_results = run(cmd,
                                 "Generating Oncomine CNV results data",
                                 True,
                                 silent=not verbose)
    elif source == 'tso500':
        cmd = (os.path.join(scripts_dir, 'tso500_cnvs.pl'), cnv_data_file)
        parsed_cnv_results = run(cmd,
                                 "Generating TSO500 CNV results data",
                                 True,
                                 silent=not verbose)
    elif source == 'wes':
        log.write_log(
            'error', 'CNV data acquisition is not yet implemented '
            'for WES panels. Skipping this step.')
        return None

    header = parsed_cnv_results.pop(0).rstrip('\n').split(',')

    # If we have results, then generate a report.  Else, bail out.
    if parsed_cnv_results:
        for r in parsed_cnv_results:
            data = dict(zip(header, r.rstrip('\n').split(',')))

            # Fix measurements to accommodate multiple assays.
            if source == 'oca':
                cn = float(data['Raw_CN'])
                lower_reading = float(data['CI_05'])
                upper_reading = float(data['CI_95'])
            elif source == 'tso500':
                cn = lower_reading = upper_reading = float(data['FoldChange'])

            #  cnv_str = '{},{}'.format(data['Gene'], data['Raw_CN'])
            cnv_str = '{},{}'.format(data['Gene'], cn)

            if lower_reading >= float(cu):
                data.update({'var_type': 'Amplification'})
            elif upper_reading <= float(cl):
                data.update({'var_type': 'Deletion'})

            # Remove any genes not in our filter list if there is one.
            if genelist and data['Gene'] not in genelist:
                log.write_log(
                    'debug', 'Filtering out {} because it is not '
                    'in the requested list.'.format(cnv_str))
                continue

            # Filter events outside of our thresholds.
            if not __cnv_filter(lower_reading, upper_reading, cu, cl):
                log.write_log(
                    'debug', 'Filtering out {} because it is not '
                    'a copy number event within the threshold (copy loss={}; '
                    'copy_gain={}).'.format(cnv_str, cl, cu))
                continue

            cnv_data.append(data)

    if not cnv_data:
        log.write_log(
            'info', 'No copy number amplifications or deletions '
            'found in this specimen.')
    else:
        log.write_log(
            'info', 'Found {} copy number events in the '
            'specimen.'.format(len(cnv_data)))
        log.write_log('info', 'Annotating with OncoKB lookup.')
        __oncokb_cnv_and_fusion_annotate(cnv_data, oncokb_cnv_file, 'cnv')

    # Set up column headers for different assays.
    assay_elems = {
        'oca': ('Chr', 'Start', 'End', 'Gene', 'CN', 'CI_05', 'CI_95', 'MAPD',
                'Oncogenicity', 'Effect'),
        'tso500':
        ('Chr', 'Start', 'End', 'Gene', 'FoldChange', 'Oncogenicity', 'Effect')
    }

    with open(outfile, 'w') as outfh:
        csv_writer = csv.writer(outfh, lineterminator="\n", delimiter=",")
        wanted = assay_elems[source]

        csv_writer.writerow(wanted)
        if cnv_data:
            for elem in cnv_data:
                data = [elem[x] for x in wanted]
                csv_writer.writerow(data)
        else:
            outfh.write("No CNVs found.\n")

    return cnv_data
    def close(self):
        self.ssh_conn.close()

    @ssh_alive
    def exe_ssh_cmd_get_echo(self, os_cmd=''):
        cmd_echo = ''
        cmd_echo_err = ''
        if os_cmd != '':
            write_log(os_cmd)
            stdin, stdout, stderr = self.ssh_conn.exec_command(os_cmd)
            # Read stderr once: a second read() on the exhausted stream
            # would return an empty byte string.
            err_bytes = stderr.read()
            if len(err_bytes) == 0:
                cmd_echo = bytes.decode(stdout.read())
                write_log(cmd_echo)
            else:
                cmd_echo_err = bytes.decode(err_bytes)
                write_log(cmd_echo_err)
        return cmd_echo, cmd_echo_err


# for test
if __name__ == "__main__":
    test_ssh_client = SSHClient('10.134.91.94', "22", "root",
                                "noSafeNoWork@2014")
    echo, err = test_ssh_client.exe_ssh_cmd_get_echo('ls -rtl')
    #test_ssh_client.exe_ssh_cmd_get_echo('cd /search/odin/xujiang/vrqo_test/QueryOptimizer; nohup ./vrqo vrqo.cfg.ex  >std.ex.diff 2>err.ex.diff &')
    test_ssh_client.close()

    from lib.sys_inter_act import split_sys_cmd_echo
    echo_split_to_list = split_sys_cmd_echo(echo)
    write_log(echo_split_to_list)
    echo_split_to_list_partial = split_sys_cmd_echo(echo, 5)
    write_log(echo_split_to_list_partial)
def exe_sys_cmd(os_cmd=''):
    # Fire-and-forget: os.system's exit status is ignored here.
    if os_cmd != '':
        write_log(os_cmd)
        os.system(os_cmd)
    return