Code Example #1
File: todo_list.py  Project: fzdp/orchid-notebook
def _get_finished_at_group_key(todo):
    finished_at = todo.finished_at
    if not finished_at:
        return _("todo_list.not_finished")
    if TimeUtil.is_today(finished_at):
        return _("todo_list.today_finished")
    elif TimeUtil.is_yesterday(finished_at):
        return _("todo_list.yesterday_finished")
    else:
        return _("todo_list.previous_finished")
Code Example #2
def main():
    if len(sys.argv) < 2: 
        usage(sys.argv)
    
    t_util = TimeUtil()
    cfg = SwitchConfig().get_config()
        
    customer = cfg.get("failover", "customer")
    max_count = cfg.getint("failover", "max_count")
    time_sleep = cfg.getfloat("failover", "time_sleep")
    
    fo_active = cfg.get("failover", "active")
    fo_standby = cfg.get("failover", "standby")
    
    target = sys.argv[1]    
    
    if target == "active":
        curr = fo_standby
        targ = fo_active
        
        fo = utm_failover.UTMFailOver(cfg, curr, targ)
        
        fo.show_config(curr, "Current configuration")
        fo.show_config(targ, "Failback configuration")
        if InteractionUtil().ask_user("Apply the failback configuration?"):
            t_util.start() 
            fo.failover(curr, targ)
            
            # The l3_check as currently implemented can only run on a node
            # connected to the same network as red/orange/green,
            # so pass False for now.
            l3_check = fo.loop_l2_connectivity(False, max_count, time_sleep)
            fo.loop_l3_connectivity(l3_check, max_count, time_sleep)         
        else: 
            return False
    elif target == "standby":
        curr = fo_active
        targ = fo_standby
        
        fo = utm_failover.UTMFailOver(cfg, curr, targ)
        
        fo.show_config(curr, "Current configuration")
        fo.show_config(targ, "Failover configuration")
        
        if InteractionUtil().ask_user("Apply the failover configuration?"):
            t_util.start()
            fo.failover(curr, targ)
            
            l3_check = fo.loop_l2_connectivity(False, max_count, time_sleep)
            fo.loop_l3_connectivity(l3_check, max_count, time_sleep)            
        else: 
            return False        
    else:
        usage(sys.argv)
    
    #Console().info(" *** [INFO] max_count= %s time_sleep= %s count= %s" % (max_count, time_sleep, count))
    t_util.stop()
    Console().info(" *** [INFO] Time= %s seconds!!!" % t_util.get_duration())
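The script calls a usage() helper that is not shown. A plausible sketch, assuming it simply prints the accepted targets and exits (the message text is an assumption):

def usage(argv):
    # Hypothetical helper: show how to invoke the script, then exit.
    print("Usage: %s [active|standby]" % argv[0])
    sys.exit(1)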
Code Example #3
def construct_aggregated_file_name(prefix, game, batch_id, job_id, start_ts):
    hour_part = TimeUtil.get_hr_str(start_ts)
    return '{hour_part}-{batch_id}-{job_id}.{prefix}.{game}.daily_snapshot.{start_ts}'.format(
        hour_part=hour_part,
        batch_id=batch_id,
        job_id=job_id,
        prefix=prefix,
        game=game,
        start_ts=int(start_ts))
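Assuming TimeUtil.get_hr_str formats the timestamp as a UTC hour string like '%Y%m%d%H' (an assumption; the real format lives in TimeUtil), a hypothetical call might produce:

# construct_aggregated_file_name('econ', 'ody', 3, 7, 1609459200)
# -> '2021010100-3-7.econ.ody.daily_snapshot.1609459200'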
Code Example #4
def _save_data(self, data):
    # Write the data to an hourly-stamped file under the lb directory.
    with open('%s/lb_%s.txt' % (self.lb_pt, TimeUtil.t_now_YmdH()), 'w') as f:
        f.write('%s\n' % data)
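TimeUtil.t_now_YmdH is not shown; judging by its name, a plausible sketch (an assumption, not the project's actual implementation):

from datetime import datetime

def t_now_YmdH():
    # Hypothetical: current time as year/month/day/hour, e.g. '2021010112'.
    return datetime.now().strftime('%Y%m%d%H')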
Code Example #5
def get_url_dp(product):
    """
    Fetch the product's download info via ChromeDP.
    :param product: product name
    :return: download URL
    """
    if not product:
        raise MsgException("Product must not be empty")

    if netsarang_info and product in netsarang_info:
        info = netsarang_info[product]
        # If cached data exists and its date is today, reuse it; the lookup
        # is expensive, so we only query once per day.
        if len(info) > 1 and TimeUtil.date_compare_to(
                info[0], datetime.now()) == 0:
            return info[1]

    prefix = StringUtil.random_lowercase_alphanumeric(9)
    suffix = Mail.lin_shi_you_xiang_suffix()

    Mail.lin_shi_you_xiang_apply(prefix)
    mail = prefix + suffix

    send_mail_dp(mail, product)

    time.sleep(10)

    mail_list = Mail.lin_shi_you_xiang_list(prefix)

    mail_len = len(mail_list)
    if mail_len == 0:
        raise MsgException("Mail list is empty!")

    mailbox = mail_list[mail_len - 1]["mailbox"]
    if not mailbox:
        raise MsgException("Mailbox is empty!")

    mail_id = mail_list[mail_len - 1]["id"]
    if not mail_id:
        raise MsgException("Mail ID is empty!")

    # Fetch the most recent mail
    mail_content = Mail.lin_shi_you_xiang_get_mail(mailbox, mail_id)

    # Decode: the mail's Content-Transfer-Encoding specifies base64
    html_text = base64.b64decode(mail_content.split("AmazonSES")[1])
    # Parse the decoded HTML
    html = BeautifulSoup(html_text, features="html.parser")
    # Find the text of the tag that carries the token
    href = html.find("a", {"target": "_blank"}).text

    # Request the token link to obtain the download link
    # bs = ReptileUtil.selenium_bs(href)
    # href = bs.find("a", {"target": "download_frame"})["href"]

    driver = ReptileUtil.selenium_driver(href)
    try:
        driver.implicitly_wait(10)
        href = driver.find_element_by_css_selector(
            "a[target='download_frame']").get_attribute("href")
    finally:
        # Close the current window.
        driver.close()
        # Quit the browser and kill the chromedriver process.
        driver.quit()
    href = href.replace(".exe", "r.exe")

    # Cache the product info for later calls
    netsarang_info[product] = [datetime.now(), href]

    return href
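TimeUtil.date_compare_to is not shown either; for the cache check above to work, it would have to return 0 when both datetimes fall on the same calendar day. A hypothetical sketch:

def date_compare_to(d1, d2):
    # Hypothetical: compare calendar dates only; 0 means the same day.
    return (d1.date() > d2.date()) - (d1.date() < d2.date())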
Code Example #6
def construct_daily_snapshot_control_dir_path(archive_temp_dir, start_ts):
    hour_part = TimeUtil.get_hr_str(start_ts)
    return '{temp_dir_root}/{hour_part}.daily_snapshot_control'.format(
        temp_dir_root=archive_temp_dir, hour_part=hour_part)
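Assuming the same UTC hour-string format as in Code Example #3 (and an illustrative temp path), a hypothetical call:

# construct_daily_snapshot_control_dir_path('/data/archive_tmp', 1609459200)
# -> '/data/archive_tmp/2021010100.daily_snapshot_control'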
Code Example #7
def main():
    # parsing the command line arguments
    parser = argparse.ArgumentParser(prog=sys.argv[0], add_help=True)
    parser.add_argument('-g', '--game', default='ody')
    parser.add_argument('-b', '--batch_id', default=0)
    parser.add_argument('-e', '--env', default='local')
    parser.add_argument('-a', '--async_sort_process', default=1)  # MMING
    parser.add_argument('-p', '--async_push', default=0)
    parser.add_argument('-s', '--sort_data', default=0)
    parser.add_argument('-f', '--process_file', default=1)
    parser.add_argument('-t', '--process_time', default=0)
    parser.add_argument('-c', '--cleanup', default=1)
    parser.add_argument('-j', '--job_id', default=-1)
    parser.add_argument('-d', '--start_ts', default=0)

    # retrieve the arguments
    args = vars(parser.parse_args(sys.argv[1:]))
    game = args['game']
    batch_id = args['batch_id']
    env = args['env']
    async_sort_process = int(args['async_sort_process'])
    async_push = int(args['async_push'])
    sort_data = int(args['sort_data'])
    process_file = int(args['process_file'])
    process_time = int(args['process_time'])
    cleanup = int(args['cleanup'])
    job_id = int(args['job_id'])
    start_ts = int(args['start_ts'])

    # start the timer for the process
    timer = MZTimer(process_time)

    # get the config
    config = ConfigLoader(game, env, 'daily_snapshot').config

    message = "Dumped eco data from game: {}\n\n".format(
        time.strftime('%H:%M:%S', time.gmtime(process_time)))
    current_ts = TimeUtil.get_current_timestamp()
    if start_ts == 0:
        start_ts = current_ts
    current_time = TimeUtil.ts2str(current_ts)
    user = config['target']['user']
    processing_dir = config['source']['processing_dir']
    processed_dir = config['source']['processed_dir']
    working_dir = config['source']['working_dir']
    not_sent_dir = config['source']['not_sent_dir']
    archive_temp_dir = config['target']['archive_tmp_dir']
    target_archive_dir = config['target']['archive_dir']
    clusters = config['target']['clusters'].split(',')
    job_ids = config['target']['job_ids'].split(',')
    default_cluster = clusters[0]
    target_temp_dir = '{}/temp_{}'.format(archive_temp_dir, job_id)
    daily_snapshot_temp_dir = construct_daily_snapshot_temp_dir_path(
        archive_temp_dir, start_ts)
    daily_snapshot_control_dir = construct_daily_snapshot_control_dir_path(
        archive_temp_dir, start_ts)

    pool = multiprocessing.Pool()

    # sanity check
    if job_id < 0:
        clean_up_source_files(working_dir)
        subject = "Invalid job_id [{} UTC]".format(current_time)
        EmailUtil.send_email(config['email']['alert'], subject, message)
        sys.exit(0)

    # sort and compress the files
    if process_file == 1:
        print('Sorting and compressing the files...')
        prefixes = config['source']['prefixes'].split(',')

        res = True
        if async_sort_process == 1:
            res = pool.map(
                partial(sort_and_compress,
                        game=game,
                        batch_id=batch_id,
                        job_id=job_id,
                        start_ts=start_ts,
                        sort_data=sort_data,
                        processing_dir=processing_dir,
                        working_dir=working_dir), prefixes)
            res = check_results(res)
        else:
            # record a failure for any prefix instead of keeping only the
            # last prefix's result
            for prefix in prefixes:
                if not sort_and_compress(prefix, game, batch_id, job_id,
                                         start_ts, sort_data, processing_dir,
                                         working_dir):
                    res = False

        if not res:
            clean_up_source_files(working_dir)
            subject = "Error in sorting and compressing [{} UTC]".format(
                current_time)
            EmailUtil.send_email(config['email']['alert'], subject, message)
            sys.exit(0)

        timer.stop()
        message += "Sorted and Compressed files: {}\n\n".format(
            timer.sub_process_time_str)

    # send compressed files to archive server's temp
    print('Sending processed files to archive server...')
    timer.sub_start()

    files = glob(os.path.join(working_dir, '*.gz'))
    hosts = config['target']['hosts'].split(',')
    results = {}
    for host in hosts:
        # create target temp dir if it does not exist on the archive server
        subprocess.call([
            'ssh', '{}@{}'.format(user, host), 'mkdir', '-p', target_temp_dir
        ])

        if async_push == 1:
            results[host] = pool.map(
                partial(send_files,
                        temp_dir=target_temp_dir,
                        host=host,
                        user=user), files)
        else:
            results[host] = []
            for log_file in files:
                results[host].append(
                    send_files(log_file, target_temp_dir, host, user))
    timer.stop()
    message += "Pushed files to archive servers: {}\n\n".format(
        timer.sub_process_time_str)

    # move the files to aggregated (if all exit status are 0) or not_sent (otherwise)
    timer.sub_start()
    failed = False
    for (n, log_file) in enumerate(files):
        exit_status = max([results[host][n] for host in results])
        if exit_status == 0:
            # successfully sent
            date = TimeUtil.get_date(current_ts)
            dest_dir = os.path.join(processed_dir, date)
            OSUtil.mkdir(dest_dir)
            shutil.move(log_file, dest_dir)
        else:
            # send failed; move working to not_sent directory
            failed = True
            failed_hosts = [host for host in results if results[host][n] != 0]
            for i, host in enumerate(failed_hosts):
                host_not_sent_dir = os.path.join(not_sent_dir, host)
                OSUtil.mkdir(host_not_sent_dir)
                if i == len(failed_hosts) - 1:
                    # last failed host: move the file
                    shutil.move(log_file, host_not_sent_dir)
                else:
                    # other failed hosts: copy the file
                    shutil.copy(log_file, host_not_sent_dir)

    if cleanup == 1:
        clean_up_source_files(processing_dir)

    if failed:
        subject = "[{}-ds] Error sending files to archive server. [{} UTC]".format(
            game, TimeUtil.get_current_time())
        EmailUtil.send_email(config['email']['alert'], subject, message)
        sys.exit(0)

    # move all the files to the remote archive dir
    print "Moving files to final temp direcoty on archive servers..."
    timer.sub_start()
    for host in hosts:
        user_host = '{}@{}'.format(user, host)
        # create temp and control dirs if they do not exist
        subprocess.call(
            ['ssh', user_host, 'mkdir', '-p', daily_snapshot_temp_dir])
        subprocess.call(
            ['ssh', user_host, 'mkdir', '-p', daily_snapshot_control_dir])

        src = os.path.join(target_temp_dir, '*')
        dest = daily_snapshot_temp_dir + '/'
        print('ssh', user_host, 'mv', src, dest)
        subprocess.call(['ssh', user_host, 'mv', src, dest])

        # mark single job success
        success_log_file_path = '{}/{}'.format(
            daily_snapshot_control_dir,
            construct_success_log_file_name(job_id))
        print(success_log_file_path)
        subprocess.call([
            'ssh', user_host, 'echo ' + str(TimeUtil.get_current_timestamp()) +
            ' > ' + success_log_file_path
        ])

    timer.stop()
    message += "Moved files to final temp dir: {}\n\n".format(
        timer.sub_process_time_str)

    # move the log files from the final temp to final destinations
    last_job = False
    for host in hosts:
        # rebind user_host here; the value from the previous loop is stale
        user_host = '{}@{}'.format(user, host)
        if are_all_jobs_completed(host, user, daily_snapshot_control_dir,
                                  job_ids):
            last_job = True
            timer.sub_start()
            # move files from the final temp to default cluster
            src = os.path.join(daily_snapshot_temp_dir, '*')
            default_cluster_temp_dir = construct_cluster_temp_dir(
                archive_temp_dir, default_cluster)
            subprocess.call(
                ['ssh', user_host, 'mkdir', '-p', default_cluster_temp_dir])
            print('ssh', user_host, 'mv', src, default_cluster_temp_dir)
            subprocess.call(
                ['ssh', user_host, 'mv', src, default_cluster_temp_dir])

            # copy files from the default cluster temp to other cluster temps
            for cluster in clusters:
                if cluster != default_cluster:
                    cluster_temp_dir = construct_cluster_temp_dir(
                        archive_temp_dir, cluster)
                    subprocess.call(
                        ['ssh', user_host, 'mkdir', '-p', cluster_temp_dir])

                    # copy files from the first temp directory to the others
                    src = os.path.join(default_cluster_temp_dir, '*')
                    print('ssh', user_host, 'cp', src, cluster_temp_dir)
                    subprocess.call(
                        ['ssh', user_host, 'cp', src, cluster_temp_dir])

            # move files from each cluster temp to the cluster final destination
            for cluster in clusters:
                cluster_target_temp_dir = construct_cluster_temp_dir(
                    archive_temp_dir, cluster)
                src = os.path.join(cluster_target_temp_dir, '*')
                cluster_target_archive_dir = target_archive_dir.format(
                    cluster=cluster)
                dest = cluster_target_archive_dir + '/'
                print('ssh', user_host, 'mv', src, dest)
                subprocess.call(['ssh', user_host, 'mv', src, dest])

            # clean up the success log
            subprocess.call([
                'ssh', user_host,
                'rm -rf {}/*'.format(daily_snapshot_control_dir)
            ])
            timer.stop()
            message += "Moved files to final destinations on {}: {}\n\n".format(
                host, timer.sub_process_time_str)

    message += "The whole process ran in {}.\n\n".format(
        timer.process_time_str)

    # send email out
    subject = "[{}] Successfully Sending Daily Snapshot Data. Job ID: {} [{} UTC]".format(
        game, job_id, TimeUtil.get_current_time())
    if last_job:
        recipients = config['email']['success']
    else:
        recipients = config['email']['sub_success']
    EmailUtil.send_email(recipients, subject, message)
    sys.exit(0)
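are_all_jobs_completed is called above but not shown. Based on how each job writes a success log into the control directory, a plausible sketch (the exact check is an assumption):

def are_all_jobs_completed(host, user, control_dir, job_ids):
    # Hypothetical: list the control dir over ssh and verify that every
    # job_id has written its success log file.
    out = subprocess.check_output(
        ['ssh', '{}@{}'.format(user, host), 'ls', control_dir]).decode()
    present = set(out.split())
    return all(construct_success_log_file_name(job_id) in present
               for job_id in job_ids)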