Exemple #1
0
    def from_record(cls, record_table):
        """Build a measurement sequence from a single patient record table.

        Adds derived columns to *record_table* (average atrophy, rounded
        visual acuity, total injection count), assigns every data point to
        the time line, interpolates the numeric atrophy fields and computes
        the three- and six-month treatment effects.

        NOTE(review): assumed to be a @classmethod on MeasureSeqAtrophy
        (decorator not visible in this chunk) — confirm.
        """
        time_utils = TimeUtils(record_table=record_table)
        time_line = time_utils.time_line

        # Reset the index so positional access (iloc[0] below) is stable.
        record_table.reset_index(inplace=True)

        # Average the per-region atrophy percentages into a single column.
        record_table = avg_etdrs_columns(record_table,
                                         rings=[1, 2],
                                         regions=MeasureSeqAtrophy.REGIONS,
                                         features=["atropy_percentage"],
                                         foveal_region=["C0"],
                                         new_column_name="avg_atrophy")

        # Round visual acuity to two decimals.
        record_table.insert(loc=10,
                            column="cur_va_rounded",
                            value=record_table.cur_va.round(2),
                            allow_duplicates=True)

        # Add the running total of injections.
        record_table = get_total_number_of_injections(table=record_table)

        # BUG FIX: the original mutated the class attribute in place
        # (MeasureSeqAtrophy.DATA_POINTS += ATROPHY_COLUMNS), so each call
        # appended the atrophy columns again, growing the shared list and
        # re-assigning duplicates. A local list has the same effect once,
        # without the cross-call state leak.
        data_points = list(MeasureSeqAtrophy.DATA_POINTS) + \
            list(MeasureSeqAtrophy.ATROPHY_COLUMNS)

        # Assign each data point onto the time line.
        for data_point in data_points:
            time_line = time_utils.assign_to_timeline(time_line=time_line,
                                                      item=data_point)

        # Interpolate the numeric atrophy fields over the time vector.
        for item in ["avg_atrophy"] + MeasureSeqAtrophy.ATROPHY_COLUMNS:
            time_line = interpolate_numeric_field(time_line, item=item)

        treatment_dict = {}
        # Compute the 3- and 6-month treatment effect on average atrophy.
        for dist in ["three", "six"]:
            time_line = SeqUtils.set_treatment_effect(time_line,
                                                      time_dist=dist,
                                                      item="avg_atrophy")

            item = f"{dist}_month_effect"
            treatment_dict[f"{dist}_month"] = SeqUtils.get_treatment_effect(
                item, time_line)

        # True when the record shows no prior treatment.
        naive = SeqUtils.is_naive(record_table)

        # Total number of injections across the whole sequence.
        number_of_injections = SeqUtils.get_seq_number_of_injections(time_line)
        return cls(meta_data=record_table.iloc[0],
                   time_line=time_line,
                   naive=naive,
                   number_of_injections=number_of_injections,
                   treatment_dict=treatment_dict)
    def from_record(cls, record_table):
        """Build a general measurement sequence from a patient record table.

        Adds derived columns (total fluid, average atrophy, rounded visual
        acuity) to *record_table*, assigns the configured data points to the
        time line and interpolates the numeric fields.

        NOTE(review): assumed to be a @classmethod on MeasureSeqGeneral
        (decorator not visible in this chunk) — confirm.
        """
        time_utils = TimeUtils(record_table=record_table)
        time_line = time_utils.time_line

        # Initialize the sequence base class.
        # NOTE(review): super().__init__(SeqUtils, time_line) passing the
        # SeqUtils class as the first __init__ argument looks suspect for a
        # classmethod — confirm the intended base-class initialization.
        super().__init__(SeqUtils, time_line)

        # Reset the index so positional access (iloc[0] below) is stable.
        record_table.reset_index(inplace=True)

        # Sum the per-region fluid features into a single total-fluid column.
        record_table = sum_etdrs_columns(record_table,
                                         rings=[1, 2],
                                         regions=MeasureSeqGeneral.REGIONS,
                                         features=[3, 4],
                                         foveal_region=["C0"],
                                         new_column_name="total_fluid")

        # Average the per-region atrophy percentages into a single column.
        record_table = avg_etdrs_columns(record_table,
                                         rings=[1, 2],
                                         regions=MeasureSeqGeneral.REGIONS,
                                         features=["atropy_percentage"],
                                         foveal_region=["C0"],
                                         new_column_name="avg_atrophy")

        # Round visual acuity to two decimals.
        record_table.insert(loc=10,
                            column="cur_va_rounded",
                            value=record_table.cur_va.round(2))

        # Total-injection computation intentionally disabled here.
        # record_table = get_total_number_of_injections(table = record_table)

        # Assign each configured data point onto the time line.
        for data_point in MeasureSeqGeneral.DATA_POINTS:
            time_line = time_utils.assign_to_timeline(time_line=time_line,
                                                      item=data_point)

        # Interpolate the numeric fields over the time vector.
        for item in ["total_fluid", "avg_atrophy"]:
            time_line = interpolate_numeric_field(time_line, item=item)

        # True when the record shows no prior treatment.
        naive = SeqUtils.is_naive(record_table)

        # Total number of injections across the whole sequence.
        number_of_injections = SeqUtils.get_seq_number_of_injections(time_line)
        return cls(meta_data=record_table.iloc[0],
                   time_line=time_line,
                   naive=naive,
                   number_of_injections=number_of_injections)
    def reformat_dataset(self, df_raw, verbose=False):
        """Reformat a raw demand dataframe into per-cluster local-time sums.

        Joins the city timezone from the dimcity table, converts UTC
        timestamps to local date strings, picks imputed vs. real values for
        blacklisted rows, and aggregates gd_value per
        (city, hexcluster, local time, holiday flag).

        NOTE(review): rows whose city_id is missing from dimcity will raise
        KeyError in the timezone lookup — confirm that is intended.
        """
        df_gd = df_raw
        # Build a city_id -> timezone lookup from the dimcity table.
        df_dimcity = GeoUtils.get_dimcity(verbose=verbose)
        tz_lookup = {
            int(x['city_id']): x['timezone']
            for x in df_dimcity.to_dict('records')
        }
        df_gd['timezone'] = df_gd.apply(lambda x: tz_lookup[x['city_id']],
                                        axis=1)
        # Convert each row's UTC milliseconds to a local date string.
        df_gd['local_dt_str'] = df_gd.apply(
            lambda x: TimeUtils.utc_ms_to_local_datestr(
                x['utc_timestamp'], x['timezone']),
            axis=1)
        # Blacklisted rows use the imputed value; all others the real value.
        df_gd['gd_value'] = df_gd.apply(
            lambda x: x['imputed_value']
            if x['is_blacklisted'] else x['real_value'],
            axis=1)
        df_gd = df_gd.groupby(
            ['city_id', 'hexcluster_id', 'local_dt_str',
             'is_holiday'])['gd_value'].sum().reset_index()

        if verbose:
            # BUG FIX: the original used Python 2 print statements, which are
            # syntax errors under Python 3. The call form below behaves the
            # same on both interpreters.
            print("Result of reformating>>>")
            print(df_gd.info())
            print(df_gd.sample(2))

        return df_gd
Exemple #4
0
    def write_task_info_to_database(self, test_run_id=None, is_manual=False):
        """Insert a task_info row for this build; for manual runs also link
        the build to the test run in build_and_test_info.

        NOTE(review): SQL is assembled with %-interpolation rather than
        parameterized queries — injection risk if any interpolated field is
        externally controlled; confirm whether mysql_client supports
        parameter binding.
        """
        # Default to the instance's test run id when none is supplied.
        test_run_id = self.test_run_id if test_run_id is None else test_run_id
        sql_for_insert_task_info = "insert into task_info(build_id,test_run_id,source,type,created_time,update_time)" \
                                   "values('%s', '%s', '%s','%s','%s','%s')" % \
                                   (self.build_id,
                                    test_run_id,
                                    self.source,
                                    self.type,
                                    TimeUtils.get_time(),
                                    TimeUtils.get_time())
        sql_for_insert_build_and_test_info = "insert into build_and_test_info ( build_id, test_run_id, created_time)" \
                                             "values ('%s', '%s', '%s')" % (
                                             self.build_id, test_run_id, TimeUtils.get_time())

        self.mysql_client.mysql_insert_update_delete(sql_for_insert_task_info)
        # Only manual runs record the build/test-run association here.
        if is_manual:
            self.mysql_client.mysql_insert_update_delete(
                sql_for_insert_build_and_test_info)
Exemple #5
0
 def __get_target_dir(self, dirs, start_time, end_time='0'):
     """Select output directories whose timestamp falls inside a window.

     Directory names of the form ``out-<x>-<yymmdd...>`` are kept when
     their timestamp (prefixed with ``20`` to form a full year) lies
     between *start_time* and *end_time* according to
     ``TimeUtils.comparetime2``. Returns the matching names as a list.
     """
     selected = []
     for dirname in dirs:
         # Only "out-..." directories are candidates.
         if not dirname.startswith("out"):
             continue
         parts = dirname.split("-")
         if len(parts) != 3:
             continue
         # The third segment is a two-digit-year timestamp; prepend "20".
         stamp = "20" + parts[2]
         if TimeUtils.comparetime2(stamp, start_time, end_time):
             selected.append(dirname)
     return selected
Exemple #6
0
#-*- coding: UTF-8 -*-

import requests
import threading
import core.config.global_var as fd
from utils.stock_breed import StockBreed
from utils.time_utils import TimeUtils
from utils.logging import Log

# Module lock serializing the periodic position update.
lock = threading.Lock()
# System stock-breed helper.
sb = StockBreed()
# Time utilities.
tu = TimeUtils()
# Logger for the timer task.
log = Log().getInstance("timer_task")


# Worker routine, executed every 10 seconds.
def update_position_status():
    """Refresh the position status while holding the module lock.

    BUG FIX: the original released the lock inside the ``except`` branch
    and then unconditionally released it again at the end, so any
    exception from ``sb.update_position()`` caused a double release
    (RuntimeError: release unlocked lock). ``try``/``finally`` now
    guarantees exactly one release on every path.
    """
    lock.acquire()
    try:
        log.info("更新仓位状态:" + "START")
        sb.update_position()
        log.info("更新仓位状态:" + "END")
    except Exception as e:
        log.error(e)
    finally:
        lock.release()

from copy import deepcopy
from os import walk

import xlsxwriter

# from ftpdownload3 import FtpDownload
import ftpdownload
from utils.config_utils import ConfigUtils
from utils.time_utils import TimeUtils

# Read the configuration file (case/app/device maps).
caseMaps, appMaps, deviceMaps = ConfigUtils.readConfig()

# print caseMaps, appMaps, deviceMaps
# print "caseMaps", caseMaps
today = TimeUtils.getTodayTimeStamp()
# NOTE(review): this hardcoded value discards the timestamp computed above —
# looks like leftover debugging; confirm before removing.
today = "170610"

# 170610115939


def parse_logs():
    """Parse the performance log directories for today's build.

    NOTE(review): ``dir2`` is computed but never used — only ``dir1``
    (PD1619) is passed to ``parse_log``; confirm whether the PD1610 logs
    were meant to be parsed as well.
    """
    # ftpdown = ftpdownload3.FtpDownload()
    # ftpdown.download("172.25.105.226", "UniversalTest/performance_ScenarioTest/", "20170610115900")
    # 6.7.1//7.0.0
    dir1 = "logs/PD1619/" + today + "/"
    dir2 = "logs/PD1610/" + today + "/"
    # dir2 = "logs/PD1635/" + today + "/"
    # dir2 = "logs/PD1610/" + today + "/"
    # dir3 = "logs/PD1616/" + today + "/"
    parse_log(dir1, "PD1619")
Exemple #8
0
from xml.etree import ElementTree as eTree
from functools import reduce
from conf.constants import *
from conf.search_params_define import *
from conf.cache_params_define import LOOKBACK_MUC_CACHE, LOOKBACK_SINGLE_CACHE
from utils.redis_utils import RedisUtil
from utils.dsl import DSL
from utils.authorization import md5
from utils.time_utils import TimeUtils
from utils.common_utils import TextHandler
import utils.common_sql

# Shared state re-exported from the common SQL module.
user_data = utils.common_sql.user_data
domain = utils.common_sql.domain
dsl = DSL()
time_util = TimeUtils()
text_handler = TextHandler()
# Create the module logger.
log_path = get_logger_file('search.log')
lookback_logger = configure_logger('search', log_path)
redis_util = RedisUtil()

COMMON_DSL = {
    "conversation_aggs": {
        "terms": {
            "field": "conversation",
            "size": 80,
            "collect_mode": "breadth_first"

        },
        "aggs": {
Exemple #9
0
#!/usr/bin/env python
# -*- coding:utf-8 -*-
__author__ = 'jingyu.he'

import json
from utils.common_sql import UserLib
from xml.etree import ElementTree as eTree
from conf.search_params_define import if_lookback, if_es
from conf.cache_params_define import SINGLE_KEY, MUC_KEY, SINGLE_TRACE_KEY, MUC_TRACE_KEY, SINGLE_CACHE, MUC_CACHE, \
    ALL_USER_DATA_CACHE, USER_MUCS
from utils.redis_utils import RedisUtil
from utils.logger_conf import get_logger_file, configure_logger
from utils.redis_utils import redis_cli
from utils.common_sql import UserLib, AsyncLib, if_async
from utils.time_utils import TimeUtils
# Shared helpers for the lookback search module.
time_utils = TimeUtils()
log_path = get_logger_file('search.log')
lookback_logger = configure_logger('search', log_path)
cache_redis_cli = RedisUtil()


class LookbackLib:
    def __init__(self, args, user_id, timeout=5):
       
        # 参数获取
        self.timeout=timeout
        userlib = None
        self.args = args
        self.user = user_id
        self.group_id = args.get("groupid", "")
        self.offset = int(args.get("start", 0))
Exemple #10
0
def get_index_name():
    """Return the candidate index names for the upcoming month.

    The first entry carries the ``qc_`` prefix, the second is the bare
    month index produced by ``TimeUtils.get_next_month_index``.
    """
    month_index = TimeUtils().get_next_month_index()
    return ["qc" + "_" + month_index, month_index]
Exemple #11
0
    def write_jenkins_info(self, source='jenkins', build_method='auto'):
        """Persist Jenkins build metadata into the tracking database.

        Inserts apk_info and build_and_test_info rows, and — for a build id
        not seen before — the commit/build link and build description.
        When the apk type matches Task.TASK_APK_TYPE, also records the task
        info. Returns the Jenkins info dict.

        NOTE(review): SQL is assembled with %-interpolation rather than
        parameterized queries — injection risk if any field (commit message,
        branch name, ...) is externally controlled.
        """
        self.get_jenkins_info()
        build_id = self._jenkins_info.get(BUILD_ID_KEY)
        commit_id = self._jenkins_info.get(COMMIT_ID_KEY)
        test_run_id = TestRun.get_test_run_id()
        build_author = self._jenkins_info.get(BUILD_AUTHOR_KEY)
        commit_author = Commit.get_commit_author(commit_id)
        link_work_item = Commit.get_link_work_item()
        apk_path = self._jenkins_info.get(APK_PATH_KEY)
        branch_name = self._jenkins_info.get(BRANCH_NAME_KEY)
        commit_msg = Commit.get_commit_msg(commit_id)
        apk_type = self._jenkins_info.get(APK_TYPE_KEY)
        pull_request_id = self._jenkins_info.get(PULL_REQUEST_ID_KEY)
        print("pull_request_id", pull_request_id)
        # NOTE(review): int(pull_request_id) raises TypeError/ValueError when
        # the key is missing or non-numeric — confirm upstream guarantees it.
        sql_for_insert_apk_info = "insert into apk_info (" \
                                  "build_id, " \
                                  "apk_path, " \
                                  "type," \
                                  "pull_request_id," \
                                  "created_time," \
                                  "update_time) values ('%s', '%s', '%s', %d,'%s', '%s');" % \
                                  (build_id,
                                   apk_path,
                                   apk_type,
                                   int(pull_request_id),
                                   TimeUtils.get_time(),
                                   TimeUtils.get_time())
        sql_for_insert_build_and_test_info = "insert into build_and_test_info" \
                                             "(build_id, test_run_id, created_time) values ('%s', '%s', '%s');" \
                                             % (build_id, test_run_id, TimeUtils.get_time())

        sql_for_insert_commit_and_build_info = "insert into commit_and_build_info" \
                                               "(commit_id, build_id, created_time) values ('%s', '%s', '%s');" \
                                               % (commit_id, build_id, TimeUtils.get_time())
        sql_for_insert_build_description = "insert into build_description(" \
                                           "build_id, " \
                                           "commit_msg, " \
                                           "commit_author, " \
                                           "build_author," \
                                           "linked_work_item," \
                                           "branch," \
                                           "build_method," \
                                           "created_time) values ('%s', '%s', '%s','%s','%s','%s','%s','%s');" % \
                                           (build_id,
                                            commit_msg,
                                            commit_author,
                                            build_author,
                                            link_work_item,
                                            branch_name,
                                            build_method,
                                            TimeUtils.get_time())
        # Check whether this build was already recorded.
        select_for_builds_is_exist = " select * from apk_info where build_id = '%s'" % build_id
        select_build_info = self.mysql_client.mysql_select(
            select_for_builds_is_exist)
        print(1, sql_for_insert_apk_info)
        self.mysql_client.mysql_insert_update_delete(sql_for_insert_apk_info)
        print(2, sql_for_insert_build_and_test_info)
        self.mysql_client.mysql_insert_update_delete(
            sql_for_insert_build_and_test_info)

        # First sighting of this build: record commit link and description.
        if len(select_build_info) == 0:
            self.mysql_client.mysql_insert_update_delete(
                sql_for_insert_commit_and_build_info)
            self.mysql_client.mysql_insert_update_delete(
                sql_for_insert_build_description)
        # Task-type builds additionally get a task_info row.
        if Task.TASK_APK_TYPE == apk_type:
            Task(build_id, source,
                 self.mysql_client).write_task_info_to_database(test_run_id)
        return self.get_jenkins_info()