Example #1
def writeTask(runs):
    # Build the target path portably instead of hard-coding Windows separators.
    fname = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                         "tasks", str(runs['id']))

    Logger.get_logger().info('Writing JSON file %s', str(runs['id']))
    with open(fname + '.json', 'w') as outfile:
        json.dump(runs, outfile)
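
A minimal usage sketch for the function above; the dict keys are assumptions borrowed from the repair task built in example #4:

# Hypothetical call: writeTask serializes any dict that carries an 'id' key.
task = {'id': 'repair', 'desc': 'Repeat previously broken runs', 'runs': []}
writeTask(task)  # writes <script_dir>/tasks/repair.json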
Example #2
def loadGatheringFile(gathering_file):
    data = np.genfromtxt(gathering_file, skip_header=2)

    if len(data) < 1:
        Logger.get_logger().warning("gathering file %s is empty", gathering_file)

    return data
Example #3
def loadSpotFile(spot_file):
    header = dict()

    with io.open(spot_file, "r", encoding="ISO-8859-1") as fp:
        for line in fp:
            m = re.search(r"\[([aA-zZ -]+)\]([0-9.:]+)", line)

            if m is None:
                break

            header[m.group(1)] = m.group(2)

    header['FirstPixel'] = int(header['FirstPixel'])
    header['LastPixel']  = int(header['LastPixel'])
    header['NrReadOuts'] = int(header['NrReadOuts'])
    header['TDI-Stages'] = int(header['TDI-Stages'])
    header['LineFreq']   = float(header['LineFreq'])
    header['Date']       = datetime.strptime(header['Date'], '%d.%m.%Y')
    header['Time']       = datetime.strptime(header['Time'], '%H:%M:%S')
    header['PixelCount'] = header['LastPixel'] - header['FirstPixel'] + 1
    header['LineCount']  = header['NrReadOuts']
    header['PixelSize']  = 0.00875  # millimeters

    # Drop the last 8 * PixelCount raw values, then reshape into (lines, pixels).
    z = np.reshape(np.fromfile(spot_file, dtype=np.ushort)[:-8 * header['PixelCount']],
                   (header['LineCount'], header['PixelCount']))

    if len(z) < 1:
        Logger.get_logger().warning("spot file %s is empty", spot_file)

    return header, z
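
A sketch of consuming the returned header and pixel matrix; the field names come from the parser above, the file name is illustrative:

# Hypothetical usage: the header fields drive the physical scaling of the image.
header, z = loadSpotFile('example.spot')
width_mm = header['PixelCount'] * header['PixelSize']  # line width in millimeters
print(header['Date'], header['Time'], z.shape, width_mm)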
Example #4
def json_repair(folder):
    spot_size = 6000 * 1024  # KB
    gather_size = 1000 * 1024  # KB

    if not os.path.isabs(folder):
        folder = os.path.join(Config.get("PLOT_DATA_FOLDER"), folder)

    if not os.path.exists(folder) or not os.path.isdir(folder):
        print("Error: folder " + folder + " does not exist!")
        sys.exit(0)

    errors = {}
    for root, dirs, files in os.walk(folder):
        for file in files:
            if not file.endswith('.spot') and not file.endswith('.gather'):
                continue

            path = os.path.join(root, file)  # use the walk root, not the top-level folder
            if (file.endswith('.spot') and os.path.getsize(path) < spot_size) or \
                    (file.endswith('.gather') and os.path.getsize(path) < gather_size):
                Logger.get_logger().debug('File too small: ' + file)

                name, ext = os.path.splitext(file)
                pattern = r"(\d*)_([\d\w-]*)_(\d*)"
                m = re.search(pattern, name)

                if m is None:
                    continue  # skip names that do not match <id>_<task>_<run>

                errors[name] = (m.group(1), m.group(2), m.group(3))

    runs_by_task = loadRuns()

    runs = {}
    runs['id'] = 'repair'
    runs['desc'] = 'Repeat previously broken runs'

    runs['runs'] = []

    for name in errors:
        (id, task, run_id) = errors[name]

        old_run = runs_by_task[task][int(run_id)]

        run = {}
        run['velocity'] = old_run.vel
        run['position'] = old_run.pos
        run['frequency'] = old_run.freq
        run['config'] = old_run.cfg

        runs['runs'].append(run)

        print(run)

    return runs
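
The returned dict has the same shape that writeTask from example #1 expects, so the two compose naturally; a sketch, assuming both functions live in the same module and the folder name is illustrative:

# Hypothetical repair workflow: collect broken runs, then persist them as a new task.
repair_task = json_repair('2019-01-measurements')
writeTask(repair_task)  # writes tasks/repair.json for a later re-run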
Example #5
    def writeCSV(data, csv_file):
        types = sorted(set(data) - {'pixel'})  # sort for a deterministic column order
        data_len = len(data[types[0]])
        Logger.get_logger().info("Writing csv file " + csv_file)
        
        with open(csv_file, 'w', newline='') as fh:  # newline='' avoids blank rows on Windows
            f = csv.writer(fh)
            f.writerow(types)

            for data_pos in range(data_len):
                data_row = []
                for type_ in types:
                    data_row.append(round(data[type_][data_pos], 6))
                f.writerow(data_row)
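
A sketch of the expected input: every key except 'pixel' becomes a CSV column, and all value lists must share one length. The key names are illustrative, and the call assumes writeCSV is exposed as a staticmethod or module-level helper:

# Hypothetical data set with two equally long columns plus the excluded 'pixel' key.
data = {'pixel': [1, 2, 3], 'mean': [0.1, 0.2, 0.3], 'std': [0.01, 0.02, 0.03]}
writeCSV(data, 'out.csv')  # header row: mean,std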
Example #6
    def __init__(self, record_file):
        self.firstvalid = False
        self.logger = Logger.get_logger("RtkRecord")
        self.record_file = record_file
        self.logger.info("Record file to: " + record_file)

        try:
            self.file_handler = open(record_file, 'w')
        except IOError:
            # self.file_handler was never assigned if open() failed, so do not close it here.
            self.logger.error("Open file %s failed" % record_file)
            sys.exit(1)

        self.write(
            "x,y,z,speed,acceleration,curvature,"
            "curvature_change_rate,time,theta,gear,s,throttle,brake,steering\n"
        )

        self.localization = localization_pb2.LocalizationEstimate()
        self.chassis = chassis_pb2.Chassis()
        self.chassis_received = False

        self.cars = 0.0
        self.startmoving = False

        self.terminating = False
        self.carcurvature = 0.0

        self.prev_carspeed = 0.0
Example #7
def __get_app_logger():
    """
    Initialize the logger.
    """
    global app_name
    if Config.log_dir.startswith("/"):
        log_dir = os.path.join(Config.log_dir, app_name)
    else:
        fdir = os.path.dirname(__file__)
        log_dir = os.path.abspath(os.path.join(fdir, Config.log_dir, app_name))
    os.makedirs(log_dir, exist_ok=True)  # idempotent; avoids the isdir/makedirs race

    # Logs go to stderr by default; re-running setup routes them to files and
    # rotates them according to the configured rules.
    Config.log_level = getattr(Logger, Config.log_level, Logger.INFO)
    Logger.setup(access_log_file=os.path.join(log_dir, "access.log"),
                 access_log_level=Config.log_level,
                 error_log_file=os.path.join(log_dir, "errors.log"),
                 ledger_log_file=os.path.join(log_dir, "ledger.log"),
                 transaction_log_file=os.path.join(log_dir, "transaction.log"),
                 when=Config.log_when,
                 backup_count=Config.log_backup_count,
                 enable_console=Config.enable_console)

    # Adjust the application log handler.
    Logger.application_handler()

    # Emit initial log lines to verify the setup.
    logger = Logger.get_logger(__name__)
    logger.info(message="INFO FROM GET APP LOGGER")
    logger.error(message="ERRS FROM GET APP LOGGER")
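
A sketch of calling the setup function once at process start; note that it configures handlers globally rather than returning a logger object, and the message keyword follows the custom Logger usage shown above:

# Hypothetical startup sequence: configure handlers once, then fetch loggers anywhere.
__get_app_logger()
log = Logger.get_logger(__name__)
log.info(message="application started")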
Example #8
def loadRuns():
    runs = {}

    tasks_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "tasks")
    for root, dirs, files in os.walk(tasks_dir):
        for file in files:
            name, ext = os.path.splitext(file)
            if not (name + ".json") in files:
                continue

            Logger.get_logger().info("Loading task %s", name)
            taskfile = os.path.join(root, name) + '.json'
            cfg = RunConfig(taskfile)
            runs[name] = cfg.getRuns()

    return runs
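
A sketch of the nested shape loadRuns returns, based on how it is indexed in examples #4 and #9; the task name and index are illustrative:

# Hypothetical access: task names map to runs addressable by an integer run id.
runs_by_task = loadRuns()
run = runs_by_task['calibration'][0]
print(run.vel, run.pos, run.freq)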
Example #9
    def saveNPY(self):

        data = {}
        runs = loadRuns()

        for root, dirs, files in os.walk(self.subdirectory):
            for file in files:
                if not file.endswith('.spot'):
                    continue

                name, ext = os.path.splitext(file)
                if not (name + ".gather") in files:
                    continue

                pattern = r"(\d*)_([\d\w-]*)_(\d*)"
                m = re.search(pattern, name)

                if m is None:
                    continue

                id      = m.group(1)
                task    = m.group(2)
                run_id  = m.group(3)

                Logger.get_logger().info("Loading %s", name)

                if task not in runs:
                    Logger.get_logger().warning("Task %s not found", task)
                    continue  # without this, runs[task] below raises KeyError

                header, spot = loadSpotFile(os.path.join(root, name) + '.spot')
                gather       = loadGatheringFile(os.path.join(root, name) + '.gather')
                run          = runs[task][int(run_id)]

                try:
                    data = self.buildAndAppendData(id, header, spot, gather, run, data)
                except Exception as e:
                    print(e)
                    continue

        for id in data:
            f = os.path.join(self.subdirectory, id)
            np.save(f, data[id])

        return data
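
The per-id files written above can be read back with numpy; if buildAndAppendData stores Python objects such as dicts, allow_pickle=True is required (a sketch, the file name is illustrative):

# Hypothetical read-back of one saved array; allow_pickle covers object payloads.
payload = np.load('42.npy', allow_pickle=True)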
Example #10
    def __init__(self, record_file, node, speedmultiplier, completepath,
                 replan):
        """Init player."""
        self.firstvalid = False
        self.logger = Logger.get_logger(tag="RtkPlayer")
        self.logger.info("Load record file from: %s" % record_file)
        try:
            file_handler = open(record_file, 'r')
        except IOError:
            # file_handler does not exist if open() failed, so there is nothing to close.
            self.logger.error("Cannot find file: " + record_file)
            sys.exit(1)

        self.data = genfromtxt(file_handler, delimiter=',', names=True)
        file_handler.close()

        self.localization = localization_pb2.LocalizationEstimate()
        self.chassis = chassis_pb2.Chassis()
        self.padmsg = pad_msg_pb2.PadMessage()
        self.localization_received = False
        self.chassis_received = False

        self.planning_pub = node.create_writer('/apollo/planning',
                                               planning_pb2.ADCTrajectory)

        self.speedmultiplier = speedmultiplier / 100
        self.terminating = False
        self.sequence_num = 0

        b, a = signal.butter(6, 0.05, 'low')
        self.data['acceleration'] = signal.filtfilt(b, a,
                                                    self.data['acceleration'])

        self.start = 0
        self.end = 0
        self.closestpoint = 0
        self.automode = False

        self.replan = (replan == 't')
        self.completepath = (completepath == 't')

        self.estop = False
        self.logger.info("Planning Ready")

        vehicle_config = vehicle_config_pb2.VehicleConfig()
        proto_utils.get_pb_from_text_file(
            "/apollo/modules/common/data/vehicle_param.pb.txt", vehicle_config)
        self.vehicle_param = vehicle_config.vehicle_param
Example #11
    def __init__(self, record_file):
        self.firstvalid = False
        self.logger = Logger.get_logger("LaneRecord")
        self.record_file = record_file
        self.logger.info("Record file to: " + record_file)

        try:
            self.file_handler = open(record_file, 'w')
        except IOError:
            # self.file_handler was never assigned if open() failed, so do not close it here.
            self.logger.error("Failed to open file %s" % record_file)
            sys.exit(-1)

        self.write("x,y,z,theta,dist_l,conf_l,dist_r,conf_r\n")

        self.localization = localization_pb2.LocalizationEstimate()
        self.mobileye = mobileye_pb2.Mobileye()
        self.mobileye_received = False
        self.terminating = False
Example #12
import json
import time
from decimal import Decimal
from datetime import datetime
from enum import (Enum, unique)
from functools import reduce
from operator import add
from multiprocessing import Pool

from conn.httprunner import HTTPRunner
from common.errors import (InvalidParameter, NotFound, MisMatched)
from common.logger import (Logger, LoggerContext)
from common.util import (get_stringify_uuid, get_traceback)

logger = Logger.get_logger(__name__)


@unique
class ResultState(Enum):
    """
    Test result state object.
    """
    PASS = "******"  # 测试通过
    FAIL = "FAIL"  # 测试失败
    ERROR = "ERROR"  # 测试异常
    PENDING = "PENDING"  # 测试未执行
    WARNING = "WARNING"  # 测试警告,废弃


@unique
Example #13
import os
import time
from datetime import datetime
from threading import Thread

import numpy as np
from clients import *
from common.config import Config
from common.logger import Logger
from run import RunConfig

logger = Logger.get_logger()


class Control:
    @staticmethod
    def start(run_file, subdir=None):
        xps = XPSClient()

        cfg = RunConfig(run_file)

        if subdir is None:
            subdir = "default"

        logger.info("cli: run %s\\%s with config %s", subdir, cfg.id, run_file)

        runs = cfg.getRuns()
        cam = CameraClient()

        if not cam.test():
            logger.error("cli: camera offline")
Example #14
import time
from selenium import webdriver
from common.logger import Logger

log = Logger.get_logger()

class Verifier(object):

    navigator = None
    def __init__(self, navigator):
        self.navigator = navigator

    def verify_sect_buckets(self, expected_buckets):
        # Verify that the managed-buckets table lists every expected bucket.
        self.navigator.managed_buckets()
        names = PageParser(self.navigator).bucket_names()
        time.sleep(5)
        log.info('buckets shown in the ui {0}'.format(names))
        log.info('buckets retrieved from rest {0}'.format(expected_buckets))
        for expected_bucket in expected_buckets:
            if expected_bucket not in names:
                return False
        return True

class PageParser(object):
    navigator = None
    def __init__(self, navigator):
        self.navigator = navigator

    def bucket_names(self):