Example #1
import argparse
import os
import sys
from datetime import datetime

import numpy as np
from google.protobuf import text_format

# Project-specific imports are omitted from this excerpt: the cyber Python
# API, the generated extractor_config_pb2 and preprocess_table_pb2 modules,
# FLAGS, CYBER_PATH, get_pb_from_text_file, and the helpers used in main()
# (validate_record_files, parse_channel_config, process_dir, extract_data,
# generate_compressed_file). The class statement enclosing __init__ below is
# also omitted.

    def __init__(self):
        self.node = cyber.Node("sensor_calibration_preprocessor")
        self.writer = self.node.create_writer("/apollo/dreamview/progress",
                                              preprocess_table_pb2.Progress, 6)
        self.config = extractor_config_pb2.DataExtractionConfig()
        self.progress = preprocess_table_pb2.Progress()
        self.progress.percentage = 0.0
        self.progress.log_string = "Preprocessing in progress..."
        self.progress.status = preprocess_table_pb2.Status.UNKNOWN
        try:
            get_pb_from_text_file(FLAGS.config, self.config)
        except text_format.ParseError:
            print(f'Error: Cannot parse {FLAGS.config} as text proto')
        self.records = []
        for r in self.config.records.record_path:
            self.records.append(str(r))
        self.start_timestamp = -1
        self.end_timestamp = -1
        if self.config.io_config.start_timestamp == "FLOAT_MIN":
            self.start_timestamp = np.finfo(np.float32).min
        else:
            self.start_timestamp = np.float32(
                self.config.io_config.start_timestamp)

        if self.config.io_config.end_timestamp == "FLOAT_MAX":
            self.end_timestamp = np.finfo(np.float32).max
        else:
            self.end_timestamp = np.float32(
                self.config.io_config.end_timestamp)
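
# A minimal sketch of the protobuf text-format configuration this tool reads.
# Only records.record_path and the io_config fields appear in the code above
# and in main(); the per-channel fields (name, extraction_rate) are assumptions
# inferred from parse_channel_config() and may differ from the actual
# extractor_config.proto definition.
#
#   records {
#     record_path: "/apollo/data/bag/example.record.00000"
#   }
#   channels {
#     channel {
#       name: "/apollo/sensor/lidar128/compensator/PointCloud2"  # assumed field
#       extraction_rate: 10                                       # assumed field
#     }
#   }
#   io_config {
#     task_name: "Camera_Lidar_Calibration"
#     output_path: "./extracted_data"
#     start_timestamp: "FLOAT_MIN"
#     end_timestamp: "FLOAT_MAX"
#   }
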
def main():
    """
    Main function
    """
    if CYBER_PATH is None:
        print('Error: environment variable CYBER_PATH was not found, '
              'please set it first.')
        sys.exit(1)

    os.chdir(CYBER_PATH)

    parser = argparse.ArgumentParser(
        description='A tool to extract data information for sensor calibration.')
    # parser.add_argument("-i", "--record_path", action="append", default=[], required=True,
    #                     dest='record_list',
    #                     help="Specify the record file to extract data information.")
    # parser.add_argument("-o", "--output_path", action="store", type=str,
    #                     default="./extracted_data",
    #                     help="The output directory to restore message.")
    # # parser.add_argument("-z", "--compressed_file", action="store", type=str,
    # #                     default="extraction_data", help="The output compressed filename.")
    # parser.add_argument("-t", "--task_name", action="store", type=str, default="tmp",
    #                     help="name of the data extraction task, e.g., Camera_Lidar_Calibration.")
    # parser.add_argument("-c", "--channel_name", dest='channel_list', action="append",
    #                     default=[], help="list of channel_name that needs parsing.")
    # parser.add_argument("-s", "--start_timestamp", action="store", type=float,
    #                     default=np.finfo(np.float32).min,
    #                     help="Specify the begining time to extract data information.")
    # parser.add_argument("-e", "--end_timestamp", action="store", type=float,
    #                     default=np.finfo(np.float32).max,
    #                     help="Specify the ending timestamp to extract data information.")
    # parser.add_argument("-r", "--extraction_rate", action="store", type=int,
    #                     default=10, help="extraction rate for channel with large storage cost.")
    parser.add_argument("--config", action="store", type=str, required=True, dest="config",
                        help="protobuf text format configuration file abosolute path")
    args = parser.parse_args()

    config = extractor_config_pb2.DataExtractionConfig()
    with open(args.config, "r") as f:
        proto_block = f.read()
        text_format.Merge(proto_block, config)
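    # Note: text_format.Merge() raises text_format.ParseError if the config
    # file is not valid text-format protobuf, the same error handled in the
    # __init__ excerpt above.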

    records = []
    for r in config.records.record_path:
        records.append(str(r))

    valid_record_list = validate_record_files(records, kword='.record.')

    channels, extraction_rates = parse_channel_config(config.channels.channel)
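    # parse_channel_config() is assumed to return the channel names to extract
    # plus their per-channel extraction rates (its exact return types are not
    # shown in this excerpt).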
    print('Parsing the following channels: %s' % channels)

    start_timestamp = -1
    end_timestamp = -1
    if config.io_config.start_timestamp == "FLOAT_MIN":
        start_timestamp = np.finfo(np.float32).min
    else:
        start_timestamp = np.float32(config.io_config.start_timestamp)

    if config.io_config.end_timestamp == "FLOAT_MAX":
        end_timestamp = np.finfo(np.float32).max
    else:
        end_timestamp = np.float32(config.io_config.end_timestamp)
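    # The io_config timestamps are strings: either the sentinels "FLOAT_MIN" /
    # "FLOAT_MAX" or a numeric literal such as "1590000000.0" (hypothetical
    # example) that np.float32() can convert.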

    # Create directory to save the extracted data
    # use time now() as folder name
    output_relative_path = (config.io_config.task_name
                            + datetime.now().strftime("-%Y-%m-%d-%H-%M"))
    output_abs_path = os.path.join(config.io_config.output_path,
                                   output_relative_path)
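    # For example (hypothetical run time), task_name "Camera_Lidar_Calibration"
    # started at 2020-05-20 14:30 yields the folder
    # "Camera_Lidar_Calibration-2020-05-20-14-30" under io_config.output_path.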

    ret = process_dir(output_abs_path, 'create')
    if not ret:
        print('Failed to create extracted data directory: %s' % output_abs_path)
        sys.exit(1)

    ret = extract_data(valid_record_list, output_abs_path, channels,
                       start_timestamp, end_timestamp, extraction_rates)
    if not ret:
        print('Failed to extract data!')
        sys.exit(1)

    generate_compressed_file(input_path=config.io_config.output_path,
                             input_name=output_relative_path,
                             output_path=config.io_config.output_path,
                             compressed_file=config.io_config.task_name)

    print('Data extraction completed successfully!')
    sys.exit(0)
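

# Standard script entry point (assumed; not shown in the original excerpt).
# A typical invocation, with a hypothetical script name and config path:
#   python extract_data.py --config /path/to/extraction_config.pb.txt
if __name__ == '__main__':
    main()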