import json
import logging
from pathlib import Path

from argparse_dataclass import ArgumentParser  # matches the ArgumentParser(<dataclass>) usage below

log = logging.getLogger(__name__)


def main(args):
    # json_files is assumed to come from an input-directory argument;
    # the original fragment does not show where it is built, so the
    # attribute name and glob pattern here are assumptions.
    json_files = sorted(Path(args.input_directory).glob("*.jsonl"))

    output_dir = Path(args.output_directory)
    output_dir.mkdir(exist_ok=True)
    output_path = output_dir / args.output_file_name

    # Open the output file once in append mode instead of re-opening it
    # for every input line.
    with output_path.open("a") as out_fp:
        for file_path in json_files:
            log.info(f"Loading {file_path}")
            with file_path.open("r") as fp:
                lines = fp.readlines()
            log.info(f"Found {len(lines)} lines")
            for line in lines:
                parsed_line = json.loads(line)
                source_value = select_and_merge_jsonpaths(parsed_line, args.source_jsonpaths)
                target_value = select_and_merge_jsonpaths(parsed_line, [args.target_jsonpath])
                output_object = {
                    args.source_key: source_value,
                    args.target_key: target_value,
                }
                out_fp.write(json.dumps(output_object))
                # "\n" instead of os.linesep: text mode already translates
                # newlines, so os.linesep would emit "\r\r\n" on Windows.
                out_fp.write("\n")


if __name__ == "__main__":
    parser = ArgumentParser(Args)  # Args: options dataclass defined elsewhere in the script
    args = parser.parse_args()
    logging.basicConfig(level="INFO")
    main(args)
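select_and_merge_jsonpaths is not defined in this fragment. A minimal sketch of what it might do, assuming the jsonpath-ng library and that matched values are stringified and joined with a space (both assumptions, not the original behavior):

from typing import Any, List

from jsonpath_ng import parse as parse_jsonpath  # assumed library


def select_and_merge_jsonpaths(obj: Any, jsonpaths: List[str]) -> str:
    """Evaluate each JSONPath against obj and merge all matched values.

    Hypothetical helper: joining stringified matches with a single
    space is an assumption about the merge behavior.
    """
    values = []
    for path in jsonpaths:
        for match in parse_jsonpath(path).find(obj):
            values.append(str(match.value))
    return " ".join(values)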
import logging
import time

from argparse_dataclass import ArgumentParser

# ImportData, InitialPopulation, utility, and Config are project-local
# and assumed to be imported elsewhere in the original script.


def start(config_params: Config):
    start_time = time.time()
    log = ImportData(config_params.event_log_file)
    logging.info(log.trace_list)
    logging.info(log.unique_events)
    logging.info(log.event_map)

    population = InitialPopulation(
        log.unique_events, config_params.initial_population_size
    )
    best_tree = utility.run(
        population.trees, log.unique_events, log.trace_list, config_params
    )

    logging.info(f"Execution time: {time.time() - start_time}")
    logging.info(
        f"Tree: {best_tree} "
        f"Replay fitness: {best_tree.replay_fitness} "
        f"Precision: {best_tree.precision} "
        f"Simplicity: {best_tree.simplicity} "
        f"Generalization: {best_tree.generalization} "
        f"Fitness: {best_tree.fitness}"
    )

    # Dump the full run configuration and the winning tree's attributes.
    for k, v in config_params.__dict__.items():
        logging.info(f"{k}: {v}")
    logging.info("Tree class values")
    for k, v in best_tree.__dict__.items():
        logging.info(f"{k}: {v}")


if __name__ == "__main__":
    # Configure logging before start() runs; at the default WARNING
    # level the logging.info calls above would produce no output.
    logging.basicConfig(level=logging.INFO)
    parser = ArgumentParser(Config)
    config = parser.parse_args()
    start(config)
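The Config dataclass itself is not shown in this fragment. Judging from the attributes start() reads and the field style used in the next script, it might look like the sketch below; the flag spellings and defaults are assumptions:

from dataclasses import dataclass, field


@dataclass
class Config:
    # Field names taken from the attributes read in start();
    # flag spellings and defaults are assumed.
    event_log_file: str = field(metadata=dict(args=["-event_log_file"]), default=None)
    initial_population_size: int = field(
        metadata=dict(args=["-initial_population_size"]), default=100
    )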
import subprocess
import webbrowser
from dataclasses import dataclass, field
from typing import Tuple

from argparse_dataclass import ArgumentParser

# Trainer is the project's training class, assumed to be imported
# elsewhere in the original script.


@dataclass
class ParseOptions:
    M1_dim: Tuple[int, int] = field(metadata=dict(args=["-M1_dim"]), default=(2, 2))
    M2_dim: Tuple[int, int] = field(metadata=dict(args=["-M2_dim"]), default=(2, 2))
    hidden_layers: str = field(metadata=dict(args=["-hiddens"]), default=None)
    log_dir: str = field(metadata=dict(args=["-log-dir"]), default=None)
    learning_rate: float = field(metadata=dict(args=["-learning_rate"]), default=1e-3)
    buffer_size: int = field(metadata=dict(args=["-buf_size"]), default=1000)
    batch_size: int = field(metadata=dict(args=["-batch_size"]), default=32)
    loss: str = field(metadata=dict(args=["-loss"]), default="mse")
    optimizer: str = field(metadata=dict(args=["-optimizer"]), default="adam")
    activation: str = field(metadata=dict(args=["-activation"]), default="ReLU")
    layer: str = field(metadata=dict(args=["-layer"]), default="affine")


if __name__ == "__main__":
    parser = ArgumentParser(ParseOptions)
    # Parse once; the original parsed twice, once just to print the result.
    args = parser.parse_args()
    print(args)

    # "-hiddens" arrives as a bracketed string such as "[64,32]";
    # strip the brackets and convert it to a list of ints.
    if args.hidden_layers:
        args.hidden_layers = [int(i) for i in args.hidden_layers.strip("[]").split(",")]

    # Launch TensorBoard on the log directory and open it in the browser;
    # webbrowser.open needs an explicit URL scheme to work reliably.
    subprocess.Popen(["tensorboard", "--logdir", args.log_dir])
    webbrowser.open("http://127.0.0.1:6006")

    trainer = Trainer(**vars(args))
    trainer.train()
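Assuming the script is saved as train.py (a hypothetical name), an invocation using the flags declared above might look like:

python train.py -hiddens "[64,32]" -log-dir runs/exp1 -learning_rate 3e-4 -batch_size 64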