import logging
import sys
import threading

# NOTE: project-specific names used below (argopts, LoggingController,
# ThreadedProvenanceServer, ProvenanceServer, ProvenanceClientHandler,
# UIController, ModelController, ProvenanceCLI) are assumed to be imported
# elsewhere in this module; only the standard-library imports are added here.


def main():
    args = argopts.parse_arguments()
    socket = (args.interface, args.port)

    # Initialize logging settings for Provenance, telling the "Provenance"
    # logger to redirect to the specified logfile AND to the UI
    logger_controller = LoggingController(logfile="logs/provenance.log",
                                          level=args.verbose)
    logger = logging.getLogger("Provenance")

    # Initialize the backend server
    try:
        if args.threading:
            server = ThreadedProvenanceServer(server_address=socket,
                                              discovery=args.discovery,
                                              whitelist=args.whitelist,
                                              backup_dir="backups",
                                              restore=args.restore)
            # Start a thread with the server -- that thread will then start
            # one more thread for each request
            server_thread = threading.Thread(target=server.serve_forever)
            # Exit the server thread when the main thread terminates
            server_thread.daemon = True
            server_thread.start()
        else:
            # The non-threaded server is intentionally disabled
            sys.exit(0)
            # server = ProvenanceServer(socket, ProvenanceClientHandler, args)
            # server.serve_forever()
    except KeyboardInterrupt:
        sys.exit(0)

    # Initialize controllers for model-UI interactions
    ui_controller = UIController()
    model_controller = ModelController(server)
    logger.critical(f"Server starting on {socket[0]}:{socket[1]}.")

    # Initialize the frontend
    if args.ui == "cli":
        ProvenanceCLI.model = model_controller
        ProvenanceCLI.logger = logger_controller
        ProvenanceCLI.ui = ui_controller
        ProvenanceCLI.run()
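# Illustrative sketch only: the real CLI definition lives in the project's
# argopts module, which is not shown here. A minimal argparse-based
# parse_arguments() covering the attributes main() reads could look like
# this; the flag names, defaults, and help strings are assumptions.
import argparse


def parse_arguments():
    parser = argparse.ArgumentParser(description="Provenance server")
    parser.add_argument("--interface", default="0.0.0.0",
                        help="interface to bind the server to")
    parser.add_argument("--port", type=int, default=8080,
                        help="port to listen on")
    parser.add_argument("--verbose", default="INFO",
                        help="log level handed to LoggingController")
    parser.add_argument("--threading", action="store_true",
                        help="run the threaded server backend")
    parser.add_argument("--discovery", action="store_true",
                        help="enable client discovery")
    parser.add_argument("--whitelist", default=None,
                        help="path to a client whitelist")
    parser.add_argument("--restore", action="store_true",
                        help="restore state from the backup directory")
    parser.add_argument("--ui", choices=["cli"], default="cli",
                        help="frontend to launch")
    return parser.parse_args()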
            # (tail of a visualization step truncated in this excerpt)
            output_vis_path / f"{base_name}_gt.obj", level=1)
        return {'loss': 0}


def train_implicit_refinement(args):
    seed_everything(args.seed)
    checkpoint_callback = ModelCheckpoint(
        filepath=os.path.join("runs", args.experiment, 'checkpoints'),
        save_top_k=-1,
        verbose=False,
        period=args.save_epoch)
    model = ImplicitRefinementTrainer(args)
    trainer = Trainer(gpus=[args.gpu],
                      num_sanity_val_steps=args.sanity_steps,
                      checkpoint_callback=checkpoint_callback,
                      max_epochs=args.max_epoch,
                      limit_val_batches=args.val_check_percent,
                      val_check_interval=min(args.val_check_interval, 1.0),
                      check_val_every_n_epoch=max(1, args.val_check_interval),
                      resume_from_checkpoint=args.resume,
                      logger=None,
                      benchmark=True)
    trainer.fit(model)


if __name__ == '__main__':
    _args = arguments.parse_arguments()
    train_implicit_refinement(_args)
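# Sketch only: ImplicitRefinementTrainer is defined elsewhere in the project
# and is not shown. The minimal LightningModule below illustrates the shape
# trainer.fit(model) expects with the (pre-1.0-style) pytorch_lightning API
# used above; the placeholder network, loss, data, and optimizer are
# assumptions, not the project's actual model.
import torch
import pytorch_lightning as pl


class MinimalRefinementTrainer(pl.LightningModule):
    def __init__(self, args):
        super().__init__()
        self.args = args
        self.net = torch.nn.Linear(3, 1)  # placeholder network

    def forward(self, x):
        return self.net(x)

    def training_step(self, batch, batch_idx):
        points, target = batch
        loss = torch.nn.functional.mse_loss(self(points), target)
        # Old-style Lightning: return a dict containing the loss
        return {'loss': loss}

    def train_dataloader(self):
        # Dummy data so the sketch runs end to end
        points = torch.randn(64, 3)
        target = torch.randn(64, 1)
        dataset = torch.utils.data.TensorDataset(points, target)
        return torch.utils.data.DataLoader(dataset, batch_size=8)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-4)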
#!/usr/bin/env python

## This script marshals the other parts of the parser

from util.reader import read
from util.arguments import parse_arguments
import util.statistics as statistics

if __name__ == "__main__":
    args = parse_arguments()
    csv_data = read(args.filename)
    environ = {}

    # process options which set flags for other modules
    if args.numeric_ids:
        environ["numeric_ids"] = True
    else:
        environ["numeric_ids"] = False

    # process mutators, according to which ones were selected
    if args.condense_java:
        from mutators import condense_java
        condense_java.mutate(csv_data, environ)
    if args.condense_ms:
        from mutators import condense_ms
        condense_ms.mutate(csv_data, environ)
    if args.select_adobe:
        from mutators import select_adobe
        select_adobe.mutate(csv_data, args.select_adobe, environ)
#!/usr/bin/env python

## This script marshals the other parts of the parser

from util.reader import read
from util.arguments import parse_arguments
import util.statistics as statistics

if __name__ == '__main__':
    args = parse_arguments()
    csv_data = read(args.filename)
    environ = {}

    # process options which set flags for other modules
    if args.numeric_ids:
        environ['numeric_ids'] = True
    else:
        environ['numeric_ids'] = False
    if args.level:
        environ['level'] = args.level

    # process mutators, according to which ones were selected
    if args.condense_java:
        from mutators import condense_java
        condense_java.mutate(csv_data, environ)
    if args.condense_ms:
        from mutators import condense_ms
        condense_ms.mutate(csv_data, environ)
    if args.select_adobe:
        from mutators import select_adobe
        select_adobe.mutate(csv_data, args.select_adobe, environ)

    # level and plugin_list don't really make sense together,
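# Sketch only: the mutators package itself is not shown. Each condense-style
# mutator is assumed to expose a mutate(csv_data, environ) function matching
# the calls above, modifying csv_data in place. This hypothetical example
# assumes csv_data is a list of dict rows with 'vendor' and 'vendor_id'
# fields; neither the fields nor the filtering rule come from the project.
def mutate(csv_data, environ):
    """Drop rows with an empty (hypothetical) 'vendor' field, substituting
    the numeric id for the name when the numeric_ids flag is set."""
    kept = []
    for row in csv_data:
        if not row.get('vendor'):
            continue
        if environ.get('numeric_ids') and 'vendor_id' in row:
            row['vendor'] = row['vendor_id']
        kept.append(row)
    csv_data[:] = kept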