def main():
    """Entry point: gather features for the configured slice of the dataset.

    Does nothing when ``io.run`` is falsy. On completion, logs the total
    runtime and terminates the process via ``sys.exit()``.
    """
    if not io.run:
        return

    started = time.time()
    logline(
        "Gathering features for",
        str(io.get('dataset_percentage')) + "% of rows",
        "using a batch size of",
        BATCH_SIZE,
    )
    get_features()
    # get_features_iter() — alternative iterator-based implementation, unused
    elapsed = Timer.stringify_time(Timer.format_time(time.time() - started))
    logline('Total runtime is', elapsed)
    sys.exit()
exit_group() logline("Done joining all files") except KeyboardInterrupt as _: logline('Cancelled joining of files') if __name__ == '__main__': _gpus, _command, _name, _logfile = get_io() logline, debug, error, log_done = logline_to_folder(path=_logfile) start_time = time.time() main_done = None exit_code = 0 try: main(_gpus, _command, _name) except Exception as e: error("An exception has occurred", "\n", traceback.format_exc()) exit_code = 1 else: logline('Ran successfully') finally: logline( 'Runtime for training/testing is', Timer.stringify_time(Timer.format_time(main_done - start_time))) logline( 'Total runtime is', Timer.stringify_time(Timer.format_time(time.time() - start_time))) log_done() sys.exit(exit_code)
        # NOTE(review): chunk starts mid-loop — the loop header matching this
        # `break` (and the enclosing function definition) are outside this
        # view; reconstructed indentation should be confirmed against the
        # full file. `timer`/`max_users` presumably bound by that outer scope.
        if timer.current >= max_users:
            break
    debug('Runtime is', timer.report_total_time())
    logline('Generating concatenated results')
    # Emit the collected anomaly rows as JSON, either to stdout (via the
    # logger) or to the configured output file.
    if output_file == 'stdout':
        logline("Outputting results to stdout\n\n\n")
        logline('Final value is', anomaly_rows_list)
        logline(json.dumps(anomaly_rows_list))
    else:
        logline('Outputting results to', output_file)
        with open(output_file, 'w') as out_file:
            out_file.write(json.dumps(anomaly_rows_list))
        logline('Output results to', output_file)
    # Optionally delete the encoded input file now that results are written.
    if REMOVE_INPUT_FILE:
        os.remove(input_file)
        logline('Removed encoded file')
    else:
        logline('Not Removing encoded file')
    logline('Done, closing files and stuff')


if __name__ == '__main__':
    # Script entry point: time the whole run and report it on completion.
    start_time = time.time()
    main()
    logline('Total runtime is', Timer.stringify_time(Timer.format_time(time.time() - start_time)))