def main(args):
    """Parse the CMS sample files named in the config and merge the results.

    Args:
        args: parsed CLI namespace providing `log_fn` (log file path),
            `config` (path to a JSON config file), and `sample_num`
            (sample identifier substituted into the output filename).
    """
    logger = logging.getLogger(__name__)
    configure_logger(logger, args.log_fn)

    config = load_json(args.config)

    # Beneficiary summary first, then one parsing pass per claim type.
    beneficiary_summary = run_beneficiary_summary_parser(
        args.sample_num, config, logger)
    claim_summary = {
        claim_type: run_claim_parser(
            claim_type, args.sample_num, config[claim_type], logger)
        for claim_type in ('inpatient', 'outpatient', 'carrier', 'pde')
    }

    # The config's output path uses '*' as a placeholder for the sample number.
    combine_files(beneficiary_summary, claim_summary,
                  config['output'].replace('*', args.sample_num), logger)
def main(args):
    """Build a feature matrix for TKR (total knee replacement) readmission
    analysis from member records and write it out as CSV.

    Args:
        args: parsed CLI namespace providing `log_fn`, `json_fn_in`
            (multi-line JSON input), `num_dx` / `num_proc` (how many common
            diagnosis / procedure codes to keep), and `csv_fn_out`.
    """
    logger = logging.getLogger(__name__)
    configure_logger(logger, args.log_fn)

    members = load_json(args.json_fn_in, multi_lines=True)
    # Fix: log through the configured module logger, not the root logger via
    # logging.info(), so these messages honor the handlers/levels set up in
    # configure_logger().
    logger.info('{} members loaded'.format(len(members)))

    tkr_members = filter_out_tkr_members(members)
    logger.info('{} members were found to have TKR in history'.format(
        len(tkr_members)))

    # Labels members in place; presumably flags readmissions — the helper is
    # defined elsewhere in the project.
    label_readmitted_tkr_members(tkr_members, logger)
    logger.info('Finished labeling readmitted TKR members')

    matrix_builder = Builder()
    matrix_builder.find_common_codes(tkr_members, args.num_dx, args.num_proc)
    header, matrix = matrix_builder.build_matrix(tkr_members)
    # Guard the width lookup: an empty matrix would raise IndexError on
    # matrix[0].
    logger.info('Finished building matrix, with size ({}, {})'.format(
        len(matrix), len(matrix[0]) if matrix else 0))

    # Fix: csv.writer on Python 3 requires a text-mode file opened with
    # newline='' — the original 'wb' mode raises TypeError on the first
    # writerow. (Assumes Python 3; the rest of the file uses str.format and
    # no Python-2-only constructs.)
    with open(args.csv_fn_out, 'w', newline='') as fp:
        writer = csv.writer(fp)
        writer.writerow(header)
        writer.writerows(matrix)
# --- CLI options (extends an argparse parser created earlier in the file) ---
parser.add_argument('-p', '--path', nargs='+', type=str, default=None,
                    metavar="CONVMAPS", help="convmaps")
parser.add_argument('-o', '--out', nargs='?', type=str, default=None,
                    metavar="FOLDER", help="output folder")
# Debugging/Logging
parser.add_argument('-d', '--debug', action='store_true',
                    help="enable DEBUG output")
parser.add_argument('-l', '--logfile', nargs='?', default=None,
                    metavar="LOGFILE", help="logfile path")
parser.add_argument('-v', '--verbose', action='store_true',
                    help="enable INFO output")
# Power
# NOTE(review): 3071 presumably is the maximum multipole for the power
# spectrum — confirm against the consumer of args.lmax (not visible here).
parser.add_argument('--lmax', nargs='?', type=int, default=3071)
args = parser.parse_args()
configure_logger(debug=args.debug, logfile=args.logfile, verbose=args.verbose)
logger = logging.getLogger(__name__)
# create worker pool
logger.info("starting {}".format("power_euclid.py"))
pool = multiprocessing.Pool(processes=2)
cl_list = []
# Load each convergence map as float32; the loop body continues past this
# excerpt (timer and pool are consumed further down).
for path in args.path:
    logger.info("using map {}".format(path))
    timer = time.time()
    k_map = np.array(np.load(path), dtype=np.float32)
def pytest_configure(config):
    """Pytest hook: install the thread debugger, then configure logging.

    Runs once at session start. The whole pytest `config` object is handed
    to util.configure_logger — presumably it extracts its own options from
    it; confirm against util's signature.
    """
    util.setup_thread_debugger()
    util.configure_logger(config)
# --- CLI options (extends an argparse parser created earlier in the file) ---
parser.add_argument('--network', type=str, choices=['mainnet', 'rinkeby'],
                    default='mainnet',
                    help="Choose one network (mainnet or rinkeby)")
# type=str.upper normalizes the user's input so lowercase level names match
# the uppercase choices.
parser.add_argument(
    '--logging', type=str.upper,
    choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
    default='INFO', help="Logging level.")
args = parser.parse_args()
# Map the level name (e.g. 'INFO') to its numeric logging constant.
util.configure_logger(getattr(logging, args.logging))
if args.jsonFile is not None:
    # Read input JSON.
    # NOTE(review): assert is stripped under `python -O`; consider an
    # explicit existence check with a raised error instead.
    assert os.path.isfile(args.jsonFile)
    output_dir = os.path.dirname(args.jsonFile)
    inst = util.read_instance_from_file(args.jsonFile)
else:
    # Get instance from blockchain.
    output_dir = './'
    contract_reader = ContractReader(args.network)
    inst = util.read_instance_from_blockchain(contract_reader)
# Register the instance's token metadata globally before further processing.
TokenInfo.update(inst['tokens'])
# Get number of orders per token pair.
def pytest_configure(config):
    """Pytest hook: install the thread debugger, then configure logging
    at the level supplied by the `tests_log_level` pytest option.

    Runs once at session start, before any tests execute.
    """
    util.setup_thread_debugger()
    util.configure_logger(config.getoption('tests_log_level'))
t2_name, xrates, cumulated_sell_amounts, cumulated_buy_amounts, plot_title="orderbook-%s-%s" % (t1_name, t2_name), output_dir=output_dir, ipython=ipython, **kwargs) return if __name__ == "__main__": """Main function.""" util.configure_logger(logging.INFO) # Process command line arguments. parser = argparse.ArgumentParser( description='Input file and output directory parser.') parser.add_argument( 't1', type=str, help="The name of the first token.") parser.add_argument( 't2', type=str, help="The name of the second token.")