def main():
    """Entry point: derive batch/LR/loader settings for distributed or
    single-process runs, set up experiment logging on the root rank, seed
    RNGs, and launch training/validation/testing."""
    global is_root_rank
    if FLAGS.use_distributed:
        # Distributed: global batch = per-GPU batch x world size, but each
        # process's loader still serves per-GPU-sized batches.
        udist.init_dist()
        FLAGS.batch_size = udist.get_world_size() * FLAGS.per_gpu_batch_size
        FLAGS._loader_batch_size = FLAGS.per_gpu_batch_size
        if FLAGS.bn_calibration:
            FLAGS._loader_batch_size_calib = \
                FLAGS.bn_calibration_per_gpu_batch_size
        # Share the configured loader workers among processes on this node.
        FLAGS.data_loader_workers = round(
            FLAGS.data_loader_workers / udist.get_local_size())
        is_root_rank = udist.is_master()
    else:
        # Single process (possibly multi-GPU): one loader serves the whole
        # global batch.
        num_gpus = torch.cuda.device_count()
        FLAGS.batch_size = num_gpus * FLAGS.per_gpu_batch_size
        FLAGS._loader_batch_size = FLAGS.batch_size
        if FLAGS.bn_calibration:
            FLAGS._loader_batch_size_calib = (
                FLAGS.bn_calibration_per_gpu_batch_size * num_gpus)
        is_root_rank = True
    # Linear learning-rate scaling with the global batch size.
    FLAGS.lr = FLAGS.base_lr * (FLAGS.batch_size / FLAGS.base_total_batch)
    # NOTE: don't drop last batch, thus must use ceil, otherwise learning rate
    # will be negative
    FLAGS._steps_per_epoch = int(
        np.ceil(NUM_IMAGENET_TRAIN / FLAGS.batch_size))
    if is_root_rank:
        # Only the root rank creates the timestamped experiment directory
        # and writes logs.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        create_exp_dir(
            FLAGS.log_dir,
            FLAGS.config_path,
            blacklist_dirs=[
                'exp', '.git', 'pretrained', 'tmp', 'deprecated', 'bak'],
        )
        setup_logging(FLAGS.log_dir)
        for key, expanded in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(key, expanded))
        logging.info(FLAGS)
    # All ranks seed identically for reproducibility.
    set_random_seed(FLAGS.get('random_seed', 0))
    with SummaryWriterManager():
        train_val_test()
def main():
    """Entry point for evaluation-only runs: force test mode, set up
    distributed state and master-rank logging, seed RNGs, and run `val`."""
    # Evaluation only -- no training loop.
    FLAGS.test_only = True
    mc.setup_distributed()
    if udist.is_master():
        # Master rank gets a timestamped "-eval" log directory.
        FLAGS.log_dir = '{}/{}'.format(
            FLAGS.log_dir, time.strftime("%Y%m%d-%H%M%S-eval"))
        setup_logging(FLAGS.log_dir)
        for key, expanded in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(key, expanded))
        logging.info(FLAGS)
    # All ranks seed identically for reproducibility.
    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        val()
def main():
    """Entry point for segmentation/detection-style runs: pick the dataset's
    training-set size, optionally decode a compact `net_params` string into
    model hyper-parameters, set up master-rank logging, seed RNGs, and launch
    training/validation/testing."""
    # Training-set sizes per dataset; default is ImageNet.
    NUM_IMAGENET_TRAIN = {
        'cityscapes': 2975,
        'ade20k': 20210,
        'coco': 149813,
    }.get(FLAGS.dataset, 1281167)
    mc.setup_distributed(NUM_IMAGENET_TRAIN)

    if FLAGS.net_params and FLAGS.model_kwparams.task == 'segmentation':
        # net_params format: tag-inputchannels-block1-block2-block3-block4-last
        # where each field is '_'-separated integers.
        (tag, raw_channels, raw_b1, raw_b2, raw_b3, raw_b4,
         raw_last) = FLAGS.net_params.split('-')

        def _parse_ints(spec):
            # '_'-separated integer list, e.g. "32_64" -> [32, 64].
            return [int(tok) for tok in spec.split('_')]

        input_channels = _parse_ints(raw_channels)
        last_channel = int(raw_last)
        inverted_residual_setting = []
        for block in (_parse_ints(raw_b1), _parse_ints(raw_b2),
                      _parse_ints(raw_b3), _parse_ints(raw_b4)):
            # block layout: [repeats, stride/expand, then two halves of
            # per-layer settings split at -(len/2 - 1) from the end.
            split = int(len(block) / 2 - 1)
            for _ in range(block[0]):
                inverted_residual_setting.append(
                    [block[1], block[2:-split], block[-split:]])
        FLAGS.model_kwparams.input_channel = input_channels
        FLAGS.model_kwparams.inverted_residual_setting = \
            inverted_residual_setting
        FLAGS.model_kwparams.last_channel = last_channel

    if udist.is_master():
        # Master rank creates the timestamped experiment dir and writes logs.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        # yapf: disable
        create_exp_dir(FLAGS.log_dir, FLAGS.config_path, blacklist_dirs=[
            'exp', '.git', 'pretrained', 'tmp', 'deprecated', 'bak', 'output'])
        # yapf: enable
        setup_logging(FLAGS.log_dir)
        for key, expanded in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(key, expanded))
        logging.info(FLAGS)
    # All ranks seed identically for reproducibility.
    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        train_val_test()
def main():
    """Entry point: set up distributed state for ImageNet-sized training,
    configure master-rank logging, seed RNGs, and launch
    training/validation/testing."""
    # ImageNet training-set size (images).
    NUM_IMAGENET_TRAIN = 1281167
    mc.setup_distributed(NUM_IMAGENET_TRAIN)
    if udist.is_master():
        # Master rank creates the timestamped experiment dir and writes logs.
        FLAGS.log_dir = '{}/{}'.format(FLAGS.log_dir,
                                       time.strftime("%Y%m%d-%H%M%S"))
        # yapf: disable
        create_exp_dir(FLAGS.log_dir, FLAGS.config_path, blacklist_dirs=[
            'exp', '.git', 'pretrained', 'tmp', 'deprecated', 'bak'])
        # yapf: enable
        setup_logging(FLAGS.log_dir)
        for key, expanded in _ENV_EXPAND.items():
            logging.info('Env var expand: {} to {}'.format(key, expanded))
        logging.info(FLAGS)
    # All ranks seed identically for reproducibility.
    set_random_seed(FLAGS.get('random_seed', 0))
    with mc.SummaryWriterManager():
        train_val_test()
def _wallet_value(client):
    """Fetch *client*'s wallet and return its total USD value."""
    wallet = client.get_wallet()
    return client.get_wallet_value(wallet)


def main():
    """Sum the USD value of wallets across all configured exchanges.

    Reads API keys from a YAML file (``-f``, default
    ``settings/api_keys.yml``), queries each supported exchange plus any
    Etherscan-tracked Ethereum addresses, and logs per-exchange and total
    worth.
    """
    parser = argparse.ArgumentParser(
        description="Get your total worth of coins across exchanges")
    parser.add_argument("-f",
                        default="settings/api_keys.yml",
                        help="File path for api keys")
    args = parser.parse_args()

    # load the logging configuration
    setup_logging()
    logger = logging.getLogger(__name__)
    # mute request log
    logging.getLogger("requests").setLevel(logging.WARNING)

    # Load the api keys. safe_load: the file is a plain key/secret mapping,
    # and bare yaml.load() is deprecated (and a TypeError on PyYAML >= 6).
    with open(args.f, 'r') as ymlfile:
        api_keys = yaml.safe_load(ymlfile)

    # Identical client protocol (get_wallet / get_wallet_value) for these
    # exchanges, so dispatch through one table instead of copy-pasted branches.
    exchange_classes = {
        "bittrex": bittrex.Bittrex,
        "bitfinex": bitfinex.Bitfinex,
        "coinbase": coinbase.Coinbase,
    }

    total_value = 0.
    # Ethereum addresses already processed (accounts may be dicts, so a list
    # rather than a set).
    seen_accounts = []
    for exchange, auth in api_keys.items():
        if exchange in exchange_classes:
            client = exchange_classes[exchange](auth['key'], auth['secret'])
            value = _wallet_value(client)
            logger.info("%s worth(USD):$%s" % (exchange, value))
            total_value = total_value + value
        elif exchange == "etherscan":
            # process each eth address, skipping duplicates
            for acc in auth['account']:
                if acc in seen_accounts:
                    continue
                seen_accounts.append(acc)
                client = etherscan.Etherscan(auth['key'], acc)
                value = _wallet_value(client)
                # NOTE(review): assumes each account entry is a mapping with
                # an 'address' key -- confirm against the config format.
                logger.info("Ethereum address %s worth(USD):$%s" %
                            (acc['address'], value))
                total_value = total_value + value
    # (typo fix: was "crpyto")
    logger.info("Total crypto worth(usd):$%s" % total_value)