def __init__(self, config_file_path, debug_mode=False, import_dir='', export_dir=''):
    """Load bridge configuration and verify Cb Response connectivity.

    Args:
        config_file_path: Path to the config file understood by ``parse_config``.
        debug_mode: Accepted for interface compatibility; note the effective
            debug flag is read from the config file (``config_dict['debug']``).
        import_dir: Directory to import data from (stored as-is).
        export_dir: Directory to export data to; created if it does not exist.

    Exits the process with status -1 if the Cb Response server cannot be
    reached with the configured URL/token.
    """
    # Parse config file and save off the information we need.
    config_dict = parse_config(config_file_path)

    self.server_url = config_dict.get('server_url', 'https://127.0.0.1')
    self.api_token = config_dict.get('api_token', '')
    self.sites = config_dict.get('sites', [])
    self.debug = config_dict.get('debug', False)
    self.export_dir = export_dir
    self.import_dir = import_dir
    self.http_proxy_url = config_dict.get('http_proxy_url', None)
    self.https_proxy_url = config_dict.get('https_proxy_url', None)

    if self.export_dir:
        # makedirs(exist_ok=True) avoids the race between an exists() check
        # and mkdir(), and also creates intermediate directories if needed.
        os.makedirs(self.export_dir, exist_ok=True)

    #
    # Test Cb Response connectivity
    #
    try:
        self.cb = CbApi(server=self.server_url, token=self.api_token, ssl_verify=False)
        self.cb.feed_enum()
    except Exception:
        # Was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; Exception preserves the intended "fail fast
        # on any API error" behavior without that hazard.
        logger.error(traceback.format_exc())
        sys.exit(-1)
def __init__(self, config_file_path, debug_mode=False, import_dir='', export_dir=''):
    """Load bridge configuration and verify Cb Response connectivity.

    Args:
        config_file_path: Path to the config file understood by ``parse_config``.
        debug_mode: Accepted for interface compatibility; note the effective
            debug flag is read from the config file (``config_dict['debug']``).
        import_dir: Directory to import data from (stored as-is).
        export_dir: Directory to export data to; created if it does not exist.

    Exits the process with status -1 if the Cb Response server cannot be
    reached with the configured URL/token.
    """
    # Parse config file and save off the information we need.
    config_dict = parse_config(config_file_path)

    self.server_url = config_dict.get('server_url', 'https://127.0.0.1')
    self.api_token = config_dict.get('api_token', '')
    self.sites = config_dict.get('sites', [])
    self.debug = config_dict.get('debug', False)
    self.export_dir = export_dir
    self.import_dir = import_dir

    if self.export_dir:
        # makedirs(exist_ok=True) avoids the race between an exists() check
        # and mkdir(), and also creates intermediate directories if needed.
        os.makedirs(self.export_dir, exist_ok=True)

    #
    # Test Cb Response connectivity
    #
    try:
        self.cb = CbApi(server=self.server_url, token=self.api_token, ssl_verify=False)
        self.cb.feed_enum()
    except Exception:
        # Was a bare `except:`, which would also swallow SystemExit and
        # KeyboardInterrupt; Exception preserves the intended "fail fast
        # on any API error" behavior without that hazard.
        logger.error(traceback.format_exc())
        sys.exit(-1)
    # NOTE(review): continuation of an evaluation function whose `def` line is
    # outside this chunk — code left unchanged, comments only.
    #
    # Collapse per-sample logits into per-group predictions: reshape to
    # (batch, group, classes), average the softmax probabilities over the
    # group dimension, then argmax over classes.
    pred = pred.view(batch_size, group_size, -1)
    pred = nn.functional.softmax(pred, dim=-1).mean(1)
    pred_classes = pred.argmax(dim=1)
    # Tally predictions per class. Only classes 0 and 1 are counted here, so
    # this presumably assumes a binary classifier — confirm against the model.
    num_each_class[0] += pred_classes.eq(0).sum().item()
    num_each_class[1] += pred_classes.eq(1).sum().item()
    total = num_each_class.sum()
    print_and_log('Prediction set classification results:')
    print_and_log('A: {0} / {2}\tB: {1} / {2}'.format(num_each_class[0], num_each_class[1], total))
    return num_each_class


if __name__ == '__main__':
    # Build the CLI parser, load the config file, then layer CLI overrides
    # on top of the file's values.
    parser = config_util.make_parser()
    args = parser.parse_args()
    config = config_util.parse_config(args.config)
    config_util.update_config(config, args)

    # Insert the git hash into the config so that it gets logged
    config['Logging']['githash'] = get_git_hash()

    # Create the log root if it does not already exist; an existing
    # directory is fine.
    try:
        os.mkdir(config['Logging']['log_root'])
    except FileExistsError:
        pass

    model_config = config['Model']
    if model_config['model'] not in nn_models.models.keys():
        # NOTE(review): the membership check uses model_config['model'] but
        # the error message prints args.model — these may differ after
        # update_config(); verify they are always the same value.
        print('--model {} not in list of available models'.format(args.model))
        sys.exit(1)
        # NOTE(review): continuation of a parser.add_argument(...) call whose
        # opening line (flag name) is outside this chunk — code unchanged.
        type=float, default=None,
        help='If set, does elastic net with the given l1_ratio.'
        ' (0 corresponds to purely L2, 1 corresponds to purely L1)')
parser.add_argument('--plot', action='store_true',
                    help='If set, plots the paths at the end')
parser.add_argument(
    '--val-precision', action='store_true',
    help='Use average precision as the "val accuracy" metric')
parser.add_argument('--order-rescale', nargs='+', type=float, default=None)
parser.add_argument('--doping-level', type=float, default=None)

args = parser.parse_args()

# Load the config and force evaluation-time settings: point at the saved
# model checkpoint, disable oversampling, and optionally override the
# loader's doping level from the CLI.
config = parse_config(args.config)
config['Model']['saved_model'] = args.model_file
config['Preprocessing']['oversample'] = False
if args.doping_level is not None:
    config['Loader Kwargs']['doping_level'] = args.doping_level

# Prefer the GPU when CUDA is available; fall back to CPU otherwise.
if torch.cuda.is_available():
    print('Loaded CUDA successfully!')
    device = 'cuda'
else:
    print('Could not load CUDA!')
    device = 'cpu'

# Build the model from the (now-patched) config and move it to the device.
model = nn_models.from_config(config).to(device=device)