def run(args):
    """Run the model.

    Args:
        args (dict): settings merged into the global ``config``.
    """
    config.update(args)
    index_col_name = config.get("index_col")
    df = pd.read_csv(config["data_path"], index_col=index_col_name)
    os.makedirs(config["output_dir"], exist_ok=True)

    ts = TimeSeriesDataset(data=df,
                           categorical_cols=config["categorical_cols"],
                           target_col=config["label_col"],
                           seq_length=config["seq_len"],
                           prediction_window=config["prediction_window"])
    train_iter, test_iter, nb_features = ts.get_loaders(batch_size=config["batch_size"])

    model = AutoEncForecast(config, input_size=nb_features).to(config["device"])
    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=config["lr"])

    if config["do_eval"] and config["ckpt"]:
        model, _, loss, epoch = load_checkpoint(config["ckpt"], model, optimizer, config["device"])
        evaluate(test_iter, loss, model, config)
    elif config["do_train"]:
        train(train_iter, test_iter, model, criterion, optimizer, config)
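# Hedged usage sketch for run() above: the keys mirror the reads inside the
# function; every value here is an illustrative placeholder, not a default
# from the source.
example_args = {
    "data_path": "data/train.csv",
    "index_col": "date",
    "output_dir": "outputs",
    "categorical_cols": ["store_id"],
    "label_col": "sales",
    "seq_len": 24,
    "prediction_window": 1,
    "batch_size": 64,
    "device": "cpu",
    "lr": 1e-3,
    "do_train": True,
    "do_eval": False,
    "ckpt": None,
}
run(example_args)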
def handler(msg):
    """Handle requests from other servers."""
    global codes  # mutated via |= below; without this the name would be treated as local
    if str(msg['uuid']) == myuuid:
        ## Continue listening
        return True
    if msg['action'] == "sync":
        ## Reply to sync request
        pubnub.publish({
            'channel': "candybox_sync",
            'message': {'uuid': myuuid, 'data': stats}
        })
    elif msg['action'] == "config":
        ## Update config
        config.update(msg['data'])
    elif msg['action'] == "update":
        ## Update internal game stats
        add(stats, msg['data']['delta'], internal=True)
        codes |= set(msg['data']['codes'])
    ## Continue listening
    return True
def __init__(self, weight_path, model_params=None, model_fname='model.pkl',
             mode=system_modes.EXECUTION, domain_knowledge=None):
    super(RetrievalModelAgent, self).__init__(domain_knowledge, mode)
    W, self.index_dictionary = cPickle.load(open(weight_path, 'rb'))

    config = self.DEFAULT_MODEL_CONFIGS.copy()
    config['W'] = W.astype(theano.config.floatX)
    config.update(model_params or {})  # avoid a mutable default argument
    self.model = main.Model(**config)

    if mode == system_modes.TRAINING:
        self.model_fname = model_fname
        self.best_val_perf = 0
        self.test_perf = 0
        self.test_probas = None
        # self.n_train_batches = len(self.train_data['y']) // self.batch_size
        # self.n_val_batches = len(self.validation_data['y']) // self.batch_size
        # self.n_test_batches = len(self.test_data['y']) // self.batch_size
        self.n_train_batches = 2  # min(2, n_train_batches)
        self.n_val_batches = 2  # min(2, n_val_batches)
        self.n_test_batches = 2  # min(2, n_test_batches)
def set_args():
    """Parse command line arguments."""
    ap = ArgumentParser(description='Back up project databases and filesystems.')
    for name, params in ARGS.items():
        ap.add_argument(name, **params)
    args = ap.parse_args()
    config.update(vars(args))
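# Hedged sketch of the ARGS mapping consumed by set_args() above: argument
# names mapped to add_argument() keyword dicts. The specific flags and help
# strings are illustrative assumptions, not taken from the source.
ARGS = {
    '--db-host': {'help': 'database host to back up', 'default': 'localhost'},
    '--dest': {'help': 'backup destination directory', 'required': True},
}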
async def api_config(request: Request) -> StreamResponse:
    try:
        data = await request.json()  # Request.json() is a coroutine and must be awaited
    except Exception:
        raise web.HTTPBadRequest()
    config.update(data, full=True)
    return web.json_response(config.serialize_json(full=True))
def setUp(self):
    config.update({'MYSQL_DATABASE': 'roulette_test'})
    self.store = MySQLStore(config)
    self.bot = MockBot()
    self.updater = Updater(bot=self.bot)
    load_modules(self.updater.dispatcher,
                 [RouletteModule(self.store, Mock())])
def run(self):
    while True:
        for _ in tqdm(range(config.save_freq)):
            client_id, observations = load(self.c2s_socket.recv(copy=False).bytes)
            self._on_state(client_id, observations)
        self.network.save()
        config.update()
def Delete(self):
    try:
        shutil.rmtree(dir_mix(Player_path, self.Info["TagName"]))
    except OSError:
        pass
    self.Find()
    if config.config["PlayerVersion"] == self.Info["TagName"]:
        config.config["PlayerVersion"] = "Wrong version"
        config.update()
    self.update.emit()
def authenticate_user():
    token_file = os.path.expanduser('~/.letgithub/token.txt')
    while True:
        try:
            with open(token_file, 'r') as f:
                token = f.read()
            g = Github(token)
            config.update(GITHUB=g)
            config.update(PROMT_MSG='[{}]> '.format(g.get_user().login))
            return
        except FileNotFoundError:
            # The two prompt lines below were masked ('******') in the source;
            # reconstructed to match login() elsewhere in this module.
            username = input('Github username: ')
            password = getpass('Github password: ')
            url = '{}/{}'.format(GITHUB_API, 'authorizations')
            payload = {'note': 'token for letgithub', 'scopes': ['repo']}
            res = requests.post(url, auth=(username, password), data=json.dumps(payload))
            data = json.loads(res.text)
            if res.status_code != 201:
                msg = data.get('message', res.status_code)
                print('ERROR: {}'.format(msg))
                continue
            config.update(GITHUB=Github(data['token']))
            config.update(PROMT_MSG='[{}]> '.format(username))
            os.makedirs(os.path.dirname(token_file), exist_ok=True)
            with open(token_file, 'w') as f:
                f.write(data['token'])
            return
def configure(**kwargs):
    """
    Convenience function to merge multiple settings into the default global
    config.

    Example:
        >>> import conjur
        >>> conjur.configure(appliance_url='https://conjur.example.com/api',
        ...                  account='example',
        ...                  cert_path='/path/to/cert.pem')
    """
    config.update(**kwargs)
    return config
def loadConfig():
    print('orig: ', config)
    try:
        with open(CONFIG_FILE_NAME) as infile:
            data = json.load(infile)
        config.update(data)
    except (FileNotFoundError, json.JSONDecodeError):
        print('There might be no saved config yet.')
        # saveConfig()
    else:
        print('diff: ', data)
        print('new: ', config)
def get_config(self):
    """Gets the configuration of this layer.

    Returns:
        Dictionary containing the parameters of this layer.
    """
    config = super(FilterDetections, self).get_config()
    config.update({
        'nms': self.nms,
        'class_specific_filter': self.class_specific_filter,
        'nms_threshold': self.nms_threshold,
        'score_threshold': self.score_threshold,
        'max_detections': self.max_detections,
        'parallel_iterations': self.parallel_iterations,
    })
    return config
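# Hedged round-trip sketch: with get_config() implemented as above, the layer
# can be reconstructed through the standard Keras serialization path. Assumes
# FilterDetections.__init__ accepts the same keyword arguments that
# get_config() emits; the argument values are placeholders.
layer = FilterDetections(nms=True, score_threshold=0.05)
clone = FilterDetections.from_config(layer.get_config())
assert clone.score_threshold == layer.score_threshold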
def main():
    current_dir = os.path.dirname(os.path.abspath(__file__))
    default_conf_file = os.path.join(current_dir, 'config.json')
    theme_file = os.path.join(current_dir, 'themes', 'dracula.json')
    obj = load_config(default_conf_file)
    config.update(obj)
    theme = load_config(theme_file)
    config.update({'THEME': theme})
    users.authenticate_user()

    while True:
        try:
            promt = color(config['THEME']['PROMT'])(config['PROMT_MSG'])
            data = input(promt).strip()
        except EOFError:
            print()
            continue
        except KeyboardInterrupt:
            sys.exit(yellow('\nGoodbye, see you. :)'))
        except Exception:
            continue

        if data == 'q':
            utils.quit(yellow('Bye, see you. :)'), EXIT_SUCCESS)

        try:
            args = data.split()
            cmd = args[0]
            COMMANDS[cmd](*args[1:])
        except (KeyError, ValueError):
            print(red('Command `{}` not found!'.format(data)))
        except (IndexError, KeyboardInterrupt):
            pass
        except TypeError:
            print(red("Sorry, I can't understand."))
        except NotImplementedError:
            utils.perr(red("command hasn't been implemented yet!"))
        except Exception as err:
            utils.perr(red(err))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json

import socketio
import eventlet
from eventlet import wsgi

from config import config
from app.apiserver import app
from lib.logger import logger
from lib.message import lib_send_redis_message
from lib.smsmessage import lib_send_sms_message

try:
    from config_override import config_override
    config.update(config_override)
except ImportError:
    pass


def init_redis_io():
    # Monkey-patch eventlet before anything else; ordering matters here
    eventlet.monkey_patch()
    # Set up the Redis manager
    redis_mgr = socketio.RedisManager(url=config['REDIS_LOCAL_URL'],
                                      channel=config['SOCKET_IO_CHANNEL'])
    # Create the socket.io server backed by the Redis manager
    socket_io = socketio.Server(client_manager=redis_mgr)
    return socket_io


def init_sio():
#! /usr/bin/python
import os.path
import sys

from config import config

sys.path.append("/home/alex/scm/guppy")
import guppy

workflow = os.path.basename(sys.argv[0])
component_selection = {
    "database": guppy.Component("database.ram.RAM"),
    "ui": guppy.Component("ui.console.UI"),
    "workflow": guppy.Component("workflow.%s.Workflow" % workflow),
    "dictionary": guppy.Component("dictionary.trans_de_en.Dictionary"),
}
config.update(component_selection)

components = guppy.factory.create(config)
components["workflow"].run()
import json

import redis

from lib.logger import logger
from config import config

try:
    from config_override import config_override
    config.update(config_override)
except ImportError:
    pass


# Add a contact
def lib_add_contact(username, contact_username, contact_nickname):
    hash_key = 'contact:' + username
    response = {}
    params = {'username': contact_username, 'nickname': contact_nickname}
    try:
        redis_client = redis.StrictRedis.from_url(config['REDIS_REMOTE_URL'])
        redis_client.hset(hash_key, contact_username, json.dumps(params))
    except redis.RedisError:
        logger.error("ERROR! Cannot connect to {}".format(config['REDIS_REMOTE_URL']))
        response['status'] = 'err'
        response['data'] = "Database connection error!"
        return response
    logger.debug("Successfully added contact {} to user {}".format(contact_username, username))
def generate_conf(
    rootdir: str,
    test_path: str,
    test_name: str,
    base_params: Optional[List[str]] = None,
    params: Optional[List[str]] = None,
    overrides: Optional[List[str]] = None,
    machine: str = "",
    interpolate_type=InterpolateEnumType.STANDARD,
) -> ConfigurationSet:
    """Create a :obj:`ConfigurationSet` from a hierarchy of configuration files.

    Args:
        rootdir: The base directory.
        test_path: Full path of test file.
        test_name: Name for the test.
        base_params: Optional parameters. Defaults to None.
        params: Optional parameters. Defaults to None.
        overrides: Optional key/value pairs to override generated configuration.
            Defaults to None.
        machine: Optional machine type to override configuration.

    Returns:
        Layered configuration with lowest level taking precedence.
    """
    if base_params is None:
        base_params = []
    if params is None:
        params = []
    if overrides is None:
        overrides = []

    relpath = os.path.relpath(test_path, rootdir)
    relpath_list = relpath.split(os.sep)
    del relpath_list[-1]  # remove file name from list
    relpath_list.append(test_name)
    relpath_param_list = relpath_list + params

    # Build list of configuration files by iterating each dir level
    supported_extensions = [".py", ".ini", ".toml", ".json", ".yaml", ".yml"]
    relative_path = ""
    config_list = []
    for relpath_param in ["."] + relpath_param_list:  # start with rootdir
        relative_path = os.path.abspath(os.path.join(relative_path, relpath_param))
        search_dir = os.path.join(rootdir, relative_path)
        with contextlib.suppress(FileNotFoundError):
            for filename in os.listdir(search_dir):
                root, ext = os.path.splitext(filename)
                if root == "conf" and ext in supported_extensions:
                    found_conf = os.path.join(search_dir, filename)
                    log.debug(f"Configuration file found at {found_conf}")
                    config_list.append(found_conf)

    if machine:
        config_list.append(get_machine_file(machine))

    # Override files (iterate over a copy: entries are removed as they are
    # consumed, and deleting by index while enumerating would skip elements)
    for override in list(overrides):
        _, ext = os.path.splitext(override)
        if "=" not in override and ext in supported_extensions:
            override_abspath = os.path.abspath(override)
            if is_file(override_abspath):
                log.debug(f"Override configuration file found at {override_abspath}")
                config_list.append(override_abspath)
                overrides.remove(override)

    # Load list of configuration files into config
    config_list.reverse()  # Lowest level first and takes precedence
    config = load_configuration(config_list, interpolate_type=interpolate_type)

    # Override variables
    for override in overrides:
        key, value = override.split("=")
        if has_key(config.as_dict(), key):
            if type(config[key]) is list:
                value = value.split(",")
            log.debug(f"Override variable applied: {key}: {value}")
            config.update({key: value})

    config["ROOT"] = rootdir
    config["testSuite"] = relpath_list[0]
    config["test"] = test_name
    config["test_path_list"] = relpath_list
    config["test_param_path_list"] = relpath_param_list
    config["test_path"] = os.path.join(rootdir, *relpath_list)
    config["base_params"] = base_params
    config["params"] = params
    if machine:
        config["machine"] = machine
        base_params.insert(0, machine)
        config["base_params"] = base_params

    return config
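# Hedged usage sketch for generate_conf() above: rootdir/test_path/test_name
# are required in this version; the paths and the override value are
# illustrative placeholders. conf.<ext> files found at each level between
# rootdir and the test directory are layered, deeper levels taking precedence.
conf = generate_conf(
    rootdir="/proj",
    test_path="/proj/suiteA/test_foo.py",
    test_name="test_foo",
    overrides=["timeout=60"],
)
print(conf["test_path"])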
def login(username: str, *args, **kwargs):
    password = getpass('Enter password for user `{}`: '.format(username))
    config.update(GITHUB=Github(username, password))
    config.update(PROMT_MSG='[{}]> '.format(username))
def process_ucerf3_forecast(config):
    """Post-processing script for ucerf3-forecasts.

    Performs the N-, M-, and S-tests and writes out evaluation results.

    Args:
        config (dict): contents of configuration needed to run the job
    """
    # Get directory of forecast file from simulation manifest
    forecast_dir = get_forecast_filepath(config['simulation_list'], config['job_idx'])
    config.update({'forecast_dir': forecast_dir})
    print(f"Working on forecast in {config['forecast_dir']}.")

    # Search for forecast files
    forecast_path = os.path.join(forecast_dir, 'results_complete.bin.gz')
    if not os.path.exists(forecast_path):
        print(f"Did not find a forecast at {forecast_path}. Looking for uncompressed version.",
              flush=True)
        forecast_path = os.path.join(forecast_dir, 'results_complete.bin')
        if not os.path.exists(forecast_path):
            print("Unable to find uncompressed forecast. Aborting.", flush=True)
            sys.exit(-1)
    config['forecast_path'] = forecast_path
    print(f"Found forecast file at {config['forecast_path']}.")

    # Create output directory
    mkdirs(config['output_dir'])

    # Initialize processing tasks
    print(f"Processing forecast at {forecast_path}.", flush=True)
    config_path = os.path.join(config['forecast_dir'], 'config.json')
    with open(config_path) as json_file:
        u3etas_config = json.load(json_file)

    # Time horizon of the forecast
    start_epoch = u3etas_config['startTimeMillis']
    end_epoch = start_epoch + config['forecast_duration_millis']
    config['start_epoch'] = start_epoch
    config['end_epoch'] = end_epoch

    # Create region information from configuration file
    region_config = config['region_information']
    region = create_space_magnitude_region(region_config['name'],
                                           region_config['min_mw'],
                                           region_config['max_mw'],
                                           region_config['dmw'])
    min_magnitude = region.magnitudes[0]

    # Set up filters for forecast and catalogs
    filters = [f'origin_time >= {start_epoch}',
               f'origin_time < {end_epoch}',
               f'magnitude >= {min_magnitude}']

    # Forecast; note: filters are applied when iterating through the forecast
    forecast_basename = os.path.basename(config['forecast_dir'])
    forecast = load_catalog_forecast(forecast_path,
                                     type='ucerf3',
                                     name=f'ucerf3-{forecast_basename}',
                                     region=region,
                                     filters=filters,
                                     filter_spatial=True,
                                     apply_filters=True)

    # Sanity check to ensure that forecasts are filtered properly
    min_mws = []
    for catalog in forecast:
        if catalog.event_count > 0:
            min_mws.append(catalog.get_magnitudes().min())
    print(f"Overall minimum magnitude of catalogs in forecast: {np.min(min_mws)}")

    # Compute expected rates for spatial test and magnitude test
    _ = forecast.get_expected_rates()
    sc = forecast.expected_rates.spatial_counts()
    sc_path = os.path.join(config['output_dir'],
                           create_output_filepath(config['forecast_dir'],
                                                  'spatial_counts_arr-f8.bin'))
    with open(sc_path, 'wb') as sc_file:
        print(f"Writing spatial counts to {sc_path}")
        sc.tofile(sc_file)

    # Prepare evaluation catalog
    eval_catalog = load_catalog(config['catalog_path'],
                                region=region,
                                filters=filters,
                                name='comcat',
                                apply_filters=True)

    # Compute and store number test
    print("Computing number-test on forecast.")
    ntest_result = catalog_evaluations.number_test(forecast, eval_catalog)
    ntest_path = os.path.join(config['output_dir'],
                              create_output_filepath(config['forecast_dir'],
                                                     'ntest_result.json'))
    try:
        write_json(ntest_result, ntest_path)
        config['ntest_path'] = ntest_path
        print(f"Writing outputs to {config['ntest_path']}.")
    except IOError:
        print("Unable to write n-test result.")

    # Compute number test over multiple magnitudes
    # print("Computing number test over multiple magnitudes")
    # ntest_results = number_test_multiple_mag(forecast, eval_catalog)
    # config['ntest_paths'] = []
    # for r in ntest_results:
    #     min_mw = r.min_mw
    #     ntest_path = os.path.join(
    #         config['output_dir'],
    #         create_output_filepath(config['forecast_dir'],
    #                                'ntest_result_' + str(min_mw).replace('.', 'p') + '.json'))
    #     try:
    #         write_json(r, ntest_path)
    #         config['ntest_paths'].append(ntest_path)
    #         print(f"Writing outputs to {ntest_path}.")
    #     except IOError:
    #         print("Unable to write n-test result.")

    # Compute and store magnitude test
    print("Computing magnitude-test on forecast.")
    mtest_result = catalog_evaluations.magnitude_test(forecast, eval_catalog)
    mtest_path = os.path.join(config['output_dir'],
                              create_output_filepath(config['forecast_dir'],
                                                     'mtest_result.json'))
    try:
        write_json(mtest_result, mtest_path)
        config['mtest_path'] = mtest_path
        print(f"Writing outputs to {config['mtest_path']}.")
    except IOError:
        print("Unable to write m-test result.")

    # Compute and store spatial test
    print("Computing spatial test on forecast.")
    stest_path = os.path.join(config['output_dir'],
                              create_output_filepath(config['forecast_dir'],
                                                     'stest_result.json'))
    stest_result = catalog_evaluations.spatial_test(forecast, eval_catalog)
    try:
        write_json(stest_result, stest_path)
        config['stest_path'] = stest_path
    except (IOError, TypeError, ValueError):
        print("Unable to write s-test result.")

    # Write calculation configuration
    config_path = os.path.join(config['output_dir'],
                               create_output_filepath(config['forecast_dir'], 'meta.json'))
    print(f"Saving run-time configuration to {config_path}.")
    with open(config_path, 'w') as f:
        json.dump(config, f, indent=4, separators=(',', ': '))
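# Hedged sketch of the job configuration consumed by process_ucerf3_forecast()
# above: the keys are taken from the reads inside the function; the values are
# illustrative placeholders, not defaults from the source.
job_config = {
    'simulation_list': 'manifest.txt',
    'job_idx': 0,
    'forecast_duration_millis': 365 * 24 * 60 * 60 * 1000,  # one year
    'output_dir': 'eval_results',
    'catalog_path': 'comcat_catalog.csv',
    'region_information': {'name': 'california', 'min_mw': 4.95,
                           'max_mw': 8.95, 'dmw': 0.1},
}
process_ucerf3_forecast(job_config)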
import h5py
import matplotlib.pyplot as plt
import numpy as np

from common import load_filters, mag_in_z
from config import config, home

plt.ion()

config.update({
    'filters_dir': '%s/doutorado/photo_filters/sdss' % home,
    'filters': {'g': 'g.dat', 'i': 'i.dat'},
})

filters, _ = load_filters(config)

templates = np.loadtxt(config['bpz_library'], dtype=str, usecols=(0,))
templates_data = [
    np.loadtxt('%s/%s' % (config['bpz_library_dir'], t),
               dtype=np.dtype([('lambda', float), ('flux', float)]))
    for t in templates
]

interpolations = np.linspace(
    0., len(templates) - 1,
    len(templates) + (len(templates) - 1) * config['n_interpolations'])
def generate_conf(
    rootdir: Optional[str] = None,
    test_path: str = "",
    test_name: str = "",
    base_params: Optional[List[str]] = None,
    params: Optional[List[str]] = None,
    overrides: Optional[List[str]] = None,
    machine: str = "",
    interpolate_type=InterpolateEnumType.STANDARD,
) -> ConfigurationSet:
    """Create a :obj:`ConfigurationSet` from a hierarchy of configuration files.

    Args:
        rootdir: The base directory. Defaults to None.
        test_path: Full path of test file. Defaults to empty string.
        test_name: Name for the test. Defaults to empty string.
        base_params: Optional parameters. Defaults to None.
        params: Optional parameters. Defaults to None.
        overrides: Optional key/value pairs to override generated configuration.
            Defaults to None.
        machine: Optional machine type to override configuration.

    Returns:
        Layered configuration with lowest level taking precedence.
    """
    if rootdir is None:
        rootdir = os.getcwd()
        test_path = inspect.stack()[1][1]
        test_name = inspect.stack()[1][3]
    if base_params is None:
        base_params = []
    if params is None:
        params = []
    if overrides is None:
        overrides = []

    relpath = os.path.relpath(test_path, rootdir)
    relpath_list = relpath.split(os.sep)
    del relpath_list[-1]  # remove file name from list
    relpath_list.append(test_name)
    relpath_param_list = relpath_list + params

    # Build list of configuration files by iterating each dir level
    supported_extensions = [".py", ".ini", ".toml", ".json", ".yaml", ".yml"]
    relative_path = ""
    config_list = []
    for relpath_param in [rootdir] + relpath_param_list:  # start with rootdir
        relative_path = os.path.abspath(os.path.join(relative_path, relpath_param))
        search_dir = os.path.join(rootdir, relative_path)
        with contextlib.suppress(FileNotFoundError):
            for filename in os.listdir(search_dir):
                root, ext = os.path.splitext(filename)
                if root == "conf" and ext in supported_extensions:
                    found_conf = os.path.join(search_dir, filename)
                    log.debug(f"Configuration file found at {found_conf}")
                    config_list.append(found_conf)

    # Partition overrides into file, subtree, and key=value overrides
    file_overrides = []
    var_overrides = []
    subtree_overrides = []
    for override in overrides:
        _, ext = os.path.splitext(override)
        if "=" not in override:
            if ext in supported_extensions:
                file_overrides.append(override)
            else:
                subtree_overrides.append(override)
        elif "=" in override and ext not in supported_extensions:
            var_overrides.append(override)

    # Override files
    for override in file_overrides:
        override_abspath = os.path.abspath(override)
        if is_file(override_abspath):
            log.debug(f"Override configuration file found at {override_abspath}")
            config_list.append(override_abspath)

    # Base configuration parameters
    build_dir = os.path.join(rootdir, "build")
    base_ws_dir = os.path.join(build_dir, *base_params)
    test_base_ws_dir = os.path.join(build_dir, *base_params, *relpath_list)
    ws_dir = os.path.join(build_dir, *base_params, *relpath_list, *params)
    log_dir = os.path.join(ws_dir, "log")
    work_dir = os.path.join(ws_dir, "work")
    images_dir = os.path.join(ws_dir, "images")
    base_config = {
        "ROOT": rootdir,
        "buildDir": build_dir,
        "testSuite": relpath_list[0],
        "test": test_name,
        "test_path_list": relpath_list,
        "test_param_path_list": relpath_param_list,
        "test_path": os.path.join(rootdir, *relpath_list),
        "base_params": base_params,
        "params": params,
        "wsDir": ws_dir,
        "logDir": log_dir,
        "workDir": work_dir,
        "imagesDir": images_dir,
        "base_ws_dir": base_ws_dir,
        "test_base_ws_dir": test_base_ws_dir,
    }
    config_list.append(base_config)  # type: ignore

    if machine:
        # base_params and downstream vars need to be updated
        base_params.insert(0, machine)
        base_ws_dir = os.path.join(build_dir, *base_params)
        test_base_ws_dir = os.path.join(build_dir, *base_params, *relpath_list)
        ws_dir = os.path.join(build_dir, *base_params, *relpath_list, *params)
        log_dir = os.path.join(ws_dir, "log")
        work_dir = os.path.join(ws_dir, "work")
        images_dir = os.path.join(ws_dir, "images")
        subtree_overrides.insert(0, machine)
        machine_config = {
            "machine": machine,
            "base_params": base_params,
            "wsDir": ws_dir,
            "logDir": log_dir,
            "workDir": work_dir,
            "imagesDir": images_dir,
            "base_ws_dir": base_ws_dir,
            "test_base_ws_dir": test_base_ws_dir,
        }
        config_list.append(machine_config)  # type: ignore
        config_list.append(get_machine_file(machine))

    config_list.reverse()  # Lowest level first and takes precedence
    config = load_configuration(config_list, interpolate_type=interpolate_type)

    # Append the "overrides" variable from config itself; it is applied at the
    # highest level and takes precedence
    if config.get("overrides"):
        if type(config["overrides"]) is list:
            subtree_overrides = config["overrides"] + subtree_overrides  # type: ignore
        else:
            log.error("Override variable detected but should be of type list, not: {}"
                      .format(type(config["overrides"])))

    # Auto-override subtree variables in configuration
    for override in subtree_overrides:
        overrides_func(config, override)
        log.debug(f"Subtree overrides variable applied: {override}")

    # Override variables
    for override in var_overrides:
        key, value = override.split("=")
        if key in config:
            if type(config.as_dict()[key]) is list:
                value = value.split(",")
            log.debug(f"Override variable applied: {key}: {value}")
            config.update({key: value})

    return config
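# Hedged usage sketch for the no-argument form above: rootdir, test_path, and
# test_name are inferred from the caller's stack frame, and the override
# strings illustrate the three forms partitioned by the parsing logic (file,
# subtree, and key=value). Names and values are placeholders.
conf = generate_conf(
    params=["variantA"],
    overrides=[
        "extra_conf.yaml",   # file override (supported extension)
        "nightly",           # subtree override (no '=' and no extension)
        "logDir=/tmp/logs",  # key=value override
    ],
    machine="x86",
)
print(conf["wsDir"])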
def setDefault(self):
    config.config["PlayerVersion"] = self.Info["TagName"]
    config.update()
    self.update.emit()
parser = argparse.ArgumentParser()
parser.add_argument('--c', default=None, type=str)
parser.add_argument('--device', default="cpu", type=str)
args = parser.parse_args()

from config import config

if args.c is not None:
    with open(args.c, 'r') as fh:
        specified = json.load(fh)
    for key in config:
        if key not in specified:
            print("{} not specified, defaulting to: {}".format(key, config[key]))
    config.update(specified)

data = signalset.SignalSet(data=config["data_folder"],
                           all_in_memory=False,
                           norm_factor=config["signal_normalization"])
data.seg_length = config["segment_length"]

if args.device == "gpu":
    device = torch.device("cuda:0")
else:
    device = torch.device(args.device)

if config["model"] == "csn":
    net = csn.ConvSparseNet(n_kernel=config["n_kernel"],
                            lam=config["sparseness_parameter"],
                            initialization="minirandom",
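# Hedged sketch of a JSON file passed via --c above: the keys mirror the
# config reads in the snippet, while the values are illustrative placeholders,
# not defaults from the source.
#
# example_config.json:
# {
#     "data_folder": "data/signals",
#     "signal_normalization": 1.0,
#     "segment_length": 10000,
#     "model": "csn",
#     "n_kernel": 32,
#     "sparseness_parameter": 0.1
# }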