def __init__(self, config_path='config.ini'):
    """Initialize from an INI config file, creating a default one if absent.

    Sets up logging verbosity, loads the replica DB named by the config
    (resolved relative to the config file's directory), and instantiates
    the configured Reaction class.
    """
    # Load the config file; setup_config(create=True) creates a default
    # when the file is missing.
    if not os.path.exists(config_path):
        # Lazy %-args: formatting only happens if the record is emitted.
        logger.warning(
            'Config file %s not found so default will be created', config_path)
    # NOTE(review): reconstructed from a collapsed paste — setup_config is
    # assumed unconditional (the warning text implies it handles creation);
    # confirm against the original file's indentation.
    self.config = setup_config(config_path, create=True)
    if self.config['debug']:
        logger.setLevel(logging.DEBUG)
        logger.debug('Debug mode is on!')
    else:
        logger.setLevel(logging.WARNING)
    # Load the replica DB file (path from config, relative to the config file)
    replicadb_path = os.path.join(
        os.path.dirname(self.config.filename), self.config['replicadb'])
    self.replicadb = setup_replicadb(replicadb_path, create=True)
    self.load()
    # Instantiate the Reaction class named in the config and validate it
    # against the config.
    # TODO: improve this! globals() lookup is fragile — a typo in the
    # config raises KeyError here.
    self.reaction = globals()[self.config['reaction']['type']](self.config)
def make_request(
        endpoint="", params=None, sub_res="", user="******", leader=False):
    """Generic request wrapper for routing all requests to api.

    BUGFIX: the original signature used a mutable default (``params={}``)
    and then mutated it (``params["api_key"] = ...``), so the API key and
    any other mutations leaked between calls. We now default to None and
    copy any caller-supplied dict before mutating.
    """
    # Copy so neither the shared default nor the caller's dict is mutated.
    params = dict(params) if params else {}
    print("Requesting data...")
    API_KEY = config.get_config('api_key') or config.setup_config()
    # make the request parameters and endpoint ready
    params["api_key"] = API_KEY
    if not leader:
        user_type = 'users/' + user
        # Translate friendly endpoint names to real API paths.
        if endpoint in resource_endpoints:
            endpoint = resource_endpoints[endpoint]
        try:
            r = requests.get(
                BASE_URL + user_type + endpoint + sub_res, params=params)
            if r.status_code == 200:
                print(pretty_output(json.dumps(r.json())))
            elif r.status_code == 403:
                print("Whoops! Looks like we can't show that.")
            else:
                print("We are having some problem. Make sure you have everything correct.")
        except TypeError:
            print("whoops")
    else:
        # Leaderboard requests ignore user/endpoint routing entirely.
        r = requests.get(BASE_URL + 'leaders')
        print(pretty_output(json.dumps(r.json())))
import logging

from aiohttp import web

from base import setup_database, setup_email, setup_report, setup_task
from config import setup_config
from model import setup_model
from routes import setup_routes, setup_middleware

# Verbose root logger: NOTSET lets every record through.
logging.basicConfig(
    format='%(levelname)s: %(asctime)s [%(pathname)s:%(lineno)d] %(message)s',
    level=logging.NOTSET)

app = web.Application()

# Order matters: later setup steps read state installed by earlier ones.
setup_config(app)
setup_routes(app)
setup_middleware(app)
setup_task(app)
setup_database(app)  # depends on config
setup_model(app)     # depends on database, config
setup_email(app)     # depends on config
setup_report(app)    # depends on config, database, email

host = app['config']['server']['host']
port = int(app['config']['server']['port'])
web.run_app(app, host=host, port=port)
from os import path, getenv
from subprocess import run, STDOUT, PIPE
from sys import exit
from tempfile import gettempdir
from time import sleep

from config import setup_config
from utils import checkout_repo, log

CONFIG = setup_config()


def main():
    """Build the `go run` command line for the Meson client integration test."""
    # The Ethereum key is mandatory; bail out early with a logged error.
    if not getenv("TEST_PKS_ETHEREUM"):
        log(
            "Ethereum private key not set. Set with TEST_PKS_ETHEREUM environment variables",
            True, True)
        exit(1)

    repoPath = path.join(gettempdir(), "meson-client")
    confDir = path.join(gettempdir(), "meson-testnet")
    checkout_repo(
        repoPath,
        "https://github.com/hashcloak/Meson-client",
        CONFIG["TEST"]["CLIENTCOMMIT"])

    # Warped-epoch build flags shorten katzenpost epochs for fast test runs.
    warpedBuildFlags = '-ldflags "-X github.com/katzenpost/core/epochtime.WarpedEpoch=true -X github.com/katzenpost/server/internal/pki.WarpedEpoch=true"'
    cmd = "go run {warped} {testGo} -c {client} -k {currency} -pk {pk}".format(
        warped=warpedBuildFlags if CONFIG["WARPED"] else "",
        testGo=path.join(repoPath, "integration", "tests.go"),
        client=path.join(confDir, "client.toml"),
        currency=path.join(confDir, "provider-0", "currency.toml"),
        pk=CONFIG["TEST"]["PKS"]["ETHEREUM"])
    # NOTE(review): chunk is truncated here — `cmd` is presumably executed
    # (e.g. via run()) in the part of the file not visible in this view.
# distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo.config import cfg from neutron.plugins.ml2 import driver_api as api from dcclient.dcclient import Manager import config config.setup_config() class DatacomDriver(api.MechanismDriver): """ """ def __init__(self): pass def initialize(self): self.dcclient = Manager() def create_network_precommit(self, context): """Within transaction.""" pass def create_network_postcommit(self, context):
parser.add_argument('--lr', default=1e-3, type=float, help='learning rate')
args = parser.parse_args()

assert args.data_dir is not None

# benchmark=True trades determinism for cuDNN autotuned kernels.
torch.backends.cudnn.benchmark = True
torch.backends.cudnn.deterministic = False

conf = setup_config(input_rows=args.input_rows,
                    input_cols=args.input_cols,
                    input_deps=args.input_deps,
                    batch_size=args.batch_size,
                    verbose=args.verbose,
                    cls_classes=args.cls_classes,
                    nb_instances=args.nb_instances,
                    nb_val_instances=args.nb_val_instances,
                    nb_multires_patch=args.nb_multires_patch,
                    lambda_rec=args.lambda_rec,
                    lambda_cls=args.lambda_cls,
                    DATA_DIR=args.data_dir,
                    exp_choice=args.exp)
conf.display()
print("torch = {}".format(torch.__version__), file=conf.log_writter)

x_train = []
y_train = []
# Classes are stored in parts of 50; walk every part and every
# multi-resolution fold.
for i in range(int(math.ceil(conf.cls_classes / 50))):
    print("data part:", i)
    for fold in range(conf.nb_multires_patch):
        print("fold:", fold)
# limitations under the License.
from oslo_config import cfg

from neutron.plugins.ml2 import driver_api as api
import neutron.db.api as db

from dcclient.dcclient import Manager
from dcclient.xml_manager.data_structures import Pbits
from db.models import DatacomNetwork, DatacomPort

from sqlalchemy import func
from sqlalchemy import exists

import config

# Register/load the driver's oslo config options at import time.
config.setup_config()


class DatacomDriver(api.MechanismDriver):
    """ """

    def __init__(self):
        self.dcclient = Manager()

    def initialize(self):
        self.dcclient.setup()
        # Rebuild switch state from the DB: query_bd populates
        # self.networks / self.interfaces, then push them in bulk.
        session = db.get_session()
        self.query_bd(session)
        self.dcclient.create_network_bulk(self.networks,
                                          interfaces=self.interfaces)
(options, args) = parser.parse_args() assert options.decoder_block_type in ['transpose', 'upsampling'] assert options.data_dir is not None seed = 1 random.seed(seed) config = setup_config(decoder_block_type=options.decoder_block_type, input_rows=options.input_rows, input_cols=options.input_cols, input_deps=options.input_deps, batch_size=options.batch_size, verbose=options.verbose, cls_classes=options.cls_classes, nb_instances=options.nb_instances, nb_multires_patch=options.nb_multires_patch, weights=options.weights, unet_weights=options.unet_weights, encoder_weights=options.encoder_weights, lambda_rec=options.lambda_rec, lambda_cls=options.lambda_cls, DATA_DIR=options.data_dir) config.display() model = ynet_model_3d( (1, config.input_rows, config.input_cols, config.input_deps), batch_normalization=True, unet_weights=config.unet_weights, encoder_weights=config.encoder_weights,