def get_app():
    # app = Flask(__name__)
    app = Flask('pyfem')
    app.config.from_object(Config)

    # Set the environment variable PYFEM_SETTINGS=testing_settings to
    # automatically use a '-test' version of the database.
    if os.environ.get('PYFEM_SETTINGS') == 'testing_settings':
        app.config['MONGODB_SETTINGS'] = dict(db=app.config['MONGO_DBNAME'] + '-test')

    app.secret_key = app.config['SECRET_KEY']
    app.me = MongoEngine(app)
    app.pymongo = _get_db()

    # app.jinja_env.add_extension('util.Markdown2Extension')
    # app.jinja_env.filters['slugify'] = slugify
    # app.jinja_env.filters['timesince'] = timesince
    # app.jinja_env.filters['timeuntil'] = timeuntil
    # app.jinja_env.filters['jsonencode'] = jsonencode
    # app.jinja_env.globals['newrelic_head'] = newrelic_head
    # app.jinja_env.globals['newrelic_foot'] = newrelic_foot

    # if not app.config.get('TEMPLATE_DEBUG', True):
    #     compiled_templates = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'compiled_templates')
    #     compiled_files = path.path(compiled_templates).files()
    #     if len(compiled_files) <= 1:
    #         app.jinja_env.compile_templates(compiled_templates, zip=None, py_compile=True)
    #     app.jinja_env.loader = ModuleLoader(compiled_templates)

    configure_logging(app)
    return app
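# Usage sketch (illustrative, not from the original module): the factory above
# requires an importable Config object exposing at least MONGO_DBNAME and
# SECRET_KEY. Setting PYFEM_SETTINGS=testing_settings before calling get_app()
# points the app at the '-test' database.
import os

os.environ['PYFEM_SETTINGS'] = 'testing_settings'
app = get_app()

if __name__ == '__main__':
    app.run(debug=True)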
def main():
    """
    Main function - configures and runs the plugin.
    """
    # Read the network config file from stdin.
    config_raw = ''.join(sys.stdin.readlines()).replace('\n', '')
    network_config = json.loads(config_raw).copy()

    # Get the log level from the config file, default to INFO.
    log_level = network_config.get(LOG_LEVEL_KEY, "INFO").upper()

    # Configure logging.
    configure_logging(_log, LOG_FILENAME, log_level=log_level)
    _log.debug("Loaded network config:\n%s",
               json.dumps(network_config, indent=2))

    # Get the etcd authority from the config file and set the
    # corresponding environment variable.
    etcd_authority = network_config.get(ETCD_AUTHORITY_KEY,
                                        DEFAULT_ETCD_AUTHORITY)
    os.environ[ETCD_AUTHORITY_ENV] = etcd_authority
    _log.debug("Using ETCD_AUTHORITY=%s", etcd_authority)

    # Get the CNI environment.
    env = os.environ.copy()
    _log.debug("Loaded environment:\n%s", json.dumps(env, indent=2))

    # Create the plugin, passing in the network config, environment,
    # and the Calico configuration options.
    plugin = CniPlugin(network_config, env)

    # Call the CNI plugin.
    sys.exit(plugin.execute())
def main():
    # Read config file from stdin.
    _log.debug("Reading config from stdin")
    conf_raw = ''.join(sys.stdin.readlines()).replace('\n', '')
    config = json.loads(conf_raw)

    # Get the log level from the config file, default to INFO.
    log_level = config.get(LOG_LEVEL_KEY, "INFO").upper()

    # Set up the logger. We log to file and to stderr based on the
    # log level provided in the network configuration file.
    configure_logging(_log, LOG_FILENAME,
                      log_level=log_level,
                      stderr_level=logging.INFO)

    # Get a copy of the environment.
    env = os.environ.copy()

    # Create the plugin instance.
    plugin = IpamPlugin(config, env)

    try:
        # Execute IPAM.
        plugin.execute()
    except Exception as e:
        _log.exception("Unhandled exception")
        _exit_on_error(ERR_CODE_UNHANDLED,
                       message="Unhandled Exception",
                       details=e.message)
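# Invocation sketch (illustrative, not from the original source): CNI plugins
# like the two mains above read the network config JSON from stdin and take
# runtime parameters from the standard CNI_* environment variables. The binary
# name and config values below are assumptions.
import json
import os
import subprocess

network_config = {
    "name": "example-net",
    "type": "calico",
    "log_level": "DEBUG",
    "etcd_authority": "127.0.0.1:2379",
}
env = dict(os.environ,
           CNI_COMMAND="ADD",
           CNI_CONTAINERID="abc123",
           CNI_NETNS="/var/run/netns/abc123",
           CNI_IFNAME="eth0",
           CNI_PATH="/opt/cni/bin")
proc = subprocess.Popen(["./calico"], stdin=subprocess.PIPE, env=env)
proc.communicate(json.dumps(network_config).encode())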
def main(argv):
    # Set up logging.
    util.create_dir("logs")
    util.configure_logging("gzip", "logs/gzip.log")

    # Check args.
    if len(argv) != 4:
        print "Usage: {} <c/d> <source> <target>".format(argv[0])
        return -1

    # Check that the first argument is valid.
    if argv[1] != "c" and argv[1] != "d":
        logging.error("First argument %s should be 'c' or 'd'", repr(argv[1]))
        return -1

    # Check that the source exists and the target does not.
    source = argv[2]
    target = argv[3]
    if not os.path.isfile(source):
        logging.error("Source %s does not exist", repr(source))
        return -1
    if os.path.isfile(target) and os.path.getsize(target) > 0:
        logging.error("Target %s already exists and is non-empty", repr(target))
        return -1

    # Compress/decompress.
    if argv[1] == "c":
        compress(source, target)
    else:
        decompress(source, target)
    return 0
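# Usage sketch (hypothetical file names): main() takes an argv-style list, so
# it can be driven directly from a script or from the command line.
if __name__ == "__main__":
    sys.exit(main(["gzip.py", "c", "input.txt", "output.gz"]))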
def main() -> int:
    util.configure_logging()
    cfg = parse_args_to_config()

    if cfg.is_source_package:
        version = None
    else:
        versions = repos.get_all_versions(
            os.path.join(cfg.source_dir, 'versions.yml'))
        if cfg.revision not in versions:
            logging.error('Invalid revision %s', cfg.revision)
            return 1
        version = versions[cfg.revision]

    try:
        if (cfg.host_toolchain.kind == config.ToolchainKind.MINGW
                and cfg.ask_copy_runtime_dlls):
            res = ask_about_runtime_dlls(cfg)
            if res is None:
                return 1
            cfg.copy_runtime_dlls = res

        if not cfg.skip_checks:
            check.check_prerequisites(cfg)

        def do_prepare_repositories():
            assert version is not None
            prepare_repositories(cfg, version)

        run_or_skip(cfg, Action.PREPARE, do_prepare_repositories,
                    'source code checkout')
        build_all(cfg)
        run_tests(cfg)

        def do_source_package():
            version.poplulate_commits(cfg.repos_dir)
            package.create_source_package(cfg, version)

        run_or_skip(cfg, Action.PACKAGE_SRC, do_source_package,
                    'source package')

        def do_binary_package():
            if version is not None:
                version.poplulate_commits(cfg.repos_dir)
            package.create_binary_package(cfg, version)

        run_or_skip(cfg, Action.PACKAGE, do_binary_package, 'binary package')
    except util.ToolchainBuildError:
        # By this time the error must have already been logged.
        return 1
    except KeyboardInterrupt:
        sys.stdout.write('\n')
        logging.info('Build interrupted by user')
        return 1
    return 0
def __init__(self, methodName='runTest'):
    super().__init__(methodName)
    util.configure_logging()
    # Adjacency matrix.
    self.A = np.array([[1, 1, 0, 0, 1, 0],
                       [1, 0, 1, 0, 1, 0],
                       [0, 1, 0, 1, 0, 0],
                       [0, 0, 1, 0, 1, 1],
                       [1, 1, 0, 1, 0, 0],
                       [0, 0, 0, 1, 0, 0]])
    # Degree matrix.
    self.D = np.array([[3, 0, 0, 0, 0, 0],
                       [0, 3, 0, 0, 0, 0],
                       [0, 0, 2, 0, 0, 0],
                       [0, 0, 0, 3, 0, 0],
                       [0, 0, 0, 0, 3, 0],
                       [0, 0, 0, 0, 0, 1]])
    # Laplacian matrix; equals D - A for the fixtures above.
    self.L = np.array([[2, -1, 0, 0, -1, 0],
                       [-1, 3, -1, 0, -1, 0],
                       [0, -1, 2, -1, 0, 0],
                       [0, 0, -1, 3, -1, -1],
                       [-1, -1, 0, -1, 3, 0],
                       [0, 0, 0, -1, 0, 1]])
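# Consistency check (illustrative, not part of the original fixture): the
# matrices above satisfy D = diag(row sums of A) and L = D - A, which the
# helper below verifies.
import numpy as np

def check_laplacian(A, D, L):
    np.testing.assert_array_equal(D, np.diag(A.sum(axis=1)))
    np.testing.assert_array_equal(L, D - A)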
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.etalon = {
        'news_2cl_1': (97.5, 0.95, 97.8, 0.96, 91.8, 0.85, 94.5, 0.90),
        'news_2cl_2': (90.6, 0.83, 91.5, 0.84, 81.5, 0.70, 93.0, 0.87),
        'news_2cl_3': (95.5, 0.91, 96.0, 0.92, 94.8, 0.90, 95.7, 0.92),
        'news_3cl_1': (93.9, 0.91, 94.5, 0.92, 89.2, 0.85, 92.7, 0.90),
        'news_3cl_2': (93.6, 0.91, 93.5, 0.91, 86.7, 0.82, 92.0, 0.89),
        'news_3cl_3': (93.9, 0.91, 92.8, 0.90, 87.4, 0.83, 81.7, 0.78),
        'news_5cl_1': (83.0, 0.80, 85.4, 0.83, 80.4, 0.79, 76.7, 0.78),
        'news_5cl_2': (74.8, 0.77, 78.4, 0.79, 64.4, 0.69, 67.7, 0.72),
        'news_5cl_3': (76.4, 0.75, 80.1, 0.79, 64.9, 0.69, 64.0, 0.72),
    }
def main():
    BUCKET_NAME = 'sandbox-226501-mlengine'
    logger = configure_logging()

    logger.info("caching data from GCP bucket...")
    cache_training_data(BUCKET_NAME,
                        train_filename="titanic_training_split.csv",
                        val_filename="titanic_val_split.csv",
                        train_cache_name="titanic_train_temp.csv",
                        val_cache_name="titanic_val_temp.csv")

    data = load_data()
    logger.info("loaded data into dataframes")

    model = build_model()
    logger.info("loaded model")

    logger.info("beginning model fit")
    model.fit(data['X_train'], data['y_train'])
    logger.info("model has been fit")

    y_hat = model.predict_proba(data['X_val'])[:, 1]
    score = roc_auc_score(data['y_val'], y_hat)
    logger.info("Model AUC: {}".format(score))

    save_model_to_cloud(model, BUCKET_NAME)
    logger.info("model saved to GCP")
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.etalon = {
        # CCT, FE, logFor, RSP, SCT, SP
        'football':   (0.7928, 0.9061, 0.9028, 0.9092, 0.8115, 0.8575),
        'news_2cl_1': (0.7944, 0.8050, 0.8381, 0.7966, 0.8174, 0.6540),
        'news_2cl_2': (0.5819, 0.5909, 0.5844, 0.5797, 0.5523, 0.5159),
        'news_2cl_3': (0.7577, 0.8107, 0.7482, 0.7962, 0.7857, 0.8592),
        'news_3cl_1': (0.7785, 0.7810, 0.7530, 0.7810, 0.7730, 0.7426),
        'news_3cl_2': (0.7616, 0.7968, 0.7585, 0.7761, 0.7282, 0.6246),
        'news_3cl_3': (0.7455, 0.7707, 0.7487, 0.7300, 0.7627, 0.7203),
        'news_5cl_1': (0.6701, 0.6922, 0.6143, 0.7078, 0.6658, 0.6815),
        'news_5cl_2': (0.6177, 0.6401, 0.5977, 0.6243, 0.6154, 0.5970),
        'news_5cl_3': (0.6269, 0.6065, 0.5729, 0.5750, 0.5712, 0.4801),
        'polblogs':   (0.5525, 0.5813, 0.5811, 0.5815, 0.5757, 0.5605),
        'zachary':    (1.0000, 1.0000, 1.0000, 1.0000, 1.0000, 1.0000),
    }
def main():
    # CLI args checking.
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="action")
    parser_c = subparsers.add_parser("c")
    parser_c.add_argument("source", type=str)
    parser_c.add_argument("target", type=str)
    parser_c.add_argument("reference", type=str)
    parser_c.add_argument("--inner", type=str)
    parser_c.add_argument("--delta", type=str)
    parser_c.add_argument("--nointra", action="store_true")
    parser_d = subparsers.add_parser("d")
    parser_d.add_argument("source", type=str)
    parser_d.add_argument("target", type=str)
    args = parser.parse_args()

    # Create the method name.
    if args.action == "c":
        method = create_method_name(args.nointra, args.delta, args.inner)
    elif args.action == "d":
        with open(args.source, "rb") as f:
            method = util.parse_header(f)[1]

    # Set up logging.
    util.create_dir("logs")
    util.configure_logging(method, "logs/{}.log".format(method))

    # Check that the source (and reference) exist and the target does not.
    if not os.path.isfile(args.source):
        logging.error("Source %s does not exist", repr(args.source))
        return -1
    if os.path.isfile(args.target) and os.path.getsize(args.target) > 0:
        logging.error("Target %s already exists and is non-empty",
                      repr(args.target))
        return -1
    if args.action == "c" and not os.path.isfile(args.reference):
        logging.error("Reference %s does not exist", repr(args.reference))
        return -1

    # Compress/decompress.
    if args.action == "c":
        return compress(args.source, args.target, args.reference,
                        args.nointra, args.delta, args.inner)
    elif args.action == "d":
        return decompress(args.source, args.target)
def main():
    conf = Config("rs-issue")
    conf.add_option("-l", "--limit", dest="limit", action="store", type="int",
                    help="number of results to return.")
    conf.add_option("-o", "--offset", dest="offset", action="store", type="int",
                    help="skip this number of records.")
    conf.add_option("-s", "--sort", dest="sort", action="store",
                    help="sort the results on column.")
    conf.add_option("--status-id", dest="qs_status_id",
                    help="the name of the status state, e.g. closed.")
    conf.add_option("--project-id", dest="qs_project_id",
                    help="the name of the project, e.g. rc-support.")
    conf.add_option("--tracker-id", dest="qs_tracker_id",
                    help="the name of the tracker, e.g. support.")
    opts, args = conf.parse_args()

    # Here would be a good place to check what came in on the command
    # line and call optp.error("Useful message") to exit if all is not
    # well.
    util.configure_logging(opts.verbose)

    query = {}
    for qs in dir(opts):
        if qs.startswith("qs_"):
            name = qs[3:]
            query[name] = getattr(opts, qs)

    client = IssueClient(opts.url, opts.api_key)
    result = client.query(query)

    from prettytable import PrettyTable
    x = PrettyTable(field_names=["ID", "Tracker", "Status", "Priority",
                                 "Title", "Assigned To"])
    for row in result["issues"]:
        if "assigned_to" in row:
            assigned_to = row["assigned_to"]["name"]
        else:
            assigned_to = ""
        x.add_row([
            row["id"],
            row["tracker"]["name"],
            row["status"]["name"],
            row["priority"]["name"],
            row["subject"],
            assigned_to,
        ])
    print str(x)
def main() -> int:
    util.configure_logging()
    cfg = parse_args_to_config()
    versions = repos.get_all_versions(
        os.path.join(cfg.source_dir, 'versions.yml'))
    if cfg.revision not in versions:
        logging.error('Invalid revision %s', cfg.revision)
        return 1
    try:
        if (cfg.host_toolchain.kind == config.ToolchainKind.MINGW
                and cfg.ask_copy_runtime_dlls):
            res = ask_about_runtime_dlls(cfg)
            if res is None:
                return 1
            cfg.copy_runtime_dlls = res
        if not cfg.skip_checks:
            check.check_prerequisites(cfg)
        run_or_skip(cfg, Action.PREPARE,
                    lambda: prepare_repositories(cfg, versions[cfg.revision]),
                    'source code checkout')
        build_all(cfg)
        run_tests(cfg)

        def do_package():
            tarball.write_version_file(cfg)
            tarball.package_toolchain(cfg)

        run_or_skip(cfg, Action.PACKAGE, do_package, 'packaging')
    except util.ToolchainBuildError:
        # By this time the error must have already been logged.
        return 1
    except KeyboardInterrupt:
        sys.stdout.write('\n')
        logging.info('Build interrupted by user')
        return 1
    return 0
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.graph = sample.triangle_graph
def main():
    util.configure_logging()
    parser = argparse.ArgumentParser(
        description='Manage LLVM Embedded Toolchain for Arm checkout',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='\n'
               'Actions:\n'
               '  list    list available versions.\n'
               '  status  report the status of each checkout.\n'
               '  check   check the state of each checkout matches the '
               'requested toolchain revision\n'
               '  clone   checkout each repository as needed for the '
               'requested toolchain revision\n'
               '  freeze  print a YAML description of the current '
               'repositories state\n')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Increase verbosity level.')
    parser.add_argument('-r', '--revision', metavar='version', default='HEAD',
                        help='Select the toolchain version to use.')
    parser.add_argument('--patches', metavar='DIR', default='patches',
                        help='Select where the patch files are checked out.')
    parser.add_argument('--repositories', metavar='DIR', default='repos',
                        help='Select where the modules are checked out.')
    parser.add_argument(
        '--versions', metavar='FILE', default='versions.yml',
        help='Select the database of available toolchain revisions.')
    parser.add_argument('action', nargs=1,
                        choices=['list', 'status', 'check', 'clone', 'freeze'],
                        help='Action to perform')
    args = parser.parse_args()
    args.action = args.action[0]

    ret_val = 0
    if args.action == 'status':
        ret_val = print_repositories_status(args.repositories)
        sys.exit(ret_val)
    elif args.action == 'freeze':
        ret_val = freeze_repositories(args.repositories, args.revision)
        sys.exit(ret_val)

    versions = get_all_versions(args.versions)

    # Make sure the requested version actually exists.
    if args.revision not in versions:
        die('revision "{}" is unknown'.format(args.revision))

    if args.action == 'list':
        ret_val = print_versions(versions, args.verbose)
    elif args.action == 'check':
        ret_val = check_repositories_status(args.repositories,
                                            versions[args.revision])
    elif args.action == 'clone':
        ret_val = clone_repositories(args.repositories,
                                     versions[args.revision], args.patches)
    else:
        die('unsupported command: "{}"'.format(args.action))
    sys.exit(ret_val)
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self.graph = sample.chain_graph
    util.configure_logging()
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.graph = sample.diploma_matrix
from os import environ

from flask import (
    Flask,
    make_response,
    redirect,
    render_template,
    request,
    url_for,
)
from flask_oidc import OpenIDConnect
from okta import UsersClient
from dotenv import load_dotenv

from util import (generate_secrets_json, authorize, configure_logging,
                  format_json, parse_jwt_payload, get_data, is_premium_user)

load_dotenv()
generate_secrets_json()
configure_logging()

DEFAULT_SCOPES = ['openid', 'email', 'profile', 'promos:read']

app = Flask(__name__)
app.config['DEBUG'] = True
app.config['OIDC_CLIENT_SECRETS'] = 'client_secrets.json'
app.config['OIDC_COOKIE_SECURE'] = False
app.config['OIDC_CALLBACK_ROUTE'] = '/oidc/callback'
app.config['OIDC_SCOPES'] = DEFAULT_SCOPES
app.config['SECRET_KEY'] = environ.get('SECRET_KEY')
app.config['OIDC_ID_TOKEN_COOKIE_NAME'] = 'id_token'

oidc = OpenIDConnect(app)
okta_client = UsersClient(environ.get('OKTA_ORG_URL'),
                          environ.get('OKTA_API_TOKEN'))
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    graph, info = dataset.news_2cl_1
    self.graph, self.y_true = graph[0]
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.get_CT = lambda A: SPCT(A).get_D(0)
    self.get_SP = lambda A: SPCT(A).get_D(1)
import logging
import os

from util import configure_logging
from docker import Client
from docker.errors import APIError

# Docker config.
DOCKER_HOST_ENV = "DOCKER_HOST"
DOCKER_SOCKET = "unix://var/run/docker.sock"
DOCKER_VERSION = "1.16"

LOG_FILENAME = "cni.log"
_log = logging.getLogger(__name__)
configure_logging(_log, LOG_FILENAME)


class BaseContainerEngine(object):
    """
    Abstract base class for interacting with containerizers.
    """
    def uses_host_networking(self, container_id):
        """
        Returns whether this container is using host networking.
        """
        raise NotImplementedError()


class DefaultEngine(BaseContainerEngine):
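    # The original snippet is truncated at the class header above. What
    # follows is an illustrative sketch of how a default Docker engine might
    # implement uses_host_networking() with the docker-py 1.x Client, not the
    # original implementation.
    def uses_host_networking(self, container_id):
        # Connect to the Docker daemon, preferring DOCKER_HOST if set.
        client = Client(base_url=os.environ.get(DOCKER_HOST_ENV, DOCKER_SOCKET),
                        version=DOCKER_VERSION)
        try:
            info = client.inspect_container(container_id)
        except APIError:
            _log.exception("Failed to inspect container %s", container_id)
            return False
        # A container shares the host network namespace when its
        # HostConfig.NetworkMode is "host".
        return info.get("HostConfig", {}).get("NetworkMode") == "host"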
def __init__(self, *args, **kwargs):
    super().__init__(0.002, *args, **kwargs)  # error bars in paper: 0.002
    util.configure_logging()
DOTFILES_ROOT = os.path.dirname(os.path.realpath(__file__))
SUBMODULES = os.path.join(DOTFILES_ROOT, 'submodules')
COMPILED = os.path.join(DOTFILES_ROOT, 'compiled')

try:
    from pathlib import Path
except ImportError:
    from pathlib2 import Path

try:
    from bunch import Bunch
except ImportError:
    from ecn.util.bunch import Bunch

log = configure_logging()


def get_host():
    hostfile = Path.home() / '.config' / 'build_target'
    with open(str(hostfile)) as fl:
        return fl.readlines()[0].rstrip()


def get_task():
    host = Host(get_host())
    return Bunch(
        host=host,
        targets=CONFIGS[host],
    )
def __init__(self, atol, *args, **kwargs):
    super().__init__(*args, **kwargs)
    util.configure_logging()
    self.atol = atol
    self.n_params = 200