def setup_console():
    """Prepare the runtime configuration for console use.

    Augments the version string with the latest git revision, registers and
    parses command-line arguments (the parser is fed EC2 user data), enables
    debug logging if requested, and validates the resulting configuration.
    """
    # Try to add the last git revision to the version-string
    config.add_git_rev_to_version()
    _add_commandline_arguments()
    _parse_commandline_arguments(_get_ec2_userdata())
    # Check if we need to set all loggers to debug mode
    config.update_debug_mode()
    # After setting up and parsing commandline arguments, make sure this is a valid config
    config.validate_config()
def main():
    """Entry point: parse CLI options, load and validate the YAML config,
    then resolve the requested record reference via a console consumer.
    """
    parser = argparse.ArgumentParser(description='Fake Maxwell data into Kafka.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--schema', metavar='SCHEMA', type=str, required=True,
                        help='schema to produce')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=True,
                        help='database to produce')
    parser.add_argument('--table', metavar='TABLE', type=str, required=True,
                        help='table to generate')
    parser.add_argument('--id', metavar='ID', type=int, required=True,
                        help='id to generate')
    parser.add_argument('--partition-count', metavar='PARTITION_COUNT', type=int,
                        default=12, required=False,
                        help='number of partitions (default 12)')
    args = parser.parse_args()
    # FIX: use a context manager so the file handle is closed, and safe_load so
    # a crafted config cannot execute arbitrary code (yaml.load without an
    # explicit Loader is unsafe and deprecated).
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    f_consume = generate_console_consumer(args)
    resolve_ref(f_consume, args, config)
def load(basepath=None):
    """Discover and instantiate plugins and drivers found under *basepath*
    (the current directory when omitted).

    Returns True when the configure step discovered drivers, plugins, or
    both that were not previously present in the configuration.
    """
    global plugins, drivers
    if basepath is None:
        basepath = ''
    # Remember the pre-discovery section sizes so newly discovered
    # components can be detected afterwards.
    driver_section = config.conf["drivers"]
    plugin_section = config.conf["plugins"]
    initial_counts = (len(driver_section), len(plugin_section))
    # Discover and configure drivers first, then plugins.
    for component_kind in ('driver', 'plugin'):
        _configure(component_kind, _discover(component_kind, basepath=basepath))
    config.validate_config()
    drivers = _loadtype('driver', basepath)
    plugins = _loadtype('plugin', basepath)
    return (len(driver_section), len(plugin_section)) != initial_counts
def main():
    """Entry point: parse CLI options, load and validate the YAML config,
    then produce fake Maxwell messages to the console (-c) or to Kafka.
    """
    parser = argparse.ArgumentParser(
        description='Fake Maxwell data into Kafka.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--schema', metavar='SCHEMA', type=str, required=False,
                        help='schema to produce')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=False,
                        help='database to produce')
    parser.add_argument('--table', metavar='TABLE', type=str, required=False,
                        help='table to produce')
    parser.add_argument('-c', action='store_true', required=False,
                        help='produce message to console')
    parser.add_argument(
        '--partition-count', metavar='PARTITION_COUNT', type=int, default=12,
        required=False,
        help='number of partitions (default 12, only take effect when -c specified)')
    args = parser.parse_args()
    # FIX: safe_load prevents arbitrary code execution from a crafted config
    # file; the context manager guarantees the handle is closed.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    try:
        f_consume = generate_console_consumer(args) if args.c \
            else generate_kafka_producer_consumer(config)
        produce_messages(f_consume, args, config)
    except KeyboardInterrupt:
        # Exit with a non-zero status on Ctrl-C, without a traceback.
        sys.exit(1)
def main():  # pragma: no cover
    """Validate configuration, register the webhook endpoint and start the
    web application on the configured port."""
    config.validate_config()
    config.log.init_log()
    # Command-line options (we only need the listening port below).
    args = config.args.get_parser().parse_args()
    # The webhook must be registered before serving traffic; bail out if not.
    event_loop = asyncio.get_event_loop()
    if not event_loop.run_until_complete(register_webhook()):
        logger.error('could not register webhook')
        return
    logger.info('app started on port %s', args.port)
    web.run_app(create_app(), host='0.0.0.0', port=args.port)
def main():
    """Entry point: select configuration from APPLICATION_ENV, initialize
    logging and Redis storage, then dispatch the requested CLI command.

    Exits with status 1 on any configuration or usage error.
    """
    application_environment = os.getenv('APPLICATION_ENV')
    if application_environment == 'development':
        application_config = DevelopmentConfig
    elif application_environment == 'production':
        application_config = ProductionConfig
    else:
        # FIX: the adjacent string literals previously concatenated to
        # "...should bedevelopment..." — a trailing space was missing.
        sys.stdout.write(
            'Application environment setup required: env APPLICATION_ENV should be '
            'development or production')
        sys.exit(1)
    try:
        validate_config(application_config)
    except ConfigError as e:
        sys.stdout.write(str(e))
        sys.exit(1)
    setup_logging()
    RedisStorage.initialize(**application_config.REDIS_SETTINGS)
    arg_parser = create_parser()
    args = arg_parser.parse_args()
    if args.command == 'populate_db':
        populate_db.run_command(
            application_config.QUIZ_QUESTIONS_DIRECTORY,
            application_config.DEFAULT_ENCODING,
            application_config.QUIZ_QUESTIONS_FILEPARSING_LIMIT,
        )
    elif args.command == 'run':
        if args.platform == 'telegram':
            run_telegram_bot.run_command(application_config.TELEGRAM_BOT_TOKEN)
        elif args.platform == 'vk':
            run_vk_bot.run_command(application_config.VK_GROUP_TOKEN)
        else:
            sys.stdout.write('Unknown command. Please refer for help.')
            sys.exit(1)
    else:
        sys.stdout.write('Unknown command. Please refer for help.')
        sys.exit(1)
def main():
    """Entry point: parse CLI options, load and validate the YAML config,
    then produce fake Maxwell messages to the console (-c) or to Kafka.
    """
    parser = argparse.ArgumentParser(description='Fake Maxwell data into Kafka.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--schema', metavar='SCHEMA', type=str, required=False,
                        help='schema to produce')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=False,
                        help='database to produce')
    parser.add_argument('--table', metavar='TABLE', type=str, required=False,
                        help='table to produce')
    parser.add_argument('-c', action='store_true', required=False,
                        help='produce message to console')
    parser.add_argument('--partition-count', metavar='PARTITION_COUNT', type=int,
                        default=12, required=False,
                        help='number of partitions (default 12, only take effect when -c specified)')
    args = parser.parse_args()
    # FIX: yaml.load without an explicit Loader is unsafe and deprecated;
    # safe_load plus a context manager closes the handle and blocks
    # arbitrary-object construction from the config file.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    try:
        f_consume = generate_console_consumer(args) if args.c \
            else generate_kafka_producer_consumer(config)
        produce_messages(f_consume, args, config)
    except KeyboardInterrupt:
        # Exit with a non-zero status on Ctrl-C, without a traceback.
        sys.exit(1)
def main():
    """Bootstrap entry point: parse CLI options, load and validate the YAML
    config, look up the table schema and emit messages to the chosen target.

    Raises:
        ValueError: if --target is not console, kafka or bruce.
    """
    parser = argparse.ArgumentParser(
        description='Maxwell faker for systems and load testing.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=True,
                        help='database to bootstrap')
    parser.add_argument('--table', metavar='TABLE', type=str, required=True,
                        help='table to bootstrap')
    parser.add_argument('--target', metavar='TARGET', type=str, required=True,
                        help='target system that messages will be output to')
    parser.add_argument('--partition-count', metavar='PARTITION_COUNT', type=int,
                        required=False, default=12,
                        help='number of partitions (default 12)')
    args = parser.parse_args()
    # FIX: safe_load avoids executing arbitrary YAML tags from the config
    # file, and the context manager closes the handle deterministically.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    schema = find_schema(config, args.database, args.table)
    if args.target == 'console':
        produce_to_console(schema, args, config)
    elif args.target == 'kafka':
        produce_to_kafka(schema, args, config)
    elif args.target == 'bruce':
        produce_to_bruce(schema, args, config)
    else:
        # ValueError is more precise than bare Exception; callers catching
        # Exception still match (backward compatible).
        raise ValueError('invalid target')
def main():
    """Entry point: parse CLI arguments, load and validate the YAML config,
    then resolve the requested record reference via a console consumer.
    """
    parser = argparse.ArgumentParser(
        description='Fake Maxwell data into Kafka.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--schema', metavar='SCHEMA', type=str, required=True,
                        help='schema to produce')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=True,
                        help='database to produce')
    parser.add_argument('--table', metavar='TABLE', type=str, required=True,
                        help='table to generate')
    parser.add_argument('--id', metavar='ID', type=int, required=True,
                        help='id to generate')
    parser.add_argument('--partition-count', metavar='PARTITION_COUNT', type=int,
                        default=12, required=False,
                        help='number of partitions (default 12)')
    args = parser.parse_args()
    # FIX: close the config file deterministically and avoid yaml.load
    # without an explicit Loader (unsafe and deprecated) — use safe_load.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    f_consume = generate_console_consumer(args)
    resolve_ref(f_consume, args, config)
def test_default_pages(self):
    """The 'pages' setting defaults to the markdown files found in docs_dir."""
    docs_dir = tempfile.mkdtemp()
    try:
        # Create two empty markdown sources that should be auto-discovered.
        for filename in ('index.md', 'about.md'):
            open(os.path.join(docs_dir, filename), 'w').close()
        conf = config.validate_config({
            'site_name': 'Example',
            'docs_dir': docs_dir
        })
        self.assertEqual(conf['pages'], ['index.md', 'about.md'])
    finally:
        # Always remove the scratch directory, even if the assertion fails.
        shutil.rmtree(docs_dir)
def main():
    """Bootstrap entry point: parse CLI options, load and validate the YAML
    config, look up the table schema and emit messages to the chosen target.

    Raises:
        ValueError: if --target is not console, kafka or bruce.
    """
    parser = argparse.ArgumentParser(description='Maxwell faker for systems and load testing.')
    parser.add_argument('--config', metavar='CONFIG', type=str, required=True,
                        help='path to yaml config file')
    parser.add_argument('--database', metavar='DATABASE', type=str, required=True,
                        help='database to bootstrap')
    parser.add_argument('--table', metavar='TABLE', type=str, required=True,
                        help='table to bootstrap')
    parser.add_argument('--target', metavar='TARGET', type=str, required=True,
                        help='target system that messages will be output to')
    parser.add_argument('--partition-count', metavar='PARTITION_COUNT', type=int,
                        required=False,
                        help='number of partitions (will read from kafka topic if not specified)')
    args = parser.parse_args()
    # FIX: safe_load avoids executing arbitrary YAML tags from the config
    # file, and the context manager closes the handle deterministically.
    with open(args.config) as config_file:
        config = yaml.safe_load(config_file)
    validate_config(config)
    schema = find_schema(config, args.database, args.table)
    if args.target == 'console':
        produce_to_console(schema, args, config)
    elif args.target == 'kafka':
        produce_to_kafka(schema, args, config)
    elif args.target == 'bruce':
        produce_to_bruce(schema, args, config)
    else:
        # ValueError is more precise than bare Exception; callers catching
        # Exception still match (backward compatible).
        raise ValueError('invalid target')
def config_get():
    """GET: serve the current config file from disk.

    Any other method: validate the JSON body and persist it as the new
    configuration, replying 400 with field/param/message details when the
    payload is invalid.
    """
    config_path = os.environ.get('CONFIG_FILE', 'config.json')
    if request.method == 'GET':
        return send_from_directory('.', config_path)
    if not request.json:
        abort(400, 'Missing data')
    try:
        validate_config(request.json)
    except InvalidConfigException as err:
        error_response = jsonify({
            "field": err.field,
            "param": err.param,
            "message": err.message,
        })
        error_response.status = '400'
        abort(error_response)
    # Persist the validated configuration, pretty-printed.
    with open(config_path, 'w') as config_file:
        config_file.write(dumps(request.json, indent=4))
    return Response(status=200)
def main():
    """Entry point: resolve the config file location, load and validate it,
    then run the media organiser (optionally with Pushover notifications).

    Raises:
        RuntimeError: if --config-file is given but empty.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--config-file',
        help='config file location (if not %s)' % DEFAULT_PARAMETERS_FILENAME
    )
    args = arg_parser.parse_args()
    config_file_location = args.config_file if (args.config_file is not None) else DEFAULT_PARAMETERS_FILENAME
    if len(config_file_location) == 0:
        raise RuntimeError('--config-file option must not be empty')
    # Only prepend script path when config file is not an absolute path
    if config_file_location[0] != '/':
        script_path = os.path.dirname(os.path.realpath(__file__))
        config_file_location = '%s/%s' % (script_path, config_file_location)
    # FIX: close the handle deterministically and avoid yaml.load without an
    # explicit Loader, which can execute arbitrary code from the config file.
    with open(config_file_location, 'r') as config_file:
        config_document = yaml.safe_load(config_file)
    parameters = validate_config(config_document, os.path.basename(config_file_location))
    # Hoist the repeated parameters['pushover'] None-checks into one lookup.
    pushover = parameters['pushover']
    pushover_client = None
    if pushover is not None:
        pushover_client = PushoverClient(pushover['token'], pushover['user_key'])
    organiser = MediaOrganiser(
        parameters['complete_downloads_path'],
        parameters['library_path'],
        TVDBClient(parameters['tvdb_api_key']),
        parameters['series_title_overrides'],
        parameters['series_season_offsets'],
        parameters['md5_check'],
        pushover_client,
        # dict.get mirrors the original "key present, else default" logic.
        pushover.get('device') if pushover is not None else None,
        pushover.get('ignore_hd', False) if pushover is not None else False
    )
    organiser.process()
dest="qfaufbv", help="Produce a formula using an uninterpreted function") args.add_argument("--rounds", type=int, help="Number of rounds", default=13) args.add_argument("--teams", type=int, help="Number of teams", default=32) args.add_argument("--closeness", type=int, help="Closeness constraint", default=5) args.add_argument("--slots", type=int, help="Slots per match", default=4) the_args = args.parse_args() set_config(the_args.rounds, the_args.teams, the_args.closeness, the_args.slots) compute_bitwidths() validate_config() print "(set-info :status unknown)" print "(set-option :produce-models true)" output_object = None if the_args.z3: output_object = Z3() elif the_args.qfaufbv: output_object = QFAUFBV() elif the_args.qfbv: output_object = QFBV() else: output_object = QFBV() output_object.preamble()
from lda_model import train_lda, predict config_logger(ROOT_PATH) time_start = time.time() current_time = time.strftime("%Y-%m-%d %H:%M:%S %Z", time.gmtime(time_start)) logger.info("Execution started at " + current_time) # Get paths dir_data, dir_output, dir_tmp, config_path = get_folder_structure(root_path=ROOT_PATH, \ config_fname=CONFIG_NAME) logger.info("Validation config file..") # Load config schema = get_schema() config = read_config(config_path=config_path) config = validate_config(config=config, schema=schema) # Clean data df_cleaned = clean_data(dir_tmp=dir_tmp, path_data=dir_data) df_base = preprocess_data(df_cleaned, dir_tmp=dir_tmp, path_data=dir_data) # Create trained model folder dir_model = os.path.join(dir_output, "model_trained") print(dir_model) print(dir_output) print(config['n_top_words']) # If its train if config['train']: if not os.path.isdir(dir_model):
# Mutually exclusive output-format flags: at most one encoding may be chosen
# (Z3 enumerations, enumerated bitvector variables, or an uninterpreted
# function over bitvectors).
form = args.add_mutually_exclusive_group()
form.add_argument("--z3", action="store_true", default=False, dest="z3",
                  help="Produce a formula using Z3 enumerations")
form.add_argument("--qfbv", action="store_true", default=False, dest="qfbv",
                  help="Produce a formula with enumerated bitvector variables")
form.add_argument("--qfaufbv", action="store_true", default=False, dest="qfaufbv",
                  help="Produce a formula using an uninterpreted function")
# Problem-size parameters for the scheduling instance.
args.add_argument("--rounds", type=int, help="Number of rounds", default=13)
args.add_argument("--teams", type=int, help="Number of teams", default=32)
args.add_argument("--closeness", type=int, help="Closeness constraint", default=5)
args.add_argument("--slots", type=int, help="Slots per match", default=4)
the_args = args.parse_args()
# Push the parsed sizes into module-level configuration, derive the bit
# widths from them, and sanity-check the combination before emitting output.
set_config(the_args.rounds, the_args.teams, the_args.closeness, the_args.slots)
compute_bitwidths()
validate_config()
# Python 2 print statements: emit the SMT-LIB prologue to stdout.
print "(set-info :status unknown)"
print "(set-option :produce-models true)"
# Select the output backend from the flags; QFBV is the fallback when no
# format flag was given (note --qfbv and the default produce the same backend).
output_object = None
if the_args.z3:
    output_object = Z3()
elif the_args.qfaufbv:
    output_object = QFAUFBV()
elif the_args.qfbv:
    output_object = QFBV()
else:
    output_object = QFBV()
help='the file name to save as') return parser.parse_args() def read_sav(path): """Read .sav files, return pandas DataFrame""" w = r('foreign::read.spss("%s", to.data.frame=TRUE)' % path) df = pandas2ri.ri2py(w) return df if __name__ == "__main__": args = get_args() config = load_config(CONFIG_DIR / args.config_file, args.verbose) validate_config(config, args.verbose) meta_df = create_base_df(args.verbose) paper_paths = load_paper_paths(config, args.verbose) meta_df = load_and_merge(meta_df, paper_paths, args.manipulation, args.verbose) if args.verbose: print("Saving meta_df in {}".format(PREPROCESSED_DIR / (args.name + '.csv'))) print("Found {} individual participants".format(len(meta_df))) meta_df.to_csv(PREPROCESSED_DIR / (args.name + '.csv'), index=False)
"curl": lambda action: execute_curl(action["url"]), "command": lambda action: execute_command(action["command"]) } def execute_action(event: NfcEvent, tag_id: str): resolved_actions = resolve(config, event, tag_id) for action in resolved_actions: ACTION_MAP[action["type"]](action) # welcome message logging.info("Welcome to MFRC522-trigger!") validate_config(config) logging.info("Press Ctrl-C to stop.") # create a reader reader = pirc522.RFID() current_tag = '' last_tag = '' count = 0 polling = False # This loop keeps checking for chips. If one is near it will get the UID and authenticate while True: try: # wait for reader to send an interrupt if not polling:
def parse_and_execute_actions(args):
    """Dispatch the parsed command-line *args* to the matching render-farm
    action.

    Actions handled before any database work: 'none' (no-op) and
    'create-config'.  All remaining actions validate and load the config,
    connect to the database, register this render machine if needed, and
    finally close the connection.
    """
    if args.action == 'none':
        logging.info('Nothing to do, leaving now...')
        return
    elif args.action == 'create-config':
        # Config creation needs no database; pick the requested mode.
        if args.default:
            config.create_default_config(args.config)
        elif args.interactive:
            config.create_config_interactively(args.config)
        else:
            logging.warning('No way to create the config file was specified. '
                            'Choose --default or --interactive.')
        return
    # Every action below requires a validated, loaded configuration.
    config.validate_config(args.config)
    cfg = config.load_config(args.config)
    if args.action == 'setup-database':
        # Database setup happens before a connection can be used; return early.
        database.setup_database(cfg)
        return
    db, cursor = database.connect_to_database(cfg)
    # Ensure this machine is known to the farm before doing any project work.
    if not project.is_machine_registered(cfg, db, cursor):
        project.register_render_machine(cfg, db, cursor)
    if args.action == 'start':
        # Start a new project only when the name is valid and unused.
        if not project.check_if_project_name_valid(args):
            logging.error('Project name "{}" is not a valid choice!'.format(
                args.project_name))
        elif project.check_if_project_name_taken(args, db, cursor):
            logging.error('Project name "{}" is already taken!'.format(
                args.project_name))
        else:
            project.start_project(args, cfg, db, cursor)
    elif args.action == 'cancel':
        # TODO add security check
        if args.project:
            project.cancel_project(args, cfg, db, cursor)
        elif args.frames:
            project.cancel_frames(args, cfg, db, cursor)
        else:
            logging.warning(
                'Missing additional parameter, specify whether the project (--project) or frames '
                '(--frames <...>) should be deleted.')
    elif args.action == 'finish':
        project.finish_project(args, cfg, db, cursor)
    elif args.action == 'check':
        # status report
        if not project.check_if_project_name_valid(args):
            logging.error('Project name "{}" is not a valid choice!'.format(
                args.project_name))
        elif not project.check_if_project_name_taken(args, db, cursor):
            logging.error('Project name "{}" does not exist!'.format(
                args.project_name))
        else:
            project.get_project_info(args, cfg, db, cursor)
            # NOTE(review): --history is currently a stub (does nothing);
            # --frame_list prints the project's frame list.
            if args.history:
                pass
            elif args.frame_list:
                print(project.get_project_frame_list(args, cfg, db, cursor))
    elif args.action == 'render':
        # Render one batch, a fixed number of batches, or loop until the
        # project has no open frames left.
        if args.one_batch:
            project.render_frames(args, cfg, db, cursor)
        elif args.some_batches > 0:
            for _ in range(args.some_batches):
                project.render_frames(args, cfg, db, cursor)
        else:
            while project.has_project_open_frames(args, cfg, db, cursor):
                project.render_frames(args, cfg, db, cursor)
    elif args.action == 'free':
        # Release frames stuck in failed or waiting state back to the pool.
        if args.free_failed:
            project.free_failed_frames(args, cfg, db, cursor)
        elif args.free_waiting:
            project.free_waiting_frames(args, cfg, db, cursor)
    database.close_connection(db, cursor)