def setup_app_from_commandline(self, argv):
    """Bootstrap the Pyramid app named by argv[1] and strip it from argv.

    Exits with status 1 when no configuration file is given. Returns
    argv with the config-file argument removed so the remaining
    arguments can be handed to the wrapped command.
    """
    if len(argv) < 2:
        # sys.stderr.write works on both Python 2 and 3, unlike the
        # Python-2-only `print >> sys.stderr` statement used before.
        sys.stderr.write('No configuration file specified.\n')
        sys.exit(1)
    bootstrap(argv[1])
    self.app = default_app
    # Drop argv[1] (the config file); keep the program name and the rest.
    return argv[:1] + argv[2:]
def main(args=None):
    """The main routine: dispatch the Kinto init/start/migrate subcommands.

    :param args: argument list to parse (defaults to ``sys.argv[1:]``).
    """
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser(description="Kinto commands")
    subparsers = parser.add_subparsers(title='subcommands',
                                       description='valid subcommands',
                                       help='init/start/migrate')
    parser_init = subparsers.add_parser('init')
    parser_init.set_defaults(which='init')
    parser_init.add_argument('--config_file', required=False,
                             help='Config file may be passed as argument')
    parser_migrate = subparsers.add_parser('migrate')
    parser_migrate.set_defaults(which='migrate')
    parser_start = subparsers.add_parser('start')
    parser_start.set_defaults(which='start')
    # Bug fix: parse the `args` parameter instead of always reading
    # sys.argv, so callers can pass an explicit argument list.
    parsed = vars(parser.parse_args(args))
    # On Python 3 subparsers are optional, so 'which' may be absent;
    # the old code raised KeyError when no subcommand was given.
    which = parsed.get('which')
    if which is None:
        parser.print_help()
        return
    if which == 'init':
        if parsed['config_file'] is None:
            env = bootstrap('config/kinto.ini')
        else:
            # `format(x)` on a string is a no-op; use the value directly.
            env = bootstrap(parsed['config_file'])
    elif which == 'migrate':
        env = bootstrap('config/kinto.ini')
        cliquet.init_schema(env)
    elif which == 'start':
        pserve_argv = ['pserve', 'config/kinto.ini', '--reload']
        pserve.main(pserve_argv)
def main():
    """Create all database tables for the config file given on argv."""
    if len(sys.argv) != 2:
        usage()
    config_uri = sys.argv[1]
    bootstrap(config_uri)
    Base.metadata.create_all()
def init_db(args):
    """Create database tables and elasticsearch indices."""
    paster.setup_logging(args.config_uri)
    # Setting MODEL_CREATE_ALL forces model creation; bootstrapping the
    # application below then triggers it as a side effect.
    os.environ['MODEL_CREATE_ALL'] = 'True'
    paster.bootstrap(args.config_uri)
def take_action(self, parsed_args):
    """Bootstrap the app from the parsed config, create the blog, commit."""
    import transaction
    bootstrap(parsed_args.config)
    create_blog(raw_input)
    transaction.commit()
def main(argv=sys.argv):
    """Start celery with the Pyramid app bootstrapped from argv[1]."""
    if len(argv) < 2:
        prog = os.path.basename(argv[0])
        sys.stderr.write("Usage: %s config\n" % prog)
        sys.stderr.write("Configuration file not present.\n")
        sys.exit(1)
    bootstrap(argv[1])
    # Hand celery every argument except the config file (argv[1]).
    celery.start(argv[:1] + argv[2:])
def quick_test(options):
    """Run the prune-logs job once against a non-production bootstrap."""
    bootstrap(production=False)
    from runway.core.system.jobs.prune_logs import PruneLogsJob
    job = PruneLogsJob()
    job.perform_job(None)
    return ""
def main():
    """Bootstrap the app from argv[1] and create all database tables."""
    if len(sys.argv) != 2:
        usage()
    bootstrap(sys.argv[1])
    Base.metadata.create_all()
def main(argv=sys.argv):  # pragma: no cover
    """Bootstrap the app, configure logging, then run the processor."""
    if len(argv) < 2:
        usage(argv)
    ini_path = argv[1]
    bootstrap(ini_path)
    setup_logging(ini_path)
    processor()
def bootstrap(args):
    """Bootstrap the application from the given arguments.

    Returns a bootstrapped request object whose base URL is ``args.base``.
    """
    paster.setup_logging(args.config_uri)
    req = Request.blank('/', base_url=args.base)
    paster.bootstrap(args.config_uri, request=req)
    return req
def take_action(self, parsed_args):
    """Return a header row followed by the list of Scaffold rows."""
    from pyramid.paster import bootstrap
    from urakata.models import Scaffold, Session, Base
    bootstrap(parsed_args.config)
    # suppress SQL echo logging
    Base.metadata.bind.echo = False
    header = [("name", "version", "repository", "utime")]
    rows = [(item.name, item.version, item.repository.name, item.utime)
            for item in Session.query(Scaffold)]
    return header + [rows]
def __init__(self, options):
    """Build the Elasticsearch layer index from the themes/layers in DB.

    :param options: parsed CLI options (app_config, interfaces,
        recreate_index, ...).
    """
    self.options = options
    self.imported = set()
    self.layers = []
    settings = {}
    with open(".build/config.yaml") as f:
        settings = yaml.load(f)
    self.languages = settings["available_locale_names"]
    # must be done only once we have loaded the project config
    from c2cgeoportal.models import DBSession, Interface, Theme, Role
    self.session = DBSession()
    self._ = {}
    self.metadata_service_url = \
        'http://shop.geoportail.lu/Portail/inspire/webservices/getMD.jsp'
    # Bug fix: bootstrap once and reuse both registry and request; the
    # original called bootstrap() twice, starting the app twice.
    env = bootstrap(self.options.app_config)
    registry = env['registry']
    request = env['request']
    self.es_layer_index = get_index(request) + '_layers'
    self.tdirs = registry.queryUtility(ITranslationDirectories, default=[])
    self.tsf = TranslationStringFactory('geoportailv3-client')
    self.interfaces = self.session.query(Interface).filter(
        Interface.name.in_(options.interfaces)
    ).all()
    self.public_theme = {}
    self.public_group = {}
    for interface in self.interfaces:
        self.public_theme[interface.id] = []
        self.public_group[interface.id] = []
    # Index public themes once, then every theme per role.
    for theme in self.session.query(Theme).filter_by(public=True).all():
        self._add_theme(theme)
    for role in self.session.query(Role).all():
        for theme in self.session.query(Theme).all():
            self._add_theme(theme, role)
    ensure_index(
        get_elasticsearch(request),
        self.es_layer_index,
        options.recreate_index
    )
    try:
        helpers.bulk(actions=self.layers,
                     client=get_elasticsearch(request),
                     raise_on_error=True)
    except (BulkIndexError, ConnectionTimeout) as e:
        statuslog("\n %s" % e)
def main():
    """Generate logos after verifying that ImageMagick is available."""
    if len(sys.argv) != 2:
        usage()
    bootstrap(sys.argv[1])
    convert_path = shutil.which('convert')
    if convert_path is None:
        print('Error: ImageMagick not found ("convert" is not in $PATH)')
        print('Try: sudo apt-get install imagemagick')
        sys.exit(2)
    create_logos()
def sql(options):
    """Execute each SQL statement in options.vals and print its rows."""
    bootstrap(production=False)
    results = cli_f.sql(*options.vals)
    # Pair each executed query with the statement text that produced it.
    for statement, query in zip(options.vals, results):
        print("")
        print(statement)
        if query.returns_rows:
            for row in query:
                print(row)
    print("")
    return ""
def main(argv=sys.argv):
    """Email SPPT reports to registered users who have not been sent one.

    Reads the app config from argv[1], generates a PDF per unsent NOP,
    mails it as a base64 attachment, flags the row as sent and commits.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    os.environ['PYJASPER_SERVLET_URL'] = settings['jasper_url']
    bootstrap(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    other_engine = engine_from_config(settings, 'othersql.')
    Base.metadata.bind = engine
    OtherBase.metadata.bind = other_engine
    from ..models.esppt_models import (
        esNopModel,
        esRegModel,
        spptModel,
        )
    from ..views.es_reports import GenerateSppt
    DBSession.configure(bind=engine)
    q = DBSession.query(esNopModel, esRegModel).filter(
        esNopModel.es_reg_id == esRegModel.id)
    q = q.filter(esNopModel.email_sent == 0)
    for r_nop, r_reg in q:
        nop = get_nop(r_nop)
        # Use a distinct name for the per-row query: rebinding `q` here
        # shadowed the outer query object and made the code confusing.
        sppt_q = spptModel.get_by_nop_thn(nop, r_nop.tahun)
        sppt = sppt_q.first()
        if not sppt:
            continue
        nilai = thousand(sppt.pbb_yg_harus_dibayar_sppt)
        # USER_ID updated using the password of the user in reg.kode -- aagusti
        g = GenerateSppt(nop, r_nop.tahun, r_reg.kode)
        sppt_file = g.sppt_file
        e_filename = os.path.split(sppt_file)[-1]
        # Open the report in binary mode (it is a PDF) and close it
        # deterministically; base64 requires bytes on Python 3.
        with open(sppt_file, 'rb') as f:
            content = f.read()
        e_content = base64.encodestring(content)
        e_subject = EMAIL_SUBJECT.format(nop=nop, tahun=r_nop.tahun)
        e_body = EMAIL_BODY.format(nama_wp=sppt.nm_wp_sppt, nop=nop,
                                   tahun=r_nop.tahun, nilai=nilai)
        files = [(e_filename, e_content)]
        print('To: {name} <{email}>'.format(name=sppt.nm_wp_sppt,
                                            email=r_reg.email))
        print('Subject: {s}'.format(s=e_subject))
        print('Body: {s}'.format(s=e_body))
        print('File: {s}'.format(s=e_filename))
        # Flag as sent before mailing so the worker does not double-send
        # on a later run; commit persists both the flag and the send.
        r_nop.email_sent = 1
        flush(r_nop)
        send(r_reg.email, sppt.nm_wp_sppt, e_subject, e_body, files,
             settings['email_pengirim'])
        transaction.commit()
def main(argv=sys.argv):
    """Interactively create a user with a prompted email and password."""
    if len(argv) < 2:
        usage(argv)
    bootstrap(argv[1])
    email = input('Input Email:')
    password = getpass.getpass('Input Password:')
    new_user = User(email=email)
    set_password(new_user, password)
    with transaction.manager:
        DBSession.add(new_user)
def setup_app_from_commandline(self, argv):
    """Extract the config file from argv, bootstrap the app, and rewrite
    argv so celery's 'worker' subcommand runs in its place.

    Returns the (possibly rewritten) argv list.
    """
    if argv is None:
        argv = sys.argv
    # sys.stdout/stderr.write works on both Python 2 and 3, unlike the
    # Python-2-only print statements used before.
    sys.stdout.write('%s\n' % argv)
    if len(argv) < 2:
        sys.stderr.write('No configuration file specified.\n')
        return argv
    if not self.conf:
        # argv[1] is the config file: remember it and substitute the
        # celery 'worker' subcommand in its position.
        self.conf = argv.pop(1)
        argv.insert(1, 'worker')
    bootstrap(self.conf)
    self.app = default_app
    return argv
def main(argv=sys.argv):
    """Create DB tables, bootstrap the registry, and load fixture data."""
    if len(argv) != 2:
        usage(argv)
    ini_path = argv[1]
    setup_logging(ini_path)
    settings = get_appsettings(ini_path)
    ziggurat_init(settings)
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=db_engine)
    init_model()
    Base.metadata.create_all(db_engine)  # Create non Ziggurat tables
    bootstrap(ini_path)  # This make get_current_registry() works.
    insert_data(fixtures)
    create_default_permissions()
    transaction.commit()
def main():
    """Clear 'like' tags on proposals/posts under a meeting or agenda item."""
    parser = argparse.ArgumentParser()
    parser.add_argument("config_uri", help="Paster ini file to load settings from")
    parser.add_argument("path", help="from which path to clear likes (meeting or agenda item)")
    args = parser.parse_args()
    env = bootstrap(args.config_uri)
    root = env['root']
    request = env['request']
    context = traverse(root, args.path).get('context')
    # Guard clause: bail out early on anything but a meeting/agenda item.
    if not (IMeeting.providedBy(context) or IAgendaItem.providedBy(context)):
        print('Path does not match a meeting or agenda item')
        return
    print('Clearing likes on {}'.format(context.title))
    path_query = query.Eq('path', args.path)
    cleared = False
    for type_name in ('Proposal', 'DiscussionPost'):
        count, docids = root.catalog.query(path_query & query.Eq('type_name', type_name))
        prompt = 'Found {} {} on {}. Do you want to clear likes on these? (y/N) '.format(
            count, type_name, context.title).encode('utf8')
        response = input(prompt)
        if response.lower() in ('y', 'yes', 'j', 'ja'):
            cleared = True
            for obj in request.resolve_docids(docids, perm=None):
                like = request.registry.getAdapter(obj, IUserTags, name='like')
                like.storage.clear()
                like._notify()
    if cleared:
        transaction.commit()
    env['closer']()
def command(self):
    """Export every row of every table to a YAML fixtures file.

    ``self.args[0]`` is the config uri; ``self.args[1]`` the output path.
    Geometry columns are exported as WKT.
    """
    from next.models import Base
    from shapely.wkb import loads
    from yaml import dump
    config_uri = self.args[0]
    env = bootstrap(config_uri)
    engine = engine_from_config(env['registry'].settings, 'sqlalchemy.')
    initialize_base(engine)
    tables = Base.metadata.sorted_tables
    # Explicit error instead of `assert`, which is stripped under -O and
    # would raise IndexError anyway when the argument is missing.
    if len(self.args) < 2 or self.args[1] is None:
        raise ValueError('You should provide a output file')
    fixtures = []
    for table in tables:
        for row in table.select().execute():
            c = {'table': table.name, 'fields': {}}
            columns = table.c.keys()
            # sanity check before we export the data
            assert len(columns) == len(row)
            for i in range(len(columns)):
                column = table.c[columns[i]]
                cell = row[i]
                if str(column.type) in geom_types:
                    # we have to call str on the binary column first
                    c['fields'][column.name] = loads(str(cell)).wkt
                else:
                    c['fields'][column.name] = cell
            fixtures.append(c)
    # Open with `with` so the file is flushed and closed even on error
    # (the original handle was never closed).
    with open(self.args[1], 'w') as yaml_file:
        dump(fixtures, yaml_file)
def main():  # pragma: nocover
    """Write a gettext .pot file containing every TreeItem name in the DB."""
    env = bootstrap("development.ini")
    from c2cgeoportal.models import DBSession, TreeItem
    package = env["registry"].settings["package"]
    directory = "%s/locale/" % package
    destination = path.join(directory, "%s-db.pot" % package)
    w = codecs.open(destination, "wt", encoding="utf-8")
    # Standard POT header, marked fuzzy.
    w.write(
        u"""#, fuzzy
msgid ""
msgstr ""
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=utf-8\\n"
"Content-Transfer-Encoding: 8bit\\n"

"""
    )
    treeitems = DBSession.query(TreeItem.item_type, TreeItem.id, TreeItem.name)
    # One msgid entry per tree item, keyed by "<type>.<id>".
    for item_type, item_id, item_name in treeitems:
        w.write(
            u"""#: %(type)s.%(id)s
msgid "%(name)s"
msgstr ""

""" % {"type": item_type, "id": item_id, "name": item_name}
        )
    print("DB Pot file updated: %s" % destination)
def import_access(argv=None):
    """ Load the access control data from a dump file or stdin

    This operation is idempotent and graceful. It will not clobber
    your existing ACL.

    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description=import_access.__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('config', help="Name of config file")
    parser.add_argument('-i', help="Name of input file")
    args = parser.parse_args(argv)
    logging.basicConfig()
    if args.i:
        with gzip.open(args.i, 'r') as ifile:
            data = json.load(ifile)
    else:
        # print() with one argument behaves identically on Python 2 and
        # 3; the old `print "..."` statement was Python-2 only.
        print("Reading data from stdin...")
        data = json.load(sys.stdin)
    env = bootstrap(args.config)
    access = env['request'].access
    result = access.load(data)
    transaction.commit()
    if result is not None:
        print(result)
def main():
    """Generate a CA key and certificate at the paths from the ini file.

    Refuses to overwrite existing files; creates parent directories.
    """
    args = cmdline()
    env = bootstrap(args.inifile)
    settings, closer = env["registry"].settings, env["closer"]
    ca_cert = settings.get("ca.cert")
    ca_key = settings.get("ca.key")
    if not ca_cert:
        print("Missing 'ca.cert' in ini file")
        closer()
        exit()
    if not ca_key:
        # Bug fix: this message previously (and wrongly) said 'ca.cert'.
        print("Missing 'ca.key' in ini file")
        closer()
        exit()
    for f in ca_cert, ca_key:
        if os.path.exists(f):
            print("File already exists: {}. Refusing to corrupt.".format(f))
            closer()
            exit()
        else:
            dname = os.path.dirname(f)
            os.makedirs(dname, exist_ok=True)
    print("Will write key to {}".format(ca_key))
    print("Will write cert to {}".format(ca_cert))
    build_ca(keyname=ca_key, certname=ca_cert)
def pyramid_app(self):
    """Bootstrap the Pyramid app from self.pargs.ini and return its env.

    Wires a stream handler onto the 'mokacms' logger at the CLI's
    current level and registers a pre_close hook invoking the closer.
    """
    self.log.debug("Bootstrapping pyramid application")
    # setup logging
    logger = logging.getLogger("mokacms")
    current_level = self.log.level()
    logger.setLevel(current_level)
    ch = logging.StreamHandler()
    formatter = logging.Formatter("%(levelname)s: [mokacms] %(message)s")
    ch.setLevel(current_level)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    try:
        env = bootstrap(self.pargs.ini)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; the error is logged and re-raised.
        self.log.error("Cannot bootstrap application")
        raise

    def cleanup(_):
        self.log.debug("Calling pyramid app closer")
        env['closer']()

    hook.register("pre_close", cleanup)
    return env
def main(argv=sys.argv):
    """Create tables and import kitten records (with images) from JSON.

    argv[1] is the config uri, argv[2] the JSON path; remaining
    arguments are parsed as settings overrides.
    """
    if len(argv) < 3:
        usage(argv)
    config_uri = argv[1]
    json_path = argv[2]
    options = parse_vars(argv[3:])
    setup_logging(config_uri)
    # Configure the application, so we can access the registry.
    env = bootstrap(config_uri, options=options)
    # Generate a DBSession using the sessionmaker:
    DBSession = env['registry']['db_sessionmaker']()
    # The SQLAlchemy engine is accessible as the session's bind.
    engine = DBSession.bind
    Base.metadata.create_all(engine)
    # Use a context manager so the JSON file is closed promptly
    # (the original `json.load(open(...))` leaked the handle).
    with open(json_path) as json_file:
        json_data = json.load(json_file)
    with transaction.manager:
        for kitten_data in json_data:
            kitten = Kitten(source_url=kitten_data['source_url'],
                            credit=kitten_data['credit'])
            r = requests.get(kitten_data['download_url'])
            if r.headers['content-type'] == 'image/jpeg':
                kitten.file_extension = '.jpeg'
            elif r.headers['content-type'] == 'image/png':
                kitten.file_extension = '.png'
            kitten.file_data = r.content
            DBSession.add(kitten)
    # Not strictly necessary, as everything gets unwound when main returns
    # anyway. But it's a good habit to keep.
    env['closer']()
def main():
    """Print a table of route path / HTTP method / view for the app."""
    env = bootstrap(sys.argv[1])
    introspector = env["registry"].introspector
    print("{path:^40} {method:^6} {view_name:^20}".format(
        path="Path", method="Method", view_name="View"))
    print(u"--------------------------------------------------")
    for entry in introspector.get_category("views"):
        view = entry["introspectable"]
        # NOTE(review): relies on the private _relations attribute to map
        # a view introspectable back to its route.
        route = None
        for s, category_name, discriminator in view._relations:
            if s and category_name == "routes":
                route = introspector.get(category_name, discriminator)
                break
        if route is None:
            continue
        print("{path:<40} {method:<6} {view_name:<20}".format(
            path=route["pattern"],
            method=view["request_methods"] or "-",
            view_name=view_name(view["callable"], view["attr"])
        ))
def init_env(p):
    """Bootstrap the app from config `p` and set the global redis hits key."""
    global redis_key
    bootstrap(p)
    # Key is namespaced by environment. The duplicate line that
    # recomputed the same value twice has been removed, along with the
    # unused `env` local.
    mode = "dev" if cfront_settings.get("debug_mode", False) else "prod"
    redis_key = "cfront-{0}:job:hits".format(mode)
def get_env(config_uri, base_url):
    """ Return a preconfigured paste environment object.

    Sets up the WSGI application and ensures that webassets knows
    to load files from ``h:static`` regardless of the
    ``webassets.base_dir`` setting.
    """
    request = Request.blank('', base_url=base_url)
    env = paster.bootstrap(config_uri, request)
    request.root = env['root']

    assets = request.webassets_env

    # Ensure that the webassets URL is absolute
    assets.url = urlparse.urljoin(base_url, assets.url)

    # Disable webassets caching and manifest generation
    assets.cache = False
    assets.manifest = False

    # By default, webassets will use its base_dir setting as its search path.
    # When building extensions, we change base_dir so as to build assets
    # directly into the extension directories. As a result, we have to add
    # back the correct search path.
    assets.append_path(resolve('h:static').abspath(), assets.url)

    request.registry.notify(ContextFound(request))  # pyramid_layout attrs

    return env
def engine_from_settings(config, full_config=False):
    """Return (metadata, session) for the assembl database in `config`.

    With ``full_config=True`` the whole Pyramid app is bootstrapped
    (ZMQ changes socket, indexing, model watcher, logging config);
    otherwise only a bare session maker is created.
    """
    settings = get_appsettings(config, 'assembl')
    # Virtuoso qualifies the schema with the database user.
    if settings['sqlalchemy.url'].startswith('virtuoso:'):
        db_schema = '.'.join((settings['db_schema'], settings['db_user']))
    else:
        db_schema = settings['db_schema']
    # NOTE(review): db_schema is computed but not used in this function —
    # set_config below may read it from `settings` instead; confirm.
    set_config(settings, True)
    session = None
    if full_config:
        env = bootstrap(config)
        configure_zmq(settings['changes_socket'], False)
        configure_indexing()
        configure_model_watcher(env['registry'], 'assembl')
        logging.config.fileConfig(config)
        session = get_session_maker()
        metadata = get_metadata()
    else:
        session = make_session_maker(zope_tr=True)
    # Importing the models populates the metadata with table definitions.
    import assembl.models
    from assembl.lib.sqla import class_registry
    engine = configure_engine(settings, session_maker=session)
    # NOTE(review): these rebind `metadata` and `session`, overwriting the
    # assignments made in the full_config branch above — confirm intended.
    metadata = get_metadata()
    metadata.bind = engine
    session = sessionmaker(engine)()
    return (metadata, session)
def main():
    """Connect to the configured IMAP mailbox; list and/or import messages."""
    args = parse_args()
    env = bootstrap(args.config_uri)
    try:
        imap = IMAP4Mailbox(settings=env['registry'].settings)
        if imap.connection:
            print('Successfully connected to host %r, mailbox %r.' % (
                imap.host, imap.mailbox))
        if args.list_:
            print('Listing mailbox messages...')
            for msg in imap.messages:
                print(parsedate(msg['date']), msg['subject'])
            print('Finished.')
        if args.import_:
            count = 0
            # Trailing comma kept deliberately: under Python 2 it
            # suppresses the newline after the progress message.
            print('Importing mailbox messages...'),
            with transaction.manager:
                for msg in imap.messages:
                    post = api.create(email_text=str(msg))
                    count += 1
            print('imported %d messages.' % count)
            print('Threading messages...'),
            with transaction.manager:
                posts = api.list()
                api.thread(posts)
            print('threaded %d messages.' % len(posts))
            print('Finished.')
    finally:
        imap.close()
def on_preload_parsed(options, **kwargs):
    """ This actually configures celery from pyramid config file """
    celery.conf["INI_PYRAMID"] = options["ini"]
    import appenlight_client.client as e_client
    ini_location = options["ini"]
    if not ini_location:
        raise Exception(
            "You need to pass pyramid ini location using "
            "--ini=filename.ini argument to the worker"
        )
    env = bootstrap(ini_location[0])
    registry_settings = env["request"].registry.settings
    api_key = registry_settings["appenlight.api_key"]
    tr_config = registry_settings.get("appenlight.transport_config")
    CONFIG = e_client.get_config({"appenlight.api_key": api_key})
    if tr_config:
        CONFIG["appenlight.transport_config"] = tr_config
    APPENLIGHT_CLIENT = e_client.Client(CONFIG)
    # log.addHandler(APPENLIGHT_CLIENT.log_handler)
    register_signals(APPENLIGHT_CLIENT)
    celery.pyramid = env
def main(argv=sys.argv):
    """Initialise the DB models, printing a friendly hint on DB errors."""
    args = parse_args(argv)
    setup_logging(args.config_uri)
    env = bootstrap(args.config_uri)
    try:
        with env['request'].tm:
            dbsession = env['request'].dbsession
            setup_models(dbsession)
    except OperationalError as err:
        print('''
Pyramid is having a problem using your SQL database.  The problem
might be caused by one of the following things:

1.  You may need to initialize your database tables with `alembic`.
    Check your README.txt for description and try to run it.

2.  Your database server may not be running.  Check that the
    database server referred to by the "sqlalchemy.url" setting in
    your "production.ini" file is running.

{err}
'''.format(err=err))
def export_users():
    """Export all users and their proposal rates to csv file.

    usage::

        bin/export_mercator_users etc/development.ini 10
    """
    # The docstring doubles as the CLI description, so it is reused here.
    docstring = inspect.getdoc(export_users)
    parser = argparse.ArgumentParser(description=docstring)
    parser.add_argument('ini_file',
                        help='path to the adhocracy backend ini file')
    parser.add_argument('min_rate', type=int,
                        help='minimal rate to restrict listed proposals')
    args = parser.parse_args()
    env = bootstrap(args.ini_file)
    csv_path = create_filename(directory='./var/export/',
                               prefix='adhocracy-users',
                               suffix='.csv')
    _export_users_and_proposals_rates(env['root'], csv_path,
                                      min_rate=args.min_rate)
    env['closer']()
def main(argv=sys.argv):
    """Rebuild the database with default data and refresh the user index."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    env = bootstrap(config_uri)
    settings = env['registry'].settings
    request = env['request']
    engine = engine_from_config(settings, 'sqlalchemy.')
    session = scoped_session(sessionmaker())
    session.configure(bind=engine)
    # Recreate the schema from scratch before seeding defaults.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    data = generate_default_data(session)
    session.commit()
    if request.search_settings['enabled']:
        setup_user_index(request)
        index_users(request, data['users'])
def main(argv=sys.argv):
    """Initialise the DB models, printing a friendly hint on DB errors."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    env = bootstrap(config_uri)
    try:
        with env['request'].tm:
            dbsession = env['request'].dbsession
            setup_models(dbsession)
    except OperationalError:
        print('''
Pyramid is having a problem using your SQL database.  The problem
might be caused by one of the following things:

1.  You may need to initialize your database tables with `alembic`.
    Check your README.txt for description and try to run it.

2.  Your database server may not be running.  Check that the
    database server referred to by the "sqlalchemy.url" setting in
    your "development.ini" file is running.
''')
def engine_from_settings(config, full_config=False):
    """Return (metadata, session) for the assembl database in `config`.

    With ``full_config=True`` the whole Pyramid app is bootstrapped
    (ZMQ changes socket, indexing, model watcher, logging config);
    otherwise only a bare session maker is created.
    """
    settings = get_appsettings(config, 'assembl')
    # Virtuoso qualifies the schema with the database user.
    if settings['sqlalchemy.url'].startswith('virtuoso:'):
        db_schema = '.'.join((settings['db_schema'], settings['db_user']))
    else:
        db_schema = settings['db_schema']
    # NOTE(review): db_schema is computed but not used in this function —
    # set_config below may read it from `settings` instead; confirm.
    set_config(settings, True)
    session = None
    if full_config:
        env = bootstrap(config)
        configure_zmq(settings['changes_socket'], False)
        configure_indexing()
        configure_model_watcher(env['registry'], 'assembl')
        logging.config.fileConfig(config)
    else:
        session = make_session_maker(zope_tr=True)
    # Importing the models populates the metadata with table definitions.
    import assembl.models
    from assembl.lib.sqla import class_registry
    engine = configure_engine(settings, session_maker=session)
    metadata = get_metadata()
    metadata.bind = engine
    # NOTE(review): this rebinds `session` unconditionally, discarding the
    # maker assigned in the else branch above — confirm that is intended.
    session = sessionmaker(engine)()
    return (metadata, session)
def main(argv: t.List[str] = sys.argv) -> None:
    """Run the script: populate the database with users and URLs."""
    parser = argparse.ArgumentParser(
        usage="python -m getoffmylawn.scripts.populate")
    parser.add_argument(
        "-c",
        "--config",
        type=str,
        default="etc/development.ini",
        metavar="<config>",
        help="Pyramid application configuration file.",
    )
    # Bug fix: parse once and honour the `argv` parameter — the original
    # called parse_args() twice and always read sys.argv.
    args = parser.parse_args(argv[1:])
    env = bootstrap(args.config)
    setup_logging(args.config)
    with transaction.manager:
        add_users(env["request"].db)
        add_urls(env["request"].db)
    logger.info("populate script finished")
    env["closer"]()
def main():
    """Add generated users to a meeting and grant them participant roles."""
    parser = argparse.ArgumentParser()
    parser.add_argument("config_uri", help="Paster ini file to load settings from")
    parser.add_argument("meeting_path", help="Meeting to add users to")
    args = parser.parse_args()
    env = bootstrap(args.config_uri)
    root = env['root']
    request = env['request']
    # print() with a single argument is valid on both Python 2 and 3,
    # unlike the old Python-2-only print statements.
    print("Adding to meeting %r" % args.meeting_path)
    # Just to make sure path exists
    meeting = find_resource(root, args.meeting_path)
    userids = add_users(root, request)
    for userid in userids:
        meeting.local_roles.add(
            userid, [ROLE_VIEWER, ROLE_DISCUSS, ROLE_PROPOSE, ROLE_VOTER])
    print("Results")
    print("=" * 80)
    for userid in userids:
        # NOTE(review): this format string looks redacted ("*****@...");
        # '%' needs a placeholder (e.g. '%s@...') to work — confirm the
        # original literal.
        print(", ".join([userid, "*****@*****.**" % userid]))
    print("-" * 80)
    print("Commit")
    transaction.commit()
def main(args=None):
    """Worker loop: pop deployment jobs from Redis and deploy Kinto.

    Blocks forever on the deploy queue; each popped item carries
    base64-encoded "user:password" credentials used to derive the
    alwaysdata deployment settings.

    :param args: argument list to parse (defaults to ``sys.argv[1:]``).
    """
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser(description="Kinto Deployment Worker")
    parser.add_argument('--ini', help='Application configuration file',
                        dest='ini_file')
    parsed_args = vars(parser.parse_args(args))
    logging.basicConfig(level=DEFAULT_LOG_LEVEL, format=DEFAULT_LOG_FORMAT)
    config_file = parsed_args['ini_file']
    env = bootstrap(config_file)
    registry = env['registry']
    r = StrictRedis(**registry.redis)
    while True:
        # Blocking pop: waits forever (timeout 0) for the next job.
        queue, b64_credentials = r.blpop(DEPLOY_QUEUE, 0)
        # The HMAC of the raw credentials is used as an opaque user id.
        user_id = hmac_digest(registry.hmac_secret, b64_credentials)
        credentials = base64.b64decode(b64_credentials).split(':', 1)
        id_alwaysdata = r.get(ID_ALWAYSDATA_KEY.format(user_id))
        settings = {
            'id_alwaysdata': id_alwaysdata,
            'credentials': tuple(credentials),
            'postgresql_host': "postgresql-%s.alwaysdata.net" % id_alwaysdata,
            'ssh_host': "ssh-%s.alwaysdata.net" % id_alwaysdata,
            'ftp_host': "ftp-%s.alwaysdata.net" % id_alwaysdata,
            # NOTE(review): this literal looks redacted ("******"); using
            # '%' on a string with no placeholder raises TypeError at
            # runtime — confirm the original format string.
            'prefixed_username': "******" % id_alwaysdata
        }
        status_handler = RedisStatusHandler(r, user_id)
        deploy_kinto_to_alwaysdata(status_handler, file_root=FILE_ROOT,
                                   **settings)
def parsed_args(*arg_specs, **kw):  # pragma: no cover
    """pass a truthy value as keyword parameter bootstrap to bootstrap the app.

    Builds an argparse namespace enriched with app helpers: ``env``
    (the bootstrapped environment or {}), ``module``, ``log``,
    ``data_file``, ``module_dir`` and ``migrations_dir``.

    :param arg_specs: extra (args, kwargs) pairs for parser.add_argument.
    :param kw: recognised keys: 'args' (argv list), 'engine',
        'bootstrap' (truthy to bootstrap the app).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("config_uri", action=ExistingConfig,
                        help="ini file providing app config")
    parser.add_argument("--glottolog-dburi", default=None)
    parser.add_argument("--module", default=None)
    parser.add_argument("--sqlite", nargs=1, action=SqliteDb,
                        help="sqlite db file")
    for args, _kw in arg_specs:
        parser.add_argument(*args, **_kw)
    # kw.pop: 'args' must not leak into later kw.get lookups.
    args = parser.parse_args(args=kw.pop('args', None))
    # CLI-provided engine (set by SqliteDb action) wins over the kw one.
    engine = getattr(args, 'engine', kw.get('engine', None))
    args.env = bootstrap(args.config_uri) if kw.get('bootstrap', False) else {}
    module = setup_session(args.config_uri, engine=engine)
    # make sure we create URLs in the correct domain
    if args.env:
        dataset = DBSession.query(common.Dataset).first()
        if dataset:
            args.env['request'].environ['HTTP_HOST'] = dataset.domain
    if module == 'tests':
        module = 'clld'
    # Resolve the app module: explicit --module beats the session's module.
    args.module = __import__(args.module or module)
    args.log = logging.getLogger(args.module.__name__)
    if engine:
        args.log.info('using bind %s' % engine)
    args.data_file = partial(data_file, args.module)
    args.module_dir = path(args.module.__file__).dirname()
    args.migrations_dir = path(args.module.__file__).dirname().joinpath(
        '..', 'migrations')
    return args
def main():
    """Populate the blog root with ``--num`` generated blog entries."""
    from pyramid.paster import bootstrap
    from ..resources import BlogEntry
    parser = OptionParser(description=__doc__, usage='usage: %prog [options]')
    parser.add_option('-c', '--config', dest='config',
                      help='Specify a paster config file.')
    parser.add_option('-i', '--num', dest='num', default='10000',
                      help='Specify the number of blog entries to add.')
    options, args = parser.parse_args()
    config = options.config
    num = int(options.num)
    if config is None:
        raise ValueError('must supply config file name')
    config = os.path.abspath(os.path.normpath(config))
    env = bootstrap(config)
    root = env['root']
    registry = env['registry']
    closer = env['closer']
    for n in range(0, num):
        print ("adding", n)
        entry = registry.content.create(
            'Blog Entry',
            'Title of blog entry %s' % n,
            LOREM_IPSUM,
            'html',
            datetime.date.today(),
        )
        root['blogentry_%s' % n] = entry
        # Commit in batches to bound the transaction size.
        if n % 10000 == 0:
            print ('committing')
            transaction.commit()
    print ('committing')
    transaction.commit()
    root._p_jar._db.close()
    closer()
def devdata():
    """Clone the devdata git repo, install its env file and load fixtures."""
    with bootstrap(sys.argv[1]) as env:
        with tempfile.TemporaryDirectory() as tmpdirname:
            # The directory that we'll clone the devdata git repo into.
            git_dir = os.path.join(tmpdirname, "devdata")
            subprocess.check_call([
                "git", "clone", "[email protected]:hypothesis/devdata.git",
                git_dir
            ])
            # Copy devdata env file into place.
            shutil.copyfile(
                os.path.join(git_dir, "lms", "devdata.env"),
                os.path.join(
                    pathlib.Path(lms.__file__).parent.parent, ".devdata.env"),
            )
            # Load the fixtures via a context manager so the file handle
            # is closed deterministically (the original leaked it), and
            # use json.load instead of loads(read()).
            with open(os.path.join(git_dir, "lms", "devdata.json")) as fp:
                fixtures = json.load(fp)
            DevDataFactory(env["request"], fixtures).create_all()
def main():
    """Import participant-number tickets for a meeting from a CSV file."""
    parser = argparse.ArgumentParser()
    parser.add_argument("config_uri", help="Paster ini file to load settings from")
    parser.add_argument("meeting", help="Meeting to add users to")
    parser.add_argument("csv_file", help="CSV file to read")
    args = parser.parse_args()
    env = bootstrap(args.config_uri)
    root = env['root']
    # print() with one argument works on both Python 2 and 3, unlike the
    # Python-2-only print statements used before.
    print("Adding to meeting %r" % args.meeting)
    # Just to make sure path exists
    meeting = root[args.meeting]
    pns = IParticipantNumbers(meeting)
    with open(args.csv_file, "r") as csv_file:
        # str(";") keeps the delimiter a byte string under Python 2 even
        # if unicode_literals is in effect at module level.
        csv_reader = csv.reader(csv_file, delimiter=str(";"))
        for row in csv_reader:
            i = int(row[0])
            token = row[1]
            ticket = ParticipantNumberTicket(i, token, 'admin')
            pns.tickets[i] = ticket
            pns.token_to_number[token] = i
    print("-" * 80)
    print("Commit")
    transaction.commit()
def main(argv: t.List[str] = sys.argv) -> None:
    """Run the script: drop everything owned by the current DB user."""
    parser = argparse.ArgumentParser(
        usage=
        "pipenv run python -m {{cookiecutter.project_slug}}.scripts.drop_tables"
    )
    parser.add_argument(
        "-c",
        "--config",
        type=str,
        default="etc/development.ini",
        metavar="<config>",
        help="Pyramid application configuration file.",
    )
    # Bug fix: parse once and honour `argv` — the original called
    # parse_args() twice and always read sys.argv.
    args = parser.parse_args(argv[1:])
    env = bootstrap(args.config,
                    options={"SKIP_CHECK_DB_MIGRATED": "true"})
    setup_logging(args.config)
    engine = env["registry"].settings["sqlalchemy.engine"]
    # DROP OWNED removes all objects owned by the connected DB user.
    engine.execute("DROP OWNED BY current_user")
    # NOTE(review): keyword-style logging suggests a structlog logger —
    # stdlib logging would reject the `url` kwarg; confirm.
    logger.warn("db reset done for", url=str(engine.url))
    env["closer"]()
def main():
    """Create an admin user from <config_uri> <username> <password>."""
    parser = OptionParser(
        description=__doc__,
        usage="%prog <config_uri> <username> <password>",
    )
    options, args = parser.parse_args()
    try:
        config_uri, username, password = args
    except ValueError:
        # Exactly three positional arguments are required. Narrowed from
        # a bare `except:` that would also have swallowed SystemExit.
        parser.print_usage()
        sys.exit(1)
    setup_logging(config_uri)
    env = bootstrap(config_uri)
    principals = env['root']['principals']
    admins = principals['groups']['admins']
    user = principals.add_user(username, password=password)
    admins.memberids.connect([user])
    transaction.commit()
def main(argv=None):
    """Create tables and default models, with a friendly DB error hint."""
    args = parse_args(argv or sys.argv)
    setup_logging(args.config_uri)
    env = bootstrap(args.config_uri)
    try:
        with env["request"].tm:
            dbsession = env["request"].dbsession
            Base.metadata.create_all(dbsession.get_bind())
            setup_models(dbsession)
    except OperationalError:
        print(
            """
Pyramid is having a problem using your SQL database.  The problem
might be caused by one of the following things:

1.  You may need to initialize your database tables with `alembic`.
    Check your README.txt for description and try to run it.

2.  Your database server may not be running.  Check that the
    database server referred to by the "sqlalchemy.url" setting in
    your "development.ini" file is running.
"""
        )
def bootstrap_pyramid(signal, sender, **kwargs):
    """Bootstrap the app and block until the database accepts connections.

    Sets the module-global `session_factory` once the DB is reachable.
    """
    global session_factory
    import os
    import time
    # Imports hoisted out of the retry loop: the original re-imported
    # sqlalchemy.exc (and time) on every iteration.
    import sqlalchemy.exc
    from pyramid.paster import bootstrap
    settings = bootstrap('/run/secrets/production.ini')['registry'].settings
    engine = models.get_engine(settings, prefix='sqlalchemy.')
    # The database server may take longer to start than the application,
    # so retry a trivial query until the connection succeeds.
    while True:
        try:
            print("Checking database connection")
            conn = engine.connect()
            conn.execute("select 'OK'")
        except sqlalchemy.exc.OperationalError:
            print("Connection failed. Sleeping.")
            time.sleep(2)
            continue
        # Connection succeeded: clean up and stop retrying.
        conn.close()
        break
    session_factory = models.get_session_factory(engine)
def main():
    """Harvest events for the configuration named on the command line."""
    description = """\
    Harvest event from sources.

    Example: 'harvest deployment.ini'
    """
    usage = "usage: %prog config_uri"
    parser = optparse.OptionParser(
        usage=usage,
        description=textwrap.dedent(description)
    )
    options, args = parser.parse_args(sys.argv[1:])
    # An empty args list is the only failure case of `len(args) >= 1`.
    if not args:
        print('You must provide one argument')
        return 2
    config_uri = args[0]
    pyramid.paster.setup_logging(config_uri)
    env = bootstrap(config_uri)
    closer = env['closer']
    try:
        with transaction.manager:
            harvest()
    finally:
        closer()
def announce():
    """Send an announcement mail to every verified user with a password."""
    usage = "announce: %prog config_uri email_template"
    description = ("Send an announce to every user with a verified email "
                   "that has at least one password.")
    parser = optparse.OptionParser(usage=usage,
                                   description=textwrap.dedent(description))
    options, args = parser.parse_args(sys.argv[1:])
    if len(args) != 2:
        safe_print('You must provide two arguments. '
                   'The first one is the config file and the '
                   'second one is the email template.')
        return 2
    config_uri, email_template = args
    env = bootstrap(config_uri)
    settings, closer = env['registry'].settings, env['closer']
    try:
        request = env['request']
        public_url_root = settings['public_url_root']
        preferences_link = urlparse.urljoin(
            public_url_root, request.route_path('user_preferences'))
        tx = transaction.begin()
        mailer = get_mailer(request)
        for user in get_all_users_with_passwords_and_email():
            message = send_email(request, email_template, user,
                                 preferences_link)
            mailer.send(message)
        tx.commit()
    finally:
        closer()
def main(argv: List[str] = sys.argv) -> None:
    """Fetch amendements, with log verbosity driven by CLI flags."""
    args = parse_args(argv[1:])
    setup_logging(args.config_uri)
    # --debug wins over --verbose; default is WARNING.
    if args.debug:
        log_level = logging.DEBUG
    elif args.verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING
    logging.getLogger().setLevel(log_level)
    logging.getLogger("urllib3.connectionpool").setLevel(log_level)
    with bootstrap(args.config_uri,
                   options={"app": "zam_fetch_amendements"}) as env:
        settings = env["registry"].settings
        repository.load_data()
        try:
            fetch_amendements(args.chambre, args.num, args.progress, settings)
        finally:
            transaction.abort()
def main(argv=sys.argv):
    """Set up the database: optional admin server, models, and admin user.

    Bootstraps the app from the parsed config URI, best-effort configures the
    admin engine, then creates models and (if missing) the 'admin' user inside
    the request transaction manager.
    """
    args = parse_args(argv)
    setup_logging(args.config_uri)
    env = bootstrap(args.config_uri)
    settings = env['request'].registry.settings
    try:
        engine_admin = models.get_engine(settings, prefix='sqlalchemy_admin.')
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Admin-engine setup stays deliberately
        # best-effort: skip it when the admin engine is not configured.
        pass
    else:
        setup_server(engine_admin, settings, args.delete_existing)
    with env['request'].tm:
        dbsession = env['request'].dbsession
        setup_models(dbsession, args.config_uri)
        admin_exists = dbsession.query(models.User).filter(
            models.User.name == 'admin').count()
        if not admin_exists:
            create_admin_user(dbsession, settings)
    print('Database setup complete.')
def main(): args = parser.parse_args() request = Request.blank('', base_url=args.base) env = paster.bootstrap(args.config_uri, request=request) request.root = env['root'] paster.setup_logging(args.config_uri) if not args.dry_run: if args.key is None: print 'No Mandrill API key specified.' parser.print_help() sys.exit(1) # Provide an opportunity to bail out. log.warning('Changes will be made and mail will be sent.') log.info('Waiting five seconds.') time.sleep(5) log.info('Collecting reserved users.') session = models.get_session(request) users = get_users(session, limit=args.limit) if args.dry_run: log.info('Skipping actions ignored by dry run.') success, error = users, [] else: log.info('Sending invitations to %d users.', len(users)) success, error = send_invitations(request, args.key, users) log.info('Marking users as invited.') mark_invited(session, success) log.info('%d succeeded / %d failed', len(success), len(error)) sys.exit(0)
def show_topics(): description = """ Lists the current topic URLs registered with the hub. Arguments: config_uri: the pyramid configuration to use for the hub Example usage: bin/show_topics etc/paster.ini#pushhub """ usage = "%prog config_uri" parser = optparse.OptionParser( usage=usage, description=textwrap.dedent(description) ) options, args = parser.parse_args(sys.argv[1:]) if not len(args) >= 1: print("You must provide a configuration file.") return config_uri = args[0] request = Request.blank('/', base_url='http://localhost/hub') env = bootstrap(config_uri, request=request) hub = env['root'] topics = [v for v in hub.topics.values()] print "Topic URLs:" print "-----------" for topic in topics: print "%s\t%s" % (topic.url, topic.timestamp) env['closer']()
def __call__(self, filename, options):
    """Bootstrap *filename*, load project/build config, and import themes,
    layer groups and WMS/WMTS layers. Returns a list of messages."""
    messages = []
    self.env = bootstrap(filename)
    # NOTE(review): yaml.load without an explicit Loader is deprecated and
    # unsafe on untrusted input — confirm project.yaml is trusted.
    with open("project.yaml") as f:
        self.package = yaml.load(f)
    self.config = get_config(".build/config.yaml")
    try:
        from c2cgeoportal.models import Theme, LayerGroup, \
            LayerWMS, LayerWMTS
        self._import(Theme, messages)
        self._import(LayerGroup, messages)
        self._import(LayerWMS, messages, self._import_layer_wms)
        self._import(LayerWMTS, messages, self._import_layer_wmts)
    except ProgrammingError as e:
        # Typically happens mid-upgrade, before migrations have run.
        print(colorize(
            "ERROR: The database is probably not up to date "
            "(should be ignored when happen during the upgrade)", RED))
        print(colorize(e, RED))
    return messages
# NOTE(review): fragment — the enclosing option-parsing loop starts before
# this chunk and the body of the trailing `for result in results:` continues
# after it. Kept byte-identical: a safe reformat needs the missing context.
# BUG(review): `('-p,' '--package')` is missing a comma, so the two literals
# concatenate into the single string '-p,--package' and `-p` never matches —
# confirm and fix upstream.
if k in ('-l', '--latest'): latest = True if k in ('-h', '-?', '--help'): usage() if k in ('-p,' '--package'): package = v if k in ('-s', '--set-db-version'): try: set_db_version = int(v) if set_db_version < 0: raise Exception except: usage('Bad version number %s' % v) setup_logging(config_uri) env = bootstrap(config_uri) root = env['root'] registry = env['registry'] try: results = evolve_packages( registry, root, package=package, set_db_version=set_db_version, latest=latest, ) except Exception as e: usage(repr(e)) for result in results:
# Build (or clear) a BGP hijack attack scenario.
#
# Parses getopt-style options: either a full scenario description
# (--config-file, --scenario-name, --topology, --attacker, --affected-area,
# --target, --attack-type plus --all-paths/--number-of-shortest-paths) or an
# existing --scenario-id. On a valid combination it bootstraps the Pyramid
# app, runs AttackScenario.attack_scenario() inside the request transaction
# manager, and clears the database rows for --scenario-id afterwards.
# Prints usage and exits 2 on bad options; prints 'Database error' on
# OperationalError. Kept byte-identical: the duplicated multi-line help
# literals make a reformat too error-prone to verify.
def main(argv=sys.argv[1:]): try: opts, args = getopt.getopt(argv, "h", [ "config-file=", "scenario-id=", "scenario-name=", "scenario-description=", "topology=", "attacker=", "affected-area=", "target=", "attack-type=", "all-paths", "number-of-shortest-paths=" ]) except getopt.GetoptError: print( '\n' 'ERROR\n' 'Usage: MiniSecBGP_hijack_attack_scenario [options]\n' '\n' 'options (with examples):\n' '\n' '-h this help\n' '\n' '--config-file="minisecbgp.ini" pyramid config filename [.ini]\n' '--scenario-name="Test topology" the name that will be used to identify this scenario\n' '--scenario-description="date 20200729" the scenario description\n' '--topology=3 the topology used as the original base of the scenario\n' '--attacker=[65001,65002] define which AS(s) will be the attacker\n' '--affected-area=[65001,65003] define which these AS(s) will receive and accept the hijacked routes\n' '--target=[\'10.0.0.0/24\',\'20.0.0.0/24\'] define the prefix(s) and mask(s) that will be hijacked by the attacker(s)\n' '--attack-type=attraction|interception if the attack is an attraction attack or an interception attack\n' '--all-paths or --number-of-shortest-paths=[1..999] number of valid paths between the attacker AS, affected AS and target AS\n' '\n' 'or\n' '\n' '--scenario-id=16 scenario ID\n' ) sys.exit(2) config_file = scenario_id = scenario_name = scenario_description = topology = attacker = \ affected_area = target = attack_type = number_of_shortest_paths = '' for opt, arg in opts: if opt == '-h': print( '\n' 'HELP\n' 'Usage: MiniSecBGP_hijack_attack_scenario [options]\n' '\n' 'options (with examples):\n' '\n' '-h this help\n' '\n' '--config-file="minisecbgp.ini" pyramid config filename [.ini]\n' '--scenario-name="Test topology" the name that will be used to identify this scenario\n' '--scenario-description="date 20200729" the scenario description\n' '--topology=3 the topology used as the original base of the scenario\n' '--attacker=[65001,65002] define which AS(s) will be 
the attacker\n' '--affected-area=[65001,65003] define which these AS(s) will receive and accept the hijacked routes\n' '--target=[\'10.0.0.0/24\',\'20.0.0.0/24\'] define the prefix(s) and mask(s) that will be hijacked by the attacker(s)\n' '--attack-type=attraction|interception if the attack is an attraction attack or an interception attack\n' '--all-paths or --number-of-shortest-paths=[1..999] number of valid paths between the attacker AS, affected AS and target AS\n' '\n' 'or\n' '\n' '--scenario-id=16 scenario ID\n' ) sys.exit() if opt == '--config-file': config_file = arg elif opt == '--scenario-id': scenario_id = arg elif opt == '--scenario-name': scenario_name = arg elif opt == '--scenario-description': scenario_description = arg elif opt == '--topology': topology = arg elif opt == '--attacker': attacker = arg elif opt == '--affected-area': affected_area = arg elif opt == '--target': target = arg elif opt == '--attack-type': attack_type = arg elif opt == '--all-paths': number_of_shortest_paths = '0' elif opt == '--number-of-shortest-paths': number_of_shortest_paths = arg if (config_file and scenario_name and topology and attacker and affected_area and target and attack_type and number_of_shortest_paths) \ or (config_file and scenario_id): args = parse_args(config_file) setup_logging(args.config_uri) env = bootstrap(args.config_uri) try: with env['request'].tm: dbsession = env['request'].dbsession aa = AttackScenario(dbsession, scenario_id, scenario_name, scenario_description, topology, attacker, affected_area, target, attack_type, number_of_shortest_paths) aa.attack_scenario() if scenario_id: clear_database(dbsession, scenario_id) except OperationalError: print('Database error') else: print( '\n' 'Usage: MiniSecBGP_hijack_attack_scenario [options]\n' '\n' 'options (with examples):\n' '\n' '-h this help\n' '\n' '--config-file="minisecbgp.ini" pyramid config filename [.ini]\n' '--scenario-name="Test topology" the name that will be used to identify this 
scenario\n' '--scenario-description="date 20200729" the scenario description\n' '--topology=3 the topology used as the original base of the scenario\n' '--attacker=[65001,65002] define which AS(s) will be the attacker\n' '--affected-area=[65001,65003] define which these AS(s) will receive and accept the hijacked routes\n' '--target=[\'10.0.0.0/24\',\'20.0.0.0/24\'] define the prefix(s) and mask(s) that will be hijacked by the attacker(s)\n' '--attack-type=attraction|interception if the attack is an attraction attack or an interception attack\n' '--all-paths or --number-of-shortest-paths=[1..999] number of valid paths between the attacker AS, affected AS and target AS\n' '\n' 'or\n' '\n' '--scenario-id=16 scenario ID\n' )
"{0}/make_graphical_output.R --slave --vanilla --args "+\ "{1} {2} {3} {4}")\ .format(CDSCRIPTS,f4p,f5p,gene,f6p) if v: print "running cmd:" if v: print " {0}".format(cmd) prc = spc.Popen(cmd, shell=True) out = prc.communicate() print "outval: {0}".format(out) if v: print "DONE" if v: print import transaction from pyramid.paster import bootstrap if __name__ == "__main__": env = bootstrap(sys.argv[1]) while 1: j = Session.query(Job)\ .filter(Job.files_failed == False)\ .filter(Job.files_ready == False)\ .join(Spacer)\ .group_by(Job.id)\ .having(func.sum(case([(Spacer.score == None,1)], else_=0)) == 0)\ .order_by(Job.id.desc()).first() if j: with transaction.manager: commence_file_io(j.id) time.sleep(2)
# Kinto CLI entry point: init / start / migrate / delete-collection / version.
#
# Builds an argparse tree (global --ini/-q/--debug plus one subparser per
# command), then dispatches on `which`: `init` writes a new config (prompting
# for a backend and pip-installing its extra when missing), `migrate` and
# `delete-collection` bootstrap the app and call the matching script,
# `start` delegates to pserve (with --reload/--port), `version` prints
# __version__. Returns 0 on success, 1 when `init` finds an existing config.
# Kept byte-identical: large literal surface and collapsed indentation make
# a verified reformat impractical here.
def main(args=None): """The main routine.""" if args is None: args = sys.argv[1:] parser = argparse.ArgumentParser(description="Kinto Command-Line " "Interface") # XXX: deprecate this option, unnatural as first argument. parser.add_argument('--ini', help='Application configuration file', dest='ini_file', required=False, default=DEFAULT_CONFIG_FILE) parser.add_argument('-q', '--quiet', action='store_const', const=logging.CRITICAL, dest='verbosity', help='Show only critical errors.') parser.add_argument('--debug', action='store_const', const=logging.DEBUG, dest='verbosity', help='Show all messages, including debug messages.') commands = ('init', 'start', 'migrate', 'delete-collection', 'version') subparsers = parser.add_subparsers(title='subcommands', description='Main Kinto CLI commands', dest='subcommand', help="Choose and run with --help") subparsers.required = True for command in commands: subparser = subparsers.add_parser(command) subparser.set_defaults(which=command) if command == 'init': subparser.add_argument('--backend', help='{memory,redis,postgresql}', dest='backend', required=False, default=None) subparser.add_argument('--host', help='Host to listen() on.', dest='host', required=False, default='127.0.0.1') elif command == 'migrate': subparser.add_argument('--dry-run', action='store_true', help='Simulate the migration operations ' 'and show information', dest='dry_run', required=False, default=False) elif command == 'delete-collection': subparser.add_argument('--bucket', help='The bucket where the collection ' 'belongs to.', required=True) subparser.add_argument('--collection', help='The collection to remove.', required=True) elif command == 'start': subparser.add_argument('--reload', action='store_true', help='Restart when code or config changes', required=False, default=False) subparser.add_argument('--port', type=int, help='Listening port number', required=False, default=DEFAULT_PORT) # Parse command-line arguments parsed_args = 
vars(parser.parse_args(args)) config_file = parsed_args['ini_file'] which_command = parsed_args['which'] # Initialize logging from level = parsed_args.get('verbosity') or DEFAULT_LOG_LEVEL logging.basicConfig(level=level, format=DEFAULT_LOG_FORMAT) if which_command == 'init': if os.path.exists(config_file): print("%s already exists." % config_file, file=sys.stderr) return 1 backend = parsed_args['backend'] if not backend: while True: prompt = ("Select the backend you would like to use: " "(1 - postgresql, 2 - redis, default - memory) ") answer = input(prompt).strip() try: backends = {"1": "postgresql", "2": "redis", "": "memory"} backend = backends[answer] break except KeyError: pass init(config_file, backend, parsed_args['host']) # Install postgresql libraries if necessary if backend == "postgresql": try: import psycopg2 # NOQA except ImportError: import pip pip.main(['install', "kinto[postgresql]"]) elif backend == "redis": try: import kinto_redis # NOQA except ImportError: import pip pip.main(['install', "kinto[redis]"]) elif which_command == 'migrate': dry_run = parsed_args['dry_run'] env = bootstrap(config_file) scripts.migrate(env, dry_run=dry_run) elif which_command == 'delete-collection': env = bootstrap(config_file) return scripts.delete_collection(env, parsed_args['bucket'], parsed_args['collection']) elif which_command == 'start': pserve_argv = ['pserve', config_file] if parsed_args['reload']: pserve_argv.append('--reload') pserve_argv.append('http_port=%s' % parsed_args['port']) pserve.main(pserve_argv) elif which_command == 'version': print(__version__) return 0
def _callFUT(self, config_uri, request=None):
    """Call the function under test: pyramid.paster.bootstrap."""
    from pyramid.paster import bootstrap
    result = bootstrap(config_uri, request)
    return result
if __name__ == "__main__":
    # Populate exon tables for a genome from the chosen source, then index.
    parser = argparse.ArgumentParser()
    parser.add_argument('--genome', '-g', dest="genome_name", default="hg19",
                        type=str, help="genome name [...]", required=True)
    parser.add_argument('--source', '-s', dest="source", default="ucsc",
                        type=str, help="source ensembl gtf / ucsc tsv",
                        required=True)
    parser.add_argument('inifile')
    args = parser.parse_args()
    env = bootstrap(args.inifile)
    # Dispatch table replaces the if/elif chain; unknown sources load nothing.
    loaders = {
        "ucsc": populate_exons,
        "ensembl": populate_exons_ensembl,
        "blank": populate_exons_blank,
    }
    loader = loaders.get(args.source)
    if loader is not None:
        loader(args.genome_name)
    # NOTE(review): indexes are built for every source (dedent ambiguous in
    # the collapsed source) — confirm create_indexes is unconditional.
    create_indexes(args.genome_name)