示例#1
0
def main():
    """Dump a ZODB subtree to a filesystem directory.

    Expects a Pyramid config_uri as the sole positional argument;
    ``--source`` selects the ZODB path, ``--dest`` the output directory.
    """
    parser = OptionParser(description=__doc__)
    parser.add_option('-s', '--source', dest='source',
        action="store", default='/', metavar='ZODB-PATH',
        help="The ZODB source path to dump (e.g. /foo/bar or /)")
    parser.add_option('-d', '--dest', dest='dest',
        action="store", default='dump', metavar='FILESYSTEM-PATH',
        help="The destination filesystem path to dump to.")

    options, args = parser.parse_args()

    if not args:
        parser.error("Requires a config_uri as an argument")
    config_uri = args[0]

    dest_path = os.path.expanduser(os.path.normpath(options.dest))

    setup_logging(config_uri)
    env = bootstrap(config_uri)

    # Resolve the requested ZODB path to its context object, then dump it.
    context = traverse(env['root'], options.source)['context']
    dump(context, dest_path)
示例#2
0
文件: app.py 项目: dmdm/PySite
    def init_app(self, args):
        """
        Initialises the Pyramid application.

        Loads config settings from the INI file named by ``args.config``,
        merges in the runtime configuration (``Rc``), initialises the
        SQLAlchemy model layer and the virtual-mail subsystem.

        :param args: parsed CLI namespace; only ``args.config`` is read here.
        :raises KeyError: if the INI settings lack an ``environment`` key.
        """
        self._args = args
        setup_logging(self._args.config)
        settings = get_appsettings(self._args.config)

        if 'environment' not in settings:
            raise KeyError('Missing key "environment" in config. Specify '
                'environment in paster INI file.')
        # Runtime config rooted one directory above this module's package.
        rc = Rc(environment=settings['environment'],
            root_dir=os.path.abspath(
                os.path.join(os.path.dirname(__file__), '..')
            )
        )
        rc.load()
        # Rc values override/extend the INI settings; keep a handle to rc too.
        settings.update(rc.data)
        settings['rc'] = rc

        pysite.models.init(settings, 'db.pysite.sa.')

        self._rc = rc
        self._settings = settings

        pysite._init_vmail(rc)
def main(argv=sys.argv):
    """Create the schema and exercise the Message/DelMessage models.

    Seeds sample rows inside one managed transaction and deletes them
    again, leaving an empty but initialised database.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        model = MyModel(name='one', value=1)
        DBSession.add(model)

        message1 = Message(msgid='c1', content='yoloswag1', timestamp='5.11.2015')
        message2 = Message(msgid='c2', content='yoloswag2', timestamp='5.11.2015')
        for message in (message1, message2):
            DBSession.add(message)

        delmessage = DelMessage(msgid='c1', timestamp='6.11.2015')
        DBSession.add(delmessage)

        # Remove everything again in insertion order.
        for row in (model, message1, message2, delmessage):
            DBSession.delete(row)
示例#4
0
def install_start(request):
    """Collect installer status for the install start page.

    Probes internet connectivity, the celery worker and GPG availability,
    and returns a dict of template variables.

    Fixes vs. original:
    - removed the unreachable ``if False:`` database-init block (dead code);
    - bare ``raise`` instead of ``raise serr`` to preserve the traceback;
    - ``page['celery']`` is now set to False on ECONNREFUSED instead of
      being left unset (avoids a KeyError in the template).
    """
    page = {'editor': False, 'allerts': '', 'banners': [['/static/images/lerni.png', 'Lerni Logo']]}
    page['page_title'] = "Lerni - instalacja"  # ZSO nr 15 w Sosnowcu
    page['internet_on'] = internet_on()
    try:
        # Round-trip a trivial task (4+4) to confirm the celery worker answers.
        page['celery'] = 8 == task.delay(4, 4).wait(timeout=2, propagate=True, interval=0.5)
    except celery.exceptions.TimeoutError:
        page['celery'] = False
    except socket_error as serr:
        if serr.args[0][0] != errno.ECONNREFUSED:
            raise  # unexpected socket error: re-raise with original traceback
        # Broker refused the connection: worker infrastructure is down.
        page['celery'] = False
    try:
        # Instantiating GPG with this home dir raises ValueError when the
        # gpg binary / home directory is unusable.
        gnupg.GPG(gnupghome='GPG')
        page['gpg'] = True
    except ValueError:
        page['gpg'] = False
    page['update_available'] = False
    return page
def main(argv=sys.argv):
    """Initialise the database: run alembic migrations for the Ziggurat
    tables, then create the application schema and insert initial data.

    Fix vs. original: the temporary alembic.ini is written via a
    with-statement so the handle is closed even if the write fails.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    mkdir(settings['static_files'])
    # Create Ziggurat tables: materialise a temporary alembic.ini pointing
    # at the configured database, unless one already exists.
    alembic_ini_file = 'alembic.ini'
    if not os.path.exists(alembic_ini_file):
        alembic_ini = ALEMBIC_CONF.replace('{{db_url}}',
                                           settings['sqlalchemy.url'])
        with open(alembic_ini_file, 'w') as f:
            f.write(alembic_ini)
    bin_path = os.path.split(sys.executable)[0]
    alembic_bin = os.path.join(bin_path, 'alembic')
    command = '%s upgrade head' % alembic_bin
    os.system(command)
    # NOTE(review): this also removes a pre-existing alembic.ini, matching
    # the original behaviour — confirm that is intended.
    os.remove(alembic_ini_file)
    # Insert data
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    init_model()
    create_schemas(engine)
    Base.metadata.create_all(engine)
    initial_data.insert()
    transaction.commit()
示例#6
0
def main(argv=sys.argv):
    """Bootstrap logging and the model layer from a config file argument."""
    if len(argv) != 2:
        usage(argv)
    config_file = argv[1]
    setup_logging(config_file)
    init_model(get_appsettings(config_file))
示例#7
0
def main(argv=sys.argv):
    """Create the demo content database: a root folder holding one
    sub-folder which contains a single document.
    """
    # Usage and configuration
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    config = Configurator(settings=settings)
    config.include('pyramid_sqlalchemy')

    # Make the database with schema and default data
    with transaction.manager:
        metadata.create_all()
        root = RootFolder(
            name='',
            title='Moonbase Demo',
            __acl__=[['Allow', ['paul'], 'view']],
        )
        Session.add(root)
        folder_one = Folder(
            title='Folder 1',
            __acl__=[['Allow', ['shane'], 'view']],
        )
        root['f1'] = folder_one
        folder_one['da'] = Document(title='Document 1A')
示例#8
0
def main(argv=sys.argv):
    """Create a new Board from --title/--slug options and report it."""
    config_uri = argv[1]
    remaining = argv[2:]

    parser = optparse.OptionParser(usage=USAGE, description=DESCRIPTION)
    parser.add_option('-t', '--title', dest='title', type='string')
    parser.add_option('-s', '--slug', dest='slug', type='string')
    options, args = parser.parse_args(remaining)

    if options.title is None:
        parser.error('You must provide at least --title')

    # Default slug: lowercased title with spaces turned into underscores.
    slug = options.slug
    if slug is None:
        slug = options.title.lower().replace(' ', '_')

    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    with transaction.manager:
        board = Board(title=options.title, slug=slug)
        DBSession.add(board)
        DBSession.flush()
        print(("Successfully added %s (slug: %s)" %
               (board.title, board.slug)))
示例#9
0
def main(argv=sys.argv):
    """Create the schema and load agency rows from ``agencies.csv``.

    The file is tab-separated with columns:
    agencyid, shortname, longname, type, description, websiteurl.

    BUG FIX: the original called ``transaction.commit()`` after every row
    *inside* the ``transaction.manager`` block; the first commit ends the
    managed transaction, so later iterations and the manager's own exit
    commit operate on a dead transaction.  The context manager now performs
    the single commit on exit.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        with open('agencies.csv', 'r') as f:
            agencies = f.read().split('\n')

        for agency in agencies:
            if agency.strip() == '':
                continue  # skip blank lines
            # agencyid, shortname, longname, type, description, websiteurl
            parts = agency.split('\t')
            agency_type = AgencyTypes.get_from_code(DBSession, parts[3])
            DBSession.add(Agencies(
                agency_code=parts[1],
                agency_name=parts[2],
                type_id=agency_type.id,
                description=parts[4],
                website=parts[5],
            ))
def main(argv=sys.argv):
    """Load kitten records from a JSON file, download each image and store
    everything in the database.

    BUG FIX: the JSON file is opened via a with-statement instead of the
    handle-leaking ``json.load(open(...))``.
    """
    if len(argv) < 3:
        usage(argv)
    config_uri = argv[1]
    json_path = argv[2]
    options = parse_vars(argv[3:])
    setup_logging(config_uri)
    # Configure the application, so we can access the registry.
    env = bootstrap(config_uri, options=options)
    # Generate a DBSession using the sessionmaker:
    DBSession = env['registry']['db_sessionmaker']()
    # The SQLAlchemy engine is accessible as the session's bind.
    engine = DBSession.bind
    Base.metadata.create_all(engine)
    with open(json_path) as json_file:
        json_data = json.load(json_file)
    with transaction.manager:
        for kitten_data in json_data:
            kitten = Kitten(source_url=kitten_data['source_url'],
                            credit=kitten_data['credit'])
            r = requests.get(kitten_data['download_url'])
            # Map the served content type to a file extension; other types
            # leave file_extension unset (original behaviour preserved).
            if r.headers['content-type'] == 'image/jpeg':
                kitten.file_extension = '.jpeg'
            elif r.headers['content-type'] == 'image/png':
                kitten.file_extension = '.png'
            kitten.file_data = r.content
            DBSession.add(kitten)
    # Not strictly necessary, as everything gets unwound when main returns anyway.
    # But it's a good habit to keep.
    env['closer']()
示例#11
0
def bootstrap_script(argv):
    """Configure logging from the config_uri argument and return the
    application settings dict."""
    if len(argv) != 2:
        usage(argv)
    uri = argv[1]
    setup_logging(uri)
    return get_appsettings(uri)
示例#12
0
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    config = Configurator(settings=settings)
    setup_logging(global_config['__file__'])

    # Renderers and static assets.
    config.add_renderer('jsonp', JSONP(param_name='callback'))
    config.add_renderer('ujson', factory='app.renderers.UjsonRenderer')
    config.add_static_view('static', 'static', cache_max_age=3600)

    # initialise a connection to mongo on startup and store the client
    #  in the registry which will be injected into each request
    conf = appConfig(config.registry.settings.get('app.config'))
    config.registry.app_config = conf.app_config
    config.registry.app_config['mongodb']['client'] = init_mongodb_connection(config.registry.app_config['mongodb'])

    # Route table, registered in declaration order.
    routes = [
        ('home', '/'),
        ('health-check', '/health-check'),
        ('network-stats', '/stats/{code}/{explore}'),
        ('network-build', '/network/{code}/{explore}'),
        ('network-build-status', '/network/{code}/{explore}/status'),
        ('entity-data', '/entity/{code}/data'),
        ('entity-build', '/entity/{code}/{id}'),
        ('entity-build-status', '/entity/{code}/{id}/status'),
        ('convert-graph', '/convert/{code}'),
    ]
    for route_name, pattern in routes:
        config.add_route(route_name, pattern)

    config.scan()
    return config.make_wsgi_app()
示例#13
0
def main(argv=sys.argv):
    """Rebuild the genre/music sample database and seed a small genre tree.

    BUG FIX: the original body mixed hard tabs and runs of spaces for
    indentation, which is a TabError under Python 3; indentation is
    normalised to 4 spaces throughout.
    NOTE(review): one original seed string contained a racial slur; it has
    been replaced with a neutral description.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        hiphop = Genre(id=0, name='HipHop', info='Hip hop genre',
                       year_of_found=1990)
        DBSession.add(hiphop)

        glitchhop = Genre(id=1, name='GlitchHop', info='Nu vaaassheee',
                          year_of_found=2014, parent_genre=hiphop)
        DBSession.add(glitchhop)

        grime = Genre(id=2, name='Grime', info='Zhostkiy rap',
                      year_of_found=2006, parent_genre=hiphop)
        DBSession.add(grime)

        dark_grime = Genre(id=3, name='Dark Grime', info='Tyomniy Zhostkiy rap',
                           year_of_found=2009, parent_genre=grime)
        DBSession.add(dark_grime)

        music1 = Music(author='Simplex_Method', title='Of Literal Data',
                       year=2015, genre=grime)
        DBSession.add(music1)
示例#14
0
def main(argv=sys.argv):
    """Create the schema and seed one RasPi record, one tag and one user.

    BUG FIX: ``DBSession.flush()`` now runs inside the managed transaction;
    the original flushed after the ``with transaction.manager`` block had
    already committed, where it has no effect.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        model = RasPi()
        model.uuid = '11:22:33:44:55:66'
        model.description = "Testing Pi"
        model.url = "http://www.facebook.com"
        model.orientation = 0
        model.browser = True
        model.lastseen = datetime.now()  # NOTE(review): naive local time
        DBSession.add(model)

        tag = Tags()
        tag.uuid = '11:22:33:44:55:66'
        tag.tag = 'test'
        DBSession.add(tag)

        User = UserModel()
        User.email = '*****@*****.**'
        User.AccessLevel = 2
        DBSession.add(User)

        DBSession.flush()
示例#15
0
def main(argv=sys.argv):
    """Emit INI fragments mapping stacks to sites and RDS instances.

    BUG FIX: Python 2 ``print`` statements converted to single-argument
    ``print()`` calls, which behave identically on Python 2 and Python 3.
    """
    global logger, settings, ca, ri, aws_access, aws_secret, thekey

    if len(argv) != 2:
        usage(argv)

    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)

    # TODO: need to add settings to define where the files will be

    logger = logging.getLogger('scripts')

    # make configurable?
    site_blacklist = set(['MedPass', 'Bridge'])

    print("# Stack to site info generated by build_ini_info script")
    stackmap = tools.parse_sites()
    for stack, sites in stackmap.items():
        clean = set(sites) - site_blacklist
        print("stacks.%s = %s" % (stack, ', '.join(clean)))

    print("# RDS to site info generated by build_ini_info script")
    rdsmap = tools.parse_rds()
    # dbs.a4.gentivalink = a4-css-sup-cleanup-rds
    for stack, sites in rdsmap.items():
        for site, rdses in sites.items():
            for rds in rdses:
                print("dbs.%s.%s = %s" % (stack, site, rds))
示例#16
0
def main(argv=sys.argv):
    """Drop and recreate the schema, then seed the admin user and the
    default ``Settings`` rows.

    Python 2 only: relies on ``iteritems``, ``unicode`` and ``basestring``.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        user = User(username='******', password='******', email=settings.get('admin_mail'), activated=True,
                    groups='admin')
        DBSession.add(user)
        DBSession.flush()
        # Copy the class-level defaults into Settings rows.
        for k, v in Settings.settings.iteritems():
            if type(v) == dict:
                # Dict defaults carry the actual value under 'value'.
                DBSession.add(Settings(key=unicode(k), value=unicode(v['value'])))
            else:
                # 'app_config.<key>' values are indirected through the INI settings.
                if isinstance(v, basestring) and v.startswith('app_config.'):
                    v = settings.get(v[len('app_config.'):])
                DBSession.add(Settings(key=unicode(k), value=unicode(v)))

        DBSession.flush()
示例#17
0
def main(argv=sys.argv):
    """Rebuild the sensor database and seed it with fake moisture readings.

    Creates ``nb_sensors`` sensors, each with ``nb_readings`` historical
    readings spaced ``minutes_between_readings`` minutes apart, ending at
    the current time.  (Fixes a stray semicolon and non-idiomatic spacing.)
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    # Delete old data
    Base.metadata.drop_all(engine)

    # Create data
    Base.metadata.create_all(engine)
    with transaction.manager:
        nb_sensors = 5
        nb_readings = 500
        minutes_between_readings = 10
        for i in range(nb_sensors):
            sensor = Sensor(id=i + 1, type='Moisture')
            DBSession.add(sensor)
            first_dttm = datetime.datetime.now()
            # Oldest reading first: j counts down so the offset shrinks.
            for j in range(nb_readings, 0, -1):
                reading = Reading(
                    sensor_id=sensor.id,
                    timestamp=first_dttm - datetime.timedelta(
                        minutes=minutes_between_readings * j),
                    value=random.randrange(1023)
                )
                DBSession.add(reading)
示例#18
0
文件: rtd.py 项目: baloon11/npui
def main(argv=sys.argv):
    """Bootstrap the real-time daemon from a config file and run it."""
    if len(argv) != 2:
        usage(argv)
    ini_file = argv[1]
    setup_logging(ini_file)
    settings = get_appsettings(ini_file)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    cache.cache = cache.configure_cache(settings)

    config = Configurator(
        settings=settings,
        root_factory=RootFactory,
        locale_negotiator=locale_neg
    )
    config.add_route_predicate('vhost', VHostPredicate)
    config.add_view_predicate('vhost', VHostPredicate)

    # Load the core module plus everything enabled in the configuration.
    module_manager = config.registry.getUtility(IModuleManager)
    module_manager.load('core')
    module_manager.load_enabled()

    rt_service = rt.configure(module_manager, config.registry)
    wsgi_app = rt_service.app()
    rt.run(rt_service, wsgi_app)
示例#19
0
def update(argv=sys.argv):
    """
    Download the GeoIP database from the URL provided under the config key
    `geoip.city.source` and save it under the path provided by the config key
    `geoip.city.destination`.

    Fixes vs. original: "Writting" typo in the log messages, and the
    GzipFile is closed deterministically via the with-statement.
    """
    if len(argv) != 2:
        usage(argv)

    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    source = settings['geoip.city.source']
    output = settings['geoip.city.destination']
    log.info("Downloading %s...", source)
    response = requests.get(source)
    log.info("Downloading done.")

    # Decompress the gzipped payload in memory.  NOTE(review): StringIO
    # implies Python 2 here; Python 3 would need io.BytesIO instead.
    with gzip.GzipFile(fileobj=StringIO(response.content)) as compressed, \
            open(output, "wb") as f:
        log.info("Writing to %s...", output)
        f.write(compressed.read())
        log.info("Writing done.")
示例#20
0
def main(argv=sys.argv):
    """Create the schema and seed the default document type, the system
    user and the default scraper, printing the scraper's id."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    opts = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=opts)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)

    # Built-in document type.
    DocumentTypes.add(
        name="Adobe PDF",
        description="Adobe PDF file",
        mime_type="application/pdf",
    )

    # System account that owns the default scraper.
    system_owner = Users.add(
        first="SYSTEM",
        last="USERS",
        email="system@localhost",
        password="******",
    )

    default_scraper = Scrapers.add(
        name="Default Scraper",
        description="CivicDocs.IO loads with a single, defualt scraper.",
        owner_id=system_owner.id,
    )

    print("DEFAULT SCRAPER ID:\r\n{0}\r\n".format(default_scraper.id))
    def run(self, argv):
        """Fetch snow-clearing planifications for a date from the Montreal
        InfoNeige SOAP service and push street-side states to CartoDB.

        argv: [prog, config_uri, fromDate, key=value ...]
        """
        if len(argv) < 3:
            self.usage(argv)
        config_uri = argv[1]
        fromDate = argv[2]
        options = parse_vars(argv[3:])
        setup_logging(config_uri)
        settings = get_appsettings(config_uri, options=options)
        engine = engine_from_config(settings, 'sqlalchemy.')

        DBSession.configure(bind=engine)
        Base.metadata.create_all(engine)
        with transaction.manager:
            # SOAP endpoint for the InfoNeige web service (suds client).
            url = 'https://servicesenligne2.ville.montreal.qc.ca/api/infoneige/InfoneigeWebService?wsdl'
            client = Client(url)
            planification_request = client.factory.create('getPlanificationsForDate')
            planification_request.fromDate = fromDate
            # NOTE(review): hard-coded API token embedded in source — should
            # live in configuration/secrets instead.
            planification_request.tokenString =  'ug33-b81ab488-c335-4021-9c52-26d6b8523301-e7aa002b-0d9d-4b5c-81ef-b012979cdafb-dab06588-1962-4b16-9942-a18054094f60-a4186179-d555-4fed-b35f-ec0c74da97a3-aa3b3766-4d26-42f0-888a-a6569a1dd745'
            response = client.service.GetPlanificationsForDate(planification_request)
            # responseStatus == 0 means success (per the branches below).
            if response['responseStatus'] == 0:
                log.info('%s plannings returned', response['planifications']['count'])
                cartodb_client = CartoDBOAuth(settings['cartodb.key'], settings['cartodb.secret'], settings['cartodb.user'], settings['cartodb.password'], settings['cartodb.domain'])

                for result in response['planifications']['planification']:
                    '''
                    street_side_status = StreetSideHistory(
                        municipality_id = result['munid'],
                        street_side_id = result['coteRueId'],
                        state = result['etatDeneig'],
                        observed_on = result['dateMaj'],
                        )
                    DBSession.merge(street_side_status)
                    '''
                    # Normalise optional replanification fields to None when
                    # absent from the suds result object.
                    if any(val in result for val in ['dateDebutPlanif', 'dateFinPlanif', 'dateDebutReplanif', 'dateFinReplanif']):
                        try:
                            result['dateDebutReplanif']
                        except AttributeError:
                            result['dateDebutReplanif'] = None
                        try:
                            result['dateFinReplanif']
                        except AttributeError:
                            result['dateFinReplanif'] = None
                        '''
                        print result
                        planning = Planning(
                            municipality_id = result['munid'],
                            street_side_id = result['coteRueId'],
                            planned_start_date = result['dateDebutPlanif'],
                            planned_end_date = result['dateFinPlanif'],
                            replanned_start_date = result['dateDebutReplanif'],
                            replanned_end_date = result['dateFinReplanif'],
                            modified_on = result['dateMaj'],
                            )
                        DBSession.merge(planning)
                        '''
                    #transaction.manager.commit()
                    # NOTE(review): SQL built by %-interpolation — verify
                    # etatDeneig/coteRueId are trusted ints before reuse.
                    cartodb_client.sql('UPDATE cote SET etat = %(etat)s WHERE cote_rue_id = %(cote_rue_id)d' %
                        {"etat": result['etatDeneig'], "cote_rue_id": result['coteRueId']})
            else:
                log.info('Status %s: %s', response['responseStatus'], response['responseDesc'])
示例#22
0
def main(argv=sys.argv):
    """Create the store schema and load suppliers and bargains from the
    bundled ``store_data.json`` file.

    BUG FIX: the data file is opened via a with-statement instead of the
    handle-leaking ``open(...).read()``.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    # Substitute real credentials from the environment into the URL template.
    connect_string = settings['sqlalchemy.url']\
        .replace('DBUser', os.environ['DBUSER'])\
        .replace('DBPassword', os.environ['DBPASSWORD'])
    settings['sqlalchemy.url'] = connect_string
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        with open('emporium/scripts/store_data.json') as data_file:
            store_data = json.loads(data_file.read())

        # Suppliers first, keyed by name so bargains can reference them.
        suppliers = {}
        for item in store_data['suppliers']:
            supplier = Supplier(name=item['name'], tax_id=item['tax_id'])
            suppliers[supplier.name] = supplier
            DBSession.add(supplier)

        for item in store_data['bargains']:
            bargain = Bargain(
                sku=item['sku'],
                price=item['price'],
                supplier=suppliers[item['supplier_name']]
            )
            bargain.info = item['info']
            DBSession.add(bargain)
示例#23
0
def main(argv=sys.argv):
    """Seed the testscaffold database with an admin group/user plus a
    second group containing 24 test users."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, name="testscaffold", options=options)

    engine = get_engine(settings)
    session_factory = get_session_factory(engine)
    dbsession = get_tm_session(session_factory, transaction.manager)

    with transaction.manager:
        admin_user = User(user_name="admin", email="foo@localhost")
        UserService.set_password(admin_user, "admin")
        admins = Group(group_name="Administrators")
        root_perm = GroupPermission(perm_name="root_administration")
        dbsession.add(admins)
        admins.permissions.append(root_perm)
        admins.users.append(admin_user)

        other_group = Group(group_name="Other group")
        dbsession.add(other_group)
        for idx in range(1, 25):
            member = User(user_name="test{}".format(idx),
                          email="foo{}@localhost".format(idx))
            UserService.set_password(member, "test")
            other_group.users.append(member)
def main(argv=sys.argv):
    """Create the schema and run the ordered greenhouse initialisers.

    The flush calls between steps presumably make generated primary keys
    visible to later initialisers — TODO confirm against the init_* helpers.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)

    engine = engine_from_config(settings, 'sqlalchemy.', echo=False)
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)

    # populate databases
    with transaction.manager:
        print('\ninit_setpoint_interpolations:\n')
        init_setpoint_interpolations(DBSession)
        print('\ninit_parameter_types:\n')
        init_parameter_types(DBSession)
        print('\ninit_device_types:\n')
        init_device_types(DBSession)
        print('\ninit_periphery_controllers:\n')
        DBSession.flush()
        init_periphery_controllers(DBSession)
        DBSession.flush()
        print('\ninit_parameters:\n')
        init_parameters(DBSession)
        print('\ninit_devices:\n')
        init_devices(DBSession)
        print('\ninit_field_settings:\n')
        init_field_settings(DBSession)
        print('\ninit_regulators:\n')
        DBSession.flush()
        init_regulators(DBSession)
        init_log_diagrams(DBSession)
示例#25
0
def main(argv=sys.argv):
    """Create the schema and seed three user accounts."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, "sqlalchemy.")
    DBSession.configure(bind=engine)

    Base.metadata.create_all(engine)

    # NOTE(review): this first user is added OUTSIDE any transaction
    # manager — unless the session commits implicitly it may never be
    # persisted; confirm whether this block is intentional.
    user = User()
    user.set_password("wolfvo8491")
    user.user_name = "wolfv"
    user.email = "*****@*****.**"
    user.status = 1  # Not verified yet

    DBSession.add(user)

    with transaction.manager:
        user = User()
        user.user_name = "wolf"
        user.email = "*****@*****.**"
        user.set_password("wolf")
        user.status = 1
        DBSession.add(user)

        user = User()
        user.user_name = "peter"
        user.email = "*****@*****.**"
        user.set_password("p")
        user.status = 1
        DBSession.add(user)
示例#26
0
def listuserinfo():
    """Look up a user by name and print their TOTP secret and barcode URL.

    Python 2 only (print statements).  Reads --config/--username from the
    module-level ``listuserparser``.
    """
    arguments = listuserparser.parse_args()
    if not arguments.config or not arguments.username:
        listuserparser.print_usage()
    else:
        config_uri = arguments.config
        setup_logging(config_uri)
        # App section name differs between deployments: try 'factored',
        # fall back to 'main'.
        try:
            settings = get_appsettings(config_uri, 'factored')
        except LookupError:
            settings = get_appsettings(config_uri, 'main')
        engine = engine_from_config(settings, 'sqlalchemy.')
        DBSession.configure(bind=engine)
        session = DBSession()
        users = session.query(User).filter_by(
            username=arguments.username).all()
        if len(users) > 0:
            user = users[0]
            print 'username:%s, secret: %s' % (
                user.username, user.secret)
            print 'bar code url:', get_barcode_image(user.username,
                                                     user.secret,
                                                     settings['appname'])
        else:
            print '"%s" user not found' % arguments.username
示例#27
0
def main(argv=sys.argv):
    """Drop and recreate the TEST database named by a jenkins.ini config,
    stamp alembic, and repopulate fixture data.

    Python 2 only (print statements).  Refuses to run against anything but
    the hard-coded testing config to avoid wiping a real database.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    if config_uri not in ["jenkins.ini"]:
        print "You are not initialising using a testing URI"
        print "Edit init_testingdb.py if you really want to do this"
        sys.exit(0)
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, "sqlalchemy.")
    meta.Session.configure(bind=engine)
    Base.metadata.bind = engine

    # Create everything from scratch.
    print "Dropping Tables"
    Base.metadata.drop_all(engine)
    print "Creating Tables"
    Base.metadata.create_all(engine)

    # We also want any alembic scripts to be executed
    alembic_cfg = Config(config_uri)  # TODO: WARNING RELATIVE PATH
    command.stamp(alembic_cfg, "head")

    DBSession = meta.Session()
    # DBSession.configure(bind=engine)

    populateData.init_data(DBSession)
    populateUser()
    populatedata()
示例#28
0
 def __call__(self, argv=sys.argv):
     """Parse CLI args and dispatch to ``self.run``.

     Configures logging from args.config_uri when the command declares
     ``requires_config``.  NOTE(review): the ``argv=sys.argv`` default
     binds the list object at definition time — fine for scripts, but be
     aware when testing.
     """
     self.configure_parser()
     args = self.parser.parse_args(argv[1:])
     if self.requires_config:
         self.config_uri = args.config_uri
         setup_logging(self.config_uri)
     self.run(args)
示例#29
0
def main(argv=sys.argv): #pragma NOCOVER
    """Create the schema and seed two users, a journal, one post and a
    comment for the demo journal app."""
    if len(argv) != 2:
        usage(argv)

    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)

    with transaction.manager:
        # SECURITY NOTE(review): unsalted MD5 is not an acceptable password
        # hash; acceptable only because this seeds throwaway demo data.
        password = hashlib.md5('secret'.encode()).hexdigest()
        user = User(name='distractionbike', password_md5=password)
        DBSession.add(user)
        user2 = User(name='otherguy', password_md5=password)
        DBSession.add(user2)
        journal = Journal(name='distractionbike')
        DBSession.add(journal)
        post = Post(journal_name='distractionbike',
                    title='First Post',
                    lede='Why do we love first posts so much?',
                    text='This is the <i>very first post</i>.')
        DBSession.add(post)
        post.add_comment(text='Yay!', user_id='ezra')
示例#30
0
def main(argv=sys.argv):
    """Create the schema, seed a demo user/site pair with an API key and
    two aspect ratios, then stamp the database at alembic head."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)

    with transaction.manager:
        demo_user = User(email='*****@*****.**', password='******', admin=True)
        DBSession.add(demo_user)
        demo_site = Site(name='asd', key='80d621df066348e5938a469730ae0cab')
        DBSession.add(demo_site)
        demo_site.api_keys.append(SiteAPIKey(key='GIKfxIcIHPbM0uX9PrQ1To29Pb2on0pa'))
        demo_site.users.append(demo_user)

        # Register the two supported crop aspect ratios (1:1 and 3:1).
        for width, height in ((1, 1), (3, 1)):
            demo_site.aspect_ratios.append(SiteAspectRatio(width=width, height=height))

    # Mark the freshly created database as being at the latest revision.
    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config("alembic.ini")
    command.stamp(alembic_cfg, "head")
示例#31
0
def main(argv=sys.argv):
    """Drop and recreate the schema, then load near-Earth-object records
    from SUPER_DICT into the Size, Distance, AbsoluteMag and Orbit tables.

    Only Earth approaches dated 2016-2018 whose miss distance (rounded to
    two decimals) is at most 14 lunar distances are imported.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    # Allow the deployment environment to override the configured DB URL.
    if os.environ.get('DATABASE_URL', ''):
        settings["sqlalchemy.url"] = os.environ["DATABASE_URL"]
    engine = get_engine(settings)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    session_factory = get_session_factory(engine)

    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)
        sizes = []
        distances = []
        magnitudes = []
        orbits = []
        for item in SUPER_DICT:
            # Guard clauses: skip anything that is not a close Earth
            # approach from 2016-2018.
            if item['orbit_body'] != 'Earth':
                continue
            if not any(year in item['approach_date']
                       for year in ('2016', '2017', '2018')):
                continue
            if float("{0:.2f}".format(float(item['miss_lunar']))) > 14.00:
                continue
            # Identity fields shared by all four row types.
            ident = dict(
                date=item['approach_date'],
                neo_id=item["neo_id"],
                name=item["name"],
                url=item["rock_url"],
            )
            sizes.append(Size(
                kilometers=item["diakm"],
                meters=item["diam"],
                miles=item["diamiles"],
                feet=item["diafeet"],
                **ident))
            distances.append(Distance(
                astronomical=item["miss_astronomical"],
                lunar=item["miss_lunar"],
                kilometers=item["miss_km"],
                miles=item["miss_miles"],
                **ident))
            magnitudes.append(AbsoluteMag(
                absolutemag=item["absmag"],
                velocity_kps=item["velocity_kps"],
                velocity_kph=item["velocity_kph"],
                **ident))
            orbits.append(Orbit(
                orbit_period=item["orbit_period"],
                perihelion_dist=item["perihelion_dist"],
                aphelion_dist=item["aphelion_dist"],
                eccentricity=item["orbit_eccentricity"],
                perihelion_time=item["perihelion_time"],
                **ident))
        dbsession.add_all(sizes)
        dbsession.add_all(distances)
        dbsession.add_all(magnitudes)
        dbsession.add_all(orbits)
示例#32
0
"""Pyramid bootstrap environment. """
from alembic import context
from pyramid.paster import get_appsettings, setup_logging
from sqlalchemy import engine_from_config

from {{ cookiecutter.repo_name }}.models.meta import Base

config = context.config

setup_logging(config.config_file_name)

settings = get_appsettings(config.config_file_name)
target_metadata = Base.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # Offline mode connects to nothing: alembic renders SQL against the
    # application's configured URL and writes it to the script output.
    context.configure(url=settings['sqlalchemy.url'])
    with context.begin_transaction():
        context.run_migrations()
# Shared body of the command-line usage message.  It was previously
# duplicated verbatim in three places (help, getopt error, and missing
# arguments); any wording fix now only needs to happen here.
_USAGE_BODY = (
    'Usage: MiniSecBGP_hijack_attack_scenario [options]\n'
    '\n'
    'options (with examples):\n'
    '\n'
    '-h                                                       this help\n'
    '\n'
    '--config-file="minisecbgp.ini"                           pyramid config filename [.ini]\n'
    '--scenario-name="Test topology"                          the name that will be used to identify this scenario\n'
    '--scenario-description="date 20200729"                   the scenario description\n'
    '--topology=3                                             the topology used as the original base of the scenario\n'
    '--attacker=[65001,65002]                                 define which AS(s) will be the attacker\n'
    '--affected-area=[65001,65003]                            define which these AS(s) will receive and accept the hijacked routes\n'
    '--target=[\'10.0.0.0/24\',\'20.0.0.0/24\']                   define the prefix(s) and mask(s) that will be hijacked by the attacker(s)\n'
    '--attack-type=attraction|interception                    if the attack is an attraction attack or an interception attack\n'
    '--all-paths or --number-of-shortest-paths=[1..999]       number of valid paths between the attacker AS, affected AS and target AS\n'
    '\n'
    'or\n'
    '\n'
    '--scenario-id=16                                         scenario ID\n'
)


def _print_usage(header=''):
    """Print the usage text, optionally preceded by a header line such as
    'ERROR\\n' or 'HELP\\n' (output is byte-identical to the previous
    inline prints)."""
    print('\n' + header + _USAGE_BODY)


def main(argv=sys.argv[1:]):
    """Build (or delete) a BGP hijack attack scenario in the database.

    Two modes:
      * create: requires --config-file, --scenario-name, --topology,
        --attacker, --affected-area, --target, --attack-type and either
        --all-paths or --number-of-shortest-paths;
      * delete: requires --config-file and --scenario-id.

    Exits with status 2 on a command-line parsing error.
    """
    try:
        opts, args = getopt.getopt(argv, "h", [
            "config-file=", "scenario-id=", "scenario-name=",
            "scenario-description=", "topology=", "attacker=",
            "affected-area=", "target=", "attack-type=", "all-paths",
            "number-of-shortest-paths="
        ])
    except getopt.GetoptError:
        _print_usage('ERROR\n')
        sys.exit(2)
    config_file = scenario_id = scenario_name = scenario_description = topology = attacker = \
        affected_area = target = attack_type = number_of_shortest_paths = ''
    for opt, arg in opts:
        if opt == '-h':
            _print_usage('HELP\n')
            sys.exit()
        if opt == '--config-file':
            config_file = arg
        elif opt == '--scenario-id':
            scenario_id = arg
        elif opt == '--scenario-name':
            scenario_name = arg
        elif opt == '--scenario-description':
            scenario_description = arg
        elif opt == '--topology':
            topology = arg
        elif opt == '--attacker':
            attacker = arg
        elif opt == '--affected-area':
            affected_area = arg
        elif opt == '--target':
            target = arg
        elif opt == '--attack-type':
            attack_type = arg
        elif opt == '--all-paths':
            # '0' is the sentinel meaning "use every valid path".
            number_of_shortest_paths = '0'
        elif opt == '--number-of-shortest-paths':
            number_of_shortest_paths = arg

    if (config_file and scenario_name and topology and attacker and affected_area and target and attack_type and number_of_shortest_paths) \
            or (config_file and scenario_id):
        args = parse_args(config_file)
        setup_logging(args.config_uri)
        env = bootstrap(args.config_uri)
        try:
            with env['request'].tm:
                dbsession = env['request'].dbsession
                aa = AttackScenario(dbsession, scenario_id, scenario_name,
                                    scenario_description, topology, attacker,
                                    affected_area, target, attack_type,
                                    number_of_shortest_paths)
                attacker_list, affected_area_list, target_list, scenario_attack_type = aa.attack_scenario(
                )

            # scenario_item / path / path_item
            if attacker_list and affected_area_list and target_list:
                if scenario_attack_type == 'attraction':
                    with env['request'].tm:
                        aa.attraction_attack_type()
                elif scenario_attack_type == 'interception':
                    with env['request'].tm:
                        aa.interception_attack_type()
                else:
                    print('attack type unknown')
                    return

            with env['request'].tm:
                if scenario_id:
                    clear_database(dbsession, scenario_id)
        except OperationalError:
            print('Database error')
    else:
        _print_usage()
示例#34
0
def main(global_config, **settings):
    """ Return a Pyramid WSGI application.

    Builds the whole application registry: database engine/session,
    authentication and authorization policies, i18n, caching, mailer,
    REST views and static assets.  The ``config.include`` ordering below
    is deliberate (see inline notes) and should not be reordered casually.
    """
    settings = dict(global_config, **settings)
    # Remember which .ini file we were started from; used below for
    # migrations and test detection.
    settings['config_uri'] = global_config['__file__']

    # here we create the engine and bind it to the (not really a) session
    # factory
    settings = set_config(settings)
    if not session_maker_is_initialized():
        configure_engine(settings)
    if settings.get('idealoom_debug_signal', False):
        from assembl.lib import signals
        signals.listen()

    import os
    if 'UWSGI_ORIGINAL_PROC_NAME' in os.environ:
        # uwsgi does not load logging properly
        from pyramid.paster import setup_logging
        setup_logging(global_config['__file__'])

    # Use the global Zope component registry rather than a private one.
    config = Configurator(registry=getGlobalSiteManager())
    config.include('.lib.logging')
    from .views.traversal import root_factory
    config.setup_registry(settings=settings, root_factory=root_factory)
    config.add_translation_dirs('assembl:locale/')

    config.set_locale_negotiator(my_locale_negotiator)
    # Place the logging tween outside pyramid_tm so requests are logged
    # together with their transaction lifecycle.
    config.add_tween('assembl.tweens.logging.logging_tween_factory',
                     over="pyramid_tm.tm_tween_factory")

    config.include('pyramid_retry')
    config.include('.auth')
    config.include('.models')
    bake_lazy_loaders()
    # Tasks first, because it includes ZCA registration (for now)
    config.include('.tasks')

    config.include('pyramid_dogpile_cache')
    config.include('.lib.zmqlib')
    session_factory = session_factory_from_settings(settings)
    config.set_session_factory(session_factory)
    # 'nosecurity' disables authentication entirely (e.g. for some tests).
    if not settings.get('nosecurity', False):
        # import after session to delay loading of BaseOps
        from .auth.util import authentication_callback
        # The authentication policy class is configurable by dotted name.
        auth_policy_name = settings.get(
            "auth_policy_class",
            "assembl.auth.util.UpgradingSessionAuthenticationPolicy")
        auth_policy = resolver.resolve(auth_policy_name)(
            callback=authentication_callback)
        config.set_authentication_policy(auth_policy)
        config.set_authorization_policy(ACLAuthorizationPolicy())
    # ensure default roles and permissions at startup
    if not settings.get('in_migration', False):
        from .lib.migration import bootstrap_db, bootstrap_db_data
        db = bootstrap_db(settings['config_uri'])
        # Skip seeding default data when running the test configuration.
        bootstrap_db_data(db, settings['config_uri'] != "testing.ini")

    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_static_view('widget', 'widget', cache_max_age=3600)
    config.include('cornice')  # REST services library.
    # config.include('.lib.alembic')
    # config.include('.lib.email')
    config.include('.lib')
    config.include('.views')

    # jinja2
    config.include('pyramid_jinja2')
    config.add_jinja2_extension('jinja2.ext.i18n')

    # Mailer
    if 'pyramid_mailer.debug' not in settings.get('pyramid.includes', ()):
        config.include('pyramid_mailer')

    config.include('.view_def')

    wsgi_app = config.make_wsgi_app()
    # Optionally wrap the app with SQLTap to profile SQL per request.
    if asbool(settings.get('sqltap', False)):
        import sqltap.wsgi
        wsgi_app = sqltap.wsgi.SQLTapMiddleware(wsgi_app)
    return wsgi_app
示例#35
0
def _download(url, filename):
    """Stream *url* to local *filename* in 4 KiB chunks so the large EDDB
    dump files are never held fully in memory."""
    r = requests.get(url, stream=True)
    with open(filename, 'wb') as f:
        for chunk in r.iter_content(chunk_size=4096):
            if chunk:
                f.write(chunk)


def _commit():
    """Flag the raw-SQL session as changed and commit the transaction.

    Required because DBSession.execute() bypasses ORM change tracking, so
    zope.sqlalchemy would otherwise consider the transaction clean and
    skip the commit."""
    mark_changed(DBSession())
    transaction.commit()


def main(argv=sys.argv):
    """Rebuild the EDDB-derived tables from the EDDB.io v5 dumps.

    Factions, bodies, populated systems, stations and listings are
    dropped and fully re-imported; the (much larger) `systems` table is
    upserted from the recently-changed subset.  Indexes are recreated
    after each bulk load.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    print("Beginning update.")
    # Drop in dependency order, then recreate empty tables.  Note that the
    # main `systems` table is intentionally NOT dropped: it is upserted
    # in place further below.
    PopulatedSystem.__table__.drop(engine)
    Listing.__table__.drop(engine)
    Station.__table__.drop(engine)
    Faction.__table__.drop(engine)
    Body.__table__.drop(engine)
    Faction.__table__.create(engine)
    PopulatedSystem.__table__.create(engine)
    Body.__table__.create(engine)
    Station.__table__.create(engine)
    Listing.__table__.create(engine)
    _commit()

    #
    # Factions
    #
    print("Updating factions...")
    print("Downloading factions.jsonl from EDDB.io...")
    _download("https://eddb.io/archive/v5/factions.jsonl", 'factions.json')
    print("Saved factions.json. Updating...")
    url = str(engine.url) + "::" + Faction.__tablename__
    ds = dshape("var *{  id: ?int64,  name: ?string,  updated_at: ?int64,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string, home_system_id: ?int64,  "
                "is_player_faction: ?bool }")
    odo('jsonlines://factions.json', url, dshape=ds)
    print("Done! Creating index...")
    DBSession.execute("CREATE INDEX factions_idx ON factions(id)")
    _commit()
    print("Completed processing factions.")

    #
    # Systems
    #
    print("Downloading systems_recently.csv from EDDB.io...")
    _download("https://eddb.io/archive/v5/systems_recently.csv", 'systems_recently.csv')
    print("Saved systems_recently.csv. Creating temporary table and importing...")
    DBSession.execute("CREATE TEMP TABLE systems_tmp (LIKE systems)")
    url = str(engine.url) + "::systems_tmp"
    ds = dshape("var *{  id: ?int64,  edsm_id: ?int64,  name: ?string,  x: ?float64,  y: ?float64,  "
                "z: ?float64,  population: ?int64,  is_populated: ?bool,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  security_id: ?float64,  security: ?string,  "
                "primary_economy_id: ?float64,  primary_economy: ?string,  power: ?string,  "
                "power_state: ?string,  power_state_id: ?string,  needs_permit: ?bool,  "
                "updated_at: ?int64,  simbad_ref: ?string,  controlling_minor_faction_id: ?string,  "
                "controlling_minor_faction: ?string,  reserve_type_id: ?float64,  reserve_type: ?string  }")
    odo('systems_recently.csv', url, dshape=ds)
    print("Updating systems...")
    # Upsert the recently-changed systems.  Fixes vs. the original SQL:
    #   * PostgreSQL requires an explicit conflict target for
    #     ON CONFLICT ... DO UPDATE -> ON CONFLICT (id);
    #   * is_populated was refreshed from EXCLUDED.population (wrong column);
    #   * controlling_minor_faction was missing from the SET list.
    DBSession.execute("INSERT INTO systems(id, edsm_id, name, x, y, z, population, is_populated, government_id, "
                      "government, allegiance_id, allegiance, state_id, state, security_id, security, "
                      "primary_economy_id, primary_economy, power, power_state, power_state_id, needs_permit, "
                      "updated_at, simbad_ref, controlling_minor_faction_id, controlling_minor_faction, "
                      "reserve_type_id, reserve_type) SELECT id, edsm_id, name, x, y, z, population, is_populated, "
                      "government_id, government, allegiance_id, allegiance, state_id, state, security_id, security, "
                      "primary_economy_id, primary_economy, power, power_state, power_state_id, needs_permit, "
                      "updated_at, simbad_ref, controlling_minor_faction_id, controlling_minor_faction, "
                      "reserve_type_id, reserve_type from systems_tmp ON CONFLICT (id) DO UPDATE "
                      "SET edsm_id = EXCLUDED.edsm_id, name = EXCLUDED.name, x = EXCLUDED.x, "
                      "y = EXCLUDED.y, z = EXCLUDED.z, population = EXCLUDED.population, "
                      "is_populated = EXCLUDED.is_populated, government_id = EXCLUDED.government_id, "
                      "government = EXCLUDED.government, allegiance_id = EXCLUDED.allegiance_id, "
                      "allegiance = EXCLUDED.allegiance, state_id = EXCLUDED.state_id, "
                      "state = EXCLUDED.state, security_id = EXCLUDED.security_id, security = EXCLUDED.security, "
                      "primary_economy_id = EXCLUDED.primary_economy_id, primary_economy = EXCLUDED.primary_economy, "
                      "power = EXCLUDED.power, power_state = EXCLUDED.power_state, power_state_id = "
                      "EXCLUDED.power_state_id, needs_permit = EXCLUDED.needs_permit, updated_at = "
                      "EXCLUDED.updated_at, simbad_ref = EXCLUDED.simbad_ref,"
                      "controlling_minor_faction_id = EXCLUDED.controlling_minor_faction_id, "
                      "controlling_minor_faction = EXCLUDED.controlling_minor_faction, "
                      "reserve_type_id = EXCLUDED.reserve_type_id, reserve_type = EXCLUDED.reserve_type")
    _commit()
    print("Done!")

    #
    # Bodies
    #
    print("Downloading bodies.jsonl from EDDB.io...")
    _download("https://eddb.io/archive/v5/bodies.jsonl", 'bodies.json')
    print("Saved bodies.jsonl. Converting JSONL to SQL.")
    DBSession.execute("CREATE TEMP TABLE bodies_tmp (LIKE bodies)")
    url = str(engine.url) + "::bodies_tmp"
    ds = dshape("var *{ id: ?int64, created_at: ?int64, updated_at: ?int64, name: ?string, "
                "system_id: ?int64, group_id: ?int64, group_name: ?string, type_id: ?int64, "
                "type_name: ?string, distance_to_arrival: ?int64, full_spectral_class: ?string, "
                "spectral_class: ?string, spectral_sub_class: ?string, luminosity_class: ?string, "
                "luminosity_sub_class: ?string, surface_temperature: ?int64, is_main_star: ?bool, "
                "age: ?int64, solar_masses: ?float64, solar_radius: ?float64, catalogue_gliese_id : ?string, "
                "catalogue_hipp_id: ?string, catalogue_hd_id: ?string, volcanism_type_id: ?int64, "
                "volcanism_type_name: ?string, atmosphere_type_id: ?int64, atmosphere_type_name: ?string, "
                "terraforming_state_id: ?int64, terraforming_state_name: ?string, earth_masses: ?float64, "
                "radius: ?int64, gravity: ?float64, surface_pressure: ?int64, orbital_period: ?float64, "
                "semi_major_axis: ?float64, orbital_eccentricity: ?float64, orbital_inclination: ?float64, "
                "arg_of_periapsis: ?float64, rotational_period: ?float64, "
                "is_rotational_period_tidally_locked: ?bool, axis_tilt: ?float64, eg_id: ?int64, "
                "belt_moon_masses: ?float64, ring_type_id: ?int64, ring_type_name: ?string, "
                "ring_mass: ?int64, ring_inner_radius: ?float64, ring_outer_radius: ?float64, "
                "rings: ?json, atmosphere_composition: ?json, solid_composition: ?json, "
                "materials: ?json, is_landable: ?bool}")
    odo('jsonlines://bodies.json', url, dshape=ds)
    # Bug fix: rows were loaded into bodies_tmp but never copied into
    # `bodies`, leaving it empty and later causing EVERY station to be
    # deleted by the body-reference cleanup.  `bodies` was recreated
    # above, so a plain INSERT from the staging table is correct.
    DBSession.execute("INSERT INTO bodies SELECT * FROM bodies_tmp")
    print("Creating indexes...")
    DBSession.execute("CREATE INDEX bodies_idx ON bodies(name text_pattern_ops)")
    _commit()
    DBSession.execute("CREATE INDEX systemid_idx ON bodies(system_id)")
    _commit()
    print("Done!")

    #
    # Populated systems
    #
    print("Downloading systems_populated.jsonl from EDDB.io...")
    _download("https://eddb.io/archive/v5/systems_populated.jsonl", 'systems_populated.json')
    print("Saved systems_populated.json. Updating...")
    url = str(engine.url) + "::" + PopulatedSystem.__tablename__
    ds = dshape("var *{  id: ?int64,  edsm_id: ?int64,  name: ?string,  x: ?float64,  y: ?float64,  "
                "z: ?float64,  population: ?int64,  is_populated: ?bool,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  security_id: ?float64,  security: ?string,  "
                "primary_economy_id: ?float64,  primary_economy: ?string,  power: ?string,  "
                "power_state: ?string,  power_state_id: ?string,  needs_permit: ?int64,  "
                "updated_at: ?int64,  simbad_ref: ?string,  controlling_minor_faction_id: ?string,  "
                "controlling_minor_faction: ?string,  reserve_type_id: ?float64,  reserve_type: ?string,"
                "minor_faction_presences: ?json }")
    odo('jsonlines://systems_populated.json', url, dshape=ds)
    print("Done! Uppercasing system names...")
    DBSession.execute("UPDATE populated_systems SET name = UPPER(name)")
    _commit()
    print("Creating indexes...")
    DBSession.execute("CREATE INDEX index_populated_system_names_trigram ON populated_systems "
                      "USING GIN(name gin_trgm_ops)")
    _commit()
    DBSession.execute("CREATE INDEX index_populated_system_names_btree ON populated_systems (name)")
    _commit()
    print("Completed processing populated systems.")

    #
    # Stations
    #
    print("Downloading stations.jsonl from EDDB.io...")
    _download("https://eddb.io/archive/v5/stations.jsonl", 'stations.json')
    print("Saved stations.json. Updating...")
    DBSession.execute("CREATE TEMP TABLE stations_tmp (LIKE stations)")
    url = str(engine.url) + "::stations_tmp"
    ds = dshape("var *{  id: ?int64,  name: ?string,  system_id: ?int64,  updated_at: ?int64,  "
                "max_landing_pad_size: ?string,  distance_to_star: ?int64,  government_id: ?int64,  "
                "government: ?string,  allegiance_id: ?int64,  allegiance: ?string,  "
                "state_id: ?int64,  state: ?string,  type_id: ?int64,  type: ?string,  "
                "has_blackmarket: ?bool,  has_market: ?bool,  has_refuel: ?bool,  "
                "has_repair: ?bool,  has_rearm: ?bool,  has_outfitting: ?bool,  "
                "has_shipyard: ?bool,  has_docking: ?bool,  has_commodities: ?bool,  "
                "import_commodities: ?json,  export_commodities: ?json,  prohibited_commodities: ?json, "
                "economies: ?json, shipyard_updated_at: ?int64, outfitting_updated_at: ?int64, "
                "market_updated_at: ?int64, is_planetary: ?bool, selling_ships: ?json, "
                "selling_modules: ?json, settlement_size_id: ?string, settlement_size: ?int64, "
                "settlement_security_id: ?int64, settlement_security: ?string, body_id: ?int64,"
                "controlling_minor_faction_id: ?int64 }")
    odo('jsonlines://stations.json', url, dshape=ds)
    print("Done! Cleaning stations without body references...")
    DBSession.execute("DELETE FROM stations_tmp WHERE body_id NOT IN (SELECT b.id from bodies b)")
    _commit()
    # `stations` was dropped and recreated above, so it is empty: a plain
    # INSERT from the staging table suffices.  (The original statement
    # here was an unfinished UPDATE with a dangling column list that
    # could not even parse.)
    DBSession.execute("INSERT INTO stations SELECT * FROM stations_tmp")
    DBSession.execute("CREATE INDEX index_stations_systemid_btree ON stations(system_id)")
    _commit()
    DBSession.execute("CREATE INDEX index_stations_btree ON stations(id)")
    _commit()
    print("Completed processing stations.")

    #
    # Listings
    #
    print("Downloading listings.csv from EDDB.io...")
    _download("https://eddb.io/archive/v5/listings.csv", 'listings.csv')
    print("Saved listings.csv. Updating...")
    url = str(engine.url) + "::" + Listing.__tablename__
    ds = dshape("var *{  id: ?int64, station_id: ?int64, commodity: ?int64, supply: ?int64, "
                "buy_price: ?int64, sell_price: ?int64, demand: ?int64, collected_at: ?int64 }")
    odo('listings.csv', url, dshape=ds)

    print("Creating indexes...")
    DBSession.execute("CREATE INDEX index_listings_stationid_btree ON listings(station_id)")
    _commit()
    print("Updates complete.")
示例#36
0
# Bug fix: this module uses os.environ (below) and inspect (in
# is_source) but imported neither, which raised NameError at import
# time.  Imports are now grouped stdlib / third-party / local.
import inspect
import logging
import os

from sqlalchemy import engine_from_config

from pyramid.paster import (
    get_appsettings,
    setup_logging,
)

from .helpers import (
    get_lp, )

from .models import (
    DBSession, )

# Configure logging from the environment-selected paster .ini file
# (e.g. development.ini when ENV is unset) as soon as the module loads.
setup_logging('%s.ini' % os.environ.get('ENV', 'development'))
# NOTE(review): logger is keyed on __file__ rather than the conventional
# __name__; kept as-is so any existing logging config keyed on the file
# path still matches.
logger = logging.getLogger(__file__)


def is_source(o):
    """Return True when *o* is a class derived from SourcePlugin."""
    if not inspect.isclass(o):
        return False
    return issubclass(o, SourcePlugin)


class SourcePlugin(object):
    """Base class for source plugins.

    Wires up a logger for subclasses: ``self.logger`` is the logger object
    and ``self.log`` is a shortcut to its ``info`` method.
    """

    def __init__(self, lp=None, log=None):
        # Fall back to the module-level logger when no logger is supplied.
        chosen = log if log else logger
        self.logger = chosen
        self.log = self.logger.info
示例#37
0
    def run(self):  # pragma: no cover
        """Run the serve command.

        Resolves daemon subcommands (start/stop/restart/status), optionally
        installs the reloading file monitor or daemonizes, sets up pid/log
        files, then loads and starts the WSGI server and application.

        Returns an integer exit code (2 on usage/daemon errors) or None
        after the serve loop exits normally.
        """
        # "stop-daemon" is a terminal action: warn about deprecation and exit.
        if self.options.stop_daemon:
            self._warn_daemon_deprecated()
            return self.stop_daemon()

        if not hasattr(self.options, 'set_user'):
            # Windows case:
            self.options.set_user = self.options.set_group = None

        # @@: Is this the right stage to set the user at?
        if self.options.set_user or self.options.set_group:
            self.change_user_group(self.options.set_user,
                                   self.options.set_group)

        if not self.args:
            self.out('You must give a config file')
            return 2
        app_spec = self.args[0]

        # An optional second positional argument selects a daemon subcommand.
        if (len(self.args) > 1 and self.args[1] in self.possible_subcommands):
            cmd = self.args[1]
        else:
            cmd = None

        if self.options.reload:
            # The reloader re-execs/monitors the process itself, which is
            # incompatible with daemon management.
            if (getattr(self.options, 'daemon', False)
                    or cmd in ('start', 'stop', 'restart')):
                self.out(
                    'Error: Cannot use reloading while running as a dameon.')
                return 2
            if os.environ.get(self._reloader_environ_key):
                # Monitored child process: watch files and fall through to serve.
                if self.options.verbose > 1:
                    self.out('Running reloading file monitor')
                install_reloader(int(self.options.reload_interval), [app_spec])
                # if self.requires_config_file:
                #     watch_file(self.args[0])
            else:
                # Initial invocation: re-exec ourselves under the monitor.
                return self.restart_with_reloader()

        if cmd not in (None, 'start', 'stop', 'restart', 'status'):
            self.out('Error: must give start|stop|restart (not %s)' % cmd)
            return 2

        if cmd == 'status' or self.options.show_status:
            self._warn_daemon_deprecated()
            return self.show_status()

        if cmd in ('restart', 'stop'):
            self._warn_daemon_deprecated()
            result = self.stop_daemon()
            if result:
                if cmd == 'restart':
                    self.out("Could not stop daemon; aborting")
                else:
                    self.out("Could not stop daemon")
                return result
            if cmd == 'stop':
                return result
            # 'restart' falls through to start a fresh daemon below.
            self.options.daemon = True

        if cmd == 'start':
            self.options.daemon = True

        app_name = self.options.app_name

        vars = self.get_options()

        # A bare filename becomes a PasteDeploy "config:" spec.
        if not self._scheme_re.search(app_spec):
            app_spec = 'config:' + app_spec
        server_name = self.options.server_name
        if self.options.server:
            server_spec = 'egg:pyramid'
            assert server_name is None
            server_name = self.options.server
        else:
            server_spec = app_spec
        base = os.getcwd()

        # warn before setting a default
        if self.options.pid_file or self.options.log_file:
            self._warn_daemon_deprecated()

        # Daemon mode gets default pid/log files when none were supplied.
        if getattr(self.options, 'daemon', False):
            if not self.options.pid_file:
                self.options.pid_file = 'pyramid.pid'
            if not self.options.log_file:
                self.options.log_file = 'pyramid.log'

        # Ensure the log file is writeable
        if self.options.log_file:
            try:
                writeable_log_file = open(self.options.log_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to log file: %s' % ioe
                raise ValueError(msg)
            writeable_log_file.close()

        # Ensure the pid file is writeable
        if self.options.pid_file:
            try:
                writeable_pid_file = open(self.options.pid_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to pid file: %s' % ioe
                raise ValueError(msg)
            writeable_pid_file.close()

        # warn before forking
        if (self.options.monitor_restart
                and not os.environ.get(self._monitor_environ_key)):
            self.out('''\
--monitor-restart has been deprecated in Pyramid 1.6. It will be removed
in a future release per Pyramid's deprecation policy. Please consider using
a real process manager for your processes like Systemd, Circus, or Supervisor.
''')

        # Only the outermost process (not the monitor's child) daemonizes.
        if (getattr(self.options, 'daemon', False)
                and not os.environ.get(self._monitor_environ_key)):
            self._warn_daemon_deprecated()
            try:
                self.daemonize()
            except DaemonizeException as ex:
                if self.options.verbose > 0:
                    self.out(str(ex))
                return 2

        if (not os.environ.get(self._monitor_environ_key)
                and self.options.pid_file):
            self.record_pid(self.options.pid_file)

        # Parent monitor process: supervise and restart on unexpected exit.
        if (self.options.monitor_restart
                and not os.environ.get(self._monitor_environ_key)):
            return self.restart_with_monitor()

        if self.options.log_file:
            # Redirect stdout/stderr (and root logging) into the log file.
            stdout_log = LazyWriter(self.options.log_file, 'a')
            sys.stdout = stdout_log
            sys.stderr = stdout_log
            logging.basicConfig(stream=stdout_log)

        # Derive the logging-config path from the app spec: only "config:"
        # specs map to an ini file; "egg:" specs carry no logging config.
        log_fn = app_spec
        if log_fn.startswith('config:'):
            log_fn = app_spec[len('config:'):]
        elif log_fn.startswith('egg:'):
            log_fn = None
        if log_fn:
            log_fn = os.path.join(base, log_fn)
            setup_logging(log_fn)

        server = self.loadserver(server_spec,
                                 name=server_name,
                                 relative_to=base,
                                 global_conf=vars)

        app = self.loadapp(app_spec,
                           name=app_name,
                           relative_to=base,
                           global_conf=vars)

        if self.options.verbose > 0:
            if hasattr(os, 'getpid'):
                msg = 'Starting server in PID %i.' % os.getpid()
            else:
                msg = 'Starting server.'
            self.out(msg)

        def serve():
            # Treat Ctrl-C / SystemExit as a normal shutdown unless extra
            # verbosity asks for the traceback.
            try:
                server(app)
            except (SystemExit, KeyboardInterrupt) as e:
                if self.options.verbose > 1:
                    raise
                if str(e):
                    msg = ' ' + str(e)
                else:
                    msg = ''
                self.out('Exiting%s (-v to see traceback)' % msg)

        if self.options.browser:

            def open_browser():
                # Resolve the configured port, give the server a moment to
                # bind, then open the local URL in the default browser.
                context = loadcontext(SERVER,
                                      app_spec,
                                      name=server_name,
                                      relative_to=base,
                                      global_conf=vars)
                url = 'http://127.0.0.1:{port}/'.format(**context.config())
                time.sleep(1)
                webbrowser.open(url)

            t = threading.Thread(target=open_browser)
            t.setDaemon(True)
            t.start()

        serve()
def database():
    """The :func:`~ess_test.conftest.database` fixture initialises the database specified
    in the "testing.ini", removes any existing data, creates the standard permissions, and
    four test users:

    * admin - user with full administrative permissions
    * developer - user with full experiment development permissions
    * content - user with full editing permissions
    * general - user with no permissions
    """
    global dbsession_initialised

    # Load settings
    settings = get_appsettings('testing.ini')
    setup_logging('testing.ini')

    # Init the DB
    engine = engine_from_config(settings, 'sqlalchemy.')
    if not dbsession_initialised:
        # Bind the scoped session only once per test run; the flag guards
        # against re-configuring an already-bound DBSession.
        DBSession.configure(bind=engine)
        dbsession_initialised = True
    # Drop-and-recreate gives every test run a clean schema.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    dbsession = DBSession()

    # Create Test Users
    with transaction.manager:
        admin_user = User(email='*****@*****.**', display_name='Admin', password='******')
        developer_user = User(email='*****@*****.**', display_name='Developer', password='******')
        content_user = User(email='*****@*****.**', display_name='Content', password='******')
        general_user = User(email='*****@*****.**', display_name='General', password='******')
        dbsession.add(general_user)
        # Site administrator group: user / group / question-type admin.
        group = PermissionGroup(title='Site administrator')
        group.permissions.append(Permission(name='admin.users', title='Administer the users'))
        group.permissions.append(Permission(name='admin.groups', title='Administer the permission groups'))
        group.permissions.append(Permission(name='admin.question_types', title='Administer the question types'))
        admin_user.permission_groups.append(group)
        # Developer group: may create new experiments (admin gets this too).
        group = PermissionGroup(title='Developer')
        group.permissions.append(Permission(name='survey.new', title='Create new experiments'))
        admin_user.permission_groups.append(group)
        developer_user.permission_groups.append(group)
        # Content administrator group: full view/edit/delete over experiments.
        group = PermissionGroup(title='Content administrator')
        group.permissions.append(Permission(name='survey.view-all', title='View all experiments'))
        group.permissions.append(Permission(name='survey.edit-all', title='Edit all experiments'))
        group.permissions.append(Permission(name='survey.delete-all', title='Delete all experiments'))
        content_user.permission_groups.append(group)
        dbsession.add(admin_user)
        dbsession.add(developer_user)
        dbsession.add(content_user)
        # Load the default question types shipped as a package resource.
        question_types = QuestionTypeIOSchema(include_schemas=(QuestionTypeGroupIOSchema,), many=True).\
            loads(resource_string('ess','scripts/templates/default_question_types.json'))
        dbsession.add_all(question_types.data)

    # Alembic Stamp
    # Record the schema as current so migrations are not re-applied in tests.
    alembic_config = config.Config('testing.ini', ini_section='app:main')
    alembic_config.set_section_option('app:main', 'script_location', 'ess:migrations')
    command.stamp(alembic_config, "head")

    dbsession.close()
    DBSession.close_all()

    yield DBSession
示例#39
0
def main(argv=sys.argv):
    """Seed the database from CSV files that live next to this script.

    Usage: <prog> <config_uri> <run_script> [var=value ...]

    run_script == 1 loads the lookup tables (States, Cities, Projects,
    SubProjects, FinSources, Parties); run_script == 2 loads the project
    rows from ProjectsData.csv.
    """
    if len(argv) < 3:
        usage(argv)
    config_uri = argv[1]
    run_script = int(argv[2])
    options = parse_vars(argv[3:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    engine = get_engine(settings)
    Base.metadata.create_all(engine)

    session_factory = get_session_factory(engine)

    # Lookup-table CSVs, in dependency order (Cities references States,
    # SubProjects references Projects).
    files = [
        'States', 'Cities', 'Projects', 'SubProjects', 'FinSources', 'Parties'
    ]

    import csv

    # conn = psycopg2.connect("host='localhost' port='5432' dbname='smartcities'")
    # curr = conn.cursor()

    # Directory containing this script; the CSV fixtures sit alongside it.
    __location__ = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))

    if run_script == 1:
        print(__location__)
        for file in files:
            # os.path.join keeps this portable (the previous '\\'
            # concatenation only worked on Windows).
            with open(os.path.join(__location__, file + '.csv'),
                      'rt') as csvfile:
                spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
                for row in spamreader:
                    if row:
                        # One transaction per row so a bad record does not
                        # roll back the whole import.
                        with transaction.manager:
                            dbsession = get_tm_session(session_factory,
                                                       transaction.manager)

                            if file == 'States':
                                dbsession.add(State(name=row[0]))

                            elif file == 'Cities':
                                name = row[0]
                                state = dbsession.query(State).filter_by(
                                    name=row[1]).first()
                                dbsession.add(
                                    City(name=name, state_id=state.id))

                            elif file == 'Projects':
                                dbsession.add(ProjectCategory(name=row[0]))

                            elif file == 'SubProjects':
                                name = row[0]
                                projectcategory = dbsession.query(
                                    ProjectCategory).filter_by(
                                        name=row[1]).first()
                                dbsession.add(
                                    ProjectSubCategory(
                                        name=name,
                                        category_id=projectcategory.id))

                            elif file == 'FinSources':
                                dbsession.add(FinanceSource(name=row[0]))

                            elif file == 'Parties':
                                dbsession.add(Party(name=row[0]))

    if run_script == 2:
        # Project data upload
        with open(os.path.join(__location__, 'ProjectsData.csv'),
                  'rt') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
            next(spamreader, None)  # Skip the headers
            for row in spamreader:
                if row:
                    with transaction.manager:
                        dbsession = get_tm_session(session_factory,
                                                   transaction.manager)
                        category = dbsession.query(ProjectCategory).filter_by(
                            name=row[6]).first()
                        subcategory = dbsession.query(ProjectSubCategory) \
                                                .filter_by(name=row[7]) \
                                                .filter_by(category_id=category.id) \
                                                .first()
                        city = dbsession.query(City).filter_by(
                            name=row[1]).first()
                        # Map the Yes/No IT_ITES column onto a nullable bool;
                        # anything else stays None.
                        it_es_text = row[9]
                        ites_val = None
                        if it_es_text == 'Yes':
                            ites_val = True
                        elif it_es_text == 'No':
                            ites_val = False
                        dbsession.add(
                            Project(name=row[8],
                                    rank=int(row[0]),
                                    round=row[3],
                                    IT_ITES=ites_val,
                                    # Amounts in the CSV are in crores.
                                    amount_total=float(row[5]) * 10000000,
                                    city_id=city.id,
                                    category_id=category.id,
                                    subcategory_id=subcategory.id))
示例#40
0
def configure_logging(*args, **kwargs):
    """Configure Pyramid logging from the module-level ``ini_file`` path.

    Accepts (and ignores) arbitrary positional/keyword arguments so it can
    be handed to callers that pass callback arguments.
    """
    setup_logging(ini_file)
def _print_usage():
    """Print the command-line usage text for this duplication script."""
    # Single source of truth: the original repeated this text three times,
    # and two copies carried a copy/pasted "MiniSecBGP_delete_topology"
    # title and "to be deleted" wording from the deletion script.
    print('\n'
          'Usage: MiniSecBGP_duplicate_topology [options]\n'
          '\n'
          'options (with examples):\n'
          '\n'
          '-h                                               this help\n'
          '\n'
          '--config-file=minisecbgp.ini                     pyramid config filename [.ini]\n'
          '--topology=3                                     the topology ID to be duplicated\n'
          '--new-topology-name=<new topology name>          the name to be used in new topology\n')


def main(argv=sys.argv[1:]):
    """Duplicate an existing topology under a new name.

    Parses --config-file, --topology and --new-topology-name, bootstraps
    the Pyramid environment, then copies the topology and its related data
    in successive transactions, toggling the "downloading" busy flag around
    the work. Exits with status 2 on bad options.
    """
    try:
        opts, args = getopt.getopt(argv, "h", ["config-file=", "topology=", "new-topology-name="])
    except getopt.GetoptError:
        _print_usage()
        sys.exit(2)
    config_file = old_id_topology = new_topology_name = ''
    for opt, arg in opts:
        if opt == '-h':
            _print_usage()
            sys.exit()
        elif opt == '--config-file':
            config_file = arg
        elif opt == '--topology':
            old_id_topology = arg
        elif opt == '--new-topology-name':
            new_topology_name = arg
    if config_file and old_id_topology and new_topology_name:
        args = parse_args(config_file)
        setup_logging(args.config_uri)
        env = bootstrap(args.config_uri)
        dt = DuplicateTopology()
        # Mark the topology as busy while the copy runs.
        with env['request'].tm:
            dbsession = env['request'].dbsession
            downloading = 1
            dt.downloading(dbsession, downloading)

        # Create the new topology row first so related data can reference it.
        with env['request'].tm:
            dbsession = env['request'].dbsession
            new_id_topology = dt.duplicate_topology(dbsession, old_id_topology, new_topology_name)

        with env['request'].tm:
            dbsession = env['request'].dbsession
            dt.duplicate_region_data(dbsession, old_id_topology, new_id_topology)

        with env['request'].tm:
            dbsession = env['request'].dbsession
            dt.duplicate_autonomous_system_data(dbsession, old_id_topology, new_id_topology)

        with env['request'].tm:
            dbsession = env['request'].dbsession
            dt.duplicate_another_data(dbsession, old_id_topology, new_id_topology)

        # Clear the busy flag once all related data has been copied.
        with env['request'].tm:
            dbsession = env['request'].dbsession
            downloading = 0
            dt.downloading(dbsession, downloading)
    else:
        _print_usage()
示例#42
0
def main():
  """Entry point for the pyramid-scheduler consumer process.

  Parses command-line options, configures logging from the given config
  URI, optionally installs a reloading file monitor or re-execs under a
  restart wrapper, then loads the Pyramid app and starts its scheduler
  consumer. Returns a process exit code (0 on clean exit, 10/11/20 on
  errors).
  """

  parser = argparse.ArgumentParser(
    description=
    'Launch the pyramid-scheduler job execution process.'
    ' This process is only required for pyramid-scheduler'
    ' configurations that are not running in "combined" mode'
    ' (i.e. all-in-one-process).'
    )

  parser.add_argument(
    '--app-name', metavar='NAME',
    dest='appName', default='main',
    help='Load the named application (default: %(default)s)')

  parser.add_argument(
    '-q', '--queue', metavar='NAME',
    dest='queues', action='append', default=[],
    help='Restrict queues that this scheduler will handle - if not'
         ' specified, all queues will be handled (can be specified'
         ' multiple times)')

  parser.add_argument(
    '--reload',
    action='store_true',
    help='Use auto-restart file monitor')

  parser.add_argument(
    '--reload-interval', metavar='SECONDS',
    dest='reload_interval', default=1, type=int,
    help='Seconds between checking files (default: %(default)i)')

  parser.add_argument(
    '--restart',
    action='store_true',
    help='Automatically restart on unexpected exit')

  parser.add_argument(
    '--restart-interval', metavar='SECONDS',
    dest='restart_interval', default=5, type=int,
    help='Seconds to wait after an unexpected exit'
         ' (default: %(default)i)')

  parser.add_argument(
    '--message', metavar='TEXT',
    help='Message to send to the current consumer (no check is'
         ' made to ensure that it is currently running)')

  parser.add_argument(
    'configUri', metavar='CONFIG-URI',
    help='The configuration URI or filename')

  options = parser.parse_args()

  setup_logging(options.configUri)
  # Rebind the module-level logger now that logging is configured.
  global log
  log = logging.getLogger(__name__)

  if options.reload:
    if os.environ.get(RELOADERENV) == 'true':
      # Monitored child process: install the watcher and keep running.
      log.info('installing reloading file monitor')
      # todo: the problem with pserve's implementation of the
      #       restarter is that it uses `os._exit()` which is
      #       a "rude" exit and does not allow APS's atexit()
      #       registrations to kick in... use a better
      #       implementation!
      install_reloader(options.reload_interval, [options.configUri])
    else:
      # Initial invocation: re-exec ourselves under the reloader.
      return restart_with_reloader(options)

  try:
    log.debug('loading application from "%s"', options.configUri)
    from pyramid_scheduler.scheduler import Scheduler
    app = get_app(options.configUri, name=options.appName)
    if not hasattr(app.registry, 'scheduler'):
      log.error('application did not load a scheduler (try'
                ' "config.include(\'pyramid_scheduler\')") - aborting')
      return 10
    if options.message:
      # One-shot mode: deliver a message to the running consumer and exit.
      with transaction.manager:
        app.registry.scheduler.broker.send(options.message)
      return 0
    if app.registry.scheduler.conf.combined:
      # Combined mode runs the consumer in-process; a separate consumer
      # process would double-execute jobs.
      log.error('application is configured for combined (i.e. single-process) operation - aborting')
      return 11
    log.debug('starting consumer process on queues %r', options.queues)
    app.registry.scheduler.startConsumer(daemon=True, queues=options.queues)
    register_for_graceful_shutdown(app)
    # Block until a shutdown signal arrives.
    wait_for_exit()
  except (KeyboardInterrupt, SystemExit):
    return 0
  except Exception:
    log.exception('error while starting pscheduler')
    return 20
示例#43
0
文件: pickledb.py 项目: yarda/bodhi
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
"""
This script pickles all updates/bugs/cves/comments and writes it out to disk
in the format of bodhi-pickledb-YYYYMMDD.HHMM
"""

__requires__ = 'bodhi'

import sys

import cPickle as pickle

from progressbar import ProgressBar, SimpleProgress, Percentage, Bar
from pyramid.paster import setup_logging

setup_logging('/etc/bodhi/production.ini')

from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

from bodhi.util import get_critpath_pkgs
import bodhi


def load_sqlalchemy_db():
    print "\nLoading pickled database %s" % sys.argv[2]
    db = file(sys.argv[2], 'r')
    data = pickle.load(db)

    import transaction
    from bodhi.models import Base
示例#44
0
    def run(self): # pragma: no cover
        """Run the serve command (older variant without deprecation warnings).

        Handles daemon subcommands (start/stop/restart/status), optional
        reloading, pid/log file setup, then loads and starts the WSGI
        server and application. Returns an integer exit code (2 on
        errors) or None after the serve loop exits normally.
        """
        if self.options.stop_daemon:
            return self.stop_daemon()

        if not hasattr(self.options, 'set_user'):
            # Windows case:
            self.options.set_user = self.options.set_group = None

        # @@: Is this the right stage to set the user at?
        self.change_user_group(
            self.options.set_user, self.options.set_group)

        if not self.args:
            self.out('You must give a config file')
            return 2
        app_spec = self.args[0]
        # Optional daemon subcommand; the remaining args become config vars.
        if (len(self.args) > 1
            and self.args[1] in self.possible_subcommands):
            cmd = self.args[1]
            restvars = self.args[2:]
        else:
            cmd = None
            restvars = self.args[1:]

        if self.options.reload:
            if os.environ.get(self._reloader_environ_key):
                # Monitored child: install the file watcher and serve.
                if self.verbose > 1:
                    self.out('Running reloading file monitor')
                install_reloader(int(self.options.reload_interval), [app_spec])
                # if self.requires_config_file:
                #     watch_file(self.args[0])
            else:
                # Initial invocation: re-exec under the reloader monitor.
                return self.restart_with_reloader()

        if cmd not in (None, 'start', 'stop', 'restart', 'status'):
            self.out(
                'Error: must give start|stop|restart (not %s)' % cmd)
            return 2

        if cmd == 'status' or self.options.show_status:
            return self.show_status()

        if cmd == 'restart' or cmd == 'stop':
            result = self.stop_daemon()
            if result:
                if cmd == 'restart':
                    self.out("Could not stop daemon; aborting")
                else:
                    self.out("Could not stop daemon")
                return result
            if cmd == 'stop':
                return result
            # 'restart' falls through to start a fresh daemon below.
            self.options.daemon = True

        if cmd == 'start':
            self.options.daemon = True

        app_name = self.options.app_name
        vars = self.parse_vars(restvars)
        # A bare filename becomes a PasteDeploy "config:" spec.
        if not self._scheme_re.search(app_spec):
            app_spec = 'config:' + app_spec
        server_name = self.options.server_name
        if self.options.server:
            server_spec = 'egg:pyramid'
            assert server_name is None
            server_name = self.options.server
        else:
            server_spec = app_spec
        base = os.getcwd()

        # Daemon mode gets default pid/log files when none were supplied.
        if getattr(self.options, 'daemon', False):
            if not self.options.pid_file:
                self.options.pid_file = 'pyramid.pid'
            if not self.options.log_file:
                self.options.log_file = 'pyramid.log'

        # Ensure the log file is writeable
        if self.options.log_file:
            try:
                writeable_log_file = open(self.options.log_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to log file: %s' % ioe
                raise ValueError(msg)
            writeable_log_file.close()

        # Ensure the pid file is writeable
        if self.options.pid_file:
            try:
                writeable_pid_file = open(self.options.pid_file, 'a')
            except IOError as ioe:
                msg = 'Error: Unable to write to pid file: %s' % ioe
                raise ValueError(msg)
            writeable_pid_file.close()

        if getattr(self.options, 'daemon', False):
            try:
                self.daemonize()
            except DaemonizeException as ex:
                if self.verbose > 0:
                    self.out(str(ex))
                return 2

        # Parent monitor process: supervise and restart on unexpected exit.
        if (self.options.monitor_restart
            and not os.environ.get(self._monitor_environ_key)):
            return self.restart_with_monitor()

        if self.options.pid_file:
            self.record_pid(self.options.pid_file)

        if self.options.log_file:
            # Redirect stdout/stderr (and root logging) into the log file.
            stdout_log = LazyWriter(self.options.log_file, 'a')
            sys.stdout = stdout_log
            sys.stderr = stdout_log
            logging.basicConfig(stream=stdout_log)

        # Derive the logging-config path: only "config:" specs map to a
        # file; "egg:" specs carry no logging config.
        log_fn = app_spec
        if log_fn.startswith('config:'):
            log_fn = app_spec[len('config:'):]
        elif log_fn.startswith('egg:'):
            log_fn = None
        if log_fn:
            log_fn = os.path.join(base, log_fn)
            setup_logging(log_fn)

        server = self.loadserver(server_spec, name=server_name,
                                 relative_to=base, global_conf=vars)
        app = self.loadapp(app_spec, name=app_name,
                           relative_to=base, global_conf=vars)

        if self.verbose > 0:
            if hasattr(os, 'getpid'):
                msg = 'Starting server in PID %i.' % os.getpid()
            else:
                msg = 'Starting server.'
            self.out(msg)

        def serve():
            # Treat Ctrl-C / SystemExit as a normal shutdown unless extra
            # verbosity asks for the traceback.
            try:
                server(app)
            except (SystemExit, KeyboardInterrupt) as e:
                if self.verbose > 1:
                    raise
                if str(e):
                    msg = ' ' + str(e)
                else:
                    msg = ''
                self.out('Exiting%s (-v to see traceback)' % msg)

        serve()
示例#45
0
文件: db.py 项目: reiterl/ringo
def get_engine(config_file):
    """Create a SQLAlchemy engine from *config_file* and bind the session.

    Configures logging from the same file, builds the engine from the
    ``sqlalchemy.`` settings, wires up the DB session, and returns the
    engine.
    """
    setup_logging(config_file)
    app_settings = get_appsettings_(config_file)
    db_engine = engine_from_config(app_settings, 'sqlalchemy.')
    setup_db_session(db_engine)
    return db_engine
示例#46
0
def main(argv=sys.argv):
    """Generate a proposal .docx ('competitive' or 'camp') for a project.

    Usage: <prog> <config_uri> <project_id> [var=value ...]

    NOTE: this module is Python 2 code (``unichr``) and the output paths
    are hard-coded for the original author's machine.
    """
    # argv[2] (the project id) is required below, so demand at least three
    # argv entries (the previous ``len(argv) < 2`` check let a missing id
    # crash with an IndexError instead of printing usage).
    if len(argv) < 3:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[3:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    engine = engine_from_config(settings, prefix='sqlalchemy.')
    Base.metadata.create_all(engine)

    maker = sessionmaker()
    maker.configure(bind=engine)

    session = get_session(maker, transaction.manager)

    all_data = Get_data(session=session, project_id=int(argv[2]))

    # Budget rows are stored as one flat string: records separated by
    # unichr(171), fields within a record by unichr(172).
    # NOTE(review): the guard tests .cost but splits .delicate_budget --
    # confirm the two columns are always populated together.
    cost_list_parameter = []
    cost_list_param = all_data.proposal.delicate_budget.split(
        unichr(171)) if all_data.proposal.cost is not None else []
    if cost_list_param:
        # Drop the empty trailing record left by the terminating separator.
        # The previous unconditional pop() raised IndexError when the list
        # was empty (i.e. when cost was None).
        cost_list_param.pop()

    for i in cost_list_param:
        cost_list_parameter.append(i.split(unichr(172)))

    if all_data.type == u'competitive':
        Gen_Doc_compet(
            doc_name='FRA241PROJECT/static/Gened_DOC/' + 'Competitive_' +
            str(all_data.id) + '.docx',
            project_name_th=all_data.title,
            date_cap=u'17 มกราคม – 21 มีนาคม 2559',
            where=all_data.proposal.activity_location,
            rational=all_data.proposal.Reason,
            purpose_list=all_data.proposal.objective.split(unichr(171)),
            profit=all_data.proposal.profit.split(unichr(171)),
            owner_list=all_data.proposal.owner_for_proposal.split(unichr(171)),
            advisor_list=all_data.proposal.advisor_for_proposal,
            member_list=all_data.proposal.member_for_proposal.split(
                unichr(171)),
            activity_place=all_data.proposal.activity_location,
            type_of_activity=all_data.proposal.type_of_activity,
            cost_list=cost_list_parameter,
            success_criteria=all_data.proposal.success_criteria.split(
                unichr(171)))
        # os.startfile('C:\Users\PHURINPAT\Documents\GitHub\FRA241Group6\FRA241PROJECT\static\Gened_DOC\Competitive_2.docx')
    elif all_data.type == u'camp':
        Gen_Doc_camp(
            camp_name_th=all_data.title,
            year=all_data.proposal.year,
            date_gap=u'',
            where=all_data.proposal.activity_location,
            rational=all_data.proposal.Reason,
            purpose_list=all_data.proposal.objective.split(unichr(171)),
            hours_compare=u'',
            owner_list=all_data.proposal.owner_for_proposal.split(unichr(171)),
            durations=all_data.proposal.duration,
            member_list=all_data.proposal.member_for_proposal.split(
                unichr(171)),
            evaluation_index=all_data.proposal.evaluation_index,
            profit=all_data.proposal.profit.split(unichr(171)),
            cost_list=all_data.proposal.cost.split(unichr(171)),
            cost_list_detail=cost_list_parameter,
            activity_list=all_data.proposal.schedule.split(unichr(171)))
        # Raw string: the plain literal relied on Python 2 leaving unknown
        # backslash escapes intact (and is a SyntaxError on Python 3);
        # the runtime value is unchanged.
        os.startfile(
            r'C:\Users\PHURINPAT\Documents\GitHub\FRA241Group6\sdf.docx')
示例#47
0
from pyramid.paster import get_app, setup_logging
import os

# Resolve the repository root: honour the OpenShift layout when deployed,
# fall back to the directory containing this file for local runs.
repo_folder = os.environ.get('OPENSHIFT_REPO_DIR', os.path.dirname(__file__))

# Configure logging from production.ini, then expose the WSGI callable under
# the conventional module-level name `application`.
ini_path = os.path.join(repo_folder, 'production.ini')
setup_logging(ini_path)
application = get_app(ini_path, 'main')
示例#48
0
def main(argv=None):
    """Long-running EDDN relay client.

    Subscribes to the EDDN ZeroMQ relay and ingests journal messages:
    new systems (FSDJump), stars (Scan), and stations/fleet carriers
    (Docked/CarrierJump) are persisted to the database.  When an
    ``xml_proxy`` setting is present, hourly and daily statistics are
    announced over XML-RPC.  Runs until interrupted; ZMQ errors cause a
    reconnect after a 5 second pause.

    :param argv: command line arguments; defaults to ``sys.argv``.
                 ``argv[1]`` must be the Pyramid config_uri.
    """
    if argv is None:
        argv = sys.argv
    if len(argv) < 2:
        usage(argv)
    proxy = None  # XML-RPC announcer; stays None when unconfigured.
    serverurl = None
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = get_engine(settings)
    session_factory = get_session_factory(engine)
    session = get_tm_session(session_factory, transaction.manager)
    if 'xml_proxy' in settings:
        serverurl = settings['xml_proxy']
        proxy = ServerProxy(serverurl)

    context = zmq.Context()
    subscriber = context.socket(zmq.SUB)

    subscriber.setsockopt(zmq.SUBSCRIBE, b"")
    subscriber.setsockopt(zmq.RCVTIMEO, __timeoutEDDN)
    starttime = time.time()
    lasthourly = time.time(
    ) - 3700  # Ensure we start by running the hourly once.

    # Rolling counters for the periodic reports.
    messages = 0      # accepted messages since the last daily report
    syscount = 0      # new systems added
    starcount = 0     # new stars added
    stationcount = 0  # new stations added
    failstar = 0      # stars whose system could not be resolved
    failstation = 0   # stations rejected on integrity errors
    totmsg = 0        # all messages seen since the last hourly report
    hmessages = 0     # accepted messages since the last hourly report
    if proxy:
        try:
            proxy.command("botserv", "Absolver",
                          "say #rattech [SAPI]: EDDN client has started.")
        except ProtocolError as e:
            print(f"Failed to send start message to XMLRPC. {e.errmsg}")
        except TimeoutError as e:
            print(f"Failed to send start message to XMLRPC. {e.strerror}")
    while True:
        try:
            subscriber.connect(__relayEDDN)

            while True:
                __message = subscriber.recv()

                if not __message:
                    subscriber.disconnect(__relayEDDN)
                    break

                __message = zlib.decompress(__message)
                __json = simplejson.loads(__message)
                totmsg = totmsg + 1
                print(
                    f"EDDN Client running. Messages: {messages:10} Stars: {starcount:10} Systems: {syscount:10} "
                    f" Stations: {stationcount:5} Missing systems: {failstar+failstation:10}\r",
                    end='')
                if validsoftware(__json['header']['softwareName'], __json['header']['softwareVersion']) \
                        and __json['$schemaRef'] in __allowedSchema:
                    hmessages = hmessages + 1
                    if proxy:
                        # Daily statistics announcement (every 24h).
                        if time.time() > (starttime + 3600 * 24):
                            try:
                                startot = session.query(func.count(
                                    Star.id64)).scalar()
                                systot = session.query(func.count(
                                    System.id64)).scalar()
                                proxy.command(
                                    "botserv", "Absolver",
                                    f"say #ratchat [\x0315SAPI\x03] Daily report: "
                                    f"{'{:,}'.format(messages)} messages processed"
                                    f", {'{:,}'.format(syscount)} new systems,"
                                    f"  {'{:,}'.format(starcount)} new stars."
                                    f" DB contains {'{:,}'.format(startot)} stars "
                                    f"and {'{:,}'.format(systot)} systems.")
                                messages = 0
                                syscount = 0
                                starcount = 0
                                failstar = 0
                                stationcount = 0
                                failstation = 0
                                starttime = time.time()
                            except TimeoutError:
                                print(
                                    "XMLRPC call failed due to timeout, retrying in 320 seconds."
                                )
                                starttime = starttime + 320
                            except ProtocolError as e:
                                print(
                                    f"XMLRPC call failed, skipping this update. {e.errmsg}"
                                )
                                starttime = time.time()
                        # Hourly statistics announcement.
                        if time.time() > (lasthourly + 3600):
                            # print("Running stats update...")
                            loop = asyncio.get_event_loop()
                            future = asyncio.Future()
                            asyncio.ensure_future(update_stats(
                                session, future))
                            future.add_done_callback(update_complete)
                            try:
                                loop.run_until_complete(future)
                                proxy.command(
                                    f"botserv", "Absolver",
                                    f"say #announcerdev [\x0315SAPI\x03] "
                                    f"Hourly report: {hmessages} messages, "
                                    f"{totmsg - hmessages} ignored.")
                                lasthourly = time.time()
                                hmessages = 0
                                totmsg = 0
                            except TimeoutError:
                                # NOTE(review): this delays the next run by 2h
                                # (lasthourly + 3600 is checked against
                                # now+3600) despite the message -- confirm
                                # intent.
                                print(
                                    "XMLRPC call failed due to timeout, retrying in one hour."
                                )
                                lasthourly = time.time() + 3600
                            except ProtocolError as e:
                                print(
                                    f"XMLRPC call failed, skipping this update. {e.errmsg}"
                                )
                                lasthourly = time.time()

                    data = __json['message']
                    messages = messages + 1
                    if 'event' in data:
                        if data['event'] in {'Docked', 'CarrierJump'}:
                            if 'StationType' in data and data[
                                    'StationType'] == 'FleetCarrier':
                                try:
                                    # BUGFIX: .first() is required -- a bare
                                    # Query object is always truthy, so the
                                    # "new carrier" branch could never run.
                                    oldcarrier = session.query(Carrier).filter(
                                        Carrier.callsign ==
                                        data['StationName']).first()
                                    if oldcarrier:
                                        # Known carrier: refresh location and
                                        # services.
                                        oldcarrier.marketId = data['MarketID']
                                        oldcarrier.systemName = data[
                                            'StarSystem']
                                        oldcarrier.systemId64 = data[
                                            'SystemAddress']
                                        oldcarrier.haveShipyard = 'shipyard' in data[
                                            'StationServices']
                                        oldcarrier.haveOutfitting = 'outfitting' in data[
                                            'StationServices']
                                        oldcarrier.haveMarket = 'commodities' in data[
                                            'StationServices']
                                        oldcarrier.updateTime = data[
                                            'timestamp']
                                    else:
                                        # Unknown carrier: record it.  (The
                                        # original guarded this with an elif
                                        # contradicting the outer FleetCarrier
                                        # check, making it unreachable.)
                                        newcarrier = Carrier(
                                            callsign=data['StationName'],
                                            marketId=data['MarketID'],
                                            name=data['StationName'],
                                            updateTime=data['timestamp'],
                                            systemName=data['StarSystem'],
                                            systemId64=data['SystemAddress'],
                                            haveShipyard='shipyard'
                                            in data['StationServices'],
                                            haveOutfitting='outfitting'
                                            in data['StationServices'],
                                            haveMarket='commodities'
                                            in data['StationServices'])
                                        session.add(newcarrier)
                                    transaction.commit()
                                except DataError as e:
                                    print(
                                        f"Failed to add a carrier! Invalid data passed: {e}"
                                    )
                                    transaction.abort()
                                except KeyError as e:
                                    print(f"Invalid key in carrier data: {e}")
                                    print(data)
                                    print(
                                        f"Software: {__json['header']['softwareName']} {__json['header']['softwareVersion']}"
                                    )
                                    transaction.abort()
                            else:
                                try:
                                    # Station data, check if it exists.
                                    # BUGFIX: .first() so a missing station
                                    # yields None; the bare Query was always
                                    # truthy, which skipped every new station.
                                    oldstation = session.query(Station).filter(Station.name == data['StationName']).\
                                        filter(Station.systemName == data['StarSystem']).first()
                                    if oldstation:
                                        continue
                                    else:
                                        # New station, add it!
                                        newstation = Station(
                                            id64=data['MarketID'],
                                            name=data['StationName'],
                                            distanceToArrival=data[
                                                'DistFromStarLS'],
                                            government=data[
                                                'StationGovernment'],
                                            economy=data['StationEconomy'],
                                            haveMarket='commodities'
                                            in data['StationServices'],
                                            haveShipyard='shipyard'
                                            in data['StationServices'],
                                            haveOutfitting='outfitting'
                                            in data['StationServices'],
                                            otherServices=data[
                                                'StationServices'],
                                            updateTime=data['timestamp'],
                                            systemId64=data['SystemAddress'],
                                            systemName=data['StarSystem'])
                                        session.add(newstation)
                                        stationcount += 1
                                        # BUGFIX: persist immediately, like the
                                        # carrier/system paths; otherwise the
                                        # add only survives if a later event
                                        # happens to commit.
                                        transaction.commit()
                                except DataError as e:
                                    print(
                                        f"Failed to add a station! Invalid data passed: {e}"
                                    )
                                    transaction.abort()
                                except KeyError as e:
                                    print(f"Invalid key in station data: {e}")
                                except IntegrityError:
                                    failstation = failstation + 1
                                    transaction.abort()

                        # TODO: Handle other detail Carrier events, such as Stats.
                        if data['event'] == 'FSDJump':
                            id64 = data['SystemAddress']
                            res = session.query(System.id64).filter(
                                System.id64 == id64).scalar() or False
                            if not res:
                                syscount = syscount + 1
                                newsys = System(id64=data['SystemAddress'],
                                                name=data['StarSystem'],
                                                coords={
                                                    'x': data['StarPos'][0],
                                                    'y': data['StarPos'][1],
                                                    'z': data['StarPos'][2]
                                                },
                                                date=data['timestamp'])
                                try:
                                    session.add(newsys)
                                    transaction.commit()
                                except DataError:
                                    print(
                                        "Failed to add a system! Invalid data passed"
                                    )
                                    transaction.abort()
                                except IntegrityError:
                                    transaction.abort()

                        if data['event'] == 'Scan':
                            # Synthesize a body id64 the same way EDSM does:
                            # system address plus BodyID in the high bits.
                            bodyid = data['SystemAddress'] + (
                                data['BodyID'] << 55)
                            # Only stars carry AbsoluteMagnitude.
                            if 'AbsoluteMagnitude' in data:
                                res = session.query(Star.id64).filter(
                                    Star.id64 == bodyid).scalar() or False
                                if not res:
                                    starcount = starcount + 1
                                    newstar = Star(
                                        id64=bodyid,
                                        bodyId=data['BodyID'],
                                        name=data['BodyName'],
                                        age=data['Age_MY'],
                                        axialTilt=data['AxialTilt'],
                                        orbitalEccentricity=data['Eccentricity']
                                        if 'Eccentricity' in data else None,
                                        orbitalInclination=data[
                                            'OrbitalInclination'] if
                                        'OrbitalInclination' in data else None,
                                        orbitalPeriod=data['OrbitalPeriod']
                                        if 'OrbitalPeriod' in data else None,
                                        parents=data['Parents']
                                        if 'Parents' in data else None,
                                        argOfPeriapsis=data['Periapsis']
                                        if 'Periapsis' in data else None,
                                        belts=data['Rings']
                                        if 'Rings' in data else None,
                                        semiMajorAxis=data['SemiMajorAxis']
                                        if 'SemiMajorAxis' in data else None,
                                        systemName=data['StarSystem'],
                                        distanceToArrival=data[
                                            'DistanceFromArrivalLS'],
                                        luminosity=data['Luminosity'],
                                        solarRadius=data['Radius'],
                                        rotationalPeriod=data[
                                            'RotationPeriod'],
                                        type=data['StarType'],
                                        solarMasses=data['StellarMass'],
                                        subType=data['Subclass']
                                        if 'Subclass' in data else None,
                                        surfaceTemperature=data[
                                            'SurfaceTemperature'],
                                        isScoopable=data['StarType']
                                        in __scoopable,
                                        isMainStar=data['BodyID'] == 0,
                                        updateTime=data['timestamp'],
                                        systemId64=data['SystemAddress'])
                                    try:
                                        session.add(newstar)
                                        transaction.commit()
                                    except DataError:
                                        print(
                                            "Failed to add star - Data Error!")
                                        transaction.abort()
                                    except IntegrityError:
                                        # Star references a system we do not
                                        # have yet: try to fetch it from EDSM.
                                        try:
                                            # BUGFIX: journal Scan events carry
                                            # 'StarSystem' (used above), not
                                            # 'SystemName' -- the old key
                                            # always raised KeyError here.
                                            r = requests.get(
                                                f"{__EDSM_url}/systems?systemName={data['StarSystem']}&"
                                                f"showId=1&showCoordinates=1&showInformation=1"
                                            ).json()[0]
                                            if r:
                                                system = System(
                                                    id64=r['id64'],
                                                    name=r['name'],
                                                    coords=r['coords'])
                                                session.add(system)
                                                transaction.commit()
                                        except IntegrityError:
                                            print(
                                                "Failed to add system during missing star handling. Bah. Give up."
                                            )
                                            failstar = failstar + 1
                                            transaction.abort()
                                        except KeyError:
                                            #print("No EDSM data found.")
                                            failstar += 1
                                            transaction.abort()

                sys.stdout.flush()

        except zmq.ZMQError as e:
            print('ZMQSocketException: ' + str(e))
            # BUGFIX: proxy is None when xml_proxy is unconfigured; calling
            # it unconditionally raised AttributeError inside the handler.
            if proxy:
                proxy.command(
                    "botserv", "Absolver",
                    f"say #rattech [\x0315SAPI\x03] EDDN error: "
                    f"Exiting due to exception: {str(e)}")

            sys.stdout.flush()
            subscriber.disconnect(__relayEDDN)
            time.sleep(5)
def _print_usage():
    """Print the MiniSecBGP_node_service command line help text."""
    print('\n'
          'Usage: MiniSecBGP_node_service [options]\n'
          '\n'
          'options (with examples):\n'
          '\n'
          '-h                                               this help\n'
          '\n'
          '--config-file=minisecbgp.ini                     pyramid config filename [.ini]\n'
          '--execution-type=[manual|scheduled]              manual = when user execute this function in CLI\n'
          '                                                 scheduled = when this function is executed by crontab\n'
          '--node-ip-address=[192.168.0.1|3232239375]       cluster node IP address\n'
          '--username=ubuntu                                the username to use to configure the cluster node\n'
          '--password=ubuntu                                the user password to access the cluster node\n')


def main(argv=None):
    """Test connectivity (ping and SSH) to a cluster node.

    Parses the command line, bootstraps the Pyramid environment from the
    given config file and runs TestClusterNode checks inside a
    transaction.  Prints usage and exits with status 2 on bad options;
    prints usage (without exiting) when required options are missing.

    :param argv: option list; defaults to ``sys.argv[1:]`` (the default
                 is resolved at call time, not at import time).
    """
    if argv is None:
        argv = sys.argv[1:]
    try:
        # BUGFIX: the long-option list previously contained corrupted text
        # ('"username="******"password="'), a syntax error.
        opts, args = getopt.getopt(argv, 'h', ["config-file=", "execution-type=",
                                               "node-ip-address=", "username=",
                                               "password="])
    except getopt.GetoptError:
        _print_usage()
        sys.exit(2)
    config_file = execution_type = node_ip_address = username = password = ''
    for opt, arg in opts:
        if opt == '-h':
            _print_usage()
            sys.exit()
        elif opt == '--config-file':
            config_file = arg
        elif opt == '--execution-type' and (arg == 'manual' or arg == 'scheduled'):
            execution_type = arg
        elif opt == '--node-ip-address':
            node_ip_address = arg
        elif opt == '--username':
            username = arg
        elif opt == '--password':
            password = arg
    if config_file and execution_type and node_ip_address:
        args = parse_args(config_file)
        setup_logging(args.config_uri)
        env = bootstrap(args.config_uri)
        try:
            with env['request'].tm:
                dbsession = env['request'].dbsession
                ccn = TestClusterNode(dbsession, execution_type, node_ip_address, username, password)
                ccn.test_ping()
                ccn.test_ssh()
        except OperationalError:
            print('Database error')
    else:
        # Required options missing: show help (no exit, matching original).
        _print_usage()
示例#50
0
def main(argv=sys.argv):  # pragma: no cover
    """Command line entry point for ``sd_evolve``.

    With ``--latest``/``--dry-run`` runs pending evolution steps; with
    ``--mark-finished``/``--mark-unfinished`` adjusts step bookkeeping;
    with no options prints the finished/unfinished step report.
    Exits with status 2 on usage errors.
    """
    from substanced.evolution import EvolutionManager

    def usage(e=None):
        # Print an optional error message, the usage text, and exit(2).
        if e is not None:
            _print(e)
            _print('')
        _print("""\
    sd_evolve [--latest] [--dry-run] [--mark-finished=stepname] [--mark-unfinished=stepname] config_uri
      Evolves new database with changes from scripts in evolve packages
         - with no arguments, evolve displays finished and unfinished steps
         - with the --latest argument, evolve runs scripts as necessary
         - with the --dry-run argument, evolve runs scripts but does not issue any commits
         - with the --mark-finished argument, marks the stepname as finished
         - with the --mark-unfinished argument, marks the stepname as unfinished

    e.g. sd_evolve --latest etc/development.ini""")
        sys.exit(2)

    name, argv = argv[0], argv[1:]
    latest = False
    dry_run = False
    mark_finished = []    # step names from -f/--mark-finished
    mark_unfinished = []  # step names from -u/--mark-unfinished

    try:
        opts, args = getopt.getopt(argv, 'l?hdu:f:', [
            'latest',
            'help',
            'dry-run',
            'mark-unfinished=',
            'mark-finished=',
        ])
    except getopt.GetoptError as e:
        usage(e)

    if args:
        config_uri = args[0]
    else:
        usage('Requires a config_uri as an argument')

    for k, v in opts:
        if k in ('-h', '-?', '--help'):
            usage()
        if k in ('-l', '--latest'):
            latest = True
        if k in ('-d', '--dry-run'):
            dry_run = True
        if k in ('-f', '--mark-finished'):
            mark_finished.append(v)
        if k in ('-u', '--mark-unfinished'):
            mark_unfinished.append(v)

    # Option combinations that make no sense together are usage errors.
    if latest and dry_run:
        usage('--latest and --dry-run cannot be used together')

    if (latest or dry_run) and (mark_finished or mark_unfinished):
        usage(
            '--latest/--dry-run cannot be used with --mark-finished/--mark-unfinished'
        )

    setup_logging(config_uri)
    env = bootstrap(config_uri)
    root = env['root']
    registry = env['registry']

    manager = EvolutionManager(root, registry)

    if latest or dry_run:
        # manager.evolve(True) commits; manager.evolve(False) is a dry run.
        complete = manager.evolve(latest)

        if complete:
            if dry_run:
                _print('Evolution steps dry-run:')
            else:
                _print('Evolution steps executed:')
            for item in complete:
                _print('   %s' % item)
        else:
            if dry_run:
                _print('No evolution steps dry-run')
            else:
                _print('No evolution steps executed')

    elif mark_finished or mark_unfinished:
        t = transaction.get()

        for step in mark_finished:
            # Re-read the step sets each iteration: earlier marks change them.
            finished_steps = manager.get_finished_steps()
            unfinished_steps = dict(manager.get_unfinished_steps())
            if step in finished_steps:
                _print('Step %s already marked as finished' % step)
            else:
                if step in unfinished_steps:
                    manager.add_finished_step(step)
                    _print('Step %s marked as finished' % step)
                    t.note('Marked %s evolution step as finished' % step)
                else:
                    _print('Unknown step %s, not marking as finished' % step)

        for step in mark_unfinished:
            finished_steps = manager.get_finished_steps()
            unfinished_steps = dict(manager.get_unfinished_steps())
            if step in finished_steps:
                manager.remove_finished_step(step)
                _print('Step %s marked as unfinished' % step)
                t.note('Marked %s evolution step as unfinished' % step)
            else:
                if step in unfinished_steps:
                    _print('Step %s already marked as unfinished' % step)
                else:
                    _print('Unknown step %s, not marking as unfinished' % step)

        t.commit()

    else:
        # Default: report current step status.
        _print('Finished steps:\n')
        for ts, stepname in manager.get_finished_steps_by_value():
            tp = datetime.datetime.fromtimestamp(ts).strftime(
                '%Y-%m-%d %H:%M:%S')
            _print('    %s %s' % (tp, stepname))
        _print('\nUnfinished steps:\n')
        for stepname, func in manager.get_unfinished_steps():
            _print(' ' * 24 + stepname)
示例#51
0
def main(argv=sys.argv):
    """Load the fixture thesauri into the atramhasis database.

    Bootstraps SQLAlchemy from the config_uri given as ``argv[1]``, then
    imports the eight fixture concept schemes inside one transaction and
    prints a completion marker.
    """
    from fixtures.data import trees, geo
    from fixtures.styles_and_cultures import styles_and_cultures
    from fixtures.materials import materials
    from fixtures.eventtypes import eventtypes
    from fixtures.heritagetypes import heritagetypes
    from fixtures.periods import periods
    from fixtures.species import species
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    session_maker = sessionmaker(bind=engine,
                                 extension=ZopeTransactionExtension())
    db_session = session_maker()

    # One row per thesaurus: (provider, scheme id, scheme uri,
    # Dutch prefLabel, English prefLabel).
    schemes = [
        (trees, 1, 'urn:x-skosprovider:trees',
         'Verschillende soorten bomen', 'Different types of trees'),
        (geo, 2, 'urn:x-skosprovider:geo',
         'Geografie', 'Geography'),
        (styles_and_cultures, 3,
         'https://id.erfgoed.net/thesauri/stijlen_en_culturen',
         'Stijlen en Culturen', 'Styles and Cultures'),
        (materials, 4, 'https://id.erfgoed.net/thesauri/materialen',
         'Materialen', 'Materials'),
        (eventtypes, 5, 'https://id.erfgoed.net/thesauri/gebeurtenistypes',
         'Gebeurtenistypes', 'Event types'),
        (heritagetypes, 6, 'https://id.erfgoed.net/thesauri/erfgoedtypes',
         'Erfgoedtypes', 'Heritage types'),
        (periods, 7, 'https://id.erfgoed.net/thesauri/dateringen',
         'Dateringen', 'Periods'),
        (species, 8, 'https://id.erfgoed.net/thesauri/soorten',
         'Soorten', 'Species'),
    ]
    with transaction.manager:
        for provider, scheme_id, scheme_uri, label_nl, label_en in schemes:
            concept_scheme = ConceptScheme(
                id=scheme_id,
                uri=scheme_uri,
                labels=[
                    Label(label_nl, u'prefLabel', u'nl'),
                    Label(label_en, u'prefLabel', u'en'),
                ])
            import_provider(provider, concept_scheme, db_session)
    print('--atramhasis-db-initialized--')
示例#52
0
from ..models import *

from pyramid.paster import (
    get_appsettings,
    setup_logging,
)

from pyramid.config import Configurator
from paste.deploy.loadwsgi import appconfig
from sqlalchemy import engine_from_config
from sqlalchemy.orm import sessionmaker
import os

# Locate the test configuration relative to this test package:
# <package>/../../config/test.ini
ROOT_PATH = os.path.dirname(__file__)
CONFIG_PATH = os.path.join(ROOT_PATH, '..', '..', 'config', 'test.ini')
setup_logging(CONFIG_PATH)
# NOTE(review): imported after setup_logging -- presumably dbbase reads its
# settings/logging at import time; confirm before reordering.
from dbbase import settings


def setup():
    """Test-suite setup hook: bind the shared session and create all tables.

    Reads the SQLAlchemy connection URL from the module-level ``settings``
    (loaded from config/test.ini at import time).
    """
    engine = engine_from_config(settings, prefix='sqlalchemy.')

    # The Configurator built here previously was assigned to an unused local
    # and had no effect on the tests; dropped as dead code.
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)


def teardown():
    """Test-suite teardown hook: rebind the shared session to a fresh engine."""
    DBSession.configure(bind=engine_from_config(settings, prefix='sqlalchemy.'))
示例#53
0
def main(argv=sys.argv):
    """Initialize the schema and seed it with demo groups, students,
    professors, subjects, courses, works, ratings and users.

    Usage: the first argument is the Pyramid config URI; further arguments
    are ``key=value`` overrides.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    engine = get_engine(settings)
    Base.metadata.create_all(engine)

    session_factory = get_session_factory(engine)

    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)

        groups = [Group(name='Group 1'), Group(name='Group 2')]
        dbsession.add_all(groups)
        # Flush so the autogenerated primary keys are available below.
        dbsession.flush()
        group1, group2 = groups

        student_specs = [
            ('Vovan', group1), ('Petya', group1), ('Serega', group1),
            ('Kolyan', group1), ('Harry', group1), ('Vasiliy', group2),
        ]
        student = [Student(name=nm, group_id=grp.id)
                   for nm, grp in student_specs]
        dbsession.add_all(student)
        dbsession.flush()

        professors = [Professor(name=nm)
                      for nm in ('Mr White', 'Mr Black', 'Mr Brown', 'Mr Pink')]
        dbsession.add_all(professors)

        subjects = [Subject(name=nm)
                    for nm in ('Math', 'Physics', 'Programming', 'History')]
        dbsession.add_all(subjects)
        dbsession.flush()

        prof1, prof2, prof3, prof4 = professors
        sub1, sub2, sub3, sub4 = subjects
        course_specs = [
            (group1, prof1, sub1), (group1, prof2, sub2),
            (group1, prof3, sub3), (group1, prof4, sub4),
            (group2, prof2, sub2), (group2, prof3, sub3),
            (group2, prof4, sub4), (group2, prof1, sub1),
        ]
        course = [Course(group_id=g.id, professor_id=p.id, subject_id=s.id)
                  for g, p, s in course_specs]
        dbsession.add_all(course)
        dbsession.flush()

        # (course index, max_point) -> "Control Work <n>" numbered from 1.
        work_specs = [(0, 25), (0, 30), (1, 30), (1, 30), (2, 30),
                      (3, 25), (4, 30), (5, 30), (6, 30), (7, 30)]
        work = [Work(course_id=course[ci].id, max_point=mp,
                     name='Control Work %d' % (n + 1))
                for n, (ci, mp) in enumerate(work_specs)]
        dbsession.add_all(work)
        dbsession.flush()

        # (student index, work index, points awarded)
        rating_specs = [(0, 0, 15), (0, 1, 25), (1, 2, 24), (2, 3, 15),
                        (3, 4, 25), (4, 5, 20), (4, 6, 25)]
        rating = [Rating(student_id=student[si].id, work_id=work[wi].id,
                         point=pt)
                  for si, wi, pt in rating_specs]
        dbsession.add_all(rating)
        dbsession.flush()

        users = []
        for name, role in (('student', 'base'), ('professor', 'editor')):
            account = User(name=name, role=role)
            account.set_password("123456")
            users.append(account)
        dbsession.add_all(users)
示例#54
0
def update(argv=sys.argv):
    """
    Download the GeoIP database from the URL provided under the config key
    `geoip.city.source` and save it under the path provided by the config key
    `geoip.city.destination`.

    The CSV is then split into chunks and loaded into the GeoRegion table by
    a pool of worker threads.
    """
    if len(argv) != 2:
        usage(argv)

    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    source = settings['geoip.city.csv_source']
    destination = settings['geoip.city.csv_destination']

    engine = engine_from_config(settings, 'sqlalchemy.')
    db_session = scoped_session(sessionmaker())
    db_session.configure(bind=engine)

    # BUGFIX: also accept https:// sources; previously only http:// was
    # treated as a URL and an https source fell through to the local-file
    # branch and failed.
    if source.startswith(('http://', 'https://')):
        log.info("Downloading %s...", source)
        response = requests.get(source)
        log.info("Downloading done.")

        # BUGFIX: response.content is bytes -- wrap it in BytesIO
        # (StringIO raises TypeError under Python 3).
        from io import BytesIO
        compressed = zipfile.ZipFile(BytesIO(response.content))
    else:
        log.info("Opening %s...", source)
        compressed = zipfile.ZipFile(source)

    log.info("Writing to %s...", destination)
    compressed.extractall(path=destination)
    log.info("Writing done.")

    # The archive contains a single versioned directory -- TODO confirm the
    # layout against the MaxMind zip actually used.
    source_dir = os.path.join(destination, listdir(destination)[0])
    final_path = os.path.join(source_dir, 'GeoLiteCity-Location.csv')

    log.info("Dropping the GeoRegion table")
    GeoRegion.__table__.drop(engine)
    log.info("Creating the GeoRegion table")
    GeoRegion.__table__.create(engine)

    # Cache countries by alpha-2 code so worker threads need no extra query.
    countries = {}
    for country in db_session.query(Country).all():
        countries[country.alpha2] = country

    db_session.commit()

    if six.PY3:
        infile = open(final_path, 'r', newline='', encoding='latin1')
    else:
        infile = open(final_path, 'rb')

    with infile as f:
        reader = csv_reader(f, delimiter=',')

        rows = list(reader)

        chunk_size = 5000
        # rows[2:]: skip the two header rows of the CSV.
        split_rows = chunks(rows[2:], chunk_size)
        thread_count = 10.0
        row_len = len(split_rows)
        loops = math.ceil(row_len / thread_count)
        current_row = 0

        total = 0

        for i in range(int(loops)):
            # (thread, rows-in-chunk) pairs for this wave of workers.
            threads = []

            for j in range(int(thread_count)):
                if current_row < row_len:
                    # BUGFIX: use a distinct name; the old code rebound the
                    # outer `rows` list inside the loop.
                    chunk = split_rows[current_row]
                    t = Thread(target=write_to_db,
                               args=(db_session, chunk, countries))
                    threads.append((t, len(chunk)))
                    current_row += 1

            for t, _ in threads:
                t.start()

            for t, chunk_len in threads:
                t.join()
                # BUGFIX: count the rows the joined thread actually handled;
                # the old code added thread_count * chunk_size per thread,
                # overstating progress roughly a hundredfold. Also log via
                # the module logger for consistency with the rest of the
                # function.
                total += chunk_len
                log.info("Done adding %s locations in DB\n", total)

        log.info('Finished, pushing to the database')
def main(argv=sys.argv[1:]):
    """CLI entry point: import a CAIDA AS-Relationship topology file into the
    MiniSecBGP database.

    Requires both --config-file (Pyramid .ini) and --file (compressed CAIDA
    topology located in /tmp); prints usage and exits otherwise.
    """
    # Single copy of the usage text (previously duplicated three times).
    usage_text = (
        '\n'
        'Usage: MiniSecBGP_realistic_topology [options]\n'
        '\n'
        'options (with examples):\n'
        '\n'
        '-h                                               this help\n'
        '\n'
        '--config-file=minisecbgp.ini                     pyramid config filename [.ini]\n'
        '--file=20191201.as-rel2.txt.bz2                  CAIDA AS-Relationship compressed topology filename\n'
        '                                                 * the file must be in /tmp\n'
    )

    try:
        opts, args = getopt.getopt(argv, 'h', ["config-file=", "file="])
    except getopt.GetoptError:
        print(usage_text)
        sys.exit(2)

    # `topology_file` instead of the original `file`, which shadowed the
    # Python 2 builtin.
    config_file = topology_file = ''
    for opt, arg in opts:
        if opt == '-h':
            print(usage_text)
            sys.exit()
        elif opt == '--config-file':
            config_file = arg
        elif opt == '--file':
            topology_file = arg

    if config_file and topology_file:
        args = parse_args(config_file)
        setup_logging(args.config_uri)
        env = bootstrap(args.config_uri)
        try:
            t = RealisticTopology(topology_file)

            # Each step runs in its own transaction so partial progress is
            # committed as the (long) import advances.
            with env['request'].tm:
                dbsession = env['request'].dbsession
                # Flag the import as "in progress" (1).
                t.downloading(dbsession, 1)

            with env['request'].tm:
                pandas_unique_autonomous_systems, pandas_stub_autonomous_systems, df_from_file = t.as_relationship(
                )

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.topology(dbsession)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                id_topology = t.get_topology_id(dbsession)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.region(dbsession, id_topology)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                id_region = t.get_region_id(dbsession, id_topology)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.autonomous_system(dbsession, id_topology, id_region,
                                    pandas_unique_autonomous_systems,
                                    pandas_stub_autonomous_systems)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.automatic_router_id(dbsession, id_topology)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.automatic_prefix(dbsession, id_topology)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                t.automatic_link(dbsession, id_topology, df_from_file)

            with env['request'].tm:
                dbsession = env['request'].dbsession
                # Flag the import as finished (0).
                t.downloading(dbsession, 0)

            t.erase_file()
        except OperationalError:
            print('Database error')
    else:
        print(usage_text)
def main(argv=sys.argv[1:]):
    """CLI entry point: create (or clear) a BGP hijack attack scenario.

    Two call modes:
      * full set of --scenario-name/--topology/--attacker/... options to
        build a new scenario;
      * --config-file plus --scenario-id to clear an existing one.
    """
    # Shared tail of the usage message; the three former copies only
    # differed by an extra ERROR/HELP header line.
    usage_body = (
        'Usage: MiniSecBGP_hijack_attack_scenario [options]\n'
        '\n'
        'options (with examples):\n'
        '\n'
        '-h                                                       this help\n'
        '\n'
        '--config-file="minisecbgp.ini"                           pyramid config filename [.ini]\n'
        '--scenario-name="Test topology"                          the name that will be used to identify this scenario\n'
        '--scenario-description="date 20200729"                   the scenario description\n'
        '--topology=3                                             the topology used as the original base of the scenario\n'
        '--attacker=[65001,65002]                                 define which AS(s) will be the attacker\n'
        '--affected-area=[65001,65003]                            define which these AS(s) will receive and accept the hijacked routes\n'
        '--target=[\'10.0.0.0/24\',\'20.0.0.0/24\']                   define the prefix(s) and mask(s) that will be hijacked by the attacker(s)\n'
        '--attack-type=attraction|interception                    if the attack is an attraction attack or an interception attack\n'
        '--all-paths or --number-of-shortest-paths=[1..999]       number of valid paths between the attacker AS, affected AS and target AS\n'
        '\n'
        'or\n'
        '\n'
        '--scenario-id=16                                         scenario ID\n'
    )

    def _usage(header=''):
        # header is '', 'ERROR\n' or 'HELP\n', matching the original output.
        print('\n' + header + usage_body)

    try:
        opts, args = getopt.getopt(argv, "h", [
            "config-file=", "scenario-id=", "scenario-name=",
            "scenario-description=", "topology=", "attacker=",
            "affected-area=", "target=", "attack-type=", "all-paths",
            "number-of-shortest-paths="
        ])
    except getopt.GetoptError:
        _usage('ERROR\n')
        sys.exit(2)

    config_file = scenario_id = scenario_name = scenario_description = topology = attacker = \
        affected_area = target = attack_type = number_of_shortest_paths = ''
    for opt, arg in opts:
        if opt == '-h':
            _usage('HELP\n')
            sys.exit()
        if opt == '--config-file':
            config_file = arg
        elif opt == '--scenario-id':
            scenario_id = arg
        elif opt == '--scenario-name':
            scenario_name = arg
        elif opt == '--scenario-description':
            scenario_description = arg
        elif opt == '--topology':
            topology = arg
        elif opt == '--attacker':
            attacker = arg
        elif opt == '--affected-area':
            affected_area = arg
        elif opt == '--target':
            target = arg
        elif opt == '--attack-type':
            attack_type = arg
        elif opt == '--all-paths':
            # 0 is the sentinel meaning "all paths".
            number_of_shortest_paths = '0'
        elif opt == '--number-of-shortest-paths':
            number_of_shortest_paths = arg

    if (config_file and scenario_name and topology and attacker and affected_area and target and attack_type and number_of_shortest_paths) \
            or (config_file and scenario_id):
        args = parse_args(config_file)
        setup_logging(args.config_uri)
        env = bootstrap(args.config_uri)
        try:
            with env['request'].tm:
                dbsession = env['request'].dbsession

                print('iniciando o objeto')

                aa = AttackScenario(dbsession, scenario_id, scenario_name,
                                    scenario_description, topology, attacker,
                                    affected_area, target, attack_type,
                                    number_of_shortest_paths)

                print('iniciando o attack_scenario')

                links, edge_array_length, count_asys = aa.attack_scenario()

                # Unique-ish filename derived from the current timestamp with
                # separators stripped.
                link_filename = '/tmp/link_' + str(datetime.now()).replace(
                    ' ', '').replace(':', '').replace('-', '').replace(
                        '.', '') + '.MiniSecBGP'
                # Use a context manager so the handle is closed even if
                # write() raises (the old code leaked it on error).
                with open(link_filename, "a") as f:
                    f.write(links)

                print('VOU CHAMAR O CÓDIGO C++')

                print(link_filename)
                print(edge_array_length)
                print(count_asys)

                # NOTE(review): hard-coded debug arguments override the real
                # ones (commented out below); kept as-is, but this looks like
                # leftover debugging -- confirm before shipping.
                #arguments = [link_filename, edge_array_length, count_asys]
                arguments = ['/tmp/bla.MiniSecBGP', str(5), str(5)]
                subprocess.Popen(['./venv/bin/asp'] + arguments)

                print('RETORNEI DO CÓDIGO C++')

            with env['request'].tm:
                if scenario_id:
                    clear_database(dbsession, scenario_id)
        except OperationalError:
            print('Database error')
    else:
        _usage()
示例#57
0
def main(argv=sys.argv):
    """Create the Elasticsearch product index and bulk-index every product
    from the database, paging by ascending product id.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]

    setup_logging(config_uri)

    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    db_session.configure(bind=engine)

    product_index = get_es_product_index(settings)
    index_body = {
        'settings': {
            'index': {
                'number_of_shards': 4,
                'number_of_replicas': 0,
            }
        }
    }
    es_client.indices.create(index=product_index.get_name(), body=index_body)
    product_index.put_mapping(ProductDoc)

    batch_size = 5
    cursor = None  # id of the last product indexed so far
    total_indexed = 0
    while True:
        query = (
            db_session.query(Product.id, Product.name, Product.status)
            .order_by(Product.id)
        )
        if cursor is not None:
            query = query.filter(Product.id > cursor)
        batch = query.limit(batch_size).all()

        if not batch:
            break

        docs = [
            ProductDoc(_id=row.id, name=row.name, status=row.status)
            for row in batch
        ]
        result = product_index.add(docs)
        total_indexed += len(docs)
        if result.errors:
            details = '\n'.join(
                '\t{}'.format(item.error.reason) for item in result.items
            )
            raise ElasticsearchBulkError('\n' + details)

        if len(batch) < batch_size:
            break
        cursor = batch[-1].id

    product_index.refresh()

    log.info('Indexed %s products', total_indexed)
    log.info(
        '%s products in the index',
        product_index.search_query().count()
    )
示例#58
0
the database does not exist.

"""
import sys
import psycopg2
import logging
from datetime import datetime, timedelta
import time

from pyramid.paster import get_appsettings, setup_logging

from sqlalchemy.engine.url import make_url

config_uri = sys.argv[1]

setup_logging(config_uri)
settings = get_appsettings(config_uri)
log = logging.getLogger(__name__)

setting_url = settings['sqlalchemy.url']
url = make_url(setting_url)

if url.drivername not in ('postgresql', 'postgresql+psycopg2'):
    log.warn('{0} is not a postgresql database. ignored'.format(setting_url))
    sys.exit(0)

params = {
    'db': url.database,
    'user': url.username,
    'password': url.password,
    'encoding': 'utf-8',
def main(argv=sys.argv[1:]):
    """CLI entry point: generate the realistic-analysis emulation artifacts
    (per-node AS assignment, emulation and router configs) for a topology and
    copy them to the cluster nodes, timing each phase in the database.
    """
    # Single copy of the usage line (previously duplicated).
    usage_text = (
        'MiniSecBGP_hijack_realistic_analysis '
        '--config-file=<pyramid config file .ini> '
        '--topology=<Topology ID> '
        '--include-stub=<True|False> '
        '--cluster-list=<hostname list of cluster nodes> '
        '--topology-distribution-method=<Topology distribution method ID (CUSTOMER CONE|METIS|MANUAL|ROUND ROBIN)> '
        '--emulation-platform=<Emulation platform ID (MININET|DOCKER)> '
        '--router-platform=<Router platform ID (QUAGGA|BIRD)>')

    try:
        # BUGFIX: the optstring was "h:", which made getopt require an
        # argument for the help flag; plain "h" matches the sibling scripts.
        opts, args = getopt.getopt(argv, "h", [
            "config-file=", "topology=", "include-stub=", "cluster-list=",
            "topology-distribution-method=", "emulation-platform=",
            "router-platform="
        ])
    except getopt.GetoptError:
        print(usage_text)
        sys.exit(2)

    # BUGFIX: pre-initialise option locals so an omitted flag no longer
    # raises NameError below (mirrors the other MiniSecBGP entry points).
    config_file = id_topology = include_stub = cluster_list = ''
    topology_distribution_method = emulation_platform = router_platform = ''
    for opt, arg in opts:
        if opt == '-h':
            print(usage_text)
            sys.exit()
        elif opt == '--config-file':
            config_file = arg
        elif opt == '--topology':
            id_topology = arg
        elif opt == '--include-stub':
            include_stub = str2bool(arg)
        elif opt == '--cluster-list':
            cluster_list = arg
        elif opt == '--topology-distribution-method':
            topology_distribution_method = arg
        elif opt == '--emulation-platform':
            emulation_platform = arg
        elif opt == '--router-platform':
            router_platform = arg

    args = parse_args(config_file)
    setup_logging(args.config_uri)
    env = bootstrap(args.config_uri)
    try:
        with env['request'].tm:
            dbsession = env['request'].dbsession

            # Resolve the id of the 'quagga' router platform to compare
            # against the user-supplied --router-platform below.
            quagga = dbsession.query(models.RouterPlatform.id). \
                filter(func.lower(models.RouterPlatform.router_platform) == 'quagga').first()

            ra = RealisticAnalysis(dbsession, id_topology, include_stub,
                                   cluster_list, topology_distribution_method,
                                   emulation_platform, router_platform)

            # Each phase is wall-clock timed and the duration persisted.
            time_get_data = time.time()
            ra.dfs_from_database()
            ra.data_frames()
            time_get_data = time.time() - time_get_data
            save_to_database(dbsession, ['time_get_data'], [time_get_data],
                             id_topology)

            time_autonomous_system_per_server = time.time()
            ra.autonomous_system_per_server()
            time_autonomous_system_per_server = time.time(
            ) - time_autonomous_system_per_server
            save_to_database(dbsession, ['time_autonomous_system_per_server'],
                             [time_autonomous_system_per_server], id_topology)

            time_emulate_platform_commands = time.time()
            ra.emulation_commands()
            time_emulate_platform_commands = time.time(
            ) - time_emulate_platform_commands
            save_to_database(dbsession, ['time_emulate_platform_commands'],
                             [time_emulate_platform_commands], id_topology)

            # Router platform: only Quagga command generation is implemented.
            time_router_platform_commands = time.time()
            if router_platform == str(quagga[0]):
                ra.quagga_commands()
            time_router_platform_commands = time.time(
            ) - time_router_platform_commands
            save_to_database(dbsession, ['time_router_platform_commands'],
                             [time_router_platform_commands], id_topology)

            time_write_files = time.time()
            ra.write_to_file()
            time_write_files = time.time() - time_write_files
            save_to_database(dbsession, ['time_write_files'],
                             [time_write_files], id_topology)

            time_copy_files = time.time()
            ra.copy_files_to_cluster_nodes()
            time_copy_files = time.time() - time_copy_files
            save_to_database(dbsession, ['time_copy_files'], [time_copy_files],
                             id_topology)

    except OperationalError:
        print('Database error')
示例#60
0
def main():
    """Initialize the application database and seed it with demo fixtures.

    Reads settings from a hard-coded ``development.ini``, configures
    SQLAlchemy, drops and recreates the whole schema, then inserts a sample
    area (a Kathmandu polygon), two imagery licenses, and one demo project
    with a French translation.

    WARNING: ``Base.metadata.drop_all`` destroys all existing data — this is
    a development/bootstrap script, not safe for production databases.
    """
    # Config path is hard-coded; this script is only meant for dev setups.
    setup_logging('development.ini')
    settings = get_appsettings('development.ini')

    load_local_settings(settings)

    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    # Configure the translation machinery (presumably sqlalchemy_i18n — TODO
    # confirm) with the app's language list before mappers are finalized.
    translation_manager.options.update({
        'locales':
        settings['available_languages'].split(),
        'get_locale_fallback':
        True
    })
    configure_mappers()

    # Geometry columns need "management" mode on PostGIS 1.x so that
    # AddGeometryColumn/DropGeometryColumn are used by DDL generation.
    postgis_version = DBSession.execute(func.postgis_version()).scalar()
    if not postgis_version.startswith('2.'):
        # With PostGIS 1.x the AddGeometryColumn and DropGeometryColumn
        # management functions should be used.
        Area.__table__.c.geometry.type.management = True
        Area.__table__.c.centroid.type.management = True
        Task.__table__.c.geometry.type.management = True

    # Full schema reset: wipe everything, then recreate from the models.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        # Sample project area: a small polygon over central Kathmandu,
        # parsed GeoJSON -> shapely -> WKB element (SRID 4326 / WGS84).
        geometry = '{"type":"Polygon","coordinates":[[[85.31038284301758,27.70731518595052],[85.31089782714842,27.698120147680104],[85.3242015838623,27.69842412827061],[85.323429107666,27.70731518595052],[85.31038284301758,27.70731518595052]]]}'  # noqa
        geometry = geojson.loads(geometry,
                                 object_hook=geojson.GeoJSON.to_instance)
        # NOTE(review): shapely.geometry.asShape is deprecated (removed in
        # Shapely 2.0); shapely.geometry.shape is the modern equivalent.
        geometry = shapely.geometry.asShape(geometry)
        geometry = shape.from_shape(geometry, 4326)

        area = Area(geometry)
        DBSession.add(area)

        # Two imagery licenses users must accept before tracing imagery.
        license1 = License()
        license1.name = 'NextView'
        license1.description = "This data is licensed for use by the US Government (USG) under the NextView (NV) license and copyrighted by Digital Globe or GeoEye. The NV license allows the USG to share the imagery and Literal Imagery Derived Products (LIDP) with entities outside the USG when that entity is working directly with the USG, for the USG, or in a manner that is directly beneficial to the USG. The party receiving the data can only use the imagery or LIDP for the original purpose or only as otherwise agreed to by the USG. The party receiving the data cannot share the imagery or LIDP with a third party without express permission from the USG. At no time should this imagery or LIDP be used for other than USG-related purposes and must not be used for commercial gain. The copyright information should be maintained at all times. Your acceptance of these license terms is implied by your use."  # noqa
        license1.plain_text = "In other words, you may only use NextView imagery linked from this site for digitizing OpenStreetMap data for humanitarian purposes."  # noqa
        DBSession.add(license1)

        license2 = License()
        license2.name = 'Astrium/UNOSAT'
        license2.description = "UNOSAT allow any INTERNET USER to use the IMAGE to develop DERIVATIVE WORKS provided that the INTERNET USER includes the DERIVATIVE WORKS he/she created in the OpenStreetMap database under CC-BY-SA licence (http://creativecommons.org/licenses/by-sa/2.0/) and/or Open Database licence (ODbL: http://www.opendatacommons.org/licenses/odbl/), with the credit of the corresponding PRODUCT conspicuously displayed and written in full, in order to allow any OpenStreetMap database user to have access to and to use the DERIVATIVE WORKS. Except for the foregoing, the END USER and/or the INTERNET USER shall not be entitled to sell, distribute, assign, dispose of, lease, sublicense or transfer, directly or indirectly, any DERIVATIVE WORKS to any third party."  # noqa
        license2.plain_text = "Astrium GEO-Information Services and UNOSAT are allowing access to this imagery for creating information in OpenStreetMap. Other uses are not allowed."  # noqa
        DBSession.add(license2)

        # One demo project using the area and the first license, filled with
        # placeholder (lorem ipsum) descriptions and a TMS imagery layer.
        project = Project('Kathmandu - Map all primary roads and buildings')
        project.area = area
        project.short_description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."  # noqa
        project.description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."  # noqa
        project.instructions = "**The detailed instructions**\n\nLorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."  # noqa
        project.entities_to_map = "primary roads, buildings"
        project.imagery = "tms[22]:http://hiu-maps.net/hot/1.0.0/kathmandu_flipped/{zoom}/{x}/{y}.png"  # noqa
        project.license = license1
        DBSession.add(project)

        # Translated project name stored under the 'fr' locale.
        with project.force_locale('fr'):
            project.name = "Kathmandu - Cartographier les routes et les bâtiments"  # noqa

        # Auto-generate the project's task grid (argument 17 is presumably
        # the tile zoom level — TODO confirm against Project.auto_fill).
        project.auto_fill(17)