def listuserinfo(): arguments = listuserparser.parse_args() if not arguments.config or not arguments.username: listuserparser.print_usage() else: config_uri = arguments.config setup_logging(config_uri) try: settings = get_appsettings(config_uri, 'factored') except LookupError: settings = get_appsettings(config_uri, 'main') engine = engine_from_config(settings, 'sqlalchemy.') DBSession.configure(bind=engine) session = DBSession() users = session.query(User).filter_by( username=arguments.username).all() if len(users) > 0: user = users[0] print 'username:%s, secret: %s' % ( user.username, user.secret) print 'bar code url:', get_barcode_image(user.username, user.secret, settings['appname']) else: print '"%s" user not found' % arguments.username
def setUp(self):
    """Create an in-memory SQLite DB and seed founder/company/admin fixtures.

    Python 2 code (``iteritems``, ``unicode``, ``basestring``).
    """
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    # The relative tests.ini path differs between the nosetest and the
    # PyCharm test runners, so try both.
    try:
        settings = get_appsettings('../tests.ini')
    except Exception:  # narrowed from a bare except; keep best-effort fallback
        settings = get_appsettings('tests.ini')
    self.config = testing.setUp(settings=settings)
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        self.founder = User(username='******', password='******',
                            email='*****@*****.**', activated=True,
                            active=True, firstname='founder1_firstname',
                            lastname='founder1_lastname', city='founder1_city',
                            state='founder1_state', title='founder1_title')
        self.company = BaseCompany(name='testcompany', founded=2015,
                                   description='company_desc',
                                   startup_teamdescription='company_team_desc',
                                   website='company_website',
                                   linkedin='company_linkedin',
                                   twitterhandle='company_twitter',
                                   angelcoprofile='company_angellist')
        self.admin = User(username='******', password='******',
                          email=settings.get('admin_mail'), activated=False,
                          groups='admin')
        DBSession.add(self.founder)
        DBSession.add(self.company)
        DBSession.add(self.admin)
        # Settings values may be dicts ({'value': ...}) or scalars; scalar
        # values starting with 'app_config.' are resolved from the INI file.
        for k, v in Settings.settings.iteritems():
            if type(v) == dict:
                DBSession.add(Settings(key=unicode(k),
                                       value=unicode(v['value'])))
            else:
                if isinstance(v, basestring) and v.startswith('app_config.'):
                    v = settings.get(v[len('app_config.'):])
                DBSession.add(Settings(key=unicode(k), value=unicode(v)))
        DBSession.flush()
        self.user_id = self.founder.user_id
    self.founders_created = []
def init():
    """Configure the application exactly once, preferring local.ini."""
    global _inited
    if _inited:
        return
    project_root = dirname(dirname(dirname(realpath(__file__))))
    local_ini = join(project_root, 'local.ini')
    # A developer's local.ini overrides the checked-in development.ini.
    if exists(local_ini):
        app_settings = get_appsettings(local_ini)
    else:
        app_settings = get_appsettings(join(project_root, 'development.ini'))
    configure(app_settings)
    _inited = True
def main(argv=sys.argv):
    """Bind the DB engine from the given config file and create all tables."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    # The app section name varies between deployments.
    try:
        app_settings = get_appsettings(config_uri, 'factored')
    except LookupError:
        app_settings = get_appsettings(config_uri, 'main')
    db_engine = engine_from_config(app_settings, 'sqlalchemy.')
    DBSession.configure(bind=db_engine)
    Base.metadata.create_all(db_engine)
def setUpClass(cls):
    """Load test settings (local override first) and open a shared connection."""
    # A developer-specific test.local.ini takes precedence when present.
    try:
        cls.settings = get_appsettings('test.local.ini')
    except FileNotFoundError:
        cls.settings = get_appsettings('test.ini')
    cls.engine = engine_from_config(cls.settings, 'sqlalchemy.')
    cls.conn = cls.engine.connect()
    # Provide mako defaults without clobbering values already in the ini.
    cls.settings.setdefault('mako.directories', 'pizzavolus:templates/')
    cls.settings.setdefault('mako.imports', '''
from pizzavolus import date_fmt
from datetime import datetime
''')
def test_the_test_2(self):
    """Check the .ini shared secret, then confirm that patching open()
    yields an app whose secret comes from secret_file instead.
    """
    plain_settings = get_appsettings(test_ini)
    self.assertEqual(plain_settings['agent.secret'], 'sharedsecret')
    patched_open = filter_open(TestAgentAPI._ini_filter,
                               pattern=r'test\.ini$', verbose=False)
    with patch('builtins.open', patched_open):
        patched_settings = get_appsettings(test_ini)
        # With the filter applied, the inline secret disappears and only the
        # secretfile reference remains.
        self.assertEqual(patched_settings.get('agent.secret', 'None'), 'None')
        self.assertEqual(patched_settings.get('agent.secretfile', 'None'),
                         secret_file)
def main(argv=sys.argv):
    """Bind the engine (honoring $DATABASE_URL) and create all tables."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    # Read the settings once (the original parsed the file twice in a row).
    settings = get_appsettings(config_uri, options=options)
    # Heroku-style deployments override the DB URL via the environment.
    if 'DATABASE_URL' in os.environ:
        settings['sqlalchemy.url'] = os.environ['DATABASE_URL']
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
def init_app(self, args):
    """Initialises the Pyramid application.

    Loads config settings and initialises SQLAlchemy.
    """
    self._args = args
    setup_logging(self._args.config)
    settings = get_appsettings(self._args.config)
    if 'environment' not in settings:
        raise KeyError('Missing key "environment" in config. Specify '
                       'environment in paster INI file.')
    # Layer the runtime config (rc) on top of the INI settings.
    runtime_cfg = Rc(
        environment=settings['environment'],
        root_dir=os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..')
        )
    )
    runtime_cfg.load()
    settings.update(runtime_cfg.data)
    settings['rc'] = runtime_cfg
    pysite.models.init(settings, 'db.pysite.sa.')
    self._rc = runtime_cfg
    self._settings = settings
    pysite._init_vmail(runtime_cfg)
def main(argv=sys.argv):
    """Build the demo content tree: root folder, one subfolder, one document."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    config = Configurator(settings=settings)
    config.include('pyramid_sqlalchemy')
    # Schema plus default data are created in a single transaction.
    with transaction.manager:
        metadata.create_all()
        root = RootFolder(
            name='',
            title='Moonbase Demo',
            __acl__=[['Allow', ['paul'], 'view']],
        )
        Session.add(root)
        f1 = root['f1'] = Folder(
            title='Folder 1',
            __acl__=[['Allow', ['shane'], 'view']],
        )
        f1['da'] = Document(title='Document 1A')
def main(argv=sys.argv):
    """Create the static dir, run Alembic via a generated ini, seed data."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    mkdir(settings['static_files'])
    # Create Ziggurat tables by running alembic against a temporary ini.
    alembic_ini_file = 'alembic.ini'
    if not os.path.exists(alembic_ini_file):
        alembic_ini = ALEMBIC_CONF.replace('{{db_url}}',
                                           settings['sqlalchemy.url'])
        # Context manager closes the handle even if the write fails (the
        # original opened/closed the file manually).
        with open(alembic_ini_file, 'w') as f:
            f.write(alembic_ini)
    bin_path = os.path.split(sys.executable)[0]
    alembic_bin = os.path.join(bin_path, 'alembic')
    command = '%s upgrade head' % alembic_bin
    os.system(command)
    os.remove(alembic_ini_file)
    # Insert data
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    init_model()
    create_schemas(engine)
    Base.metadata.create_all(engine)
    initial_data.insert()
    transaction.commit()
def main(argv=sys.argv):
    """Exercise the models: insert demo rows, then delete them again."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        demo_model = MyModel(name='one', value=1)
        DBSession.add(demo_model)
        first_msg = Message(msgid='c1', content='yoloswag1',
                            timestamp='5.11.2015')
        second_msg = Message(msgid='c2', content='yoloswag2',
                             timestamp='5.11.2015')
        DBSession.add(first_msg)
        DBSession.add(second_msg)
        removal_marker = DelMessage(msgid='c1', timestamp='6.11.2015')
        DBSession.add(removal_marker)
        # Everything added above is deleted again before the commit.
        DBSession.delete(demo_model)
        DBSession.delete(first_msg)
        DBSession.delete(second_msg)
        DBSession.delete(removal_marker)
def main(argv=sys.argv):
    """Read the config file named on the command line and init the model."""
    if len(argv) != 2:
        usage(argv)
    ini_path = argv[1]
    setup_logging(ini_path)
    init_model(get_appsettings(ini_path))
def main(argv=sys.argv):
    """Initialise the DB and load agencies from tab-separated agencies.csv."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        with open('agencies.csv', 'r') as csv_file:
            rows = csv_file.read().split('\n')
        for row in rows:
            if row.strip() == '':
                continue
            # Columns: agencyid, shortname, longname, type, description,
            # websiteurl
            fields = row.split('\t')
            agency_type = AgencyTypes.get_from_code(DBSession, fields[3])
            DBSession.add(Agencies(
                agency_code=fields[1],
                agency_name=fields[2],
                type_id=agency_type.id,
                description=fields[4],
                website=fields[5],
            ))
    transaction.commit()
def main(argv=sys.argv):
    """Comment on testing updates that now meet their release's requirements."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
    Session.configure(bind=engine)
    db = Session()
    with transaction.manager:
        pending = db.query(Update).filter_by(status=UpdateStatus.testing,
                                             request=None)
        for update in pending:
            # Releases without a testing requirement are skipped outright.
            if not update.release.mandatory_days_in_testing:
                print('%s doesn\'t have mandatory days in testing'
                      % update.release.name)
                continue
            # Already acknowledged as meeting the requirements.
            if update.met_testing_requirements:
                continue
            if update.meets_testing_requirements:
                print('%s now meets testing requirements' % update.title)
                text = config.get('testing_approval_msg') % update.days_in_testing
                update.comment(db, text, author='bodhi')
def setUp(self):
    """Prepare test settings, a dummy request/session and the upload dir."""
    self.settings = get_appsettings('configs/tests.ini', name='main')
    self.settings['askomics.upload_user_data_method'] = 'insert'
    self.request = testing.DummyRequest()
    self.request.session['username'] = '******'
    self.request.session['group'] = 'base'
    self.request.session['admin'] = False
    self.request.session['blocked'] = True
    # Files
    # Per-user upload directory; created on first run.
    self.temp_directory = (self.settings['askomics.files_dir'] + '/upload/'
                           + self.request.session['username'])
    if not os.path.isdir(self.temp_directory):
        os.makedirs(self.temp_directory)
    # Set the upload dir
    self.request.session['upload_directory'] = self.temp_directory
    # Seed the directory with fixture files only when it is empty.
    if not os.listdir(self.temp_directory):
        fixture_names = ['people.tsv', 'instruments.tsv',
                         'play_instrument.tsv', 'transcript.tsv', 'qtl.tsv',
                         'small_data.gff3', 'turtle_data.ttl',
                         'bed_example.bed']
        for fixture in fixture_names:
            src = (os.path.join(os.path.dirname(__file__), "..", "test-data")
                   + '/' + fixture)
            dst = self.request.session['upload_directory'] + '/' + fixture
            copyfile(src, dst)
def main(argv=sys.argv):
    """Add a new Board with the given --title (and optional --slug).

    The slug defaults to the lowercased title with spaces replaced by
    underscores.
    """
    config_uri = argv[1]
    argv = argv[2:]
    parser = optparse.OptionParser(usage=USAGE, description=DESCRIPTION)
    parser.add_option('-t', '--title', dest='title', type='string')
    parser.add_option('-s', '--slug', dest='slug', type='string')
    options, args = parser.parse_args(argv)
    if options.title is None:
        parser.error('You must provide at least --title')
    slug = options.slug
    if slug is None:
        slug = options.title.lower().replace(' ', '_')
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    with transaction.manager:
        board = Board(title=options.title, slug=slug)
        DBSession.add(board)
        DBSession.flush()
        # Removed a redundant extra pair of parentheses around the print arg.
        print("Successfully added %s (slug: %s)" % (board.title, board.slug))
def setUp(self):
    """Create a temp working dir and a TSV source file for each test."""
    self.temp_directory = tempfile.mkdtemp()
    self.settings = get_appsettings('configs/development.ini', name='main')
    self.request = testing.DummyRequest()
    self.srcfile = SourceFileTsv(self.settings, self.request.session,
                                 SIMPLE_SOURCE_FILE, 10)
def ini_settings(request, test_config_path) -> dict:
    """Load INI settings for a test run from the py.test command line.

    Example:

        py.test yourpackage -s --ini=test.ini

    :return: dictionary of key/value pairs from the ``app`` section of the
        file named by ``test_config_path``
    """
    # Activate the project's INI-inclusion mechanism before paster parses
    # the file.
    # TODO: Don't use get_appsettings() from paster, but create a INI
    # includer compatible version
    from websauna.utils.configincluder import monkey_patch_paster_config_parser
    monkey_patch_paster_config_parser()
    # Configure Python logging from the INI file.
    setup_logging(test_config_path)
    config = get_appsettings(test_config_path)
    # Callers sometimes need to know which file the settings came from.
    config["_ini_file"] = test_config_path
    return config
def main(argv=sys.argv):
    """Rebuild the schema and seed a genre hierarchy plus one music record.

    The original seed data contained an offensive placeholder string in the
    'HipHop' genre info; it has been replaced with neutral wording.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        hiphop = Genre(id=0, name='HipHop',
                       info='Hip hop genre seed record',
                       year_of_found=1990)
        DBSession.add(hiphop)
        glitchhop = Genre(id=1, name='GlitchHop', info='Nu vaaassheee',
                          year_of_found=2014, parent_genre=hiphop)
        DBSession.add(glitchhop)
        grime = Genre(id=2, name='Grime', info='Zhostkiy rap',
                      year_of_found=2006, parent_genre=hiphop)
        DBSession.add(grime)
        dark_grime = Genre(id=3, name='Dark Grime',
                           info='Tyomniy Zhostkiy rap',
                           year_of_found=2009, parent_genre=grime)
        DBSession.add(dark_grime)
        music1 = Music(author='Simplex_Method', title='Of Literal Data',
                       year=2015, genre=grime)
        DBSession.add(music1)
def main(argv=sys.argv): global logger, settings, ca, ri, aws_access, aws_secret, thekey if len(argv) != 2: usage(argv) config_uri = argv[1] setup_logging(config_uri) settings = get_appsettings(config_uri) # TODO: need to add settings to define where the files will be logger = logging.getLogger('scripts') # make configurable? site_blacklist = set(['MedPass', 'Bridge']) print "# Stack to site info generated by build_ini_info script" stackmap = tools.parse_sites() for stack, sites in stackmap.items(): clean = set(sites) - site_blacklist print "stacks.%s = %s" % (stack, ', '.join(clean)) print "# RDS to site info generated by build_ini_info script" rdsmap = tools.parse_rds() # dbs.a4.gentivalink = a4-css-sup-cleanup-rds for stack, sites in rdsmap.items(): for site, rdses in sites.items(): for rds in rdses: print "dbs.%s.%s = %s" % (stack, site, rds)
def main(argv=sys.argv):
    """Run the source-to-target database migration defined in migration.ini."""
    ini_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'migration.ini')
    settings = get_appsettings(ini_path)
    engine_target = engine_from_config(settings, 'sqlalchemy_target.')
    engine_source = engine_from_config(settings, 'sqlalchemy_source.')
    logging.basicConfig()
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)
    # create a fresh schema on the target database
    Session = sessionmaker(extension=ZopeTransactionExtension())  # noqa
    session = Session(bind=engine_target)
    Base.metadata.drop_all(engine_target, checkfirst=True)
    setup_db(engine_target, session)
    connection_source = engine_source.connect()
    batch_size = 1000
    # Order matters: entities are migrated before versions and sequences.
    migration_steps = [
        MigrateUsers, MigrateSummits, MigrateParkings, MigrateSites,
        MigrateProducts, MigrateHuts, MigrateRoutes, MigrateVersions,
        UpdateSequences,
    ]
    for step in migration_steps:
        step(connection_source, session, batch_size).migrate()
def setUp(self):
    """Bind DBSession to the development DB and create all tables."""
    self.config = testing.setUp()
    app_settings = get_appsettings('development.ini')
    db_engine = engine_from_config(app_settings, 'sqlalchemy.')
    from tof.models import Base
    DBSession.configure(bind=db_engine)
    Base.metadata.create_all(db_engine)
def main(argv=sys.argv):
    """Create the schema and seed one Pi, one tag and one admin-level user."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        pi = RasPi()
        pi.uuid = '11:22:33:44:55:66'
        pi.description = "Testing Pi"
        pi.url = "http://www.facebook.com"
        pi.orientation = 0
        pi.browser = True
        pi.lastseen = datetime.now()
        DBSession.add(pi)
        tag = Tags()
        tag.uuid = '11:22:33:44:55:66'
        tag.tag = 'test'
        DBSession.add(tag)
        # Renamed the local from 'User' to avoid class-like shadowing.
        account = UserModel()
        account.email = '*****@*****.**'
        account.AccessLevel = 2
        DBSession.add(account)
        DBSession.flush()
def main(argv=sys.argv):
    """Rebuild the schema, create the admin user and load default settings.

    Python 2 code. Setting values starting with 'app_config.' are resolved
    against the INI file.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        user = User(username='******', password='******',
                    email=settings.get('admin_mail'), activated=True,
                    groups='admin')
        DBSession.add(user)
        DBSession.flush()
        for k, v in Settings.settings.iteritems():
            # isinstance instead of type() == dict: idiomatic, and also
            # accepts dict subclasses.
            if isinstance(v, dict):
                DBSession.add(Settings(key=unicode(k),
                                       value=unicode(v['value'])))
            else:
                # Indirect values are looked up in the application config.
                if isinstance(v, basestring) and v.startswith('app_config.'):
                    v = settings.get(v[len('app_config.'):])
                DBSession.add(Settings(key=unicode(k), value=unicode(v)))
        DBSession.flush()
def bootstrap_script(argv):
    """Validate CLI args, set up logging and return the app settings."""
    if len(argv) != 2:
        usage(argv)
    ini_path = argv[1]
    setup_logging(ini_path)
    return get_appsettings(ini_path)
def main(argv=sys.argv):
    """Wire up the DB, cache and modules, then start the runtime."""
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    cache.cache = cache.configure_cache(settings)
    config = Configurator(
        settings=settings,
        root_factory=RootFactory,
        locale_negotiator=locale_neg,
    )
    config.add_route_predicate('vhost', VHostPredicate)
    config.add_view_predicate('vhost', VHostPredicate)
    module_manager = config.registry.getUtility(IModuleManager)
    # Core module first, then everything the deployment has enabled.
    module_manager.load('core')
    module_manager.load_enabled()
    rts = rt.configure(module_manager, config.registry)
    app = rts.app()
    rt.run(rts, app)
def update(argv=sys.argv):
    """Download the GeoIP database from the URL given by the config key
    `geoip.city.source` and save it under the path given by the config key
    `geoip.city.destination`.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    source = settings['geoip.city.source']
    output = settings['geoip.city.destination']
    log.info("Downloading %s...", source)
    response = requests.get(source)
    log.info("Downloading done.")
    # The upstream file is gzip-compressed; decompress it in memory.
    compressed = gzip.GzipFile(fileobj=StringIO(response.content))
    with open(output, "wb") as f:
        log.info("Writing to %s...", output)  # fixed 'Writting' misspelling
        f.write(compressed.read())
    log.info("Writing done.")
def main(argv=sys.argv):
    """Create the schema and seed the default document type, system user and
    default scraper."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    # Added for its side effect; the returned object was never used.
    DocumentTypes.add(
        name="Adobe PDF",
        description="Adobe PDF file",
        mime_type="application/pdf",
    )
    system_owner = Users.add(
        first="SYSTEM",
        last="USERS",
        email="system@localhost",
        password="******",
    )
    default_scraper = Scrapers.add(
        name="Default Scraper",
        # fixed 'defualt' misspelling in the stored description
        description="CivicDocs.IO loads with a single, default scraper.",
        owner_id=system_owner.id,
    )
    print("DEFAULT SCRAPER ID:\r\n{0}\r\n".format(default_scraper.id))
def run(self, argv):
    """Fetch snow-removal planifications from the Montreal InfoNeige API for
    a given date and push street-side statuses to CartoDB.

    argv: [prog, config_uri, fromDate, *key=value options]
    """
    if len(argv) < 3:
        self.usage(argv)
    config_uri = argv[1]
    fromDate = argv[2]
    options = parse_vars(argv[3:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        url = 'https://servicesenligne2.ville.montreal.qc.ca/api/infoneige/InfoneigeWebService?wsdl'
        client = Client(url)
        planification_request = client.factory.create('getPlanificationsForDate')
        planification_request.fromDate = fromDate
        # NOTE(review): API token is embedded in source — should move to the
        # config file / environment.
        planification_request.tokenString = 'ug33-b81ab488-c335-4021-9c52-26d6b8523301-e7aa002b-0d9d-4b5c-81ef-b012979cdafb-dab06588-1962-4b16-9942-a18054094f60-a4186179-d555-4fed-b35f-ec0c74da97a3-aa3b3766-4d26-42f0-888a-a6569a1dd745'
        response = client.service.GetPlanificationsForDate(planification_request)
        if response['responseStatus'] == 0:
            log.info('%s plannings returned',
                     response['planifications']['count'])
            cartodb_client = CartoDBOAuth(settings['cartodb.key'],
                                          settings['cartodb.secret'],
                                          settings['cartodb.user'],
                                          settings['cartodb.password'],
                                          settings['cartodb.domain'])
            for result in response['planifications']['planification']:
                if any(val in result for val in ['dateDebutPlanif',
                                                 'dateFinPlanif',
                                                 'dateDebutReplanif',
                                                 'dateFinReplanif']):
                    # Default missing replanification dates to None (suds
                    # objects raise AttributeError for absent fields).
                    # Folds the two duplicated try/except blocks into a loop.
                    for attr in ('dateDebutReplanif', 'dateFinReplanif'):
                        try:
                            result[attr]
                        except AttributeError:
                            result[attr] = None
                    # NOTE(review): string-formatted SQL; values come from
                    # the city API, but parameterized queries would be safer.
                    cartodb_client.sql(
                        'UPDATE cote SET etat = %(etat)s '
                        'WHERE cote_rue_id = %(cote_rue_id)d'
                        % {"etat": result['etatDeneig'],
                           "cote_rue_id": result['coteRueId']})
        else:
            log.info('Status %s: %s', response['responseStatus'],
                     response['responseDesc'])
def main(argv=sys.argv):
    """Rebuild the schema and seed it with synthetic moisture readings."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    # Delete old data, then recreate the tables.
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        sensor_count = 5
        readings_per_sensor = 500
        reading_interval_minutes = 10
        for sensor_index in range(sensor_count):
            sensor = Sensor(id=sensor_index + 1, type='Moisture')
            DBSession.add(sensor)
            # Readings are backdated from "now", oldest first.
            now = datetime.datetime.now()
            for steps_back in range(readings_per_sensor, 0, -1):
                DBSession.add(Reading(
                    sensor_id=sensor.id,
                    timestamp=now - datetime.timedelta(
                        minutes=reading_interval_minutes * steps_back),
                    value=random.randrange(1023),
                ))
def main(argv=sys.argv):
    """Initialize the database: staff group/users plus demo member records.

    NOTE(review): relies on a module-level ``how_many`` for the bulk member
    loop — confirm it is defined elsewhere in this script.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    # add some content
    # a group for accountants/staff
    with transaction.manager:
        accountants_group = Group(name=u"staff")
        try:
            DBSession.add(accountants_group)
            DBSession.flush()
            print("adding group staff")
        except Exception:  # narrowed from bare except; keep best-effort seed
            print("could not add group staff.")
    # staff personnel
    with transaction.manager:
        staffer1 = C3sStaff(
            login=u"rut",
            password=u"berries",
            email=u"*****@*****.**",
        )
        staffer1.groups = [accountants_group]
        try:
            DBSession.add(staffer1)
            print("adding staff rut")
            DBSession.flush()
        except Exception:  # narrowed from bare except
            print("it borked! (rut)")
    # one more staffer
    with transaction.manager:
        staffer2 = C3sStaff(
            login=u"reel",
            password=u"boo",
            email=u"*****@*****.**",
        )
        staffer2.groups = [accountants_group]
        try:
            DBSession.add(staffer2)
            print("adding staff reel")
            DBSession.flush()
        except Exception:  # narrowed from bare except
            print("it borked! (reel)")
    # a member, actually a membership form submission
    with transaction.manager:
        member1 = C3sMember(
            firstname=u"Firstnäme",  # includes umlaut
            lastname=u"Lastname",
            email=u"*****@*****.**",
            password=u"berries",
            address1=u"address one",
            address2=u"address two",
            postcode=u"12345 foo",
            city=u"Footown Mäh",
            country=u"Foocountry",
            locale=u"DE",
            date_of_birth=date.today(),
            email_is_confirmed=False,
            email_confirm_code=u"ABCDEFGHIJ",
            num_shares=u'10',
            date_of_submission=datetime.now(),
            membership_type=u'normal',
            member_of_colsoc=True,
            name_of_colsoc=u"GEMA",
        )
        try:
            DBSession.add(member1)
            print("adding Firstnäme")
        except Exception:  # narrowed from bare except
            pass
    # even more members with semi-random data
    import random
    import string
    print("about to add %s members..." % how_many)
    with transaction.manager:
        for i in range(how_many):
            member = C3sMember(
                firstname=u"Firstnäme%s" % i,  # includes umlaut
                lastname=u"Lastname",
                email=u"*****@*****.**",
                password=u"berries",
                address1=u"address one",
                address2=u"address two",
                postcode=u"12345 foo",
                city=u"Footown Mäh",
                country=u"Foocountry",
                locale=u"DE",
                date_of_birth=date.today(),
                email_is_confirmed=False,
                email_confirm_code=u''.join(
                    random.choice(string.ascii_uppercase + string.digits)
                    for x in range(8)),
                num_shares=random.randint(1, 60),
                date_of_submission=datetime.now(),
                membership_type=random.choice((u'normal', u'investing')),
                member_of_colsoc=random.choice((True, False)),
                name_of_colsoc=u"GEMA",
            )
            try:
                DBSession.add(member)
            except IntegrityError:
                print("exception!!!!!!!!!!!!!!!!!!!!1")
def main():
    """Seed the dev database: a demo area, two imagery licenses, one project."""
    setup_logging('development.ini')
    settings = get_appsettings('development.ini')
    load_local_settings(settings)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    translation_manager.options.update({
        'locales': settings['available_languages'].split(),
        'get_locale_fallback': True
    })
    configure_mappers()
    postgis_version = DBSession.execute(func.postgis_version()).scalar()
    if not postgis_version.startswith('2.'):
        # With PostGIS 1.x the AddGeometryColumn and DropGeometryColumn
        # management functions should be used.
        Area.__table__.c.geometry.type.management = True
        Area.__table__.c.centroid.type.management = True
        Task.__table__.c.geometry.type.management = True
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    # Load the Alembic configuration and stamp the version table with the
    # most recent revision.
    from alembic.config import Config
    from alembic import command
    alembic_cfg = Config("alembic.ini")
    url = settings['sqlalchemy.url']
    alembic_cfg.set_section_option("alembic", "sqlalchemy.url", url)
    command.stamp(alembic_cfg, "head")
    with transaction.manager:
        # Demo polygon over Kathmandu, converted GeoJSON -> shapely -> WKB.
        geometry = '{"type":"Polygon","coordinates":[[[85.31038284301758,27.70731518595052],[85.31089782714842,27.698120147680104],[85.3242015838623,27.69842412827061],[85.323429107666,27.70731518595052],[85.31038284301758,27.70731518595052]]]}'  # noqa
        geometry = geojson.loads(geometry,
                                 object_hook=geojson.GeoJSON.to_instance)
        geometry = shapely.geometry.asShape(geometry)
        geometry = shape.from_shape(geometry, 4326)
        area = Area(geometry)
        DBSession.add(area)
        license1 = License()
        license1.name = 'NextView'
        license1.description = "This data is licensed for use by the US Government (USG) under the NextView (NV) license and copyrighted by Digital Globe or GeoEye. The NV license allows the USG to share the imagery and Literal Imagery Derived Products (LIDP) with entities outside the USG when that entity is working directly with the USG, for the USG, or in a manner that is directly beneficial to the USG. The party receiving the data can only use the imagery or LIDP for the original purpose or only as otherwise agreed to by the USG. The party receiving the data cannot share the imagery or LIDP with a third party without express permission from the USG. At no time should this imagery or LIDP be used for other than USG-related purposes and must not be used for commercial gain. The copyright information should be maintained at all times. Your acceptance of these license terms is implied by your use."  # noqa
        license1.plain_text = "In other words, you may only use NextView imagery linked from this site for digitizing OpenStreetMap data for humanitarian purposes."  # noqa
        DBSession.add(license1)
        license2 = License()
        license2.name = 'Astrium/UNOSAT'
        license2.description = "UNOSAT allow any INTERNET USER to use the IMAGE to develop DERIVATIVE WORKS provided that the INTERNET USER includes the DERIVATIVE WORKS he/she created in the OpenStreetMap database under CC-BY-SA licence (http://creativecommons.org/licenses/by-sa/2.0/) and/or Open Database licence (ODbL: http://www.opendatacommons.org/licenses/odbl/), with the credit of the corresponding PRODUCT conspicuously displayed and written in full, in order to allow any OpenStreetMap database user to have access to and to use the DERIVATIVE WORKS. Except for the foregoing, the END USER and/or the INTERNET USER shall not be entitled to sell, distribute, assign, dispose of, lease, sublicense or transfer, directly or indirectly, any DERIVATIVE WORKS to any third party."  # noqa
        license2.plain_text = "Astrium GEO-Information Services and UNOSAT are allowing access to this imagery for creating information in OpenStreetMap. Other uses are not allowed."  # noqa
        DBSession.add(license2)
        project = Project(
            'Kathmandu - Map all primary roads and buildings'
        )
        project.area = area
        project.short_description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."  # noqa
        project.description = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."  # noqa
        project.instructions = "**The detailed instructions**\n\nLorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."  # noqa
        project.entities_to_map = "primary roads, buildings"
        project.imagery = "tms[22]:http://hiu-maps.net/hot/1.0.0/kathmandu_flipped/{zoom}/{x}/{y}.png"  # noqa
        project.license = license1
        DBSession.add(project)
        with project.force_locale('fr'):
            project.name = "Kathmandu - Cartographier les routes et les bâtiments"  # noqa
        project.auto_fill(17)
# coding=utf-8
# Stand-alone runner for the mail-sender daemon.
import os

from pyramid.paster import get_appsettings
from pyramid.paster import setup_logging

from tracim_backend.config import CFG
from tracim_backend.lib.mail_notifier.daemon import MailSenderDaemon

# The config path comes from the environment so the same entry point works
# in any deployment.
config_uri = os.environ['TRACIM_CONF_PATH']
setup_logging(config_uri)
settings = get_appsettings(config_uri)
settings.update(settings.global_conf)

app_config = CFG(settings)
app_config.configure_filedepot()

# burst=False: keep running and poll, instead of draining once and exiting.
daemon = MailSenderDaemon(app_config, burst=False)
daemon.run()
from skosprovider_sqlalchemy.models import Base, ConceptScheme, LabelType, Language, MatchType, Concept, NoteType, Match
from atramhasis.data.models import Base as VisitLogBase, ConceptschemeCounts
from atramhasis.data.datamanagers import ConceptSchemeManager, SkosManager, LanguagesManager, AuditManager, \
    CountsManager
from fixtures.materials import materials
from fixtures.data import trees, geo
# unittest.mock is stdlib on Python 3; fall back to the external 'mock'
# package on Python 2.
try:
    from unittest.mock import Mock, patch
except ImportError:  # narrowed from a bare except: only import failure expected
    from mock import Mock, patch
from datetime import date, datetime
from atramhasis.data.models import ConceptVisitLog

here = os.path.dirname(__file__)
settings = get_appsettings(os.path.join(here, '../', 'tests/conf_test.ini'))


class DatamangersTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Create the shared engine and session factory for all tests."""
        cls.engine = engine_from_config(settings, prefix='sqlalchemy.')
        cls.session_maker = sessionmaker(
            bind=cls.engine,
            extension=ZopeTransactionExtension()
        )

    def setUp(self):
        """Start every test from a freshly rebuilt, bound schema."""
        Base.metadata.drop_all(self.engine)
        Base.metadata.create_all(self.engine)
        Base.metadata.bind = self.engine
def main(argv=sys.argv):
    """Initialize (and optionally reset/populate) the gamification database.

    Usage: <script> <config_uri> [reset_db=true] [populate_demo=true] [var=value ...]

    Side effects: binds DBSession to the configured engine, creates all
    tables, and — when ``populate_demo`` is set — inserts a demo data set of
    languages, variables, achievements, goals, translations, properties,
    rewards and three mutually-befriended users in a single transaction.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    # Heroku-style environment overrides take precedence over the .ini file.
    durl = os.environ.get("DATABASE_URL")  # heroku
    if durl:
        settings['sqlalchemy.url'] = durl
    murl = os.environ.get("MEMCACHED_URL")
    if murl:
        settings['urlcache_url'] = murl

    engine = engine_from_config(settings, 'sqlalchemy.')
    config = Configurator(settings=settings)
    pyramid_dogpile_cache.includeme(config)

    # Imports are deferred until after cache/config setup on purpose —
    # init_session()/init_declarative_base() must run before Base/DBSession
    # are imported.
    from ..metadata import (init_session, init_declarative_base)
    init_session()
    init_declarative_base()
    from ..metadata import (Base, DBSession)
    from ..models import (Achievement, Goal, Variable, User, Language,
                          TranslationVariable, Translation, Property, Reward,
                          AchievementProperty, AchievementReward)

    DBSession.configure(bind=engine)

    if options.get("reset_db", False):
        Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    def add_translation_variable(name):
        # Helper: create + register a TranslationVariable, return it.
        t = TranslationVariable(name=name)
        DBSession.add(t)
        return t

    def add_translation(variable, lang, text):
        # Helper: create + register a Translation for the given variable/lang.
        tr = Translation(translationvariable=variable, text=text, language=lang)
        DBSession.add(tr)
        return tr

    if options.get("populate_demo", False):
        with transaction.manager:
            lang_de = Language(name="de")
            lang_en = Language(name="en")
            DBSession.add(lang_de)
            DBSession.add(lang_en)

            var_invited_users = Variable(name="invite_users")
            DBSession.add(var_invited_users)
            # NOTE(review): the same local is reused for a second, different
            # Variable ("participate"); both are added, but the first binding
            # is lost — looks intentional since neither is referenced later.
            var_invited_users = Variable(name="participate", group="none")
            DBSession.add(var_invited_users)

            achievement_invite = Achievement(name='invite_users',
                                             evaluation="immediately",
                                             maxtimes=20)
            DBSession.add(achievement_invite)

            transvar_invite = add_translation_variable(
                name="invite_users_goal_name")
            # Translation texts are expressions evaluated by the engine
            # (backtick repr is Python-2 syntax) — stored as opaque strings here.
            add_translation(transvar_invite, lang_en,
                            '"Invite "+`(5*p.level)`+" Users"')
            add_translation(transvar_invite, lang_de,
                            '"Lade "+`(5*p.level)`+" Freunde ein"')

            achievement_invite_goal1 = Goal(name_translation=transvar_invite,
                                            condition='p.var=="invite_users"',
                                            goal="5*p.level",
                                            operator="geq",
                                            achievement=achievement_invite)
            DBSession.add(achievement_invite_goal1)

            achievement_fittest = Achievement(name='fittest',
                                              relevance="friends",
                                              maxlevel=100)
            DBSession.add(achievement_fittest)

            transvar_fittest = add_translation_variable(
                name="fittest_goal_name")
            add_translation(
                transvar_fittest, lang_en,
                '"Do the most sport activities among your friends"')
            add_translation(
                transvar_fittest, lang_de,
                '"Mache unter deinen Freunden am meisten Sportaktivitäten"')

            achievement_fittest_goal1 = Goal(
                name_translation=transvar_fittest,
                condition='and_(p.var=="participate", p.key.in_(["5","7","9"]))',
                evaluation="weekly",
                goal="5*p.level",
                achievement=achievement_fittest)
            DBSession.add(achievement_fittest_goal1)

            # Generic property/reward definitions referenced by both achievements.
            property_name = Property(name='name')
            DBSession.add(property_name)
            property_xp = Property(name='xp')
            DBSession.add(property_xp)
            property_icon = Property(name='icon')
            DBSession.add(property_icon)
            property_description = Property(name='description')
            DBSession.add(property_description)

            reward_badge = Reward(name='badge')
            DBSession.add(reward_badge)
            # NOTE(review): "backgroud_image" looks like a typo for
            # "background_image" — kept as-is since consumers may match on it.
            reward_image = Reward(name='backgroud_image')
            DBSession.add(reward_image)

            transvar_invite_name = add_translation_variable(
                name="invite_achievement_name")
            add_translation(transvar_invite_name, lang_en, '"The Community!"')
            add_translation(transvar_invite_name, lang_de, '"Die Community!"')

            DBSession.add(
                AchievementProperty(achievement=achievement_invite,
                                    property=property_name,
                                    value_translation=transvar_invite_name))
            DBSession.add(
                AchievementProperty(achievement=achievement_invite,
                                    property=property_xp,
                                    value='100 * p.level'))
            DBSession.add(
                AchievementProperty(
                    achievement=achievement_invite,
                    property=property_icon,
                    value=
                    "'https://www.gamification-software.com/img/running.png'"))
            DBSession.add(
                AchievementProperty(achievement=achievement_invite,
                                    property=property_description,
                                    value_translation=transvar_invite))
            DBSession.add(
                AchievementReward(
                    achievement=achievement_invite,
                    reward=reward_badge,
                    value=
                    "'https://www.gamification-software.com/img/trophy.png'",
                    from_level=5))
            DBSession.add(
                AchievementReward(
                    achievement=achievement_invite,
                    reward=reward_image,
                    value=
                    "'https://www.gamification-software.com/img/video-controller-336657_1920.jpg'",
                    from_level=5))

            transvar_fittest_name = add_translation_variable(
                name="fittest_achievement_name")
            add_translation(transvar_fittest_name, lang_en, '"The Fittest!"')
            add_translation(transvar_fittest_name, lang_de, '"Der Fitteste!"')

            DBSession.add(
                AchievementProperty(achievement=achievement_fittest,
                                    property=property_name,
                                    value_translation=transvar_fittest_name))
            DBSession.add(
                AchievementProperty(achievement=achievement_fittest,
                                    property=property_xp,
                                    value='50 + (200 * p.level)'))
            DBSession.add(
                AchievementProperty(
                    achievement=achievement_fittest,
                    property=property_icon,
                    value=
                    "'https://www.gamification-software.com/img/colorwheel.png'"
                ))
            DBSession.add(
                AchievementProperty(achievement=achievement_fittest,
                                    property=property_description,
                                    value_translation=transvar_fittest))
            DBSession.add(
                AchievementReward(
                    achievement=achievement_fittest,
                    reward=reward_badge,
                    value=
                    "'https://www.gamification-software.com/img/easel.png'",
                    from_level=1))
            DBSession.add(
                AchievementReward(
                    achievement=achievement_fittest,
                    reward=reward_image,
                    value=
                    "'https://www.gamification-software.com/img/game-characters-622654.jpg'",
                    from_level=1))

            # Three demo users forming a fully-connected friendship graph.
            user1 = User(id=1, lat=10, lng=50, timezone="Europe/Berlin")
            user2 = User(id=2, lat=10, lng=50, timezone="US/Eastern")
            user3 = User(id=3, lat=10, lng=50)

            user1.friends.append(user2)
            user1.friends.append(user3)
            user2.friends.append(user1)
            user2.friends.append(user3)
            user3.friends.append(user1)
            user3.friends.append(user2)

            DBSession.add(user1)
            DBSession.add(user2)
            DBSession.add(user3)
# -*- coding: utf-8 -*- import os from pyramid.paster import get_appsettings from .config import configure pyramid_ini = os.environ['PYRAMID_INI'] settings = get_appsettings(pyramid_ini) app = configure(settings).make_celery_app()
# Launcher script: picks host/port/ini-file depending on whether we run on
# OpenShift or locally, then serves the Pyramid WSGI app with Waitress.
here = os.path.dirname(os.path.abspath(__file__))

if 'OPENSHIFT_APP_NAME' in os.environ:  # are we on OPENSHIFT?
    ip = os.environ['OPENSHIFT_PYTHON_IP']
    port = int(os.environ['OPENSHIFT_PYTHON_PORT'])
    config = os.path.join(here, 'production.ini')
else:
    ip = '0.0.0.0'  # localhost
    port = 6543
    config = os.path.join(here, 'development.ini')

# find 'main' method in __init__.py. That is our wsgi app
app = get_app(config, 'main')
# don't really need this but is an example on how to get settings
# from the '.ini' files
settings = get_appsettings(config, 'main')

# Waitress (remember to include the waitress server in "install_requires"
# in the setup.py)
from waitress import serve

print("Starting Waitress.")
serve(app, host=ip, port=port, threads=50)

# Cherrypy server (remember to include the cherrypy server in
# "install_requires" in the setup.py)
# from cherrypy import wsgiserver
# print("Starting Cherrypy Server on http://{0}:{1}".format(ip, port))
# server = wsgiserver.CherryPyWSGIServer((ip, port), app, server_name='Server')
# server.start()

# Simple Server
# from wsgiref.simple_server import make_server
def app_settings(ini_file):
    """Parse *ini_file* with Pyramid's paster loader and return the settings."""
    settings = get_appsettings(ini_file)
    return settings
def main(argv=sys.argv):
    """Create the schema and seed the forum database with demo content.

    Usage: <script> <config_uri> [var=value ...]

    Inserts two test users, three categories with their subcategories, and a
    handful of threads/posts, all inside one transaction.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    engine = get_engine(settings)
    Base.metadata.create_all(engine)

    session_factory = get_session_factory(engine)

    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)

        # Create a few test users
        testy = User(name='testy', email='*****@*****.**')
        testy.set_password('testy')
        dbsession.add(testy)
        basic = User(name='basic', email='*****@*****.**')
        basic.set_password('basic')
        dbsession.add(basic)

        # Create a few categories and subcategories for the forums
        general = Category(name='General')
        announcements = Subcategory(name='Announcements',
                                    description='General news and happenings')
        updates = Subcategory(name='DevLog',
                              description='Updates and ramblings of the dev team')
        general.subcategories = [announcements, updates]
        dbsession.add(general)

        suggestions = Category(name='Suggestions')
        site_suggestions = Subcategory(name='Site Suggestions',
                                       description='Have an idea for the site? Let\'s discuss!')
        feedback = Subcategory(name='Feedback',
                               description='Tell us how we are doing')
        suggestions.subcategories = [site_suggestions, feedback]
        # FIX: previously this added `feedback` (a subcategory) instead of the
        # `suggestions` Category itself, unlike the parallel general/discussion
        # blocks, leaving the category unregistered with the session.
        dbsession.add(suggestions)

        discussion = Category(name='Site Discussion')
        general_chat = Subcategory(name='General Chat',
                                   description='Discuss anything about the site')
        originals = Subcategory(name='Original Work',
                                description='Share what you\'ve done or are working on')
        intros = Subcategory(name='Introductions',
                             description='New to the site? Say hello!')
        discussion.subcategories = [general_chat, originals, intros]
        dbsession.add(discussion)

        # Add a few threads to the subcategories in the forums
        version = Thread(title='New Version!', subcategory=announcements,
                         creator=testy)
        dbsession.add(version)
        opensource = Thread(title='We are open source',
                            subcategory=announcements, creator=testy)
        dbsession.add(opensource)
        ramblings = Thread(title='Should I switch to SQLite',
                           subcategory=updates, creator=basic)
        dbsession.add(ramblings)

        hunter2 = Thread(title='The story of hunter2',
                         subcategory=general_chat, creator=testy)
        hunter2.posts = [
            Post(creator=testy, body='hey, if you type in your pw, it will show as stars'),
            Post(creator=testy, body='********* see!'),
            Post(creator=basic, body='hunter2'),
            Post(creator=basic, body='doesnt look like stars to me'),
            Post(creator=testy, body='<basic> *******'),
            Post(creator=testy, body='thats what I see'),
            Post(creator=basic, body='oh, really?'),
            Post(creator=testy, body='Absolutely'),
            Post(creator=basic, body='you can go hunter2 my hunter2-ing hunter2'),
            Post(creator=basic, body='haha, does that look funny to you?'),
            Post(creator=testy, body='lol, yes. See, when YOU type hunter2, it shows to us as *******'),
            Post(creator=basic, body='thats neat, I didnt know IRC did that'),
            Post(creator=testy, body='yep, no matter how many times you type hunter2, it will show to us as *******'),
            Post(creator=basic, body='awesome!'),
            Post(creator=basic, body='wait, how do you know my pw?'),
            Post(creator=testy, body='er, I just copy pasted YOUR ******\'s and it appears to YOU as hunter2 cause its your pw'),
            Post(creator=basic, body='oh, ok.'),
        ]
        dbsession.add(hunter2)
def _callFUT(self, config_file, section_name, appconfig):
    """Invoke the function under test (pyramid.paster.get_appsettings)."""
    from pyramid.paster import get_appsettings
    result = get_appsettings(config_file, section_name, appconfig)
    return result
def main(argv=sys.argv):
    """Rebuild and seed the IManage access-control database.

    Usage: <script> <config_uri> [var=value ...]

    Drops/recreates all tables, runs the data-lake loader, then seeds tools,
    projects, roles, users, features, data permissions, scenarios and
    workbench backups for the JJOralCare/JJLean demo projects. Commits are
    issued explicitly at several checkpoints via ``transaction.manager``.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = get_engine(settings, prefix='sqlalchemy.')
    session_factory = get_session_factory(engine)
    with transaction.manager:
        ssn = get_tm_session(session_factory, transaction.manager)
        # Drop all tables
        Base.metadata.drop_all(engine)
        # Create all tables
        Base.metadata.create_all(engine)
        # Add root to entities tree.
        root = Entity(_name='root', _layer='root', _dimension_name='root')
        ssn.add(root)
        transaction.manager.commit()

        db_config = settings['sqlalchemy.url']
        wh = Warehouse(db_config)
        config = settings['path.data_lake']
        config_name = 'JJOralCare_config'
        # Run the data-lake processing pipeline before seeding access data.
        loader = Loader(config, db_config)
        loader.run_processing(config_name)
        transaction.manager.commit()
        project = wh.get_project_data("JJOralCare")
        imanage_access = IManageAccess(ssn=ssn)

        # Add Tool
        tool = imanage_access.add_tool(name='Forecasting',
                                       description='This is forecasting')
        tool.id = "forecast"

        # Add Projects
        project_1 = imanage_access.add_project(
            name='Oral Care Forecasting',
            description="This is JJOralCare Project")
        project_1.id = "JJOralCare"
        imanage_access.add_tool_to_project(tool, project_1)
        project_2 = imanage_access.add_project(
            name='Lean Forecasting', description="This is JJLean Project")
        project_2.id = "JJLean"
        imanage_access.add_tool_to_project(tool, project_2)

        # Add role's and connect it to tools
        role_forecast = Role(name="forecaster")
        imanage_access.add_role_to_tool(role=role_forecast, tool=tool)
        role_superviser = Role(name="superviser")
        imanage_access.add_role_to_tool(role=role_superviser, tool=tool)

        # Add [email protected] User for Project #1
        email = "*****@*****.**"
        password = "******"
        user_1 = imanage_access.add_user(email=email, password=password)
        imanage_access.add_role_to_user(user_1, role_forecast)

        # Add default_user User for Project #2
        email = "default_user"
        password = "******"
        user_2 = imanage_access.add_user(email=email, password=password)
        imanage_access.add_role_to_user(user_2, role_superviser)

        email = "user_2"
        password = "******"
        user_3 = imanage_access.add_user(email=email, password=password)
        imanage_access.add_role_to_user(user_3, role_forecast)

        # Add roles Forecaster and set that feature
        features = [
            'create', 'view', 'finalize', 'modify', 'delete', 'edit',
            'share', 'copy'
        ]
        for feature in features:
            imanage_access.add_feature(name=feature, tool=tool,
                                       role=role_forecast)

        # Add Roles Superviser and set that feature
        features = [
            'create', 'view', 'publish', 'finalize', 'modify', 'include',
            'edit', 'delete', 'share', 'copy'
        ]
        for feature in features:
            imanage_access.add_feature(name=feature, tool=tool,
                                       role=role_superviser)

        # Add data permission: one shared Permission accumulating data
        # permissions from both projects.
        permission = imanage_access.create_permission(
            name="Development Template")
        for data in perm_data["JJLean"]:
            data_permission = imanage_access.create_data_permision(
                project="JJLean",
                in_path=data['in_path'],
                out_path=data['out_path'],
                mask=data['mask'])
            imanage_access.add_data_permission_to_permission(
                permission, data_permission)
        imanage_access.add_permission_to_user(user_1, permission)
        for data in perm_data["JJOralCare"]:
            data_permission = imanage_access.create_data_permision(
                project="JJOralCare",
                in_path=data['in_path'],
                out_path=data['out_path'],
                mask=data['mask'])
            imanage_access.add_data_permission_to_permission(
                permission, data_permission)
        imanage_access.add_permission_to_user(user_2, permission)

        # Add Scenario
        input_data_1 = dict(
            name="Price Growth Dynamics JJOralCare",
            description="Dynamics of Price Growth in Brazil",
            status="Final",
            shared="Yes",
            criteria="Brazil-Nike-Main",
            author=user_2.email,
        )
        input_data_2 = dict(
            name="Price Growth Dynamics JJLean",
            description="Dynamics of Price Growth in USA",
            status="Draft",
            shared="No",
            criteria="USA-iPhone-Main",
            author=user_1.email,
        )
        # Five scenarios per input-data template, both visible to both users.
        for i in range(0, 5):
            scenario = scenario_manager.create_scenario(
                ssn, user=None, input_data=input_data_1)
            scenario.users = [user_1, user_2]
        for i in range(0, 5):
            scenario = scenario_manager.create_scenario(
                ssn, user=None, input_data=input_data_2)
            scenario.users = [user_1, user_2]
        # TODO realise add user to scenario
        transaction.manager.commit()

        # Add Project and tool
        """ Fill Persistance storage """
        # TODO admin manager add
        user_id = 2  # user.email
        tool_id = 'forecast'
        project_id = 'JJOralCare'
        #wb = Workbench(user_id)
        #wb.init_load(wh, dev_template_JJLean)
        #backup = wb.get_backup()
        #persistent_storage.save_backup(user_id, tool_id, 'JJLean', backup)
        wb = Workbench(user_id)
        template = load_dev_templates(settings, "JJOralCare")
        user_access_rights = {
            "features": template['features'],
            "entities": template['user_data_access']
        }
        wb.initial_load(wh, template,
                        dev_template_JJOralCare['calc_instructions'],
                        user_access_rights)
        backup = wb.get_backup()
        persistent_storage.save_backup(user_id, tool_id, project_id, backup,
                                       backup_name='default')

        project_id = "JJLean"
        template = load_dev_templates(settings, "JJLean")
        user_access_rights = {
            "features": template['features'],
            "entities": template['user_data_access']
        }
        wb.initial_load(wh, template,
                        dev_template_JJLean['calc_instructions'],
                        user_access_rights)
        backup = wb.get_backup()
        persistent_storage.save_backup(user_id, tool_id, project_id, backup,
                                       backup_name='default')

        # Add tools
        #transaction.manager.commit()
        tool_forecast = imanage_access.get_tool(name='Forecasting')
        f_tool_id = tool_forecast.id

        # Add roles
        role_jj_admin = imanage_access.add_role(name='jj_role_admin',
                                                tool_id=f_tool_id)
        role_jj_manager = imanage_access.add_role('jj_role_manager', f_tool_id)
        #transaction.manager.commit()
        # Re-fetch to get DB-assigned ids.
        role_jj_admin = imanage_access.get_role(name='jj_role_admin')
        role_jj_manager = imanage_access.get_role(name='jj_role_manager')
        role_admin_id = role_jj_admin.id
        role_manager_id = role_jj_manager.id

        # Add users
        user_jj_admin = imanage_access.add_user('*****@*****.**', 'pass',
                                                [role_admin_id])
        user_jj_manager = imanage_access.add_user('*****@*****.**', 'pass',
                                                  [role_manager_id])
        #transaction.manager.commit()
        user_jj_admin = imanage_access.get_user(email='*****@*****.**')
        user_admin_id = user_jj_admin.id
        #imanage_access.set_permissions_template(f_tool_id, tool_template)
        #imanage_access.init_user_wb(f_tool_id, user_admin_id)
        # imanage_access.update_user_data_permissions(1, 1, permissions)
        #transaction.manager.commit()
        features = imanage_access.get_features(f_tool_id)
        imanage_access.update_role_features(role_admin_id,
                                            [f.id for f in features])
def web_app():
    """Build a WebTest ``TestApp`` wrapping the WSGI app from test.ini."""
    ini_path = os.path.join(ABS_PATH, 'test.ini')
    settings = get_appsettings(ini_path, name='main')
    wsgi_app = main(global_config=None, **settings)
    from webtest import TestApp
    return TestApp(wsgi_app)
def load_config(self):
    """Merge the settings from the config file into this object and mark it loaded."""
    config_path = get_configfile()
    parsed = get_appsettings(config_path)
    self.update(parsed)
    self.loaded = True
import os
import unittest

from pyramid import testing
from pyramid.paster import get_appsettings
from webtest import TestApp

from pyramid_api_example import main
from pyramid_api_example.path import SERVER_ROOT_PATH

# The WSGI app is built once at import time from the local ini file and
# shared by all functional tests.
app_settings = get_appsettings(
    config_uri=os.path.join(SERVER_ROOT_PATH, 'etc', 'local.ini'))
app = main({}, **app_settings)


class BaseFunctionalTest(unittest.TestCase):
    """Base class wiring Pyramid's test setup and a fresh TestApp per test."""

    def setUp(self):
        # Push a Pyramid testing registry and wrap the shared WSGI app.
        testing.setUp()
        self.app = TestApp(app)

    def tearDown(self):
        # Pop the testing registry pushed in setUp.
        testing.tearDown()
    scoped_session,
    sessionmaker,
)
from sqlalchemy.orm.exc import NoResultFound
import transaction
import pafy
from pyramid.paster import get_appsettings
from raven import Client
from raven.contrib.celery import register_signal, register_logger_signal
from zope.sqlalchemy import ZopeTransactionExtension

from jigglypuff.celery_utils import celery
from jigglypuff.models import Song

# Celery-worker module setup: read settings from the ini named in
# JIGGLYPUFF_PASTER_URI (falling back to development.ini) and bind a
# dedicated scoped session for tasks to use.
paster_uri = os.getenv('JIGGLYPUFF_PASTER_URI') or 'development.ini'
settings = get_appsettings(paster_uri)

Task_DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension())
)
engine = create_engine(settings['sqlalchemy.url'])
Task_DBSession.configure(bind=engine)

# Sentry/raven integration is optional: only wired up when a DSN is configured.
raven_dsn = settings.get('worker.raven.dsn')
if raven_dsn:
    client = Client(raven_dsn)

    # register a custom filter to filter out duplicate logs
    register_logger_signal(client)

    # hook into the Celery error handler
def main(argv=sys.argv):
    """Plan task executions from each task's cron schedule.

    Usage: <script> <config_uri> [var=value ...]

    For every row in ``t_tasks`` this computes the two upcoming cron fire
    times, cancels stale/stuck executions, and ensures a pending execution
    row exists for the upcoming fire time.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    import gengine
    gengine.main({}, **settings)
    from gengine.metadata import (DBSession)
    sess = DBSession()
    import gengine.app.model as m
    import crontab
    from gengine.app.registries import get_task_registry
    enginetasks = get_task_registry().registrations

    with transaction.manager:
        mark_changed(sess, transaction.manager, True)
        tasks = sess.execute(m.t_tasks.select()).fetchall()
        for task in tasks:
            cron = task["cron"]
            if not cron:
                # No per-task schedule: fall back to the task type's default.
                cron = enginetasks.get(task["task_name"]).get(
                    "default_cron", None)
            if cron:
                now = dt_now().replace(second=0)
                item = crontab.CronItem(line=cron)
                s = item.schedule(date_from=now)
                # Two consecutive fire times from `now`; renamed from
                # prev/next — `next` shadowed the builtin.
                prev_run = s.get_next().replace(second=0)
                next_run = s.get_next().replace(second=0)

                execs = sess.execute(m.t_taskexecutions.select().where(
                    and_(
                        m.t_taskexecutions.c.task_id == task["id"],
                        m.t_taskexecutions.c.canceled_at == None,
                        m.t_taskexecutions.c.finished_at == None,
                    )).order_by(
                        m.t_taskexecutions.c.planned_at.desc())).fetchall()

                found = False
                # FIX: loop variable was named `exec` — a reserved word in
                # Python 2 (SyntaxError) and a shadowed builtin in Python 3.
                for execution in execs:
                    if execution["planned_at"] >= next_run:
                        # The next execution is already planned
                        found = True
                    if execution["planned_at"] <= prev_run and prev_run < dt_ago(
                            minutes=10) and not execution["locked_at"]:
                        # The execution is more than 10 minutes in the past and
                        # not yet locked (worker not running / overloaded)
                        if next_run - datetime.timedelta(minutes=10) < dt_now():
                            # The next execution is planned in less than 10
                            # minutes, cancel the other one.
                            # FIX: `.where({'id': ...})` passed a dict, which is
                            # not a valid SQLAlchemy criterion; use a column
                            # comparison instead.
                            sess.execute(m.t_taskexecutions.update().values({
                                'canceled_at': dt_now()
                            }).where(
                                m.t_taskexecutions.c.id == execution["id"]))
                    if execution["locked_at"] and execution["locked_at"] < dt_ago(
                            hours=24):
                        # this task is running for more than 24 hours.
                        # probably crashed.... set it to canceled
                        sess.execute(m.t_taskexecutions.update().values({
                            'canceled_at': dt_now()
                        }).where(m.t_taskexecutions.c.id == execution["id"]))

                if not found:
                    # Plan next execution
                    sess.execute(m.t_taskexecutions.insert().values({
                        'task_id': task["id"],
                        'planned_at': next_run
                    }))
        sess.flush()
        sess.commit()
def create_sync_scripts(argv=None):
    """
    Set bucket notifications and create AWS Lambda functions that will
    sync changes in the S3 bucket to the cache

    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(description=create_sync_scripts.__doc__)
    parser.add_argument(
        "-n",
        help="Name of the AWS Lambda function to "
        " create (default %(default)s)",
        default="pypicloud_s3_sync",
    )
    parser.add_argument("-f",
                        help="Overwrite existing AWS Lambda functions",
                        action="store_true")
    parser.add_argument(
        "-a",
        help="The ARN of the IAM role to run as. If not "
        "provided, pypicloud will attempt to create one.",
    )
    parser.add_argument(
        "-r",
        help="The AWS region to create the Lambda in "
        "(will attempt to auto-detect from config file)",
    )
    parser.add_argument("config", help="Name of config file")
    args = parser.parse_args(argv)
    logging.basicConfig()

    settings = get_appsettings(args.config)
    if args.r:
        region = args.r
    else:
        # FIX: `region` was only assigned inside the s3 branch, so a non-s3
        # storage backend with no -r flag raised NameError instead of the
        # friendly "No region detected" message. Initialize it first.
        region = None
        if settings.get("pypi.storage") == "s3":
            region = settings.get("storage.region_name")
        if not region:
            print("No region detected in config file. Please use -r <region>")
            sys.exit(1)
    kwargs = S3Storage.configure(settings)
    bucket = kwargs["bucket"]

    # Create the Role
    if args.a:
        role_arn = args.a
    else:
        db = settings["pypi.db"]
        if db == "dynamo":
            role_arn = _create_dynamodb_role(settings, bucket)
        else:
            role_arn = _create_default_role(settings, bucket)

    lam = boto3.client("lambda", region_name=region)
    func_arn = None
    try:
        func = lam.get_function(FunctionName=args.n)
    except lam.exceptions.ResourceNotFoundException:
        pass
    else:
        if args.f:
            print("Deleting pre-existing function %r" % args.n)
            lam.delete_function(FunctionName=args.n)
        else:
            func_arn = func["Configuration"]["FunctionArn"]
            print("Lambda function %r already exists. Use -f to overwrite" %
                  args.n)

    # Create the lambda function
    if func_arn is None:
        bundle = _build_lambda_bundle(settings)
        handler_module = os.path.splitext(HANDLER_FILENAME)[0]
        # Pull out only the cache db settings
        small_settings = {"pypi.db": settings["pypi.db"]}
        for key, val in settings.items():
            if key.startswith("db."):
                small_settings[key] = val
        print("Creating Lambda function %s" % args.n)
        # NOTE(review): Runtime "python2.7" is long deprecated on AWS Lambda;
        # kept as-is because the bundled handler may depend on it — confirm.
        func = lam.create_function(
            FunctionName=args.n,
            Runtime="python2.7",
            Handler=handler_module + ".handle_s3_event",
            Code={"ZipFile": bundle},
            Environment={
                "Variables": {
                    "PYPICLOUD_SETTINGS": json.dumps(small_settings)
                }
            },
            Description=
            "Process S3 Object notifications & update pypicloud cache",
            Timeout=30,
            Publish=True,
            Role=role_arn,
        )
        func_arn = func["FunctionArn"]

    print("Adding invoke permission for S3")
    account_id = boto3.client("sts").get_caller_identity().get("Account")
    try:
        lam.add_permission(
            Action="lambda:InvokeFunction",
            FunctionName=args.n,
            Principal="s3.amazonaws.com",
            SourceAccount=account_id,
            SourceArn="arn:aws:s3:::" + bucket.name,
            StatementId="s3_invoke",
        )
    except lam.exceptions.ResourceConflictException:
        print("Permission already present")

    print("Adding lambda configuration to S3 bucket")
    notification = bucket.Notification()
    notification.put(
        NotificationConfiguration={
            "LambdaFunctionConfigurations": [
                {
                    "LambdaFunctionArn": func_arn,
                    "Events": ["s3:ObjectCreated:*"]
                },
                {
                    "LambdaFunctionArn": func_arn,
                    "Events": ["s3:ObjectRemoved:*"]
                },
            ]
        })
def main(argv=sys.argv):
    """
    Comment on updates that are eligible to be pushed to stable.

    Queries for updates in the testing state that have a NULL request, looping
    over them looking for updates that are eligible to be pushed to stable but
    haven't had comments from Bodhi to this effect. For each such update it
    finds it will add a comment stating that the update may now be pushed to
    stable.

    This function is the entry point for the bodhi-approve-testing console
    script.

    Args:
        argv (list): A list of command line arguments. Defaults to sys.argv.
    """
    if len(argv) != 2:
        usage(argv)

    settings = get_appsettings(argv[1])
    initialize_db(settings)
    db = Session()

    try:
        testing = db.query(Update).filter_by(status=UpdateStatus.testing,
                                             request=None)
        for update in testing:
            # If this release does not have any testing requirements, skip it
            if not update.release.mandatory_days_in_testing:
                print('%s doesn\'t have mandatory days in testing' %
                      update.release.name)
                continue

            # If this has already met testing requirements, skip it
            if update.met_testing_requirements:
                continue

            # Approval message when testing based on karma threshold
            if update.stable_karma not in (0, None) and update.karma >= update.stable_karma \
                    and not update.autokarma and update.meets_testing_requirements:
                print('%s now reaches stable karma threshold' % update.title)
                text = config.get('testing_approval_msg_based_on_karma')
                update.comment(db, text, author=u'bodhi')
                continue

            # If autokarma updates have reached the testing threshold, say something! Keep in mind
            # that we don't care about karma here, because autokarma updates get their request set
            # to stable by the Update.comment() workflow when they hit the required threshold. Thus,
            # this function only needs to consider the time requirements because these updates have
            # not reached the karma threshold.
            if update.meets_testing_requirements:
                print('%s now meets testing requirements' % update.title)
                # Python 2 code: `unicode` keeps parity with the u'' literals
                # used throughout.
                text = unicode(
                    config.get('testing_approval_msg') %
                    update.mandatory_days_in_testing)
                update.comment(db, text, author=u'bodhi')
                # Commit after each comment so one failure doesn't discard
                # earlier comments.
                db.commit()
    except Exception as e:
        # Any failure: report, roll back the open transaction, and exit
        # non-zero so the caller notices.
        print(str(e))
        db.rollback()
        Session.remove()
        sys.exit(1)
from functools import partial from pyramid.paster import get_appsettings from .. import ( mock, patch_service, ) from reddit_service_ads_tracking import ( config, make_wsgi_app, ) from reddit_service_ads_tracking.lib import ( response, ) app_config = get_appsettings("example.ini", name="main") cfg = config.parse_config(app_config) signer = MessageSigner(cfg.ads_tracking.click_secret) def _encode_data(data): return base64.urlsafe_b64encode(json.dumps(data)) def _generate_click_url(url, data, expires=None): if expires is None: expires = cfg.ads_tracking.max_click_age params = { "url": url, "data": data,
def run(args):
    """
    1. Read preview_file to get viable labels.
    2. For each label, get all existing GUIDs of examples that have been used for crowdflower.
    3. Select N examples that haven't been used for crowdflower.
    :param args: parsed CLI args; uses config_uri, preview_file, per_label,
        output_file.
    :return: None (writes a CSV to args.output_file and prints the header).
    """
    logger = initialize_logger()
    logger.info("Begin.")
    config_uri = args.config_uri
    settings = get_appsettings(config_uri, options={}, name='myapp')
    engine = get_engine(settings)
    Base.metadata.create_all(engine)
    session_factory = get_session_factory(engine)
    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)

        labels = {}
        # Read Eligible Classes and Preview URLs
        # (naive CSV split: assumes no quoted commas in the preview file)
        with open(os.path.abspath(args.preview_file)) as f:
            col_index = {}
            col_vals = []
            for i, line in enumerate(f):
                # Read Header
                if i == 0:
                    keys = line.strip().split(",")
                    for j, k in enumerate(keys):
                        col_index[k] = j
                    col_vals = [k for k in keys]
                    continue
                # Read Row
                cols = line.strip().split(",")
                label_dict = dict(
                    name=cols[col_index["name"]],
                    val=cols[col_index["value"]],
                    image_1_url=cols[col_index["image_1_url"]],
                    image_2_url=cols[col_index["image_2_url"]],
                    image_3_url=cols[col_index["image_3_url"]])
                # Keyed by the label's value column.
                labels[label_dict["val"]] = label_dict

        label_names = list(labels.keys())
        # Output header: one image column plus three candidate-label groups.
        header = ",".join([
            "image_url",
            "label_{i}_name,label_{i}_value,label_{i}_image_1,label_{i}_image_2,label_{i}_image_3"
            .format(i=1),
            "label_{i}_name,label_{i}_value,label_{i}_image_1,label_{i}_image_2,label_{i}_image_3"
            .format(i=2),
            "label_{i}_name,label_{i}_value,label_{i}_image_1,label_{i}_image_2,label_{i}_image_3"
            .format(i=3),
        ])
        # Turn "a,b,c" into the format template "{a},{b},{c}".
        row_tpl = "{" + "},{".join(header.split(",")) + "}"
        rows = []
        print(header)
        print(row_tpl)

        N_LABELS = 3

        # Randomly Sample Examples from Data
        # NOTE(review): func.rand() is MySQL-specific (PostgreSQL uses
        # random()) — confirm the deployment database.
        for k, v in labels.items():
            label_name = k
            examples = dbsession.query(Example)\
                .join(Example.label, aliased=True)\
                .filter_by(name=label_name)\
                .order_by(func.rand())\
                .limit(args.per_label)\
                .all()
            for example in examples:
                # choose random other classes (the true label plus two decoys)
                candidate_labels = set()
                candidate_labels.add(label_name)
                while len(candidate_labels) < N_LABELS:
                    candidate_label = label_names[random.randint(
                        0, len(label_names) - 1)]
                    if candidate_label in candidate_labels:
                        continue
                    candidate_labels.add(candidate_label)

                row_dict = {}
                # Prefer the medium image URL, fall back to z then s sizes.
                example_url = json.loads(example.flickr_data).get(
                    "url_m", None)
                if example_url is None:
                    example_url = json.loads(example.flickr_data).get(
                        "url_z", None)
                if example_url is None:
                    example_url = json.loads(example.flickr_data).get(
                        "url_s", None)
                if example_url is None:
                    raise Exception("No example url.")
                row_dict.update(image_url=example_url)

                # Candidate labels are written in sorted order, so the true
                # label's position varies per row.
                for ic, candidate_label in enumerate(sorted(candidate_labels)):
                    row_dict.update({
                        "label_{i}_name".format(i=ic + 1):
                        labels[candidate_label]["name"],
                        "label_{i}_value".format(i=ic + 1):
                        labels[candidate_label]["val"],
                        "label_{i}_image_1".format(i=ic + 1):
                        labels[candidate_label]["image_1_url"],
                        "label_{i}_image_2".format(i=ic + 1):
                        labels[candidate_label]["image_2_url"],
                        "label_{i}_image_3".format(i=ic + 1):
                        labels[candidate_label]["image_3_url"]
                    })
                row = row_tpl.format(**row_dict)
                rows.append(row)

        # Shuffle so rows are not grouped by label in the output.
        random.shuffle(rows)

        # Write rows to output file.
        with open(args.output_file, "w") as f:
            f.write(header + "\n")
            for row in rows:
                f.write(row + "\n")
def ini_app():
    """Build the application from app/config.ini next to this file."""
    ini_path = os.path.join(os.path.dirname(__file__), 'app', 'config.ini')
    settings = get_appsettings(ini_path, name='main')
    wsgi_app = main({}, **settings)
    return App(wsgi_app)
def main(argv=sys.argv):
    """Create the schema and idempotently seed default users, tags, sites,
    domains and entries.

    Usage: <script> <config_uri>

    Each insert runs under a transaction savepoint; an IntegrityError (e.g.
    the row already exists from a previous run) rolls back only that
    savepoint and the script continues, so re-running is safe.
    """
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        # Users: (email, name, credentials)
        for (u, r, c) in defaults['users']:
            sp = transaction.savepoint()
            try:
                user = User(email=u, name=r, credentials=c)
                DBSession.add(user)
                # flush() forces the INSERT now so a duplicate surfaces here.
                DBSession.flush()
            except IntegrityError:
                sp.rollback()
                print(u'Username "{}" already exists.'.format(u))
        # Tags: (tag, description)
        for (t, d) in defaults['tags']:
            sp = transaction.savepoint()
            try:
                tag = Tag(tag=t, description=d)
                DBSession.add(tag)
                DBSession.flush()
            except IntegrityError:
                sp.rollback()
                print(u'Tag "{}" already exists.'.format(t))
        # Sites: (title, tagline, owner-email) — owner looked up by email.
        for (t, tag, o) in defaults['sites']:
            sp = transaction.savepoint()
            try:
                site = Site(title=t,
                            tagline=tag,
                            owner=DBSession.query(User).filter(
                                User.email == o).first())
                DBSession.add(site)
                DBSession.flush()
            except IntegrityError:
                sp.rollback()
                print(u'Site "{}" already exists.'.format(t))
        # Domains: (domain, site-title, owner-email) — site resolved by
        # title + owner.
        for (d, s, o) in defaults['domains']:
            sp = transaction.savepoint()
            try:
                owner = DBSession.query(User).filter(User.email == o).first()
                site = DBSession.query(Site).filter(Site.title == s).filter(
                    Site.owner == owner).first()
                domain = Domain(domain=d, site=site)
                DBSession.add(domain)
                DBSession.flush()
            except IntegrityError:
                sp.rollback()
                print(u'Domain "{}" already exists.'.format(d))
        # Entries: delegated to insert_new_rev_entry().
        for (t, e, s, u, ta, d, p) in defaults['entries']:
            sp = transaction.savepoint()
            try:
                insert_new_rev_entry(t, e, s, u, ta, d, published=p)
            except IntegrityError:
                sp.rollback()
                print(u'Entry "{}" already exists.'.format(t))
def main(argv=sys.argv):
    """Create the schema and seed demo air-quality rows plus one user row.

    Usage: <script> <config_uri> [var=value ...]
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)

    engine = get_engine(settings)
    Base.metadata.create_all(engine)
    session_factory = get_session_factory(engine)

    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)

        raw_airdata = [
            {
                'airdata_gps_location': '123,456',
                'airdata_co2': 1.222,
                'airdata_pm25': 1.232,
                'airdata_pm10': 1.234,
                'airdata_temperature': '12 c',
                'airdata_building': 'building A',
                'airdata_device': 'A'
            },
            {
                'airdata_gps_location': '133,456',
                'airdata_co2': 1.222,
                'airdata_pm25': 1.232,
                'airdata_pm10': 1.234,
                'airdata_temperature': '32 c',
                'airdata_building': 'building B',
                'airdata_device': 'B',
            },
            {
                'airdata_gps_location': '1223,1456',
                'airdata_co2': 1.222,
                'airdata_pm25': 1.232,
                'airdata_pm10': 1.234,
                'airdata_temperature': '32 c',
                'airdata_building': 'building C',
                'airdata_device': 'C'
            },
            {
                'airdata_gps_location': '122.3,145.6',
                'airdata_co2': 1.222,
                'airdata_pm25': 1.232,
                'airdata_pm10': 1.234,
                'airdata_temperature': '32 c',
                'airdata_building': 'building D',
                'airdata_device': 'D'
            },
        ]

        # Each dict's keys match the AirData constructor keywords one-to-one,
        # so the rows can be unpacked directly.
        dbsession.add_all(AirData(**row) for row in raw_airdata)

        dbsession.add(
            UserData(
                userdata_name='jhon doe',
                userdata_email='*****@*****.**',
                userdata_wa='1234444',
            ))
def main(argv=sys.argv): if len(argv) != 3: usage(argv) config_uri = argv[1] try: num_users = int(argv[2]) except ValueError: usage(argv) setup_logging(config_uri) settings = get_appsettings(config_uri) config = Configurator(settings=settings) config.add_directive('set_password_context', set_password_context) db_session = init_sa(config) logger = logging.getLogger('speak_friend.createusers') if 'speak_friend.password_hasher' in settings: config.include(settings['speak_friend.password_hasher']) else: from passlib.apps import ldap_context config.set_password_context(context=ldap_context) # makes the password_context available on the registry config.commit() pass_ctx = config.registry.password_context user_num = 1 buf = StringIO() cols = [ 'username', 'first_name', 'last_name', 'email', 'password_hash', 'password_salt', 'login_attempts', 'admin_disabled', 'is_superuser' ] csv_file = csv.DictWriter(buf, cols, delimiter='\t', lineterminator='\n') logger.info("Beginning to create %d users,", num_users) cxn = db_session.connection() cur = cxn.connection.cursor() user_num += 0 user_passwords = {} while user_num <= num_users: first_name = u'Test' last_name = pwgen(num_pw=1, pw_length=10, no_numerals=True, no_symbols=True) username = u'%s.%s' % (first_name, last_name) password = pwgen(num_pw=1, pw_length=20) user_passwords[username] = password csv_file.writerow( dict( username=username, first_name=first_name, last_name=last_name, email=u'*****@*****.**' % username, password_hash=pass_ctx.encrypt(password), password_salt='', login_attempts=0, admin_disabled=False, is_superuser=False, )) logger.info("Created user %s, %04d/%04d.", username, user_num, num_users) user_num += 1 logger.info("Committing...") tname = '%s.%s' % (UserProfile.__table__.schema, UserProfile.__table__.name) buf.seek(0) cur.copy_from(buf, tname, columns=cols) cxn.connection.commit() for uname, pw in user_passwords.items(): print uname, pw
def main(argv=sys.argv):
    """Create NB_USERS_TO_CREATE pre-validated accounts for load testing.

    Each account gets a profile document plus an initial archive version,
    mirroring what DocumentRest.create_new_version does on signup.
    """
    settings_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'loadtests.ini')
    engine = engine_from_config(get_appsettings(settings_path), 'sqlalchemy.')
    logging.basicConfig()
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)
    Session = sessionmaker(extension=ZopeTransactionExtension())  # noqa
    session = Session(bind=engine)
    with transaction.manager:
        for num in range(1, NB_USERS_TO_CREATE + 1):
            username = BASE_USERNAME + str(num)
            lang = 'fr'
            profile = UserProfile(
                categories=['amateur'],
                geometry=DocumentGeometry(
                    version=1, geom=None, geom_detail=None),
                locales=[DocumentLocale(lang=lang, title='')])
            # Password and forum name both reuse the username.
            user = User(
                username=username,
                forum_username=username,
                name=username,
                email=username + '@foo.bar',
                lang=lang,
                password=username,
                profile=profile)
            # make sure user account is directly validated
            user.clear_validation_nonce()
            user.email_validated = True
            session.add(user)
            session.flush()
            # also create a version for the profile
            # code from DocumentRest.create_new_version
            archive = user.profile.to_archive()
            archive_locales = user.profile.get_archive_locales()
            archive_geometry = user.profile.get_archive_geometry()
            meta_data = HistoryMetaData(comment='creation', user_id=user.id)
            versions = [
                DocumentVersion(
                    document_id=user.profile.document_id,
                    lang=locale.lang,
                    document_archive=archive,
                    document_locales_archive=locale,
                    document_geometry_archive=archive_geometry,
                    history_metadata=meta_data)
                for locale in archive_locales
            ]
            session.add(archive)
            session.add_all(archive_locales)
            session.add(meta_data)
            session.add_all(versions)
            session.flush()
    print('Created %d users with base username `%s`' % (
        NB_USERS_TO_CREATE, BASE_USERNAME))
def db_url(args):
    """Return the SQLAlchemy engine URL configured in development.ini
    next to the package directory given in ``args.pkg_dir``."""
    ini_path = args.pkg_dir.parent / 'development.ini'
    engine = engine_from_config(get_appsettings(str(ini_path)), 'sqlalchemy.')
    return engine.url
def integration_test_settings():
    """Load and return the 'main' app settings for integration tests."""
    return get_appsettings(config_uri(), name='main')
def main(argv=sys.argv):
    """Create the schema and load bootstrap constants plus sample test data.

    Usage: <script> <config_uri> [var=value ...]

    Inserts a SYSTEM user, the fixed lookup tables (next actions, statuses,
    job types, contact types) and then a small graph of test jobs, keywords,
    sources, companies, agencies and contacts, all inside one transaction.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    options = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=options)
    engine = get_engine(settings)
    Base.metadata.create_all(engine)
    session_factory = get_session_factory(engine)
    with transaction.manager:
        dbsession = get_tm_session(session_factory, transaction.manager)
        # SYSTEM user for ownership of initial bootstrap constants
        sys_user = User(name='SYSTEM', role='ADMIN')
        dbsession.add(sys_user)
        # TODO: Move bootstrap data to its own file
        # Fixed ids are assigned explicitly so code elsewhere can rely on
        # them (e.g. type_id/status_id assignments below).
        next_actions=[
            [ 1,'NONE','Do Nothing'],
            [ 2,'CALL','Call'],
            [ 3,'EMAIL','EMail'],
            [ 4,'CHECK','Check Status'],
            [ 5,'POST','Write to them'],
            [ 6,'CLOSE','Close Job'],
        ]
        for action in next_actions:
            nextaction = NextAction(
                id=action[0],
                keyword=action[1],
                description=action[2],
                creator=sys_user
            )
            dbsession.add(nextaction)
        # Fourth column is the `active` flag: 1 = open-like, 0 = closed-like.
        statuses = [
            [ 1,'OPEN','Open',1],
            [ 2,'HOLD','On Hold',1],
            [ 3,'NORESP','Closed - No response',0],
            [ 4,'REJECTED','Closed - Rejected',0],
            [ 5,'CLOSED','Closed - Other',0],
            [ 6,'NOVAC','Closed - No Vacancies',0],
        ]
        for status in statuses:
            stat = Status(
                id=status[0],
                keyword=status[1],
                description=status[2],
                active=status[3],
                creator=sys_user
            )
            dbsession.add(stat)
        jobtypes = [
            [ 1,'PERM','Permanent'],
            [ 2,'CONTRACT','Contract'],
            [ 3,'PART','Part Time'],
        ]
        for jobtype in jobtypes:
            jtype = JobType(
                id=jobtype[0],
                keyword=jobtype[1],
                description=jobtype[2],
                creator=sys_user
            )
            dbsession.add(jtype)
        #Still to add...
        # contact_type, job_data_type(?)
        contacttypes = [
            [ 1,'PHONE','Telephone'],
            [ 2,'PHONEDL','Direct Line'],
            [ 3,'FAX','Fax Number'],
            [ 4,'EMAIL','EMail Address'],
            [ 5,'ADDRESS','Address'],
            [ 6,'URL','Web Address'],
        ]
        for contacttype in contacttypes:
            ctype = ContactType(
                id=contacttype[0],
                keyword=contacttype[1],
                description=contacttype[2],
                creator=sys_user
            )
            dbsession.add(ctype)
        # Not Bothering with prefilled Location data this time.
        # testing data...
        # TODO: (#6) Figure out how to make this into proper unit tests
        import datetime
        ct = ctype # just grab the last created contact type for testing
        # Two related jobs sharing a keyword, to exercise the m2m tables.
        parent_job = Job(title='job1', salary='salary1',creator=sys_user)
        child_job = Job(title='job2', salary='salary2',creator=sys_user)
        keyword1 = Keyword(keyword='newkeyword')
        keyword2 = Keyword(keyword='new2')
        parent_job.keywords.append(keyword1)
        child_job.keywords.append(keyword1)
        child_job.keywords.append(keyword2)
        #parent_job.child_jobs.append(child_job)
        jobrelate = JobRelated(parent=parent_job, child=child_job,
                               description='relatedtest')
        dbsession.add(parent_job)
        dbsession.add(child_job)
        dbsession.add(jobrelate)
        source1 = Source(keyword='AGENT', description='Agent',creator=sys_user)
        source2 = Source(keyword='SOJOBS', description='StackOverflow',creator=sys_user)
        parent_job.source=source1
        child_job.source=source2
        # Ids refer to the JobType/Status rows inserted above.
        parent_job.type_id=1
        child_job.type_id=2
        parent_job.status_id=1
        child_job.status_id=2
        # checking contacts
        company = Company(name="foo bar Ltd", creator=sys_user)
        parent_job.company=company
        #ct = ContactType(keyword='PHONE', description='Phone', creator=sys_user)
        cont = CompanyContact(contacttype=ct, data='123456789')
        company.contacts.append(cont)
        # Agency/agent pair with contacts of each flavor.
        agency = Agency(name='Agents R US', creator=sys_user)
        agent = Agent(name='Mr Smith', creator=sys_user)
        agency.agents.append(agent)
        aycont = AgencyContact(contacttype=ct, data='987654321')
        ctcont = AgentContact(contacttype=ct, data='99999999')
        agency.contacts.append(aycont)
        agent.contacts.append(ctcont)
        parent_job.agents.append(agent)
        parent_job.agency=(agency)
        agency.jobs.append(child_job)
        #print(keyword1.jobs[0].title, '---job2')
"""Pyramid bootstrap environment. """ from alembic import context from pyramid.paster import get_appsettings, setup_logging from sqlalchemy import engine_from_config from infolica.models.meta import Base config = context.config setup_logging(config.config_file_name) settings = get_appsettings(config.config_file_name) target_metadata = Base.metadata def include_object(object, name, type_, reflected, compare_to): """ Exclude views from Alembic's consideration. """ return not object.info.get('is_view', False) def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the
def main(argv=sys.argv):
    """Drop and recreate all tables, then load the initial CSV/SHP data.

    Usage: <script> <config_uri> [--md5-pass]
    With --md5-pass, passwords in user.csv are loaded as md5 hashes
    (passed through to User.add_from_file).
    """
    if len(argv) != 2 and len(argv) != 3:
        usage(argv)
    config_uri = argv[1]
    setup_logging(config_uri)
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    md5_pass = len(argv) == 3 and argv[2] == '--md5-pass'
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)

    csv_dir = 'nextgisbio/initial_data/csv'

    # Populate the tables. Order matters: taxa first, then the reference
    # ("dictionary") tables the cards depend on.
    simple_csv_models = [
        # Taxa
        (Taxon, 'taxon.csv'),
        (Synonym, 'synonym.csv'),
        # Reference tables
        (Person, 'person.csv'),
        (Taxa_scheme, 'taxa_scheme.csv'),
        (Museum, 'museum.csv'),
        (Coord_type, 'coord_type.csv'),
        (Anthr_press, 'anthr_press.csv'),
        (Vitality, 'vitality.csv'),
        (Abundance, 'abundance.csv'),
        (Footprint, 'footprint.csv'),
        (Pheno, 'pheno.csv'),
        (Inforesources, 'inforesources.csv'),
        (Area_type, 'area_type.csv'),
        (Legend, 'legend.csv'),
        (Key_area, 'key_area.csv'),
    ]
    for model, filename in simple_csv_models:
        model.add_from_file('%s/%s' % (csv_dir, filename))

    # Load the key-area shapefile and fill the many-to-many association
    # table (square_keyarea_association) between Squares and Key_area.
    Squares.add_from_file('%s/square_karea_association.csv' % csv_dir,
                          'nextgisbio/initial_data/shp/key_areas_25km.shp')

    # Cards and annotations
    Cards.add_from_file('%s/cards.csv' % csv_dir)
    Annotation.add_from_file('%s/annotation.csv' % csv_dir)

    # Users
    User.add_from_file('%s/user.csv' % csv_dir, md5_pass)

    RedBook.import_from_csv('%s/redbooks.csv' % csv_dir)
    Images.import_from_csv('%s/images.csv' % csv_dir)
    CardsImages.import_from_csv('%s/cards_images.csv' % csv_dir)