Example No. 1
 def get_session(self, new=False):
     """If new is True, create a new session; otherwise return the shared global one."""
     if new:
         return sessionmaker(bind=self.application.engine)()
     if not BaseHandler.session:
         BaseHandler.session = sessionmaker(bind=self.application.engine)()
     return BaseHandler.session
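This hand-rolled lazy global is essentially what SQLAlchemy's scoped_session automates, with per-thread isolation added. A minimal sketch of the equivalent, assuming a stand-in engine:

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite:///:memory:')  # illustrative engine only
# scoped_session shares one factory but hands each thread its own session.
Session = scoped_session(sessionmaker(bind=engine))
session = Session()   # repeated calls in the same thread return the same session
Session.remove()      # discard the thread-local session when done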
Example No. 2
 def create_session(self, dburi, short_lived_sessions=False, **kwargs):
     engine = self.get_engine(dburi, **kwargs)
     if self.forked:
         if short_lived_sessions or dburi not in self._sessions:
             self._sessions[dburi] = sessionmaker(bind=engine)
         return engine, self._sessions[dburi]
     return engine, sessionmaker(bind=engine)
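Caching one sessionmaker per database URI, as above, avoids rebuilding the factory on every call. A stripped-down sketch of the same memoization idea, assuming nothing beyond SQLAlchemy itself:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

_factories = {}

def get_factory(dburi):
    # Build the engine and factory once per URI, then reuse them.
    if dburi not in _factories:
        _factories[dburi] = sessionmaker(bind=create_engine(dburi))
    return _factories[dburi]

session = get_factory('sqlite:///:memory:')()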
Example No. 3
 def register(cls, name, scoped):
     if scoped:
         cls.sessions[name] = scoped_session(sessionmaker(
             extension=ZopeTransactionExtension()))
     else:
         cls.sessions[name] = sessionmaker()
     return cls.sessions[name]
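With ZopeTransactionExtension, commits are driven by the transaction package rather than by session.commit(). A hedged usage sketch, assuming the registry class that owns register() above (here called Registry, a hypothetical name) and the transaction package installed:

import transaction

factory = Registry.register('main', scoped=True)  # hypothetical owner of register()
session = factory()
# ... session.add(...) work here ...
transaction.commit()  # the extension joins the session to this transaction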
Example No. 4
def convert_all():
    """
    Convert all images from SQLite to MySQL
    :return:
    """

    # setup engines to each database
    mysql_engine = create_engine('mysql+pymysql://root:semperfi@localhost:3306/plane_viewer')
    sqlite_engine = create_engine('sqlite:///images.sqlite3')

    # setup session for each database
    mysql_sessionmaker = sessionmaker(bind=mysql_engine)
    sqlite_sessionmaker = sessionmaker(bind=sqlite_engine)

    mysql_session = mysql_sessionmaker()
    sqlite_session = sqlite_sessionmaker()

    # retrieve all images from SQLite
    sqlite_images = sqlite_session.query(SqlitePlaneImage).all()

    # convert all images from SQLite to MySQL and attempt to load them, ignoring duplicates
    for image in sqlite_images:
        mysql_image = convert_image_object(image)

        mysql_session.add(mysql_image)
        try:
            mysql_session.commit()
            print('Loaded', image)
        except exc.IntegrityError:
            mysql_session.rollback()
            print("Image {} already loaded".format(image))

    mysql_images = mysql_session.query(MySqlPlaneImage).all()
    print(len(mysql_images))
Example No. 5
 def init_sqlalchemy(self, scheme, connection):
     try:
         import sqlalchemy
         from sqlalchemy import create_engine, MetaData
         from sqlalchemy.orm import scoped_session, sessionmaker
         from torweb.db import CacheQuery
         import _mysql_exceptions
         from sqlalchemy import event
         from sqlalchemy.exc import DisconnectionError
         def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
             try:
                 dbapi_conn.cursor().execute('select now()')
             except _mysql_exceptions.OperationalError:
                 raise DisconnectionError
         
         engine = create_engine(
             connection,
             convert_unicode=True,
             encoding="utf-8",
             pool_recycle=3600*7,
             #echo_pool=True,
             echo=False,
         )
         event.listen(engine, 'checkout', my_on_checkout)
         metadata = MetaData(bind=engine)
         session = scoped_session(sessionmaker(bind=engine, query_cls=CacheQuery))
         sqlalchemy_sessions = [session]
         DB_Session = sessionmaker(bind=engine)
         return {"metadata":metadata, "session":session, "sqlalchemy_sessions":sqlalchemy_sessions}
         #setattr(self.app, 'metadata', metadata)
         #setattr(self.app, scheme.get('sqlalchemy', 'session'), session)
         #setattr(self.app, 'sqlalchemy_sessions', sqlalchemy_sessions)
     except Exception as e:
         print(e)
Example No. 6
    def __init__(self, bus, connection_string):
        """
        The plugin is registered to the CherryPy engine and therefore
        is part of the bus (the engine *is* a bus) registry.

        We use this plugin to create the SA engine. At the same time,
        when the plugin starts we create the tables into the database
        using the mapped class of the global metadata.
        """
        plugins.SimplePlugin.__init__(self, bus)

#        self.sa_engine = None
#        self.connection_string = connection_string
#        self.session = scoped_session(sessionmaker(autoflush=True, autocommit=False))

        self.sa_engine_online = None
        self.connection_string_online = connection_string.connectUrlonline
        self.session_online = scoped_session(sessionmaker(autoflush=True,
                                                   autocommit=False))

        self.sa_engine_offline = None
        self.connection_string_offline = connection_string.connectUrloffline
        self.session_offline = scoped_session(sessionmaker(autoflush=True,
                                                   autocommit=False))

        self.sa_engine_cache = None
        self.connection_string_cache = connection_string.connectUrlcache
        self.session_cache = scoped_session(sessionmaker(autoflush=True,
                                                   autocommit=False))
Example No. 7
def edit_category(category_id):
    if request.method == "GET":
        DBSession = sessionmaker(bind=engine)
        session = DBSession()
        try:
            result = session.query(Category).filter_by(id=category_id).one()
            return render_template("edit_category.html", category=result)
        except NoResultFound:
            abort(404)
        finally:
            session.close()
    elif request.method == "POST":
        DBSession = sessionmaker(bind=engine)
        session = DBSession()
        try:
            result = session.query(Category).filter_by(id=category_id).one()
            result.name = request.form["name"]
            result.description = request.form["description"]
            session.add(result)
            session.commit()
            return redirect(url_for("get_category", category_id=category_id), code=302)
        except NoResultFound:
            abort(404)
        finally:
            session.close()

        return "test"
    else:
        abort(400)
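Both branches above repeat the sessionmaker/close boilerplate. One common way to factor that out (a sketch only, not part of the original view) is the transactional session-scope context manager from the SQLAlchemy documentation:

from contextlib import contextmanager
from sqlalchemy.orm import sessionmaker

@contextmanager
def session_scope(engine):
    """Provide a transactional scope: commit on success, roll back on error."""
    session = sessionmaker(bind=engine)()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()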
Example No. 8
def connect(dbstr):
  engine = create_engine(dbstr, echo = False)
  connection = engine.connect()

  #handle case that the db was initialized before a version table existed yet
  if engine.dialect.has_table(connection, "program"):
    # if there are existing tables
    if not engine.dialect.has_table(connection, "_meta"):
      # if no version table, assume outdated db version and error
      connection.close()
      raise Exception("Your opentuner database is currently out of date. Save a back up and reinitialize")

  # else if we have the table already, make sure version matches
  if engine.dialect.has_table(connection, "_meta"):
    Session = scoped_session(sessionmaker(autocommit=False,
                                          autoflush=False,
                                          bind=engine))
    version = _Meta.get_version(Session)
    if not DB_VERSION == version:
      raise Exception('Your opentuner database version {} is out of date with the current version {}'.format(version, DB_VERSION))

  Base.metadata.create_all(engine)

  Session = scoped_session(sessionmaker(autocommit=False,
                                        autoflush=False,
                                        bind=engine))
  # mark database with current version
  _Meta.add_version(Session, DB_VERSION)
  Session.commit()

  return engine, Session
Example No. 9
    def test_all_htmlpopups(self):
        from chsdi.models import models_from_name
        from chsdi.models.bod import LayersConfig
        from sqlalchemy import distinct
        from sqlalchemy.orm import scoped_session, sessionmaker
        val = True
        DBSession = scoped_session(sessionmaker())
        valnone = None
        query = DBSession.query(distinct(LayersConfig.layerBodId)).filter(LayersConfig.staging == 'prod').filter(LayersConfig.queryable == val).filter(LayersConfig.parentLayerId == valnone)
        features = []
        try:
            for layer in getLayers(query):
                try:
                    FeatDBSession = scoped_session(sessionmaker())
                    models = models_from_name(layer)
                    self.failUnless(models is not None and len(models) > 0, layer)
                    model = models[0]
                    query = FeatDBSession.query(model.primary_key_column()).limit(1)
                    ID = [q[0] for q in query]
                    if ID:
                        features.append((layer, str(ID[0])))
                finally:
                    FeatDBSession.close()
        finally:
            DBSession.close()

        for f in features:
            for lang in ('de', 'fr', 'it', 'rm', 'en'):
                link = '/rest/services/all/MapServer/' + f[0] + '/' + f[1] + '/htmlPopup?callback=cb&lang=' + lang
                resp = self.testapp.get(link)
                self.failUnless(resp.status_int == 200, link)
Example No. 10
    def __init__(self, uri=None, engine=None, **kwargs):
        """Create a new SqlAlchemyStore.  When given a connection string (uri)
        or SQLAlchemy Engine (engine), this Store will create it's own internal
        SQLAlchemy Session to manage objects.  If you do not provide a URI or
        Engine, your mapped object metadata must be bound to their own engines.
        """

        super(SqlAlchemyStore, self).__init__()
        
        # get the session
        if "session" in kwargs:
            # we no longer allow initialization with a pre-existing session object, there
            # are too many issues with this approach at the moment
            raise DeprecationWarning("cannot instantiate a SqlAlchemyStore with a pre-existing session object")
        else:
            # no session, we have to make one
            # first we need to get the engine
            
            if uri and engine:
                # can't give two ways to get an engine
                raise ValueError("you can only provide either a connection string URI or an engine, not both")
                
            elif uri:
                # we have a uri to create an engine
                engine = create_engine(uri)
            
            if engine:
                # we have an engine, we can create the bound session now
                self.__session = scoped_session(sessionmaker(autoflush=True, bind=engine))
            
            else:
                # no engine or URI was specified, create an unbound session
                # (mapped object metadata will need to be bound to an engine in this case)
                self.__session = scoped_session(sessionmaker(autoflush=True))
Example No. 11
 def test_all_htmlpopups(self):
     from chsdi.models import models_from_name
     from chsdi.models.bod import LayersConfig
     from sqlalchemy import distinct
     from sqlalchemy.orm import scoped_session, sessionmaker
     val = True
     DBSession = scoped_session(sessionmaker())
     query = DBSession.query(distinct(LayersConfig.idBod)).filter(LayersConfig.queryable == val).filter(LayersConfig.staging == 'prod')
     # Get a list of all the queryable layers
     layers = [q[0] for q in query]
     DBSession.close()
     # Get a list of feature ids
     features = []
     for layer in layers:
         try:
             model = models_from_name(layer)[0]
             DBSession = scoped_session(sessionmaker())
             query = DBSession.query(model.primary_key_column()).limit(1)
             ID = [q[0] for q in query]
             # If tables are empty ID is an empty list
             if ID:
                 features.append((layer, str(ID[0])))
             DBSession.close()
         except Exception as e:
             print(e)
         finally:
             DBSession.close()
     for f in features:
         for lang in ('de', 'fr', 'it', 'rm', 'en'):
             self.testapp.get('/rest/services/all/MapServer/%s/%s/htmlPopup' % (f[0], f[1]), params={'callback': 'cb', 'lang': '%s' % lang}, status=200)
Example No. 12
File: sqla.py Project: riffm/mage
def construct_maker(databases, models=None, query_cls=Query, engine_params=None,
                    session_class=DBSession):
    '''
    databases - str with db uri or dict.
    models - str name of models package (module), default is 'models'
    query_cls - additional query class
    engine_params - additional engine params
    '''
    models = models or 'models'
    engine_params = engine_params or {}
    db_dict = {}
    if isinstance(databases, basestring):
        engine = create_engine(databases, **engine_params)
        return orm.sessionmaker(class_=session_class, query_cls=query_cls,
                                bind=engine, autoflush=False)
    for ref, uri in databases.items():
        md_ref = '.'.join(filter(None, [models, ref]))
        metadata = import_string(md_ref, 'metadata')
        engine = create_engine(uri, **engine_params)
        #TODO: find out why this is broken since sqlalchemy 0.7
        #engine.logger.logger.name += '(%s)' % ref
        for table in metadata.sorted_tables:
            db_dict[table] = engine
    return orm.sessionmaker(class_=session_class, query_cls=query_cls, binds=db_dict,
                            autoflush=False)
Example No. 13
    def __init__(self, dbname='xraydata.db', read_only=True):
        "connect to an existing database"
        if not os.path.exists(dbname):
            parent, _ = os.path.split(__file__)
            dbname = os.path.join(parent, dbname)
            if not os.path.exists(dbname):
                raise IOError("Database '%s' not found!" % dbname)

        if not isxrayDB(dbname):
            raise ValueError("'%s' is not a valid X-ray Database file!" % dbname)

        self.dbname = dbname
        self.engine = make_engine(dbname)
        self.conn = self.engine.connect()
        kwargs = {}
        if read_only:
            kwargs = {'autoflush': True, 'autocommit':False}
            def readonly_flush(*args, **kwargs):
                return
            self.session = sessionmaker(bind=self.engine, **kwargs)()
            self.session.flush = readonly_flush
        else:
            self.session = sessionmaker(bind=self.engine, **kwargs)()

        self.metadata = MetaData(self.engine)
        self.metadata.reflect()
        self.tables = self.metadata.tables

        self.atomic_symbols = [e.element for e in self.tables['elements'].select(
            ).execute().fetchall()]
Example No. 14
	def __init__( self, filename ):
		echo = False
		if self.DebugLevel > 2:
			echo = True

		engine = create_engine( 'sqlite:///' + filename, echo = echo )
		metadata=Base.metadata
		metadata.create_all(engine)

		self.Session=sessionmaker()
		self.Session.configure(bind=engine)	
		self.SessionInstance = self.Session()

		dirname=os.path.dirname(filename)
		self.SessionsInstances={}
		for file_list in self.SessionInstance.query( FileList ).all():
			filename=file_list.filename
			if not os.path.isfile(filename):
				filename=os.path.join(dirname,os.path.basename(file_list.filename))

			engine = create_engine( 'sqlite:///' + filename, echo = echo )
			metadata=Base.metadata
			metadata.create_all(engine)

			self.Session=sessionmaker()
			self.Session.configure(bind=engine)	
			self.SessionsInstances[file_list.type] = self.Session()

		if not self.SessionsInstances.has_key('Source'):
			self.SessionsInstances['Source']=self.SessionInstance

		if not self.SessionsInstances.has_key('Target'):
			self.SessionsInstances['Target']=self.SessionInstance
Example No. 15
  def setup_database(self):
    # DB library imports
    from sqlalchemy import (create_engine, Table, MetaData, Column, Integer, 
                            String, Unicode, Text, UnicodeText, Date, Numeric, 
                            Time, Float, DateTime, Interval, Binary, Boolean, 
                            PickleType)
    from sqlalchemy.orm import sessionmaker, mapper
    # Create global name mappings for model()
    global column_mapping
    column_mapping = {
              'string': String,       'str': String,
             'integer': Integer,      'int': Integer, 
             'unicode': Unicode,     'text': Text,
         'unicodetext': UnicodeText, 'date': Date,
             'numeric': Numeric,     'time': Time,
               'float': Float,   'datetime': DateTime,
            'interval': Interval,  'binary': Binary,
             'boolean': Boolean,     'bool': Boolean,
          'pickletype': PickleType,
    }
 
    # Add a few SQLAlchemy types to globals() so we can use them in models
    globals().update({'Column': Column, 'Table': Table, 'Integer': Integer,
                      'MetaData': MetaData, 'mapper': mapper})
    # Ensures correct slash number for sqlite
    if self.config['db_type'] == 'sqlite':
      self.config['db_location'] = '/' + self.config['db_location']
      eng_name = self.config['db_type'] + '://' + self.config['db_location']
      self.config['db_engine'] = create_engine(eng_name)
      self.config['db_session'] = sessionmaker(bind=self.config['db_engine'])()
    elif self.config['db_type'] == 'postgres':
      eng_name = self.config['db_type'] + '://' + self.config['db_location']
      self.config['db_engine'] = create_engine(eng_name, encoding="utf8", convert_unicode=True)
      self.config['db_session'] = sessionmaker(bind=self.config['db_engine'])()
Example No. 16
def session_maker(databases, query_cls=Query, models_location='models',
                  engine_params=None, session_params=None,
                  session_class=orm.session.Session):
    '''
    Session maker with multiple databases support. For each database there
    should be corresponding submodule of `models_location` package with
    `metadata` object for that database.

    models_location - name of the models package; defaults to 'models'
    '''
    engine_params = engine_params or {}
    session_params = dict(session_params or {})
    session_params.setdefault('autoflush', False)
    if isinstance(databases, basestring):
        engine = create_engine(databases, **engine_params)
        engine.logger = logging.getLogger('sqlalchemy.engine')
        binds = {}
        metadata = import_string(models_location, 'metadata')
        for table in metadata.sorted_tables:
            binds[table] = engine
        return orm.sessionmaker(class_=session_class, query_cls=query_cls,
                                bind=engine, binds=binds, **session_params)
    binds = multidb_binds(databases, models_location, engine_params=engine_params)
    return orm.sessionmaker(class_=session_class, query_cls=query_cls,
                            binds=binds, **session_params)
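A hypothetical call, assuming a models package laid out the way the docstring describes (one submodule per database exposing a metadata object); the URIs and package name here are illustrative only:

maker = session_maker(
    {'main': 'postgresql://localhost/main',
     'stats': 'postgresql://localhost/stats'},
    models_location='myapp.models',
)
session = maker()  # engines connect lazily, so no database is touched yet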
Example No. 17
def get_data():
    engine = db_connect()
    create_deals_table(engine)
    Session = sessionmaker(bind=engine)
    sess = Session()
    if request.method == 'POST':
        search_code = request.form['naics']
        if not search_code:
            rfps_nasa = sess.query(RFP).all()
        else:
            rfps_nasa = sess.query(RFP).filter(RFP.NAICS_code == search_code).all()
    else:
        rfps_nasa = sess.query(RFP).all()

    engine = db_connect()
    create_deals_table(engine)
    Session = sessionmaker(bind=engine)
    sess = Session()
    if request.method == 'POST':
        search_code = request.form['naics']
        if not search_code:
            rfps_fbo = OpportunityDetail.query.all()
        else:
            code = NAICSChild.query.filter_by(code=search_code).first()
            rfps_fbo = OpportunityDetail.query.filter_by(naics_code=code)\
                .all()
    else:
        rfps_fbo = OpportunityDetail.query.all()

    res = []
    for r in rfps_nasa:
        res.append(r.to_json())
    for r in rfps_fbo:
        res.append(r.to_json())
    return str(res)
Example No. 18
def create_db(dbname, password, username):
    session = sessionmaker(bind=con)()
    session.connection().connection.set_isolation_level(0)
    try:
        session.execute("CREATE USER %s PASSWORD '%s'" % (username, password))
    except:
        print(traceback.format_exc())
    try:
        session.execute('CREATE DATABASE %s' % (dbname,))
    except:
        print(traceback.format_exc())
    try:
        session.execute('GRANT ALL PRIVILEGES ON DATABASE %s TO %s' % (dbname, username))
        session.connection().connection.set_isolation_level(1)
        add_user_in_maindb = UserBD(username=username, password=password, title=dbname)
        add_user_in_maindb.save()
    except:
        print(traceback.format_exc())
    finally:
        session.close()


    try:
        c = create_engine('postgresql+psycopg2://%s:%[email protected]:5432/%s' % (username, password, dbname))
        s = sessionmaker(bind=c)()
        s.connection().connection.set_isolation_level(0)
        # s.execute("CREATE TABLE product(id SERIAL NOT NULL PRIMARY KEY)")
        # s.execute("CREATE TABLE files(id SERIAL NOT NULL PRIMARY KEY, file CHAR(255))")
        s.connection().connection.set_isolation_level(1)
    except:
        print(traceback.format_exc())
Example No. 19
    def test_merge_load(self):
        Parent = self.classes.Parent

        sess = sessionmaker()()
        sess2 = sessionmaker()()
        p1 = sess.query(Parent).get(1)
        p1.children

        # preloading of collection took this down from 1728 to 1192
        # using sqlite3 the C extension took it back up to approx. 1257
        # (py2.6)

        @profiling.function_call_count()
        def go():
            sess2.merge(p1)

        go()

        # one more time, count the SQL

        def go2():
            sess2.merge(p1)

        sess2 = sessionmaker(testing.db)()
        self.assert_sql_count(testing.db, go2, 2)
Example No. 20
def test_taxa_to_percent_and_push(
    MergedataToUpload, engine, tbl_taxa, percent, tbl_percent, percent_cover_table, convert_types, find_types
):
    percent.fillna("NA", inplace=True)

    uploading = MergedataToUpload(sessionmaker(bind=engine, autoflush=False))
    uploading.merge_for_datatype_table_upload(
        raw_dataframe=percent,
        formated_dataframe=tbl_percent,
        formated_dataframe_name="percent_cover",
        raw_data_taxa_columns=["site", "code"],
        uploaded_taxa_columns=["study_site_table_fkey", "sppcode"],
    )

    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()
    tbl_percent_cover_query = select([percent_cover_table])
    percent_cover_query_stm = session.execute(tbl_percent_cover_query)
    percent_cover_query_df = pd.DataFrame(percent_cover_query_stm.fetchall())
    percent_cover_query_df.columns = percent_cover_query_stm.keys()
    session.close()
    engine.dispose()

    percent_cover_true_list = tbl_percent["percent_cover_observation"].values.tolist()
    percent_cover_test_list = percent_cover_query_df["percent_cover_observation"].values.tolist()

    percent_cover_true_list = [float(x) for x in percent_cover_true_list]
    percent_cover_test_list = [float(x) for x in percent_cover_test_list]

    assert (percent_cover_true_list == percent_cover_test_list) is True

    assert (
        [str(x) for x in tbl_percent["spatial_replication_level_2"].values.tolist()]
        == [str(x) for x in percent_cover_query_df["spatial_replication_level_2"].values.tolist()]
    ) is True
Example No. 21
def test_taxa_to_individual_and_push(MergedataToUpload, engine, tbl_taxa, individual, tbl_individual, individual_table):
    individual.fillna("NA", inplace=True)
    uploading = MergedataToUpload(sessionmaker(bind=engine, autoflush=False))
    uploading.merge_for_datatype_table_upload(
        raw_dataframe=individual,
        formated_dataframe=tbl_individual,
        formated_dataframe_name="individual",
        raw_data_taxa_columns=["SITE", "TAXON_GENUS", "TAXON_SPECIES"],
        uploaded_taxa_columns=["study_site_table_fkey", "genus", "species"],
    )

    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()
    tbl_individual_query = select([individual_table])
    individual_query_stm = session.execute(tbl_individual_query)
    individual_query_df = pd.DataFrame(individual_query_stm.fetchall())
    individual_query_df.columns = individual_query_stm.keys()
    session.close()
    engine.dispose()

    print(tbl_individual["individual_observation"].values.tolist())
    print(individual_query_df["individual_observation"].values.tolist())

    assert (
        tbl_individual["individual_observation"].values.tolist()
        == individual_query_df["individual_observation"].values.tolist()
    ) is True

    assert (
        [str(x) for x in tbl_individual["spatial_replication_level_2"].values.tolist()]
        == [str(x) for x in individual_query_df["spatial_replication_level_2"].values.tolist()]
    ) is True
Example No. 22
def test_taxa_to_count_and_push(MergedataToUpload, engine, tbl_taxa, count, tbl_count, count_table):
    count.fillna("NA", inplace=True)

    uploading = MergedataToUpload(sessionmaker(bind=engine, autoflush=False))
    uploading.merge_for_datatype_table_upload(
        raw_dataframe=count,
        formated_dataframe=tbl_count,
        formated_dataframe_name="count",
        raw_data_taxa_columns=["site", "genus", "species"],
        uploaded_taxa_columns=["study_site_table_fkey", "genus", "species"],
    )

    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()
    tbl_count_query = select([count_table])
    count_query_stm = session.execute(tbl_count_query)
    count_query_df = pd.DataFrame(count_query_stm.fetchall())
    count_query_df.columns = count_query_stm.keys()
    session.close()
    engine.dispose()

    print(count_query_df)
    assert (
        tbl_count["count_observation"].values.tolist() == count_query_df["count_observation"].values.tolist()
    ) is True
    assert (
        [str(x) for x in tbl_count["spatial_replication_level_2"].values.tolist()]
        == [str(x) for x in count_query_df["spatial_replication_level_2"].values.tolist()]
    ) is True
Example No. 23
File: orm.py Project: saebyn/baph
    def __init__(self, name=None):
        if not name:
            name = 'default'
        data = {}
        if hasattr(settings, 'DATABASES') and \
           name in settings.DATABASES:
            data = settings.DATABASES[name]
        if data.get('ENGINE', '') == '':
            raise ImproperlyConfigured('''\
The database ORM connection requires, at minimum, an engine type.''')
        if '.' in data['ENGINE']:
            data['ENGINE'] = data['ENGINE'].rsplit('.', 1)[-1]

        # django needs sqlite3 but sqlalchemy references sqlite
        if data['ENGINE'] == 'sqlite3':
            data['ENGINE'] = 'sqlite'

        self.engine = self._create_engine(data)
        ro_values = dict([(k[9:], v) for k, v in data.iteritems()
                             if k.startswith('READONLY_')])
        if len(ro_values):
            ro_data = dict(data)
            ro_data.update(ro_values)
            self.readonly_engine = self._create_engine(ro_data)
        self.metadata = MetaData(self.engine)
        self.Base = declarative_base(metadata=self.metadata)
        if hasattr(self, 'readonly_engine'):
            self._readonly_sessionmaker = \
                scoped_session(sessionmaker(bind=self.readonly_engine))
        self._sessionmaker = scoped_session(sessionmaker(bind=self.engine))
Example No. 24
def create_session():
    # cloud_usage
    engine_csu = create_engine(config.get('main', 'cloud_usage_uri'))
    metadata_csu = MetaData(engine_csu)
    account_csu = Table('account', metadata_csu, autoload=True)
    mapper(AccountCsU, account_csu)
    usage_event_csu = Table('usage_event', metadata_csu, autoload=True)
    mapper(UsageEventCsU, usage_event_csu)
    cloud_usage_csu = Table('cloud_usage', metadata_csu, autoload=True)
    mapper(CloudUsageCsU, cloud_usage_csu)
    # cloud database
    engine_cs = create_engine(config.get('main', 'cloud_uri'))
    metadata_cs = MetaData(engine_cs)
    account_cs = Table('account', metadata_cs, autoload=True)
    mapper(AccountCs, account_cs)
    disk_offering_cs = Table('disk_offering', metadata_cs, autoload=True)
    mapper(DiskOfferingCs, disk_offering_cs)
    template_cs = Table('vm_template', metadata_cs, autoload=True)
    mapper(TemplateCs, template_cs)
    guest_os_cs = Table('guest_os', metadata_cs, autoload=True)
    mapper(GuestOSCs, guest_os_cs)

    SessionCs = sessionmaker(bind=engine_cs)
    session_cs = SessionCs()
    SessionCsU = sessionmaker(bind=engine_csu)
    session_csu = SessionCsU()

    # setup our dedicated model
    engine = create_engine(config.get('main', 'rbc_usage_uri'))
    Session = sessionmaker(bind=engine)
    session = Session()
    return (session_csu, session_cs, session)
Example No. 25
def copy_stations_to_sqlite(src_dsn, dest_dsn):
    src_sesh = sessionmaker(bind=create_engine(src_dsn))()
    dest_sesh = sessionmaker(bind=create_engine(dest_dsn))()

    net = Network(name='MoTIe')
    dest_sesh.add(net)
    dest_sesh.flush()

    q = src_sesh.query(Station).join(History).join(
        Network).filter(Network.name == 'MoTIe')
    for stn in q.all():
        histories = [History(station_name=hist.station_name)
                     for hist in stn.histories]
        new_obj = Station(native_id=stn.native_id,
                          network_id=net.id, histories=histories)
        dest_sesh.add(new_obj)
    dest_sesh.commit()

    q = src_sesh.query(Variable).join(Network).filter(Network.name == 'MoTIe')
    for var in q.all():
        v = Variable(name=var.name, standard_name=var.standard_name,
                     cell_method=var.cell_method, network_id=net.id,
                     unit=var.unit)
        dest_sesh.add(v)
    dest_sesh.commit()
Example No. 26
def initialize_sql(settings):

    global DBSession
    global sqlalchemy_url
    global Base
    global engine
    global Session
    global session
    global tmp_dir
    global extended

    sqlalchemy_url = settings['sqlalchemy.url']
    DBSession = scoped_session(sessionmaker())
    Base = declarative_base()
    engine = create_engine(sqlalchemy_url)
    Session = sessionmaker(bind=engine)
    session = Session()
    tmp_dir = settings['tmp_dir']

    # Extended attributes
    if settings['extended'] == 'true':
        extended = True
    else:
        extended = False

    DBSession.configure(bind=engine)
    Base.metadata.bind = engine
Example No. 27
def init_sqlalchemy(settings):
    # master
    master_url = settings['sqlalchemy.url']
    connect_kwargs = settings.get('sqlalchemy.connect_kwargs')
    kwargs = {}
    if connect_kwargs is not None:
        if isinstance(connect_kwargs, str):
            connect_kwargs = json.loads(connect_kwargs)
        for k, v in connect_kwargs.items():
            kwargs[k] = v
    engine = create_engine(master_url, **kwargs)

    sm = orm.sessionmaker(bind=engine, extension=ZopeTransactionExtension())
    meta.Session = orm.scoped_session(sm)
    meta.metadata.bind = engine

    # slaves
    slaves_url = settings.get('sqlalchemy.slaves', [])
    slaves = []
    for url in slaves_url:
        slave = create_engine(url, **kwargs)
        sm = orm.sessionmaker(bind=slave, extension=ZopeTransactionExtension())
        slaves.append(orm.scoped_session(sm))

    if slaves:
        slave = random.choice(slaves)
        meta.BaseObject.query = slave.query_property(orm.Query)
    else:
        meta.BaseObject.query = meta.Session.query_property(orm.Query)
Example No. 28
 def __init__(self, dburi):
     self.engine = create_engine(dburi, echo=False, pool_recycle=3600, echo_pool=True,
                                 connect_args={'check_same_thread':False})
     Session = sessionmaker(bind=self.engine)
     self.session = Session()
     Base.metadata.create_all(self.engine, checkfirst=True)
     self.session_factory = sessionmaker(bind=self.engine)
Example No. 29
    def test_merge_load(self):
        Parent = self.classes.Parent

        sess = sessionmaker()()
        sess2 = sessionmaker()()
        p1 = sess.query(Parent).get(1)
        p1.children

        # preloading of collection took this down from 1728 to 1192
        # using sqlite3 the C extension took it back up to approx. 1257
        # (py2.6)

        @profiling.function_call_count(variance=0.10,
                                versions={'2.5':1050, '2.6':1050,
                                        '2.6+cextension':1005, 
                                        '2.7':1005,
                                        '3':1050}
                            )
        def go():
            p2 = sess2.merge(p1)
        go()

        # one more time, count the SQL

        sess2 = sessionmaker(testing.db)()
        self.assert_sql_count(testing.db, go, 2)
Example No. 30
def test_taxa_to_biomass_and_push(MergedataToUpload, engine, tbl_taxa, biomass, tbl_biomass, biomass_table):
    biomass.fillna("NA", inplace=True)
    uploading = MergedataToUpload(sessionmaker(bind=engine, autoflush=False))
    uploading.merge_for_datatype_table_upload(
        raw_dataframe=biomass,
        formated_dataframe=tbl_biomass,
        formated_dataframe_name="biomass",
        raw_data_taxa_columns=["site", "phylum", "clss", "ordr", "family", "genus", "species"],
        uploaded_taxa_columns=["study_site_table_fkey", "phylum", "clss", "ordr", "family", "genus", "species"],
    )
    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()
    tbl_biomass_query = select([biomass_table])
    biomass_query_stm = session.execute(tbl_biomass_query)
    biomass_query_df = pd.DataFrame(biomass_query_stm.fetchall())
    biomass_query_df.columns = biomass_query_stm.keys()
    session.close()
    engine.dispose()

    biomass_true_list = tbl_biomass["biomass_observation"].values.tolist()
    biomass_test_list = biomass_query_df["biomass_observation"].values.tolist()

    biomass_true_list = [float(x) for x in biomass_true_list]
    biomass_test_list = [float(x) for x in biomass_test_list]

    assert (biomass_true_list == biomass_test_list) is True

    assert (
        [str(x) for x in tbl_biomass["spatial_replication_level_2"].values.tolist()]
        == [str(x) for x in biomass_query_df["spatial_replication_level_2"].values.tolist()]
    ) is True
Example No. 31
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, orm, MetaData

from src import config
import src.auth.adapters.orm

DEFAULT_SESSION_FACTORY: Session = orm.sessionmaker(
    bind=create_engine(
        # ISOLATION LEVEL ENSURES aggregate's version IS RESPECTED
        # That is, if version differs it will raise an exception
        config.get_postgres_uri(),
        isolation_level="REPEATABLE_READ",
    ),
    autoflush=False,
)

def start_mappers() -> MetaData:
    """
    Updates the metadata reference and runs SQLAlchemy's mappers
    (classical mapping style).
    """
    metadata = MetaData()
    src.auth.adapters.orm.start_mappers(metadata)

    return metadata
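A usage sketch for the factory above; the lifecycle shown here is an assumption, since the service layer that would normally own the session is not part of the excerpt:

session = DEFAULT_SESSION_FACTORY()
try:
    # ... load and mutate aggregates here ...
    session.commit()   # under REPEATABLE READ, a conflicting version fails here
except Exception:
    session.rollback()
    raise
finally:
    session.close()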
Example No. 32
	def insert_data_session(self, data):
		Session = sessionmaker(bind=self.engine, autoflush=True)
		session = Session()
		session.add(data)
		session.commit()
		session.close()
Example No. 33
    if fields:
        fields = [i for i in fields.split(',') if i in valid_fields]
    else:
        fields = valid_fields

    comments = IssueWrapper().comments(id, fields, offset, limit)

    response = json.dumps(comments)
    return response

class Development(object):
    DEBUG=True

if __name__ == '__main__':
    connstring = os.environ.get('DATABASE_URL')
    if connstring is None:
        print('Setup the DATABASE_URL variable')
        exit(1)

    db = sa.create_engine(connstring, echo=True)
    Session = sessionmaker(bind=db)
    Base.metadata.create_all(db)

    IssueWrapper(Session)
    EventWrapper(Session)
    UserWrapper(Session)
    
    app.config.from_object(Development)

    app.run()
Example No. 34
import datetime

from sqlalchemy import create_engine, Column, Integer, String, Text, DateTime, ForeignKey
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base

from flask_login import UserMixin

from . import app

engine = create_engine(app.config["SQLALCHEMY_DATABASE_URI"])
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()


class Entry(Base):
    __tablename__ = "entries"

    id = Column(Integer, primary_key=True)
    title = Column(String(1024))
    content = Column(Text)
    datetime = Column(DateTime, default=datetime.datetime.now)
    author_id = Column(Integer, ForeignKey('users.id'))


class User(Base, UserMixin):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    name = Column(String(128))
Example No. 35
def showRestaurantsJSON():
    """API endpoint for displaying the restaurants."""
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    restaurants = session.query(Restaurant).all()
    return jsonify(Restaurants=[i.serialize for i in restaurants])
Example No. 36
def getUserInfo(user_id):
    """Fetch stored info on user."""
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    user = session.query(User).filter_by(id=user_id).one()
    return user
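`.one()` raises NoResultFound when no row matches. Where a missing user is an expected case, `one_or_none()` is the usual alternative; a hedged variant of the helper above (not part of the original):

def getUserInfoOrNone(user_id):
    """Hypothetical variant: return the user, or None if the id is unknown."""
    session = sessionmaker(bind=engine)()
    try:
        return session.query(User).filter_by(id=user_id).one_or_none()
    finally:
        session.close()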
Example No. 37
from alembic.migration import MigrationContext
from alembic.operations import Operations
import sqlalchemy.exc
from sqlalchemy.ext.declarative import declarative_base as sa_declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.orm.attributes import InstrumentedAttribute
from zope.sqlalchemy import ZopeTransactionExtension


# Ignore SQLAlchemy unicode warnings
warnings.filterwarnings('ignore', '^Unicode type received non-unicode bind param value', sqlalchemy.exc.SAWarning)  # NOQA


DBSession = scoped_session(sessionmaker(
    extension=ZopeTransactionExtension(),
    expire_on_commit=False))


class BaseClass(object):

    def __init__(self, **kwargs):
        cls = type(self)
        for k, v in kwargs.items():
            if not hasattr(cls, k):
                continue

            clsattr = type(getattr(cls, k))
            if issubclass(clsattr, InstrumentedAttribute):
                setattr(self, k, v)
Example No. 38

class hoeren(Base):
    __tablename__ = 'hoeren'
    MatrNr = Column(Integer, ForeignKey('Studenten.MatrNr'), primary_key=True)
    VorlNr = Column(Integer,
                    ForeignKey('Vorlesungen.VorlNr'),
                    primary_key=True)
    Student = relationship("Studenten")
    Vorlesung = relationship("Vorlesungen")


# Create the engine for database access
engine = create_engine("mysql://*****:*****@localhost/uni")
#c = engine.connect()
Session = sessionmaker()
Session.configure(bind=engine)
session = Session()

# new_stud = Studenten(MatrNr = '111', Name='Albert Greinöcker', Semester=30)
# session.add(new_stud)
# session.commit()

erg = session.query(Studenten).all()
#print(erg)
for e in erg:
    print(e.MatrNr)

erg = session.query(Professoren).filter_by(Name="Sokrates").all()
#print(erg)
for e in erg:
Example No. 39
# Need our custom types, but don't import anything else from model
from galaxy.model.custom_types import JSONType
from galaxy.util.bunch import Bunch

now = datetime.datetime.utcnow
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
format = "%(name)s %(levelname)s %(asctime)s %(message)s"
formatter = logging.Formatter(format)
handler.setFormatter(formatter)
log.addHandler(handler)

metadata = MetaData()
context = scoped_session(sessionmaker(autoflush=False, autocommit=True))


class DeferredJob(object):
    states = Bunch(NEW='new',
                   WAITING='waiting',
                   QUEUED='queued',
                   RUNNING='running',
                   OK='ok',
                   ERROR='error')

    def __init__(self, state=None, plugin=None, params=None):
        self.state = state
        self.plugin = plugin
        self.params = params
Example No. 40
def main():
    lobj = Logger(sys.argv[0])
    logger = lobj.get()

    cdb = conf_opts['settings']['cache']

    parser = argparse.ArgumentParser(
        description="isabella-users-frontend update users DB")
    parser.add_argument('-d',
                        required=False,
                        help='SQLite DB file',
                        dest='sql')
    parser.add_argument('-v',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Verbose',
                        dest='verbose')
    args = parser.parse_args()

    if args.sql:
        cdb = args.sql

    engine = create_engine('sqlite:///%s' % cdb, echo=args.verbose)

    Session = sessionmaker()
    Session.configure(bind=engine)
    session = Session()

    usertool = UserUtils(logger)

    # fetch projects feed data as it is needed for email extraction
    projects = fetch_projects(conf_opts['external']['subscription'], logger)

    if not projects:
        logger.error('Could not fetch projects and users')
        raise SystemExit(1)

    # create /home and /shared directories for user
    not_home = session.query(User).filter(User.ishomecreated == False).all()
    for u in not_home:
        if (os.path.exists(u.homedir)
                and os.path.exists(conf_opts['settings']['sharedpath'])):
            rh, rs = True, True
        else:
            rh = create_homedir(u.homedir, u.uid, u.gid, logger)
            sharedpath = conf_opts['settings']['sharedpath']
            rs = create_shareddir(sharedpath + u.username, u.uid, u.gid,
                                  logger)
        if all([rh, rs]):
            u.ishomecreated = True
            logger.info('Created directories for %s' % u.username)
    session.commit()

    # add users to SGE projects
    # for new users projects and last_projects field are the same so we pick
    # values from any of them.
    not_sge = session.query(User).filter(User.issgeadded == False).all()
    for u in not_sge:
        for project in u.last_projects.split():
            sgecreateuser_cmd = conf_opts['settings']['sgecreateuser']
            try:
                os.chdir(os.path.dirname(sgecreateuser_cmd))
                subprocess.check_call('{0} {1} {2}'.format(
                    sgecreateuser_cmd, u.username, project.strip()),
                                      shell=True,
                                      bufsize=512)
                u.issgeadded = True
                logger.info('User %s added in SGE project %s' %
                            (u.username, project.strip()))

            except Exception as e:
                logger.error('Failed adding user %s to SGE: %s' %
                             (u.username, str(e)))
    session.commit()

    # update SGE projects for users
    # for existing user that is assigned to new project or signed off the
    # existing project, projects and last_projects field differ. based on their
    # values, it will be concluded what needs to be done and projects field
    # will be updated to match last_projects field afterward.
    # also if user was previously disabled, but now is back with new project,
    # update his shell record with one from /etc/passwd
    update_sge = session.query(User).filter(
        User.projects != User.last_projects).all()
    for u in update_sge:
        diff = diff_projects(u.projects, u.last_projects)

        if diff['rem']:
            for project in diff['rem'].split():
                logger.info('User %s sign off from SGE project %s' %
                            (u.username, project.strip()))
                sgeremoveuser_cmd = conf_opts['settings']['sgeremoveuser']
                try:
                    os.chdir(os.path.dirname(sgeremoveuser_cmd))
                    subprocess.check_call('{0} {1} {2}'.format(
                        sgeremoveuser_cmd, u.username, project.strip()),
                                          shell=True,
                                          bufsize=512)
                    logger.info('User %s removed from SGE project ACL %s' %
                                (u.username, project.strip()))

                except Exception as e:
                    logger.error('Failed removing of user %s from SGE: %s' %
                                 (u.username, str(e)))

        if diff['add']:
            for project in diff['add'].split():
                sgecreateuser_cmd = conf_opts['settings']['sgecreateuser']
                try:
                    os.chdir(os.path.dirname(sgecreateuser_cmd))
                    subprocess.check_call('{0} {1} {2}'.format(
                        sgecreateuser_cmd, u.username, project.strip()),
                                          shell=True,
                                          bufsize=512)
                    logger.info('User %s updated to SGE project %s' %
                                (u.username, project.strip()))

                except Exception as e:
                    logger.error('Failed updating user %s to SGE: %s' %
                                 (u.username, str(e)))

            # user was previously disabled and now is back with new project
            if u.shell == '/sbin/nologin':
                userobj = usertool.get_user(u.username)
                shell = usertool.get_user_shell(userobj)
                u.shell = shell
                logger.info(
                    'User %s was disabled, recording new shell settings: %s' %
                    (u.username, shell))

        # this one is called to explicitly set SGE default_project to user's
        # last_project assigned
        if diff['last'] not in diff['add']:
            sgecreateuser_cmd = conf_opts['settings']['sgecreateuser']
            try:
                os.chdir(os.path.dirname(sgecreateuser_cmd))
                subprocess.check_call('{0} {1} {2}'.format(
                    sgecreateuser_cmd, u.username, diff['last'].strip()),
                                      shell=True,
                                      bufsize=512)
                logger.info(
                    'User %s SGE default_project explicitly set to %s' %
                    (u.username, diff['last'].strip()))

            except Exception as e:
                logger.error(
                    'Failed setting SGE default_project for %s to %s' %
                    (u.username, str(e)))

        u.projects = u.last_projects
    session.commit()

    # set password for opened user accounts
    not_password = session.query(User).filter(
        User.ispasswordset == False).all()
    for u in not_password:
        password = gen_password()
        u.password = password
        usertool.set_user_pass(usertool.get_user(u.username), password)
        u.ispasswordset = True
    session.commit()

    # send email to user whose account is opened
    not_email = session.query(User).filter(User.issentemail == False).all()
    for u in not_email:
        templatepath = conf_opts['external']['emailtemplate']
        templatehtml = conf_opts['external']['emailhtml']
        smtpserver = conf_opts['external']['emailsmtp']
        emailfrom = conf_opts['external']['emailfrom']
        emailto = extract_email(projects, u.name, u.surname, u.last_projects,
                                logger)
        u.email = emailto

        e = InfoAccOpen(templatepath, templatehtml, smtpserver, emailfrom,
                        emailto, u.username, u.password, logger)
        r = e.send()
        if r:
            u.issentemail = True
            logger.info('Mail sent for %s' % u.username)
    session.commit()

    # subscribe opened user account to mailing list. skip closed accounts that
    # were previously unsubscribed with /sbin/nologin explicitly set in
    # close-accounts.py
    not_subscribed = session.query(User).filter(
        User.issubscribe == False).all()
    for u in not_subscribed:
        if u.shell == '/sbin/nologin':
            continue
        credentials = conf_opts['external']['mailinglistcredentials']
        listname = conf_opts['external']['mailinglistname']
        listserver = conf_opts['external']['mailinglistserver']
        if u.email:
            r = subscribe_maillist(listserver, credentials, listname, u.email,
                                   u.username, logger)
            if r:
                u.issubscribe = True
                logger.info('User %s subscribed to %s' %
                            (u.username, listname))
        else:
            logger.error(
                'Email for user %s unknown, not trying to subscribe to mailinglist'
                % u.username)

    session.commit()
Example No. 41
from os import getenv
from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

load_dotenv()

engine = create_engine(getenv("DATABASE_URL"), connect_args={"check_same_thread": False})
SessionLocal = sessionmaker(autocommit=True, autoflush=True, bind=engine)
Base = declarative_base()
Example No. 42
import logging

# Imports required by this snippet (missing from the original excerpt)
from sqlalchemy import (create_engine, Column, ForeignKey, Integer, MetaData,
                        String, Table)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
Base = declarative_base()


class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    fullname = Column(String)


test_engine = create_engine('sqlite:///:memory:', echo=True)
Session = sessionmaker(bind=test_engine)

session = Session()

metadata = MetaData()
users = Table('users', metadata,
    Column('id', Integer, primary_key=True),
    Column('name', String),
    Column('fullname', String),
)

addresses = Table('addresses', metadata,
    Column('id', Integer, primary_key=True),
    Column('user_id', None, ForeignKey('users.id')),
    Column('email_address', String, nullable=False)
)
Example No. 43
 def __init__(self):
     engine = db_connect()
     create_table(engine)
     self.Session = sessionmaker(bind=engine)
     logger.info("****GoogleApisCrawlerPipeline: database connected****")
Example No. 44
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, DateTime
from sqlalchemy.dialects.postgresql import JSONB

SQLALCHEMY_DATABASE_URL = "postgresql://postgres:@127.0.0.1:5432/ub"

engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

Base = declarative_base()


class Role(Base):
    __tablename__ = "ub_role"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)


class Link(Base):
    __tablename__ = "ub_link"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    discription = Column(String)
    button_label = Column(String)
    url = Column(String)
    title = Column(String)
    img_url = Column(String)

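Factories named SessionLocal in this style are usually handed out per request; a minimal sketch of that pattern, assuming the SessionLocal defined above:

def get_db():
    # Open a session for one request and guarantee it is closed afterwards.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()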
Example No. 45
    if islocal:
        upload_to_local(pandasdf, s3_filepath)
    else:
        upload_to_s3(pandasdf, region, bucket_name, s3_filepath)


properties = {"user": "******", "password": "******"}
uidTableName = "uid_creatTime"
acidTableName = "account_creatTime"
payTableName = "account_payTime"
url = "jdbc:mysql://54.223.192.252:3306/test"

Base = declarative_base()
dbname = 'mysql+mysqlconnector://test1:[email protected]:3306/test'
engine = create_engine(dbname, echo=False)
DBSession = scoped_session(sessionmaker(bind=engine))


class LTVByGid(Base):
    __tablename__ = "ltv_byGid"
    id = Column(Integer, primary_key=True)
    gid = Column(String(255))
    creat_time = Column(String(255))
    people_count = Column(String(255))
    days = Column(String(255))
    consume = Column(Float, nullable=True)


class LTVBySid(Base):
    __tablename__ = "ltv_bySid"
    id = Column(Integer, primary_key=True)
Example No. 46
 def open_spider(self, spider):
     Session = sessionmaker(bind=engine)
     self.session = Session()
Example No. 47
from sqlalchemy.orm import sessionmaker
from data_updater.models import server as server_models
from datetime import datetime, timedelta
from time import mktime

# These functions periodically update the rankings.
# Only unfinished rankings are selected for updating (strictly speaking, with some time lag).

Session = sessionmaker(bind=server_models.engine)
session = Session()


def get_submissions_group(submissions):
    """
    Group all submissions by user.
    :param submissions: a query set; it must already be sorted.
    :return: a dict whose keys are user_ids and whose values are lists of submissions.
    """
    ret = dict()
    user_id = None
    group = []
    for submission in submissions:
        if user_id is None:  # first submission
            user_id = submission.user_id
        elif user_id != submission.user_id:  # hit a group boundary
            ret[user_id] = group
            group = []
            user_id = submission.user_id
        group.append(submission)
    if len(group) > 0 and user_id is not None:
        ret[user_id] = group
Example No. 48
    def setUpSession(self):

        DBSession = sessionmaker()
        DBSession.configure(bind=DBHandler.engine)
        return DBSession()
Example No. 49
"""Base clase for our db Models."""

from sqlalchemy import create_engine
from sqlalchemy.orm import (scoped_session, sessionmaker)
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('sqlite:///database.sqlite3', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))

BaseModel = declarative_base()
# We will need this for querying
BaseModel.query = db_session.query_property()
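Since scoped_session keeps one session per thread, long-running web apps normally remove it at the end of each request. A sketch for a Flask-style teardown, assuming an `app` object exists in the application module:

@app.teardown_appcontext
def shutdown_session(exception=None):
    # Return the thread-local session to the registry so the next request starts clean.
    db_session.remove()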
Example No. 50
 def get_local_session(self):
     return sessionmaker(autocommit=False, autoflush=False, bind=self.engine)()
Example No. 51
from libs import variables, logs
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()
MANUAL_ENGINE_POSTGRES = create_engine(variables.DATABASE_URL,
                                       pool_size=30,
                                       max_overflow=0)
Base.metadata.bind = MANUAL_ENGINE_POSTGRES
dbSession_postgres = sessionmaker(bind=MANUAL_ENGINE_POSTGRES)
session_postgres = dbSession_postgres()

# init platform event table


def insertTimeTravel(uid_, CurrentTime, DestinationTime, Customer_Number__c):
    sqlRequest = """
        insert into public.time_travel(id, "Current_Time", "Destination_Time", "Customer_Number") values (%(id)s, %(CurrentTime)s,  %(DestinationTime)s, %(Customer_Number__c)s)
    """
    MANUAL_ENGINE_POSTGRES.execute(
        sqlRequest, {
            'id': uid_,
            'CurrentTime': CurrentTime,
            'DestinationTime': DestinationTime,
            'Customer_Number__c': Customer_Number__c
        })


def initPETable():
    try:
Example No. 52
def setup_func():
    global SESSION
    engine = create_engine('sqlite:///')
    Friend.metadata.create_all(engine)
    User.metadata.create_all(engine)
    SESSION = sessionmaker(bind=engine)
Example No. 53
File: meta.py Project: xbian/ckan
            del session._object_cache

    def after_rollback(self, session):
        if hasattr(session, '_object_cache'):
            del session._object_cache

# __all__ = ['Session', 'engine', 'metadata', 'mapper']

# SQLAlchemy database engine. Updated by model.init_model()
engine = None

Session = orm.scoped_session(orm.sessionmaker(
    autoflush=False,
    autocommit=False,
    expire_on_commit=False,
    extension=[CkanCacheExtension(),
               CkanSessionExtension(),
               extension.PluginSessionExtension(),
               activity.DatasetActivitySessionExtension()],
))

create_local_session = orm.sessionmaker(
    autoflush=False,
    autocommit=False,
    expire_on_commit=False,
    extension=[CkanCacheExtension(),
               CkanSessionExtension(),
               extension.PluginSessionExtension(),
               activity.DatasetActivitySessionExtension()],
)
Example No. 54
from flask import Flask, request, Response, render_template
from sqlalchemy import create_engine, ForeignKey, Column, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.pool import NullPool
from viberbot import Api
from viberbot.api.bot_configuration import BotConfiguration
from viberbot.api.viber_requests import ViberMessageRequest, ViberConversationStartedRequest
from viberbot.api.messages import (
    TextMessage
)

#engine = create_engine('sqlite:///mydb.db', echo=True)
engine = create_engine('postgres://*****:*****@ec2-54-195-247-108.eu-west-1.compute.amazonaws.com:5432/d8ug54bgore9n7', echo=True)
Base = declarative_base()
Session = sessionmaker(engine)


class Users(Base):
    __tablename__ = 'users'
    user_id = Column(Integer, primary_key=True)
    viber_id = Column(String, nullable=False, unique=True)
    all_answers = Column(Integer, nullable=False, default=0)
    correct_answers = Column(Integer, nullable=False, default=0)
    question = Column(String)
    dt_last_answer = Column(DateTime)
    words = relationship('Learning', back_populates='users')


class Learning(Base):
    __tablename__ = 'learning'
Example #55
0
def test_getting_a_session_and_binding_later():
    engine = create_engine('sqlite:///:memory:')
    schema.create_all(engine)
    Session = sessionmaker(autoflush=True, transactional=True)
    Session.configure(bind=engine)
    unused_session = Session()
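`transactional=True` is a pre-0.5 sessionmaker keyword (superseded by `autocommit=False`, which later became the default); the bind-after-construction pattern itself is still current. A modernized sketch of the same test:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

def test_bind_after_construction():
    engine = create_engine('sqlite:///:memory:')
    Session = sessionmaker(autoflush=True)  # autocommit defaults to False
    Session.configure(bind=engine)          # supply the bind after the fact
    session = Session()
    assert session.get_bind() is engine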
Example #56
0
    def getSession(self, engine):
        # cache the session so repeated calls reuse the same one; the
        # original returned an unbound local when self.session already existed
        if self.session is None:
            Session = sessionmaker(bind=engine)
            self.session = Session()

        return self.session
Example #57
0
def setup_database_engine(db_uri):
    engine = create_engine(db_uri)
    Session = sessionmaker(engine)
    bot.Session = scoped_session(Session)
Example #58
0
def session_from_new_db():
    engine = create_engine('sqlite:///:memory:')
    schema.create_all(engine)
    Session = sessionmaker(bind=engine, autoflush=True, transactional=True)
    return Session()
Example #59
0
def make_session_maker(home: str) -> scoped_session:
    db_path = os.path.join(home, 'svs.sqlite')
    engine = create_engine('sqlite:///{}'.format(db_path))
    maker = sessionmaker(bind=engine)
    return scoped_session(maker)
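Hypothetical usage of the factory above; within one thread the scoped registry keeps handing back the same Session until it is removed:

import tempfile

maker = make_session_maker(tempfile.mkdtemp())
session = maker()           # first call creates the thread-local Session
assert session is maker()   # later calls in this thread return the same one
maker.remove()              # close it when the unit of work is done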
Example #60
0
def load_workouts():
    # TODO: Autoupdate athlete stats in case of PR's
    # ADD COEFFICIENT FINDERS

    # read csv
    print("importing workouts...")
    df = pd.read_csv('data/workouts.csv').fillna(0.0)
    df.name = df.name.str.upper()

    print("loading {} athletes...".format(len(df.name.unique())))
    Session = sessionmaker()
    Session.configure(bind=engine)
    session = Session()

    # find all current athletes in DB
    athletes = db_query.query("SELECT * FROM athletes")

    # check that workouts aren't from unknown athletes
    for item in df.name.unique().tolist():
        if item not in athletes.name.unique().tolist():
            raise ValueError("unknown athlete: {}".format(item))

    athlete_list = df.name.unique().tolist()

    # iterate through athletes to load data for future calculation and table
    # insertion
    for athlete in athlete_list:
        stats = athletes[athletes['name'] == athlete]

        weight = stats['weight']
        height = stats['height']
        shoulder_height = stats['shoulder_height']
        arm_length = stats['arm_length']
        leg_length = stats['leg_length']
        upper_leg_length = stats['upper_leg_length']
        lower_leg_length = stats['lower_leg_length']
        back_squat = stats['back_squat']
        front_squat = stats['front_squat']
        overhead_squat = stats['overhead_squat']
        snatch = stats['snatch']
        clean = stats['clean']
        jerk = stats['jerk']
        bench = stats['bench']
        mile_run_seconds = stats['mile_run_seconds']
        row_500_time = stats['row_500_time']

        # iterate through this athlete's workouts; iterrows() yields
        # (label, Series) pairs, so `index` below is the row itself
        for _, index in df[df['name'] == athlete].iterrows():
            try:
                name = str(athlete)
                workout_type = str(index['workout_type'])
                date = pd.to_datetime(index['date'])
                workout_length_seconds = int(index['workout_length_seconds'])
                created_at = datetime.datetime.now()
                pull_up_number = int(index['pull_up'])
                push_up_number = int(index['push_up'])
                burpie_number = int(index['burpie'])
                double_under_number = int(index['double_under'])
                run_dist_meters = int(index['run_dist_meters'])
                deadlift_number = int(index['deadlift'])
                deadlift_weight = int(index['deadlift_weight'])
                box_jump_number = int(index['box_jump'])
                box_jump_height = int(index['box_jump_height'])
                air_squat_number = int(index['air_squat'])
                handstand_push_up_number = int(index['handstand_push_up'])
                wall_ball_number = int(index['wall_ball'])
                wall_ball_weight = int(index['wall_ball_weight'])
                kettle_bell_swing_number = int(index['kettle_bell_swing'])
                kettle_bell_swing_weight = int(
                    index['kettle_bell_swing_weight'])
                russian_kettle_bell_swing_number = int(
                    index['russian_kettle_bell_swing'])
                russian_kettle_bell_swing_weight = int(
                    index['russian_kettle_bell_swing_weight'])
                thruster_number = int(index['thruster'])
                thruster_weight = int(index['thruster_weight'])
                row_dist_meters = int(index['row_dist_meters'])
                row_calories = int(index['row_calories'])
                back_squat_number = int(index['back_squat'])
                back_squat_weight = int(index['back_squat_weight'])
                muscle_up_number = int(index['muscle_up'])
                push_press_number = int(index['push_press'])
                push_press_weight = int(index['push_press_weight'])
                overhead_squat_number = int(index['overhead_squat'])
                overhead_squat_weight = int(index['overhead_squat_weight'])
                back_extension_number = int(index['back_extension'])
                GHD_sit_up_number = int(index['GHD_sit_up'])
                press_number = int(index['press'])
                press_weight = int(index['press_weight'])
                abmat_sit_up_number = int(index['abmat_sit_up'])
                front_squat_number = int(index['front_squat'])
                front_squat_weight = int(index['front_squat_weight'])
                rope_climb_number = int(index['rope_climb'])
                ring_dip_number = int(index['ring_dip'])
                walking_lunge_number = int(index['walking_lunge'])
                knees_to_elbows_number = int(index['knees_to_elbows'])
                bench_press_number = int(index['bench_press'])
                bench_press_weight = int(index['bench_press_weight'])
                push_jerk_number = int(index['push_jerk'])
                push_jerk_weight = int(index['push_jerk_weight'])
                clean_number = int(index['clean'])
                clean_weight = int(index['clean_weight'])
                power_clean_number = int(index['power_clean'])
                power_clean_weight = int(index['power_clean_weight'])
                jerk_number = int(index['jerk'])
                jerk_weight = int(index['jerk_weight'])
                sumo_dead_lift_number = int(index['sumo_dead_lift'])
                sumo_dead_lift_weight = int(index['sumo_dead_lift_weight'])
                cycling_avg_watts = int(index['cycling_avg_watts'])
                snatch_number = int(index['snatch'])
                snatch_weight = int(index['snatch_weight'])
                power_snatch_number = int(index['power_snatch'])
                power_snatch_weight = int(index['power_snatch_weight'])
                total_reps = int(index[[
                    'pull_up', 'push_up', 'burpie', 'double_under',
                    'run_dist_meters', 'deadlift', 'box_jump', 'air_squat',
                    'handstand_push_up', 'wall_ball', 'kettle_bell_swing',
                    'russian_kettle_bell_swing', 'thruster', 'row_dist_meters',
                    'row_calories', 'back_squat', 'muscle_up', 'push_press',
                    'overhead_squat', 'back_extension', 'GHD_sit_up', 'press',
                    'abmat_sit_up', 'front_squat', 'rope_climb', 'ring_dip',
                    'walking_lunge', 'knees_to_elbows', 'bench_press',
                    'push_jerk', 'clean', 'power_clean', 'jerk',
                    'sumo_dead_lift', 'cycling_avg_watts', 'snatch',
                    'power_snatch'
                ]].sum())

                # workout joules calculation
                pull_up_joules = pull_up_calc(weight, arm_length,
                                              pull_up_number)
                push_up_joules = push_up_calc(weight, arm_length,
                                              push_up_number)
                burpie_joules = burpie_calc(weight, height, burpie_number)
                double_under_joules = double_under_calc(
                    weight, height, double_under_number)
                run_dist_meters_joules = run_dist_meters_calc(
                    weight, run_dist_meters)
                deadlift_joules = dead_lift_calc(weight, shoulder_height,
                                                 height, arm_length,
                                                 deadlift_weight,
                                                 deadlift_number)
                box_jump_joules = box_jump_calc(weight, box_jump_height,
                                                box_jump_number)
                air_squat_joules = air_squat_calc(weight, upper_leg_length,
                                                  air_squat_number)
                handstand_push_up_joules = handstand_push_up_calc(
                    weight, height, arm_length, shoulder_height,
                    handstand_push_up_number)
                wall_ball_joules = wall_ball_calc(weight, upper_leg_length,
                                                  shoulder_height,
                                                  wall_ball_weight,
                                                  wall_ball_number)
                kettle_bell_swing_joules = kettle_bell_calc(
                    kettle_bell_swing_weight, arm_length,
                    kettle_bell_swing_number)
                russian_kettle_bell_swing_joules = russian_kettle_bell_calc(
                    russian_kettle_bell_swing_weight, arm_length,
                    russian_kettle_bell_swing_number)
                thruster_joules = thruster_calc(weight, upper_leg_length,
                                                shoulder_height, arm_length,
                                                thruster_weight,
                                                thruster_number)
                row_dist_meters_joules = row_distance_calc(
                    row_500_time, row_dist_meters, height, weight)
                row_calories_joules = row_calories_calc(
                    row_calories, height, weight)
                back_squat_joules = back_squat_calc(weight, upper_leg_length,
                                                    back_squat_weight,
                                                    back_squat_number)
                muscle_up_joules = muscle_up_calc(weight, arm_length,
                                                  muscle_up_number)
                push_press_joules = push_press_calc(weight, push_press_weight,
                                                    arm_length,
                                                    upper_leg_length,
                                                    push_press_number)
                overhead_squat_joules = overhead_squat_calc(
                    weight, upper_leg_length, overhead_squat_weight,
                    overhead_squat_number)
                back_extension_joules = back_extension_calc(
                    weight, shoulder_height, leg_length, back_extension_number)
                GHD_sit_up_joules = GHD_sit_up_calc(weight, shoulder_height,
                                                    leg_length,
                                                    GHD_sit_up_number)
                press_joules = press_calc(press_weight, arm_length,
                                          press_number)
                abmat_sit_up_joules = sit_up_calc(weight, shoulder_height,
                                                  leg_length,
                                                  abmat_sit_up_number)
                front_squat_joules = front_squat_calc(weight, upper_leg_length,
                                                      front_squat_weight,
                                                      front_squat_number)
                rope_climb_joules = rope_climb_calc(weight, rope_climb_number)
                ring_dip_joules = ring_dip_calc(weight, arm_length,
                                                ring_dip_number)
                walking_lunge_joules = walking_lunge_calc(
                    weight, upper_leg_length, walking_lunge_number)
                knees_to_elbows_joules = knees_to_elbows_calc(
                    weight, shoulder_height, leg_length,
                    knees_to_elbows_number)
                bench_press_joules = bench_press_calc(bench_press_weight,
                                                      arm_length,
                                                      bench_press_number)
                push_jerk_joules = push_jerk_calc(weight, push_jerk_weight,
                                                  arm_length, upper_leg_length,
                                                  push_jerk_number)
                clean_joules = clean_calc(weight, shoulder_height, height,
                                          arm_length, clean_weight,
                                          upper_leg_length, clean_number)
                power_clean_joules = power_clean_calc(weight, shoulder_height,
                                                      height, arm_length,
                                                      power_clean_weight,
                                                      upper_leg_length,
                                                      power_clean_number)
                jerk_joules = jerk_calc(weight, jerk_weight, arm_length,
                                        upper_leg_length, jerk_number)
                sumo_dead_lift_joules = sumo_dead_lift_calc(
                    weight, shoulder_height, height, arm_length,
                    sumo_dead_lift_weight, sumo_dead_lift_number)
                cycling_avg_watts_joules = cycling_avg_watts_calc(
                    cycling_avg_watts)
                snatch_joules = snatch_calc(weight, upper_leg_length, height,
                                            arm_length, snatch_weight,
                                            snatch_number)
                power_snatch_joules = power_snatch_calc(
                    weight, upper_leg_length, height, arm_length,
                    power_snatch_weight, power_snatch_number)
            except ValueError:
                # a malformed CSV cell; re-raise with the full traceback
                raise

            # Calculate Joules expended in the workout:
            joules_total = np.sum(
                np.array([
                    pull_up_joules, push_up_joules, burpie_joules,
                    double_under_joules, run_dist_meters_joules,
                    deadlift_joules, box_jump_joules, air_squat_joules,
                    handstand_push_up_joules, wall_ball_joules,
                    kettle_bell_swing_joules, russian_kettle_bell_swing_joules,
                    thruster_joules, row_dist_meters_joules,
                    row_calories_joules, back_squat_joules, muscle_up_joules,
                    push_press_joules, overhead_squat_joules,
                    back_extension_joules, GHD_sit_up_joules, press_joules,
                    abmat_sit_up_joules, front_squat_joules, rope_climb_joules,
                    ring_dip_joules, walking_lunge_joules,
                    knees_to_elbows_joules, bench_press_joules,
                    push_jerk_joules, clean_joules, power_clean_joules,
                    jerk_joules, sumo_dead_lift_joules,
                    cycling_avg_watts_joules, snatch_joules,
                    power_snatch_joules
                ]))
            try:
                insert = Workout(
                    name=athlete,
                    workout_type=workout_type,
                    date=date,
                    workout_length_seconds=workout_length_seconds,
                    created_at=created_at,
                    total_reps=total_reps,
                    joules=int(joules_total),
                    pull_up=pull_up_number,
                    push_up=push_up_number,
                    burpie=burpie_number,
                    double_under=double_under_number,
                    run_dist_meters=run_dist_meters,
                    deadlift=deadlift_number,
                    deadlift_weight=deadlift_weight,
                    box_jump=box_jump_number,
                    box_jump_height=box_jump_height,
                    air_squat=air_squat_number,
                    handstand_push_up=handstand_push_up_number,
                    wall_ball=wall_ball_number,
                    wall_ball_weight=wall_ball_weight,
                    kettle_bell_swing=kettle_bell_swing_number,
                    kettle_bell_swing_weight=kettle_bell_swing_weight,
                    russian_kettle_bell_swing=russian_kettle_bell_swing_number,
                    russian_kettle_bell_swing_weight=
                    russian_kettle_bell_swing_weight,
                    thruster=thruster_number,
                    thruster_weight=thruster_weight,
                    row_dist_meters=row_dist_meters,
                    row_calories=row_calories,
                    back_squat=back_squat_number,
                    back_squat_weight=back_squat_weight,
                    muscle_up=muscle_up_number,
                    push_press=push_press_number,
                    push_press_weight=push_press_weight,
                    overhead_squat=overhead_squat_number,
                    overhead_squat_weight=overhead_squat_weight,
                    back_extension=back_extension_number,
                    GHD_sit_up=GHD_sit_up_number,
                    press=press_number,
                    press_weight=press_weight,
                    abmat_sit_up=abmat_sit_up_number,
                    front_squat=front_squat_number,
                    front_squat_weight=front_squat_weight,
                    rope_climb=rope_climb_number,
                    ring_dip=ring_dip_number,
                    walking_lunge=walking_lunge_number,
                    knees_to_elbows=knees_to_elbows_number,
                    bench_press=bench_press_number,
                    bench_press_weight=bench_press_weight,
                    push_jerk=push_jerk_number,
                    push_jerk_weight=push_jerk_weight,
                    clean=clean_number,
                    clean_weight=clean_weight,
                    power_clean=power_clean_number,
                    power_clean_weight=power_clean_weight,
                    jerk=jerk_number,
                    jerk_weight=jerk_weight,
                    sumo_dead_lift=sumo_dead_lift_number,
                    sumo_dead_lift_weight=sumo_dead_lift_weight,
                    cycling_avg_watts=cycling_avg_watts,
                    snatch=snatch_number,
                    snatch_weight=snatch_weight,
                    power_snatch=power_snatch_number,
                    power_snatch_weight=power_snatch_weight)
            except (ValueError, TypeError):
                raise ValueError("please fill out the form correctly!")
            try:
                session.add(insert)
                session.flush()
            except IntegrityError:
                # duplicate workout row; skip it and keep loading
                session.rollback()
        session.commit()