Example No. 1
    def test_report_primary_error_when_rollback_fails(self):
        User, users = self.classes.User, self.tables.users

        mapper(User, users)

        session = Session(testing.db)

        with expect_warnings(".*during handling of a previous exception.*"):
            session.begin_nested()
            savepoint = session.\
                connection()._Connection__transaction._savepoint

            # force the savepoint to disappear
            session.connection().dialect.do_release_savepoint(
                session.connection(), savepoint
            )

            # now do a broken flush
            session.add_all([User(id=1), User(id=1)])

            assert_raises_message(
                sa_exc.DBAPIError,
                "ROLLBACK TO SAVEPOINT ",
                session.flush
            )
Example No. 2
class DataStore(object):
  def __init__(self):
    engine = create_engine("sqlite:///tweet.db", echo=False)
     
    metadata = BASE.metadata
    metadata.create_all(engine)

    Session = sessionmaker(bind=engine)
    self.session = Session()

  def __enter__(self):
    return self

  def __exit__(self, type, value, traceback):
    self.session.close()

  def insert_tweet(self, tweet):
    self.session.add(tweet)
    self.session.commit()

  def insert_tweets(self, tweets):
    for tweet in tweets:
      self.insert_tweet(tweet)

  def get_all_tweets(self):
    raw_list = self.session.query(
        Tweet.id, Tweet.tweet_id, func.count(Tweet.tweet_id), Tweet.message,
        Tweet.followers, Tweet.user_handle, Tweet.sentiment
    ).group_by(Tweet.tweet_id).order_by(Tweet.sentiment.desc()).all()

    # raw_list is a list of tuples, so convert each row to a dictionary so
    # that calling code can refer to attributes rather than indexes.
    # NOTE: Investigate a better/cleaner way to resolve this.
    result = [dict(zip(['id', 'tweet_id', 'count', 'message', 'followers',
                        'user_handle', 'sentiment'], item))
              for item in raw_list]

    return result
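A minimal usage sketch of the context manager above, assuming the Tweet model mapped against BASE; the column values here are hypothetical:

# __enter__/__exit__ guarantee the session is cleaned up even if an insert fails.
with DataStore() as store:
    store.insert_tweet(Tweet(tweet_id=1, message="hello world", followers=42,
                             user_handle="@example", sentiment=0.8))
    for row in store.get_all_tweets():
        print(row['user_handle'], row['sentiment'])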
Example No. 3
    def get_invalid(self, session: Session) -> List[NamedTuple]:
        """
        The query makes use of the SpatialIndex so we won't have to calculate the
        distance between all connection nodes.

        The query only works on a spatialite and therefore skips postgres.
        """
        if session.bind.name == "postgresql":
            return []

        check_spatial_index = (
            "SELECT CheckSpatialIndex('v2_connection_nodes', 'the_geom')")
        if not session.connection().execute(check_spatial_index).scalar():
            recover_spatial_index = (
                "SELECT RecoverSpatialIndex('v2_connection_nodes', 'the_geom')"
            )
            session.connection().execute(recover_spatial_index).scalar()

        query = text(f"""SELECT *
               FROM v2_connection_nodes AS cn1, v2_connection_nodes AS cn2
               WHERE
                   distance(cn1.the_geom, cn2.the_geom, 1) < :min_distance
                   AND cn1.ROWID != cn2.ROWID
                   AND cn2.ROWID IN (
                     SELECT ROWID
                     FROM SpatialIndex
                     WHERE (
                       f_table_name = "v2_connection_nodes"
                       AND search_frame = Buffer(cn1.the_geom, {self.minimum_distance / 2})));
            """)
        results = (session.connection().execute(
            query, min_distance=self.minimum_distance).fetchall())

        return results
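A hedged sketch of invoking this check; the host class name and its minimum_distance attribute are assumptions inferred from the method's use of self:

# get_invalid() returns [] on postgresql and pairs of too-close
# connection nodes on spatialite.
check = ConnectionNodesDistance(minimum_distance=0.1)  # hypothetical class name
for row in check.get_invalid(session):
    print(row)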
Example No. 4
    def test_report_primary_error_when_rollback_fails(self):
        User, users = self.classes.User, self.tables.users

        mapper(User, users)

        session = Session(testing.db)

        with expect_warnings(".*during handling of a previous exception.*"):
            session.begin_nested()
            savepoint = session.\
                connection()._Connection__transaction._savepoint

            # force the savepoint to disappear
            session.connection().dialect.do_release_savepoint(
                session.connection(), savepoint
            )

            # now do a broken flush
            session.add_all([User(id=1), User(id=1)])

            assert_raises_message(
                sa_exc.DBAPIError,
                "ROLLBACK TO SAVEPOINT ",
                session.flush
            )
Example No. 5
 def test_execution_options_ignored_mid_transaction(self):
     bind = mock.Mock()
     conn = mock.Mock(engine=bind)
     bind.contextual_connect = mock.Mock(return_value=conn)
     sess = Session(bind=bind)
     sess.execute("select 1")
     with expect_warnings("Connection is already established for the "
                          "given bind; execution_options ignored"):
         sess.connection(execution_options={'isolation_level': 'FOO'})
Example No. 6
 def test_execution_options_ignored_mid_transaction(self):
     bind = mock.Mock()
     conn = mock.Mock(engine=bind)
     bind.contextual_connect = mock.Mock(return_value=conn)
     sess = Session(bind=bind)
     sess.execute("select 1")
     with expect_warnings(
             "Connection is already established for the "
             "given bind; execution_options ignored"):
         sess.connection(execution_options={'isolation_level': 'FOO'})
Example No. 7
        def go():
            s = Session(testing.db)
            assert s._transaction is None

            s.connection()

            s.close()
            assert s._transaction is None

            s.connection()
            assert s._transaction is not None

            s.close()
Example No. 8
def _downgrade_endpoint_table_with_copy(meta, migrate_engine):
    # Used with databases that don't support dropping a column (e.g., sqlite).

    orig_endpoint_table = sql.Table('endpoint', meta, autoload=True)
    orig_endpoint_table.deregister()
    orig_endpoint_table.rename('orig_endpoint')

    session = Session(bind=migrate_engine)
    with session.transaction:
        # Need to load the metadata for the service table since it's used as
        # foreign key.
        sql.Table('service',
                  meta,
                  autoload=True,
                  autoload_with=session.connection())

        endpoint_table = sql.Table(
            'endpoint', meta, sql.Column('id',
                                         sql.String(64),
                                         primary_key=True),
            sql.Column('legacy_endpoint_id', sql.String(64)),
            sql.Column('interface', sql.String(8), nullable=False),
            sql.Column('region', sql.String(255)),
            sql.Column('service_id',
                       sql.String(64),
                       sql.ForeignKey('service.id'),
                       nullable=False),
            sql.Column('url', sql.Text(), nullable=False),
            sql.Column('extra', sql.Text()))
        endpoint_table.create(migrate_engine, checkfirst=True)

        orig_endpoint_table = sql.Table('orig_endpoint',
                                        meta,
                                        autoload=True,
                                        autoload_with=session.connection())
        for endpoint in session.query(orig_endpoint_table):
            new_values = {
                'id': endpoint.id,
                'legacy_endpoint_id': endpoint.legacy_endpoint_id,
                'interface': endpoint.interface,
                'region': endpoint.region,
                'service_id': endpoint.service_id,
                'url': endpoint.url,
                'extra': endpoint.extra,
            }
            session.execute(
                'insert into endpoint (id, legacy_endpoint_id, '
                'interface, region, service_id, url, extra) '
                'values ( :id, :legacy_endpoint_id, :interface, '
                ':region, :service_id, :url, :extra);', new_values)
    orig_endpoint_table.drop()
Example No. 9
def _downgrade_endpoint_table_with_copy(meta, migrate_engine):
    # Used with databases that don't support dropping a column (e.g., sqlite).

    orig_endpoint_table = sql.Table(
        'endpoint', meta, autoload=True)
    orig_endpoint_table.deregister()
    orig_endpoint_table.rename('orig_endpoint')

    session = Session(bind=migrate_engine)
    with session.transaction:
        # Need to load the metadata for the service table since it's used as
        # foreign key.
        sql.Table(
            'service', meta, autoload=True,
            autoload_with=session.connection())

        endpoint_table = sql.Table(
            'endpoint',
            meta,
            sql.Column('id', sql.String(64), primary_key=True),
            sql.Column('legacy_endpoint_id', sql.String(64)),
            sql.Column('interface', sql.String(8), nullable=False),
            sql.Column('region', sql.String(255)),
            sql.Column(
                'service_id', sql.String(64),
                sql.ForeignKey('service.id'),
                nullable=False),
            sql.Column('url', sql.Text(), nullable=False),
            sql.Column('extra', sql.Text()))
        endpoint_table.create(migrate_engine, checkfirst=True)

        orig_endpoint_table = sql.Table(
            'orig_endpoint', meta, autoload=True,
            autoload_with=session.connection())
        for endpoint in session.query(orig_endpoint_table):
            new_values = {
                'id': endpoint.id,
                'legacy_endpoint_id': endpoint.legacy_endpoint_id,
                'interface': endpoint.interface,
                'region': endpoint.region,
                'service_id': endpoint.service_id,
                'url': endpoint.url,
                'extra': endpoint.extra,
            }
            session.execute('insert into endpoint (id, legacy_endpoint_id, '
                            'interface, region, service_id, url, extra) '
                            'values ( :id, :legacy_endpoint_id, :interface, '
                            ':region, :service_id, :url, :extra);',
                            new_values)
    orig_endpoint_table.drop()
Example No. 10
    def test_invalidate(self):
        User, users = self.classes.User, self.tables.users
        mapper(User, users)
        sess = Session()
        u = User(name='u1')
        sess.add(u)
        sess.flush()
        c1 = sess.connection(User)

        sess.invalidate()
        assert c1.invalidated

        eq_(sess.query(User).all(), [])
        c2 = sess.connection(User)
        assert not c2.invalidated
Example No. 11
    def test_invalidate(self):
        User, users = self.classes.User, self.tables.users
        mapper(User, users)
        sess = Session()
        u = User(name='u1')
        sess.add(u)
        sess.flush()
        c1 = sess.connection(User)

        sess.invalidate()
        assert c1.invalidated

        eq_(sess.query(User).all(), [])
        c2 = sess.connection(User)
        assert not c2.invalidated
Example No. 12
 def db_connection(self):
     """
     This method open the communication to data base
     """
     try:
         global engine
         engine = create_engine(self.engineUrl)
         print(engine)
         global session
         session = Session(engine)
         session.connection()
         print('connected to db successfully')
     except Exception as exception:
         print(exception)
         print('unable to connect to db')
Example No. 13
def init_session(url: str):
    """Initialise a SQLAlchemy session against a Calibre database."""
    engine = create_engine(url)
    session = Session(engine)
    connection = session.connection()
    connection.connection.create_function("title_sort", 1, title_sort)
    return session
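A sketch of the registered function in use; the database path is hypothetical, while title_sort and the books table come from Calibre's schema:

# The raw DBAPI connection now exposes title_sort() to plain SQL.
session = init_session("sqlite:////path/to/metadata.db")  # hypothetical path
for title, sort_key in session.execute(
        "SELECT title, title_sort(title) FROM books LIMIT 5"):
    print(title, "->", sort_key)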
Example No. 14
    def test_get_bind(self, testcase, expected):
        users, Address, addresses, User = (
            self.tables.users,
            self.classes.Address,
            self.tables.addresses,
            self.classes.User,
        )

        mapper(User, users, properties={"addresses": relationship(Address)})
        mapper(Address, addresses)

        e1 = engines.testing_engine()
        e2 = engines.testing_engine()
        e3 = engines.testing_engine()

        testcase = testing.resolve_lambda(
            testcase,
            User=User,
            Address=Address,
            e1=e1,
            e2=e2,
            e3=e3,
            addresses=addresses,
        )

        sess = Session(e3)
        sess.bind_mapper(User, e1)
        sess.bind_mapper(Address, e2)

        engine = {"e1": e1, "e2": e2, "e3": e3}[expected]
        conn = sess.connection(**testcase)
        is_(conn.engine, engine)

        sess.close()
Example No. 15
def query_current_outcome_results(current_term, engine):
    sql = f"""
            SELECT o_res.id as "_id",
                    o_res.user_id AS "links.user", 
                    o_res.score, o.id AS outcome_id, 
                    c.name AS course_name, 
                    c.id AS course_id, 
                    o.title, 
                    o.calculation_int, 
                    o.display_name, 
                    a.name, 
                    o_res.enrollment_term, 
                    o_res.submitted_or_assessed_at
            FROM outcome_results o_res
                LEFT JOIN courses c ON c.id = o_res.course_id
                LEFT JOIN outcomes o ON o.id = o_res.outcome_id
                LEFT JOIN alignments a ON a.id = o_res.alignment_id
            WHERE o_res.score IS NOT NULL 
                 AND  c.enrollment_term_id = {current_term}
            ORDER BY o_res.submitted_or_assessed_at DESC;
        """
    session = Session(engine)
    conn = session.connection()
    outcome_results = pd.read_sql(sql, conn)
    session.close()

    return outcome_results
Example No. 16
def get_db_courses(engine, current_term=None):
    stmt = Courses.select(Courses.c.enrollment_term_id == current_term)
    session = Session(engine)
    conn = session.connection()
    # fetch rows eagerly: closing the session releases the connection, which
    # would invalidate a lazily-consumed result
    courses = conn.execute(stmt).fetchall()
    session.close()
    return courses
Example No. 17
    def test_savepoints(self):
        metadata = self.metadata

        some_table = Table(
            't', metadata,
            Column('id', Integer, primary_key=True,
                   test_needs_autoincrement=True)
        )

        class SomeClass(object):
            pass

        mapper(SomeClass, some_table)

        metadata.create_all()

        session = Session(testing.db)

        target_strings = session.connection().\
            dialect.identifier_preparer._strings

        with session.transaction:
            @profile_memory(
                assert_no_sessions=False,
                get_num_objects=lambda: len(target_strings))
            def go():

                sc = SomeClass()
                session.add(sc)

                with session.begin_nested():
                    session.query(SomeClass).first()

            go()
Example No. 18
 def _fetch_anomalies(
     self,
     session: Session,
     profile: db.Profile,
     start: datetime,
 ) -> pa.DataFrame:
     end = start + timedelta(seconds=self.seconds)
     if self._wait(end):
         return pa.DataFrame()
     logger.info("Fetching events between %s and %s", start, end)
     query = session.query(
         db.Event.id.label('event_id'),
         db.Event.message_id.label('message_id'),
         db.Event.actor_id.label('actor_id'),
         db.Event.created_at.label('created_at'),
         db.Anomaly.id.label('anomaly_id'),
         db.Anomaly.field_id.label('field_id'),
         db.Anomaly.score.label('score'),
     ).join(
         (db.Anomaly, db.Event.anomalies),
         (db.Actor, db.Event.actor),
         (db.Profile, db.Actor.profile),
     ).filter(
         db.Event.created_at > start,
         db.Event.created_at <= end,
         db.Profile.id == profile.id,
     )
     return pa.read_sql_query(
         query.statement,
         session.connection(),
         parse_dates=['created_at'],
     )
Example No. 19
def load_data(data_uri: str, dataset: SqlaTable, example_database: Database,
              session: Session) -> None:
    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
    if data_uri.endswith(".gz"):
        data = gzip.open(data)
    df = pd.read_csv(data, encoding="utf-8")
    dtype = get_dtype(df, dataset)

    # convert temporal columns
    for column_name, sqla_type in dtype.items():
        if isinstance(sqla_type, (Date, DateTime)):
            df[column_name] = pd.to_datetime(df[column_name])

    # reuse session when loading data if possible, to make import atomic
    if example_database.sqlalchemy_uri == current_app.config.get(
            "SQLALCHEMY_DATABASE_URI"
    ) or not current_app.config.get("SQLALCHEMY_EXAMPLES_URI"):
        logger.info("Loading data inside the import transaction")
        connection = session.connection()
    else:
        logger.warning("Loading data outside the import transaction")
        connection = example_database.get_sqla_engine()

    df.to_sql(
        dataset.table_name,
        con=connection,
        schema=dataset.schema,
        if_exists="replace",
        chunksize=CHUNKSIZE,
        dtype=dtype,
        index=False,
        method="multi",
    )
Example No. 20
    def test_savepoints(self):
        metadata = self.metadata

        some_table = Table(
            't', metadata,
            Column('id',
                   Integer,
                   primary_key=True,
                   test_needs_autoincrement=True))

        class SomeClass(object):
            pass

        mapper(SomeClass, some_table)

        metadata.create_all()

        session = Session(testing.db)

        target_strings = session.connection().\
            dialect.identifier_preparer._strings

        with session.transaction:

            @profile_memory(assert_no_sessions=False,
                            get_num_objects=lambda: len(target_strings))
            def go():

                sc = SomeClass()
                session.add(sc)

                with session.begin_nested():
                    session.query(SomeClass).first()

            go()
Example No. 21
def connect(database=DEFAULT_DB, debug=False):
    """Returns a SQLAlchemy engine conencted to the given sqlite database.

    Kwargs:
        database (str): sqlite database (default: ":memory:")
        debug (bool): whether to output log statements (default: False)

    Returns:
        Session. Database session.
    """
    engine = create_engine('sqlite:///' + database, echo=debug)
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    regexp = lambda expr, item: re.search(expr, item, re.I + re.U) is not None
    session.connection().connection.create_function('regexp', 2, regexp)
    return session
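A sketch of the REGEXP operator this registration enables, assuming a person table with a name column (both hypothetical):

# sqlite has no built-in REGEXP; create_function() above supplies one, so raw
# SQL issued through this session can use the operator.
session = connect()
rows = session.execute(
    "SELECT name FROM person WHERE name REGEXP :pat",
    {"pat": r"^ma"},
).fetchall()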
Example No. 22
def connect_database():
    '''Connect to the database.
    :return: table as a DataFrame'''
    url = 'mysql+pymysql://root:AI@2019@[email protected]:3306/stu_db'
    engine = create_engine(url)
    session = Session(engine)
    data = pd.read_sql_table(table_name="news_chinese", con=session.connection())
    return data
Example No. 23
def main():
    symbol = "cmp"
    informations = get_informations_for_day(symbol)
    day_class = Day.get_day_class(symbol)
    days = []
    days.append(day_class(informations=informations[0:2]))
    days.append(day_class(informations=informations[3:4]))
    create_tables(symbol)

    engine = create_engine("mysql://*****:*****@localhost/trade2",
                           encoding='utf-8')

    session = Session(engine)
    session.connection()
    session.add_all(days)
    session.commit()
    session.flush()
Example No. 24
    def get_countries(self):
        session = Session(self.engine)

        results = session.query(self.Country)
        df = pd.read_sql(results.statement, session.connection())
        session.close()

        return df.to_dict(orient='records')
Example No. 25
    def get_subject_ids(self):
        session = Session(self.engine)

        results = session.query(self.Subjects.id)

        df = pd.read_sql(results.statement, session.connection())
        session.close()
        return list(df.id)
Example No. 26
def import_dataset(
    session: Session, config: Dict[str, Any], overwrite: bool = False
) -> SqlaTable:
    existing = session.query(SqlaTable).filter_by(uuid=config["uuid"]).first()
    if existing:
        if not overwrite:
            return existing
        config["id"] = existing.id

    # TODO (betodealmeida): move this logic to import_from_dict
    config = config.copy()
    for key in JSON_KEYS:
        if config.get(key):
            try:
                config[key] = json.dumps(config[key])
            except TypeError:
                logger.info("Unable to encode `%s` field: %s", key, config[key])
    for metric in config.get("metrics", []):
        if metric.get("extra"):
            try:
                metric["extra"] = json.dumps(metric["extra"])
            except TypeError:
                logger.info("Unable to encode `extra` field: %s", metric["extra"])

    # should we delete columns and metrics not present in the current import?
    sync = ["columns", "metrics"] if overwrite else []

    # should we also load data into the dataset?
    data_uri = config.get("data")

    # import recursively to include columns and metrics
    dataset = SqlaTable.import_from_dict(session, config, recursive=True, sync=sync)
    if dataset.id is None:
        session.flush()

    # load data
    if data_uri:
        data = request.urlopen(data_uri)
        df = pd.read_csv(data, encoding="utf-8")
        dtype = get_dtype(df, dataset)

        # convert temporal columns
        for column_name, sqla_type in dtype.items():
            if isinstance(sqla_type, (Date, DateTime)):
                df[column_name] = pd.to_datetime(df[column_name])

        df.to_sql(
            dataset.table_name,
            con=session.connection(),
            schema=dataset.schema,
            if_exists="replace",
            chunksize=CHUNKSIZE,
            dtype=dtype,
            index=False,
            method="multi",
        )

    return dataset
Example No. 27
    def test_bind_arguments(self):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address, addresses)

        e1 = engines.testing_engine()
        e2 = engines.testing_engine()
        e3 = engines.testing_engine()

        sess = Session(e3)
        sess.bind_mapper(User, e1)
        sess.bind_mapper(Address, e2)

        assert sess.connection().engine is e3
        assert sess.connection(bind=e1).engine is e1
        assert sess.connection(mapper=Address, bind=e1).engine is e1
        assert sess.connection(mapper=Address).engine is e2
        assert sess.connection(clause=addresses.select()).engine is e2
        assert sess.connection(mapper=User,
                               clause=addresses.select()).engine is e1
        assert sess.connection(mapper=User,
                               clause=addresses.select(),
                               bind=e2).engine is e2

        sess.close()
Example No. 28
    def test_bind_arguments(self):
        users, Address, addresses, User = (self.tables.users,
                                           self.classes.Address,
                                           self.tables.addresses,
                                           self.classes.User)

        mapper(User, users)
        mapper(Address, addresses)

        e1 = engines.testing_engine()
        e2 = engines.testing_engine()
        e3 = engines.testing_engine()

        sess = Session(e3)
        sess.bind_mapper(User, e1)
        sess.bind_mapper(Address, e2)

        assert sess.connection().engine is e3
        assert sess.connection(bind=e1).engine is e1
        assert sess.connection(mapper=Address, bind=e1).engine is e1
        assert sess.connection(mapper=Address).engine is e2
        assert sess.connection(clause=addresses.select()).engine is e2
        assert sess.connection(mapper=User,
                               clause=addresses.select()).engine is e1
        assert sess.connection(mapper=User, clause=addresses.select(),
                               bind=e2).engine is e2

        sess.close()
Example No. 29
    def get_launch_date(self):
        session = Session(self.engine)

        results = session.query(self.LaunchDate)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return df.to_dict(orient="records")
Example No. 30
    def get_satellite_names(self):
        session = Session(self.engine)

        results = session.query(self.DemoGData.Satellite_Names)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return list(df.Satellite_Names)
Example No. 31
    def states_list(self):
        session = Session(self.engine)

        results = session.query(self.min_wage.State)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return list(df.State)
Example No. 32
    def get_BabyNames(self):
        session = Session(self.engine)

        results = session.query(self.babynames.names)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return list(df.names)
Example No. 33
    def get_top_ten(self):
        session = Session(self.engine)

        results = session.query(self.top_ten)

        top_ten_df = pd.read_sql(results.statement, session.connection())

        session.close()
        return top_ten_df
Example No. 34
    def get_state_ids(self):
        session = Session(self.engine)

        results = session.query(self.Cases.State_Abbrev)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return list(df.State_Abbrev.unique())
Example No. 35
    def get_report_counts(self):
        session = Session(self.engine)

        results = session.query(self.report_counts)

        report_counts_df = pd.read_sql(results.statement, session.connection())

        session.close()
        return report_counts_df
Example No. 36
def send_data_from_df(db: Session, tablename: str, df: pd.DataFrame):
    conn = db.connection()
    df.to_sql(tablename, con=conn, if_exists='replace', index=False)
    try:
        db.commit()
        return True
    except IntegrityError:
        # a duplicate record already exists
        return False
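A minimal sketch of feeding the helper above, assuming an open Session named session; the table name is hypothetical:

import pandas as pd

frame = pd.DataFrame({"id": [1, 2], "value": [3.5, 4.0]})
# if_exists='replace' drops and recreates the table, so the helper is only
# safe for tables this job owns.
ok = send_data_from_df(db=session, tablename="daily_metrics", df=frame)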
Example No. 37
    def get_40yr_master_record(self):
        session = Session(self.engine)

        results = session.query(self.MasterRecord)

        df = pd.read_sql(results.statement, session.connection())

        session.close()
        return df.to_dict(orient="records")
Example No. 38
class DataStore(object):
    def __init__(self):
        engine = create_engine("sqlite:///tweet.db", echo=False)

        metadata = BASE.metadata
        metadata.create_all(engine)

        Session = sessionmaker(bind=engine)
        self.session = Session()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.session.close()

    def insert_tweet(self, tweet):
        self.session.add(tweet)
        self.session.commit()

    def insert_tweets(self, tweets):
        for tweet in tweets:
            self.insert_tweet(tweet)

    def get_all_tweets(self):
        raw_list = self.session.query(
            Tweet.id, Tweet.tweet_id, func.count(Tweet.tweet_id),
            Tweet.message, Tweet.followers, Tweet.user_handle,
            Tweet.sentiment).group_by(
                Tweet.tweet_id).order_by(Tweet.sentiment.desc()).all()

        # raw_list is a list of tuples, so convert each row to a dictionary
        # so that calling code can refer to attributes rather than indexes.
        # NOTE: Investigate a better/cleaner way to resolve this.
        result = [
            dict(
                zip([
                    'id', 'tweet_id', 'count', 'message', 'followers',
                    'user_handle', 'sentiment'
                ], item)) for item in raw_list
        ]

        return result
Example No. 39
 def test_execution_options_begin_transaction(self):
     bind = mock.Mock()
     sess = Session(bind=bind)
     c1 = sess.connection(execution_options={'isolation_level': 'FOO'})
     eq_(
         bind.mock_calls,
         [
             mock.call.contextual_connect(),
             mock.call.contextual_connect().
             execution_options(isolation_level='FOO'),
             mock.call.contextual_connect().execution_options().begin()
         ]
     )
     eq_(c1, bind.contextual_connect().execution_options())
Example No. 40
 def make_session(self, engine):
   session = Session(engine)
   session.connection()
   return session
Example No. 41
# The attributes on our mapped class act like Column objects, and
# produce SQL expressions.

print(User.name == "ed")

### slide:: p
# These SQL expressions are compatible with the select() object
# we introduced earlier.

from sqlalchemy import select

sel = select([User.name, User.fullname]).\
        where(User.name == 'ed').\
        order_by(User.id)

session.connection().execute(sel).fetchall()


### slide:: p
# but when using the ORM, the Query() object provides a lot more functionality,
# here selecting the User *entity*.

query = session.query(User).filter(User.name == 'ed').order_by(User.id)

query.all()


### slide:: p
# Query can also return individual columns

for name, fullname in session.query(User.name, User.fullname):
    print(name, fullname)
Example No. 42
class Env(object):

    def __init__(self, config):
        self._config = config
        self._engine = create_engine('sqlite:///tmp.sqlite')
        self._session = Session(bind=self._engine)

        root = os.path.abspath(os.path.dirname(__file__))
        self._templates = TemplateLookup(
            directories=[os.path.join(root, 'template'), ],
            input_encoding='utf-8',
            output_encoding='utf-8',
        )

    @property
    def config(self):
        return self._config

    @property
    def session(self):
        return self._session

    @property
    def levels(self):
        return self.session.query(Level)

    @property
    def participants(self):
        return self.session.query(Participant)

    def build_database(self):
        metadata.drop_all(self._session.connection())
        metadata.create_all(self._session.connection())

        for p in self.config.participants:
            pobj = Participant(id=p.id, name=p.name, color=p.color)
            self.session.merge(pobj)

        for l in self.config.levels:
            lobj = Level(
                id=l.id, name=l.name,
                geometry=l.geometry, zoom=l.zoom
            )
            self.session.merge(lobj)

            for d in l.datasources:
                ds = DATASOURCE_BY_NAME[d.type](config=d, **d.data)
                for obj in ds.read():
                    self._session.merge(obj)

        for r in self.session.query(Result):
            r.update_vote_persent()

        Result.update_calc_fields(self.session)

        self._session.commit()

    def participant_vote_stat(self):
        q = self.session.query(
            ResultVote.participant_id.label('participant_id'),
            func.min(ResultVote.vote_persent).label('min'),
            func.max(ResultVote.vote_persent).label('max'),
        ).group_by(ResultVote.participant_id)

        result = dict()
        for r in q:
            result[r.participant_id] = dict(min=r.min, max=r.max)

        return result

    def parameter_stat(self):
        r = self.session.query(
            func.min(Result.turnout_p).label('turnout_min'),
            func.max(Result.turnout_p).label('turnout_max'),
            func.min(Result.absentee_p).label('absentee_min'),
            func.max(Result.absentee_p).label('absentee_max'),
        ).one()

        return dict(
            turnout=dict(min=r.turnout_min, max=r.turnout_max),
            absentee=dict(min=r.absentee_min, max=r.absentee_max),
        )

    def level_json(self, level_id):
        q = self._session.query(Result).filter_by(level_id=level_id)
        features = []
        for r in q:
            geom = None
            if r.level.geometry == 'area' and r.area_wkt:
                geom = wkt.loads(r.area_wkt)
            elif r.level.geometry == 'site' and r.site_x and r.site_y:
                geom = geojson.Point([r.site_x, r.site_y])

            if geom:
                f = geojson.Feature(
                    geometry=geom,
                    properties=dict(name=r.name, **r.parameters)
                )
                features.append(f)

        return geojson.dumps(geojson.FeatureCollection(features))

    def render_template(self, template, target, context):
        templateobj = self._templates.get_template(template)
        with open(target, 'w') as f:
            f.write(templateobj.render(**context))
Example No. 43
File: timps.py Project: tgbugs/mlab
from database.table_logic import *
from analysis.abf_analysis import *
from database.main import printFD
from sqlalchemy.orm import Session

#from neo import AxonIO, AnalogSignal
#engine=pgTest(args['--echo'])
engine.echo = args['--echo']
session = Session(engine)
s = session

print('Connected to:', engine.url.database)


#session type
dbtype = session.connection().engine.name  # dialect.name??
#FIXME use this to change how models import??

#table logic
logic_StepEdge(session)

#load up the stuff we need to test dataios and steps
if args['--rio-start']:
    from rig.rigcontrol import rigIOMan, keyDicts
    rio=rigIOMan(keyDicts, session)#, globals())
    rio.start()

#sc=StepCompiler(bind_pia_xys,stepDict)
#FIXME use ExperimentType???
#sr=StepRunner(session,bind_pia_xys,stepDict,rio.ctrlDict, session.query(Experiment).all()[10])
#sr.do() #DUN DUN DUN!