# Ejemplo n.º 1
# 0
    def test_one_to_many_on_m2o(self):
        """A rename of the parent's natural primary key must cascade to the
        children's FK values via the m2o backref, with passive_updates off."""
        Node, nodes = self.classes.Node, self.tables.nodes

        mapper(
            Node,
            nodes,
            properties={
                'children': relationship(
                    Node,
                    backref=sa.orm.backref(
                        'parentnode',
                        remote_side=nodes.c.name,
                        passive_updates=False,
                    ),
                )
            },
        )

        sess = Session()
        root = Node(name='n1')
        sess.add(root)
        leaves = [Node(name=leaf_name, parentnode=root)
                  for leaf_name in ('n11', 'n12', 'n13')]
        sess.add_all(leaves)
        sess.commit()

        root.name = 'new n1'
        sess.commit()
        # Every child's denormalized parent value follows the rename.
        eq_(
            ['new n1', 'new n1', 'new n1'],
            [node.parent
             for node in sess.query(Node).filter(
                 Node.name.in_(['n11', 'n12', 'n13']))],
        )
# Ejemplo n.º 2
# 0
 def _two_obj_fixture(self):
     """Return a Session seeded with two Engineers, one reporting to the other."""
     boss = Engineer(name='wally')
     subordinate = Engineer(name='dilbert', reports_to=boss)
     session = Session()
     session.add_all([boss, subordinate])
     session.commit()
     return session
def test_unlock_scenario_structure(fresh_database_config, scenario_manager):
    """Test that unlock method removes records from the table for locks.

    1. Create 3 scenarios with scenario manager.
    2. Create records in the ScenarioStructureLock table for the first and third scenarios.
    3. Unlock the first scenario.
    4. Check that the ScenarioStructureLock table contains only one record for the third scenario.
    """
    first, _, third = scenario_manager.create_scenarios([
        NewScenarioSpec(name=u'First'),
        NewScenarioSpec(name=u'Second'),
        NewScenarioSpec(name=u'Third'),
        ])

    connection_string = fresh_database_config.get_connection_string()
    session = Session(bind=create_engine(connection_string))
    session.add_all([
        ScenarioStructureLockRecord(scenario_id=first.scenario_id),
        ScenarioStructureLockRecord(scenario_id=third.scenario_id),
        ])
    session.commit()

    first._unlock_structure()

    # Check through a brand-new session so the result cannot come from a
    # stale identity map.
    session = Session(bind=create_engine(connection_string))
    lock_record = session.query(ScenarioStructureLockRecord).one()
    assert lock_record.scenario_id == third.scenario_id, "Wrong scenario has been unlocked"
# Ejemplo n.º 4
# 0
    def test_batch_interaction(self):
        """test batching groups same-structured, primary 
        key present statements together.

        """

        t = self.tables.t

        class T(fixtures.ComparableEntity):
            pass
        mapper(T, t)
        sess = Session()
        # Rows are crafted so flush must alternate between batched and
        # single-row INSERTs:
        #   - t1/t2 omit the PK -> each needs its own INSERT (to fetch the id)
        #   - t3..t5 share an identical column set -> one executemany batch
        #   - t6 embeds a SQL expression (func.lower) -> breaks the batch
        #   - t7/t8 resume batching; t9/t10 add def_ (new structure); t11 alone
        sess.add_all([
            T(data='t1'),
            T(data='t2'),
            T(id=3, data='t3'),
            T(id=4, data='t4'),
            T(id=5, data='t5'),
            T(id=6, data=func.lower('t6')),
            T(id=7, data='t7'),
            T(id=8, data='t8'),
            T(id=9, data='t9', def_='def2'),
            T(id=10, data='t10', def_='def3'),
            T(id=11, data='t11'),
        ])
        # Assert the exact statement/parameter grouping flush() emits.
        self.assert_sql_execution(
            testing.db,
            sess.flush,
            CompiledSQL(
                "INSERT INTO t (data) VALUES (:data)",
                {'data': 't1'}
            ),
            CompiledSQL(
                "INSERT INTO t (data) VALUES (:data)",
                {'data': 't2'}
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                [{'data': 't3', 'id': 3}, 
                    {'data': 't4', 'id': 4}, 
                    {'data': 't5', 'id': 5}]
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, lower(:lower_1))",
                {'lower_1': 't6', 'id': 6}
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                [{'data': 't7', 'id': 7}, {'data': 't8', 'id': 8}]
            ),
            CompiledSQL(
                "INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
                [{'data': 't9', 'id': 9, 'def_':'def2'}, 
                {'data': 't10', 'id': 10, 'def_':'def3'}]
            ),
            CompiledSQL(
                "INSERT INTO t (id, data) VALUES (:id, :data)",
                {'data': 't11', 'id': 11}
            ),
        )
# Ejemplo n.º 5
# 0
    def _roundtrip(self):
        """Persist a mix of Foo and Bar rows, then verify a polymorphic
        query returns them all, ordered by id."""
        Foo = Base._decl_class_registry["Foo"]
        Bar = Base._decl_class_registry["Bar"]

        session = Session(testing.db)

        fixture_rows = [
            Bar(data="d1", bar_data="b1"),
            Bar(data="d2", bar_data="b2"),
            Bar(data="d3", bar_data="b3"),
            Foo(data="d4"),
        ]
        session.add_all(fixture_rows)
        session.commit()

        # Querying the base class yields both subclasses, in insert order.
        eq_(
            session.query(Foo).order_by(Foo.id).all(),
            [
                Bar(data="d1", bar_data="b1"),
                Bar(data="d2", bar_data="b2"),
                Bar(data="d3", bar_data="b3"),
                Foo(data="d4"),
            ],
        )
# Ejemplo n.º 6
# 0
    def test_custom_comparator_factory(self):
        """Round-trip two Edges and query them through the custom ``near()``
        comparator installed by the fixture."""
        self._fixture(True)
        Edge, Point = (self.classes.Edge, self.classes.Point)

        first_edge = Edge(Point(0, 0), Point(3, 5))
        second_edge = Edge(Point(0, 1), Point(3, 5))

        sess = Session()
        sess.add_all([first_edge, second_edge])
        sess.commit()

        # Only the second edge starts within distance 1 of (1, 1)...
        near_edges = sess.query(Edge).filter(
            Edge.start.near(Point(1, 1), 1)
        ).all()
        assert first_edge not in near_edges
        assert second_edge in near_edges

        # ...while both edges start within distance 1 of (0, 1).
        near_edges = sess.query(Edge).filter(
            Edge.start.near(Point(0, 1), 1)
        ).all()
        assert first_edge in near_edges and second_edge in near_edges
def test_state_creation_by_rule(engine_with_parse_rule):
    """Check that parse rule state could be created by sqlalchemy.

    1. Create ParseRuleState table by sqlalchemy means.
    2. Add record.
    3. Check that new record could be addressed.
    """
    session = Session(bind=engine_with_parse_rule)

    rule = ParseRule(created=datetime.utcnow())

    name = u"New name"
    changed = datetime.utcnow()
    parser = 1
    state = ParseRuleState(rule=rule, name=name, changed=changed, parser=parser)

    session.add_all((rule, state))
    session.commit()

    assert state.state_id is not None, "New parse rule state does not obtain ID"
    assert rule.states.one().state_id == state.state_id, "Wrong ID of rule state"

    # Re-read through an independent engine/session to prove the row was
    # really written to the database, not just cached in the session.
    verification_session = Session(bind=create_engine(engine_with_parse_rule.url))
    obtained_record = verification_session.query(ParseRuleState).first()
    assert obtained_record.state_id == state.state_id, "Wrong state ID"
    assert obtained_record.rule_id == rule.rule_id, "Wrong rule ID"
    assert obtained_record.name == name, "Wrong name value"
    assert obtained_record.parser == parser, "Wrong parser"
    assert obtained_record.changed == changed, "wrong changed value"
# Ejemplo n.º 8
# 0
def dbFillin():
    """Create the schema in ``cinema.db`` and seed it with sample movies,
    projections and (anonymized) reservations.

    Idempotent with respect to schema creation (``create_all`` skips
    existing tables), but re-running inserts the rows again.
    """
    engine = create_engine("sqlite:///cinema.db")
    # will create all tables
    Base.metadata.create_all(engine)

    session = Session(bind=engine)

    # Fixed: the previous message said "Adding new student", which was
    # copy-pasted from another example — this seeds cinema data.
    print("Adding movies, projections and reservations via the session object")
    # movie_id / projection_id values below rely on SQLite assigning
    # autoincrement ids 1..N in insertion order.
    session.add_all([
        Movie(name="The Hunger Games: Catching Fire", rating=7.9),
        Movie(name="Wreck-It Ralph", rating=7.8),
        Movie(name="Her", rating=8.3),
        Projection(movie_id=1, type="3D", date=datetime.date(2014, 4, 1), time=datetime.time(19, 10)),
        Projection(movie_id=1, type="2D", date=datetime.date(2014, 4, 2), time=datetime.time(19, 00)),
        Projection(movie_id=1, type="4DX", date=datetime.date(2014, 4, 3), time=datetime.time(21, 10)),
        Projection(movie_id=3, type="2D", date=datetime.date(2014, 4, 1), time=datetime.time(20, 20)),
        Projection(movie_id=2, type="3D", date=datetime.date(2014, 4, 1), time=datetime.time(22, 00)),
        Projection(movie_id=2, type="2D", date=datetime.date(2014, 4, 1), time=datetime.time(19, 30)),
        Reservation(username="******", projection_id=1, row=2, col=1),
        Reservation(username="******", projection_id=1, row=3, col=5),
        Reservation(username="******", projection_id=1, row=7, col=8),
        Reservation(username="******", projection_id=3, row=1, col=1),
        Reservation(username="******", projection_id=3, row=1, col=2),
        Reservation(username="******", projection_id=5, row=2, col=3),
        Reservation(username="******", projection_id=5, row=2, col=4)])

    session.commit()
# Ejemplo n.º 9
# 0
    def test_self_referential_bidirectional_mutation(self):
        """Appends made on either side of a self-referential m2m backref
        must be visible from both sides after commit."""
        place, Place, place_place = (
            self.tables.place,
            self.classes.Place,
            self.tables.place_place,
        )

        mapper(Place, place, properties={
            "child_places": relationship(
                Place,
                secondary=place_place,
                primaryjoin=place.c.place_id == place_place.c.pl1_id,
                secondaryjoin=place.c.place_id == place_place.c.pl2_id,
                order_by=place_place.c.pl2_id,
                backref="parent_places",
            )
        })

        sess = Session()
        first = Place("place1")
        second = Place("place2")
        # Mutate one side before the objects are added, the other side after.
        second.parent_places = [first]
        sess.add_all([first, second])
        first.parent_places.append(second)
        sess.commit()

        assert first in second.parent_places
        assert second in first.parent_places
# Ejemplo n.º 10
# 0
def test_get_reports(fresh_database_config):
    """Test method to get report files.

    1. Create several report files.
    2. Get report files via report manager.
    3. Check that method returned proper records.
    """
    engine = create_engine(fresh_database_config.get_connection_string())
    session = Session(bind=engine)

    now = datetime.utcnow()

    # Three report files with creation timestamps around "now".
    reports = (
        ReportFile(file_path=u'first_report.xml',
                   created=(now + timedelta(minutes=1))),
        ReportFile(file_path=u'second_report.xml', created=now),
        ReportFile(file_path=u'third_report.xml',
                   created=(now - timedelta(minutes=1))),
    )

    session.add_all(reports)
    session.commit()

    api = API(fresh_database_config)
    report_files = ReportManager(api).get_report_files().with_session(Session(bind=engine)).all()

    obtained_ids = {report.file_id for report in report_files}
    expected_ids = {report.file_id for report in reports}
    assert obtained_ids == expected_ids, "Wrong ids of the report files"
# Ejemplo n.º 11
# 0
    def test_bulk_save_updated_include_unchanged(self):
        """With ``update_changed_only=False``, bulk_save_objects emits one
        batched UPDATE covering every detached object, modified or not."""
        User, = self.classes("User",)

        # expire_on_commit=False keeps attribute state readable after
        # commit() so the detached objects can still be mutated.
        s = Session(expire_on_commit=False)
        objects = [
            User(name="u1"),
            User(name="u2"),
            User(name="u3")
        ]
        s.add_all(objects)
        s.commit()

        objects[0].name = 'u1new'
        objects[2].name = 'u3new'

        s = Session()
        with self.sql_execution_asserter() as asserter:
            s.bulk_save_objects(objects, update_changed_only=False)

        # A single executemany includes the untouched u2 row as well.
        asserter.assert_(
            CompiledSQL(
                "UPDATE users SET name=:name WHERE "
                "users.id = :users_id",
                [{'users_id': 1, 'name': 'u1new'},
                 {'users_id': 2, 'name': 'u2'},
                 {'users_id': 3, 'name': 'u3new'}]
            )
        )
# Ejemplo n.º 12
# 0
    def test_report_primary_error_when_rollback_fails(self):
        """When the ROLLBACK TO SAVEPOINT issued while handling a flush
        failure itself fails, that rollback error is raised and a warning
        points back at the original exception."""
        User, users = self.classes.User, self.tables.users

        mapper(User, users)

        session = Session(testing.db)

        with expect_warnings(".*during handling of a previous exception.*"):
            session.begin_nested()
            # Reach into Connection's name-mangled internals to get the
            # savepoint identifier of the nested transaction.
            savepoint = session.\
                connection()._Connection__transaction._savepoint

            # force the savepoint to disappear
            session.connection().dialect.do_release_savepoint(
                session.connection(), savepoint
            )

            # now do a broken flush
            # (two identical primary keys guarantee the flush fails)
            session.add_all([User(id=1), User(id=1)])

            assert_raises_message(
                sa_exc.DBAPIError,
                "ROLLBACK TO SAVEPOINT ",
                session.flush
            )
# Ejemplo n.º 13
# 0
    def test_threelevel_selectin_to_inline_mapped(self):
        """The selectin load for subclass "c" must pull in the columns of
        its single-table inline subclasses "d" and "e" in one statement."""
        # Geometry: a -> (b: selectin, c: selectin -> (d, e: inline+single)).
        self._fixture_from_geometry(
            {
                "a": {
                    "subclasses": {
                        "b": {"polymorphic_load": "selectin"},
                        "c": {
                            "subclasses": {
                                "d": {
                                    "polymorphic_load": "inline",
                                    "single": True,
                                },
                                "e": {
                                    "polymorphic_load": "inline",
                                    "single": True,
                                },
                            },
                            "polymorphic_load": "selectin",
                        },
                    }
                }
            }
        )

        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
        sess = Session()
        sess.add_all([d(d_data="d1"), e(e_data="e1")])
        sess.commit()

        q = sess.query(a)

        # The d_data/e_data columns may render in either order, hence the
        # Or() of the two equivalent statements.
        result = self.assert_sql_execution(
            testing.db,
            q.all,
            CompiledSQL(
                "SELECT a.type AS a_type, a.id AS a_id, "
                "a.a_data AS a_a_data FROM a",
                {},
            ),
            Or(
                CompiledSQL(
                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                    "c.c_data AS c_c_data, c.e_data AS c_e_data, "
                    "c.d_data AS c_d_data "
                    "FROM a JOIN c ON a.id = c.id "
                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                    [{"primary_keys": [1, 2]}],
                ),
                CompiledSQL(
                    "SELECT a.type AS a_type, c.id AS c_id, a.id AS a_id, "
                    "c.c_data AS c_c_data, "
                    "c.d_data AS c_d_data, c.e_data AS c_e_data "
                    "FROM a JOIN c ON a.id = c.id "
                    "WHERE a.id IN ([EXPANDING_primary_keys]) ORDER BY a.id",
                    [{"primary_keys": [1, 2]}],
                ),
            ),
        )
        # Everything needed was loaded above; comparing emits no more SQL.
        with self.assert_statement_count(testing.db, 0):
            eq_(result, [d(d_data="d1"), e(e_data="e1")])
def test_get_view_states_with_pagination(fresh_database_config, scenario_manager):
    # pylint: disable=invalid-name
    """Test that we can apply pagination to obtain view states of scenario.

    1. Create 2 scenarios with scenario manager.
    2. Add 2 more states to first scenario with sqlalchemy.
    3. Find scenario with scenario manager.
    4. Obtain scenario view states with pagination: 2 items per page.
    5. Check returned states.
    """
    spec = NewScenarioSpec(name=u'first', description=u'description')
    additional_spec = NewScenarioSpec(name=u'second')
    scenario = scenario_manager.create_scenarios([spec, additional_spec])[0]

    scenario_id = scenario.scenario_id
    # One extra state a day in the past and one a day in the future; states
    # come back newest-first, so the future one leads the first page.
    past_state = ScenarioViewStateRecord(
        scenario_id=scenario_id,
        name=u'past',
        changed=datetime.utcnow() - timedelta(days=1),
        )
    future_state = ScenarioViewStateRecord(
        scenario_id=scenario_id,
        name=u'future',
        description=u'changed description',
        changed=datetime.utcnow() + timedelta(days=1),
        )

    session = Session(bind=create_engine(fresh_database_config.get_connection_string()))
    session.add_all([past_state, future_state])
    session.commit()

    found_scenario = scenario_manager.get_scenarios(ScenarioFilter(scenario_ids=[scenario_id]))[0]

    # check first page
    first_page = found_scenario.get_view_states(pagination=Pagination(page_index=0, items_per_page=2))
    assert len(first_page) == 2, "Wrong number of states"
    assert all(isinstance(state, ScenarioViewStateEntity) for state in first_page), (
        "Wrong type of the items"
        )
    assert all(state.scenario.scenario_id == scenario_id for state in first_page), (
        "One of the states has wrong scenario"
        )

    assert [state.name for state in first_page] == [future_state.name, spec.name], (
        "Wrong name of state"
        )
    assert [state.description for state in first_page] == [
        future_state.description, spec.description], "Wrong description of state"
    assert first_page[0].date == future_state.changed, "Wrong date in the state"

    # check second page
    second_page = found_scenario.get_view_states(pagination=Pagination(page_index=1, items_per_page=2))
    assert len(second_page) == 1, "Wrong number of states"
    assert isinstance(second_page[0], ScenarioViewStateEntity), "Wrong type of the items"
    assert second_page[0].scenario.scenario_id == scenario_id, "State has wrong scenario"

    assert second_page[0].name == past_state.name, "Wrong name of state"
    assert second_page[0].description == past_state.description, "Wrong description of state"
    assert second_page[0].date == past_state.changed, "Wrong date in the state"
# Ejemplo n.º 15
# 0
    def test_one_to_many_on_m2o(self):
        """Renaming the parent's natural PK propagates to the children's FK
        column even with passive_updates disabled on the backref."""
        Node, nodes = self.classes.Node, self.tables.nodes

        mapper(Node, nodes, properties={
            "children": relationship(
                Node,
                backref=sa.orm.backref(
                    "parentnode",
                    remote_side=nodes.c.name,
                    passive_updates=False,
                ),
            )
        })

        sess = Session()
        parent = Node(name="n1")
        sess.add(parent)
        sess.add_all([
            Node(name="n11", parentnode=parent),
            Node(name="n12", parentnode=parent),
            Node(name="n13", parentnode=parent),
        ])
        sess.commit()

        parent.name = "new n1"
        sess.commit()
        # All three children now point at the renamed parent.
        eq_(
            ["new n1", "new n1", "new n1"],
            [child.parent for child in
             sess.query(Node).filter(Node.name.in_(["n11", "n12", "n13"]))],
        )
def test_state_creation_by_node(engine_with_scenario_node):
    """Check that scenario node state could be created by sqlalchemy.

    1. Create ScenarioNodeState table by sqlalchemy means.
    2. Add record.
    3. Check that new record could be addressed.
    """
    session = Session(bind=engine_with_scenario_node)

    node = ScenarioNode(created=datetime.utcnow())

    name = u"New name"
    changed = datetime.utcnow()
    state = ScenarioNodeState(node=node, name=name, changed=changed)

    session.add_all((node, state))
    session.commit()

    assert state.state_id is not None, "New ScenarioNodeState does not obtain ID"
    assert node.states.one().state_id == state.state_id, "Wrong ID of node state"

    # Re-read through an independent engine/session to prove the row really
    # reached the database.
    verification_session = Session(bind=create_engine(engine_with_scenario_node.url))
    obtained_record = verification_session.query(ScenarioNodeState).first()
    assert obtained_record.state_id == state.state_id, "Wrong state ID"
    assert obtained_record.node_id == node.node_id, "Wrong node ID"
    assert obtained_record.name == name, "Wrong name value"
    assert obtained_record.changed == changed, "wrong changed value"
# Ejemplo n.º 17
# 0
    def test_partial_load_no_invoke_eagers(self):
        """Polymorphic selectin loading must tolerate a pending object that
        was garbage-collected before flush (regression for issue #4199)."""
        # test issue #4199

        self._fixture_from_geometry({
            "a": {
                "subclasses": {
                    "a1": {"polymorphic_load": "selectin"},
                    "a2": {"polymorphic_load": "selectin"}
                }
            }
        })

        a, a1, a2 = self.classes("a", "a1", "a2")
        sess = Session()

        a1_obj = a1()
        a2_obj = a2()
        sess.add_all([a1_obj, a2_obj])

        # Drop the only strong reference to the a2 instance so its state can
        # be garbage-collected while still pending in the session.
        del a2_obj
        sess.flush()
        sess.expire_all()

        # _with_invoke_all_eagers(False), used by the lazy loader
        # strategy, will cause one less state to be present such that
        # the poly loader won't locate a state limited to the "a1" mapper,
        # needs to test that it has states
        sess.query(a)._with_invoke_all_eagers(False).all()
def test_state_creation_by_scenario(engine_with_scenario):
    """Check that scenario view state could be created by sqlalchemy.

    1. Create ScenarioViewState table by sqlalchemy means.
    2. Add record.
    3. Check that new record could be addressed.
    """
    session = Session(bind=engine_with_scenario)

    scenario = Scenario(created=datetime.utcnow())

    name = u"New name"
    description = u"Some description"
    changed = datetime.utcnow()
    state = ScenarioViewState(
        scenario=scenario, name=name, description=description, changed=changed)

    session.add_all((scenario, state))
    session.commit()

    assert state.state_id is not None, "New ScenarioViewState does not obtain ID"
    assert scenario.view_states.one().state_id == state.state_id, (
        "Wrong ID of scenario view state"
        )

    # Re-read through an independent engine/session to prove the row really
    # reached the database.
    verification_session = Session(bind=create_engine(engine_with_scenario.url))
    obtained_record = verification_session.query(ScenarioViewState).first()
    assert obtained_record.state_id == state.state_id, "Wrong state ID"
    assert obtained_record.scenario_id == scenario.scenario_id, "Wrong scenario ID"
    assert obtained_record.name == name, "Wrong name value"
    assert obtained_record.description == description, "Wrong description value"
    assert obtained_record.changed == changed, "wrong changed value"
# Ejemplo n.º 19
# 0
    def test_multiple_key_replaced_by_update(self):
        """After deletes and renames shuffle identity keys across flushes,
        rollback must restore all three objects under their original keys."""
        users, User = self.tables.users, self.classes.User

        mapper(User, users)

        u1, u2, u3 = User(name='u1'), User(name='u2'), User(name='u3')

        sess = Session()
        sess.add_all([u1, u2, u3])
        sess.commit()

        # Free the 'u1' and 'u2' identity keys...
        sess.delete(u1)
        sess.delete(u2)
        sess.flush()

        # ...then have u3 occupy each of them in successive flushes.
        u3.name = 'u1'
        sess.flush()

        u3.name = 'u2'
        sess.flush()

        sess.rollback()

        # The rollback revives all objects with their original identities.
        assert u1 in sess
        assert u2 in sess
        assert u3 in sess

        assert sess.identity_map[(User, ('u1',))] is u1
        assert sess.identity_map[(User, ('u2',))] is u2
        assert sess.identity_map[(User, ('u3',))] is u3
# Ejemplo n.º 20
# 0
    def test_mapper_args_deferred(self):
        """test that __mapper_args__ is not called until *after* table reflection"""

        class User(decl.DeferredReflection, fixtures.ComparableEntity,
                            Base):
            __tablename__ = 'users'

            # declared_attr defers evaluation: cls.__table__ only exists once
            # DeferredReflection.prepare() below has reflected the table, so
            # this would fail if invoked at class-definition time.
            @decl.declared_attr
            def __mapper_args__(cls):
                return {
                    "order_by": cls.__table__.c.name
                }

        decl.DeferredReflection.prepare(testing.db)
        sess = Session()
        sess.add_all([
            User(name='G'),
            User(name='Q'),
            User(name='A'),
            User(name='C'),
        ])
        sess.commit()
        # The mapper-level order_by sorts the otherwise-unordered query.
        eq_(
            sess.query(User).all(),
            [
                User(name='A'),
                User(name='C'),
                User(name='G'),
                User(name='Q'),
            ]
        )
# Ejemplo n.º 21
# 0
    def test_bulk_update(self):
        """bulk_update_mappings emits a single batched UPDATE keyed on the
        primary key, covering unchanged rows too."""
        User, = self.classes("User",)

        # expire_on_commit=False keeps attribute state readable after commit.
        s = Session(expire_on_commit=False)
        objects = [
            User(name="u1"),
            User(name="u2"),
            User(name="u3")
        ]
        s.add_all(objects)
        s.commit()

        s = Session()
        with self.sql_execution_asserter() as asserter:
            s.bulk_update_mappings(
                User,
                [{'id': 1, 'name': 'u1new'},
                 {'id': 2, 'name': 'u2'},
                 {'id': 3, 'name': 'u3new'}]
            )

        # One executemany covering all three rows, including unchanged u2.
        asserter.assert_(
            CompiledSQL(
                "UPDATE users SET name=:name WHERE users.id = :users_id",
                [{'users_id': 1, 'name': 'u1new'},
                 {'users_id': 2, 'name': 'u2'},
                 {'users_id': 3, 'name': 'u3new'}]
            )
        )
# Ejemplo n.º 22
# 0
    def _do_save(self):
        """Drain the write queue and persist the collected items.

        Runs on a worker thread (asserted: never the IO thread).  Each pass
        drains everything currently queued into one session commit, rolling
        back and re-raising on failure.  ``self.writelock`` is set for the
        duration of a drain so producers can tell a flush is in progress.
        """
        assert not isInIOThread()

        while not self.write_queue.empty():
            items = []

            try:
                self.writelock = True
                # Drain all currently queued items without blocking.
                try:
                    while True:
                        items.append(self.write_queue.get_nowait())
                except Empty:
                    pass

                session = Session()

                try:
                    session.add_all(items)
                    session.commit()
                except BaseException:
                    # Was a bare ``except:`` — made explicit (identical
                    # behavior): roll back the failed transaction before
                    # re-raising whatever interrupted the commit.
                    session.rollback()
                    raise
                finally:
                    session.close()
            finally:
                self.writelock = False
# Ejemplo n.º 23
# 0
    def _test_many_to_one(self, passive):
        """Rename a natural-PK parent and verify the children's m2o values
        follow, with ``passive_updates`` as given by the caller."""
        Node, nodes = self.classes.Node, self.tables.nodes

        mapper(Node, nodes, properties={
            "parentnode": relationship(
                Node, remote_side=nodes.c.name, passive_updates=passive
            )
        })

        sess = Session()
        root = Node(name="n1")
        children = [Node(name=child_name, parentnode=root)
                    for child_name in ("n11", "n12", "n13")]
        sess.add_all([root] + children)
        sess.commit()

        root.name = "new n1"
        sess.commit()
        # Each child's parent column reflects the rename.
        eq_(
            ["new n1", "new n1", "new n1"],
            [node.parent for node in
             sess.query(Node).filter(Node.name.in_(["n11", "n12", "n13"]))],
        )
# Ejemplo n.º 24
# 0
 def insert_data(cls):
     """Seed ten Data rows, each owning five Other children."""
     session = Session()
     for i in range(10):
         session.add(
             cls.classes.Data(
                 d1='d%dd1' % i, d2='d%dd2' % i, d3='d%dd3' % i,
                 others=[cls.classes.Other(o1="d%do%d" % (i, j))
                         for j in range(5)],
             )
         )
     session.commit()
# Ejemplo n.º 25
# 0
 def insert_data(cls):
     """Seed a single User owning two Addresses, all with fixed ids."""
     User, Address = cls.classes.User, cls.classes.Address
     session = Session()
     user = User(
         id=1,
         name='u1',
         addresses=[Address(id=1, email='a1'), Address(id=2, email='a2')],
     )
     session.add(user)
     session.commit()
# Ejemplo n.º 26
# 0
    def _unhashable_fixture(self, metadata, load_on_pending=False):
        """Build Category/Article mapped over an unhashable (dict) column.

        The relationship joins on the dict-valued ``data`` columns, which
        exercises loader paths that must not attempt to hash the join value.
        Returns ``(Category, Article, sess, a1, c1)``; with
        ``load_on_pending`` true, ``a1`` is added only after the flush.
        """
        class MyHashType(sa.TypeDecorator):
            # Persists a dict as "k=v;k=v", keys sorted for determinism.
            impl = sa.String(100)

            def process_bind_param(self, value, dialect):
                return ";".join(
                    "%s=%s" % (k, v)
                    for k, v in sorted(value.items(), key=lambda key: key[0])
                )

            def process_result_value(self, value, dialect):
                return dict(elem.split("=", 1) for elem in value.split(";"))

        category = Table(
            "category",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", MyHashType()),
        )
        article = Table(
            "article",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", MyHashType()),
        )

        class Category(fixtures.ComparableEntity):
            pass

        class Article(fixtures.ComparableEntity):
            pass

        mapper(Category, category)
        mapper(
            Article,
            article,
            properties={
                "category": relationship(
                    Category,
                    primaryjoin=orm.foreign(article.c.data) == category.c.data,
                    load_on_pending=load_on_pending,
                )
            },
        )

        metadata.create_all()
        # autoflush=False so the caller controls exactly when SQL is emitted.
        sess = Session(autoflush=False)
        data = {"im": "unhashable"}
        a1 = Article(id=1, data=data)
        c1 = Category(id=1, data=data)
        if load_on_pending:
            sess.add(c1)
        else:
            sess.add_all([c1, a1])
        sess.flush()
        if load_on_pending:
            # Keep a1 pending (unflushed) to exercise load-on-pending.
            sess.add(a1)
        return Category, Article, sess, a1, c1
# Ejemplo n.º 27
# 0
    def test_columns_single_inheritance_conflict_resolution(self):
        """Test that a declared_attr can return the existing column and it will
        be ignored.  this allows conditional columns to be added.

        See [ticket:2472].

        """
        class Person(Base):
            __tablename__ = 'person'
            id = Column(Integer, primary_key=True)

        class Engineer(Person):
            """single table inheritance"""

            # Whichever sibling is configured first creates the column; the
            # other finds it already present on the shared table and reuses it.
            @declared_attr
            def target_id(cls):
                return cls.__table__.c.get('target_id',
                       Column(Integer, ForeignKey('other.id'))
                    )
            @declared_attr
            def target(cls):
                return relationship("Other")

        class Manager(Person):
            """single table inheritance"""

            @declared_attr
            def target_id(cls):
                return cls.__table__.c.get('target_id',
                        Column(Integer, ForeignKey('other.id'))
                    )
            @declared_attr
            def target(cls):
                return relationship("Other")

        class Other(Base):
            __tablename__ = 'other'
            id = Column(Integer, primary_key=True)

        # Both subclasses map onto the single shared person.target_id column.
        is_(
            Engineer.target_id.property.columns[0],
            Person.__table__.c.target_id
        )
        is_(
            Manager.target_id.property.columns[0],
            Person.__table__.c.target_id
        )
        # do a brief round trip on this
        Base.metadata.create_all()
        session = Session()
        o1, o2 = Other(), Other()
        session.add_all([
            Engineer(target=o1),
            Manager(target=o2),
            Manager(target=o1)
            ])
        session.commit()
        eq_(session.query(Engineer).first().target, o1)
# Ejemplo n.º 28
# 0
    def test_many_to_one_delete_childonly_unloaded_expired(self):
        """Deleting expired children (parent not deleted) reloads each child
        row individually before the batched DELETE is emitted."""
        users, Address, addresses, User = (self.tables.users,
                                self.classes.Address,
                                self.tables.addresses,
                                self.classes.User)

        mapper(User, users)
        mapper(Address, addresses, properties={
            'parent':relationship(User)
        })

        parent = User(name='p1')
        c1, c2 = Address(email_address='c1', parent=parent), \
                    Address(email_address='c2', parent=parent)

        session = Session()
        session.add_all([c1, c2])
        session.add(parent)

        session.flush()

        # Capture the ids before expiring so the assertions below don't
        # trigger extra attribute loads themselves.
        pid = parent.id
        c1id = c1.id
        c2id = c2.id

        session.expire(parent)
        session.expire(c1)
        session.expire(c2)

        session.delete(c1)
        session.delete(c2)

        self.assert_sql_execution(
            testing.db,
            session.flush,
            AllOf(
                # the parent User is expired, so it gets loaded here.
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c1id}
                ),
                CompiledSQL(
                    "SELECT addresses.id AS addresses_id, addresses.user_id AS "
                    "addresses_user_id, addresses.email_address AS "
                    "addresses_email_address FROM addresses WHERE addresses.id = "
                    ":param_1",
                    lambda ctx: {'param_1': c2id}
                ),
            ),
            # Both deletes collapse into one executemany.
            CompiledSQL(
                "DELETE FROM addresses WHERE addresses.id = :id",
                lambda ctx: [{'id': c1id}, {'id': c2id}]
            ),
        )
# Ejemplo n.º 29
# 0
 def _fixture(self):
     """Populate ten identical Parent rows, then close the session."""
     Parent = self.classes.Parent
     session = Session()
     session.add_all([
         Parent(data1='d1', data2='d2', data3='d3', data4='d4')
         for _ in range(10)
     ])
     session.commit()
     session.close()
class BasicTestCase(unittest.TestCase):
    """Exercises Django-style query operators (``field__op=value``) on a
    SQLAlchemy session configured with ``query_cls=DjangoQuery``."""

    def setUp(self):
        """Build an in-memory SQLite schema (Blog 1--n Entry) and seed two
        blogs with three entries each."""
        # Mixin-style base: every model gets a lowercase-class-name table
        # and an integer primary key.
        class Base(object):
            @declared_attr
            def __tablename__(cls):
                return cls.__name__.lower()

            id = Column(Integer, primary_key=True)

        Base = declarative_base(cls=Base)

        class Blog(Base):
            name = Column(String)
            entries = relationship('Entry', backref='blog')

        class Entry(Base):
            blog_id = Column(Integer, ForeignKey('blog.id'))
            pub_date = Column(Date)
            headline = Column(String)
            body = Column(String)

        engine = create_engine('sqlite://')
        Base.metadata.create_all(engine)
        # DjangoQuery supplies filter_by/exclude_by/order_by with
        # double-underscore operator syntax (defined elsewhere in this file).
        self.session = Session(engine, query_cls=DjangoQuery)
        self.Base = Base
        self.Blog = Blog
        self.Entry = Entry
        self.engine = engine

        self.b1 = Blog(name='blog1',
                       entries=[
                           Entry(headline='b1 headline 1',
                                 body='body 1',
                                 pub_date=datetime.date(2010, 2, 5)),
                           Entry(headline='b1 headline 2',
                                 body='body 2',
                                 pub_date=datetime.date(2010, 4, 8)),
                           Entry(headline='b1 headline 3',
                                 body='body 3',
                                 pub_date=datetime.date(2010, 9, 14))
                       ])
        self.b2 = Blog(name='blog2',
                       entries=[
                           Entry(headline='b2 headline 1',
                                 body='body 1',
                                 pub_date=datetime.date(2010, 5, 12)),
                           Entry(headline='b2 headline 2',
                                 body='body 2',
                                 pub_date=datetime.date(2010, 7, 18)),
                           # only entry in the fixture dated 2011
                           Entry(headline='b2 headline 3',
                                 body='body 3',
                                 pub_date=datetime.date(2011, 8, 27))
                       ])

        self.session.add_all([self.b1, self.b2])
        self.session.commit()

    def test_basic_filtering(self):
        """exact / contains / relationship traversal / range / year lookups."""
        bq = self.session.query(self.Blog)
        eq = self.session.query(self.Entry)
        assert bq.filter_by(name__exact='blog1').one() is self.b1
        assert bq.filter_by(name__contains='blog').all() == [self.b1, self.b2]
        # `entries__...` traverses the Blog.entries relationship.
        assert bq.filter_by(
            entries__headline__exact='b2 headline 2').one() is self.b2
        assert bq.filter_by(entries__pub_date__range=(
            datetime.date(2010, 1, 1),
            datetime.date(2010, 3, 1))).one() is self.b1
        assert eq.filter_by(pub_date__year=2011).one() is self.b2.entries[2]
        assert eq.filter_by(
            pub_date__year=2011,
            id=self.b2.entries[2].id).one() is self.b2.entries[2]

    def test_basic_excluding(self):
        """exclude_by negates the lookup: everything NOT from 2010."""
        eq = self.session.query(self.Entry)
        assert eq.exclude_by(pub_date__year=2010).one() is self.b2.entries[2]

    def test_basic_ordering(self):
        """'-field' means descending; 'blog__name' orders across the join."""
        eq = self.session.query(self.Entry)
        assert eq.order_by('-blog__name', 'id').all() == \
            self.b2.entries + self.b1.entries
Ejemplo n.º 31
0
# Create the mapped tables on the engine defined earlier in this script.
Base.metadata.create_all(engine)

session = Session(engine)

# Seed one Customer with two addresses and one Supplier with one.
# NOTE(review): Customer.Address / Supplier.Address look like per-class
# nested Address types declared earlier in the file -- confirm there.
session.add_all(
    [
        Customer(
            name="customer 1",
            addresses=[
                Customer.Address(
                    street="123 anywhere street", city="New York", zip="10110"
                ),
                Customer.Address(
                    street="40 main street", city="San Francisco", zip="95732"
                ),
            ],
        ),
        Supplier(
            company_name="Ace Hammers",
            addresses=[
                Supplier.Address(
                    street="2569 west elm", city="Detroit", zip="56785"
                )
            ],
        ),
    ]
)

session.commit()

for customer in session.query(Customer):
Ejemplo n.º 32
0
def foo_engine():
    """Build a fresh atomic-physics test database and return its engine.

    Drops and recreates all tables at ``foo_db_url``, then seeds atoms,
    data sources, ions, energy levels, lines and one electron collision.
    Quantities carry astropy units (``u.*``); ``method`` is "m" (measured)
    or "th" (theoretical) -- presumably; confirm against the model docs.
    """
    engine = create_engine(foo_db_url)
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    session = Session(bind=engine)

    # atoms
    h = Atom(atomic_number=1, symbol='H')
    ne = Atom(atomic_number=10, symbol='Ne')

    # data sources
    nist = DataSource(short_name='nist')
    ku = DataSource(short_name='ku')
    ch = DataSource(short_name="chianti")

    # atomic weights -- two conflicting values from different sources
    h.weights = [
        AtomWeight(quantity=1.00784 * u.u, data_source=nist, uncert=4e-3),
        AtomWeight(quantity=1.00811 * u.u, data_source=ku, uncert=4e-3),
    ]

    # ions
    h0 = Ion(atomic_number=1,
             ion_charge=0,
             ionization_energies=[
                 IonizationEnergy(quantity=13.5984 * u.eV,
                                  data_source=nist,
                                  method="th")
             ])

    ne1 = Ion(atomic_number=10,
              ion_charge=1,
              ionization_energies=[
                  IonizationEnergy(quantity=40.96296 * u.eV,
                                   data_source=nist,
                                   method="th"),
                  IonizationEnergy(quantity=40.97 * u.eV,
                                   data_source=nist,
                                   method="m")
              ])

    ne2 = Ion(atomic_number=10,
              ion_charge=2,
              ionization_energies=[
                  IonizationEnergy(quantity=48.839 * u.eV,
                                   data_source=nist,
                                   method="th")
              ])

    # levels -- same physical level may appear under several data sources
    ne2_lvl0_ku = Level(ion=ne2,
                        data_source=ku,
                        level_index=0,
                        configuration="2s2.2p5",
                        term="2P1.5",
                        L="P",
                        J=1.5,
                        spin_multiplicity=2,
                        parity=1,
                        energies=[
                            LevelEnergy(quantity=0, data_source=ku,
                                        method="m"),
                            LevelEnergy(quantity=0,
                                        data_source=ku,
                                        method="th")
                        ])

    ne2_lvl1_ku = Level(ion=ne2,
                        data_source=ku,
                        level_index=1,
                        configuration="2s2.2p5",
                        term="2P0.5",
                        L="P",
                        J=0.5,
                        spin_multiplicity=2,
                        parity=1,
                        energies=[
                            LevelEnergy(quantity=780.4 * u.Unit("cm-1"),
                                        data_source=ku,
                                        method="m"),
                            LevelEnergy(quantity=780.0 * u.Unit("cm-1"),
                                        data_source=ku,
                                        method="th")
                        ])

    ne2_lvl2_ku = Level(ion=ne2,
                        data_source=ku,
                        level_index=2,
                        configuration="2s2.2p5",
                        term="2D2.5",
                        L="D",
                        J=2.5,
                        spin_multiplicity=2,
                        parity=0,
                        energies=[
                            LevelEnergy(quantity=1366.3 * u.Unit("cm-1"),
                                        data_source=ku,
                                        method="m")
                        ])

    ne2_lvl1_ch = Level(ion=ne2,
                        data_source=ch,
                        level_index=1,
                        configuration="2s2.2p5",
                        term="2P0.5",
                        L="P",
                        J=0.5,
                        spin_multiplicity=2,
                        parity=1,
                        energies=[
                            LevelEnergy(quantity=780.2 * u.Unit("cm-1"),
                                        data_source=ch,
                                        method="m")
                        ])

    # lines
    ne2_line0_ku = Line(
        lower_level=ne2_lvl0_ku,
        upper_level=ne2_lvl1_ku,
        data_source=ku,
        wavelengths=[LineWavelength(quantity=183.571 * u.AA, data_source=ku)],
        a_values=[
            LineAValue(quantity=5.971e-03 * u.Unit("s-1"), data_source=ku)
        ],
        gf_values=[LineGFValue(quantity=8.792e-01, data_source=ku)])

    ne2_line1_ku = Line(
        lower_level=ne2_lvl0_ku,
        upper_level=ne2_lvl2_ku,
        data_source=ku,
        wavelengths=[
            # medium=1: wavelength given in air rather than vacuum -- TODO confirm
            LineWavelength(quantity=18.4210 * u.nm, medium=1, data_source=ku)
        ],
        a_values=[
            LineAValue(quantity=5.587e-03 * u.Unit("s-1"), data_source=ku)
        ],
        gf_values=[LineGFValue(quantity=8.238e-01, data_source=ku)])

    # electron collisions
    ne2_e_col0_ku = ECollision(
        lower_level=ne2_lvl0_ku,
        upper_level=ne2_lvl1_ku,
        data_source=ku,
        bt92_ttype=2,
        bt92_cups=11.16,
        energies=[
            ECollisionEnergy(quantity=0.007108 * u.rydberg, data_source=ku)
        ],
        temp_strengths=[(ECollisionTempStrength(temp=0.0, strength=0.255)),
                        (ECollisionTempStrength(temp=0.07394,
                                                strength=0.266))])

    # ne2 is not listed explicitly; presumably it is persisted via the
    # level relationships' cascade -- verify against the model config.
    session.add_all([
        h, ne, nist, ku, ch, h0, ne1, ne2_lvl1_ch, ne2_lvl0_ku, ne2_lvl1_ku,
        ne2_lvl2_ku, ne2_line0_ku, ne2_line1_ku, ne2_e_col0_ku
    ])
    session.commit()
    session.close()
    return engine
Ejemplo n.º 33
0
    item = relationship(Item, lazy='joined')


if __name__ == '__main__':
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    session = Session(engine)

    # create catalog
    tshirt, mug, hat, crowbar = (Item('SA T-Shirt',
                                      10.99), Item('SA Mug',
                                                   6.50), Item('SA Hat', 8.99),
                                 Item('MySQL Crowbar', 16.99))
    session.add_all([tshirt, mug, hat, crowbar])
    session.commit()

    # create an order
    order = Order('john smith')

    # add three OrderItem associations to the Order and save
    order.order_items.append(OrderItem(mug))
    order.order_items.append(OrderItem(crowbar, 10.99))
    order.order_items.append(OrderItem(hat))
    session.add(order)
    session.commit()

    # query the order, print items
    order = session.query(Order).filter_by(customer_name='john smith').one()
    print([(order_item.item.description, order_item.price)
Ejemplo n.º 34
0
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from models import Adopter, Adoptions, Animals, Employee, Organization, Supplies, Transfers, Base, Favorites
from sqlalchemy.orm import Session
import random

# Credentials are redacted in this snippet ('*****').
engine = create_engine('postgresql://*****:*****@localhost/ShelterHelp',
                       echo=False)

session = Session(bind=engine)

# Number of seed rows to generate per entity type.
n = 35

# NOTE(review): positional constructor arguments -- the meaning of each
# value depends on the Organization/Adopter model signatures in `models`.
session.add_all([
    Organization("P. Sherman, 42 Wallaby Way, Sydney", "555-555-5555",
                 random.uniform(0, 100), "User %d" % i,
                 "Sydney Animal Rescue Center", "123-_password_-321")
    for i in range(n)
])

session.commit()

session.add_all([
    Adopter("User %d" % i, "123-_password_-321", "Energetic",
            "A large apartment building", 13, 18) for i in range(n)
])

session.commit()

session.add_all([
    Animals("In good health. Vaccinated, tagged, and spayed/neutered", 4,
            "Lazy", "Garfield", "1", "static/Garfield.png", "Tabby", "Cat",
Ejemplo n.º 35
0
# Engine and create tables (in-memory SQLite; echo=True logs the SQL)
from sqlalchemy import create_engine
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)

# Session with identity map
from sqlalchemy.orm import Session
session = Session(bind=engine)

# adding multiple objects as *pending* -- nothing hits the DB until flush
u1 = User(name="slavo")
session.add_all([
    u1,
    User(name="jano"),
    User(name="vlado"),
    User(name="peter"),
    User(name="brano")
])

# finalize transaction (flushes the pending inserts, then commits)
session.commit()

# Many-to-One relationship (Adr->User - one user can live on multiple addresses)
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship


# in SQLAlchemy a relationship is declared twice:
# 1) the relation type at the Core level (ForeignKey on the table)
# 2) the relationship() at the ORM / object level
if __name__ == "__main__":
    engine = create_engine("postgresql://*****:*****@localhost/python",
                           echo=False)
    Base.metadata.create_all(engine)

    session = Session(engine)

    print("-" * 80)
    print("create a tree")
    session.add_all([
        Node(id=1, path="1"),
        Node(id=2, path="1.2"),
        Node(id=3, path="1.3"),
        Node(id=4, path="1.3.4"),
        Node(id=5, path="1.3.5"),
        Node(id=6, path="1.3.6"),
        Node(id=7, path="1.7"),
        Node(id=8, path="1.7.8"),
        Node(id=9, path="1.7.9"),
        Node(id=10, path="1.7.9.10"),
        Node(id=11, path="1.7.11"),
    ])
    session.flush()
    print(str(session.query(Node).get(1)))

    print("-" * 80)
    print("move 7 under 3")
    session.query(Node).get(7).move_to(session.query(Node).get(3))
    session.flush()
    print(str(session.query(Node).get(1)))
Ejemplo n.º 37
0
# Classical (imperative) mapping of User onto user_table.
mapper(User, user_table)

# Start from a clean schema each run.
metadata.drop_all(db)
metadata.create_all(db)

session = Session(bind=db)

# Password values are redacted in this snippet ('******').
wilber = User(name='wilber', fullname='Wilber Tian', password='******')
session.add(wilber)

session.commit()

session.add_all([
    User(name='wendy', fullname='Wendy Williams', password='******'),
    User(name='mary', fullname='Mary Contrary', password='******'),
    User(name='fred', fullname='Fred Flinstone', password='******')
])

session.commit()

# Query for full entities...
for instance in session.query(User).order_by(User.id):
    print(instance.name, instance.fullname)

# ...for individual columns (returns tuples)...
for name, fullname in session.query(User.name, User.fullname):
    print(name, fullname)

# ...and for entity + column mixed rows (named-tuple style access).
for row in session.query(User, User.name).all():
    print(row.User, row.name)

for row in session.query(User.name.label('name_label')).all():
Ejemplo n.º 38
0
    def test_threelevel_selectin_to_inline_awkward_alias_options(self):
        """selectin_polymorphic with a hand-built polymorphic selectable:
        verifies the "a" alias adaptation in the second SELECT emitted for
        the c/d/e subhierarchy.
        """
        # Three-level hierarchy: a -> (b, c); c -> (d, e).
        self._fixture_from_geometry({
            "a": {
                "subclasses": {
                    "b": {},
                    "c": {
                        "subclasses": {
                            "d": {},
                            "e": {},
                        },
                    }
                }
            }
        })

        a, b, c, d, e = self.classes("a", "b", "c", "d", "e")
        sess = Session()
        sess.add_all([d(d_data="d1"), e(e_data="e1")])
        sess.commit()

        from sqlalchemy import select

        a_table, c_table, d_table, e_table = self.tables("a", "c", "d", "e")

        # Manually-constructed polymorphic union of c with its d/e leaves,
        # labeled and aliased as "poly".
        poly = select(
            [a_table.c.id, a_table.c.type, c_table, d_table,
             e_table]).select_from(
                 a_table.join(c_table).outerjoin(d_table).outerjoin(
                     e_table)).apply_labels().alias('poly')

        c_alias = with_polymorphic(c, (d, e), poly)
        q = sess.query(a).options(selectin_polymorphic(
            a, [b, c_alias])).order_by(a.id)

        # Two CompiledSQL alternatives below because the d/e column-group
        # order in the generated statement is not deterministic.
        result = self.assert_sql_execution(
            testing.db,
            q.all,
            CompiledSQL(
                "SELECT a.type AS a_type, a.id AS a_id, "
                "a.a_data AS a_a_data FROM a ORDER BY a.id", {}),
            Or(
                # here, the test is that the adaptation of "a" takes place
                CompiledSQL(
                    "SELECT poly.a_type AS poly_a_type, "
                    "poly.c_id AS poly_c_id, "
                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data, "
                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data "
                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                    "c.id AS c_id, "
                    "c.c_data AS c_c_data, d.id AS d_id, "
                    "d.d_data AS d_d_data, "
                    "e.id AS e_id, e.e_data AS e_e_data FROM a JOIN c "
                    "ON a.id = c.id LEFT OUTER JOIN d ON c.id = d.id "
                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                    "ORDER BY poly.a_id", [{
                        'primary_keys': [1, 2]
                    }]),
                CompiledSQL(
                    "SELECT poly.a_type AS poly_a_type, "
                    "poly.c_id AS poly_c_id, "
                    "poly.a_id AS poly_a_id, poly.c_c_data AS poly_c_c_data, "
                    "poly.d_id AS poly_d_id, poly.d_d_data AS poly_d_d_data, "
                    "poly.e_id AS poly_e_id, poly.e_e_data AS poly_e_e_data "
                    "FROM (SELECT a.id AS a_id, a.type AS a_type, "
                    "c.id AS c_id, c.c_data AS c_c_data, d.id AS d_id, "
                    "d.d_data AS d_d_data, e.id AS e_id, "
                    "e.e_data AS e_e_data FROM a JOIN c ON a.id = c.id "
                    "LEFT OUTER JOIN d ON c.id = d.id "
                    "LEFT OUTER JOIN e ON c.id = e.id) AS poly "
                    "WHERE poly.a_id IN ([EXPANDING_primary_keys]) "
                    "ORDER BY poly.a_id", [{
                        'primary_keys': [1, 2]
                    }])))
        # Everything is now loaded: comparing entities must emit no SQL.
        with self.assert_statement_count(testing.db, 0):
            eq_(result, [d(d_data="d1"), e(e_data="e1")])
Ejemplo n.º 39
0
 def insert_data(cls, connection):
     """Insert five A rows, all with data="asdf", over *connection*."""
     model = cls.classes.A
     session = Session(connection)
     session.add_all(model(data="asdf") for _ in range(5))
     session.commit()
Ejemplo n.º 40
0
# Email/username values are redacted in this snippet ('******').
c5 = Customer(first_name='Toby',
              last_name='Miller',
              username='******',
              email='*****@*****.**',
              address='1662 Kinney Street',
              town='Wolfden')

c6 = Customer(first_name='Scott',
              last_name='Harvey',
              username='******',
              email='*****@*****.**',
              address='424 Patterson Street',
              town='Beckinsdale')

# c3 and c4 are defined earlier in this script, before this excerpt.
session.add_all([c3, c4, c5, c6])
session.commit()

# Inventory fixture rows.
i1 = Item(name='Chair', cost_price=9.21, selling_price=10.81, quantity=5)
i2 = Item(name='Pen', cost_price=3.45, selling_price=4.51, quantity=3)
i3 = Item(name='Headphone', cost_price=15.52, selling_price=16.81, quantity=50)
i4 = Item(name='Travel Bag', cost_price=20.1, selling_price=24.21, quantity=50)
i5 = Item(name='Keyboard', cost_price=20.1, selling_price=22.11, quantity=50)
i6 = Item(name='Monitor', cost_price=200.14, selling_price=212.89, quantity=50)
i7 = Item(name='Watch', cost_price=100.58, selling_price=104.41, quantity=50)
i8 = Item(name='Water Bottle', cost_price=20.89, selling_price=25, quantity=50)

session.add_all([i1, i2, i3, i4, i5, i6, i7, i8])
session.commit()

# c1 is defined earlier in this script, before this excerpt.
o1 = Order(customer=c1)
    titulo = Column('TITULO', String(255))
    paginas = Column('PAGINAS', Integer)
    autor_id = Column('AUTOR_ID', Integer)

    def __init__(self, id, titulo, paginas, autor_id):
        """Store the book's id, title, page count and author id."""
        self.id, self.titulo = id, titulo
        self.paginas, self.autor_id = paginas, autor_id


# Seed two authors (positional args: id, name).
autor1 = Autor(1, 'Paulo')
autor2 = Autor(2, 'Maria')

lista = [autor1, autor2]
session.add_all(lista)
session.commit()

# Seed two books (positional args: id, title, pages, author id).
livro1 = Livro(1, 'Titulo do livro 1', 300, autor1.id)
livro2 = Livro(2, 'Titulo do livro 2', 150, autor2.id)

lista = [livro1, livro2]
session.add_all(lista)
session.commit()

# Print all authors, then fetch all books.
resultado = session.query(Autor).all()
for r in resultado:
    print(r.id, r.nome)

resultado = session.query(Livro).all()
for r in resultado:
Ejemplo n.º 42
0
class Grade(Base):
    """A single grade value belonging to one Student (many-to-one)."""
    __tablename__ = "grade"
    # surrogate primary key
    id = Column(Integer, primary_key=True)
    # the numeric grade itself
    value = Column(Float)
    student_id = Column(Integer, ForeignKey("student.id"))
    # `backref` also exposes Student.grades on the other side
    student = relationship("Student", backref="grades")


engine = create_engine("sqlite:///university.db")
Base.metadata.create_all(engine)

session = Session(bind=engine)
session.add_all([
    Student(name="Rado", age=23),
    Student(name="Ivo", age=21),
    Student(name="Ivan", age=23)
])

print("Adding new student to the database via the session object")

rado = session.query(Student).filter(Student.name == "Rado").one()

rado.grades = [Grade(value=6), Grade(value=5), Grade(value=3)]
session.commit()

ivo = session.query(Student).filter(Student.name == "Ivo").one()
ivo.grades.append(Grade(value=6))

avg_grades = session.query(func.avg(Grade.value)).\
    filter(Student.id == ivo.id).\
Ejemplo n.º 43
0
 def add_all(self, db: Session, objs_in: List[DataSchemaType]) -> None:
     """Convert each input schema object to a model row and stage all of
     them on *db* (no flush/commit is performed here)."""
     records = []
     for schema_obj in objs_in:
         records.append(self.model(**schema_obj.dict()))
     db.add_all(records)
Ejemplo n.º 44
0
print("Accessing domain objects after expiration results in lazy loading.")
print(wendy_user.fullname)
print(wendy_user.__dict__)
"""
heading = "Inserting multiple rows into the table. "
print_output(number, code, heading)

print("Creating a session using - session = Session(engine)")
session = Session(engine)

print("Adding User objects to the session - using session.add_all([ U1, U2])")
wendy_user = User(name='wendy', fullname='Wendy Weathersmith')
session.add(wendy_user)

session.add_all([
    User(name='mary', fullname='Mary Contrary'),
    User(name='fred', fullname='Fred Flintstone')
])

print("Pending user list not yet flushed : {}".format(session.new))

print("A query to the table flushes the pending entries to the database")
print("before fetching new data.")
our_user = session.query(User).filter_by(name='wendy').first()

print("Data Fetched for User whose name = 'wendy'")
print(our_user.id, our_user.name, our_user.fullname)

print("Pending user list after fetching using a query : {}".format(
    session.new))
print("-" * 80)
Ejemplo n.º 45
0
def seed_db(db: Session):
    """Seed the kanban schema (teams, projects, columns, tasks, users).

    Idempotent in effect: if the rows already exist, the IntegrityError /
    OperationalError is logged and the seed is skipped.

    NOTE: `Column` here is the project's board-column model, not
    sqlalchemy.Column. Hashed passwords are redacted ('******') in this
    snippet.
    """
    seed_data = [
        Team(team_name='Marketing'),
        Team(team_name='Engineering'),
        # project/team/column ids below are the autoincrement ids the
        # rows above will receive on insert (1-based, in list order).
        Project(team_id=1, project_name='Advertisement'),
        Project(team_id=1, project_name='SEO'),
        Project(team_id=2, project_name='API v2'),
        Column(column_pos=0, project_id=1, column_name='Preproduction'),
        Column(column_pos=1, project_id=1, column_name='Processing'),
        Column(column_pos=2, project_id=1, column_name='Postproduction'),
        Column(column_pos=3, project_id=1, column_name='Final'),
        Column(column_pos=0, project_id=2, column_name='To do'),
        Column(column_pos=0, project_id=3, column_name='Pre-process'),
        Column(column_pos=1, project_id=3, column_name='Post-process'),
        Task(column_id=1, column_idx=0, task_description='Research'),
        Task(column_id=1, column_idx=1, task_description='Due diligence'),
        Task(column_id=2, column_idx=0, task_description='A+B Testing'),
        Task(column_id=2, column_idx=1, task_description='Sampling'),
        Task(column_id=2, column_idx=2, task_description='Filming'),
        Task(column_id=3, column_idx=0, task_description='Casting'),
        Task(column_id=4, column_idx=0, task_description='Budget'),
        Task(column_id=4, column_idx=1, task_description='Design campaign'),
        Task(column_id=4,
             column_idx=2,
             task_description='Hire production crew'),
        Task(column_id=5, column_idx=0, task_description='Use Google'),
        Task(column_id=6, column_idx=0, task_description='Write application'),
        Task(column_id=6, column_idx=1, task_description='Profit'),
        Task(column_id=7, column_idx=0,
             task_description='Gather requirements'),
        Task(column_id=7, column_idx=1, task_description='Design specs'),
        Task(column_id=7, column_idx=2, task_description='Hire engineers'),
        User(first_name='Bob',
             last_name='Smith',
             email='*****@*****.**',
             hashed_password='******',
             team_id=1),
        User(first_name='Sally',
             last_name='McBeth',
             email='*****@*****.**',
             hashed_password='******',
             team_id=1),
        User(first_name='John',
             last_name='Applebaum',
             email='*****@*****.**',
             hashed_password='******',
             team_id=1),
        User(first_name='Bill',
             last_name='McSorley',
             email='*****@*****.**',
             hashed_password='******',
             team_id=2)
    ]

    try:
        db.add_all(seed_data)
        db.commit()
        logger.info('database seed complete.')
    except (IntegrityError, OperationalError) as e:  # if data already exists
        logger.warning(e.orig)
        logger.info('data already exists. skipping seed.')
Ejemplo n.º 46
0
    id = Column(Integer, primary_key=True)
    a_id = Column(StringAsInt)
    a = relationship(
        "A",
        # specify primaryjoin.  The string form is optional
        # here, but note that Declarative makes available all
        # of the built-in functions we might need, including
        # cast() and foreign().
        primaryjoin="cast(A.a_id, Integer) == foreign(B.a_id)",
        backref="bs",
    )


# we demonstrate with SQLite, but the important part
# is the CAST rendered in the SQL output.

e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)

s = Session(e)

# a_id is stored as a string; the custom primaryjoin CASTs it for the join.
s.add_all([A(a_id="1"), A(a_id="2", bs=[B(), B()]), A(a_id="3", bs=[B()])])
s.commit()

# many-to-one: B -> A through the CAST join
b1 = s.query(B).filter_by(a_id="2").first()
print(b1.a)

# one-to-many backref: A.bs
a1 = s.query(A).filter_by(a_id="2").first()
print(a1.bs)
Ejemplo n.º 47
0
class PersisterMysql:

    __mysql_session = None
    start = None
    end = None
    querylimit = None

    def __init__(self, config, start, end):

        db_conf = config._sections.get('mysql')
        dbuser = db_conf.get('dbuser')
        dbpass = db_conf.get('dbpass')
        dbhost = db_conf.get('dbhost')
        dbport = db_conf.get('dbport')
        dbname = db_conf.get('dbname')
        self.querylimit = int(db_conf.get('querylimit'))

        engine = create_engine("mysql://" + dbuser + ":" + dbpass + "@" +
                               dbhost + ":" + str(dbport) + "/" + dbname)
        # engine.echo = True

        # Reflect the tables
        Base = automap_base()
        Base.prepare(engine, reflect=True)

        # Create the session
        self.__mysql_session = Session(engine)

        self.start = start
        self.end = end

    def persist_process(self, processes):  # type: Process

        # Retrieve host_service list in DB to check duplication
        hs_db_list = self._get_host_service_list_hourly(self.start, self.end)
        hs_db_set = set(hs_db_list)

        # Build collection of (not duplicated) measurements to persist
        host_services = []
        for process in processes:
            host_service_list = model_mysql_adapter.from_process_measurement_to_mysql_host_service(
                process)
            # Exclude duplicates
            for host_service in host_service_list:
                if host_service not in hs_db_set:
                    host_services.append(host_service)

        # Persist measurements
        self._smart_persist(host_services)

    def persist_sanity(self, sanities):  # type: SanityCheck

        # Retrieve host_service list in DB to check duplication
        hs_db_list = self._get_host_service_list_hourly(self.start, self.end)
        hs_db_set = set(hs_db_list)

        for sanity_check in sanities:
            if sanity_check.aggregation.code is 'd':
                # Convert sanity measurements to host_service list
                host_service_list = model_mysql_adapter.from_sanity_check_to_mysql_host_service_list(
                    sanity_check)
                # Build collection of (not duplicated) measurements to persist
                host_service_day = []
                for host_service in host_service_list:
                    # Spread daily sanity on the 24 hours and persist
                    start_hour = host_service.timestampId
                    for _ in range(24):
                        my_host_service = deepcopy(host_service)
                        my_host_service.timestampId = start_hour + datetime.timedelta(
                            hours=_)
                        my_host_service.aggregationType = 'h'
                        if my_host_service not in hs_db_set:
                            host_service_day.append(my_host_service)
                # Persist measurements
                self.__mysql_session.add_all(host_service_day)
                self._session_commit()
            else:
                # TODO: Move to ERR logger
                print "ERR: Persit of " + sanity_check.__class__.__name__ + " with aggregation different from daily (d) " \
                                                                        "not implemented. nothing imported"

    def persist_host_service_daily_avg(self, start, end):
        # Retrieve daily host_service average for each region
        hs_list_daily_avg_hours = self._get_host_service_list_daily_average_from_hours(
            start, end)
        hs_list_daily_avg = self._get_host_service_list_daily(start, end)
        hs_list_daily_avg_set = set(hs_list_daily_avg)

        hs_list_daily_avg = []
        for hs_daily_avg_res in hs_list_daily_avg_hours:
            # Detach object from the session otherwise in import will be duplicated
            make_transient(hs_daily_avg_res.HostService)
            hs = deepcopy(hs_daily_avg_res.HostService)  # type: HostService
            hs.aggregationType = 'd'
            hs.timestampId = hs.timestampId.replace(hour=0, minute=0, second=0)
            hs.avg_Uptime = hs_daily_avg_res.dailyUptime
            if hs not in hs_list_daily_avg_set:
                hs_list_daily_avg.append(hs)
        self.__mysql_session.add_all(hs_list_daily_avg)
        self._session_commit()

    def _get_per_region_host_service_list(self, start, end, service_type,
                                          aggregation_type):
        return self.__mysql_session.query(HostService) \
            .group_by(HostService.entityId) \
            .group_by(func.date(HostService.timestampId))\
            .filter(HostService.serviceType == service_type) \
            .filter(HostService.aggregationType == aggregation_type) \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()

    def _get_per_host_service_average_uptime(self, start, end, host_service):
        r = self.__mysql_session.query(
            func.avg(HostService.avg_Uptime).label('dailyUptime')) \
            .filter(HostService.region == host_service.region) \
            .filter(HostService.serviceType == host_service.serviceType) \
            .filter(HostService.timestampId >= start, HostService.timestampId < end)
        return r.value(r.label('dailyUptime'))

    def _get_host_service_list_daily_average_from_hours(self, start, end):
        return self.__mysql_session.query(HostService, func.avg(HostService.avg_Uptime).label('dailyUptime')) \
            .group_by(HostService.entityId) \
            .group_by(func.date(HostService.timestampId)) \
            .filter(HostService.aggregationType == 'h') \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()

    def _get_host_service_list_daily(self, start, end):
        return self.__mysql_session.query(HostService) \
            .filter(HostService.aggregationType == 'd') \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()

    def _get_host_service_list_hourly(self, start, end):
        return self.__mysql_session.query(HostService) \
            .filter(HostService.aggregationType == 'h') \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()

    def _smart_persist(self, entities):
        chunks = [
            entities[x:x + self.querylimit]
            for x in xrange(0, len(entities), self.querylimit)
        ]
        for chunk in chunks:
            self.__mysql_session.add_all(chunk)
            self._session_commit()

    def _session_commit(self):
        try:
            self.__mysql_session.commit()
        except Exception as e:
            # TODO: Print from logger
            print "Mysql ERR: " + str(e)
            self.__mysql_session.rollback()

    def persist_host_service_monthly_avg(self, start, end):
        # Retrieve monthly host_service average for each region
        hs_list_monthly_avg_days = self._get_host_service_list_monthly_average_from_days(
            start, end)
        hs_list_monthly_avg = self._get_host_service_list_monthly(start, end)
        hs_list_monthly_avg_set = set(hs_list_monthly_avg)

        hs_list_monthly_avg = []
        for hs_monthly_avg_res in hs_list_monthly_avg_days:
            # Detach object from the session otherwise in import will be duplicated
            make_transient(hs_monthly_avg_res.HostService)
            hs = deepcopy(hs_monthly_avg_res.HostService)  # type: HostService
            hs.aggregationType = 'm'
            hs.timestampId = utils.get_last_day_datetime(hs.timestampId)
            hs.avg_Uptime = hs_monthly_avg_res.monthlyUptime
            if hs not in hs_list_monthly_avg_set:
                hs_list_monthly_avg.append(hs)
        self.__mysql_session.add_all(hs_list_monthly_avg)
        self._session_commit()

    def _get_host_service_list_monthly_average_from_days(self, start, end):
        return self.__mysql_session.query(HostService, func.avg(HostService.avg_Uptime).label('monthlyUptime')) \
            .group_by(HostService.entityId) \
            .group_by(func.month(HostService.timestampId)) \
            .filter(HostService.aggregationType == 'd') \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()

    def _get_host_service_list_monthly(self, start, end):
        return self.__mysql_session.query(HostService) \
            .filter(HostService.aggregationType == 'm') \
            .filter(HostService.timestampId >= start, HostService.timestampId <= end) \
            .all()
Ejemplo n.º 48
0
    def insert_data(cls, connection):
        """Persist the class-level fixture rows through *connection*."""
        session = Session(connection)
        session.add_all(cls._fixture())
        session.commit()
Ejemplo n.º 49
0
def create_travels(db: Session, travels: List[schemas.TravelCreate]):
    """Insert one Travel row per TravelCreate spec, commit, and return the
    newly created model objects (in input order)."""
    new_rows = []
    for spec in travels:
        new_rows.append(models.Travel(**spec.dict()))
    db.add_all(new_rows)
    db.commit()
    return new_rows
Ejemplo n.º 50
0
# Collect the distinct Spotify track ids present in either chart table and
# normalize them to 'spotify:track:<id>' URIs.
viralIds = session.query(Viral50.spotify_id).distinct(Viral50.spotify_id).all()
viralIds = list(map(lambda item: 'spotify:track:' + str(item[0]), viralIds))

top200Ids = session.query(Top200.spotify_id).distinct(Top200.spotify_id).all()
ids = list(map(lambda item: 'spotify:track:' + str(item[0]), top200Ids))
ids.extend(viralIds)
ids = list(set(ids))  # de-duplicate tracks that appear in both charts

print('Audio features search will be performed for {} rows'.format(len(ids)))
print('Sample ids: ', ids[:3])

# Spotify client credentials are read from the environment (.env file).
load_dotenv()
ID = os.getenv('ID')
SECRET = os.getenv('SECRET')
sp = spotipy.Spotify(
    auth_manager=SpotifyClientCredentials(client_id=ID, client_secret=SECRET))
chunk_size = 10
count = 0
for chunk in chunks(ids, chunk_size):
    try:
        res = sp.audio_features(tracks=chunk)
        res = list(map(lambda item: extract_relevant_features(item), res))
        session.add_all(list(map(lambda item: AudioFeatures(**item), res)))
        session.commit()
        # Fix: the final chunk may hold fewer than chunk_size ids, so count
        # the actual chunk length instead of a hard-coded 10.
        count += len(chunk)
        print('Persisted {}/{}'.format(count, len(ids)))
    except Exception as e:
        # Best-effort: log and skip a failing chunk, keep processing the rest.
        print('Exception:', e)

session.close()
Ejemplo n.º 51
0
    def test_classreg_setup(self):
        """Round-trip an AbstractConcreteBase hierarchy whose relationship is
        declared with string class names, then verify both the query results
        and the compiled polymorphic UNION join.
        """
        class A(Base, fixtures.ComparableEntity):
            __tablename__ = 'a'
            id = Column(Integer,
                        primary_key=True,
                        test_needs_autoincrement=True)
            data = Column(String(50))
            # "BC" and the primaryjoin are strings: resolved from the
            # declarative class registry once BC's concrete subclasses exist.
            collection = relationship("BC",
                                      primaryjoin="BC.a_id == A.id",
                                      collection_class=set)

        class BC(AbstractConcreteBase, Base, fixtures.ComparableEntity):
            # Abstract base with no table of its own; queries against it
            # compile to a UNION ALL of the concrete subclass tables.
            pass

        class B(BC):
            __tablename__ = 'b'
            id = Column(Integer,
                        primary_key=True,
                        test_needs_autoincrement=True)

            a_id = Column(Integer, ForeignKey('a.id'))
            data = Column(String(50))
            b_data = Column(String(50))
            __mapper_args__ = {"polymorphic_identity": "b", "concrete": True}

        class C(BC):
            __tablename__ = 'c'
            id = Column(Integer,
                        primary_key=True,
                        test_needs_autoincrement=True)
            a_id = Column(Integer, ForeignKey('a.id'))
            data = Column(String(50))
            c_data = Column(String(50))
            __mapper_args__ = {"polymorphic_identity": "c", "concrete": True}

        Base.metadata.create_all()
        sess = Session()
        sess.add_all([
            A(data='a1',
              collection=set([
                  B(data='a1b1', b_data='a1b1'),
                  C(data='a1b2', c_data='a1c1'),
                  B(data='a1b2', b_data='a1b2'),
                  C(data='a1c2', c_data='a1c2'),
              ])),
            A(data='a2',
              collection=set([
                  B(data='a2b1', b_data='a2b1'),
                  C(data='a2c1', c_data='a2c1'),
                  B(data='a2b2', b_data='a2b2'),
                  C(data='a2c2', c_data='a2c2'),
              ]))
        ])
        sess.commit()
        # Expunge so the comparison below reloads everything from the DB.
        sess.expunge_all()

        eq_(
            sess.query(A).filter_by(data='a2').all(), [
                A(data='a2',
                  collection=set([
                      B(data='a2b1', b_data='a2b1'),
                      B(data='a2b2', b_data='a2b2'),
                      C(data='a2c1', c_data='a2c1'),
                      C(data='a2c2', c_data='a2c2'),
                  ]))
            ])

        # The join against the abstract BC must render as the "pjoin"
        # UNION ALL of tables c and b, with NULL-cast placeholder columns.
        self.assert_compile(
            sess.query(A).join(A.collection),
            "SELECT a.id AS a_id, a.data AS a_data FROM a JOIN "
            "(SELECT c.id AS id, c.a_id AS a_id, c.data AS data, "
            "c.c_data AS c_data, CAST(NULL AS VARCHAR(50)) AS b_data, "
            "'c' AS type FROM c UNION ALL SELECT b.id AS id, b.a_id AS a_id, "
            "b.data AS data, CAST(NULL AS VARCHAR(50)) AS c_data, "
            "b.b_data AS b_data, 'b' AS type FROM b) AS pjoin ON pjoin.a_id = a.id"
        )
Ejemplo n.º 52
0
+-------------------------------------------------------------------------+
| 5. Session maintains a *unique* object per identity. ed_user is the same
| object as our_user
+-------------------------------------------------------------------------+
"""

# Identity map: loading the row with the same primary key yields the very
# object already in the session, so this prints True.
print(ed_user is our_user)
ed_user.haha = "asdf"
"""
+-------------------------------------------------------------------------+
| 6. Add multiple objects, they'll appear in session.new
+-------------------------------------------------------------------------+
"""
session.add_all([
    User(name="wendey", full_name="wendy weathersmith"),
    User(name="mary", full_name="Mary Contrary"),
    User(name="fred", full_name="Fred Flintstone"),
])

# Pending (added but not yet flushed) objects are listed in session.new.
print(session.new)
"""
+-------------------------------------------------------------------------+
| 7. Modifying objects that are flushed appear in session.dirty
+-------------------------------------------------------------------------+
"""
ed_user.full_name = "Ed Jones"
# Modified persistent objects are listed in session.dirty until flushed.
print(session.dirty)
"""
+-------------------------------------------------------------------------+
| 8. Finally committing will trigger a flush and add records to the db.
+-------------------------------------------------------------------------+
Ejemplo n.º 53
0
class Supplier(HasAddresses, Base):
    """Supplier mapped class; presumably gains its ``addresses`` collection
    from the HasAddresses mixin (see the fixture below) — confirm in mixin."""
    company_name = Column(String)


engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)

session = Session(engine)

# Build the fixture objects first, then stage both in a single add_all call.
customer_fixture = Customer(
    name='customer 1',
    addresses=[
        Address(street='123 anywhere street',
                city="New York",
                zip="10110"),
        Address(street='40 main street',
                city="San Francisco",
                zip="95732"),
    ])
supplier_fixture = Supplier(
    company_name="Ace Hammers",
    addresses=[
        Address(street='2569 west elm', city="Detroit", zip="56785")
    ])
session.add_all([customer_fixture, supplier_fixture])

session.commit()

for cust in session.query(Customer):
    for addr in cust.addresses:
        print(addr)
        # no parent here
Ejemplo n.º 54
0
    b_id = Column(Integer, ForeignKey("b.id"), nullable=False)
    value = Column(Integer)
    def __init__(self, value):
        """Store the integer payload; b_id is not set here (presumably filled
        in when the object is attached to its parent — confirm with caller)."""
        self.value = value

if __name__ == '__main__':
    engine = create_engine('sqlite://', echo=True)
    Base.metadata.create_all(engine)
    session = Session(engine)

    # only "A" is referenced explicitly.  Using "collections",
    # we deal with a dict of key/sets of integers directly.

    session.add_all([
        A(collections={
            "1": set([1, 2, 3]),
        })
    ])
    session.commit()

    a1 = session.query(A).first()
    print a1.collections["1"]
    a1.collections["1"].add(4)
    session.commit()

    a1.collections["2"].update([7, 8, 9])
    session.commit()

    print a1.collections["2"]

Ejemplo n.º 55
0
    def insert_data(cls):
        """Commit the class-level fixture rows through a Session bound to the
        shared test database."""
        sess = Session(testing.db)
        sess.add_all(cls._fixture())
        sess.commit()
Ejemplo n.º 56
0
        user_dict = {
            'name': 'Aditi Akella',
            'password': '******',
            'email': '*****@*****.**'
        }
        user_create(user_dict)
        u2 = Users(name='Sophie Bulkin',
                   email='*****@*****.**',
                   password="******")
        u3 = Users(name='Grace Le',
                   email='*****@*****.**',
                   password="******")
        u4 = Users(name='Luke Manning',
                   email='*****@*****.**',
                   password="******")
        session.add_all([u2, u3, u4])
        session.commit()
    except:
        print("Records exist")

    print("Table: Users")
    list = Users.query.all()
    for row in list:
        print(row.user_id)
        print(row.name)
        print(row.email)
        print(row.password)
"""
Test on Terminal from IntelliJ
MacBook-Pro-2:flask-idea-homesiteZ johnmortensen$ cd models
MacBook-Pro-2:models johnmortensen$ sqlite3
Ejemplo n.º 57
0
#!/usr/bin/python3
"""Create the State "California" holding the City "San Francisco"
in the hbtn_0e_100_usa database."""

if __name__ == "__main__":

    import sys
    from relationship_state import State, Base
    from relationship_city import City
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session
    from sqlalchemy.schema import Table

    # MySQL credentials and database name come from the command line.
    connection_url = 'mysql+mysqldb://{}:{}@localhost/{}'.format(
        sys.argv[1], sys.argv[2], sys.argv[3])
    engine = create_engine(connection_url, pool_pre_ping=True)
    Base.metadata.create_all(engine)

    session = Session(engine)
    california = State(name='California')
    san_francisco = City(name='San Francisco')
    california.cities.append(san_francisco)
    session.add_all([california, san_francisco])
    session.commit()
    session.close()
Ejemplo n.º 58
-1
    def test_bulk_update(self):
        """bulk_update_mappings should emit one batched UPDATE keyed on id."""
        User, = self.classes("User")

        # Seed three users; expire_on_commit=False keeps them readable after
        # the commit without reloading.
        seed_session = Session(expire_on_commit=False)
        seed_session.add_all(
            [User(name="u1"), User(name="u2"), User(name="u3")])
        seed_session.commit()

        sess = Session()
        mappings = [
            {"id": 1, "name": "u1new"},
            {"id": 2, "name": "u2"},
            {"id": 3, "name": "u3new"},
        ]
        with self.sql_execution_asserter() as asserter:
            sess.bulk_update_mappings(User, mappings)

        asserter.assert_(
            CompiledSQL(
                "UPDATE users SET name=:name WHERE users.id = :users_id",
                [
                    {"users_id": 1, "name": "u1new"},
                    {"users_id": 2, "name": "u2"},
                    {"users_id": 3, "name": "u3new"},
                ],
            )
        )