def test_order_by_group_by(self):
    """ORDER BY over an aggregate combines correctly with GROUP BY."""
    self.connection.execute("INSERT INTO test VALUES (100, 'Title 10')")
    self.connection.execute("INSERT INTO test VALUES (101, 'Title 10')")
    id_column = Column("id", "test")
    title_column = Column("title", "test")
    select = Select(Count(id_column), group_by=title_column,
                    order_by=Count(id_column))
    result = self.connection.execute(select)
    # Groups come back ordered by their row count.
    self.assertEquals(result.get_all(), [(1, ), (3, )])
def test_expr_contains_string(self):
    """contains_string() matches LIKE metacharacters literally."""
    self.connection.execute("INSERT INTO test VALUES (30, 'blah_%!!x')")
    self.connection.execute("INSERT INTO test VALUES (40, 'blah!!x')")
    table = SQLToken("test")
    id = Column("id", table)
    title = Column("title", table)
    rows = list(
        self.connection.execute(Select(id, title.contains_string(u"_%!!"))))
    # Only the row containing the literal substring matches.
    self.assertEquals(rows, [(30,)])
def test_expr_contains_string(self):
    """A substring full of wildcard characters is escaped, not expanded."""
    self.connection.execute("INSERT INTO test VALUES (30, 'blah_%!!x')")
    self.connection.execute("INSERT INTO test VALUES (40, 'blah!!x')")
    id = Column("id", SQLToken("test"))
    title = Column("title", SQLToken("test"))
    select = Select(id, title.contains_string(u"_%!!"))
    matched = list(self.connection.execute(select))
    self.assertEquals(matched, [(30, )])
def test_compile_case(self):
    """The Case expr is compiled in a Postgres' CASE expression."""
    state = State()
    statement = compile(
        Case([(Column("foo") > 3, u"big"), (Column("bar") == None, 4)]),
        state)
    assert statement == (
        "CASE WHEN (foo > ?) THEN ? WHEN (bar IS NULL) THEN ? END")
    # Parameters are collected in compile order.
    assert [param.get() for param in state.parameters] == [3, "big", 4]
def test_expr_startswith(self):
    """startswith() treats LIKE metacharacters in the prefix literally."""
    self.connection.execute("INSERT INTO test VALUES (30, '!!_%blah')")
    self.connection.execute("INSERT INTO test VALUES (40, '!!blah')")
    test_table = SQLToken("test")
    id = Column("id", test_table)
    title = Column("title", test_table)
    select = Select(id, title.startswith(u"!!_%"))
    rows = list(self.connection.execute(select))
    self.assertEquals(rows, [(30, )])
def test_expr_endswith(self):
    """endswith() escapes LIKE metacharacters in the suffix."""
    self.connection.execute("INSERT INTO test VALUES (30, 'blah_%!!')")
    self.connection.execute("INSERT INTO test VALUES (40, 'blah!!')")
    table = SQLToken("test")
    id = Column("id", table)
    title = Column("title", table)
    matched = list(
        self.connection.execute(Select(id, title.endswith(u"_%!!"))))
    assert matched == [(30,)]
def test_expr_startswith(self):
    """Only the row whose title begins with the literal prefix matches."""
    self.connection.execute("INSERT INTO test VALUES (30, '!!_%blah')")
    self.connection.execute("INSERT INTO test VALUES (40, '!!blah')")
    table = SQLToken("test")
    result = self.connection.execute(
        Select(Column("id", table),
               Column("title", table).startswith(u"!!_%")))
    self.assertEquals(list(result), [(30,)])
def test_get_insert_identity(self):
    """get_insert_identity() yields a clause locating the inserted row."""
    result = self.connection.execute("INSERT INTO test (title) "
                                     "VALUES ('Title 30')")
    primary_key = (Column("id", SQLToken("test")),)
    primary_variables = (Variable(),)
    where = result.get_insert_identity(primary_key, primary_variables)
    # Selecting with the identity clause finds the row just inserted.
    title_result = self.connection.execute(
        Select(Column("title", SQLToken("test")), where))
    self.assertEquals(title_result.get_one(), ("Title 30", ))
def add_criterion(self, name):
    """Register *name* as an extra criterion on the event and info queries."""
    count_name = "{}_count".format(name)
    # Count matching events into a per-criterion aggregate column.
    counted = Sum(If(C.event_id.is_in(Param(name)), C.event_count, 0))
    self.event_query.columns.append(counted.as_(count_name))
    self.event_where.append(Param(name))
    self.event_having.exprs += (Column(count_name),)
    self.info_query.columns.append(Sum(Column(count_name)).as_(count_name))
def test_execute_update_returning(self):
    """UPDATE ... RETURNING hands back the primary key column values."""
    if self.database._version < 80200:
        return  # Can't run this test with old PostgreSQL versions.
    self.connection.execute("INSERT INTO returning_test VALUES (1, 2)")
    id1 = Column("id1", "returning_test")
    id2 = Column("id2", "returning_test")
    update = Update({"id2": 3}, id1 == 1, primary_columns=(id1, id2))
    result = self.connection.execute(Returning(update))
    self.assertEquals(result.get_one(), (1, 3))
def __init__(self, prop, cls, name, primary, variable_class,
             variable_kwargs):
    """Wrap *prop* as a Storm column named *name* on class *cls*."""
    factory = VariableFactory(variable_class, column=self, **variable_kwargs)
    Column.__init__(self, name, cls, primary, factory)
    self.cls = cls  # Used by references
    # Mirror the property's descriptor protocol on the column itself so
    # each attribute access saves one function call.
    for descriptor_name in ("__get__", "__set__", "__delete__"):
        setattr(self, descriptor_name, getattr(prop, descriptor_name))
def __init__(self, prop, cls, name, primary, variable_class,
             variable_kwargs):
    """Initialise the column and forward descriptor access to *prop*."""
    Column.__init__(
        self, name, cls, primary,
        VariableFactory(variable_class, column=self, **variable_kwargs))
    self.cls = cls  # Used by references
    # Copy the descriptor methods from the property so attribute access
    # does not pay for an extra delegation call.
    for method_name in ("__get__", "__set__", "__delete__"):
        setattr(self, method_name, getattr(prop, method_name))
def join_aliased_relation(local_cls, remote_cls, relation):
    """Build a join expression between local_cls and remote_cls.

    This is equivalent to relation.get_where_for_join(), except that the
    join expression is rebuilt to be relative to the given local_cls and
    remote_cls (either of which may be an alias).

    :return: the join expression.
    """
    local_key = tuple(
        Column(column.name, local_cls) for column in relation.local_key)
    remote_key = tuple(
        Column(column.name, remote_cls) for column in relation.remote_key)
    return compare_columns(local_key, remote_key)
def test_set_none_with_allow_none_and_column_with_table(self):
    """Setting None with allow_none=False raises a NoneError naming
    the table-qualified column.

    The original used Python-2-only ``except NoneError, e:`` syntax (a
    SyntaxError on Python 3) and silently swallowed the exception, so the
    test also passed when no error was raised at all.
    """
    column = Column("column_name", SQLToken("table_name"))
    variable = CustomVariable(allow_none=False, column=column)
    try:
        variable.set(None)
    except NoneError as e:
        # The message must identify the offending column, qualified by
        # its table (consistent with the sibling test that checks this).
        self.assertTrue("table_name.column_name" in str(e))
    else:
        self.fail("NoneError not raised")
def test_get_insert_identity(self):
    """The identity clause uses currval() of the column's sequence."""
    result = self.connection.execute("SELECT 1")
    column = Column("thecolumn", "thetable")
    where = result.get_insert_identity((column, ), (IntVariable(), ))
    statement = compile(where)
    assert statement == (
        "thetable.thecolumn = (SELECT currval('thetable_thecolumn_seq'))")
def test_set_none_with_allow_none_and_column():
    """NoneError mentions the column name when allow_none is False."""
    variable = CustomVariable(
        allow_none=False, column=Column("column_name"))
    try:
        variable.set(None)
    except NoneError as e:
        assert "column_name" in ustr(e)
    else:
        # set(None) must not succeed.
        assert False
def test_set_none_with_allow_none_and_column(self):
    """Setting None with allow_none=False raises a NoneError naming
    the column.

    The original referenced ``e`` after the except block, but Python 3
    deletes the ``as e`` binding when the handler exits, so the final
    assertion raised NameError instead of checking the message.
    """
    column = Column("column_name")
    variable = CustomVariable(allow_none=False, column=column)
    try:
        variable.set(None)
    except NoneError as e:
        # The message must identify the offending column.
        self.assertTrue("column_name" in str(e))
    else:
        self.fail("NoneError not raised")
def test_execute_insert_returning_without_variables(self):
    """Without primary_variables, the RETURNING system won't be used."""
    insert = Insert({}, primary_columns=(Column("id1", "returning_test"), ))
    self.connection.execute(insert)
    # The table defaults are applied instead.
    result = self.connection.execute("SELECT * FROM returning_test")
    self.assertEquals(result.get_one(), (123, 456))
def test_set_none_with_allow_none_and_column_with_table(self):
    """The NoneError message is qualified with the table name."""
    column = Column("column_name", SQLToken("table_name"))
    variable = CustomVariable(allow_none=False, column=column)
    raised = None
    try:
        variable.set(None)
    except NoneError as error:
        raised = error
    if raised is None:
        self.fail("NoneError not raised")
    self.assertTrue("table_name.column_name" in str(raised))
def test_wb_execute_insert_returning_not_used_with_old_postgres(self):
    """Shouldn't try to use RETURNING with PostgreSQL < 8.2."""
    columns = (Column("id1", "returning_test"),
               Column("id2", "returning_test"))
    variables = (IntVariable(), IntVariable())
    insert = Insert({}, primary_columns=columns,
                    primary_variables=variables)
    # Pretend we are talking to a pre-8.2 server.
    self.database._version = 80109
    self.connection.execute(insert)
    for variable in variables:
        self.assertFalse(variable.is_defined())
    result = self.connection.execute("SELECT * FROM returning_test")
    self.assertEquals(result.get_one(), (123, 456))
def test_compile_case_with_default(self):
    """
    If a default is provided, the resulting CASE expression includes
    an ELSE clause.
    """
    state = State()
    statement = compile(
        Case([(Column("foo") > 3, u"big")], default=9), state)
    assert statement == "CASE WHEN (foo > ?) THEN ? ELSE ? END"
    assert [param.get() for param in state.parameters] == [3, "big", 9]
def __init__(self, prop, cls, attr, name, primary, variable_class,
             variable_kwargs):
    """Build the column for *prop*, consuming schema-only options."""
    # Pull the schema options out so they never reach the variable.
    pop_option = variable_kwargs.pop
    self.size = pop_option('size', Undef)
    self.unsigned = pop_option('unsigned', False)
    self.index = pop_option('index', False)
    self.unique = pop_option('unique', False)
    self.auto_increment = pop_option('auto_increment', False)
    self.array = pop_option('array', None)
    factory = VariableFactory(
        variable_class, column=self, validator_attribute=attr,
        **variable_kwargs)
    Column.__init__(self, name, cls, primary, factory)
    self.cls = cls  # Used by references
    # Copy the descriptor methods (and creation order) from the property
    # so each attribute access avoids one extra function call.
    for copied in ("__get__", "__set__", "__delete__", '_creation_order'):
        setattr(self, copied, getattr(prop, copied))
def test_compile_case_with_expression(self):
    """
    If an expression is provided, the resulting CASE expression uses the
    simple syntax.
    """
    state = State()
    statement = compile(
        Case([(1, u"one"), (2, u"two")], expression=Column("foo")), state)
    self.assertEqual(statement, "CASE foo WHEN ? THEN ? WHEN ? THEN ? END")
    self.assertEqual(
        [param.get() for param in state.parameters], [1, "one", 2, "two"])
def test_execute_insert_returning_without_columns(self):
    """Without primary_columns, the RETURNING system won't be used."""
    variable1 = IntVariable()
    insert = Insert({Column("id1", "returning_test"): 123},
                    primary_variables=(variable1, ))
    self.connection.execute(insert)
    # No RETURNING means the variable is left undefined.
    self.assertFalse(variable1.is_defined())
    result = self.connection.execute("SELECT * FROM returning_test")
    self.assertEquals(result.get_one(), (123, 456))
def __init__(self, prop, cls, attr, name, primary, variable_class,
             variable_kwargs):
    """Initialise the column, extracting schema options from the kwargs."""
    for option, default in (('size', Undef), ('unsigned', False),
                            ('auto_increment', False), ('array', None)):
        setattr(self, option, variable_kwargs.pop(option, default))
    Column.__init__(
        self, name, cls, primary,
        properties.VariableFactory(
            variable_class, column=self, validator_attribute=attr,
            **variable_kwargs))
    self.cls = cls  # Used by references
    # Forward the descriptor protocol to the property to avoid one
    # extra function call per attribute access.
    for attr in ("__get__", "__set__", "__delete__"):
        setattr(self, attr, getattr(prop, attr))
def test_execute_insert_returning(self):
    """INSERT ... RETURNING fills the primary variables immediately."""
    if self.database._version < 80200:
        return  # Can't run this test with old PostgreSQL versions.
    columns = (Column("id1", "returning_test"),
               Column("id2", "returning_test"))
    variables = (IntVariable(), IntVariable())
    self.connection.execute(
        Insert({}, primary_columns=columns, primary_variables=variables))
    for variable, expected in zip(variables, (123, 456)):
        self.assertTrue(variable.is_defined())
        self.assertEquals(variable.get(), expected)
    result = self.connection.execute("SELECT * FROM returning_test")
    self.assertEquals(result.get_one(), (123, 456))
def test_execute_insert_auto_increment_primary_key(self):
    """Only the auto-increment key variable is set from the insert ID."""
    id_column = Column("id", "test")
    id_variable = IntVariable()
    title_column = Column("title", "test")
    title_variable = UnicodeVariable(u"testing")
    # This is not part of the table. It is just used to show that
    # only one primary key variable is set from the insert ID.
    dummy_column = Column("dummy", "test")
    dummy_variable = IntVariable()
    self.connection.execute(
        Insert({title_column: title_variable},
               primary_columns=(id_column, dummy_column),
               primary_variables=(id_variable, dummy_variable)))
    self.assertTrue(id_variable.is_defined())
    self.assertFalse(dummy_variable.is_defined())
    # The newly inserted row should have the maximum id value for
    # the table.
    result = self.connection.execute("SELECT MAX(id) FROM test")
    self.assertEqual(id_variable.get(), result.get_one()[0])
def preloadBuildsData(self, builds):
    # Eagerly load the objects that rendering a list of snap builds
    # touches, to avoid per-build queries later.
    # Circular import.
    from lp.snappy.model.snap import Snap
    load_related(Person, builds, ["requester_id"])
    lfas = load_related(LibraryFileAlias, builds, ["log_id"])
    load_related(LibraryFileContent, lfas, ["contentID"])
    archives = load_related(Archive, builds, ["archive_id"])
    load_related(Person, archives, ["ownerID"])
    # Walk the distro series chain so distro/series lookups are warm.
    distroarchseries = load_related(
        DistroArchSeries, builds, ['distro_arch_series_id'])
    distroseries = load_related(
        DistroSeries, distroarchseries, ['distroseriesID'])
    load_related(Distribution, distroseries, ['distributionID'])
    snaps = load_related(Snap, builds, ["snap_id"])
    getUtility(ISnapSet).preloadDataForSnaps(snaps)
    snapbuild_ids = set(map(attrgetter("id"), builds))
    # CTE ranking store-upload jobs per build; rank 1 below picks the
    # most recent job (highest job id) for each snapbuild.
    latest_jobs_cte = With("LatestJobs", Select(
        (SnapBuildJob.job_id,
         SQL(
             "rank() OVER "
             "(PARTITION BY snapbuild ORDER BY job DESC) AS rank")),
        tables=SnapBuildJob,
        where=And(
            SnapBuildJob.snapbuild_id.is_in(snapbuild_ids),
            SnapBuildJob.job_type == SnapBuildJobType.STORE_UPLOAD)))
    LatestJobs = Table("LatestJobs")
    sbjs = list(IStore(SnapBuildJob).with_(latest_jobs_cte).using(
        SnapBuildJob, LatestJobs).find(
            SnapBuildJob,
            SnapBuildJob.job_id == Column("job", LatestJobs),
            Column("rank", LatestJobs) == 1))
    sbj_map = {}
    for sbj in sbjs:
        sbj_map[sbj.snapbuild] = sbj.makeDerived()
    # Cache the latest upload job (or None) on each build's property
    # cache so last_store_upload_job needs no further queries.
    for build in builds:
        get_property_cache(build).last_store_upload_job = (
            sbj_map.get(build))
    load_related(Job, sbjs, ["job_id"])
def get_packagesets(dsds, in_parent):
    """Return the packagesets for the given dsds inside the parent or
    the derived `DistroSeries`.

    Returns a dict with the corresponding packageset list for each dsd id.

    :param dsds: An iterable of `DistroSeriesDifference` instances.
    :param in_parent: A boolean indicating if we should look in the parent
        series' archive instead of the derived series' archive.
    """
    if len(dsds) == 0:
        return {}

    # NOTE(review): FlatPackagesetInclusion presumably holds the
    # transitive closure of packageset inclusion (parent/child pairs) —
    # confirm against the database schema.
    FlatPackagesetInclusion = Table("FlatPackagesetInclusion")

    tables = IStore(Packageset).using(DistroSeriesDifference, Packageset,
                                      PackagesetSources,
                                      FlatPackagesetInclusion)
    results = tables.find(
        (DistroSeriesDifference.id, Packageset),
        # Join each DSD's source package to the packagesets that
        # (directly or transitively) include it.
        PackagesetSources.packageset_id == Column(
            "child", FlatPackagesetInclusion),
        # Restrict to packagesets of the requested series side.
        Packageset.distroseries_id == (
            DistroSeriesDifference.parent_series_id if in_parent
            else DistroSeriesDifference.derived_series_id),
        Column("parent", FlatPackagesetInclusion) == Packageset.id,
        PackagesetSources.sourcepackagename_id == (
            DistroSeriesDifference.source_package_name_id),
        DistroSeriesDifference.id.is_in(dsd.id for dsd in dsds))
    results = results.order_by(PackagesetSources.sourcepackagename_id,
                               Packageset.name)

    # Group the ordered (dsd id, packageset) pairs by dsd id.
    grouped = defaultdict(list)
    for dsd_id, packageset in results:
        grouped[dsd_id].append(packageset)
    return grouped
def test_currval_no_escaping(self):
    """A schema-qualified name needing no quoting is emitted verbatim."""
    statement = compile(currval(Column("thecolumn", "theschema.thetable")))
    self.assertEquals(
        statement, "currval('theschema.thetable_thecolumn_seq')")
def check_sequence(self):
    """Check if punchtimes match punch sequence numbers.

    Returns True when the punch sequence numbers, read in punch-time
    order, are already ascending.

    Fixes: the original bound a local named ``sorted``, shadowing the
    builtin, and hand-rolled copy-then-sort instead of using ``sorted()``.
    """
    # Sequence numbers of the real (non-ignored, non-override) punches,
    # ordered by effective punch time.
    punchsequence = list(
        self.punches.find(
            Not(Punch.card_punchtime == None),
            Not(Punch.ignore == True),
            Punch.sistation == SIStation.id,
            SIStation.control == Control.id,
            Not(Control.override == True),
        ).order_by(
            'COALESCE(manual_punchtime, card_punchtime)'
        ).values(Column('sequence')))
    # Valid iff the time-ordered sequence numbers are already ascending.
    return punchsequence == sorted(punchsequence)
def test_currval_escaped_schema_table_and_column(self):
    """Names containing spaces are double-quoted inside currval()."""
    expr = currval(Column("the column", "the schema.the table"))
    self.assertEquals(
        compile(expr),
        """currval('"the schema"."the table_the column_seq"')""")
def test_set_none_with_allow_none_and_column_with_table():
    """Setting None is rejected when allow_none is False."""
    variable = CustomVariable(
        allow_none=False,
        column=Column("column_name", SQLToken("table_name")))
    with pytest.raises(NoneError):
        variable.set(None)
def get_insert_identity(self, primary_key, primary_variables):
    """Return an expression locating the row just inserted, via rowid."""
    rowid_column = Column('rowid')
    return Eq(rowid_column, Rowid(self.lastrowid))