def test_weak_ref(self):
    """A weak (non-required) foreign key should be NULLed when the row it
    points at is deleted, without deleting the referencing row."""
    i = self.get_interface()
    # two random lowercase table names
    table_1 = "".join(
        random.sample(string.ascii_lowercase, random.randint(5, 15)))
    table_2 = "".join(
        random.sample(string.ascii_lowercase, random.randint(5, 15)))

    schema_1 = Schema(table_1, _id=Field(int, pk=True), foo=Field(int, True))
    schema_2 = Schema(
        table_2,
        _id=Field(int, pk=True),
        s_pk=Field(schema_1, False),  # weak ref: not required
    )
    i.set_table(schema_1)
    i.set_table(schema_2)

    pk1 = i.insert(schema_1, {'foo': 1})
    pk2 = i.insert(schema_2, {'s_pk': pk1})
    q2 = query.Query()
    q2.is__id(pk2)

    # make sure it exists and is visible
    row = i.get_one(schema_2, q2)
    self.assertGreater(len(row), 0)

    # delete the row the weak fk points at
    q1 = query.Query()
    q1.is__id(pk1)
    i.delete(schema_1, q1)

    # the referencing row survives but its weak fk is now None
    row = i.get_one(schema_2, q2)
    self.assertGreater(len(row), 0)
    self.assertIsNone(row['s_pk'])
def test_to_interface_insert(self):
    """to_interface() includes required-with-default fields, omits unset
    optional fields, and raises KeyError for missing required fields."""
    orm_class = self.get_orm_class(
        foo=Field(int, True, default=1),
        bar=Field(str, False),
    )
    o = orm_class()
    fields = o.to_interface()
    self.assertTrue("foo" in fields)   # required w/ default is populated
    self.assertFalse("bar" in fields)  # optional and unset is omitted

    orm_class = self.get_orm_class()
    o = orm_class()

    # missing foo
    with self.assertRaises(KeyError):
        o.to_interface()
    o.foo = 1

    # missing bar
    with self.assertRaises(KeyError):
        o.to_interface()
    o.bar = "2"

    fields = o.to_interface()
    self.assertFalse("_id" in fields)  # pk isn't part of an insert payload
def test_transaction_nested_fail_1(self):
    """make sure 2 new tables in a wrapped transaction work as expected"""
    i = self.get_interface()
    name_1 = self.get_table_name()
    name_2 = self.get_table_name()
    s1 = Schema(name_1, _id=Field(int, pk=True), foo=Field(int, True))
    s2 = Schema(
        name_2,
        _id=Field(int, pk=True),
        bar=Field(int, True),
        s_pk=Field(s1),
    )

    # both inserts share one explicit transaction connection
    with i.transaction() as connection:
        pk1 = i.insert(s1, {"foo": 1}, connection=connection)
        pk2 = i.insert(s2, {"bar": 2, "s_pk": pk1}, connection=connection)

    # both rows landed and the fk points at the first row
    q1 = query.Query()
    q1.is__id(pk1)
    r1 = i.get_one(s1, q1)
    self.assertEqual(pk1, r1['_id'])

    q2 = query.Query()
    q2.is__id(pk2)
    r2 = i.get_one(s2, q2)
    self.assertEqual(pk2, r2['_id'])
    self.assertEqual(pk1, r2['s_pk'])
def test___init___default_fset(self):
    """default= and fset= should populate fields at instantiation and
    through modify(), while explicit values win over defaults."""
    orm_class = self.get_orm_class(
        foo=Field(int, default=5),
        bar=Field(int, fset=lambda o, v: 6 if v is None else v),
        che=Field(int),
    )

    # no kwargs: default and fset fire, plain field stays None
    o = orm_class()
    self.assertEqual(5, o.foo)
    self.assertEqual(6, o.bar)
    self.assertIsNone(o.che)
    o.modify(che=7)
    self.assertEqual(5, o.foo)
    self.assertEqual(6, o.bar)
    self.assertEqual(7, o.che)

    # explicit foo overrides the default, bar fset still fires
    o = orm_class(foo=1)
    self.assertEqual(1, o.foo)
    self.assertEqual(6, o.bar)
    self.assertIsNone(o.che)
    o.modify(che=7, bar=8)
    self.assertEqual(1, o.foo)
    self.assertEqual(8, o.bar)
    self.assertEqual(7, o.che)

    # everything passed explicitly
    o = orm_class(foo=1, bar=2, che=3)
    self.assertEqual(1, o.foo)
    self.assertEqual(2, o.bar)
    self.assertEqual(3, o.che)
def test_transaction_nested_fail_3(self):
    """make sure 2 tables where the first one already exists works, and
    second one has 2 refs"""
    # NOTE: the original computed table_name_1/table_name_2 locals that were
    # never used (get_schema generates its own names) -- dead code removed
    i = self.get_interface()
    s1 = self.get_schema(foo=Field(int, True))
    i.set_table(s1)

    # the second schema holds two separate references to the first
    s2 = self.get_schema(
        bar=Field(int, True),
        s_pk=Field(s1, True),
        s_pk2=Field(s1, True),
    )

    pk1 = i.insert(s1, {"foo": 1})
    pk2 = i.insert(s1, {"foo": 1})
    pk3 = i.insert(s2, {"bar": 2, "s_pk": pk1, "s_pk2": pk2})

    r1 = i.get_one(s1, query.Query().is__id(pk1))
    self.assertEqual(r1['_id'], pk1)

    # both fk columns point at their respective parent rows
    r2 = i.get_one(s2, query.Query().is__id(pk3))
    self.assertEqual(r2['_id'], pk3)
    self.assertEqual(r2['s_pk'], pk1)
    self.assertEqual(r2['s_pk2'], pk2)
def test__normalize_date_SQL(self):
    """this tests the common date kwargs you can use (in both SQLight and
    Postgres), if we ever add other backends this might need to be moved
    out of the general generator test"""
    i = self.get_interface()
    s = Schema(
        self.get_table_name(),
        foo=Field(datetime.datetime, True),
        _id=Field(int, True, pk=True),
        index_foo=Index('foo'),
    )
    i.set_table(s)

    pk20 = i.insert(s, {'foo': datetime.datetime(2014, 4, 20)})
    pk21 = i.insert(s, {'foo': datetime.datetime(2014, 4, 21)})

    # single date kwarg
    q = query.Query()
    q.is_foo(day=20)
    row = i.get_one(s, q)
    self.assertEqual(row['_id'], pk20)

    # multiple date kwargs are ANDed together
    q = query.Query()
    q.is_foo(day=21, month=4)
    row = i.get_one(s, q)
    self.assertEqual(row['_id'], pk21)

    # no row matches day 21 in march
    q = query.Query()
    q.is_foo(day=21, month=3)
    row = i.get_one(s, q)
    self.assertFalse(row)
def get_orm_class(self, table_name=None, **properties):
    """Create an Orm subclass bound to a (possibly generated) table name.

    Field/Index kwargs become orm properties; when no Field is passed a
    default foo/bar schema is used instead.
    """
    tn = self.get_table_name(table_name)
    properties["table_name"] = tn
    if "interface" not in properties:
        properties["interface"] = self.get_interface()

    # a Field instance or a Field subclass both count as explicit fields
    has_field = any(
        isinstance(v, Field)
        or (isinstance(v, type) and issubclass(v, Field))
        for v in properties.values()
    )
    if not has_field:
        properties.update({
            "foo": Field(int, True),
            "bar": Field(str, True),
            "ifoobar": Index("foo", "bar"),
        })

    # py2 type() needs a bytestring class name
    return type(
        ByteString(tn) if is_py2 else String(tn),
        (Orm,),
        properties,
    )
def test_unicode(self):
    """
    Jarid was having encoding issues, so I'm finally making sure prom
    only ever returns unicode strings
    """
    orm_class = self.get_orm_class()
    table_name = self.get_table_name()
    # use the generated name (the original created a second, unused name)
    orm_class.schema = self.get_schema(
        table_name,
        foo=Field(unicode, True),
        bar=Field(str, True),
        che=Field(str, False),
        baz=Field(int, False),
    )

    t = orm_class.create(
        foo=testdata.get_unicode_name(),
        bar=testdata.get_unicode_words(),
        che=testdata.get_unicode_words().encode('utf-8'),
        baz=testdata.get_int(1, 100000)
    )
    t2 = orm_class.query.get_pk(t.pk)

    self.assertEqual(t.foo, t2.foo)
    self.assertEqual(t.bar, t2.bar)
    # bytes written in come back as their unicode decoding
    self.assertEqual(t.che.decode("utf-8"), t2.che)
    self.assertTrue(isinstance(t.baz, int))
def test_hydrate_2(self):
    """hydrate() should run a callable default for any field it wasn't given"""
    orm_class = self.get_orm_class(
        foo=Field(int, True),
        bar=Field(str, default=lambda *_, **__: "lambda bar"),
    )
    hydrated = orm_class.hydrate(foo=1)
    self.assertEqual("lambda bar", hydrated.bar)
def test_type_fk(self):
    """A Field whose type is an Orm class becomes an integer foreign key"""
    orm_class = self.get_orm_class()
    fk_field = Field(orm_class)
    self.assertEqual(orm_class, fk_field.original_type)
    # fk columns are stored as (long) integer pk references, not serialized
    self.assertEqual(long, fk_field.interface_type)
    self.assertEqual(long, fk_field.type)
    self.assertIsNotNone(fk_field.schema)
    self.assertFalse(fk_field.is_serialized())
def test_field_bool(self):
    """There was a bug where SQLite boolean field always returned True, this
    tests to make sure that is fixed and it won't happen again"""
    i, s = self.get_table(bar=Field(bool), che=Field(bool))
    pk = i.insert(s, {"bar": False, "che": True})

    row = dict(i.get_one(s, query.Query().is__id(pk)))
    self.assertFalse(row["bar"])
    self.assertTrue(row["che"])
def get_schema(self, table_name=None, **fields_or_indexes):
    """Build a Schema, falling back to a default foo/bar layout when no
    fields are passed.

    Falsy (e.g. None) valued kwargs are dropped so callers can knock out
    fields -- this matches the behavior of the extended get_schema helper
    elsewhere in this file.
    """
    if not fields_or_indexes:
        fields_or_indexes.setdefault("foo", Field(int, True))
        fields_or_indexes.setdefault("bar", Field(str, True))
        fields_or_indexes.setdefault("ifoobar", Index("foo", "bar"))
        fields_or_indexes.setdefault("_id", Field(long, True, pk=True))

    # remove any None values (consistency with the extended helper)
    for k in list(fields_or_indexes.keys()):
        if not fields_or_indexes[k]:
            fields_or_indexes.pop(k)

    s = Schema(self.get_table_name(table_name), **fields_or_indexes)
    return s
def test___init__(self):
    """Field() requires a type argument; flags/options land where expected"""
    f = Field(str, True)
    self.assertTrue(f.required)
    self.assertTrue(issubclass(f.type, str))

    # the type argument is mandatory
    with self.assertRaises(TypeError):
        Field()

    f = Field(int, max_length=100)
    self.assertTrue(issubclass(f.type, int))
    # extra kwargs are collected into the options dict
    self.assertEqual(f.options['max_length'], 100)
class FOFieldGAOrm(Orm):
    """Test orm where each field's fsetter mirrors the *other* field"""
    table_name = "fofgaorm_table"

    foo = Field(int)
    @foo.fsetter
    def foo(self, val):
        # ignore the incoming value; mirror bar (10 when bar is unset)
        return getattr(self, "bar", 10)

    bar = Field(int)
    @bar.fsetter
    def bar(self, val):
        # ignore the incoming value; mirror foo (10 when foo is unset)
        return getattr(self, "foo", 10)
def test_set_table(self):
    """set_table() creates the table with its declared indexes and handles
    a wide range of field datatypes, including foreign key references."""
    i = self.get_interface()
    s = self.get_schema()

    # table shouldn't exist until set_table is called
    self.assertFalse(i.has_table(str(s)))
    i.set_table(s)
    self.assertTrue(i.has_table(str(s)))

    # make sure known indexes are there
    indexes = i.get_indexes(s)
    count = 0
    for known_index_name, known_index in s.indexes.items():
        for index_name, index_fields in indexes.items():
            if known_index.fields == index_fields:
                count += 1
    self.assertEqual(len(s.indexes), count)

    # make sure more exotic datatypes are respected
    s_ref = self.get_schema()
    i.set_table(s_ref)
    s_ref_id = self.insert(i, s_ref, 1)[0]

    s = prom.Schema(
        self.get_table_name(),
        _id=Field(int, pk=True),
        one=Field(bool, True),
        two=Field(int, True, size=50),
        three=Field(decimal.Decimal),
        four=Field(float, True, size=10),
        six=Field(long, True,),
        seven=Field(s_ref, False),
        eight=Field(datetime.datetime),
        nine=Field(datetime.date),
    )
    i.set_table(s)

    expected = {
        'one': True,
        'two': 50,
        'three': decimal.Decimal('1.5'),
        'four': 1.987654321,
        'six': 40000,
        'seven': s_ref_id,
        'eight': datetime.datetime(2005, 7, 14, 12, 30),
        'nine': datetime.date(2005, 9, 14),
    }
    pk = i.insert(s, expected)

    q = query.Query()
    q.is__id(pk)
    odb = i.get_one(s, q)
    # every value should round-trip through the interface unchanged
    for k, v in expected.items():
        self.assertEqual(v, odb[k])
def test_query_modified_table(self):
    """Querying a schema that gained a field after table creation should
    not blow up"""
    i = self.get_interface()
    s = prom.Schema('test_table', one=Field(int, True))
    i.set_table(s)

    # Add new column
    s.set_field("two", Field(int, False))

    # Test if query succeeds
    q = query.Query()
    q.is_two(None)
    i.get_one(s, q)
def test_index_ignore_case(self):
    """Fields with ignore_case=True match case-insensitively while keeping
    the stored casing; fields without it stay case-sensitive.

    Cleanup from the original: the per-character uppercase loop is replaced
    with str.upper() and the dead `lv[x] = lv[x].lower()` statement that
    leaked the loop variable (lv was never read afterwards) is removed.
    """
    i = self.get_interface()
    s = Schema(
        self.get_table_name(),
        _id=Field(int, pk=True),
        foo=Field(str, True, ignore_case=True),
        bar=Field(str, True),
        index_foo=Index('foo', 'bar'),
    )
    i.set_table(s)

    v = '*****@*****.**'
    i.insert(s, {'foo': v, 'bar': 'bar'})

    # exact-case lookup works
    q = query.Query()
    q.is_foo(v)
    r = i.get_one(s, q)
    self.assertGreater(len(r), 0)

    # fully upper-cased lookup still matches
    q = query.Query()
    q.is_foo(v.upper())
    r = i.get_one(s, q)
    self.assertGreater(len(r), 0)

    # stored casing is preserved even though matching ignores case
    i.insert(s, {'foo': 'FoO', 'bar': 'bar'})
    q = query.Query()
    q.is_foo('foo')
    r = i.get_one(s, q)
    self.assertGreater(len(r), 0)
    self.assertEqual(r['foo'], 'FoO')

    # bar has no ignore_case, so the wrong case on bar finds nothing
    q = query.Query()
    q.is_foo('Foo').is_bar('BAR')
    r = i.get_one(s, q)
    self.assertEqual(len(r), 0)

    q = query.Query()
    q.is_foo('FoO').is_bar('bar')
    r = i.get_one(s, q)
    self.assertGreater(len(r), 0)
    self.assertEqual(r['foo'], 'FoO')

    i.insert(s, {'foo': 'foo2', 'bar': 'bar'})
    q = query.Query()
    q.is_foo('foo2')
    r = i.get_one(s, q)
    self.assertGreater(len(r), 0)
    self.assertEqual(r['foo'], 'foo2')
class TM(Orm):
    # NOTE: defined inside a test method -- `self` here is the enclosing
    # test case, used only to generate a unique table name
    table_name = self.get_table_name()

    bar = Field(str, True)

    che = Field(str, False)
    @che.fsetter
    def che(self, field_val):
        """None passes through; any other value must start with 'boom'"""
        if field_val is None:
            return field_val
        if not field_val.startswith('boom'):
            raise ValueError("what the heck?")
        return field_val
def test_type_json(self):
    """dict and list field types are serialized to strings for storage"""
    for json_type in (dict, list):
        f = Field(json_type)
        self.assertEqual(json_type, f.original_type)
        # json-able types live in the db as serialized strings
        self.assertEqual(str, f.interface_type)
        self.assertEqual(str, f.type)
        self.assertIsNone(f.schema)
        self.assertTrue(f.is_serialized())
def get_schema(self, table_name=None, **fields_or_indexes):
    """Build a Schema, falling back to a default foo/bar layout when no
    fields are passed; falsy-valued kwargs are dropped so callers can
    knock out default fields."""
    if not fields_or_indexes:
        fields_or_indexes.setdefault("foo", Field(int, True))
        fields_or_indexes.setdefault("bar", Field(str, True))
        fields_or_indexes.setdefault("ifoobar", Index("foo", "bar"))
        fields_or_indexes.setdefault("_id", Field(long, True, pk=True))

    # remove any None values
    fields_or_indexes = {
        k: v for k, v in fields_or_indexes.items() if v
    }

    return Schema(self.get_table_name(table_name), **fields_or_indexes)
class FOFieldIGetOrm(Orm):
    """Test orm whose foo field overrides its interface getter"""
    table_name = "FOFieldIGetOrm_table"

    foo = Field(int)
    @foo.igetter
    def foo(cls, val):
        # whatever is stored, reading through the interface yields 1000
        return 1000
def test__normalize_val_SQL_with_list(self):
    """Date kwargs with list values should render EXTRACT(...) IN (...)
    clauses with the values flattened into the args list"""
    i = self.get_interface()
    s = Schema("fake_table_name", ts=Field(datetime.datetime, True))

    # one date kwarg with one value
    kwargs = dict(day=[10])
    fstr, fargs = i._normalize_val_SQL(
        s, {'symbol': 'IN', 'list': True}, 'ts', None, kwargs)
    self.assertEqual('EXTRACT(DAY FROM "ts") IN (%s)', fstr)
    self.assertEqual(kwargs['day'], fargs)

    # multiple kwargs are ANDed, args flattened in order
    kwargs = dict(day=[11, 13], hour=[12])
    fstr, fargs = i._normalize_val_SQL(
        s, {'symbol': 'IN', 'list': True}, 'ts', None, kwargs)
    self.assertEqual(
        'EXTRACT(DAY FROM "ts") IN (%s, %s) AND EXTRACT(HOUR FROM "ts") IN (%s)',
        fstr)
    self.assertEqual(kwargs['day'], fargs[0:2])
    self.assertEqual(kwargs['hour'], fargs[2:])

    # an unknown date kwarg blows up
    kwargs = dict(bogus=[5])
    with self.assertRaises(KeyError):
        i._normalize_val_SQL(
            s, {'symbol': 'IN', 'list': True}, 'ts', None, kwargs)
class FOFieldISetOrm(Orm):
    """Test orm whose foo field overrides its interface setter per operation"""
    table_name = "FOFieldISetOrm_table"

    foo = Field(int)
    @foo.isetter
    def foo(cls, val, is_update, is_modified):
        # ignore the incoming value: write 100 on updates, 10 on inserts
        val = 100 if is_update else 10
        return val
def test__normalize_val_SQL_eq(self):
    """Equality queries on datetime fields support date kwargs (EXTRACT)
    and None values (IS / IS NOT)"""
    i = self.get_interface()
    s = Schema("fake_table_name", ts=Field(datetime.datetime, True))
    orm_class = s.create_orm()

    # a single date kwarg renders an EXTRACT equality
    fstr, fargs = orm_class.query.is_ts(day=10).render(placeholder=True)
    self.assertTrue('EXTRACT(DAY FROM "ts") = %s' in fstr)
    self.assertEqual(10, fargs[0])

    # multiple date kwargs are ANDed together
    fstr, fargs = orm_class.query.is_ts(day=11, hour=12).render(
        placeholder=True)
    self.assertTrue(
        'EXTRACT(DAY FROM "ts") = %s AND EXTRACT(HOUR FROM "ts") = %s'
        in fstr)
    self.assertEqual(11, fargs[0])
    self.assertEqual(12, fargs[1])

    # None renders with IS / IS NOT
    fstr, fargs = orm_class.query.is_ts(None).render(placeholder=True)
    self.assertTrue('"ts" IS %s' in fstr)
    fstr, fargs = orm_class.query.not_ts(None).render(placeholder=True)
    self.assertTrue('"ts" IS NOT %s' in fstr)

    # an unknown date kwarg blows up
    with self.assertRaises(KeyError):
        orm_class.query.is_ts(bogus=5).render(placeholder=True)
def test_group_field_name(self):
    """A field named after the sql GROUP keyword must still round-trip"""
    i = self.get_interface()
    s = Schema(
        self.get_table_name(),
        _id=Field(int, True, pk=True),
        group=Field(str, True),
    )
    i.set_table(s)

    text = testdata.get_words()
    pk = i.insert(s, {'group': text})

    # the reserved-word column behaves like any other field
    row = dict(i.get_one(s, query.Query().is__id(pk)))
    self.assertEqual(text, row["group"])
    self.assertEqual(pk, row["_id"])
def test_handle_error_column(self):
    """Error recovery can add a missing column only when it isn't required"""
    i, s = self.get_table()
    s.set_field("che", Field(str, True))  # it's required
    fields = {
        'foo': 1,
        'bar': 'v1',
        'che': "this field will cause the query to fail",
    }

    # a required missing column can't be recovered from
    with self.assertRaises(prom.InterfaceError):
        i.insert(s, fields)

    # not required so error recovery can fire
    s = self.get_schema(table_name=str(s))
    s.set_field("che", Field(str, False))
    pk = i.insert(s, fields)
    self.assertLess(0, pk)
def test_field_timestamp(self):
    """Raw numeric timestamps written with SQL should come back as
    datetimes, with out-of-range low values clamped to datetime.min"""
    table_name = self.get_table_name()
    schema = self.get_schema(table_name, ZTIMESTAMP=Field(datetime.datetime))
    q = query.Query()
    epoch = datetime.datetime(1970, 1, 1)

    i = self.create_interface()
    i.set_table(schema)

    # a current fractional timestamp round-trips to 5 decimal places
    timestamp = (datetime.datetime.utcnow() - epoch).total_seconds()
    sql = "INSERT INTO {} (ZTIMESTAMP) VALUES ({:.5f})".format(
        table_name, timestamp)
    i.query(sql, ignore_result=True)
    r = i.get_one(schema, q)
    self.assertEqual(
        (r["ZTIMESTAMP"] - epoch).total_seconds(), round(timestamp, 5))

    # far before year 1 clamps to datetime.min
    timestamp = -62167219200
    sql = "INSERT INTO {} (ZTIMESTAMP) VALUES ({})".format(
        table_name, timestamp)
    i.query(sql, ignore_result=True)
    r = i.get_one(schema, q.offset(1))
    self.assertEqual(r["ZTIMESTAMP"], datetime.datetime.min)

    # a very large timestamp still converts to a concrete datetime
    timestamp = 106751991167
    sql = "INSERT INTO {} (ZTIMESTAMP) VALUES ({})".format(
        table_name, timestamp)
    i.query(sql, ignore_result=True)
    r = i.get_one(schema, q.offset(2))
    self.assertEqual(
        r["ZTIMESTAMP"], datetime.datetime(5352, 11, 1, 10, 52, 47))
def test_type_pickle(self):
    """Non-json-able types (sets, arbitrary classes) are pickled to strings"""
    class Foo(object):
        pass

    for pickle_type in (set, Foo):
        f = Field(pickle_type)
        self.assertEqual(pickle_type, f.original_type)
        # pickled values live in the db as serialized strings
        self.assertEqual(str, f.interface_type)
        self.assertEqual(str, f.type)
        self.assertIsNone(f.schema)
        self.assertTrue(f.is_serialized())
def test_set_index(self):
    """set_index validates index names, fields, duplicates, and unique flag"""
    s = Schema("foo")
    s.set_field("bar", Field(int, True))
    s.set_field("che", Field(str))

    # an index needs at least one field
    with self.assertRaises(ValueError):
        s.set_index("foo", Index())

    # an index needs a non-empty name
    with self.assertRaises(ValueError):
        s.set_index("", Index("bar", "che"))

    s.set_index("bar_che", Index("che", "bar"))
    # duplicate index names are rejected
    with self.assertRaises(ValueError):
        s.set_index("bar_che", Index("che", "bar"))

    s.set_index("testing", Index("che", unique=True))
    self.assertTrue(s.indexes["testing"].unique)
def test_field_datetime_datatypes(self):
    """Makes sure datatypes.Datetime works for the different interfaces"""
    orm_class = self.get_orm_class(bar=Field(datetime.datetime))
    created = orm_class.create(bar=Datetime())

    # a Datetime made afterwards should sort strictly after the stored one
    cutoff = Datetime()
    fetched = orm_class.query.lt_bar(cutoff).one()
    self.assertEqual(created.bar, fetched.bar)