def test_int_enum():
    class X(enum.IntEnum):
        a = 1
        b = 2
        c = 3

    meta = MetaData()
    tbl = Table(
        "mytable", meta,
        Column("id", Integer, primary_key=True, autoincrement=True),
        Column('name', types.String()),
        Column("x", IntCast(X), nullable=True))
    # 'sqlite:///:memory:'
    dbf = Dbf(meta, test.file_path('int_enum.sqlite3'))
    ok_(not dbf.exists())
    dbf.ensure_db()
    with dbf.connect() as conn:
        r = conn.execute(tbl.insert().values(name='abc'))
        id1 = r.inserted_primary_key[0]
        log.debug(id1)
        r = conn.execute(tbl.insert().values(name='xyz', x=None))
        id2 = r.inserted_primary_key[0]
        log.debug(id2)
    dbf.execute(tbl.update().where(tbl.c.id == id1)
                .values(name='ed', x=X.c))
    fetch = dbf.execute(select([tbl])).fetchall()
    attach = {r.id: r.x for r in fetch}
    eq_(attach[id1], X.c)
    eq_(attach[id2], None)
def test_docs():
    import doctest
    import hashstore.utils as utils
    import hashstore.utils.ignore_file as ignore_file

    for t in (utils, ignore_file):
        r = doctest.testmod(t)
        ok_(r.attempted > 0, f'There are no doctests in module {t}')
        eq_(r.failed, 0)
def test_LiteBackend():
    hs = BlobStore(os.path.join(test.dir, 'test_HashStore'))
    not_existent = '4no3jb46qaff0a0pwg24lu0y8eq5ldmdich3su14mkcr76m8wr'

    def store():
        seed(0)
        s = random_bytes(40)
        eq_(len(s), 40)

        # single-shot write, finalized immediately
        w0 = hs.writer()
        r0 = w0.write(s, done=True)
        eq_(inline_udk, str(r0))

        # address computed independently must match and hash equally
        r0a = ContentAddress(Cake.from_bytes(s))
        eq_(r0, r0a)
        eq_(False, r0 == 0)
        eq_(False, r0a == 0)
        eq_(hash(r0), hash(r0a))
        ok_(hs.lookup(Cake.from_bytes(s)).found())

        # multi-chunk write, finalized explicitly
        w1 = hs.writer()
        for _ in range(3):
            w1.write(random_bytes(100))
        r1 = w1.done()
        s1 = str(r1)
        eq_(db_udk, s1)

        w2 = hs.writer()
        for _ in range(100):  # ~100KB total
            w2.write(random_bytes(1000))
        w2.done()
        r2 = w2.done()  # calling done() twice returns the same address
        eq_(file_udk, str(r2))
        return r0, r1, r2

    r0, r1, r2 = store()

    # test recall: re-seed so random_bytes reproduces the stored streams
    seed(0)
    o0 = hs.get_content(r0).stream()
    eq_(o0.read(40), random_bytes(40))
    eq_(0, len(o0.read()))
    o1 = hs.get_content(r1).stream()
    for _ in range(3):
        eq_(o1.read(100), random_bytes(100))
    eq_(0, len(o1.read()))
    o2 = hs.get_content(r2).stream()
    for _ in range(100):
        eq_(o2.read(1000), random_bytes(1000))
    eq_(0, len(o2.read()))

    # store again: content addressing makes this idempotent
    store()

    # retrieve non-existent address
    try:
        hs.get_content(not_existent)
        ok_(False)
    except NotFoundError:
        pass

    all_addresses = list(hs)
    eq_(3, len(all_addresses))
def test_cake_type():
    meta = MetaData()
    tbl = Table(
        "mytable", meta,
        Column("guid", StringCast(ids.Cake), primary_key=True,
               default=lambda: ids.Cake.new_portal()),
        Column('name', types.String()),
        Column("attachment", StringCast(ids.Cake), nullable=True))
    tbl2 = Table(
        "mytable2", meta,
        Column("guid", StringCast(ids.Cake), primary_key=True,
               default=lambda: ids.Cake.new_portal()),
        Column('name', types.String()),
        Column("attachment", StringCast(ids.Cake), nullable=True))
    # 'sqlite:///:memory:'
    dbf = Dbf(meta, test.file_path('test.sqlite3'))

    def run_scenario(dbf, tbl):
        with dbf.connect() as conn:
            r = conn.execute(tbl.insert().values(name='abc'))
            guid1 = r.last_inserted_params()['guid']
            log.debug(guid1)
            r = conn.execute(tbl.insert().values(name='xyz', attachment=None))
            guid2 = r.last_inserted_params()['guid']
            log.debug(guid2)
        dbf.execute(tbl.update().where(tbl.c.guid == guid1).values(
            name='ed', attachment=ids.Cake.from_bytes(b'asdf')))
        fetch = dbf.execute(select([tbl])).fetchall()
        attach = {r.guid: r.attachment for r in fetch}
        return attach, guid1, guid2

    ok_(not dbf.exists())
    dbf.ensure_db()

    attach, guid1, guid2 = run_scenario(dbf, tbl)
    eq_(attach[guid1], ids.Cake('01ME5Mi'))
    eq_(attach[guid2], None)

    attach, guid1, guid2 = run_scenario(dbf, tbl2)
    eq_(attach[guid1], ids.Cake('01ME5Mi'))
    eq_(attach[guid2], None)

    tbl.drop(dbf.engine())
    eq_(dbf.engine().table_names(), ['mytable2'])

    dbf = Dbf(meta, test.file_path('test.sqlite3'))
    eq_(dbf.engine().table_names(), ['mytable2'])
    dbf.ensure_db()
    eq_(dbf.engine().table_names(), ['mytable', 'mytable2'])
def wait_process(self, p_id, print_all_logs=False, expect_rc=None,
                 expect_read=None, save_words=None):
    p, cmd, logpath = self.processes[p_id]
    # print('waiting on :{cmd}\npid={p_id}\nlog={logpath}\n'.format(**locals()))
    rc = p.wait()
    logtext = open(logpath).read()
    if expect_rc is not None:
        if not isinstance(expect_rc, list):
            expect_rc = [expect_rc]
        ok_(rc in expect_rc, logtext)
    if expect_read is not None:
        assert_text(logtext, expect_read, save_words=save_words)
    if print_all_logs:
        print('{cmd}\nrc:{rc}\n{logtext}\n'.format(**locals()))
    return rc, logtext
def test_api():
    from hashstore.utils.api import ApiCallRegistry
    methods = ApiCallRegistry()

    class A:
        @methods.call(coerce_return_fn=lambda r: -r)
        def returns_5(self, a, b=4):
            '''
            documentation
            '''
            return 5

        @methods.call()
        def none(self):
            pass

        @methods.call(x=lambda i: i * i)
        def error(self, x):
            raise ValueError('%d' % x)

    eq_(set('returns_5 error none'.split()), set(methods.calls.keys()))
    eq_(methods.calls['returns_5'].doc.strip(), 'documentation')

    a = A()
    try:
        methods.run(a, 'returns_5', {})
        ok_(False)
    except TypeError:
        eq_("returns_5() is missing required arguments: ['a']",
            exception_message())
    try:
        methods.run(a, 'returns_5', {'x': 7})
        ok_(False)
    except TypeError:
        eq_("returns_5() does not have argument: 'x'",
            exception_message())
    eq_({'result': -5}, methods.run(a, 'returns_5', {'a': 7}))
    eq_({'error': '4'}, methods.run(a, 'error', {'x': 2}))
    eq_({'result': None}, methods.run(a, 'none', {}))
def test_docs():
    import doctest
    import hashstore.tests

    r = doctest.testmod(hashstore.tests)
    ok_(r.attempted > 0)
    eq_(r.failed, 0)