def create(self, test):
    session = test.session()
    with session.begin() as t:
        for name, typ, ccy in zip(self.inames, self.itypes, self.iccys):
            t.add(Instrument(name=name, type=typ, ccy=ccy))
        for name in self.gnames:
            t.add(Group(name=name))
        for name, ccy in zip(self.inames, self.iccys):
            t.add(Fund(name=name, ccy=ccy))
    yield t.on_result
    iall = yield test.session().query(Instrument).load_only('id').all()
    fall = yield test.session().query(Fund).load_only('id').all()
    with session.begin() as t:
        for i in iall:
            t.add(ObjectAnalytics(model_type=Instrument, object_id=i.id))
        for i in fall:
            t.add(ObjectAnalytics(model_type=Fund, object_id=i.id))
    yield t.on_result
    obj_len = self.size[1]
    groups = yield session.query(Group).all()
    objs = yield session.query(ObjectAnalytics).all()
    groups = self.populate('choice', obj_len, choice_from=groups)
    objs = self.populate('choice', obj_len, choice_from=objs)
    with test.session().begin() as t:
        for g, o in zip(groups, objs):
            t.add(AnalyticData(group=g, object=o))
    yield t.on_result
def create(self, test, use_transaction=True):
    session = test.session()
    models = test.mapper
    eq = assertEqual if isinstance(test, type) else test.assertEqual
    c = yield models.instrument.query().count()
    eq(c, 0)
    if use_transaction:
        with session.begin() as t:
            for name, ccy in zip(self.fund_names, self.fund_ccys):
                t.add(models.fund(name=name, ccy=ccy))
            for name, typ, ccy in zip(self.inst_names, self.inst_types,
                                      self.inst_ccys):
                t.add(models.instrument(name=name, type=typ, ccy=ccy))
        yield t.on_result
    else:
        test.register()
        for name, typ, ccy in zip(self.inst_names, self.inst_types,
                                  self.inst_ccys):
            yield models.instrument.new(name=name, type=typ, ccy=ccy)
        for name, ccy in zip(self.fund_names, self.fund_ccys):
            yield models.fund.new(name=name, ccy=ccy)
    self.num_insts = yield models.instrument.query().count()
    self.num_funds = yield models.fund.query().count()
    eq(self.num_insts, len(self.inst_names))
    eq(self.num_funds, len(self.fund_names))
    yield session
def load_related(self, result):
    '''Load related fields into the query result.

    :parameter result: a result from a queryset.
    :rtype: the same queryset with related models loaded.'''
    if self.qs.select_related:
        if not hasattr(result, '__len__'):
            result = list(result)
        meta = self.meta
        for field in self.qs.select_related:
            name = field.name
            attname = field.attname
            vals = [getattr(r, attname) for r in result]
            if field in meta.scalarfields:
                related = field.relmodel.objects.filter(id__in=vals)
                for r, val in zip(result, related):
                    setattr(r, name, val)
            else:
                with self.backend.transaction() as t:
                    for val in vals:
                        val.reload(t)
                for val, r in zip(vals, t.get_result()):
                    val.set_cache(r)
    return result
def setUp(self):
    '''Create Instruments and Funds'''
    session = self.session()
    with session.begin():
        for name, typ, ccy in zip(inst_names, inst_types, inst_ccys):
            session.add(Instrument(name=name, type=typ, ccy=ccy))
        for name, ccy in zip(fund_names, fund_ccys):
            session.add(Fund(name=name, ccy=ccy))
def generate(self):
    self.dates = self.populate('date')
    self.values = self.populate('float', start=10, end=400)
    self.dates2 = self.populate('date', start=date(2009, 1, 1),
                                end=date(2010, 1, 1))
    self.big_strings = self.populate(min_len=300, max_len=1000)
    self.alldata = list(zip(self.dates, self.values))
    self.alldata2 = list(zip(self.dates2, self.values))
    self.testdata = dict(self.alldata)
    self.testdata2 = dict(self.alldata2)
def as_dict(times, fields):
    lists = []
    names = []
    d = {}
    for name, value in fields.items():
        names.append(name)
        lists.append(value)
    for dt, data in zip(times, zip(*lists)):
        d[dt] = dict(zip(names, data))
    return d
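# Illustrative usage sketch with made-up data (not from the source): two
# parallel field series keyed by name are merged into {time: {field: value}}.
times = [1, 2]
fields = {'open': [10.0, 11.0], 'close': [10.5, 11.2]}
assert as_dict(times, fields) == {
    1: {'open': 10.0, 'close': 10.5},
    2: {'open': 11.0, 'close': 11.2},
}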
def create_one(self):
    ts = self.structure()
    ts.update(zip(self.data.dates, self.data.values))
    self.assertFalse(ts.cache.cache)
    self.assertTrue(ts.cache.toadd)
    self.assertFalse(ts.cache.toremove)
    return ts
def unwind_query(self, meta, qset):
    """Unwind queryset"""
    table = meta.table()
    ids = list(qset)
    make_object = self.make_object
    for id, data in zip(ids, table.mget(ids)):
        yield make_object(meta, id, data)
def setUp(self):
    '''Create the test dictionaries, committing at the end for speed'''
    session = self.session()
    with session.begin():
        session.add(Dictionary(name='test'))
        session.add(Dictionary(name='test2'))
    self.assertEqual(session.query(Dictionary).count(), 2)
    self.data = dict(zip(dict_keys, dict_values))
def setUp(self):
    size = self.sizes.get(getattr(self, 'test_size', 'normal'))
    inst_names = populate('string', size, min_len=5, max_len=20)
    inst_types = populate('choice', size, choice_from=insts_types)
    inst_ccys = populate('choice', size, choice_from=ccys_types)
    with transaction(Instrument) as t:
        for name, typ, ccy in zip(inst_names, inst_types, inst_ccys):
            Instrument(name=name, type=typ, ccy=ccy).save(t)
def callback(self, response, args, session=None):
    # The session has received the callback from the redis client
    data = []
    for instance, id in list(zip(session, response)):
        instance = session.server_update(instance, id)
        if instance:
            data.append(instance)
    return data
def testPushBack(self):
    li = SimpleList().save()
    names = li.names
    for elem in elems:
        names.push_back(elem)
    li.save()
    for el, ne in zip(elems, names):
        self.assertEqual(el, ne)
def zset_score_pairs(response, **options):
    """If ``withscores`` is specified in the options, return the response as
    a list of (value, score) pairs."""
    if not response or not options['withscores']:
        return response
    return zip(response[::2], map(float, response[1::2]))
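# Illustrative usage sketch with a made-up reply: a flat WITHSCORES response
# of alternating member/score entries becomes (member, score) pairs with
# float scores; without ``withscores`` the reply passes through untouched.
reply = ['one', '1.5', 'two', '2']
assert list(zset_score_pairs(reply, withscores=True)) == [('one', 1.5), ('two', 2.0)]
assert zset_score_pairs(reply, withscores=False) == reply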
def testPushFront(self):
    li = SimpleList().save()
    names = li.names
    for elem in reversed(elems):
        names.push_front(elem)
    li.save()
    for el, ne in zip(elems, names):
        self.assertEqual(el, ne)
def _wrap_commit(self, request, response, iids=None, **options):
    for id, iid in zip(response, iids):
        id, flag, info = id
        if int(flag):
            yield instance_session_result(iid, True, id, False, float(info))
        else:
            msg = info.decode(request.encoding)
            yield CommitException(msg)
def _wrap_commit(self, response, iids=None, redis_client=None, **options):
    for id, iid in zip(response, iids):
        id, flag, info = id
        if int(flag):
            yield instance_session_result(iid, True, id, False, float(info))
        else:
            msg = info.decode(redis_client.encoding)
            yield CommitException(msg)
def _load_missing_scripts(results, positions, res):
    for i, r in zip(positions, res):
        if i == -1:
            if isinstance(r, Exception):
                raise r
            else:
                continue
        results[i] = r
    return results
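# Illustrative usage sketch with synthetic values: positions equal to -1 mark
# SCRIPT LOAD replies, which are skipped (or re-raised when they failed);
# every other position overwrites the matching entry in the results list.
results = ['stale', 'ok', 'stale']
res = ['sha1hex', 'fresh0', 'fresh2']
assert _load_missing_scripts(results, [-1, 0, 2], res) == ['fresh0', 'ok', 'fresh2']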
def testPushFront(self):
    li = SimpleList().save()
    if li.session.backend.name == 'redis':
        names = li.names
        for elem in reversed(elems):
            names.push_front(elem)
        li.save()
        for el, ne in zip(elems, names):
            self.assertEqual(el, ne)
def testPushBack(self):
    li = SimpleList().save()
    with li.session.begin():
        names = li.names
        for elem in elems:
            names.push_back(elem)
    for el, ne in zip(elems, names):
        self.assertEqual(el, ne)
    self.assertEqual(li.names.size(), len(elems))
def create(self, test, use_transaction=True):
    session = test.session()
    if use_transaction:
        with session.begin():
            for name, typ, ccy in zip(self.inst_names, self.inst_types,
                                      self.inst_ccys):
                session.add(Instrument(name=name, type=typ, ccy=ccy))
            for name, ccy in zip(self.fund_names, self.fund_ccys):
                session.add(Fund(name=name, ccy=ccy))
    else:
        self.register()
        for name, typ, ccy in zip(self.inst_names, self.inst_types,
                                  self.inst_ccys):
            Instrument(name=name, type=typ, ccy=ccy).save()
        for name, ccy in zip(self.fund_names, self.fund_ccys):
            Fund(name=name, ccy=ccy).save()
    self.num_insts = session.query(Instrument).count()
    self.num_funds = session.query(Fund).count()
def testData(self):
    ts = orm.TS()
    ts.update(zip(dates, values))
    ts.save()
    self.assertEqual(ts.size(), len(dates))
    front = ts.front()
    back = ts.back()
    self.assertTrue(back > front)
    range = list(ts.range(date(2009, 10, 1), date(2010, 5, 1)))
    self.assertTrue(range)
def pairs_to_dict(response, encoding, value_encoder=0):
    "Create a dict given a list of key/value pairs"
    if response:
        v1 = (r.decode(encoding) for r in response[::2])
        v2 = response[1::2]
        if value_encoder:
            v2 = (value_encoder(v) for v in v2)
        return zip(v1, v2)
    else:
        return ()
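# Illustrative usage sketch with a made-up reply: keys are decoded with the
# given encoding, values are optionally passed through ``value_encoder``, and
# the result is an iterable of (key, value) pairs.
reply = [b'name', b'fund1', b'ccy', b'EUR']
assert dict(pairs_to_dict(reply, 'utf-8')) == {'name': b'fund1', 'ccy': b'EUR'}
assert dict(pairs_to_dict(reply, 'utf-8', lambda v: v.decode('utf-8'))) == \
    {'name': 'fund1', 'ccy': 'EUR'}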
def ts_pairs(response, **options):
    '''Parse the timeseries TSRANGE and TSRANGEBYTIME commands'''
    if not response:
        return response
    elif options.get('withtimes'):
        return zip(response[::2], response[1::2])
    elif options.get('single') and len(response) == 1:
        return response[0]
    else:
        return response
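# Illustrative usage sketch with a synthetic reply: with ``withtimes`` the
# flat time/value reply is zipped into (time, value) pairs; a single-element
# reply with ``single`` is unwrapped; anything else passes through.
reply = [20100101, 'a', 20100102, 'b']
assert list(ts_pairs(reply, withtimes=True)) == [(20100101, 'a'), (20100102, 'b')]
assert ts_pairs(['only'], single=True) == 'only'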
def test_push_back(self):
    models = self.mapper
    li = yield models.simplelist.new()
    with li.session.begin() as t:
        names = li.names
        for elem in self.data.names:
            names.push_back(elem)
    yield t.on_result
    all = yield names.items()
    self.assertEqual(len(all), len(self.data.names))
    for el, ne in zip(self.data.names, all):
        self.assertEqual(el, ne)
def build(self, response, meta, fields, fields_attributes, encoding):
    fields = tuple(fields) if fields else None
    if fields:
        if len(fields) == 1 and fields[0] in (meta.pkname(), ''):
            for id in response:
                yield id, (), {}
        else:
            for id, fdata in response:
                yield id, fields, dict(zip(fields_attributes, fdata))
    else:
        for id, fdata in response:
            yield id, None, pairs_to_dict(fdata, encoding)
def testPushFront(self):
    session = self.session()
    li = yield session.add(SimpleList())
    names = li.names
    self.assertEqual(li.session, session)
    with session.begin() as t:
        for elem in reversed(self.data.names):
            names.push_front(elem)
    yield t.on_result
    all = yield names.items()
    for el, ne in zip(self.data.names, all):
        self.assertEqual(el, ne)
def testGet(self):
    session = self.session()
    with session.begin():
        ts = session.add(odm.TS())
        ts.update(zip(dates, values))
    dt1 = dates[0]
    val1 = ts[dt1]
    self.assertTrue(val1)
    self.assertEqual(ts.get(dt1), val1)
    self.assertEqual(ts.get(date(1990, 1, 1)), None)
    self.assertEqual(ts.get(date(1990, 1, 1), 1), 1)
    self.assertRaises(KeyError, lambda: ts[date(1990, 1, 1)])
def testPop(self):
    session = self.session()
    with session.begin():
        ts = session.add(odm.TS())
        ts.update(zip(dates, values))
    dt = dates[5]
    self.assertTrue(dt in ts)
    v = ts.pop(dt)
    self.assertTrue(v)
    self.assertFalse(dt in ts)
    self.assertRaises(KeyError, ts.pop, dt)
    self.assertEqual(ts.pop(dt, 'bla'), 'bla')
def test_pop_range(self):
    session = self.session()
    with session.begin():
        ts = session.add(odm.TS())
        ts.update(zip(dates, values))
    all_dates = list((d.date() for d in ts.itimes()))
    range = list(ts.range(all_dates[5], all_dates[15]))
    self.assertTrue(range)
    range2 = list(ts.pop_range(all_dates[5], all_dates[15]))
    self.assertEqual(range, range2)
    for dt, _ in range:
        self.assertFalse(dt in ts)
def callback(self, response, args, fields=None, fields_attributes=None):
    fields = tuple(fields) if fields else None
    if fields:
        if len(fields) == 1 and fields[0] == "id":
            for id in response:
                yield id, (), {}
        else:
            for id, fdata in response:
                yield id, fields_attributes, dict(zip(fields_attributes, fdata))
    else:
        for id, fdata in response:
            yield id, None, dict(pairs_to_dict(fdata))
def load_missing_scripts(pipe, commands, results):
    '''Load missing scripts in a pipeline.

    This function loops through the *results* list and, if one or more values
    are instances of :class:`NoScriptError`, it loads the scripts and performs
    a new evaluation. Commands whose ``script_dependency`` option is set to the
    name of a missing script are also re-executed.'''
    toload = False
    for r in results:
        if isinstance(r, NoScriptError):
            toload = True
            break
    if not toload:
        return results
    loaded = set()
    positions = []
    for i, result in enumerate(zip(commands, results)):
        command, result = result
        if isinstance(result, NoScriptError):
            name = command.options.get('script_name')
            if name:
                script = get_script(name)
                if script:
                    args = command.args
                    s = 2  # Starts from 2 as the first argument is the command
                    num_keys = args[s-1]
                    keys, args = args[s:s+num_keys], args[s+num_keys:]
                    if script.name not in loaded:
                        positions.append(-1)
                        loaded.add(script.name)
                        script.load(pipe, keys, *args, **command.options)
                    else:
                        script.evalsha(pipe, keys, *args, **command.options)
                    positions.append(i)
                    for c in command.callbacks:
                        pipe.add_callback(c)
        else:
            sc = command.options.get('script_dependency')
            if sc:
                if not isinstance(sc, (list, tuple)):
                    sc = (sc,)
                for s in sc:
                    if s in loaded:
                        pipe.command_stack.append(commands[i])
                        positions.append(i)
                        break
    res = pipe.execute()
    if isinstance(res, RedisRequest):
        return res.add_callback(
            partial(_load_missing_scripts, results, positions))
    else:
        return _load_missing_scripts(results, positions, res)
def interval(self, a, b, targets, C, D):
    ts = yield self.get()
    intervals = ts.intervals(a, b)
    self.assertEqual(len(intervals), len(targets))
    for interval, target in zip(intervals, targets):
        x = interval[0]
        y = interval[1]
        self.assertEqual(x, target[0])
        self.assertEqual(y, target[1])
        for dt in dategenerator(x, y):
            ts.data.add(dt, uniform(0, 1))
    self.assertEqual(ts.data_start, C)
    self.assertEqual(ts.data_end, D)
def after_setup(cls):
    d = cls.data
    session = cls.session()
    with session.begin() as t:
        for g in d.groups:
            t.add(Group(name=g))
    yield t.on_result
    groups = yield session.query(Group).all()
    gps = test.populate('choice', d.size, choice_from=groups)
    with session.begin() as t:
        for p, g in zip(d.persons, gps):
            t.add(cls.model(name=p, group=g))
    yield t.on_result
def fill(self, update=False):
    session = self.session()
    c = yield session.add(Calendar(name=self.data.random_string()))
    with session.begin() as t:
        for dt, value in zip(self.data.dates, self.data.values):
            t.add(DateValue(dt=dt, value=value))
    yield t.on_result
    items = t.saved[DateValue._meta]
    with session.begin() as t:
        if update:
            c.data.update(items)
        else:
            for value in items:
                c.data.add(value)
    yield t.on_result
    yield c
def after_setup(cls):
    d = cls.data
    models = cls.mapper
    groups = []
    groups.append(
        models.group.create_user(username='******', can_login=False))
    for username, password in zip(d.usernames, d.passwords):
        groups.append(
            models.group.create_user(username=username, password=password))
    yield cls.multi_async(groups)
    session = models.session()
    groups = yield session.query(Group).all()
    with models.session().begin() as t:
        for group in groups:
            group.create_role('family')   # create the group-family role
            group.create_role('friends')  # create the group-friends role
    yield t.on_result
def __init__(self, *args, **kwargs):
    meta = self._meta
    pkname = meta.pk.name
    setattr(self, pkname, kwargs.pop(pkname, None))
    kwargs.pop(meta.pk.name, None)
    for field in meta.scalarfields:
        field.set_value(self, kwargs.pop(field.name, None))
    attributes = meta.attributes
    if args:
        N = len(args)
        if N > len(attributes):
            raise ValueError('Too many attributes')
        attrs, attributes = attributes[:N], attributes[N:]
        for name, value in zip(attrs, args):
            setattr(self, name, value)
    for name in attributes:
        setattr(self, name, kwargs.pop(name, None))
    if kwargs:
        raise_kwargs(self, kwargs)
def testFollowers(self):
    '''Add followers to a user'''
    # unwind the queryset here since we are going to use it in a double loop
    models = self.mapper
    users = yield models.user.query().all()
    N = len(users)
    count = []
    # Follow users
    for user in users:
        N = self.data.followers()
        uset = set()
        for tofollow in populate('choice', N, choice_from=users):
            uset.add(tofollow)
            user.following.add(tofollow)
        count.append(len(uset))
        self.assertTrue(user.following.query().count() > 0)
    #
    for user, N in zip(users, count):
        all_following = user.following.query()
        self.assertEqual(all_following.count(), N)
        for following in all_following:
            self.assertTrue(user in following.followers.query())
def generate(self):
    self.keys = self.populate(min_len=5, max_len=20)
    self.values = self.populate(min_len=20, max_len=300)
    self.data = dict(zip(self.keys, self.values))
def __iter__(self):
    return zip(self.codes, self.groups)
def pairs_to_dict(response, encoding):
    "Create a dict given a list of key/value pairs"
    it = iter(response)
    return dict(((k.decode(encoding), v) for k, v in zip(it, it)))
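# Illustrative usage sketch with a made-up reply: zipping the same iterator
# with itself pairs consecutive items, so only the keys need decoding here.
reply = [b'name', b'fund1', b'ccy', b'EUR']
assert pairs_to_dict(reply, 'utf-8') == {'name': b'fund1', 'ccy': b'EUR'}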
def items(self):
    return zip(self.fields, self.data)
def mapping(self, prefix=''):
    for k, v in zip(self.keys, self.values):
        yield prefix + k, v
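# Illustrative usage sketch, assuming an object exposing parallel ``keys``
# and ``values`` attributes (SimpleNamespace stands in for the real class):
from types import SimpleNamespace

obj = SimpleNamespace(keys=('open', 'close'), values=(10.0, 10.5))
assert dict(mapping(obj, 'eod:')) == {'eod:open': 10.0, 'eod:close': 10.5}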
def after_setup(cls):
    d = cls.data
    with cls.session().begin() as t:
        for p, n, dt in zip(d.persons, d.groups, d.dates):
            t.add(cls.model(person=p, name=n, dt=dt))
    return t.on_result
def create(self):
    models = self.mapper
    with models.session().begin() as t:
        for na, dt in zip(names, dates):
            t.add(self.model(person=na, name=na, dt=dt))
    return t.on_result
def setUp(self):
    with self.mapper.session().begin() as t:
        for username, password in zip(self.data.usernames,
                                      self.data.passwords):
            t.add(User(username=username, password=password))
    return t.on_result