def flat_to_nested(data, instance=None, attname=None, separator=None,
                   loads=None):
    '''Convert a flat representation of a dictionary to a nested
representation. Fields in the flat representation are separated by the
*separator* parameter.

:parameter data: a flat dictionary of key value pairs.
:parameter instance: optional instance of a model.
:parameter attname: optional attribute name of a model.
:parameter separator: optional separator. Default ``"__"``.
:parameter loads: optional data unserializer.
:rtype: a nested dictionary'''
    separator = separator or JSPLITTER
    val = {}
    flat_vals = {}
    for key, value in iteritems(data):
        # skip entries with no stored value
        if value is None:
            continue
        keys = key.split(separator)
        # first key equal to the attribute name
        if attname:
            if keys.pop(0) != attname:
                continue
        if loads:
            value = loads(value)
        # if an instance is available, inject the flat attribute
        if not keys:
            # the key was exactly the attribute name
            if value is None:
                # unserialised value is None: reset the result to empty
                # dictionaries and stop processing
                val = flat_vals = {}
                break
            else:
                continue
        else:
            # remember the original flat key so it can be set on the
            # instance at the end
            flat_vals[key] = value
        d = val
        lk = keys[-1]
        # walk/create the intermediate dictionaries for all but the last key
        for k in keys[:-1]:
            if k not in d:
                nd = {}
                d[k] = nd
            else:
                nd = d[k]
                if not isinstance(nd, dict):
                    # a scalar already sits at an intermediate key: move it
                    # under the empty-string key and keep nesting
                    nd = {'': nd}
                    d[k] = nd
            d = nd
        if lk not in d:
            d[lk] = value
        else:
            # NOTE(review): assumes an existing d[lk] is a dict — a scalar
            # here would raise TypeError; confirm with callers
            d[lk][''] = value
    if instance and flat_vals:
        for attr, value in iteritems(flat_vals):
            setattr(instance, attr, value)
    return val
def testAdd(self):
    '''Adding pairs to an unsaved dictionary must not touch the backend.'''
    d = Dictionary.objects.get(name='test')
    for key, value in iteritems(self.data):
        d.data.add(key, value)
    # nothing committed yet, so the stored size is still zero
    self.assertEqual(d.data.size(), 0)
    d.save()
    data = d.data
def items(self, keys=None):
    '''Generator over key-values.

If *keys* is not supplied, it is a generator over all key-value items.
No transaction involved in this function.

:parameter keys: optional iterable of keys restricting the items yielded.
'''
    if self.cache:
        # serve from the local cache
        # FIX: the original tested ``self.keys`` here, ignoring the *keys*
        # argument (and iterating ``None`` when it was omitted); the
        # parameter must be tested, mirroring the backend branch below.
        if keys:
            cache = self.cache.get
            for key in keys:
                yield key, cache(key)
        else:
            for item in iteritems(self.cache):
                yield item
    else:
        # fetch from the backend, deserialising keys and values
        kloads = self.pickler.loads
        vloads = self.value_pickler.loads
        if keys:
            dumps = self.pickler.dumps
            keys = [dumps(k) for k in keys]
            items = zip(keys, self._items(self.backend.cursor(), keys))
            for key, val in items:
                yield kloads(key), vloads(val)
        else:
            # full scan: build the local cache while yielding
            cache = {}
            items = self._items(self.backend.cursor(), keys)
            for key, val in items:
                k, v = kloads(key), vloads(val)
                cache[k] = v
                yield k, v
            self._cache = cache
def update(self, mapping):
    '''Feed every ``(dt, value)`` pair of *mapping* into :meth:`_add`
and return ``self`` for chaining.'''
    pairs = iteritems(mapping) if isinstance(mapping, dict) else mapping
    add = self._add
    for dt, value in pairs:
        add(dt, value)
    return self
def __init__(self, g, fields, start=None, end=None, missing=False):
    '''Build random per-date values for *fields* using generator *g*.

When *missing* is true, a random subset of each field's values is
replaced with ``nan``.'''
    if not end:
        end = date.today()
    if not start:
        start = end - timedelta(days=g.size)
    # random dates (duplicates possible)
    self.dates = g.populate('date', start=start, end=end)
    self.unique_dates = set(self.dates)
    self.fields = {}
    self.sorted_fields = {}
    for name in fields:
        column = g.populate('float')
        if missing:
            total = len(column)
            # knock out up to half the values at random positions
            for _ in range(randint(0, total // 2)):
                column[randint(0, total - 1)] = nan
        self.fields[name] = column
        self.sorted_fields[name] = []
    self.values = []
    by_date = {}
    for pos, dt in enumerate(self.dates):
        row = {name: col[pos] for name, col in iteritems(self.fields)}
        self.values.append((dt, row))
        by_date[dt] = row
    ordered = []
    for dt in sorted(by_date):
        ordered.append(dt)
        row = by_date[dt]
        for name in row:
            self.sorted_fields[name].append(row[name])
    self.sorted_values = (ordered, self.sorted_fields)
    self.length = len(ordered)
def make_objects(self, meta, data, related_fields=None):
    '''Generator of :class:`stdnet.odm.StdModel` instances with data
from database.

:parameter meta: instance of model :class:`stdnet.odm.Metaclass`.
:parameter data: iterator over instances data.
:parameter related_fields: optional mapping of field name to related
    instance data — presumably keyed by field name; confirm with callers.
'''
    make_object = meta.make_object
    related_data = []
    if related_fields:
        # preload related instances so each yielded model can be wired up
        for fname, fdata in iteritems(related_fields):
            field = meta.dfields[fname]
            if field in meta.multifields:
                # multifield: keep the raw mapping for the field cache
                related = dict(fdata)
                multi = True
            else:
                # plain relation: recursively build the related models
                # and index them by id
                multi = False
                relmodel = field.relmodel
                related = dict(((obj.id, obj) for obj in
                                self.make_objects(relmodel._meta, fdata)))
            related_data.append((field, related, multi))
    for state in data:
        instance = make_object(state, self)
        for field, rdata, multi in related_data:
            if multi:
                # multifield caches are keyed by the string of the id
                field.set_cache(instance, rdata.get(str(instance.id)))
            else:
                rid = getattr(instance, field.attname, None)
                if rid is not None:
                    value = rdata.get(rid)
                    setattr(instance, field.name, value)
        yield instance
def __init__(self, g, fields, start=None, end=None, missing=False):
    '''Populate random date/field data from the generator *g*.

If *missing* is true a random subset of each series is set to ``nan``.'''
    end = end or date.today()
    start = start or (end - timedelta(days=g.size))
    # random dates
    self.dates = g.populate('date', start=start, end=end)
    self.unique_dates = set(self.dates)
    self.fields = {}
    self.sorted_fields = {}
    for fname in fields:
        series = g.populate('float')
        if missing:
            size = len(series)
            for _ in range(randint(0, size // 2)):
                series[randint(0, size - 1)] = nan
        self.fields[fname] = series
        self.sorted_fields[fname] = []
    self.values = []
    per_date = {}
    for idx, dt in enumerate(self.dates):
        row = dict((fname, series[idx])
                   for fname, series in iteritems(self.fields))
        self.values.append((dt, row))
        per_date[dt] = row
    ordered_dates = []
    for dt in sorted(per_date):
        ordered_dates.append(dt)
        row = per_date[dt]
        for fname in row:
            self.sorted_fields[fname].append(row[fname])
    self.sorted_values = (ordered_dates, self.sorted_fields)
    self.length = len(ordered_dates)
def msetnx(self, mapping):
    """
    Sets each key in the ``mapping`` dict to its corresponding value if
    none of the keys are already set
    """
    # Build the flat [k1, v1, k2, v2, ...] argument list with an explicit
    # loop: the original used a list comprehension purely for its side
    # effects, which builds and throws away a list of Nones.
    items = []
    for pair in iteritems(mapping):
        items.extend(pair)
    return self.execute_command('MSETNX', *items)
def test_msetnx(self):
    '''MSETNX sets nothing when any key in the mapping already exists.'''
    initial = {'a': '1', 'b': '2', 'c': '3'}
    self.assert_(self.client.msetnx(initial))
    overlapping = {'a': 'x', 'd': '4'}
    # 'a' already exists, so the whole second call is rejected
    self.assert_(not self.client.msetnx(overlapping))
    for key, value in iteritems(initial):
        self.assertEquals(self.client[key], value.encode('utf-8'))
    self.assertEquals(self.client['d'], None)
def update(self, mapping):
    '''Add *mapping* dictionary to hashtable.

Equivalent to python dictionary update method.

:parameter mapping: a dictionary of field values.'''
    key_dumps = self.pickler.dumps
    value_dumps = self.value_pickler.dumps
    if isinstance(mapping, dict):
        mapping = iteritems(mapping)
    # serialise both sides before merging into the local cache
    self.cache.update({key_dumps(k): value_dumps(v) for k, v in mapping})
def testAdd(self):
    '''Adding pairs inside a transaction defers the backend writes.'''
    self.data()
    d = self.model(name='test').save()
    self.assertTrue(d.session)
    self.assertTrue(d in d.session)
    with d.session.begin():
        for key, value in iteritems(self.data):
            d.data.add(key, value)
        # still inside the transaction: nothing written yet
        self.assertEqual(d.data.size(), 0)
    # NOTE(review): assertTrue with a second positional argument treats it
    # as the failure message, not an expected value — looks like a latent
    # test bug in the original; behavior preserved here.
    self.assertTrue(d.data.size(), 0)
def test_add(self):
    '''Pairs added in a transaction appear only after commit.'''
    d = yield self.create()
    self.assertTrue(d.session)
    with d.session.begin() as t:
        for key, value in iteritems(self.data.data):
            d.data.add(key, value)
        size = yield d.data.size()
        # not committed yet: the backend still reports an empty structure
        self.assertEqual(size, 0)
    yield t.on_result
    size = yield d.data.size()
    self.assertEqual(len(self.data.data), size)
def dump_data(self, mapping):
    '''Yield ``(key, value)`` pairs serialised with the structure's
picklers. Non-tuple entries are wrapped before being handed to
:attr:`pair`.'''
    key_dumps = self.pickler.dumps
    value_dumps = self.value_pickler.dumps
    if isinstance(mapping, dict):
        mapping = iteritems(mapping)
    pair = self.pair
    for entry in mapping:
        if not isinstance(entry, tuple):
            entry = (entry,)
        k, v = pair(entry)
        yield key_dumps(k), value_dumps(v)
def aggregate(self, kwargs):
    '''Aggregate lookup parameters.

Splits each keyword name on ``JSPLITTER`` into a field name plus
optional lookup suffixes, and groups the resulting lookups by attribute
name into one queryset per attribute.'''
    meta = self._meta
    fields = meta.dfields
    field_lookups = {}
    for name, value in iteritems(kwargs):
        bits = name.split(JSPLITTER)
        field_name = bits.pop(0)
        if field_name not in fields:
            raise QuerySetError('Could not filter on model "{0}".\
Field "{1}" does not exist.'.format(meta, field_name))
        field = fields[field_name]
        attname = field.attname
        if bits:
            bits = [n.lower() for n in bits]
            if bits[-1] == 'in':
                # membership lookup: the remaining bits may name a nested
                # filter handled by the field itself
                bits.pop()
                lookup = JSPLITTER.join(bits)
                if lookup and lookup not in range_lookups:
                    lvalue = field.filter(self.session, lookup, value)
                    if lvalue is not None:
                        value = lvalue
                        lookup = None
            else:
                # range lookup (gt, lt, ...) or a nested attribute name
                if bits[-1] in range_lookups:
                    lookup = bits.pop()
                else:
                    lookup = None
                bits.insert(0, attname)
                attname = JSPLITTER.join(bits)
        else:
            lookup = None
        # Get lookups on attribute name
        lookups = field_lookups.get(attname)
        if lookups is None:
            lookups = []
            field_lookups[attname] = lookups
        if lookup not in range_lookups:
            # plain (equality/membership) lookup requires an index
            if not field.index:
                raise QuerySetError("{0} {1} is not an index.\
Cannot query.".format(field.__class__.__name__, field_name))
            if not iterable(value):
                value = (value,)
            for v in value:
                if isinstance(v, Q):
                    v = lookup_value('set', v.construct())
                else:
                    v = lookup_value('value', field.dumps(v, lookup))
                lookups.append(v)
        else:
            lookups.append(lookup_value(lookup, field.dumps(value, lookup)))
    return [queryset(self, name=name, underlying=field_lookups[name])
            for name in sorted(field_lookups)]
def eval(self, body, **kwargs):
    '''Execute the EVAL command with script *body*.

Each keyword name is sent as a key, followed by all the matching
values as arguments, preserving the pairing order.'''
    num_keys = len(kwargs)
    if num_keys:
        names = []
        values = []
        for name, value in iteritems(kwargs):
            names.append(name)
            values.append(value)
        params = names + values
    else:
        params = ()
    return self.execute_command('EVAL', body, num_keys, *params)
def dump_data(self, mapping):
    '''Return a list of ``(key, value)`` pairs serialised with the
structure's picklers. Non-tuple entries are wrapped before being
handed to :attr:`pair`.'''
    key_dumps = self.pickler.dumps
    value_dumps = self.value_pickler.dumps
    if isinstance(mapping, dict):
        mapping = iteritems(mapping)
    pair = self.pair
    result = []
    for entry in mapping:
        if not isinstance(entry, tuple):
            entry = (entry,)
        k, v = pair(entry)
        result.append((key_dumps(k), value_dumps(v)))
    return result
def add(self, value, transaction=None, **kwargs):
    '''Add *value*, an instance of ``self.formodel``, to the through
model. Extra keyword arguments are set as attributes on the through
instance before saving.'''
    session, kw = self.session_kwargs(value, transaction)
    try:
        instance = session.query(self.model).get(**kw)
        if not kwargs:
            # already linked and nothing to update
            return instance
    except self.model.DoesNotExist:
        instance = self.model(**kw)
    for name, val in iteritems(kwargs):
        setattr(instance, name, val)
    return session.add(instance)
def from_base64_data(cls, **kwargs):
    '''Create an instance of *cls* from keyword data, decoding each
recognised field via its ``to_python`` method. Unknown names are
ignored.'''
    instance = cls()
    meta = cls._meta
    pk_name = meta.pkname()
    for name, raw in iteritems(kwargs):
        if name == pk_name:
            field = meta.pk
        elif name in meta.dfields:
            field = meta.dfields[name]
        else:
            # not a model field: skip silently
            continue
        setattr(instance, field.attname, field.to_python(raw))
    return instance
def _add(self, dt, *args): timestamp = self.pickler.dumps(dt) add = self.cache.add dump = self.value_pickler.dumps if len(args) == 1: mapping = args[0] if isinstance(mapping, dict): mapping = iteritems(mapping) for field, value in mapping: add(timestamp, field, dump(value)) elif len(args) == 2: add(timestamp, args[0], dump(args[1])) else: raise TypeError('Expected a mapping or a field value pair')
def _update_or_create(self, model, **kwargs):
    '''Update the unique instance of *model* matching *kwargs*, creating
it when no match exists.

Yields ``(instance, created)`` where *created* tells whether a new
instance was added.'''
    pkname = model._meta.pkname()
    pk = kwargs.pop(pkname, None)
    query = self.query(model)
    item = None
    #
    if pk:
        # primary key available: look the instance up directly.
        # FIX: the original called ``query.filter(pkname=pk)`` which
        # filters on a field literally named "pkname"; the model's actual
        # primary-key field name must be expanded as a keyword.
        items = yield query.filter(**{pkname: pk}).all()
        if items:
            item = items[0]
        else:
            # not found: keep the pk so the new instance gets it
            kwargs[pkname] = pk
    else:
        # split the keywords into indexed fields (usable for filtering)
        # and the rest (plain attribute updates)
        params = {}
        rest = {}
        fields = model._meta.dfields
        for field, value in iteritems(kwargs):
            if field in fields and fields[field].index:
                params[field] = value
            else:
                rest[field] = value
        if params:
            items = yield query.filter(**params).all()
            if len(items) == 1:
                item = items[0]
                kwargs = rest
    if item:
        if kwargs:
            # apply the remaining updates and persist them
            for field, value in iteritems(kwargs):
                setattr(item, field, value)
            item = yield self.add(item)
        yield item, False
    #
    else:
        item = yield self.add(model(**kwargs))
        yield item, True
def aggregate(self, kwargs):
    """Aggregate lookup parameters.

    Splits each keyword name on ``JSPLITTER`` into a field name plus
    optional lookup suffixes and groups the resulting lookups by
    attribute name, returning one queryset per attribute.
    """
    meta = self._meta
    fields = meta.dfields
    field_lookups = {}
    for name, value in iteritems(kwargs):
        bits = name.split(JSPLITTER)
        field_name = bits.pop(0)
        if field_name not in fields:
            raise QuerySetError(
                'Could not filter on model "{0}".\
Field "{1}" does not exist.'.format(
                    meta, field_name
                )
            )
        field = fields[field_name]
        attname = field.attname
        lookup = None
        if bits:
            bits = [n.lower() for n in bits]
            if bits[-1] == "in":
                # membership test: handled by the plain branch below
                bits.pop()
            elif bits[-1] in range_lookups:
                # range lookup (gt, lt, ...)
                lookup = bits.pop()
            remaining = JSPLITTER.join(bits)
            if lookup:
                # this is a range lookup
                attname, nested = field.get_lookup(remaining, QuerySetError)
                lookups = get_lookups(attname, field_lookups)
                lookups.append(lookup_value(lookup, (value, nested)))
                continue
            elif remaining:
                # Not a range lookup, must be a nested filter
                value = field.filter(self.session, remaining, value)
        lookups = get_lookups(attname, field_lookups)
        # If we are here the field must be an index
        if not field.index:
            raise QuerySetError("%s %s is not an index. Cannot query." %
                                (field.__class__.__name__, field_name))
        if not iterable(value):
            value = (value,)
        for v in value:
            if isinstance(v, Q):
                v = lookup_value("set", v.construct())
            else:
                v = lookup_value("value", field.serialise(v, lookup))
            lookups.append(v)
    #
    return [queryset(self, name=name, underlying=field_lookups[name]) for name in sorted(field_lookups)]
def aggregate(self, kwargs):
    '''Aggregate lookup parameters.

Splits each keyword name on ``JSPLITTER`` into a field name plus
optional lookup suffixes and groups the resulting lookups by attribute
name, returning one queryset per attribute.'''
    meta = self._meta
    fields = meta.dfields
    field_lookups = {}
    for name, value in iteritems(kwargs):
        bits = name.split(JSPLITTER)
        field_name = bits.pop(0)
        if field_name not in fields:
            raise QuerySetError('Could not filter on model "{0}".\
Field "{1}" does not exist.'.format(meta, field_name))
        field = fields[field_name]
        attname = field.attname
        lookup = None
        if bits:
            bits = [n.lower() for n in bits]
            if bits[-1] == 'in':
                # membership test: handled by the plain branch below
                bits.pop()
            elif bits[-1] in range_lookups:
                # range lookup (gt, lt, ...)
                lookup = bits.pop()
            remaining = JSPLITTER.join(bits)
            if lookup:
                # this is a range lookup
                attname, nested = field.get_lookup(remaining, QuerySetError)
                lookups = get_lookups(attname, field_lookups)
                lookups.append(lookup_value(lookup, (value, nested)))
                continue
            elif remaining:
                # Not a range lookup, must be a nested filter
                value = field.filter(self.session, remaining, value)
        lookups = get_lookups(attname, field_lookups)
        # If we are here the field must be an index
        if not field.index:
            raise QuerySetError("%s %s is not an index. Cannot query." %
                                (field.__class__.__name__, field_name))
        if not iterable(value):
            value = (value,)
        for v in value:
            if isinstance(v, Q):
                v = lookup_value('set', v.construct())
            else:
                v = lookup_value('value', field.serialise(v, lookup))
            lookups.append(v)
    #
    return [queryset(self, name=name, underlying=field_lookups[name]) for name in sorted(field_lookups)]
def from_base64_data(cls, **kwargs):
    '''Load a :class:`StdModel` from possibly base64encoded data.

This method is used to load models from data obtained from the
:meth:`tojson` method. Names that are neither the primary key nor a
declared field are ignored.'''
    obj = cls()
    meta = cls._meta
    pk_name = meta.pkname()
    for name, raw in iteritems(kwargs):
        if name == pk_name:
            field = meta.pk
        elif name in meta.dfields:
            field = meta.dfields[name]
        else:
            continue
        setattr(obj, field.attname, field.to_python(raw))
    return obj
def makepanel(self, name):
    '''Build and register the display panel for section *name* of
``self.info``; sections that are not present are skipped.'''
    if name not in self.info:
        return
    panel = self._panels[name] = []
    nicename = self.formatter.format_name
    nicebool = self.formatter.format_bool
    boolval = (0, 1)
    for key, value in iteritems(self.info[name]):
        include = True
        if key in self.converters:
            fdata = self.converters[key]
            if fdata:
                # converter entry: (formatter suffix, optional new name)
                fmt = getattr(self.formatter,
                              'format_{0}'.format(fdata[0]))
                key = fdata[1] or key
                value = fmt(value)
            else:
                # falsy converter means "hide this entry"
                include = False
        elif value in boolval:
            value = nicebool(value)
        if include:
            panel.append({'name': nicename(key), 'value': value})
def makepanel(self, name, info):
    '''Return a list of formatted ``{'name': ..., 'value': ...}`` entries
for section *name* of *info*, or ``None`` when the section is absent.
Integer values with no converter are formatted with ``format_int``.'''
    if name not in info:
        return
    pa = []
    nicename = self.formatter.format_name
    nicebool = self.formatter.format_bool
    boolval = (0, 1)
    for key, value in iteritems(info[name]):
        include = True
        if key in self.converters or isinstance(value, int):
            # converter entry: (formatter suffix, optional new name);
            # bare ints default to the 'int' formatter
            fdata = self.converters.get(key, ('int', None))
            if fdata:
                fmt = getattr(self.formatter,
                              'format_{0}'.format(fdata[0]))
                key = fdata[1] or key
                value = fmt(value)
            else:
                # falsy converter means "hide this entry"
                include = False
        elif value in boolval:
            value = nicebool(value)
        if include:
            pa.append({'name': nicename(key), 'value': value})
    return pa
def test_mset(self):
    '''MSET stores every key/value pair of the mapping.'''
    payload = {'a': '1', 'b': '2', 'c': '3'}
    self.assert_(self.client.mset(payload))
    for key, value in iteritems(payload):
        self.assertEquals(self.client[key], value.encode('utf-8'))
def flat_mapping(mapping):
    '''Flatten *mapping* into a single list ``[k1, v1, k2, v2, ...]``.'''
    flat = []
    for key, value in iteritems(mapping):
        flat.append(key)
        flat.append(value)
    return flat
def mset(self, mapping):
    "Sets each key in the ``mapping`` dict to its corresponding value"
    # Build the flat [k1, v1, k2, v2, ...] argument list with an explicit
    # loop: the original used a list comprehension purely for its side
    # effects, which builds and throws away a list of Nones.
    items = []
    for pair in iteritems(mapping):
        items.extend(pair)
    return self.execute_command('MSET', *items)
def items(self):
    '''Iterator over items (pairs) of :class:`PairMixin`.'''
    cache = self.cache
    if cache.cache is None:
        # lazily populate the local cache from the backend structure
        raw = self.session.structure(self).items()
        cache.set_cache(self.load_data(raw))
    return iteritems(cache.cache)