def set_metadata(self, metadata):
    """Merge new metadata into this object's 'Metadata' dict.

    The incoming flat dict is first expanded into nested form by
    util.build_recursive; if the result carries a top-level 'Metadata'
    envelope, only its contents are merged.  Marks children dirty so the
    change propagates.
    """
    incoming = util.build_recursive(metadata)
    # Unwrap a top-level 'Metadata' envelope if present.
    if 'Metadata' in incoming:
        incoming = incoming['Metadata']
    existing = self.get('Metadata', {})
    self['Metadata'] = util.dict_merge(existing, incoming)
    self.dirty_children()
def ext_set(tag_list, x):
    """Extractor for the set operation

    Returns time series uuid and modified tags
    """
    records = []
    for row in x:
        # row[0] is the stream uuid, row[1] a dict of the modified tags.
        record = {'uuid': row[0]}
        record.update(row[1])
        records.append(build_recursive(record, suppress=[]))
    return records
def capnp2json(capnpmsg): ret = {} # tlk = top level key, tlv = top level value for tlk,tlv in capnpmsg.to_dict().iteritems(): # resolve contents as list of strings if tlk.lower() == 'contents': ret['Contents'] = tlv # resolve readings as list of number pairs elif tlk.lower() == 'readings': ret['Readings'] = map(lambda x: [x['time'], x['data']], tlv) # resolve list of {'key': key, 'value': value} dicts elif isinstance(tlv, list): ret[tlk] = {} for d in tlv: ret[tlk][d['key']] = d['value'] ret[tlk] = build_recursive(ret[tlk], suppress=[]) else: ret[tlk] = tlv return ret
def capnp2json(capnpmsg):
    """Convert a capnp message to the nested-dict (JSON-style) representation."""
    converted = {}
    for key, value in capnpmsg.to_dict().iteritems():
        lowered = key.lower()
        if lowered == 'contents':
            # contents: passed through as a list of strings
            converted['Contents'] = value
        elif lowered == 'readings':
            # readings: list of {'time': t, 'data': d} dicts -> [t, d] pairs
            converted['Readings'] = map(lambda r: [r['time'], r['data']], value)
        elif isinstance(value, list):
            # generic list of {'key': k, 'value': v} dicts -> nested dict
            flat = {}
            for entry in value:
                flat[entry['key']] = entry['value']
            converted[key] = build_recursive(flat, suppress=[])
        else:
            converted[key] = value
    return converted
def ext_recursive(vals):
    """Expand the first column of each result row via build_recursive."""
    return map(lambda row: build_recursive(row[0], suppress=[]), vals)
def ext_plural(tags, vals):
    """Zip each value row with the tag names and expand into nested dicts."""
    results = []
    for row in vals:
        results.append(build_recursive(dict(zip(tags, row)), suppress=[]))
    return results
def tag_extract_result(self, request, result):
    """For a tag query, we want to return a nested dict so we pipe the
    result through this filter instead.
    """
    nested = [util.build_recursive(row[0], suppress=[]) for row in result]
    return request, nested
# NOTE(review): this line looks like a stale fragment — the opening `def` of the
# first method is missing (the body starts at `self.chunk_loaded_idx += 1`) and
# a complete, newer copy of the same code follows below (using `region` where
# this copy reads `self.data_spec`).  Left untouched; confirm against version
# control before deleting.
self.chunk_loaded_idx += 1 # process for d in opdata: d[:, 0] *= 1000 opdata = operators.DataChunk((self.data_spec['start'], self.data_spec['end']), first, last, opdata) redata = self.op.process(opdata) log.msg("STATS: Operator processing took %0.6fs" % (time.time() - tic)) # log.msg("writing " + str(map(len, redata))) # construct a return value with metadata and data merged return map(self.build_result, zip(redata, self.op.outputs)) # print "processing and writing took", time.time() - tic def publish_data(self, data, last): if not self._stop: self.consumer.write(data) if last: self.consumer.unregisterProducer() self.consumer.finish() def build_result(self, (d, s)): obj = dict(s) if isinstance(d, np.ndarray): obj['Readings'] = d.tolist() else: obj['Readings'] = d return util.build_recursive(obj, suppress=[])
def apply_operator(self, opdata, region, first, last):
    """Run the configured operator over one chunk of loaded data.

    Scales timestamps to milliseconds in place, wraps the arrays in a
    DataChunk for the (start, end) `region`, runs self.op over it, and
    returns the per-output results merged with their stream metadata.
    """
    tic = time.time()
    self.chunk_loaded_idx += 1
    # process
    for d in opdata:
        # convert timestamps (column 0) from seconds to ms, in place
        d[:, 0] *= 1000
    opdata = operators.DataChunk(region, first, last, opdata)
    redata = self.op.process(opdata)
    log.msg("STATS: Operator processing took %0.6fs" % (time.time() - tic))
    # log.msg("writing " + str(map(len, redata)))
    # construct a return value with metadata and data merged
    return map(self.build_result, zip(redata, self.op.outputs))
    # print "processing and writing took", time.time() - tic

def publish_data(self, data, last):
    """Write one result chunk to the consumer; close it out after the last."""
    if not self._stop:
        self.consumer.write(data)
    if last:
        self.consumer.unregisterProducer()
        self.consumer.finish()

def build_result(self, result_pair):
    """Merge one output stream's metadata dict with its readings.

    `result_pair` is a (data, stream-metadata) tuple as produced by
    zip(redata, self.op.outputs) in apply_operator.
    """
    # Was `def build_result(self, (d, s))`: tuple parameter unpacking was
    # removed in Python 3 (PEP 3113), so unpack explicitly instead.
    # Call-compatible: callers still pass a single pair.
    d, s = result_pair
    obj = dict(s)
    if isinstance(d, np.ndarray):
        obj['Readings'] = d.tolist()
    else:
        obj['Readings'] = d
    return util.build_recursive(obj, suppress=[])