def write_one_stream(self, request, stream, stags, mime_header=False):
    """For a CSV download, add some headers and write the data to the stream.

    :param request: twisted-style request object; must provide ``args`` and
        ``write`` (it is handed to ``csv.writer`` as the output file).
    :param stream: dict with at least 'uuid' and 'Readings' keys.
    :param stags: flat dict of stream tags (e.g. 'Properties/Timezone').
    :param mime_header: unused; kept for interface compatibility.
    """
    writer = csv.writer(request)
    if 'tags' in request.args and not 'none' in request.args['tags']:
        # emit the tag set as "# key: value" comment lines
        request.write("# uuid: %s\n" % stream['uuid'])
        request.write("# DownloadTime: " + time.ctime() + "\n")
        request.write("# ")
        request.write('\n# '.join(
                (': '.join(x) for x in sorted(stags.iteritems()))))
        request.write('\n')
    if 'timefmt' in request.args:
        # potentially do timestamp stringification here.
        # this could be a bit slow for large datasets...
        if request.args['timefmt'][0] == 'iso8601':
            fmt = dtutil.iso8601
            tz = dtutil.gettz(stags.get('Properties/Timezone', 'Utc'))
        elif request.args['timefmt'][0] == 'excel':
            # fix: was "fmt = fmt = dtutil.excel" (duplicated assignment)
            fmt = dtutil.excel
            tz = dtutil.gettz(stags.get('Properties/Timezone', 'Utc'))
        else:
            # unknown format: fall back to raw unix seconds; the lambda
            # ignores the zone, so force UTC for determinism
            fmt = lambda dt, tz: dtutil.strftime_tz(dt, '%s')
            tz = dtutil.gettz('Utc')
        # explicit loop instead of map(): map is used here only for its side
        # effects, which silently does nothing under a lazy (py3-style) map
        for row in stream['Readings']:
            row[0] = fmt(dtutil.ts2dt(row[0] / 1000), tz)
            writer.writerow(row)
    else:
        for row in stream['Readings']:
            writer.writerow(row)
def make_time_formatter(request, stags):
    """Return a function that properly formats timestamps for a particular
    request.

    With no 'timefmt' argument, timestamps pass through as integer strings;
    otherwise they are converted from unix-ms to iso8601/excel strings in the
    stream's timezone (default 'Utc').
    """
    if 'timefmt' in request.args:
        try:
            tz = stags['Properties']['Timezone']
        except KeyError:
            tz = 'Utc'
        tz = dtutil.gettz(tz)

        # potentially do timestamp stringification here.
        # this could be a bit slow for large datasets...
        if request.args['timefmt'][0] == 'iso8601':
            fmt = dtutil.iso8601
        elif request.args['timefmt'][0] == 'excel':
            # fix: was "fmt = fmt = dtutil.excel" (duplicated assignment)
            fmt = dtutil.excel
        else:
            # unknown format: raw unix seconds; the lambda ignores the zone,
            # so force UTC for determinism
            fmt = lambda dt, tz: dtutil.strftime_tz(dt, '%s')
            tz = dtutil.gettz('Utc')

        def format(t):
            # readings carry unix-ms timestamps
            return fmt(dtutil.ts2dt(t / 1000), tz)
        return format
    else:
        return lambda x: str(int(x))
def write_one_stream(self, request, stream, stags, mime_header=False):
    """For a CSV download, add some headers and write the data to the stream.

    :param request: twisted-style request object; must provide ``args`` and
        ``write`` (it is handed to ``csv.writer`` as the output file).
    :param stream: dict with at least 'uuid' and 'Readings' keys.
    :param stags: flat dict of stream tags (e.g. 'Properties/Timezone').
    :param mime_header: unused; kept for interface compatibility.
    """
    writer = csv.writer(request)
    if 'tags' in request.args and not 'none' in request.args['tags']:
        # emit the tag set as "# key: value" comment lines
        request.write("# uuid: %s\n" % stream['uuid'])
        request.write("# DownloadTime: " + time.ctime() + "\n")
        request.write("# ")
        request.write('\n# '.join(
                (': '.join(x) for x in sorted(stags.iteritems()))))
        request.write('\n')
    if 'timefmt' in request.args:
        # potentially do timestamp stringification here.
        # this could be a bit slow for large datasets...
        if request.args['timefmt'][0] == 'iso8601':
            fmt = dtutil.iso8601
            tz = dtutil.gettz(stags.get('Properties/Timezone', 'Utc'))
        elif request.args['timefmt'][0] == 'excel':
            # fix: was "fmt = fmt = dtutil.excel" (duplicated assignment)
            fmt = dtutil.excel
            tz = dtutil.gettz(stags.get('Properties/Timezone', 'Utc'))
        else:
            # unknown format: fall back to raw unix seconds; the lambda
            # ignores the zone, so force UTC for determinism
            fmt = lambda dt, tz: dtutil.strftime_tz(dt, '%s')
            tz = dtutil.gettz('Utc')
        # explicit loop instead of map(): map is used here only for its side
        # effects, which silently does nothing under a lazy (py3-style) map
        for row in stream['Readings']:
            row[0] = fmt(dtutil.ts2dt(row[0] / 1000), tz)
            writer.writerow(row)
    else:
        for row in stream['Readings']:
            writer.writerow(row)
def __init__(self, inputs, group_operator, **kwargs):
    """Windowing operator: bin readings by a datetime field and apply
    group_operator to each bin.

    Keyword args: field ('day' default; must be in DT_FIELDS), width and
    slide (window size/advance, in units of field; slide <= width),
    inclusive (interval endpoint spec), snap_times, skip_empty.
    """
    field = kwargs.get('field', 'day')
    width = int(kwargs.get("width", 1))
    slide = int(kwargs.get("slide", width))
    inclusive = make_inclusive(kwargs.get("inclusive", "inc-exc"))
    snap_times = bool(kwargs.get("snap_times", True))
    skip_empty = util.to_bool(kwargs.get("skip_empty", True))
    if not field in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + field)
    if not slide <= width:
        raise core.SmapException("window: Cannot slide more than the window width!")
    # NOTE(review): make_inclusive is applied a second time to an
    # already-converted value here -- confirm it is idempotent.
    self.inclusive = make_inclusive(inclusive)
    if self.inclusive[0] == False:
        raise core.SmapException("Open intervals at the start are not supported")
    # one timezone per input stream, from stream metadata
    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    # one grouping-operator instance per input stream
    self.ops = map(lambda x: group_operator([x]), inputs)
    # self.ops = [[op([x]) for op in ops] for x in inputs]
    self.comparator = self.make_bin_comparator(field, width)
    # bins advance by `slide`, not `width` (overlapping windows allowed)
    self.snapper = make_bin_snapper(field, slide)
    self.snap_times = snap_times
    self.skip_empty = skip_empty
    self.bin_width = datetime.timedelta(**{field + 's': width})
    self.bin_slide = datetime.timedelta(**{field + 's': slide})
    self.name = "window(%s, field=%s, width=%i, inclusive=%s, snap_times=%s)" % ( \
        str(self.ops[0]), field, width, str(inclusive), str(snap_times))
    # outputs are the concatenation of all per-stream operator outputs
    Operator.__init__(self, inputs, util.flatten(map(operator.attrgetter('outputs'), self.ops)))
    self.reset()
def __init__(self, inputs, **kwargs):
    """Interpolation operator: resample each input stream onto a regular
    grid of `width` units of `field`, using the given method.

    Keyword args: method ('linear' or 'spline'), field (datetime field in
    DT_FIELDS, default 'minute'), width (bin size in units of field),
    max_time_delta (optional gap limit; must exceed the width).

    Raises core.SmapException on invalid method/field/max_time_delta.
    """
    interpolation_methods = ['linear', 'spline']
    self.method = kwargs.get('method', 'linear').lower()
    self.field = kwargs.get('field', 'minute')
    # fix: validate method/field BEFORE using them, so bad input raises the
    # intended SmapException instead of a TypeError from timedelta()
    if not self.method in interpolation_methods:
        raise core.SmapException("Invalid interpolation method: " + self.method)
    if not self.field in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + self.field)
    width_in = int(kwargs.get('width', 1))
    # NOTE(review): .seconds only covers the sub-day part of the timedelta,
    # so field='day' yields width 0 -- total_seconds() may be intended.
    self.width = datetime.timedelta(**{self.field + 's': width_in}).seconds * 1000
    delta_in = kwargs.get('max_time_delta', None)
    if delta_in is not None:
        delta_in = int(delta_in)
        self.max_time_delta = datetime.timedelta(**{self.field + 's': delta_in}).seconds * 1000
    else:
        self.max_time_delta = None
    if self.max_time_delta is not None and self.max_time_delta < self.width:
        raise core.SmapException("max_time_delta must be greater than the width.")
    self.snapper = make_bin_snapper(self.field, self.width)
    # one timezone per input stream, from stream metadata
    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    Operator.__init__(self, inputs, outputs=OP_N_TO_N)
    self.reset()
def __init__(self, inputs, group_operator, **kwargs):
    """Windowing operator: bin readings by a datetime field and apply
    group_operator to each bin.

    Keyword args: field ('day' default; must be in DT_FIELDS), width and
    slide (window size/advance, in units of field; slide <= width),
    inclusive (interval endpoint spec), snap_times, skip_empty.
    """
    field = kwargs.get('field', 'day')
    width = int(kwargs.get("width", 1))
    slide = int(kwargs.get("slide", width))
    inclusive = make_inclusive(kwargs.get("inclusive", "inc-exc"))
    snap_times = bool(kwargs.get("snap_times", True))
    skip_empty = util.to_bool(kwargs.get("skip_empty", True))
    if not field in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + field)
    if not slide <= width:
        raise core.SmapException(
            "window: Cannot slide more than the window width!")
    # NOTE(review): make_inclusive is applied a second time to an
    # already-converted value here -- confirm it is idempotent.
    self.inclusive = make_inclusive(inclusive)
    if self.inclusive[0] == False:
        raise core.SmapException(
            "Open intervals at the start are not supported")
    # one timezone per input stream, from stream metadata
    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    # one grouping-operator instance per input stream
    self.ops = map(lambda x: group_operator([x]), inputs)
    # self.ops = [[op([x]) for op in ops] for x in inputs]
    self.comparator = self.make_bin_comparator(field, width)
    # bins advance by `slide`, not `width` (overlapping windows allowed)
    self.snapper = make_bin_snapper(field, slide)
    self.snap_times = snap_times
    self.skip_empty = skip_empty
    self.bin_width = datetime.timedelta(**{field + 's': width})
    self.bin_slide = datetime.timedelta(**{field + 's': slide})
    self.name = "window(%s, field=%s, width=%i, inclusive=%s, snap_times=%s)" % ( \
        str(self.ops[0]), field, width, str(inclusive), str(snap_times))
    # outputs are the concatenation of all per-stream operator outputs
    Operator.__init__(
        self, inputs,
        util.flatten(map(operator.attrgetter('outputs'), self.ops)))
    self.reset()
def __init__(self, inputs):
    """Datetime operator base: all inputs must share a single timezone,
    which becomes self.tz."""
    zones = set(map(operator.itemgetter('Properties/Timezone'), inputs))
    if len(zones) != 1:
        raise SmapException("Datetime operator only supports a single tz")
    self.tz = dtutil.gettz(zones.pop())
    # indirection so subclasses can override _base_operator
    self.base_operator = lambda vec: self._base_operator(vec)
    ParallelSimpleOperator.__init__(self, inputs)
def setUp(self):
    """Build self.hours hourly readings starting 2000-01-01 00:00
    America/Los_Angeles: column 0 is a unix timestamp, column 1 the hour
    index."""
    start = dtutil.strptime_tz("1 1 2000 0", "%m %d %Y %H",
                               tzstr="America/Los_Angeles")
    start = dtutil.dt2ts(start)
    idx = np.arange(self.hours, dtype=float)
    self.testdata = np.empty((self.hours, 2))
    self.testdata[:, 0] = idx * 3600 + start
    self.testdata[:, 1] = idx
    self.ma = grouping.MaskedDTList(self.testdata[:, 0],
                                    dtutil.gettz("America/Los_Angeles"))
    self.width = datetime.timedelta(days=1)
def test_liveness(smap_url, opts):
    """Return a list of '<iso8601-time> <path> <value><unit>' strings, one
    per stream served at smap_url, most recent reading first.

    :param smap_url: base URL of the sMAP server.
    :param opts: unused; kept for interface compatibility.
    """
    data = load_json(smap_url + '/data/+')
    # (path, uuid, latest reading or [0, None], properties) per stream
    readings = [(k, v['uuid'],
                 v['Readings'][-1] if len(v['Readings']) else [0, None],
                 v['Properties'])
                for k, v in data.iteritems() if 'uuid' in v]
    readings.sort(key=lambda v: v[2][0], reverse=True)
    lines = []
    for path, uid, latest, props in readings:
        # fix: the originals ended these assignments with stray commas,
        # creating 1-tuples that were immediately unwrapped with [0]
        tim = dtutil.iso8601(dtutil.ts2dt(latest[0] / 1000.),
                             tzinfo=dtutil.gettz(props['Timezone']))
        val = "%s%s" % (latest[1], props['UnitofMeasure'])
        lines.append('%s %s %s' % (tim, path, val))
    return lines
def get_liveness(smap_url):
    """Return a dict keyed by stream uuid with the latest reading of every
    stream served at smap_url ('data', 'curr', 'path', 'latest', 'props')."""
    data = load_json(smap_url + '/data/+')
    # (path, uuid, latest reading or [0, None], properties) per stream
    readings = [(k, v['uuid'],
                 v['Readings'][-1] if len(v['Readings']) else [0, None],
                 v['Properties'])
                for k, v in data.iteritems() if 'uuid' in v]
    readings.sort(key=lambda v: v[2][0], reverse=True)
    # print readings
    d={}
    for path, uid, latest, props in readings:
        d[uid]={}
        # NOTE(review): the trailing comma makes 'data' a 1-tuple holding
        # the formatted timestamp -- confirm callers expect a tuple here
        # (test_liveness's equivalent code unwraps it with [0]).
        d[uid]['data']=dtutil.iso8601(dtutil.ts2dt(latest[0] / 1000.),
                                      tzinfo=dtutil.gettz(props['Timezone'])),
        # if opts.uuids: print uid,
        d[uid]['curr']= "%s%s" % (latest[1], props['UnitofMeasure'])
        d[uid]['path']= path
        d[uid]['latest']=latest
        d[uid]['props']=props
    return d
def __init__(self, inputs, **kwargs):
    """Interpolation operator: resample each input stream onto a regular
    grid of `width` units of `field`, using the given method.

    Keyword args: method ('linear' or 'spline'), field (datetime field in
    DT_FIELDS, default 'minute'), width (bin size in units of field),
    max_time_delta (optional gap limit; must exceed the width).

    Raises core.SmapException on invalid method/field/max_time_delta.
    """
    interpolation_methods = ['linear', 'spline']
    self.method = kwargs.get('method', 'linear').lower()
    self.field = kwargs.get('field', 'minute')
    # fix: validate method/field BEFORE using them, so bad input raises the
    # intended SmapException instead of a TypeError from timedelta()
    if not self.method in interpolation_methods:
        raise core.SmapException("Invalid interpolation method: " + self.method)
    if not self.field in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + self.field)
    width_in = int(kwargs.get('width', 1))
    # NOTE(review): .seconds only covers the sub-day part of the timedelta,
    # so field='day' yields width 0 -- total_seconds() may be intended.
    self.width = datetime.timedelta(**{self.field + 's': width_in}).seconds * 1000
    delta_in = kwargs.get('max_time_delta', None)
    if delta_in is not None:
        delta_in = int(delta_in)
        self.max_time_delta = datetime.timedelta(**{self.field + 's': delta_in}).seconds * 1000
    else:
        self.max_time_delta = None
    if self.max_time_delta is not None and self.max_time_delta < self.width:
        raise core.SmapException("max_time_delta must be greater than the width.")
    self.snapper = make_bin_snapper(self.field, self.width)
    # one timezone per input stream, from stream metadata
    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    Operator.__init__(self, inputs, outputs=OP_N_TO_N)
    self.reset()
def __init__(self, inputs, days="1,2,3,4,5"):
    """Weekday filter operator; `days` is a comma-separated list of day
    numbers (default: Monday-Friday)."""
    self.days = [int(day) for day in days.split(',')]
    # one timezone per input stream, from stream metadata
    self.tzs = [dtutil.gettz(stream['Properties/Timezone'])
                for stream in inputs]
    Operator.__init__(self, inputs, OP_N_TO_N)