def __init__(self, inputs, **kwargs):
    interpolation_methods = ['linear', 'spline']
    self.method = kwargs.get('method', 'linear').lower()
    self.field = kwargs.get('field', 'minute')
    width_in = int(kwargs.get('width', 1))
    # use total_seconds() (not .seconds, which only covers the
    # sub-day component) so fields like 'day' convert correctly
    self.width = int(datetime.timedelta(**{
        self.field + 's': width_in
    }).total_seconds() * 1000)
    delta_in = kwargs.get('max_time_delta', None)
    if delta_in is not None:
        delta_in = int(delta_in)
        self.max_time_delta = int(datetime.timedelta(**{
            self.field + 's': delta_in
        }).total_seconds() * 1000)
    else:
        self.max_time_delta = None

    if self.method not in interpolation_methods:
        raise core.SmapException("Invalid interpolation method: " +
                                 self.method)
    if self.field not in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + self.field)
    if self.max_time_delta is not None and self.max_time_delta < self.width:
        raise core.SmapException(
            "max_time_delta must be greater than the width.")

    self.snapper = make_bin_snapper(self.field, self.width)
    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    Operator.__init__(self, inputs, outputs=OP_N_TO_N)
    self.reset()
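# A minimal standalone sketch of the width computation above, using only
# stdlib datetime; the DT_FIELDS names ('minute', 'day', ...) are
# pluralized into timedelta keyword arguments:
import datetime

def field_width_ms(field, width):
    # e.g. field='minute', width=15 -> timedelta(minutes=15) -> 900000 ms
    return int(datetime.timedelta(**{field + 's': width}).total_seconds() * 1000)

assert field_width_ms('minute', 15) == 15 * 60 * 1000
assert field_width_ms('day', 1) == 24 * 60 * 60 * 1000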
def __init__(self, inputs, group_operator, **kwargs):
    field = kwargs.get('field', 'day')
    width = int(kwargs.get("width", 1))
    slide = int(kwargs.get("slide", width))
    inclusive = make_inclusive(kwargs.get("inclusive", "inc-exc"))
    snap_times = bool(kwargs.get("snap_times", True))
    skip_empty = util.to_bool(kwargs.get("skip_empty", True))

    if field not in DT_FIELDS:
        raise core.SmapException("Invalid datetime field: " + field)
    if slide > width:
        raise core.SmapException(
            "window: cannot slide more than the window width!")

    self.inclusive = inclusive
    if not self.inclusive[0]:
        raise core.SmapException(
            "Open intervals at the start are not supported")

    self.tzs = map(lambda x: dtutil.gettz(x['Properties/Timezone']), inputs)
    self.ops = map(lambda x: group_operator([x]), inputs)
    # self.ops = [[op([x]) for op in ops] for x in inputs]
    self.comparator = self.make_bin_comparator(field, width)
    self.snapper = make_bin_snapper(field, slide)
    self.snap_times = snap_times
    self.skip_empty = skip_empty
    self.bin_width = datetime.timedelta(**{field + 's': width})
    self.bin_slide = datetime.timedelta(**{field + 's': slide})
    self.name = "window(%s, field=%s, width=%i, inclusive=%s, snap_times=%s)" % (
        str(self.ops[0]), field, width, str(inclusive), str(snap_times))
    Operator.__init__(
        self, inputs,
        util.flatten(map(operator.attrgetter('outputs'), self.ops)))
    self.reset()
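# An illustrative sketch (not part of the operator) of how width and
# slide interact: bins of `width` start every `slide` units, so
# slide < width yields overlapping windows and slide == width yields a
# tumbling window -- which is why slide > width is rejected above.
def bin_starts(t0, t1, width, slide):
    # yield [start, start + width) intervals starting in [t0, t1)
    start = t0
    while start < t1:
        yield (start, start + width)
        start += slide

assert list(bin_starts(0, 20, 10, 10)) == [(0, 10), (10, 20)]
assert list(bin_starts(0, 20, 10, 5)) == [(0, 10), (5, 15), (10, 20), (15, 25)]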
def open_page(self):
    if not self.startdt:
        self.startdt = self.push_hist
    if not self.enddt:
        self.enddt = dtutil.now()

    # build the report URL; the start/end date and time menus are
    # appended as extra query parameters
    start, end = urllib.quote(dtutil.strftime_tz(self.startdt, TIMEFMT)), \
        urllib.quote(dtutil.strftime_tz(self.enddt, TIMEFMT))
    url = self.url % (start, end)
    url += "&mnuStartMonth=%i&mnuStartDay=%i&mnuStartYear=%i" % \
        (self.startdt.month, self.startdt.day, self.startdt.year)
    url += "&mnuStartTime=%i%%3A%i" % (self.startdt.hour, self.startdt.minute)
    url += "&mnuEndMonth=%i&mnuEndDay=%i&mnuEndYear=%i" % \
        (self.enddt.month, self.enddt.day, self.enddt.year)
    url += "&mnuEndTime=%i%%3A%i" % (self.enddt.hour, self.enddt.minute)

    print "loading", url
    self.fp = httputils.load_http(url, as_fp=True, auth=auth.BMOAUTH)
    if not self.fp:
        raise core.SmapException("timeout!")
    self.reader = csv.reader(self.fp, dialect='excel-tab')
    header = self.reader.next()
    if len(header) == 0:
        print "Warning: no data from", self.url
        raise core.SmapException("no data!")

    try:
        self.field_map, self.map = make_field_idxs(self.meter_type, header,
                                                   location=self.location)
    except:
        traceback.print_exc()

    if not self.added:
        self.added = True
        for channel in self.map['sensors'] + self.map['meters']:
            try:
                self.add_timeseries('/%s/%s' % channel[2:4], channel[4],
                                    data_type='double')
                self.set_metadata('/%s/%s' % channel[2:4], {
                    'Extra/ChannelName': re.sub('\(.*\)', '',
                                                channel[0]).strip(),
                })
            except:
                traceback.print_exc()
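# The "%%3A" in the time format strings above is a doubled '%' followed
# by "3A", i.e. a literal url-encoded ':' -- hour 9, minute 30 becomes
# "9%3A30" ("9:30") on the wire:
assert "&mnuStartTime=%i%%3A%i" % (9, 30) == "&mnuStartTime=9%3A30"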
def setup(self, opts):
    if 'ConfModule' not in opts:
        raise core.SmapException("The LabJack driver requires an "
                                 "additional configuration module")
    cmps = opts.get('ConfModule').split('.')
    mod = __import__('.'.join(cmps), globals(), locals(), ['CONF'])

    self.labjacks = {}
    for ljname, ljconf in mod.CONF.iteritems():
        # create all the time series and calibration functions
        dev = ReconnectingUE9(ipAddress=ljconf['address'], ethernet=True)
        self.labjacks[ljname] = (ljconf, dev)

        for cname, cconf in ljconf['channels'].iteritems():
            cconf['calibrate'] = build_calibrate(cconf)
            path = '/%s/%s' % (ljname, cname)
            self.add_timeseries(path, cconf['unit'], data_type='double')
            meta = {'Extra/Register': str(cconf['register'])}
            meta.update(cconf.get('metadata', {}))
            self.set_metadata(path, meta)

    self.set_metadata('/', {
        'Instrument/Manufacturer': 'LabJack Corporation',
        'Instrument/Model': 'UE9',
        'Extra/Driver': 'smap.drivers.labjack.labjack.LabjackDriver'
    })
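# A hypothetical ConfModule, inferred only from the fields the driver
# reads above ('address', 'channels', 'unit', 'register', optional
# 'metadata'); the names, address, and register number are illustrative,
# and build_calibrate may expect additional calibration keys not shown.
CONF = {
    'lj1': {
        'address': '10.0.0.5',
        'channels': {
            'ain0': {
                'unit': 'V',
                'register': 0,
                'metadata': {'Extra/Description': 'analog input 0'},
            },
        },
    },
}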
def makeService(self, options):
    if options['data-dir'] is not None:
        if not os.access(options['data-dir'], os.X_OK | os.W_OK):
            raise core.SmapException("Cannot access " + options['data-dir'])
        smapconf.SERVER['DataDir'] = options['data-dir']

    inst = loader.load(options['conf'])

    # override defaults with command-line args
    smapconf.SERVER.update(
        dict([(k.lower(), v) for (k, v) in options.iteritems()
              if v is not None]))

    if 'SuggestThreadPool' in smapconf.SERVER:
        reactor.suggestThreadPoolSize(
            int(smapconf.SERVER['SuggestThreadPool']))

    inst.start()
    reactor.addSystemEventTrigger('before', 'shutdown', inst.stop)
    site = getSite(inst, docroot=smapconf.SERVER['docroot'])
    service = MultiService()

    # add HTTP and HTTPS servers to the twisted multiservice
    if 'port' in smapconf.SERVER:
        service.addService(
            internet.TCPServer(int(smapconf.SERVER['port']), site))
    if 'sslport' in smapconf.SERVER:
        service.addService(
            internet.SSLServer(int(smapconf.SERVER['sslport']), site,
                               SslServerContextFactory(smapconf.SERVER)))
    return service
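# The override step above, in isolation: keys are lowercased to match
# the config file's conventions and unset (None) options are skipped,
# so only flags actually given on the command line win. A small
# illustration with plain dicts:
defaults = {'port': 8080, 'datadir': '/tmp'}
cli_opts = {'Port': 9090, 'DataDir': None}
defaults.update(dict([(k.lower(), v) for (k, v) in cli_opts.iteritems()
                      if v is not None]))
assert defaults == {'port': 9090, 'datadir': '/tmp'}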
def process(self, input):
    # store the new data
    self.pending = extend(self.pending, input)

    # find the timestamp range covered by the buffered data
    startts = min(
        map(lambda x: np.min(x[:, 0]) if len(x) else np.inf, self.pending))
    endts = max(
        map(lambda x: np.max(x[:, 0]) if len(x) else 0, self.pending))

    rv = [null] * len(self.outputs)
    if startts == np.inf or endts == 0:
        return rv

    # snap both ends down to chunk boundaries, holding back
    # chunk_delay whole chunks at the end
    startts = int(startts - (startts % self.chunk_length))
    endts = int((endts - (endts % self.chunk_length)) -
                (self.chunk_length * self.chunk_delay))

    # iterate over the groups, applying the grouping operator to each
    for time in xrange(startts, endts, self.chunk_length):
        # print "group starting", time
        data = map(
            lambda x: x[np.where(
                (x[:, 0] >= time) &
                (x[:, 0] < time + self.chunk_length))] if len(x) else [],
            self.pending)

        # skip window if there's no data in it
        if self.skip_empty and sum(map(len, data)) == 0:
            continue
        data = [x if len(x) else np.array([[time, np.nan]]) for x in data]

        # apply
        opresult = self.bucket_op(data)
        if max(map(len, opresult)) > 1:
            raise core.SmapException(
                "Error! Grouping operators can not produce "
                "more than one result per group!")
        if self.snap_times:
            for x in opresult:
                x[:, 0] = time
        rv = extend(rv, opresult)

    # filter out the data we operated on
    self.pending = map(
        lambda x: x[np.nonzero(x[:, 0] >= endts)] if len(x) else null,
        self.pending)
    return rv
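# The chunk alignment above in isolation: timestamps are snapped down
# to a multiple of chunk_length, and the end is additionally held back
# by chunk_delay whole chunks so late-arriving data can still be
# binned. The values here are illustrative (milliseconds):
chunk_length, chunk_delay = 300000, 1
startts, endts = 1000123, 2000456
startts = int(startts - (startts % chunk_length))
endts = int((endts - (endts % chunk_length)) - (chunk_length * chunk_delay))
assert startts == 900000 and endts == 1500000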
def blocking_startup(self):
    # read the scale register from the dent, retrying a few times
    for i in range(len(self.elt_scales)):
        for attempt in xrange(5):
            try:
                scale = self.read_scale(self.base_addr + i)
            except IOError:
                scale = None
            if scale is not None:
                break
        if scale is None:
            raise core.SmapException(
                "Could not read scale from dent: cannot proceed (%s)" %
                (str(self.serverloc)))
        self.elt_scales[i] = self.elt_scales[i][0], scale
    print self.elt_scales
    reactor.callInThread(self.final_startup)
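# The retry pattern above, factored into a small helper as a sketch
# (not part of the driver): try `attempts` times, treating IOError as
# a miss, and return None if every attempt fails.
def retry_read(fn, attempts=5):
    for _ in xrange(attempts):
        try:
            return fn()
        except IOError:
            pass
    return None

# usage: scale = retry_read(lambda: self.read_scale(self.base_addr + i))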
def process(self, body):
    reader = csv.reader(StringIO.StringIO(body), dialect='excel-tab')
    header = reader.next()
    if len(header) == 0:
        print "Warning: no data from", self.url
        raise core.SmapException("no data!")

    try:
        self.field_map, self.map = make_field_idxs(self.meter_type, header,
                                                   location=self.location)
    except:
        traceback.print_exc()

    if not self.added:
        self.added = True
        for channel in self.map['sensors'] + self.map['meters']:
            try:
                self.add_timeseries('/%s/%s' % channel[2:4], channel[4],
                                    data_type='double')
                self.set_metadata('/%s/%s' % channel[2:4], {
                    'Extra/ChannelName': re.sub('\(.*\)', '',
                                                channel[0]).strip(),
                })
            except:
                traceback.print_exc()

    # add all the values
    for r in reader:
        ts = dtutil.strptime_tz(r[0], TIMEFMT, tzstr='UTC')
        if ts > self.push_hist:
            self.push_hist = ts
        ts = dtutil.dt2ts(ts)

        for descr, val in zip(self.field_map, r):
            if descr is None:
                continue
            try:
                self._add('/' + '/'.join(descr), ts, float(val))
            except ValueError:
                pass
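# The dispatch in the inner loop above, in isolation: field_map is a
# per-column sequence that is either None (ignore the column) or a path
# tuple, so zipping it against a row pairs each value with its series.
# The map and row contents here are illustrative only:
field_map = [None, ('meter', 'kW'), ('sensor', 'temp')]
row = ['2012-01-01 00:00:00', '1.5', 'bad']
points = []
for descr, val in zip(field_map, row):
    if descr is None:
        continue
    try:
        points.append(('/' + '/'.join(descr), float(val)))
    except ValueError:
        pass   # non-numeric readings are silently dropped
assert points == [('/meter/kW', 1.5)]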
def make_inclusive(range):
    if util.is_string(range):
        if range == 'inclusive':
            range = (True, True)
        elif range == 'inc-exc':
            range = (True, False)
        else:
            raise core.SmapException("Unsupported range: " + range)
    return range
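# make_inclusive in action: the two supported strings normalize to a
# (start-inclusive, end-inclusive) boolean pair, and anything that is
# not a string passes through untouched.
assert make_inclusive('inclusive') == (True, True)
assert make_inclusive('inc-exc') == (True, False)
assert make_inclusive((True, False)) == (True, False)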
def _process(self, data):
    lengths = set((x.shape[0] for x in data))
    if len(lengths) != 1:
        raise core.SmapException("paste: hstack: wrong sized inputs")
    return [np.hstack([data[0]] + map(lambda x: x[:, 1:], data[1:]))]
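# What the hstack above computes, with concrete numpy arrays: the first
# input keeps its timestamp column and every other input contributes
# only its value columns, yielding one wide row-aligned matrix.
import numpy as np
a = np.array([[0, 1.0], [10, 2.0]])   # columns: [timestamp, value]
b = np.array([[0, 5.0], [10, 6.0]])
out = np.hstack([a] + map(lambda x: x[:, 1:], [b]))
assert out.tolist() == [[0, 1.0, 5.0], [10, 2.0, 6.0]]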