Example #1
    def open_page(self):
        if not self.startdt:
            self.startdt = self.push_hist
        if not self.enddt:
            self.enddt = dtutil.now()

        start, end = urllib.quote(dtutil.strftime_tz(self.startdt, TIMEFMT)), \
            urllib.quote(dtutil.strftime_tz(self.enddt, TIMEFMT))

        url = self.url % (start, end)
        url += "&mnuStartMonth=%i&mnuStartDay=%i&mnuStartYear=%i" % \
            (self.startdt.month,
             self.startdt.day,
             self.startdt.year)
        url += "&mnuStartTime=%i%%3A%i" % (self.startdt.hour, 
                                           self.startdt.minute)
        url += "&mnuEndMonth=%i&mnuEndDay=%i&mnuEndYear=%i" % \
            (self.enddt.month,
             self.enddt.day,
             self.enddt.year)
        url += "&mnuEndTime=%i%%3A%i" % (self.enddt.hour, self.enddt.minute)
        log.msg("loading " + url)

        # send the auth preemptively to avoid the 403-redirect cycle...
        auth = "Basic " + base64.encodestring(":".join(self.auth))[:-1]
        d = self.agent.request("GET", url,
                               Headers({"Authorization": [auth]}))
        d.addCallback(self.get_response)
        return d
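
The preemptive Authorization header avoids the 403-redirect cycle the comment mentions. A minimal, self-contained sketch of the same Basic-auth construction, assuming auth is a (username, password) tuple:

import base64

def basic_auth_header(auth):
    # ":".join(("user", "secret")) -> "user:secret"; encodestring appends
    # a trailing newline, which the [:-1] slice drops.
    return "Basic " + base64.encodestring(":".join(auth))[:-1]

# basic_auth_header(("user", "secret")) == "Basic dXNlcjpzZWNyZXQ="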
Example #2
    def _update(self, val):
        print "update using", val
        try:
            point, value, quality, time = val
            if quality != 'Good':
                log.msg("bad quality on point " + point + ": " + quality)
                return

            # parse the timestamp in the timezone of the server
            if self.use_opc_timestamps == 'true':
                ts = dtutil.strptime_tz(time,
                                        self.opc_timefmt,
                                        self.opc_timezone)
                ts = dtutil.dt2ts(ts)
            else:
                ts = dtutil.dt2ts(dtutil.now())

            path = self.make_path(point)
            series = self.get_timeseries(path)
            if series:
                if series['Properties']['ReadingType'] == 'double':
                    series._add(ts, float(value))
                else:
                    series._add(ts, int(value))
        except:
            log.err()
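
For reference, the parse-and-convert round trip the handler relies on, sketched with hypothetical stand-ins for the driver's opc_timefmt and opc_timezone options:

# hypothetical format and zone; the real values come from the driver config
dt = dtutil.strptime_tz("06/01/12 13:30:00", "%m/%d/%y %H:%M:%S",
                        "America/Los_Angeles")  # timezone-aware datetime
ts = dtutil.dt2ts(dt)                           # UNIX seconds, as series._add expects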
Example #3
 def urlgen(self, prefix, suffix, offset):
     """Generate the url for nyiso feeds. The produced output is
     "Prefix"+date+"Suffix". The offset is used when requesting future or
     past dates, e.g. for forcasted load"""
     basetime = dtutil.now("America/New_York")
     reqtime = basetime + datetime.timedelta(seconds=offset)
     url = reqtime.strftime("%Y%m%d")
     url = prefix + url + suffix
     return url
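
A hypothetical call from inside the driver, with made-up prefix and suffix (the real values come from the feed configuration), requesting tomorrow's forecasted-load file:

# offset is in seconds; 86400 shifts the requested date one day forward
url = self.urlgen("http://mis.nyiso.com/public/csv/isolf/", "isolf.csv", 86400)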
Example #5
def p_abstime(t):
    """abstime : NUMBER 
               | QSTRING
               | NOW"""
    if t[1] == 'now':
        t[0] = dtutil.now()
    elif isinstance(t[1], str):
        t[0] = parse_time(t[1])
    else:
        t[0] = dtutil.ts2dt(t[1] / 1000)
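
Note that the NUMBER branch assumes millisecond timestamps, and under Python 2 the / 1000 is integer division, so any sub-second part is silently truncated:

dtutil.ts2dt(1341100800123 / 1000)  # same as dtutil.ts2dt(1341100800)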
Example #7
def smap_load():
    p = get_parser()
    opts, args = p.parse_args()
    if len(args) < 1:
        p.error("conf file is a required argument")

    log.startLogging(sys.stdout)
    sections = map(util.norm_path, args[1:])
    inst = loader.load(args[0], sections=sections)

    for dpath, driver in inst.drivers.iteritems():
        if len(sections) > 1 and not dpath in sections:
            continue

        if not hasattr(driver, "load"):
            log.err('Error: driver does not have "load" method')
            sys.exit(1)

        if hasattr(driver, 'reset') and \
                callable(driver.reset) and \
                opts.reset:
            log.msg("Resetting driver")
            driver.reset()

    try: 
        # find the date range for loading...
        st, et = None, None
        now = dtutil.now(tzstr=opts.timezone)
        if (opts.start_time=="now_minus_1hour"):
            st = now - datetime.timedelta(hours=1)
        else:
            st = dtutil.strptime_tz(opts.start_time, opts.timefmt, opts.timezone)

        if (opts.end_time=="now"):
            et = now
        else:
            et = dtutil.strptime_tz(opts.end_time, opts.timefmt, opts.timezone)
    except:
        # fall back to st = et = None if the time options don't parse
        pass

    dl = []
    for dpath, driver in inst.drivers.iteritems():
        if len(sections) > 1 and not dpath in sections:
            continue
        # try: 
        #     loader = driver.load(st, et, cache=opts.cache)
        # except TypeError:
        dl.append(defer.maybeDeferred(driver.load, st, et, cache=opts.cache))

    dl = defer.DeferredList(dl, consumeErrors=True)
    dl.addCallback(lambda x: inst._flush())
    dl.addCallback(lambda x: reactor.callFromThread(reactor.stop))

    reactor.run()
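
defer.maybeDeferred is what lets plain blocking drivers coexist with asynchronous ones here: an ordinary return value is wrapped in an already-fired Deferred, while a returned Deferred passes through unchanged. A minimal sketch with a hypothetical synchronous load:

from twisted.internet import defer
from twisted.python import log

def sync_load(st, et, cache=True):
    return "done"  # a blocking driver can simply return a value

d = defer.maybeDeferred(sync_load, None, None, cache=True)
d.addCallback(lambda result: log.msg("loaded: " + result))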
Example #8
    def open_page(self):
        if not self.startdt:
            self.startdt = self.push_hist
        if not self.enddt:
            self.enddt = dtutil.now()

        start, end = urllib.quote(dtutil.strftime_tz(self.startdt, TIMEFMT)), \
            urllib.quote(dtutil.strftime_tz(self.enddt, TIMEFMT))

        url = self.url % (start, end)
        url += "&mnuStartMonth=%i&mnuStartDay=%i&mnuStartYear=%i" % \
            (self.startdt.month,
             self.startdt.day,
             self.startdt.year)
        url += "&mnuStartTime=%i%%3A%i" % (self.startdt.hour,
                                           self.startdt.minute)
        url += "&mnuEndMonth=%i&mnuEndDay=%i&mnuEndYear=%i" % \
            (self.enddt.month,
             self.enddt.day,
             self.enddt.year)
        url += "&mnuEndTime=%i%%3A%i" % (self.enddt.hour, self.enddt.minute)
        print "loading", url

        self.fp = httputils.load_http(url, as_fp=True, auth=auth.BMOAUTH)
        if not self.fp:
            raise core.SmapException("timeout!")
        self.reader = csv.reader(self.fp, dialect='excel-tab')
        header = self.reader.next()
        if len(header) == 0:
            print "Warning: no data from", self.url
            raise core.SmapException("no data!")
        try:
            self.field_map, self.map = make_field_idxs(self.meter_type,
                                                       header,
                                                       location=self.location)
        except:
            traceback.print_exc()

        if not self.added:
            self.added = True
            for channel in self.map['sensors'] + self.map['meters']:
                try:
                    self.add_timeseries('/%s/%s' % channel[2:4],
                                        channel[4],
                                        data_type='double')
                    self.set_metadata(
                        '/%s/%s' % channel[2:4], {
                            'Extra/ChannelName':
                            re.sub(r'\(.*\)', '', channel[0]).strip(),
                        })

                except:
                    traceback.print_exc()
Example #9
 def _update(self):
     vals = self.opc.read(group="smap-points-group")
     for point, value, quality, time in vals:
         # parse the timestamp in the timezone of the server
         if time is not None:
             ts = dtutil.strptime_tz(time, self.opc_timefmt, self.opc_timezone)
             ts = dtutil.dt2ts(ts)
         else:
             ts = dtutil.now(self.opc_timezone)
             ts = dtutil.dt2ts(ts)
         if self.get_timeseries(self.make_path(point)) and value is not None:
             if isinstance(value, bool): value = int(value)
             self._add(self.make_path(point), ts, float(value))
Example #12
    def setup(self, opts):
        self.url = opts['Url']
        self.meter_type = opts['Metadata/Instrument/Model']
        self.location = opts.get('Metadata/Location/Building', None)
        self.rate = int(opts.get('Rate', 3600))
        self.running = False

#         if not sensordb.get_map(self.meter_type, ):
#             raise SmapLoadError(self.meter_type + " is not a known obvius meter type")
        self.push_hist = dtutil.now() - datetime.timedelta(hours=1)

        self.added = False
        self.set_metadata('/', {
                'Extra/Driver' : 'smap.drivers.obvius.bmo.BMOLoader' })

        print self.url, self.rate
Example #13
 def forecast_SPP_get(self):
     texas_today = dtutil.now("America/Chicago")
     ptime = texas_today.strftime("%Y%m%d")
     url = self.DATA_TYPES["Forecasted SPP"]["Uri"].replace(
         "<DATE HERE>", ptime)
     print(url)
     SPP = urllib2.urlopen(url)
     lines = SPP.readlines()
     SPP.close()
     while 'td class="headerValue' not in lines[0]:
         lines.pop(0)
     while "</tr>" not in lines[len(lines) - 1]:
         lines.pop()
     giantstr = ""
     for line in lines:
         giantstr += line
     lines = giantstr.split("</tr>\r\r\n\t\t\t\t\t\t\t<tr>")
     intermed_out = []
     for line in lines:
         temp = line.replace("\n", "")
         temp = temp.replace("\t", "")
         temp = temp.replace("\r\r", " ")
         temp = re.sub(r'\<.*?\>', '', temp)
         temp = temp.strip().split()
         intermed_out.append(temp)
     lines = intermed_out
     columns = intermed_out.pop(0)[4:]
     for place in columns:
         if place not in self.ercot_out["SPP"].keys():
             self.ercot_out["SPP"][place] = {"Forecasted": []}
         else:
             self.ercot_out["SPP"][place]["Forecasted"] = []
     for line in lines:
         line.pop(0)
         ptime = line.pop(0) + ":00"
         if "24" in ptime:
             ptime = self.parse_time("00:00", 1)
         else:
             ptime = self.parse_time(ptime, 0)
         for x in range(0, len(line)):
             point = [ptime, float(line[x])]
             self.ercot_out["SPP"][columns[x]]["Forecasted"].append(point)
Example #16
    def poll_stream(self, market, load_old):
        def _push_data(readings, market):
            # Zip together the values for all keys
            for vals in zip(*readings.values()):
                if vals[0][0] > self.last_reading[market]:
                    # Add all smap points for this time
                    for (k, v) in zip(readings.keys(), vals):
                        logging.debug("add /%s/%s: %s" % (market, k, str(v)))
                        self.add('/%s/%s' % (market, k), *v)
                        self.last_reading[market] = vals[0][0]

        # Load old data
        if load_old:
            for day in range(1, 2):
                stop = dtutil.now() - datetime.timedelta(days=day)
                start = stop - datetime.timedelta(days=2)
                try:
                    readings = self.get_readings(market, start, stop)
                    _push_data(readings, market)
                except Exception:
                    logging.exception('Error getting reading')
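
_push_data relies on the series in readings being index-aligned, so zip(*readings.values()) yields one tuple of (timestamp, value) pairs per interval; it also relies on .keys() and .values() iterating in the same order, which Python guarantees as long as the dict is not modified in between. With illustrative data:

readings = {'total_price': [(1000, 30.5), (1300, 31.0)],
            'loss':        [(1000,  0.4), (1300,  0.5)]}
# first iteration: vals == ((1000, 30.5), (1000, 0.4)), and
# zip(readings.keys(), vals) pairs each series name with its reading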
Example #17
class CaIsoPrice(SmapDriver):

    MARKETS = [('DAM', 30 * 60, 'Day-ahead market'),
               ('HASP', 10 * 60, 'Hour-ahead scheduling process'),
               ('RTM', 2 * 60, 'Real-time market')]
    FEEDS = [('total_price', '$', 'total price'), ('loss', '$', 'loss price'),
             ('congestion', '$', 'congestion price'),
             ('energy', '$', 'energy price')]

    def setup(self, opts):
        # get our location
        self.last_reading = {}
        for m, t, d in self.MARKETS:
            self.last_reading[m] = 0

        self.location = opts.get('Location', 'OAKLAND_1_N001')
        self.set_metadata(
            '/', {
                'Location/Uri': 'http://oasis.caiso.com/mrtu-oasis/SingleZip',
                'Extra/IsoNode': self.location,
                'Extra/Driver': 'smap.drivers.caiso_price.CaIsoPrice'
            })

        # add the feeds
        for (m, i, md) in self.MARKETS:
            for (f, u, fd) in self.FEEDS:
                path = '/%s/%s' % (m, f)
                self.add_timeseries(path,
                                    u,
                                    data_type='double',
                                    description=md + ' ' + fd)

    def start(self):
        for (market, interval, description) in self.MARKETS:
            periodicSequentialCall(self.poll_stream, market,
                                   False).start(interval)

    def get_readings(self, market, start_date, stop_date):
        readings = {
            'total_price': [],
            'loss': [],
            'energy': [],
            'congestion': []
        }
        print "get_readings", market
        if market == 'DAM':
            q = 'PRC_LMP'
            m = 'DAM'
        elif market == 'HASP':
            q = 'PRC_HASP_LMP'
            m = 'HASP'
        elif market == 'RTM':
            q = 'PRC_INTVL_LMP'
            m = 'RTM'
        else:
            raise Exception("Invalid market: " + market)

        url = 'http://oasis.caiso.com/mrtu-oasis/SingleZip?'
        url += 'queryname=' + q
        url += '&startdate=' + dtutil.strftime_tz(start_date, '%Y%m%d',
                                                  'US/Pacific')
        url += '&enddate=' + dtutil.strftime_tz(stop_date, '%Y%m%d',
                                                'US/Pacific')
        url += '&market_run_id=' + m
        url += '&node=' + self.location

        logging.info("Get url %s" % url)
        h = None
        for d in [5, 20, 60]:
            try:
                h = urllib2.urlopen(url, timeout=50)
                break
            except urllib2.URLError:
                logging.warn("urlopen failed.")
            time.sleep(d)
        if h is None:
            raise Exception("Failed to open url: %s" % url)

        z = zipfile.ZipFile(StringIO.StringIO(h.read()))
        xml = z.read(z.namelist()[0])
        b = BeautifulSoup.BeautifulSoup(xml)

        sec_per_int = int(b.find('m:sec_per_interval').contents[0])

        rows = b.findAll('m:report_data')
        for d in rows:
            res = d.find('m:resource_name').contents[0]
            item = d.find('m:data_item').contents[0]
            day = d.find('m:opr_date').contents[0]
            inter = int(d.find('m:interval_num').contents[0])
            val = float(d.find('m:value').contents[0])

            secs = (inter - 1) * sec_per_int
            dt = dtutil.strptime_tz(
                day, '%Y-%m-%d',
                'US/Pacific') + datetime.timedelta(seconds=secs)
            timestamp = dtutil.dt2ts(dt)

            key = None
            if item == 'LMP_PRC':
                key = 'total_price'
            elif item == 'LMP_LOSS_PRC':
                key = 'loss'
            elif item == 'LMP_ENE_PRC':
                key = 'energy'
            elif item == 'LMP_CONG_PRC':
                key = 'congestion'
            else:
                continue

            readings[key].append((timestamp, val))

        num_readings = len(readings[readings.keys()[0]])
        for k in readings.keys():
            if len(readings[k]) != num_readings:
                raise Exception('Missing readings')

            readings[k] = sorted(readings[k], key=lambda (t, v): t)

        return readings

    def poll_stream(self, market, load_old):
        def _push_data(readings, market):
            # Zip together the values for all keys
            for vals in zip(*readings.values()):
                if vals[0][0] > self.last_reading[market]:
                    # Add all smap points for this time
                    for (k, v) in zip(readings.keys(), vals):
                        logging.debug("add /%s/%s: %s" % (market, k, str(v)))
                        self.add('/%s/%s' % (market, k), *v)
                        self.last_reading[market] = vals[0][0]

        # Load old data
        if load_old:
            for day in range(1, 2):
                stop = dtutil.now() - datetime.timedelta(days=day)
                start = stop - datetime.timedelta(days=2)
                try:
                    readings = self.get_readings(market, start, stop)
                    _push_data(readings, market)
                except Exception:
                    logging.exception('Error getting reading')

        # Continuously get new data
        try:
            stop = dtutil.now()
            start = stop - datetime.timedelta(days=1)

            readings = self.get_readings(market, start, stop)

            rt = readings['total_price'][-1][0]

            if rt > self.last_reading[market]:
                logging.info("NEW %s READING (%s) at time %s" %
                             (market,
                              dtutil.strftime_tz(dtutil.ts2dt(rt),
                                                 '%m/%d %H:%M', 'US/Pacific'),
                              dtutil.strftime_tz(dtutil.now(), '%m/%d %H:%M',
                                                 'US/Pacific')))
                _push_data(readings, market)
                # self.last_reading = rt

        except Exception:
            logging.exception('Error getting reading')
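
The interval arithmetic in get_readings turns CAISO's 1-based interval numbers into offsets from local midnight. For example, with sec_per_int == 3600, interval 5 of 2012-06-01 works out as:

secs = (5 - 1) * 3600                            # 14400 s past midnight
dt = dtutil.strptime_tz('2012-06-01', '%Y-%m-%d', 'US/Pacific') \
    + datetime.timedelta(seconds=secs)           # 2012-06-01 04:00 Pacific
timestamp = dtutil.dt2ts(dt)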
Example #18
 def parse_time(self, time_str, dayoff):
     texas_today = dtutil.now("America/Chicago")
     texas_time = texas_today + datetime.timedelta(seconds=dayoff * 86400)
     ptime = texas_time.strftime("%Y%m%d") + " " + time_str
     out = time.strptime(ptime, "%Y%m%d %H:%M")
     return int(time.mktime(out))
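
One caveat: time.mktime interprets its struct_time argument in the host's local timezone, so the epoch value above is only correct when the process actually runs in America/Chicago. A timezone-safe variant using the dtutil helpers already in scope might be:

def parse_time(self, time_str, dayoff):
    texas_today = dtutil.now("America/Chicago")
    texas_time = texas_today + datetime.timedelta(seconds=dayoff * 86400)
    ptime = texas_time.strftime("%Y%m%d") + " " + time_str
    dt = dtutil.strptime_tz(ptime, "%Y%m%d %H:%M", "America/Chicago")
    return int(dtutil.dt2ts(dt))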
Example #19
 def urlgen(self, pt1, pt2, deltadays):
     isotime = (dtutil.now("America/New_York") +
                                     datetime.timedelta(days=deltadays))
     isotime = isotime.strftime("%Y%m%d")
     return pt1 + isotime + pt2