示例#1
0
    def get_trade(self, latest=False,
                  start_at=False, end_at=False, **kwargs):
        """Return parsed trade data points for the requested time range.

        Dates with missing or unparseable NVEnergy data are skipped with a
        warning rather than aborting the whole collection.
        """
        # set args
        self.handle_options(data='trade', latest=latest,
                            start_at=start_at, end_at=end_at, **kwargs)

        # set up storage
        parsed_data = []

        # collect data
        for this_date in self.dates():
            # fetch
            try:
                df, mode = self.fetch_df(this_date)
            except (HTTPError, ValueError):
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning('No data available in NVEnergy at %s' % this_date)
                continue

            # store
            try:
                parsed_data += self.parse_trade(df, this_date, mode)
            except KeyError:
                LOGGER.warning('Unparseable data available in NVEnergy at %s: %s' % (this_date, df))
                continue

        # return
        return self.time_subset(parsed_data)
示例#2
0
    def request(self, *args, **kwargs):
        """Wrap the base request, returning None on an HTTP 400 Bad Request."""
        response = super(PJMClient, self).request(*args, **kwargs)
        if response and response.status_code == 400:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('PJM request returned Bad Request %s' % response)
            return None

        return response
示例#3
0
文件: isone.py 项目: torenunez/pyiso
    def get_lmp(self, node_id='INTERNALHUB', latest=True, start_at=False, end_at=False, **kwargs):
        """Return LMP data points for the given node as a list of dicts.

        :raises ValueError: if node_id has no known location mapping.
        """
        # set args
        self.handle_options(data='lmp', latest=latest,
                            start_at=start_at, end_at=end_at, node_id=node_id, **kwargs)
        # get location id
        try:
            locationid = self.locations[node_id.upper()]
        except KeyError:
            raise ValueError('No LMP data available for location %s' % node_id)

        # set up storage
        raw_data = []
        # collect raw data
        for endpoint in self.request_endpoints(locationid):
            # carry out request
            data = self.fetch_data(endpoint, self.auth)

            # pull out data
            try:
                raw_data += self.parse_json_lmp_data(data)
            except ValueError as e:
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning(e)
                continue

        # parse and slice
        df = self._parse_json(raw_data)
        df = self.slice_times(df)

        # return
        # 'records' is the correct orient; the misspelled 'record' only worked in
        # old pandas via prefix matching and raises ValueError in modern pandas
        return df.to_dict(orient='records')
示例#4
0
文件: yukon.py 项目: rariss/pyiso_lmp
 def get_trade(self,
               latest=False,
               yesterday=False,
               start_at=False,
               end_at=False,
               **kwargs):
     """Return zero-valued hourly trade points (Yukon is an isolated grid)."""
     self.handle_options(latest=latest,
                         yesterday=yesterday,
                         start_at=start_at,
                         end_at=end_at,
                         data='trade')
     # http://yukonenergy.ca/energy-in-yukon/electricity-101/electricity-library/whats-an-isolated-grid-and-what-does-that-mean-for-me
     # Logger.warn is deprecated (removed in Python 3.13); use warning
     LOGGER.warning(
         'Yukon Energy is an isolated grid. Trade will always be zero.')
     trades = []
     # round the range start down to the hour, then emit one point per hour
     hourly_rounded_dt = self.options.get('start_at').replace(minute=0,
                                                              second=0,
                                                              microsecond=0)
     while hourly_rounded_dt <= self.options.get('end_at'):
         if self.options['start_at'] <= hourly_rounded_dt <= self.options[
                 'end_at']:
             trades.append({
                 'ba_name': self.NAME,
                 'timestamp': Timestamp(hourly_rounded_dt),
                 'freq': self.FREQUENCY_CHOICES.hourly,
                 'market': self.MARKET_CHOICES.hourly,
                 'net_exp_MW': 0
             })
         hourly_rounded_dt = hourly_rounded_dt + timedelta(hours=1)
     return trades
示例#5
0
文件: pjm.py 项目: cnblevins/pyiso
    def request(self, *args, **kwargs):
        """Wrap the base request, returning None on an HTTP 400 Bad Request."""
        response = super(PJMClient, self).request(*args, **kwargs)
        if response and response.status_code == 400:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('PJM request returned Bad Request %s' % response)
            return None

        return response
示例#6
0
文件: bpa.py 项目: abamberger/pyisoDB
    def fetch_recent(self):
        """Get BPA generation or load data from the past week.

        :return: parsed DataFrame, or an empty DataFrame when no data was found.
        :raises ValueError: if self.options['data'] is neither 'gen' nor 'load'.
        """
        # request text file
        response = self.request(self.base_url + 'wind/baltwg.txt')

        # set up columns to get
        mode = self.options['data']
        if mode == 'gen':
            cols = [0, 2, 3, 4]
        elif mode == 'load':
            cols = [0, 1]
        else:
            raise ValueError('Cannot fetch data without a data mode')

        # parse like tsv
        if response:
            df = self.parse_to_df(response.text,
                                  skiprows=6,
                                  header=0,
                                  delimiter='\t',
                                  index_col=0,
                                  usecols=cols,
                                  date_parser=self.date_parser)
        else:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('No recent data found for BPA %s' % self.options)
            df = pd.DataFrame()

        return df
示例#7
0
文件: yukon.py 项目: rariss/pyiso_lmp
 def get_load(self,
              latest=False,
              yesterday=False,
              start_at=False,
              end_at=False,
              **kwargs):
     """Return load data points for the latest snapshot or a valid date range.

     Returns an empty list (with a warning) for forecasts or out-of-range
     requests, since neither is supported by this data source.
     """
     self.handle_options(latest=latest,
                         yesterday=yesterday,
                         start_at=start_at,
                         end_at=end_at,
                         data='load')
     loads = []
     if latest:
         self._load_latest(loads)
     elif self._is_valid_date_range():
         self._hourly_range(loads)
     else:
         if self.options.get('forecast', False):
             # Logger.warn is deprecated (removed in Python 3.13); use warning
             LOGGER.warning(self.NAME + ': Load forecasts are not supported.')
         else:
             msg = '%s: Requested date range %s to %s is outside range of available data from %s to %s.' % \
                   (self.NAME, self.options.get('start_at', None), self.options.get('end_at', None),
                    self.options.get('earliest_data_at', None), self.options.get('latest_data_at', None))
             LOGGER.warning(msg)
     return loads
示例#8
0
文件: isone.py 项目: cnblevins/pyiso
    def get_load(self, latest=False, start_at=False, end_at=False,
                 forecast=False, **kwargs):
        """Return serialized load data points for the requested time range.

        Endpoints whose payload cannot be parsed are skipped with a warning;
        returns an empty list when nothing parseable was collected.
        """
        # set args
        self.handle_options(data='load', latest=latest, forecast=forecast,
                            start_at=start_at, end_at=end_at, **kwargs)

        # set up storage
        raw_data = []

        # collect raw data
        for endpoint in self.request_endpoints():
            # carry out request
            data = self.fetch_data(endpoint, self.auth)

            # pull out data
            try:
                raw_data += self.parse_json_load_data(data)
            except ValueError as e:
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning(e)
                continue

        # parse data
        try:
            df = self._parse_json(raw_data)
        except ValueError:
            # no parseable data at all; return empty rather than raising
            return []
        df = self.slice_times(df)

        # return
        return self.serialize_faster(df, drop_index=True)
示例#9
0
文件: isone.py 项目: cnblevins/pyiso
    def get_lmp(self, node_id='INTERNALHUB', latest=True, start_at=False, end_at=False, **kwargs):
        """Return LMP data points for the given node as a list of dicts.

        :raises ValueError: if node_id has no known location mapping.
        """
        # set args
        self.handle_options(data='lmp', latest=latest,
                            start_at=start_at, end_at=end_at, node_id=node_id, **kwargs)
        # get location id
        try:
            locationid = self.locations[node_id.upper()]
        except KeyError:
            raise ValueError('No LMP data available for location %s' % node_id)

        # set up storage
        raw_data = []
        # collect raw data
        for endpoint in self.request_endpoints(locationid):
            # carry out request
            data = self.fetch_data(endpoint, self.auth)

            # pull out data
            try:
                raw_data += self.parse_json_lmp_data(data)
            except ValueError as e:
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning(e)
                continue

        # parse and slice
        df = self._parse_json(raw_data)
        df = self.slice_times(df)

        # return
        # 'records' is the correct orient; the misspelled 'record' only worked in
        # old pandas via prefix matching and raises ValueError in modern pandas
        return df.to_dict(orient='records')
示例#10
0
    def get_load(self,
                 latest=False,
                 yesterday=False,
                 start_at=False,
                 end_at=False,
                 **kwargs):
        """Return AESO load data for the latest report or a clamped date range.

        The requested range is clamped to the earliest available data
        (2000-01-01 Mountain time) and the end of the current local day.
        """
        super(AESOClient, self).handle_options(latest=latest,
                                               yesterday=yesterday,
                                               start_at=start_at,
                                               end_at=end_at,
                                               **kwargs)

        if latest:
            return self._get_latest_report(request_type=ParserFormat.load)
        elif self.options.get('start_at', None) and self.options.get(
                'end_at', None):
            earliest_load_dt = self.mtn_tz.localize(
                datetime(year=2000, month=1, day=1, hour=0, minute=0,
                         second=0))
            latest_load_dt = self.local_now().replace(hour=23,
                                                      minute=59,
                                                      second=59,
                                                      microsecond=999999)
            # clamp the requested range into the available window
            start_at = max(self.options['start_at'],
                           earliest_load_dt).astimezone(self.mtn_tz)
            end_at = min(self.options['end_at'],
                         latest_load_dt).astimezone(self.mtn_tz)
            return self._get_load_for_date_range(start_at=start_at,
                                                 end_at=end_at)
        else:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('No valid options were supplied.')
示例#11
0
文件: isone.py 项目: rariss/pyiso_lmp
    def get_load(self, latest=False, start_at=False, end_at=False,
                 forecast=False, **kwargs):
        """Return serialized load data points for the requested time range.

        Endpoints whose payload cannot be parsed are skipped with a warning;
        returns an empty list when nothing parseable was collected.
        """
        # set args
        self.handle_options(data='load', latest=latest, forecast=forecast,
                            start_at=start_at, end_at=end_at, **kwargs)

        # set up storage
        raw_data = []

        # collect raw data
        for endpoint in self.request_endpoints():
            # carry out request
            data = self.fetch_data(endpoint, self.auth)

            # pull out data
            try:
                raw_data += self.parse_json_load_data(data)
            except ValueError as e:
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning(e)
                continue

        # parse data
        try:
            df = self._parse_json(raw_data)
        except ValueError:
            # no parseable data at all; return empty rather than raising
            return []
        df = self.slice_times(df)

        # return
        return self.serialize_faster(df, drop_index=True)
示例#12
0
文件: base.py 项目: rariss/pyiso_lmp
    def _dst_active_hours_for_transition_day(self, local_dt_index):
        """
        When attempting to localize a timezone-naive list of dates, the daylight savings status may be ambiguous. This
        method is meant as a fallback when the ambiguous='infer' datetime handling in pandas fails. It assumes
        that the datetime index is a daylight saving transition day.

        :param pandas.DatetimeIndex local_dt_index: A list of timezone-naive DatetimeIndex values.
        :return: A list of bool values indicating whether daylight savings time is active for the list provided.
            This returned list of boolean value is useful for passing to pandas 'ambiguous' kwarg.
        :rtype: list
        """
        dst_active_list = []
        hour_idx = local_dt_index.hour
        # too few values to infer anything; return empty list
        if len(hour_idx) > 3:
            starting_timestamp = local_dt_index[0]
            starting_month = starting_timestamp.month
            starting_hour = starting_timestamp.hour

            if starting_month == 3 and starting_hour == 0:
                # spring-forward day: DST becomes active at 02:00
                dst_active_list = [h > 1 for h in hour_idx]
            elif starting_month == 11 and starting_hour == 0:
                # fall-back day: DST is active only before 02:00
                dst_active_list = [h < 2 for h in hour_idx]
            elif 3 < starting_month < 11:
                dst_active_list = [True for h in hour_idx]
            elif starting_month < 3 or starting_month > 11:
                dst_active_list = [False for h in hour_idx]
            else:
                # Logger.warn is deprecated (removed in Python 3.13); use warning.
                # Also fixes the 'Uanble' typo in the log message.
                LOGGER.warning(
                    "Unable to infer fallback DST status for ambiguous DatetimeIndex values."
                )
        return dst_active_list
示例#13
0
文件: base.py 项目: WattTime/pyiso
    def _dst_active_hours_for_transition_day(self, local_dt_index):
        """
        When attempting to localize a timezone-naive list of dates, the daylight savings status may be ambiguous. This
        method is meant as a fallback when the ambiguous='infer' datetime handling in pandas fails. It assumes
        that the datetime index is a daylight saving transition day.

        :param pandas.DatetimeIndex local_dt_index: A list of timezone-naive DatetimeIndex values.
        :return: A list of bool values indicating whether daylight savings time is active for the list provided.
            This returned list of boolean value is useful for passing to pandas 'ambiguous' kwarg.
        :rtype: list
        """
        dst_active_list = []
        hour_idx = local_dt_index.hour
        # too few values to infer anything; return empty list
        if len(hour_idx) > 3:
            starting_timestamp = local_dt_index[0]
            starting_month = starting_timestamp.month
            starting_hour = starting_timestamp.hour

            if starting_month == 3 and starting_hour == 0:
                # spring-forward day: DST becomes active at 02:00
                dst_active_list = [h > 1 for h in hour_idx]
            elif starting_month == 11 and starting_hour == 0:
                # fall-back day: DST is active only before 02:00
                dst_active_list = [h < 2 for h in hour_idx]
            elif 3 < starting_month < 11:
                dst_active_list = [True for h in hour_idx]
            elif starting_month < 3 or starting_month > 11:
                dst_active_list = [False for h in hour_idx]
            else:
                # Logger.warn is deprecated (removed in Python 3.13); use warning.
                # Also fixes the 'Uanble' typo in the log message.
                LOGGER.warning("Unable to infer fallback DST status for ambiguous DatetimeIndex values.")
        return dst_active_list
示例#14
0
    def get_trade(self, latest=False, start_at=False, end_at=False, **kwargs):
        """Return parsed trade data points for the requested time range.

        Dates with missing or unparseable NVEnergy data are skipped with a
        warning rather than aborting the whole collection.
        """
        # set args
        self.handle_options(data='trade',
                            latest=latest,
                            start_at=start_at,
                            end_at=end_at,
                            **kwargs)

        # set up storage
        parsed_data = []

        # collect data
        for this_date in self.dates():
            # fetch
            try:
                df, mode = self.fetch_df(this_date)
            except (HTTPError, ValueError):
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning('No data available in NVEnergy at %s' % this_date)
                continue

            # store
            try:
                parsed_data += self.parse_trade(df, this_date, mode)
            except KeyError:
                LOGGER.warning(
                    'Unparseable data available in NVEnergy at %s: %s' %
                    (this_date, df))
                continue

        # return
        return self.time_subset(parsed_data)
示例#15
0
 def get_trade(self,
               latest=False,
               yesterday=False,
               start_at=False,
               end_at=False,
               **kwargs):
     """Return trade data points for the latest snapshot or a valid date range.

     Returns an empty list (with a warning) for forecasts or out-of-range
     requests, since neither is supported by this data source.
     """
     self.handle_options(latest=latest,
                         yesterday=yesterday,
                         start_at=start_at,
                         end_at=end_at,
                         data='trade')
     trades = []
     if latest:
         self._trade_latest(trades)
     elif self._is_valid_date_range():
         self._trade_historical(trades)
     else:
         if self.options.get('forecast', False):
             # Logger.warn is deprecated (removed in Python 3.13); use warning
             LOGGER.warning(self.NAME + ': Trade forecasts are not supported.')
         else:
             msg = '%s: Requested date range %s to %s is outside range of available data from %s to %s.' % \
                   (self.NAME, self.options.get('start_at', None), self.options.get('end_at', None),
                    self.options.get('earliest_data_at', None), self.options.get('latest_data_at', None))
             LOGGER.warning(msg)
     return trades
示例#16
0
文件: nspower.py 项目: jleyden/pyiso
 def get_generation(self,
                    latest=False,
                    yesterday=False,
                    start_at=False,
                    end_at=False,
                    **kwargs):
     """Return generation mix data for the latest snapshot or a valid range.

     Returns an empty list (with a warning) for forecasts or out-of-range
     requests, since neither is supported by this data source.
     """
     self.handle_options(latest=latest,
                         yesterday=yesterday,
                         start_at=start_at,
                         end_at=end_at,
                         data='gen',
                         **kwargs)
     genmix = []
     if latest:
         self._generation_latest(genmix)
     elif self._is_valid_date_range():
         self._generation_range(genmix)
     else:
         if self.options.get('forecast', False):
             # Logger.warn is deprecated (removed in Python 3.13); use warning
             LOGGER.warning(self.NAME +
                            ': Generation mix forecasts are not supported.')
         else:
             msg = '%s: Requested date range %s to %s is outside range of available data from %s to %s.' % \
                   (self.NAME, self.options.get('start_at', None), self.options.get('end_at', None),
                    self.options.get('earliest_data_at', None), self.options.get('latest_data_at', None))
             LOGGER.warning(msg)
     return genmix
示例#17
0
文件: caiso.py 项目: avances123/pyiso
 def fetch_todays_outlook_renewables(self):
     """Fetch and parse the CAISO today's-outlook renewables page.

     :return: parsed BeautifulSoup document, or None if the request failed.
     """
     # get renewables data
     response = self.request(self.base_url_outlook+'renewables.html')
     try:
         # pass the parser explicitly (consistent with the other copy of this
         # method) so BeautifulSoup doesn't pick one platform-dependently
         return BeautifulSoup(response.content, 'lxml')
     except AttributeError:
         # response is None when the request failed
         # Logger.warn is deprecated (removed in Python 3.13); use warning
         LOGGER.warning('No response for CAISO today outlook renewables')
         return None
示例#18
0
 def fetch_todays_outlook_renewables(self):
     """Fetch and parse the CAISO today's-outlook renewables page.

     :return: parsed BeautifulSoup document, or None if the request failed.
     """
     # get renewables data
     response = self.request(self.base_url_outlook + 'renewables.html')
     try:
         return BeautifulSoup(response.content, 'lxml')
     except AttributeError:
         # response is None when the request failed
         # Logger.warn is deprecated (removed in Python 3.13); use warning
         LOGGER.warning('No response for CAISO today outlook renewables')
         return None
示例#19
0
文件: pjm.py 项目: cnblevins/pyiso
    def get_load(self, latest=False, start_at=None, end_at=None, forecast=False, **kwargs):
        """Return PJM load data points.

        Dispatches to the forecast feed, the historical feed, or the real-time
        eData/OASIS feeds depending on the parsed options.
        """
        # set args
        self.handle_options(data='load', latest=latest,
                            start_at=start_at, end_at=end_at, forecast=forecast,
                            **kwargs)

        if self.options['forecast']:
            # fetch from eData
            df = self.fetch_edata_series('ForecastedLoadHistory', {'name': 'PJM RTO Total'})
            sliced = self.slice_times(df)
            sliced.columns = ['load_MW']

            # format
            extras = {
                'freq': self.FREQUENCY_CHOICES.hourly,
                'market': self.MARKET_CHOICES.dam,
                'ba_name': self.NAME,
            }
            data = self.serialize_faster(sliced, extras=extras)

            # return
            return data

        elif self.options['end_at'] and self.options['end_at'] < datetime.now(pytz.utc) - timedelta(hours=1):
            # range ends more than an hour ago: use the historical feed
            df = self.fetch_historical_load(self.options['start_at'].year)
            sliced = self.slice_times(df)

            # format
            extras = {
                'freq': self.FREQUENCY_CHOICES.hourly,
                'market': self.MARKET_CHOICES.dam,
                'ba_name': self.NAME,
            }
            data = self.serialize_faster(sliced, extras=extras)

            # return
            return data

        else:
            # handle real-time
            load_ts, load_val = self.fetch_edata_point('InstantaneousLoad', 'PJM RTO Total', 'MW')

            # fall back to OASIS
            if not (load_ts and load_val):
                load_ts, load_val = self.fetch_oasis_data()
            if not (load_ts and load_val):
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning('No PJM latest load data')
                return []

            # format and return
            return [{
                'timestamp': load_ts,
                'freq': self.FREQUENCY_CHOICES.fivemin,
                'market': self.MARKET_CHOICES.fivemin,
                'load_MW': load_val,
                'ba_name': self.NAME,
            }]
示例#20
0
    def get_load(self, latest=False, start_at=None, end_at=None, forecast=False, **kwargs):
        """Return PJM load data points.

        Dispatches to the forecast feed, the historical feed, or the real-time
        eData/OASIS feeds depending on the parsed options.
        """
        # set args
        self.handle_options(data='load', latest=latest,
                            start_at=start_at, end_at=end_at, forecast=forecast,
                            **kwargs)

        if self.options['forecast']:
            # fetch from eData
            df = self.fetch_edata_series('ForecastedLoadHistory', {'name': 'PJM RTO Total'})
            sliced = self.slice_times(df)
            sliced.columns = ['load_MW']

            # format
            extras = {
                'freq': self.FREQUENCY_CHOICES.hourly,
                'market': self.MARKET_CHOICES.dam,
                'ba_name': self.NAME,
            }
            data = self.serialize_faster(sliced, extras=extras)

            # return
            return data

        elif self.options['end_at'] and self.options['end_at'] < datetime.now(pytz.utc) - timedelta(hours=1):
            # range ends more than an hour ago: use the historical feed
            df = self.fetch_historical_load(self.options['start_at'].year)
            sliced = self.slice_times(df)

            # format
            extras = {
                'freq': self.FREQUENCY_CHOICES.hourly,
                'market': self.MARKET_CHOICES.dam,
                'ba_name': self.NAME,
            }
            data = self.serialize_faster(sliced, extras=extras)

            # return
            return data

        else:
            # handle real-time
            load_ts, load_val = self.fetch_edata_point('InstantaneousLoad', 'PJM RTO Total', 'MW')

            # fall back to OASIS
            if not (load_ts and load_val):
                load_ts, load_val = self.fetch_oasis_data()
            if not (load_ts and load_val):
                # Logger.warn is deprecated (removed in Python 3.13); use warning
                LOGGER.warning('No PJM latest load data')
                return []

            # format and return
            return [{
                'timestamp': load_ts,
                'freq': self.FREQUENCY_CHOICES.fivemin,
                'market': self.MARKET_CHOICES.fivemin,
                'load_MW': load_val,
                'ba_name': self.NAME,
            }]
示例#21
0
文件: base.py 项目: WattTime/pyiso
    def utcify_index(self, local_index, tz_name=None, tz_col=None):
        """
        Convert a DateTimeIndex to UTC.

        :param DateTimeIndex local_index: The local DateTimeIndex to be converted.
        :param string tz_name: If local_ts is naive, it is assumed to be in timezone tz.
            If tz is not provided, the client's default timezone is used.
        :param tz_col: Optional sequence of per-row timezone names; when given,
            each timestamp is localized individually.
        :return: DatetimeIndex in UTC.
        :rtype: DatetimeIndex
        """
        # set up tz
        if tz_name is None:
            tz_name = self.TZ_NAME

        # use tz col if given
        if tz_col is not None:
            # it seems like we shouldn't have to iterate, but all the smart ways aren't working
            aware_utc_list = []
            for i in range(len(local_index)):
                try:
                    aware_local_ts = pytz.timezone(tz_col[i]).localize(local_index[i])
                except pytz.UnknownTimeZoneError:
                    # fall back to local ts
                    aware_local_ts = pytz.timezone(tz_name).localize(local_index[i])

                # utcify
                aware_utc_ts = self.utcify(aware_local_ts)
                aware_utc_list.append(aware_utc_ts)

            # indexify
            aware_utc_index = pd.DatetimeIndex(aware_utc_list)

        else:
            # localize
            try:
                aware_local_index = local_index.tz_localize(tz_name)
            except AmbiguousTimeError as e:
                LOGGER.debug(e)
                try:
                    aware_local_index = local_index.tz_localize(tz_name, ambiguous='infer')
                except AmbiguousTimeError:
                    # Logger.warn is deprecated (removed in Python 3.13); use warning
                    LOGGER.warning('Second DatetimeIndex localization fallback, assuming DST transition day.')
                    dst_active_list = self._dst_active_hours_for_transition_day(local_dt_index=local_index)
                    aware_local_index = local_index.tz_localize(tz_name, ambiguous=dst_active_list)
            except TypeError as e:
                # already aware
                LOGGER.debug(e)
                aware_local_index = local_index

            # convert to utc
            aware_utc_index = aware_local_index.tz_convert('UTC')

        # return
        return aware_utc_index
示例#22
0
文件: base.py 项目: simon71717/pyiso
    def request(self, url, mode='get', retry_sec=5, **kwargs):
        """
        Get or post to a URL with the provided kwargs.
        Returns the response, or None if an error was encountered.
        If the mode is not 'get' or 'post', raises ValueError.
        Retries (recursively) after retry_sec seconds on HTTP 429 throttling.
        """
        # check args
        allowed_modes = ['get', 'post']
        if mode not in allowed_modes:
            raise ValueError('Invalid request mode %s' % mode)

        # lazily create a shared session so connections are pooled across calls
        try:
            session = getattr(self, 'session')
        except AttributeError:
            self.session = requests.Session()
            session = self.session

        # carry out request
        # NOTE(review): verify=False disables TLS certificate verification;
        # confirm this is intentional for the data sources involved
        try:
            response = getattr(session, mode)(url, verify=False,
                                              timeout=self.TIMEOUT_SECONDS,
                                              **kwargs)
        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
            # eg max retries exceeded
            msg = '%s: connection error for %s, %s:\n%s' % (self.NAME, url, kwargs, e)
            LOGGER.error(msg)
            return None

        if response.status_code == 200:
            # success
            LOGGER.debug('%s: request success for %s, %s with cache hit %s' % (self.NAME, url, kwargs, getattr(response, 'from_cache', None)))

        elif response.status_code == 429:
            # retry on throttle
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('%s: retrying in %d seconds, throttled for %s, %s' % (self.NAME, retry_sec, url, kwargs))
            sleep(retry_sec)
            return self.request(url, mode=mode, retry_sec=retry_sec, **kwargs)

        else:
            # non-throttle error
            LOGGER.error('%s: request failure with code %s for %s, %s' % (self.NAME, response.status_code, url, kwargs))

        return response
示例#23
0
    def get_load(self,
                 latest=False,
                 yesterday=False,
                 start_at=None,
                 end_at=None,
                 **kwargs):
        """Return IESO load data points.

        Uses the real-time constrained-totals report for the latest snapshot
        and historical ranges, and the predispatch report for forecast ranges.
        """
        load_ts = []
        self.handle_options(latest=latest,
                            yesterday=yesterday,
                            start_at=start_at,
                            end_at=end_at,
                            **kwargs)
        rt_const_totals_handler = RealTimeConstrainedTotalsReportHandler(
            ieso_client=self)
        predisp_const_totals_handler = PredispatchConstrainedTotalsReportHandler(
            ieso_client=self)

        if self.options.get('latest', False):
            self._get_latest_report_trimmed(
                result_ts=load_ts,
                report_handler=rt_const_totals_handler,
                parser_format=ParserFormat.load)
        elif self.options.get('start_at', None) and self.options.get(
                'end_at', None):
            if self.options.get('historical', False):
                # clamp the historical sub-range to what the report provides
                range_start = max(
                    self.options['start_at'],
                    rt_const_totals_handler.earliest_available_datetime())
                range_end = min(
                    self.options['end_at'],
                    rt_const_totals_handler.latest_available_datetime())
                self._get_report_range(result_ts=load_ts,
                                       report_handler=rt_const_totals_handler,
                                       parser_format=ParserFormat.load,
                                       range_start=range_start,
                                       range_end=range_end)
            if self.options.get('forecast', False):
                # forecast range starts where real-time data ends
                range_start = max(
                    self.options['start_at'],
                    rt_const_totals_handler.latest_available_datetime(),
                    predisp_const_totals_handler.earliest_available_datetime())
                range_end = min(
                    self.options['end_at'],
                    predisp_const_totals_handler.latest_available_datetime())
                self._get_report_range(
                    result_ts=load_ts,
                    report_handler=predisp_const_totals_handler,
                    parser_format=ParserFormat.load,
                    range_start=range_start,
                    range_end=range_end)
        else:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('No valid options were supplied.')
        return load_ts
示例#24
0
文件: aeso.py 项目: WattTime/pyiso
    def get_load(self, latest=False, yesterday=False, start_at=False, end_at=False, **kwargs):
        """Return AESO load data for the latest report or a clamped date range.

        The requested range is clamped to the earliest available data
        (2000-01-01 Mountain time) and the end of the current local day.
        """
        super(AESOClient, self).handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at,
                                               **kwargs)

        if latest:
            return self._get_latest_report(request_type=ParserFormat.load)
        elif self.options.get('start_at', None) and self.options.get('end_at', None):
            earliest_load_dt = self.mtn_tz.localize(datetime(year=2000, month=1, day=1, hour=0, minute=0, second=0))
            latest_load_dt = self.local_now().replace(hour=23, minute=59, second=59, microsecond=999999)
            # clamp the requested range into the available window
            start_at = max(self.options['start_at'], earliest_load_dt).astimezone(self.mtn_tz)
            end_at = min(self.options['end_at'], latest_load_dt).astimezone(self.mtn_tz)
            return self._get_load_for_date_range(start_at=start_at, end_at=end_at)
        else:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('No valid options were supplied.')
示例#25
0
    def get_trade(self,
                  latest=False,
                  yesterday=False,
                  start_at=None,
                  end_at=None,
                  **kwargs):
        """Return IESO trade data points.

        Uses the intertie schedule/flow report for the latest snapshot and
        historical ranges, and the adequacy report for forecast ranges.
        """
        trade_ts = []
        self.handle_options(latest=latest,
                            yesterday=yesterday,
                            start_at=start_at,
                            end_at=end_at,
                            **kwargs)
        inter_sched_flow_handler = IntertieScheduleFlowReportHandler(
            ieso_client=self)
        adequacy_handler = AdequacyReportHandler(ieso_client=self)

        if self.options.get('latest', False):
            self._get_latest_report_trimmed(
                result_ts=trade_ts,
                report_handler=inter_sched_flow_handler,
                parser_format=ParserFormat.trade)
        elif self.options.get('start_at', None) and self.options.get(
                'end_at', None):
            if self.options.get('historical', False):
                # clamp the historical sub-range to what the report provides
                range_start = max(
                    self.options['start_at'],
                    inter_sched_flow_handler.earliest_available_datetime())
                range_end = min(
                    self.options['end_at'],
                    inter_sched_flow_handler.latest_available_datetime())
                self._get_report_range(result_ts=trade_ts,
                                       report_handler=inter_sched_flow_handler,
                                       parser_format=ParserFormat.trade,
                                       range_start=range_start,
                                       range_end=range_end)
            if self.options.get('forecast', False):
                # forecast range starts where real-time data ends
                range_start = max(
                    self.options['start_at'],
                    inter_sched_flow_handler.latest_available_datetime(),
                    adequacy_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'],
                                adequacy_handler.latest_available_datetime())
                self._get_report_range(result_ts=trade_ts,
                                       report_handler=adequacy_handler,
                                       parser_format=ParserFormat.trade,
                                       range_start=range_start,
                                       range_end=range_end)
        else:
            # Logger.warn is deprecated (removed in Python 3.13); use warning
            LOGGER.warning('No valid options were supplied.')
        return trade_ts
示例#26
0
    def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, **kwargs):
        """
        Return ISO-NE locational marginal prices for one node as a list of
        dicts with keys [timestamp, lmp, ba_name, market, freq, node_id,
        lmp_type].

        :param str node_id: Node name; must be a key of self.locations
            (case-insensitive).
        :raises ValueError: If the node name is not a known location.
        """
        self.handle_options(data='lmp', latest=latest,
                            start_at=start_at, end_at=end_at, **kwargs)

        # Resolve the node name to an ISO-NE location id, failing loudly.
        try:
            locationid = self.locations[node_id.upper()]
        except KeyError:
            raise ValueError('No LMP data available for location %s' % node_id)

        # Gather raw JSON data points across all relevant endpoints.
        raw_data = []
        for endpoint in self.request_endpoints(locationid):
            payload = self.fetch_data(endpoint, self.auth)
            try:
                raw_data += self.parse_json_lmp_data(payload)
            except ValueError as err:
                LOGGER.warn(err)

        # Reshape raw points into the pyiso LMP dict format, filtering by the
        # requested time window when a sliceable range was given.
        parsed_data = []
        for raw_dp in raw_data:
            dp = {
                'timestamp': self.utcify(raw_dp['BeginDate']),
                'lmp': raw_dp['LmpTotal'],
                'ba_name': self.NAME,
                'market': self.options['market'],
                'freq': self.options['frequency'],
                'node_id': node_id,
                'lmp_type': 'energy',
            }
            if self.options['sliceable'] and not (
                    self.options['start_at'] <= dp['timestamp'] <= self.options['end_at']):
                continue
            parsed_data.append(dp)

        return parsed_data
示例#27
0
    def _get_load_forecast_report(self):
        """
        Fetch and parse the NB Power hourly load forecast CSV.

        :return: List of dicts, each with keys ``[ba_name, timestamp, freq, market, load_MW]``.
           Timestamps are in UTC.
        :rtype: list
        """
        load_ts = list([])
        forecast_url_base = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/'
        forecast_filename_fmt = '%Y-%m-%d %H.csv'
        # Forecast filenames encode an issue hour; earliest_forecast is the
        # current local hour and latest_forecast is three hours later —
        # presumably the newest file that could exist. TODO confirm cadence.
        earliest_forecast = copy(self.atlantic_now).replace(minute=0,
                                                            second=0,
                                                            microsecond=0)
        latest_forecast = earliest_forecast + timedelta(hours=3)

        if self.local_start_at <= latest_forecast:
            forecast_filename = earliest_forecast.strftime(
                forecast_filename_fmt)
            # quote() escapes the space in the 'YYYY-MM-DD HH.csv' filename.
            load_forecast_url = forecast_url_base + quote(forecast_filename)
            response = self.request(load_forecast_url)
            response_body = BytesIO(response.content)
            response_df = read_csv(response_body,
                                   names=['timestamp', 'load'],
                                   usecols=[0, 1],
                                   dtype={'load': float},
                                   parse_dates=[0],
                                   date_parser=self.parse_forecast_timestamps)
            for idx, row in response_df.iterrows():
                # Keep only future rows that fall inside the requested window.
                if self.atlantic_now <= row.timestamp and self.local_start_at <= row.timestamp <= self.local_end_at:
                    row_pd_timestamp = Timestamp(
                        row.timestamp.astimezone(pytz.utc))

                    # In the event of a duplicate timestamp (e.g. daylight savings transition hours), use latest value.
                    if len(load_ts) > 0 and load_ts[-1][
                            'timestamp'] == row_pd_timestamp:
                        del load_ts[-1:]

                    load_ts.append({
                        'ba_name': self.NAME,
                        'timestamp': row_pd_timestamp,
                        'freq': self.FREQUENCY_CHOICES.hourly,
                        'market': self.MARKET_CHOICES.dam,
                        'load_MW': row.load
                    })
        else:
            LOGGER.warn('The latest load forecast available is ' +
                        str(latest_forecast) +
                        '. The requested start_at must be before this time.')
        return load_ts
示例#28
0
文件: yukon.py 项目: WattTime/pyiso
 def get_load(self, latest=False, yesterday=False, start_at=False, end_at=False, **kwargs):
     """
     Return Yukon Energy load data as a list of dicts.

     Dispatches to the latest-value path or the hourly date-range path;
     logs a warning and returns an empty list for unsupported requests.
     """
     self.handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at, data='load')
     loads = []
     if latest:
         self._load_latest(loads)
     elif self._is_valid_date_range():
         self._hourly_range(loads)
     else:
         # Forecasts are unsupported; any other invalid range means the
         # request falls outside the available data window.
         if self.options.get('forecast', False):
             LOGGER.warn(self.NAME + ': Load forecasts are not supported.')
         else:
             msg = '%s: Requested date range %s to %s is outside range of available data from %s to %s.' % \
                   (self.NAME, self.options.get('start_at', None), self.options.get('end_at', None),
                    self.options.get('earliest_data_at', None), self.options.get('latest_data_at', None))
             LOGGER.warn(msg)
     return loads
示例#29
0
文件: eu.py 项目: cnblevins/pyiso
    def fetch_entsoe(self, url, payload, count=0):
        """
        Fetch a URL from the ENTSO-E site, retrying on empty responses.

        :param str url: URL to request.
        :param dict payload: Query parameters for the request.
        :param int count: Internal recursion counter; callers leave it at 0.
        :return: Response body text, or False on failure.
        :rtype: str or bool
        """
        # Lazily authenticate on first use.
        if not getattr(self, 'session', None):
            self.auth()

        r = self.request(url, params=payload)
        # TODO error checking
        if len(r.text) == 0:
            # NOTE(review): the guard allows counts 0-3, i.e. up to 4 fetch
            # attempts, although the comment below says 3 — confirm intent.
            if count > 3:  # try 3 times to get response
                LOGGER.warn('Request failed, no response found after %i attempts' % count)
                return False
            # throttled
            sleep(5)
            return self.fetch_entsoe(url, payload, count + 1)
        if 'UNKNOWN_EXCEPTION' in r.text:
            LOGGER.warn('UNKNOWN EXCEPTION')
            return False
        return r.text
示例#30
0
文件: yukon.py 项目: WattTime/pyiso
 def get_trade(self, latest=False, yesterday=False, start_at=False, end_at=False, **kwargs):
     """
     Return hourly zero-valued trade dicts for the requested window.

     Yukon is an isolated grid, so net exports are always zero; one dict is
     emitted per whole hour inside [start_at, end_at].
     """
     self.handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at, data='trade')
     # http://yukonenergy.ca/energy-in-yukon/electricity-101/electricity-library/whats-an-isolated-grid-and-what-does-that-mean-for-me
     LOGGER.warn('Yukon Energy is an isolated grid. Trade will always be zero.')
     results = []
     window_start = self.options.get('start_at')
     window_end = self.options.get('end_at')
     # Walk hour by hour from the start, rounded down to the whole hour.
     current_hour = window_start.replace(minute=0, second=0, microsecond=0)
     while current_hour <= window_end:
         # The first rounded hour may precede start_at; skip it if so.
         if window_start <= current_hour:
             results.append({
                 'ba_name': self.NAME,
                 'timestamp': Timestamp(current_hour),
                 'freq': self.FREQUENCY_CHOICES.hourly,
                 'market': self.MARKET_CHOICES.hourly,
                 'net_exp_MW': 0
             })
         current_hour += timedelta(hours=1)
     return results
示例#31
0
文件: eu.py 项目: mhdella/pyiso
    def fetch_entsoe(self, url, payload, count=0):
        """
        Fetch a URL from the ENTSO-E site, retrying on empty responses.

        :param str url: URL to request.
        :param dict payload: Query parameters for the request.
        :param int count: Internal recursion counter; callers leave it at 0.
        :return: Response body text, or False on failure.
        :rtype: str or bool
        """
        # Lazily authenticate on first use.
        if not getattr(self, 'session', None):
            self.auth()

        r = self.request(url, params=payload)
        # TODO error checking
        if len(r.text) == 0:
            # NOTE(review): the guard allows counts 0-3, i.e. up to 4 fetch
            # attempts, although the comment below says 3 — confirm intent.
            if count > 3:  # try 3 times to get response
                LOGGER.warn('Request failed, no response found after %i attempts' % count)
                return False
            # throttled
            sleep(5)
            return self.fetch_entsoe(url, payload, count + 1)
        if 'UNKNOWN_EXCEPTION' in r.text:
            LOGGER.warn('UNKNOWN EXCEPTION')
            return False
        return r.text
示例#32
0
    def get_load(self, latest=False, start_at=False, end_at=False,
                 forecast=False, **kwargs):
        """
        Return ISO-NE load data as a list of dicts with keys
        [timestamp, load_MW, ba_name, market, freq].

        For forecast requests, data points earlier than the current UTC time
        are dropped.
        """
        self.handle_options(data='load', latest=latest, forecast=forecast,
                            start_at=start_at, end_at=end_at, **kwargs)

        # Gather raw JSON data points across all relevant endpoints.
        raw_data = []
        for endpoint in self.request_endpoints():
            payload = self.fetch_data(endpoint, self.auth)
            try:
                raw_data += self.parse_json_load_data(payload)
            except ValueError as err:
                LOGGER.warn(err)

        # Reshape raw points into the pyiso load dict format.
        now = pytz.utc.localize(datetime.utcnow())
        drop_past = self.options['forecast']
        parsed_data = []
        for raw_dp in raw_data:
            stamp = self.utcify(raw_dp['BeginDate'])
            # Don't include past data in a forecast response.
            if drop_past and stamp < now:
                continue
            parsed_data.append({
                'timestamp': stamp,
                'load_MW': raw_dp['LoadMw'],
                'ba_name': self.NAME,
                'market': self.options['market'],
                'freq': self.options['frequency'],
            })

        return parsed_data
示例#33
0
文件: miso.py 项目: rariss/pyiso_lmp
    def parse_forecast(self, df):
        """
        Convert a MISO day-ahead forecast DataFrame into the series named by
        self.options['data'].

        :param DataFrame df: Raw forecast frame; expected columns are GWh
            quantities (see the literals below).
        :return: Time-sliced frame/series in MW ('gen' also carries
            fuel_name); empty DataFrame if an expected column is missing.
        :raises ValueError: If options['data'] is not gen, load, or trade.
        """
        sliced = self.slice_times(df)

        if self.options['data'] == 'gen':
            try:
                # GWh -> MW; forecast genmix is not broken out by fuel here.
                sliced['gen_MW'] = 1000.0 * sliced['Supply Cleared (GWh) - Physical']
                sliced['fuel_name'] = 'other'
                return sliced[['gen_MW', 'fuel_name']]
            except KeyError:
                LOGGER.warn('MISO genmix error: missing key %s in %s' % ('Supply Cleared (GWh) - Physical', sliced.columns))
                return pd.DataFrame()

        elif self.options['data'] == 'load':
            try:
                # Total load = fixed + price-sensitive demand, GWh -> MW.
                sliced['load_MW'] = 1000.0 * (sliced['Demand Cleared (GWh) - Physical - Fixed'] +
                                              sliced['Demand Cleared (GWh) - Physical - Price Sen.'])
                return sliced['load_MW']
            except KeyError:
                LOGGER.warn('MISO load error: missing key %s in %s' % ('Demand Cleared (GWh) - Physical - Fixed', sliced.columns))
                return pd.DataFrame()

        elif self.options['data'] == 'trade':
            try:
                # Net exports are the negation of net scheduled imports.
                sliced['net_exp_MW'] = -1000.0 * sliced['Net Scheduled Imports (GWh)']
                return sliced['net_exp_MW']
            except KeyError:
                LOGGER.warn('MISO trade error: missing key %s in %s' % ('Net Scheduled Imports (GWh)', sliced.columns))
                return pd.DataFrame()

        else:
            raise ValueError('Can only parse MISO forecast gen, load, or trade data, not %s'
                             % self.options['data'])
示例#34
0
文件: miso.py 项目: avances123/pyiso
    def parse_forecast(self, df):
        """
        Convert a MISO day-ahead forecast DataFrame into the series named by
        self.options['data'] ('gen', 'load', or 'trade').

        Returns an empty DataFrame when an expected column is missing, and
        raises ValueError for any other data mode.
        """
        sliced = self.slice_times(df)
        mode = self.options['data']

        if mode == 'gen':
            supply_col = 'Supply Cleared (GWh) - Physical'
            try:
                # GWh -> MW; forecast genmix is not broken out by fuel here.
                sliced['gen_MW'] = 1000.0 * sliced[supply_col]
                sliced['fuel_name'] = 'other'
                return sliced[['gen_MW', 'fuel_name']]
            except KeyError:
                LOGGER.warn('MISO genmix error: missing key %s in %s' % (supply_col, sliced.columns))
                return pd.DataFrame()

        if mode == 'load':
            fixed_col = 'Demand Cleared (GWh) - Physical - Fixed'
            price_col = 'Demand Cleared (GWh) - Physical - Price Sen.'
            try:
                # Total load = fixed + price-sensitive demand, GWh -> MW.
                sliced['load_MW'] = 1000.0 * (sliced[fixed_col] + sliced[price_col])
                return sliced['load_MW']
            except KeyError:
                LOGGER.warn('MISO load error: missing key %s in %s' % (fixed_col, sliced.columns))
                return pd.DataFrame()

        if mode == 'trade':
            imports_col = 'Net Scheduled Imports (GWh)'
            try:
                # Net exports are the negation of net scheduled imports.
                sliced['net_exp_MW'] = -1000.0 * sliced[imports_col]
                return sliced['net_exp_MW']
            except KeyError:
                LOGGER.warn('MISO trade error: missing key %s in %s' % (imports_col, sliced.columns))
                return pd.DataFrame()

        raise ValueError('Can only parse MISO forecast gen, load, or trade data, not %s'
                         % self.options['data'])
示例#35
0
文件: ieso.py 项目: rariss/pyiso_lmp
    def get_generation(self, latest=False, yesterday=False, start_at=None, end_at=None, **kwargs):
        """
        Return IESO generation mix data as a list of dicts.

        Latest and recent-history data come from the generator output reports;
        forecast data comes from the Adequacy report.
        """
        generation_ts = list([])
        self.handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at, **kwargs)

        gen_out_cap_handler = GeneratorOutputCapabilityReportHandler(ieso_client=self)
        gen_out_by_fuel_handler = GeneratorOutputByFuelHourlyReportHandler(ieso_client=self)
        adequacy_handler = AdequacyReportHandler(ieso_client=self)

        if self.options.get('latest', False):
            self._get_latest_report_trimmed(result_ts=generation_ts, report_handler=gen_out_cap_handler,
                                            parser_format=ParserFormat.generation)
        elif self.options.get('start_at', None) and self.options.get('end_at', None):
            # For long time ranges more than hour ending 1, seven days in the past, it is more efficient to request the
            # Generator Output by Fuel Type Hourly Report rather than repeated calls to the Generator Output and
            # Capability Report.
            # TODO Minor optimization, but this actually check if the start/end range is greater than 7 days.
            if self.options['start_at'] < self.local_start_of_day.replace(hour=1) - timedelta(days=7):
                self.timeout_seconds = 90  # These reports can get rather large ~7MB for a full year.
                # Clamp the requested range to the by-fuel report's window.
                range_start = max(self.options['start_at'], gen_out_by_fuel_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], gen_out_by_fuel_handler.latest_available_datetime())
                self._get_report_range(result_ts=generation_ts, report_handler=gen_out_by_fuel_handler,
                                       parser_format=ParserFormat.generation, range_start=range_start,
                                       range_end=range_end)
            elif self.options.get('historical', False):
                # Recent history: clamp to the capability report's window.
                range_start = max(self.options['start_at'], gen_out_cap_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], gen_out_cap_handler.latest_available_datetime())
                self._get_report_range(result_ts=generation_ts, report_handler=gen_out_cap_handler,
                                       parser_format=ParserFormat.generation, range_start=range_start,
                                       range_end=range_end)

            if self.options.get('forecast', False):
                # Forecast portion starts no earlier than the current time.
                range_start = max(self.options['start_at'], self.local_now)
                range_end = min(self.options['end_at'], adequacy_handler.latest_available_datetime())
                self._get_report_range(result_ts=generation_ts, report_handler=adequacy_handler,
                                       parser_format=ParserFormat.generation, range_start=range_start,
                                       range_end=range_end)
        else:
            LOGGER.warn('No valid options were supplied.')
        return generation_ts
示例#36
0
文件: nbpower.py 项目: WattTime/pyiso
    def _get_load_forecast_report(self):
        """
        Fetch and parse the NB Power hourly load forecast CSV.

        :return: List of dicts, each with keys ``[ba_name, timestamp, freq, market, load_MW]``.
           Timestamps are in UTC.
        :rtype: list
        """
        load_ts = list([])
        forecast_url_base = 'http://tso.nbpower.com/reports%20%26%20assessments/load%20forecast/hourly/'
        forecast_filename_fmt = '%Y-%m-%d %H.csv'
        # Forecast filenames encode an issue hour; earliest_forecast is the
        # current local hour and latest_forecast is three hours later —
        # presumably the newest file that could exist. TODO confirm cadence.
        earliest_forecast = copy(self.atlantic_now).replace(minute=0, second=0, microsecond=0)
        latest_forecast = earliest_forecast + timedelta(hours=3)

        if self.local_start_at <= latest_forecast:
            forecast_filename = earliest_forecast.strftime(forecast_filename_fmt)
            # quote() escapes the space in the 'YYYY-MM-DD HH.csv' filename.
            load_forecast_url = forecast_url_base + quote(forecast_filename)
            response = self.request(load_forecast_url)
            response_body = BytesIO(response.content)
            response_df = read_csv(response_body, names=['timestamp', 'load'], usecols=[0, 1],
                                   dtype={'load': float}, parse_dates=[0], date_parser=self.parse_forecast_timestamps)
            for idx, row in response_df.iterrows():
                # Keep only future rows that fall inside the requested window.
                if self.atlantic_now <= row.timestamp and self.local_start_at <= row.timestamp <= self.local_end_at:
                    row_pd_timestamp = Timestamp(row.timestamp.astimezone(pytz.utc))

                    # In the event of a duplicate timestamp (e.g. daylight savings transition hours), use latest value.
                    if len(load_ts) > 0 and load_ts[-1]['timestamp'] == row_pd_timestamp:
                        del load_ts[-1:]

                    load_ts.append({
                        'ba_name': self.NAME,
                        'timestamp': row_pd_timestamp,
                        'freq': self.FREQUENCY_CHOICES.hourly,
                        'market': self.MARKET_CHOICES.dam,
                        'load_MW': row.load
                    })
        else:
            LOGGER.warn('The latest load forecast available is ' + str(latest_forecast) +
                        '. The requested start_at must be before this time.')
        return load_ts
示例#37
0
文件: ieso.py 项目: WattTime/pyiso
    def get_trade(self, latest=False, yesterday=False, start_at=None, end_at=None, **kwargs):
        """
        Return IESO intertie trade data as a list of dicts.

        Latest/historical data comes from the Intertie Schedule & Flow report;
        forecast data comes from the Adequacy report.
        """
        trade_ts = list([])
        self.handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at, **kwargs)
        inter_sched_flow_handler = IntertieScheduleFlowReportHandler(ieso_client=self)
        adequacy_handler = AdequacyReportHandler(ieso_client=self)

        if self.options.get('latest', False):
            self._get_latest_report_trimmed(result_ts=trade_ts, report_handler=inter_sched_flow_handler,
                                            parser_format=ParserFormat.trade)
        elif self.options.get('start_at', None) and self.options.get('end_at', None):
            if self.options.get('historical', False):
                # Clamp the requested range to the report's available window.
                range_start = max(self.options['start_at'], inter_sched_flow_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], inter_sched_flow_handler.latest_available_datetime())
                self._get_report_range(result_ts=trade_ts, report_handler=inter_sched_flow_handler,
                                       parser_format=ParserFormat.trade, range_start=range_start, range_end=range_end)
            if self.options.get('forecast', False):
                # Forecast range begins where the flow report's data ends.
                range_start = max(self.options['start_at'], inter_sched_flow_handler.latest_available_datetime(),
                                  adequacy_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], adequacy_handler.latest_available_datetime())
                self._get_report_range(result_ts=trade_ts, report_handler=adequacy_handler,
                                       parser_format=ParserFormat.trade, range_start=range_start, range_end=range_end)
        else:
            LOGGER.warn('No valid options were supplied.')
        return trade_ts
示例#38
0
文件: ieso.py 项目: WattTime/pyiso
    def get_load(self, latest=False, yesterday=False, start_at=None, end_at=None, **kwargs):
        """
        Return IESO load data as a list of dicts.

        Latest/historical data comes from the Real-Time Constrained Totals
        report; forecast data comes from the Predispatch Constrained Totals
        report.
        """
        load_ts = list([])
        self.handle_options(latest=latest, yesterday=yesterday, start_at=start_at, end_at=end_at, **kwargs)
        rt_const_totals_handler = RealTimeConstrainedTotalsReportHandler(ieso_client=self)
        predisp_const_totals_handler = PredispatchConstrainedTotalsReportHandler(ieso_client=self)

        if self.options.get('latest', False):
            self._get_latest_report_trimmed(result_ts=load_ts, report_handler=rt_const_totals_handler,
                                            parser_format=ParserFormat.load)
        elif self.options.get('start_at', None) and self.options.get('end_at', None):
            if self.options.get('historical', False):
                # Clamp the requested range to the report's available window.
                range_start = max(self.options['start_at'], rt_const_totals_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], rt_const_totals_handler.latest_available_datetime())
                self._get_report_range(result_ts=load_ts, report_handler=rt_const_totals_handler,
                                       parser_format=ParserFormat.load, range_start=range_start, range_end=range_end)
            if self.options.get('forecast', False):
                # Forecast range begins where the real-time data ends.
                range_start = max(self.options['start_at'], rt_const_totals_handler.latest_available_datetime(),
                                  predisp_const_totals_handler.earliest_available_datetime())
                range_end = min(self.options['end_at'], predisp_const_totals_handler.latest_available_datetime())
                self._get_report_range(result_ts=load_ts, report_handler=predisp_const_totals_handler,
                                       parser_format=ParserFormat.load, range_start=range_start, range_end=range_end)
        else:
            LOGGER.warn('No valid options were supplied.')
        return load_ts
示例#39
0
文件: bpa.py 项目: cnblevins/pyiso
    def fetch_recent(self):
        """
        Get BPA generation or load data from the past week.

        :return: Parsed DataFrame, or an empty DataFrame if the request
            returned no usable response.
        :raises ValueError: If options['data'] is neither 'gen' nor 'load'.
        """
        # The published text file carries both load and wind/thermal columns.
        response = self.request(self.base_url + 'wind/baltwg.txt')

        # Choose which columns to read based on the requested data mode.
        data_mode = self.options['data']
        if data_mode == 'gen':
            usecols = [0, 2, 3, 4]
        elif data_mode == 'load':
            usecols = [0, 1]
        else:
            raise ValueError('Cannot fetch data without a data mode')

        # A falsy response (failed request) yields an empty frame.
        if not response:
            LOGGER.warn('No recent data found for BPA %s' % self.options)
            return pd.DataFrame()

        # The file is tab-separated with a 6-line preamble before the header.
        return self.parse_to_df(response.text, skiprows=6, header=0, delimiter='\t',
                                index_col=0, usecols=usecols,
                                date_parser=self.date_parser)
示例#40
0
文件: base.py 项目: rariss/pyiso_lmp
    def utcify_index(self, local_index, tz_name=None, tz_col=None):
        """
        Convert a DateTimeIndex to UTC.

        :param DateTimeIndex local_index: The local DateTimeIndex to be converted.
        :param string tz_name: If local_ts is naive, it is assumed to be in timezone tz.
            If tz is not provided, the client's default timezone is used.
        :param tz_col: Optional per-row sequence of timezone names, parallel to
            local_index; rows with unknown names fall back to tz_name.
        :return: DatetimeIndex in UTC.
        :rtype: DatetimeIndex
        """
        # set up tz
        if tz_name is None:
            tz_name = self.TZ_NAME
        # use tz col if given
        if tz_col is not None:
            # it seems like we shouldn't have to iterate, but all the smart ways aren't working
            aware_utc_list = []
            for i in range(len(local_index)):
                try:
                    aware_local_ts = pytz.timezone(tz_col[i]).localize(
                        local_index[i])
                except pytz.UnknownTimeZoneError:
                    # fall back to local ts
                    aware_local_ts = pytz.timezone(tz_name).localize(
                        local_index[i])

                # utcify
                aware_utc_ts = self.utcify(aware_local_ts)
                aware_utc_list.append(aware_utc_ts)

            # indexify
            aware_utc_index = pd.DatetimeIndex(aware_utc_list)

        else:
            # localize, cascading through increasingly permissive strategies
            # for DST-ambiguous timestamps
            try:
                aware_local_index = local_index.tz_localize(tz_name)
            except AmbiguousTimeError as e:
                LOGGER.debug(e)
                try:
                    # let pandas infer DST from monotonic ordering
                    aware_local_index = local_index.tz_localize(
                        tz_name, ambiguous='infer')
                except AmbiguousTimeError:
                    LOGGER.warn(
                        'Second DatetimeIndex localization fallback, assuming DST transition day.'
                    )
                    # explicitly compute which hours have DST active
                    dst_active_list = self._dst_active_hours_for_transition_day(
                        local_dt_index=local_index)
                    aware_local_index = local_index.tz_localize(
                        tz_name, ambiguous=dst_active_list)
            except TypeError as e:
                # already aware
                LOGGER.debug(e)
                aware_local_index = local_index

            # convert to utc
            aware_utc_index = aware_local_index.tz_convert('UTC')

        # return
        return aware_utc_index
示例#41
0
文件: ercot.py 项目: mhdella/pyiso
    def get_lmp(self, node_id='HB_HUBAVG', **kwargs):
        """
        Return ERCOT locational marginal prices as a list of dicts.

        :param node_id: Node name or list of names; rows whose node_id matches
            any of them (joined into a regex with '|') are kept.
        :return: List of LMP dicts, empty if no report could be found for the
            requested range.
        :rtype: list
        """
        self.handle_options(data='lmp', node_id=node_id, **kwargs)

        # Pick the report matching the requested market.
        if self.options['market'] == self.MARKET_CHOICES.fivemin:
            report_name = 'rt5m_lmp'
        elif self.options['market'] == self.MARKET_CHOICES.dam:
            report_name = 'dam_hrly_lmp'
        elif self.options['market'] == self.MARKET_CHOICES.hourly:
            raise NotImplementedError(
                'ERCOT does not produce realtime hourly prices?')
        # NOTE(review): any other market value leaves report_name unbound and
        # raises NameError below — confirm handle_options restricts markets.

        self.now = datetime.now(pytz.utc)

        if 'start_at' in self.options:
            # get start and end days in local time
            tz = pytz.timezone(self.TZ_NAME)
            start = tz.normalize(self.options['start_at'])
            end = tz.normalize(self.options['end_at'])

            pieces = []
            if self.options['market'] == self.MARKET_CHOICES.fivemin:
                # one report per 5-minute period: warning, this could take a
                # long time for wide ranges
                fivemin_periods = int(
                    (end - start).total_seconds() / (60 * 5)) + 1
                p_list = [
                    end - timedelta(minutes=5 * x)
                    for x in range(fivemin_periods)
                ]

                for period in p_list:
                    try:
                        report = self._request_report(report_name, date=period)
                        pieces.append(report)
                    except ValueError:
                        # missing report for this period; skip it
                        pass

            else:
                # day-ahead market: one report per day
                start = datetime(start.year,
                                 start.month,
                                 start.day,
                                 tzinfo=start.tzinfo)
                days_list = [
                    end - timedelta(days=x)
                    for x in range((end - start).days + 1)
                ]
                for day in days_list:
                    try:
                        report = self._request_report(report_name, day)
                        pieces.append(report)
                    except ValueError:
                        # missing report for this day; skip it
                        pass

            # combine pieces, if any
            if len(pieces) > 0:
                report = pd.concat(pieces)
            else:
                LOGGER.warn('No ERCOT LMP found for %s' % self.options)
                return []
        else:
            # no explicit range: fetch the current report, falling back to
            # yesterday's if today's is not yet posted
            report = self._request_report(report_name, self.now)
            if report is None:
                report = self._request_report(report_name,
                                              self.now - timedelta(days=1))
        df = self.format_lmp(report)

        # strip uneeded times
        df = self.slice_times(df)

        # strip out unwanted nodes
        if node_id:
            if not isinstance(node_id, list):
                node_id = [node_id]
            reg = re.compile('|'.join(node_id))
            # .ix was deprecated in pandas 0.20 and removed in 1.0; boolean
            # row selection with .loc is the supported equivalent.
            df = df.loc[df['node_id'].str.contains(reg)]

        return df.to_dict(orient='records')
示例#42
0
    def request(self,
                url,
                mode='get',
                retry_sec=5,
                retries_remaining=5,
                **kwargs):
        """
        Get or post to a URL with the provided kwargs.
        Returns the response, or None if an error was encountered.
        If the mode is not 'get' or 'post', raises ValueError.

        Retries 429-throttled requests with exponential backoff: sleeps
        retry_sec seconds, then recurses with retry_sec doubled until
        retries_remaining is exhausted.
        """
        # check args
        allowed_modes = ['get', 'post']
        if mode not in allowed_modes:
            raise ValueError('Invalid request mode %s' % mode)

        # check for session: lazily create one shared Session so connections
        # (and any cache layer) are reused across calls
        try:
            session = getattr(self, 'session')
        except AttributeError:
            self.session = requests.Session()
            session = self.session

        # carry out request; getattr(session, mode) is session.get/session.post
        try:
            response = getattr(session, mode)(url,
                                              verify=False,
                                              timeout=self.timeout_seconds,
                                              **kwargs)
        # except requests.exceptions.ChunkedEncodingError as e:
        #     # JSON incomplete or not found
        #     msg = '%s: chunked encoding error for %s, %s:\n%s' % (self.NAME, url, kwargs, e)
        #     LOGGER.error(msg)
        #     return None
        except (requests.exceptions.ConnectionError,
                requests.exceptions.Timeout) as e:
            # eg max retries exceeded
            msg = '%s: connection error for %s, %s:\n%s' % (self.NAME, url,
                                                            kwargs, e)
            LOGGER.error(msg)
            return None
        # except requests.exceptions.RequestException:
        #     msg = '%s: request exception for %s, %s:\n%s' % (self.NAME, url, kwargs, e)
        #     LOGGER.error(msg)
        #     return None

        if response.status_code == 200:
            # success
            LOGGER.debug('%s: request success for %s, %s with cache hit %s' %
                         (self.NAME, url, kwargs,
                          getattr(response, 'from_cache', None)))

        elif response.status_code == 429:
            if retries_remaining > 0:
                # retry on throttle
                LOGGER.warn(
                    '%s: retrying in %d seconds (%d retries remaining), throttled for %s, %s'
                    % (self.NAME, retry_sec, retries_remaining, url, kwargs))
                sleep(retry_sec)
                retries_remaining -= 1
                return self.request(url,
                                    mode=mode,
                                    retry_sec=retry_sec * 2,
                                    retries_remaining=retries_remaining,
                                    **kwargs)
            else:
                # exhausted retries
                LOGGER.warn('%s: exhausted retries for %s, %s' %
                            (self.NAME, url, kwargs))
                return None

        else:
            # non-throttle error: log it but still return the response so
            # callers can inspect the status code
            LOGGER.error('%s: request failure with code %s for %s, %s' %
                         (self.NAME, response.status_code, url, kwargs))

        return response
示例#43
0
文件: sveri.py 项目: WattTime/pyiso
 def no_forecast_warn(self):
     """Warn when a non-latest request starts in the future, since SVERI
     serves no forecast data and such a query will return nothing."""
     requesting_future = (not self.options['latest']
                          and self.options['start_at'] >= pytz.utc.localize(datetime.utcnow()))
     if requesting_future:
         LOGGER.warn("SVERI does not have forecast data. There will be no data for the chosen time frame.")
示例#44
0
文件: ercot.py 项目: avances123/pyiso
    def get_lmp(self, node_id='HB_HUBAVG', **kwargs):
        """Return locational marginal prices for ERCOT.

        :param node_id: a node name or list of node names; rows whose
            ``node_id`` matches any of them (regex-OR) are kept.
        :param kwargs: passed through to ``handle_options`` (e.g. ``market``,
            ``start_at``, ``end_at``).
        :return: list of dicts, one per (timestamp, node) LMP record.
        :raises NotImplementedError: for the realtime-hourly market, which
            ERCOT does not publish.
        """
        self.handle_options(data='lmp', node_id=node_id, **kwargs)

        # Map the requested market to the ERCOT report that carries it.
        if self.options['market'] == self.MARKET_CHOICES.fivemin:
            report_name = 'rt5m_lmp'
        elif self.options['market'] == self.MARKET_CHOICES.dam:
            report_name = 'dam_hrly_lmp'
        elif self.options['market'] == self.MARKET_CHOICES.hourly:
            raise NotImplementedError('ERCOT does not produce realtime hourly prices?')

        self.now = datetime.now(pytz.utc)

        if 'start_at' in self.options:
            # get start and end days in local time
            tz = pytz.timezone(self.TZ_NAME)
            start = tz.normalize(self.options['start_at'])
            end = tz.normalize(self.options['end_at'])

            pieces = []
            if self.options['market'] == self.MARKET_CHOICES.fivemin:
                # One report per 5-minute period — warning, this could take
                # a long time for wide ranges.
                fivemin_periods = int((end-start).total_seconds()/(60*5)) + 1
                p_list = [end - timedelta(minutes=5*x) for x in range(fivemin_periods)]

                for period in p_list:
                    try:
                        report = self._request_report(report_name, date=period)
                        pieces.append(report)
                    except ValueError:
                        # Missing periods are expected; skip them.
                        pass

            else:
                # Day-ahead market: one report per local day, newest first.
                start = datetime(start.year, start.month, start.day, tzinfo=start.tzinfo)
                days_list = [end - timedelta(days=x) for x in range((end-start).days + 1)]
                for day in days_list:
                    try:
                        report = self._request_report(report_name, day)
                        pieces.append(report)
                    except ValueError:
                        pass

            # combine pieces, if any
            if len(pieces) > 0:
                report = pd.concat(pieces)
            else:
                LOGGER.warning('No ERCOT LMP found for %s' % self.options)
                return []
        else:
            # No explicit range: take the latest report, falling back one day.
            report = self._request_report(report_name, self.now)
            if report is None:
                report = self._request_report(report_name, self.now - timedelta(days=1))
        df = self.format_lmp(report)

        # strip unneeded times
        df = self.slice_times(df)

        # strip out unwanted nodes
        if node_id:
            if not isinstance(node_id, list):
                node_id = [node_id]
            # NOTE(review): node names are joined unescaped into a regex —
            # fine for plain IDs like 'HB_HUBAVG', but special characters
            # would be interpreted as regex metacharacters.
            reg = re.compile('|'.join(node_id))
            # DataFrame.ix was removed in pandas 1.0; .loc handles this
            # boolean-mask selection identically.
            df = df.loc[df['node_id'].str.contains(reg)]

        return df.to_dict(orient='records')
Example #45
0
 def no_forecast_warn(self):
     """Warn that a future-only time frame will produce no SVERI data.

     SVERI has no forecast feed; a non-latest request whose start time is
     at or after the current UTC moment cannot return anything.
     """
     # Guard clause: 'latest' requests always target current data.
     if self.options['latest']:
         return
     utc_now = pytz.utc.localize(datetime.utcnow())
     if self.options['start_at'] >= utc_now:
         LOGGER.warn(
             "SVERI does not have forecast data. There will be no data for the chosen time frame."
         )