def parse_login(organization_name, text):
    result = validate(text, login_schema, "login")
    for org in result.organization:
        if org.type == "organization" and org.name == organization_name:
            return org.uid
    msg = "No organization %s associated with this login."
    raise GrovestreamsOrganizationNotFoundFailure(msg % organization_name)
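
# A minimal usage sketch. The payload below is hypothetical, inferred from the
# fields this parser reads; a real Grovestreams login response carries more:
#
#     text = '{"organization": [{"type": "organization", "name": "Acme", "uid": "org-001"}]}'
#     parse_login("Acme", text)  # -> "org-001"
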
def parse_intervals(text):
    result = validate(text, interval_schema, "interval")
    if not result.feed.component:
        return None, []
    if len(result.feed.component) > 1:
        error = "The parser should only receive one component-record at a time."
        raise GrovestreamsDataIntegrityException(error)
    comp = result.feed.component[0]
    if not comp.stream:
        return None, []
    if len(comp.stream) > 1:
        error = "The parser should only receive one stream record at a time."
        raise GrovestreamsDataIntegrityException(error)
    stream = comp.stream[0]
    if not stream.statistic:
        return stream.streamUid, []
    if len(stream.statistic) > 1:
        error = "The parser should only receive one statistic at a time."
        raise GrovestreamsDataIntegrityException(error)
    statistic = stream.statistic[0]
    start_dt = _ts_to_dt(result.feed.requestStartDate)
    end_dt = _ts_to_dt(result.feed.requestEndDate)
    interval = _ts_to_dt(stream.lastValueEndDate) - _ts_to_dt(
        stream.lastValueStartDate)
    actual = len(statistic.data)
    expected = (end_dt - start_dt).total_seconds() // interval.total_seconds()
    if expected != actual:
        msg = "Expected %d interval data entries, found %d."
        raise GrovestreamsDataIntegrityException(msg % (expected, actual))
    interval_data = []
    current_dt = start_dt
    for datum in statistic.data:
        next_dt = current_dt + interval
        if datum is not None:
            # If the data is null, don't bother incorporating it.
            # JSON numbers could parse to integers or floating point numbers.
            # For consistency, cast everything to float.
            interval_data.append(
                Interval(start=current_dt, end=next_dt, kwh=float(datum)))
        current_dt = next_dt
    return stream.streamUid, interval_data
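
# A hypothetical feed payload illustrating the shape this parser expects.
# Field names are taken from the attribute accesses above; the epoch-millisecond
# timestamps assume _ts_to_dt accepts that unit, which is an assumption:
#
#     text = '''{"feed": {
#         "requestStartDate": 1500000000000,
#         "requestEndDate":   1500001800000,
#         "component": [{"stream": [{
#             "streamUid": "stream-1",
#             "lastValueStartDate": 1500000000000,
#             "lastValueEndDate":   1500000900000,
#             "statistic": [{"data": [1.5, null]}]
#         }]}]
#     }}'''
#     parse_intervals(text)
#     # -> ("stream-1", [Interval(..., kwh=1.5)]); the null datum is dropped.
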
def parse_intervals(site_id, text):
    result = validate(text, interval_schema, "interval")
    try:
        match = re.search(r"(\d+),(\d+)", result.window)
        if not match:
            raise DataIntegrityFailure(
                "Failed to locate interval width from API response. Received: %s"
                % result.window
            )
        width = int(match.group(1))
    except ValueError:
        raise DataIntegrityFailure(
            "Failed to parse interval width from API response. Received: %s"
            % result.window
        )
    valid_series = [s for s in result.endPoints if s.name == site_id]
    if not valid_series:
        raise DataIntegrityFailure(
            "Could not obtain a valid timeseries for site id %s." % site_id
        )
    if len(valid_series) > 1:
        msg = ("Failed to determine a canonical timeseries for site id %s, "
               "%d timeseries present.")
        raise DataIntegrityFailure(msg % (site_id, len(valid_series)))
    series = valid_series[0]
    start_dt = epoch_to_dt(series.first)
    end_dt = epoch_to_dt(series.last)
    # Namespace conflict: `series.values` resolves to the dict method,
    # so index by key instead.
    num_intervals = len(series["values"])
    if (end_dt - start_dt).total_seconds() != width * (num_intervals - 1):
        msg = (
            "Timeseries start/end dates do not match the number of intervals. "
            "Start: %s, End: %s, Interval Seconds: %d, Intervals: %d"
        )
        raise DataIntegrityFailure(msg % (start_dt, end_dt, width, num_intervals))
    interval_data = []
    current_dt = start_dt
    step = timedelta(seconds=width)
    for datum in series["values"]:
        next_dt = current_dt + step
        if datum is not None:
            kw = datum / 1000.0  # Engie API data is in Watts
            interval_data.append(Interval(start=current_dt, end=next_dt, kw=kw))
        current_dt = next_dt
    return interval_data
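
# A hypothetical Engie payload sketch. The shape is inferred from the accesses
# above; "window" is assumed to carry the interval width in seconds as its
# first number, per the regex, and epochs are assumed to be in seconds:
#
#     text = '''{
#         "window": "900,4",
#         "endPoints": [{
#             "name": "site-42",
#             "first": 1500000000,
#             "last":  1500002700,
#             "values": [12000, null, 9000, 10500]
#         }]
#     }'''
#     parse_intervals("site-42", text)
#     # -> [Interval(..., kw=12.0), Interval(..., kw=9.0), Interval(..., kw=10.5)]
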
def parse_sites(text):
    record = validate(text, site_list_schema, "Site")
    sites = [
        Site(
            id=s.id,
            link=s.link,
            name=s.name,
            start=_parse_datetime(s.stream_start_times.MONITOR),
        )
        for s in record.sites
        if s.stream_start_times.MONITOR
    ]
    return sites
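
# Hypothetical site-list payload, with fields inferred from the comprehension
# above; sites without a MONITOR stream start time are filtered out:
#
#     text = '''{"sites": [
#         {"id": 1, "link": "/sites/1", "name": "Depot A",
#          "stream_start_times": {"MONITOR": "2020-01-01T00:00:00Z"}},
#         {"id": 2, "link": "/sites/2", "name": "Depot B",
#          "stream_start_times": {"MONITOR": null}}
#     ]}'''
#     parse_sites(text)  # -> [Site(id=1, ...)]; Depot B is skipped.
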
def parse_site(text):
    record = validate(text, site_details_schema, "Site")
    rd = record.details
    site = Site(
        id=rd.id,
        name=rd.name,
        installation_date=rd.installationDate,
        time_zone=rd.location.timeZone,
        address=rd.location.address,
        link=rd.uris.OVERVIEW,
    )
    return site
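
# Hypothetical site-details payload, shape inferred from the attribute paths above:
#
#     text = '''{"details": {
#         "id": 1, "name": "Depot A", "installationDate": "2020-01-01",
#         "location": {"timeZone": "US/Pacific", "address": "1 Main St"},
#         "uris": {"OVERVIEW": "/site/1/overview"}
#     }}'''
#     parse_site(text)  # -> Site(id=1, name="Depot A", ...)
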
def parse_intervals(text, stream_type):
    record = validate(text, stream_schema, "Interval")
    intervals = []
    for st in record.stream_types:
        if st.stream_type == stream_type:
            for rec in st.streams:
                ivl = Interval(
                    start=_parse_datetime(rec.start_datetime),
                    end=_parse_datetime(rec.end_datetime),
                    kw=rec.kw_total_avg,
                )
                intervals.append(ivl)
    return intervals
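
# Hypothetical stream payload, shape inferred from the loop above; only streams
# under the matching stream_type are converted:
#
#     text = '''{"stream_types": [{
#         "stream_type": "demand",
#         "streams": [{"start_datetime": "2020-01-01T00:00:00",
#                      "end_datetime":   "2020-01-01T00:15:00",
#                      "kw_total_avg": 4.2}]
#     }]}'''
#     parse_intervals(text, "demand")  # -> [Interval(..., kw=4.2)]
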
def parse_intervals(text):
    record = validate(text, meter_energy_details_schema, "Interval")
    intervals = []
    for meter in record.meterEnergyDetails.meters:
        if meter.meterType == "Production":
            # 'values' must be indexed by key; attribute access collides with
            # the dict method of the same name.
            for reading in meter["values"]:
                # Some readings arrive without a 'value' field; treat those as
                # NaN rather than dropping them. Test for None explicitly so a
                # legitimate 0.0 reading is not mistaken for a missing value.
                if reading.value is None:
                    reading.value = float("nan")
                ivl = Interval(
                    start=_parse_datetime(reading.date),
                    kwh=reading.value / 1000,
                    serial_number=meter.meterSerialNumber,
                )
                intervals.append(ivl)
    return intervals
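
# Hypothetical meterEnergyDetails payload, fields inferred from the accesses
# above. The division by 1000 suggests readings are reported in Wh, but that
# unit is an assumption:
#
#     text = '''{"meterEnergyDetails": {"meters": [{
#         "meterType": "Production",
#         "meterSerialNumber": "SN-123",
#         "values": [{"date": "2020-01-01 00:00:00", "value": 2500},
#                    {"date": "2020-01-01 00:15:00"}]
#     }]}}'''
#     parse_intervals(text)
#     # -> [Interval(kwh=2.5, ...), Interval(kwh=nan, ...)]
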
def parse_clients(text):
    record = validate(text, client_list_schema, "Client")
    clients = [
        Client(link=c.link, id=c.id, name=c.name)
        for c in record.clients
    ]
    return clients
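
# Hypothetical client-list payload, fields inferred from the comprehension above:
#
#     text = '{"clients": [{"link": "/clients/7", "id": 7, "name": "Acme"}]}'
#     parse_clients(text)  # -> [Client(link="/clients/7", id=7, name="Acme")]
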