def _execute(self):
    """Convert Gridium bills to billing data and wrap them in a Results
    object, keyed by the configured partial-bill type."""
    bills = self.gridium_bills_to_billing_datum()
    partial_type = self._configuration.partial_type
    if partial_type == PartialBillProviderType.GENERATION_ONLY:
        return Results(generation_bills=bills)
    if partial_type == PartialBillProviderType.TND_ONLY:
        return Results(tnd_bills=bills)
    return Results(bills=bills)
def scrape_interval_data(self):
    """Run the interval-data state machine and return serialized readings.

    Raises if the machine stops in any state other than "done". When no
    timeline was populated, an empty readings dict is returned.
    """
    self.interval_data_timeline = None
    machine = self.define_state_machine()
    if machine.run() != "done":
        raise Exception(
            "The scraper did not reach a finished state, this will require developer attention."
        )
    if not self.interval_data_timeline:
        return Results(readings={})
    serialized = self.interval_data_timeline.serialize()
    self.log_readings(serialized)
    return Results(readings=serialized)
def scrape_billing_data(self):
    """Run the billing state machine and return the collected bills.

    Bills are reported as T&D partial bills when self.scrape_partial_bills
    is set; otherwise as regular (bundled) bills.
    """
    self.billing_history = []
    machine = self.define_state_machine()
    if machine.run() != "done":
        raise Exception(
            "The scraper did not reach a finished state, this will require developer attention."
        )
    self.log_bills(self.billing_history)
    if self.scrape_partial_bills:
        return Results(tnd_bills=self.billing_history)
    return Results(bills=self.billing_history)
def scrape_billing_data(self) -> Results:
    """Run the billing state machine; return partial (generation + T&D)
    bills when configured for partial scraping, else bundled bills."""
    machine = self.define_state_machine()
    if machine.run() != "done":
        raise Exception(
            "The scraper did not reach a finished state, this will require developer attention."
        )
    if self.scrape_partial_bills:
        # T&D bills use the same pages/path as bundled bills
        return Results(
            generation_bills=self.gen_billing_history,
            tnd_bills=self.billing_history,
        )
    return Results(bills=self.billing_history)
def _execute(self):
    """Log in, navigate to the energy dashboard, and download interval readings.

    Screenshots are taken at each navigation step for debugging failed runs.
    """
    login_page = LoginPage(self._driver)
    dashboard_page = login_page.login(self.username, self.password)
    log.info("Login successful.")
    self.screenshot("post_login")
    dashboard_page.visit_multidashboard()
    log.info("Loaded Multi Dashboard Page.")
    account_page = dashboard_page.select_account(self.account_number)
    log.info("Loaded account page.")
    self.screenshot("account_summary")
    energy_manager_page = account_page.visit_dashboard()
    log.info("Loaded energy manager page.")
    self.screenshot("energy_manager")
    energy_dashboard = energy_manager_page.energy_dashboard()
    log.info("Loaded energy dashboard page.")
    # Fix: this label was a copy-paste duplicate of "energy_manager",
    # which made the two screenshots indistinguishable when debugging.
    self.screenshot("energy_dashboard")
    energy_dashboard.heatmap()
    self.screenshot("energy_manager_heatmap")
    results = energy_dashboard.download_data(self.start_date, self.end_date)
    return Results(readings=results)
def _execute(self):
    """Scrape interval data for a Nautilus solar site into a Timeline.

    The site UI only pages backwards in time, so this first pages back
    (coarsely by month, then by 5-day windows) until the requested end
    date is visible, then repeatedly downloads CSVs while paging back
    until the start date has been covered.
    """
    self.construct_site_url()
    self._driver.get(self.site_url)
    sleep(5)
    # A bad site url redirects to an ASP.NET error page; fail fast.
    if "Error?aspxerrorpath" in self._driver.current_url:
        raise NautilusException("Error - could not find site url")
    site_page = SitePage(self._driver)
    five_days = timedelta(days=5)
    self.install_date = site_page.get_install_date()
    msg = "Installation date is %s" % self.install_date
    self.adjust_start_and_end_dates()
    log.info(msg)
    site_page.month_select()
    sleep(5)
    earliest_shown = site_page.get_earliest_shown()
    # coarse-grained: go back by month
    while self.end_date < earliest_shown - timedelta(days=30):
        msg = "finding where to start. earliest_shown is %s" % earliest_shown
        log.info(msg)
        site_page.double_back_arrow_select()
        sleep(5)
        earliest_shown = site_page.get_earliest_shown()
    site_page.five_days_select()
    sleep(10)
    earliest_shown = site_page.get_earliest_shown()
    # fine-grained: go back by 5-day increments
    while self.end_date < (earliest_shown - five_days):
        msg = "finding where to start. earliest_shown is %s" % earliest_shown
        log.info(msg)
        site_page.double_back_arrow_select()
        sleep(5)
        earliest_shown = site_page.get_earliest_shown()
    timeline = Timeline(self.start_date, self.end_date)
    # Gather data while paging backwards until the start date is covered.
    while (self.start_date - five_days) < earliest_shown:
        msg = "gathering data. start_date is %s" % self.start_date
        log.info(msg)
        msg = "gathering data. earliest_shown is %s" % earliest_shown
        log.info(msg)
        site_page.hamburger_select()
        sleep(2)
        file_path = site_page.download_csv()
        data = CSVParser(file_path).process_csv()
        for dt, use_kw in data:
            timeline.insert(dt, use_kw)
        log.info("\tRecorded %d intervals of data." % len(data))
        log.info("Cleaning up download.")
        clear_downloads(self._driver.download_dir)
        site_page.double_back_arrow_select()
        sleep(5)
        earliest_shown = site_page.get_earliest_shown()
    return Results(readings=timeline.serialize())
def _execute(self):
    """Drive the scrape through a page state machine and return readings.

    Raises if the machine halts anywhere other than the "done" state.
    """
    self._driver.get(self.url)
    log.info(self._configuration.__dict__)
    # We define the scraper flow below using a simple state machine.
    machine = PageStateMachine(self._driver)
    machine.add_state(
        name="init",
        action=self.init_action,
        transitions=["login"],
    )
    machine.add_state(
        name="login",
        page=LoginPage(self._driver),
        action=self.login_action,
        transitions=["find_account", "dashboard", "login_failed"],
    )
    machine.add_state(
        name="login_failed",
        page=LoginFailedPage(self._driver),
        action=self.login_failed_action,
        transitions=[],
    )
    machine.add_state(
        name="find_account",
        page=FindAccountPage(self._driver),
        action=self.find_account_action,
        transitions=["dashboard"],
    )
    machine.add_state(
        name="dashboard",
        page=DashboardPage(self._driver),
        action=self.dashboard_page_action,
        transitions=["usage"],
        wait_time=30,
    )
    machine.add_state(
        name="usage",
        page=UsagePage(self._driver),
        action=self.usage_page_action,
        transitions=["done"],
    )
    machine.add_state("done")
    machine.set_initial_state("init")
    if machine.run() != "done":
        raise Exception("The scraper did not reach a finished state.")
    return Results(readings=self.timeline.serialize() if self.timeline else [])
def _execute(self):
    """Log in to Seattle MeterWatch and harvest interval data for each meter.

    Meterwatch immediately spawns a popup when loaded which is the actual
    window we want. So we have to go and grab the main window handle and
    THEN go looking for the popup window and switch to it.
    """
    handles_before = self._driver.window_handles
    timeline = Timeline(self.start_date, self.end_date)
    main_window = _get_main_window(self._driver)
    login_window = None
    log.info(f"Navigating to {self.url}")
    self._driver.get(self.url)
    self.screenshot("initial_url")
    log.debug("Driver title: " + self._driver.title)
    # Fix: explicit check instead of `assert` — asserts are stripped when
    # Python runs with -O, silently skipping this validation.
    if "Seattle MeterWatch" not in self._driver.title:
        raise Exception("Unexpected page title: %s" % self._driver.title)
    login_page = LoginPage(self._driver)
    meterdata_page = MeterDataPage(self._driver, self._configuration)
    login_page.login(self.username, self.password)
    self._driver.wait().until(
        lambda driver: len(handles_before) != len(driver.window_handles),
        "Issues loading login page.",
    )
    for handle in self._driver.window_handles:
        if handle != main_window:
            login_window = handle
    # Fix: previously a missing popup would crash later inside
    # switch_to.window(None) with an opaque error; fail fast instead.
    if login_window is None:
        raise Exception("Could not find MeterWatch popup window after login.")
    # We have our popup, so lets do stuff with it.
    log.info("switching to new window")
    self._driver.switch_to.window(login_window)
    # resize: it opens as a tiny window
    self._driver.set_window_size(1200, 800)
    for meter_number in self._configuration.meter_numbers:
        meterdata_page.select_account(meter_number)
        self.start_date, self.end_date = meterdata_page.adjust_start_and_end_dates(
            self.start_date, self.end_date)
        # Widen timeline if necessary after dates may have been adjusted from original.
        timeline.extend_timeline(self.start_date, self.end_date)
        date_range = DateRange(self.start_date, self.end_date)
        interval_size = relativedelta(days=MAX_DOWNLOAD_DAYS)
        for sub_range in date_range.split_iter(delta=interval_size):
            meterdata_page.enter_dates(sub_range.start_date, sub_range.end_date)
            csv_file_path = meterdata_page.download_data(meter_number)
            log.info(
                f"parsing kWh usage from downloaded data for {meter_number}"
            )
            self._process_csv(csv_file_path, timeline)
    return Results(readings=timeline.serialize(include_empty=False))
def _execute(self):
    """Collect Atmos bill history, parse bills from XLS (preferred) or PDF,
    and attach uploaded PDF statements to the resulting bill data."""
    login_page = LoginPage(self._driver)
    home_page = login_page.login(self.username, self.password)
    self.screenshot("home_page")
    bill_history_page = home_page.to_bill_history()
    bill_history_page.set_dates(self.start_date, self.end_date)
    self.screenshot("bill_history")
    history = bill_history_page.gather_data()
    pdf_bytes = sum(len(t[0]) for t in history if t[0])
    xls_bytes = sum(len(t[1]) for t in history if t[1])
    pdfs = sum(1 for t in history if t[0])
    xls = sum(1 for t in history if t[1])
    log.info(
        "Acquired %s pdfs (%s bytes) and %s excel files (%s bytes)."
        % (pdfs, pdf_bytes, xls, xls_bytes)
    )
    bills = []
    for pdf_file, xls_file in history:
        # Prefer the spreadsheet when present; fall back to PDF parsing.
        if xls_file is not None:
            bill_data = bill_data_from_xls(xls_file, self.service_account)
        elif pdf_file is not None:
            bill_data = bill_data_from_pdf(
                pdf_file, self.service_account, self.meter_serial
            )
        else:
            bill_data = []
        if pdf_file is not None and bill_data:
            with_attachments = []
            for bill_datum in bill_data:
                key = bill_upload.hash_bill_datum(self.service_account, bill_datum)
                # statement date is not visible in the bill PDF text; use end date
                attachment_entry = bill_upload.upload_bill_to_s3(
                    BytesIO(pdf_file),
                    key,
                    source="atmosenergy.com",
                    statement=bill_datum.end,
                    utility=self.utility,
                    utility_account_id=self.utility_account_id,
                )
                if attachment_entry:
                    with_attachments.append(
                        bill_datum._replace(attachments=[attachment_entry])
                    )
                else:
                    with_attachments.append(bill_datum)
            bill_data = with_attachments
        if bill_data:
            bills += bill_data
    final_bills = adjust_bill_dates(bills)
    return Results(bills=final_bills)
def _execute(self):
    """Run the billing state machine and return the collected bill history."""
    self.billing_history = []
    machine = self.define_state_machine()
    if machine.run() != "done":
        raise Exception(
            "The scraper did not reach a finished state, this will require developer attention."
        )
    self.log_bills(self.billing_history)
    return Results(bills=self.billing_history)
def _execute(self):
    """Define, run and return the results from running this state machine."""
    machine = self.define_state_machine()
    if machine.run() == "done":
        # Normalize overlapping/duplicate periods before returning.
        return Results(bills=adjust_bill_dates(self.billing_history))
    raise errors.BillingScraperException(
        "The scraper did not reach a finished state; "
        "this will require developer attention.")
def _execute(self):
    """Download City of Keller bill PDFs, parse and upload them, then
    return the bill data with overlapping period start dates adjusted."""
    login_page = LoginPage(self._driver)
    home_page = login_page.login(self.keller_id, self.password)
    self.screenshot("home_page")
    bill_history_page = home_page.to_bill_history()
    self.screenshot("bill_history_page")
    bills = bill_history_page.gather_data(self.keller_id, self.start_date, self.end_date)
    log.info("Acquired %d bills (%s bytes total)." % (len(bills), sum(len(b) for b in bills)))
    bill_data = []
    for raw_pdf in bills:
        parsed = parse_bill_pdf(BytesIO(raw_pdf))
        if parsed is None:
            continue
        key = bill_upload.hash_bill_datum(self.account_number, parsed)
        # bill doesn't have a statement date; use end date
        attachment_entry = bill_upload.upload_bill_to_s3(
            BytesIO(raw_pdf),
            key,
            statement=parsed.end,
            source="cityofkeller.com",
            utility=self.utility,
            utility_account_id=self.account_number,
        )
        if attachment_entry:
            bill_data.append(parsed._replace(attachments=[attachment_entry]))
        else:
            bill_data.append(parsed)
    # bill periods overlap; adjust start dates
    adjusted_bill_data = [
        bill._replace(start=bill.start + timedelta(days=1), utility_code=None)
        for bill in bill_data
    ]
    final_bills = adjust_bill_dates(adjusted_bill_data)
    show_bill_summary(final_bills, "Final Bill Summary")
    return Results(bills=final_bills)
def _execute(self):
    """ Define, run and return the results from running this SM """
    if self.define_state_machine().run() == "done":
        return Results(readings=self.readings)
    raise errors.IntervalScraperException(
        "The scraper did not reach a finished state, "
        "this will require developer attention."
    )
def _execute(self):
    """Scrape AC power readings for one inverter from the site analytics
    page, downloading CSVs in small date chunks."""
    # Direct driver to site url -
    # Currently a public URL, no credentials needed. Will have to be
    # refactored in the future if we start scraping private sites.
    self._driver.get(self.site_url)

    # Page helpers
    overview_page = OverviewPage(self._driver)
    analytics_page = SiteAnalyticsPage(self._driver)
    date_picker = DatePickerSection(self._driver)

    # Navigate to site analytics tab
    overview_page.wait_until_ready()
    self.screenshot("before clicking on site analytics tab")
    overview_page.navigate_to_site_analytics()

    # Select inverter from both dropdowns
    analytics_page.wait_until_ready()
    self.screenshot("before selecting inverters")
    analytics_page.select_inverter_from_both_dropdowns(self.inverter_id)

    # Click on AC Power button
    self.screenshot("before clicking on ac power button")
    analytics_page.click_ac_power_button()
    self.screenshot("after clicking on ac power button")

    self.install_date = self.string_to_date(analytics_page.get_install_date())
    # Adjust start and end date, depending on inverter install date
    self.adjust_start_and_end_dates()

    date_range = DateRange(self.start_date, self.end_date)
    chunk = relativedelta(days=MAX_INTERVAL_LENGTH)
    # Loop through desired interval in two day chunks to pull down
    # power generated
    for sub_range in date_range.split_iter(delta=chunk):
        file_path = date_picker.complete_form_and_download(
            sub_range.start_date, sub_range.end_date
        )
        self.readings.update(
            CSVParser(self.inverter_id, file_path).process_csv()
        )
        log.info("Cleaning up download.")
        clear_downloads(self._driver.download_dir)
        # Adding a large pause
        self._driver.sleep(5)

    return Results(readings=self.readings)
def _execute(self):
    """Log in, fetch usage points for the configured item, and serialize
    them into a Timeline of readings."""
    timeline = Timeline(self.start_date, self.end_date)
    session_cookies = _login(self.username, self.password)
    usage = _fetch_usage_data(
        session_cookies, self._configuration.item_id, self.start_date, self.end_date
    )
    for point in usage:
        timeline.insert(point.datetime, point.kW)
    return Results(readings=timeline.serialize())
def _execute(self):
    """Compute the timeseries for the configured meter type and return it.

    Raises:
        ValueError: if the configured meter type is not a supported
            TimeSeriesType value. (Previously an unknown type left the
            timeline as None and crashed with an opaque AttributeError
            on `.serialize()`.)
    """
    mtype = self._configuration.meter_type
    if mtype == TimeSeriesType.SYNTHETIC_BUILDING_LOAD:
        final_timeline = self._compute_synthetic_building_load()
    elif mtype == TimeSeriesType.CHARGE:
        final_timeline = self._compute_converter_timeseries(sign=1)
    elif mtype == TimeSeriesType.DISCHARGE:
        final_timeline = self._compute_converter_timeseries(sign=-1)
    else:
        raise ValueError("Unsupported meter type: %s" % mtype)
    return Results(readings=final_timeline.serialize())
def _execute(self):
    """Log in, create a report for the configured point and date range,
    then parse and return its readings."""
    login_page = LoginPage(self._driver)
    setup_page = login_page.login(self.username, self.password)
    log.info("Login successful. Loading Data View and Export setup")
    self.screenshot("after login")
    report_page = setup_page.create_report(
        self.point_id, self.start_date, self.end_date
    )
    log.info("Created report.")
    self.screenshot("created report")
    return Results(readings=report_page.parse_readings())
def _execute(self):
    """Pull hourly readings from the eloverblik API in 14-day windows."""
    log.info("Attempting to log into the eloverblik API.")
    self._login()
    log.info("Login successful")
    timeline = Timeline(self.start_date, self.end_date, interval=60)
    # get and parse data 14 days at a time
    window_start = self.start_date
    while window_start < self.end_date:
        window_end = min(self.end_date, window_start + timedelta(days=14))
        self._get_data(timeline, window_start, window_end)
        window_start = window_end  # end date is exclusive
    return Results(readings=timeline.serialize())
def _execute(self):
    """Log in to the Poway customer portal, download bill PDFs for the
    configured account/date range, and parse them into bills."""
    self._driver.get("https://customerconnect.poway.org/my-account/billing")
    login_page = LoginPage(self._driver)
    self.screenshot("before login")
    login_page.login(self.username, self.password)
    self.screenshot("after login")
    overview = AccountOverviewPage(self._driver)
    overview.goto_billing_history()
    billing_page = BillingHistoryPage(self._driver)
    billing_page.select_account(self._configuration.account_id)
    self.screenshot("billing page")
    billing_page.patch_view_ebill_javascript_callback()
    billing_page.download_pdfs(self.start_date, self.end_date)
    return Results(bills=self.parse_pdfs())
def _execute(self):
    """Return a single hard-coded 30-day bill ending at self.end_date.

    Appears to be a stub/test scraper: cost, usage and peak are fixed.
    """
    stub_bill = BillingDatum(
        start=self.end_date - timedelta(days=30),
        end=self.end_date,
        statement=self.end_date,
        cost=100,
        used=25,
        peak=10,
        items=None,
        attachments=None,
        utility_code=None,
    )
    return Results(bills=[stub_bill])
def _execute(self):
    """Scrape 15-minute interval data from Powertrack into a Timeline.

    The status page is paged backwards from the present until the
    requested window is on screen, then CSVs are downloaded while paging
    back until the start date is covered.
    """
    self.screenshot("login")
    log.info("Logging in to Powertrack.")
    login_page = LoginPage(self._driver)
    portfolio_page = login_page.login(self.username, self.password)
    self.screenshot("site selection")
    log.info("Selecting site")
    time.sleep(5)
    status_page = portfolio_page.go_to_status_page(self.site_id)
    time.sleep(15)
    self.install_date = status_page.get_install_date()
    msg = "Installation date is %s" % self.install_date
    self.adjust_start_and_end_dates()
    log.info(msg)
    status_page.three_day_view_select()
    status_page.fifteen_minute_select()
    earliest_shown = status_page.get_earliest_shown()
    four_days = timedelta(days=4)
    # Page back until the requested end date is within the visible window.
    while self.end_date < (earliest_shown - four_days):
        msg = "finding where to start. earliest_shown is %s" % earliest_shown
        log.info(msg)
        status_page.calendar_back_click()
        earliest_shown = status_page.get_earliest_shown()
    # calendar picker is very hard to use; just cycle backwards 3 days at a time getting data
    timeline = Timeline(self.start_date, self.end_date)
    while (self.start_date - four_days) < earliest_shown:
        msg = "gathering data. earliest_shown is %s" % earliest_shown
        log.info(msg)
        status_page.hamburger_click()
        file_path = status_page.download_csv()
        data = CSVParser(file_path).process_csv()
        for dt, use_kw in data:
            timeline.insert(dt, use_kw)
        log.info("\tRecorded %d intervals of data." % len(data))
        log.info("Cleaning up download.")
        clear_downloads(self._driver.download_dir)
        status_page.calendar_back_click()
        earliest_shown = status_page.get_earliest_shown()
        time.sleep(5)
    return Results(readings=timeline.serialize())
def _execute(self):
    """Build partial (T&D) bills for a meter from stored Share My Data bills.

    Queries SmdBill rows for the meter's relevant usage points, unifies
    multiple published versions of the same bill, and optionally attaches
    corresponding Urjanet PDFs when S3 bill upload is enabled.
    """
    config: SmdPartialBillingScraperConfiguration = self._configuration
    meter = config.meter
    usage_points = relevant_usage_points(meter)
    log.info("Identified %s relevant usage point(s): %s", len(usage_points),
             usage_points)
    query = db.session.query(SmdBill).filter(
        SmdBill.usage_point.in_(usage_points))
    if self.start_date:
        start = self.start_date
        end = max(start, self.end_date or date.today())
        # Widen a narrow window by 60 days — presumably to guarantee at
        # least one full billing cycle is captured; TODO confirm intent.
        if end - self.start_date <= timedelta(days=60):
            start = start - timedelta(days=60)
            log.info("Adjusting start date to %s.", start)
        query = query.filter(start <= SmdBill.start)
    if self.end_date:
        query = query.filter(SmdBill.start <= self.end_date)
    query = query.order_by(SmdBill.published)
    log.info("Identified %d raw SMD bills relevant to this meter.",
             query.count())
    # It often happens that we receive several versions of the same bill across multiple files.
    # The first thing we need to do is order the bills by publication date, so we can decide
    # which SmdBill record is the correct one for our chosen date.
    unified_bills: List[SmdBill] = SmdBill.unify_bills(query)
    adjusted_bills: List[SmdBill] = SmdBill.adjust_single_day_bills(
        unified_bills)
    partial_bills = [
        b.to_billing_datum(self.service) for b in adjusted_bills
    ]
    if partial_bills:
        log.debug(
            "Identified %s partial bills in Share My Data for meter %s (%s).",
            len(partial_bills),
            meter.name,
            meter.oid,
        )
        if datafeeds_config.enabled("S3_BILL_UPLOAD"):
            partial_bills = self.attach_corresponding_urja_pdfs(
                partial_bills)
    return Results(tnd_bills=partial_bills)
def _execute(self):
    """Log in to PG&E, select the configured account, and download bill PDFs.

    The latest statement date already retrieved is tracked in the
    datasource's meta dict so subsequent runs only fetch newer statements.
    """
    # Direct the driver to the login page
    log.info("Navigating to PG&E")
    self._driver.get(self.login_url)

    # Create page helpers
    login_page = LoginPage(self._driver)
    dashboard_page = DashboardPage(self._driver)

    # Log in
    self.screenshot("before login")
    login_page.login(self.username, self.password)
    self.screenshot("after login")
    log.info("Successfully logged in")

    log.info("Visiting main dashboard")
    dashboard_page.visit_dashboard()
    self.screenshot("dashboard")

    # select account
    log.info(
        f"Visiting account summary for {self._configuration.utility_account}"
    )
    dashboard_page.select_account(self._configuration.utility_account)
    self.screenshot("after select account")

    # get latest statement date already retrieved
    datasource = self._configuration.datasource
    latest = date_parser.parse(
        (datasource.meta or {}).get("latest", "2010-01-01")
    ).date()

    # download bills
    pdfs = dashboard_page.download_bills(
        latest,
        self._configuration.utility_account,
        self._configuration.utility,
    )

    # set latest statement date
    if pdfs:
        latest_download = max(pdf.end for pdf in pdfs)
        if not datasource.meta:
            datasource.meta = {}
        datasource.meta["latest"] = latest_download.strftime("%Y-%m-%d")
    return Results(pdfs=pdfs)
def _execute(self):
    """Scrape Hudson Energy bill history, uploading any bill PDFs to S3.

    The requested window is widened to at least 90 days, which this
    utility requires.
    """
    if self.end_date - self.start_date < timedelta(days=90):
        self.start_date = self.end_date - timedelta(days=90)
        log.info(
            "Initial time window was too narrow for this utility. Expanding time window to: %s - %s"
            % (self.start_date, self.end_date))
    login_page = LoginPage(self._driver)
    home_page = login_page.login(self.username, self.password)
    log.info("Login successful. Loading bill history.")
    self.screenshot("post_login")
    bill_history_page = home_page.select_account(self.account_number)
    log.info("Loaded bill history page.")
    self.screenshot("bill_history")
    results = bill_history_page.gather_data(self.start_date, self.end_date)
    log.info("Obtained %s bill records and %s PDFs." %
             (len(results), sum(1 for _, f in results if f is not None)))
    bills = []
    for bill_datum, pdf_bytes in results:
        if pdf_bytes is None:
            bills.append(bill_datum)
            continue
        key = bill_upload.hash_bill_datum(self.account_number, bill_datum)
        attachment_entry = bill_upload.upload_bill_to_s3(
            BytesIO(pdf_bytes),
            key,
            statement=bill_datum.statement,
            source="hudsonenergy.net",
            utility=self.utility,
            utility_account_id=self.account_number,
        )
        if attachment_entry:
            bills.append(bill_datum._replace(attachments=[attachment_entry]))
        else:
            bills.append(bill_datum)
    return Results(bills=adjust_bill_dates(bills))
def _execute(self):
    """Pull Grovestreams interval data one day at a time into a Timeline.

    CHARGE meters keep only positive kW; DISCHARGE meters keep the
    magnitude of negative kW; SYNTHETIC_BUILDING_LOAD keeps everything.
    """
    log.info("Attempting to log into the Grovestreams API.")
    self._login()
    log.info("Login successful. Organization ID: %s" % self.organization_id)
    timeline = Timeline(self.start_date, self.end_date)
    cursor = date_to_datetime(self.start_date)
    end_dt = date_to_datetime(self.end_date)
    one_day = timedelta(days=1)
    while cursor < end_dt:
        window_end = cursor + one_day
        stream_uid, interval_data = self._gather_interval_data(
            cursor, window_end)
        msg = "Recovered data for stream %s. (UID: %s, Data Points: %s)"
        log.info(msg % (self.stream_id, stream_uid, len(interval_data)))
        for ivl in interval_data:
            pst_time = dt_to_platform_pst(ivl.start)  # Convert UTC to PST
            kw = ivl.kwh * 4  # Convert kWh to kW.
            if self.meter_type == TimeSeriesType.SYNTHETIC_BUILDING_LOAD:
                timeline.insert(pst_time, kw)
            elif self.meter_type == TimeSeriesType.CHARGE:
                timeline.insert(pst_time, kw if kw > 0 else 0)
            elif self.meter_type == TimeSeriesType.DISCHARGE:
                timeline.insert(pst_time, -kw if kw < 0 else 0)
        cursor = window_end
    self._logout()
    return Results(readings=timeline.serialize())
def _execute(self):
    """Fetch NV Energy readings one day at a time.

    The JWT from login goes stale quickly, so we log in again whenever
    more than five minutes have elapsed since the last login. The window
    is widened to at least MIN_DAYS before fetching.
    """
    jwt = None
    token_refresh = None
    one_day = timedelta(hours=24)
    if (self.end_date - self.start_date).days < MIN_DAYS:
        current = self.end_date - timedelta(days=MIN_DAYS)
    else:
        current = self.start_date
    timeline = Timeline(current, self.end_date)
    while current <= self.end_date:
        token_stale = (
            not token_refresh
            or datetime.now() - token_refresh > timedelta(minutes=5)
        )
        if token_stale:
            if not token_refresh:
                msg = "Attempting login to NV Energy"
            else:
                msg = "Refreshing JWT token by logging in to NV energy."
            log.info(msg)
            token_refresh = datetime.now()
            jwt = _login(self.username, self.password)
            log.info("NV Energy login succeeded.")
        readings = _fetch_usage_data(jwt, self.account_id, self.meter_id,
                                     current)
        log.info(
            "Acquired {} non-null readings for date {}. Account ID: {}, Meter ID: {}"
            .format(len(readings), str(current), self.account_id,
                    self.meter_id))
        for upoint in readings:
            timeline.insert(upoint.datetime, upoint.kW)
        current += one_day
    return Results(readings=timeline.serialize())
def _execute(self):
    """Scrape SoCalGas bills and/or Green Button interval data, as configured."""
    self._driver.get(self.login_url)
    self._authenticate()
    # Note: While SoCalGas provides GB files (which should include bills), it is not guaranteed
    # that usage/GB will be available for certain meters (particularly those available through Envoy),
    # so we need a reliable way of retrieving bills for all meters - hence scraping the table
    billing_data = (
        self._navigate_and_parse_bill_history() if self.scrape_bills else None
    )
    interval_data = None
    if self.scrape_readings:
        self._navigate_to_usage()
        self._accept_terms()
        self._select_account()
        interval_data = self._download_green_button()
    return Results(bills=billing_data, readings=interval_data)
def _execute(self) -> Results:
    """Fetch 15-minute interval data from the Smart Meter Texas API.

    Writes the client certificate/key (fetched from S3) to /tmp so the
    HTTP client can present them, clamps the requested date range to what
    the service retains, then requests data one day at a time.

    Raises:
        ApiException: if the client certificates are not configured.
    """
    smt_client_cert = read_file_from_s3(config.PRIVATE_CONFIG_BUCKET,
                                        config.SMT_CERTIFICATE_S3_KEY)
    smt_client_cert_key = read_file_from_s3(
        config.PRIVATE_CONFIG_BUCKET, config.SMT_CERTIFICATE_KEY_S3_KEY)
    if not smt_client_cert or not smt_client_cert_key:
        raise ApiException(
            "Datafeeds was not properly configured with client certificates"
            " for accessing the Smart Meter Texas API.")
    with open("/tmp/smt_client.cert", "wb") as cert_file:
        cert_file.write(smt_client_cert)
    with open("/tmp/smt_client.key", "wb") as key_file:
        key_file.write(smt_client_cert_key)
    # No more than 24 months are available on this service.
    start = max(self.start_date, date.today() - relativedelta(months=23))
    end = min(self.end_date, date.today())
    log.info("Final date range: %s - %s" % (start, end))
    # Fix: the clamped start/end were previously computed (and logged as
    # the "final" range) but the timeline and request loop still used the
    # raw self.start_date/self.end_date, ignoring the clamp entirely.
    timeline = Timeline(start, end)
    current_dt = start
    while current_dt < end:
        log.info("Requesting data for %s.", current_dt)
        results = self.request_data(current_dt, current_dt)
        for day, use_data in results.items():
            for ii, use_value in enumerate(use_data):
                # use_value * 4: presumably converts per-15-minute kWh to
                # average kW — TODO confirm against the API contract.
                timeline.insert(
                    datetime(day.year, day.month, day.day)
                    + timedelta(minutes=15 * ii),
                    use_value * 4 if use_value is not None else None,
                )
        current_dt += timedelta(days=1)
    return Results(readings=timeline.serialize())
def _execute(self):
    """Download bill PDFs for the configured account and parse each into
    a BillingDatum via process_pdf."""
    login_page = LoginPage(self._driver)
    bill_page = login_page.login(self.username, self.password)
    log.info("Login successful. Loading bill history.")
    self.screenshot("post_login")
    accounts_page = bill_page.switch_accounts()
    accounts_page.select_account(self.utility_account_id)
    downloads = bill_page.download_bills(self.start_date, self.end_date)
    log.info("Obtained %s bill PDF files." % (len(downloads)))
    bills: List[BillingDatum] = []
    for statement_dt, filename in downloads:
        bills.append(
            process_pdf(
                self.utility,
                self.utility_account_id,
                self.service_id,
                statement_dt,
                filename,
            )
        )
    return Results(bills=bills)
def _execute(self):
    """Gather a day at a time of site interval data into a Timeline.

    CHARGE keeps positive kW only; DISCHARGE keeps the magnitude of
    negative kW; SYNTHETIC_BUILDING_LOAD passes values through unchanged.
    """
    site_id = self._configuration.site_id
    meter_type = self._configuration.meter_type
    timeline = Timeline(self.start_date, self.end_date)
    cursor = time.date_to_datetime(self.start_date)
    end_dt = time.date_to_datetime(self.end_date)
    one_day = timedelta(days=1)
    while cursor < end_dt:
        window_end = cursor + one_day
        interval_data = self._gather_interval_data(cursor, window_end)
        log.info("Recovered data for site %s. Total Intervals: %s" %
                 (site_id, len(interval_data)))
        for ivl in interval_data:
            pst_time = time.dt_to_platform_pst(ivl.start)
            if meter_type == TimeSeriesType.SYNTHETIC_BUILDING_LOAD:
                # TODO: Unclear if we need to difference with the charge/discharge channel here.
                timeline.insert(pst_time, ivl.kw)
            elif meter_type == TimeSeriesType.CHARGE:
                timeline.insert(pst_time, ivl.kw if ivl.kw > 0 else 0)
            elif meter_type == TimeSeriesType.DISCHARGE:
                timeline.insert(pst_time, -ivl.kw if ivl.kw < 0 else 0)
        cursor = window_end
    return Results(readings=timeline.serialize())