def test_from_csv_string(self):
    """Round-trip test: write the table to a CSV file, read the file back
    into a string, rebuild a table from that string, and check it matches."""
    path = self.tbl.to_csv()

    # Pull the file contents into a string. (The original bound this to a
    # variable named `str`, shadowing the builtin — renamed here.)
    with open(path, 'r') as f:
        csv_string = f.read()

    result_tbl = Table.from_csv_string(csv_string)
    assert_matching_tables(self.tbl, result_tbl)
def get_users(self):
    """
    Get users.

    `Returns:`
        Parsons Table
            See :ref:`parsons-table` for output options.
    """
    # Build the partner-scoped users endpoint and the bearer-auth header.
    url = self.uri + f'partners/{self.subdomain}.turbovote.org/users'
    headers = {"Authorization": f"Bearer {self._get_token()}"}

    logger.info('Requesting users table...')
    response = requests.get(url, headers=headers)
    logger.debug(response)

    # Surface any HTTP error status as an exception.
    response.raise_for_status()

    # The endpoint returns CSV text; parse it straight into a Parsons Table.
    users_tbl = Table.from_csv_string(response.text)
    logger.info(f'{users_tbl.num_rows} users retrieved.')
    return users_tbl
def setUp(self, m):
    """Create an ActBlue client for tests and replace ``Table.from_csv``
    with a mock that returns a fixed CSV fixture."""
    self.ab = ActBlue(TEST_CLIENT_UUID, TEST_CLIENT_SECRET, TEST_URI)

    # Keep a reference to the real from_csv so it can be restored later
    # (presumably in tearDown — confirm against the rest of the test class).
    self.from_csv = Table.from_csv

    # Use a context manager so the fixture file handle is closed; the
    # original leaked the handle from a bare open().
    with open('test/test_actblue/test_csv_data.csv') as f:
        test_csv_data = Table.from_csv_string(f.read())

    Table.from_csv = MagicMock(name='mocked from_csv', return_value=test_csv_data)
def get_registration_report(self, report_id, block=False, poll_interval_seconds=60,
                            report_timeout_seconds=3600):
    """
    Get data from an existing registration report.

    `Args:`
        report_id: int
            The ID of the report to get data from
        block: bool
            Whether or not to block execution until the report is complete
        poll_interval_seconds: int
            If blocking, how long to pause between attempts to check if the
            report is done
        report_timeout_seconds: int
            If blocking, how long to wait for the report before timing out
    `Returns:`
        Parsons Table
            Parsons table with the report data.
    `Raises:`
        RTVFailure
            If the status check fails, the report times out, or the report
            data cannot be downloaded.
    """
    logger.info(f"Getting report with id {report_id}...")
    credentials = {
        'partner_id': self.partner_id,
        'partner_API_key': self.partner_api_key,
    }
    status_url = f'registrant_reports/{report_id}'
    download_url = None

    # Deadline after which we give up because we waited too long.
    end_time = datetime.datetime.now() + datetime.timedelta(
        seconds=report_timeout_seconds)

    # If we have a download URL, we can move on and just download the
    # report. Otherwise, as long as we haven't run out of time, we will
    # check the status.
    while not download_url and datetime.datetime.now() < end_time:
        # Fix: the original used a spurious f-string prefix together with a
        # lazy %s placeholder (f'... %s'); use a plain string with lazy args.
        logger.debug(
            'Registrations report not ready yet, sleeping %s seconds',
            poll_interval_seconds)

        # Check the status again via the status endpoint
        status_response = self.client.request(status_url, 'get',
                                              params=credentials)

        # Check to make sure the call got a valid response
        if status_response.status_code == requests.codes.ok:
            status_json = status_response.json()

            # Grab the download_url from the response.
            download_url = status_json.get('download_url')
            if not download_url and not block:
                # Non-blocking mode and the report isn't ready: bail out.
                return None
        else:
            raise RTVFailure("Couldn't get report status")

        if not download_url:
            # We just got the status, so we should wait a minute before
            # we check it again.
            time.sleep(poll_interval_seconds)

    # If we never got a valid download_url, then we timed out waiting for
    # the report to generate.
    if not download_url:
        raise RTVFailure('Timed out waiting for report')

    # Download the report data
    download_response = self.client.request(download_url, 'get',
                                            params=credentials)

    # Guard clause: bail out on any non-OK download response.
    if download_response.status_code != requests.codes.ok:
        raise RTVFailure('Unable to download report data')

    report_data = download_response.text

    # Load the report data into a Parsons Table
    table = Table.from_csv_string(report_data)

    # Transform the column names from the report's CSV format to something
    # more Pythonic (snake case): whitespace -> underscore, lowercase, then
    # strip any character that isn't alphanumeric or underscore.
    normalized_column_names = [
        re.sub(r'\s', '_', name).lower() for name in table.columns
    ]
    normalized_column_names = [
        re.sub(r'[^A-Za-z\d_]', '', name)
        for name in normalized_column_names
    ]
    table.table = petl.setheader(table.table, normalized_column_names)
    return table