def generate_e_file(self):
    """ Write file E to an appropriate CSV. """
    log_data = {'message': 'Starting file E generation', 'message_type': 'ValidatorInfo',
                'job_id': self.job.job_id, 'submission_id': self.job.submission_id,
                'file_type': 'executive_compensation'}
    logger.info(log_data)

    d1 = self.sess.query(AwardProcurement.awardee_or_recipient_uniqu).\
        filter(AwardProcurement.submission_id == self.job.submission_id).\
        distinct()
    d2 = self.sess.query(AwardFinancialAssistance.awardee_or_recipient_uniqu).\
        filter(AwardFinancialAssistance.submission_id == self.job.submission_id).\
        distinct()
    duns_set = {r.awardee_or_recipient_uniqu for r in d1.union(d2)}
    duns_list = list(duns_set)  # get an order

    rows = []
    for i in range(0, len(duns_list), 100):
        rows.extend(fileE.retrieve_rows(duns_list[i:i + 100]))

    # Add rows to database here.
    # TODO: This is a temporary solution until loading from SAM's SFTP has been resolved
    for row in rows:
        self.sess.merge(ExecutiveCompensation(**fileE.row_to_dict(row)))
    self.sess.commit()

    log_data['message'] = 'Writing E file CSV: {}'.format(self.job.original_filename)
    logger.info(log_data)
    write_csv(self.job.original_filename, self.job.filename, self.is_local, fileE.Row._fields, rows)

    log_data['message'] = 'Finished writing E file CSV: {}'.format(self.job.original_filename)
    logger.info(log_data)
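# The 100-at-a-time slicing in the loop above is a plain batching idiom, used here
# to cap each SAM request at 100 DUNS numbers. A self-contained sketch of the same
# pattern (illustration only, not part of the broker codebase):
def chunks(items, size=100):
    """Yield consecutive slices of `items`, each holding at most `size` elements."""
    for i in range(0, len(items), size):
        yield items[i:i + size]

assert list(chunks(list(range(7)), size=3)) == [[0, 1, 2], [3, 4, 5], [6]]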
def generate_e_file(self):
    """ Write file E to an appropriate CSV. """
    log_data = {'message': 'Starting file E generation', 'message_type': 'ValidatorInfo',
                'job_id': self.job.job_id, 'submission_id': self.job.submission_id,
                'file_type': 'executive_compensation'}
    logger.info(log_data)

    d1 = self.sess.query(AwardProcurement.awardee_or_recipient_uniqu).\
        filter(AwardProcurement.submission_id == self.job.submission_id).\
        distinct()
    d2 = self.sess.query(AwardFinancialAssistance.awardee_or_recipient_uniqu).\
        filter(AwardFinancialAssistance.submission_id == self.job.submission_id).\
        distinct()
    duns_set = {r.awardee_or_recipient_uniqu for r in d1.union(d2)}
    duns_list = list(duns_set)  # get an order

    rows = []
    for i in range(0, len(duns_list), 100):
        rows.extend(fileE.retrieve_rows(duns_list[i:i + 100]))

    # Add rows to database here.
    # TODO: This is a temporary solution until loading from SAM's SFTP has been resolved
    for row in rows:
        self.sess.merge(ExecutiveCompensation(**fileE.row_to_dict(row)))
    self.sess.commit()

    log_data['message'] = 'Writing file E CSV'
    logger.info(log_data)
    write_csv(self.job.original_filename, self.job.filename, self.is_local, fileE.Row._fields, rows)
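# Both variants above rely on two pieces of the fileE module: a namedtuple Row,
# whose _fields supply the CSV header passed to write_csv, and row_to_dict, which
# turns a Row into keyword arguments for the ExecutiveCompensation model. A
# minimal sketch of that shape, assuming a hypothetical field subset and column
# mapping (the authoritative definitions live in fileE itself):
from collections import namedtuple

Row = namedtuple('Row', [
    'AwardeeOrRecipientUniqueIdentifier',
    'AwardeeOrRecipientLegalEntityName',
    'UltimateParentUniqueIdentifier',
    'UltimateParentLegalEntityName',
])

def row_to_dict(row):
    """Map a Row's CSV-style field names onto snake_case model columns (assumed names)."""
    return {
        'awardee_or_recipient_uniqu': row.AwardeeOrRecipientUniqueIdentifier,
        'awardee_or_recipient_legal': row.AwardeeOrRecipientLegalEntityName,
        'ultimate_parent_unique_ide': row.UltimateParentUniqueIdentifier,
        'ultimate_parent_legal_enti': row.UltimateParentLegalEntityName,
    }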
def test_retrieve_rows(monkeypatch):
    """Mock out a response from the SAM API and spot check several of the components that built it up"""
    monkeypatch.setattr(fileE, 'CONFIG_BROKER', _VALID_CONFIG)

    mock_result = Mock(
        listOfEntities=Mock(
            entity=[
                Mock(
                    entityIdentification=Mock(DUNS='entity1', legalBusinessName='Legal Business Name'),
                    coreData=Mock(
                        listOfExecutiveCompensationInformation=Mock(
                            executiveCompensationDetail=[
                                Mock(compensation=123.45),
                                Mock(compensation=234.56)
                            ]
                        ),
                        DUNSInformation=Mock(
                            globalParentDUNS=Mock(
                                DUNSNumber='parent1Duns',
                                legalBusinessName='parent1'
                            )
                        )
                    )
                ),
                Mock(
                    entityIdentification=Mock(DUNS='entity2'),
                    coreData=Mock(
                        # Indicates no data
                        listOfExecutiveCompensationInformation='',
                        DUNSInformation=Mock(
                            globalParentDUNS=Mock(
                                DUNSNumber='parent2Duns',
                                legalBusinessName='parent2'
                            )
                        )
                    )
                ),
            ]
        )
    )
    mock_client = Mock()
    mock_client.return_value.service.getEntities.return_value = mock_result
    monkeypatch.setattr(fileE, 'get_client', mock_client)

    rows = fileE.retrieve_rows(['duns1', 'duns2'])
    assert len(rows) == 2
    assert rows[0].AwardeeOrRecipientUniqueIdentifier == 'entity1'
    assert rows[0].AwardeeOrRecipientLegalEntityName == 'Legal Business Name'
    assert rows[0].HighCompOfficer1Amount == 234.56
    assert rows[0].HighCompOfficer5Amount == ''
    assert rows[1].UltimateParentUniqueIdentifier == 'parent2Duns'
    assert rows[1].UltimateParentLegalEntityName == 'parent2'

    # [0] for positional args
    call_params = mock_client.return_value.service.getEntities.call_args[0]
    auth, search, params = call_params
    assert auth.userID == _VALID_CONFIG['sam']['username']
    assert auth.password == _VALID_CONFIG['sam']['password']
    assert search.DUNSList.DUNSNumber == ['duns1', 'duns2']
    assert params.coreData.value == 'Y'
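# _VALID_CONFIG is referenced but not defined in this snippet. Judging from the
# auth assertions at the end of the test, it needs at least a 'sam' section
# carrying credentials; a minimal stand-in with placeholder values might be:
_VALID_CONFIG = {'sam': {'username': 'some-username', 'password': 'some-password'}}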
def generate_e_file(submission_id, job_id, timestamped_name, upload_file_name, is_local):
    """Write file E to an appropriate CSV."""
    with job_context(job_id) as session:
        d1 = session.\
            query(AwardProcurement.awardee_or_recipient_uniqu).\
            filter(AwardProcurement.submission_id == submission_id).\
            distinct()
        d2 = session.\
            query(AwardFinancialAssistance.awardee_or_recipient_uniqu).\
            filter(AwardFinancialAssistance.submission_id == submission_id).\
            distinct()
        duns_set = {r.awardee_or_recipient_uniqu for r in d1.union(d2)}
        duns_list = list(duns_set)  # get an order

        rows = []
        for i in range(0, len(duns_list), 100):
            rows.extend(fileE.retrieve_rows(duns_list[i:i + 100]))

        # Add rows to database here.
        # TODO: This is a temporary solution until loading from SAM's SFTP has been resolved
        for row in rows:
            session.merge(ExecutiveCompensation(**fileE.row_to_dict(row)))
        session.commit()

        write_csv(timestamped_name, upload_file_name, is_local, fileE.Row._fields, rows)
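# job_context is used above as a context manager that yields a database session
# scoped to the job. A rough, self-contained sketch of that contract, assuming a
# plain SQLAlchemy sessionmaker (an assumption -- the broker's real version also
# updates job status and records errors):
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=create_engine('sqlite://'))  # placeholder engine

@contextmanager
def job_context(job_id):
    """Yield a session for the lifetime of job `job_id`, always closing it."""
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()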
def generate_e_file(submission_id, job_id, timestamped_name, upload_file_name, is_local):
    """Write file E to an appropriate CSV.

    Args:
        submission_id - Submission ID for generation
        job_id - Job ID for upload job
        timestamped_name - Version of filename without user ID
        upload_file_name - Filename to use on S3
        is_local - True if in local development, False otherwise
    """
    log_data = {
        'message': 'Starting file E generation',
        'message_type': 'BrokerInfo',
        'submission_id': submission_id,
        'job_id': job_id,
        'file_type': 'executive_compensation'
    }
    logger.info(log_data)

    with job_context(job_id) as session:
        d1 = session.query(AwardProcurement.awardee_or_recipient_uniqu).\
            filter(AwardProcurement.submission_id == submission_id).\
            distinct()
        d2 = session.query(AwardFinancialAssistance.awardee_or_recipient_uniqu).\
            filter(AwardFinancialAssistance.submission_id == submission_id).\
            distinct()
        duns_set = {r.awardee_or_recipient_uniqu for r in d1.union(d2)}
        duns_list = list(duns_set)  # get an order

        rows = []
        for i in range(0, len(duns_list), 100):
            rows.extend(fileE.retrieve_rows(duns_list[i:i + 100]))

        # Add rows to database here.
        # TODO: This is a temporary solution until loading from SAM's SFTP has been resolved
        for row in rows:
            session.merge(ExecutiveCompensation(**fileE.row_to_dict(row)))
        session.commit()

        log_data['message'] = 'Writing file E CSV'
        logger.info(log_data)

        write_csv(timestamped_name, upload_file_name, is_local, fileE.Row._fields, rows)

    log_data['message'] = 'Finished file E generation'
    logger.info(log_data)
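# A hedged usage sketch for the final signature above; the IDs and filenames are
# invented examples, and in the real broker this function runs from a queued job
# rather than being called directly:
generate_e_file(
    submission_id=1234,                              # hypothetical submission
    job_id=5678,                                     # hypothetical upload job
    timestamped_name='1234_file_e.csv',              # filename without the user ID
    upload_file_name='submissions/1234/file_e.csv',  # destination key/path
    is_local=True,                                   # keep output on local disk, not S3
)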