def load_csv_file_into_bigquery(self, dataset_id, table_id, csv_file_name, schema):
    """Load a local CSV file into a BigQuery table, replacing existing rows.

    Args:
        dataset_id: Target BigQuery dataset id.
        table_id: Target table id inside the dataset.
        csv_file_name: Path of the local CSV file to upload.
        schema: Explicit schema applied to the load job.

    Raises:
        RuntimeError: If the load job finishes with errors.
    """
    client = bigquery.Client()
    dataset_ref = client.dataset(dataset_id)
    table_ref = dataset_ref.table(table_id)
    job_config = bigquery.LoadJobConfig()
    job_config.source_format = bigquery.SourceFormat.CSV
    job_config.skip_leading_rows = 0
    job_config.write_disposition = 'WRITE_TRUNCATE'
    job_config.autodetect = True
    # BUG FIX: LoadJobConfig properties are snake_case; the previous camelCase
    # attribute (allowQuotedNewlines) was silently ignored by the client library.
    job_config.allow_quoted_newlines = True
    # BUG FIX: the property is `quote_character`, not `quote` (also silently ignored).
    job_config.quote_character = '"'
    job_config.schema = schema
    with open(csv_file_name, "rb") as source_file:
        job = client.load_table_from_file(
            source_file,
            table_ref,
            location="US",
            job_config=job_config,
        )
    try:
        job.result()
    # BUG FIX: a bare `except:` also caught KeyboardInterrupt/SystemExit;
    # narrow to Exception and chain the original cause.
    except Exception as err:
        raise RuntimeError(f"op=load_csv_file_into_bigquery | status=Fail| desc=dataset_id: {dataset_id}, table_id: {table_id}, errors: {str(job.errors)}") from err
    Logger.info(
        f"op=load-csv-file-into-bigquery | status=OK | desc=Loaded {job.output_rows} rows into {dataset_id}:{table_id}")
def send_staffing_request_to_forcetalk(self, staffing_request):
    """POST a staffing request to the Forcetalk ResourceRequest endpoint."""
    Logger.debug(f"send_staffing_request_to_forcetalk with {staffing_request}")
    payload = {
        "id": staffing_request['id'],
        "project": {'sf_id': staffing_request['opportunityId']},
        "startDate": staffing_request['startDate'],
        "endDate": staffing_request['endDate'],
        "probability": staffing_request['probability'],
        'effort': staffing_request['effort'],
        "grade": staffing_request['gradeName'],
        "role": staffing_request['roleName'],
        "workingOffices": self.generate_working_offices(staffing_request['workingOffices']),
    }
    target_url = f'{self.forcetalk_host}/forcetalk/ResourceRequest?checkEligible=false'
    request_with_retry(
        NewRequest.post,
        kwargs=dict(
            url=target_url,
            headers={"Accept": "application/json", "Content-type": "application/json"},
            data=json.dumps(payload),
            timeout=30,
        ),
    )
def send_assignment_to_forcetalk(self, assignment):
    """POST an assignment to the Forcetalk Assignment endpoint."""
    Logger.debug(f"send_assignment_to_forcetalk with {assignment}")
    payload = {
        "id": assignment['id'],
        "project": {'sf_id': assignment['project']['opportunityId']},
        "staffingRequest": {"id": assignment['staffingRequest']['uuid']},
        "consultant": {'id': assignment['consultant']['employeeId']},
        "startDate": assignment['duration']['startsOn'],
        "endDate": assignment['duration']['endsOn'],
        "effort": int(assignment['effort']),
        # Upstream encodes the shadow flag as the string 'true'/'false'.
        "shadow": assignment['shadow'] == 'true',
    }
    target_url = f'{self.forcetalk_host}/forcetalk/Assignment?checkEligible=false'
    request_with_retry(
        NewRequest.post,
        kwargs=dict(
            url=target_url,
            headers={"Accept": "application/json", "Content-type": "application/json"},
            data=json.dumps(payload),
            timeout=30,
        ),
    )
def write_rows_to_bigquery(self, dataset_id, table_id, rows_to_insert):
    """Stream rows into an existing BigQuery table.

    Args:
        dataset_id: BigQuery dataset id.
        table_id: Table id inside the dataset.
        rows_to_insert: Iterable of rows accepted by Client.insert_rows.

    Raises:
        RuntimeError: If the streaming insert reports any per-row errors.
    """
    client = bigquery.Client()
    table_ref = client.dataset(dataset_id).table(table_id)
    table = client.get_table(table_ref)
    errors = client.insert_rows(table, rows_to_insert)
    if errors:
        # BUG FIX: the failure message previously claimed "status=OK".
        raise RuntimeError(f"op=write_rows_to_bigquery | status=Fail | desc=dataset_id: {dataset_id}, table_id: {table_id}, rows_to_insert: {str(rows_to_insert)}, errors: {str(errors)}")
    else:
        Logger.debug(f"op=write_rows_to_bigquery | status=OK | desc=dataset_id: {dataset_id}, table_id: {table_id}, rows_to_insert: {str(rows_to_insert)}")
def flag_as_daily_rate_project(self, opportunity_id):
    """PUT to Forcetalk marking the given opportunity as a daily-rate project."""
    Logger.debug(f"flag_as_daily_rate_project with {opportunity_id}")
    target_url = f'{self.forcetalk_host}/forcetalk/Project/flagAsDailyRateProject/{opportunity_id}'
    request_with_retry(
        NewRequest.put,
        kwargs=dict(
            url=target_url,
            headers={"Accept": "application/json", "Content-type": "application/json"},
            timeout=30,
        ),
    )
def delete_resource_request(self, res_req_id):
    """DELETE the Forcetalk resource request with the given id."""
    Logger.debug(f"delete_resource_request with {res_req_id}")
    target_url = f'{self.forcetalk_host}/forcetalk/ResourceRequest/{res_req_id}'
    request_with_retry(
        NewRequest.delete,
        kwargs=dict(
            url=target_url,
            headers={"Accept": "application/json", "Content-type": "application/json"},
            timeout=30,
        ),
    )
def flag_project_as_eligible_for_live_feed(self, opportunity_id):
    """PUT to Forcetalk flagging the given opportunity for the live feed."""
    Logger.debug(f"flagProjectAsEligibleForLiveFeed with {opportunity_id}")
    target_url = f'{self.forcetalk_host}/forcetalk/Project/flagForLiveFeed/{opportunity_id}'
    request_with_retry(
        NewRequest.put,
        kwargs=dict(
            url=target_url,
            headers={"Accept": "application/json", "Content-type": "application/json"},
            timeout=30,
        ),
    )
def delete_assignment(self, ass_id):
    """Ask Forcetalk to delete the assignment identified by *ass_id*.

    NOTE(review): this sends an HTTP PUT (NewRequest.put) to the Assignment
    endpoint, while delete_resource_request uses NewRequest.delete for its
    delete — confirm whether the Assignment API really expects PUT here or
    whether this is a copy/paste slip.
    """
    Logger.debug(f"delete_assignment with {ass_id}")
    forcetalk_url = f'{self.forcetalk_host}/forcetalk/Assignment/{ass_id}'
    headers = {
        "Accept": "application/json",
        "Content-type": "application/json"
    }
    request_with_retry(NewRequest.put, kwargs=dict(url=forcetalk_url, headers=headers, timeout=30))
def save_json_to_bigquery(self, dataset_id, table_id, schema, data, overwrite=False):
    """Serialize records as newline-delimited JSON and load them into BigQuery.

    When *overwrite* is true the target table is truncated first; otherwise
    the rows are appended.
    """
    Logger.info("\nStart write to bigquery\n")
    ndjson_text = '\n'.join(json.dumps(record) for record in data)
    content = io.StringIO(ndjson_text)
    client = bigquery.Client()
    table_ref = client.dataset(dataset_id).table(table_id)
    job_config = bigquery.LoadJobConfig()
    job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
    job_config.write_disposition = 'WRITE_TRUNCATE' if overwrite else 'WRITE_APPEND'
    job_config.autodetect = True
    job_config.schema = schema
    client.load_table_from_file(content, table_ref, job_config=job_config).result()
    Logger.info("\nFinish write to bigquery\n")
def get_platform_data(self, scope, path, params=None):
    """GET *path* from the platform API, retrying up to three times.

    Returns the decoded JSON body on the first 200 response; raises
    RuntimeError after three failed attempts.
    """
    Logger.debug(f"get_platform_data with {path}")
    for _attempt in range(3):
        response = requests.get(
            f"{self.platform_host}/{path}",
            params=params,
            headers={'Authorization': f'bearer {self.get_token(scope)}'})
        if response.status_code == 200:
            return response.json()
        Logger.error(
            f"Failed to get {path}, res: {response.status_code} | {response.text}"
        )
    raise RuntimeError(f"failed to get_platform_data with {path}")
def check_paused_subscriptions(self, subscription_name_list=None):
    """Report subscriptions that are not in the 'active' state.

    Args:
        subscription_name_list: Optional collection of subscription names to
            check; when None, every subscription returned by the API is checked.

    Returns:
        A (possibly empty) message string listing non-active subscriptions.
        Returns None implicitly if all three attempts fail at the network level.

    Raises:
        RuntimeError: If the subscription API answers with a non-200 status.
    """
    for _ in range(3):
        try:
            res = requests.get(f"{self.platform_host}/v1/subscription",
                               headers={"x-api-key": self.x_api_key})
        # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only request-level failures should trigger a retry.
        except requests.RequestException:
            Logger.error(f'failed to get_subscription_by_name: {subscription_name_list}')
            sleep(5)
        else:
            if res.status_code == 200:
                err_msg = ''
                for subscription in res.json():
                    if subscription_name_list is None or subscription["name"] in subscription_name_list:
                        if subscription["state"] != "active":
                            err_msg += f'\n{subscription["name"]} is {subscription["state"]}\n'
                if err_msg:
                    Logger.error(err_msg)
                return err_msg
            else:
                # A non-200 answer is treated as fatal immediately (no retry),
                # matching the original control flow.
                err_msg = f'failed to get_subscriptions: {res.status_code} | {res.text}'
                Logger.error(err_msg)
                raise RuntimeError(err_msg)