def store_import_job_details(self, report_id: int, job: bigquery.LoadJob):
    """Save a BQ Import job in Firestore.

    Persists the job's API representation under 'jobs/<report_id>'.

    Arguments:
        report_id {int} -- id of the report this load job belongs to
        job {bigquery.LoadJob} -- the BigQuery load job to persist
    """
    self.client.document(f'jobs/{report_id}').set(job.to_api_repr())
def store_import_job_details(self, report_id: int, job: bigquery.LoadJob) -> None:
    """Saves a BQ Import job in Firestore.

    Arguments:
        report_id {int} -- id of the report this load job belongs to
        job {bigquery.LoadJob} -- the BigQuery load job to persist
    """
    payload = job.to_api_repr()
    self.store_document(Type._JOBS, report_id, payload)
def mark_import_job_complete(self, report_id: int, job: bigquery.LoadJob) -> None:
    """Marks a BQ Import job in Firestore done.

    Moves an import job from 'jobs/' to 'jobs-completed'.

    Arguments:
        report_id {int} -- id of the report this load job belongs to
        job {bigquery.LoadJob} -- the finished BigQuery load job
    """
    api_repr = job.to_api_repr()
    self.delete_document(Type._JOBS, report_id)
    self.store_document(Type._COMPLETED, report_id, api_repr)
def process(self, data: Dict[str, Any], context) -> None:
    """Checks all the running jobs.

    For every document in the 'jobs' collection, finds the report config for
    the matching report type, reloads the BQ load job and — once the job is
    DONE — runs the finished-job handler, fires the notifier if one is
    configured, and moves the job to the completed collection.

    Args:
        event (Dict[str, Any]): data sent from the PubSub message
        context (Dict[str, Any]): context data. unused
    """
    documents = self.firestore.get_all_documents(Type._JOBS)
    for document in documents:
        # Iterate the enum directly; no need to materialize a list.
        for product in Type:
            if config := self.firestore.get_document(product, document.id):
                if config.get('dest_project'):
                    # User credentials scoped to the destination project.
                    user_creds = \
                        credentials.Credentials(email=config['email'],
                                                project=config['dest_project'])
                    bq = bigquery.Client(
                        project=config['dest_project'],
                        credentials=user_creds.credentials)
                else:
                    bq = bigquery.Client()

                api_repr = document.get().to_dict()
                if api_repr:
                    try:
                        job = LoadJob.from_api_repr(api_repr, bq)
                        job.reload()
                        if job.state == 'DONE':
                            if job.error_result:
                                logging.error(job.errors)
                            self._handle_finished(job=job, config=config)
                            # An explicit 'if' replaces the old side-effecting
                            # boolean-and expression.
                            if 'notifier' in config:
                                self.notify(report_type=product,
                                            config=config,
                                            job=job,
                                            id=document.id)
                            self._mark_import_job_complete(document.id, job)
                    except Exception:
                        # logging.exception keeps the traceback that the old
                        # message-only logging.error discarded.
                        logging.exception(
                            'Error loading job %s for monitoring.',
                            document.id)
                # Config found for this document: stop trying other types.
                break
def mark_import_job_complete(self, report_id: int, job: bigquery.LoadJob):
    """Mark BQ Import job in Firestore done.

    Moves an import job from 'jobs/' to 'jobs-completed'.

    Arguments:
        report_id {int} -- id of the report this load job belongs to
        job {bigquery.LoadJob} -- the finished BigQuery load job
    """
    self.client.document(f'jobs/{report_id}').delete()
    self.client.document(f'jobs-completed/{report_id}').set(job.to_api_repr())
def process(self, data: Dict[str, Any], context):
    """Check all the running jobs.

    For every stored job document, locates the matching report config,
    builds a BigQuery client (with user credentials fetched from the token
    bucket when a destination project is configured), reloads the load job
    and, once it is DONE, hands it to the finished-job handler and marks it
    complete in Firestore.

    Arguments:
        event {Dict[str, Any]} -- data sent from the PubSub message
        context {Dict[str, Any]} -- context data. unused
    """
    firestore = Firestore(in_cloud=True, email=None, project=None)
    documents = firestore.get_all_jobs()

    for document in documents:
        # '_'-prefixed enum members are internal markers, not report types.
        for T in [t for t in Type if not t.name.startswith('_')]:
            config = firestore.get_report_config(T, document.id)
            if config:
                if config.get('dest_project'):
                    # authenticate against supplied project with supplied key
                    project = config.get('dest_project') or os.environ.get(
                        'GCP_PROJECT')
                    # NOTE(review): hard-coded fallback project id — confirm
                    # it is still valid for this deployment.
                    token_bucket = (
                        f"{os.environ.get('GCP_PROJECT') or 'galvanic-card-234919'}"
                        '-report2bq-tokens')
                    client_key = json.loads(
                        Cloud_Storage.fetch_file(
                            bucket=token_bucket,
                            file=f"{config['email']}_user_token.json"))
                    server_key = json.loads(
                        Cloud_Storage.fetch_file(
                            bucket=token_bucket,
                            file='client_secrets.json'))
                    secrets = (server_key.get('web')
                               or server_key.get('installed'))
                    client_key['client_id'] = secrets.get('client_id')
                    client_key['client_secret'] = secrets.get('client_secret')
                    # SECURITY: do not log client_key — it now contains the
                    # OAuth client secret and the user's refresh token.
                    creds = Credentials.from_authorized_user_info(client_key)
                    bq = bigquery.Client(project=project, credentials=creds)
                else:
                    bq = bigquery.Client()

                api_repr = document.get().to_dict()
                if api_repr:
                    try:
                        job = LoadJob.from_api_repr(api_repr, bq)
                        job.reload()
                        if job.state == 'DONE':
                            if job.error_result:
                                logging.error(job.errors)
                            self._handle_finished(job=job, id=document.id,
                                                  config=config,
                                                  report_type=T)
                            firestore.mark_import_job_complete(document.id,
                                                               job)
                    except Exception:
                        # logging.exception preserves the traceback the old
                        # message-only f-string log threw away.
                        logging.exception(
                            'Error loading job %s for monitoring.',
                            document.id)
                # Config found for this document: stop trying other types.
                break