import logging


def __process(self, request_body_json):
    # Parse the finished copy-job result and the original request payload.
    copy_job_results = CopyJobResult(request_body_json.get('jobJson'))
    data = request_body_json.get('data')

    # A failed copy job means no backup table was created; report and stop.
    if copy_job_results.has_errors():
        error_message = "Copy job failed with errors: {}. " \
                        "Backup for source: {}, target: {} " \
                        "has not been done." \
            .format(copy_job_results.error_message,
                    data["sourceBqTable"],
                    data["targetBqTable"])
        ErrorReporting().report(error_message)
        return

    backup_table_metadata = BigQueryTableMetadata.get_table_by_big_query_table(
        copy_job_results.target_bq_table)
    if backup_table_metadata.table_exists():
        self.__create_backup(backup_table_metadata, copy_job_results)
        # Disable any partition expiration inherited from the source table,
        # so partitions of the backup are not auto-deleted.
        if backup_table_metadata.has_partition_expiration():
            self.__disable_partition_expiration(
                TableReference.from_bq_table(
                    copy_job_results.target_bq_table))
    else:
        ErrorReporting().report(
            "Backup table {0} does not exist. "
            "Backup entity is not created.".format(
                copy_job_results.target_bq_table))
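# For illustration, a hypothetical request body in the shape __process
# expects. Only the keys read above ('jobJson', 'data', 'sourceBqTable',
# 'targetBqTable') come from this module; the sample values are invented:
#
# {
#     "jobJson": {...},  # raw BigQuery copy-job resource, parsed by CopyJobResult
#     "data": {
#         "sourceBqTable": "source-project:dataset.table",
#         "targetBqTable": "backup-project:dataset.table_backup"
#     }
# }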
@staticmethod
def __create_backup(backup_table_metadata, copy_job_results):
    # Backup entities are stored under their source Table entity; if the
    # parent is not in Datastore yet, raise a retriable exception so the
    # task can be retried later.
    table_entity = Table.get_table_by_reference(
        TableReference.from_bq_table(copy_job_results.source_bq_table))
    if table_entity is None:
        raise DatastoreTableGetRetriableException()

    backup = Backup(parent=table_entity.key,
                    last_modified=copy_job_results.start_time,
                    created=copy_job_results.end_time,
                    dataset_id=copy_job_results.target_dataset_id,
                    table_id=copy_job_results.target_table_id,
                    numBytes=backup_table_metadata.table_size_in_bytes())
    logging.debug(
        "Saving backup to datastore, source: {0}, target: {1}".format(
            copy_job_results.source_bq_table,
            copy_job_results.target_bq_table))
    # insert_if_absent guards against creating a duplicate Backup entity
    # if this handler runs more than once for the same copy job.
    backup.insert_if_absent(backup)
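# A minimal sketch of how an idempotent insert like Backup.insert_if_absent
# might look, assuming Backup is an App Engine ndb model (an assumption --
# the real implementation lives in the Backup model, not in this handler;
# remaining properties are omitted):
#
# from google.appengine.ext import ndb
#
# class Backup(ndb.Model):
#     dataset_id = ndb.StringProperty()
#     table_id = ndb.StringProperty()
#
#     @classmethod
#     @ndb.transactional
#     def insert_if_absent(cls, backup):
#         # An ancestor query keeps the read-then-write inside a single
#         # transaction, so concurrent handlers cannot both insert.
#         existing = cls.query(
#             cls.dataset_id == backup.dataset_id,
#             cls.table_id == backup.table_id,
#             ancestor=backup.key.parent()).get()
#         if existing is None:
#             backup.put()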