def get(self):
    """Handle the backup-scheduler task request.

    Reads the optional 'pageToken' request parameter, guarantees a
    request correlation id exists (generating a fresh UUID when one is
    not already set), then triggers backup scheduling for that page.
    """
    token = self.request.get('pageToken', None)
    # Lazily create a correlation id so downstream log lines and tasks
    # can be tied back to this request.
    if not request_correlation_id.get():
        request_correlation_id.set_correlation_id(str(uuid.uuid4()))
    logging.info(
        u'Organisation Backup Scheduler task for page_token: %s', token)
    OrganizationBackupScheduler().schedule_backup(page_token=token)
def create(cls, **kwargs):
    """Build a Task, propagating the current request correlation id.

    When a correlation id is set for the current request it is injected
    into the task's headers (creating the headers dict if the caller did
    not pass one) so the id follows the work across the task boundary.
    All keyword arguments are forwarded to the Task constructor.
    """
    correlation_id = request_correlation_id.get()
    if correlation_id:
        # setdefault covers both cases: reuse caller-supplied headers or
        # start a new dict, then stamp the correlation header into it.
        headers = kwargs.setdefault('headers', {})
        headers[request_correlation_id.HEADER_NAME] = correlation_id
    return Task(**kwargs)
def __copy_table_async(source_bq_table, destination_bq_table):
    # Kick off an asynchronous copy job that backs up one BigQuery table.
    #
    # NOTE(review): this is invoked as self.__copy_table_async(...) in
    # create_backup, yet the signature has no `self` — presumably a
    # @staticmethod decorator exists outside this view; confirm, otherwise
    # `self` would be bound to source_bq_table at the call site.
    #
    # task_name_suffix carries the request correlation id so the copy job
    # can be traced back to the originating request; the post-copy action
    # calls back into the app once the copy finishes, passing both table
    # references as payload.
    CopyJobServiceAsync(
        copy_job_type_id='backups',
        task_name_suffix=request_correlation_id.get()).with_post_action(
            PostCopyActionRequest(
                url='/callback/backup-created/{}/{}/{}'.format(
                    source_bq_table.project_id,
                    source_bq_table.dataset_id,
                    source_bq_table.table_id),
                data={"sourceBqTable": source_bq_table,
                      "targetBqTable": destination_bq_table})).copy_table(
                          source_bq_table, destination_bq_table)
def create_backup(self, source_table_entity, bq_table_metadata):
    """Schedule an async copy job that backs up one table (or partition).

    Derives the backup destination (project, dataset, table id) from the
    configuration and the source table's metadata, builds the source and
    destination BigQuery table references, and hands them off to
    __copy_table_async.
    """
    logging.info(
        'Scheduling copy job for backup, request correlation id:'
        ' %s', request_correlation_id.get())

    # Destination coordinates for the backup copy.
    backup_project = configuration.backup_project_id
    backup_dataset = DatasetIdCreator.create(
        self.now, bq_table_metadata.get_location(),
        source_table_entity.project_id)
    backup_table = self.__create_table_id(source_table_entity)

    # Combine table and partition ids into the single id string that
    # addresses the source table (helper defines the exact format).
    source_table_id = BigQueryTableMetadata.get_table_id_with_partition_id(
        source_table_entity.table_id, source_table_entity.partition_id)

    source = BigQueryTable(source_table_entity.project_id,
                           source_table_entity.dataset_id,
                           source_table_id)
    destination = BigQueryTable(backup_project, backup_dataset, backup_table)
    self.__copy_table_async(source, destination)