def test_that_create_and_write_disposition_are_passed_if_specified(
            self, create_copy_job):
        # given
        create_dispositon = "SOME_CREATE_DISPOSITON"
        write_dispostion = "SOME_WRITE_DISPOSTION"

        # when
        CopyJobServiceAsync(
            copy_job_type_id="test-process",
            task_name_suffix="example_sufix"
        )\
        .with_create_disposition(create_dispositon)\
        .with_write_disposition(write_dispostion)\
        .copy_table(
            self.create_example_source_bq_table(),
            self.create_example_target_bq_table()
        )

        # then
        create_copy_job.assert_called_once_with(
            CopyJobRequest(
                task_name_suffix="example_sufix",
                copy_job_type_id="test-process",
                source_big_query_table=(self.create_example_source_bq_table()),
                target_big_query_table=(self.create_example_target_bq_table()),
                create_disposition=create_dispositon,
                write_disposition=write_dispostion,
                retry_count=0,
                post_copy_action_request=None))
    def __run_copy_job_for_each(self, restore_items, restoration_job):
        """Schedule an async restore copy job for every restore item.

        For each item: create its workspace, then enqueue a copy job with
        a restore-finished callback and the job's dispositions. A failure
        while scheduling one item marks only that item as FAILED and the
        loop continues with the remaining items.
        """
        logging.info("Scheduling %s", len(restore_items))

        for item in restore_items:
            source_ref = item.source_table_reference
            target_ref = item.target_table_reference

            try:
                self.restore_workspace_creator.create_workspace(
                    source_ref, target_ref)

                post_action = PostCopyActionRequest(
                    url='/callback/restore-finished/',
                    data={'restoreItemKey': item.key.urlsafe()})
                service = CopyJobServiceAsync(
                    copy_job_type_id='restore',
                    task_name_suffix=restoration_job.key.id())
                service.with_post_action(post_action) \
                    .with_create_disposition(
                        restoration_job.create_disposition) \
                    .with_write_disposition(
                        restoration_job.write_disposition) \
                    .copy_table(source_ref.create_big_query_table(),
                                target_ref.create_big_query_table())
            except Exception as ex:
                # ex.message is Python 2 only; kept to match the file's
                # existing convention.
                logging.error(
                    "Error during creating copy job. Marking restore "
                    "item as FAILED, Error message: %s", ex.message)
                item.update_with_failed(item.key, ex.message)
    def test_that_assertion_error_if_no_task_name_suffix_provided(self):
        """Constructing the service without a task_name_suffix must fail
        fast with an AssertionError carrying the expected message."""
        with self.assertRaises(AssertionError) as error:
            service = CopyJobServiceAsync(
                copy_job_type_id="test-process", task_name_suffix=None)
            service.copy_table(None, None)

        self.assertEqual(
            error.exception.message,
            "task_name_suffix needs to be assigned in constructor")
    # Example #4 (scrape artifact removed)
    def __copy_table_async(source_bq_table, destination_bq_table):
        """Enqueue an async backup copy of source_bq_table into
        destination_bq_table, with a backup-created callback.

        NOTE(review): no self/cls parameter — presumably decorated as a
        @staticmethod outside this view; confirm against the full class.
        """
        callback_url = '/callback/backup-created/{}/{}/{}'.format(
            source_bq_table.project_id,
            source_bq_table.dataset_id,
            source_bq_table.table_id)
        post_action = PostCopyActionRequest(
            url=callback_url,
            data={
                "sourceBqTable": source_bq_table,
                "targetBqTable": destination_bq_table
            })
        service = CopyJobServiceAsync(
            copy_job_type_id='backups',
            task_name_suffix=request_correlation_id.get())
        service.with_post_action(post_action).copy_table(
            source_bq_table, destination_bq_table)
    def test_that_queue_task_was_invoked_with_default_retry_count_value(
            self, create_copy_job, _):
        """With no overrides, the enqueued request must carry the default
        dispositions and retry_count=0."""
        # given / when
        CopyJobServiceAsync(
            copy_job_type_id="test-process",
            task_name_suffix="example_sufix"
        ).copy_table(self.create_example_source_bq_table(),
                     self.create_example_target_bq_table())

        # then
        expected_request = CopyJobRequest(
            task_name_suffix="example_sufix",
            copy_job_type_id="test-process",
            source_big_query_table=self.create_example_source_bq_table(),
            target_big_query_table=self.create_example_target_bq_table(),
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY",
            retry_count=0)
        create_copy_job.assert_called_once_with(expected_request)
    def test_that_post_copy_action_request_is_passed(self, create_copy_job):
        """A post-copy action attached via with_post_action must appear
        unchanged on the enqueued CopyJobRequest."""
        # given
        post_action = PostCopyActionRequest(
            url="/my/url", data={"key1": "value1"})

        # when
        service = CopyJobServiceAsync(
            copy_job_type_id="test-process",
            task_name_suffix="example_sufix")
        service.with_post_action(post_action).copy_table(
            self.create_example_source_bq_table(),
            self.create_example_target_bq_table())

        # then
        expected_request = CopyJobRequest(
            task_name_suffix="example_sufix",
            copy_job_type_id="test-process",
            source_big_query_table=self.create_example_source_bq_table(),
            target_big_query_table=self.create_example_target_bq_table(),
            create_disposition="CREATE_IF_NEEDED",
            write_disposition="WRITE_EMPTY",
            retry_count=0,
            post_copy_action_request=post_action)
        create_copy_job.assert_called_once_with(expected_request)