    def test_should_fill_deleted_field_in_backup_entity_if_table_not_found_error_during_deletion(
            self, _):
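        """When deleting an eligible backup raises a table-not-found error,
        the backup entity's 'deleted' field should still be filled."""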
        # given
        table = Table(project_id='example-proj-name',
                      dataset_id='example-dataset-name',
                      table_id='example-table-name',
                      last_checked=datetime.datetime(2017, 2, 1, 16, 30))
        table.put()
        reference = TableReference.from_table_entity(table)
        backup1 = backup_utils.create_backup(
            datetime.datetime(2017, 2, 1, 16, 30), table, table_id="backup1")
        backup2 = backup_utils.create_backup(
            datetime.datetime(2017, 2, 2, 16, 30), table, table_id="backup2")
        ndb.put_multi([backup1, backup2])
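        # Stub the retention policy so both backups are reported as eligible
        # for deletion; the patched deletion dependency (the "_" argument)
        # presumably simulates the table-not-found error named by this test.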
        self.policy.get_backups_eligible_for_deletion = Mock(
            return_value=[backup1, backup2])

        # when
        self.under_test.perform_retention(reference, table.key.urlsafe())

        # then
        self.assertIsNotNone(Backup.get_by_key(backup1.key).deleted)
        self.assertIsNotNone(Backup.get_by_key(backup2.key).deleted)

    def test_should_disable_partition_expiration_if_backup_table_has_it(
            self, disable_partition_expiration, _, _1, _2, _3, _4, _5):
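        """A successful backup-created callback for a partitioned table should
        disable partition expiration on the backup table."""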
        # given
        table_entity = Table(project_id="source_project_id",
                             dataset_id="source_dataset_id",
                             table_id="source_table_id",
                             partition_id="123")
        table_entity.put()

        source_bq_table = TableReference.from_table_entity(
            table_entity).create_big_query_table()
        destination_bq_table = BigQueryTable("target_project_id",
                                             "target_dataset_id",
                                             "target_table_id")
        data = {
            "sourceBqTable": source_bq_table,
            "targetBqTable": destination_bq_table
        }
        payload = json.dumps(
            {"data": data, "jobJson": JobResultExample.DONE},
            cls=RequestEncoder)

        # when
        response = self.under_test.post(
            '/callback/backup-created/project/dataset/table', params=payload)

        # then
        self.assertEqual(response.status_int, 200)
        disable_partition_expiration.assert_called_once()

    def test_should_not_perform_retention_if_no_backups(self, delete_table):
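        """Retention should not attempt to delete anything for a table that
        has no backups."""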
        # given
        table = Table(project_id='example-proj-name',
                      dataset_id='example-dataset-name',
                      table_id='example-table-name',
                      last_checked=datetime.datetime(2017, 2, 1, 16, 30))
        table.put()
        reference = TableReference.from_table_entity(table)

        # when
        self.under_test.perform_retention(reference, table.key.urlsafe())

        # then
        delete_table.assert_not_called()

    def test_should_create_datastore_backup_entity(self, _create_http, _):
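        """A successful backup-created callback should store a Backup entity
        carrying the target table coordinates and the copy job's timestamps."""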
        # given
        _create_http.return_value = HttpMockSequence([
            ({'status': '200'},
             content('tests/json_samples/bigquery_v2_test_schema.json')),
            ({'status': '200'},
             content('tests/json_samples/table_get/'
                     'bigquery_partitioned_table_get.json'))
        ])

        table_entity = Table(project_id="source_project_id",
                             dataset_id="source_dataset_id",
                             table_id="source_table_id",
                             partition_id="123")
        table_entity.put()

        source_bq_table = TableReference.from_table_entity(
            table_entity).create_big_query_table()
        destination_bq_table = BigQueryTable("target_project_id",
                                             "target_dataset_id",
                                             "target_table_id")
        data = {
            "sourceBqTable": source_bq_table,
            "targetBqTable": destination_bq_table
        }
        payload = json.dumps(
            {"data": data, "jobJson": JobResultExample.DONE},
            cls=RequestEncoder)
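        # Parse the job JSON back into a result object so the copy job's
        # start/end timestamps can be asserted against the stored backup.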
        copy_job_result = CopyJobResult(json.loads(payload).get('jobJson'))

        # when
        response = self.under_test.post(
            '/callback/backup-created/project/dataset/table', params=payload)
        backup = table_entity.last_backup

        # then
        self.assertEqual(response.status_int, 200)
        self.assertEqual(backup.dataset_id, "target_dataset_id")
        self.assertEqual(backup.table_id, "target_table_id")
        self.assertIsInstance(backup.created, datetime.datetime)
        self.assertEqual(backup.created, copy_job_result.end_time)

        self.assertIsInstance(backup.last_modified, datetime.datetime)
        self.assertEqual(backup.last_modified, copy_job_result.start_time)

    def test_should_not_create_backups_entity_if_backup_table_doesnt_exist(
            self, _create_http, error_reporting, _):
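        """If BigQuery reports the backup table as not found (HTTP 404), no
        Backup entity should be stored and the error should be reported once."""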
        # given
        _create_http.return_value = HttpMockSequence([
            ({'status': '200'},
             content('tests/json_samples/bigquery_v2_test_schema.json')),
            ({'status': '404'},  # table not found
             content('tests/json_samples/table_get/'
                     'bigquery_partitioned_table_get.json'))
        ])

        table_entity = Table(project_id="source_project_id",
                             dataset_id="source_dataset_id",
                             table_id="source_table_id",
                             partition_id="123")
        table_entity.put()

        source_bq_table = TableReference.from_table_entity(
            table_entity).create_big_query_table()
        destination_bq_table = BigQueryTable("target_project_id",
                                             "target_dataset_id",
                                             "target_table_id")
        data = {
            "sourceBqTable": source_bq_table,
            "targetBqTable": destination_bq_table
        }
        payload = json.dumps(
            {"data": data, "jobJson": JobResultExample.DONE},
            cls=RequestEncoder)

        # when
        response = self.under_test.post(
            '/callback/backup-created/project/dataset/table', params=payload)
        backup = table_entity.last_backup

        # then
        self.assertEqual(response.status_int, 200)
        self.assertIsNone(backup)
        error_reporting.assert_called_once()