Example #1
    def test_that_last_checked_date_is_updated_even_if_table_should_not_be_backed_up( # nopep8 pylint: disable=C0301
            self, copy_table, _1, _2):
        # given
        table = Table(project_id="test-project",
                      dataset_id="test-dataset",
                      table_id="test-table",
                      last_checked=datetime.datetime(2017, 3, 3))

        table_reference = TableReference(project_id="test-project",
                                         dataset_id="test-dataset",
                                         table_id="test-table")

        # when
        table.put()

        BackupProcess(table_reference, self.big_query,
                      self.big_query_table_metadata).start()

        table_entity = Table.get_table("test-project", "test-dataset",
                                       "test-table")

        # then
        self.assertEqual(table_entity.last_checked,
                         datetime.datetime(2017, 4, 4))
        copy_table.assert_not_called()
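
Note on the test signatures: the extra arguments (copy_table, _, _1, _2, _3) are injected by stacked @patch decorators that sit above each test method but are not included in these snippets. The following is a self-contained sketch of that wiring, not the project's actual patch targets; _FakeBigQuery and its attributes are hypothetical stand-ins:

    import unittest
    from mock import patch  # on Python 3, use: from unittest.mock import patch


    class _FakeBigQuery(object):
        """Hypothetical stand-in for the BigQuery wrapper used by BackupProcess."""

        def copy_table(self, source, target):
            raise NotImplementedError

        def create_dataset(self, project_id, dataset_id):
            raise NotImplementedError


    class TestMockWiring(unittest.TestCase):
        # Stacked @patch decorators are applied bottom-up, so the lowest one
        # supplies the first mock argument after self -- which is why the
        # tests here receive signatures like (self, copy_table, _1, _2).
        @patch.object(_FakeBigQuery, 'create_dataset')  # -> _1
        @patch.object(_FakeBigQuery, 'copy_table')      # -> copy_table
        def test_wiring(self, copy_table, _1):
            _FakeBigQuery().copy_table("source", "target")
            copy_table.assert_called_once_with("source", "target")
            _1.assert_not_called()


    if __name__ == '__main__':
        unittest.main()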
Example #2
    def start(table_reference):
        big_query_table_metadata = BigQueryTableMetadata.get_table_by_reference(
            table_reference)

        BackupProcess(
            table_reference=table_reference,
            big_query=BigQuery(),
            big_query_table_metadata=big_query_table_metadata,
            should_backup_predicate=OnDemandBackupPredicate()).start()
Example #3
    def test_that_dataset_will_not_be_unnecessary_created_twice(self,
                                                                _, _1, _2, _3):
        # given
        table_reference_1 = TableReference(project_id="test-project",
                                           dataset_id="test-dataset",
                                           table_id="test-table-1")
        table_reference_2 = TableReference(project_id="test-project",
                                           dataset_id="test-dataset",
                                           table_id="test-table-2")

        # when
        self.big_query.create_dataset = MagicMock()

        BackupProcess(table_reference_1, self.big_query,
                      self.big_query_table_metadata).start()
        BackupProcess(table_reference_2, self.big_query,
                      self.big_query_table_metadata).start()

        # then
        self.big_query.create_dataset.assert_called_once()
Example #4
    def start(table_reference):
        big_query_table_metadata = BigQueryTableMetadata.get_table_by_reference(
            table_reference)

        if big_query_table_metadata.is_daily_partitioned() and \
                not big_query_table_metadata.is_partition():
            raise ParameterValidationException(
                "Partition id is required for partitioned table in on-demand mode"
            )

        BackupProcess(
            table_reference=table_reference,
            big_query=BigQuery(),
            big_query_table_metadata=big_query_table_metadata,
            should_backup_predicate=OnDemandBackupPredicate()).start()
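
For daily-partitioned tables, this on-demand entry point only accepts references that pin a specific partition. A minimal usage sketch follows, treating start as the plain function shown above; the example names and the surrounding error handling are assumptions, not project code:

    import logging

    # Accepted: the reference names a single partition of the table.
    start(TableReference(project_id="example-project",
                         dataset_id="example-dataset",
                         table_id="example-table",
                         partition_id="20170330"))

    # Rejected when the table is daily-partitioned: no partition_id, so the
    # validation above raises ParameterValidationException.
    try:
        start(TableReference(project_id="example-project",
                             dataset_id="example-dataset",
                             table_id="example-table"))
    except ParameterValidationException as error:
        logging.warning("On-demand backup request rejected: %s", error)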
Example #5
    def test_copy_job_and_entity_in_datastore_for_not_partitioned_table(
            self, _, _1, _2):
        # given
        table_reference = TableReference(project_id="test-project",
                                         dataset_id="test-dataset",
                                         table_id="test-table")

        # when
        BackupProcess(table_reference, self.big_query,
                      self.big_query_table_metadata).start()

        table_entity = Table.get_table("test-project", "test-dataset",
                                       "test-table")

        # then
        self.assertIsNotNone(table_entity)
Example #6
    def test_that_copy_job_and_entity_in_datastore_is_created_if_empty_partitioned_table( # nopep8 pylint: disable=C0301
            self, create_backup, _, _1):
        # given
        table_reference = TableReference(project_id="test-project",
                                         dataset_id="test-dataset",
                                         table_id="test-table",
                                         partition_id=None)

        # when
        BackupProcess(table_reference, self.big_query,
                      self.big_query_table_metadata).start()
        table_in_datastore = Table.get_table("test-project", "test-dataset",
                                             "test-table")

        # then
        create_backup.assert_called_once()
        self.assertIsNotNone(table_in_datastore)
Example #7
    def start(table_reference):
        big_query = BigQuery()

        big_query_table_metadata = BigQueryTableMetadata.get_table_by_reference(
            table_reference)

        if big_query_table_metadata.is_daily_partitioned() and \
                not big_query_table_metadata.is_partition():
            logging.info('Table (%s/%s/%s) is partitioned',
                         table_reference.get_project_id(),
                         table_reference.get_dataset_id(),
                         table_reference.get_table_id())
            TablePartitionsBackupScheduler(table_reference, big_query).start()
        else:
            BackupProcess(
                table_reference=table_reference,
                big_query=big_query,
                big_query_table_metadata=big_query_table_metadata).start()
Example #8
    def start(table_reference):
        big_query = BigQuery()

        big_query_table_metadata = BigQueryTableMetadata.get_table_by_reference(
            table_reference)

        if big_query_table_metadata.is_daily_partitioned() and \
                not big_query_table_metadata.is_partition():
            logging.info(u'Table %s:%s.%s is partitioned',
                         table_reference.get_project_id(),
                         table_reference.get_dataset_id(),
                         table_reference.get_table_id())
            TableBackup._schedule_partitioned_table_backup_scheduler_task(
                table_reference)

        else:
            BackupProcess(
                table_reference=table_reference,
                big_query=big_query,
                big_query_table_metadata=big_query_table_metadata).start()
Example #9
    def test_copy_job_and_entity_in_datastore_for_single_partition_of_a_table(
            self, _, _1, _2):
        # given
        table_reference = TableReference(project_id="test-project",
                                         dataset_id="test-dataset",
                                         table_id="test-table",
                                         partition_id="20170330")

        # when
        BackupProcess(table_reference, self.big_query,
                      self.big_query_table_metadata).start()

        ancestor_of_partition = Table.get_table("test-project", "test-dataset",
                                                "test-table")
        partition = Table.get_table("test-project", "test-dataset",
                                    "test-table", "20170330")

        # then
        self.assertIsNotNone(partition)
        self.assertIsNone(ancestor_of_partition)
Example #10
    def test_that_last_checked_date_is_updated_when_backup_is_processed(
            self, _, _1, _2):
        # given
        table = Table(project_id="test-project",
                      dataset_id="test-dataset",
                      table_id="test-table",
                      last_checked=datetime.datetime(2017, 3, 3))

        table_reference = TableReference(project_id="test-project",
                                         dataset_id="test-dataset",
                                         table_id="test-table")

        # when
        table.put()

        BackupProcess(table_reference, self.big_query,
                      self.big_query_table_metadata).start()

        table_entity = Table.get_table("test-project", "test-dataset",
                                       "test-table")

        # then
        self.assertEqual(table_entity.last_checked,
                         datetime.datetime(2017, 4, 4))