def test_should_not_insert_two_backup_entities_for_the_same_backup_table(self):  # nopep8 pylint: disable=C0301
    """insert_if_absent should de-duplicate backups for the same table.

    Inserting two Backup entities with the same parent/dataset/table must
    leave exactly one entity in the datastore, and it must be the first
    one inserted (the second insert is a no-op).
    """
    # given
    # NOTE: the original used Python-2-only octal-style literals (02, 00);
    # they are written as plain decimals here (same values) so the file is
    # valid under Python 3 as well.
    table = Table(project_id='example-proj-name',
                  dataset_id='example-dataset-name',
                  table_id='example-table-name',
                  last_checked=datetime(2017, 2, 1, 16, 30))
    table.put()
    backup_one = Backup(parent=table.key,
                        last_modified=datetime(2017, 2, 1, 16, 30),
                        created=datetime(2017, 2, 1, 16, 30),
                        dataset_id='targetDatasetId',
                        table_id='targetTableId',
                        numBytes=1234)
    backup_two = Backup(parent=table.key,
                        last_modified=datetime(2018, 3, 2, 0, 0),
                        created=datetime(2018, 3, 2, 0, 0),
                        dataset_id='targetDatasetId',
                        table_id='targetTableId',
                        numBytes=1234)

    # when
    Backup.insert_if_absent(backup_one)
    Backup.insert_if_absent(backup_two)
    backups = list(Backup.get_all())

    # then: only the first backup survives
    self.assertEqual(len(backups), 1)
    self.assertEqual(backup_one.created, backups[0].created)
def __create_backup(backup_table_metadata, copy_job_results):
    """Build a Backup entity from a finished copy job and persist it.

    Looks up the source table's datastore entity; if it is missing, raises
    DatastoreTableGetRetriableException so the caller can retry. Otherwise
    constructs a Backup child entity of that table and inserts it only if
    an equivalent backup is not already stored.

    :param backup_table_metadata: metadata object exposing
        table_size_in_bytes() for the backed-up table.
    :param copy_job_results: result object exposing source/target table
        references, target dataset/table ids and start/end timestamps.
    :raises DatastoreTableGetRetriableException: when the source table
        entity is not found in the datastore (transient; retriable).
    """
    table_entity = Table.get_table_by_reference(
        TableReference.from_bq_table(copy_job_results.source_bq_table))
    if table_entity is None:
        raise DatastoreTableGetRetriableException()
    backup = Backup(
        parent=table_entity.key,
        # NOTE(review): last_modified is taken from start_time and created
        # from end_time — this pairing looks swapped; confirm against the
        # Backup model's intended semantics.
        last_modified=copy_job_results.start_time,
        created=copy_job_results.end_time,
        dataset_id=copy_job_results.target_dataset_id,
        table_id=copy_job_results.target_table_id,
        numBytes=backup_table_metadata.table_size_in_bytes())
    logging.debug(
        "Saving backup to datastore, source:{0}, target:{1}".format(
            copy_job_results.source_bq_table,
            copy_job_results.target_bq_table))
    # Fixed: the original called backup.insert_if_absent(backup), passing the
    # entity both as the bound receiver and as the argument. Elsewhere the
    # API is invoked as Backup.insert_if_absent(entity), so call it the same
    # way here.
    Backup.insert_if_absent(backup)