def check_DatastoreWriteFn(self, num_entities):
  """A helper function to test DatastoreWriteFn."""
  with patch.object(helper, 'get_datastore',
                    return_value=self._mock_datastore):
    entities = [
        e.entity for e in fake_datastore.create_entities(num_entities)
    ]

    # Materialize the expected mutations as a list so they can be both fed
    # to the DoFn and compared against the mutations the fake commit records.
    expected_mutations = [
        WriteToDatastore.to_upsert_mutation(entity) for entity in entities
    ]
    actual_mutations = []

    self._mock_datastore.commit.side_effect = (
        fake_datastore.create_commit(actual_mutations))

    datastore_write_fn = _Mutate.DatastoreWriteFn(
        self._PROJECT, fixed_batch_size=_Mutate._WRITE_BATCH_INITIAL_SIZE)

    datastore_write_fn.start_bundle()
    for mutation in expected_mutations:
      datastore_write_fn.process(mutation)
    datastore_write_fn.finish_bundle()

    self.assertEqual(actual_mutations, expected_mutations)
    # With a fixed batch size, the expected number of Commit RPCs is the
    # number of (full or partial) batches needed to cover all entities.
    self.assertEqual(
        (num_entities - 1) // _Mutate._WRITE_BATCH_INITIAL_SIZE + 1,
        self._mock_datastore.commit.call_count)
def test_DatastoreWriteLargeEntities(self):
  """100*100kB entities get split over two Commit RPCs."""
  with patch.object(helper, 'get_datastore',
                    return_value=self._mock_datastore):
    entities = [e.entity for e in fake_datastore.create_entities(100)]

    datastore_write_fn = _Mutate.DatastoreWriteFn(
        self._PROJECT, fixed_batch_size=_Mutate._WRITE_BATCH_INITIAL_SIZE)
    datastore_write_fn.start_bundle()
    for entity in entities:
      # Each entity carries a ~100kB unindexed property, so the combined
      # payload exceeds what fits in a single Commit RPC.
      datastore_helper.add_properties(
          entity, {'large': u'A' * 100000}, exclude_from_indexes=True)
      datastore_write_fn.process(WriteToDatastore.to_upsert_mutation(entity))
    datastore_write_fn.finish_bundle()

    self.assertEqual(2, self._mock_datastore.commit.call_count)