def _write_microversion(changeset, file_kwargs, method_kwargs, email, action):
  """Task to enqueue for microversioning a file action.

  Args:
    changeset: The changeset to associate the microversioned file with.
    file_kwargs: Keyword args used to instantiate the files.File object.
        Mutated in place: '_internal' and 'changeset' are set here.
    method_kwargs: Keyword args passed through to write() or delete().
        Mutated in place: '_delete_old_blob' is forced to False, and
        'created_by'/'modified_by' are set when an email is given.
    email: Email address of the acting user, or a falsy value to leave
        created_by/modified_by untouched.
    action: An _Actions value; one of _Actions.WRITE or _Actions.DELETE.
  Raises:
    ValueError: If action is not a recognized _Actions value.
  """
  # Microversion writes always target internal files within the changeset.
  file_kwargs['_internal'] = True
  file_kwargs['changeset'] = changeset
  # Keep old blobs around; presumably other file versions may still
  # reference them -- TODO(review): confirm against files.File.write().
  method_kwargs['_delete_old_blob'] = False
  if email:
    user = users.TitanUser(email)
    method_kwargs['created_by'] = user
    method_kwargs['modified_by'] = user
  if action == _Actions.WRITE:
    files.File(**file_kwargs).write(**method_kwargs)
  elif action == _Actions.DELETE:
    files.File(**file_kwargs).delete(**method_kwargs)
  else:
    # Fail loudly instead of silently dropping an unrecognized action.
    raise ValueError('Unknown microversion action: %r' % (action,))
def setUp(self):
  """Builds a logged activity and its processor logger for each test."""
  super(BaseProcessorActivityLoggerTest, self).setUp()
  titan_user = users.TitanUser('*****@*****.**')
  self.activity = activities.Activity('key', user=titan_user, meta='meta')
  self.activity_logger = activities.BaseProcessorActivityLogger(self.activity)
def testCommitManyFiles(self):
  """Regression test for committing a changeset touching many entities.

  Guards against the "operating on too many entity groups in a single
  transaction" error. This usually happens through the HTTP API when lazy
  file objects are created from the manifest and then end up being
  evaluated one-by-one inside the commit code path.
  """
  test_user = users.TitanUser('*****@*****.**')

  # First pass: explicitly re-associate each lazy file, then commit.
  changeset = self.vcs.new_staging_changeset(created_by=test_user)
  for index in range(100):
    files.File('/foo%s' % index, changeset=changeset).write(str(index))
  changeset.finalize_associated_files()
  # Recreate the changeset from scratch, and reassociate lazy file objects.
  changeset = versions.Changeset(changeset.num)
  for index in range(100):
    changeset.associate_file(
        files.File('/foo%s' % index, changeset=changeset))
  changeset.finalize_associated_files()
  self.vcs.commit(changeset)

  # Second pass: exercise the list_files code path via force=True.
  changeset = self.vcs.new_staging_changeset(created_by=test_user)
  for index in range(100):
    files.File('/foo%s' % index, changeset=changeset).write(str(index))
  changeset.finalize_associated_files()
  # Recreate the changeset from scratch; commit discovers files itself.
  changeset = versions.Changeset(changeset.num)
  self.vcs.commit(changeset, force=True)
def testProcessActivityLoggers(self):
  """Verifies process_activity_loggers writes a stored activity's file."""
  acting_user = users.TitanUser('*****@*****.**')
  stored_activity = activities.Activity('key', user=acting_user, meta='meta')
  activities.FileActivityLogger(stored_activity).store()
  # Ensure that it runs normally.
  activities.process_activity_loggers()
  self.RunDeferredTasks(queue_name=activities.ACTIVITY_QUEUE)
  # Verify file was written.
  self.assertTrue(
      files.File(stored_activity.activity_id, _internal=True).exists)
def make_testdata(self):
  """Builds a fixture of committed changesets covering all edit types.

  Resulting history (by final changeset number):
    11: /foo, /bar, /qux created; backdated 31 days for expiration tests.
    13: /foo edited, /bar deleted, /baz created, /qux edited.
    15: /foo deleted, /bar deleted, /baz edited.
    17: /foo re-created by a different user.
  """
  # Burn changeset numbers 1-9 so the first commit below lands at 11.
  for _ in range(1, 10):
    self.vcs.new_staging_changeset()
  # Changeset 11 (was changeset 10 before commit):
  changeset = self.vcs.new_staging_changeset()
  # The very first committed changeset has no base changeset.
  self.assertIsNone(changeset.base_changeset)
  files.File('/foo', changeset=changeset).write('foo')
  files.File('/bar', changeset=changeset).write('bar')
  files.File('/qux', changeset=changeset).write('qux')
  changeset.finalize_associated_files()
  self.vcs.commit(changeset)
  # For testing, move the submitted datetime to 31 days ago.
  # NOTE(review): this rewrites the entity's 'created' field directly;
  # presumably 'created' doubles as the submitted time -- confirm.
  changeset_ent = versions.Changeset(11).changeset_ent
  created = datetime.datetime.now() - datetime.timedelta(days=31)
  changeset_ent.created = created
  changeset_ent.put()
  # Changeset 13:
  changeset = self.vcs.new_staging_changeset()
  self.assertEqual(11, changeset.base_changeset.num)
  files.File('/foo', changeset=changeset).write('foo2')  # edit
  files.File('/bar', changeset=changeset).delete()  # delete
  files.File('/baz', changeset=changeset).write('baz')  # create
  files.File('/qux', changeset=changeset).write('qux2')  # edit
  changeset.finalize_associated_files()
  self.vcs.commit(changeset)
  self.assertEqual(13, self.vcs.get_last_submitted_changeset().num)
  self.assertIsNone(self.vcs.get_last_submitted_changeset().namespace)
  # Changeset 15:
  changeset = self.vcs.new_staging_changeset()
  self.assertEqual(13, changeset.base_changeset.num)
  files.File('/foo', changeset=changeset).delete()  # delete
  files.File('/bar', changeset=changeset).delete()  # delete
  files.File('/baz', changeset=changeset).write('baz2')  # edit
  changeset.finalize_associated_files()
  self.vcs.commit(changeset)
  # Changeset 17:
  changeset = self.vcs.new_staging_changeset()
  modified_by = users.TitanUser('*****@*****.**')
  files.File('/foo', changeset=changeset).write(
      'foo3', modified_by=modified_by)  # re-create
  changeset.finalize_associated_files()
  self.vcs.commit(changeset)
def testCommit(self):
  """Verifies the full commit flow from a staged to a submitted changeset."""
  test_user = users.TitanUser('*****@*****.**')
  changeset = self.vcs.new_staging_changeset(created_by=test_user)
  # Shouldn't be able to submit changesets with no changed files:
  self.assertRaises(versions.CommitError, self.vcs.commit, changeset,
                    force=True)
  # Verify that the auto_current_user_add property is overwritten.
  self.assertEqual('*****@*****.**', str(changeset.created_by))
  # Before a changeset is committed, its associated files must be finalized
  # to indicate that the object's files can be trusted for strong
  # consistency.
  files.File('/foo', changeset=changeset).write('')
  self.assertRaises(versions.ChangesetError, self.vcs.commit, changeset)
  changeset.finalize_associated_files()
  final_changeset = self.vcs.commit(changeset)
  # When a changeset is committed, a new changeset is created (so that
  # changes are always sequential) with a created time. The old changeset
  # is marked as deleted by submit.
  staged_changeset = versions.Changeset(1)
  self.assertEqual(CHANGESET_DELETED_BY_SUBMIT, staged_changeset.status)
  self.assertEqual(CHANGESET_SUBMITTED, final_changeset.status)
  # Also, the changesets are linked to each other:
  self.assertEqual(1, final_changeset.linked_changeset_num)
  self.assertEqual(2, staged_changeset.linked_changeset_num)
  self.assertEqual(versions.Changeset(1), final_changeset.linked_changeset)
  self.assertEqual(versions.Changeset(2), staged_changeset.linked_changeset)
  # Verify base_path properties also:
  self.assertEqual('/_titan/ver/2', final_changeset.base_path)
  self.assertEqual('/_titan/ver/1',
                   final_changeset.linked_changeset_base_path)
  # Verify that the auto_current_user_add property is overwritten in the
  # final_changeset because it was overwritten in the staged_changeset.
  self.assertEqual('*****@*****.**', str(final_changeset.created_by))
  # After commit(), files in a changeset cannot be modified.
  titan_file = files.File('/foo', changeset=changeset)
  self.assertRaises(versions.ChangesetError, titan_file.write, '')
  self.assertRaises(versions.ChangesetError, titan_file.delete)
def testActivityId(self):
  """An activity created with a user gets a generated activity id."""
  # Ensure an activity id is generated with a user.
  acting_user = users.TitanUser('*****@*****.**')
  new_activity = activities.Activity('keys.go.here', user=acting_user)
  self.assertTrue(new_activity.activity_id)
def testActivity(self):
  """Verifies Activity stores its key, user, and meta as given."""
  user = users.TitanUser('*****@*****.**')
  activity = activities.Activity('key', user=user, meta='meta')
  # assertEquals is a deprecated alias; use assertEqual.
  self.assertEqual('key', activity.key)
  self.assertEqual(user, activity.user)
  self.assertEqual('meta', activity.meta)
def testFilesList(self):
  """Exercises files.Files.list: recursion, depth, limit, filters, errors."""
  # Create files for testing.
  root_level = files.Files(['/index.html', '/qux'])
  first_level = files.Files(['/foo/bar'])
  second_level = files.Files([
      '/foo/bar/baz',
      '/foo/bar/baz.html',
      '/foo/bar/baz.txt',
  ])
  root_and_first_levels = files.Files.merge(root_level, first_level)
  first_and_second_levels = files.Files.merge(first_level, second_level)
  # files.Files.update().
  all_files = files.Files([])
  all_files.update(root_level)
  all_files.update(first_level)
  all_files.update(second_level)
  self.assertEqual(6, len(all_files))
  # Test __eq__ (don't use assertEqual).
  self.assertTrue(files.Files(['/a', '/b']) == files.Files(['/a', '/b']))
  self.assertFalse(files.Files(['/a', '/b']) == files.Files(['/a']))
  # Persist all the fixture files so list() can find them.
  for titan_file in all_files.itervalues():
    titan_file.write('')
  # Empty.
  self.assertSameObjects(files.Files(), files.Files.list('/fake/path'))
  self.assertSameObjects(files.Files([]), files.Files.list('/fake/path'))
  # From root.
  self.assertSameObjects(root_level, files.Files.list('/'))
  titan_files = files.Files.list('/', recursive=True)
  self.assertSameObjects(all_files, titan_files)
  # From first level dir.
  self.assertSameObjects(first_level, files.Files.list('/foo'))
  self.assertSameObjects(first_level, files.Files.list('/foo/'))
  titan_files = files.Files.list('/foo', recursive=True)
  self.assertSameObjects(first_and_second_levels, titan_files)
  # From second level dir.
  self.assertSameObjects(second_level, files.Files.list('/foo/bar'))
  titan_files = files.Files.list('/foo/bar', recursive=True)
  self.assertSameObjects(second_level, titan_files)
  # Limit recursion depth.
  titan_files = files.Files.list('/', recursive=True, depth=1)
  self.assertSameObjects(root_and_first_levels, titan_files)
  titan_files = files.Files.list('/', recursive=True, depth=2)
  self.assertSameObjects(all_files, titan_files)
  titan_files = files.Files.list('/foo/', recursive=True, depth=1)
  self.assertSameObjects(first_and_second_levels, titan_files)
  # Limit the number of files returned.
  titan_files = files.Files.list('/foo', recursive=True, limit=1)
  self.assertEqual(1, len(titan_files))
  # Support trailing slashes.
  self.assertSameObjects(second_level, files.Files.list('/foo/bar/'))
  titan_files = files.Files.list('/foo/bar/', recursive=True)
  self.assertSameObjects(second_level, titan_files)
  # Custom filters:
  files.File('/a/foo').write('', meta={'color': 'red', 'count': 1})
  files.File('/a/bar/qux').write('', meta={'color': 'red', 'count': 2})
  files.File('/a/baz').write('', meta={'color': 'blue', 'count': 3})
  # Single filter:
  filters = [files.FileProperty('color') == 'red']
  titan_files = files.Files.list('/a', filters=filters)
  self.assertSameObjects(['/a/foo'], titan_files)
  # Multiple filters:
  filters = [
      files.FileProperty('color') == 'blue',
      files.FileProperty('count') == 3,
  ]
  titan_files = files.Files.list('/', recursive=True, filters=filters)
  self.assertEqual(files.Files(['/a/baz']), titan_files)
  # Recursive:
  filters = [files.FileProperty('color') == 'red']
  titan_files = files.Files.list('/', recursive=True, filters=filters)
  self.assertEqual(files.Files(['/a/foo', '/a/bar/qux']), titan_files)
  # Non-meta property:
  user = users.TitanUser('*****@*****.**')
  filters = [
      files.FileProperty('created_by') == str(user),
      files.FileProperty('count') == 2,
  ]
  titan_files = files.Files.list('/a/', recursive=True, filters=filters)
  self.assertEqual(files.Files(['/a/bar/qux']), titan_files)
  # Error handling.
  self.assertRaises(ValueError, files.Files.list, '')
  self.assertRaises(ValueError, files.Files.list, '//')
  self.assertRaises(ValueError, files.Files.list, '/..')
  self.assertRaises(ValueError, files.Files.list, '/', recursive=True,
                    depth=0)
  self.assertRaises(ValueError, files.Files.list, '/', recursive=False,
                    depth=1)
def testFile(self):
  """Exercises the files.File API: lazy loading, CRUD, properties, errors."""
  meta = {'color': 'blue', 'flag': False}
  titan_file = files.File('/foo/bar.html')
  titan_file.write('Test', meta=meta)
  # Init with path only, verify lazy-loading properties.
  titan_file = files.File('/foo/bar.html')
  self.assertFalse(titan_file.is_loaded)
  self.assertIsNone(titan_file._file_ent)
  # Accessing any entity-backed property triggers the lazy load.
  _ = titan_file.mime_type
  self.assertNotEqual(None, titan_file._file_ent)
  self.assertTrue(titan_file.is_loaded)
  titan_file.unload()
  self.assertFalse(titan_file.is_loaded)
  self.assertIsNone(titan_file._file_ent)
  # Init with a _TitanFile entity.
  file_ent = files._TitanFile.get_by_id('/foo/bar.html')
  titan_file = files.File('/foo/bar.html', _file_ent=file_ent)
  self.assertEqual('/foo/bar.html', titan_file.path)
  self.assertEqual('bar.html', titan_file.name)
  self.assertEqual('bar', titan_file.name_clean)
  self.assertEqual('.html', titan_file.extension)
  self.assertTrue(titan_file.is_loaded)
  self.assertIsNotNone(titan_file._file_ent)
  # write().
  self.assertEqual(titan_file.content, 'Test')
  titan_file.write('New content')
  self.assertEqual(titan_file.content, 'New content')
  titan_file.write('')
  self.assertEqual(titan_file.content, '')
  # Check meta data.
  self.assertEqual('blue', titan_file.meta.color)
  self.assertEqual(False, titan_file.meta.flag)
  # delete().
  self.assertTrue(titan_file.exists)
  titan_file.delete()
  self.assertFalse(titan_file.exists)
  titan_file.write(content='Test', meta=meta)
  titan_file.delete()
  self.assertFalse(titan_file.exists)
  # __hash__(): path-equal files hash equal; namespace is part of identity.
  self.assertEqual(hash(files.File('/foo')), hash(files.File('/foo')))
  self.assertNotEqual(hash(files.File('/foo')), hash(files.File('/bar')))
  self.assertNotEqual(hash(files.File('/foo')),
                      hash(files.File('/foo', namespace='aaa')))
  # serialize().
  titan_file = files.File('/foo/bar/baz').write('', meta=meta)
  expected_data = {
      'path': '/foo/bar/baz',
      'real_path': '/foo/bar/baz',
      'name': 'baz',
      'paths': ['/', '/foo', '/foo/bar'],
      'mime_type': u'application/octet-stream',
      'created': titan_file.created,
      'modified': titan_file.modified,
      'content': '',
      'blob': None,
      'created_by': '*****@*****.**',
      'modified_by': '*****@*****.**',
      'meta': {
          'color': 'blue',
          'flag': False,
      },
      'size': 0,
      'md5_hash': hashlib.md5('').hexdigest(),
  }
  self.assertEqual(expected_data,
                   files.File('/foo/bar/baz').serialize(full=True))
  # Properties: name, name_clean, extension, paths, mime_type, created,
  # modified, blob, created_by, modified_by, and size.
  titan_file = files.File('/foo/bar/baz.html')
  self.assertEqual('baz.html', titan_file.name)
  self.assertEqual('baz', titan_file.name_clean)
  self.assertEqual('.html', titan_file.extension)
  # Check bool handling: a non-existent file is falsy, an existing one truthy.
  self.assertFalse(titan_file)
  titan_file.write('')
  self.assertTrue(titan_file)
  self.assertEqual(['/', '/foo', '/foo/bar'], titan_file.paths)
  self.assertEqual('text/html', titan_file.mime_type)
  self.assertTrue(isinstance(titan_file.created, datetime.datetime))
  self.assertTrue(isinstance(titan_file.modified, datetime.datetime))
  self.assertIsNone(titan_file.blob)
  self.assertEqual(users.TitanUser('*****@*****.**'),
                   titan_file.created_by)
  self.assertEqual(users.TitanUser('*****@*****.**'),
                   titan_file.modified_by)
  # Size:
  titan_file.write('foo')
  self.assertEqual(3, titan_file.size)
  titan_file.write(u'f♥♥')
  # "size" should represent the number of bytes, not the number of
  # characters. 'f♥♥' == 'f\xe2\x99\xa5\xe2\x99\xa5' == 1 + 3 + 3 == 7
  self.assertEqual(7, titan_file.size)
  # "size" should use blob size if present:
  titan_file.write(LARGE_FILE_CONTENT)
  self.assertEqual(1 << 21, titan_file.size)
  # read() and content property.
  self.assertEqual(titan_file.content, titan_file.read())
  # close().
  self.assertIsNone(titan_file.close())
  # Error handling: init with non-existent path.
  titan_file = files.File('/foo/fake.html')
  self.assertRaises(files.BadFileError, lambda: titan_file.paths)
  self.assertRaises(files.BadFileError, lambda: titan_file.content)
  self.assertRaises(files.BadFileError, titan_file.delete)
  self.assertRaises(files.BadFileError, titan_file.serialize)
  # Bad path arguments:
  self.assertRaises(ValueError, files.File, None)
  self.assertRaises(ValueError, files.File, '')
  self.assertRaises(ValueError, files.File, 'bar.html')
  self.assertRaises(ValueError, files.File, '/a/b/')
  self.assertRaises(ValueError, files.File, '/a//b')
  self.assertRaises(ValueError, files.File, '..')
  self.assertRaises(ValueError, files.File, '/a/../b')
  self.assertRaises(ValueError, files.File, '/')
def testWrite(self):
  """Exercises files.File.write: entity fields, blobs, encodings, errors.

  Fixes two latent test bugs: the "file creation codepath" checks for
  created/modified and created_by/modified_by asserted against the stale
  actual_file from the *previous* write (the result of delete().write(...)
  was discarded), so the re-creation path was never actually verified.
  Also removes the dead original_expected_file deepcopy/restore stores,
  which were never read after being assigned.
  """
  expected_file = files._TitanFile(
      id='/foo/bar.html',
      name='bar.html',
      content='Test',
      dir_path='/foo',
      paths=[u'/', u'/foo'],
      depth=1,
      mime_type=u'text/html',
      created_by=users.TitanUser('*****@*****.**'),
      modified_by=users.TitanUser('*****@*****.**'),
      # Arbitrary meta data for expando:
      color=u'blue',
      flag=False,
      md5_hash=hashlib.md5('Test').hexdigest(),
  )
  meta = {'color': 'blue', 'flag': False}
  new_meta = {'color': 'blue', 'flag': True}
  # Timestamps vary per run, so they are ignored in entity comparisons.
  dates = ['modified', 'created']

  # Synchronous write of a new file.
  actual_file = files.File('/foo/bar.html').write('Test', meta=meta)
  self.assertNdbEntityEqual(expected_file, actual_file._file, ignore=dates)
  self.assertNotEqual(None, actual_file.modified, 'modified is not being set')

  # Synchronous update without changes.
  actual_file = files.File('/foo/bar.html').write(meta=meta)
  self.assertNdbEntityEqual(expected_file, actual_file._file, ignore=dates)

  # Synchronous update with changes.
  old_modified = actual_file.modified
  actual_file = files.File('/foo/bar.html')
  actual_file.write('New content', meta=new_meta, mime_type='fake/type')
  expected_file.content = 'New content'
  expected_file.md5_hash = hashlib.md5('New content').hexdigest()
  expected_file.flag = True
  expected_file.mime_type = 'fake/type'
  self.assertNdbEntityEqual(expected_file, actual_file._file, ignore=dates)
  self.assertNotEqual(old_modified, actual_file.modified)

  # Allow writing blank files.
  actual_file = files.File('/foo/bar.html').write('')
  self.assertEqual(actual_file.content, '')

  # Allow overwriting mime_type and meta without touching content.
  files.File('/foo/bar.html').write(content='Test')
  actual_file = files.File('/foo/bar.html').write(mime_type='fake/mimetype')
  self.assertEqual('fake/mimetype', actual_file.mime_type)
  self.assertEqual('Test', actual_file.content)
  actual_file = files.File('/foo/bar.html').write(meta=new_meta)
  self.assertEqual(True, actual_file.meta.flag)
  self.assertEqual('Test', actual_file.content)

  # Allow overwriting created and modified without touching content.
  files.File('/foo/bar.html').write(content='Test')
  now = datetime.datetime.now() + datetime.timedelta(days=1)
  actual_file = files.File('/foo/bar.html').write(created=now, modified=now)
  self.assertEqual(now, actual_file.created)
  self.assertEqual(now, actual_file.modified)
  # Verify the same behavior for the file creation codepath.
  # Bug fix: capture the re-created file; previously these asserts ran
  # against the stale actual_file from the write above.
  actual_file = files.File('/foo/bar.html').delete().write(
      'Test', created=now, modified=now)
  self.assertEqual(now, actual_file.created)
  self.assertEqual(now, actual_file.modified)
  # Error handling.
  self.assertRaises(ValueError, files.File('/a').write, '', created='foo')
  self.assertRaises(ValueError, files.File('/a').write, '', modified='foo')

  # Allow overwriting created_by and modified_by without touching content.
  files.File('/foo/bar.html').write(content='Test')
  user = users.TitanUser(
      '*****@*****.**')  # Not the current logged in user.
  actual_file = files.File('/foo/bar.html').write(created_by=user,
                                                  modified_by=user)
  self.assertEqual(user, actual_file.created_by)
  self.assertEqual(user, actual_file.modified_by)
  # Verify the same behavior for the file creation codepath.
  # Bug fix: capture the re-created file (see above).
  actual_file = files.File('/foo/bar.html').delete().write(
      'Test', created_by=user, modified_by=user)
  self.assertEqual(user, actual_file.created_by)
  self.assertEqual(user, actual_file.modified_by)
  # Error handling.
  self.assertRaises(ValueError, files.File('/a').write, '', created_by='foo')
  self.assertRaises(ValueError, files.File('/a').write, '', modified_by='foo')

  # Cleanup.
  files.File('/foo/bar.html').delete()

  # write large content to blobstore.
  titan_file = files.File('/foo/bar.html').write(content=LARGE_FILE_CONTENT)
  blob_key = titan_file.blob.key()
  self.assertTrue(blob_key)
  self.assertEqual(LARGE_FILE_CONTENT, titan_file.content)
  # Large content lives in the blob, not the entity.
  self.assertIsNone(titan_file._file_ent.content)
  self.assertEqual(LARGE_FILE_CONTENT, files.File('/foo/bar.html').content)
  self.assertEqual(
      hashlib.md5(LARGE_FILE_CONTENT).hexdigest(), titan_file.md5_hash)

  # De-duping check: verify the blob key doesn't change if the content
  # doesn't change.
  old_blob_key = blob_key
  titan_file = files.File('/foo/bar.html').write(content=LARGE_FILE_CONTENT)
  blob_key = titan_file.blob.key()
  self.assertEqual(old_blob_key, blob_key)
  self.assertEqual(LARGE_FILE_CONTENT, titan_file.content)
  self.assertIsNone(titan_file._file_ent.content)
  self.assertEqual(LARGE_FILE_CONTENT, files.File('/foo/bar.html').content)
  self.stubs.SmartUnsetAll()

  # write with a blob key and encoding; verify proper decoding.
  encoded_foo = u'f♥♥'.encode('utf-8')
  blob_key = utils.write_to_blobstore(encoded_foo)
  titan_file = files.File('/foo/bar.html')
  # Verify that without encoding, the encoded bytestring is returned.
  titan_file.write(blob=blob_key)
  self.assertEqual(encoded_foo, titan_file.content)
  # Verify that with encoding, a unicode string is returned.
  titan_file.write(blob=blob_key, encoding='utf-8')
  self.assertEqual(u'f♥♥', titan_file.content)
  # Argument error handling for mixing encoding and unicode content:
  self.assertRaises(TypeError, titan_file.write, content=u'Test',
                    encoding='utf-8')

  # Make sure the blob is deleted with the file:
  titan_file.delete()
  self.assertIsNone(blobstore.get(blob_key))
  self.assertRaises(files.BadFileError, lambda: titan_file.blob)

  # Make sure the blob is deleted if the file gets smaller:
  titan_file = files.File('/foo/bar.html').write(content=LARGE_FILE_CONTENT)
  blob_key = titan_file.blob.key()
  titan_file.write(content='Test')
  self.assertIsNone(blobstore.get(blob_key))
  # Test the current object and a new instance:
  self.assertEqual('Test', titan_file.content)
  self.assertEqual('Test', files.File('/foo/bar.html').content)
  self.assertIsNone(titan_file.blob)
  self.assertIsNone(files.File('/foo/bar.html').blob)

  # write with a BlobKey:
  titan_file = files.File('/foo/bar.html').write(blob=self.blob_key)
  blob_content = self.blob_reader.read()
  # Test the current object and a new instance:
  self.assertEqual(blob_content, files.File('/foo/bar.html').content)
  self.assertEqual(blob_content, titan_file.content)
  self.assertEqual(
      hashlib.md5('Blobstore!').hexdigest(), titan_file.md5_hash)

  # Cleanup.
  files.File('/foo/bar.html').delete()

  # Error handling:
  # Updating mime_type or meta when entity doesn't exist.
  titan_file = files.File('/fake/file')
  self.assertRaises(files.BadFileError, titan_file.write, meta=meta)
  self.assertRaises(files.BadFileError, titan_file.write,
                    mime_type='fake/mimetype')
  # Bad arguments:
  self.assertRaises(TypeError, titan_file.write)
  self.assertRaises(TypeError, titan_file.write, content=None, blob=None)
  self.assertRaises(TypeError, titan_file.write, content='Test',
                    blob=self.blob_key)
  self.assertRaises(TypeError, titan_file.write, encoding='utf-8')

  # There are some reserved words that cannot be used in meta properties.
  invalid_meta_keys = [
      # Titan reserved:
      'name', 'path', 'dir_path', 'paths', 'depth', 'mime_type', 'encoding',
      'created', 'modified', 'content', 'blob', 'blobs', 'created_by',
      'modified_by', 'md5_hash',
      # NDB reserved:
      'key', 'app', 'id', 'parent', 'namespace', 'projection',
  ]
  for key in invalid_meta_keys:
    try:
      titan_file.write(content='', meta={key: ''})
    except files.InvalidMetaError:
      pass
    else:
      self.fail('Invalid meta key should have failed: {!r}'.format(key))