Code example #1
File: dirs_test.py Project: dindinet/titan
    def testNamespaces(self):
        files.register_file_mixins([dirs.DirManagerMixin])

        self.assertIsNone(dirs.Dir('/').namespace)
        self.assertEqual('aaa', dirs.Dir('/', namespace='aaa').namespace)

        files.File('/a/a/foo', namespace='aaa').write('')
        files.File('/z/z/foo').write('')
        self.assertEqual(['/a'], dirs.Dirs.list('/', namespace='aaa').keys())
        self.assertEqual(['/z'], dirs.Dirs.list('/').keys())
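Note: the assertions above show that a namespace partitions the entire file tree, so the same path can hold independent content per namespace. A minimal sketch restating that behavior (illustrative only, using the same files module as the test):

files.File('/a/a/foo', namespace='aaa').write('in aaa')
files.File('/a/a/foo').write('in the default namespace')
assert files.File('/a/a/foo', namespace='aaa').content == 'in aaa'
assert files.File('/a/a/foo').content == 'in the default namespace'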
Code example #2
def _write_microversion(changeset, file_kwargs, method_kwargs, email, action):
  """Task to enqueue for microversioning a file action."""
  # Set the _internal flag for all microversion operations.
  file_kwargs['_internal'] = True
  file_kwargs['changeset'] = changeset
  method_kwargs['_delete_old_blob'] = False
  if email:
    method_kwargs['created_by'] = users.TitanUser(email)
    method_kwargs['modified_by'] = users.TitanUser(email)
  if action == _Actions.WRITE:
    files.File(**file_kwargs).write(**method_kwargs)
  elif action == _Actions.DELETE:
    files.File(**file_kwargs).delete(**method_kwargs)
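_write_microversion is documented as a task to enqueue, which on App Engine typically means the deferred library. A sketch of how such a task might be enqueued (an assumption; the actual enqueueing code and queue name are not shown in these excerpts):

from google.appengine.ext import deferred

# Hypothetical call site; the argument values are placeholders.
deferred.defer(_write_microversion, changeset, file_kwargs, method_kwargs,
               email, _Actions.WRITE, _queue='microversions')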
Code example #3
  def testStronglyConsistentCommits(self):
    # Microversions uses finalize_associated_files, so the commit() path should
    # use the always-strongly-consistent get_files() rather than a query.
    # Verify this behavior by simulating a never-consistent HR datastore.
    policy = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=0)
    self.testbed.init_datastore_v3_stub(consistency_policy=policy)

    files.File('/foo').write('foo')

    # Also, test process_data_with_backoff while we're here.
    results = microversions.process_data_with_backoff(timeout_seconds=5)
    final_changeset = results[0][0]['changeset'].linked_changeset
    self.assertEqual(2, final_changeset.num)
    titan_file = files.File('/foo', changeset=final_changeset)
    self.assertEqual('foo', titan_file.content)
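For context, PseudoRandomHRConsistencyPolicy(probability=0) makes the datastore stub never apply writes before global queries run, so a query-based commit path would fail while gets by key still succeed. A standalone illustration of the distinction this test relies on (a sketch, assuming any ndb model under the same testbed):

from google.appengine.ext import ndb

class Marker(ndb.Model):
    pass

key = Marker().put()
assert key.get() is not None        # Gets by key are strongly consistent.
assert Marker.query().count() == 0  # Global queries see no unapplied writes.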
Code example #4
File: activities_test.py Project: dindinet/titan
    def testLog(self):
        activity = activities.log('titan.test')
        activities.process_activity_loggers()
        self.RunDeferredTasks(queue_name=activities.ACTIVITY_QUEUE)
        # Verify file was written.
        self.assertTrue(
            files.File(activity.activity_id, _internal=True).exists)
Code example #5
File: handlers.py Project: dindinet/titan
    def get(self):
        """GET handler."""
        path = self.request.get('path')
        try:
            file_kwargs, _ = _GetExtraParams(self.request.POST)
        except ValueError:
            self.error(400)
            return
        try:
            titan_file = files.File(path, **file_kwargs)
        except (TypeError, ValueError):
            self.error(400)
            _MaybeLogException('Bad request:')
            return
        if not titan_file.exists:
            self.error(404)
            return

        mime_type = self.request.get('mime_type') or titan_file.mime_type
        self.response.headers['Content-Type'] = str(mime_type)
        self.response.headers['Content-Disposition'] = (
            'inline; filename=%s' % titan_file.name.encode('ascii', 'replace'))

        if titan_file.blob:
            blob_key = titan_file.blob
            self.send_blob(blob_key, content_type=str(mime_type))
        else:
            self.response.out.write(titan_file.content)
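_GetExtraParams is not shown in these excerpts; from its call sites it returns a (file_kwargs, method_kwargs) pair and raises ValueError on malformed input. A purely hypothetical sketch of that contract (the parsing rules, including the param key prefixes, are assumptions):

import json

def _GetExtraParams(params):
    """Split request params into File() kwargs and method kwargs.

    Raises:
      ValueError: If a param value cannot be decoded as JSON.
    """
    file_kwargs = {}
    method_kwargs = {}
    for key, value in params.items():
        if key.startswith('file_params.'):
            file_kwargs[key[len('file_params.'):]] = json.loads(value)
        elif key.startswith('method_params.'):
            method_kwargs[key[len('method_params.'):]] = json.loads(value)
    return file_kwargs, method_kwargs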
Code example #6
    def testRemoteVersionControlService(self):
        remote_vcs = self.remote_vcs_factory.make_remote_vcs()
        remote_changeset = remote_vcs.new_staging_changeset()

        # Error handling: writing without a changeset.
        remote_file = self.remote_file_factory.make_remote_file('/a/foo')
        # TODO(user): build more specific error handling for remote objects.
        self.assertRaises(Exception, remote_file.write, 'foo!')

        remote_file = self.remote_file_factory.make_remote_file(
            '/a/foo', changeset=remote_changeset.num)
        remote_file.write('foo!')
        remote_changeset.associate_file(remote_file)

        # Error handling: changeset's files are not finalized.
        self.assertRaises(versions_client.RemoteChangesetError,
                          remote_vcs.commit, remote_changeset)
        self.assertRaises(versions_client.RemoteChangesetError,
                          remote_changeset.get_files)

        remote_changeset.finalize_associated_files()
        remote_vcs.commit(remote_changeset)
        self.assertEqual(['/a/foo'], remote_changeset.get_files().keys())

        # Verify actual state.
        actual_file = files.File('/a/foo')
        self.assertEqual('foo!', actual_file.content)
        self.assertTrue(actual_file.changeset)

        # Test commit(force=True).
        remote_changeset = remote_vcs.new_staging_changeset()
        self.assertRaises(Exception,
                          remote_vcs.commit,
                          remote_changeset,
                          force=True)
        remote_file = self.remote_file_factory.make_remote_file(
            '/a/foo', changeset=remote_changeset.num)
        remote_file.write('bar!')
        # Don't call associate_file or finalize_associated_files.
        remote_vcs.commit(remote_changeset, force=True)
        actual_file = files.File('/a/foo')
        self.assertEqual('bar!', actual_file.content)
        self.assertTrue(actual_file.changeset)

        # Test make_remote_changeset().
        num = actual_file.changeset.num
        remote_changeset = self.remote_vcs_factory.make_remote_changeset(num)
Code example #7
File: channel.py Project: dindinet/titan
    def __init__(self, key):
        self.key = key
        self._internal_client_ids = set()

        # Use a hashed version of the user key for the filename.
        self._internal_key = hashlib.md5(key).hexdigest()
        self._file_path = '%s%s' % (CHANNELS_DIR, self._internal_key)
        self._file = files.File(self._file_path, _internal=True)
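The md5 step matters because the user key is arbitrary text; hashing yields a fixed-length, path-safe filename. A quick illustration (not part of the original file):

import hashlib

key = 'user@example.com'                     # Arbitrary, not path-safe.
internal_key = hashlib.md5(key).hexdigest()  # 32 hex chars, path-safe.
assert len(internal_key) == 32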
Code example #8
  def testContentAndBlobsHandling(self):
    files.File('/foo').write('foo')
    files.File('/foo').delete()
    # This will immediately go to blobstore, then the deferred task will
    # have a "blob" argument:
    files.File('/foo').write(LARGE_FILE_CONTENT)
    process_microversions()
    # After tasks run, verify correct content was saved to the versioned paths:
    file_versions = self.vcs.get_file_versions('/foo')

    # In reverse-chronological order:
    titan_file = files.File('/foo', changeset=file_versions[0].changeset)
    self.assertEqual(LARGE_FILE_CONTENT, titan_file.content)
    self.assertEqual('*****@*****.**', str(titan_file.created_by))
    self.assertEqual('*****@*****.**', str(titan_file.modified_by))
    # Whitebox test: created_by and modified_by might be coming from the
    # backwards-compatibility code in versions. Verify they are actually
    # stored correctly.
    self.assertEqual('*****@*****.**', str(titan_file._file.created_by))
    self.assertEqual('*****@*****.**', str(titan_file._file.modified_by))

    titan_file = files.File('/foo', changeset=file_versions[1].changeset)
    self.assertFalse(titan_file.exists)

    titan_file = files.File('/foo', changeset=file_versions[2].changeset)
    self.assertEqual('foo', titan_file.content)

    self.assertEqual(versions.FileStatus.created, file_versions[0].status)
    self.assertEqual(versions.FileStatus.deleted, file_versions[1].status)
    self.assertEqual(versions.FileStatus.created, file_versions[2].status)

    # Verify that this doesn't error; the behavior should be the same as above.
    files.File('/foo').write(EXPLODING_FILE_CONTENT)
    process_microversions()
Code example #9
    def testRemoteFile(self):
        remote_file = self.remote_file_factory.make_remote_file('/a/foo')
        actual_file = files.File('/a/foo')

        # Error handling for non-existent files.
        self.assertRaises(files_client.BadRemoteFileError,
                          lambda: remote_file.name)
        self.assertEqual(actual_file.exists, remote_file.exists)

        actual_file.write('foo!', meta={'flag': False})

        # Test properties.
        self.assertEqual(actual_file.name, remote_file.name)
        self.assertEqual(actual_file.path, remote_file.path)
        self.assertEqual(actual_file.real_path, remote_file.real_path)
        self.assertEqual(actual_file.paths, remote_file.paths)
        self.assertEqual(actual_file.mime_type, remote_file.mime_type)
        self.assertEqual(actual_file.created, remote_file.created)
        self.assertEqual(actual_file.modified, remote_file.modified)
        self.assertEqual(actual_file.size, remote_file.size)
        self.assertEqual(actual_file.content, remote_file.content)
        self.assertEqual(actual_file.exists, remote_file.exists)
        # Different remote API (string of blob_key rather than BlobKey):
        self.assertEqual(actual_file.serialize()['blob'], remote_file.blob)
        # Different remote API (string of email addresses rather than User objects).
        self.assertEqual(actual_file.serialize()['created_by'],
                         remote_file.created_by)
        self.assertEqual(actual_file.serialize()['modified_by'],
                         remote_file.modified_by)
        self.assertEqual(actual_file.md5_hash, remote_file.md5_hash)
        self.assertEqual(actual_file.meta, remote_file.meta)

        # Test write().
        remote_file.write(content='bar')
        actual_file = files.File('/a/foo')
        self.assertEqual('bar', remote_file.content)
        self.assertEqual('bar', actual_file.content)

        # Test delete().
        remote_file.delete()
        actual_file = files.File('/a/foo')
        self.assertRaises(files_client.BadRemoteFileError,
                          lambda: remote_file.name)
        self.assertFalse(actual_file.exists)
        self.assertFalse(remote_file.exists)
Code example #10
File: tasks.py Project: dindinet/titan
    def _file(self):
        if self._internal_file is None:
            # /_titan/tasks/<group>/<task_manager_key>/tasks/<task_key_hash>.json
            filename = utils.safe_join(_ROOT_DIR_PATH,
                                       self._task_manager.group,
                                       self._task_manager.key, 'tasks',
                                       self._internal_key + '.json')
            self._internal_file = files.File(filename, _internal=True)
        return self._internal_file
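utils.safe_join is not shown in these excerpts; presumably it joins path segments while refusing anything that would escape the root directory. A hypothetical sketch of such a guard (an assumption, not Titan's actual implementation):

import posixpath

def safe_join(base, *segments):
    # Join and normalize, then verify the result stays under base.
    path = posixpath.normpath(posixpath.join(base, *segments))
    if path != base and not path.startswith(base.rstrip('/') + '/'):
        raise ValueError('Path escapes base directory: %r' % path)
    return path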
Code example #11
File: activities_test.py Project: dindinet/titan
    def testProcessActivityLoggers(self):
        user = users.TitanUser('*****@*****.**')
        activity = activities.Activity('key', user=user, meta='meta')
        activity_logger = activities.FileActivityLogger(activity)
        activity_logger.store()
        # Ensure that it runs normally.
        activities.process_activity_loggers()
        self.RunDeferredTasks(queue_name=activities.ACTIVITY_QUEUE)
        # Verify file was written.
        self.assertTrue(
            files.File(activity.activity_id, _internal=True).exists)
Code example #12
    def finalize(self):
        """Log the activity in Titan Files."""
        titan_file = files.File(self.activity.activity_id, _internal=True)
        titan_file.write(
            content=protojson.encode_message(self.activity.to_message()),
            meta=self.file_meta,
            created=self.activity.timestamp,
            modified=self.activity.timestamp)

        # Ensure that it gets written first.
        super(FileActivityLogger, self).finalize()
Code example #13
  def testJsonMixin(self):
    titan_file = files.File('/foo/file.json')

    # Verify that invalid JSON raises an error when written.
    self.assertRaises(json_mixin.BadJsonError, titan_file.write, '')
    self.assertRaises(json_mixin.BadJsonError, titan_file.write, '{a:1}')

    # Verify Django templates are not validated as JSON.
    titan_file.write('{% some django tag %}')

    # Verify valid JSON writes successfully.
    titan_file.write(json.dumps({'a': True}))

    # Verify behavior of json property.
    titan_file.write('{"a":"b"}')
    self.assertEqual({'a': 'b'}, titan_file.json)
    titan_file.json['c'] = 'd'
    self.assertEqual({'a': 'b', 'c': 'd'}, titan_file.json)
    titan_file.save()

    # Get json out of the saved file.
    titan_file = files.File('/foo/file.json')
    self.assertEqual({'a': 'b', 'c': 'd'}, titan_file.json)

    # Verify invalid JSON raises an error when accessed using json.
    titan_file = files.File('/foo/file.json')
    titan_file.write('{% some django tag %}')
    self.assertRaises(json_mixin.BadJsonError, lambda: titan_file.json)

    # Verify JSON is not checked for blobs (out of necessity, not correctness).
    titan_file = files.File('/foo/some-blob').write(LARGE_FILE_CONTENT)
    files.File('/foo/file.json').write(blob=titan_file.blob.key())

    # Verify ability to set json on a new file.
    titan_file = files.File('/foo/new_file.json')
    titan_file.json = None
    self.assertIsNone(titan_file.json)
    titan_file.save()
    titan_file.json = {}
    self.assertEqual({}, titan_file.json)
    titan_file.save()
    titan_file.json = {'foo': 'bar'}
    titan_file.save()
    titan_file = files.File('/foo/new_file.json')
    self.assertEqual({'foo': 'bar'}, titan_file.json)

    # Error handling.
    self.assertRaises(
        files.BadFileError, lambda: files.File('/fake.json').json)
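For readers unfamiliar with the mixin mechanism behind register_file_mixins, here is a minimal sketch of a JSON-validating mixin in the same spirit (purely illustrative; this is not the real json_mixin implementation, and the exception class is redefined locally):

import json

class BadJsonError(Exception):
    pass

class JsonValidatingMixin(object):
    """Sketch: validate JSON content on write, skipping Django templates."""

    def write(self, content=None, **kwargs):
        is_template = content is not None and '{%' in content
        if content is not None and not is_template:
            try:
                json.loads(content)
            except ValueError:
                raise BadJsonError('Invalid JSON content: %r' % content)
        return super(JsonValidatingMixin, self).write(content=content, **kwargs)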
Code example #14
File: dirs_test.py Project: dindinet/titan
    def testStripPrefix(self):
        files.register_file_mixins([dirs.DirManagerMixin])
        titan_dir = dirs.Dir('/a/b', strip_prefix='/a')
        self.assertEqual('/b', titan_dir.path)
        titan_dir = dirs.Dir('/a/b', strip_prefix='/a/')
        self.assertEqual('/b', titan_dir.path)
        self.assertFalse(titan_dir.exists)
        files.File('/a/b/foo').write('')
        self.assertTrue(titan_dir.exists)
        titan_dirs = dirs.Dirs.list('/a/', strip_prefix='/a/')
        # Recreate object to strip paths:
        titan_dirs = dirs.Dirs(dirs=titan_dirs.values())
        self.assertEqual('/b', titan_dirs['/b'].path)
Code example #15
 def testApplyPatch(self):
   titan_file = files.File('/foo/file.json')
   titan_file.json = {
       'foo': ['bar', 'baz']
   }
   titan_file.apply_patch([{
       'op': 'add',
       'path': '/foo/1',
       'value': 'qux'
   }])
   expected = {
       'foo': ['bar', 'qux', 'baz']
   }
   self.assertEqual(expected, titan_file.json)
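The patch format above follows RFC 6902 (JSON Patch). The same transformation can be checked standalone with the third-party jsonpatch library, which is plausibly what apply_patch wraps (an assumption):

import jsonpatch  # pip install jsonpatch

doc = {'foo': ['bar', 'baz']}
patch = [{'op': 'add', 'path': '/foo/1', 'value': 'qux'}]
assert jsonpatch.apply_patch(doc, patch) == {'foo': ['bar', 'qux', 'baz']}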
Code example #16
    def testRemoteFiles(self):
        actual_file = files.File('/a/foo')
        actual_file.write('foo!', meta={'flag': False})
        remote_file = self.remote_file_factory.make_remote_file('/a/foo')

        remote_files = self.remote_file_factory.make_remote_files()
        self.assertEqual([], remote_files.keys())
        remote_files = self.remote_file_factory.make_remote_files([])
        self.assertEqual([], remote_files.keys())
        remote_files = self.remote_file_factory.make_remote_files(['/a/foo'])
        self.assertEqual(['/a/foo'], remote_files.keys())
        remote_files = self.remote_file_factory.make_remote_files(
            files=[remote_file])
        self.assertEqual(['/a/foo'], remote_files.keys())

        # Test mapping properties.
        remote_files['/a/bar'] = self.remote_file_factory.make_remote_file(
            '/a/bar')
        self.assertSameElements(['/a/foo', '/a/bar'], remote_files.keys())
        del remote_files['/a/bar']
        self.assertEqual(['/a/foo'], remote_files.keys())
        self.assertIn('/a/foo', remote_files)
        self.assertEqual(1, len(remote_files))
        remote_files.clear()
        self.assertEqual(0, len(remote_files))

        # Test list().
        # Different remote API: list() is not a class method due to authentication.
        remote_files = self.remote_file_factory.make_remote_files()
        remote_files.list('/', recursive=True)
        self.assertEqual(['/a/foo'], remote_files.keys())

        # Test delete().
        remote_files.delete()
        actual_file = files.File('/a/foo')
        self.assertFalse(actual_file.exists)
Code example #17
File: handlers.py Project: dindinet/titan
    def delete(self):
        """DELETE handler."""
        path = self.request.get('path')
        try:
            file_kwargs, method_kwargs = _GetExtraParams(self.request.POST)
        except ValueError:
            self.error(400)
            return
        try:
            files.File(path, **file_kwargs).delete(**method_kwargs)
        except files.BadFileError:
            self.error(404)
        except (TypeError, ValueError):
            self.error(400)
            _MaybeLogException('Bad request:')
Code example #18
    def get_activity(self, activity_id):
        """Query for a stored activity.

        Args:
          activity_id: The full file name of the activity.
        Raises:
          InvalidActivityIdError: When the path is not prefixed correctly.
          ActivityNotFoundError: When the activity is not found in the system.
        Returns:
          The activity matching the activity_id.
        """
        if not activity_id or not activity_id.startswith(ACTIVITY_DIR):
            raise InvalidActivityIdError()
        titan_file = files.File(activity_id, _internal=True)
        if not titan_file.exists:
            raise ActivityNotFoundError()
        return self._loads_activity(titan_file.content)
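A hedged usage sketch for the method above (activity_service and activity_id are hypothetical placeholders):

import logging

try:
    activity = activity_service.get_activity(activity_id)
except InvalidActivityIdError:
    logging.error('Malformed activity id: %r', activity_id)
except ActivityNotFoundError:
    logging.error('No such activity: %r', activity_id)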
Code example #19
File: dirs_test.py Project: dindinet/titan
    def testEndToEnd(self):
        files.register_file_mixins([dirs.DirManagerMixin])

        files.File('/a/b/foo').write('')
        files.File('/a/b/bar').write('')
        files.File('/a/d/foo').write('')

        # List root dir.
        self.assertEqual(dirs.Dirs(['/a']), dirs.Dirs.list('/'))
        # List /a.
        self.assertEqual(dirs.Dirs(['/a/b', '/a/d']), dirs.Dirs.list('/a/'))
        # List /a/b/.
        self.assertEqual(dirs.Dirs([]), dirs.Dirs.list('/a/b/'))
        # List /fake/dir.
        self.assertEqual(dirs.Dirs([]), dirs.Dirs.list('/fake/dir'))

        # Test deleting directories.
        self.assertTrue(dirs.Dir('/a/d').exists)
        files.File('/a/d/foo').delete()
        self.assertFalse(dirs.Dir('/a/d').exists)
        # List /a.
        self.assertEqual(dirs.Dirs(['/a/b']), dirs.Dirs.list('/a/'))
        self.assertEqual(dirs.Dirs(['/a']), dirs.Dirs.list('/'))
        # Delete the remaining files and list again.
        files.File('/a/b/foo').delete()
        files.File('/a/b/bar').delete()
        self.assertEqual(dirs.Dirs([]), dirs.Dirs.list('/'))

        # Verify behavior of set_meta and meta.
        self.assertRaises(dirs.InvalidDirectoryError,
                          lambda: dirs.Dir('/a/b').set_meta({'flag': True}))
        self.assertRaises(dirs.InvalidMetaError,
                          lambda: dirs.Dir('/a/b').set_meta({'name': 'foo'}))

        files.File('/a/b/foo').write('')
        # Also, weakly test execution path of process_windows_with_backoff.
        dir_task_consumer = dirs.DirTaskConsumer()
        dir_task_consumer.process_windows_with_backoff(runtime=2)
        titan_dir = dirs.Dir('/a/b')
        self.assertRaises(AttributeError, lambda: titan_dir.meta.flag)

        titan_dir.set_meta(meta={'flag': True})
        titan_dir = dirs.Dir('/a/b')
        self.assertTrue(titan_dir.meta.flag)

        # Verify properties.
        self.assertEqual('b', titan_dir.name)
        self.assertEqual('/a/b', titan_dir.path)
Code example #20
  def testMicroversions(self):
    # Write.
    files.File('/foo').write('foo', encoding='utf-8')
    self.logout()  # Mimic the cron job.
    results = process_microversions()
    self.login()
    final_changeset = results[0]['changeset'].linked_changeset
    self.assertEqual(2, final_changeset.num)
    titan_file = files.File('/foo', changeset=final_changeset)
    self.assertEqual(u'foo', titan_file.content)

    # The final changeset's created_by should be None, because it's created
    # internally in a cron job and shares multiple user writes.
    self.assertIsNone(final_changeset.created_by)

    # Write with an existing root file (which should be copied to the version).
    files.File('/foo', _no_mixins=True).write('new foo')
    files.File('/foo').write(meta={'color': 'blue'})
    results = process_microversions()
    final_changeset = results[0]['changeset'].linked_changeset
    self.assertEqual(4, final_changeset.num)
    titan_file = files.File('/foo', changeset=final_changeset)
    self.assertEqual('new foo', titan_file.content)
    self.assertEqual('blue', titan_file.meta.color)

    # Delete. Also, this verifies that delete doesn't rely on the presence
    # of the root file.
    files.File('/foo').delete()
    results = process_microversions()
    final_changeset = results[0]['changeset'].linked_changeset
    self.assertEqual(6, final_changeset.num)
    titan_file = files.File('/foo', changeset=final_changeset)
    self.assertFalse(titan_file.exists)

    # Check file versions.
    file_versions = self.vcs.get_file_versions('/foo')
    self.assertEqual(6, file_versions[0].changeset.num)
    self.assertEqual(4, file_versions[1].changeset.num)
    self.assertEqual(2, file_versions[2].changeset.num)
    self.assertEqual(versions.FileStatus.deleted, file_versions[0].status)
    self.assertEqual(versions.FileStatus.edited, file_versions[1].status)
    self.assertEqual(versions.FileStatus.created, file_versions[2].status)
Code example #21
File: handlers.py Project: dindinet/titan
    def post(self):
        """POST handler."""
        path = self.request.get('path')
        # Must use str_POST here to preserve the original encoding of the string.
        content = self.request.str_POST.get('content')
        blob = self.request.get('blob', None)

        try:
            file_kwargs, method_kwargs = _GetExtraParams(self.request.POST)
        except ValueError:
            self.error(400)
            return

        if blob is not None:
            # Convert any string keys to BlobKey instances.
            if isinstance(blob, basestring):
                blob = blobstore.BlobKey(blob)

        meta = self.request.get('meta', None)
        if meta:
            meta = json.loads(meta)
        mime_type = self.request.get('mime_type', None)

        try:
            files.File(path, **file_kwargs).write(content,
                                                  blob=blob,
                                                  mime_type=mime_type,
                                                  meta=meta,
                                                  **method_kwargs)
            self.response.set_status(201)
            # Headers must be byte-strings, not unicode strings.
            # Since this is a URI, follow RFC 3986 and encode it using UTF-8.
            location_header = ('/_titan/file/?path=%s' % path).encode('utf-8')
            self.response.headers['Location'] = location_header
        except files.BadFileError:
            self.error(404)
        except (TypeError, ValueError):
            self.error(400)
            _MaybeLogException('Bad request:')
Code example #22
    def post(self):
        """POST handler."""
        try:
            namespace = self.request.get('namespace', None)
            save_manifest = self.request.get('save_manifest', 'true') == 'true'
            vcs = versions.VersionControlService()
            staging_changeset = versions.Changeset(
                int(self.request.get('changeset')), namespace=namespace)
            force = bool(self.request.get('force', False))
            manifest = self.request.POST.get('manifest', None)
            if (not force and not manifest) or (force and manifest):
                self.error(400)
                logging.error(
                    'Exactly one of "manifest" or "force" params is required')
                return

            # If a client has full knowledge of the files uploaded to a changeset,
            # the "manifest" param may be given to ensure a strongly consistent
            # commit. If given, associate the files with the changeset and finalize it.
            if manifest:
                manifest = json.loads(manifest)
                for path in manifest:
                    titan_file = files.File(path,
                                            changeset=staging_changeset,
                                            namespace=namespace,
                                            _internal=True)
                    staging_changeset.associate_file(titan_file)
                staging_changeset.finalize_associated_files()

            final_changeset = vcs.commit(staging_changeset,
                                         force=force,
                                         save_manifest=save_manifest)
            self.write_json_response(final_changeset)
            self.response.set_status(201)
        except (TypeError, ValueError):
            self.error(400)
            logging.exception('Bad request:')
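The guard above is an exclusive-or: exactly one of "manifest" or "force" must be supplied, since a manifest gives a strongly consistent commit while force skips that bookkeeping. A condensed restatement of the validation (illustrative):

def _exactly_one(force, manifest):
    # Equivalent to the guard above: reject both-or-neither.
    return bool(force) != bool(manifest)

assert not _exactly_one(False, None)      # Neither given: 400.
assert not _exactly_one(True, '["/a"]')   # Both given: 400.
assert _exactly_one(True, None)           # Force only: ok.
assert _exactly_one(False, '["/a"]')      # Manifest only: ok.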
Code example #23
File: handlers.py Project: dindinet/titan
    def get(self):
        """GET handler."""
        path = self.request.get('path')
        full = bool(self.request.get('full'))

        try:
            file_kwargs, _ = _GetExtraParams(self.request.GET)
        except ValueError:
            self.error(400)
            return

        try:
            titan_file = files.File(path, **file_kwargs)
        except (TypeError, ValueError):
            self.error(400)
            _MaybeLogException('Bad request:')
            return
        if not titan_file.exists:
            self.error(404)
            return
        # TODO(user): when full=True, this may fail for files with byte-string
        # content.
        self.write_json_response(titan_file, full=full)
Code example #24
  def testKeepOldBlobs(self):
    # Create a blob and blob_reader for testing.
    filename = blobstore_files.blobstore.create(
        mime_type='application/octet-stream')
    with blobstore_files.open(filename, 'a') as fp:
      fp.write('Blobstore!')
    blobstore_files.finalize(filename)
    blob_key = blobstore_files.blobstore.get_blob_key(filename)

    # Verify that the blob is not deleted when microversioned content resizes.
    files.File('/foo').write(blob=blob_key)
    process_microversions()
    titan_file = files.File('/foo')
    self.assertTrue(titan_file.blob)
    self.assertEqual('Blobstore!', titan_file.content)
    process_microversions()
    # Resize as smaller (shouldn't delete the old blob).
    files.File('/foo').write('foo')
    files.File('/foo').write(blob=blob_key)  # Resize back to large size.
    # Delete file (shouldn't delete the old blob).
    files.File('/foo').delete()
    process_microversions()
    file_versions = self.vcs.get_file_versions('/foo')

    # Deleted file (blob should be None).
    changeset = file_versions[0].changeset
    titan_file = files.File(
        '/foo', changeset=changeset, _allow_deleted_files=True)
    self.assertIsNone(titan_file.blob)

    # Created file (blob key and blob content should still exist).
    changeset = file_versions[-1].changeset
    titan_file = files.File(
        '/foo', changeset=changeset, _allow_deleted_files=True)
    self.assertTrue(titan_file.blob)
    self.assertEqual('Blobstore!', titan_file.content)
Code example #25
File: stats_recorder_test.py Project: dindinet/titan
    def testRecordStats(self):
        # The following numbers should not be over-analyzed if broken, as long
        # as they stay relatively low. Feel free to update if internals change.

        # +2 counters (files/File/load and files/File/load/latency).
        _ = files.File('/foo').exists
        self.assertEqual(2, self.CountLoggers())

        # +6 (internals of write(), plus load counters).
        files.File('/foo').write('bar')
        self.assertEqual(8, self.CountLoggers())

        # +4.
        files.File('/foo').serialize()
        self.assertEqual(12, self.CountLoggers())

        # +12.
        files.File('/foo').copy_to(files.File('/bar'))
        self.assertEqual(24, self.CountLoggers())

        # +4.
        files.File('/foo').delete()
        self.assertEqual(28, self.CountLoggers())
Code example #26
  def testRootTreeHandling(self):
    # Actions should check root tree files, not versioning _FilePointers.
    self.assertFalse(files.File('/foo').exists)
    files.File('/foo', _no_mixins=True).write('')
    self.assertTrue(files.File('/foo').exists)
    files.File('/foo', _no_mixins=True).delete()
    self.assertFalse(files.File('/foo').exists)

    # write() and delete() should modify root tree files and add pull tasks.
    files.File('/foo').write('foo')
    self.assertEqual(1, len(self.taskqueue_stub.get_filtered_tasks()))
    self.assertEqual('foo', files.File('/foo', _no_mixins=True).content)
    files.File('/foo').delete()
    self.assertEqual(2, len(self.taskqueue_stub.get_filtered_tasks()))
    self.assertFalse(files.File('/foo', _no_mixins=True).exists)

    # Verify large RPC deferred task handling.
    files.File('/foo').write(LARGE_FILE_CONTENT)
    self.assertEqual(3, len(self.taskqueue_stub.get_filtered_tasks()))
    self.assertEqual(
        LARGE_FILE_CONTENT, files.File('/foo', _no_mixins=True).content)
Code example #27
  def _data_file(self):
    if self.__data_file is None:
      self.__data_file = files.File('/{}.json'.format(self.key))
    return self.__data_file
Code example #28
File: tasks.py Project: dindinet/titan
    def _file(self):
        if self._internal_file is None:
            self._internal_file = files.File(self._dir_path + '.json',
                                             _internal=True)
        return self._internal_file
Code example #29
File: stats.py Project: dindinet/titan
    def _save_aggregate_data(self, final_aggregate_data):
        """Permanently store aggregate data to Titan Files."""

        # Combine all data before writing to minimize writes to the same file.
        window_files = {}
        for aggregate_data in final_aggregate_data:
            window = aggregate_data['window']
            window_datetime = datetime.datetime.utcfromtimestamp(window)
            for counter_name, counter in aggregate_data['counters'].iteritems():
                path = _make_log_path(window_datetime, counter)
                if path not in window_files:
                    titan_file = files.File(path, _internal=True)
                    content = []
                    if titan_file.exists:
                        content = json.loads(titan_file.content)
                    window_files[path] = {
                        'file': titan_file,
                        'path': path,
                        'content': content,
                        'counter_name': counter_name,
                        'window_datetime': window_datetime,
                    }

                # Add the counter data if it doesn't exist or is different.
                old_content = window_files[path]['content']
                window_files[path]['content'] = []
                window_exists = False
                for old_window, old_value in old_content:
                    # Passed the window without finding it: insert as a new entry.
                    if old_window > window and not window_exists:
                        window_exists = True
                        window_files[path]['content'].append(
                            (window, counter.finalize()))
                    # Same window: if the value changed, merge the old data into the new.
                    if old_window == window:
                        window_exists = True
                        if old_value != counter.finalize():
                            if not counter.overwrite:
                                counter.aggregate(old_value)
                            old_value = counter.finalize()
                    window_files[path]['content'].append(
                        (old_window, old_value))
                if not window_exists:
                    window_files[path]['content'].append(
                        (window, counter.finalize()))

                # Keep the data sorted for update efficiency.
                window_files[path]['content'].sort(key=lambda tup: tup[0])

        # Write the changed window files.
        for file_item in window_files.itervalues():
            # Strip hours/minutes/seconds from date since the datastore can only
            # store datetime objects, but we only need the date itself.
            window_datetime = file_item['window_datetime']
            date = datetime.datetime(window_datetime.year,
                                     window_datetime.month,
                                     window_datetime.day)
            meta = {
                'stats_counter_name': file_item['counter_name'],
                'stats_date': date,
            }
            file_item['file'].write(content=json.dumps(file_item['content']),
                                    meta=meta)
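The merge loop above is easier to follow in isolation. A standalone sketch of the same insert-or-merge over sorted (window, value) pairs (illustrative; the aggregate callback stands in for the counter's aggregate-and-finalize cycle):

def merge_window(content, window, value, aggregate=None):
    """Insert or merge (window, value) into sorted (window, value) pairs."""
    merged = []
    window_exists = False
    for old_window, old_value in content:
        # Passed the insertion point without a match: insert as a new entry.
        if old_window > window and not window_exists:
            merged.append((window, value))
            window_exists = True
        # Same window: merge the old value into the new one.
        if old_window == window:
            window_exists = True
            if old_value != value and aggregate:
                value = aggregate(old_value, value)
            old_value = value
        merged.append((old_window, old_value))
    if not window_exists:
        merged.append((window, value))
    return merged

assert merge_window([(100, 5), (300, 7)], 200, 1) == [
    (100, 5), (200, 1), (300, 7)]
assert merge_window([(100, 5)], 100, 2, aggregate=lambda a, b: a + b) == [
    (100, 7)]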