Example #1
0
    def list(cls, group=DEFAULT_GROUP, limit=None, offset=None):
        """Returns a list of TaskManager objects.

        Args:
          group: The group ID of where to list task managers.
          limit: An integer limit for the list query.
          offset: An integer offset for the list query.
        Returns:
          A list of TaskManager objects in descending chronological order.
        """
        dir_path = utils.safe_join(_ROOT_DIR_PATH, group)
        titan_files = files.Files.list(
            dir_path=dir_path, limit=limit, offset=offset, _internal=True)
        titan_files.load()

        # Newest first: sort ascending by creation time, then flip the list.
        newest_first = sorted(titan_files.itervalues(),
                              key=lambda titan_file: titan_file.created)
        newest_first.reverse()

        # Build TaskManager objects, injecting each preloaded Titan file.
        return [
            TaskManager(key=titan_file.name_clean,
                        group=group,
                        _file=titan_file)
            for titan_file in newest_first]
Example #2
0
  def list(cls, group=DEFAULT_GROUP, limit=None, offset=None):
    """Returns a list of TaskManager objects.

    Args:
      group: The group ID of where to list task managers.
      limit: An integer limit for the list query.
      offset: An integer offset for the list query.
    Returns:
      A list of TaskManager objects in descending chronological order.
    """
    # List the Titan files under <root>/<group> and batch-load their data.
    titan_files = files.Files.list(
        dir_path=utils.safe_join(_ROOT_DIR_PATH, group),
        limit=limit, offset=offset, _internal=True)
    titan_files.load()

    # Sort in descending chronological order.
    # Ascending sort + reverse() (rather than reverse=True) also reverses the
    # relative order of files with equal `created` timestamps.
    ordered_files = sorted(titan_files.itervalues(), key=lambda f: f.created)
    ordered_files.reverse()
    task_manager_keys = [f.name_clean for f in ordered_files]

    # Create TaskManager objects and inject the preloaded Titan file.
    task_managers = []
    for i, task_manager_key in enumerate(task_manager_keys):
      task_manager = TaskManager(
          key=task_manager_key, group=group, _file=ordered_files[i])
      task_managers.append(task_manager)
    return task_managers
Example #3
0
 def _file(self):
   """Lazily creates and caches the backing Titan File for this task."""
   if self._internal_file is None:
     # /_titan/tasks/<group>/<task_manager_key>/tasks/<task_key_hash>.json
     json_name = self._internal_key + '.json'
     path = utils.safe_join(
         _ROOT_DIR_PATH, self._task_manager.group, self._task_manager.key,
         'tasks', json_name)
     self._internal_file = files.File(path, _internal=True)
   return self._internal_file
Example #4
0
 def _file(self):
     """Lazily builds and caches the Titan File backing this task."""
     if self._internal_file is None:
         # /_titan/tasks/<group>/<task_manager_key>/tasks/<task_key_hash>.json
         filename = utils.safe_join(_ROOT_DIR_PATH,
                                    self._task_manager.group,
                                    self._task_manager.key, 'tasks',
                                    self._internal_key + '.json')
         self._internal_file = files.File(filename, _internal=True)
     return self._internal_file
Example #5
0
    def testSafeJoin(self):
        """Exercises utils.safe_join joining and absolute-segment rejection."""
        # (expected, segments) pairs that must join successfully, in the same
        # order as the original assertions.
        ok_cases = [
            ('foo/bar', ('foo', 'bar')),
            ('foo/bar/baz/', ('foo', 'bar', 'baz/')),
            ('foo.html', ('', 'foo.html')),
            ('/foo/bar', ('/foo', 'bar')),
            ('/foo/bar', ('/foo/', 'bar')),
            ('/foo/bar/baz.html', ('/foo', 'bar', 'baz.html')),
            ('/foo/bar////baz.html', ('/foo', 'bar////', 'baz.html')),
        ]
        for expected, segments in ok_cases:
            self.assertEqual(expected, utils.safe_join(*segments))

        # Any absolute segment after the first must raise ValueError.
        bad_cases = [
            ('/foo', '/bar'),
            ('foo', '/bar', 'baz'),
            ('foo', 'bar', '/baz'),
        ]
        for segments in bad_cases:
            self.assertRaises(ValueError, utils.safe_join, *segments)
  def testSafeJoin(self):
    """Verifies utils.safe_join path joining and its ValueError cases."""
    self.assertEqual('foo/bar', utils.safe_join('foo', 'bar'))
    # A trailing slash on the final segment is preserved.
    self.assertEqual('foo/bar/baz/', utils.safe_join('foo', 'bar', 'baz/'))
    # An empty first segment is dropped.
    self.assertEqual('foo.html', utils.safe_join('', 'foo.html'))
    self.assertEqual('/foo/bar', utils.safe_join('/foo', 'bar'))
    # A trailing slash on a non-final segment collapses to one separator.
    self.assertEqual('/foo/bar', utils.safe_join('/foo/', 'bar'))
    self.assertEqual('/foo/bar/baz.html',
                     utils.safe_join('/foo', 'bar', 'baz.html'))
    # Extra slashes inside intermediate segments are NOT normalized away.
    self.assertEqual('/foo/bar////baz.html',
                     utils.safe_join('/foo', 'bar////', 'baz.html'))

    # Absolute segments after the first raise ValueError.
    self.assertRaises(ValueError, utils.safe_join, '/foo', '/bar')
    self.assertRaises(ValueError, utils.safe_join, 'foo', '/bar', 'baz')
    self.assertRaises(ValueError, utils.safe_join, 'foo', 'bar', '/baz')
Example #7
0
    def __init__(self, key, user=None, meta=None):
        """Initializes an Activity.

        Args:
          key: A dotted activity key string (segmented on '.' via
              utils.split_segments).
          user: Optional user object; only user.email feeds the ID hash.
          meta: Optional metadata, stored as-is on the activity.
        """
        super(Activity, self).__init__()

        self.timestamp = datetime.datetime.utcnow()
        self.key = key
        self.user = user
        self.meta = meta
        self._keys = utils.split_segments(self.key, sep='.')

        # Make the activity_id relatively unique: fold the key, creation
        # timestamp, request ID, and (if present) the user's email into an
        # md5 digest. md5 is for uniqueness only, not security.
        hashed = hashlib.md5()
        hashed.update(self.key)
        hashed.update(self.timestamp.isoformat(sep='T'))
        hashed.update(os.environ.get('REQUEST_ID_HASH', ''))
        if self.user:
            hashed.update(self.user.email)
        # /<ACTIVITY_DIR>/<key>/<YYYY/MM/DD>/<ISO timestamp>-<hash[:6]>.json
        self.activity_id = utils.safe_join(
            ACTIVITY_DIR, self.key, self.timestamp.strftime('%Y/%m/%d'),
            '{}-{}.json'.format(self.timestamp.isoformat(sep='T'),
                                hashed.hexdigest()[:6]))
  def __init__(self, key, user=None, meta=None):
    """Initializes an Activity and derives its storage path (activity_id)."""
    super(Activity, self).__init__()

    self.timestamp = datetime.datetime.utcnow()
    self.key = key
    self.user = user
    self.meta = meta
    self._keys = utils.split_segments(self.key, sep='.')

    # Fold several request-specific values into an md5 digest so that the
    # activity_id is relatively unique (md5 order matters: key, timestamp,
    # request ID, then optional user email).
    digest = hashlib.md5()
    digest_parts = [self.key,
                    self.timestamp.isoformat(sep='T'),
                    os.environ.get('REQUEST_ID_HASH', '')]
    if self.user:
      digest_parts.append(self.user.email)
    for part in digest_parts:
      digest.update(part)
    basename = '{}-{}.json'.format(
        self.timestamp.isoformat(sep='T'), digest.hexdigest()[:6])
    self.activity_id = utils.safe_join(
        ACTIVITY_DIR, self.key, self.timestamp.strftime('%Y/%m/%d'), basename)
Example #9
0
def _make_log_path(date, counter):
  """Builds the stats log path for `counter` on `date`.

  Example result:
    /_titan/activities/stats/counters/2015/05/15/page/view/data-60s.json
  """
  date_dir = date.strftime(counter.date_format)
  return utils.safe_join(BASE_DIR, date_dir, counter.name,
                         counter.data_filename)
Example #10
0
 def _tasks_dir_path(self):
   """Lazily computes and caches the tasks subdirectory path."""
   # /_titan/tasks/<group>/<task_manager_key>/tasks
   cached = self._internal_tasks_dir_path
   if not cached:
     cached = utils.safe_join(self._dir_path, 'tasks')
     self._internal_tasks_dir_path = cached
   return cached
Example #11
0
 def _dir_path(self):
   """Lazily computes and caches this task manager's directory path."""
   # /_titan/tasks/<group>/<task_manager_key>
   if not self._internal_dir_path:
     path = utils.safe_join(_ROOT_DIR_PATH, self.group, self.key)
     self._internal_dir_path = path
   return self._internal_dir_path
Example #12
0
 def _dir_path(self):
     """Lazily computes and caches this task manager's directory path."""
     # /_titan/tasks/<group>/<task_manager_key>
     if not self._internal_dir_path:
         self._internal_dir_path = utils.safe_join(_ROOT_DIR_PATH,
                                                   self.group, self.key)
     return self._internal_dir_path
Example #13
0
    def Run(self,
            dir_path=None,
            recursive=False,
            depth=None,
            file_paths=None,
            target_dir=None):
        """Download runner.

    Args:
      dir_path: A list of remote directories to download.
      recursive: Whether or not to download directory recursively.
      depth: Depth of recursion if specified.
      file_paths: A list of remote files to download.
      target_dir: The target local directory to upload to.
    Returns:
      A list of mappings between remote_path -> local_path.
    Raises:
      DownloadFileError
    """
        if not file_paths and not dir_path:
            self.print_error('No files to download. Use --file_path or '
                             '--dir_path.')
            return

        if target_dir is None:
            target_dir = '.'

        # Build [remote_file, local_target] pairs for everything to download.
        path_map = []
        if file_paths:
            remote_files = self.remote_file_factory.make_remote_files(
                paths=file_paths)
            for remote_file in remote_files.itervalues():
                if not remote_file.exists:
                    # NOTE(review): a missing file is reported but still
                    # appended to path_map below -- confirm this is intended.
                    print 'File %s does not exist' % remote_file.path

                # Flat layout: local name is the remote file's basename.
                target = os.path.abspath(
                    utils.safe_join(target_dir, remote_file.name))
                path_map.append([remote_file, target])

        elif dir_path:
            dir_kwargs = {'recursive': recursive, 'depth': depth}
            remote_files = self.remote_file_factory.make_remote_files(paths=[])
            remote_files.list(dir_path, **dir_kwargs)

            for remote_file in remote_files.itervalues():
                # Mirror layout: strip the leading '/' so the remote path is
                # recreated under target_dir.
                target = os.path.abspath(
                    utils.safe_join(target_dir, remote_file.path[1:]))
                path_map.append([remote_file, target])

        # Confirm with the user before downloading anything.
        conf_message = ['The following will be downloaded from %s' % self.host]
        for remote_file, target in path_map:
            conf_message.append('  %s --> %s' % (remote_file.path, target))
        print '\n'.join(conf_message)
        if not self.confirm(' Are you sure?'):
            sys.exit('Download aborted.')
        else:
            print 'Downloading...'

        utils.make_dirs(target_dir)

        # Start the download, fanning out over a thread pool.
        start = time.time()
        self.remote_file_factory.validate_client_auth()
        future_results = []
        with self.ThreadPoolExecutor() as executor:
            for remote_file, target in path_map:

                # Ensure each file's parent directory exists before writing.
                target_base_dir = os.path.dirname(target)
                utils.make_dirs(target_base_dir)

                future = executor.submit(self._DownloadFile, remote_file,
                                         target)
                future_results.append(future)

        # Report results as downloads complete; remember any failures.
        failed = False
        for future in futures.as_completed(future_results):
            try:
                downloaded_file = future.result()
                self.print_message(
                    'Downloaded %s to %s' %
                    (downloaded_file['path'], downloaded_file['target']))
            except DownloadFileError as e:
                self.print_error('Error downloading %s. Error was: %s %s' %
                                 (e.target_path, e.__class__.__name__, str(e)))
                failed = True

        if failed:
            self.print_error('Could not download one or more files.')
            return
        elapsed_time = time.time() - start
        print 'Downloaded %d files in %s.' % (
            len(path_map), utils.humanize_duration(elapsed_time))
Example #14
0
    def Run(self,
            filenames,
            root_dir=None,
            target_path='/',
            changeset=None,
            commit=False,
            confirm_manifest=False):
        """Upload runner.

    Args:
      filenames: A list of local filenames to upload.
      root_dir: Local root dir containing all the files. Default: current dir.
      target_path: Remote root target dir for uploaded documents.
      changeset: A changeset number or "new".
      commit: Whether or not to commit the changeset.
      confirm_manifest: Whether or not the current list of files given to
          upload can be trusted as a complete manifest of the files in the
          changeset. Required when combining "commit" and "changeset=<num>".
    Returns:
      A dictionary containing "success", "manifest", "paths", and
      "changeset_num" if uploading to a changeset.
    """
        if root_dir is None:
            root_dir = os.path.curdir

        # Compose mapping of absolute local path to absolute remote path.
        root_dir = os.path.abspath(root_dir)
        filename_to_paths = {}
        for filename in filenames:
            absolute_filename = os.path.abspath(filename)
            # Refuse files outside root_dir; remote paths are derived from
            # the path relative to root_dir.
            if not absolute_filename.startswith(root_dir):
                self.print_error('Path "%s" not contained within "%s".' %
                                 (absolute_filename, root_dir))
                sys.exit()
            remote_path = absolute_filename[len(root_dir) + 1:]
            remote_path = utils.safe_join(target_path, remote_path)
            filename_to_paths[absolute_filename] = remote_path

        # Confirm action.
        print '\nUploading files to %s (%s):' % (self.host, self.api_base_path)
        for filename, path in filename_to_paths.iteritems():
            if root_dir == os.path.abspath(os.path.curdir):
                # Strip the root_dir from view if it's already the working directory.
                filename = '.' + filename[len(root_dir):]
            print '  %s --> %s' % (filename, path)
        if not self.confirm('Upload files?'):
            sys.exit('Upload aborted.')

        # Versions Mixin: resolve which changeset (if any) to upload into.
        file_kwargs = {}
        changeset_num = None
        if changeset == 'new':
            self.vcs_factory.validate_client_auth()
            vcs = self.vcs_factory.make_remote_vcs()
            staging_changeset = vcs.new_staging_changeset()
            changeset_num = staging_changeset.num
            print 'New staging changeset created: %d' % changeset_num
        elif changeset:
            changeset_num = int(changeset)
        if changeset and changeset_num:
            file_kwargs['changeset'] = changeset_num
            print 'Uploading %d files to changeset %d...' % (len(filenames),
                                                             changeset_num)

        # Fan the uploads out over a thread pool.
        start = time.time()
        self.remote_file_factory.validate_client_auth()
        future_results = []
        with self.ThreadPoolExecutor() as executor:
            # NOTE(review): this loop rebinds the target_path parameter;
            # harmless since it is not read again, but rename for clarity.
            for filename, target_path in filename_to_paths.iteritems():
                future = executor.submit(self._upload_file,
                                         filename,
                                         target_path,
                                         file_kwargs=file_kwargs)
                future_results.append(future)

        failed = False
        total_bytes = 0  # NOTE(review): accumulated but never reported.
        for future in futures.as_completed(future_results):
            try:
                remote_file = future.result()
                print 'Uploaded %s' % remote_file.real_path
                total_bytes += remote_file.size
            except UploadFileError as e:
                self.print_error('Error uploading %s. Error was: %s %s' %
                                 (e.target_path, e.__class__.__name__, str(e)))
                failed = True

        if failed:
            self.print_error('Could not upload one or more files.')
            return

        manifest = filename_to_paths.values()
        if commit:
            # Committing requires a fresh changeset or explicit confirmation
            # that `filenames` is the changeset's complete manifest.
            if changeset != 'new' and not confirm_manifest:
                self.print_error(
                    'Must use --changeset=new with --commit, or pass '
                    '--confirm_manifest.')
                return
            self._commit_changeset_or_exit(changeset_num, manifest=manifest)

        elapsed_time = time.time() - start
        print 'Uploaded %d files in %s.' % (
            len(filenames), utils.humanize_duration(elapsed_time))
        result = {}
        if changeset_num:
            result['changeset_num'] = changeset_num
        result['success'] = not failed
        result['manifest'] = manifest
        result['paths'] = filename_to_paths.values()
        return result
Example #15
0
 def _tasks_dir_path(self):
     """Lazily computes and caches the tasks subdirectory path."""
     # /_titan/tasks/<group>/<task_manager_key>/tasks
     if not self._internal_tasks_dir_path:
         self._internal_tasks_dir_path = utils.safe_join(
             self._dir_path, 'tasks')
     return self._internal_tasks_dir_path
  def Run(self, filenames, root_dir=None, target_path='/', changeset=None,
          commit=False, confirm_manifest=False):
    """Upload runner.

    Args:
      filenames: A list of local filenames to upload.
      root_dir: Local root dir containing all the files. Default: current dir.
      target_path: Remote root target dir for uploaded documents.
      changeset: A changeset number or "new".
      commit: Whether or not to commit the changeset.
      confirm_manifest: Whether or not the current list of files given to
          upload can be trusted as a complete manifest of the files in the
          changeset. Required when combining "commit" and "changeset=<num>".
    Returns:
      A dictionary containing "success", "manifest", "paths", and
      "changeset_num" if uploading to a changeset.
    """
    if root_dir is None:
      root_dir = os.path.curdir

    # Compose mapping of absolute local path to absolute remote path.
    root_dir = os.path.abspath(root_dir)
    filename_to_paths = {}
    for filename in filenames:
      absolute_filename = os.path.abspath(filename)
      # Refuse files outside root_dir; remote paths are derived from the
      # path relative to root_dir.
      if not absolute_filename.startswith(root_dir):
        self.print_error('Path "%s" not contained within "%s".'
                         % (absolute_filename, root_dir))
        sys.exit()
      remote_path = absolute_filename[len(root_dir) + 1:]
      remote_path = utils.safe_join(target_path, remote_path)
      filename_to_paths[absolute_filename] = remote_path

    # Confirm action.
    print '\nUploading files to %s (%s):' % (self.host, self.api_base_path)
    for filename, path in filename_to_paths.iteritems():
      if root_dir == os.path.abspath(os.path.curdir):
        # Strip the root_dir from view if it's already the working directory.
        filename = '.' + filename[len(root_dir):]
      print '  %s --> %s' % (filename, path)
    if not self.confirm('Upload files?'):
      sys.exit('Upload aborted.')

    # Versions Mixin: resolve which changeset (if any) to upload into.
    file_kwargs = {}
    changeset_num = None
    if changeset == 'new':
      self.vcs_factory.validate_client_auth()
      vcs = self.vcs_factory.make_remote_vcs()
      staging_changeset = vcs.new_staging_changeset()
      changeset_num = staging_changeset.num
      print 'New staging changeset created: %d' % changeset_num
    elif changeset:
      changeset_num = int(changeset)
    if changeset and changeset_num:
      file_kwargs['changeset'] = changeset_num
      print 'Uploading %d files to changeset %d...' % (len(filenames),
                                                       changeset_num)

    # Fan the uploads out over a thread pool.
    start = time.time()
    self.remote_file_factory.validate_client_auth()
    future_results = []
    with self.ThreadPoolExecutor() as executor:
      # NOTE(review): this loop rebinds the target_path parameter; harmless
      # since it is not read again, but rename for clarity.
      for filename, target_path in filename_to_paths.iteritems():
        future = executor.submit(
            self._upload_file, filename, target_path, file_kwargs=file_kwargs)
        future_results.append(future)

    failed = False
    total_bytes = 0  # NOTE(review): accumulated but never reported.
    for future in futures.as_completed(future_results):
      try:
        remote_file = future.result()
        print 'Uploaded %s' % remote_file.real_path
        total_bytes += remote_file.size
      except UploadFileError as e:
        self.print_error('Error uploading %s. Error was: %s %s'
                         % (e.target_path, e.__class__.__name__, str(e)))
        failed = True

    if failed:
      self.print_error('Could not upload one or more files.')
      return

    manifest = filename_to_paths.values()
    if commit:
      # Committing requires a fresh changeset or explicit confirmation that
      # `filenames` is the changeset's complete manifest.
      if changeset != 'new' and not confirm_manifest:
        self.print_error('Must use --changeset=new with --commit, or pass '
                         '--confirm_manifest.')
        return
      self._commit_changeset_or_exit(changeset_num, manifest=manifest)

    elapsed_time = time.time() - start
    print 'Uploaded %d files in %s.' % (
        len(filenames),
        utils.humanize_duration(elapsed_time))
    result = {}
    if changeset_num:
      result['changeset_num'] = changeset_num
    result['success'] = not failed
    result['manifest'] = manifest
    result['paths'] = filename_to_paths.values()
    return result
  def Run(self, dir_path=None, recursive=False, depth=None, file_paths=None,
          target_dir=None):
    """Download runner.

    Args:
      dir_path: A list of remote directories to download.
      recursive: Whether or not to download directory recursively.
      depth: Depth of recursion if specified.
      file_paths: A list of remote files to download.
      target_dir: The target local directory to upload to.
    Returns:
      A list of mappings between remote_path -> local_path.
    Raises:
      DownloadFileError
    """
    if not file_paths and not dir_path:
      self.print_error('No files to download. Use --file_path or '
                       '--dir_path.')
      return

    if target_dir is None:
      target_dir = '.'

    # Build [remote_file, local_target] pairs for everything to download.
    path_map = []
    if file_paths:
      remote_files = self.remote_file_factory.make_remote_files(
          paths=file_paths)
      for remote_file in remote_files.itervalues():
        if not remote_file.exists:
          # NOTE(review): missing files are reported but still appended to
          # path_map below -- confirm this is intentional.
          print 'File %s does not exist' % remote_file.path

        # Flat layout: local name is the remote file's basename.
        target = os.path.abspath(utils.safe_join(target_dir, remote_file.name))
        path_map.append([remote_file, target])

    elif dir_path:
      dir_kwargs = {'recursive': recursive, 'depth': depth}
      remote_files = self.remote_file_factory.make_remote_files(paths=[])
      remote_files.list(dir_path, **dir_kwargs)

      for remote_file in remote_files.itervalues():
        # Mirror layout: strip the leading '/' so the remote path is
        # recreated under target_dir.
        target = os.path.abspath(
            utils.safe_join(target_dir, remote_file.path[1:]))
        path_map.append([remote_file, target])

    # Confirm with the user before downloading anything.
    conf_message = ['The following will be downloaded from %s' % self.host]
    for remote_file, target in path_map:
      conf_message.append('  %s --> %s' % (remote_file.path, target))
    print '\n'.join(conf_message)
    if not self.confirm(' Are you sure?'):
      sys.exit('Download aborted.')
    else:
      print 'Downloading...'

    utils.make_dirs(target_dir)

    # Start the download, fanning out over a thread pool.
    start = time.time()
    self.remote_file_factory.validate_client_auth()
    future_results = []
    with self.ThreadPoolExecutor() as executor:
      for remote_file, target in path_map:

        # Ensure each file's parent directory exists before writing.
        target_base_dir = os.path.dirname(target)
        utils.make_dirs(target_base_dir)

        future = executor.submit(self._DownloadFile, remote_file, target)
        future_results.append(future)

    # Report results as downloads complete; remember any failures.
    failed = False
    for future in futures.as_completed(future_results):
      try:
        downloaded_file = future.result()
        self.print_message(
            'Downloaded %s to %s' %
            (downloaded_file['path'], downloaded_file['target']))
      except DownloadFileError as e:
        self.print_error('Error downloading %s. Error was: %s %s' %
                        (e.target_path, e.__class__.__name__, str(e)))
        failed = True

    if failed:
      self.print_error('Could not download one or more files.')
      return
    elapsed_time = time.time() - start
    print 'Downloaded %d files in %s.' % (len(path_map),
                                          utils.humanize_duration(elapsed_time))
Example #18
0
def _make_log_path(date, counter):
    """Returns the stats log path for `counter` on `date`.

    Example result:
      /_titan/activities/stats/counters/2015/05/15/page/view/data-60s.json
    """
    formatted_date = date.strftime(counter.date_format)
    return utils.safe_join(BASE_DIR, formatted_date, counter.name,
                           counter.data_filename)