Example #1
0
    def put_files(self,
                  files,
                  content_type=None,
                  compress=None,
                  compress_level=None,
                  cache_control=None,
                  block=True):
        """
    Put lots of files at once and get a nice progress bar. It'll also wait
    for the upload to complete, just like get_files.

    Required:
      files: [ (filepath, content), .... ]

    Optional:
      content_type: MIME type applied to every uploaded file
      compress: compression method; must be in compression.COMPRESSION_TYPES
      compress_level: level passed through to compression.compress
      cache_control: Cache-Control header value for the uploaded files
      block: if True, schedule the uploads and wait for completion;
        if False, spawn them on self.pool and return immediately

    Returns: self (fluent interface)

    Raises:
      NotImplementedError: if compress is not a supported compression type
      ValueError: if the same path appears more than once in files
    """
        if compress not in compression.COMPRESSION_TYPES:
            raise NotImplementedError()

        def uploadfn(path, content):
            with self.get_connection() as conn:
                content = compression.compress(content,
                                               method=compress,
                                               compress_level=compress_level)
                conn.put_file(
                    file_path=path,
                    content=content,
                    content_type=content_type,
                    compress=compress,
                    cache_control=cache_control,
                )

        # BUG FIX: the original tested an undefined name `gen` here, which
        # raised NameError on every call. The intent is to scan `files` for
        # duplicate paths, which is only possible when it is a materialized
        # sequence (a generator cannot be scanned without consuming it).
        if not isinstance(files, types.GeneratorType):
            dupes = duplicates([path for path, content in files])
            if dupes:
                raise ValueError(
                    "Cannot write the same file multiple times in one pass. This causes a race condition. Files: "
                    + ", ".join(dupes))

        fns = (partial(uploadfn, path, content) for path, content in files)

        if block:
            # BUG FIX: original line was a duplicated assignment
            # `desc = desc = self.progress_description('Uploading')`.
            desc = self.progress_description('Uploading')
            schedule_green_jobs(
                fns=fns,
                progress=(desc if self.progress else None),
                concurrency=self.concurrency,
                total=len(files),
            )
        else:
            # Fire-and-forget: caller is responsible for joining self.pool.
            for fn in fns:
                self.pool.spawn(fn)

        return self
Example #2
0
    def delete_files(self, file_paths):
        """Delete every path in file_paths concurrently; returns self."""
        def remove_one(target):
            # Each green job checks out its own connection for the delete.
            with self.get_connection() as conn:
                conn.delete_file(target)

        jobs = (partial(remove_one, target) for target in file_paths)

        schedule_green_jobs(
            fns=jobs,
            progress=('Deleting' if self.progress else None),
            concurrency=self.concurrency,
            total=len(file_paths),
        )

        return self
Example #3
0
    def put_files(self,
                  files,
                  content_type=None,
                  compress=None,
                  compress_level=None,
                  cache_control=None,
                  block=True):
        """
    Put lots of files at once and get a nice progress bar. It'll also wait
    for the upload to complete, just like get_files.

    Required:
      files: [ (filepath, content), .... ]
    """
        # Reject unsupported compression schemes up front.
        if compress not in compression.COMPRESSION_TYPES:
            raise NotImplementedError()

        def do_upload(filepath, payload):
            with self.get_connection() as conn:
                payload = compression.compress(payload,
                                               method=compress,
                                               compress_level=compress_level)
                conn.put_file(
                    file_path=filepath,
                    content=payload,
                    content_type=content_type,
                    compress=compress,
                    cache_control=cache_control,
                )

        jobs = (partial(do_upload, filepath, payload)
                for filepath, payload in files)

        if not block:
            # Fire-and-forget: queue everything on the green pool and return.
            for job in jobs:
                self.pool.spawn(job)
            return self

        schedule_green_jobs(
            fns=jobs,
            progress=('Uploading' if self.progress else None),
            concurrency=self.concurrency,
            total=len(files),
        )

        return self
Example #4
0
    def files_exist(self, file_paths):
        """
    Threaded exists for all file paths.

    file_paths: (list) file paths to test for existence

    Returns: { filepath: bool }
    """
        # Shared accumulator; each green job merges its batch's results in.
        found = {}

        def check_batch(batch):
            with self.get_connection() as conn:
                found.update(conn.files_exist(batch))

        batches = scatter(file_paths, self.concurrency)

        schedule_green_jobs(
            fns=(partial(check_batch, batch) for batch in batches),
            progress=('Existence Testing' if self.progress else None),
            concurrency=self.concurrency,
            total=len(file_paths),
        )

        return found
Example #5
0
  def get_files(self, file_paths, starts=None, ends=None):
    """
    Download all file_paths concurrently, optionally with byte ranges.

    Returns: [ 
      { "filename": ..., "byte_range": (start, end),
        "content": bytes or None, "error": exception or None }, 
      ... 
    ]
    """
    starts, ends = default_byte_iterator(starts, ends)

    def getfn(path, start, end):
      result = error = None

      conn = self.get_connection()
      try:
        result = conn.get_file(path, start=start, end=end)
      except Exception as err:
        error = err
        # important to print immediately because 
        # errors are collected at the end
        print(err)
        # drop the (possibly broken) connection rather than recycling it
        del conn
      else:
        conn.release_connection()

      # BUG FIX: on error, result is None and the original unconditional
      # `content, encoding = result` raised TypeError, masking the real
      # exception that was meant to be reported in the result dict.
      content = None
      if error is None:
        content, encoding = result
        content = compression.decompress(content, encoding)

      return {
        "filename": path,
        "byte_range": (start, end),
        "content": content,
        "error": error,
      }

    desc = self.progress_description('Downloading')

    return schedule_green_jobs(  
      fns=( 
        partial(getfn, path, start, end) 
        for path, start, end in zip(file_paths, starts, ends) 
      ),
      progress=(desc if self.progress else None),
      concurrency=self.concurrency,
      total=len(file_paths),
    )
Example #6
0
    def get_files(self, file_paths):
        """
    Download all file_paths concurrently.

    Returns: [ 
      { "filename": ..., "content": bytes or None,
        "error": exception or None }, 
      ... 
    ]
    """
        def getfn(path):
            result = error = None

            conn = self.get_connection()
            try:
                result = conn.get_file(path)
            except Exception as err:
                error = err
                # important to print immediately because
                # errors are collected at the end
                print(err)
                # drop the (possibly broken) connection rather than recycling
                del conn
            else:
                conn.release_connection()

            # BUG FIX: when get_file raised, result stayed None and the
            # original unconditional `content, encoding = result` raised
            # TypeError instead of returning the captured error to the
            # caller as the result dict promises.
            content = None
            if error is None:
                content, encoding = result
                content = compression.decompress(content, encoding)

            return {
                "filename": path,
                "content": content,
                "error": error,
            }

        return schedule_green_jobs(
            fns=(partial(getfn, path) for path in file_paths),
            progress=('Downloading' if self.progress else None),
            concurrency=self.concurrency,
            total=len(file_paths),
        )