Example #1
  def download_dir_contents(self, source_bucket, source_dir, dest_dir):
    """Recursively download contents of a Google Storage directory to local disk

    params:
      source_bucket: GS bucket to copy the files from
      source_dir: full path (Posix-style) within that bucket; read the files
          from this directory
      dest_dir: full path (local-OS-style) on local disk of directory to copy
          the files into

    The copy operates as a "merge with overwrite": any files in source_dir will
    be "overlaid" on top of the existing content in dest_dir.  Existing files
    with the same names will be overwritten.

    TODO(epoger): Download multiple files simultaneously to reduce latency.
    """
    _makedirs_if_needed(dest_dir)
    b = self._connect_to_bucket(bucket=source_bucket)
    (dirs, files) = self.list_bucket_contents(
        bucket=source_bucket, subdir=source_dir)

    for filename in files:
      key = Key(b)
      key.name = posixpath.join(source_dir, filename)
      dest_path = os.path.join(dest_dir, filename)
      with open(dest_path, 'wb') as f:  # binary mode: object contents are raw bytes
        try:
          key.get_contents_to_file(fp=f)
        except BotoServerError as e:
          e.body = (repr(e.body) +
                    ' while downloading gs://%s/%s to local_path=%s' % (
                        b.name, key.name, dest_path))
          raise

    for dirname in dirs:
      # Recurse into each subdirectory so the whole tree is downloaded,
      # as the docstring promises.
      self.download_dir_contents(
          source_bucket=source_bucket,
          source_dir=posixpath.join(source_dir, dirname),
          dest_dir=os.path.join(dest_dir, dirname))
Example #2
    def download_dir_contents(self, source_bucket, source_dir, dest_dir):
        """Recursively download contents of a Google Storage directory to local disk

    params:
      source_bucket: GS bucket to copy the files from
      source_dir: full path (Posix-style) within that bucket; read the files
          from this directory
      dest_dir: full path (local-OS-style) on local disk of directory to copy
          the files into

    The copy operates as a "merge with overwrite": any files in source_dir will
    be "overlaid" on top of the existing content in dest_dir.  Existing files
    with the same names will be overwritten.

    TODO(epoger): Download multiple files simultaneously to reduce latency.
    """
        _makedirs_if_needed(dest_dir)
        b = self._connect_to_bucket(bucket=source_bucket)
        (dirs, files) = self.list_bucket_contents(bucket=source_bucket,
                                                  subdir=source_dir)

        for filename in files:
            key = Key(b)
            key.name = posixpath.join(source_dir, filename)
            dest_path = os.path.join(dest_dir, filename)
            with open(dest_path, 'wb') as f:  # binary mode: object contents are raw bytes
                try:
                    key.get_contents_to_file(fp=f)
                except BotoServerError as e:
                    e.body = (
                        repr(e.body) +
                        ' while downloading gs://%s/%s to local_path=%s' %
                        (b.name, key.name, dest_path))
                    raise

        for dirname in dirs:
            # Recurse into each subdirectory so the whole tree is downloaded,
            # as the docstring promises.
            self.download_dir_contents(
                source_bucket=source_bucket,
                source_dir=posixpath.join(source_dir, dirname),
                dest_dir=os.path.join(dest_dir, dirname))
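Both variants assume `import os`, `import posixpath`, `from boto.exception import BotoServerError`, and a boto `Key` class (e.g. `from boto.gs.key import Key`) at module scope, plus a module-level helper `_makedirs_if_needed` that is not shown in these listings. A minimal sketch of what such a helper presumably looks like (an assumption on my part; the real helper may differ), tolerating directories that already exist:

import errno
import os

def _makedirs_if_needed(path):
  """Create a directory and any missing parents; a no-op if it already exists."""
  try:
    os.makedirs(path)
  except OSError as e:
    if e.errno != errno.EEXIST:  # only swallow "already exists"
      raise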
Example #3
  def download_file(self, source_bucket, source_path, dest_path,
                    create_subdirs_if_needed=False, source_generation=None):
    """Downloads a single file from Google Cloud Storage to local disk.

    Args:
      source_bucket: GS bucket to download the file from
      source_path: full path (Posix-style) within that bucket
      dest_path: full path (local-OS-style) on local disk to copy the file to
      create_subdirs_if_needed: boolean; whether to create subdirectories as
          needed to create dest_path
      source_generation: the generation version of the source
    """
    b = self._connect_to_bucket(bucket=source_bucket)
    key = Key(b)
    key.name = source_path
    if source_generation:
      key.generation = source_generation
    if create_subdirs_if_needed:
      _makedirs_if_needed(os.path.dirname(dest_path))
    with open(dest_path, 'wb') as f:  # binary mode: object contents are raw bytes
      try:
        key.get_contents_to_file(fp=f)
      except BotoServerError as e:
        e.body = (repr(e.body) +
                  ' while downloading gs://%s/%s to local_path=%s' % (
                      b.name, source_path, dest_path))
        raise
Example #4
    def download_file(self,
                      source_bucket,
                      source_path,
                      dest_path,
                      create_subdirs_if_needed=False,
                      source_generation=None):
        """Downloads a single file from Google Cloud Storage to local disk.

    Args:
      source_bucket: GS bucket to download the file from
      source_path: full path (Posix-style) within that bucket
      dest_path: full path (local-OS-style) on local disk to copy the file to
      create_subdirs_if_needed: boolean; whether to create subdirectories as
          needed to create dest_path
      source_generation: the generation version of the source
    """
        b = self._connect_to_bucket(bucket=source_bucket)
        key = Key(b)
        key.name = source_path
        if source_generation:
            key.generation = source_generation
        if create_subdirs_if_needed:
            _makedirs_if_needed(os.path.dirname(dest_path))
        with open(dest_path, 'wb') as f:  # binary mode: object contents are raw bytes
            try:
                key.get_contents_to_file(fp=f)
            except BotoServerError as e:
                e.body = (repr(e.body) +
                          ' while downloading gs://%s/%s to local_path=%s' %
                          (b.name, source_path, dest_path))
                raise
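Every method here shares one error-handling idiom: catch BotoServerError, append request context to e.body, and re-raise with a bare raise so the original traceback survives. A self-contained demonstration of the same pattern, using a hypothetical stand-in exception class so it runs without boto:

class FakeServerError(Exception):
  """Stand-in for BotoServerError, purely for illustration."""
  def __init__(self, body):
    super(FakeServerError, self).__init__(body)
    self.body = body

def annotated_fetch():
  try:
    raise FakeServerError('404 Not Found')
  except FakeServerError as e:
    # Mutate the exception in place, then re-raise the *same* object;
    # a bare `raise` preserves the original traceback.
    e.body = repr(e.body) + ' while downloading gs://bucket/path'
    raise

try:
  annotated_fetch()
except FakeServerError as e:
  print(e.body)  # "'404 Not Found' while downloading gs://bucket/path"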
Example #5
  def delete_file(self, bucket, path):
    """Delete a single file within a GS bucket.

    TODO(epoger): what if bucket or path does not exist?  Should probably raise
    an exception.  Implement, and add a test to exercise this.

    Params:
      bucket: GS bucket to delete a file from
      path: full path (Posix-style) of the file within the bucket to delete
    """
    b = self._connect_to_bucket(bucket=bucket)
    key = Key(b)
    key.name = path
    try:
      key.delete()
    except BotoServerError as e:
      e.body = (repr(e.body) +
                ' while deleting gs://%s/%s' % (b.name, path))
      raise
Example #6
    def delete_file(self, bucket, path):
        """Delete a single file within a GS bucket.

    TODO(epoger): what if bucket or path does not exist?  Should probably raise
    an exception.  Implement, and add a test to exercise this.

    Params:
      bucket: GS bucket to delete a file from
      path: full path (Posix-style) of the file within the bucket to delete
    """
        b = self._connect_to_bucket(bucket=bucket)
        key = Key(b)
        key.name = path
        try:
            key.delete()
        except BotoServerError as e:
            e.body = (repr(e.body) + ' while deleting gs://%s/%s' %
                      (b.name, path))
            raise
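The docstring's TODO asks what should happen when the bucket or object does not exist. One hedged way to implement it, reusing only calls already present in these listings (b.get_key returns None for a missing object), might look like this hypothetical variant:

def delete_file_strict(self, bucket, path):
  """Like delete_file, but raise if the object is missing (untested sketch)."""
  b = self._connect_to_bucket(bucket=bucket)
  if b.get_key(key_name=path) is None:
    raise IOError('gs://%s/%s does not exist' % (b.name, path))
  key = Key(b)
  key.name = path
  key.delete()

Note the check-then-delete is racy under concurrent writers; it answers the TODO's question but is not atomic.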
Example #7
  def upload_file(self, source_path, dest_bucket, dest_path,
                  upload_if=UploadIf.ALWAYS,
                  predefined_acl=None,
                  fine_grained_acl_list=None):
    """Upload contents of a local file to Google Storage.

    params:
      source_path: full path (local-OS-style) on local disk to read from
      dest_bucket: GS bucket to copy the file to
      dest_path: full path (Posix-style) within that bucket
      upload_if: one of the UploadIf values, describing in which cases we should
          upload the file
      predefined_acl: which predefined ACL to apply to the file on Google
          Storage; must be one of the PredefinedACL values defined above.
          If None, inherits dest_bucket's default object ACL.
      fine_grained_acl_list: list of (id_type, id_value, permission) tuples
          to apply to the uploaded file (on top of the predefined_acl),
          or None if predefined_acl is sufficient

    TODO(epoger): Consider adding a do_compress parameter that would compress
    the file using gzip before upload, and add a "Content-Encoding:gzip" header
    so that HTTP downloads of the file would be unzipped automatically.
    See https://developers.google.com/storage/docs/gsutil/addlhelp/
        WorkingWithObjectMetadata#content-encoding
    """
    b = self._connect_to_bucket(bucket=dest_bucket)
    local_md5 = None  # filled in lazily

    if upload_if == self.UploadIf.IF_NEW:
      old_key = b.get_key(key_name=dest_path)
      if old_key:
        print('Skipping upload of existing file gs://%s/%s' % (
            b.name, dest_path))
        return
    elif upload_if == self.UploadIf.IF_MODIFIED:
      old_key = b.get_key(key_name=dest_path)
      if old_key:
        if not local_md5:
          local_md5 = _get_local_md5(path=source_path)
        if ('"%s"' % local_md5) == old_key.etag:
          print(
              'Skipping upload of unmodified file gs://%s/%s : %s' % (
                  b.name, dest_path, local_md5))
          return
    elif upload_if != self.UploadIf.ALWAYS:
      raise Exception('unknown value of upload_if: %s' % upload_if)

    # Upload the file using a temporary name at first, in case the transfer
    # is interrupted partway through.
    if not local_md5:
      local_md5 = _get_local_md5(path=source_path)
    initial_key = Key(b)
    initial_key.name = dest_path + '-uploading-' + local_md5
    try:
      initial_key.set_contents_from_filename(filename=source_path,
                                             policy=predefined_acl)
    except BotoServerError as e:
      e.body = (repr(e.body) +
                ' while uploading source_path=%s to gs://%s/%s' % (
                    source_path, b.name, initial_key.name))
      raise
Example #8
    def upload_file(self,
                    source_path,
                    dest_bucket,
                    dest_path,
                    upload_if=UploadIf.ALWAYS,
                    predefined_acl=None,
                    fine_grained_acl_list=None):
        """Upload contents of a local file to Google Storage.

    params:
      source_path: full path (local-OS-style) on local disk to read from
      dest_bucket: GS bucket to copy the file to
      dest_path: full path (Posix-style) within that bucket
      upload_if: one of the UploadIf values, describing in which cases we should
          upload the file
      predefined_acl: which predefined ACL to apply to the file on Google
          Storage; must be one of the PredefinedACL values defined above.
          If None, inherits dest_bucket's default object ACL.
      fine_grained_acl_list: list of (id_type, id_value, permission) tuples
          to apply to the uploaded file (on top of the predefined_acl),
          or None if predefined_acl is sufficient

    TODO(epoger): Consider adding a do_compress parameter that would compress
    the file using gzip before upload, and add a "Content-Encoding:gzip" header
    so that HTTP downloads of the file would be unzipped automatically.
    See https://developers.google.com/storage/docs/gsutil/addlhelp/
        WorkingWithObjectMetadata#content-encoding
    """
        b = self._connect_to_bucket(bucket=dest_bucket)
        local_md5 = None  # filled in lazily

        if upload_if == self.UploadIf.IF_NEW:
            old_key = b.get_key(key_name=dest_path)
            if old_key:
                print('Skipping upload of existing file gs://%s/%s' %
                      (b.name, dest_path))
                return
        elif upload_if == self.UploadIf.IF_MODIFIED:
            old_key = b.get_key(key_name=dest_path)
            if old_key:
                if not local_md5:
                    local_md5 = _get_local_md5(path=source_path)
                if ('"%s"' % local_md5) == old_key.etag:
                    print(
                        'Skipping upload of unmodified file gs://%s/%s : %s' %
                        (b.name, dest_path, local_md5))
                    return
        elif upload_if != self.UploadIf.ALWAYS:
            raise Exception('unknown value of upload_if: %s' % upload_if)

        # Upload the file using a temporary name at first, in case the transfer
        # is interrupted partway through.
        if not local_md5:
            local_md5 = _get_local_md5(path=source_path)
        initial_key = Key(b)
        initial_key.name = dest_path + '-uploading-' + local_md5
        try:
            initial_key.set_contents_from_filename(filename=source_path,
                                                   policy=predefined_acl)
        except BotoServerError as e:
            e.body = (repr(e.body) +
                      ' while uploading source_path=%s to gs://%s/%s' %
                      (source_path, b.name, initial_key.name))
            raise
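Examples #7 and #8 end after writing the object under its temporary '-uploading-' name; the rest of the method (presumably copying the temporary object to its final dest_path and applying fine_grained_acl_list) is not shown. They also rely on a _get_local_md5 helper that is likewise not shown; a minimal sketch, assuming it simply hashes the file contents in chunks:

import hashlib

def _get_local_md5(path):
  """Return the hex MD5 digest of a local file, read in 64 KiB chunks."""
  hasher = hashlib.md5()
  with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(64 * 1024), b''):
      hasher.update(chunk)
  return hasher.hexdigest()

This pairs with the IF_MODIFIED branch above: for objects uploaded in a single request, Google Storage's ETag is the MD5 digest wrapped in double quotes, which is why the code compares '"%s"' % local_md5 against old_key.etag.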