Example #1
    def InvalidateObjects(self, uri, paths, default_index_file,
                          invalidate_default_index_on_cf,
                          invalidate_default_index_root_on_cf):
        # joseprio: if the user doesn't want to invalidate the default index
        # path, or if the user wants to invalidate the root of the default
        # index, we need to process those paths
        if default_index_file is not None and (
                not invalidate_default_index_on_cf
                or invalidate_default_index_root_on_cf):
            new_paths = []
            default_index_suffix = '/' + default_index_file
            for path in paths:
                if path.endswith(
                        default_index_suffix) or path == default_index_file:
                    if invalidate_default_index_on_cf:
                        new_paths.append(path)
                    if invalidate_default_index_root_on_cf:
                        new_paths.append(path[:-len(default_index_file)])
                else:
                    new_paths.append(path)
            paths = new_paths

        # uri could be either cf:// or s3:// uri
        cfuris = self.get_dist_name_for_bucket(uri)
        if len(paths) > 999:
            try:
                tmp_filename = Utils.mktmpfile()
                f = open(deunicodise(tmp_filename), "w")
                f.write(deunicodise("\n".join(paths) + "\n"))
                f.close()
                warning("Request to invalidate %d paths (max 999 supported)" %
                        len(paths))
                warning("All the paths are now saved in: %s" % tmp_filename)
            except:
                pass
            raise ParameterError("Too many paths to invalidate")

        responses = []
        for cfuri in cfuris:
            invalbatch = InvalidationBatch(distribution=cfuri.dist_id(),
                                           paths=paths)
            debug("InvalidateObjects(): request_body: %s" % invalbatch)
            response = self.send_request("Invalidate",
                                         dist_id=cfuri.dist_id(),
                                         body=str(invalbatch))
            response['dist_id'] = cfuri.dist_id()
            if response['status'] == 201:
                inval_info = Invalidation(response['data']).info
                response['request_id'] = inval_info['Id']
            debug("InvalidateObjects(): response: %s" % response)

            responses.append(response)

        return responses
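
To make the default-index rewriting above concrete, here is a minimal standalone sketch of the branch taken when only invalidate_default_index_root_on_cf is set (the sample paths and index name are invented for the illustration):

paths = ['site/index.html', 'img/logo.png']
default_index_file = 'index.html'
default_index_suffix = '/' + default_index_file
new_paths = []
for path in paths:
    if path.endswith(default_index_suffix) or path == default_index_file:
        # replace the explicit index path with its directory root
        new_paths.append(path[:-len(default_index_file)])
    else:
        new_paths.append(path)
print(new_paths)   # ['site/', 'img/logo.png']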
Example #2
    def InvalidateObjects(self, uri, paths, default_index_file, invalidate_default_index_on_cf, invalidate_default_index_root_on_cf):
        # joseprio: if the user doesn't want to invalidate the default index
        # path, or if the user wants to invalidate the root of the default
        # index, we need to process those paths
        if default_index_file is not None and (not invalidate_default_index_on_cf or invalidate_default_index_root_on_cf):
            new_paths = []
            default_index_suffix = '/' + default_index_file
            for path in paths:
                if path.endswith(default_index_suffix) or path == default_index_file:
                    if invalidate_default_index_on_cf:
                        new_paths.append(path)
                    if invalidate_default_index_root_on_cf:
                        new_paths.append(path[:-len(default_index_file)])
                else:
                    new_paths.append(path)
            paths = new_paths

        # uri could be either cf:// or s3:// uri
        cfuris = self.get_dist_name_for_bucket(uri)
        if len(paths) > 3000:
            try:
                tmp_filename = Utils.mktmpfile()
                f = open(deunicodise(tmp_filename), "w")
                f.write(deunicodise("\n".join(paths)+"\n"))
                f.close()
                warning("Request to invalidate %d paths (max 3000 supported)" % len(paths))
                warning("All the paths are now saved in: %s" % tmp_filename)
            except:
                pass
            raise ParameterError("Too many paths to invalidate")

        responses = []
        for cfuri in cfuris:
            invalbatch = InvalidationBatch(distribution = cfuri.dist_id(), paths = paths)
            debug("InvalidateObjects(): request_body: %s" % invalbatch)
            response = self.send_request("Invalidate", dist_id = cfuri.dist_id(),
                                         body = str(invalbatch))
            response['dist_id'] = cfuri.dist_id()
            if response['status'] == 201:
                inval_info = Invalidation(response['data']).info
                response['request_id'] = inval_info['Id']
            debug("InvalidateObjects(): response: %s" % response)

            responses.append(response)

        return responses
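
A hedged usage sketch for the method above, assuming cf is an instance of the surrounding CloudFront class and that S3Uri is s3cmd's URI parser; the bucket and paths are hypothetical:

# Invalidate two objects on every distribution fronting the bucket, and
# invalidate the directory root instead of the explicit default-index path.
responses = cf.InvalidateObjects(S3Uri('s3://my-bucket'),
                                 ['css/site.css', 'docs/index.html'],
                                 default_index_file='index.html',
                                 invalidate_default_index_on_cf=False,
                                 invalidate_default_index_root_on_cf=True)
for response in responses:
    print(response['dist_id'], response.get('request_id'))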
Example #3
def sign_string_v2(string_to_sign):
    """Sign a string with the secret key, returning base64 encoded results.
    By default the configured secret key is used, but may be overridden as
    an argument.

    Useful for REST authentication. See http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
    """
    signature = base64.encodestring(hmac.new(Config.Config().secret_key, deunicodise(string_to_sign), sha1).digest()).strip()
    return signature
Example #4
def sign_string_v2(string_to_sign):
    """Sign a string with the secret key, returning base64 encoded results.
    By default the configured secret key is used, but may be overridden as
    an argument.

    Useful for REST authentication. See http://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html
    """
    signature = base64.encodestring(
        hmac.new(Config.Config().secret_key, deunicodise(string_to_sign),
                 sha1).digest()).strip()
    return signature
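
The same computation as a self-contained sketch on modern Python, with a made-up secret key standing in for the one the real function reads from s3cmd's Config (note that base64.encodestring was removed in Python 3.9; base64.encodebytes is the surviving name):

import base64
import hmac
from hashlib import sha1

secret_key = b'example-secret-key'   # hypothetical; the real code uses Config.Config().secret_key
string_to_sign = 'GET\n\n\nTue, 27 Mar 2007 19:36:42 +0000\n/my-bucket/photo.jpg'
signature = base64.encodebytes(
    hmac.new(secret_key, string_to_sign.encode('utf-8'), sha1).digest()).strip()
print(signature)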
Example #5
def checksum_sha256_file(filename, offset=0, size=None):
    try:
        hash = sha256()
    except:
        # fallback to Crypto SHA256 module
        hash = sha256.new()
    with open(deunicodise(filename),'rb') as f:
        if size is None:
            for chunk in iter(lambda: f.read(8192), b''):
                hash.update(chunk)
        else:
            f.seek(offset)
            chunk = f.read(size)
            hash.update(chunk)
    return hash
Example #6
def checksum_sha256_file(filename, offset=0, size=None):
    try:
        hash = sha256()
    except:
        # fallback to Crypto SHA256 module
        hash = sha256.new()
    with open(deunicodise(filename), "rb") as f:
        if size is None:
            for chunk in iter(lambda: f.read(8192), b""):
                hash.update(chunk)
        else:
            f.seek(offset)
            chunk = f.read(size)
            hash.update(chunk)
    return hash
Example #7
def checksum_sha256_file(filename, offset=0, size=None):
    try:
        hash = sha256()
    except:
        # fallback to Crypto SHA256 module
        hash = sha256.new()
    with open(deunicodise(filename),'rb') as f:
        if size is None:
            for chunk in iter(lambda: f.read(8192), b''):
                hash.update(chunk)
        else:
            f.seek(offset)
            size_left = size
            while size_left > 0:
                chunk = f.read(min(8192, size_left))
                size_left -= len(chunk)
                hash.update(chunk)

    return hash
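
Example #7 differs from #5 and #6 by streaming the ranged read in 8 KiB chunks rather than pulling the whole range into memory with a single f.read(size). A usage sketch, assuming Example #7's function and its deunicodise helper are in scope and that sha256 is hashlib's; the file name is hypothetical:

# Hash a whole file, then only bytes 1024..5119 of it.
whole = checksum_sha256_file('backup.tar')
ranged = checksum_sha256_file('backup.tar', offset=1024, size=4096)
print(whole.hexdigest())
print(ranged.hexdigest())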
Example #8
 def __repr__(self):
     return deunicodise("<%s: %s>" % (self.__class__.__name__, self.__unicode__()))
Example #9
 def dirname(self):
     return unicodise(os.path.dirname(deunicodise(self.path())))
Example #10
 def basename(self):
     return unicodise(os.path.basename(deunicodise(self.path())))
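
These one-line wrappers all follow the same boundary pattern: deunicodise encodes to bytes whenever a value is handed to a filesystem call, and unicodise decodes the result back to unicode. A minimal sketch of that round trip with simplified stand-ins for s3cmd's helpers, assuming a UTF-8 locale:

import os

def deunicodise(s, encoding='utf-8'):
    # unicode -> bytes at the OS boundary (simplified stand-in)
    return s.encode(encoding) if isinstance(s, str) else s

def unicodise(s, encoding='utf-8'):
    # bytes -> unicode on the way back (simplified stand-in)
    return s.decode(encoding) if isinstance(s, bytes) else s

path = 'photos/été.jpg'
print(unicodise(os.path.dirname(deunicodise(path))))   # photos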
Example #11
    def upload_all_parts(self, extra_label=''):
        """
        Execute a full multipart upload on a file
        Returns the seq/etag dict
        TODO use num_processes to thread it
        """
        if not self.upload_id:
            raise RuntimeError(
                "Attempting to use a multipart upload that has not been initiated."
            )

        self.chunk_size = self.s3.config.multipart_chunk_size_mb * 1024 * 1024
        filename = unicodise(self.file.name)

        if filename != "<stdin>":
            size_left = file_size = os.stat(deunicodise(filename))[ST_SIZE]
            # integer division: nr_parts must stay an int on Python 3
            nr_parts = file_size // self.chunk_size + (file_size %
                                                       self.chunk_size and 1)
            debug("MultiPart: Uploading %s in %d parts" % (filename, nr_parts))
        else:
            debug("MultiPart: Uploading from %s" % filename)

        remote_statuses = dict()
        if self.s3.config.put_continue:
            remote_statuses = self.get_parts_information(
                self.uri, self.upload_id)

        if extra_label:
            extra_label = u' ' + extra_label
        seq = 1
        if filename != "<stdin>":
            while size_left > 0:
                offset = self.chunk_size * (seq - 1)
                current_chunk_size = min(file_size - offset, self.chunk_size)
                size_left -= current_chunk_size
                labels = {
                    'source':
                    filename,
                    'destination':
                    self.uri.uri(),
                    'extra':
                    "[part %d of %d, %s]%s" %
                    (seq, nr_parts, "%d%sB" % formatSize(
                        current_chunk_size, human_readable=True), extra_label)
                }
                try:
                    self.upload_part(seq,
                                     offset,
                                     current_chunk_size,
                                     labels,
                                     remote_status=remote_statuses.get(seq))
                except:
                    error(
                        u"\nUpload of '%s' part %d failed. Use\n  %s abortmp %s %s\nto abort the upload, or\n  %s --upload-id %s put ...\nto continue the upload."
                        % (filename, seq, sys.argv[0], self.uri,
                           self.upload_id, sys.argv[0], self.upload_id))
                    raise
                seq += 1
        else:
            while True:
                buffer = self.file.read(self.chunk_size)
                offset = 0  # send from start of the buffer
                current_chunk_size = len(buffer)
                labels = {
                    'source':
                    filename,
                    'destination':
                    self.uri.uri(),
                    'extra':
                    "[part %d, %s]" %
                    (seq, "%d%sB" %
                     formatSize(current_chunk_size, human_readable=True))
                }
                if len(buffer) == 0:  # EOF
                    break
                try:
                    self.upload_part(seq,
                                     offset,
                                     current_chunk_size,
                                     labels,
                                     buffer,
                                     remote_status=remote_statuses.get(seq))
                except:
                    error(
                        u"\nUpload of '%s' part %d failed. Use\n  %s abortmp %s %s\nto abort, or\n  %s --upload-id %s put ...\nto continue the upload."
                        % (filename, seq, sys.argv[0], self.uri,
                           self.upload_id, sys.argv[0], self.upload_id))
                    raise
                seq += 1

        debug("MultiPart: Upload finished: %d parts", seq - 1)
Example #12
 def isdir(self):
     return os.path.isdir(deunicodise(self.path()))
Example #13
 def __str__(self):
     ## Call unicode(self) instead of self.message because
     ## __unicode__() method could be overriden in subclasses!
     return deunicodise(unicode(self))
Example #14
 def __repr__(self):
     return deunicodise('Grantee("%(tag)s", "%(name)s", "%(permission)s")' % {
         "tag" : self.tag,
         "name" : self.name,
         "permission" : self.permission
     })
Example #15
 def save(self, f):
     d = dict(inodes=self.inodes, version=1)
     f = open(deunicodise(f), 'wb')  # binary mode: pickle data are bytes on Python 3
     pickle.dump(d, f)
     f.close()
Example #16
 def load(self, f):
     f = open(deunicodise(f), 'rb')  # binary mode to match the pickled bytes
     d = pickle.load(f)
     f.close()
     if d.get('version') == 1 and 'inodes' in d:
         self.inodes = d['inodes']
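
A round-trip sketch of the save/load pair above, with a hypothetical stand-in class for the inode cache these methods belong to (plain string paths instead of the deunicodise helper, and binary file modes so pickle works on Python 3):

import pickle

class InodeCache(object):   # hypothetical stand-in
    def __init__(self):
        self.inodes = {}

    def save(self, f):
        d = dict(inodes=self.inodes, version=1)
        with open(f, 'wb') as fh:
            pickle.dump(d, fh)

    def load(self, f):
        with open(f, 'rb') as fh:
            d = pickle.load(fh)
        if d.get('version') == 1 and 'inodes' in d:
            self.inodes = d['inodes']

cache = InodeCache()
cache.inodes[(2049, 123456)] = 'example-checksum'
cache.save('/tmp/inodes.pickle')
fresh = InodeCache()
fresh.load('/tmp/inodes.pickle')
print(fresh.inodes)   # {(2049, 123456): 'example-checksum'}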
Example #17
    def upload_all_parts(self):
        """
        Execute a full multipart upload on a file
        Returns the seq/etag dict
        TODO use num_processes to thread it
        """
        if not self.upload_id:
            raise RuntimeError("Attempting to use a multipart upload that has not been initiated.")

        self.chunk_size = self.s3.config.multipart_chunk_size_mb * 1024 * 1024
        filename = unicodise(self.file.name)

        if filename != "<stdin>":
            size_left = file_size = os.stat(deunicodise(filename))[ST_SIZE]
            # integer division: nr_parts must stay an int on Python 3
            nr_parts = file_size // self.chunk_size + (file_size % self.chunk_size and 1)
            debug("MultiPart: Uploading %s in %d parts" % (filename, nr_parts))
        else:
            debug("MultiPart: Uploading from %s" % filename)

        remote_statuses = dict()
        if self.s3.config.put_continue:
            remote_statuses = self.get_parts_information(self.uri, self.upload_id)

        seq = 1
        if filename != "<stdin>":
            while size_left > 0:
                offset = self.chunk_size * (seq - 1)
                current_chunk_size = min(file_size - offset, self.chunk_size)
                size_left -= current_chunk_size
                labels = {
                    'source' : filename,
                    'destination' : self.uri.uri(),
                    'extra' : "[part %d of %d, %s]" % (seq, nr_parts, "%d%sB" % formatSize(current_chunk_size, human_readable = True))
                }
                try:
                    self.upload_part(seq, offset, current_chunk_size, labels, remote_status = remote_statuses.get(seq))
                except:
                    error(u"\nUpload of '%s' part %d failed. Use\n  %s abortmp %s %s\nto abort the upload, or\n  %s --upload-id %s put ...\nto continue the upload."
                          % (filename, seq, sys.argv[0], self.uri, self.upload_id, sys.argv[0], self.upload_id))
                    raise
                seq += 1
        else:
            while True:
                buffer = self.file.read(self.chunk_size)
                offset = 0 # send from start of the buffer
                current_chunk_size = len(buffer)
                labels = {
                    'source' : filename,
                    'destination' : self.uri.uri(),
                    'extra' : "[part %d, %s]" % (seq, "%d%sB" % formatSize(current_chunk_size, human_readable = True))
                }
                if len(buffer) == 0: # EOF
                    break
                try:
                    self.upload_part(seq, offset, current_chunk_size, labels, buffer, remote_status = remote_statuses.get(seq))
                except:
                    error(u"\nUpload of '%s' part %d failed. Use\n  %s abortmp %s %s\nto abort, or\n  %s --upload-id %s put ...\nto continue the upload."
                          % (filename, seq, sys.argv[0], self.uri, self.upload_id, sys.argv[0], self.upload_id))
                    raise
                seq += 1

        debug("MultiPart: Upload finished: %d parts", seq - 1)