Example #1
    def object_copy(self, src_uri, dst_uri, extra_headers = None):
        if src_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
        if dst_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
        headers = SortedDict(ignore_case = True)
        headers['x-amz-copy-source'] = "/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object()))
        ## TODO: For now COPY, later maybe add a switch?
        headers['x-amz-metadata-directive'] = "COPY"
        if self.config.acl_public:
            headers["x-amz-acl"] = "public-read"
        if self.config.reduced_redundancy:
            headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"

        ## Set server side encryption
        if self.config.server_side_encryption:
            headers["x-amz-server-side-encryption"] = "AES256"

        if extra_headers:
            headers['x-amz-metadata-directive'] = "REPLACE"
            headers.update(extra_headers)

        filename = os.path.basename(str(src_uri))
        headers["content-type"] = self.content_type(filename)

        request = self.create_request("OBJECT_PUT", uri = dst_uri, headers = headers)
        response = self.send_request(request)
        return response
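The COPY/REPLACE switch above is the subtle part: S3 only honours the user metadata sent with a copy request when x-amz-metadata-directive is REPLACE; with COPY it clones the source object's metadata and ignores the request's. A minimal stand-in for the header assembly (hypothetical helper, plain dict in place of s3cmd's case-insensitive SortedDict):

def choose_copy_headers(acl_public=False, reduced_redundancy=False, extra_headers=None):
    # Mirrors the header assembly in object_copy above; a plain dict stands
    # in for s3cmd's case-insensitive SortedDict, and the function name is
    # illustrative, not part of s3cmd.
    headers = {'x-amz-metadata-directive': 'COPY'}
    if acl_public:
        headers['x-amz-acl'] = 'public-read'
    if reduced_redundancy:
        headers['x-amz-storage-class'] = 'REDUCED_REDUNDANCY'
    if extra_headers:
        # S3 ignores request metadata while the directive is COPY, so the
        # directive is flipped to REPLACE to make the extra headers stick.
        headers['x-amz-metadata-directive'] = 'REPLACE'
        headers.update(extra_headers)
    return headers

print(choose_copy_headers(extra_headers={'x-amz-meta-foo': 'bar'}))
# {'x-amz-metadata-directive': 'REPLACE', 'x-amz-meta-foo': 'bar'}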
Example #2
	def create_request(self, operation, bucket = None, object = None, headers = None, extra = None, **params):
		resource = { 'bucket' : None, 'uri' : "/" }
		if bucket:
			resource['bucket'] = str(bucket)
			if object:
				resource['uri'] = "/" + self.urlencode_string(object)
		if extra:
			resource['uri'] += extra

		if not headers:
			headers = SortedDict()

		if headers.has_key("date"):
			if not headers.has_key("x-amz-date"):
				headers["x-amz-date"] = headers["date"]
			del(headers["date"])
		
		if not headers.has_key("x-amz-date"):
			headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

		method_string = S3.http_methods.getkey(S3.operations[operation] & S3.http_methods["MASK"])
		signature = self.sign_headers(method_string, resource, headers)
		headers["Authorization"] = "AWS "+self.config.access_key+":"+signature
		param_str = ""
		for param in params:
			if params[param] not in (None, ""):
				param_str += "&%s=%s" % (param, params[param])
			else:
				param_str += "&%s" % param
		if param_str != "":
			resource['uri'] += "?" + param_str[1:]
		debug("CreateRequest: resource[uri]=" + resource['uri'])
		return (method_string, resource, headers)
Example #3
    def object_copy(self, src_uri, dst_uri, extra_headers = None):
        if src_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
        if dst_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
        headers = SortedDict(ignore_case = True)
        headers['x-amz-copy-source'] = "/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object()))
        ## TODO: For now COPY, later maybe add a switch?
        headers['x-amz-metadata-directive'] = "COPY"
        if self.config.acl_public:
            headers["x-amz-acl"] = "public-read"
        if self.config.reduced_redundancy:
            headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"
        if extra_headers:
            headers.update(extra_headers)

        # Sync public ACL
        if self.config.acl_copy_public:
            acl = self.get_acl(src_uri)
            if acl.isAnonRead():
                headers["x-amz-acl"] = "public-read"
        
        request = self.create_request("OBJECT_PUT", uri = dst_uri, headers = headers)
        try:
            response = self.send_request(request)
        except Exception, e:
            if e.status == 412:
                # PreconditionFailed response - this is ok, just skip
                return {"status": e.status}
            else:
                raise e
        return response
Example #4
 def __init__(self, mapping={}, ignore_case=True, **kwargs):
     SortedDict.__init__(self,
                         mapping=mapping,
                         ignore_case=ignore_case,
                         **kwargs)
     self.hardlinks = dict()  # { dev: { inode : {'md5':, 'relative_files':}}}
     self.by_md5 = dict()  # {md5: set(relative_files)}
Example #5
class S3Request(object):
    def __init__(self, s3, method_string, resource, headers, params = {}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case = True)
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.add_security_token()
        self.update_timestamp()
        self.sign()

    def update_timestamp(self):
        if self.headers.has_key("date"):
            del(self.headers["date"])
        self.headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

    def add_security_token(self):
        if self.s3.config.security_token:
            self.headers["x-amz-security-token"] = self.s3.config.security_token

    def format_param_str(self):
        """
        Format URL parameters from self.params and returns
        ?parm1=val1&parm2=val2 or an empty string if there
        are no parameters.  Output of this function should
        be appended directly to self.resource['uri']
        """
        param_str = ""
        for param in self.params:
            if self.params[param] not in (None, ""):
                param_str += "&%s=%s" % (param, self.params[param])
            else:
                param_str += "&%s" % param
        return param_str and "?" + param_str[1:]

    def sign(self):
        h  = self.method_string + "\n"
        h += self.headers.get("content-md5", "")+"\n"
        h += self.headers.get("content-type", "")+"\n"
        h += self.headers.get("date", "")+"\n"
        for header in self.headers.keys():
            if header.startswith("x-amz-"):
                h += header+":"+str(self.headers[header])+"\n"
        if self.resource['bucket']:
            h += "/" + self.resource['bucket']
        h += self.resource['uri']
        debug("SignHeaders: " + repr(h))
        signature = sign_string(h)

        self.headers["Authorization"] = "AWS "+self.s3.config.access_key+":"+signature

    def get_triplet(self):
        self.update_timestamp()
        self.sign()
        resource = dict(self.resource)  ## take a copy
        resource['uri'] += self.format_param_str()
        return (self.method_string, resource, self.headers)
Example #6
	def __init__(self, s3, method_string, resource, headers, params = {}):
		self.s3 = s3
		self.headers = SortedDict(headers or {}, ignore_case = True)
		self.resource = resource
		self.method_string = method_string
		self.params = params

		self.update_timestamp()
		self.sign()
Example #7
class S3Request(object):
    def __init__(self, s3, method_string, resource, headers, params={}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case=True)
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.update_timestamp()
        self.sign()

    def update_timestamp(self):
        if self.headers.has_key("date"):
            del (self.headers["date"])
        self.headers["x-amz-date"] = time.strftime(
            "%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

    def format_param_str(self):
        """
        Format URL parameters from self.params and returns
        ?parm1=val1&parm2=val2 or an empty string if there
        are no parameters.  Output of this function should
        be appended directly to self.resource['uri']
        """
        param_str = ""
        for param in self.params:
            if self.params[param] not in (None, ""):
                param_str += "&%s=%s" % (param, self.params[param])
            else:
                param_str += "&%s" % param
        return param_str and "?" + param_str[1:]

    def sign(self):
        h = self.method_string + "\n"
        h += self.headers.get("content-md5", "") + "\n"
        h += self.headers.get("content-type", "") + "\n"
        h += self.headers.get("date", "") + "\n"
        for header in self.headers.keys():
            if header.startswith("x-amz-"):
                h += header + ":" + str(self.headers[header]) + "\n"
        if self.resource['bucket']:
            h += "/" + self.resource['bucket']
        h += self.resource['uri']
        debug("SignHeaders: " + repr(h))
        signature = sign_string(h)

        self.headers["Authorization"] = "AWS " + self.s3.config.access_key + ":" + signature

    def get_triplet(self):
        self.update_timestamp()
        self.sign()
        resource = dict(self.resource)  ## take a copy
        resource['uri'] += self.format_param_str()
        return (self.method_string, resource, self.headers)
Example #8
    def _get_filelist_remote(remote_uri, recursive = True):
        ## If remote_uri ends with '/' then all remote files will have
        ## the remote_uri prefix removed in the relative path.
        ## If, on the other hand, the remote_uri ends with something else
        ## (probably alphanumeric symbol) we'll use the last path part
        ## in the relative path.
        ##
        ## Complicated, eh? See an example:
        ## _get_filelist_remote("s3://bckt/abc/def") may yield:
        ## { 'def/file1.jpg' : {}, 'def/xyz/blah.txt' : {} }
        ## _get_filelist_remote("s3://bckt/abc/def/") will yield:
        ## { 'file1.jpg' : {}, 'xyz/blah.txt' : {} }
        ## Furthermore a prefix-magic can restrict the return list:
        ## _get_filelist_remote("s3://bckt/abc/def/x") yields:
        ## { 'xyz/blah.txt' : {} }

        info(u"Retrieving list of remote files for %s ..." % remote_uri)

        s3 = S3(Config())
        response = s3.bucket_list(remote_uri.bucket(), prefix = remote_uri.object(), recursive = recursive)

        rem_base_original = rem_base = remote_uri.object()
        remote_uri_original = remote_uri
        if rem_base != '' and rem_base[-1] != '/':
            rem_base = rem_base[:rem_base.rfind('/')+1]
            remote_uri = S3Uri("s3://%s/%s" % (remote_uri.bucket(), rem_base))
        rem_base_len = len(rem_base)
        rem_list = SortedDict(ignore_case = False)
        break_now = False
        for object in response['list']:
            if object['Key'] == rem_base_original and object['Key'][-1] != os.path.sep:
                ## We asked for one file and we got that file :-)
                key = os.path.basename(object['Key'])
                object_uri_str = remote_uri_original.uri()
                break_now = True
                rem_list = SortedDict(ignore_case = False)   ## Remove whatever has already been put to rem_list
            else:
                key = object['Key'][rem_base_len:]      ## Beware - this may be '' if object['Key']==rem_base !!
                object_uri_str = remote_uri.uri() + key
            rem_list[key] = {
                'size' : int(object['Size']),
                'timestamp' : dateS3toUnix(object['LastModified']), ## Sadly it's upload time, not our lastmod time :-(
                'md5' : object['ETag'][1:-1],
                'object_key' : object['Key'],
                'object_uri_str' : object_uri_str,
                'base_uri' : remote_uri,
                'dev' : None,
                'inode' : None,
            }
            md5 = object['ETag'][1:-1]
            rem_list.record_md5(key, md5)
            if break_now:
                break
        return rem_list
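The trailing-slash rule described in the comments boils down to one slice. A standalone sketch of the rem_base computation (hypothetical helper name):

def split_remote_base(prefix):
    # Reproduces the rem_base computation above: anything after the last
    # '/' is treated as a filename-prefix filter, not as part of the base.
    rem_base = prefix
    if rem_base != '' and rem_base[-1] != '/':
        rem_base = rem_base[:rem_base.rfind('/') + 1]
    return rem_base

print(split_remote_base("abc/def"))    # 'abc/'     -> keys like 'def/file1.jpg'
print(split_remote_base("abc/def/"))   # 'abc/def/' -> keys like 'file1.jpg'
print(split_remote_base("abc/def/x"))  # 'abc/def/' -> keys like 'xyz/blah.txt'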
Example #9
    def __init__(self, s3, method_string, resource, headers, params = {}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case = True)
        # Add in any extra headers from s3 config object
        if self.s3.config.extra_headers:
            self.headers.update(self.s3.config.extra_headers)
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.update_timestamp()
        self.sign()
Example #10
	def object_copy(self, src_uri, dst_uri, extra_headers = None):
		if src_uri.type != "s3":
			raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
		if dst_uri.type != "s3":
			raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
		headers = SortedDict()
		headers['x-amz-copy-source'] = "/%s/%s" % (src_uri.bucket(), self.urlencode_string(src_uri.object()))
		if self.config.acl_public:
			headers["x-amz-acl"] = "public-read"
		if extra_headers:
			headers.update(extra_headers)
		request = self.create_request("OBJECT_PUT", uri = dst_uri, headers = headers)
		response = self.send_request(request)
		return response
Example #11
    def __init__(self, s3, method_string, resource, headers, params={}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case=True)
        # Add in any extra headers from s3 config object
        if self.s3.config.extra_headers:
            self.headers.update(self.s3.config.extra_headers)
        if len(self.s3.config.access_token) > 0:
            self.s3.config.role_refresh()
            self.headers['x-amz-security-token'] = self.s3.config.access_token
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.update_timestamp()
        self.sign()
Example #12
def filter_exclude_include(src_list):
    info(u"Applying --exclude/--include")
    cfg = Config()
    exclude_list = SortedDict(ignore_case=False)
    for file in src_list.keys():
        debug(u"CHECK: %s" % file)
        excluded = False
        for r in cfg.exclude:
            if r.search(file):
                excluded = True
                debug(u"EXCL-MATCH: '%s'" % (cfg.debug_exclude[r]))
                break
        if excluded:
            ## No need to check for --include if not excluded
            for r in cfg.include:
                if r.search(file):
                    excluded = False
                    debug(u"INCL-MATCH: '%s'" % (cfg.debug_include[r]))
                    break
        if excluded:
            ## Still excluded - ok, action it
            debug(u"EXCLUDE: %s" % file)
            exclude_list[file] = src_list[file]
            del (src_list[file])
            continue
        else:
            debug(u"PASS: %s" % (file))
    return src_list, exclude_list
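The precedence here is worth spelling out: --include patterns are only consulted for files that an --exclude pattern already matched; they can rescue a file but never exclude one. A self-contained sketch of the same rule (illustrative helper, compiled regexes as in cfg.exclude/cfg.include):

import re

def is_excluded(path, exclude, include):
    # Same precedence as filter_exclude_include above: an --include
    # pattern only matters for files already matched by an --exclude.
    for r in exclude:
        if r.search(path):
            # excluded unless an include pattern rescues it
            return not any(i.search(path) for i in include)
    return False

exclude = [re.compile(r'\.log$')]
include = [re.compile(r'^keep/')]
print(is_excluded('tmp/a.log', exclude, include))   # True
print(is_excluded('keep/a.log', exclude, include))  # False (rescued)
print(is_excluded('src/a.py', exclude, include))    # False (never excluded)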
Example #13
    def website_info(self, uri, bucket_location=None):
        headers = SortedDict(ignore_case=True)
        bucket = uri.bucket()
        body = ""

        request = self.create_request("BUCKET_LIST",
                                      bucket=bucket,
                                      extra="?website")
        try:
            response = self.send_request(request, body)
            response['index_document'] = getTextFromXml(
                response['data'], ".//IndexDocument//Suffix")
            response['error_document'] = getTextFromXml(
                response['data'], ".//ErrorDocument//Key")
            response['website_endpoint'] = self.config.website_endpoint % {
                "bucket": uri.bucket(),
                "location": self.get_bucket_location(uri)
            }
            return response
        except S3Error, e:
            if e.status == 404:
                debug(
                    "Could not get /?website - website probably not configured for this bucket"
                )
                return None
            raise
Example #14
File: S3.py Project: mekza/s3cmd
    def object_copy(self, src_uri, dst_uri, extra_headers=None):
        if src_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
        if dst_uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
        headers = SortedDict(ignore_case=True)
        headers['x-amz-copy-source'] = "/%s/%s" % (
            src_uri.bucket(), self.urlencode_string(src_uri.object()))
        ## TODO: For now COPY, later maybe add a switch?
        headers['x-amz-metadata-directive'] = "COPY"
        if self.config.acl_public:
            headers["x-amz-acl"] = "public-read"
        if self.config.reduced_redundancy:
            headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"
        # if extra_headers:
        #   headers.update(extra_headers)

        ## Set server side encryption
        if self.config.server_side_encryption:
            headers["x-amz-server-side-encryption"] = "AES256"

        request = self.create_request("OBJECT_PUT",
                                      uri=dst_uri,
                                      headers=headers)
        response = self.send_request(request)
        return response
Example #15
 def create_request(self, Action, DomainName, parameters=None):
     if not parameters:
         parameters = SortedDict()
     parameters['AWSAccessKeyId'] = self.config.access_key
     parameters['Version'] = self.Version
     parameters['SignatureVersion'] = self.SignatureVersion
     parameters['Action'] = Action
     parameters['Timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
     if DomainName:
         parameters['DomainName'] = DomainName
     parameters['Signature'] = self.sign_request(parameters)
     parameters.keys_return_lowercase = False
     uri_params = urllib.urlencode(parameters)
     request = {}
     request['uri_params'] = uri_params
     request['parameters'] = parameters
     return request
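SortedDict is doing real work here: SimpleDB's SignatureVersion 1 signs the query parameters in case-insensitive sorted order, so a deterministic iteration order is required for the signature to verify. A hedged sketch of the canonicalization sign_request presumably performs (illustrative helper, not s3cmd's actual implementation):

import base64, hashlib, hmac

def sign_request_v1(parameters, secret_key):
    # Illustrative only: concatenate name+value in case-insensitive key
    # order, HMAC-SHA1 with the secret key, base64-encode.
    data = ""
    for key in sorted(parameters, key=lambda k: k.lower()):
        data += key + str(parameters[key])
    digest = hmac.new(secret_key.encode("utf-8"), data.encode("utf-8"), hashlib.sha1).digest()
    return base64.b64encode(digest).decode("ascii")

print(sign_request_v1({'Action': 'ListDomains', 'AWSAccessKeyId': 'AKexample'}, 'secret'))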
Example #16
    def __init__(self, s3, method_string, resource, headers, params = {}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case = True)
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.update_timestamp()
        self.sign()
Example #17
 def create_request(self, Action, DomainName, parameters=None):
     if not parameters:
         parameters = SortedDict()
     parameters['AWSAccessKeyId'] = self.config.access_key
     parameters['Version'] = self.Version
     parameters['SignatureVersion'] = self.SignatureVersion
     parameters['Action'] = Action
     parameters['Timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%SZ",
                                             time.gmtime())
     if DomainName:
         parameters['DomainName'] = DomainName
     parameters['Signature'] = self.sign_request(parameters)
     parameters.keys_return_lowercase = False
     uri_params = urllib.urlencode(parameters)
     request = {}
     request['uri_params'] = uri_params
     request['parameters'] = parameters
     return request
Example #18
    def _get_filelist_remote(remote_uri, recursive=True):
        ## If remote_uri ends with '/' then all remote files will have
        ## the remote_uri prefix removed in the relative path.
        ## If, on the other hand, the remote_uri ends with something else
        ## (probably alphanumeric symbol) we'll use the last path part
        ## in the relative path.
        ##
        ## Complicated, eh? See an example:
        ## _get_filelist_remote("s3://bckt/abc/def") may yield:
        ## { 'def/file1.jpg' : {}, 'def/xyz/blah.txt' : {} }
        ## _get_filelist_remote("s3://bckt/abc/def/") will yield:
        ## { 'file1.jpg' : {}, 'xyz/blah.txt' : {} }
        ## Furthermore a prefix-magic can restrict the return list:
        ## _get_filelist_remote("s3://bckt/abc/def/x") yields:
        ## { 'xyz/blah.txt' : {} }

        info(u"Retrieving list of remote files for %s ..." % remote_uri)

        s3 = S3(Config())
        response = s3.bucket_list(remote_uri.bucket(),
                                  prefix=remote_uri.object(),
                                  recursive=recursive)

        rem_base_original = rem_base = remote_uri.object()
        remote_uri_original = remote_uri
        if rem_base != '' and rem_base[-1] != '/':
            rem_base = rem_base[:rem_base.rfind('/') + 1]
            remote_uri = S3Uri("s3://%s/%s" % (remote_uri.bucket(), rem_base))
        rem_base_len = len(rem_base)
        rem_list = SortedDict(ignore_case=False)
        break_now = False
        for object in response['list']:
            if object['Key'] == rem_base_original and object['Key'][-1] != os.path.sep:
                ## We asked for one file and we got that file :-)
                key = os.path.basename(object['Key'])
                object_uri_str = remote_uri_original.uri()
                break_now = True
                rem_list = {}  ## Remove whatever has already been put to rem_list
            else:
                key = object['Key'][rem_base_len:]  ## Beware - this may be '' if object['Key']==rem_base !!
                object_uri_str = remote_uri.uri() + key
            rem_list[key] = {
                'size': int(object['Size']),
                'timestamp': dateS3toUnix(object['LastModified']),  ## Sadly it's upload time, not our lastmod time :-(
                'md5': object['ETag'][1:-1],
                'object_key': object['Key'],
                'object_uri_str': object_uri_str,
                'base_uri': remote_uri,
            }
            if break_now:
                break
        return rem_list
Example #19
	def create_request(self, operation, uri = None, bucket = None, object = None, headers = None, extra = None, **params):
		resource = { 'bucket' : None, 'uri' : "/" }

		if uri and (bucket or object):
			raise ValueError("Both 'uri' and either 'bucket' or 'object' parameters supplied")
		## If URI is given use that instead of bucket/object parameters
		if uri:
			bucket = uri.bucket()
			object = uri.has_object() and uri.object() or None

		if bucket:
			resource['bucket'] = str(bucket)
			if object:
				resource['uri'] = "/" + self.urlencode_string(object)
		if extra:
			resource['uri'] += extra

		if not headers:
			headers = SortedDict()

		debug("headers: %s" % headers)

		if headers.has_key("date"):
			if not headers.has_key("x-amz-date"):
				headers["x-amz-date"] = headers["date"]
			del(headers["date"])
		
		if not headers.has_key("x-amz-date"):
			headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

		method_string = S3.http_methods.getkey(S3.operations[operation] & S3.http_methods["MASK"])
		signature = self.sign_headers(method_string, resource, headers)
		headers["Authorization"] = "AWS "+self.config.access_key+":"+signature
		param_str = ""
		for param in params:
			if params[param] not in (None, ""):
				param_str += "&%s=%s" % (param, params[param])
			else:
				param_str += "&%s" % param
		if param_str != "":
			resource['uri'] += "?" + param_str[1:]
		debug("CreateRequest: resource[uri]=" + resource['uri'])
		return (method_string, resource, headers)
Example #20
 def ListDomains(self, MaxNumberOfDomains=100):
     '''
     Lists all domains associated with our Access Key. Returns
     domain names up to the limit set by MaxNumberOfDomains.
     '''
     parameters = SortedDict()
     parameters['MaxNumberOfDomains'] = MaxNumberOfDomains
     return self.send_request("ListDomains",
                              DomainName=None,
                              parameters=parameters)
Example #21
 def GetAttributes(self, DomainName, ItemName, Attributes=[]):
     parameters = SortedDict()
     parameters['ItemName'] = ItemName
     seq = 0
     for attrib in Attributes:
         parameters['AttributeName.%d' % seq] = attrib
         seq += 1
     return self.send_request("GetAttributes",
                              DomainName=DomainName,
                              parameters=parameters)
Example #22
    def __getitem__(self, k=None):
        size = len(self._sorted_keys)
        if size == 0:
            return None
        if k == None:
            return SortedDict.__getitem__(self, self._sorted_keys[0])

        indexOfFirstGreaterOrEqual = bisect_left(self._sorted_keys, k)
        
        if size > indexOfFirstGreaterOrEqual:
            if self._sorted_keys[indexOfFirstGreaterOrEqual] == k:
                return SortedDict.__getitem__(self, k)
        if indexOfFirstGreaterOrEqual == 0:
            return None
        if self.interpolationType == "STEP":
            return SortedDict.__getitem__(self, self._sorted_keys[indexOfFirstGreaterOrEqual-1])
        else:
            # linear interpolation (for both "RAMP" and "LINEAR")
            try:
                t1 = self._sorted_keys[indexOfFirstGreaterOrEqual-1]
                t2 = self._sorted_keys[indexOfFirstGreaterOrEqual]
                v1 = SortedDict.__getitem__(self, t1)
                v2 = SortedDict.__getitem__(self, t2)
                return v1+((v2-v1)*(k-t1))/(t2-t1)
            except:
                # if linear interpolation fails, do step
                return SortedDict.__getitem__(self, self._sorted_keys[indexOfFirstGreaterOrEqual-1])
Example #23
    def __getitem__(self, k=None):
        size = len(self._sorted_keys)
        if size == 0:
            return None
        if k == None:
            return SortedDict.__getitem__(self, self._sorted_keys[0])

        indexOfFirstGreaterOrEqual = bisect_left(self._sorted_keys, k)

        if size > indexOfFirstGreaterOrEqual:
            if self._sorted_keys[indexOfFirstGreaterOrEqual] == k:
                return SortedDict.__getitem__(self, k)
        if indexOfFirstGreaterOrEqual == 0:
            return None
        if self.interpolationType == "STEP":
            return SortedDict.__getitem__(
                self, self._sorted_keys[indexOfFirstGreaterOrEqual - 1])
        else:
            # linear interpolation (for both "RAMP" and "LINEAR")
            try:
                t1 = self._sorted_keys[indexOfFirstGreaterOrEqual - 1]
                t2 = self._sorted_keys[indexOfFirstGreaterOrEqual]
                v1 = SortedDict.__getitem__(self, t1)
                v2 = SortedDict.__getitem__(self, t2)
                return v1 + ((v2 - v1) * (k - t1)) / (t2 - t1)
            except:
                # if linear interpolation fails, do step
                return SortedDict.__getitem__(
                    self, self._sorted_keys[indexOfFirstGreaterOrEqual - 1])
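The lookup rules of this interpolating SortedDict are easier to see with plain lists. A stand-in sketch (illustrative, assuming _sorted_keys is kept in ascending order):

from bisect import bisect_left

def interpolate(keys, values, k, kind="LINEAR"):
    # Stand-in for the __getitem__ above: keys is the ascending key list,
    # values the matching value list (illustrative, not the real class).
    i = bisect_left(keys, k)
    if i < len(keys) and keys[i] == k:
        return values[i]           # exact key: no interpolation needed
    if i == 0:
        return None                # before the first key
    if kind == "STEP" or i == len(keys):
        # hold the previous value; past the end the original also falls
        # back to step via its try/except
        return values[i - 1]
    t1, t2 = keys[i - 1], keys[i]
    v1, v2 = values[i - 1], values[i]
    return v1 + ((v2 - v1) * (k - t1)) / (t2 - t1)

print(interpolate([0, 10], [0.0, 100.0], 5))          # 50.0
print(interpolate([0, 10], [0.0, 100.0], 5, "STEP"))  # 0.0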
Example #24
    def website_delete(self, uri, bucket_location = None):
        headers = SortedDict(ignore_case = True)
        bucket = uri.bucket()
        body = ""

        request = self.create_request("BUCKET_DELETE", bucket = bucket, extra="?website")
        debug("About to send request '%s' with body '%s'" % (request, body))
        response = self.send_request(request, body)
        debug("Received response '%s'" % (response))

        if response['status'] != 204:
            raise S3ResponseError("Expected status 204: %s" % response)

        return response
Example #25
 def Query(self,
           DomainName,
           QueryExpression=None,
           MaxNumberOfItems=None,
           NextToken=None):
     parameters = SortedDict()
     if QueryExpression:
         parameters['QueryExpression'] = QueryExpression
     if MaxNumberOfItems:
         parameters['MaxNumberOfItems'] = MaxNumberOfItems
     if NextToken:
         parameters['NextToken'] = NextToken
     return self.send_request("Query",
                              DomainName=DomainName,
                              parameters=parameters)
Example #26
File: S3.py Project: jmehnle/s3cmd
    def __init__(self, s3, method_string, resource, headers, params = {}):
        self.s3 = s3
        self.headers = SortedDict(headers or {}, ignore_case = True)
        # Add in any extra headers from s3 config object
        if self.s3.config.extra_headers:
            self.headers.update(self.s3.config.extra_headers)
        if len(self.s3.config.access_token)>0:
            self.s3.config.role_refresh()
            self.headers['x-amz-security-token']=self.s3.config.access_token
        self.resource = resource
        self.method_string = method_string
        self.params = params

        self.update_timestamp()
        self.sign()
Example #27
    def website_list(self, uri, bucket_location=None):
        headers = SortedDict(ignore_case=True)
        bucket = uri.bucket()
        body = ""

        request = self.create_request("WEBSITE_LIST",
                                      bucket=bucket,
                                      extra="?website")
        response = None
        try:
            response = self.send_request(request, body)
        except S3Error, e:
            if e.status == 404:
                debug("Could not get ?website. Assuming none set.")
            else:
                raise
Example #28
    def expiration_info(self, uri, bucket_location = None):
        headers = SortedDict(ignore_case = True)
        bucket = uri.bucket()
        body = ""

        request = self.create_request("BUCKET_LIST", bucket = bucket, extra="?lifecycle")
        try:
            response = self.send_request(request, body)
            response['prefix'] = getTextFromXml(response['data'], ".//Rule//Prefix")
            response['date'] = getTextFromXml(response['data'], ".//Rule//Expiration//Date")
            response['days'] = getTextFromXml(response['data'], ".//Rule//Expiration//Days")
            return response
        except S3Error, e:
            if e.status == 404:
                debug("Could not get /?lifecycle - lifecycle probably not configured for this bucket")
                return None
            raise
Example #29
	def __init__(self, parent=None):
		self.contacts = SortedDict()

		QWidget.__init__(self,parent)

		self.ui = Ui_MainWindow()
		self.ui.setupUi(self)

		self.initUI()
		
		self.setWindowTitle("Xen CRM "+self._version)

		self.UpdateContacts.connect(self.updateContactsTable)

		self.tableModel = TableModel(self.contacts)
		self.ui.tableView.setSelectionBehavior(QAbstractItemView.SelectRows)
		self.ui.tableView.setModel(self.tableModel)
Example #30
 def bucket_create(self, bucket, bucket_location=None):
     headers = SortedDict(ignore_case=True)
     body = ""
     if bucket_location and bucket_location.strip().upper() != "US":
         body = "<CreateBucketConfiguration><LocationConstraint>"
         body += bucket_location.strip().upper()
         body += "</LocationConstraint></CreateBucketConfiguration>"
         debug("bucket_location: " + body)
         self.check_bucket_name(bucket, dns_strict=True)
     else:
         self.check_bucket_name(bucket, dns_strict=False)
     if self.config.acl_public:
         headers["x-amz-acl"] = "public-read"
     request = self.create_request("BUCKET_CREATE",
                                   bucket=bucket,
                                   headers=headers)
     response = self.send_request(request, body)
     return response
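Region handling follows the classic S3 convention: the US region is selected with an empty request body, any other location via a CreateBucketConfiguration document. A sketch of just the body construction (hypothetical helper):

def bucket_create_body(bucket_location=None):
    # Mirrors the body construction in bucket_create above (illustrative
    # helper): the classic US region needs an empty body, anything else a
    # CreateBucketConfiguration document.
    if bucket_location and bucket_location.strip().upper() != "US":
        return ("<CreateBucketConfiguration><LocationConstraint>"
                + bucket_location.strip().upper()
                + "</LocationConstraint></CreateBucketConfiguration>")
    return ""

print(bucket_create_body("EU"))
print(repr(bucket_create_body("US")))  # ''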
Example #31
    def website_create(self, uri, bucket_location = None):
        headers = SortedDict(ignore_case = True)
        bucket = uri.bucket()
        body = '<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
        body += '  <IndexDocument>'
        body += ('    <Suffix>%s</Suffix>' % self.config.website_index)
        body += '  </IndexDocument>'
        if self.config.website_error:
            body += '  <ErrorDocument>'
            body += ('    <Key>%s</Key>' % self.config.website_error)
            body += '  </ErrorDocument>'
        body += '</WebsiteConfiguration>'

        request = self.create_request("BUCKET_CREATE", bucket = bucket, extra="?website")
        debug("About to send request '%s' with body '%s'" % (request, body))
        response = self.send_request(request, body)
        debug("Received response '%s'" % (response))

        return response
Example #32
 def object_copy(self, src_uri, dst_uri, extra_headers=None):
     if src_uri.type != "s3":
         raise ValueError("Expected URI type 's3', got '%s'" % src_uri.type)
     if dst_uri.type != "s3":
         raise ValueError("Expected URI type 's3', got '%s'" % dst_uri.type)
     headers = SortedDict(ignore_case=True)
     headers['x-amz-copy-source'] = "/%s/%s" % (
         src_uri.bucket(), self.urlencode_string(src_uri.object()))
     ## TODO: For now COPY, later maybe add a switch?
     headers['x-amz-metadata-directive'] = "COPY"
     if self.config.acl_public:
         headers["x-amz-acl"] = "public-read"
     # if extra_headers:
     # 	headers.update(extra_headers)
     request = self.create_request("OBJECT_PUT",
                                   uri=dst_uri,
                                   headers=headers)
     response = self.send_request(request)
     return response
Example #33
    def _expiration_set(self, uri):
        debug("put bucket lifecycle")
        body = '<LifecycleConfiguration>'
        body += '  <Rule>'
        body += ('    <Prefix>%s</Prefix>' % self.config.expiry_prefix)
        body += ('    <Status>Enabled</Status>')
        body += ('    <Expiration>')
        if self.config.expiry_date:
            body += ('    <Date>%s</Date>' % self.config.expiry_date)
        elif self.config.expiry_days:
            body += ('    <Days>%s</Days>' % self.config.expiry_days)
        body += ('    </Expiration>')
        body += '  </Rule>'
        body += '</LifecycleConfiguration>'

        headers = SortedDict(ignore_case = True)
        headers['content-md5'] = compute_content_md5(body)
        bucket = uri.bucket()
        request =  self.create_request("BUCKET_CREATE", bucket = bucket, headers = headers, extra="?lifecycle")
        return (request, body)
Example #34
	def readContact(self):
		tempContact=SortedDict()
		tempContact["FirstName"]=self.ui.inputFirstName.text()
		tempContact["LastName"]=self.ui.inputLastName.text()
		tempContact["MiddleName"]=self.ui.inputMiddleName.text()
		tempContact["KnownAs"]=self.ui.inputKnownAs.text()
		tempContact["HouseNumber"]=self.ui.inputHouseNumber.text()
		tempContact["Street"]=self.ui.inputStreet.text()
		tempContact["Town"]=self.ui.inputTown.text()
		tempContact["PostalTown"]=self.ui.inputPostalTown.text()
		tempContact["PostCode"]=self.ui.inputPostCode.text()
		tempContact["Telephone"]=self.ui.inputTelephone.text()
		tempContact["Mobile"]=self.ui.inputMobile.text()
		tempContact["Email"]=self.ui.inputEmail.text()

		if(self.currentKey != ""):
			tempContact["Key"]=self.currentKey
		
		self.ContactAdd.emit(tempContact)
		self.currentKey=""
		self.hide()
Example #35
 def DeleteAttributes(self, DomainName, ItemName, Attributes={}):
     """
     Remove specified Attributes from ItemName.
     Attributes parameter can be either:
     - not specified, in which case the whole Item is removed
     - list, e.g. ['Attr1', 'Attr2'] in which case these parameters are removed
     - dict, e.g. {'Attr' : 'One', 'Attr' : 'Two'} in which case the
       specified values are removed from multi-value attributes.
     """
     parameters = SortedDict()
     parameters['ItemName'] = ItemName
     seq = 0
     for attrib in Attributes:
         parameters['Attribute.%d.Name' % seq] = attrib
         if type(Attributes) == type(dict()):
             parameters['Attribute.%d.Value' % seq] = unicode(
                 Attributes[attrib])
         seq += 1
     return self.send_request("DeleteAttributes",
                              DomainName=DomainName,
                              parameters=parameters)
Example #36
 def PutAttributes(self, DomainName, ItemName, Attributes):
     parameters = SortedDict()
     parameters['ItemName'] = ItemName
     seq = 0
     for attrib in Attributes:
         if type(Attributes[attrib]) == type(list()):
             for value in Attributes[attrib]:
                 parameters['Attribute.%d.Name' % seq] = attrib
                 parameters['Attribute.%d.Value' % seq] = unicode(value)
                 seq += 1
         else:
             parameters['Attribute.%d.Name' % seq] = attrib
             parameters['Attribute.%d.Value' % seq] = unicode(
                 Attributes[attrib])
             seq += 1
     ## TODO:
     ## - support for Attribute.N.Replace
     ## - support for multiple values for one attribute
     return self.send_request("PutAttributes",
                              DomainName=DomainName,
                              parameters=parameters)
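The SimpleDB wire format flattens attributes into numbered Attribute.N.Name/Attribute.N.Value pairs, with list values consuming several consecutive indices. A standalone sketch of that flattening (illustrative helper, plain dict in place of SortedDict):

def flatten_attributes(attributes):
    # Mirrors the Attribute.N.Name/Value flattening in PutAttributes above.
    parameters = {}
    seq = 0
    for attrib in attributes:
        values = attributes[attrib]
        if not isinstance(values, list):
            values = [values]
        for value in values:
            parameters['Attribute.%d.Name' % seq] = attrib
            parameters['Attribute.%d.Value' % seq] = str(value)
            seq += 1
    return parameters

print(flatten_attributes({'colour': ['red', 'blue'], 'size': 42}))
# {'Attribute.0.Name': 'colour', 'Attribute.0.Value': 'red',
#  'Attribute.1.Name': 'colour', 'Attribute.1.Value': 'blue',
#  'Attribute.2.Name': 'size', 'Attribute.2.Value': '42'}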
Example #37
 def _get_filelist_local(local_uri):
     info(u"Compiling list of local files...")
     if local_uri.isdir():
         local_base = deunicodise(local_uri.basename())
         local_path = deunicodise(local_uri.path())
         filelist = _fswalk(local_path, cfg.follow_symlinks)
         single_file = False
     else:
         local_base = ""
         local_path = deunicodise(local_uri.dirname())
         filelist = [(local_path, [], [deunicodise(local_uri.basename())])]
         single_file = True
     loc_list = SortedDict(ignore_case=False)
     for root, dirs, files in filelist:
         rel_root = root.replace(local_path, local_base, 1)
         for f in files:
             full_name = os.path.join(root, f)
             if not os.path.isfile(full_name):
                 continue
             if os.path.islink(full_name):
                 if not cfg.follow_symlinks:
                     continue
             relative_file = unicodise(os.path.join(rel_root, f))
             if os.path.sep != "/":
                 # Convert non-unix dir separators to '/'
                 relative_file = "/".join(relative_file.split(os.path.sep))
             if cfg.urlencoding_mode == "normal":
                 relative_file = replace_nonprintables(relative_file)
             if relative_file.startswith('./'):
                 relative_file = relative_file[2:]
             sr = os.stat_result(os.lstat(full_name))
             loc_list[relative_file] = {
                 'full_name_unicode': unicodise(full_name),
                 'full_name': full_name,
                 'size': sr.st_size,
                 'mtime': sr.st_mtime,
                 ## TODO: Possibly more to save here...
             }
     return loc_list, single_file
Example #38
File: S3.py Project: jmehnle/s3cmd
        if uri.type != "s3":
            raise ValueError("Expected URI type 's3', got '%s'" % uri.type)

        if filename != "-" and not os.path.isfile(filename):
            raise InvalidFileError(u"%s is not a regular file" % unicodise(filename))
        try:
            if filename == "-":
                file = sys.stdin
                size = 0
            else:
                file = open(filename, "rb")
                size = os.stat(filename)[ST_SIZE]
        except (IOError, OSError), e:
            raise InvalidFileError(u"%s: %s" % (unicodise(filename), e.strerror))

        headers = SortedDict(ignore_case = True)
        if extra_headers:
            headers.update(extra_headers)

        ## MIME-type handling
        content_type = self.config.mime_type
        content_encoding = None
        if filename != "-" and not content_type and self.config.guess_mime_type:
            (content_type, content_encoding) = mime_magic(filename)
        if not content_type:
            content_type = self.config.default_mime_type

        ## add charset to content type
        if self.add_encoding(filename, content_type):
            content_type = content_type + "; charset=" + self.config.encoding.upper()
Example #39
 def __init__(self, mapping = {}, ignore_case = True, **kwargs):
     SortedDict.__init__(self, mapping = mapping, ignore_case = ignore_case, **kwargs)
     self.hardlinks = dict() # { dev: { inode : {'md5':, 'relative_files':}}}
     self.by_md5 = dict() # {md5: set(relative_files)}
Example #40
    def _get_filelist_local(loc_list, local_uri, cache):
        info(u"Compiling list of local files...")

        if deunicodise(local_uri.basename()) == "-":
            loc_list = SortedDict(ignore_case = False)
            loc_list["-"] = {
                'full_name_unicode' : '-',
                'full_name' : '-',
                'size' : -1,
                'mtime' : -1,
            }
            return loc_list, True
        if local_uri.isdir():
            local_base = deunicodise(local_uri.basename())
            local_path = deunicodise(local_uri.path())
            if cfg.follow_symlinks:
                filelist = _fswalk_follow_symlinks(local_path)
            else:
                filelist = _fswalk_no_symlinks(local_path)
            single_file = False
        else:
            local_base = ""
            local_path = deunicodise(local_uri.dirname())
            filelist = [( local_path, [], [deunicodise(local_uri.basename())] )]
            single_file = True
        for root, dirs, files in filelist:
            rel_root = root.replace(local_path, local_base, 1)
            for f in files:
                full_name = os.path.join(root, f)
                if not os.path.isfile(full_name):
                    continue
                if os.path.islink(full_name):
                    if not cfg.follow_symlinks:
                        continue
                relative_file = unicodise(os.path.join(rel_root, f))
                if os.path.sep != "/":
                    # Convert non-unix dir separators to '/'
                    relative_file = "/".join(relative_file.split(os.path.sep))
                if cfg.urlencoding_mode == "normal":
                    relative_file = replace_nonprintables(relative_file)
                if relative_file.startswith('./'):
                    relative_file = relative_file[2:]
                sr = os.stat_result(os.lstat(full_name))
                loc_list[relative_file] = {
                    'full_name_unicode' : unicodise(full_name),
                    'full_name' : full_name,
                    'size' : sr.st_size,
                    'mtime' : sr.st_mtime,
                    'dev'   : sr.st_dev,
                    'inode' : sr.st_ino,
                    'uid' : sr.st_uid,
                    'gid' : sr.st_gid,
                    'sr': sr # save it all, may need it in preserve_attrs_list
                    ## TODO: Possibly more to save here...
                }
                if 'md5' in cfg.sync_checks:
                    md5 = cache.md5(sr.st_dev, sr.st_ino, sr.st_mtime, sr.st_size)
                    if md5 is None:
                        try:
                            md5 = loc_list.get_md5(relative_file)  # this does the file I/O
                        except IOError:
                            continue
                        cache.add(sr.st_dev, sr.st_ino, sr.st_mtime, sr.st_size, md5)
                    loc_list.record_hardlink(relative_file, sr.st_dev, sr.st_ino, md5)
        return loc_list, single_file
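The cache passed in above is consulted before any file I/O: cache.md5() returns a previously computed digest keyed by device, inode, mtime and size, so a file is only re-hashed when one of those changes. A minimal sketch of that assumed contract (not s3cmd's actual HashCache class):

class HashCacheSketch(object):
    # Assumed shape of the cache used by _get_filelist_local above:
    # digests are keyed by (dev, inode, mtime, size).
    def __init__(self):
        self._store = {}

    def md5(self, dev, inode, mtime, size):
        # None means "not cached yet" and triggers the real file I/O.
        return self._store.get((dev, inode, mtime, size))

    def add(self, dev, inode, mtime, size, md5):
        self._store[(dev, inode, mtime, size)] = md5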
Example #41
def fetch_remote_list(args, require_attribs = False, recursive = None):
    def _get_filelist_remote(remote_uri, recursive = True):
        ## If remote_uri ends with '/' then all remote files will have
        ## the remote_uri prefix removed in the relative path.
        ## If, on the other hand, the remote_uri ends with something else
        ## (probably alphanumeric symbol) we'll use the last path part
        ## in the relative path.
        ##
        ## Complicated, eh? See an example:
        ## _get_filelist_remote("s3://bckt/abc/def") may yield:
        ## { 'def/file1.jpg' : {}, 'def/xyz/blah.txt' : {} }
        ## _get_filelist_remote("s3://bckt/abc/def/") will yield:
        ## { 'file1.jpg' : {}, 'xyz/blah.txt' : {} }
        ## Furthermore a prefix-magic can restrict the return list:
        ## _get_filelist_remote("s3://bckt/abc/def/x") yields:
        ## { 'xyz/blah.txt' : {} }

        info(u"Retrieving list of remote files for %s ..." % remote_uri)

        s3 = S3(Config())
        response = s3.bucket_list(remote_uri.bucket(), prefix = remote_uri.object(), recursive = recursive)

        rem_base_original = rem_base = remote_uri.object()
        remote_uri_original = remote_uri
        if rem_base != '' and rem_base[-1] != '/':
            rem_base = rem_base[:rem_base.rfind('/')+1]
            remote_uri = S3Uri("s3://%s/%s" % (remote_uri.bucket(), rem_base))
        rem_base_len = len(rem_base)
        rem_list = SortedDict(ignore_case = False)
        break_now = False
        for object in response['list']:
            if object['Key'] == rem_base_original and object['Key'][-1] != os.path.sep:
                ## We asked for one file and we got that file :-)
                key = os.path.basename(object['Key'])
                object_uri_str = remote_uri_original.uri()
                break_now = True
                rem_list = SortedDict(ignore_case = False)   ## Remove whatever has already been put to rem_list
            else:
                key = object['Key'][rem_base_len:]      ## Beware - this may be '' if object['Key']==rem_base !!
                object_uri_str = remote_uri.uri() + key
            rem_list[key] = {
                'size' : int(object['Size']),
                'timestamp' : dateS3toUnix(object['LastModified']), ## Sadly it's upload time, not our lastmod time :-(
                'md5' : object['ETag'][1:-1],
                'object_key' : object['Key'],
                'object_uri_str' : object_uri_str,
                'base_uri' : remote_uri,
                'dev' : None,
                'inode' : None,
            }
            md5 = object['ETag'][1:-1]
            rem_list.record_md5(key, md5)
            if break_now:
                break
        return rem_list

    cfg = Config()
    remote_uris = []
    remote_list = SortedDict(ignore_case = False)

    if type(args) not in (list, tuple):
        args = [args]

    if recursive == None:
        recursive = cfg.recursive

    for arg in args:
        uri = S3Uri(arg)
        if not uri.type == 's3':
            raise ParameterError("Expecting S3 URI instead of '%s'" % arg)
        remote_uris.append(uri)

    if recursive:
        for uri in remote_uris:
            objectlist = _get_filelist_remote(uri)
            for key in objectlist:
                remote_list[key] = objectlist[key]
                remote_list.record_md5(key, objectlist.get_md5(key))
    else:
        for uri in remote_uris:
            uri_str = str(uri)
            ## Wildcards used in remote URI?
            ## If yes we'll need a bucket listing...
            if uri_str.find('*') > -1 or uri_str.find('?') > -1:
                first_wildcard = uri_str.find('*')
                first_questionmark = uri_str.find('?')
                if first_questionmark > -1 and first_questionmark < first_wildcard:
                    first_wildcard = first_questionmark
                prefix = uri_str[:first_wildcard]
                rest = uri_str[first_wildcard+1:]
                ## Only request recursive listing if the 'rest' of the URI,
                ## i.e. the part after first wildcard, contains '/'
                need_recursion = rest.find('/') > -1
                objectlist = _get_filelist_remote(S3Uri(prefix), recursive = need_recursion)
                for key in objectlist:
                    ## Check whether the 'key' matches the requested wildcards
                    if glob.fnmatch.fnmatch(objectlist[key]['object_uri_str'], uri_str):
                        remote_list[key] = objectlist[key]
            else:
                ## No wildcards - simply append the given URI to the list
                key = os.path.basename(uri.object())
                if not key:
                    raise ParameterError(u"Expecting S3 URI with a filename or --recursive: %s" % uri.uri())
                remote_item = {
                    'base_uri': uri,
                    'object_uri_str': unicode(uri),
                    'object_key': uri.object()
                }
                if require_attribs:
                    response = S3(cfg).object_info(uri)
                    remote_item.update({
                    'size': int(response['headers']['content-length']),
                    'md5': response['headers']['etag'].strip('"\''),
                    'timestamp' : dateRFC822toUnix(response['headers']['date'])
                    })
                    # get md5 from header if it's present.  We would have set that during upload
                    if response['headers'].has_key('x-amz-meta-s3cmd-attrs'):
                        attrs = parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
                        if attrs.has_key('md5'):
                            remote_item.update({'md5': attrs['md5']})

                remote_list[key] = remote_item
    return remote_list
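The wildcard branch splits the URI at the first '*' or '?', lists from the literal prefix and then filters with fnmatch; a recursive listing is only requested when the wildcard tail spans a '/'. A standalone sketch of the split (illustrative helper; it also guards the '?'-only case, which the code above does not):

import fnmatch

def split_wildcard_uri(uri_str):
    # Reproduces the wildcard split in fetch_remote_list above: list from
    # the longest literal prefix, and only recurse when the part after the
    # first wildcard still contains a '/'.
    first_wildcard = uri_str.find('*')
    first_questionmark = uri_str.find('?')
    if first_questionmark > -1 and (first_wildcard == -1 or first_questionmark < first_wildcard):
        first_wildcard = first_questionmark
    prefix = uri_str[:first_wildcard]
    need_recursion = '/' in uri_str[first_wildcard + 1:]
    return prefix, need_recursion

print(split_wildcard_uri("s3://bckt/abc/*.jpg"))    # ('s3://bckt/abc/', False)
print(split_wildcard_uri("s3://bckt/abc/*/b.txt"))  # ('s3://bckt/abc/', True)
print(fnmatch.fnmatch("s3://bckt/abc/def.jpg", "s3://bckt/abc/*.jpg"))  # True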
Example #42
def fetch_local_list(args, recursive = None):
    def _get_filelist_local(local_uri):
        info(u"Compiling list of local files...")
        if local_uri.isdir():
            local_base = deunicodise(local_uri.basename())
            local_path = deunicodise(local_uri.path())
            filelist = _fswalk(local_path, cfg.follow_symlinks)
            single_file = False
        else:
            local_base = ""
            local_path = deunicodise(local_uri.dirname())
            filelist = [( local_path, [], [deunicodise(local_uri.basename())] )]
            single_file = True
        loc_list = SortedDict(ignore_case = False)
        for root, dirs, files in filelist:
            rel_root = root.replace(local_path, local_base, 1)
            for f in files:
                full_name = os.path.join(root, f)
                if not os.path.isfile(full_name):
                    continue
                if os.path.islink(full_name):
                    if not cfg.follow_symlinks:
                        continue
                relative_file = unicodise(os.path.join(rel_root, f))
                if os.path.sep != "/":
                    # Convert non-unix dir separators to '/'
                    relative_file = "/".join(relative_file.split(os.path.sep))
                if cfg.urlencoding_mode == "normal":
                    relative_file = replace_nonprintables(relative_file)
                if relative_file.startswith('./'):
                    relative_file = relative_file[2:]
                sr = os.stat_result(os.lstat(full_name))
                loc_list[relative_file] = {
                    'full_name_unicode' : unicodise(full_name),
                    'full_name' : full_name,
                    'size' : sr.st_size,
                    'mtime' : sr.st_mtime,
                    'timestamp' : sr.st_mtime,
                    ## TODO: Possibly more to save here...
                }
        return loc_list, single_file

    cfg = Config()
    local_uris = []
    local_list = SortedDict(ignore_case = False)
    single_file = False

    if type(args) not in (list, tuple):
        args = [args]

    if recursive == None:
        recursive = cfg.recursive

    for arg in args:
        uri = S3Uri(arg)
        if not uri.type == 'file':
            raise ParameterError("Expecting filename or directory instead of: %s" % arg)
        if uri.isdir() and not recursive:
            raise ParameterError("Use --recursive to upload a directory: %s" % arg)
        local_uris.append(uri)

    for uri in local_uris:
        list_for_uri, single_file = _get_filelist_local(uri)
        local_list.update(list_for_uri)

    ## Single file is True if and only if the user
    ## specified one local URI and that URI represents
    ## a FILE. Ie it is False if the URI was of a DIR
    ## and that dir contained only one FILE. That's not
    ## a case of single_file==True.
    if len(local_list) > 1:
        single_file = False

    return local_list, single_file
Example #43
        if filename != "-" and not os.path.isfile(filename):
            raise InvalidFileError(u"%s is not a regular file" %
                                   unicodise(filename))
        try:
            if filename == "-":
                file = sys.stdin
                size = 0
            else:
                file = open(filename, "rb")
                size = os.stat(filename)[ST_SIZE]
        except (IOError, OSError), e:
            raise InvalidFileError(u"%s: %s" %
                                   (unicodise(filename), e.strerror))

        headers = SortedDict(ignore_case=True)
        if extra_headers:
            headers.update(extra_headers)

        ## MIME-type handling
        content_type = self.config.mime_type
        content_encoding = None
        if filename != "-" and not content_type and self.config.guess_mime_type:
            (content_type, content_encoding) = mime_magic(filename)
        if not content_type:
            content_type = self.config.default_mime_type
        if not content_encoding:
            content_encoding = self.config.encoding.upper()

        ## add charset to content type
        if self.add_encoding(filename, content_type):
            content_type = content_type + "; charset=" + self.config.encoding.upper()
Example #44
class Config(object):
    _instance = None
    _parsed_files = []
    _doc = {}
    access_key = ""
    secret_key = ""
    access_token = ""
    _access_token_refresh = True
    host_base = "s3.amazonaws.com"
    host_bucket = "%(bucket)s.s3.amazonaws.com"
    kms_key = ""  #can't set this and Server Side Encryption at the same time
    # simpledb_host looks useless, legacy? to remove?
    simpledb_host = "sdb.amazonaws.com"
    cloudfront_host = "cloudfront.amazonaws.com"
    verbosity = logging.WARNING
    progress_meter = sys.stdout.isatty()
    progress_class = Progress.ProgressCR
    send_chunk = 64 * 1024
    recv_chunk = 64 * 1024
    list_md5 = False
    long_listing = False
    human_readable_sizes = False
    extra_headers = SortedDict(ignore_case=True)
    force = False
    server_side_encryption = False
    enable = None
    get_continue = False
    put_continue = False
    upload_id = None
    skip_existing = False
    recursive = False
    restore_days = 1
    acl_public = None
    acl_grants = []
    acl_revokes = []
    proxy_host = ""
    proxy_port = 3128
    encrypt = False
    dry_run = False
    add_encoding_exts = ""
    preserve_attrs = True
    preserve_attrs_list = [
        'uname',  # Verbose owner Name (e.g. 'root')
        'uid',  # Numeric user ID (e.g. 0)
        'gname',  # Group name (e.g. 'users')
        'gid',  # Numeric group ID (e.g. 100)
        'atime',  # Last access timestamp
        'mtime',  # Modification timestamp
        'ctime',  # Creation timestamp
        'mode',  # File mode (e.g. rwxr-xr-x = 755)
        'md5',  # File MD5 (if known)
        #'acl',     # Full ACL (not yet supported)
    ]
    delete_removed = False
    delete_after = False
    delete_after_fetch = False
    max_delete = -1
    limit = -1
    _doc['delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
    delay_updates = False  # OBSOLETE
    gpg_passphrase = ""
    gpg_command = ""
    gpg_encrypt = "%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    gpg_decrypt = "%(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    use_https = True
    ca_certs_file = ""
    check_ssl_certificate = True
    check_ssl_hostname = True
    bucket_location = "US"
    default_mime_type = "binary/octet-stream"
    guess_mime_type = True
    use_mime_magic = True
    mime_type = ""
    enable_multipart = True
    multipart_chunk_size_mb = 15  # MB
    multipart_max_chunks = 10000  # Maximum chunks on AWS S3, could be different on other S3-compatible APIs
    # List of checks to be performed for 'sync'
    sync_checks = ['size', 'md5']  # 'weak-timestamp'
    # List of compiled REGEXPs
    exclude = []
    include = []
    # Dict mapping compiled REGEXPs back to their textual form
    debug_exclude = {}
    debug_include = {}
    encoding = locale.getpreferredencoding() or "UTF-8"
    urlencoding_mode = "normal"
    log_target_prefix = ""
    reduced_redundancy = False
    storage_class = ""
    follow_symlinks = False
    socket_timeout = 300
    invalidate_on_cf = False
    # joseprio: new flags for default index invalidation
    invalidate_default_index_on_cf = False
    invalidate_default_index_root_on_cf = True
    website_index = "index.html"
    website_error = ""
    website_endpoint = "http://%(bucket)s.s3-website-%(location)s.amazonaws.com/"
    additional_destinations = []
    files_from = []
    cache_file = ""
    add_headers = ""
    remove_headers = []
    expiry_days = ""
    expiry_date = ""
    expiry_prefix = ""
    signature_v2 = False
    limitrate = 0
    requester_pays = False
    stop_on_error = False
    content_disposition = None
    content_type = None
    stats = False
    # Disabled by default because it can add latency: a "100 Continue" status
    # reply is expected for every file-send request.
    use_http_expect = False

    ## Creating a singleton
    def __new__(cls,
                configfile=None,
                access_key=None,
                secret_key=None,
                access_token=None):
        if cls._instance is None:
            cls._instance = object.__new__(cls)
        return cls._instance

    def __init__(self,
                 configfile=None,
                 access_key=None,
                 secret_key=None,
                 access_token=None):
        if configfile:
            try:
                self.read_config_file(configfile)
            except IOError:
                if 'AWS_CREDENTIAL_FILE' in os.environ:
                    self.env_config()

            # override these if passed on the command-line
            if access_key and secret_key:
                self.access_key = access_key
                self.secret_key = secret_key

            if access_token:
                self.access_token = access_token
                # Do not refresh the IAM role when an access token is provided.
                self._access_token_refresh = False

            if len(self.access_key) == 0:
                env_access_key = (os.environ.get("AWS_ACCESS_KEY")
                                  or os.environ.get("AWS_ACCESS_KEY_ID"))
                env_secret_key = (os.environ.get("AWS_SECRET_KEY")
                                  or os.environ.get("AWS_SECRET_ACCESS_KEY"))
                env_access_token = (os.environ.get("AWS_SESSION_TOKEN")
                                    or os.environ.get("AWS_SECURITY_TOKEN"))
                if env_access_key:
                    self.access_key = env_access_key
                    self.secret_key = env_secret_key
                    if env_access_token:
                        # Do not refresh the IAM role when an access token is provided.
                        self._access_token_refresh = False
                        self.access_token = env_access_token
                else:
                    self.role_config()

            #TODO check KMS key is valid
            if self.kms_key and self.server_side_encryption:
                warning(
                    'Cannot have both server_side_encryption (S3 SSE) and '
                    'kms_key (S3 KMS) set. KMS encryption will be used. '
                    'Please set server_side_encryption to False.')
            if self.kms_key and self.signature_v2:
                raise Exception(
                    'KMS encryption requires signature v4. '
                    'Please set signature_v2 to False.')

    def role_config(self):
        if sys.version_info[0] * 10 + sys.version_info[1] < 26:
            error("IAM authentication requires Python 2.6 or newer")
            raise RuntimeError("IAM authentication requires Python 2.6 or newer")
        if 'json' not in sys.modules:
            error("IAM authentication not available -- missing module json")
            raise RuntimeError("IAM authentication not available -- missing module json")
        # Query the EC2 instance metadata service for the temporary
        # credentials of the attached IAM role.
        conn = httplib.HTTPConnection(host='169.254.169.254', timeout=2)
        conn.request('GET', "/latest/meta-data/iam/security-credentials/")
        resp = conn.getresponse()
        files = resp.read()
        if resp.status == 200 and len(files) > 1:
            conn.request(
                'GET',
                "/latest/meta-data/iam/security-credentials/%s" % files)
            resp = conn.getresponse()
            if resp.status == 200:
                creds = json.load(resp)
                Config().update_option(
                    'access_key', creds['AccessKeyId'].encode('ascii'))
                Config().update_option(
                    'secret_key', creds['SecretAccessKey'].encode('ascii'))
                Config().update_option('access_token',
                                       creds['Token'].encode('ascii'))
            else:
                raise IOError("Could not fetch credentials for the IAM role")
        else:
            raise IOError("No IAM role found for this instance")

    def role_refresh(self):
        if self._access_token_refresh:
            try:
                self.role_config()
            except Exception:
                warning("Could not refresh role")

    def env_config(self):
        cred_content = ""
        try:
            with open(os.environ['AWS_CREDENTIAL_FILE'], 'r') as cred_file:
                cred_content = cred_file.read()
        except IOError as e:
            debug("Error %d accessing credentials file %s" %
                  (e.errno, os.environ['AWS_CREDENTIAL_FILE']))
        r_data = re.compile("^\s*(?P<orig_key>\w+)\s*=\s*(?P<value>.*)")
        r_quotes = re.compile("^\"(.*)\"\s*$")
        if len(cred_content) > 0:
            for line in cred_content.splitlines():
                is_data = r_data.match(line)
                if is_data:
                    data = is_data.groupdict()
                    if r_quotes.match(data["value"]):
                        data["value"] = data["value"][1:-1]
                    if data["orig_key"] == "AWSAccessKeyId" \
                       or data["orig_key"] == "aws_access_key_id":
                        data["key"] = "access_key"
                    elif data["orig_key"]=="AWSSecretKey" \
                       or data["orig_key"]=="aws_secret_access_key":
                        data["key"] = "secret_key"
                    else:
                        debug("env_config: key = %r will be ignored",
                              data["orig_key"])

                    if "key" in data:
                        Config().update_option(data["key"], data["value"])
                        if data["key"] in ("access_key", "secret_key",
                                           "gpg_passphrase"):
                            print_value = ("%s...%d_chars...%s") % (
                                data["value"][:2], len(data["value"]) - 3,
                                data["value"][-1:])
                        else:
                            print_value = data["value"]
                        debug("env_Config: %s->%s" %
                              (data["key"], print_value))

    def option_list(self):
        retval = []
        for option in dir(self):
            ## Skip attributes that start with underscore or are not string, int or bool
            option_type = type(getattr(Config, option))
            if option.startswith("_") or \
               not (option_type in (
                    type("string"), # str
                        type(42),   # int
                    type(True))):   # bool
                continue
            retval.append(option)
        return retval

    def read_config_file(self, configfile):
        cp = ConfigParser(configfile)
        for option in self.option_list():
            _option = cp.get(option)
            if _option is not None:
                _option = _option.strip()
            self.update_option(option, _option)

        # Allow acl_public to be set from the config file too; it defaults to
        # None, so option_list() (str/int/bool only) would otherwise skip it.
        if cp.get('acl_public'):
            self.update_option('acl_public', cp.get('acl_public'))

        if cp.get('add_headers'):
            for option in cp.get('add_headers').split(","):
                # split on the first ':' only; header values may contain colons
                (key, value) = option.split(':', 1)
                self.extra_headers[key.replace('_', '-').strip()] = value.strip()

        self._parsed_files.append(configfile)

    def dump_config(self, stream):
        ConfigDumper(stream).dump("default", self)

    def update_option(self, option, value):
        if value is None:
            return

        #### Handle environment reference
        if str(value).startswith("$"):
            return self.update_option(option, os.getenv(str(value)[1:]))

        #### Special treatment of some options
        ## verbosity must be known to "logging" module
        if option == "verbosity":
            # support integer verbosities
            try:
                value = int(value)
            except ValueError:
                try:
                    # otherwise it must be a key known to the logging module
                    value = logging._levelNames[value]
                except KeyError:
                    error("Config: verbosity level '%s' is not valid" % value)
                    return

        elif option == "limitrate":
            #convert kb,mb to bytes
            if value.endswith("k") or value.endswith("K"):
                shift = 10
            elif value.endswith("m") or value.endswith("M"):
                shift = 20
            else:
                shift = 0
            try:
                value = shift and int(value[:-1]) << shift or int(value)
            except:
                error(
                    "Config: value of option %s must have suffix m, k, or nothing, not '%s'"
                    % (option, value))
                return

        ## allow yes/no, true/false, on/off and 1/0 for boolean options
        elif type(getattr(Config, option)) is type(True):  # bool
            if str(value).lower() in ("true", "yes", "on", "1"):
                value = True
            elif str(value).lower() in ("false", "no", "off", "0"):
                value = False
            else:
                error(
                    "Config: value of option '%s' must be Yes or No, not '%s'"
                    % (option, value))
                return

        elif type(getattr(Config, option)) is type(42):  # int
            try:
                value = int(value)
            except ValueError:
                error(
                    "Config: value of option '%s' must be an integer, not '%s'"
                    % (option, value))
                return

        elif option in ["host_base", "host_bucket", "cloudfront_host"]:
            if value.startswith("http://"):
                value = value[7:]
            elif value.startswith("https://"):
                value = value[8:]

        setattr(Config, option, value)
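
A minimal, self-contained sketch of the coercion idea implemented by update_option() above: strings read from the config file are converted to the type of the class-level default, and values of the form "$NAME" are resolved from the environment. The DEFAULTS table and the function name are illustrative, not part of s3cmd.

import os

DEFAULTS = {"recursive": False, "proxy_port": 3128, "proxy_host": ""}

def coerce_option(option, value, defaults=DEFAULTS):
    # "$NAME" means: read the real value from the environment
    if str(value).startswith("$"):
        return coerce_option(option, os.getenv(str(value)[1:]), defaults)
    default = defaults[option]
    if isinstance(default, bool):  # test bool before int: bool subclasses int
        return str(value).lower() in ("true", "yes", "on", "1")
    if isinstance(default, int):
        return int(value)
    return value  # strings pass through unchanged

assert coerce_option("recursive", "Yes") is True
assert coerce_option("proxy_port", "8080") == 8080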
Example n. 45
class S3(object):
	http_methods = BidirMap(
		GET = 0x01,
		PUT = 0x02,
		HEAD = 0x04,
		DELETE = 0x08,
		MASK = 0x0F,
		)
	
	targets = BidirMap(
		SERVICE = 0x0100,
		BUCKET = 0x0200,
		OBJECT = 0x0400,
		MASK = 0x0700,
		)

	operations = BidirMap(
		UNDEFINED = 0x0000,
		LIST_ALL_BUCKETS = targets["SERVICE"] | http_methods["GET"],
		BUCKET_CREATE = targets["BUCKET"] | http_methods["PUT"],
		BUCKET_LIST = targets["BUCKET"] | http_methods["GET"],
		BUCKET_DELETE = targets["BUCKET"] | http_methods["DELETE"],
		OBJECT_PUT = targets["OBJECT"] | http_methods["PUT"],
		OBJECT_GET = targets["OBJECT"] | http_methods["GET"],
		OBJECT_HEAD = targets["OBJECT"] | http_methods["HEAD"],
		OBJECT_DELETE = targets["OBJECT"] | http_methods["DELETE"],
	)

	codes = {
		"NoSuchBucket" : "Bucket '%s' does not exist",
		"AccessDenied" : "Access to bucket '%s' was denied",
		"BucketAlreadyExists" : "Bucket '%s' already exists",
		}

	## S3 sometimes sends an HTTP-307 redirect; remember the new hostname per bucket
	redir_map = {}

	def __init__(self, config):
		self.config = config

	def get_connection(self, bucket):
		if self.config.proxy_host != "":
			return httplib.HTTPConnection(self.config.proxy_host, self.config.proxy_port)
		else:
			if self.config.use_https:
				return httplib.HTTPSConnection(self.get_hostname(bucket))
			else:
				return httplib.HTTPConnection(self.get_hostname(bucket))

	def get_hostname(self, bucket):
		if bucket and check_bucket_name_dns_conformity(bucket):
			if self.redir_map.has_key(bucket):
				host = self.redir_map[bucket]
			else:
				host = getHostnameFromBucket(bucket)
		else:
			host = self.config.host_base
		debug('get_hostname(%s): %s' % (bucket, host))
		return host

	def set_hostname(self, bucket, redir_hostname):
		self.redir_map[bucket] = redir_hostname

	def format_uri(self, resource):
		if resource['bucket'] and not check_bucket_name_dns_conformity(resource['bucket']):
			uri = "/%s%s" % (resource['bucket'], resource['uri'])
		else:
			uri = resource['uri']
		if self.config.proxy_host != "":
			uri = "http://%s%s" % (self.get_hostname(resource['bucket']), uri)
		debug('format_uri(): ' + uri)
		return uri

	## Commands / Actions
	def list_all_buckets(self):
		request = self.create_request("LIST_ALL_BUCKETS")
		response = self.send_request(request)
		response["list"] = getListFromXml(response["data"], "Bucket")
		return response
	
	def bucket_list(self, bucket, prefix = None, recursive = None):
		def _list_truncated(data):
			## <IsTruncated> can either be "true" or "false" or be missing completely
			is_truncated = getTextFromXml(data, ".//IsTruncated") or "false"
			return is_truncated.lower() != "false"

		def _get_contents(data):
			return getListFromXml(data, "Contents")

		def _get_common_prefixes(data):
			return getListFromXml(data, "CommonPrefixes")

		uri_params = {}
		truncated = True
		list = []
		prefixes = []

		while truncated:
			response = self.bucket_list_noparse(bucket, prefix, recursive, uri_params)
			current_list = _get_contents(response["data"])
			current_prefixes = _get_common_prefixes(response["data"])
			truncated = _list_truncated(response["data"])
			if truncated:
				if current_list:
					uri_params['marker'] = self.urlencode_string(current_list[-1]["Key"])
				else:
					uri_params['marker'] = self.urlencode_string(current_prefixes[-1]["Prefix"])
				debug("Listing continues after '%s'" % uri_params['marker'])

			list += current_list
			prefixes += current_prefixes

		response['list'] = list
		response['common_prefixes'] = prefixes
		return response

	def bucket_list_noparse(self, bucket, prefix = None, recursive = None, uri_params = None):
		# avoid the mutable-default-argument pitfall
		if uri_params is None:
			uri_params = {}
		if prefix:
			uri_params['prefix'] = self.urlencode_string(prefix)
		if not self.config.recursive and not recursive:
			uri_params['delimiter'] = "/"
		request = self.create_request("BUCKET_LIST", bucket = bucket, **uri_params)
		response = self.send_request(request)
		#debug(response)
		return response

	def bucket_create(self, bucket, bucket_location = None):
		headers = SortedDict(ignore_case = True)
		body = ""
		if bucket_location and bucket_location.strip().upper() != "US":
			bucket_location = bucket_location.strip()
			if bucket_location.upper() == "EU":
				bucket_location = bucket_location.upper()
			else:
				bucket_location = bucket_location.lower()
			body  = "<CreateBucketConfiguration><LocationConstraint>"
			body += bucket_location
			body += "</LocationConstraint></CreateBucketConfiguration>"
			debug("bucket_location: " + body)
			check_bucket_name(bucket, dns_strict = True)
		else:
			check_bucket_name(bucket, dns_strict = False)
		if self.config.acl_public:
			headers["x-amz-acl"] = "public-read"
		request = self.create_request("BUCKET_CREATE", bucket = bucket, headers = headers)
		response = self.send_request(request, body)
		return response

	def bucket_delete(self, bucket):
		request = self.create_request("BUCKET_DELETE", bucket = bucket)
		response = self.send_request(request)
		return response

	def bucket_info(self, uri):
		request = self.create_request("BUCKET_LIST", bucket = uri.bucket(), extra = "?location")
		response = self.send_request(request)
		response['bucket-location'] = getTextFromXml(response['data'], "LocationConstraint") or "any"
		return response

	def object_put(self, filename, uri, extra_headers = None, extra_label = ""):
		# TODO TODO
		# Make it consistent with stream-oriented object_get()
		if uri.type != "s3":
			raise ValueError("Expected URI type 's3', got '%s'" % uri.type)

		if not os.path.isfile(filename):
			raise InvalidFileError(u"%s is not a regular file" % unicodise(filename))
		try:
			file = open(filename, "rb")
			size = os.stat(filename)[ST_SIZE]
		except IOError as e:
			raise InvalidFileError(u"%s: %s" % (unicodise(filename), e.strerror))
		headers = SortedDict(ignore_case = True)
		if extra_headers:
			headers.update(extra_headers)
		headers["content-length"] = size
		content_type = None
		if self.config.guess_mime_type:
			content_type = mimetypes.guess_type(filename)[0]
		if not content_type:
			content_type = self.config.default_mime_type
		debug("Content-Type set to '%s'" % content_type)
		headers["content-type"] = content_type
		if self.config.acl_public:
			headers["x-amz-acl"] = "public-read"
		if self.config.reduced_redundancy:
			headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"
		request = self.create_request("OBJECT_PUT", uri = uri, headers = headers)
		labels = { 'source' : unicodise(filename), 'destination' : unicodise(uri.uri()), 'extra' : extra_label }
		response = self.send_file(request, file, labels)
		return response
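
The request tables at the top of this class pack a target and an HTTP method into a single integer, which create_request() later splits apart again. A self-contained sketch of that bitmask scheme, with plain dicts standing in for BidirMap (not reproduced here):

HTTP_METHODS = {"GET": 0x01, "PUT": 0x02, "HEAD": 0x04, "DELETE": 0x08, "MASK": 0x0F}
TARGETS = {"SERVICE": 0x0100, "BUCKET": 0x0200, "OBJECT": 0x0400, "MASK": 0x0700}

# An operation code is simply target | method:
OBJECT_PUT = TARGETS["OBJECT"] | HTTP_METHODS["PUT"]  # 0x0402

def method_of(op):
    # Mask off everything but the method bits, then look the code up by value
    code = op & HTTP_METHODS["MASK"]
    return next(k for k, v in HTTP_METHODS.items() if v == code and k != "MASK")

def target_of(op):
    code = op & TARGETS["MASK"]
    return next(k for k, v in TARGETS.items() if v == code and k != "MASK")

assert method_of(OBJECT_PUT) == "PUT"
assert target_of(OBJECT_PUT) == "OBJECT"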
Example n. 46
class Config(object):
    _instance = None
    _parsed_files = []
    _doc = {}
    access_key = ""
    secret_key = ""
    host_base = "s3.amazonaws.com"
    host_bucket = "%(bucket)s.s3.amazonaws.com"
    simpledb_host = "sdb.amazonaws.com"
    cloudfront_host = "cloudfront.amazonaws.com"
    verbosity = logging.WARNING
    progress_meter = True
    progress_class = Progress.ProgressCR
    send_chunk = 4096
    recv_chunk = 4096
    list_md5 = False
    human_readable_sizes = False
    extra_headers = SortedDict(ignore_case=True)
    force = False
    enable = None
    get_continue = False
    skip_existing = False
    recursive = False
    acl_public = None
    acl_grants = []
    acl_revokes = []
    proxy_host = ""
    proxy_port = 3128
    encrypt = False
    dry_run = False
    preserve_attrs = True
    preserve_attrs_list = [
        'uname',  # Verbose owner Name (e.g. 'root')
        'uid',  # Numeric user ID (e.g. 0)
        'gname',  # Group name (e.g. 'users')
        'gid',  # Numeric group ID (e.g. 100)
        'atime',  # Last access timestamp
        'mtime',  # Modification timestamp
        'ctime',  # Creation timestamp
        'mode',  # File mode (e.g. rwxr-xr-x = 755)
        #'acl',     # Full ACL (not yet supported)
    ]
    delete_removed = False
    _doc[
        'delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
    gpg_passphrase = ""
    gpg_command = ""
    gpg_encrypt = "%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    gpg_decrypt = "%(gpg_command)s -d --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
    use_https = False
    bucket_location = "US"
    default_mime_type = "binary/octet-stream"
    guess_mime_type = False
    mime_type = ""
    # List of checks to be performed for 'sync'
    sync_checks = ['size', 'md5']  # 'weak-timestamp'
    # List of compiled REGEXPs
    exclude = []
    include = []
    # Dict mapping compiled REGEXPs back to their textual form
    debug_exclude = {}
    debug_include = {}
    encoding = "utf-8"
    urlencoding_mode = "normal"
    log_target_prefix = ""
    reduced_redundancy = False
    follow_symlinks = False
    socket_timeout = 300
    invalidate_on_cf = False
    website_index = "index.html"
    website_error = ""
    website_endpoint = "http://%(bucket)s.s3-website-%(location)s.amazonaws.com/"

    ## Creating a singleton
    def __new__(cls, configfile=None):
        if cls._instance is None:
            cls._instance = object.__new__(cls)
        return cls._instance

    def __init__(self, configfile=None):
        if configfile:
            self.read_config_file(configfile)

    def option_list(self):
        retval = []
        for option in dir(self):
            ## Skip attributes that start with underscore or are not string, int or bool
            option_type = type(getattr(Config, option))
            if option.startswith("_") or \
               not (option_type in (
                    type("string"), # str
                        type(42),   # int
                    type(True))):   # bool
                continue
            retval.append(option)
        return retval

    def read_config_file(self, configfile):
        cp = ConfigParser(configfile)
        for option in self.option_list():
            self.update_option(option, cp.get(option))
        self._parsed_files.append(configfile)

    def dump_config(self, stream):
        ConfigDumper(stream).dump("default", self)

    def update_option(self, option, value):
        if value is None:
            return
        #### Handle environment reference
        if str(value).startswith("$"):
            return self.update_option(option, os.getenv(str(value)[1:]))
        #### Special treatment of some options
        ## verbosity must be known to "logging" module
        if option == "verbosity":
            try:
                setattr(Config, "verbosity", logging._levelNames[value])
            except KeyError:
                error("Config: verbosity level '%s' is not valid" % value)
        ## allow yes/no, true/false, on/off and 1/0 for boolean options
        elif type(getattr(Config, option)) is type(True):  # bool
            if str(value).lower() in ("true", "yes", "on", "1"):
                setattr(Config, option, True)
            elif str(value).lower() in ("false", "no", "off", "0"):
                setattr(Config, option, False)
            else:
                error(
                    "Config: value of option '%s' must be Yes or No, not '%s'"
                    % (option, value))
        elif type(getattr(Config, option)) is type(42):  # int
            try:
                setattr(Config, option, int(value))
            except ValueError:
                error(
                    "Config: value of option '%s' must be an integer, not '%s'"
                    % (option, value))
        else:  # string
            setattr(Config, option, value)
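
Note that logging._levelNames, used by update_option() for the verbosity option, is a Python 2 internal that no longer exists in Python 3. A portable sketch of the same name-to-level lookup (the helper name is illustrative):

import logging

def level_from_name(name, default=logging.WARNING):
    # logging.getLevelName() maps a known level name back to its number;
    # for unknown names it returns the string "Level <name>" instead.
    value = logging.getLevelName(str(name).upper())
    return value if isinstance(value, int) else default

assert level_from_name("DEBUG") == logging.DEBUG
assert level_from_name("bogus") == logging.WARNING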
Example n. 47
            if src_md5 != dst_md5:
                ## checksums are different.
                attribs_match = False
                debug(u"XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5))

        return attribs_match

    # we don't support local->local sync, use 'rsync' or something like that instead ;-)
    assert not (src_remote == False and dst_remote == False)

    info(u"Verifying attributes...")
    cfg = Config()
    ## Items left on src_list will be transferred
    ## Items left on update_list will be transferred after src_list
    ## Items left on copy_pairs will be copied from dst1 to dst2
    update_list = SortedDict(ignore_case = False)
    ## Items left on dst_list will be deleted
    copy_pairs = []

    debug("Comparing filelists (direction: %s -> %s)" % (__direction_str(src_remote), __direction_str(dst_remote)))

    for relative_file in src_list.keys():
        debug(u"CHECK: %s" % (relative_file))

        if dst_list.has_key(relative_file):
            ## Was --skip-existing requested?
            if cfg.skip_existing:
                debug(u"IGNR: %s (used --skip-existing)" % (relative_file))
                del(src_list[relative_file])
                del(dst_list[relative_file])
                continue
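
A minimal sketch of the decision this fragment implements: a file present on both sides is dropped from both lists when --skip-existing is set or when its checksum matches, and is queued for transfer otherwise. The plain dicts mapping names to MD5s are illustrative stand-ins for the SortedDict-based src_list/dst_list.

def plan_sync(src_list, dst_list, skip_existing=False):
    transfer = {}
    for name, src_md5 in list(src_list.items()):
        if name in dst_list:
            if skip_existing or dst_list[name] == src_md5:
                # identical (or explicitly skipped): nothing to transfer
                del src_list[name]
                del dst_list[name]
                continue
        transfer[name] = src_md5  # new or changed: queue for transfer
    return transfer

assert plan_sync({"a": "1", "b": "2"}, {"a": "1", "b": "X"}) == {"b": "2"}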