Example #1
    def download_part(self, headers=None, start_pos=0, end_pos=None, local_file_path=None):
 
        if not headers:
            headers = {}
        # HTTP Range is inclusive at both ends: 'Bytes=start-end', or 'Bytes=start-' for the rest of the object
        if end_pos:
            headers['Range'] = 'Bytes=' + str(start_pos) + '-' + str(end_pos)
        else:
            headers['Range'] = 'Bytes=' + str(start_pos) + '-'
        # open for in-place writing at the requested offset; fall back to 'wb' when the file does not exist yet
        try:
            fp = open(local_file_path, 'r+b')
        except IOError:
            fp = open(local_file_path, 'wb')
        # the response body starts at byte start_pos (the range is inclusive), so seek exactly there
        fp.seek(start_pos)
        res = self.bucket.jss_client.make_request(method='GET', bucket_name=self.bucket.name, object_name=self.name, headers=headers)
        if res.status / 100 != 2:
            error_handler(res)
        else:
            while True:
                data = res.read(commonconstants.DEFAULT_BUFFER_SIZE)
                if len(data) != 0:
                    fp.write(data)
                else:
                    break
        fp.flush()    
        fp.close() 
        return res
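
download_part drives a partial download with an HTTP Range header and a buffered read loop. As a point of reference, here is a minimal standalone sketch of the same idea using only the Python 3 standard library (the client code in these examples is Python 2); the URL, byte range, buffer size, and file name are illustrative placeholders, not part of this JSS client.

import urllib.request

def fetch_range(url, start_pos, end_pos, local_file_path):
    # standard HTTP ranges are inclusive at both ends, like the 'Bytes=start-end' header above
    req = urllib.request.Request(url, headers={'Range': 'bytes=%d-%d' % (start_pos, end_pos)})
    with urllib.request.urlopen(req) as res, open(local_file_path, 'wb') as fp:
        while True:
            chunk = res.read(64 * 1024)   # stream in buffered chunks instead of reading it all at once
            if not chunk:
                break
            fp.write(chunk)

# e.g. fetch_range('http://example.com/big.bin', 0, 1048575, 'big.part0')
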
Example #2
 def upload_flow(self, headers=None, data=None, compute_MD5=True):
     bucket_name_check(self.bucket.name)
     object_name_check(self.name)
     if not headers:
         headers = {}
     headers['Content-Length'] = len(data)
     if compute_MD5:
         # hashlib.md5() replaces the deprecated md5 module here (assuming hashlib is imported at module level)
         m = hashlib.md5()
         m.update(data)
         headers['Content-MD5'] = m.hexdigest()
     headers['Content-Type'] = commonconstants.DEFAULT_CONTENT_TYPE
     self.bucket.jss_client.open_connection_to_put('PUT', self.bucket.name, self.name, headers)
     offset=0
     total_size = len(data)
     while offset < total_size:
         read_bytes = data[offset:offset + commonconstants.DEFAULT_SEND_SIZE]
         offset += commonconstants.DEFAULT_SEND_SIZE
         self.bucket.jss_client.send(read_bytes)
     response = self.bucket.jss_client.pool.getresponse()
     if response.status / 100 > 2:
         error_handler(response)
     else:
         return response       
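
upload_flow prepares its own headers: a Content-Length for the in-memory payload and, optionally, an MD5 checksum. The standalone sketch below mirrors that header preparation; it keeps the hex-digest convention used in the snippet (the Content-MD5 header defined by HTTP is normally the base64 of the raw digest), and the payload is just an example byte string.

import hashlib

def md5_headers(data):
    m = hashlib.md5()
    m.update(data)
    return {
        'Content-Length': str(len(data)),
        'Content-MD5': m.hexdigest(),   # hex form, matching upload_flow above
    }

print(md5_headers(b'hello world'))
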
Example #3
 def list_multi_uploads(self, headers=None, key_marker=None, upload_id_marker=None, max_uploads=None, prefix=None, delimiter=None):
     param = {}
     l = []
     query_param = ''
     if key_marker:
         param['key-marker'] = key_marker
     if upload_id_marker:
         param['upload-id-marker'] = upload_id_marker
     if max_uploads:
         param['max-uploads'] = max_uploads
     if prefix:
         param['prefix'] = prefix
     if delimiter:
         param['delimiter'] = delimiter
     for k, v in param.items():
         if isinstance(v, unicode):
             v = v.encode('utf-8')
         if v is not None and v != '':
             l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
     if len(l):
         query_param = '&'.join(l) 
     if query_param:
         subresource = 'uploads&' + query_param
     else:
         subresource = 'uploads'
     response = self.jss_client.make_request(method='GET', bucket_name=self.name, headers=headers, data='', query_args=subresource, subresource=subresource)
     if response.status / 100 != 2:
         error_handler(response)
     return response
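
list_multi_uploads reduces its optional filters to a single subresource string: each parameter that is present is URL-quoted, joined with '&', and appended to 'uploads'. The standalone sketch below (with made-up parameter values) shows that pattern in isolation; get_uploaded_parts and cancel_multipart_uploaded in the later examples build their query strings the same way.

try:
    from urllib.parse import quote   # Python 3
except ImportError:
    from urllib import quote         # Python 2, as in the snippets

def build_subresource(base, params):
    # quote each present key/value pair and append the result to the base subresource
    parts = ['%s=%s' % (quote(k), quote(str(v)))
             for k, v in params.items() if v not in (None, '')]
    return base + '&' + '&'.join(parts) if parts else base

print(build_subresource('uploads', {'key-marker': 'photos/a.jpg', 'max-uploads': 100}))
# -> 'uploads&key-marker=photos/a.jpg&max-uploads=100' (parameter order follows the dict)
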
Example #4
 def get_all_keys(self, headers=None, prefix='', delimiter='', marker='', maxKeys=''):
     if not headers:
         headers = {}
     headers['Content-Type'] = 'application/json'
     params = {}
     if prefix:
         params['prefix'] = prefix
     if delimiter:
         params['delimiter'] = delimiter
     if  marker:
         params['marker'] = marker
     if maxKeys:
         params['maxKeys'] = maxKeys
     l = []
     for k, v in params.items():
         if isinstance(v, unicode):
             v = v.encode('utf-8')
         if v is not None and v != '':
             l.append('%s=%s' % (k, v))
     if len(l):
         query_string = '&'.join(l)
     else:
         query_string = None
     response = self.jss_client.make_request(method='GET', bucket_name=self.name, object_name=None, headers=headers, data='', query_args=query_string)
     # check the status before touching the body, so an error response is not fed to json.loads
     if response.status / 100 > 2:
         error_handler(response)
     else:
         data = response.read()
         keylist = json.loads(data)
         return keylist
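
get_all_keys simply forwards prefix, delimiter, marker, and maxKeys to the service and returns the decoded JSON listing. The snippet below is not part of this client; it only illustrates the usual object-store meaning of prefix and delimiter (keys under the prefix are listed, and deeper "directories" collapse into common prefixes), which is presumably what the service applies on its side. All key names in it are made up.

def list_keys(keys, prefix='', delimiter=''):
    # keep keys under the prefix; collapse anything past the first delimiter into a common prefix
    matched, common = [], set()
    for key in keys:
        if not key.startswith(prefix):
            continue
        rest = key[len(prefix):]
        if delimiter and delimiter in rest:
            common.add(prefix + rest.split(delimiter, 1)[0] + delimiter)
        else:
            matched.append(key)
    return matched, sorted(common)

keys = ['photos/2021/a.jpg', 'photos/2022/b.jpg', 'photos/c.jpg', 'readme.txt']
print(list_keys(keys, prefix='photos/', delimiter='/'))
# -> (['photos/c.jpg'], ['photos/2021/', 'photos/2022/'])
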
Example #5
 def get_uploaded_parts(self, headers=None, upload_id=None, max_parts=None, part_number_marker=None):  
       
     if not headers:
         headers = {}
     subresource = 'uploads'
     param = {}
     l = []
     query_param = ''
     if upload_id:
         param['uploadId'] = upload_id
     if max_parts:
         param['max-parts'] = max_parts
     if part_number_marker:
         param['part-number-marker'] = part_number_marker   
     for k, v in param.items():
         if isinstance(v, unicode):
             v = v.encode('utf-8')
         if v is not None and v != '':
             l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
     if len(l):
         query_param = '&'.join(l)    
     response = self.bucket.jss_client.make_request(method='GET', bucket_name=self.bucket.name, object_name=self.name, headers=headers, data='', query_args=query_param, subresource=query_param)
     if response.status / 100 != 2:
         error_handler(response)
     data = response.read()
     result = json.loads(data)
     return result
Example #6
 def delete_key(self, key_name):
     
     response = self.jss_client.make_request(method='DELETE', bucket_name=self.name, object_name=key_name)
     if response.status / 100 > 2:
         error_handler(response)
     else:
         return response
Example #7
 def delete(self):
     response = self.jss_client.make_request('DELETE', 
         bucket_name = self.bucket_name, 
         subresource = 'acl',
         query_args = 'acl')
     if response.status / 100 != 2:
         error_handler(response)
     return response.read()
Example #8
 def get(self):
     response = self.jss_client.make_request('GET', 
         bucket_name = self.bucket_name, 
         subresource = 'acl',
         query_args = 'acl')
     if response.status / 100 != 2:
         error_handler(response)
     data = response.read()
     self.policy.deserialize(data)
Example #9
 def set(self):
     entity = self.policy.serialize()
     if len(entity) > 4096:
         error_handler("Access control policy entity must be less than 4096 bytes, current size: %d" % len(entity))
     headers = {'Content-Type':'application/json; charset=UTF-8'}
     return self.jss_client.make_request('PUT', 
         bucket_name = self.bucket_name, 
         headers = headers,
         subresource = 'acl',
         query_args = 'acl',
         data = entity)
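
set serializes the in-memory ACL policy, rejects anything over 4096 bytes, and PUTs the JSON to the acl subresource. Below is a standalone sketch of that size guard only; the policy dict in it is entirely made up, since the real layout comes from self.policy.serialize(), which is not shown in these examples.

import json

def serialized_policy_or_error(policy):
    entity = json.dumps(policy)
    if len(entity) > 4096:
        raise ValueError("Access control policy entity must be less than 4096 bytes, current size: %d" % len(entity))
    return entity

print(serialized_policy_or_error({'Owner': 'user-1', 'Grants': [{'Grantee': 'user-2', 'Permission': 'READ'}]}))
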
Example #10
 def init_multi_upload(self, headers=None):
     if not headers:
         headers = {}
     if 'Content-Type' not in headers:
         headers['Content-Type'] = commonconstants.DEFAULT_CONTENT_TYPE
     subresource = 'uploads'    
     response = self.bucket.jss_client.make_request('POST', self.bucket.name, self.name, headers, '', subresource, subresource)
     if response.status / 100 != 2:
         error_handler(response)
     data = response.read()    
     pre_upload = json.loads(data)
     return pre_upload
Example #11
 def download_part_flow(self, headers=None, start_pos=0, end_pos=None):
      
     if not headers:
         headers = {}
     if end_pos:
         headers['Range'] = 'Bytes=' + str(start_pos) + '-' + str(end_pos)
     else:
         headers['Range'] = 'Bytes=' + str(start_pos) + '-'   
     res = self.bucket.jss_client.make_request(method='GET', bucket_name=self.bucket.name, object_name=self.name, headers=headers)
     if res.status / 100 != 2:
         error_handler(res)
     else:
         return res
Example #12
 def get_all_buckets(self, headers=None):
     if not headers:
         headers = {}
     headers['Content-Type'] = 'application/json'
     response = self.make_request(method='GET', headers=headers)  
     if response.status / 100 > 2:
         error_handler(response)
     else:
         data = response.read()
         bucketlist = json.loads(data)
         result = {}
         result['size'] = len(bucketlist['Buckets'])
         result['bucketlist'] = bucketlist['Buckets']
         result['max-size'] = commonconstants.MAX_BUCKETS_SIZE
         return result
Example #13
 def complete_multi_upload(self, headers=None, upload_id=None, data=None):
     
     if not headers:
         headers = {}
     query_param = urllib.quote('uploadId') + '=' + urllib.quote(upload_id)
     response = self.bucket.jss_client.make_request(method='POST', bucket_name=self.bucket.name, object_name=self.name, headers=headers, data=data, query_args=query_param, subresource=query_param)
     if response.status / 100 != 2:
         error_handler(response)
     # the service can answer 200 and still report a failure in the body, so parse it and check the ETag
     data = response.read()
     if len(data) != 0:
         message = json.loads(data)
         if message['ETag'] == "We encountered an internal error. Please try again.":
             raise Exception("merge the object error")
     return data
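
complete_multi_upload illustrates a quirk of merge requests: the service can answer 200 and still report a failure inside the JSON body, so the body must be parsed and its ETag checked. The standalone sketch below isolates that check; the JSON payload in it is fabricated purely to exercise the success branch.

import json

def check_complete_response(body):
    # a 200 status alone is not enough; the body itself may carry the error text
    message = json.loads(body)
    if message.get('ETag') == "We encountered an internal error. Please try again.":
        raise Exception("merge the object error")
    return message

print(check_complete_response('{"ETag": "9b2cf535f27731c974343645a3985328"}'))
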
Example #14
 def upload(self, headers=None, local_file_path=None, compute_MD5=True):
     """
     :type :bucket_name:string
     :param :bucket_name:the bucket you file want to put
     :type :object_name:string
     :param :object_name:the key of the file you want to upload
     :type: headers:dict
     :param: headers:Additional headers to pass along with the request to
         JSS. 
     :type local_file_path:string 
     :param local_file_path:the path of the  file which you want to upload.
     """
     bucket_name_check(self.bucket.name)
     object_name_check(self.name)
     fq = open(local_file_path, 'rb')
     fq.seek(0, os.SEEK_END)   # jump to the end to measure the file size
     filesize = fq.tell()
     fq.seek(0)
     suffix = local_file_path.split('.')[-1]      
     if not headers:
         headers = {}
     headers['Content-Length'] = filesize
     if compute_MD5:
         # open in binary mode so the digest is computed over the same bytes that are sent
         fp = open(local_file_path, 'rb')
         md5_value = file_MD5(fp)
         headers['Content-MD5'] = md5_value
         fp.close()
       
     if '.' + suffix in self.bucket.jss_client.map:
         headers['Content-Type'] = self.bucket.jss_client.map['.' + suffix]
     else:
         headers['Content-Type'] = commonconstants.DEFAULT_CONTENT_TYPE
     self.bucket.jss_client.open_connection_to_put('PUT', self.bucket.name, self.name, headers)
     fq.seek(0)
     l = fq.read(commonconstants.DEFAULT_SEND_SIZE)
     while len(l) > 0:
         self.bucket.jss_client.send(l)
         l = fq.read(commonconstants.DEFAULT_SEND_SIZE)
     fq.close()
     response = self.bucket.jss_client.pool.getresponse()
     if response.status / 100 > 2:
         error_handler(response)
     else:
         return response
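
upload measures the file size by seeking to the end, picks a Content-Type from the client's suffix map, and streams the file in DEFAULT_SEND_SIZE chunks. The checksum comes from a library helper, file_MD5, whose body is not shown in these examples; the sketch below is only a guess at the chunked-digest shape such a helper usually has, under a hypothetical name so it is not mistaken for the real one.

import hashlib

def file_md5_hex(fp, chunk_size=8192):
    # hypothetical stand-in for file_MD5: digest the file in chunks so large files fit in memory
    m = hashlib.md5()
    while True:
        block = fp.read(chunk_size)
        if not block:
            break
        m.update(block)
    return m.hexdigest()

# with open('some_local_file', 'rb') as fp:
#     print(file_md5_hex(fp))
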
Example #15
 def cancel_multipart_uploaded(self, headers=None, upload_id=None):
     param = {}
     l = []
     query_param = ''
     if upload_id:
         param['uploadId'] = upload_id
     for k, v in param.items():
         if isinstance(v, unicode):
             v = v.encode('utf-8')
         if v is not None and v != '':
             l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
     if len(l):
         query_param = '&'.join(l) 
     response = self.bucket.jss_client.make_request(method='DELETE', bucket_name=self.bucket.name, object_name=self.name, headers=headers, data='', query_args=query_param, subresource=query_param)
    
     if response.status / 100 != 2:
         error_handler(response)
     return response
Example #16
    def delete_bucket(self, bucket_name, headers=None):
        """
        Removes a JSS bucket.

        In order to remove the bucket, it must first be empty. If the bucket is
        not empty, a ``StorageError`` will be raised.

        :type bucket_name: string
        :param bucket_name: The name of the bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request to
            JSS.
        """
        response = self.make_request(method='DELETE', bucket_name=bucket_name, headers=headers)
        if response.status / 100 > 2:
            error_handler(response)
        else:
            return response
Example #17
 def create_bucket(self, bucket_name, headers=None):
     """
     creates a new bucket 
     
     type bucket_name: string
     :param bucket_name :The name of a new bucket
     
     type headers: dict
     :param headers: Additional headers to pass along with the request to JSS
     
     """
     bucket_name_check(bucket_name)
     if not headers:
         headers = {}
     headers['Content-Length'] = 0
     response = self.make_request('PUT', bucket_name=bucket_name, headers=headers)
     if response.status / 100 > 2:
         error_handler(response)
     else:
         return response
Example #18
 def upload_fp(self, headers=None, fp=None):
     bucket_name_check(self.bucket.name)
     object_name_check(self.name)
     fp.seek(0, os.SEEK_END)   # jump to the end to measure the stream size
     filesize = fp.tell()
     fp.seek(0)
     if not headers:
         headers = {}
     headers['Content-Length'] = filesize
     headers['Content-Type'] = commonconstants.DEFAULT_CONTENT_TYPE
     self.bucket.jss_client.open_connection_to_put('PUT', self.bucket.name, self.name, headers)
     l = fp.read(commonconstants.DEFAULT_SEND_SIZE)
     while len(l) > 0:
         self.bucket.jss_client.send(l)
         l = fp.read(commonconstants.DEFAULT_SEND_SIZE)
     response = self.bucket.jss_client.pool.getresponse()
     if response.status / 100 > 2:
         error_handler(response)
     else:
         return response  
Example #19
 def download(self, headers=None, local_file_path=None):
     """
     download file from JSS
     :type bucket_name :string
     :param  bucket_name:the name of bucket 
     :type headers:dict
     :param  headers:Additional headers to pass along with the request to
         JSS. 
     :type local_file_path:string 
     :param local_file_path:the path of the  file which you want to save.    
     """
     fp = open(local_file_path, 'wb')
     res = self.bucket.jss_client.make_request(method='GET', bucket_name=self.bucket.name, object_name=self.name, headers=headers)
     if res.status / 100 > 2:
         error_handler(res)
     else:
         while True:
             data = res.read(commonconstants.DEFAULT_BUFFER_SIZE)
             if len(data) != 0:
                 fp.write(data)
             else:
                 break
         fp.flush()    
         fp.close() 
Example #20
 def download_flow(self, headers=None):
     res = self.bucket.jss_client.make_request(method='GET', bucket_name=self.bucket.name, object_name=self.name, headers=headers)
     if res.status / 100 > 2:
         error_handler(res)
     return res
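
download_flow differs from download in that it returns the open response instead of writing to disk, leaving buffering to the caller. A minimal standalone sketch of draining such a file-like response into a local file follows; the buffer size and file name are placeholders, and any object with a read(size) method (io.BytesIO in the usage line) can stand in for the response.

import io

def save_stream(res, local_file_path, buffer_size=64 * 1024):
    # copy the response body to disk in fixed-size chunks, exactly as download() does above
    with open(local_file_path, 'wb') as fp:
        while True:
            data = res.read(buffer_size)
            if not data:
                break
            fp.write(data)

save_stream(io.BytesIO(b'payload'), 'out.bin')   # io.BytesIO stands in for a real response here
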