Code example #1
0
 def test_feeding_sends_an_sqs_message(self, now):
     """Feeding an article must enqueue exactly one SQS message describing the S3 object."""
     now.return_value = datetime.strptime('2016-01-01', '%Y-%m-%d')
     sqs_queue = MagicMock()
     s3_key = Key()
     s3_key.bucket = Bucket()
     s3_key.bucket.name = 'ct-elife-production-final'
     s3_key.name = 'elife-12345-vor-r1.zip'
     s3_key.etag = '...'
     s3_key.size = 2 * 1024 * 1024

     econ_article_feeder.initiate_econ_feed(sqs_queue, s3_key, 'MyArticleWorkflow')

     # Exactly one call must have been recorded on the queue mock.
     self.assertEqual(len(sqs_queue.method_calls), 1)
     _, call_args, _ = sqs_queue.method_calls[0]
     expected_payload = {
         'workflow_name': 'MyArticleWorkflow',
         'workflow_data': {
             'event_time': '2016-01-01T00:00:00Z',
             'event_name': 'ObjectCreated:Put',
             'file_name': 'elife-12345-vor-r1.zip',
             'file_etag': '...',
             'bucket_name': 'ct-elife-production-final',
             'file_size': 2 * 1024 * 1024,
         },
     }
     self.assertEqual(json.loads(call_args[0].get_body()), expected_payload)
Code example #2
0
File: ceph_api.py  Project: shideqin/cephcmd
    def uploadfrompost(self,args,options,view_bar=True):
        content = args[1]
        (bucket,key) = parse_bucket_object(args[2])

        if not key:
            key = os.path.basename(srcFile)

        if not content:
            print "Content is not none"
            exit(-1)

        cb = None
        if view_bar:
            cb = self._view_bar

        from boto.s3.key import Key
        try:
            b = self.conn.get_bucket(bucket)
            k = Key(b)
            k.key = key
            hash_obj = md5()
            hash_obj.update(content)
            k.etag = hash_obj.hexdigest()
            res = k.set_contents_from_string(content,cb=cb)
            if res is not None:
                res = "\n"
                res += "Object URL is: "+self._url(bucket,key)+"\n"
                res += "Object abstract path is: "+os.path.join(args[2],key)+"\n"
                res += "ETag is "+k.etag
                return res
        except boto.exception.S3ResponseError as e:
            return "Error Status:\n%d %s" % (e.status,e.reason)
Code example #3
0
File: ceph_api.py  Project: shideqin/cephcmd
    def _upload(self,srcFile,bucket,key,view_bar=True,is_replace=True):
        """Upload local file ``srcFile`` to ``bucket``/``key``.

        When ``is_replace`` is False, the upload is skipped if S3 already
        holds an object of the same size that is at least as recent as the
        local file.

        Returns a tuple ``(http_status, uploaded_flag, info_dict)`` where
        ``info_dict`` carries ``"etag"`` on success or ``"reason"`` on error.
        """
        cb = None
        if view_bar:
            cb = self._view_bar

        if not is_replace:
            local_file_size = os.path.getsize(srcFile)
            local_modify_time = os.path.getmtime(srcFile)
            res = self._head(bucket,key)
            if res and local_file_size == res.size:
                # Renamed from ``format``, which shadowed the builtin.
                time_format = "%a, %d %b %Y %H:%M:%S GMT"
                s3_last_modify_time = format_unixtime(res.last_modified,time_format)
                if s3_last_modify_time >= local_modify_time:
                    # Remote copy is current: report success without uploading.
                    return (200,False,{"etag":res.etag})

        from boto.s3.key import Key
        fp = open(srcFile,'rb')
        try:
            b = self.conn.get_bucket(bucket)
            k = Key(b)
            # compute_md5 returns (hex_digest, base64_digest); keep the hex
            # one.  Renamed from ``md5``, which shadowed the hashlib import.
            digest = k.compute_md5(fp)
            k.key = key
            k.etag = digest[0]
            res = k.set_contents_from_file(fp,cb=cb)
            if res is not None:
                return (200,True,{"etag":k.etag})
        except boto.exception.S3ResponseError as e:
            return (e.status,True,{"reason":e.reason})
        finally:
            # BUG FIX: the original never closed fp, leaking the file handle
            # on every call (and unconditionally on the exception path).
            fp.close()
Code example #4
0
File: tigre.py  Project: varikin/Tigre
    def sync_folder(self, path, bucket):
        """Syncs a local directory with an S3 bucket.

        Currently does not delete files from S3 that are not in the local directory.

        path: The path to the directory to sync to S3
        bucket: The name of the bucket on S3
        """
        bucket = self.conn.get_bucket(bucket)
        local_files = self._get_local_files(path)
        s3_files = self._get_s3_files(bucket)
        for filename, file_hash in local_files.iteritems():
            # BUG FIX: the original indexed ``s3_files[filename]`` directly,
            # which raises KeyError for any local file not yet on S3 and made
            # the ``is None`` branch unreachable; ``.get()`` returns the None
            # that the branch below expects.
            s3_key = s3_files.get(filename)
            if s3_key is None:
                s3_key = Key(bucket)
                s3_key.key = filename
                # Sentinel etag that can never equal a real MD5, forcing upload.
                s3_key.etag = '"!"'

            # S3 etags are quoted MD5 hex digests; strip the quotes to compare
            # against the local hash (a (hex_digest, b64_digest) pair).
            if s3_key.etag[1:-1] != file_hash[0]:
                s3_key.set_contents_from_filename(join(path, filename), md5=file_hash)