Example #1
    def _get_s3_connection(self, access_key, secret_key, host=DEFAULT_HOST):
        # Temporarily silence boto's error logging while the credentials
        # are validated; failures are reported through our own logger below.
        old_logger = boto.log.error
        boto.log.error = lambda x: x
        tx_s3_conn = None
        try:
            if not secret_key or not access_key:
                raise EC2ResponseError('', '', '')
            s3_conn = S3Connection(access_key, secret_key, host=host)
            tx_s3_conn = TwistedS3Connection.adapt(s3_conn)
        except EC2ResponseError:
            log.error('Invalid Keys. '
                      'Check your EC2 Access Key and EC2 Secret Key.')
            return
        except gaierror as e:
            # socket.gaierror: DNS resolution failed, e.g. an unreachable
            # or misspelled host.
            log.error(e.strerror)
            return
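TwistedS3Connection is an adapter internal to the ZenPack and its implementation is not shown here. As a rough, hypothetical sketch of the idea, assuming the adapter simply pushes each blocking boto call onto Twisted's thread pool with deferToThread so that callers get back Deferreds (the class and method names below are illustrative, not the ZenPack's actual code):

from twisted.internet import threads


class DeferredConnectionProxy(object):
    """Illustrative sketch only, not the ZenPack's TwistedS3Connection:
    wrap a blocking boto connection so each method call runs in the
    reactor thread pool and returns a Deferred."""

    def __init__(self, connection):
        self._connection = connection

    @classmethod
    def adapt(cls, connection):
        return cls(connection)

    def __getattr__(self, name):
        attr = getattr(self._connection, name)
        if not callable(attr):
            return attr

        def deferred_call(*args, **kwargs):
            # deferToThread returns a Deferred that fires with the result
            # of the blocking call, or with a Failure if it raises.
            return threads.deferToThread(attr, *args, **kwargs)

        return deferred_call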
Example #2
    @defer.inlineCallbacks
    def collect(self, config):
        data = self.new_data()
        bucket_keys = {}
        # Enable Signature Version 4 support for 's3' in the boto config.
        if not boto.config.get('s3', 'use-sigv4'):
            boto.config.add_section('s3')
            boto.config.set('s3', 'use-sigv4', 'True')

        ds0 = config.datasources[0]

        s3connection = TwistedS3Connection.adapt(boto.s3.connect_to_region(
            S3BucketPlugin.DEFAULT_REGION,
            aws_access_key_id=ds0.ec2accesskey,
            aws_secret_access_key=ds0.ec2secretkey,
        ))

        try:
            buckets = yield s3connection.get_all_buckets()
        except Exception as ex:
            log.error("Unable to get buckets for %s region, %s",
                      S3BucketPlugin.DEFAULT_REGION, str(ex))
        else:
            data['events'].append({
                'summary': 'Monitoring ok',
                'eventClass': '/Status',
                'eventKey': 'aws_bucket_result',
                'severity': ZenEventClasses.Clear,
            })

            bucket_region = {}
            connections = {}
            for ds in config.datasources:
                region = ds.params['region']
                if region == "" or region is None:
                    continue

                bucket_region[ds.component] = region
                if region not in connections:
                    s3conn = boto.s3.connect_to_region(
                        region,
                        aws_access_key_id=ds0.ec2accesskey,
                        aws_secret_access_key=ds0.ec2secretkey,)
                    if s3conn:
                        connections[region] = TwistedS3Connection.adapt(s3conn)
                    else:
                        log.error("Unable to initiate an S3 connection to "
                                  "region: %s, plugin: %s, template: %s, "
                                  "datasource: %s, device: %s, component: %s",
                                  region, ds.plugin_classname, ds.template,
                                  ds.datasource, ds.device, ds.component)

            deferreds = []
            names = []
            for bucket in buckets:

                region = bucket_region.get(bucket.name)
                if region is None:
                    log.error("Bucket %s: Unable to determine region: skipping collection until this S3Bucket is modeled", bucket.name)
                    continue

                if connections:
                    if connections.get(region) is None:
                        log.error("Bucket %s: Unable to connect to region "
                                  "%s: skipping collection",
                                  bucket.name, region)
                        continue

                    # To retrieve further information about the bucket, we
                    # need to query the right region. Switch its connection
                    # over to the right one.
                    bucket.connection = connections[region]

                d = bucket.get_all_keys()
                names.append(bucket.name)
                deferreds.append(d)

            # Collect all of the bucket keys in parallel
            results = yield defer.DeferredList(deferreds, consumeErrors=True)

            for name, (success, result) in zip(names, results):
                if success:
                    # result is the list of keys from get_all_keys().
                    bucket_keys[name] = result
                else:
                    # result is a twisted Failure object; result.value is
                    # the exception raised while fetching the keys.
                    bucket_keys[name] = result.value

            for ds in config.datasources:
                self.component = ds.component
                keys = bucket_keys.get(ds.component) or []
                if not isinstance(keys, list):
                    # A non-list value here is the exception stored for
                    # this bucket above; surface it as an error event.
                    data['events'].append({
                        'component': self.component,
                        'summary': str(keys),
                        'eventClass': '/Status',
                        'eventKey': 'aws_result',
                        'severity': ZenEventClasses.Error,
                    })
                    continue
                data['values'][ds.component] = dict(
                    keys_count=(len(keys), 'N'),
                    total_size=(sum(key.size for key in keys), 'N'),
                )

        defer.returnValue(data)
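The DeferredList handling above pairs each bucket name with either the fetched key list or the exception from a failed fetch. A stripped-down sketch of the same pattern outside the ZenPack (gather_keys and its fetchers argument are made up for illustration):

from twisted.internet import defer


@defer.inlineCallbacks
def gather_keys(fetchers):
    # fetchers: a dict mapping a name to a zero-argument callable that
    # returns a Deferred (hypothetical input for this sketch).
    names = list(fetchers)
    deferreds = [fetchers[name]() for name in names]

    # consumeErrors=True keeps one failed fetch from aborting the batch;
    # each entry in results is a (success, result-or-Failure) pair.
    results = yield defer.DeferredList(deferreds, consumeErrors=True)

    values = {}
    for name, (success, result) in zip(names, results):
        # On success, result is the value the Deferred fired with; on
        # failure it is a Failure whose .value holds the exception.
        values[name] = result if success else result.value
    defer.returnValue(values)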