Example #1
def run_ebs_deploy(command, args, parser=None):
    """
    the main
    """
    init_logging(args.use_logging)

    # make sure the config file exists
    if not args.config_file or not os.path.exists(args.config_file):
        out("Config file not found: "+args.config_file)
        parser and parser.print_help()
        return -1

    # make sure that if we have a role to assume, that we also have a role name to display
    if (args.role_arn and not args.role_name) or (args.role_name and not args.role_arn):
        out("You must use and --role-arn and --role-name together")
        parser and parser.print_help()
        return -1

    # enable logging
    if args.verbose:
        from boto import set_stream_logger
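        # set_stream_logger('boto') attaches a stdout handler to the 'boto'
        # logger (level defaults to DEBUG)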
        set_stream_logger('boto')

    config = load_config(args.config_file)
    aws = make_aws_credentials(config, args.role_arn, args.role_name)
    helper = EbsHelper(aws, app_name=get(config, 'app.app_name'), wait_time_secs=args.wait_time)

    # execute the command
    return command.execute(helper, config, args)
Example #2
    def MANUAL_test_redeliver_bad(self):

        sqs_backend.SAFE_MAX_VISIBILITY_TIMEOUT = sqs_backend.AWS_MAX_VISIBILITY_TIMEOUT - 1

        with self.backend.open(self.test_queue) as queue:

            print 'PURGE'
            queue.purge()

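            # the name passed here is arbitrary: set_stream_logger re-points
            # boto.log at logging.getLogger('tellme')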
            boto.set_stream_logger('tellme')

            print '-' * 80
            print 'Add Message'
            print '-' * 80
            queue.put('hello9')

            time.sleep(1)

            print '-' * 80
            print 'Get Message'
            print '-' * 80
            message_body, message_id = queue.get()
            self.assertEqual(message_body, 'hello9')

            print '-' * 80
            print 'Redeliver Message %d seconds' % sqs_backend.SAFE_MAX_VISIBILITY_TIMEOUT
            print '-' * 80
            queue.redeliver((sqs_backend.SAFE_MAX_VISIBILITY_TIMEOUT, message_id))

            time.sleep(5)

            print '-' * 80
            print 'Redeliver Message %d seconds' % sqs_backend.SAFE_MAX_VISIBILITY_TIMEOUT
            print '-' * 80
            queue.redeliver((sqs_backend.SAFE_MAX_VISIBILITY_TIMEOUT, message_id))
Example #3
    def __init__(self,
                 hostname,
                 aws_access_key,
                 aws_secret_key,
                 path='/services/Properties',
                 port=8773,
                 is_secure=False,
                 boto_debug_level=0,
                 err_method=None,
                 logger=None,
                 **kwargs):
        """
        Primary Admin/Properties Query interface for a Eucalyptus Cloud

        :param hostname: service endpoint, hostname, ip, etc.
        :param aws_access_key: cloud user access key to auth this connection
        :param aws_secret_key: cloud user secret key to auth this connection
        :param port: remote port to be used for this connection
        :param path: service path for this connection
        :param is_secure: bool
        :param boto_debug_level: int 0-2
        :param err_method: method to be used for logging error information
        :param logger: logger to use; a Eulogger is created if none is given
        :param kwargs: Additional kwargs to be passed to connection init
        :raise ValueError: Upon missing or invalid params
        """

        # Note: aws_access and secret_key get assigned to self during super().__init__()
        self.host = hostname
        if not isinstance(self.host, basestring) or \
                not isinstance(aws_access_key, basestring) or \
                not isinstance(aws_secret_key, basestring):
            raise ValueError('Missing or invalid type for required arg. host:"{0}", '
                             'aws_access_key_id:"{1}", aws_secret_access_key:"{2}"'
                             .format(self.host,
                                     aws_access_key,
                                     aws_secret_key))
        self.is_secure = is_secure
        self.port = port
        self.path = path
        # debug is an int representation of the debug level. Use log.debug() for
        # logging debug information
        self.debug = boto_debug_level
        if self.debug:
            boto.set_stream_logger('boto')
        if not logger:
            logger = eulogger.Eulogger(identifier=self.__repr__())
        self.log = logger
        self._show_method = self.log.info
        self.debug_method = self.log.debug
        if err_method:
            self.err_method = err_method
        super(PropertiesConnection, self).__init__(path=self.path,
                                                   aws_access_key_id=aws_access_key,
                                                   aws_secret_access_key=aws_secret_key,
                                                   port=self.port,
                                                   is_secure=self.is_secure,
                                                   host=self.host,
                                                   debug=self.debug,
                                                   **kwargs)
Example #4
 def send(self, verb='GET', **args):
     self.args.update(args)
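     # boto treats debug level 2 as "log full HTTP traffic", so the stream
     # logger is only attached when that much detail was requested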
     if 'debug' in self.args and self.args['debug'] >= 2:
         boto.set_stream_logger(self.name())
     self.process_args()
     self.process_filters()
     conn = self.get_connection(**self.args)
     self.http_response = conn.make_request(self.name(),
                                            self.request_params,
                                            verb=verb)
     self.body = self.http_response.read()
     boto.log.debug(self.body)
     if self.http_response.status == 200:
         self.aws_response = boto.jsonresponse.Element(list_marker=self.list_markers,
                                                       item_marker=self.item_markers)
         h = boto.jsonresponse.XmlHandler(self.aws_response, self)
         h.parse(self.body)
         return self.aws_response
     else:
         boto.log.error('%s %s' % (self.http_response.status,
                                   self.http_response.reason))
         boto.log.error('%s' % self.body)
         raise conn.ResponseError(self.http_response.status,
                                  self.http_response.reason,
                                  self.body)
Example #5
 def set_verbose(self):
     import logging
     import boto
     self.verbose = True
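     # raise both the boto (v2) and botocore loggers to DEBUG, then attach a
     # stdout handler so the output is actually visible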
     logging.getLogger('boto').setLevel(logging.DEBUG)
     logging.getLogger('botocore').setLevel(logging.DEBUG)
     boto.set_stream_logger('boto', level=logging.DEBUG)
Example #6
def sns_send_push_notification_to_user(endpoint=None, message=None, data=None, user=None, **kwargs):
    if not user:
        return

    boto.set_stream_logger('boto')
    sns = boto.sns.connect_to_region('us-east-1')
    
    apns_dict = {'aps':{'alert':message,'sound':'default'}}
    if data:
        apns_dict['data'] = data
    apns_string = json.dumps(apns_dict, ensure_ascii=False)
    message = {'default':message, settings.AWS_SNS_APNS_PLATFORM:apns_string}
    messageJSON = json.dumps(message, ensure_ascii=False)
    
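    # with message_structure='json', SNS expects the message to be a JSON
    # object keyed by platform ('default' plus e.g. the APNS platform name)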
    try:
        for device in user.installations.all():
            apns_endpoint = None
            apns = device.push_endpoints.get('apns', None)
            if apns:
                apns_endpoint = apns.get(endpoint, None)
            if apns_endpoint:
                msg_id = sns.publish(topic=None, message = messageJSON, target_arn=apns_endpoint, message_structure='json')
    except BotoServerError as e:
        logger.error("utils.aws.sns_send_push_notification_to_user. Boto error {} to user {}".format(e.code, user))
    except:
        logger.error("utils.aws.sns_send_push_notification_to_user. Unknown error {} {}".format(sys.exc_info()[0], sys.exc_info()[1]))
Example #7
 def enable_connection_debug(self, level=DEBUG, format_string=None):
     try:
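         # debug=2 on the connection also dumps raw HTTP requests/responses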
         self.connection.debug = 2
         level = Eulogger.format_log_level(level, 'DEBUG')
         set_stream_logger('boto', level=level, format_string=None)
     except:
         self.log.error('Could not enable debug for: "{0}"'.format(self))
         raise
Example #8
 def disable_connection_debug(self, level=NOTSET):
     try:
         self.connection.debug = 0
         level = Eulogger.format_log_level(level, 'NOTSET')
         set_stream_logger('boto', level=level, format_string=None)
     except:
         self.log.error('Could not disable debug for: "{0}"'.format(self))
         raise
Example #9
def S3Test():
    boto.set_stream_logger('boto')
    bucket = s3.get_bucket('latest-snapshot', validate=False)
    exists = s3.lookup('latest-snapshot')
    for bucket in s3:
        for key in bucket:
            print(key.name)
    
    key = s3.get_bucket('latest-snapshot').get_key('knapp.JPG')
    key.set_contents_from_filename('/home/pi/OSecuritySnapshots/latestSnapshot.JPG')
Example #10
    def __init__(self, config):

        # init config and database
        self._config = config
        self._mbs_db_client = None

        self._type_bindings = self._get_type_bindings()

        # make the maker
        self._maker = Maker(type_bindings=self._type_bindings)

        #  notifications
        self._notifications = None

        # init object collections
        self._backup_collection = None
        self._plan_collection = None
        self._deleted_plan_collection = None
        self._audit_collection = None
        self._restore_collection = None

        # load backup system/engines lazily
        self._backup_system = None
        self._api_server = None

        # listens for backup events coming through rest
        self._backup_event_listener = None

        self._api_client = None


        self._engines = None

        # init the encryptor
        self._encryptor = self._get_encryptor()

        #
        self._backup_source_builder = None
        self._default_backup_assistant = None
        self._temp_dir = resolve_path(DEFAULT_BACKUP_TEMP_DIR_ROOT)

        self._event_colllection = None
        self._event_listener_collection = None
        self._event_queue = None

        # allow boto debug to be configurable
        if config.get("enableBotoDebug"):
            import boto
            boto.set_stream_logger('boto')

        # read log path
        if config.get("logPath"):
            import mbs_config
            mbs_config.MBS_LOG_PATH = config.get("logPath")
Example #11
    def connect(self, profile=None):
        """create or reuse a boto s3 connection.
        An option aws profile may be given.
        """
        if profile:
            self.profile = profile

        if boto.config.getint('Boto', 'debug', 0) > 0:
            boto.set_stream_logger('boto')

        if not self.conn:
            self.conn = self.new_connection(profile)
        return self.conn
Example #12
def main():
    parser = argparse.ArgumentParser(description="List S3 contents (with retry) to a file")
    parser.add_argument("--output-file", type=argparse.FileType('w'))
    parser.add_argument("--bucket", default="telemetry-published-v2")
    parser.add_argument("--prefix", default="")
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("--debug", action="store_true")
    args = parser.parse_args()
    
    if args.debug:
        boto.set_stream_logger('boto')

    list_files(args.bucket, args.output_file, prefix=args.prefix)
Example #13
    def parse(*args, **kwargs):
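        # decorator wrapper: fn is the option-parsing function being wrapped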
        options, args = fn(*args, **kwargs)

        if options.debug:
            boto.set_stream_logger("boto")
            global DEBUG
            DEBUG = True

        if options.verbose:
            global VERBOSE
            VERBOSE = True

        return options, args
Example #14
def addimage(user_id):
    if db.isvaliduser(user_id):
        upload = request.files.get('content')
        name, ext = os.path.splitext(upload.filename)

        if ext not in ('.png','.jpg','.jpeg'):
            return 'File extension not allowed.'
        print ext

        # check whether the save directory exists
        #save_path = '/Users/poojasrinivas/Desktop/275/save'
        save_path = 'C:\\tmp'
        if not os.path.exists(save_path):
            os.makedirs(save_path)
        upload.save(save_path)
        addedPinPath = save_path + '\\' + name + ext
        #addedPinPath = save_path + '/' + name +ext

        boto.set_stream_logger('boto')
        #TODO removed s3 line
        s3 =  S3Connection('AKIAIZGBRZFQJ6ZO7O6Q', 'y5zskvZW+yyH0OuUlbOuq6alNQvQzalhLarp2MAv')
        #s3 = boto.connect_s3()

        pin._pinid = db.returnPinid()
        print 'Connected to S3'
        bucket = s3.get_bucket('bucket275')


        #create key
        k=Key(bucket)
        k.key=pin._pinid # TODO set auto generated pin id here
        print addedPinPath
        k.set_contents_from_filename(addedPinPath)
        time.sleep(2)

        #pin._pinid = '3' #TODO generate the pin id
        pin._pinname = name
        pin._pinurl='https://bucket275.s3.amazonaws.com/'+ str(pin._pinid)
        pin._boardid = ''

        pindetails = db.insertPin(pin)
        os.remove(addedPinPath)
        response.set_header("content-type","application/json")
        response.body = pindetails
        print "***Response returned is:\n"
        print response.status
        print response.body
        print "\n***"
        return response
    else:
        return erroruser()
Example #15
    def process_args(self, **args):
        """
        Responsible for walking through Params defined for the request and:

        * Matching them with keyword parameters passed to the request
          constructor or via the command line.
        * Checking to see if all required parameters have been specified
          and raising an exception, if not.
        * Encoding each value into the set of request parameters that will
          be sent in the request to the AWS service.
        """
        self.args.update(args)
        self.connection_args = copy.copy(self.args)
        if 'debug' in self.args and self.args['debug'] >= 2:
            boto.set_stream_logger(self.name())
        required = [p.name for p in self.Params+self.Args if not p.optional]
        for param in self.Params+self.Args:
            if param.long_name:
                python_name = param.long_name.replace('-', '_')
            else:
                python_name = boto.utils.pythonize_name(param.name, '_')
            value = None
            if python_name in self.args:
                value = self.args[python_name]
            if value is None:
                value = param.default
            if value is not None:
                if param.name in required:
                    required.remove(param.name)
                if param.request_param:
                    if param.encoder:
                        param.encoder(param, self.request_params, value)
                    else:
                        Encoder.encode(param, self.request_params, value)
            if python_name in self.args:
                del self.connection_args[python_name]
        if required:
            l = []
            for p in self.Params+self.Args:
                if p.name in required:
                    if p.short_name and p.long_name:
                        l.append('(%s, %s)' % (p.optparse_short_name,
                                               p.optparse_long_name))
                    elif p.short_name:
                        l.append('(%s)' % p.optparse_short_name)
                    else:
                        l.append('(%s)' % p.optparse_long_name)
            raise RequiredParamError(','.join(l))
        boto.log.debug('request_params: %s' % self.request_params)
        self.process_markers(self.Response)
Example #16
 def __init__(self, clc_host, access_id, secret_key, token):
     boto.set_stream_logger('foo')
     path='/services/elb'
     port=8773
     if clc_host[len(clc_host)-13:] == 'amazonaws.com':
         clc_host = clc_host.replace('ec2', 'elasticloadbalancing', 1)
         path = '/'
         reg = None
         port=443
     reg = RegionInfo(name='eucalyptus', endpoint=clc_host)
     self.conn = ELBConnection(access_id, secret_key, region=reg,
                               port=port, path=path, validate_certs=False,
                               is_secure=True, security_token=token, debug=2)
     self.conn.http_connection_kwargs['timeout'] = 30
Example #17
    def __init__(self,
                 host,
                 access_key, secret_key,
                 admin_path='/admin',
                 aws_signature='AWS4',
                 timeout=30,
                 is_secure=True, port=None,
                 proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None,
                 debug=False,
                 https_connection_factory=None, security_token=None,
                 validate_certs=True):
        """Constructor."""

        self._admin_path = admin_path
        if debug:
            boto.set_stream_logger('boto')
            debug_boto = 10
        else:
            debug_boto = 0

        # AWS4 and AWS2 signature support
        # see boto.auth.S3HmacAuthV4Handler
        # see boto.auth.HmacAuthV1Handler
        if aws_signature == 'AWS4':
            # AWS4 signature
            self._signature_algo = ['hmac-v4-s3']
        else:
            # old style AWS2 signature algo
            self._signature_algo = ['hmac-v1']

        # init AWS connection
        boto.connection.AWSAuthConnection.__init__(self,
                                                   host=host,
                                                   aws_access_key_id=access_key,
                                                   aws_secret_access_key=secret_key,
                                                   is_secure=is_secure, port=port,
                                                   proxy=proxy, proxy_port=proxy_port,
                                                   proxy_user=proxy_user, proxy_pass=proxy_pass,
                                                   debug=debug_boto,
                                                   https_connection_factory=https_connection_factory,
                                                   path=self._admin_path,
                                                   provider='aws',
                                                   security_token=security_token,
                                                   suppress_consec_slashes=True,
                                                   validate_certs=validate_certs)
        # set http_socket_timeout
        self.http_connection_kwargs['timeout'] = timeout
        if aws_signature == 'AWS4':
            self._set_auth_region_name('s3')
Example #18
 def __init__(self, clc_host, access_id, secret_key, token):
     boto.set_stream_logger('foo')
     path='/services/Walrus'
     port=8773
     try:
         # this call is just here to check if the ec2 feature is configured
         host = eucaconsole.config.get('test', 'ec2.endpoint')
         path = '/'
         port=443
     except ConfigParser.Error:
         pass
     self.conn = S3Connection(access_id, secret_key, host=clc_host,
                               port=port, path=path,
                               is_secure=True, security_token=token, debug=2)
     self.conn.http_connection_kwargs['timeout'] = 30
Example #19
 def __init__(self, index_path, bucket_name, num_processes=2,
              log_file=None, log_level=logging.INFO):
     self.index_path = index_path
     self.bucket_name = bucket_name
     self.num_processes = num_processes
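      # set_file_logger is the file-backed counterpart of set_stream_logger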
     if log_file:
         boto.set_file_logger('pynas-uploader', log_file, log_level)
     else:
         boto.set_stream_logger('pynas-uploader', log_level)
     self.task_queue = multiprocessing.JoinableQueue()
     self.status_queue = multiprocessing.Queue()
     self.s3 = boto.connect_s3()
     self.bucket = self.s3.lookup(self.bucket_name)
     self.index = pynas.index.Index(index_path)
     self.n_tasks = 0
Example #20
    def update(self, app_obj):
        """Update a given pattern."""
        if app_obj.pargs.debug is True:
            boto.set_stream_logger("aws_provider")

        context = self.scenario.context

        raw_template = self.scenario.template

        template_dict = parse_cf_json(raw_template)
        # Minimize JSON to save space
        template_json = json.dumps(template_dict, separators=(",", ":"))

        params = construct_params(context["parameters"], template_dict["Parameters"].keys())

        print " - Determining owner information"
        try:
            iam_user = self.iam_conn.get_user()
        except:
            print colored("Error: ", "red") + "Unable to get IAM username"
            exit(1)

        # Archive payload directory and send to S3
        payload_url = self._send_payload(context, timeout=3600)
        params.append(("PayloadURL", payload_url))

        try:
            print " - Updating CloudFormation stack"
            stack_id = self.connection.update_stack(
                self.stack_name,
                template_body=template_json,
                parameters=params,
                capabilities=["CAPABILITY_IAM"],
                tags={"OwnerId": iam_user.user_id, "OwnerName": iam_user.user_name, "CreatedBy": "nepho"},
                disable_rollback=True,
            )

            try:
                self._show_status(self.stack_name)
            except exc.CaughtSignal:
                exit()
        except boto.exception.BotoServerError as e:
            print colored("Error: ", "red") + "Problem communicating with CloudFormation"
            # Use e.message instead of e.body as per: https://github.com/boto/boto/issues/1658
            msg = literal_eval(e.message)
            print "(%s) %s " % (msg["Error"]["Code"], msg["Error"]["Message"])
            exit(1)
        return stack_id
Example #21
    def __init__(self, aws_id=None, aws_key=None, debug=0):
        if aws_id is None:
            self.aws_access_key = None
            sys.exit("Please provide AWS access key and secret!!")
        else:
            self.aws_access_key = aws_id
            self.aws_secret_access_key = aws_key

        self.debug = debug
        if debug > 0:
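            # note: 'default.log' here is a logger name, not a file path; use
            # boto.set_file_logger to actually write to a file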
            boto.set_stream_logger('default.log')
        self.s3 = None
        self.autoscale = None
        self.ec2 = None
        self.cf = None
        self.elb = None
        self.r53 = None
Example #22
 def set_endpoint(self, endpoint, debug=0):
     if debug > 0:
         boto.set_stream_logger('clc')
     reg = RegionInfo(name='eucalyptus', endpoint=endpoint)
     path = '/services/Eucalyptus'
     port = 8773
     if endpoint[len(endpoint)-13:] == 'amazonaws.com':
         path = '/'
         reg = RegionInfo(endpoint=endpoint)
         port = 443
     self.conn = EC2Connection(self.access_id, self.secret_key, region=reg,
                               port=port, path=path,
                               is_secure=True, security_token=self.token, debug=debug)
     logging.info("set new ec2 connection for host : "+endpoint)
     self.conn.APIVersion = '2012-12-01'
     self.conn.https_validate_certificates = False
     self.conn.http_connection_kwargs['timeout'] = 30
Example #23
File: dynq.py Project: grze/dynq
def run_query(connection, table_name, query_value, output_json):
    """
    Run the given query against DynamoDB.

    :param connection: a DynamoDB connection object to use
    :param table_name: the table name to query
    :param query_value: the query to issue to DynamoDB, should be in JSON format
    :param output_json: whether we should try to output the result in JSON instead of shell
    :return: content for the given query, if it exists
    """
    if DYNQ_DEBUG:
        boto.set_stream_logger('boto')

    items = Table(table_name, connection=connection).get_item(**query_value)
    if output_json:
        click.echo(json.dumps(items._data))
    else:
        for field, value in items.items():
            click.echo(field + "=" + value)
Example #24
    def __init__(self, creds=None, host=None, aws_access_key=None, aws_secret_key=None,
                 is_secure=None, port=None, path=None, logger=None, boto_debug=0, **kwargs):

        self.debug = boto_debug
        if self.debug:
            set_stream_logger('boto')
        if creds and not isinstance(creds, Eucarc):
            credsattr = getattr(creds, 'creds', None)
            if not isinstance(credsattr, Eucarc):
                raise ValueError('Unknown type passed for creds arg: "{0}/{1}"'
                                 .format(creds, type(creds)))
            creds = credsattr
        self._eucarc = creds
        self.account_id = None
        self.user_id = None
        if not logger:
            logger = Eulogger(identifier=self.__class__.__name__)
        self.log = logger
        if creds:
            self.user_id = creds.ec2_user_id
            self.account_id = creds.ec2_account_number
            assert isinstance(creds, Eucarc), 'UserAdmin. eucarc not type Eucarc(), got:"{0}/{1}"'\
                .format(creds, type(creds))
            urlp = urlparse(creds.euare_url)
            host = host or getattr(urlp, 'hostname', None)
            port = port or getattr(urlp, 'port', 8773)
            path = path or getattr(urlp, 'path', '/services/Euare')
            if is_secure is None and urlp.scheme == 'https':
                is_secure = True
            aws_secret_key = aws_secret_key or creds.aws_secret_key
            aws_access_key = aws_access_key or creds.aws_access_key
        is_secure = is_secure or False
        ac_kwargs = {'host': host, 'aws_access_key_id': aws_access_key,
                     'aws_secret_access_key': aws_secret_key,
                     'is_secure': is_secure,
                     'port': port, 'path': path}
        ac_kwargs.update(kwargs)
        try:
            super(AccessConnection, self).__init__(**ac_kwargs)
        except:
            self.log.error('Failed to create AccessConnection with kwargs:"{0}"'.format(ac_kwargs))
            raise
Example #25
    def process_cli_args(self):
        '''
        Process CLI args to fill in missing parts of self.args and enable
        debugging if necessary.
        '''
        cli_args = self._cli_parser.parse_args().__dict__
        for (key, val) in cli_args.iteritems():
            self.args.setdefault(key, val)

        if self.args.get('debug'):
            boto.set_stream_logger(self.name())
        if self.args.get('debugger'):
            sys.excepthook = _requestbuilder_except_hook(
                    self.args.get('debugger', False),
                    self.args.get('debug', False))

        if '_filters' in self.args:
            self.args['Filter'] = _process_filters(cli_args.pop('_filters'))
            self._arg_routes.setdefault(self.DefaultRoute, [])
            self._arg_routes[self.DefaultRoute].append('Filter')
Example #26
def addimage(user_id):
    if db.isvaliduser(user_id):
        upload = request.files.get('content')
        name, ext = os.path.splitext(upload.filename)

        if ext not in ('.png','.jpg','.jpeg'):
            return 'File extension not allowed.'

        # check whether the save directory exists
        save_path = 'C:\\tmp'
        upload.save(save_path)
        addedPinPath = save_path + '/' + name + '.jpg'

        boto.set_stream_logger('boto')
        #TODO removed s3 line
        s3 = S3Connection('', '')
        #s3 = boto.connect_s3()
        print 'Connected to S3'
        bucket = s3.get_bucket('bucket275')
        #create key
        k=Key(bucket)
        k.key='3' # TODO set auto generated pin id here
        k.set_contents_from_filename(addedPinPath)
        time.sleep(2)

        pin._pinid = '3' #TODO generate the pin id
        pin._pinname = name
        pin._pinurl='https://bucket275.s3.amazonaws.com/'+pin._pinid
        pin._boardid = ''

        pindetails = db.insertPin(pin)
        os.remove(addedPinPath)
        response.set_header("content-type","application/json")
        response.body = pindetails
        print "***Response returned is:\n"
        print response.status
        print response.body
        print "\n***"
        return response
    else:
        return erroruser()
Example #27
def start_server(configs):
	"""
	The main entry point for the service, regardless of how it's used.
	Takes any number of filename or dictionary objects suitable for
	cherrypy.config.update.
	"""
	importlib.import_module('.agency', __package__)
	aws.set_connection_environment()
	server = JobServer()
	if hasattr(cherrypy.engine, "signal_handler"):
		cherrypy.engine.signal_handler.subscribe()
	if hasattr(cherrypy.engine, "console_control_handler"):
		cherrypy.engine.console_control_handler.subscribe()
	app = cherrypy.tree.mount(server, '/')
	server._app = app
	list(map(app.merge, configs))
	admin_app = cherrypy.tree.mount(Admin(server), '/admin')
	devel_configs = list(configs) + [{
		'/': {
			'tools.auth_basic.on': True,
			'tools.auth_basic.realm': 'RecaptureDocs admin',
			'tools.auth_basic.checkpassword':
				cherrypy.lib.auth_basic.checkpassword_dict(dict(
					admin='g0tch4-h4x0r',
				)),
		},
	}]
	list(map(admin_app.merge, devel_configs))
	cherrypy.tree.mount(GGCServer(), '/ggc')
	if not cherrypy.config.get('server.production', False):
		boto.set_stream_logger('recapturedocs')
		aws.ConnectionFactory.production = False
	server.send_notice(
		"RecaptureDocs {version} server starting on {hostname}"
		.format(
			hostname=socket.getfqdn(),
			version=pkg_resources.require('recapturedocs')[0].version))
	cherrypy.engine.start()
	yield server
	cherrypy.engine.exit()
Example #28
def get_configurator(settings, enable_auth=True):
    connection_debug = asbool(settings.get('connection.debug'))
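    # level=CRITICAL effectively silences boto when connection debug is off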
    boto.set_stream_logger('boto', level=(logging.DEBUG if connection_debug else logging.CRITICAL))
    ensure_session_keys(settings)
    check_types()
    config = Configurator(root_factory=SiteRootFactory, settings=settings)
    if enable_auth:
        authn_policy = SessionAuthenticationPolicy(callback=groupfinder)
        authz_policy = ACLAuthorizationPolicy()
        config.set_authentication_policy(authn_policy)
        config.set_authorization_policy(authz_policy)
        config.set_default_permission('view')
    config.add_request_method(User.get_auth_user, 'user', reify=True)
    cache_duration = int(settings.get('static.cache.duration', 43200))
    config.add_static_view(name='static/' + __version__, path='static', cache_max_age=cache_duration)
    config.add_layout('eucaconsole.layout.MasterLayout',
                      'eucaconsole.layout:templates/master_layout.pt')
    for route in urls:
        config.add_route(route.name, route.pattern)
    setup_tweens(config)
    setup_exts(config)
    config.scan()
    return config
Example #29
def Main(args):
    """Main, where the job dispatching happens and the result is printed.

    Args:
        args (dict): The result of the command-line argument parsing.
    """
    global debug, info, warn, error

    s3statsLogging = Logging()
    s3statsLogging.Configure(args)

    debug, info, warn, error = s3statsLogging.GetLoggers()

    if args.verbose:
        boto.set_stream_logger('boto')

    s3 = boto.connect_s3(args.awsKeyId, args.awsSecret)
    buckets = ApplyFilters(args.bucket, s3.get_all_buckets(), args.bucketRe)
    results = ParallelStats(buckets, args.threads)

    if args.format == "json":
        print json.dumps(results, sort_keys = True, indent = 4)
    else:
        PrintResults(args, results)
Example #30
 def do_cli(self, cli_args=None):
     if not self.parser:
         self.build_cli_parser()
     options, args = self.parser.parse_args(cli_args)
     if hasattr(options, 'help_filters') and options.help_filters:
         print 'Available filters:'
         for filter in self.filters:
             print '%s\t%s' % (filter['name'], filter['doc'])
         sys.exit(0)
     d = {}
     for param in self.params:
         if param.cli_option:
             p_name = param.cli_option[-1]
             d[p_name] = getattr(options, p_name.replace('-', '_'))
         else:
             p_name = boto.utils.pythonize_name(param.name)
             d[p_name] = args
     try:
         self.process_args(d)
     except ValueError as ve:
         print ve.message
         sys.exit(1)
     if hasattr(options, 'filter') and options.filter:
         d = {}
         for filter in options.filter:
             name, value = filter.split('=')
             d[name] = value
         self.process_filters(d)
     try:
         if options.debug:
             boto.set_stream_logger(self.name)
             self.args['debug'] = 2
         self.send()
         self.cli_output_formatter()
     except self.connection.ResponseError as err:
         print 'Error(%s): %s' % (err.error_code, err.error_message)
Example #31
privateFileName = 'nodes-private'

if not dryRun:
    fpublic = open(publicFileName, 'w')
    fprivate = open(privateFileName, 'w')

print 'Using boto version', boto.Version
if True:
    botoVersionArr = boto.Version.split(".")
    if int(botoVersionArr[0]) != 2:
        botoVersionMismatch  # undefined name: raises NameError as a crude abort
    if int(botoVersionArr[1]) < 13:
        botoVersionMismatch

if (debug):
    boto.set_stream_logger('h2o-ec2')
ec2 = boto.ec2.connect_to_region(regionName, debug=debug)

print 'Launching', numInstancesToLaunch, 'instances.'

reservation = ec2.run_instances(image_id=amiId,
                                min_count=numInstancesToLaunch,
                                max_count=numInstancesToLaunch,
                                key_name=keyName,
                                instance_type=instanceType,
                                security_groups=[securityGroupName],
                                instance_profile_arn=iam_profile_resource_name,
                                instance_profile_name=iam_profile_name,
                                dry_run=dryRun)

for i in range(numInstancesToLaunch):
Example #32
import codecs
import os
import ssl
import sys
from functools import wraps

import boto

from c2client.compat import get_connection
from c2client.utils import prettify_xml

# Nasty hack to workaround default ascii codec
if sys.version_info[0] < 3:
    sys.stdout = codecs.getwriter('utf8')(sys.stdout)
    sys.stderr = codecs.getwriter('utf8')(sys.stderr)

if hasattr(ssl, '_create_unverified_context'):
    ssl._create_default_https_context = ssl._create_unverified_context

if os.environ.get("DEBUG"):
    boto.set_stream_logger("c2")


class EnvironmentVariableError(Exception):
    def __init__(self, name):
        super(EnvironmentVariableError, self).__init__(
            "Environment variable '{0}' not found.".format(name.upper()))


def configure_boto():
    """Configure boto runtime for CROC Cloud"""

    if not boto.config.has_section("Boto"):
        boto.config.add_section("Boto")
    boto.config.set("Boto", "is_secure", "True")
    boto.config.set("Boto", "num_retries", "0")
Example #33
def main():
    """Main function"""
    global options

    # DB instance classes as listed on
    # http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html
    db_classes = {
        'db.t1.micro': 0.615,
        'db.m1.small': 1.7,
        'db.m1.medium': 3.75,
        'db.m1.large': 7.5,
        'db.m1.xlarge': 15,
        'db.m4.large': 8,
        'db.m4.xlarge': 16,
        'db.m4.2xlarge': 32,
        'db.m4.4xlarge': 64,
        'db.m4.10xlarge': 160,
        'db.r3.large': 15,
        'db.r3.xlarge': 30.5,
        'db.r3.2xlarge': 61,
        'db.r3.4xlarge': 122,
        'db.r3.8xlarge': 244,
        'db.t2.micro': 1,
        'db.t2.small': 2,
        'db.t2.medium': 4,
        'db.t2.large': 8,
        'db.m3.medium': 3.75,
        'db.m3.large': 7.5,
        'db.m3.xlarge': 15,
        'db.m3.2xlarge': 30,
        'db.m2.xlarge': 17.1,
        'db.m2.2xlarge': 34.2,
        'db.m2.4xlarge': 68.4,
        'db.cr1.8xlarge': 244,
    }

    # RDS metrics http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/rds-metricscollected.html
    metrics = {
        'BinLogDiskUsage':
        'binlog_disk_usage',  # The amount of disk space occupied by binary logs on the master.  Units: Bytes
        'CPUUtilization':
        'utilization',  # The percentage of CPU utilization.  Units: Percent
        'DatabaseConnections':
        'connections',  # The number of database connections in use.  Units: Count
        'DiskQueueDepth':
        'disk_queue_depth',  # The number of outstanding IOs (read/write requests) waiting to access the disk.  Units: Count
        'ReplicaLag':
        'replica_lag',  # The amount of time a Read Replica DB Instance lags behind the source DB Instance.  Units: Seconds
        'SwapUsage':
        'swap_usage',  # The amount of swap space used on the DB Instance.  Units: Bytes
        'FreeableMemory':
        'used_memory',  # The amount of available random access memory.  Units: Bytes
        'FreeStorageSpace':
        'used_space',  # The amount of available storage space.  Units: Bytes
        'ReadIOPS':
        'read_iops',  # The average number of disk I/O operations per second.  Units: Count/Second
        'WriteIOPS':
        'write_iops',  # The average number of disk I/O operations per second.  Units: Count/Second
        'ReadLatency':
        'read_latency',  # The average amount of time taken per disk I/O operation.  Units: Seconds
        'WriteLatency':
        'write_latency',  # The average amount of time taken per disk I/O operation.  Units: Seconds
        'ReadThroughput':
        'read_throughput',  # The average number of bytes read from disk per second.  Units: Bytes/Second
        'WriteThroughput':
        'write_throughput',  # The average number of bytes written to disk per second.  Units: Bytes/Second
    }

    # Parse options
    parser = optparse.OptionParser()
    parser.add_option('-l',
                      '--list',
                      help='list DB instances',
                      action='store_true',
                      default=False,
                      dest='db_list')
    parser.add_option(
        '-n',
        '--profile',
        default=None,
        help=
        'AWS profile from ~/.boto or /etc/boto.cfg. Default: None, falls back to "[Credentials]".'
    )
    parser.add_option(
        '-r',
        '--region',
        default='us-east-1',
        help=
        'AWS region. Default: us-east-1. If set to "all", we try to detect the instance region '
        'across all of them, note this will be slower than if you specify the region explicitly.'
    )
    parser.add_option('-i', '--ident', help='DB instance identifier')
    parser.add_option(
        '-p',
        '--print',
        help='print status and other details for a given DB instance',
        action='store_true',
        default=False,
        dest='printinfo')
    parser.add_option('-m',
                      '--metric',
                      help='metrics to retrieve separated by comma: [%s]' %
                      ', '.join(metrics.keys()))
    parser.add_option('-d',
                      '--debug',
                      help='enable debugging',
                      action='store_true',
                      default=False)
    options, _ = parser.parse_args()

    # Strip a prefix _ which is sent by Cacti, so an empty argument is
    # interpreted correctly. Then set defaults if an argument is supposed
    # to be empty.
    options.region = options.region.lstrip('_')
    if options.profile:
        options.profile = options.profile.lstrip('_')
    if not options.region:
        options.region = 'us-east-1'

    if not options.profile:
        options.profile = None

    if options.debug:
        boto.set_stream_logger('boto')

    rds = RDS(region=options.region,
              profile=options.profile,
              identifier=options.ident)

    # Check args
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit()
    elif options.db_list:
        info = rds.get_list()
        print 'List of all DB instances in %s region(s):' % (options.region, )
        pprint.pprint(info)
        sys.exit()
    elif not options.ident:
        parser.print_help()
        parser.error('DB identifier is not set.')
    elif options.printinfo:
        info = rds.get_info()
        pprint.pprint(vars(info))
        sys.exit()
    elif not options.metric:
        parser.print_help()
        parser.error('Metric is not set.')

    selected_metrics = options.metric.split(',')
    for metric in selected_metrics:
        if metric not in metrics.keys():
            parser.print_help()
            parser.error('Invalid metric.')

    # Do not remove the empty lines in the start and end of this docstring
    perl_magic_vars = """

    # Define the variables to output.  I use shortened variable names so maybe
    # it'll all fit in 1024 bytes for Cactid and Spine's benefit.  Strings must
    # have some non-hex characters (non a-f0-9) to avoid a Cacti bug.  This list
    # must come right after the word MAGIC_VARS_DEFINITIONS.  The Perl script
    # parses it and uses it as a Perl variable.
    $keys = array(
       'binlog_disk_usage'       =>  'gg',
       'utilization'             =>  'gh',
       'connections'             =>  'gi',
       'disk_queue_depth'        =>  'gj',
       'replica_lag'             =>  'gk',
       'swap_usage'              =>  'gl',
       'used_memory'             =>  'gm',
       'total_memory'            =>  'gn',
       'used_space'              =>  'go',
       'total_space'             =>  'gp',
       'read_iops'               =>  'gq',
       'write_iops'              =>  'gr',
       'read_latency'            =>  'gs',
       'write_latency'           =>  'gt',
       'read_throughput'         =>  'gu',
       'write_throughput'        =>  'gv',
    );

    """
    output = dict()
    for row in perl_magic_vars.split('\n'):
        if row.find('=>') >= 0:
            k = row.split(' => ')[0].strip().replace("'", '')
            v = row.split(' => ')[1].strip().replace("'", '').replace(',', '')
            output[k] = v

    debug('Perl magic vars: %s' % output)
    debug('Metric associations: %s' % dict(
        (k, output[v]) for (k, v) in metrics.iteritems()))

    # Handle metrics
    results = []
    for metric in selected_metrics:
        stats = rds.get_metric(metric)
        if metric == 'FreeableMemory':
            info = rds.get_info()
            try:
                memory = db_classes[info.instance_class] * 1024**3
            except KeyError:
                print 'Unknown DB instance class "%s"' % info.instance_class
                sys.exit(1)

            results.append('%s:%.0f' % (output['used_memory'], memory - stats))
            results.append('%s:%.0f' % (output['total_memory'], memory))
        elif metric == 'FreeStorageSpace':
            info = rds.get_info()
            storage = float(info.allocated_storage) * 1024**3
            results.append('%s:%.0f' % (output['used_space'], storage - stats))
            results.append('%s:%.0f' % (output['total_space'], storage))
        else:
            short_var = output.get(metrics[metric])
            if not short_var:
                print 'Chosen metric does not have a correspondent entry in perl magic vars'
                sys.exit(1)

            results.append('%s:%s' % (short_var, stats))

    print ' '.join(results)
Example #34
proxyHost = None
proxyPort = None

# Local proxy setting determination

myAddress = socket.gethostbyaddr(socket.gethostname())

if string.find(myAddress[0], "s3group") > 0:
    if string.find(myAddress[-1][0], "193.120") == 0:
        proxyHost = 'w3proxy.s3group.com'
        proxyPort = 3128


# Turn on verbose output: debugging aid
boto.set_stream_logger('mylog')

###########################################################################
# Stage #1: Load balancer
###########################################################################

# Takes credentials from the environment: assumes they are defined in shell
# variables in one of the supported ways. If they aren't, this will fail
# authentication.
reg_con = boto.ec2.connect_to_region(region_name=thisRegion, proxy=proxyHost, proxy_port=proxyPort)

# We will allow all availability zones in the chosen region. 
# This is supposed to allow a better chance of the site staying up (although I am not sure it makes any difference)
allZones = reg_con.get_all_zones()

zoneStrings = []
for zone in allZones:
Example #35
from __future__ import division
from future import standard_library
standard_library.install_aliases()
import logging
import re
import fnmatch
import configparser
import math
import os
from urllib.parse import urlparse
import warnings

import boto
from boto.s3.connection import S3Connection
from boto.sts import STSConnection
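# attach a stdout handler (DEBUG by default), then raise the 'boto' logger to
# INFO so routine request chatter is suppressed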
boto.set_stream_logger('boto')
logging.getLogger("boto").setLevel(logging.INFO)

from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook


def _parse_s3_config(config_file_name, config_format='boto', profile=None):
    """
    Parses a config file for s3 credentials. Can currently
    parse boto, s3cmd.conf and AWS SDK config formats

    :param config_file_name: path to the config file
    :type config_file_name: str
    :param config_format: config type. One of "boto", "s3cmd" or "aws".
        Defaults to "boto"
Example #36
# This will cause full debug output to go to the console
>>> import boto
>>> boto.set_stream_logger('foo')
>>> ec2 = boto.connect_ec2(debug=2)
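
A minimal side-by-side sketch of the two logging helpers boto ships (not taken
from any project above; the logger name 'my-tool' and the log path are
placeholders):

import logging
import boto

# Attach a stdout handler to the 'my-tool' logger (level defaults to DEBUG)
boto.set_stream_logger('my-tool')

# Or route the same output to a file instead, at a quieter level
boto.set_file_logger('my-tool', '/tmp/boto-debug.log', level=logging.INFO)

# debug=2 on a connection additionally dumps raw HTTP requests/responses
ec2 = boto.connect_ec2(debug=2)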
Example #37
# Via Fine-uploader's server examples.
# MIT License https://github.com/Widen/fine-uploader-server/blob/master/license.txt
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt

import base64, hmac, hashlib, json

if hasattr(settings, "AWS_UPLOAD_CLIENT_KEY") and hasattr(
        settings, "AWS_UPLOAD_CLIENT_SECRET_KEY"):
    try:
        import boto
        import logging
        from boto.s3.connection import Key, S3Connection
        boto.set_stream_logger('boto', level=logging.INFO)
        S3 = S3Connection(settings.AWS_UPLOAD_CLIENT_KEY,
                          settings.AWS_UPLOAD_CLIENT_SECRET_KEY)
    except ImportError as e:
        print("Could not import boto, the Amazon SDK for Python.")
        print("Deleting files will not work.")
        print("Install boto with")
        print("$ pip install boto")


def home(request):
    """ The 'home' page. Returns an HTML page with Fine Uploader code
    ready to upload to S3.
    """
    return render(request, "index.html")
Example #38
 def set_debug(self, debug=False):
     if debug:
         boto.set_stream_logger('euca2ools')
         self.debug = 2
Example #39
def main():
    """Main function"""
    global options

    short_status = {
        OK: 'OK',
        WARNING: 'WARN',
        CRITICAL: 'CRIT',
        UNKNOWN: 'UNK'
    }

    # DB instance classes as listed on
    # http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.DBInstanceClass.html
    db_classes = {
        'db.t1.micro': 0.615,
        'db.m1.small': 1.7,
        'db.m1.medium': 3.75,
        'db.m1.large': 7.5,
        'db.m1.xlarge': 15,
        'db.m4.large': 8,
        'db.m4.xlarge': 16,
        'db.m4.2xlarge': 32,
        'db.m4.4xlarge': 64,
        'db.m4.10xlarge': 160,
        'db.r3.large': 15,
        'db.r3.xlarge': 30.5,
        'db.r3.2xlarge': 61,
        'db.r3.4xlarge': 122,
        'db.r3.8xlarge': 244,
        'db.t2.micro': 1,
        'db.t2.small': 2,
        'db.t2.medium': 4,
        'db.t2.large': 8,
        'db.m3.medium': 3.75,
        'db.m3.large': 7.5,
        'db.m3.xlarge': 15,
        'db.m3.2xlarge': 30,
        'db.m2.xlarge': 17.1,
        'db.m2.2xlarge': 34.2,
        'db.m2.4xlarge': 68.4,
        'db.cr1.8xlarge': 244,
    }

    # RDS metrics http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/rds-metricscollected.html
    metrics = {
        'status': 'RDS availability',
        'load': 'CPUUtilization',
        'memory': 'FreeableMemory',
        'storage': 'FreeStorageSpace'
    }

    units = ('percent', 'GB')

    # Parse options
    parser = optparse.OptionParser()
    parser.add_option('-l',
                      '--list',
                      help='list of all DB instances',
                      action='store_true',
                      default=False,
                      dest='db_list')
    parser.add_option(
        '-n',
        '--profile',
        default=None,
        help=
        'AWS profile from ~/.boto or /etc/boto.cfg. Default: None, falls back to "[Credentials]".'
    )
    parser.add_option(
        '-r',
        '--region',
        default='us-east-1',
        help=
        'AWS region. Default: us-east-1. If set to "all", we try to detect the instance region '
        'across all of them, note this will be slower than if you specify the region explicitly.'
    )
    parser.add_option('-i', '--ident', help='DB instance identifier')
    parser.add_option(
        '-p',
        '--print',
        help='print status and other details for a given DB instance',
        action='store_true',
        default=False,
        dest='printinfo')
    parser.add_option('-m',
                      '--metric',
                      help='metric to check: [%s]' % ', '.join(metrics.keys()))
    parser.add_option('-w', '--warn', help='warning threshold')
    parser.add_option('-c', '--crit', help='critical threshold')
    parser.add_option(
        '-u',
        '--unit',
        help='unit of thresholds for "storage" and "memory" metrics: [%s]. '
        'Default: percent' % ', '.join(units),
        default='percent')
    parser.add_option('-t',
                      '--time',
                      help='time period in minutes to query. Default: 5',
                      type='int',
                      default=5)
    parser.add_option('-a',
                      '--avg',
                      help='time average in minutes to request. Default: 1',
                      type='int',
                      default=1)
    parser.add_option(
        '-f',
        '--forceunknown',
        help='force alerts on unknown status. This prevents issues related to '
        'AWS CloudWatch throttling limits. Default: False',
        action='store_true',
        default=False)
    parser.add_option('-d',
                      '--debug',
                      help='enable debug output',
                      action='store_true',
                      default=False)
    options, _ = parser.parse_args()

    if options.debug:
        boto.set_stream_logger('boto')

    rds = RDS(region=options.region,
              profile=options.profile,
              identifier=options.ident)

    # Check args
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit()
    elif options.db_list:
        info = rds.get_list()
        print 'List of all DB instances in %s region(s):' % (options.region, )
        pprint.pprint(info)
        sys.exit()
    elif not options.ident:
        parser.print_help()
        parser.error('DB identifier is not set.')
    elif options.printinfo:
        info = rds.get_info()
        if info:
            pprint.pprint(vars(info))
        else:
            print 'No DB instance "%s" found on your AWS account and %s region(s).' % (
                options.ident, options.region)

        sys.exit()
    elif not options.metric or options.metric not in metrics.keys():
        parser.print_help()
        parser.error('Metric is not set or not valid.')
    elif not options.warn and options.metric != 'status':
        parser.print_help()
        parser.error('Warning threshold is not set.')
    elif not options.crit and options.metric != 'status':
        parser.print_help()
        parser.error('Critical threshold is not set.')
    elif options.avg <= 0 and options.metric != 'status':
        parser.print_help()
        parser.error('Average must be greater than zero.')
    elif options.time <= 0 and options.metric != 'status':
        parser.print_help()
        parser.error('Time must be greater than zero.')

    now = datetime.datetime.utcnow()
    status = None
    note = ''
    perf_data = None

    # RDS Status
    if options.metric == 'status':
        info = rds.get_info()
        if not info:
            status = UNKNOWN
            note = 'Unable to get RDS instance'
        else:
            status = OK
            try:
                version = info.EngineVersion
            except:
                version = info.engine_version

            note = '%s %s. Status: %s' % (info.engine, version, info.status)

    # RDS Load Average
    elif options.metric == 'load':
        # Check thresholds
        try:
            warns = [float(x) for x in options.warn.split(',')]
            crits = [float(x) for x in options.crit.split(',')]
            fail = len(warns) + len(crits)
        except:
            fail = 0

        if fail != 6:
            parser.error(
                'Warning and critical thresholds should be 3 comma separated numbers, e.g. 20,15,10'
            )

        loads = []
        fail = False
        j = 0
        perf_data = []
        for i in [1, 5, 15]:
            if i == 1:
                # Some stats are slow to update on CloudWatch. Pick a few
                # points for the 1-min load avg and use the last point.
                points = 5
            else:
                points = i

            load = rds.get_metric(
                metrics[options.metric],
                now - datetime.timedelta(seconds=points * 60), now, i * 60)
            if not load:
                status = UNKNOWN
                note = 'Unable to get RDS statistics'
                perf_data = None
                break

            loads.append(str(load))
            perf_data.append('load%s=%s;%s;%s;0;100' %
                             (i, load, warns[j], crits[j]))

            # Compare thresholds
            if not fail:
                if warns[j] > crits[j]:
                    parser.error(
                        'Parameter inconsistency: warning threshold is greater than critical.'
                    )
                elif load >= crits[j]:
                    status = CRITICAL
                    fail = True
                elif load >= warns[j]:
                    status = WARNING

            j = j + 1

        if status != UNKNOWN:
            if status is None:
                status = OK

            note = 'Load average: %s%%' % '%, '.join(loads)
            perf_data = ' '.join(perf_data)

    # RDS Free Storage
    # RDS Free Memory
    elif options.metric in ['storage', 'memory']:
        # Check thresholds
        try:
            warn = float(options.warn)
            crit = float(options.crit)
        except:
            parser.error('Warning and critical thresholds should be numeric.')

        if crit > warn:
            parser.error(
                'Parameter inconsistency: critical threshold is greater than warning.'
            )

        if options.unit not in units:
            parser.print_help()
            parser.error('Unit is not valid.')

        info = rds.get_info()
        free = rds.get_metric(
            metrics[options.metric],
            now - datetime.timedelta(seconds=options.time * 60), now,
            options.avg * 60)
        if not info or not free:
            status = UNKNOWN
            note = 'Unable to get RDS details and statistics'
        else:
            if options.metric == 'storage':
                storage = float(info.allocated_storage)
            elif options.metric == 'memory':
                try:
                    storage = db_classes[info.instance_class]
                except:
                    print 'Unknown DB instance class "%s"' % info.instance_class
                    sys.exit(CRITICAL)

            free = '%.2f' % (free / 1024**3)
            free_pct = '%.2f' % (float(free) / storage * 100)
            if options.unit == 'percent':
                val = float(free_pct)
                val_max = 100
            elif options.unit == 'GB':
                val = float(free)
                val_max = storage

            # Compare thresholds
            if val <= crit:
                status = CRITICAL
            elif val <= warn:
                status = WARNING

            if status is None:
                status = OK

            note = 'Free %s: %s GB (%.0f%%) of %s GB' % (
                options.metric, free, float(free_pct), storage)
            perf_data = 'free_%s=%s;%s;%s;0;%s' % (options.metric, val, warn,
                                                   crit, val_max)

    # Final output
    if status != UNKNOWN and perf_data:
        print '%s %s | %s' % (short_status[status], note, perf_data)
    elif status == UNKNOWN and not options.forceunknown:
        print '%s %s | null' % ('OK', note)
        sys.exit(0)
    else:
        print '%s %s' % (short_status[status], note)

    sys.exit(status)
Example #40
                                         check_lzop, check_pv, compressed_pipe)

DEFAULT_CONCURRENCY = max(multiprocessing.cpu_count() - 1, 1)
BUFFER_SIZE = 64  # Default bufsize is 64M
MBFACTOR = float(1 << 20)
MAX_RETRY_COUNT = 4
SLEEP_TIME = 2
SLEEP_MULTIPLIER = 3
UPLOAD_TIMEOUT = 600
DEFAULT_REDUCED_REDUNDANCY = False

logging_helper.configure(format='%(name)-12s %(levelname)-8s %(message)s')

logger = logging_helper.CassandraSnapshotterLogger(
    'cassandra_snapshotter.agent')
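# WARNING here keeps boto quiet except for real problems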
boto.set_stream_logger('boto', logging.WARNING)


def get_bucket(s3_bucket, aws_access_key_id, aws_secret_access_key,
               s3_connection_host):
    connection = S3Connection(aws_access_key_id=aws_access_key_id,
                              aws_secret_access_key=aws_secret_access_key,
                              host=s3_connection_host)
    return connection.get_bucket(s3_bucket, validate=False)


def destination_path(s3_base_path, file_path, compressed=True):
    suffix = '.lzo' if compressed else ''
    dest_path = "{}{}{}".format(s3_base_path, file_path, suffix)
    return dest_path