Example #1
    def create(self, label=None, name=None, critical_state=None, ok_state=None,
            warning_state=None):
        """
        Creates a notification plan to be executed when a monitoring check
        triggers an alarm. You can optionally label (or name) the plan.

        A plan consists of one or more notifications to be executed when an
        associated alarm is triggered. You can have different lists of actions
        for CRITICAL, WARNING or OK states.
        """
        uri = "/%s" % self.uri_base
        body = {"label": label or name}

        def make_list_of_ids(parameter):
            params = utils.coerce_string_to_list(parameter)
            return [utils.get_id(param) for param in params]

        # make_list_of_ids() already coerces a single value to a list, so the
        # states can be passed straight through.
        if critical_state:
            body["critical_state"] = make_list_of_ids(critical_state)
        if warning_state:
            body["warning_state"] = make_list_of_ids(warning_state)
        if ok_state:
            body["ok_state"] = make_list_of_ids(ok_state)
        resp, resp_body = self.api.method_post(uri, body=body)
        return self.get(resp.headers["x-object-id"])
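
A minimal usage sketch for the method above, assuming 'plans' is already bound to an instance of this manager; the label and notification IDs are placeholders:

    # Hypothetical usage; 'plans' and the notification ID are invented.
    plan = plans.create(label="urgent-plan",
            critical_state="ntTechnicalContactsEmail",  # a single string works
            ok_state=["ntTechnicalContactsEmail"])      # as does a list
    print(plan.id)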
Example #2
    def _resolve_lbs(load_balancers):
        """
        Takes either a single LB reference or a list of references and returns
        the list of dictionaries required for creating a Scaling Group.

        References can be either a dict that matches the structure required by
        the autoscale API, a CloudLoadBalancer instance, or the ID of the load
        balancer.
        """
        lb_args = []
        lbs = utils.coerce_string_to_list(load_balancers)
        for lb in lbs:
            if isinstance(lb, dict):
                lb_args.append(lb)
            elif isinstance(lb, CloudLoadBalancer):
                lb_args.append({
                        "loadBalancerId": lb.id,
                        "port": lb.port,
                        })
            else:
                # See if it's an ID for a Load Balancer
                try:
                    instance = pyrax.cloud_loadbalancers.get(lb)
                except Exception:
                    raise exc.InvalidLoadBalancer("Received an invalid "
                            "specification for a Load Balancer: '%s'" % lb)
                lb_args.append({
                        "loadBalancerId": instance.id,
                        "port": instance.port,
                        })
        return lb_args
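
A sketch of the three accepted reference forms; the IDs, port, and 'lb_obj' below are invented for illustration:

    # Hypothetical inputs; 'lb_obj' is a CloudLoadBalancer instance.
    _resolve_lbs(1234)                                   # a bare load balancer ID
    _resolve_lbs({"loadBalancerId": 1234, "port": 80})   # a raw autoscale dict
    _resolve_lbs([lb_obj, 5678])                         # a mixed list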
Example #3
 def update_check(self, check, label=None, name=None, disabled=None,
         metadata=None, monitoring_zones_poll=None, timeout=None,
         period=None, target_alias=None, target_hostname=None,
         target_receiver=None):
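     """
     Updates an existing check with any of the parameter values passed in.
     The 'label' and 'name' parameters are synonyms for the check's label.
     """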
     if monitoring_zones_poll:
         monitoring_zones_poll = utils.coerce_string_to_list(
                 monitoring_zones_poll)
         monitoring_zones_poll = [utils.get_id(mzp)
                 for mzp in monitoring_zones_poll]
     body = {}
     label = label or name
     params = ("label", "disabled", "metadata", "monitoring_zones_poll",
             "timeout", "period", "target_alias", "target_hostname",
             "target_receiver")
     body = _params_to_dict(params, body, locals())
     entity = check.entity
     uri = "/%s/%s/checks/%s" % (self.uri_base, utils.get_id(entity),
             utils.get_id(check))
     try:
         resp, resp_body = self.api.method_put(uri, body=body)
     except exc.BadRequest as e:
         msg = e.message
         dtls = e.details
         if msg.startswith("Validation error"):
             raise exc.InvalidMonitoringCheckUpdate("The update failed "
                     "validation: %s: %s" % (msg, dtls))
         else:
             # Some other issue.
             raise
     return resp_body
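
A hedged usage sketch; 'checks_mgr', 'chk', and the zone ID stand in for a real manager instance, an existing check, and a monitoring zone:

    # Hypothetical usage; all names here are placeholders.
    resp_body = checks_mgr.update_check(chk, label="renamed-check", period=120,
            monitoring_zones_poll="mzord")  # a single zone is coerced to a list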
Example #4
 def delete_by_ids(self, ids):
     """
     Deletes from this queue the messages whose IDs are passed in.
     """
     ids = utils.coerce_string_to_list(ids)
     uri = "/%s?ids=%s" % (self.uri_base, ",".join(ids))
     return self.api.method_delete(uri)
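
Because the IDs are joined with ",".join(), they must be strings; a single ID or a list both work. A sketch with invented IDs:

    # Hypothetical usage; 'queue' and the message IDs are placeholders.
    queue.delete_by_ids("msg-id-1")                # single ID string
    queue.delete_by_ids(["msg-id-1", "msg-id-2"])  # list of ID strings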
Example #5
    def _resolve_lbs(self, load_balancers):
        """
        Takes either a single LB reference or a list of references and returns
        the list of dictionaries required for creating a Scaling Group.

        References can be either a dict that matches the structure required by
        the autoscale API, a CloudLoadBalancer instance, or the ID of the load
        balancer.
        """
        lb_args = []
        lbs = utils.coerce_string_to_list(load_balancers)
        for lb in lbs:
            if isinstance(lb, dict):
                lb_args.append(lb)
            elif isinstance(lb, CloudLoadBalancer):
                lb_args.append({
                        "loadBalancerId": lb.id,
                        "port": lb.port,
                        })
            else:
                # See if it's an ID for a Load Balancer
                try:
                    instance = pyrax.cloud_loadbalancers.get(lb)
                except Exception:
                    raise exc.InvalidLoadBalancer("Received an invalid "
                            "specification for a Load Balancer: '%s'" % lb)
                lb_args.append({
                        "loadBalancerId": instance.id,
                        "port": instance.port,
                        })
        return lb_args
Example #6
    def get_metric_data_points(self, entity, check, metric, start, end,
            points=None, resolution=None, stats=None):
        """
        Returns the data points for a given metric for the given period. The
        'start' and 'end' times must be specified; they can be be either Python
        date/datetime values, a string representing a date/datetime in either
        of 'YYYY-MM-DD HH:MM:SS' or 'YYYY-MM-DD' formats, or a Unix timestamp:

        The 'points' parameter represents the number of points to return. The
        'resolution' parameter represents the granularity of the data. You must
        specify either 'points' or 'resolution', but not both. The allowed
        values for resolution are: 'FULL', 'MIN5', 'MIN20', 'MIN60', 'MIN240',
        and 'MIN1440'.

        Finally, the 'stats' parameter specifies the stats you want returned.
        By default only the 'average' is returned. You omit this parameter,
        pass in a single value, or pass in a list of values. The allowed values
        are: 'average', 'variance', 'min', and 'max'
        """
        allowed_resolutions = ("FULL", "MIN5", "MIN20", "MIN60", "MIN240",
                "MIN1440")
        if not (points or resolution):
            raise exc.MissingMonitoringCheckGranularity("You must specify "
                    "either the 'points' or 'resolution' parameter when "
                    "fetching metrics.")
        if resolution:
            if resolution.upper() not in allowed_resolutions:
                raise exc.InvalidMonitoringMetricsResolution("The specified "
                        "resolution '%s' is not valid. The valid values are: "
                        "%s." % (resolution, str(allowed_resolutions)))
        start_tm = utils.to_timestamp(start)
        end_tm = utils.to_timestamp(end)
        qparms = []
        # Timestamps with fractional seconds currently cause a 408 (timeout)
        qparms.append("from=%s" % int(start_tm))
        qparms.append("to=%s" % int(end_tm))
        if points:
            qparms.append("points=%s" % points)
        if resolution:
            qparms.append("resolution=%s" % resolution.upper())
        if stats:
            stats = utils.coerce_string_to_list(stats)
            for stat in stats:
                qparms.append("select=%s" % stat)
        qparm = "&".join(qparms)
        uri = "/%s/%s/checks/%s/metrics/%s/plot?%s" % (self.uri_base,
                utils.get_id(entity), utils.get_id(check), metric, qparm)
        try:
            resp, resp_body = self.api.method_get(uri)
        except exc.BadRequest as e:
            msg = e.message
            dtls = e.details
            if msg.startswith("Validation error"):
                raise exc.InvalidMonitoringMetricsRequest("Your request was "
                        "invalid: '%s'" % dtls)
            else:
                raise
        return resp_body["values"]
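
A usage sketch, assuming 'cm' is the manager exposing this method and 'ent'/'chk' are existing entity and check objects; the metric name is a placeholder:

    # Hypothetical usage; 'cm', 'ent', 'chk', and the metric are placeholders.
    import datetime
    end = datetime.datetime.utcnow()
    start = end - datetime.timedelta(days=1)
    points = cm.get_metric_data_points(ent, chk, "tx_bytes", start, end,
            resolution="MIN60", stats=["average", "max"])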
Example #7
 def __init__(self, root_folder, container, ignore, upload_key, client):
     self.root_folder = root_folder.rstrip("/")
     if container:
         self.container = client.create_container(container)
     else:
         self.container = None
     self.ignore = utils.coerce_string_to_list(ignore)
     self.upload_key = upload_key
     self.client = client
     threading.Thread.__init__(self)
Example #8
    def upload_folder(self, folder_path, container=None, ignore=None, ttl=None):
        """
        Convenience method for uploading an entire folder, including any
        sub-folders, to Cloud Files.

        All files will be uploaded to objects with the same name as the file.
        In the case of nested folders, files will be named with the full path
        relative to the base folder. E.g., if the folder you specify contains a
        folder named 'docs', and 'docs' contains a file named 'install.html',
        that file will be uploaded to an object named 'docs/install.html'.

        If 'container' is specified, the folder's contents will be uploaded to
        that container. If it is not specified, a new container with the same
        name as the specified folder will be created, and the files uploaded to
        this new container.

        You can selectively ignore files by passing either a single pattern or
        a list of patterns; these will be applied to the individual folder and
        file names, and any names that match any of the 'ignore' patterns will
        not be uploaded. The patterns should be standard *nix-style shell
        patterns; e.g., '*pyc' will ignore all files ending in 'pyc', such as
        'program.pyc' and 'abcpyc'.

        The upload will happen asynchronously; in other words, the call to
        upload_folder() will generate a UUID and return a 2-tuple of (UUID,
        total_bytes) immediately. Uploading will happen in the background; your
        app can call get_uploaded(uuid) to get the current status of the
        upload. When the upload is complete, the value returned by
        get_uploaded(uuid) will match the total_bytes for the upload.

        If you start an upload and need to cancel it, call
        cancel_folder_upload(uuid), passing the uuid returned by the initial
        call.  It will then be up to you to either keep or delete the
        partially-uploaded content.

        If you specify a `ttl` parameter, the uploaded files will be deleted
        after that number of seconds.
        """
        if not os.path.isdir(folder_path):
            raise exc.FolderNotFound("No such folder: '%s'" % folder_path)

        ignore = utils.coerce_string_to_list(ignore)
        total_bytes = utils.folder_size(folder_path, ignore)
        upload_key = str(uuid.uuid4())
        self.folder_upload_status[upload_key] = {"continue": True,
                "total_bytes": total_bytes,
                "uploaded": 0,
                }
        self._upload_folder_in_background(folder_path, container, ignore,
                upload_key, ttl)
        return (upload_key, total_bytes)
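
A hedged usage sketch showing the asynchronous pattern the docstring describes; 'cf' stands in for the Cloud Files client exposing this method:

    # Hypothetical usage; 'cf' and the paths/patterns are placeholders.
    upload_key, total_bytes = cf.upload_folder("/tmp/site", container="web",
            ignore=["*.pyc", ".git*"], ttl=3600)
    uploaded = cf.get_uploaded(upload_key)   # poll progress from the app
    print("%s of %s bytes uploaded" % (uploaded, total_bytes))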
Example #9
 def _sync_folder_to_container(self, folder_path, cont, prefix, delete,
         include_hidden, ignore, ignore_timestamps):
     """
     This is the internal method that is called recursively to handle
     nested folder structures.
     """
     fnames = os.listdir(folder_path)
     ignore = utils.coerce_string_to_list(ignore)
     if not include_hidden:
         ignore.append(".*")
     for fname in fnames:
         if utils.match_pattern(fname, ignore):
             continue
         pth = os.path.join(folder_path, fname)
         if os.path.isdir(pth):
             subprefix = fname
             if prefix:
                 subprefix = "%s/%s" % (prefix, subprefix)
             self._sync_folder_to_container(pth, cont, prefix=subprefix,
                     delete=delete, include_hidden=include_hidden,
                     ignore=ignore, ignore_timestamps=ignore_timestamps)
             continue
         self._local_files.append(os.path.join(prefix, fname))
         local_etag = utils.get_checksum(pth)
         fullname = fname
         if prefix:
             fullname = "%s/%s" % (prefix, fname)
         try:
             obj = cont.get_object(fullname)
             obj_etag = obj.etag
         except exc.NoSuchObject:
             obj = None
             obj_etag = None
         if local_etag != obj_etag:
             if not ignore_timestamps:
                 if obj:
                     obj_time_str = obj.last_modified[:19]
                 else:
                     obj_time_str = EARLY_DATE_STR
                 local_mod = datetime.datetime.utcfromtimestamp(
                         os.stat(pth).st_mtime)
                 local_mod_str = local_mod.isoformat()
                  if obj_time_str >= local_mod_str:
                      # Remote object is at least as new; skip the upload
                      continue
             cont.upload_file(pth, obj_name=fullname, etag=local_etag,
                     return_none=True)
     if delete and not prefix:
         self._delete_objects_not_in_list(cont)
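
The timestamp guard above compares ISO-8601 strings directly; this works because ISO-8601 timestamps sort lexicographically. A self-contained illustration with invented values:

    # ISO-8601 strings order chronologically under plain string comparison.
    remote_str = "2014-03-01T10:00:00"   # like obj.last_modified[:19]
    local_str = "2014-02-28T23:59:59"    # like local_mod.isoformat()
    print(remote_str >= local_str)       # True -> remote is newer; skip upload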
Example #10
    def upload_folder(self, folder_path, container=None, ignore=None):
        """
        Convenience method for uploading an entire folder, including any
        sub-folders, to Cloud Files.

        All files will be uploaded to objects with the same name as the file.
        In the case of nested folders, files will be named with the full path
        relative to the base folder. E.g., if the folder you specify contains a
        folder named 'docs', and 'docs' contains a file named 'install.html',
        that file will be uploaded to an object named 'docs/install.html'.

        If 'container' is specified, the folder's contents will be uploaded to
        that container. If it is not specified, a new container with the same
        name as the specified folder will be created, and the files uploaded to
        this new container.

        You can selectively ignore files by passing either a single pattern or
        a list of patterns; these will be applied to the individual folder and
        file names, and any names that match any of the 'ignore' patterns will
        not be uploaded. The patterns should be standard *nix-style shell
        patterns; e.g., '*pyc' will ignore all files ending in 'pyc', such as
        'program.pyc' and 'abcpyc'.

        The upload will happen asynchronously; in other words, the call to
        upload_folder() will generate a UUID and return a 2-tuple of (UUID,
        total_bytes) immediately. Uploading will happen in the background; your
        app can call get_uploaded(uuid) to get the current status of the
        upload. When the upload is complete, the value returned by
        get_uploaded(uuid) will match the total_bytes for the upload.

        If you start an upload and need to cancel it, call
        cancel_folder_upload(uuid), passing the uuid returned by the initial
        call.  It will then be up to you to either keep or delete the
        partially-uploaded content.
        """
        if not os.path.isdir(folder_path):
            raise exc.FolderNotFound("No such folder: '%s'" % folder_path)

        ignore = utils.coerce_string_to_list(ignore)
        total_bytes = utils.folder_size(folder_path, ignore)
        upload_key = str(uuid.uuid4())
        self.folder_upload_status[upload_key] = {"continue": True,
                "total_bytes": total_bytes,
                "uploaded": 0,
                }
        self._upload_folder_in_background(folder_path, container, ignore,
                upload_key)
        return (upload_key, total_bytes)
Example #11
 def list_by_ids(self, ids):
     """
     If you wish to retrieve a list of messages from this queue and know the
     IDs of those messages, you can pass in a list of those IDs, and only
     the matching messages will be returned. This avoids pulling down all
     the messages in a queue and filtering on the client side.
     """
     ids = utils.coerce_string_to_list(ids)
     uri = "/%s?ids=%s" % (self.uri_base, ",".join(ids))
      # The API is not consistent in how it returns message lists: this call
      # returns a bare list rather than a dict keyed by the plural response
      # key, so temporarily clearing that key lets _list() handle it.
      curr_prkey = self.plural_response_key
      self.plural_response_key = ""
      ret = self._list(uri)
     self.plural_response_key = curr_prkey
     return ret
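
A hedged usage sketch; 'queue' and the message IDs are placeholders:

    # Hypothetical usage; a single ID string is coerced to a one-item list.
    msgs = queue.list_by_ids(["msg-id-1", "msg-id-2"])
    for msg in msgs:
        print(msg.id)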
Example #12
 def _get_db_names(self, dbs, strict=True):
     """
     Accepts a single db (name or object) or a list of dbs, and returns a
     list of database names. If any of the supplied dbs do not exist, a
     NoSuchDatabase exception will be raised, unless you pass strict=False.
     """
     dbs = utils.coerce_string_to_list(dbs)
     db_names = [utils.get_name(db) for db in dbs]
     if strict:
         good_dbs = self.instance.list_databases()
         good_names = [utils.get_name(good_db) for good_db in good_dbs]
         bad_names = [db_name for db_name in db_names
                 if db_name not in good_names]
         if bad_names:
             bad = ", ".join(bad_names)
             raise exc.NoSuchDatabase("The following database(s) were not "
                     "found: %s" % bad)
     return db_names
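
A usage sketch; 'mgr' stands in for whatever object holds this method, and the database names are invented:

    # Hypothetical usage; database names are placeholders.
    names = mgr._get_db_names("customers")    # -> ["customers"] if it exists
    names = mgr._get_db_names(["customers", "orders"],
            strict=False)                     # skip the existence check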
Example #13
    def create_check(self, entity, label=None, name=None, check_type=None,
            details=None, disabled=False, metadata=None,
            monitoring_zones_poll=None, timeout=None, period=None,
            target_alias=None, target_hostname=None, target_receiver=None,
            test_only=False, include_debug=False):
        """
        Creates a check on the entity with the specified attributes. The
        'details' parameter should be a dict with the keys as the option name,
        and the value as the desired setting.

        If the 'test_only' parameter is True, then the check is not created;
        instead, the check is run and the results of the test run returned. If
        'include_debug' is True, additional debug information is returned.
        According to the current Cloud Monitoring docs:
            "Currently debug information is only available for the
            remote.http check and includes the response body."
        """
        if details is None:
            raise exc.MissingMonitoringCheckDetails("The required 'details' "
                    "parameter was not passed to the create_check() method.")
        if not (target_alias or target_hostname):
            raise exc.MonitoringCheckTargetNotSpecified("You must specify "
                    "either the 'target_alias' or 'target_hostname' when "
                    "creating a check.")
        ctype = utils.get_id(check_type)
        is_remote = ctype.startswith("remote")
        monitoring_zones_poll = utils.coerce_string_to_list(
                monitoring_zones_poll)
        monitoring_zones_poll = [utils.get_id(mzp)
                for mzp in monitoring_zones_poll]
        if is_remote and not monitoring_zones_poll:
            raise exc.MonitoringZonesPollMissing("You must specify the "
                    "'monitoring_zones_poll' parameter for remote checks.")
        body = {"label": label or name,
                "details": details,
                "disabled": disabled,
                "type": utils.get_id(check_type),
                }
        params = ("monitoring_zones_poll", "timeout", "period",
                "target_alias", "target_hostname", "target_receiver")
        body = _params_to_dict(params, body, locals())
        if test_only:
            uri = "/%s/%s/test-check" % (self.uri_base, entity.id)
            if include_debug:
                uri = "%s?debug=true" % uri
        else:
            uri = "/%s/%s/checks" % (self.uri_base, entity.id)
        try:
            resp, resp_body = self.api.method_post(uri, body=body)
        except exc.BadRequest as e:
            msg = e.message
            dtls = e.details
            match = _invalid_key_pat.match(msg)
            if match:
                missing = match.groups()[0].replace("details.", "")
                if missing in details:
                    errmsg = ("The value passed for '%s' in the details "
                            "parameter is not valid." % missing)
                else:
                    errmsg = ("The required value for the '%s' setting is "
                            "missing from the 'details' parameter." % missing)
                utils.update_exc(e, errmsg)
                raise e
            else:
                if msg == "Validation error":
                    # Info is in the 'details'
                    raise exc.InvalidMonitoringCheckDetails("Validation "
                            "failed. Error: '%s'." % dtls)
        else:
            if resp.status_code == 201:
                check_id = resp.headers["x-object-id"]
                return self.get_check(entity, check_id)
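
A hedged sketch of creating a remote check; 'cm' and 'ent' stand in for the manager and an existing entity, and the check type, details, and zones are typical-looking but invented values:

    # Hypothetical usage; all values here are placeholders.
    chk = cm.create_check(ent, label="web-ping", check_type="remote.ping",
            details={"count": 5}, monitoring_zones_poll=["mzdfw", "mzord"],
            period=60, timeout=30, target_alias="default")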
Example #14
 def make_list_of_ids(parameter):
     params = utils.coerce_string_to_list(parameter)
     return [utils.get_id(param) for param in params]
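
Every example on this page funnels through coerce_string_to_list(); inferred from the docstrings above, the helper wraps a lone value in a list and passes lists through, so this helper behaves roughly as follows (a sketch, not the library's actual source):

    # Assumed behavior; 'plan' is an invented object with an 'id' attribute.
    make_list_of_ids("ntABC123")          # -> ["ntABC123"]
    make_list_of_ids(["ntABC123", plan])  # -> ["ntABC123", plan.id]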