def get_perun_attributes(cls) -> Dict[str, Type[PerunAttribute[Any]]]:
    """Collect every class attribute annotated with a subclass of
    :class:`~os_credits.perun.base_attributes.PerunAttribute`.

    The annotations of a class cannot change at runtime, which is why the
    result can be cached via :func:`~functools.lru_cache`.

    :return: Mapping from attribute names of this class to the corresponding
        :class:`~os_credits.perun.base_attributes.PerunAttribute` subclass.
    """
    perun_attrs: Dict[str, Type[PerunAttribute[Any]]] = {}
    registry = PerunAttribute.registered_attributes
    for attr_name, annotated_class_name in cls.__annotations__.items():
        # any non-Perun annotation, such as `name` or `id`, is simply skipped
        if annotated_class_name not in registry:
            continue
        perun_attrs[attr_name] = registry[annotated_class_name]
        internal_logger.debug(
            "Connected group attribute `%s` with PerunAttribute `%s`",
            attr_name,
            annotated_class_name,
        )
    return perun_attrs
def from_iterpoint(cls: Type[PT], values: List[Any], meta: Dict[str, str]) -> PT:
    """Construct an InfluxDBPoint instance from an ``iterpoints`` result.

    Only intended to be passed to the ``iterpoints`` method of ``aioinflux``;
    see its documentation for a description of the contents of ``values`` and
    ``meta``. Dataclass field metadata drives parsing and conversion; unknown
    values and tags are dropped.

    :param cls: Subclass of :class:`InfluxDBPoint` on which this method is
        called; instances of it are constructed from the InfluxDB data and
        returned.
    """
    column_values = dict(zip(meta["columns"], values))
    point_args: Dict[str, Any] = {
        "measurement": meta["name"],
        "timestamp": deserialize(column_values["time"], datetime),
    }
    for field_def in fields(cls):
        # fields with a default value are not serialized into InfluxDB
        if field_def.default is not MISSING:
            continue
        # measurement and timestamp are already set above
        if field_def.name in point_args:
            continue
        point_args[field_def.name] = deserialize(
            column_values[field_def.name], field_def
        )
    point = cls(**point_args)
    internal_logger.debug("Constructed %s", point)
    return point
def calculate_credits(measurement1: MT, measurement2: MT) -> Credits:
    """Calculate credits from the difference of two usage measurements.

    Orders the two measurements by timestamp and delegates to the
    :func:`~os_credits.credits.base_models.Metric.calculate_credits` method of
    the **more recent** measurement's metric.

    :return: Non-negative amount of credits
    :raises CalculationResultError: If the amount of credits would be negative.
    """
    older_measurement, new_measurement = (
        (measurement1, measurement2)
        if measurement1.timestamp < measurement2.timestamp
        else (measurement2, measurement1)
    )
    internal_logger.debug(
        "Billing older `%s` and current measurement `%s`",
        older_measurement,
        new_measurement,
    )
    credit_amount = new_measurement.metric.calculate_credits(
        current_measurement=new_measurement, older_measurement=older_measurement
    )
    internal_logger.debug("Calculated credits: %f", credit_amount)
    if credit_amount < 0:
        raise CalculationResultError(
            f"Credits calculation of {measurement1} and {measurement2} returned a "
            "negative amount of credits."
        )
    return credit_amount
def __init_subclass__(cls, name: str, friendly_name: str) -> None:
    """Register every concrete subclass of ``Metric`` under both its names.

    :param name: Internal metric name, key into ``Metric._metrics_by_name``.
    :param friendly_name: Human-readable name, key into
        ``Metric.metrics_by_friendly_name``.
    :raises ValueError: If either name is already registered by another metric.
    """
    if None in (name, friendly_name):
        # intermediate/abstract subclasses pass None to opt out of registration
        internal_logger.debug(
            "Not registering subclass %s of `Metric` since one or both of its "
            "names are None.",
            # fix: the `%s` placeholder previously had no argument, which made
            # the logging module emit a formatting error instead of this message
            cls,
        )
        return
    if name in Metric._metrics_by_name:
        raise ValueError(
            f"Metric with name {name} is already registered: "
            f"{Metric._metrics_by_name[name]}"
        )
    if friendly_name in Metric.metrics_by_friendly_name:
        raise ValueError(
            f"Metric with friendly_name {friendly_name} is already registered: "
            f"{Metric.metrics_by_friendly_name[friendly_name]}"
        )
    Metric._metrics_by_name[name] = cls
    Metric.metrics_by_friendly_name[friendly_name] = cls
    cls.name = name
    cls.friendly_name = friendly_name
    internal_logger.debug("Registered subclass of `Metric`: %s", cls)
async def influxdb_write(request: web.Request) -> web.Response:
    """
    Consumes the `Line Protocol
    <https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial/>`_
    of InfluxDB.

    :param request: Incoming request with one or multiple *InfluxDB Line* instances.
    ---
    description: Used by InfluxDB to post subscription updates
    tags:
      - Service
    consumes:
      - text/plain
    parameters:
      - in: body
        name: line
        description: Point in Line Protocol format
          (https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial)
        schema:
          type: string
          example: weather,location=us-midwest temperature=82 1465839830100400200
        required: true
    responses:
      202:
        description: A corresponding task object will be created. See application
          log for further information
    """  # noqa (cannot fix long url)
    internal_logger.info(f"Called: {request.rel_url}")
    task_queue = request.app["task_queue"]
    # .text() performs automatic decoding from bytes; an unknown number of
    # lines will be sent, put them all into the queue
    for incoming_line in (await request.text()).splitlines():
        await task_queue.put(incoming_line)
        internal_logger.debug(
            "Put %s into queue (%s elements)",
            incoming_line,
            task_queue.qsize(),
        )
    # always answer 202
    return web.HTTPAccepted()
async def save(self, _save_all: bool = False) -> None:
    """Send all modified Perun attributes of this group back to *Perun*.

    Iterates over every annotated
    :class:`~os_credits.perun.base_attributes.PerunAttribute` of this group
    and transmits those whose
    :attr:`~os_credits.perun.base_attributes.PerunAttribute.has_changed` flag
    is set, splitting them into regular and resource-bound attributes.

    :param _save_all: Save all attributes regardless whether their value was
        actually changed since retrieval. Also used for testing.
    """
    internal_logger.debug("Save of Group %s called", self)
    regular_attrs: List[PerunAttribute[Any]] = []
    resource_bound_attrs: List[PerunAttribute[Any]] = []
    # partition the changed attributes by whether they are resource bound
    for attribute_name in type(self).get_perun_attributes():
        attribute = getattr(self, attribute_name)
        if not (attribute.has_changed or _save_all):
            continue
        if attribute.is_resource_bound():
            resource_bound_attrs.append(attribute)
        else:
            regular_attrs.append(attribute)
    if regular_attrs:
        internal_logger.debug(
            "Sending modified regular attributes to perun %s", regular_attrs
        )
        await set_attributes(self.id, regular_attrs)
    if resource_bound_attrs:
        if getattr(self, "assigned_resource", False):
            internal_logger.debug(
                "Sending modified resource bound attributes to perun %s",
                resource_bound_attrs,
            )
            await set_resource_bound_attributes(
                self.id, self.resource_id, resource_bound_attrs
            )
        else:
            internal_logger.warning(
                "Not sending modified attribute to perun, since Group %s is not "
                "associated with resource with id %s. How did we even retrieve any "
                "such attributes?",
                self.name,
                self.resource_id,
            )
async def send_notification(
    notification: EmailNotificationBase, loop: Optional[AbstractEventLoop] = None
) -> None:
    """Construct and send the given notification via the configured SMTP server.

    STARTTLS and authentication are applied according to the ``MAIL_*``
    config values.

    :param notification: Notification whose message is constructed and sent.
    :param loop: Event loop to use; defaults to the current event loop.
    """
    loop = loop or get_event_loop()
    async with SMTP(
        hostname=config["MAIL_SMTP_SERVER"], port=config["MAIL_SMTP_PORT"], loop=loop
    ) as smtp:
        # fix: the "Not connecting via STARTTLS" debug message was previously
        # logged in the branch that actually performs smtp.starttls()
        if not config["MAIL_NOT_STARTTLS"]:
            await smtp.starttls()
        else:
            internal_logger.debug("Not connecting via STARTTLS as requested")
        if config["MAIL_SMTP_USER"] and config["MAIL_SMTP_PASSWORD"]:
            internal_logger.debug("Authenticating against smtp server")
            await smtp.login(config["MAIL_SMTP_USER"], config["MAIL_SMTP_PASSWORD"])
        else:
            internal_logger.debug(
                "Not authenticating against smtp server since neither user and/nor "
                "password are specified."
            )
        await smtp.send_message(notification.construct_message())
def construct_message(self) -> MIMEText:
    """Construct a :class:`~email.mime.text.MIMEText` object from the
    notification's attributes.

    The recipient placeholders are resolved, body and subject templates are
    rendered with the following default placeholders:

    ``project``
        Name of the Project as stored in Perun.
    ``credits_used``
        The current value of
        :class:`~os_credits.perun.attributes.DenbiCreditsUsed`.
    ``credits_granted``
        The current value of
        :class:`~os_credits.perun.attributes.DenbiCreditsGranted`.

    Subclasses are advised to add their own placeholders to
    :attr:`custom_placeholders` instead of overwriting this method. If any
    placeholder of a template cannot be resolved it will be left in place to
    ensure that the message can be constructed and sent.

    :return: Fully rendered message with recipients set.
    :raises BrokenTemplateError: If a template contains an invalid placeholder.
    """
    placeholders = {
        "project": self.group.name,
        "credits_used": str(self.group.credits_used.value),
        "credits_granted": str(self.group.credits_granted.value),
        **self.custom_placeholders,
    }
    try:
        rendered_subject = self._subject.substitute(placeholders)
    except KeyError as e:
        # unknown placeholder: degrade gracefully and leave it unformatted
        internal_logger.error(
            "Subject of Notification %s contains unknown placeholder %s. Sending "
            "partially unformatted mail.",
            type(self).__name__,
            e,
        )
        rendered_subject = self._subject.safe_substitute(placeholders)
    except ValueError as e:
        internal_logger.error(
            "Subject of Notification %s contains invalid placeholder %s.",
            type(self).__name__,
            e,
        )
        # fix: chain the original error instead of silently dropping it
        raise BrokenTemplateError(
            f"Subject of Notification {type(self).__name__}"
        ) from e
    try:
        rendered_body = self._body.substitute(placeholders)
    except KeyError as e:
        internal_logger.error(
            "Body of Notification %s contains unknown placeholder %s. Sending "
            "partially unformatted mail.",
            type(self).__name__,
            e,
        )
        rendered_body = self._body.safe_substitute(placeholders)
    except ValueError as e:
        internal_logger.error(
            "Body of Notification %s contains invalid placeholder %s.",
            type(self).__name__,
            e,
        )
        # fix: chain the original error instead of silently dropping it
        raise BrokenTemplateError(
            f"Body of Notification {type(self).__name__}"
        ) from e
    message = MIMEText(rendered_body)
    message["Subject"] = rendered_subject
    message["From"] = config["MAIL_FROM"]
    if config["NOTIFICATION_TO_OVERWRITE"].strip():
        internal_logger.info(
            "Applying `NOTIFICATION_TO_OVERWRITE` setting to notification `%s`",
            self,
        )
        message["To"] = config["NOTIFICATION_TO_OVERWRITE"]
    else:
        message["To"] = self._resolve_recipient_placeholders(self.to)
        message["Cc"] = self._resolve_recipient_placeholders(self.cc)
        message["Bcc"] = self._resolve_recipient_placeholders(self.bcc)
    internal_logger.debug(
        "Recipients of notification `%s`: To=%s, Cc=%s, Bcc=%s",
        self,
        message["To"],
        message["Cc"],
        message["Bcc"],
    )
    return message
def parse_config_from_environment() -> Config:
    """Parse configuration values that need conversion from the environment.

    Sets, booleans, ints and Decimals are converted here. Invalid values are
    deleted from ``environ`` so that a later lookup through the config
    chainmap falls back to the default instead of the raw string.

    :return: Dict with the processed subset of the configuration.
    """
    # for environment variables that need to be processed
    PROCESSED_ENV_CONFIG: Dict[str, Any] = {}

    try:
        PROCESSED_ENV_CONFIG["OS_CREDITS_PROJECT_WHITELIST"] = set(
            environ["OS_CREDITS_PROJECT_WHITELIST"].split(";")
        )
    except KeyError:
        # Environment variable not set, that's ok
        pass

    for bool_value in ["MAIL_NOT_STARTTLS"]:
        if bool_value in environ:
            PROCESSED_ENV_CONFIG[bool_value] = True

    for int_value_key in [
        "OS_CREDITS_PRECISION",
        "OS_CREDITS_WORKERS",
        "INFLUXDB_PORT",
        "OS_CREDITS_PERUN_VO_ID",
        "MAIL_SMTP_PORT",
    ]:
        try:
            int_value = int(environ[int_value_key])
            if int_value < 0:
                internal_logger.warning(
                    "Integer value (%s) must not be negative, falling back to default "
                    "value",
                    int_value_key,
                )
                del environ[int_value_key]
                continue
            PROCESSED_ENV_CONFIG[int_value_key] = int_value
            internal_logger.debug("Added %s to processed env", int_value_key)
        except KeyError:
            # Environment variable not set, that's ok
            pass
        except ValueError:
            internal_logger.warning(
                "Could not convert value of $%s('%s') to int",
                int_value_key,
                environ[int_value_key],
            )
            # since we cannot use a subset of the actual environment, see below, we
            # have to remove invalid keys from environment to make sure that if such
            # a key is looked up inside the config the chainmap does not return the
            # unprocessed value from the environment but rather the default one
            del environ[int_value_key]

    for decimal_value_key in ["VCPU_CREDIT_PER_HOUR", "RAM_CREDIT_PER_HOUR"]:
        try:
            decimal_value = Decimal(environ[decimal_value_key])
            if decimal_value < 0:
                internal_logger.warning(
                    "Decimal value (%s) must not be negative, falling back to default "
                    "value",
                    decimal_value_key,
                )
                del environ[decimal_value_key]
                continue
            PROCESSED_ENV_CONFIG[decimal_value_key] = decimal_value
            internal_logger.debug("Added %s to processed env", decimal_value_key)
        except KeyError:
            # Environment variable not set, that's ok
            pass
        # fix: Decimal raises decimal.InvalidOperation (an ArithmeticError), NOT
        # ValueError, on malformed input, so the previous `except ValueError`
        # handler never fired and the error propagated uncaught
        except (ValueError, ArithmeticError):
            internal_logger.warning(
                "Could not convert value of $%s('%s') to Decimal",
                decimal_value_key,
                environ[decimal_value_key],
            )
            # remove the invalid key so the chainmap falls back to the default
            del environ[decimal_value_key]

    if "OS_CREDITS_PRECISION" in PROCESSED_ENV_CONFIG:
        # stored as number of decimal places, converted to the quantize exponent
        PROCESSED_ENV_CONFIG["OS_CREDITS_PRECISION"] = (
            Decimal(10) ** -PROCESSED_ENV_CONFIG["OS_CREDITS_PRECISION"]
        )
    if "VCPU_CREDIT_PER_HOUR" in PROCESSED_ENV_CONFIG:
        PROCESSED_ENV_CONFIG["VCPU_CREDIT_PER_HOUR"] = Decimal(
            PROCESSED_ENV_CONFIG["VCPU_CREDIT_PER_HOUR"]
        )
    if "RAM_CREDIT_PER_HOUR" in PROCESSED_ENV_CONFIG:
        PROCESSED_ENV_CONFIG["RAM_CREDIT_PER_HOUR"] = Decimal(
            PROCESSED_ENV_CONFIG["RAM_CREDIT_PER_HOUR"]
        )

    # this would be the right way but makes pytest hang forever -.-'
    # use the workaround explained above and add the raw process environment to the
    # chainmap although this is not really nice :(
    # At least mypy should show an error whenever a config value not defined in
    # :class:`Config` is accessed
    # for key in Config.__annotations__:
    #     # every value which needs processing should already be present in
    #     # PROCESSED_ENV_CONFIG if set in the environment
    #     if key in PROCESSED_ENV_CONFIG:
    #         continue
    #     if key in environ:
    #         PROCESSED_ENV_CONFIG.update({key: environ[key]})

    return cast(Config, PROCESSED_ENV_CONFIG)
def from_lineprotocol(cls: Type[PT], influx_line_: AnyStr) -> PT:
    """
    Creates a point from an InfluxDB Line, see
    https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial/

    Deliberate usage of ``cls`` to allow and support potential subclassing. If the
    line contains more information than defined by ``cls`` the rest is simply
    ignored.

    >>> from os_credits.influx.model import InfluxDBPoint
    >>> line = b'weather,location=us-midwest temperature=82 1465839830100399872'
    >>> InfluxDBPoint.from_lineprotocol(line)
    InfluxDBPoint(measurement='weather', timestamp=datetime.datetime(2016, 6, 13, 19, 43, 50, 100400)) # noqa

    :param cls: Subclass on which this method is called. Instances of this class
        will be the return type.
    :param influx_line_: Influx Line to parse, either ``string`` or ``bytes``.
    :return: Instances of `cls`.
    :raises KeyError: Attribute of ``cls`` without default value not present in
        line
    """
    if isinstance(influx_line_, bytes):
        influx_line = influx_line_.decode()
    else:
        influx_line = influx_line_
    internal_logger.debug("Converting InfluxDB Line `%s`", influx_line)
    measurement_and_tag, field_set, timestamp_str = influx_line.strip().split()
    measurement_name, tag_set = measurement_and_tag.split(",", 1)
    # merged view of all tag and field values keyed by their names
    tag_field_dict: Dict[str, str] = {}
    for tag_pair in tag_set.split(","):
        tag_name, tag_value = tag_pair.split("=", 1)
        tag_field_dict[tag_name] = tag_value
    for field_pair in field_set.split(","):
        field_name, field_value = field_pair.split("=", 1)
        tag_field_dict[field_name] = field_value
    # we know how to deserialize those
    args: Dict[str, Any] = {
        "measurement": measurement_name,
        "timestamp": deserialize(timestamp_str, datetime),
    }
    for f in fields(cls):
        # currently not serialized, see class documentation
        if f.default is not MISSING:
            continue
        # values of this fields are already known
        if f.name in args:
            continue
        is_tag = bool(f.metadata and f.metadata.get("tag", False))
        if f.name not in tag_field_dict:
            raise KeyError(
                f"InfluxDB Line does not contain {'tag' if is_tag else 'field'} "
                # fix: this fragment was missing its f-prefix, so the literal
                # text `{f.name}` appeared in the error message
                f"`{f.name}`"
            )
        value = tag_field_dict[f.name]
        # string field values are quoted, strip them
        if not is_tag and isinstance(value, str):
            value = value.strip('"')
        args[f.name] = deserialize(value, f)
    new_point = cls(**args)
    internal_logger.debug("Constructed %s", new_point)
    return new_point
async def connect(self: GTV) -> GTV:
    """Retrieve all required values from *Perun* and populate this instance.

    #. The ID of this group is resolved via
       :func:`~os_credits.perun.groupsManager.get_group_by_name`, since the
       following calls need it.
    #. :func:`get_perun_attributes` determines which attributes of the class
       must be retrieved from *Perun*.
    #. If any of these attributes are *resource bound* we verify that the
       resource with ID :attr:`resource_id` is actually associated with this
       group; *Perun* happily returns and stores attributes of *invalid*
       group/resource combinations. The check is performed by
       :func:`is_assigned_resource` and its result stored in
       :attr:`assigned_resource`.
    #. All attributes are fetched via
       :func:`~os_credits.perun.attributesManager.get_attributes` and, if
       necessary,
       :func:`~os_credits.perun.attributesManager.get_resource_bound_attributes`
       and stored on this group.

    :return: Self, to allow chaining such as ``g=await Group([...]).connect()``
    :raises GroupResourceNotAssociatedError: In case group :attr:`name` is not
        assigned to resource with :attr:`resource_id` inside *Perun*.
    :raises ~os_credits.perun.exceptions.PerunBaseException: Or any other
        subclass of exception indicating errors during communication with
        perun.
    """
    group_response = await get_group_by_name(self.name)
    self.id = int(group_response["id"])
    # friendlyName of a perun attribute -> attribute name on this instance
    friendly_to_attr_name: Dict[str, str] = {}
    plain_attr_names: List[str] = []
    bound_attr_names: List[str] = []
    perun_attrs = type(self).get_perun_attributes()
    for attr_name, attr_class in perun_attrs.items():
        friendly_to_attr_name[attr_class.friendlyName] = attr_name
        if attr_class.is_resource_bound():
            bound_attr_names.append(attr_class.get_full_name())
        else:
            plain_attr_names.append(attr_class.get_full_name())
    # contents of all retrieved attributes, keyed by friendlyName
    retrieved: Dict[str, Dict[str, Any]] = {}
    if plain_attr_names:
        for attr in await get_attributes(
            self.id, attribute_full_names=plain_attr_names
        ):
            retrieved[attr["friendlyName"]] = attr
    if bound_attr_names:
        self.assigned_resource = await self.is_assigned_resource()
        if not self.assigned_resource:
            raise GroupResourceNotAssociatedError(
                f"Group `{self.name}` is not associated with resource with id "
                f"`{self.resource_id}` but resource bound attributes have been "
                "requested "
            )
        for attr in await get_resource_bound_attributes(
            self.id,
            self.resource_id,
            attribute_full_names=bound_attr_names,
        ):
            retrieved[attr["friendlyName"]] = attr
    internal_logger.debug(
        "Retrieved attributes Group %s: %s",
        self,
        {name: attr["value"] for name, attr in retrieved.items()},
    )
    for friendly_name, attr_name in friendly_to_attr_name.items():
        attr_class = perun_attrs[attr_name]
        try:
            setattr(self, attr_name, attr_class(**retrieved[friendly_name]))
        except KeyError:
            # perun returned no content for this attribute
            setattr(self, attr_name, attr_class(value=None))
    return self