Example #1
def messaging(
    partitions: int,
    replication: int,
    topic_name: Optional[str],
    create_if_not_exist: bool,
    message_contents: Optional[str],
    message_file: Optional[str],
):
    """Run messaging cli with the given arguments."""
    admin = admin_client.create_admin_client()
    prod = producer.create_producer()
    if message_file:
        if topic_name or message_contents:
            _LOGGER.warning(
                "Topic name and/or message contents are being ignored due to presence of message file."
            )
        with open(message_file, "r") as m_file:
            all_messages = json.load(m_file)

    else:
        if topic_name is None or message_contents is None:
            raise AttributeError(
                "Both topic_name and message_contents must be set when not reading from file."
            )

        temp_message = {}
        temp_message["message_contents"] = json.loads(message_contents)
        temp_message["topic_name"] = topic_name
        all_messages = [temp_message]

    # NOTE: no deployment-based check is needed here because the deployment prefix
    # is only prepended to the topic name after __init__ is called.
    for m in all_messages:
        m_contents = m["message_contents"]
        if "component_name" not in m_contents:
            m_contents["component_name"] = "messaging-cli"
        if "service_version" not in m_contents:
            m_contents["service_version"] = __version__
        m_base_name = m["topic_name"]

        validate: bool
        # get or create message type
        for message in ALL_MESSAGES:
            if m_base_name == message.base_name:
                _LOGGER.info(
                    f"Found message in registered list: {m_base_name}")
                topic = message
                validate = True
                break
        else:
            validate = False
            _LOGGER.info(
                "Message not in the registered list, checking topics on Kafka..."
            )

            kafka_topic_list = admin.list_topics().topics
            topic = message_factory(b_name=m_base_name,
                                    message_model=BaseMessageContents)

            if topic.topic_name not in kafka_topic_list:
                if not create_if_not_exist:
                    raise Exception(
                        "Topic name does not match any registered message and "
                        "create_if_not_exist is not set; the topic will not be created."
                    )
                _LOGGER.info("Creating new topic.")
                admin_client.create_topic(admin,
                                          topic,
                                          partitions=partitions,
                                          replication_factor=replication)

        producer.publish_to_topic(prod, topic, m_contents, validate=validate)

        _LOGGER.info(
            f"Sent message {topic.topic_name} with content: {m_contents}")
    prod.flush()
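For reference, the message_file read by the CLI above is expected to be a JSON list of objects, each carrying a topic_name and a message_contents mapping (component_name and service_version are filled in automatically when missing). A minimal sketch of producing such a file; the file name and the inner payload keys are chosen purely for illustration:

import json

# Each entry needs "topic_name" and "message_contents"; the inner keys are illustrative.
messages = [
    {
        "topic_name": "package_released",
        "message_contents": {"package_name": "example", "package_version": "1.0.0"},
    }
]

with open("messages.json", "w") as f:
    json.dump(messages, f)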
Example #2
    def __getattr__(self, item):
        """Connect to the database lazily on first call."""
        if not self._graph.is_connected():
            self._graph.connect()

        return getattr(self._graph, item)


# Instantiate a single GraphDatabase adapter for the whole application (one per WSGI worker)
# so that connection pooling is correctly reused from one instance. Any call through this
# wrapper has to happen after the WSGI fork (hence the lazy wrapper logic); a standalone
# sketch of the pattern follows this example.
GRAPH = _GraphDatabaseWrapper()

# Similarly to the graph database above, we create a single confluent-kafka-python producer.
PRODUCER = producer.create_producer()

# custom metric to expose head revision from thoth-storages library
schema_revision_metric = metrics.info(
    "thoth_database_schema_revision_script",
    "Thoth database schema revision from script",
    component="user-api",  # label
    revision=GRAPH.get_script_alembic_version_head(),  # label
    env=Configuration.THOTH_DEPLOYMENT_NAME,
)

# custom metric to expose cache expiration configuration
user_api_cache_expiration_configuration = metrics.info(
    "user_api_cache_expiration_configuration",
    "Thoth User API cache expiration configuration",
    env=Configuration.THOTH_DEPLOYMENT_NAME,
)
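The lazy-connection proxy used by _GraphDatabaseWrapper above can be reproduced in isolation. A minimal, self-contained sketch of the pattern; Backend stands in for thoth.storages.GraphDatabase and is purely illustrative:

class Backend:
    """Stand-in for GraphDatabase: connects lazily, illustrative only."""

    def __init__(self):
        self._connected = False

    def is_connected(self):
        return self._connected

    def connect(self):
        self._connected = True

    def query(self):
        return "result"


class LazyWrapper:
    """Proxy that connects the backend on first attribute access, then forwards the call."""

    _backend = Backend()

    def __getattr__(self, item):
        if not self._backend.is_connected():
            self._backend.connect()
        return getattr(self._backend, item)


BACKEND = LazyWrapper()
print(BACKEND.query())  # connects on first use; later calls reuse the connection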
Example #3
from thoth.messaging.cve_provided import MessageContents as CVEProvidedMessageContent
from thoth.messaging import __version__ as __messaging__version__
from thoth.messaging.producer import create_producer, publish_to_topic
from thoth.storages import GraphDatabase
from thoth.storages import __version__ as __storages__version__

__version__ = "0.5.2"
__component_version__ = (f"{__version__}+storages.{__storages__version__}"
                         f".common.{__common__version__}"
                         f".messaging.{__messaging__version__}")

prometheus_registry = CollectorRegistry()
THOTH_DEPLOYMENT_NAME = os.getenv("THOTH_DEPLOYMENT_NAME")
_THOTH_METRICS_PUSHGATEWAY_URL = os.getenv("PROMETHEUS_PUSHGATEWAY_URL")

_PRODUCER = create_producer()
_COMPONENT_NAME = "thoth-cve-update-job"

_GRAPH_DB = GraphDatabase()
_GRAPH_DB.connect()

init_logging()

_LOGGER = logging.getLogger("thoth.cve_update")
_LOGGER.info(f"Thoth-cve-update-producer v%s", __component_version__)

# Metrics Exporter Metrics
_METRIC_INFO = Gauge(
    "thoth_cve_update_job_info",
    "Thoth CVE update Producer information",
    ["env", "version"],
Example #4
from thoth.messaging import __version__ as __messaging__version__
import thoth.messaging.producer as producer
from thoth.messaging import package_released_message
from thoth.messaging.package_releases import MessageContents as PackageReleasedContent

from prometheus_client import CollectorRegistry, Gauge, Counter, push_to_gateway

from thoth.python import Source
from thoth.python.exceptions import NotFound

init_logging()

prometheus_registry = CollectorRegistry()

p = producer.create_producer()

_LOGGER = logging.getLogger("thoth.package_releases_job")
__service_version__ = f"{__version__}+storages.{__storages__version__}.common.{__common__version__}.messaging.{__messaging__version__}"  # noqa: E501
_LOGGER.info("Thoth-package-releases-job-producer v%s", __service_version__)

_THOTH_DEPLOYMENT_NAME = os.environ["THOTH_DEPLOYMENT_NAME"]
_THOTH_METRICS_PUSHGATEWAY_URL = os.getenv("PROMETHEUS_PUSHGATEWAY_URL")

COMPONENT_NAME = "thoth-package-releases-job"

# Metrics Exporter Metrics
_METRIC_INFO = Gauge(
    "thoth_package_release_job_info",
    "Thoth Package Release Producer information",
    ["env", "version"],
Example #5
from thoth.messaging.producer import create_producer, publish_to_topic
from thoth.messaging.cve_provided import CVEProvidedMessage

__version__ = "0.2.0"

init_logging()

_LOGGER = logging.getLogger("thoth.cve_update")
__service_version__ = f"{__version__}+storages.{__storages__version__}.common.{__common__version__}.messaging.{__messaging__version__}"  # noqa: E501
_LOGGER.info(f"Thoth-cve-update-producer v%s", __service_version__)

_SAFETY_DB_URL = (
    "https://raw.githubusercontent.com/pyupio/safety-db/master/data/insecure_full.json"
)

producer = create_producer()
COMPONENT_NAME = "thoth-cve-update-job"


def _list_cves() -> dict:
    """Download pyup safety db and parse it."""
    _LOGGER.debug("Downloading pyup CVE database")
    response = requests.get(_SAFETY_DB_URL)
    response.raise_for_status()
    return response.json()
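

# The publishing step that update_cve_info presumably performs for each advisory can be
# sketched with the helpers imported above. The content fields below, other than the
# component_name/service_version keys seen in the first example, are hypothetical
# placeholders; the real CVEProvidedMessage schema is not shown in this snippet.
def _publish_cve_sketch() -> None:
    """Sketch only: publish one CVE record; field names are illustrative."""
    message_contents = {
        "package_name": "example",   # hypothetical field
        "cve_id": "CVE-2020-0000",   # hypothetical field
        "component_name": COMPONENT_NAME,
        "service_version": __service_version__,
    }
    publish_to_topic(producer, CVEProvidedMessage(), message_contents)
    producer.flush()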


def update_cve_info() -> None:
    """Gather CVEs from pyup and assign them in the graph database to packages."""
    graph = GraphDatabase()
    graph.connect()