def __init__(self):
    """Set up logging, the shared DB connection, the stores and the content-set map."""
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
    # Stores that persist the individual entity kinds.
    self.module_store = ModulesStore()
    self.package_store = PackageStore()
    self.update_store = UpdateStore()
    # Lookup table: content-set label -> DB id, built once up front.
    self.content_set_to_db_id = self._prepare_content_set_map()
def __init__(self):
    """Prepare downloader, unpacker, store and temporary paths for the OVAL feed."""
    self.logger = get_logger(__name__)
    self.downloader = FileDownloader()
    # rh.com returns 403 when downloading too quickly (DDoS protection?)
    self.downloader.num_threads = 1
    self.unpacker = FileUnpacker()
    self.oval_store = OvalStore()
    # Scratch directory holding the downloaded feed; feed.json lives inside it.
    self.tmp_directory = tempfile.mkdtemp(prefix="oval-")
    self.feed_path = os.path.join(self.tmp_directory, "feed.json")
def __init__(self):
    """Initialize download/unpack helpers, the repository store and cert bookkeeping."""
    self.logger = get_logger(__name__)
    self.downloader = FileDownloader()
    self.unpacker = FileUnpacker()
    self.repo_store = RepositoryStore()
    # Repositories registered for processing.
    self.repositories = set()
    # Temp dir and per-cert file paths are created lazily when certs are needed.
    self.certs_tmp_directory = None
    self.certs_files = {}
def __init__(self):
    """Load the whole ``cpe`` table into label -> id and label -> name lookup dicts.

    Fix: the cursor was previously leaked if ``execute``/``fetchall`` raised;
    it is now always closed via ``try/finally``.
    """
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
    self.cpe_label_to_id = {}
    self.cpe_label_to_name = {}
    cur = self.conn.cursor()
    try:
        cur.execute("select id, label, name from cpe")
        for cpe_id, label, name in cur.fetchall():
            self.cpe_label_to_id[label] = cpe_id
            self.cpe_label_to_name[label] = name
    finally:
        # Release the cursor even when the query fails.
        cur.close()
def __init__(self, oval_id, updated, url, local_path):
    """Hold identity, source location and parsed-content containers for one OVAL file."""
    self.logger = get_logger(__name__)
    # Identity / provenance of this OVAL document.
    self.oval_id = oval_id
    self.updated = updated
    self.url = url
    self.local_path = local_path
    # Parsed XML root and the extracted element lists (populated later by a parser).
    self.root = None
    self.definitions = []
    self.tests = []
    self.module_tests = []
    self.objects = []
    self.states = []
def test_progress_logger(caplog):
    """Test ProgressLogger."""
    logger = logging_utils.get_logger(__name__)
    progress_logger = logging_utils.ProgressLogger(logger, 3, log_interval=0)
    # Three updates against a total of 3, then reset to a total of 4 and update once.
    for _ in range(3):
        progress_logger.update()
    progress_logger.reset(4)
    progress_logger.update()
    expected_messages = [
        ' 33.33 % completed [1/3]',
        ' 66.67 % completed [2/3]',
        '100.00 % completed [3/3]',
        ' 25.00 % completed [1/4]',
    ]
    for idx, message in enumerate(expected_messages):
        assert caplog.records[idx].message == message
import glob
import gzip
import json
import os

from vmaas.common.date_utils import format_datetime, now
from vmaas.common.fileutil import remove_file_if_exists
from vmaas.common.logging_utils import get_logger, init_logging
from vmaas.common.rpm_utils import join_rpm_name
from vmaas.reposcan.database.database_handler import DatabaseHandler, NamedCursor, init_db

DEFAULT_KEEP_COPIES = "2"
PKGTREE_FILE = '/data/pkg_tree.json.gz'
DEFAULT_PKGTREE_INDENT = "0"

LOGGER = get_logger(__name__)


class JsonPkgTree:  # pylint: disable=too-many-instance-attributes
    """Class for creating package tree json file from database."""

    def __init__(self, db_instance, filename):
        self.db_instance = db_instance
        self.filename = filename
        # Output buffers and per-table lookup caches, filled while dumping the tree.
        self.outputdata = {}
        self.datadict = {}
        self.pkgnameid2pkgname = {}
        self.evrid2evr = {}
        self.archid2arch = {}
        self.packagedata = {}
        self.repodata = {}
        self.cvename = {}
""" import signal import uuid from tornado.ioloop import IOLoop from tornado.web import Application, RequestHandler from tornado.websocket import WebSocketHandler from vmaas.common.config import Config from vmaas.common.logging_utils import get_logger from vmaas.common.logging_utils import init_logging WEBSOCKET_PING_INTERVAL = 5 WEBSOCKET_TIMEOUT = 60 LOGGER = get_logger("websocket") class NotificationHandler(WebSocketHandler): """Websocket handler to send messages to subscribed clients.""" connections = {} # Timestamp of the last data dump reported by reposcan last_dump_version = None last_advertised_version = None # What is the freshest data each webapp has webapp_export_timestamps = {} webapp_statuses = {} def open(self, *args, **kwargs): # Uuid just for reference in logs self.connections[self] = [uuid.uuid4().hex[:8], "unsubscribed"]
def __init__(self):
    """Grab a logger for this module and the shared database connection."""
    self.logger = get_logger(__name__)
    self.conn = DatabaseHandler.get_connection()
""" Wait for VMaaS services. """ import os import signal import sys import time from requests import request from vmaas.common.config import Config from vmaas.common.logging_utils import get_logger, init_logging from vmaas.reposcan.database.database_handler import DatabaseHandler, init_db LOGGER = get_logger(__file__) def bye(signum, frame): """Handle signal""" sys.exit(f"Stopped,{signum} received.") def wait(func, *args, delay=1, service="", **kwargs): """Waits for success of `func`.""" LOGGER.info("Checking if %s is up", service) while True: try: result = func(*args, **kwargs) if result: return LOGGER.info("%s is unavailable - sleeping", service)
def __init__(self):
    """Create the work queue, logger and thread-count setting for the downloader."""
    self.queue = Queue()
    self.logger = get_logger(__name__)
    # Worker count is tunable via the THREADS env var; falls back to DEFAULT_THREADS.
    self.num_threads = int(os.getenv('THREADS', DEFAULT_THREADS))