def __init__(self, runner, device, config):
    """Set up per-device test-host state: directories, config, report sink.

    Args:
        runner: owning runner; supplies gcp client and report sink.
        device: device under test; supplies mac, port, and gateway.
        config: dict-like configuration for this host.
    """
    self.runner = runner
    self._gcp = runner.gcp
    self.gateway = device.gateway
    self.config = config
    self.switch_setup = self.config.get('switch_setup', {})
    self._no_test = self.config.get('no_test', False)
    self.device = device
    self.target_mac = device.mac
    self.target_port = device.port.port_no
    # Mirror the target port directly only for locally-attached devices.
    self._use_target_port_mirror = device.is_local()
    self.fake_target = self.gateway.fake_target
    self.devdir = self._init_devdir()
    self.run_id = self.make_runid()
    self.scan_base = os.path.abspath(os.path.join(self.devdir, 'scans'))
    self.logger = logger.get_logger('host')
    self._port_base = self._get_port_base()
    self._device_base = self._get_device_base()
    self.state = None
    self._state_transition(_STATE.READY)
    self.results = {}
    self.fake_host = None
    self.test_name = None
    self.test_start = gcp.get_timestamp()
    self.test_host = None
    self.test_port = None
    self._startup_time = None
    self._monitor_scan_sec = int(config.get('monitor_scan_sec', 0))
    # A configured value of 0 means "no default timeout"; store None so
    # downstream code can distinguish "unset" from a real timeout.
    _default_timeout_sec = int(config.get('default_timeout_sec', 0))
    self._default_timeout_sec = _default_timeout_sec if _default_timeout_sec else None
    self._usi_config = config.get('usi_setup', {})
    self._topology_hook_script = config.get('topology_hook')
    self._mirror_intf_name = None
    self._monitor_ref = None
    self._monitor_start = None
    self.target_ip = None
    self._dhcp_listeners = []
    self._loaded_config = None
    self.configurator = configurator.Configurator()
    # reload_config() must populate _loaded_config; fail fast otherwise.
    self.reload_config()
    assert self._loaded_config, 'config was not loaded'
    self._write_module_config(self._loaded_config, self._device_aux_path())
    self.enabled_tests = self._get_enabled_tests()
    self.remaining_tests = list(self.enabled_tests)
    self.logger.info('Host %s running with enabled tests %s',
                     self.target_mac, self.remaining_tests)
    self._report = ReportGenerator(config, self.target_mac, self._loaded_config,
                                   self.runner.report_sink,
                                   get_devdir(self.target_mac))
    # NOTE(review): both the public record_result and the private
    # _record_result are called here — confirm the split is intentional.
    self.record_result('startup', state=MODE.PREP)
    self._record_result('info', state=self.target_mac,
                        config=self._make_config_bundle())
    self._trigger_path = None
    self._startup_file = None
    self.timeout_handler = self._aux_module_timeout_handler
    self._all_ips = []
    self._ip_listener = None
def __init__(self, **kwargs):
    """Bind keyword attributes, resolve the test directory, and optionally
    set the forced-granularity environment for the child test."""
    futils.set_kwargs_attrs(self, kwargs)
    self.config = configurator.Configurator().config
    base_dir = os.path.abspath(getattr(self.config, self.config_dir_field))
    self.testdir = os.path.join(base_dir, self.tc_dirname)
    if getattr(self.config, self.config_force_field):
        # Propagate the forced granularity to the test environment.
        self.env = {'PMEM2_FORCE_GRANULARITY': self.force_env}
def startFirstRun(self):
    """Run the first-run flow.

    If configuration succeeded and the admin login is granted, construct
    the Admin object (kept for its construction side effects).

    Returns:
        None, always.
    """
    config = configurator.Configurator()
    if config.hasSucceeded():
        adminlog = adminlogin.Adminlogin(self.sett.getAdminPWHash(),
                                         self.sett.getAdminIdentifier())
        if adminlog.isAccessGranted():
            # Previously bound to an unused local (`adm`); keep only the
            # construction, which may carry side effects.
            admin.Admin()
    return None
def __init__(self, msg):
    """Record a skip message; escalate to Fail when fail_on_skip is set."""
    super().__init__(msg)
    if configurator.Configurator().config.fail_on_skip:
        # Configuration demands skips be treated as hard failures.
        raise Fail(msg)
    self.messages = [msg]
def __init__(self, config):
    """Initialize the DAQ runner: devices, network, cloud clients, test list.

    Args:
        config: dict-like run configuration; mutated here to carry the
            resolved 'test_list' and 'test_metadata'.
    """
    self.configurator = configurator.Configurator()
    self.gateway_sets = set(range(1, self.MAX_GATEWAYS + 1))
    self.config = config
    self._result_sets = {}
    self._devices = Devices()
    self._ports = {}
    self._callback_queue = []
    self._event_lock = threading.Lock()
    self.gcp = gcp.GcpManager(self.config, self._queue_callback)
    self._base_config = self._load_base_config()
    self.description = config.get('site_description', '').strip('\"')
    # Build metadata comes from the environment; missing variables raise
    # KeyError at startup rather than failing later.
    self._daq_version = os.environ['DAQ_VERSION']
    self._lsb_release = os.environ['DAQ_LSB_RELEASE']
    self._sys_uname = os.environ['DAQ_SYS_UNAME']
    self.network = network.TestNetwork(config)
    self.result_linger = config.get('result_linger', False)
    self._linger_exit = 0
    self.faucet_events = None
    self.single_shot = config.get('single_shot', False)
    self.fail_mode = config.get('fail_mode', False)
    self.run_trigger = config.get('run_trigger', {})
    self.run_tests = True
    self.stream_monitor = None
    self.exception = None
    self.run_count = 0
    self.run_limit = int(config.get('run_limit', 0))
    self._default_port_flap_timeout = int(
        config.get('port_flap_timeout_sec', 0))
    self.result_log = self._open_result_log()
    self._system_active = False
    logging_client = self.gcp.get_logging_client()
    self.daq_run_id = self._init_daq_run_id()
    self._device_result_client = self._init_device_result_client()
    if logging_client:
        # Tag stackdriver entries with this run id for cross-referencing.
        logger.set_stackdriver_client(
            logging_client, labels={"daq_run_id": self.daq_run_id})
    test_list = self._get_test_list(
        config.get('host_tests', self._DEFAULT_TESTS_FILE))
    if self.config.get('keep_hold'):
        LOGGER.info('Appending test_hold to master test list')
        if 'hold' not in test_list:
            test_list.append('hold')
    config['test_list'] = test_list
    config['test_metadata'] = self._get_test_metadata()
    LOGGER.info('DAQ RUN id: %s' % self.daq_run_id)
    LOGGER.info('Configured with tests %s' % ', '.join(config['test_list']))
    LOGGER.info('DAQ version %s' % self._daq_version)
    LOGGER.info('LSB release %s' % self._lsb_release)
    LOGGER.info('system uname %s' % self._sys_uname)
def get(cls, conf_ctx):
    """Get specific context value to be run"""
    # A preferred context always wins.
    preferred = next((ctx for ctx in conf_ctx if ctx.is_preferred), None)
    if preferred is not None:
        return preferred
    # No preferred context: fall back to the first non-explicit one.
    fallback = next((ctx for ctx in conf_ctx if not ctx.explicit), None)
    if fallback is not None:
        return fallback
    config = configurator.Configurator().config
    msg = futils.Message(config.unittest_log_level)
    msg.print_verbose('No valid "Any" context found')
    return None
def __init__(self, **kwargs):
    """Bind keyword attributes, resolve the test directory, and optionally
    set the forced-granularity environment variables."""
    futils.set_kwargs_attrs(self, kwargs)
    self.config = configurator.Configurator().config
    base_dir = os.path.abspath(getattr(self.config, self.config_dir_field))
    self.testdir = os.path.join(base_dir, self.tc_dirname)
    if getattr(self.config, self.config_force_field):
        self.env = {
            # PMEM2_FORCE_GRANULARITY is implemented only by
            # libpmem2. Corresponding PMEM_IS_PMEM_FORCE variable
            # is set to support tests for older PMDK libraries.
            'PMEM2_FORCE_GRANULARITY': self.force_env,
            'PMEM_IS_PMEM_FORCE': self.pmem_force_env,
        }
def __init__(self, config):
    """Initialize the DAQ runner: devices, network, triggers, and test list.

    Args:
        config: dict-like run configuration; 'run_trigger' is inserted via
            setdefault so this object and later readers share one dict.
    """
    self.configurator = configurator.Configurator()
    self.gateway_sets = set(range(1, self.MAX_GATEWAYS + 1))
    self.config = config
    self._result_sets = {}
    self._devices = Devices()
    self._ports = {}
    self._callback_queue = []
    # NOTE(review): reentrant lock — presumably event callbacks can
    # re-enter on the same thread; confirm.
    self._event_lock = threading.RLock()
    self.daq_run_id = self._init_daq_run_id()
    self._init_gcp()
    self._base_config = self._load_base_config()
    self.description = config.get('site_description', '').strip('\"')
    # Build metadata comes from the environment; missing vars raise KeyError.
    self._daq_version = os.environ['DAQ_VERSION']
    self._lsb_release = os.environ['DAQ_LSB_RELEASE']
    self._sys_uname = os.environ['DAQ_SYS_UNAME']
    self.network = network.TestNetwork(config)
    self.result_linger = config.get('result_linger', False)
    self.run_trigger = config.setdefault('run_trigger', {})
    self._native_vlan = self.run_trigger.get('native_vlan')
    self._native_gateway = None
    self._linger_exit = 0
    self.faucet_events = None
    self.single_shot = config.get('single_shot', False)
    self.fail_mode = config.get('fail_mode', False)
    self.run_tests = True
    self.stream_monitor = None
    self.exception = None
    self.run_count = 0
    self.run_limit = int(config.get('run_limit', 0))
    self._default_port_flap_timeout = int(
        config.get('port_flap_timeout_sec', 0))
    self.result_log = self._open_result_log()
    self._system_active = False
    self._device_result_handler = self._init_device_result_handler()
    self._cleanup_previous_runs()
    self._init_test_list()
    self._target_set_queue = []
    # Unset/zero max_hosts means "no limit".
    self._max_hosts = self.run_trigger.get('max_hosts') or float('inf')
    LOGGER.info('DAQ RUN id: %s', self.daq_run_id)
    tests_string = ', '.join(config['test_list']) or '**none**'
    LOGGER.info('Configured with tests %s', tests_string)
    LOGGER.info('DAQ version %s', self._daq_version)
    LOGGER.info('LSB release %s', self._lsb_release)
    LOGGER.info('system uname %s', self._sys_uname)
def __init__(self, socket, config_file, haproxy_file):
    """Parse the config file and create corresponding watchers and pools"""
    # NOTE: the `socket` parameter shadows the stdlib module name.
    self._watchers = []
    self._orchestrators = []
    # Config parser
    self._configurator = configurator.Configurator(socket, config_file,
                                                   haproxy_file)
    # Get list of services and their configs
    services = self._configurator.parse_config()
    # Write the initial configuration to file
    self._configurator.config_write()
    # If there is an existing HAProxy running that is using the same
    # pid file, take over the existing to avoid conflicts
    if haproxy_cmd.haproxy_proc():
        haproxy_cmd.restart_haproxy()
    else:
        haproxy_cmd.start_haproxy()
    # Instantiate initial connection to haproxy socket only once
    haproxy_sock = haproxy.HAProxy(socket_dir=socket)
    # Share the single instance of haproxy socket and config parser for
    # efficiency
    for service_name, service in services.items():
        self._watchers.append(
            watcher.Watcher(service_name, service, self._configurator))
        # Only create orchestrators when the config is present
        if 'elasticity' in service:
            self._orchestrators.append(
                orchestrator.Orchestrator(service_name, service,
                                          haproxy_sock))
    # Run self._cleanup on exit
    atexit.register(self._cleanup)
def __init__(self, args):
    """Parse command-line args into a config using a verbose Configurator."""
    self.config = configurator.Configurator(verbose=True).parse_args(args)
def __init__(self, *args, **kwargs):
    """Load test configuration and messaging, then run base-context setup."""
    conf = configurator.Configurator().config
    self.conf = conf
    self.msg = futils.Message(conf.unittest_log_level)
    # Delegate the remaining setup to the base context.
    ContextBase.__init__(self, *args, **kwargs)
# -*- coding: utf-8 -*- import configurator import os import hashlib from termcolor import colored # GETTING CONFIG ### config = configurator.Configurator() # END GETTING CONFIG # # Functions print " -- PDD SYNCHRONIZER V0.1 -- " ## # Obtain Axon version from Axon_config files ## with open(config.commander, 'r') as commander_file: for line in commander_file.readlines: if 'jaxon' not in line: pass else: line_splitted = line.split('/') for entry in line_splitted: if str(entry).startswith('jaxon'): axon_version = str(entry).split(' ')[0] else:
def __init__(self):
    """Cache the parsed configuration object."""
    configurator_instance = conf.Configurator()
    self.cfg = configurator_instance.config
def __init__(self, **kwargs):
    """Attach the supplied keyword attributes, then load the shared config."""
    futils.set_kwargs_attrs(self, kwargs)
    config_source = configurator.Configurator()
    self.conf = config_source.config
# NOTE(review): the leading statements continue a report-query generator
# method whose `def` line falls outside this chunk.
        if end:
            LOGGER.info('Limiting to end time %s', to_timestamp(end))
            query = query.where('updated', '<=', to_timestamp(end))
        if daq_run_id:
            LOGGER.info('Limiting to DAQ run id %s', daq_run_id)
            query = query.where('daq_run_id', '==', daq_run_id)
        # Newest runs first, capped at limit_count.
        runids = query.order_by(
            u'updated', direction=DESCENDING).limit(limit_count).stream()
        for runid in runids:
            json_report = self._get_json_report(runid)
            if json_report:
                yield json_report

    def _query_user(self, message):
        """Prompt on stdin; return True only for an affirmative reply."""
        reply = input(message)
        options = set(('y', 'Y', 'yes', 'YES', 'Yes', 'sure'))
        if reply in options:
            return True
        return False


if __name__ == '__main__':
    logger.set_config(format='%(levelname)s:%(message)s', level="INFO")
    CONFIGURATOR = configurator.Configurator()
    CONFIG = CONFIGURATOR.parse_args(sys.argv)
    GCP = GcpManager(CONFIG, None)
    if CONFIG.get('register_offenders'):
        GCP.register_offenders()
    else:
        print('Unknown command mode for gcp module.')
def __init__(self):
    """Build the main window: tray icon, persisted config, the info / fan /
    light / settings panels, the menu bar, and the update timer."""
    super().__init__()
    # Name the main window so it can be referenced from the css file.
    self.setObjectName("MainWindow")
    args = self.create_parser()
    # Set the application icon.
    self.setWindowIcon(QIcon(os.path.join(os.getcwd(), args.path, "images/icon3.png")))
    # Set the size and position of the application window.
    self.setGeometry(200, 200, 400, 300)
    # Title string shown in the application frame.
    self.setWindowTitle("CompAdmiss")
    # Add the system tray icon.
    self.trayConf = tray.TrayCofiguration(os.path.join(os.getcwd(), args.path, "images/icon3.png"))
    # Hook up control of the main widget.
    self.trayConf.S_widget_state.connect(self.change_widget_state)
    # Object that persists the application configuration.
    self.config = configurator.Configurator(os.path.join(os.getcwd(), args.path, "config.ini"))
    # Panel that displays temperature information.
    self.dp = presenter.DataPresenter()
    # Panel that configures the lighting.
    self.lm = lightsmenu.LightsMenu(self.config.config_data["LIGHT"]["main_mode"],
                                    self.config.config_data["LIGHT"]["mode"],
                                    self.config.config_data["LIGHT"]["brightness"],
                                    self.config.config_data["LIGHT"]["speed"],
                                    self.config.config_data["LIGHT"]["max_cpu_temp"],
                                    self.config.config_data["LIGHT"]["max_gpu_temp"],
                                    self.config.config_data["LIGHT"]["color"])
    # self.S_lights_mode.connect(self.lm.update_mode)
    # self.S_lights_bright.connect(self.lm.update_bright)
    # Panel that configures the fan.
    self.fm = fanmenu.FanMenu(self.config.config_data['FAN']['mode'],
                              self.config.config_data['FAN']['step_cpu_temp'],
                              self.config.config_data['FAN']['step_gpu_temp'])
    # self.S_fan_mode.connect(self.fm.update_mode)
    # self.S_fan_cpu_step_temp.connect(self.fm.set_cpu_step_temp)
    # self.S_fan_gpu_step_temp.connect(self.fm.set_gpu_step_temp)
    self.ds = sender.DataSender(self.config.config_data['SETTING']['port'],
                                self.config.config_data['SETTING']['port_speed'])
    # Settings panel.
    self.sm = settingmenu.SettingMenu(self.config.config_data['SETTING']['port'],
                                      self.config.config_data['SETTING']['port_speed'])
    self.sm.S_port_info.connect(self.ds.connect)
    self.sm.S_update_ports.connect(self.ds.get_ports)
    self.ds.S_ports.connect(self.sm.update_ports)
    # Create a vertical layout.
    vBoxLay = QVBoxLayout()
    vBoxLay.setContentsMargins(0, 0, 0, 0)
    # Pin to the top-left edge.
    # vBoxLay.setAlignment(Qt.AlignTop | Qt.AlignLeft)
    # Add the widgets to the layout.
    vBoxLay.addSpacing(20)
    vBoxLay.addWidget(self.dp)
    vBoxLay.addWidget(self.lm)
    vBoxLay.addWidget(self.fm)
    vBoxLay.addWidget(self.sm)
    self.setLayout(vBoxLay)
    # Create the actions: each one shows its panel and hides the others.
    openHardInfo = QAction("Hardware", self)
    openHardInfo.triggered.connect(self.fm.hide)
    openHardInfo.triggered.connect(self.lm.hide)
    openHardInfo.triggered.connect(self.sm.hide)
    openHardInfo.triggered.connect(self.dp.show)
    openFanMenu = QAction("Fan", self)
    openFanMenu.triggered.connect(self.dp.hide)
    openFanMenu.triggered.connect(self.lm.hide)
    openFanMenu.triggered.connect(self.sm.hide)
    openFanMenu.triggered.connect(self.fm.show)
    openLightMenu = QAction("Light", self)
    openLightMenu.triggered.connect(self.dp.hide)
    openLightMenu.triggered.connect(self.sm.hide)
    openLightMenu.triggered.connect(self.fm.hide)
    openLightMenu.triggered.connect(self.lm.show)
    openSettingMenu = QAction("Settings", self)
    openSettingMenu.triggered.connect(self.lm.hide)
    openSettingMenu.triggered.connect(self.dp.hide)
    openSettingMenu.triggered.connect(self.fm.hide)
    openSettingMenu.triggered.connect(self.sm.show)
    openSettingMenu.triggered.connect(self.ds.get_ports)
    saveAct = QAction(QIcon(os.path.join(os.getcwd(), args.path, "images/save.png")), "Save", self)
    # Save the lighting data.
    saveAct.triggered.connect(self.lm.save_info)
    saveAct.triggered.connect(self.lm.save_extra_info)
    self.lm.S_light_info.connect(self.config.save_light_info)
    self.lm.S_light_extra_info.connect(self.config.save_light_extra_info)
    # Save the data changed in the fan panel.
    saveAct.triggered.connect(self.fm.save_info)
    self.fm.S_fan_info.connect(self.config.save_fan_info)
    # Save the port and settings data.
    self.sm.S_port_info.connect(self.config.save_port)
    # Save the settings file.
    saveAct.triggered.connect(self.config.save)
    # Create the menu bar.
    menuBar = QMenuBar(self)
    menuBar.setObjectName("MenuBar")
    # Attach the actions to the menu.
    menuBar.addAction(openHardInfo)
    menuBar.addAction(openFanMenu)
    menuBar.addAction(openLightMenu)
    menuBar.addAction(openSettingMenu)
    menuBar.addAction(saveAct)
    timer = QTimer(self)
    timer.setInterval(1000)
    timer.setSingleShot(False)
    # NOTE(review): the timeout handler is connected only when the sender is
    # already connected at startup, yet the timer is started unconditionally
    # — confirm this is intentional.
    if (self.ds.is_connect()):
        timer.timeout.connect(self.send_info)
    timer.start(1000)
    self.show()
def __init__(self, args):
    """Parse command-line args into a config, using raw-print output."""
    self.config = configurator.Configurator(raw_print=True).parse_args(args)
# NOTE(review): the leading loop is the tail of a faucet-event handling
# method whose `def` line falls outside this chunk.
        while self._faucet_events:
            event = self._faucet_events.next_event()
            LOGGER.debug('Faucet event %s', event)
            # No event available: report True to the caller.
            if not event:
                return True
            (dpid, port, active) = self._faucet_events.as_port_state(event)
            if dpid and port:
                LOGGER.info('Port state %s %s %s', dpid, port, active)
            (dpid, port, target_mac) = self._faucet_events.as_port_learn(event)
            if dpid and port:
                LOGGER.info('Port learn %s %s %s', dpid, port, target_mac)
            (dpid, restart_type) = self._faucet_events.as_config_change(event)
            if dpid is not None:
                LOGGER.info('DP restart %d %s', dpid, restart_type)
        return False

    def get_overview(self, params):
        """Get an overview of the system"""
        return {'hello': 'world', 'params': params}


if __name__ == '__main__':
    logger.set_config(level=logging.INFO)
    CONFIG = configurator.Configurator().parse_args(sys.argv)
    FORCH = Forchestrator(CONFIG)
    FORCH.initialize()
    HTTP = http_server.HttpServer(CONFIG)
    # Serve the overview endpoint, then hand control to the main loop.
    HTTP.map_request('overview', FORCH.get_overview)
    HTTP.start_server()
    FORCH.main_loop()
# Discord Tekken-frame-bot module setup.
# Fixes: `os` and `logging` were used below but never imported (NameError at
# import time), and the log-file path used a hard-coded Windows separator.
import logging
import os
import random
import sys

import configurator
import discord

# Make the parent directory importable before pulling in project modules.
sys.path.insert(1, (os.path.dirname(os.path.dirname(__file__))))
from functools import reduce
from discord.ext import commands
from src import tkfinder, util
from src.resources import embed, const
from github import Github
from discord_components import DiscordComponents

base_path = os.path.dirname(__file__)
config = configurator.Configurator(
    os.path.abspath(os.path.join(base_path, "resources", "config.json")))
prefix = '§'
description = 'The premier Tekken 7 Frame bot, made by Baikonur#4927, continued by Tib#1303'
bot = commands.Bot(command_prefix=prefix, description=description)
buttons = DiscordComponents(bot)

# Set logger to log errors
logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)
logfile_directory = os.path.abspath(os.path.join(base_path, "..", "log"))
# os.path.join for portability (was a hard-coded "\\" Windows separator).
logfile_path = os.path.join(logfile_directory, "logfile.log")
# Create logfile if not exists
if not os.path.exists(logfile_directory):
    os.makedirs(logfile_directory)
def initialize(self, path_docs, path_ppds=None, path_digests=None,
               debug_mode=False, threads_count=8):
    """
    @param path_docs: path to local directory with documents to print
    @param path_ppds: path to local directory with PPD files to test;
            if None is set then all PPD files from the SCS server are
            downloaded and tested
    @param path_digests: path to local directory with digests files for
            test documents; if None is set then content of printed
            documents is not verified
    @param debug_mode: if set to True, then the autotest temporarily
            remounts the root partition in R/W mode and changes CUPS
            configuration, what allows to extract pipelines for all
            tested PPDs and rerun the outside CUPS
    @param threads_count: number of threads to use
    """
    # Calculates absolute paths for all parameters
    self._location_of_test_docs = self._calculate_full_path(path_docs)
    self._location_of_PPD_files = self._calculate_full_path(path_ppds)
    location_of_digests_files = self._calculate_full_path(path_digests)
    # This object is used for running tasks in many threads simultaneously
    self._processor = multithreaded_processor.MultithreadedProcessor(
        threads_count)
    # This object is responsible for parsing CUPS logs
    self._log_reader = log_reader.LogReader()
    # This object is responsible for the system configuration
    self._configurator = configurator.Configurator()
    self._configurator.configure(debug_mode)
    # Reads list of test documents
    # NOTE(review): ('.pdf') is a plain string, not a 1-tuple — works with
    # endswith-style matching but confirm the intent.
    self._docs = helpers.list_entries_from_directory(
        path=self._location_of_test_docs,
        with_suffixes=('.pdf'),
        nonempty_results=True,
        include_directories=False)
    # Get list of PPD files ...
    if self._location_of_PPD_files is None:
        # ... from the SCS server
        self._ppds = self._get_filenames_from_PPD_indexes()
    else:
        # ... from the given local directory
        # Unpack archives with all PPD files:
        path_archive = self._calculate_full_path('ppds_all.tar.xz')
        path_target_dir = self._calculate_full_path('.')
        file_utils.rm_dir_if_exists(
            os.path.join(path_target_dir, 'ppds_all'))
        subprocess.call(
            ['tar', 'xJf', path_archive, '-C', path_target_dir])
        path_archive = self._calculate_full_path('ppds_100.tar.xz')
        file_utils.rm_dir_if_exists(
            os.path.join(path_target_dir, 'ppds_100'))
        subprocess.call(
            ['tar', 'xJf', path_archive, '-C', path_target_dir])
        # Load PPD files from the chosen directory
        self._ppds = helpers.list_entries_from_directory(
            path=self._location_of_PPD_files,
            with_suffixes=('.ppd', '.ppd.gz'),
            nonempty_results=True,
            include_directories=False)
        self._ppds.sort()
    # Load digests files
    self._digests = dict()
    if location_of_digests_files is None:
        # No digests given: content verification is skipped for every doc.
        for doc_name in self._docs:
            self._digests[doc_name] = dict()
    else:
        path_blacklist = os.path.join(location_of_digests_files,
                                      'blacklist.txt')
        blacklist = helpers.load_blacklist(path_blacklist)
        for doc_name in self._docs:
            digests_name = doc_name + '.digests'
            path = os.path.join(location_of_digests_files, digests_name)
            self._digests[doc_name] = helpers.parse_digests_file(
                path, blacklist)
    # Prepare a working directory for pipelines
    if debug_mode:
        self._pipeline_dir = tempfile.mkdtemp(dir='/tmp')
    else:
        self._pipeline_dir = None