Code example #1
File: tankcore.py Project: sabirovruslan/yandex-tank
    def __init__(self, configs, artifacts_base_dir=None, artifacts_dir_name=None, cfg_depr=None):
        """

        :param configs: list of dict
        """
        self.raw_configs = configs
        self.config = TankConfig(self.raw_configs,
                                 with_dynamic_options=True,
                                 core_section=self.SECTION)
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.interrupted = False
        self.lock_file = None
        self.lock_dir = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self.cfg_depr = cfg_depr
        self._cfg_snapshot = None

        self.interrupted = False
Code example #2
File: tankcore.py Project: sadpdtchr/yandex-tank
    def __init__(self,
                 configs,
                 interrupted_event,
                 artifacts_base_dir=None,
                 artifacts_dir_name=None):
        """

        :param configs: list of dict
        :param interrupted_event: threading.Event
        """
        self.output = {}
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = interrupted_event

        self.error_log = None

        error_output = 'validation_error.yaml'
        self.config, self.errors, self.configinitial = TankConfig(
            self.raw_configs,
            with_dynamic_options=True,
            core_section=self.SECTION,
            error_output=error_output).validate()
        if not self.config:
            raise ValidationError(self.errors)
        self.test_id = self.get_option(
            self.SECTION, 'artifacts_dir',
            datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
        self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
        with open(os.path.join(self.artifacts_dir, CONFIGINITIAL), 'w') as f:
            yaml.dump(self.configinitial, f)
        self.add_artifact_file(error_output)
        self.add_artifact_to_send(LPRequisites.CONFIGINITIAL,
                                  yaml.dump(self.configinitial))
        configinfo = self.config.validated.copy()
        configinfo.setdefault(self.SECTION, {})
        configinfo[self.SECTION][self.API_JOBNO] = self.test_id
        self.add_artifact_to_send(LPRequisites.CONFIGINFO,
                                  yaml.dump(configinfo))
        with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
            yaml.dump(configinfo, f)
        logger.info('New test id %s' % self.test_id)
Code example #3
def test_validate(ini_file):
    # noinspection PyStatementEffect
    TankConfig(
        [load_core_base_cfg()] +
        cfg_folder_loader(os.path.join(os.path.dirname(__file__), 'etc_cfg')) +
        [load_cfg(os.path.join(os.path.dirname(__file__), ini_file))]
    ).validated
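The bare .validated access in the test above is a property lookup used purely for its side effect: it raises when the merged config is invalid. A minimal self-contained sketch of that pattern follows; the DemoConfig class is illustrative only, not the real TankConfig implementation.

class DemoConfig:
    """Stand-in for a config object whose `validated` property raises on bad input."""
    def __init__(self, data):
        self._data = data

    @property
    def validated(self):
        # Raise if the config is bad, return it otherwise.
        if 'core' not in self._data:
            raise ValueError('missing core section')
        return self._data

# The bare attribute access is the whole point, exactly as in test_validate above.
DemoConfig({'core': {}}).validated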
Code example #4
File: tankcore.py Project: f2nd/yandex-tank
    def __init__(self, configs, artifacts_base_dir=None, artifacts_dir_name=None):
        """

        :param configs: list of dict
        """
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.interrupted = False
        self.lock_file = None
        self.lock_dir = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = False

        self.error_log = None

        error_output = 'validation_error.yaml'
        self.config = TankConfig(self.raw_configs,
                                 with_dynamic_options=True,
                                 core_section=self.SECTION,
                                 error_output=error_output)
        self.add_artifact_file(error_output)
Code example #5
File: api.py Project: vveliev/tank-yard
def validate_config(config, fmt):
    def response(full_cfg, errors):
        return {'config': full_cfg, 'errors': errors}

    if fmt == 'ini':
        stream = StringIO(str(config.read()))
        config.close()
        try:
            cfg = convert_ini(stream)
            tank_config = TankConfig([load_core_base_cfg()] +
                                     load_local_base_cfgs() + [cfg])
            return response(tank_config.raw_config_dict, tank_config.errors())
        except ConversionError as e:
            return response({}, [e.message])
        except Exception as e:
            log_exception(e)
            raise BadRequest()
    else:
        try:
            cfg = yaml.load(config)
            config.close()
            tank_config = TankConfig([load_core_base_cfg()] +
                                     load_local_base_cfgs() + [cfg])
            return response(tank_config.raw_config_dict, tank_config.errors())
        except Exception as e:
            log_exception(e)
            raise BadRequest()
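A hypothetical driver for validate_config above, assuming the function and its helpers are importable from the api module and a PyYAML version that still accepts yaml.load without an explicit Loader; the sample YAML content and variable names are illustrative only.

from io import StringIO

sample = StringIO("phantom:\n  address: localhost\n")
result = validate_config(sample, fmt='yaml')  # exercises the non-ini branch above
print(result['errors'])  # validation errors reported by TankConfig, empty when the config is clean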
Code example #6
File: tankcore.py Project: xammi/yandex-tank
    def __init__(self,
                 configs,
                 artifacts_base_dir=None,
                 artifacts_dir_name=None):
        """

        :param configs: list of dict
        """
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.interrupted = False
        self.lock_file = None
        self.lock_dir = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = False

        self.error_log = None

        error_output = 'validation_error.yaml'
        self.config = TankConfig(self.raw_configs,
                                 with_dynamic_options=True,
                                 core_section=self.SECTION,
                                 error_output=error_output)
        self.add_artifact_file(error_output)
        self.add_artifact_to_send(LPRequisites.CONFIGINFO,
                                  unicode(self.config))
Code example #7
File: tankcore.py Project: yandex/yandex-tank
    def __init__(self, configs, interrupted_event, artifacts_base_dir=None, artifacts_dir_name=None):
        """

        :param configs: list of dict
        :param interrupted_event: threading.Event
        """
        self.output = {}
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = interrupted_event

        self.error_log = None

        error_output = 'validation_error.yaml'
        self.config, self.errors, self.configinitial = TankConfig(self.raw_configs,
                                                                  with_dynamic_options=True,
                                                                  core_section=self.SECTION,
                                                                  error_output=error_output).validate()
        if not self.config:
            raise ValidationError(self.errors)
        self.test_id = self.get_option(self.SECTION, 'artifacts_dir',
                                       datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
        self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
        with open(os.path.join(self.artifacts_dir, CONFIGINITIAL), 'w') as f:
            yaml.dump(self.configinitial, f)
        self.add_artifact_file(error_output)
        self.add_artifact_to_send(LPRequisites.CONFIGINITIAL, yaml.dump(self.configinitial))
        configinfo = self.config.validated.copy()
        configinfo.setdefault(self.SECTION, {})
        configinfo[self.SECTION][self.API_JOBNO] = self.test_id
        self.add_artifact_to_send(LPRequisites.CONFIGINFO, yaml.dump(configinfo))
        with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
            yaml.dump(configinfo, f)
        logger.info('New test id %s' % self.test_id)
Code example #8
File: webserver.py Project: Hurenka/yandex-tank-api
    def post(self):
        config = self.request.body
        try:
            config = yaml.safe_load(config)
            assert isinstance(config, dict), 'Config must be YAML dict'
        except yaml.YAMLError:
            self.reply_reason(400, 'Config is not a valid YAML')
            return
        except AssertionError as aexc:
            self.reply_reason(400, repr(aexc))
            return
        _, errors = TankConfig([load_core_base_cfg()] +
                               load_local_base_cfgs() + [config],
                               with_dynamic_options=False).validate()

        self.reply_json(200, {
            'config': yaml.safe_dump(config),
            'errors': errors
        })
        return
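A hypothetical client call against a handler like the one above; the host, port, and URL path are assumptions and not part of the original example.

import requests
import yaml

cfg = {'phantom': {'address': 'localhost'}}
resp = requests.post('http://localhost:8888/validate',  # assumed endpoint, adjust to the real route
                     data=yaml.safe_dump(cfg))
print(resp.json()['errors'])  # schema errors reported by TankConfig.validate(), if any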
Code example #9
def test_get_plugins(config, expected):
    assert {(name, pack)
            for name, pack, cfg, updater in TankConfig(config).plugins
            } == expected
Code example #10
def test_validate_all_error(config, expected):
    with pytest.raises(ValidationError) as e:
        TankConfig(config).validated
    assert e.value.errors == expected
Code example #11
def test_validate_all(config, expected):
    assert TankConfig(config, False).validated == expected
Code example #12
def test_load_multiple(configs, expected):
    assert TankConfig(configs).raw_config_dict == expected
Code example #13
def test_validate_core_error(config, expected):
    with pytest.raises(Exception) as e:
        TankConfig(config).validated
    assert expected in str(e.value)
Code example #14
File: tankcore.py Project: vascudo-dev/yandex-tank
class TankCore(object):
    """
    JMeter + dstat inspired :)
    """
    SECTION = 'core'
    SECTION_META = 'meta'
    PLUGIN_PREFIX = 'plugin_'
    PID_OPTION = 'pid'
    UUID_OPTION = 'uuid'
    API_JOBNO = 'api_jobno'

    def __init__(self, configs, interrupted_event, artifacts_base_dir=None, artifacts_dir_name=None):
        """

        :param configs: list of dict
        :param interrupted_event: threading.Event
        """
        self.output = {}
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = interrupted_event

        self.error_log = None
        self.monitoring_data_listeners = []

        error_output = 'validation_error.yaml'
        self.config, self.errors, self.configinitial = TankConfig(self.raw_configs,
                                                                  with_dynamic_options=True,
                                                                  core_section=self.SECTION,
                                                                  error_output=error_output).validate()
        if not self.config:
            raise ValidationError(self.errors)
        self.test_id = self.get_option(self.SECTION, 'artifacts_dir',
                                       datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
        self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
        with open(os.path.join(self.artifacts_dir, CONFIGINITIAL), 'w') as f:
            yaml.dump(self.configinitial, f)
        self.add_artifact_file(error_output)
        self.add_artifact_to_send(LPRequisites.CONFIGINITIAL, yaml.dump(self.configinitial))
        configinfo = self.config.validated.copy()
        configinfo.setdefault(self.SECTION, {})
        configinfo[self.SECTION][self.API_JOBNO] = self.test_id
        self.add_artifact_to_send(LPRequisites.CONFIGINFO, yaml.dump(configinfo))
        with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
            yaml.dump(configinfo, f)
        logger.info('New test id %s' % self.test_id)

    @property
    def cfg_snapshot(self):
        if not self._cfg_snapshot:
            self._cfg_snapshot = str(self.config)
        return self._cfg_snapshot

    @staticmethod
    def get_available_options():
        # todo: should take this from schema
        return [
            "artifacts_base_dir", "artifacts_dir",
            "taskset_path", "affinity"
        ]

    @property
    def plugins(self):
        """
        :returns: {plugin_name: plugin_class, ...}
        :rtype: dict
        """
        if self._plugins is None:
            self.load_plugins()
            if self._plugins is None:
                self._plugins = {}
        return self._plugins

    @property
    def artifacts_base_dir(self):
        if not self._artifacts_base_dir:
            try:
                artifacts_base_dir = os.path.abspath(self.get_option(self.SECTION, "artifacts_base_dir"))
            except ValidationError:
                artifacts_base_dir = os.path.abspath('logs')
            if not os.path.exists(artifacts_base_dir):
                os.makedirs(artifacts_base_dir)
                os.chmod(self.artifacts_base_dir, 0o755)
            self._artifacts_base_dir = artifacts_base_dir
        return self._artifacts_base_dir

    def load_plugins(self):
        """
        Tells core to take plugin options and instantiate plugin classes
        """
        logger.info("Loading plugins...")
        for (plugin_name, plugin_path, plugin_cfg) in self.config.plugins:
            logger.debug("Loading plugin %s from %s", plugin_name, plugin_path)
            if plugin_path == "yandextank.plugins.Overload":
                logger.warning(
                    "Deprecated plugin name: 'yandextank.plugins.Overload'\n"
                    "There is a new generic plugin now.\n"
                    "Correcting to 'yandextank.plugins.DataUploader overload'")
                plugin_path = "yandextank.plugins.DataUploader overload"
            try:
                plugin = il.import_module(plugin_path)
            except ImportError:
                logger.warning('Plugin name %s path %s import error', plugin_name, plugin_path)
                logger.debug('Plugin name %s path %s import error', plugin_name, plugin_path, exc_info=True)
                raise
            try:
                instance = getattr(plugin, 'Plugin')(self, cfg=plugin_cfg, name=plugin_name)
            except AttributeError:
                logger.warning('Plugin %s classname should be `Plugin`', plugin_name)
                raise
            else:
                self.register_plugin(self.PLUGIN_PREFIX + plugin_name, instance)
        logger.debug("Plugin instances: %s", self._plugins)

    @property
    def job(self):
        if not self._job:
            # monitoring plugin
            monitorings = [plugin for plugin in self.plugins.values() if isinstance(plugin, MonitoringPlugin)]
            # generator plugin
            try:
                gen = self.get_plugin_of_type(GeneratorPlugin)
            except KeyError:
                logger.warning("Load generator not found")
                gen = GeneratorPlugin(self, {}, 'generator dummy')
            # aggregator
            aggregator = TankAggregator(gen)
            self._job = Job(monitoring_plugins=monitorings,
                            generator_plugin=gen,
                            aggregator=aggregator,
                            tank=socket.getfqdn())
        return self._job

    def plugins_configure(self):
        """        Call configure() on all plugins        """
        self.publish("core", "stage", "configure")

        logger.info("Configuring plugins...")
        self.taskset_affinity = self.get_option(self.SECTION, 'affinity')
        if self.taskset_affinity:
            self.__setup_taskset(self.taskset_affinity, pid=os.getpid())

        for plugin in self.plugins.values():
            if not self.interrupted.is_set():
                logger.debug("Configuring %s", plugin)
                plugin.configure()
                if isinstance(plugin, MonitoringDataListener):
                    self.monitoring_data_listeners.append(plugin)

    def plugins_prepare_test(self):
        """ Call prepare_test() on all plugins        """
        logger.info("Preparing test...")
        self.publish("core", "stage", "prepare")
        for plugin in self.plugins.values():
            if not self.interrupted.is_set():
                logger.debug("Preparing %s", plugin)
                plugin.prepare_test()

    def plugins_start_test(self):
        """        Call start_test() on all plugins        """
        if not self.interrupted.is_set():
            logger.info("Starting test...")
            self.publish("core", "stage", "start")
            self.job.aggregator.start_test()
            for plugin in self.plugins.values():
                logger.debug("Starting %s", plugin)
                start_time = time.time()
                plugin.start_test()
                logger.info("Plugin {0:s} required {1:f} seconds to start".format(plugin,
                                                                                  time.time() - start_time))
            self.publish('generator', 'test_start', self.job.generator_plugin.start_time)

    def wait_for_finish(self):
        """
        Call is_test_finished() on all plugins 'till one of them initiates exit
        """
        if not self.interrupted.is_set():
            logger.info("Waiting for test to finish...")
            logger.info('Artifacts dir: {dir}'.format(dir=self.artifacts_dir))
            self.publish("core", "stage", "shoot")
            if not self.plugins:
                raise RuntimeError("It's strange: we have no plugins loaded...")

        while not self.interrupted.is_set():
            begin_time = time.time()
            aggr_retcode = self.job.aggregator.is_test_finished()
            if aggr_retcode >= 0:
                return aggr_retcode
            for plugin_name, plugin in self.plugins.items():
                logger.debug("Polling %s", plugin)
                try:
                    retcode = plugin.is_test_finished()
                    if retcode >= 0:
                        return retcode
                except Exception:
                    logger.warning('Plugin {} failed:'.format(plugin_name), exc_info=True)
                    if isinstance(plugin, GeneratorPlugin):
                        return RetCode.ERROR
                    else:
                        logger.warning('Disabling plugin {}'.format(plugin_name))
                        plugin.is_test_finished = lambda: RetCode.CONTINUE
            end_time = time.time()
            diff = end_time - begin_time
            logger.debug("Polling took %s", diff)
            logger.debug("Tank status: %s", json.dumps(self.status))
            # screen refresh every 0.5 s
            if diff < 0.5:
                time.sleep(0.5 - diff)
        return 1

    def plugins_end_test(self, retcode):
        """        Call end_test() on all plugins        """
        logger.info("Finishing test...")
        self.publish("core", "stage", "end")
        self.publish('generator', 'test_end', time.time())
        logger.info("Stopping load generator and aggregator")
        retcode = self.job.aggregator.end_test(retcode)
        logger.debug("RC after: %s", retcode)

        logger.info('Stopping monitoring')
        for plugin in self.job.monitoring_plugins:
            logger.info('Stopping %s', plugin)
            retcode = plugin.end_test(retcode) or retcode
            logger.info('RC after: %s', retcode)

        for plugin in [p for p in self.plugins.values() if
                       p is not self.job.generator_plugin and p not in self.job.monitoring_plugins]:
            logger.debug("Finalize %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.end_test(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed finishing plugin %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        return retcode

    def plugins_post_process(self, retcode):
        """
        Call post_process() on all plugins
        """
        logger.info("Post-processing test...")
        self.publish("core", "stage", "post_process")
        for plugin in self.plugins.values():
            logger.debug("Post-process %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.post_process(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed post-processing plugin %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        return retcode

    def publish_monitoring_data(self, data):
        """sends pending data set to listeners"""
        for plugin in self.monitoring_data_listeners:
            # deep copy to ensure each listener gets its own copy
            try:
                plugin.monitoring_data(copy.deepcopy(data))
            except Exception:
                logger.error("Plugin failed to process monitoring data", exc_info=True)

    def __setup_taskset(self, affinity, pid=None, args=None):
        """ if pid specified: set process w/ pid `pid` CPU affinity to specified `affinity` core(s)
            if args specified: modify list of args for Popen to start w/ taskset w/ affinity `affinity`
        """
        self.taskset_path = self.get_option(self.SECTION, 'taskset_path')

        if args:
            return [self.taskset_path, '-c', affinity] + args

        if pid:
            args = "%s -pc %s %s" % (self.taskset_path, affinity, pid)
            retcode, stdout, stderr = execute(args, shell=True, poll_period=0.1, catch_out=True)
            logger.debug('taskset for pid %s stdout: %s', pid, stdout)
            if retcode == 0:
                logger.info("Enabled taskset for pid %s with affinity %s", str(pid), affinity)
            else:
                logger.debug('Taskset setup failed w/ retcode :%s', retcode)
                raise KeyError(stderr)

    def _collect_artifacts(self, validation_failed=False):
        logger.debug("Collecting artifacts")
        logger.info("Artifacts dir: %s", self.artifacts_dir)
        for filename, keep in self.artifact_files.items():
            try:
                self.__collect_file(filename, keep)
            except Exception as ex:
                logger.warn("Failed to collect file %s: %s", filename, ex)

    def get_option(self, section, option, default=None):
        return self.config.get_option(section, option, default)

    def set_option(self, section, option, value):
        """
        Set an option in storage
        """
        raise NotImplementedError

    def set_exitcode(self, code):
        self.output['core']['exitcode'] = code

    def get_plugin_of_type(self, plugin_class):
        """
        Retrieve a plugin of desired class, KeyError raised otherwise
        """
        logger.debug("Searching for plugin: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            if len(matches) > 1:
                logger.debug(
                    "More than one plugin of type %s found. Using first one.",
                    plugin_class)
            return matches[-1]
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_plugins_of_type(self, plugin_class):
        """
        Retrieve a list of plugins of desired class, KeyError raised otherwise
        """
        logger.debug("Searching for plugins: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            return matches
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_jobno(self, plugin_name='plugin_lunapark'):
        uploader_plugin = self.plugins[plugin_name]
        return uploader_plugin.lp_job.number

    def __collect_file(self, filename, keep_original=False):
        """
        Move or copy single file to artifacts dir
        """
        dest = self.artifacts_dir + '/' + os.path.basename(filename)
        logger.debug("Collecting file: %s to %s", filename, dest)
        if not filename or not os.path.exists(filename):
            logger.warning("File not found to collect: %s", filename)
            return

        if os.path.exists(dest):
            # FIXME: 3 find a way to store artifacts anyway
            logger.warning("File already exists: %s", dest)
            return

        if keep_original:
            shutil.copy(filename, self.artifacts_dir)
        else:
            shutil.move(filename, self.artifacts_dir)

        os.chmod(dest, 0o644)

    def add_artifact_file(self, filename, keep_original=False):
        """
        Add file to be stored as result artifact on post-process phase
        """
        if filename:
            logger.debug(
                "Adding artifact file to collect (keep=%s): %s", keep_original,
                filename)
            self.artifact_files[filename] = keep_original

    def add_artifact_to_send(self, lp_requisites, content):
        self.artifacts_to_send.append((lp_requisites, content))

    def apply_shorthand_options(self, options, default_section='DEFAULT'):
        for option_str in options:
            key, value = option_str.split('=')
            try:
                section, option = key.split('.')
            except ValueError:
                section = default_section
                option = key
            logger.debug(
                "Override option: %s => [%s] %s=%s", option_str, section,
                option, value)
            self.set_option(section, option, value)

    def mkstemp(self, suffix, prefix, directory=None):
        """
        Generate temp file name in artifacts base dir
        and close temp file handle
        """
        if not directory:
            directory = self.artifacts_dir
        fd, fname = tempfile.mkstemp(suffix, prefix, directory)
        os.close(fd)
        os.chmod(fname, 0o644)  # FIXME: chmod to parent dir's mode?
        return fname

    def publish(self, publisher, key, value):
        update_status(self.status, [publisher] + key.split('.'), value)

    def close(self):
        """
        Call close() for all plugins
        """
        logger.info("Close allocated resources...")
        for plugin in self.plugins.values():
            logger.debug("Close %s", plugin)
            try:
                plugin.close()
            except Exception as ex:
                logger.error("Failed closing plugin %s: %s", plugin, ex)
                logger.debug(
                    "Failed closing plugin: %s", traceback.format_exc(ex))

    @property
    def artifacts_dir(self):
        if not self._artifacts_dir:
            new_path = os.path.join(self.artifacts_base_dir, self.test_id)
            if not os.path.isdir(new_path):
                os.makedirs(new_path)
            os.chmod(new_path, 0o755)
            self._artifacts_dir = os.path.abspath(new_path)
        return self._artifacts_dir

    @staticmethod
    def get_user_agent():
        tank_agent = 'YandexTank/{}'.format(
            pkg_resources.require('yandextank')[0].version)
        py_info = sys.version_info
        python_agent = 'Python/{}.{}.{}'.format(
            py_info[0], py_info[1], py_info[2])
        os_agent = 'OS/{}'.format(platform.platform())
        return ' '.join((tank_agent, python_agent, os_agent))

    def register_plugin(self, plugin_name, instance):
        if self._plugins is None:
            self._plugins = {}
        if self._plugins.get(plugin_name, None) is not None:
            logger.exception('Plugin names should be unique')
        self._plugins[plugin_name] = instance

    def save_cfg(self, path):
        self.config.dump(path)

    def plugins_cleanup(self):
        for plugin_name, plugin in self.plugins.items():
            logger.info('Cleaning up plugin {}'.format(plugin_name))
            plugin.cleanup()
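A minimal instantiation sketch based on the constructor signature above; the config dict is illustrative only, and a real run would need options that satisfy the project's schema plus the usual yandextank imports in scope.

import threading

interrupted = threading.Event()
core = TankCore(configs=[{'core': {'lock_dir': '/var/lock'}}],  # illustrative config, may not pass validation
                interrupted_event=interrupted)
# Setting the event asks the polling loops (wait_for_finish, plugins_configure) to stop.
interrupted.set()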
Code example #15
File: tankcore.py Project: yandex/yandex-tank
class TankCore(object):
    """
    JMeter + dstat inspired :)
    """
    SECTION = 'core'
    SECTION_META = 'meta'
    PLUGIN_PREFIX = 'plugin_'
    PID_OPTION = 'pid'
    UUID_OPTION = 'uuid'
    API_JOBNO = 'api_jobno'

    def __init__(self, configs, interrupted_event, artifacts_base_dir=None, artifacts_dir_name=None):
        """

        :param configs: list of dict
        :param interrupted_event: threading.Event
        """
        self.output = {}
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None

        self.interrupted = interrupted_event

        self.error_log = None

        error_output = 'validation_error.yaml'
        self.config, self.errors, self.configinitial = TankConfig(self.raw_configs,
                                                                  with_dynamic_options=True,
                                                                  core_section=self.SECTION,
                                                                  error_output=error_output).validate()
        if not self.config:
            raise ValidationError(self.errors)
        self.test_id = self.get_option(self.SECTION, 'artifacts_dir',
                                       datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
        self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
        with open(os.path.join(self.artifacts_dir, CONFIGINITIAL), 'w') as f:
            yaml.dump(self.configinitial, f)
        self.add_artifact_file(error_output)
        self.add_artifact_to_send(LPRequisites.CONFIGINITIAL, yaml.dump(self.configinitial))
        configinfo = self.config.validated.copy()
        configinfo.setdefault(self.SECTION, {})
        configinfo[self.SECTION][self.API_JOBNO] = self.test_id
        self.add_artifact_to_send(LPRequisites.CONFIGINFO, yaml.dump(configinfo))
        with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
            yaml.dump(configinfo, f)
        logger.info('New test id %s' % self.test_id)

    @property
    def cfg_snapshot(self):
        if not self._cfg_snapshot:
            self._cfg_snapshot = str(self.config)
        return self._cfg_snapshot

    @staticmethod
    def get_available_options():
        # todo: should take this from schema
        return [
            "artifacts_base_dir", "artifacts_dir",
            "taskset_path", "affinity"
        ]

    @property
    def plugins(self):
        """
        :returns: {plugin_name: plugin_class, ...}
        :rtype: dict
        """
        if self._plugins is None:
            self.load_plugins()
            if self._plugins is None:
                self._plugins = {}
        return self._plugins

    @property
    def artifacts_base_dir(self):
        if not self._artifacts_base_dir:
            try:
                artifacts_base_dir = os.path.abspath(self.get_option(self.SECTION, "artifacts_base_dir"))
            except ValidationError:
                artifacts_base_dir = os.path.abspath('logs')
            if not os.path.exists(artifacts_base_dir):
                os.makedirs(artifacts_base_dir)
                os.chmod(self.artifacts_base_dir, 0o755)
            self._artifacts_base_dir = artifacts_base_dir
        return self._artifacts_base_dir

    def load_plugins(self):
        """
        Tells core to take plugin options and instantiate plugin classes
        """
        logger.info("Loading plugins...")
        for (plugin_name, plugin_path, plugin_cfg) in self.config.plugins:
            logger.debug("Loading plugin %s from %s", plugin_name, plugin_path)
            if plugin_path == "yandextank.plugins.Overload":
                logger.warning(
                    "Deprecated plugin name: 'yandextank.plugins.Overload'\n"
                    "There is a new generic plugin now.\n"
                    "Correcting to 'yandextank.plugins.DataUploader overload'")
                plugin_path = "yandextank.plugins.DataUploader overload"
            try:
                plugin = il.import_module(plugin_path)
            except ImportError:
                logger.warning('Plugin name %s path %s import error', plugin_name, plugin_path)
                logger.debug('Plugin name %s path %s import error', plugin_name, plugin_path, exc_info=True)
                raise
            try:
                instance = getattr(plugin, 'Plugin')(self, cfg=plugin_cfg, name=plugin_name)
            except AttributeError:
                logger.warning('Plugin %s classname should be `Plugin`', plugin_name)
                raise
            else:
                self.register_plugin(self.PLUGIN_PREFIX + plugin_name, instance)
        logger.debug("Plugin instances: %s", self._plugins)

    @property
    def job(self):
        if not self._job:
            # monitoring plugin
            monitorings = [plugin for plugin in self.plugins.values() if isinstance(plugin, MonitoringPlugin)]
            # generator plugin
            try:
                gen = self.get_plugin_of_type(GeneratorPlugin)
            except KeyError:
                logger.warning("Load generator not found")
                gen = GeneratorPlugin(self, {}, 'generator dummy')
            # aggregator
            aggregator = TankAggregator(gen)
            self._job = Job(monitoring_plugins=monitorings,
                            generator_plugin=gen,
                            aggregator=aggregator,
                            tank=socket.getfqdn())
        return self._job

    def plugins_configure(self):
        """        Call configure() on all plugins        """
        self.publish("core", "stage", "configure")

        logger.info("Configuring plugins...")
        self.taskset_affinity = self.get_option(self.SECTION, 'affinity')
        if self.taskset_affinity:
            self.__setup_taskset(self.taskset_affinity, pid=os.getpid())

        for plugin in self.plugins.values():
            if not self.interrupted.is_set():
                logger.debug("Configuring %s", plugin)
                plugin.configure()

    def plugins_prepare_test(self):
        """ Call prepare_test() on all plugins        """
        logger.info("Preparing test...")
        self.publish("core", "stage", "prepare")
        for plugin in self.plugins.values():
            if not self.interrupted.is_set():
                logger.debug("Preparing %s", plugin)
                plugin.prepare_test()

    def plugins_start_test(self):
        """        Call start_test() on all plugins        """
        if not self.interrupted.is_set():
            logger.info("Starting test...")
            self.publish("core", "stage", "start")
            self.job.aggregator.start_test()
            for plugin in self.plugins.values():
                logger.debug("Starting %s", plugin)
                start_time = time.time()
                plugin.start_test()
                logger.info("Plugin {0:s} required {1:f} seconds to start".format(plugin,
                                                                                  time.time() - start_time))

    def wait_for_finish(self):
        """
        Call is_test_finished() on all plugins 'till one of them initiates exit
        """
        if not self.interrupted.is_set():
            logger.info("Waiting for test to finish...")
            logger.info('Artifacts dir: {dir}'.format(dir=self.artifacts_dir))
            self.publish("core", "stage", "shoot")
            if not self.plugins:
                raise RuntimeError("It's strange: we have no plugins loaded...")

        while not self.interrupted.is_set():
            begin_time = time.time()
            aggr_retcode = self.job.aggregator.is_test_finished()
            if aggr_retcode >= 0:
                return aggr_retcode
            for plugin in self.plugins.values():
                logger.debug("Polling %s", plugin)
                retcode = plugin.is_test_finished()
                if retcode >= 0:
                    return retcode
            end_time = time.time()
            diff = end_time - begin_time
            logger.debug("Polling took %s", diff)
            logger.debug("Tank status: %s", json.dumps(self.status))
            # screen refresh every 0.5 s
            if diff < 0.5:
                time.sleep(0.5 - diff)
        return 1

    def plugins_end_test(self, retcode):
        """        Call end_test() on all plugins        """
        logger.info("Finishing test...")
        self.publish("core", "stage", "end")
        logger.info("Stopping load generator and aggregator")
        retcode = self.job.aggregator.end_test(retcode)
        logger.debug("RC after: %s", retcode)

        logger.info('Stopping monitoring')
        for plugin in self.job.monitoring_plugins:
            logger.info('Stopping %s', plugin)
            retcode = plugin.end_test(retcode) or retcode
            logger.info('RC after: %s', retcode)

        for plugin in [p for p in self.plugins.values() if
                       p is not self.job.generator_plugin and p not in self.job.monitoring_plugins]:
            logger.debug("Finalize %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.end_test(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed finishing plugin %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        return retcode

    def plugins_post_process(self, retcode):
        """
        Call post_process() on all plugins
        """
        logger.info("Post-processing test...")
        self.publish("core", "stage", "post_process")
        for plugin in self.plugins.values():
            logger.debug("Post-process %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.post_process(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed post-processing plugin %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        return retcode

    def interrupt(self):
        logger.warning('Interrupting')

    def __setup_taskset(self, affinity, pid=None, args=None):
        """ if pid specified: set process w/ pid `pid` CPU affinity to specified `affinity` core(s)
            if args specified: modify list of args for Popen to start w/ taskset w/ affinity `affinity`
        """
        self.taskset_path = self.get_option(self.SECTION, 'taskset_path')

        if args:
            return [self.taskset_path, '-c', affinity] + args

        if pid:
            args = "%s -pc %s %s" % (self.taskset_path, affinity, pid)
            retcode, stdout, stderr = execute(args, shell=True, poll_period=0.1, catch_out=True)
            logger.debug('taskset for pid %s stdout: %s', pid, stdout)
            if retcode == 0:
                logger.info("Enabled taskset for pid %s with affinity %s", str(pid), affinity)
            else:
                logger.debug('Taskset setup failed w/ retcode :%s', retcode)
                raise KeyError(stderr)

    def _collect_artifacts(self, validation_failed=False):
        logger.debug("Collecting artifacts")
        logger.info("Artifacts dir: %s", self.artifacts_dir)
        for filename, keep in self.artifact_files.items():
            try:
                self.__collect_file(filename, keep)
            except Exception as ex:
                logger.warn("Failed to collect file %s: %s", filename, ex)

    def get_option(self, section, option, default=None):
        return self.config.get_option(section, option, default)

    def set_option(self, section, option, value):
        """
        Set an option in storage
        """
        raise NotImplementedError

    def set_exitcode(self, code):
        self.output['core']['exitcode'] = code

    def get_plugin_of_type(self, plugin_class):
        """
        Retrieve a plugin of desired class, KeyError raised otherwise
        """
        logger.debug("Searching for plugin: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            if len(matches) > 1:
                logger.debug(
                    "More than one plugin of type %s found. Using first one.",
                    plugin_class)
            return matches[-1]
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_plugins_of_type(self, plugin_class):
        """
        Retrieve a list of plugins of desired class, KeyError raised otherwise
        """
        logger.debug("Searching for plugins: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            return matches
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_jobno(self, plugin_name='plugin_lunapark'):
        uploader_plugin = self.plugins[plugin_name]
        return uploader_plugin.lp_job.number

    def __collect_file(self, filename, keep_original=False):
        """
        Move or copy single file to artifacts dir
        """
        dest = self.artifacts_dir + '/' + os.path.basename(filename)
        logger.debug("Collecting file: %s to %s", filename, dest)
        if not filename or not os.path.exists(filename):
            logger.warning("File not found to collect: %s", filename)
            return

        if os.path.exists(dest):
            # FIXME: 3 find a way to store artifacts anyway
            logger.warning("File already exists: %s", dest)
            return

        if keep_original:
            shutil.copy(filename, self.artifacts_dir)
        else:
            shutil.move(filename, self.artifacts_dir)

        os.chmod(dest, 0o644)

    def add_artifact_file(self, filename, keep_original=False):
        """
        Add file to be stored as result artifact on post-process phase
        """
        if filename:
            logger.debug(
                "Adding artifact file to collect (keep=%s): %s", keep_original,
                filename)
            self.artifact_files[filename] = keep_original

    def add_artifact_to_send(self, lp_requisites, content):
        self.artifacts_to_send.append((lp_requisites, content))

    def apply_shorthand_options(self, options, default_section='DEFAULT'):
        for option_str in options:
            key, value = option_str.split('=')
            try:
                section, option = key.split('.')
            except ValueError:
                section = default_section
                option = key
            logger.debug(
                "Override option: %s => [%s] %s=%s", option_str, section,
                option, value)
            self.set_option(section, option, value)

    def mkstemp(self, suffix, prefix, directory=None):
        """
        Generate temp file name in artifacts base dir
        and close temp file handle
        """
        if not directory:
            directory = self.artifacts_dir
        fd, fname = tempfile.mkstemp(suffix, prefix, directory)
        os.close(fd)
        os.chmod(fname, 0o644)  # FIXME: chmod to parent dir's mode?
        return fname

    def publish(self, publisher, key, value):
        update_status(self.status, [publisher] + key.split('.'), value)

    def close(self):
        """
        Call close() for all plugins
        """
        logger.info("Close allocated resources...")
        for plugin in self.plugins.values():
            logger.debug("Close %s", plugin)
            try:
                plugin.close()
            except Exception as ex:
                logger.error("Failed closing plugin %s: %s", plugin, ex)
                logger.debug(
                    "Failed closing plugin: %s", traceback.format_exc(ex))

    @property
    def artifacts_dir(self):
        if not self._artifacts_dir:
            new_path = os.path.join(self.artifacts_base_dir, self.test_id)
            if not os.path.isdir(new_path):
                os.makedirs(new_path)
            os.chmod(new_path, 0o755)
            self._artifacts_dir = os.path.abspath(new_path)
        return self._artifacts_dir

    @staticmethod
    def get_user_agent():
        tank_agent = 'YandexTank/{}'.format(
            pkg_resources.require('yandextank')[0].version)
        py_info = sys.version_info
        python_agent = 'Python/{}.{}.{}'.format(
            py_info[0], py_info[1], py_info[2])
        os_agent = 'OS/{}'.format(platform.platform())
        return ' '.join((tank_agent, python_agent, os_agent))

    def register_plugin(self, plugin_name, instance):
        if self._plugins is None:
            self._plugins = {}
        if self._plugins.get(plugin_name, None) is not None:
            logger.exception('Plugin names should be unique')
        self._plugins[plugin_name] = instance

    def save_cfg(self, path):
        self.config.dump(path)

    def plugins_cleanup(self):
        for plugin_name, plugin in self.plugins.items():
            logger.info('Cleaning up plugin {}'.format(plugin_name))
            plugin.cleanup()
Code example #16
def test_get_plugins(config, expected):
    validated, errors, raw = TankConfig(config).validate()
    assert {(name, pack) for name, pack, cfg in validated.plugins} == expected
Code example #17
def test_validate_core(config, expected):
    validated, errors, initial = TankConfig(config, False).validate()
    assert validated.validated == expected, errors
Code example #18
    def __init__(self,
                 configs,
                 interrupted_event,
                 local_configs=None,
                 user_configs=None,
                 artifacts_base_dir=None,
                 artifacts_dir_name=None):
        """

        :param configs: list of dict
        :param interrupted_event: threading.Event
        """
        self.output = {}
        self.raw_configs = configs
        self.status = {}
        self._plugins = None
        self._artifacts_dir = artifacts_dir_name
        self.artifact_files = {}
        self.artifacts_to_send = []
        self._artifacts_base_dir = artifacts_base_dir
        self.manual_start = False
        self.scheduled_start = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self._cfg_snapshot = None
        self.local_configs = load_multiple(local_configs)
        self.user_configs = load_multiple(user_configs)
        self.configinitial = self.user_configs

        self.interrupted = interrupted_event

        self.error_log = None

        self.config, self.errors = TankConfig(
            self.raw_configs,
            with_dynamic_options=True,
            core_section=self.SECTION,
            error_output=ERROR_OUTPUT).validate()
        if not self.config:
            raise ValidationError(self.errors)
        self.test_id = self.get_option(
            self.SECTION, 'artifacts_dir',
            datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f"))
        self.lock_dir = self.get_option(self.SECTION, 'lock_dir')
        with open(os.path.join(self.artifacts_dir, LOCAL_CONFIG), 'w') as f:
            yaml.dump(self.local_configs, f)
        with open(os.path.join(self.artifacts_dir, USER_CONFIG), 'w') as f:
            yaml.dump(self.user_configs, f)
        configinfo = self.config.validated.copy()
        configinfo.setdefault(self.SECTION, {})
        configinfo[self.SECTION][self.API_JOBNO] = self.test_id
        with open(os.path.join(self.artifacts_dir, VALIDATED_CONF), 'w') as f:
            yaml.dump(configinfo, f)
        self.add_artifact_file(os.path.join(self.artifacts_dir, USER_CONFIG))
        self.add_artifact_file(os.path.join(self.artifacts_dir, LOCAL_CONFIG))
        self.add_artifact_file(os.path.join(self.artifacts_dir,
                                            VALIDATED_CONF))
        if self.errors:
            self.add_artifact_file(
                os.path.join(self.artifacts_dir, ERROR_OUTPUT))
Code example #19
def test_validate_core_error(config, expected):
    with pytest.raises(Exception) as e:
        TankConfig(config).validated
        print('exception value:\n', str(e.value))
    assert expected == e.value.errors
Code example #20
def test_setter(config, plugin, key, value):
    tankconfig = TankConfig(config)
    tankconfig._TankConfig__get_cfg_updater(plugin)(key, value)
    assert tankconfig.get_option(plugin, key) == value
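The tankconfig._TankConfig__get_cfg_updater(plugin) call above relies on Python's name mangling of double-underscore attributes; a small self-contained illustration of the same access pattern follows (the Demo class is not yandex-tank code).

class Demo:
    def __updater(self, key, value):  # mangled to _Demo__updater outside the class
        return {key: value}

print(Demo()._Demo__updater('address', 'localhost'))  # {'address': 'localhost'}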
Code example #21
File: api.py Project: yandex-load/controlcenter
def get_validation_result(cfg):
    config, errors, configinitial = TankConfig(
        [load_core_base_cfg()] + load_local_base_cfgs() + [cfg],
        with_dynamic_options=False).validate()
    return response(configinitial, errors)
Code example #22
File: tankcore.py Project: sabirovruslan/yandex-tank
class TankCore(object):
    """
    JMeter + dstat inspired :)
    """
    SECTION = 'core'
    SECTION_META = 'meta'
    PLUGIN_PREFIX = 'plugin_'
    PID_OPTION = 'pid'
    UUID_OPTION = 'uuid'

    def __init__(self, configs, artifacts_base_dir=None, artifacts_dir_name=None, cfg_depr=None):
        """

        :param configs: list of dict
        """
        self.raw_configs = configs
        self.config = TankConfig(self.raw_configs,
                                 with_dynamic_options=True,
                                 core_section=self.SECTION)
        self.status = {}
        self._plugins = None
        self._artifacts_dir = None
        self.artifact_files = {}
        self._artifacts_base_dir = None
        self.manual_start = False
        self.scheduled_start = None
        self.interrupted = False
        self.lock_file = None
        self.lock_dir = None
        self.taskset_path = None
        self.taskset_affinity = None
        self._job = None
        self.cfg_depr = cfg_depr
        self._cfg_snapshot = None

        self.interrupted = False
    #
    # def get_uuid(self):
    #     return self.uuid

    @property
    def cfg_snapshot(self):
        if not self._cfg_snapshot:
            self._cfg_snapshot = str(self.config)
        return self._cfg_snapshot

    @staticmethod
    def get_available_options():
        # todo: should take this from schema
        return [
            "artifacts_base_dir", "artifacts_dir",
            "taskset_path", "affinity"
        ]

    @property
    def plugins(self):
        """
        :returns: {plugin_name: plugin_class, ...}
        :rtype: dict
        """
        if self._plugins is None:
            self.load_plugins()
            if self._plugins is None:
                self._plugins = {}
        return self._plugins

    def save_config(self, filename):
        self.config.save(filename)

    @property
    def artifacts_base_dir(self):
        if not self._artifacts_base_dir:
            artifacts_base_dir = os.path.expanduser(self.get_option(self.SECTION, "artifacts_base_dir"))
            if not os.path.exists(artifacts_base_dir):
                os.makedirs(artifacts_base_dir)
                os.chmod(self.artifacts_base_dir, 0o755)
            self._artifacts_base_dir = artifacts_base_dir
        return self._artifacts_base_dir

    def load_plugins(self):
        """
        Tells core to take plugin options and instantiate plugin classes
        """
        logger.info("Loading plugins...")
        for (plugin_name, plugin_path, plugin_cfg, cfg_updater) in self.config.plugins:
            logger.debug("Loading plugin %s from %s", plugin_name, plugin_path)
            if plugin_path == "yandextank.plugins.Overload":
                logger.warning(
                    "Deprecated plugin name: 'yandextank.plugins.Overload'\n"
                    "There is a new generic plugin now.\n"
                    "Correcting to 'yandextank.plugins.DataUploader overload'")
                plugin_path = "yandextank.plugins.DataUploader overload"
            try:
                plugin = il.import_module(plugin_path)
            except ImportError:
                logger.warning('Plugin name %s path %s import error', plugin_name, plugin_path)
                logger.debug('Plugin name %s path %s import error', plugin_name, plugin_path, exc_info=True)
                raise
            try:
                instance = getattr(plugin, 'Plugin')(self, cfg=plugin_cfg, cfg_updater=cfg_updater)
            except AttributeError:
                logger.warning('Plugin %s classname should be `Plugin`', plugin_name)
                raise
            else:
                self.register_plugin(self.PLUGIN_PREFIX + plugin_name, instance)
        logger.debug("Plugin instances: %s", self._plugins)

    @property
    def job(self):
        if not self._job:
            # monitoring plugin
            try:
                mon = self.get_plugin_of_type(TelegrafPlugin)
            except KeyError:
                logger.debug("Telegraf plugin not found:", exc_info=True)
                mon = None
            # generator plugin
            try:
                gen = self.get_plugin_of_type(GeneratorPlugin)
            except KeyError:
                logger.warning("Load generator not found")
                gen = GeneratorPlugin()
            # aggregator
            aggregator = TankAggregator(gen)
            self._job = Job(monitoring_plugin=mon,
                            generator_plugin=gen,
                            aggregator=aggregator,
                            tank=socket.getfqdn())
        return self._job

    def plugins_configure(self):
        """        Call configure() on all plugins        """
        self.publish("core", "stage", "configure")

        logger.info("Configuring plugins...")
        self.taskset_affinity = self.get_option(self.SECTION, 'affinity')
        if self.taskset_affinity:
            self.__setup_taskset(self.taskset_affinity, pid=os.getpid())

        for plugin in self.plugins.values():
            logger.debug("Configuring %s", plugin)
            plugin.configure()

    def plugins_prepare_test(self):
        """ Call prepare_test() on all plugins        """
        logger.info("Preparing test...")
        self.publish("core", "stage", "prepare")
        for plugin in self.plugins.values():
            logger.debug("Preparing %s", plugin)
            plugin.prepare_test()

    def plugins_start_test(self):
        """        Call start_test() on all plugins        """
        logger.info("Starting test...")
        self.publish("core", "stage", "start")
        self.job.aggregator.start_test()
        for plugin in self.plugins.values():
            logger.debug("Starting %s", plugin)
            start_time = time.time()
            plugin.start_test()
            logger.info("Plugin {0:s} required {1:f} seconds to start".format(plugin,
                                                                              time.time() - start_time))

    def wait_for_finish(self):
        """
        Call is_test_finished() on all plugins until one of them initiates exit
        """

        logger.info("Waiting for test to finish...")
        logger.info('Artifacts dir: {dir}'.format(dir=self.artifacts_dir))
        self.publish("core", "stage", "shoot")
        if not self.plugins:
            raise RuntimeError("It's strange: we have no plugins loaded...")

        while not self.interrupted:
            begin_time = time.time()
            aggr_retcode = self.job.aggregator.is_test_finished()
            if aggr_retcode >= 0:
                return aggr_retcode
            for plugin in self.plugins.values():
                logger.debug("Polling %s", plugin)
                retcode = plugin.is_test_finished()
                if retcode >= 0:
                    return retcode
            end_time = time.time()
            diff = end_time - begin_time
            logger.debug("Polling took %s", diff)
            logger.debug("Tank status: %s", json.dumps(self.status))
            # screen refresh every 0.5 s
            if diff < 0.5:
                time.sleep(0.5 - diff)
        return 1
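
        # Note: a non-negative value from any is_test_finished() call above becomes the
        # result of this method; the bare `return 1` is reached only when
        # self.interrupted is set from outside the polling loop.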

    def plugins_end_test(self, retcode):
        """        Call end_test() on all plugins        """
        logger.info("Finishing test...")
        self.publish("core", "stage", "end")
        logger.info("Stopping load generator and aggregator")
        retcode = self.job.aggregator.end_test(retcode)
        logger.debug("RC after: %s", retcode)
        for plugin in [p for p in self.plugins.values() if p is not self.job.generator_plugin]:
            logger.debug("Finalize %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.end_test(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed finishing plugin %s: %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        return retcode

    def plugins_post_process(self, retcode):
        """
        Call post_process() on all plugins
        """
        logger.info("Post-processing test...")
        self.publish("core", "stage", "post_process")
        for plugin in self.plugins.values():
            logger.debug("Post-process %s", plugin)
            try:
                logger.debug("RC before: %s", retcode)
                retcode = plugin.post_process(retcode)
                logger.debug("RC after: %s", retcode)
            except Exception:  # FIXME too broad exception clause
                logger.error("Failed post-processing plugin %s: %s", plugin, exc_info=True)
                if not retcode:
                    retcode = 1
        self.__collect_artifacts()
        return retcode

    def __setup_taskset(self, affinity, pid=None, args=None):
        """ if pid specified: set process w/ pid `pid` CPU affinity to specified `affinity` core(s)
            if args specified: modify list of args for Popen to start w/ taskset w/ affinity `affinity`
        """
        self.taskset_path = self.get_option(self.SECTION, 'taskset_path')

        if args:
            return [self.taskset_path, '-c', affinity] + args

        if pid:
            args = "%s -pc %s %s" % (self.taskset_path, affinity, pid)
            retcode, stdout, stderr = execute(args, shell=True, poll_period=0.1, catch_out=True)
            logger.debug('taskset for pid %s stdout: %s', pid, stdout)
            if retcode == 0:
                logger.info("Enabled taskset for pid %s with affinity %s", str(pid), affinity)
            else:
                logger.debug('Taskset setup failed with retcode: %s', retcode)
                raise KeyError(stderr)
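
        # Illustrative usage (command and core mask are hypothetical): wrap a command
        # line before handing it to Popen --
        #     cmd = self.__setup_taskset('0-3', args=['./load_gen', 'load.conf'])
        # or pin an already running process --
        #     self.__setup_taskset('0-3', pid=os.getpid())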

    def __collect_artifacts(self):
        logger.debug("Collecting artifacts")
        logger.info("Artifacts dir: %s", self.artifacts_dir)
        for filename, keep in self.artifact_files.items():
            try:
                self.__collect_file(filename, keep)
            except Exception as ex:
                logger.warn("Failed to collect file %s: %s", filename, ex)

    def get_option(self, section, option, default=None):
        try:
            return self.config.get_option(section, option)
        except KeyError:
            # Assumption: a missing option surfaces as KeyError; honour `default` then.
            return default

    def set_option(self, section, option, value):
        """
        Set an option in storage
        """
        raise NotImplementedError

    def set_exitcode(self, code):
        self.config.validated['core']['exitcode'] = code

    def get_plugin_of_type(self, plugin_class):
        """
        Retrieve a plugin of the desired class; raises KeyError if none is found
        """
        logger.debug("Searching for plugin: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            if len(matches) > 1:
                logger.debug(
                    "More than one plugin of type %s found. Using the last one.",
                    plugin_class)
            return matches[-1]
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_plugins_of_type(self, plugin_class):
        """
        Retrieve all plugins of the desired class; raises KeyError if none is found
        """
        logger.debug("Searching for plugins: %s", plugin_class)
        matches = [plugin for plugin in self.plugins.values() if isinstance(plugin, plugin_class)]
        if matches:
            return matches
        else:
            raise KeyError("Requested plugin type not found: %s" % plugin_class)

    def get_jobno(self, plugin_name='plugin_lunapark'):
        uploader_plugin = self.plugins[plugin_name]
        return uploader_plugin.lp_job.number

    def __collect_file(self, filename, keep_original=False):
        """
        Move or copy single file to artifacts dir
        """
        if not filename or not os.path.exists(filename):
            logger.warning("File not found to collect: %s", filename)
            return
        dest = os.path.join(self.artifacts_dir, os.path.basename(filename))
        logger.debug("Collecting file: %s to %s", filename, dest)

        if os.path.exists(dest):
            # FIXME: 3 find a way to store artifacts anyway
            logger.warning("File already exists: %s", dest)
            return

        if keep_original:
            shutil.copy(filename, self.artifacts_dir)
        else:
            shutil.move(filename, self.artifacts_dir)

        os.chmod(dest, 0o644)

    def add_artifact_file(self, filename, keep_original=False):
        """
        Add file to be stored as result artifact on post-process phase
        """
        if filename:
            logger.debug(
                "Adding artifact file to collect (keep=%s): %s", keep_original,
                filename)
            self.artifact_files[filename] = keep_original

    def apply_shorthand_options(self, options, default_section='DEFAULT'):
        for option_str in options:
            key, value = option_str.split('=', 1)
            try:
                section, option = key.split('.')
            except ValueError:
                section = default_section
                option = key
            logger.debug(
                "Override option: %s => [%s] %s=%s", option_str, section,
                option, value)
            self.set_option(section, option, value)
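
    # Illustrative call (the option names are placeholders, not a documented schema):
    #     core.apply_shorthand_options(['phantom.address=localhost:80', 'rps=100'])
    # The second item has no dot, so it falls back to the default section.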

    # todo: remove lock_dir from config
    def get_lock_dir(self):
        if not self.lock_dir:
            self.lock_dir = self.get_option(
                self.SECTION, "lock_dir")
        return os.path.expanduser(self.lock_dir)

    def get_lock(self, force=False, lock_dir=None):
        lock_dir = lock_dir if lock_dir else self.get_lock_dir()
        if not force and self.is_locked(lock_dir):
            raise LockError("Lock file(s) found")

        fh, self.lock_file = tempfile.mkstemp(
            '.lock', 'lunapark_', lock_dir)
        os.close(fh)
        os.chmod(self.lock_file, 0o644)
        self.config.save(self.lock_file)

    def write_cfg_to_lock(self):
        if self.lock_file:
            self.config.save(self.lock_file)

    def release_lock(self):
        if self.lock_file and os.path.exists(self.lock_file):
            logger.debug("Releasing lock: %s", self.lock_file)
            os.remove(self.lock_file)

    @classmethod
    def is_locked(cls, lock_dir='/var/lock'):
        retcode = False
        for filename in os.listdir(lock_dir):
            if fnmatch.fnmatch(filename, LOCK_FILE_WILDCARD):
                full_name = os.path.join(lock_dir, filename)
                logger.info("Lock file is found: %s", full_name)
                try:
                    with open(full_name) as f:
                        running_cfg = yaml.safe_load(f)
                    pid = running_cfg.get(TankCore.SECTION, {}).get(cls.PID_OPTION)
                    if not pid:
                        logger.warning(
                            'Failed to get {}.{} from lock file {}'.format(
                                TankCore.SECTION, cls.PID_OPTION, full_name))
                    else:
                        if not pid_exists(int(pid)):
                            logger.debug(
                                "Lock PID %s does not exist, ignoring and "
                                "trying to remove", pid)
                            try:
                                os.remove(full_name)
                            except Exception as exc:
                                logger.debug(
                                    "Failed to delete lock %s: %s", full_name, exc)
                        else:
                            retcode = True
                except Exception as exc:
                    logger.warning(
                        "Failed to load info from lock %s: %s", full_name, exc)
                    retcode = True
        return retcode
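
    # Lock protocol as implemented above: get_lock() drops a lunapark_*.lock file into
    # lock_dir and serializes the current config into it (expected to carry the core
    # PID under PID_OPTION); is_locked() treats a lock as live only while that PID
    # exists and removes stale lock files it finds.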

    def mkstemp(self, suffix, prefix, directory=None):
        """
        Generate temp file name in artifacts base dir
        and close temp file handle
        """
        if not directory:
            directory = self.artifacts_base_dir
        fd, fname = tempfile.mkstemp(suffix, prefix, directory)
        os.close(fd)
        os.chmod(fname, 0o644)  # FIXME: chmod to parent dir's mode?
        return fname

    def publish(self, publisher, key, value):
        update_status(self.status, [publisher] + key.split('.'), value)

    def close(self):
        """
        Call close() for all plugins
        """
        logger.info("Close allocated resources...")
        self.release_lock()
        for plugin in self.plugins.values():
            logger.debug("Close %s", plugin)
            try:
                plugin.close()
            except Exception as ex:
                logger.error("Failed closing plugin %s: %s", plugin, ex)
                logger.debug(
                    "Failed closing plugin: %s", traceback.format_exc(ex))

    @property
    def artifacts_dir(self):
        if not self._artifacts_dir:
            dir_name = self.get_option(self.SECTION, 'artifacts_dir')
            if not dir_name:
                date_str = datetime.datetime.now().strftime(
                    "%Y-%m-%d_%H-%M-%S.")
                dir_name = tempfile.mkdtemp("", date_str, self.artifacts_base_dir)
            elif not os.path.isdir(dir_name):
                os.makedirs(dir_name)
            os.chmod(dir_name, 0o755)
            self._artifacts_dir = os.path.abspath(dir_name)
        return self._artifacts_dir

    @staticmethod
    def get_user_agent():
        tank_agent = 'YandexTank/{}'.format(
            pkg_resources.require('yandextank')[0].version)
        py_info = sys.version_info
        python_agent = 'Python/{}.{}.{}'.format(
            py_info[0], py_info[1], py_info[2])
        os_agent = 'OS/{}'.format(platform.platform())
        return ' '.join((tank_agent, python_agent, os_agent))
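
        # Example of the resulting string (versions and platform are illustrative only):
        #     "YandexTank/1.12.13 Python/3.6.9 OS/Linux-4.15.0-generic-x86_64-with-Ubuntu-18.04-bionic"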

    def register_plugin(self, plugin_name, instance):
        if self._plugins is None:
            self._plugins = {}
        if self._plugins.get(plugin_name, None) is not None:
            logger.exception('Plugin names must be unique; %s is already registered', plugin_name)
        self._plugins[plugin_name] = instance