Example 1
    @classmethod
    def run(cls, name, genesis_path, pipe_conn, seed, batch_size, batch_rate,
            req_kind, buff_req, wallet_key, pool_config, send_mode, mask_sign,
            taa_text, taa_version, ext_set, log_dir, log_lvl, short_stat):
        # When signal masking is requested, set up a per-process log file and
        # ignore SIGINT so a Ctrl-C does not interrupt this worker directly.
        if mask_sign:
            logger_init(log_dir, "{}.log".format(name), log_lvl)
            signal.signal(signal.SIGINT, signal.SIG_IGN)

        logging.getLogger(name).info("starting")

        # Extension settings arrive as a JSON string; fall back to an empty
        # dict if they are missing or malformed.
        exts = {}
        if ext_set and isinstance(ext_set, str):
            try:
                exts = json.loads(ext_set)
            except Exception as e:
                logging.getLogger(name).warning("{} parse ext settings error {}".format(name, e))
                exts = {}

        # Build the client and drive its test coroutine on the client's own
        # event loop; run_forever() blocks until the loop is stopped.
        cln = cls(name, pipe_conn, batch_size, batch_rate, req_kind, buff_req,
                  pool_config, send_mode, short_stat, **exts)
        try:
            asyncio.run_coroutine_threadsafe(cln.run_test(genesis_path, seed, wallet_key, taa_text, taa_version),
                                             loop=cln._loop)
            cln._loop.run_forever()
        except Exception as e:
            logging.getLogger(name).exception("running error {}".format(e))
        # Dump the final statistics and hand them back to the caller.
        stat = cln._stat.dump_stat(dump_all=True)

        logging.getLogger(name).info("stopped")
        return stat
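
Example 1 is the per-process entry point of a load client: it sets up its own event loop, runs the test coroutine, and reports back through pipe_conn. As a rough sketch of the launch pattern it implies, the snippet below spawns one worker per client and hands it the child end of a multiprocessing.Pipe; worker_entry and the payload shape are illustrative stand-ins, not code from the source.

import multiprocessing


def worker_entry(name, pipe_conn):
    # Stand-in for a LoadClient.run-style entry point: do the work,
    # then report progress/results back to the parent over the pipe.
    pipe_conn.send({"name": name, "sent": 0, "succ": 0})


if __name__ == "__main__":
    parent_conn, child_conn = multiprocessing.Pipe()
    proc = multiprocessing.Process(target=worker_entry, args=("client_0", child_conn))
    proc.start()
    print(parent_conn.recv())  # {'name': 'client_0', 'sent': 0, 'succ': 0}
    proc.join()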
Example 2
    # Default to the well-known Trustee seed when none was supplied.
    if not dict_args["seed"]:
        dict_args["seed"].append("000000000000000000000000Trustee1")

    dict_args["genesis_path"] = check_fs(False, dict_args["genesis_path"])
    dict_args["out_dir"] = check_fs(True, dict_args["out_dir"])

    # Prepare output directory
    out_dir = dict_args["out_dir"]
    test_name = "load_test_{}".format(datetime.now().strftime("%Y%m%d_%H%M%S"))
    out_dir = os.path.expanduser(os.path.join(out_dir, test_name))
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # Initialize logger
    log_lvl = dict_args["log_lvl"]
    logger_init(out_dir, "{}.log".format(test_name), log_lvl)

    check_genesis(dict_args["genesis_path"])

    if dict_args["test_conn"]:
        exit(0)

    # Build the load runner from the parsed arguments.
    tr = LoadRunner(dict_args["clients"],
                    dict_args["genesis_path"],
                    dict_args["seed"],
                    dict_args["req_kind"],
                    dict_args["batch_size"],
                    dict_args["refresh_rate"],
                    dict_args["buff_req"],
                    out_dir,
                    dict_args["val_sep"],
Example 3
    def __init__(
            self,
            clients=0,
            genesis_path="~/.indy-cli/networks/sandbox/pool_transactions_genesis",
            seed=["000000000000000000000000Trustee1"],
            req_kind="nym",
            batch_size=10,
            refresh_rate=10,
            buff_req=30,
            out_dir=".",
            val_sep="|",
            wallet_key="key",
            mode="p",
            pool_config='',
            sync_mode="freeflow",
            load_rate=10,
            out_file="",
            load_time=0,
            ext_set=None,
            client_runner=LoadClient.run,
            log_lvl=logging.INFO,
            short_stat=False):
        self._client_runner = client_runner
        self._clients = dict()  # key: process future, value: ClientRunner
        self._loop = asyncio.get_event_loop()
        self._out_dir = ""
        self._succ_f = None
        self._failed_f = None
        self._total_f = None
        self._nacked_f = None
        self._start_counter = time.perf_counter()
        self._proc_count = clients if clients > 0 else multiprocessing.cpu_count()
        self._refresh_rate = refresh_rate if refresh_rate > 0 else 10
        self._buff_req = buff_req if buff_req >= 0 else 30
        self._value_separator = val_sep if val_sep != "" else "|"
        self._batch_size = batch_size if batch_size > 0 else 10
        self._stop_sec = load_time if load_time > 0 else 0
        self._genesis_path = genesis_path
        self._seed = seed
        self._req_kind = req_kind
        self._wallet_key = wallet_key
        self._sync_mode = sync_mode
        self._start_sync = sync_mode in ['all', 'one']
        self._mode = mode
        self._pool_config = None
        lr = load_rate if load_rate > 0 else 10
        # batch_rate is the delay between batches: the reciprocal of the
        # requested load rate (e.g. load_rate=10 -> 0.1 s between batches).
        self._batch_rate = 1 / lr
        self._ext_set = ext_set
        if pool_config:
            try:
                self._pool_config = json.loads(pool_config)
            except Exception as ex:
                raise RuntimeError(
                    "pool_config param is ill-formed JSON: {}".format(ex))

        test_name = "load_test_{}".format(
            datetime.now().strftime("%Y%m%d_%H%M%S"))
        self._log_dir = os.path.join(out_dir, test_name)
        self._log_lvl = log_lvl
        logger_init(self._log_dir, "{}.log".format(test_name), self._log_lvl)
        self._logger = logging.getLogger(__name__)
        self._out_file = self.prepare_fs(out_dir, test_name, out_file)
        self._short_stat = short_stat
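
One Python detail worth flagging in this constructor: seed=["000000000000000000000000Trustee1"] is a mutable default argument, so every call that relies on the default shares the same list object. The usual idiom is to default to None and build a fresh list in the body; a minimal sketch of that pattern (not the project's code):

class RunnerSketch:
    def __init__(self, seed=None):
        # Copy or create the list per instance to avoid sharing a module-level default.
        self._seed = list(seed) if seed else ["000000000000000000000000Trustee1"]

a = RunnerSketch()
b = RunnerSketch()
a._seed.append("extra-seed")
assert b._seed == ["000000000000000000000000Trustee1"]  # b keeps its own list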