Example #1
def set_info_log_level(request):
    Logger.setLogLevel(logging.INFO)

    def reset():
        Logger.setLogLevel(logging.NOTSET)

    request.addfinalizer(reset)
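The snippet above uses request.addfinalizer, so it reads as the body of a pytest fixture. A minimal, self-contained sketch of how such a fixture might be declared, assuming the @pytest.fixture decorator and the imports that later examples on this page use:

import logging
import pytest
from stp_core.common.log import Logger

@pytest.fixture()  # assumed decorator; the excerpt above omits it
def set_info_log_level(request):
    # Raise the global log level for the duration of the test ...
    Logger.setLogLevel(logging.INFO)

    def reset():
        # ... and restore the default (NOTSET) afterwards.
        Logger.setLogLevel(logging.NOTSET)

    request.addfinalizer(reset)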
Example #2
def run_node(config, name, node_port, client_port):
    node_ha = HA("0.0.0.0", node_port)
    client_ha = HA("0.0.0.0", client_port)

    logFileName = os.path.join(config.baseDir, name + ".log")

    Logger(config)
    Logger().enableFileLogging(logFileName)

    logger = getlogger()
    logger.setLevel(config.logLevel)
    logger.debug("You can find logs in {}".format(logFileName))

    vars = [var for var in os.environ.keys() if var.startswith("INDY")]
    logger.debug("Indy related env vars: {}".format(vars))

    from stp_core.loop.looper import Looper
    from indy_node.server.node import Node
    with Looper(debug=config.LOOPER_DEBUG) as looper:
        node = Node(name,
                    nodeRegistry=None,
                    basedirpath=config.baseDir,
                    ha=node_ha,
                    cliha=client_ha)
        looper.add(node)
        looper.run()
Example #3
def set_info_log_level(request):
    Logger.setLogLevel(logging.INFO)

    def reset():
        Logger.setLogLevel(logging.NOTSET)

    request.addfinalizer(reset)
Example #4
def test_default_log_rotation_config_is_correct(tdir_for_func):
    log_dir_path = tdir_for_func
    log_file = os.path.join(log_dir_path, "log")
    logger = Logger()

    # Assert this doesn't fail
    logger.enableFileLogging(log_file)
Example #5
def setup_logging(tdir, tconf):
    Logger().apply_config(tconf)
    log_file_name = os.path.join(tdir, 'var', 'log', 'indy', 'sandbox',
                                 'Alpha' + ".log")
    Logger().enableFileLogging(log_file_name)
    logger = getlogger()
    logger.setLevel(tconf.logLevel)
Example #6
def run_node(config, name, node_ip, node_port, client_ip, client_port):
    node_ha = HA(node_ip, node_port)
    client_ha = HA(client_ip, client_port)

    node_config_helper = NodeConfigHelper(name, config)

    logFileName = os.path.join(node_config_helper.log_dir, name + ".log")

    logger = getlogger()
    Logger().apply_config(config)
    Logger().enableFileLogging(logFileName)

    logger.setLevel(config.logLevel)
    logger.debug("You can find logs in {}".format(logFileName))

    vars = [var for var in os.environ.keys() if var.startswith("INDY")]
    logger.debug("Indy related env vars: {}".format(vars))

    with Looper(debug=config.LOOPER_DEBUG) as looper:
        node = Node(name,
                    config_helper=node_config_helper,
                    ha=node_ha, cliha=client_ha)
        node = integrate(node_config_helper, node, logger)
        looper.add(node)
        looper.run()
Example #7
def test_default_log_rotation_config_is_correct(tdir_for_func):
    log_dir_path = tdir_for_func
    log_file = os.path.join(log_dir_path, "log")
    logger = Logger()

    # Assert this doesn't fail
    logger.enableFileLogging(log_file)
Example #8
    def __init__(self, seed, logFileName=None):
        if logFileName:
            Logger().enableFileLogging(logFileName)

        self._seed = seed

        self._client = None
        self._wallet = None

        self._looper = None
Example #9
    EditNodeClientIpTest, EditNodeClientPortTest, EditNodeBlsTest
from indy_node.test.auth_rule.auth_framework.pool_config import PoolConfigTest
from indy_node.test.auth_rule.auth_framework.restart import RestartTest
from indy_node.test.auth_rule.auth_framework.revoc_reg_def import AddRevocRegDefTest, \
    EditRevocRegDefTest
from indy_node.test.auth_rule.auth_framework.revoc_reg_entry import AddRevocRegEntryTest, EditRevocRegEntryTest
from indy_node.test.auth_rule.auth_framework.txn_author_agreement import TxnAuthorAgreementTest
from indy_node.test.auth_rule.auth_framework.txn_author_agreement_aml import TxnAuthorAgreementAMLTest
from indy_node.test.auth_rule.auth_framework.validator_info import ValidatorInfoTest
from indy_node.test.pool_config.conftest import poolConfigWTFF
from indy_node.test.upgrade.conftest import patch_packet_mgr_output, EXT_PKT_NAME, EXT_PKT_VERSION

nodeCount = 7

from stp_core.common.log import Logger
Logger().enableStdLogging()


class TestAuthRuleUsing():
    map_of_tests = OrderedDict({
        auth_map.adding_new_node.get_action_id():
        AddNewNodeTest,
        auth_map.adding_new_node_with_empty_services.get_action_id():
        AddNewNodeEmptyServiceTest,
        auth_map.demote_node.get_action_id():
        DemoteNodeTest,
        auth_map.promote_node.get_action_id():
        PromoteNodeTest,
        auth_map.change_node_ip.get_action_id():
        EditNodeIpTest,
        auth_map.change_node_port.get_action_id():
Example #10
    genTestClient, TestClient, createNym

# noinspection PyUnresolvedReferences
from plenum.test.conftest import tdir, nodeReg, up, ready, \
    whitelist, concerningLogLevels, logcapture, keySharedNodes, \
    startedNodes, tdirWithDomainTxns, txnPoolNodeSet, poolTxnData, dirName, \
    poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, tdirWithPoolTxns, \
    poolTxnStewardData, poolTxnStewardNames, getValueFromModule, \
    txnPoolNodesLooper, nodeAndClientInfoFilePath, patchPluginManager, \
    warncheck, warnfilters as plenum_warnfilters, setResourceLimits

# noinspection PyUnresolvedReferences
from sovrin_common.test.conftest import conf, tconf, poolTxnTrusteeNames, \
    domainTxnOrderedFields, looper

Logger.setLogLevel(logging.DEBUG)

@pytest.fixture(scope="session")
def warnfilters(plenum_warnfilters):
    def _():
        plenum_warnfilters()
        warnings.filterwarnings('ignore', category=ResourceWarning, message='unclosed file')
    return _


@pytest.fixture(scope="module")
def primes1():
    P_PRIME1, Q_PRIME1 = primes.get("prime1")
    return dict(p_prime=P_PRIME1, q_prime=Q_PRIME1)

Example #11
from plenum.common.util import getNoInstances
from plenum.server.notifier_plugin_manager import PluginManager
from plenum.test.helper import checkLastClientReqForNode, \
    waitForViewChange, requestReturnedToNode, randomText, \
    mockGetInstalledDistributions, mockImportModule, chk_all_funcs, \
    create_new_test_node, sdk_json_to_request_object, sdk_send_random_requests, \
    sdk_get_and_check_replies, sdk_set_protocol_version
from plenum.test.node_request.node_request_helper import checkPrePrepared, \
    checkPropagated, checkPrepared, checkCommitted
from plenum.test.plugin.helper import getPluginPath
from plenum.test.test_node import TestNode, Pool, \
    checkNodesConnected, ensureElectionsDone, genNodeReg, getPrimaryReplica, \
    getNonPrimaryReplicas
from plenum.common.config_helper import PConfigHelper, PNodeConfigHelper

Logger.setLogLevel(logging.INFO)
logger = getlogger()

GENERAL_CONFIG_DIR = 'etc/indy'

DEV_NULL_PATH = '/dev/null'
ROCKSDB_WRITE_BUFFER_SIZE = 256 * 1024


def get_data_for_role(pool_txn_data, role):
    name_and_seeds = []
    for txn in pool_txn_data['txns']:
        txn_data = get_payload_data(txn)
        if txn_data.get(ROLE) == role:
            name = txn_data[ALIAS]
            name_and_seeds.append((name, pool_txn_data['seeds'][name]))
Example #12
    whitelist, concerningLogLevels, logcapture, \
    tdirWithDomainTxns as PTdirWithDomainTxns, txnPoolNodeSet, poolTxnData, dirName, \
    poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, tdirWithPoolTxns, \
    poolTxnStewardData, poolTxnStewardNames, getValueFromModule, \
    txnPoolNodesLooper, patchPluginManager, tdirWithClientPoolTxns, \
    warncheck, warnfilters as plenum_warnfilters, setResourceLimits, do_post_node_creation

# noinspection PyUnresolvedReferences
from indy_common.test.conftest import tconf, general_conf_tdir, poolTxnTrusteeNames, \
    domainTxnOrderedFields, looper, config_helper_class, node_config_helper_class

from plenum.test.conftest import sdk_pool_handle as plenum_pool_handle, sdk_pool_name, sdk_wallet_steward, \
    sdk_wallet_handle, sdk_wallet_data, sdk_steward_seed, sdk_wallet_trustee, sdk_trustee_seed, trustee_data, \
    sdk_wallet_client, sdk_client_seed, poolTxnClientData, poolTxnClientNames, poolTxnData

Logger.setLogLevel(logging.DEBUG)


@pytest.fixture(scope="module")
def sdk_wallet_trust_anchor(looper, sdk_pool_handle, sdk_wallet_trustee):
    return sdk_add_new_nym(looper, sdk_pool_handle, sdk_wallet_trustee,
                           alias='TA-1', role='TRUST_ANCHOR')


@pytest.fixture(scope="session")
def warnfilters(plenum_warnfilters):
    def _():
        plenum_warnfilters()
        warnings.filterwarnings(
            'ignore', category=ResourceWarning, message='unclosed file')
Example #13
from plenum.common.util import getNoInstances
from plenum.server.notifier_plugin_manager import PluginManager
from plenum.test.helper import checkLastClientReqForNode, \
    waitForViewChange, requestReturnedToNode, randomText, \
    mockGetInstalledDistributions, mockImportModule, chk_all_funcs, \
    create_new_test_node, sdk_json_to_request_object, sdk_send_random_requests, \
    sdk_get_and_check_replies, sdk_set_protocol_version
from plenum.test.node_request.node_request_helper import checkPrePrepared, \
    checkPropagated, checkPrepared, checkCommitted
from plenum.test.plugin.helper import getPluginPath
from plenum.test.test_node import TestNode, Pool, \
    checkNodesConnected, ensureElectionsDone, genNodeReg, getPrimaryReplica, \
    getNonPrimaryReplicas
from plenum.common.config_helper import PConfigHelper, PNodeConfigHelper

Logger.setLogLevel(logging.INFO)
logger = getlogger()

GENERAL_CONFIG_DIR = 'etc/indy'

DEV_NULL_PATH = '/dev/null'
ROCKSDB_WRITE_BUFFER_SIZE = 256 * 1024


def get_data_for_role(pool_txn_data, role):
    name_and_seeds = []
    for txn in pool_txn_data['txns']:
        txn_data = get_payload_data(txn)
        if txn_data.get(ROLE) == role:
            name = txn_data[ALIAS]
            name_and_seeds.append((name, pool_txn_data['seeds'][name]))
Example #14
from plenum.common.util import randomString
from stp_core.loop.eventually import eventually

from plenum.common.constants import DOMAIN_LEDGER_ID, STEWARD_STRING

from plenum.test.pool_transactions.helper import prepare_nym_request, \
    sdk_sign_and_send_prepared_request
from plenum.test import waits
from plenum.test.helper import sdk_send_random_and_check, \
    sdk_get_and_check_replies, get_key_from_req

from stp_core.common.log import Logger
import logging


Logger.setLogLevel(logging.NOTSET)


ERORR_MSG = "something went wrong"

whitelist = [ERORR_MSG]

def testLoggingTxnStateForValidRequest(
        looper, logsearch, txnPoolNodeSet,
        sdk_pool_handle, sdk_wallet_client):
    logsPropagate, _ = logsearch(files=['propagator.py'], funcs=['propagate'],
                                 msgs=['propagating.*request.*from client'])
    logsOrdered, _ = logsearch(files=['replica.py'], funcs=['order_3pc_key'], msgs=['ordered batch request'])
    logsCommited, _ = logsearch(files=['node.py'], funcs=['executeBatch'], msgs=['committed batch request'])

    reqs = sdk_send_random_and_check(looper, txnPoolNodeSet, sdk_pool_handle,
Example #15
def demo_setup_logging(base_dir):
    Logger().enableCliLogging(out, override_tags={})
Example #16
from time import perf_counter

from plenum.common.signer_did import DidSigner
from indy_client.client.client import Client
from indy_client.client.wallet.wallet import Wallet
from indy_common.identity import Identity
from stp_core.common.log import getlogger, Logger
from stp_core.network.port_dispenser import genHa, HA
from stp_core.loop.looper import Looper
from plenum.test.helper import waitForSufficientRepliesForRequests
from indy_common.config_util import getConfig

numReqs = 100
splits = 1

Logger.setLogLevel(logging.WARNING)
logger = getlogger()


def sendRandomRequests(wallet: Wallet, client: Client, count: int):
    print('{} random requests will be sent'.format(count))
    for i in range(count):
        idr, signer = wallet.addIdentifier()
        idy = Identity(identifier=idr,
                       verkey=signer.verkey)
        wallet.addTrustAnchoredIdentity(idy)
    reqs = wallet.preparePending()
    return client.submitReqs(*reqs)[0]


def put_load():
Example #17
def test_apply_config():
    logger = Logger()
    with pytest.raises(ValueError):
        logger.apply_config(None)
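As a hedged companion to the ValueError check above (not part of the original listing): a happy-path sketch, assuming that getConfig() from indy_common.config_util (imported in Example #16) returns a config object that apply_config() accepts, just as tconf and config do in the setup_logging() and run_node() examples:

from stp_core.common.log import Logger
from indy_common.config_util import getConfig

def test_apply_config_accepts_real_config():
    # Assumption: the object returned by getConfig() carries the logging
    # attributes that apply_config() reads, mirroring
    # Logger().apply_config(tconf) in the examples above.
    Logger().apply_config(getConfig())  # expected not to raise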
Example #18
    def setupLogging(self, filePath):
        Logger().setLogLevel(agentLoggingLevel)
        Logger().enableFileLogging(filePath)
Example #19
from plenum.common.txn_util import getTxnOrderedFields
from plenum.common.types import PLUGIN_TYPE_STATS_CONSUMER, f
from plenum.common.util import getNoInstances, getMaxFailures
from plenum.server.notifier_plugin_manager import PluginManager
from plenum.test.helper import randomOperation, \
    checkReqAck, checkLastClientReqForNode, waitForSufficientRepliesForRequests, \
    waitForViewChange, requestReturnedToNode, randomText, \
    mockGetInstalledDistributions, mockImportModule, chk_all_funcs
from plenum.test.node_request.node_request_helper import checkPrePrepared, \
    checkPropagated, checkPrepared, checkCommitted
from plenum.test.plugin.helper import getPluginPath
from plenum.test.test_client import genTestClient, TestClient
from plenum.test.test_node import TestNode, TestNodeSet, Pool, \
    checkNodesConnected, ensureElectionsDone, genNodeReg

Logger.setLogLevel(logging.NOTSET)
logger = getlogger()
config = getConfig()


@pytest.mark.firstresult
def pytest_xdist_make_scheduler(config, log):
    return GroupedLoadScheduling(config, log)


@pytest.fixture(scope="session")
def warnfilters():
    def _():
        warnings.filterwarnings(
            'ignore',
            category=DeprecationWarning,
Example #20
    def reset():
        Logger.setLogLevel(logging.NOTSET)
Example #21
    def setupLogging(self, filePath):
        Logger().setLogLevel(agentLoggingLevel)
        Logger().enableFileLogging(filePath)
        self.setupRaetLogging(Console.Wordage.concise)
Example #22
    def reset():
        Logger.setLogLevel(logging.NOTSET)
Example #23
    def setupRaetLogging(self, level):
        Logger().setupRaet(raet_log_level=level)
Example #24
    def _enable_file_logging(self):
        path_to_log_file = os.path.join(self.config.LOG_DIR, LOG_FILE_NAME)
        Logger().enableFileLogging(path_to_log_file)
Example #25
from stp_core.common.log import getlogger, Logger
from plenum.test.helper import sendReqsToNodesAndVerifySuffReplies
from plenum.test.node_catchup.helper import waitNodeDataEquality, \
    check_ledger_state
from plenum.test.pool_transactions.helper import \
    disconnect_node_and_ensure_disconnected, buildPoolClientAndWallet
from plenum.test.test_node import checkNodesConnected, TestNode
from plenum.test import waits

# noinspection PyUnresolvedReferences
from plenum.test.node_catchup.conftest import whitelist, \
    nodeCreatedAfterSomeTxns, nodeSetWithNodeAddedAfterSomeTxns, newNodeCaughtUp
from plenum.test.pool_transactions.conftest import looper, clientAndWallet1, \
    client1, wallet1, client1Connected

Logger.setLogLevel(logging.WARNING)
logger = getlogger()
txnCount = 5

TestRunningTimeLimitSec = math.inf
"""
Since these tests expect a certain level of performance, they can fail, and for
now they should only be run when a perf check is required, e.g. after a relevant
protocol change. Setting `SkipTests` to False will run the tests in this
module.
"""
SkipTests = True
skipper = pytest.mark.skipif(SkipTests, reason='Perf optimisations not done')


@pytest.fixture(scope="module")
def main():
    global logger

    def check_unsigned(s):
        res = None
        try:
            res = int(s)
        except ValueError:
            pass
        if res is None or res <= 0:
            raise argparse.ArgumentTypeError(("{!r} is incorrect, "
                                              "should be int > 0").format(s, ))
        else:
            return res

    config_helper = ConfigHelper(config)

    parser = argparse.ArgumentParser(
        description=(
            "Tool to explore and gather statistics about running validator"),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        help="Verbose mode (command line)")
    parser.add_argument("--json",
                        action="store_true",
                        help="Format output as JSON (ignores -v)")
    parser.add_argument("--nagios",
                        action="store_true",
                        help="Format output as NAGIOS output (ignores -v)")

    statfile_group = parser.add_argument_group(
        "statfile", "settings for exploring validator stats from stat file")

    statfile_group.add_argument("--basedir",
                                metavar="PATH",
                                default=config_helper.node_info_dir,
                                help=("Path to stats files"))
    # statfile_group.add_argument(
    #     "--watch", action="store_true", help="Watch for stats file updates"
    # )

    # socket group is disabled for now due the feature is unsupported
    # socket_group = parser.add_argument_group(
    #     "socket", "settings for exploring validator stats from socket"
    # )
    #
    # socket_group.add_argument(
    #     "--listen", action="store_true",
    #     help="Listen socket for stats (ignores --statfile)"
    # )
    #
    # socket_group.add_argument(
    #     "-i", "--ip", metavar="IP", default=config.STATS_SERVER_IP,
    #     help="Server IP"
    # )
    # socket_group.add_argument(
    #     "-p", "--port", metavar="PORT", default=config.STATS_SERVER_PORT,
    #     type=check_unsigned, help="Server port"
    # )

    other_group = parser.add_argument_group("other", "other settings")

    other_group.add_argument("--log",
                             metavar="FILE",
                             default=os.path.join(
                                 config_helper.log_base_dir,
                                 os.path.basename(sys.argv[0] + ".log")),
                             help="Path to log file")

    args = parser.parse_args()

    remove_log_handlers()

    if args.log:
        set_log_owner(args.log)

    Logger().enableFileLogging(args.log)

    logger.debug("Cmd line arguments: {}".format(args))

    # is not supported for now
    # if args.listen:
    #     logger.info("Starting server on {}:{} ...".format(
    #       args.ip, args.port))
    #     print("Starting server on {}:{} ...".format(args.ip, args.port))
    #
    #     loop = asyncio.get_event_loop()
    #     coro = asyncio.start_server(accept_client,
    #                                 args.ip, args.port, loop=loop)
    #     server = loop.run_until_complete(coro)
    #
    #     logger.info("Serving on {}:{} ...".format(args.ip, args.port))
    #     print('Serving on {} ...'.format(server.sockets[0].getsockname()))
    #
    #     # Serve requests until Ctrl+C is pressed
    #     try:
    #         loop.run_forever()
    #     except KeyboardInterrupt:
    #         pass
    #
    #     logger.info("Stopping server ...")
    #     print("Stopping server ...")
    #
    #     # Close the server
    #     server.close()
    #     for task in clients.keys():
    #         task.cancel()
    #     loop.run_until_complete(server.wait_closed())
    #     loop.close()
    # else:
    all_paths = glob(os.path.join(args.basedir, "*_info.json"))

    files_by_node = dict()

    for path in all_paths:
        bn = os.path.basename(path)
        if not bn:
            continue
        node_name = bn.split("_", maxsplit=1)[0]
        if "additional" in bn:
            files_by_node.setdefault(node_name,
                                     {}).update({"additional": path})
        elif "version" in bn:
            files_by_node.setdefault(node_name, {}).update({"version": path})
        else:
            files_by_node.setdefault(node_name, {}).update({"info": path})
    if not files_by_node:
        print('There are no info files in {}'.format(args.basedir))
        return

    if args.json:
        allf = []
        for n, ff in files_by_node.items():
            allf.extend([v for k, v in ff.items()])
        out_json = compile_json_ouput(allf)
        if out_json:
            print(json.dumps(out_json, sort_keys=True))
            sys.exit(0)

    for node in files_by_node:
        inf_ver = [
            v for k, v in files_by_node[node].items()
            if k in ["info", "version"]
        ]
        json_data = compile_json_ouput(inf_ver)
        if json_data:
            if args.verbose:
                print("{}".format(os.linesep).join(
                    create_print_tree(json_data, lines=[])))
            else:
                print(
                    get_stats_from_file(json_data, args.verbose, args.json,
                                        args.nagios))

        print('\n')
    if args.verbose:
        for node in files_by_node:
            file_path = files_by_node[node].get("additional", "")
            if not file_path:
                continue
            json_data = read_json(file_path)
            if json_data:
                print("{}".format(os.linesep).join(
                    create_print_tree(json_data, lines=[])))

    logger.info("Done")