Example #1
from swagger_client.models.pub_key import PubKey
from swagger_client.configuration import Configuration
from swagger_client.api_client import ApiClient
# The generated API classes are assumed to live under swagger_client.apis;
# depending on the codegen version the package may be swagger_client.api instead.
from swagger_client.apis.external_api import ExternalApi
from swagger_client.apis.internal_api import InternalApi

from nose.tools import assert_equals
from testconfig import config
from waiting import wait

import msgpack
import base58

EXT_API = {}
for node, node_config in config['nodes'].items():
    empty_config = Configuration()
    empty_config.host = node_config['host'] + ':' + str(
        node_config['ports']['external_api']) + '/v2'
    EXT_API[node] = ExternalApi(ApiClient(empty_config))

INT_API = {}
for node, node_config in config['nodes'].items():
    empty_config = Configuration()
    empty_config.host = node_config['host'] + ':' + str(
        node_config['ports']['internal_api']) + '/v2'
    INT_API[node] = InternalApi(ApiClient(empty_config))


def external_api(name):
    return EXT_API[name]


def internal_api(name):
    return INT_API[name]
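
The two accessors above are what test cases use to reach a particular node from the nose testconfig file. A minimal usage sketch, assuming the config defines a node under the key 'node1' (the name is illustrative, not taken from this module):

def test_node_reports_top_block():
    # 'node1' is an assumed key in the testconfig 'nodes' section.
    top = external_api('node1').get_top_block()
    print("node1 is at height " + str(top.height))
    assert_equals(top.height >= 0, True)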
Example #2
import logging
import os
import shutil
import sys
import time

import urllib3

# Imports from the generated swagger package (module paths may vary by codegen version).
from swagger_client.configuration import Configuration
from swagger_client.api_client import ApiClient
from swagger_client.apis.external_api import ExternalApi
from swagger_client.rest import ApiException

# SETUP, read_argv, extract_tarball, setup_node, start_node, stop_node,
# wait_all_nodes_are_online and tail_logs are defined elsewhere in this test module.


def main(argv):
    logging.getLogger("urllib3").setLevel(logging.ERROR)
    root_dir, tar_file_name, blocks_to_mine, version = read_argv(argv)
    curr_dir = os.getcwd()
    temp_dir_dev1 = os.path.join(root_dir, "node1")
    os.makedirs(temp_dir_dev1)

    temp_dir_dev2 = os.path.join(root_dir, "node2")
    temp_dir_dev3 = os.path.join(root_dir, "node3")

    print("Tar name: " + tar_file_name)
    extract_tarball(tar_file_name, temp_dir_dev1)
    shutil.copytree(temp_dir_dev1, temp_dir_dev2)
    shutil.copytree(temp_dir_dev1, temp_dir_dev3)

    node_names = ["node1", "node2", "node3"]
    node_dirs = [temp_dir_dev1, temp_dir_dev2, temp_dir_dev3]
    for n, d in zip(node_names, node_dirs):
        setup_node(n, d, curr_dir, version)
    for d in node_dirs:
        start_node(d)

    node_objs = []
    for n in node_names:
        # Each client needs its own Configuration; sharing one instance would
        # leave every client pointing at the last host assigned.
        node_config = Configuration()
        node_config.host = SETUP[n]["host"]
        node_objs.append(ExternalApi(ApiClient(configuration=node_config)))

    wait_all_nodes_are_online(node_objs)

    top = node_objs[0].get_top_block()
    height = top.height
    max_height = blocks_to_mine + height
    test_failed = False
    try:
        print("Will mine till block " + str(max_height))
        while height < max_height:
            time.sleep(1)  # check every second
            for name, node in zip(node_names, node_objs):
                top = node.get_top_block()  # node is alive and mining
                print("[" + name + "] height=" + str(top.height))
                height = max(height, top.height)
            print("")
    except (ApiException, urllib3.exceptions.MaxRetryError):
        test_failed = True
        print("node died")
    for d in node_dirs:
        stop_node(d)

    if not test_failed:
        print(
            "Checking that nodes are able to start with persisted non-empty DB"
        )
        for d in node_dirs:
            start_node(d)
        wait_all_nodes_are_online(node_objs)
        for d in node_dirs:
            stop_node(d)

    if test_failed:
        for name, node_dir in zip(node_names, node_dirs):
            print(name + " logs:")
            print(tail_logs(node_dir, "epoch.log"))
            print("\n")
        sys.exit("FAILED")
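
Example #2 calls a wait_all_nodes_are_online helper that is defined elsewhere in the module. A minimal sketch of what such a helper could look like, assuming the waiting package imported in Example #1 and the same ExternalApi clients (the project's actual implementation may differ):

def wait_all_nodes_are_online(api_clients, timeout_seconds=30):
    def node_online(api):
        # A node counts as online once its top block can be fetched.
        try:
            api.get_top_block()
            return True
        except Exception:
            return False

    # waiting.wait polls the predicate until it returns True or the timeout expires.
    wait(lambda: all(node_online(api) for api in api_clients),
         timeout_seconds=timeout_seconds,
         sleep_seconds=1)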