Example #1
def get_node(hostname):
    # Wrap a GCloud instance in a GCloudNode using the module-level
    # NODE_USERNAME / PROJECT / NODE_SSH_KEY_PATH settings.
    return GCloudNode(hostname,
                      username=NODE_USERNAME,
                      project=PROJECT,
                      ssh_key_path=NODE_SSH_KEY_PATH)
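
A minimal usage sketch; the instance name 'pytest-node-0' is only an assumption for illustration, and machine_status() is the status check used in Example #8:

node = get_node('pytest-node-0')
assert node.machine_status() == 'RUNNING'  # assumes the instance is already up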
Example #2
def get_nodes(pattern=None):
    # List the matching GCloud machines, then wrap each one in a GCloudNode
    # in parallel.
    machines = gcloud.list(pattern=pattern,
                           project=PROJECT,
                           username=NODE_USERNAME,
                           ssh_key_path=NODE_SSH_KEY_PATH)
    nodes = pmap(
        lambda machine: GCloudNode(machine.name,
                                   username=NODE_USERNAME,
                                   project=PROJECT,
                                   ssh_key_path=NODE_SSH_KEY_PATH), machines)
    return nodes
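
A hedged usage sketch, assuming the instances follow the pytest-node naming seen in the other examples and reusing the pmap helper already used in this snippet:

# Check every matching machine in parallel (machine_status() as in Example #8).
nodes = get_nodes(pattern='pytest-node-')
statuses = pmap(lambda n: n.machine_status(), nodes)
assert all(s == 'RUNNING' for s in statuses)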
Example #3
def get_node(i, prefix=''):
    n = GCloudNode(f'{prefix}{NODE_BASE_NAME}{i}')
    n.machine.username = NODE_USERNAME
    n.machine.ssh_key_path = NODE_SSH_KEY_PATH
    return n
Example #4
import pathlib
import sys

sys.path.append(str(pathlib.Path(__file__).resolve().parents[2] / 'lib'))

from cluster import GCloudNode, RpcNode
from configured_logger import logger
from utils import user_name
from concurrent.futures import ThreadPoolExecutor, as_completed
import datetime

validators = [None] * 100

while True:
    futures = {}
    with ThreadPoolExecutor(max_workers=20) as pool:
        for i in range(100):
            node = GCloudNode(f'pytest-node-{user_name()}-{i}')
            # Submit the bound method directly: a bare `lambda: node.validators()`
            # would capture `node` by reference and could end up querying the
            # wrong instance once the loop rebinds it.
            futures[pool.submit(node.validators)] = i

    # Leaving the `with` block waits for the workers, so every future is done.
    for f in as_completed(futures):
        i = futures[f]
        validators[i] = f.result()

    # Every node should report the same validator set.
    for v in validators[1:]:
        assert v == validators[0], f'{v} not equal to {validators[0]}'

    v0 = sorted(list(validators[0]))
    logger.info(
        f'{datetime.datetime.now(datetime.timezone.utc).isoformat()}, {len(v0)}, {v0}'
    )
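
The same fan-out can be written more compactly with the pmap helper from Example #2; a sketch assuming pmap is importable here as well:

# Passing the node as the lambda's argument avoids the closure pitfall entirely.
nodes = [GCloudNode(f'pytest-node-{user_name()}-{i}') for i in range(100)]
results = pmap(lambda node: node.validators(), nodes)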
Example #5
def query_node(i):
    # `f` is the list of per-node loggers built in Example #6 below.
    node = GCloudNode(f'pytest-node-{i}')
    chain_query(node, lambda b: print_chain_data(b, f[i]), max_blocks=20)
Example #6
import subprocess

# GCloudNode, chain_query and new_logger are assumed to be imported from the
# project's pytest lib, as in the other examples.


def print_chain_data(block, logger):
    # Log one line per block: height, hash, and a short summary of each chunk.
    chunks = []
    for c in block['chunks']:
        chunks.append(
            f'{c["chunk_hash"]} {c["shard_id"]} {c["height_created"]} {c["height_included"]}'
        )
    logger.info(
        f"{block['header']['height']} {block['header']['hash']} {','.join(chunks)}"
    )


# Output directory for per-node chain logs.
subprocess.run('mkdir -p /tmp/100_node/', shell=True)

# One dedicated logger (and output file) per node.
f = []
for node in range(100):
    f.append(new_logger(outfile=f'/tmp/100_node/pytest-node-{node}.txt'))


def query_node(i):
    node = GCloudNode(f'pytest-node-{i}')
    chain_query(node, lambda b: print_chain_data(b, f[i]), max_blocks=20)


# pmap(query_node, range(100))

node = GCloudNode('pytest-node-0')
chain_query(node,
            print_chain_data,
            block_hash='9rnC5G6qDpXgT4gTG4znowmdSUavC1etuV99F18ByxxK')
Example #7
def get_nodes():
    nodes = [GCloudNode(f'{NODE_BASE_NAME}{i}') for i in range(0, NUM_NODES)]
    for n in nodes:
        n.machine.username = NODE_USERNAME
        n.machine.ssh_key_path = NODE_SSH_KEY_PATH
    return nodes
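
A small usage sketch, assuming the NODE_BASE_NAME instances exist and reusing the addr() accessor shown in Example #8:

nodes = get_nodes()
for i, n in enumerate(nodes):
    print(f'{NODE_BASE_NAME}{i}:', n.addr())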
Example #8
import os
import subprocess

# init_cluster, start_cluster, GCloudNode and retry are assumed to be imported
# from the project's pytest lib, as in the other examples.

args = (3, 1, 1, {
    'local': False,
    'near_root': '../target/debug/',
    'remote': {
        'instance_name': 'near-pytest',
    }
}, [], [])

# Generate the node directories locally, then create the GCloud instance pool.
init_cluster(*args)
subprocess.run([
    os.path.join(os.path.dirname(os.path.realpath(__file__)),
                 "../../scripts/create_instance_pool.sh"),
    "near-pytest",
    "us-west2-a us-west2-b us-west2-c us-west2-a",
])
node_dirs = subprocess.check_output(
    "find ~/.near/* -maxdepth 0",
    shell=True).decode('utf-8').strip().split('\n')

# Sanity-check the first instance.
g = GCloudNode('near-pytest-0', node_dirs[0])
assert g.machine_status() == 'RUNNING'
g.is_ready()
print(g.addr())

start_cluster(*args)

# Switch the node to the staging build and wait for it to come back up.
g.change_version('staging')
retry.retry(lambda: g.is_ready(), 1200)

g.update_config_files("/tmp/near/test0")
g.start()

# Shut the machine down and confirm it stopped.
g.turn_off_machine()
assert g.machine_status() == 'STOPPED'
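
A hedged follow-on sketch: assuming the pool script above created one instance per node directory, named near-pytest-0, near-pytest-1, and so on, this surveys the whole pool using only methods already shown in this example:

for i, node_dir in enumerate(node_dirs):
    n = GCloudNode(f'near-pytest-{i}', node_dir)
    print(f'near-pytest-{i}:', n.machine_status())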