import os

import click
import nmpi


def check(server_env=None):
    """
    Check for completed jobs
    """
    # todo: add a "continuous" mode so that this can run as a background job
    #       this will require locking the incomplete jobs file - see https://filelock.readthedocs.io/
    config = load_config()
    client = nmpi.Client(username=config["username"],
                         job_service=_url_from_env(server_env))
    incomplete_jobs = read_incomplete_jobs()
    completed_jobs = []
    for job_config in incomplete_jobs:
        job = client.get_job(job_config["job_id"])
        if job["status"] == "finished":
            client.download_data(job, local_dir=job_config["output_dir"])
            completed_jobs.append(job_config)
            click.echo("Job #{} complete".format(job["id"]))
        elif job["status"] == "error":
            completed_jobs.append(job_config)
            click.echo("Job #{} errored".format(job["id"]))
            with open(os.path.join(job_config["output_dir"],
                                   "job_{}.log".format(job["id"])), 'w') as fp:
                fp.write(job["log"])
    for job_config in completed_jobs:
        incomplete_jobs.remove(job_config)
    write_incomplete_jobs(incomplete_jobs)
def run(script, platform, batch, output_dir, tag, server_env):
    """
    Run a simulation/emulation
    """
    config = load_config()
    client = nmpi.Client(username=config["username"],
                         job_service=_url_from_env(server_env))
    if os.path.exists(os.path.expanduser(script)):
        if os.path.isdir(script):
            source = script
            command = "run.py {system}"
        else:
            source = os.path.dirname(script)
            if len(source) == 0:
                source = "."
            command = "{} {{system}}".format(os.path.basename(script))
    else:
        raise click.ClickException("Script '{}' does not exist".format(script))
    job = client.submit_job(source,
                            platform=platform or config["default_platform"],
                            collab_id=config["collab_id"],
                            config=config.get("hardware_config", None),
                            inputs=None,
                            command=command,
                            tags=tag,
                            wait=not batch)
    output_dir = output_dir or config.get("default_output_dir", ".")
    if batch:
        # save job_id for later checking
        write_incomplete_jobs(read_incomplete_jobs() + [{
            "job_id": job,
            "output_dir": output_dir
        }])
    else:
        if job["status"] == 'finished':
            client.download_data(job, local_dir=output_dir)
        else:
            assert job["status"] == 'error'
            click.echo(job["log"])
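# The two commands above rely on helpers that are not shown here (load_config,
# read_incomplete_jobs, write_incomplete_jobs, _url_from_env). A minimal sketch of
# the file-based ones, assuming JSON files in the user's home directory; the file
# names and layout are assumptions, not the actual implementation.
import json

CONFIG_FILE = os.path.expanduser("~/.nmpi_config.json")                    # hypothetical path
INCOMPLETE_JOBS_FILE = os.path.expanduser("~/.nmpi_incomplete_jobs.json")  # hypothetical path


def load_config():
    """Read the user configuration (username, collab_id, defaults) from disk."""
    with open(CONFIG_FILE) as fp:
        return json.load(fp)


def read_incomplete_jobs():
    """Return the list of batch-mode jobs whose results have not yet been collected."""
    if not os.path.exists(INCOMPLETE_JOBS_FILE):
        return []
    with open(INCOMPLETE_JOBS_FILE) as fp:
        return json.load(fp)


def write_incomplete_jobs(job_list):
    """Overwrite the list of pending batch jobs."""
    with open(INCOMPLETE_JOBS_FILE, "w") as fp:
        json.dump(job_list, fp, indent=2)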
try:                                    # Python 2
    from urlparse import urlparse
    from urllib import urlopen
    from urllib2 import HTTPError
except ImportError:                     # Python 3
    from urllib.parse import urlparse
    from urllib.request import urlopen
    from urllib.error import HTTPError
import os
import json
import tempfile
import shutil

from sh import git

import nmpi
import requests

BENCHMARKS_SERVER = "https://benchmarks.hbpneuromorphic.eu"
BENCHMARKS_COLLAB = "510"

job_manager = nmpi.Client("testuser123",
                          password=os.environ["BENCHMARK_RUNNER_PASSWORD"])

queue_name_map = {
    "SpiNNaker": "SpiNNaker",
    "BrainScaleS": "BrainScaleS",
    "Spikey": "Spikey",
    "NEST": "nest-server"
}

repositories = [
    #"https://github.com/CNRS-UNIC/hardware-benchmarks.git",
    #"https://github.com/apdavison/pynam.git",
    "https://github.com/hbp-unibi/SNABSuite_deploy",
]
#!/usr/bin/env python
"""
Poll-based monitoring of the live log of the smallest running job ID
"""
import os
import nmpi
import time

USERNAME = '******'
COLLAB_ID = 14744

c = nmpi.Client(USERNAME)
latest_job = int(c.completed_jobs(COLLAB_ID)[0].split('/')[-1])
URI = c.resource_map['queue'] + '/{}'


def find_running_job(curr):
    try:
        status = c.job_status(URI.format(curr + 1))
        if status == 'submitted':
            return curr
        print('Skipping job #{} ({})...'.format(curr, status))
        return find_running_job(curr + 1)
    except Exception as ex:
        if 'no such job' in str(ex).lower():
            return curr
        else:
            raise ex


while True:
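    # A possible polling step (sketch only): find the lowest-numbered job still in
    # the queue and dump its current log. Uses find_running_job from above and
    # c.get_job as seen in the other scripts here; the 10-second interval and the
    # output format are assumptions.
    current = find_running_job(latest_job)
    job = c.get_job(current)
    print('--- job #{} [{}] ---'.format(current, job['status']))
    print(job.get('log') or '(no log output yet)')
    time.sleep(10)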
""" """ from time import sleep from os.path import splitext, join from urlparse import urlparse from urllib import urlopen import json import nmpi import requests BENCHMARKS_SERVER = "https://benchmarks.hbpneuromorphic.eu" job_manager = nmpi.Client("testuser123") repositories = ("https://github.com/CNRS-UNIC/hardware-benchmarks.git", ) def main(platform): for repository in repositories: code_dir = update_repository(repository) models = get_models(code_dir) for model in models: for task in model["tasks"]: job = run_job(repository, task, platform) results = get_results(job) save_results(model, task, results, job)
import nmpi
import sys

print("introduce script")  # prompt text; the command to run is read from the command line below
inp = str(sys.argv[1])

client = nmpi.Client("rafaperez")

job = client.submit_job(
    source="https://github.com/rafapb97/Spin",
    platform=nmpi.SPINNAKER,
    #config = {"extra_pip_installs": ["snntoolbox"]},
    collab_id=89273,
    command=inp)
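# Follow-up sketch (an assumption, not part of the submission script above): poll the
# queue until the job leaves it, then fetch the output data. Only client methods used
# in the other scripts here (get_job, download_data) are called; the 30-second
# interval and the "results" output directory are arbitrary choices.
import time

while True:
    job_info = client.get_job(job)
    if job_info["status"] in ("finished", "error"):
        break
    time.sleep(30)

if job_info["status"] == "finished":
    client.download_data(job_info, local_dir="results")
else:
    print(job_info["log"])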
from __future__ import print_function, unicode_literals
import sys
import json

import nmpi

tokens = {}
for username in sys.argv[1:]:
    print(username, end=" ")
    c = nmpi.Client(username)
    assert c.user_info["username"] == username
    tokens[c.user_info["id"]] = "Bearer " + c.token
print(json.dumps(tokens))
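# Example invocation (the file name is hypothetical). nmpi.Client authenticates each
# username in turn (it may prompt for a password or reuse stored credentials), and
# the resulting {user_id: "Bearer <token>"} mapping is printed as JSON at the end:
#
#     python dump_tokens.py alice bob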