def test_log_file_encoding(capsys):
    """Verify that logging fancy Unicode through the file logger causes no encoding errors.

    `capsys` is the standard pytest fixture for capturing stdout/stderr, see
    https://docs.pytest.org/en/stable/capture.html#accessing-captured-output-from-a-test-function.
    """
    # On Windows the temporary file must be closed before the logging handler
    # can re-open it, so create it with `delete=False` and remove it manually.
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        tmp_file.close()
        enable_default_logger(log_file=tmp_file.name)

        logger = logging.getLogger("yapapi")
        logger.debug("| (• ◡•)| It's Adventure Time! (❍ᴥ❍ʋ)")

        # Close the file handler pointing at our temp file so it can be
        # unlinked (required on Windows).
        for handler in logger.handlers:
            if isinstance(handler, logging.FileHandler) and handler.baseFilename == tmp_file.name:
                handler.close()

        captured_err = capsys.readouterr().err
        assert "UnicodeEncodeError" not in captured_err
    finally:
        os.unlink(tmp_file.name)
def execute(self, steps: List):
    """Execute a list of steps sequentially, logging and skipping failed ones.

    Each step is run to completion on the current event loop.  If a step
    raises (or the user interrupts with Ctrl+C), the error is logged, the
    step's task is cancelled and awaited so it can clean up, and execution
    continues with the next step.

    :param steps: steps accepted by ``self.run_step``
    """
    enable_default_logger()
    loop = asyncio.get_event_loop()
    for step in steps:
        task = loop.create_task(self.run_step(step))
        try:
            # Reuse the loop obtained above instead of fetching it again.
            loop.run_until_complete(task)
        except (Exception, KeyboardInterrupt) as e:
            logger.error(e)
            task.cancel()
            try:
                # Await the cancelled task so its cleanup runs, but suppress
                # the resulting CancelledError (or the original exception
                # re-raised by an already-finished task).  Without this, the
                # re-raise escaped `execute` and aborted the remaining steps.
                loop.run_until_complete(task)
            except (Exception, asyncio.CancelledError):
                pass
async def main(argv):
    """Parse command-line arguments and run the transcoding workflow on Golem."""
    params: TranscodingParameters = parse_params(argv)
    enable_default_logger()

    # Hard-coded defaults for the Golem requestor configuration:
    # subnet tag, maximum number of workers, and total budget.
    golem_details: GolemParameters = GolemParameters('devnet-alpha.2', 10, 10.0)

    processor: TranscodingProcessor = await TranscodingProcessor.instance(golem_details)
    await processor.transcode(params)
def main():
    """Run the offer-renegotiation scenario on the `goth` subnet with a hard timeout."""
    subnet = "goth"
    enable_default_logger()
    try:
        asyncio.get_event_loop().run_until_complete(
            asyncio.wait_for(
                renegotiate_offers(
                    Configuration(),
                    subnet_tag=subnet,
                ),
                timeout=140,
            )
        )
    except asyncio.TimeoutError:
        # `asyncio.wait_for` raises `asyncio.TimeoutError`, which on Python < 3.11
        # is NOT the built-in `TimeoutError`; the original `except TimeoutError`
        # therefore failed to catch the expected timeout and crashed the script.
        print("Main timeout triggered :(")
def run_golem_example(example_main, log_file=None):
    """Drive an example coroutine on the event loop with standard error handling.

    :param example_main: coroutine object implementing the example's logic
    :param log_file: optional path; when given, full API debug logging is enabled
    """
    # Workaround needed only on Windows with Python older than 3.8.
    windows_event_loop_fix()

    if log_file:
        enable_default_logger(
            log_file=log_file,
            debug_activity_api=True,
            debug_market_api=True,
            debug_payment_api=True,
            debug_net_api=True,
        )

    loop = asyncio.get_event_loop()
    example_task = loop.create_task(example_main)
    try:
        loop.run_until_complete(example_task)
    except NoPaymentAccountError as e:
        # Point the user at the handbook section on initializing payments.
        handbook_url = (
            "https://handbook.golem.network/requestor-tutorials/"
            "flash-tutorial-of-requestor-development"
        )
        print(
            f"{TEXT_COLOR_RED}"
            f"No payment account initialized for driver `{e.required_driver}` "
            f"and network `{e.required_network}`.\n\n"
            f"See {handbook_url} on how to initialize payment accounts for a requestor node."
            f"{TEXT_COLOR_DEFAULT}"
        )
    except KeyboardInterrupt:
        print(
            f"{TEXT_COLOR_YELLOW}"
            "Shutting down gracefully, please wait a short while "
            "or press Ctrl+C to exit immediately..."
            f"{TEXT_COLOR_DEFAULT}"
        )
        # Cancel the task and let it finish its own cleanup before exiting.
        example_task.cancel()
        try:
            loop.run_until_complete(example_task)
            print(
                f"{TEXT_COLOR_YELLOW}Shutdown completed, thank you for waiting!{TEXT_COLOR_DEFAULT}"
            )
        except (asyncio.CancelledError, KeyboardInterrupt):
            pass
def main():
    """List market offers from the chosen subnet, giving up after a few seconds."""
    parser = utils.build_parser("List offers")
    args = parser.parse_args()
    subnet = args.subnet_tag
    sys.stderr.write(f"Using subnet: {utils.TEXT_COLOR_YELLOW}{subnet}{utils.TEXT_COLOR_DEFAULT}\n")
    enable_default_logger()
    try:
        asyncio.get_event_loop().run_until_complete(
            asyncio.wait_for(
                list_offers(
                    Configuration(),
                    subnet_tag=subnet,
                ),
                timeout=4,
            )
        )
    except asyncio.TimeoutError:
        # `asyncio.wait_for` raises `asyncio.TimeoutError`; on Python < 3.11
        # that is a different class from the built-in `TimeoutError`, so the
        # original `except TimeoutError` let the expected timeout escape.
        pass
def main():
    """Bootstrap sys.path for the shared `utils` module, then list offers with a timeout."""
    import pathlib
    import sys

    # Make the examples' shared `utils` module importable from the parent dir.
    parent_directory = pathlib.Path(__file__).resolve().parent.parent
    sys.stderr.write(f"Adding {parent_directory} to sys.path.\n")
    sys.path.append(str(parent_directory))
    import utils

    parser = utils.build_parser("List offers")
    args = parser.parse_args()
    subnet = args.subnet_tag
    sys.stderr.write(f"Using subnet: {utils.TEXT_COLOR_YELLOW}{subnet}{utils.TEXT_COLOR_DEFAULT}\n")
    enable_default_logger()
    try:
        asyncio.get_event_loop().run_until_complete(
            asyncio.wait_for(list_offers(Configuration(), subnet_tag=subnet), timeout=4)
        )
    except asyncio.TimeoutError:
        # `asyncio.wait_for` raises `asyncio.TimeoutError`, which is not the
        # built-in `TimeoutError` on Python < 3.11 — catching the built-in
        # (as the original did) would crash instead of exiting quietly.
        pass
f"{num_tasks} tasks computed, total time: {datetime.now() - start_time}" f"{TEXT_COLOR_DEFAULT}") if __name__ == "__main__": parser = build_parser("Send a drone task") now = datetime.now().strftime("%Y-%m-%d_%H.%M.%S") parser.set_defaults(log_file=f"drone-yapapi-{now}.log") args = parser.parse_args() # This is only required when running on Windows with Python prior to 3.8: windows_event_loop_fix() enable_default_logger( log_file=args.log_file, debug_activity_api=True, debug_market_api=True, debug_payment_api=True, ) loop = asyncio.get_event_loop() task = loop.create_task( main( subnet_tag=args.subnet_tag, payment_driver=args.payment_driver, payment_network=args.payment_network, )) try: loop.run_until_complete(task) except NoPaymentAccountError as e: handbook_url = ("https://handbook.golem.network/requestor-tutorials/"
# NOTE(review): fragment — the enclosing `worker` and `main` definitions begin
# above this excerpt, so the indentation below is reconstructed; verify against
# the full file.

        # Tail of the per-task loop inside `worker`: run a short sleep on the
        # provider, accept the (empty) result, and return after the first task.
        script = work_ctx.new_script()
        script.run("/bin/sleep", "1")
        yield script
        task.accept_result()
        return

    # Inside `main`: run 3 trivial tasks on the "goth" subnet, one worker at a time.
    async with Golem(
        budget=10.0,
        subnet_tag="goth",
        event_consumer=log_event_repr,
    ) as golem:
        tasks = [Task(data=n) for n in range(3)]
        async for task in golem.execute_tasks(
            worker,
            tasks,
            vm_package,
            max_workers=1,
            timeout=timedelta(minutes=6),
        ):
            print(f"Task computed: {task}")


if __name__ == "__main__":
    # Default yapapi logging, plus a DEBUG console handler for yapapi events.
    enable_default_logger()
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    logging.getLogger("yapapi.events").addHandler(console_handler)
    asyncio.run(main())
f"{TEXT_COLOR_DEFAULT}") print(f"{TEXT_COLOR_MAGENTA}" f"Saving Model Weights for round {global_round_number}" f"{TEXT_COLOR_DEFAULT}") model.save( os.path.join(ROUND_WEIGHTS_FOLDER, f'round_{global_round_number}.h5')) if __name__ == "__main__": create_folder('output') create_folder(WORKER_MODEL_WEIGHTS_FOLDER) create_folder(WORKER_LOGS_FOLDER) create_folder(ROUND_WEIGHTS_FOLDER) enable_default_logger(log_file='deml.log') loop = asyncio.get_event_loop() task = loop.create_task(main()) try: loop.run_until_complete(task) except KeyboardInterrupt: print(f"{TEXT_COLOR_YELLOW}" "Shutting down gracefully, please wait a short while " "or press Ctrl+C to exit immediately..." f"{TEXT_COLOR_DEFAULT}") task.cancel() try: loop.run_until_complete(task) print(f"{TEXT_COLOR_YELLOW}"
async def worker(context: WorkContext, tasks: AsyncIterable[Task]):
    """For every incoming task, run `date` on the provider and attach its output."""
    async for current_task in tasks:
        exe_script = context.new_script()
        date_future = exe_script.run("/bin/sh", "-c", "date")
        yield exe_script
        current_task.accept_result(result=await date_future)


async def main():
    """Execute a single hello-world task on the devnet-beta subnet and print its stdout."""
    package = await vm.repo(
        image_hash="d646d7b93083d817846c2ae5c62c72ca0507782385a2e29291a3d376",
    )
    async with Golem(budget=1.0, subnet_tag="devnet-beta") as golem:
        async for completed in golem.execute_tasks(
            worker, [Task(data=None)], payload=package
        ):
            print(completed.result.stdout)


if __name__ == "__main__":
    enable_default_logger(log_file="hello.log")
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(event_loop.create_task(main()))
async def inner(*args, **kwargs):
    """Run the wrapped function either locally or remotely on Golem.

    The wrapped function's code object is marshalled to a temp file; its
    JSON-serializable positional args are written to files alongside it.
    NOTE(review): closes over `func`, `self` and `_local_remote_fn` from the
    enclosing decorator scope (not visible in this excerpt); `kwargs` is
    accepted but never forwarded — presumably intentional, verify upstream.
    """
    # Firstly, we'll save the function body (its compiled code object) to file.
    tmpdir = tempfile.TemporaryDirectory()
    module_path = PurePath(f"{tmpdir.name}/gfaas_module")
    with open(module_path, "wb") as f:
        marshal.dump(func.__code__, f)

    if self.run_local:
        import asyncio

        # Local mode: execute in `self.engine` (a concurrent executor) and
        # bridge its Future into asyncio, bounded by the configured timeout.
        fut = self.engine.submit(_local_remote_fn, module_path, *args)
        res = await asyncio.wait_for(asyncio.wrap_future(fut), self.timeout.seconds)
        return res
    else:
        # Remote mode: imports are deferred so the yapapi dependency is only
        # required when actually dispatching to the network.
        from yapapi.runner import Engine, Task, vm
        from yapapi.runner.ctx import WorkContext
        from yapapi.log import enable_default_logger, log_summary

        # Save input args to files (one JSON file per positional argument).
        saved_args = []
        for i, arg in enumerate(args):
            arg_path = PurePath(f"{tmpdir.name}/arg{i}")
            with open(arg_path, "w") as f:
                json.dump(arg, f)
            saved_args.append(arg_path)

        enable_default_logger()
        package = await vm.repo(
            image_hash="74e9cdb5a5aa2c73a54f9ebf109986801fe2d4f026ea7d9fbfcca221",
            min_mem_gib=0.5,
            min_storage_gib=2.0,
        )
        out_path = PurePath(f"{tmpdir.name}/out")

        async def worker(ctx: WorkContext, tasks):
            # Upload the marshalled function and each argument file, run the
            # remote runner script, then download the JSON result.
            async for task in tasks:
                ctx.send_file(module_path, "/golem/input/func")
                remote_args = []
                for (i, arg_path) in enumerate(saved_args):
                    remote_arg = f"/golem/input/arg{i}"
                    ctx.send_file(arg_path, remote_arg)
                    remote_args.append(remote_arg)
                ctx.run("python", "/golem/runner.py", "/golem/input/func", *remote_args)
                ctx.download_file("/golem/output/out", out_path)
                yield ctx.commit()
                task.accept_task(result=out_path)
            ctx.log("done")

        # Allow extra time on top of the user-facing timeout for provider
        # negotiation and image deployment.
        init_overhead: timedelta = timedelta(minutes=3)
        async with Engine(
            package=package,
            max_workers=1,
            budget=self.budget,
            timeout=init_overhead + self.timeout,
            subnet_tag=self.subnet,
            event_emitter=log_summary(),
        ) as engine:
            async for progress in engine.map(worker, [Task(data=None)]):
                print(f"progress={progress}")

        # The worker downloaded the remote JSON result to `out_path`.
        with open(out_path, "r") as f:
            out = json.load(f)
        return out
tasks = [Task(data=n) for n in range(6)] async for task in golem.execute_tasks( worker, tasks, package, max_workers=1, timeout=timedelta(minutes=6), ): print(f"Task computed: {task}, time: {task.running_time}") print("All tasks computed") if __name__ == "__main__": enable_default_logger(log_file="test.log") console_handler = logging.StreamHandler() console_handler.setLevel(logging.DEBUG) logging.getLogger("yapapi.events").addHandler(console_handler) loop = asyncio.get_event_loop() task = loop.create_task(main()) try: loop.run_until_complete(task) except KeyboardInterrupt: print("Shutting down gracefully...") task.cancel() try: loop.run_until_complete(task)
async def main():
    """Run a single long task while exercising mid-agreement payments on `goth`."""
    package = await vm.repo(
        image_hash="d646d7b93083d817846c2ae5c62c72ca0507782385a2e29291a3d376"
    )
    run_timeout = timedelta(hours=24)
    async with Golem(
        budget=10.0,
        strategy=ShortDebitNoteIntervalAndPaymentTimeout(),
        subnet_tag="goth",
        event_consumer=log_event_repr,
    ) as golem:
        # Bump the first yapapi handler (installed by enable_default_logger)
        # to DEBUG for this run.
        logging.getLogger("yapapi").handlers[0].setLevel(logging.DEBUG)
        async for done_task in golem.execute_tasks(
            worker,
            [Task(data=None)],
            payload=package,
            max_workers=1,
            timeout=run_timeout,
        ):
            print(f"Task finished: {done_task}.")


if __name__ == "__main__":
    enable_default_logger(log_file="mid_agreement_payments.log")
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(event_loop.create_task(main()))
# NOTE(review): fragment — this excerpt starts inside an async test body whose
# `def` lies above the visible region; the indentation below is reconstructed.

    # Start the service cluster and wait until three instance start-ups have
    # been observed (instances_started is presumably incremented by the
    # service class — TODO confirm against the full file).
    cluster = await golem.run_service(FirstInstanceFailsToStart)
    while instances_started < 3:
        log("Waiting for another instance...")
        await asyncio.sleep(2)

    # At least one instance must be available before stopping the cluster.
    assert [i for i in cluster.instances if i.is_available]

    log("Closing the second cluster...")
    cluster.stop()

    # Poll until no instance reports as available.
    while [i for i in cluster.instances if i.is_available]:
        log("Waiting for the cluster to stop...")
        await asyncio.sleep(2)
    log("Cluster stopped")


if __name__ == "__main__":
    # Timestamped log file with full API-level debug logging enabled.
    now = datetime.now().strftime("%Y-%m-%d_%H.%M.%S")
    enable_default_logger(
        log_file=f"test-instance-restart-{now}.log",
        debug_activity_api=True,
        debug_market_api=True,
        debug_payment_api=True,
    )
    loop = asyncio.get_event_loop()
    task = loop.create_task(main())
    loop.run_until_complete(task)
async def test_demand_resubscription(log_dir: Path, goth_config_path: Path, monkeypatch) -> None:
    """Test that checks that a demand is re-submitted after its previous submission expires.

    Spins up a one-provider goth network, runs 20 short tasks through a
    single worker with a 30-second timeout, and asserts (via the event
    monitor) that the requestor re-subscribes its demand.

    :param log_dir: directory for goth and test logs
    :param goth_config_path: path to the goth YAML configuration
    :param monkeypatch: pytest fixture used to inject the requestor's env vars
    """
    configure_logging(log_dir)

    # Override the default test configuration to create only one provider node
    nodes = [
        {"name": "requestor", "type": "Requestor"},
        {"name": "provider-1", "type": "VM-Wasm-Provider", "use-proxy": True},
    ]
    goth_config = load_yaml(goth_config_path, [("nodes", nodes)])

    vm_package = await vm.repo(
        image_hash="9a3b5d67b0b27746283cb5f287c13eab1beaa12d92a9f536b747c7ae",
        min_mem_gib=0.5,
        min_storage_gib=2.0,
    )

    runner = Runner(base_log_dir=log_dir, compose_config=goth_config.compose_config)

    async with runner(goth_config.containers):
        requestor = runner.get_probes(probe_type=RequestorProbe)[0]
        env = dict(os.environ)
        env.update(requestor.get_agent_env_vars())

        # Setup the environment for the requestor: yapapi reads its daemon
        # address and app key from environment variables.
        for key, val in env.items():
            monkeypatch.setenv(key, val)

        # The monitor collects yapapi events and checks the resubscription
        # assertion against the event stream.
        monitor = EventMonitor()
        monitor.add_assertion(assert_demand_resubscribed)
        monitor.start()

        # The requestor
        enable_default_logger()

        async def worker(work_ctx, tasks):
            # Each task just sleeps on the provider and is accepted as-is,
            # keeping the agreement busy long enough for the demand to expire.
            async for task in tasks:
                script = work_ctx.new_script()
                script.run("/bin/sleep", "5")
                yield script
                task.accept_result()

        async with Golem(
            budget=10.0,
            event_consumer=monitor.add_event_sync,
        ) as golem:
            task: Task  # mypy needs this for some reason
            async for task in golem.execute_tasks(
                worker,
                [Task(data=n) for n in range(20)],
                vm_package,
                max_workers=1,
                timeout=timedelta(seconds=30),
            ):
                logger.info("Task %d computed", task.data)

        await monitor.stop()

        # Re-raise any assertion failures recorded by the monitor.
        for a in monitor.failed:
            raise a.result()
print(echoer_message) if __name__ == "__main__": parser = utils.build_parser("John the Ripper") parser.add_argument("node_count") parser.add_argument("timeout_seconds") parser.add_argument("password") parser.set_defaults(log_file="john.log", node_count="4", timeout_seconds="5", password="******") args = parser.parse_args() enable_default_logger(log_file=args.log_file) loop = asyncio.get_event_loop() subnet = args.subnet_tag sys.stderr.write( f"yapapi version: {utils.TEXT_COLOR_YELLOW}{yapapi.__version__}{utils.TEXT_COLOR_DEFAULT}\n" ) sys.stderr.write( f"Using subnet: {utils.TEXT_COLOR_YELLOW}{subnet}{utils.TEXT_COLOR_DEFAULT}\n" ) task = loop.create_task( main(subnet_tag=args.subnet_tag, node_count=int(args.node_count), timeout_seconds=int(args.timeout_seconds), password=args.password)) try: asyncio.get_event_loop().run_until_complete(task)
# Seed the queue with the first task: await task_queue.put(Task(data=3)) async def input_generator(): """Task generator yields tasks removed from `queue`.""" while True: task = await task_queue.get() if task.data == 0: break yield task async for task in golem.execute_tasks( worker, input_generator(), vm_package, max_workers=1, timeout=timedelta(minutes=6), ): print("task result:", task.result, file=sys.stderr) for n in range(task.result): await task_queue.put(Task(data=task.result - 1)) print("all done!", file=sys.stderr) if __name__ == "__main__": test_dir = pathlib.Path(__file__).parent.name enable_default_logger(log_file=f"{test_dir}.log") asyncio.run(main())