def read_task(file_, file_dest):
    """Read a task definition from a JSON file, resolve its paths against
    the golem source tree, validate that every referenced file exists, and
    write the resolved definition to ``file_dest``.

    :param file_: path to the source task-definition JSON file
    :param file_dest: destination path for the resolved definition
    :raises IOError: if the main scene, main program or any resource
        path does not point at an existing file
    """
    with open(file_) as f:
        task = json.load(f)

    # json.load() yields a plain dict, so the definition fields must be
    # accessed by key -- the previous attribute access (task.main_scene_file)
    # would raise AttributeError on first use.
    task["main_scene_file"] = os.path.normpath(
        os.path.join(get_golem_path(), task["main_scene_file"]))
    if not os.path.isfile(task["main_scene_file"]):
        raise IOError("incorrect main scene path: {}".format(
            task["main_scene_file"]))

    task["main_program_file"] = os.path.normpath(
        os.path.join(get_golem_path(), task["main_program_file"]))
    if not os.path.isfile(task["main_program_file"]):
        raise IOError("incorrect main program path: {}".format(
            task["main_program_file"]))

    # Keep resources as a sorted list: a set is not JSON-serializable,
    # so json.dump() below would fail on it.
    task["resources"] = sorted({
        os.path.normpath(os.path.join(get_golem_path(), res))
        for res in task["resources"]
    })
    for res in task["resources"]:
        if not os.path.isfile(res):
            raise IOError("incorrect resource path: {}".format(res))

    # Ensure the destination directory exists before writing.
    if not os.path.isdir(os.path.dirname(file_dest)):
        os.makedirs(os.path.dirname(file_dest))
    with open(file_dest, 'w') as f:
        json.dump(task, f)
def test_dummytask_job(self):
    """Run the dummy-task docker job end to end and check that a
    matching ``.result`` file appears in the output directory."""
    app_dir = path.join(get_golem_path(), "apps", "dummy")
    task_script = find_task_script(app_dir, "docker_dummytask.py")
    with open(task_script) as f:
        task_script_src = f.read()

    os.mkdir(os.path.join(self.resources_dir, "data"))
    os.mkdir(os.path.join(self.resources_dir, "code"))

    # copy the resources to the resources dir
    data_dir = path.join(get_golem_path(), "apps", "dummy", "test_data")
    for f in os.listdir(data_dir):
        task_file = path.join(data_dir, f)
        # NOTE(review): shutil.copy raises on directories, yet isdir()
        # entries are allowed through here -- confirm the data dir only
        # ever holds plain files.
        if path.isfile(task_file) or path.isdir(task_file):
            shutil.copy(task_file,
                        path.join(self.resources_dir, "data", f))

    code_dir = path.join(get_golem_path(), "apps", "dummy",
                         "resources", "code_dir")
    for f in os.listdir(code_dir):
        task_file = path.join(code_dir, f)
        # skip compiled-bytecode caches when staging the code
        if (path.isfile(task_file) or path.isdir(task_file)) \
                and os.path.basename(task_file) != "__pycache__":
            shutil.copy(task_file,
                        path.join(self.resources_dir, "code", f))

    # this is the stuff that is available by "params" module
    # in the docker job script
    params = {
        "data_files": ["in.data"],
        "subtask_data": "00110011",  # it is kept in string on purpose
        "subtask_data_size": 8,
        # subtask_data_size is to double check the size,
        # if we haven't kept subtask_data in string,
        # we would lose leading zeros
        "difficulty": 10,
        "result_size": 256,
        "result_file": "out.result",
    }

    with self._create_test_job(script=task_script_src,
                               params=params) as job:
        job.start()
        exit_code = job.wait()
        self.assertEqual(exit_code, 0)

    out_files = os.listdir(self.output_dir)
    self.assertTrue(
        any(f.endswith(".result") and "out" in f for f in out_files))
def setUpClass(cls):
    """Create a temporary directory under the dummy app and link (or
    copy) its code and test-data directories into it."""
    super().setUpClass()
    dummy_app = os.path.join(get_golem_path(), "apps", "dummy")
    data_dir = os.path.join(dummy_app, "test_data")
    code_dir = os.path.join(dummy_app, "resources", "code_dir")

    cls.test_tmp = os.path.join(dummy_app, "test_tmp")
    os.mkdir(cls.test_tmp)

    cls.code_link = os.path.join(cls.test_tmp, "code")
    cls.data_link = os.path.join(cls.test_tmp, "data")
    symlink_or_copy(code_dir, cls.code_link)
    symlink_or_copy(data_dir, cls.data_link)
def __init__(self, app_logic, mainWindowClass):
    """Create the Qt application, register the window icon, and
    instantiate the main window.

    :param app_logic: application logic object, kept on the instance
    :param mainWindowClass: class of the main window to instantiate
    """
    try:
        # Linux check might suffice if X11 was the only option available
        QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_X11InitThreads)
    except Exception as ex:
        from sys import platform
        # Only warn on non-Windows platforms, where X11 thread
        # initialization could actually matter.
        if platform != "win32":
            from logging import getLogger
            logger = getLogger("gui")
            logger.warning(
                "Error occurred when setting up Qt: {}".format(ex))
    self.app = QApplication(sys.argv)
    # Register the application icon in several resolutions.
    app_icon = QIcon()
    icon_path = path.join(get_golem_path(), "gui", "view", "img")
    app_icon.addFile(path.join(icon_path, "favicon-32x32.png"),
                     QSize(32, 32))
    app_icon.addFile(path.join(icon_path, "favicon-48x48.png"),
                     QSize(48, 48))
    app_icon.addFile(path.join(icon_path, "favicon-256x256.png"),
                     QSize(256, 256))
    self.app.setWindowIcon(app_icon)
    self.main_window = mainWindowClass()
    self.app_logic = app_logic
def test_dir_size(self):
    """Exercise get_dir_size(): missing directory, nested files, the
    whole golem tree, and (on POSIX) an unreadable subdirectory whose
    failure is reported through the ``report_error`` callback."""
    with self.assertRaises(OSError):
        get_dir_size("notexisting")

    with open(self.testfile1, 'w') as f:
        f.write("a" * 20000)
    os.makedirs(self.testdir2)
    with open(self.testfile2, 'w') as f:
        f.write("b" * 30000)
    size = get_dir_size(self.testdir)
    # 20000 + 30000 bytes were just written under self.testdir
    self.assertGreaterEqual(size, 50000)
    # sanity check: the golem source tree itself is at least 3 MiB
    self.assertGreater(get_dir_size(get_golem_path()), 3 * 1024 * 1024)

    if not is_windows():
        os.makedirs(self.testdir3)
        with open(self.testfile3, 'w') as f:
            f.write("c" * 30000)
        # write-only directory: listing it should fail for normal users
        os.chmod(self.testdir3, 0o200)
        new_size = get_dir_size(self.testdir)
        self.assertGreaterEqual(new_size, size)
        # root ignores file permissions, so the PermissionError is only
        # observed for ordinary users
        if getpass.getuser() != 'root':
            errors = []
            get_dir_size(self.testdir, report_error=errors.append)
            self.assertEqual(len(errors), 1)
            self.assertIs(type(errors[0]), PermissionError)
def __init__(self):
    """Build a benchmark dummy-task definition with resolved data paths
    and a matching verifier."""
    self._normalization_constant = 1000  # TODO tweak that. issue #1356
    self.dummy_task_path = join(get_golem_path(),
                                "apps", "dummy", "test_data")
    td = self._task_definition = DummyTaskDefinition(DummyTaskDefaults())
    # resolve the shared data files against the test data directory
    td.shared_data_files = [join(self.dummy_task_path, x)
                            for x in td.shared_data_files]
    # (removed a no-op self-assignment of td.out_file_basename)
    td.task_id = str(uuid.uuid4())
    td.main_program_file = DummyTaskEnvironment().main_program_file
    td.resources = {join(self.dummy_task_path, "in.data")}
    td.add_to_resources()

    self.verification_options = {
        "difficulty": td.options.difficulty,
        "shared_data_files": td.shared_data_files,
        "result_size": td.result_size,
        "result_extension": DummyTask.RESULT_EXT,
    }
    verification_data = dict()
    self.verification_options["subtask_id"] = "DummyBenchmark"
    verification_data['subtask_info'] = self.verification_options
    self.verifier = DummyTaskVerifier(verification_data)
    # subtask payload: a run of the testing character of the configured size
    self.subtask_data = \
        DummyTask.TESTING_CHAR * td.options.subtask_data_size
def _get_test_task_definition(cls) -> TaskDefinition:
    """Load the test task definition file, substituting ``$GOLEM_DIR``
    with the actual golem root (as a POSIX path)."""
    golem_dir = Path(get_golem_path()).as_posix()
    task_path = Path(__file__).parent / cls.TASK_FILE
    with open(task_path) as f:
        raw = f.read()
    resolved = raw.replace('$GOLEM_DIR', golem_dir)
    return DictSerializer.load(json.loads(resolved))
class BlenderEnvironment(DockerEnvironment):
    """Docker environment for running Blender rendering tasks."""
    # Docker image and tag the task containers are started from
    DOCKER_IMAGE = "golemfactory/blender"
    DOCKER_TAG = "1.4"
    ENV_ID = "BLENDER"
    # Location of the Blender app sources within the golem tree
    APP_DIR = path.join(get_golem_path(), 'apps', 'blender')
    SCRIPT_NAME = "docker_blendertask.py"
    SHORT_DESCRIPTION = "Blender (www.blender.org)"
def file_name():
    """
    Get wheel name
    :return: Name for wheel
    """
    from git import Repo
    repo = Repo(get_golem_path())
    # NOTE(review): tags[-2] is the second-to-last tag, but the trailing
    # comment claims "latest" -- confirm whether -2 is intentional.
    tag = repo.tags[-2]  # get latest tag
    tag_id = tag.commit.hexsha  # get commit id from tag
    commit_id = repo.head.commit.hexsha  # get last commit id
    # Map the running platform onto a wheel platform tag.
    if platform.startswith('linux'):
        from platform import architecture
        if architecture()[0].startswith('64'):
            plat = "linux_x86_64"
        else:
            plat = "linux_i386"
    elif platform.startswith('win'):
        plat = "win32"
    elif platform.startswith('darwin'):
        plat = "macosx_10_12_x86_64"
    else:
        raise SystemError("Incorrect platform: {}".format(platform))
    # HEAD not on the tag -> development build; embed a short commit id.
    if commit_id != tag_id:  # devel package
        return "golem-{}-0x{}{}-cp27-none-{}.whl".format(
            tag.name, commit_id[:4], commit_id[-4:], plat)
    else:  # release package
        return "golem-{}-cp27-none-{}.whl".format(tag.name, plat)
def test_blender_job(self):
    """Run the blender docker job on a benchmark scene and verify the
    expected EXR frame is produced."""
    app_dir = os.path.join(get_golem_path(), "apps", "blender")
    task_script = find_task_script(app_dir, "docker_blendertask.py")
    with open(task_script) as f:
        task_script_src = f.read()

    # prepare dummy crop script
    from apps.blender.resources.scenefileeditor import \
        generate_blender_crop_file
    crop_script_contents = generate_blender_crop_file(
        resolution=(800, 600),
        borders_x=(0, 1),
        borders_y=(0, 1),
        use_compositing=True,
    )

    # copy the scene file to the resources dir
    benchmarks_dir = path.join(get_golem_path(),
                               path.normpath("apps/blender/benchmark/"))
    scene_files = glob.glob(path.join(benchmarks_dir, "**/*.blend"))
    if len(scene_files) == 0:
        self.fail("No .blend files available")
    shutil.copy(scene_files[0], self.resources_dir)

    params = {
        "outfilebasename": "out",
        "scene_file": DockerJob.RESOURCES_DIR + "/" +
        path.basename(scene_files[0]),
        "script_src": crop_script_contents,
        "start_task": 42,
        "end_task": 42,
        "output_format": "EXR",
        "frames": [1],
    }

    with self._create_test_job(script=task_script_src,
                               params=params) as job:
        job.start()
        exit_code = job.wait()
        self.assertEqual(exit_code, 0)

    out_files = os.listdir(self.output_dir)
    # start_task 42, frame 1 -> out_420001.exr
    self.assertEqual(out_files, ['out_420001.exr'])
class DummyTaskEnvironment(DockerEnvironment):
    """Docker environment for the dummy proof-of-work example task."""
    # Docker image and tag the task containers are started from
    DOCKER_IMAGE = "golemfactory/base"
    DOCKER_TAG = "1.2"
    ENV_ID = "DUMMYPOW"
    # Location of the dummy app sources within the golem tree
    APP_DIR = path.join(get_golem_path(), 'apps', 'dummy')
    SCRIPT_NAME = "docker_dummytask.py"
    SHORT_DESCRIPTION = "Dummy task (example app calculating proof-of-work " \
                        "hash)"
def _get_task_collector_path(cls):
    """Return the normalized path to the platform's taskcollector binary.

    NOTE(review): another variant of this helper resolves the Windows
    binary under an extra "x64" directory -- confirm which build layout
    is current.
    """
    binary = "taskcollector.exe" if is_windows() else "taskcollector"
    return os.path.normpath(os.path.join(
        get_golem_path(), "apps", "rendering", "resources",
        "taskcollector", "Release", binary))
def test_regenerate_cmd_called(self):
    """Copy a known .ui file into a fresh tree and run UI regeneration."""
    makedirs(path.join(self.path, "gen"))
    ui_name = "NodeNameDialog.ui"
    source_ui = path.join(get_golem_path(), "gui", "view", ui_name)
    copyfile(source_ui, path.join(self.path, ui_name))
    regenerate_ui_files(self.path)
def move_wheel():
    """Rename the newest wheel in dist/ to the canonical file name."""
    from shutil import move
    dist_dir = path.join(get_golem_path(), 'dist')
    # regular files only, sorted so the last entry is the newest build
    wheels = sorted(
        f for f in listdir(dist_dir) if path.isfile(path.join(dist_dir, f)))
    source = path.join(dist_dir, wheels[-1])
    move(source, path.join(dist_dir, file_name()))
def start_gui(address):
    """Spawn the GUI process pointed at the given RPC address.

    :param address: object with ``host`` and ``port`` attributes
    :return: the started subprocess.Popen handle
    """
    # Frozen (bundled) executables run the GUI directly; from source we
    # re-invoke the current script through the interpreter.
    if getattr(sys, 'frozen', False):
        runner = [sys.executable]
    else:
        runner = [sys.executable, os.path.join(get_golem_path(), sys.argv[0])]
    rpc_address = '{}:{}'.format(address.host, address.port)
    return subprocess.Popen(runner + ['--qt', '-r', rpc_address])
def update_ini():
    """Rewrite ``.version.ini`` with the version derived from the wheel
    file name (tag, plus ``-<commit>`` for development builds)."""
    version_file = join(get_golem_path(), '.version.ini')
    name_parts = file_name().split('-')
    tag = name_parts[1]
    commit = name_parts[2]
    # Parenthesize the conditional so only the "-<commit>" suffix is
    # optional; the original precedence made the whole expression
    # conditional and dropped the tag entirely for release builds.
    version = "[version]\nversion = {}\n".format(
        tag + ("-" + commit if commit.startswith('0x') else ""))
    # Text mode: version is a str, so 'wb' would raise TypeError.
    with open(version_file, 'w') as f_:
        f_.write(version)
def _get_task_collector_path(cls):
    """Return the normalized path to the platform's taskcollector build."""
    relative = (os.path.join("x64", "Release", "taskcollector.exe")
                if is_windows()
                else os.path.join("Release", "taskcollector"))
    base = os.path.join(get_golem_path(), "apps", "rendering",
                        "resources", "taskcollector")
    return os.path.normpath(os.path.join(base, relative))
def test_luxrender_job(self):
    """Run the luxrender docker job on the benchmark scene and verify
    the rendered PNG is produced."""
    app_dir = path.join(get_golem_path(), "apps", "lux")
    task_script = find_task_script(app_dir, "docker_luxtask.py")
    with open(task_script) as f:
        task_script_src = f.read()

    # read the scene file and copy the resources to the resources dir
    lux_task_dir = path.join(get_golem_path(), "apps", "lux",
                             "benchmark", "test_task")
    scene_src = None
    for f in os.listdir(lux_task_dir):
        task_file = path.join(lux_task_dir, f)
        if path.isfile(task_file) and task_file.endswith(".lxs"):
            # exactly one .lxs scene file is expected
            if scene_src is not None:
                self.fail("Multiple .lxs files found in {}"
                          .format(lux_task_dir))
            with open(task_file, "r") as scene_file:
                scene_src = scene_file.read()
        elif path.isdir(task_file):
            shutil.copytree(task_file, path.join(self.resources_dir, f))
    if scene_src is None:
        self.fail("No .lxs files found in {}".format(lux_task_dir))

    params = {
        "outfilebasename": "out",
        "output_format": "png",
        "scene_file_src": scene_src,
        "start_task": 42,
        "end_task": 42,
        "frames": [1],
        "scene_dir": "/golem/resources/",
        "num_threads": 1
    }

    with self._create_test_job(script=task_script_src,
                               params=params) as job:
        job.start()
        exit_code = job.wait()
        self.assertEqual(exit_code, 0)

    out_files = os.listdir(self.output_dir)
    # start_task 42 -> out42.png
    self.assertEqual(out_files, ['out42.png'])
def run_default_benchmark(cls, save=False):
    """Measure performance with the bundled minilight scene.

    :param save: when True, persist the score via Performance
    :return: the measured performance value
    """
    logger = logging.getLogger('golem.task.benchmarkmanager')
    logger.info('Running benchmark for %s', cls.get_id())
    scene = path.join(get_golem_path(), 'apps', 'rendering', 'benchmark',
                      'minilight', 'cornellbox.ml.txt')
    performance = make_perf_test(scene)
    logger.info('%s performance is %.2f', cls.get_id(), performance)
    if save:
        Performance.update_or_create(cls.get_id(), performance)
    return performance
def update_variables():
    """Substitute the APP_VERSION constant in golem/core/variables.py
    with the current version string."""
    import re
    file_ = path.join(get_golem_path(), 'golem', 'core', 'variables.py')
    # Text mode: re.sub below uses str patterns, so reading bytes ('rb')
    # would raise TypeError when mixing bytes with str.
    with open(file_, 'r') as f_:
        variables = f_.read()
    version = get_version()
    variables = re.sub(r"APP_VERSION = \".*\"",
                       "APP_VERSION = \"{}\"".format(version),
                       variables)
    with open(file_, 'w') as f_:
        f_.write(variables)
def test_conformance(self):
    """Test that we conform to PEP-8."""
    style = pycodestyle.StyleGuide(ignore=[], max_line_length=80)
    # PyCharm needs absolute paths
    root = Path(get_golem_path())
    targets = [str(root / rel) for rel in self.PEP8_FILES]
    report = style.check_files(targets)
    self.assertEqual(report.total_errors, 0,
                     "Found code style errors (and warnings).")
def __init__(self):
    """Populate the default values for a dummy task."""
    super(DummyTaskDefaults, self).__init__()
    self.shared_data_files = ["in.data"]
    self.out_file_basename = "out"
    self.default_subtasks = 5
    self.result_size = 256  # length of result hex number
    self.code_dir = os.path.join(
        get_golem_path(), "apps", "dummy", "resources", "code_dir")
    self.options = DummyTaskOptions()
    self.options.difficulty = 0xffff0000  # magic number
def test_blender_subtask_script_error(self): task = self._create_test_task() # Replace the main script source with another script that will # produce errors when run in the task environment: task.src_code = 'main :: IO()\nmain = putStrLn "Hello, Haskell World"\n' task.main_program_file = path.join( path.join(get_golem_path(), "golem"), "node.py") task.task_resources = {task.main_program_file, task.main_scene_file} task_thread, error_msg, out_dir = self._run_docker_task(task) assert isinstance(task_thread, DockerTaskThread) assert isinstance(error_msg, str) assert error_msg.startswith("Subtask computation failed")
def test_lint(self):
    """Run pylint over the concent package and its tests; expect it to
    emit no findings at all."""
    root = pathlib.Path(common.get_golem_path())
    lib_dir = root / "golem/network/concent"
    tests_dir = root / "tests/golem/network/concent"
    options = "{tests_dir} {lib_dir} -f json --rcfile={rcfile}".format(
        rcfile=(root / '.pylintrc').as_posix(),
        lib_dir=lib_dir.as_posix(),
        tests_dir=tests_dir.as_posix(),
    )
    stdout_io, _ = epylint.py_run(options, return_std=True)
    self.assertEqual(stdout_io.read(), '')
def test_blender_job(self):
    """Render the cube benchmark scene in the blender docker job and
    verify the expected EXR frame is produced."""
    app_dir = os.path.join(get_golem_path(), "apps", "blender")
    task_script = find_task_script(app_dir, "docker_blendertask.py")
    with open(task_script) as f:
        task_script_src = f.read()

    # prepare dummy crop script
    crop_script_contents = generate_blender_crop_file(
        resolution=(800, 600),
        borders_x=(0, 1),
        borders_y=(0, 1),
        use_compositing=True,
        samples=5)

    # copy the scene file to the resources dir
    scene_file = pathlib.Path(get_golem_path())
    scene_file /= "apps/blender/benchmark/test_task/cube.blend"
    shutil.copy(str(scene_file), self.resources_dir)
    # path of the scene as seen from inside the container
    dest_scene_file = pathlib.PurePosixPath(DockerJob.RESOURCES_DIR)
    dest_scene_file /= scene_file.name

    params = {
        "outfilebasename": "out",
        "scene_file": str(dest_scene_file),
        "script_src": crop_script_contents,
        "start_task": 42,
        "end_task": 42,
        "output_format": "EXR",
        "frames": [1],
    }

    with self._create_test_job(script=task_script_src, params=params) \
            as job:
        job.start()
        exit_code = job.wait(timeout=300)
        self.assertEqual(exit_code, 0)

    out_files = os.listdir(self.output_dir)
    # start_task 42, frame 1 -> out_420001.exr
    self.assertEqual(out_files, ['out_420001.exr'])
def test_init(self):
    """DummyTaskDefinition defaults, both bare and built from
    DummyTaskDefaults, must match the expected values."""
    td = DummyTaskDefinition()
    assert isinstance(td, DummyTaskDefinition)
    assert isinstance(td.options, DummyTaskOptions)
    assert td.code_dir == os.path.join(get_golem_path(), "apps", "dummy",
                                       "resources", "code_dir")
    assert td.result_size == 256
    assert td.out_file_basename == "out"
    assert isinstance(td.resources, set)

    defaults = DummyTaskDefaults()
    tdd = DummyTaskDefinition(defaults)
    assert tdd.options.subtask_data_size == 128
    assert tdd.options.difficulty == 0xffff0000
    assert tdd.code_dir == os.path.join(get_golem_path(), "apps", "dummy",
                                        "resources", "code_dir")
    # every entry collected from the code dir must be an existing file
    for c in list_dir_recursive(tdd.code_dir):
        assert os.path.isfile(c)
    assert tdd.result_size == 256
    assert tdd.subtasks_count == 5
    assert tdd.out_file_basename == "out"
    assert tdd.shared_data_files == ["in.data"]
def test_init(self):
    """DummyTaskDefaults must expose the documented default values."""
    td = DummyTaskDefaults()
    assert isinstance(td, DummyTaskDefaults)
    assert isinstance(td.options, DummyTaskOptions)
    assert td.options.subtask_data_size == 128
    assert td.options.difficulty == 0xffff0000
    assert td.code_dir == os.path.join(get_golem_path(), "apps", "dummy",
                                       "resources", "code_dir")
    assert td.result_size == 256
    assert td.default_subtasks == 5
    assert td.out_file_basename == "out"
    assert td.shared_data_files == ["in.data"]
def test_subtask_killed(self): task = self._get_test_task() # Replace the main script source with another script that will # kill itself task.src_code = \ 'import os; import signal; os.kill(os.getpid(), signal.SIGKILL)' task.main_program_file = path.join( path.join(get_golem_path(), "golem"), "node.py") task.task_resources = {task.main_program_file, task.main_scene_file} task_thread = self._run_task(task) self.assertIsInstance(task_thread, DockerTaskThread) self.assertIsInstance(task_thread.error_msg, str) self.assertIn("out-of-memory", task_thread.error_msg)
def _load_test_task_definition(self, task_file):
    """Load a task-definition JSON located next to this test module and
    substitute ``$GOLEM_DIR`` in its paths with the real golem root.

    NOTE(review): json.loads() returns a plain dict, so the attribute
    access below (task_def.resources, ...) will raise AttributeError
    unless the JSON is decoded into an object somewhere -- verify how
    task_def is actually deserialized before relying on this.

    :param task_file: file name of the JSON definition, relative to
        this module's directory
    :return: the task definition with resolved paths
    """
    task_file = path.join(path.dirname(__file__), task_file)
    with open(task_file, "r") as f:
        task_def = json.loads(f.read())

    # Replace $GOLEM_DIR in paths in task definition by get_golem_path()
    golem_dir = get_golem_path()

    def set_root_dir(p):
        return p.replace("$GOLEM_DIR", golem_dir)

    task_def.resources = set(set_root_dir(p) for p in task_def.resources)
    task_def.main_scene_file = set_root_dir(task_def.main_scene_file)
    task_def.main_program_file = set_root_dir(task_def.main_program_file)
    return task_def
def generate_ui_files():
    """Generate UI files for the main GUI view and for every app that
    ships a gui/view directory of its own."""
    golem_path = get_golem_path()
    gen_ui_files(os.path.normpath(os.path.join(golem_path, "gui", "view")))

    apps_path = os.path.normpath(os.path.join(golem_path, "apps"))
    for entry in os.listdir(apps_path):
        app_dir = os.path.join(apps_path, entry)
        if not os.path.isdir(app_dir):
            continue
        app_ui_path = os.path.join(app_dir, "gui", "view")
        if os.path.isdir(app_ui_path):
            gen_ui_files(app_ui_path)