def generate_program_folder(sdfg, code_objects: List[CodeObject], out_path: str, config=None):
    """ Writes all files required to configure and compile the DaCe program
        into the specified folder.

        :param sdfg: The SDFG to generate the program folder for.
        :param code_objects: List of generated code objects.
        :param out_path: The folder in which the build files should be written.
        :param config: Optional configuration object whose snapshot is saved
                       alongside the generated code; defaults to the global
                       ``Config``.
        :return: Path to the program folder.
    """
    src_path = os.path.join(out_path, "src")
    # exist_ok=True replaces the try/except FileExistsError idiom and is
    # race-free if two builds run concurrently.
    os.makedirs(src_path, exist_ok=True)

    # Pattern that strips DaCe instrumentation markers from generated code;
    # compiled once since it is loop-invariant.
    marker_re = re.compile(r'[ \t]*////__DACE:[^\n]*')

    filelist = []

    # Write each code object to a file
    for code_object in code_objects:
        name = code_object.name
        extension = code_object.language
        target_name = code_object.target.target_name

        # Create one subfolder per code-generation target
        target_folder = os.path.join(src_path, target_name)
        os.makedirs(target_folder, exist_ok=True)

        # Write code to file
        basename = "{}.{}".format(name, extension)
        code_path = os.path.join(target_folder, basename)
        with open(code_path, "w") as code_file:
            code_file.write(marker_re.sub('', code_object.code))

        filelist.append("{},{}".format(target_name, basename))

    # Write list of files for the build system
    with open(os.path.join(out_path, "dace_files.csv"), "w") as filelist_file:
        filelist_file.write("\n".join(filelist))

    # Copy snapshot of configuration script
    if config is not None:
        config.save(os.path.join(out_path, "dace.conf"))
    else:
        Config.save(os.path.join(out_path, "dace.conf"))

    # Save the SDFG itself
    sdfg.save(os.path.join(out_path, "program.sdfg"))

    return out_path
def main():
    """Entry point for the DIODE server.

    Parses command-line flags, optionally restores a backed-up DaCe
    configuration, starts an ExecutorServer and then either serves the
    Flask web application or (executor-only mode) waits passively forever.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-l", "--localhost", action="store_true",
                        help="Bind to localhost only")
    parser.add_argument(
        "-r",
        "--remotedace",
        action="store_true",
        help="Use ssh commands instead of locally running dace")
    parser.add_argument("-rd",
                        "--restoredace",
                        action="store_true",
                        help="Restore the backup file")
    parser.add_argument(
        "-e",
        "--executor",
        action="store_true",
        help="Run as an executor server instead of DIODE server")
    parser.add_argument("-p", "--port", type=int, help="Port to listen on")
    args = parser.parse_args()

    if args.restoredace:
        # Restore the previously backed-up configuration and persist it as
        # the active one.
        from dace.config import Config
        Config.load("./dace.conf.bak")
        Config.save()

    # NOTE(review): without a `global` statement this assignment creates a
    # function-local name that is discarded when main() returns; if
    # `remote_execution` is a module-level flag read elsewhere, this likely
    # needs `global remote_execution` — confirm against the module globals.
    remote_execution = args.remotedace

    es = ExecutorServer()
    es_ref.append(es)

    if not args.executor:
        # Serve the DIODE web application in the foreground; stop the
        # executor once the Flask app exits.
        app.run(host='localhost' if args.localhost else "0.0.0.0",
                debug=True,
                port=args.port,
                use_reloader=False)

        es.stop()
    else:
        # Executor-only mode: make sure the executor is stopped at
        # interpreter exit, then block forever.
        import atexit

        def tmp():
            es.stop()

        atexit.register(tmp)

        # Wait for an event that will never arrive (passive wait)
        event = threading.Event()
        event.wait()
def set_settings(settings_array, client_id):
    """Apply a dictionary of configuration settings for a given client.

    Loads the client-specific configuration if one exists (falling back to
    the default configuration otherwise), applies every setting from
    ``settings_array``, then persists the result to the per-client file.

    :param settings_array: Mapping from '/'-separated config path to value.
    :param client_id: Identifier of the client whose config is updated.
    """
    from dace.config import Config

    # Race-free directory creation (replaces the isdir check + mkdir pair,
    # which could raise FileExistsError if two requests arrive at once).
    os.makedirs("./client_configs", exist_ok=True)

    clientpath = "./client_configs/" + client_id + ".conf"

    if os.path.isfile(clientpath):
        Config.load(clientpath)
    else:
        Config.load()

    for path, val in settings_array.items():
        # Config entries are addressed by '/'-separated path components.
        Config.set(*path.split("/"), value=val)

    Config.save(clientpath)
def addRun(self, client_id, compilation_output_tuple, more_options):
    """Enqueue a run — or a run-group control command — for the executor.

    :param client_id: Identifier of the requesting client; used to locate
                      (and, on first use, create) its configuration file.
    :param compilation_output_tuple: Either a string naming a group
        operation ("start"/"end"), or the compilation output to execute.
    :param more_options: Extra options forwarded with the run request.
    :return: For an unknown group operation, a generator function yielding
             a JSON error; for a scheduled run, a generator function
             yielding a JSON "scheduled" message; ``None`` for a queued
             group command.
    """
    config_path = "./client_configs/" + client_id + ".conf"
    if not os.path.isdir("./client_configs/"):
        os.mkdir("./client_configs/")
    if not os.path.isfile(config_path):
        # Config not (yet) available, load default and copy
        with config_lock:
            from dace.config import Config
            Config.load()
            Config.save(config_path)

    if isinstance(compilation_output_tuple, str):
        # Group command ("start"/"end") rather than an actual run.
        gc = compilation_output_tuple
        val = {
            'cid': client_id,
            'cmd': 'control',
            'index': self._run_num,
            'operation': None,
            'config_path': config_path,
            'state': "pending"
        }
        if gc == "start":
            val['operation'] = 'startgroup'
        elif gc == "end":
            val['operation'] = 'endgroup'
        else:
            # Unknown group operation: report the error without queueing.
            def g():
                yield '{ "error": "Unknown group operation" }'

            return g
        # Queue the control command and register it under the operation
        # lock so the run counter stays consistent with the task dict.
        with self._oplock:
            self._executor_queue.put(val)
            self._task_dict[self._run_num] = val
            self._run_num += 1
        return

    # Regular run request: build, queue and register it atomically.
    with self._oplock:
        val = {
            'index': self._run_num,
            'type': 'run',
            'cid': client_id,
            'config_path': config_path,
            'cmd': 'run',
            'cot': compilation_output_tuple,
            'opt': more_options,
            'state': 'pending',
            'reset-perfdata': False
        }
        self._executor_queue.put(val)
        self._task_dict[self._run_num] = val
        self._run_num += 1

    def error_gen():
        yield '{ "error": "Run was scheduled. Please poll until ready or longpoll." }'

    return error_gen
def generate_program_folder(sdfg, code_objects: List[CodeObject], out_path: str, config=None):
    """ Writes all files required to configure and compile the DaCe program
        into the specified folder.

        :param sdfg: The SDFG to generate the program folder for. May be
                     None, in which case the SDFG and hash files are not
                     written.
        :param code_objects: List of generated code objects.
        :param out_path: The folder in which the build files should be written.
        :param config: Optional configuration object whose snapshot is
                       saved; defaults to the global ``Config``.
        :return: Path to the program folder.
    """
    src_path = os.path.join(out_path, "src")
    filelist = []

    # Pattern that strips DaCe instrumentation markers; compiled once since
    # it is loop-invariant.
    marker_re = re.compile(r'[ \t]*////__DACE:[^\n]*')

    # Write each code object to a file
    for code_object in code_objects:
        name = code_object.name
        extension = code_object.language
        target_name = code_object.target.target_name
        target_type = code_object.target_type

        # Create target folder (with an optional per-type subfolder)
        target_folder = os.path.join(src_path, target_name)
        if target_type:
            target_folder = os.path.join(target_folder, target_type)
        os.makedirs(target_folder, exist_ok=True)

        # Write code to file
        basename = "{}.{}".format(name, extension)
        code_path = os.path.join(target_folder, basename)
        clean_code = marker_re.sub('', code_object.code)

        # Save the file only if it changed (keeps old timestamps and saves
        # build time)
        if not identical_file_exists(code_path, clean_code):
            with open(code_path, "w") as code_file:
                code_file.write(clean_code)

        # Fixed: truthiness test instead of `== True` comparison.
        if code_object.linkable:
            filelist.append("{},{},{}".format(target_name, target_type,
                                              basename))

    # Write list of files
    with open(os.path.join(out_path, "dace_files.csv"), "w") as filelist_file:
        filelist_file.write("\n".join(filelist))

    # Build a list of environments used
    environments = set()
    for obj in code_objects:
        environments |= obj.environments

    # Write list of environments
    with open(os.path.join(out_path, "dace_environments.csv"), "w") as env_file:
        env_file.write("\n".join(environments))

    # Copy snapshot of configuration script
    if config is not None:
        config.save(os.path.join(out_path, "dace.conf"))
    else:
        Config.save(os.path.join(out_path, "dace.conf"))

    if sdfg is not None:
        # Save the SDFG itself and its hash. Renamed from `hash`, which
        # shadowed the builtin.
        sdfg_hash = sdfg.save(os.path.join(out_path, "program.sdfg"),
                              hash=True)

        filepath = os.path.join(out_path, 'include', 'hash.h')
        # Ensure the include directory exists before writing the header
        # (open() does not create intermediate directories).
        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        contents = f'#define __HASH_{sdfg.name} "{sdfg_hash}"\n'
        if not identical_file_exists(filepath, contents):
            with open(filepath, 'w') as hfile:
                hfile.write(contents)

    return out_path
def win_close_callback(self, widget, *data):
    """Window-close signal handler: persist the current DaCe configuration.

    :param widget: The widget that emitted the close signal (unused).
    :param data: Extra signal arguments (unused).
    """
    Config.save()
help="Restore the backup file") parser.add_argument( "-e", "--executor", action="store_true", help="Run as an executor server instead of DIODE server") parser.add_argument("-p", "--port", type=int, help="Port to listen on") args = parser.parse_args() if args.restoredace: from dace.config import Config Config.load("./dace.conf.bak") Config.save() remote_execution = args.remotedace es = ExecutorServer() es_ref.append(es) if not args.executor: app.run(host='localhost' if args.localhost else "0.0.0.0", debug=True, port=args.port, use_reloader=False) es.stop() else: import atexit