def __init__(self,
             name=None,
             output_dir=None,
             overwrite=True,
             frame=0,
             scene=None,
             preset='pt',
             visualize=True,
             **kwargs):
    """Set up the renderer: output directory, post-processor, and scene.

    Args:
        name: renderer name, stored on ``self.renderer_name``.
        output_dir: subdirectory (under the taichi output path) for output.
            Defaults to a fresh unique task id per *instance*.  The previous
            default ``output_dir=get_unique_task_id()`` was evaluated once at
            function-definition time, so every renderer created in the same
            process shared one directory; it also left ``self.output_dir``
            unset when ``None`` was passed, crashing at ``os.mkdir`` below.
        overwrite: when False, abort if the output directory already exists.
        frame: starting frame index.
        scene: if truthy, ``self.initialize`` is called immediately with it.
        preset: renderer preset forwarded to ``initialize``.
        visualize: whether rendered frames should be displayed.
    """
    self.renderer_name = name
    if output_dir is None:
        # Evaluate per call so each instance gets its own directory.
        output_dir = get_unique_task_id()
    self.output_dir = taichi.settings.get_output_path(output_dir + '/')
    self.post_processor = LDRDisplay()
    self.frame = frame
    self.viewer_started = False
    self.viewer_process = None
    try:
        os.mkdir(self.output_dir)
    except OSError as e:
        # The directory already exists (or could not be created); this is
        # only fatal when the caller forbids overwriting.
        if not overwrite:
            print(e)
            exit(-1)
    if scene:
        self.initialize(preset, scene=scene, **kwargs)
    self.visualize = visualize
def __init__(self,
             max_side,
             simulator,
             color_scheme,
             levelset_supersampling=2,
             show_grid=False,
             show_images=True,
             rescale=True,
             video_framerate=24,
             video_output=True,
             substep=False,
             need_press=False,
             show_stat=True):
    """Open a pyglet window visualizing ``simulator`` and run the event loop.

    NOTE: this constructor BLOCKS — it ends with ``pyglet.app.run()``, which
    returns only when the window is closed.

    Args:
        max_side: pixel size of the window's larger dimension.
        simulator: simulation object; ``simulator.res`` (width, height in
            cells) is read here, and ``self.update`` is expected to step it.
        color_scheme: color mapping used when rendering frames.
        levelset_supersampling: supersampling factor for level-set rendering.
        show_grid / show_images / show_stat: display toggles.
        rescale: scale the window to ``max_side`` while preserving the
            simulation aspect ratio; otherwise use a max_side square.
        video_framerate: framerate handed to the video output.
        video_output: when True the VideoManager builds a video automatically.
        substep: whether to advance by substeps instead of full frames.
        need_press: require a key press before stepping (``self.pressed``).
    """
    # Window size: keep the simulation aspect ratio when rescaling.
    if rescale:
        scale = min(1.0 * max_side / simulator.res[0],
                    1.0 * max_side / simulator.res[1])
        width = int(round(scale * simulator.res[0]))
        height = int(round(scale * simulator.res[1]))
    else:
        width = max_side
        height = max_side
    super(SimulationWindow, self).__init__(
        width=width,
        height=height,
        fullscreen=False,
        caption='Taichi',
        config=pyglet.gl.Config(sample_buffers=0,
                                samples=0,
                                depth_size=16,
                                double_buffer=True))
    self.width = width
    self.height = height
    self.video_framerate = video_framerate
    self.task_id = get_unique_task_id()
    self.simulator = simulator
    self.frame_count = 0
    self.color_scheme = color_scheme
    self.show_images = show_images
    self.levelset_supersampling = levelset_supersampling
    self.show_grid = show_grid
    self.quit_pressed = False
    # Per-run output directory named after the unique task id.
    self.output_directory = os.path.join(get_output_directory(), self.task_id)
    self.cpu_time = 0
    self.show_stat = show_stat
    os.mkdir(self.output_directory)
    self.substep = substep
    self.video_output = video_output
    self.video_manager = VideoManager(self.output_directory,
                                      automatic_build=self.video_output)
    self.need_press = need_press
    self.pressed = False
    # Drive self.update at up to 120 Hz, then hand control to pyglet.
    pyglet.clock.schedule_interval(self.update, 1 / 120.0)
    pyglet.app.run()
def __init__(self, target_file):
    """Prepare a scratch build tree for compiling a C++ unit into a DLL.

    Creates a fresh per-task directory, records the paths for the unit
    source, the built library, and the CMakeLists, then copies the
    project's template CMakeLists.txt into the new directory.
    """
    work_dir = get_output_path(get_unique_task_id())
    os.mkdir(work_dir)
    self.target_file = target_file
    self.src = ''
    self.dir = work_dir
    # Paths inside the scratch directory used by the build.
    self.src_path = os.path.join(work_dir, 'unit.cpp')
    self.dll_path = os.path.join(work_dir, 'build', 'libunit.dylib')
    self.cmakelists_path = os.path.join(work_dir, 'CMakeLists.txt')
    self.unit_dll = tc_core.create_unit_dll()
    # Seed the scratch directory with the template CMakeLists shipped
    # with the project.
    template = os.path.join(get_root_directory(), 'taichi', 'python',
                            'taichi', 'misc', 'CMakeLists.txt')
    shutil.copy(template, self.cmakelists_path)
def __init__(self,
             max_side,
             simulator,
             color_scheme,
             levelset_supersampling=2,
             show_grid=False,
             show_images=True,
             rescale=True):
    """Open a pyglet window visualizing ``simulator`` and run the event loop.

    NOTE: this constructor BLOCKS — it ends with ``pyglet.app.run()``, which
    returns only when the window is closed.

    Args:
        max_side: pixel size of the window's larger dimension.
        simulator: simulation object exposing ``simulation_width`` and
            ``simulation_height``; ``self.update`` is expected to step it.
        color_scheme: color mapping used when rendering frames.
        levelset_supersampling: supersampling factor for level-set rendering.
        show_grid / show_images: display toggles.
        rescale: scale the window to ``max_side`` while preserving the
            simulation aspect ratio; otherwise use a max_side square.
    """
    # Window size: keep the simulation aspect ratio when rescaling.
    if rescale:
        scale = min(1.0 * max_side / simulator.simulation_width,
                    1.0 * max_side / simulator.simulation_height)
        width = int(round(scale * simulator.simulation_width))
        height = int(round(scale * simulator.simulation_height))
    else:
        width = max_side
        height = max_side
    super(SimulationWindow, self).__init__(
        width=width,
        height=height,
        fullscreen=False,
        caption='Taichi',
        config=pyglet.gl.Config(sample_buffers=0,
                                samples=0,
                                depth_size=16,
                                double_buffer=True))
    uuid = get_unique_task_id()
    self.video_filename = uuid + ".mp4"
    # frame_output_path keeps a trailing slash (callers appear to rely on
    # concatenating file names directly onto it).
    self.frame_output_path = VIDEO_OUTPUT_ROOT + '/' + uuid + '/'
    # [:-1] strips the trailing slash so mkdir gets a plain directory name.
    os.mkdir(self.frame_output_path[:-1])
    self.simulator = simulator
    self.frame_count = 0
    self.color_scheme = color_scheme
    self.show_images = show_images
    self.levelset_supersampling = levelset_supersampling
    self.show_grid = show_grid
    self.quit_pressed = False
    # Drive self.update at up to 120 Hz, then hand control to pyglet.
    pyglet.clock.schedule_interval(self.update, 1 / 120.0)
    pyglet.app.run()
    # End of the preceding (non-Windows) branch: restore the working
    # directory that was in effect before loading the core library.
    os.chdir(tmp_cwd)
elif get_os_name() == 'win':
    bin_dir = get_bin_directory()
    dll_path = os.path.join(bin_dir, 'Release', 'taichi_core.dll')
    # Build the core library on demand if it is missing.
    if not os.path.exists(dll_path):
        build()
    # The problem here is, on windows, when an dll/pyd is loaded, we can not write to it any more
    old_wd = os.getcwd()
    os.chdir(bin_dir)
    if CREATE_SAND_BOX_ON_WINDOWS:
        # Create a sandbox for separated core lib development and loading:
        # copy the dll to a temp dir so the original stays writable/rebuildable.
        dir = os.path.join(get_output_directory(), 'tmp', get_unique_task_id())
        # Make the bundled external libraries resolvable at dll-load time.
        os.environ['PATH'] += ';' + (os.path.join(get_repo_directory(), 'external', 'lib'))
        os.makedirs(dir)
        # Rename .dll -> .pyd so Python's import machinery will load it.
        shutil.copy(dll_path, os.path.join(dir, 'taichi_core.pyd'))
        sys.path.append(dir)
    else:
        shutil.copy(dll_path, os.path.join(bin_dir, 'taichi_core.pyd'))
        sys.path.append(bin_dir)
    import taichi_core as tc_core
    os.chdir(old_wd)


def get_dll_name(name):
os.chdir(tmp_cwd) elif get_os_name() == 'win': bin_dir = get_bin_directory() dll_path1 = os.path.join(bin_dir, 'RelWithDebInfo', 'taichi_core.dll') dll_path2 = os.path.join(bin_dir, 'libtaichi_core.dll') assert os.path.exists(dll_path1) and not os.path.exists(dll_path2) # On windows when an dll/pyd is loaded, we can not write to it any more old_wd = os.getcwd() os.chdir(bin_dir) if create_sand_box_on_windows: # Create a sandbox for separated core lib development and loading dir = os.path.join(get_output_directory(), 'tmp', get_unique_task_id()) lib_dir = os.path.join(get_repo_directory(), 'external', 'lib') os.environ['PATH'] += ';' + lib_dir os.makedirs(dir) if os.path.exists(dll_path1): shutil.copy(dll_path1, os.path.join(dir, 'taichi_core.pyd')) else: shutil.copy(dll_path2, os.path.join(dir, 'taichi_core.pyd')) os.environ['PATH'] += ';' + dir sys.path.append(dir) else: shutil.copy(dll_path, os.path.join(bin_dir, 'taichi_core.pyd')) sys.path.append(bin_dir) try:
# Locate the Windows core dll: prefer the Release build, fall back to a
# copy directly in the bin directory.
bin_dir = get_bin_directory() + '/'
dll_path = bin_dir + '/Release/taichi_core.dll'
if not os.path.exists(dll_path):
    dll_path = bin_dir + '/taichi_core.dll'
    if not os.path.exists(dll_path):
        assert False, "Library taichi_core doesn't exist."
# The problem here is, on windows, when an dll/pyd is loaded, we can not write to it any more...
# Ridiculous...
old_wd = os.getcwd()
os.chdir(bin_dir)
if CREATE_SAND_BOX_ON_WINDOWS:
    # So let's just create a sandbox for separated core lib development and loading
    dir = get_output_directory() + '/tmp/' + get_unique_task_id() + '/'
    os.makedirs(dir)
    # NOTE(review): dead code kept below as a triple-quoted string (Python 2
    # era print syntax inside) — candidate for deletion.
    '''
    for fn in os.listdir(bin_dir):
        if fn.endswith('.dll') and fn != 'taichi_core.dll':
            print dir + fn, bin_dir + fn
            # Why can we create symbolic links....
            # if not ctypes.windll.kernel32.CreateSymbolicLinkW(bin_dir + fn, dir + fn, 0):
            #     raise OSError
            shutil.copy(bin_dir + fn, dir + fn)
    '''
    # Rename .dll -> .pyd so Python's import machinery will load it.
    shutil.copy(dll_path, dir + 'taichi_core.pyd')
    sys.path.append(dir)
else:
    shutil.copy(dll_path, bin_dir + 'taichi_core.pyd')
    sys.path.append(bin_dir)
def __init__(self, **kwargs):
    """Set up a topology-optimization run: working directories, logging,
    script backup, and (optionally) restarting from a saved state.

    Expected kwargs (everything else is forwarded to the superclass):
        res: resolution tuple; ``res[0]`` is encoded into the run suffix.
        snapshot_period, version, wireframe, wireframe_grid_size,
        wireframe_thickness, suffix, max_iterations, check_log_file.

    Command-line arguments are parsed here from ``sys.argv``: positional
    ``options`` (``key=value`` overrides applied on restart) and ``-c``
    (state file to continue from).  On a restart this constructor calls
    ``self.run()`` and then ``exit()`` — it does not return.
    """
    res = kwargs['res']
    # Derive the experiment name from the script file: 'opt_foo.py' -> 'foo'.
    # NOTE(review): split('.') misbehaves if the script path contains other
    # dots — confirm sys.argv[0] is always a bare 'opt_*.py' name here.
    self.script_name = sys.argv[0].split('.')[0]
    assert (self.script_name.startswith('opt_'))
    self.script_name = self.script_name[4:]
    self.snapshot_period = kwargs.get('snapshot_period', 0)
    script_fn = os.path.join(os.getcwd(), sys.argv[0])
    # Build a human-readable suffix encoding the run configuration.
    suffix = ''
    self.version = kwargs.get('version', 0)
    if 'version' in kwargs:
        suffix += '_v{:0d}'.format(int(self.version))
    self.wireframe = kwargs.get('wireframe', False)
    if 'wireframe' in kwargs:
        # Fill in wireframe defaults so downstream code can rely on them.
        if 'wireframe_grid_size' not in kwargs:
            kwargs['wireframe_grid_size'] = 10
        if 'wireframe_thickness' not in kwargs:
            kwargs['wireframe_thickness'] = 3
        if self.wireframe:
            suffix += '_wf{}g{}t{}'.format(int(self.wireframe),
                                           kwargs['wireframe_grid_size'],
                                           kwargs['wireframe_thickness'])
        else:
            suffix += '_wf{}'.format(int(self.wireframe))
    suffix += '_r{:04d}'.format(res[0])
    parser = argparse.ArgumentParser(description='Topology Optimization.')
    parser.add_argument('options', metavar='Option', type=str, nargs='*',
                        help='An option to override')
    parser.add_argument('-c', type=str, help='iteration to start from')
    args = parser.parse_args()
    if args.c is not None:
        suffix += '_continue'
    self.task_id = get_unique_task_id()
    self.suffix = suffix + kwargs.get('suffix', '')
    # All outputs for this run live under one task-specific directory.
    self.working_directory = os.path.join(tc.get_output_directory(),
                                          'topo_opt', self.script_name,
                                          self.task_id + '_' + self.suffix)
    kwargs['working_directory'] = self.working_directory
    self.snapshot_directory = os.path.join(self.working_directory, 'snapshots')
    self.fem_directory = os.path.join(self.working_directory, 'fem')
    self.fem_obj_directory = os.path.join(self.working_directory, 'fem_obj')
    os.makedirs(self.snapshot_directory, exist_ok=True)
    os.makedirs(self.fem_directory, exist_ok=True)
    os.makedirs(self.fem_obj_directory, exist_ok=True)
    self.max_iterations = kwargs.get('max_iterations', 1000)
    self.log_fn = os.path.join(self.working_directory, 'log.txt')
    tc.start_memory_monitoring(os.path.join(self.working_directory,
                                            'memory_usage.txt'),
                               interval=0.1)
    # Mirror stdout into the log file and route print() through the logger.
    tc.duplicate_stdout_to_file(self.log_fn)
    tc.redirect_print_to_log()
    tc.trace("log_fn = {}", self.log_fn)
    # Back up the driving script next to the results for reproducibility.
    with open(script_fn) as f:
        script_content = f.read()
    shutil.copy(sys.argv[0], self.working_directory + "/")
    tc.info("Script backuped")
    tc.info(
        "Script content:\n********************\n\n{}\n*******************",
        script_content)
    print(args)
    super().__init__(name='spgrid_topo_opt', **kwargs)
    if args.c is not None:
        # Restart (continue)
        print(args.options)
        print(args.c)
        last_iter = self.general_action(
            action='load_state',
            #filename=self.get_snapshot_file_name(args.c))
            filename=args.c)
        # Apply key=value overrides given on the command line.
        for o in args.options:
            o = o.split('=')
            assert (len(o) == 2)
            self.override_parameter(o[0], o[1])
        self.i_start = int(last_iter) + 1
        tc.info("\n*** Restarting from iter {}", self.i_start)
        self.run()
        exit()
    # Start from scratch
    self.i_start = 0
    tc.trace("log duplicated")
    # Sanity check that stdout duplication actually created the log file.
    if kwargs.get('check_log_file', True):
        assert (os.path.exists(self.log_fn))