def __init__(self, path_to_miz_file, temp_dir=None, keep_temp_dir: bool = False):
    """Wrap a .miz file for editing.

    Validates that *path_to_miz_file* exists, is a regular file and has the
    '.miz' extension, then creates a private temporary working directory for
    the unzipped mission content.  Uses a path.py-style ``Path`` (``isfile``,
    ``ext``, ``abspath``) — TODO confirm against the module imports.

    :param path_to_miz_file: path to the source MIZ file
    :param temp_dir: deprecated; passing any value raises
        PendingDeprecationWarning (the temp dir is now always auto-created)
    :param keep_temp_dir: if True, keep the temporary directory on disk
    :raises FileNotFoundError: if the path does not exist
    :raises TypeError: if the path is not a regular file
    :raises ValueError: if the extension is not '.miz'
    """
    self.miz_path = Path(path_to_miz_file)
    if not self.miz_path.exists():
        raise FileNotFoundError(path_to_miz_file)
    if not self.miz_path.isfile():
        raise TypeError(path_to_miz_file)
    if not self.miz_path.ext == '.miz':
        raise ValueError(path_to_miz_file)
    if temp_dir is not None:
        raise PendingDeprecationWarning()
    self.keep_temp_dir = keep_temp_dir
    # BUG FIX: 'EMFT_' was passed positionally, which is mkdtemp's *suffix*
    # parameter; the trailing underscore shows it was meant to be a prefix.
    self.tmpdir = Path(tempfile.mkdtemp(prefix='EMFT_'))
    logger.debug('temporary directory: {}'.format(self.tmpdir.abspath()))
    # lazily-populated caches for the unzipped mission components
    self.zip_content = None
    self._mission = None
    self._mission_qual = None
    self._l10n = None
    self._l10n_qual = None
    self._map_res = None
    self._map_res_qual = None
def __init__(self, path):
    """Initialise the base Path and validate it as an existing '.miz' file.

    :raises FileNotFoundError: *path* does not exist
    :raises TypeError: *path* is not a regular file
    :raises ValueError: *path* does not end in '.miz'
    """
    Path.__init__(self, path)
    if not self.exists():
        raise FileNotFoundError(path)
    if not self.isfile():
        raise TypeError(path)
    if self.ext != '.miz':
        raise ValueError(path)
def __init__(self, sb, screen, size, lb_res):
    """Build the winner menu: resolve resource paths, unpack the
    leaderboard assets, fetch the scores and show the leaderboard."""
    self.screen = screen
    self.res = size
    # resource locations
    self.style = f"{Path.style()}winner_menu{Path.DATA_SUFFIX}"
    self.soundtrack = Path.soundtracks()
    self.image = Path.images()
    # leaderboard assets: background, player column, score column, multiplier
    self.lb_back, self.lb_player, self.lb_score, self.lb_mult = lb_res
    # pick the text style matching the resolution
    if size[0] == 1920:
        self.object_id = "#Text"
    else:
        self.object_id = "#Text2"
    self.sb = sb
    self.scores = self._get_scores()
    self._show_lb()
async def delete(request: web.Request):
    """Delete the file or directory at the requested path, plus its
    metadata if present.  The user's home directory itself can never be
    deleted."""
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    try:
        path = Path.validate_path(username, request.match_info['path'])
    except ValueError:
        return web.json_response({'error': 'badly formed path'})
    # refuse to remove the user's home directory
    if path.user_path == username:
        return web.json_response({'error': 'cannot delete home directory'})
    target = path.full_path
    if os.path.isfile(target):
        os.remove(target)
        if os.path.exists(path.metadata_path):
            os.remove(path.metadata_path)
    elif os.path.isdir(target):
        shutil.rmtree(target)
        if os.path.exists(path.metadata_path):
            shutil.rmtree(path.metadata_path)
    else:
        # neither file nor directory — nothing we know how to delete
        return web.json_response(
            {'error': 'could not delete {path}'.format(path=path.user_path)})
    return web.Response(text='successfully deleted {path}'.format(
        path=path.user_path))
def __init__(self, screen) -> None:
    """Load the starter-screen background for every theme and, when the
    window is 1280x720, scale them down to fit.

    Exposes one attribute per theme so existing callers keep working:
    ``bg_classic``, ``bg_halloween``, ``bg_vaporwave``, ``bg_christmas``,
    ``bg_old_west``.
    """
    self.config = Settings()
    self.screen = screen
    self.resources_images = Path.images() + "starter" + sep
    # one background image per theme; the repetitive per-theme load/scale
    # stanzas are collapsed into a loop over the theme names
    themes = ("classic", "halloween", "vaporwave", "christmas", "old_west")
    for theme in themes:
        image = pg.image.load(
            f"{self.resources_images}{theme}{sep}starter_screen{Path.IMAGE_SUFFIX}"
        )
        setattr(self, f"bg_{theme}", image)
    self.size = self.config.size
    if self.size == (1280, 720):
        # down-scale every background for the smaller window
        for theme in themes:
            scaled = pg.transform.scale(
                getattr(self, f"bg_{theme}"), (1280, 720)
            )
            setattr(self, f"bg_{theme}", scaled)
def __init__(self, name, *args, **kwargs):
    """Create or load a project named *name*.

    Recognised keyword arguments: ``ip``, ``autofix``, ``nocreate``,
    ``create``, ``descr``, ``user``, ``group``, ``features``.  Without
    'create' the instance simply reflects whatever load() found on disk.
    """
    if name == '.':
        # '.' means "the project in the current working directory"
        name = os.getcwd().split('/')[-1]
    self.name = name
    self.treepath = os.path.join(PROJECTROOT, self.name)
    self.descrpath = Path(self, PROJECTDESCR)
    self.confpath = Path(self, PROJECTCONF)
    self.confpathlist = [self.confpath.rel()]
    self.args = {}
    self.features = []
    self.tree = TREE
    self.deployattrs = {
        # an explicit truthy ip wins; otherwise autodetect from ifconfig
        'ip': kwargs.get('ip') or utils.ifconfig_ip(),
    }
    self.hosts = []
    self.fqdnpool = []  # used to keep track of used fqdn to
                        # not make them overlap in different features
    # set to True whether you want to control project automatically,
    # i.e. reconfigure features for local host
    self.autofix = kwargs.get('autofix') or False
    # set to False if load()/__init__() do not need to perform
    # additional stuff
    self.nocreate = 'nocreate' in kwargs
    self.svn = False
    self.loaded = self.load()
    if 'create' not in kwargs:
        # found an existing project or just requested to pass
        return
    # creating a new one
    self.description = kwargs['descr']
    self.user = kwargs['user']
    self.group = kwargs['group']
    self.featurelist = kwargs['features']
    self.hosts.append(HOSTNAME)
async def upload_files_chunked(request: web.Request):
    """
    @api {post} /upload post endpoint to upload data
    @apiName upload
    @apiSampleRequest /upload/
    @apiSuccess {json} meta meta on data uploaded
    @apiSuccessExample {json} Success-Response:
    HTTP/1.1 200 OK
    [{
    "path": "/nconrad/Athaliana.TAIR10_GeneAtlas_Experiments.tsv",
    "size": 3190639,
    "encoding": "7bit",
    "name": "Athaliana.TAIR10_GeneAtlas_Experiments.tsv"
    }, {
    "path": "/nconrad/Sandbox_Experiments-1.tsv",
    "size": 4309,
    "encoding": "7bit",
    "name": "Sandbox_Experiments-1.tsv"
    }]
    """
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    reader = await request.multipart()
    destPath = None
    user_file = None
    counter = 0
    while counter < 100:  # TODO this is arbitrary to keep an attacker from creating infinite loop
        # This loop handles the null parts that come in inbetween destPath and file
        part = await reader.next()
        if part is None:
            # BUG FIX: the reader is exhausted; previously this crashed
            # with an AttributeError on part.name
            break
        if part.name == 'destPath':
            destPath = await part.text()
        elif part.name == 'uploads':
            user_file = part
            break
        else:
            counter += 1
    if destPath is None or user_file is None:
        # BUG FIX: a request missing either part previously raised an
        # unhandled NameError below; fail with a clear client error
        return web.json_response(
            {'error': 'must provide destPath and uploads fields'})
    filename: str = user_file.filename
    size = 0
    destPath = os.path.join(destPath, filename)
    try:
        path = Path.validate_path(username, destPath)
    except ValueError:
        # TODO use format here for better error message
        return web.json_response({
            'error': 'invalid destination for file for user'
        })
    # stream the upload to disk chunk by chunk
    with open(path.full_path, 'wb') as f:
        while True:
            chunk = await user_file.read_chunk()
            if not chunk:
                break
            size += len(chunk)
            f.write(chunk)
    response = await stat_data(path.full_path)
    return web.json_response([response])
def test_init(self, tmpdir, cls):
    """cls must reject missing paths, directories and wrong extensions,
    and accept an existing .miz file."""
    root = Path(str(tmpdir))
    candidate = root.joinpath('f.txt')
    # does not exist yet
    with pytest.raises(FileNotFoundError):
        cls(candidate)
    # a directory, not a file
    with pytest.raises(TypeError):
        cls(root)
    candidate.write_text('')
    # exists, but wrong extension
    with pytest.raises(ValueError):
        cls(candidate)
    miz = root.joinpath('f.miz')
    miz.write_text('')
    cls(miz)  # valid target: must not raise
def main():
    """Log in to Instagram and upload every image under ./images/insta.

    Credentials come from the INSTA_USER / INSTA_PASSWORD environment
    variables (loaded from a .env file if present).
    """
    load_dotenv()
    username = os.environ['INSTA_USER']
    password = os.environ['INSTA_PASSWORD']
    bot = Bot()
    bot.login(username=username, password=password)
    # prepare the resized images before uploading
    resize_for_instagram()
    insta_path = Path.cwd() / 'images' / 'insta'
    for picture in os.listdir(insta_path):
        bot.upload_photo(insta_path / picture)
async def search(request: web.Request):
    """Search the authenticated user's home directory for *query*.

    Results are returned newest-first by modification time.
    """
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    query = request.match_info['query']
    try:
        # consistency with the sibling handlers: a bad path is a client
        # error, not an unhandled 500
        user_dir = Path.validate_path(username, username)
    except ValueError:
        return web.json_response({'error': 'badly formed path'})
    results = await dir_info(user_dir.full_path, query)
    results.sort(key=lambda x: x['mtime'], reverse=True)
    return web.json_response(results)
def mirror_dir(src, dst):
    """Recursively one-way mirror *src* into *dst*.

    Copies entries that exist only in *src* or whose content differs, then
    recurses into the sub-directories common to both sides.  Relies on the
    module-level ``ignore`` list and a path.py-style ``Path`` (``isdir``,
    ``copy2`` methods) — TODO confirm against the module imports.
    NOTE(review): entries present only in *dst* are left untouched — this
    mirrors additions/changes but never deletes.
    """
    logger.debug('{} -> {}'.format(src, dst))
    diff_ = dircmp(src, dst, ignore)
    # names present only on the left plus names whose content differs
    diff_list = diff_.left_only + diff_.diff_files
    logger.debug('differences: {}'.format(diff_list))
    for x in diff_list:
        source = Path(diff_.left).joinpath(x)
        target = Path(diff_.right).joinpath(x)
        logger.debug('looking at: {}'.format(x))
        if source.isdir():
            logger.debug('isdir: {}'.format(x))
            if not target.exists():
                logger.debug('creating: {}'.format(x))
                target.mkdir()
            # recurse into the freshly-mirrored directory
            mirror_dir(source, target)
        else:
            logger.debug('copying: {}'.format(x))
            source.copy2(diff_.right)
    # recurse into directories present on both sides
    for sub in diff_.subdirs.values():
        assert isinstance(sub, dircmp)
        mirror_dir(sub.left, sub.right)
def zip(self, destination=None):
    """Re-encode the mission and zip the working directory into a .miz.

    :param destination: target archive path; defaults to
        '<original-name>_EMFT.miz' next to the source miz file
    :return: absolute path of the written archive
    """
    self._encode()
    if destination is None:
        destination = Path(self.miz_path.dirname()).joinpath('{}_EMFT.miz'.format(self.miz_path.namebase))
    destination = Path(destination).abspath()
    logger.debug('zipping mission to: {}'.format(destination))
    # NOTE(review): this write is immediately truncated by ZipFile(mode='w')
    # below, so the dummy payload never survives — confirm whether this
    # statement can simply be removed.
    with open(destination, mode='wb') as f:
        f.write(dummy_miz)
    # compression=8 is zipfile.ZIP_DEFLATED
    with ZipFile(destination, mode='w', compression=8) as _z:
        for f in self.zip_content:
            abs_path = self.tmpdir.joinpath(f).abspath()
            logger.debug('injecting in zip file: {}'.format(abs_path))
            _z.write(abs_path, arcname=f)
    return destination
def make_path(self, end_node, add_goal=False, remove_start=True):
    """Rebuild the start-to-end control-state path by walking parent links.

    :param end_node: search node at the end of the found path
    :param add_goal: append self.goal_state to the result
    :param remove_start: drop the start state (it is not executed)
    :return: Path(states=...) ordered from start to end
    """
    # follow parent pointers: collects states in reverse (end -> start)
    node = end_node
    reversed_states = []
    while node is not None:
        reversed_states.append(node.state)
        node = node.parent
    if remove_start:
        # the start state is the last entry of the reversed list
        reversed_states.pop()
    states = list(reversed(reversed_states))
    if add_goal:
        states.append(self.goal_state)
    return Path(states=states)
async def get_metadata(request: web.Request):
    """Return metadata for a path owned by the authenticated user."""
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    try:
        path = Path.validate_path(username, request.match_info['path'])
    except ValueError:
        # TODO use format here for better error message
        return web.json_response({'error': 'badly formed path'})
    if not os.path.exists(path.full_path):
        missing = 'path {path} does not exist'.format(path=path.user_path)
        return web.json_response({'error': missing})
    return web.json_response(await some_metadata(path))
async def list_files(request: web.Request):
    """ {get} /list/:path list files/folders in path
    @apiParam {string} path path to directory
    @apiParam {string} ?type=(folder|file) only fetch folders or files
    @apiSuccess {json} meta metadata for listed objects
    @apiSuccessExample {json} Success-Response:
    HTTP/1.1 200 OK
    [
    { name: "blue-panda", mtime: 1459822597000, size: 476 },
    { name: "blue-zebra", mtime: 1458347601000, size: 170, isFolder: true }
    ]
    """
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    try:
        path = Path.validate_path(username, request.match_info['path'])
    except ValueError:
        # TODO use format here for better error message
        return web.json_response({'error': 'badly formed path'})
    if not os.path.exists(path.full_path):
        missing = 'path {path} does not exist'.format(path=path.user_path)
        return web.json_response({'error': missing})
    # shallow listing only — no recursion into subdirectories
    return web.json_response(await dir_info(path.full_path, recurse=False))
async def rename(request: web.Request):
    """Rename the file or directory at the given path, plus its metadata.

    The new name is applied inside the object's current parent directory;
    the user's home directory itself can never be renamed.
    """
    try:
        username = await auth_client.get_user(request.headers['Authorization'])
    except ValueError:
        return web.json_response(
            {'error': 'Unable to validate authentication credentials'})
    try:
        path = Path.validate_path(username, request.match_info['path'])
    except ValueError:
        return web.json_response({'error': 'badly formed path'})
    # make sure directory isn't home
    if path.user_path == username:
        return web.json_response({'error': 'cannot rename home directory'})
    body = await request.post()
    # BUG FIX (was the TODO about sanitizing newName): the raw client value
    # was used directly as the shutil.move destination, so a rename could
    # drop the object relative to the server CWD or follow a path
    # traversal. Strip directory components and resolve the new name
    # inside the object's current parent directory.
    new_name = os.path.basename(body['newName'])
    new_path = os.path.join(os.path.dirname(path.full_path), new_name)
    shutil.move(path.full_path, new_path)
    if os.path.exists(path.metadata_path):
        # keep the metadata in its own tree, renamed alongside the data
        meta_parent = os.path.dirname(path.metadata_path)
        if os.path.isfile(path.metadata_path):
            # presumably file metadata lives in '<name>.json' — verify
            shutil.move(path.metadata_path,
                        os.path.join(meta_parent, new_name + '.json'))
        else:
            shutil.move(path.metadata_path,
                        os.path.join(meta_parent, new_name))
    return web.Response(text='successfully renamed {path}'.format(
        path=path.user_path))
def __init__(self, screen: Any, data: dict, **kwargs):
    """Set up the post-game scoreboard view and persist the result.

    *data* maps the two player names to their scores; dict insertion
    order decides which entry is player 1 and which is player 2.
    """
    self.screen = screen
    self.data = data
    # dict insertion order determines player 1 vs player 2
    self.name_1, self.name_2 = data.keys()
    self.score_1, self.score_2 = data.values()
    self.st = Settings()
    self.size = self.st.size
    self.width = self.st.width
    # resource locations
    self.resources = Path.resources()
    self.styles = Path.styles()
    self.sounds = Path.sounds()
    self.soundtrack = Path.soundtracks()
    self.font = Path.fonts()
    self.image = Path.images()
    self._createUI(kwargs)
    # record the result on the scoreboard
    self.sb = ScoreboardData(data)
    self.sb.updateTable()
    self.sb.winnerUpdate()
def trace_path_by_direction(self, center: Point2D, offset: Point2D, direction: Step):
    """Trace the four corners of the box centred on *center* (half-size
    ``offset / 2``) in the winding order implied by *direction*, together
    with the Step path that walks that winding.

    :param center: centre of the traced box
    :param offset: full box size; halved internally
    :param direction: Step whose ``step`` selects the winding
    :raises ValueError: on wrong argument types or an unknown direction
    :return: Trace(first, second, third, fourth, path)
    """
    if not isinstance(direction, Step):
        raise ValueError('Expected \'direction\' to be a instance of Step')
    if not isinstance(center, Point2D):
        raise ValueError('Expected \'center\' to be a instance of Point2D')
    if not isinstance(offset, Point2D):
        raise ValueError('Expected \'offset\' to be a instance of Point2D')
    this_offset = offset / 2
    # Per direction: the four corner sign pairs (sx, sy) applied to
    # this_offset, and the four steps of the traversal path.  These are
    # exactly the per-branch literals of the original four-way if/elif,
    # de-duplicated into one table.
    table = {
        Step.CONST_DIRECTION_A: (
            ((-1, +1), (-1, -1), (+1, -1), (+1, +1)),
            (Step.CONST_DIRECTION_D, Step.CONST_DIRECTION_A,
             Step.CONST_DIRECTION_A, Step.CONST_DIRECTION_B),
        ),
        Step.CONST_DIRECTION_B: (
            ((+1, -1), (-1, -1), (-1, +1), (+1, +1)),
            (Step.CONST_DIRECTION_C, Step.CONST_DIRECTION_B,
             Step.CONST_DIRECTION_B, Step.CONST_DIRECTION_A),
        ),
        Step.CONST_DIRECTION_C: (
            ((+1, -1), (+1, +1), (-1, +1), (-1, -1)),
            (Step.CONST_DIRECTION_B, Step.CONST_DIRECTION_C,
             Step.CONST_DIRECTION_C, Step.CONST_DIRECTION_D),
        ),
        Step.CONST_DIRECTION_D: (
            ((-1, +1), (+1, +1), (+1, -1), (-1, -1)),
            (Step.CONST_DIRECTION_A, Step.CONST_DIRECTION_D,
             Step.CONST_DIRECTION_D, Step.CONST_DIRECTION_C),
        ),
    }
    try:
        corner_signs, steps = table[direction.step]
    except (KeyError, TypeError):
        # unknown (or unhashable) step value — same error as before
        raise ValueError(
            'Expected the attribute \'step\' in the parameter \'direction\' to be valid'
        )
    first, second, third, fourth = (
        Point2D(center.x + sx * this_offset.x, center.y + sy * this_offset.y)
        for sx, sy in corner_signs
    )
    path = Path(*(Step(s) for s in steps))
    return Trace(first, second, third, fourth, path)
def _page_name_template(self):
    """Template for rendered page images:
    '<temp_dir>/<source filename>_NNN.png'.

    The '%03d' is left literal for the downstream renderer to fill in
    with the page number.
    """
    # BUG FIX: the '{filename}' placeholder was missing from the template,
    # so the filename= argument below was silently unused and every page
    # was named '(unknown)_NNN.png'.
    return u"{temp_dir}/{filename}_%03d.png".format(
        temp_dir=self.temp_dir.path,
        filename=Path.filename(self.path))
def state_lattice_planner(n: int, m: int, file_name: str = "test", g_weight: float = 0.5, h_weight: float = 0.5,
                          costmap_file: str = "", start_pos: tuple = (20, 10, 0), goal_pos: tuple = (20, 280, 0),
                          initial_heading: float = math.pi / 2, padding: int = 0, turning_radius: int = 8,
                          vel: int = 10, num_headings: int = 8, num_obs: int = 130, min_r: int = 1, max_r: int = 8,
                          upper_offset: int = 20, lower_offset: int = 20, allow_overlap: bool = False,
                          obstacle_density: int = 6, obstacle_penalty: float = 3, Kp: float = 3, Ki: float = 0.08,
                          Kd: float = 0.5, inf_stream: bool = False, save_animation: bool = False,
                          save_costmap: bool = False, smooth_path: bool = False, replan: bool = False,
                          horizon: int = np.inf, y_axis_limit: int = 100, buffer: int = None,
                          move_yaxis_threshold: int = 20, new_obs_dist: int = None):
    """Run the full state-lattice planning + pymunk simulation pipeline.

    Builds (or loads) a costmap, plans an initial path with A*, then runs a
    matplotlib-animated pymunk simulation of a ship tracking the path with
    pure pursuit + PID, optionally replanning in a background process.

    :return: (total_dist_moved, init_plan_time) — total obstacle
        displacement accumulated during the run, and the wall-clock time of
        the initial plan.
    NOTE(review): relies on many module-level collaborators (CostMap, Ship,
    Primitives, swath, AStar, Plot, Path, State, TargetCourse, PID,
    create_polygon, gen_path, pymunk, plt, animation, Queue, Pipe, Event,
    Process, Vec2d, Empty) — their exact contracts are assumed, not shown
    here.
    """
    # PARAM SETUP
    # --- costmap --- #
    load_costmap_file = costmap_file
    ship_vertices = np.array([[-1, -4],
                              [1, -4],
                              [1, 2],
                              [0, 4],
                              [-1, 2]])
    # load costmap object from file if specified
    if load_costmap_file:
        with open(load_costmap_file, "rb") as fd:
            costmap_obj = pickle.load(fd)
            # recompute costmap costs if obstacle penalty is different than original
            if costmap_obj.obstacle_penalty != obstacle_penalty:
                costmap_obj.update2(obstacle_penalty)
    else:
        # initialize costmap
        costmap_obj = CostMap(
            n, m, obstacle_penalty, min_r, max_r, inf_stream, y_axis_limit, num_obs, new_obs_dist
        )
        # generate obs up until buffer if in inf stream mode
        max_y = y_axis_limit + buffer if inf_stream else goal_pos[1]
        # generate random obstacles
        costmap_obj.generate_obstacles(start_pos[1], max_y, num_obs,
                                       upper_offset, lower_offset, allow_overlap)
    orig_obstacles = costmap_obj.obstacles.copy()
    # initialize ship object
    ship = Ship(ship_vertices, start_pos, initial_heading, turning_radius, padding)
    # get the primitives
    prim = Primitives(turning_radius, initial_heading, num_headings)
    # generate swath dict
    swath_dict = swath.generate_swath(ship, prim)
    print("WEIGHTS", g_weight, h_weight)
    # initialize a star object
    a_star = AStar(g_weight, h_weight, cmap=costmap_obj,
                   primitives=prim, ship=ship, first_initial_heading=initial_heading)
    # compute current goal
    curr_goal = (goal_pos[0], min(goal_pos[1], (start_pos[1] + horizon)), goal_pos[2])
    t0 = time.time()
    worked, smoothed_edge_path, nodes_visited, x1, y1, x2, y2, orig_path = \
        a_star.search(start_pos, curr_goal, swath_dict, smooth_path)
    init_plan_time = time.time() - t0
    print("Time elapsed: ", init_plan_time)
    print("Hz", 1 / init_plan_time)
    if worked:
        plot_obj = Plot(
            costmap_obj, prim, ship, nodes_visited, smoothed_edge_path.copy(),
            path_nodes=(x1, y1), smoothing_nodes=(x2, y2), horizon=horizon,
            inf_stream=inf_stream, y_axis_limit=y_axis_limit
        )
        path = Path(plot_obj.full_path)
    else:
        print("Failed to find path at step 0")
        exit(1)
    # init pymunk sim
    space = pymunk.Space()
    space.add(ship.body, ship.shape)
    space.gravity = (0, 0)
    staticBody = space.static_body  # create a static body for friction constraints
    # create the pymunk objects and the polygon patches for the ice
    polygons = [
        create_polygon(
            space, staticBody, (obs['vertices'] - np.array(obs['centre'])).tolist(),
            *obs['centre'], density=obstacle_density
        )
        for obs in costmap_obj.obstacles
    ]
    # From pure pursuit
    state = State(x=start_pos[0], y=start_pos[1], yaw=0.0, v=0.0)
    target_course = TargetCourse(path.path[0], path.path[1])
    target_ind = target_course.search_target_index(state)
    # init PID controller
    pid = PID(Kp, Ki, Kd, 0)
    pid.output_limits = (-1, 1)  # limit on PID output

    # generator to end matplotlib animation when it reaches the goal
    def gen():
        nonlocal at_goal
        i = 0
        while not at_goal:
            i += 1
            yield i
        # NOTE(review): under PEP 479 (Python 3.7+) raising StopIteration
        # inside a generator surfaces as RuntimeError — confirm intent.
        raise StopIteration  # should stop animation

    def animate(frame, queue_state, pipe_path):
        # one animation frame: step physics, steer with PID, optionally
        # exchange state/paths with the replanning process, redraw artists
        nonlocal at_goal
        steps = 10
        # move simulation forward 20 ms seconds:
        for x in range(steps):
            space.step(0.02 / steps)
        # get current state
        ship_pos = (ship.body.position.x, ship.body.position.y, 0)  # straight ahead of boat is 0
        # check if ship has made it past the goal line
        if ship.body.position.y >= goal_pos[1]:
            at_goal = True
            print("\nAt goal, shutting down...")
            plt.close(plot_obj.map_fig)
            plt.close(plot_obj.sim_fig)
            queue_state.close()
            shutdown_event.set()
            return []
        # Pymunk takes left turn as negative and right turn as positive in ship.body.angle
        # To get proper error, we must flip the sign on the angle, as to calculate the setpoint,
        # we look at a point one lookahead distance ahead, and find the angle to that point with
        # arctan2, but using this, we will get positive values on the left and negative values on the right
        # As the angular velocity in pymunk uses the same convention as ship.body.angle, we must flip the sign
        # of the output as well
        output = -pid(-ship.body.angle)
        # should play around with frequency at which new state data is sent
        if frame % 20 == 0 and frame != 0 and replan:
            # update costmap and polygons
            to_add, to_remove = costmap_obj.update(polygons, ship.body.position.y)
            assert len(costmap_obj.obstacles) <= costmap_obj.total_obs
            # remove polygons if any
            for obs in to_remove:
                polygons.remove(obs)
            # add polygons if any
            polygons.extend([
                create_polygon(
                    space, staticBody, (obs['vertices'] - np.array(obs['centre'])).tolist(),
                    *obs['centre'], density=obstacle_density
                )
                for obs in to_add
            ])
            print("Total polygons", len(polygons))
            try:
                # empty queue to ensure latest state data is pushed
                queue_state.get_nowait()
            except Empty:
                pass
            # send updated state via queue
            queue_state.put({
                'ship_pos': ship_pos,
                'ship_body_angle': ship.body.angle,
                'costmap': costmap_obj.cost_map,
                'obstacles': costmap_obj.obstacles,
            }, block=False)
            print('\nSent new state data!')
        # check if there is a new path
        if pipe_path.poll():
            # get new path
            path_data = pipe_path.recv()
            new_path = path_data['path']  # this is the full path and in the correct order i.e. start -> goal
            print('\nReceived replanned path!')
            # compute swath cost of new path up until the max y distance of old path for a fair comparison
            # note, we do not include the path length in the cost
            full_swath, full_cost, current_cost = swath.compute_swath_cost(
                costmap_obj.cost_map, new_path, ship.vertices, threshold_dist=path.path[1][-1]
            )
            try:
                assert full_cost >= current_cost  # sanity check
            except AssertionError:
                print("Full and partial swath costs", full_cost, current_cost)
            path_expired = False
            prev_cost = None
            # check if old path is 'expired' regardless of costs
            if (path.path[1][-1] - ship_pos[1]) < horizon / 2:
                path_expired = True
            else:
                # clip old path based on ship y position
                old_path = path.clip_path(ship_pos[1])
                # compute cost of clipped old path
                _, prev_cost, _ = swath.compute_swath_cost(costmap_obj.cost_map, old_path, ship.vertices)
                print('\nPrevious Cost: {prev_cost:.3f}'.format(prev_cost=prev_cost))
                print('Current Cost: {current_cost:.3f}\n'.format(current_cost=current_cost))
            if path_expired or current_cost < prev_cost:
                if path_expired:
                    print("Path expired, applying new path regardless of cost!")
                else:
                    print("New path better than old path!")
                path.new_path_cnt += 1
                plot_obj.update_path(
                    new_path, full_swath, path_data['path_nodes'],
                    path_data['smoothing_nodes'], path_data['nodes_expanded']
                )
                # update to new path
                path.path = new_path
                ship.set_path_pos(0)
                # update pure pursuit objects with new path
                target_course.update(path.path[0], path.path[1])
                state.update(ship.body.position.x, ship.body.position.y, ship.body.angle)
                # update costmap and map fig
                plot_obj.update_map(costmap_obj.cost_map)
                plot_obj.map_fig.canvas.draw()
            else:
                print("Old path better than new path")
                path.old_path_cnt += 1
        if ship.path_pos < np.shape(path.path)[1] - 1:
            # Translate linear velocity into direction of ship
            x_vel = math.sin(ship.body.angle)
            y_vel = math.cos(ship.body.angle)
            mag = math.sqrt(x_vel ** 2 + y_vel ** 2)
            x_vel = x_vel / mag * vel
            y_vel = y_vel / mag * vel
            ship.body.velocity = Vec2d(x_vel, y_vel)
            # Assign output of PID controller to angular velocity
            ship.body.angular_velocity = output
            # Update the pure pursuit state
            state.update(ship.body.position.x, ship.body.position.y, ship.body.angle)
            # Get look ahead index
            ind = target_course.search_target_index(state)
            if ind != ship.path_pos:
                # Find heading from current position to look ahead point
                ship.set_path_pos(ind)
                dy = path.path[1][ind] - ship.body.position.y
                dx = path.path[0][ind] - ship.body.position.x
                angle = np.arctan2(dy, dx) - a_star.first_initial_heading
                # set setpoint for PID controller
                pid.setpoint = angle
        # at each step animate ship and obstacle patches
        plot_obj.animate_ship(ship, horizon, move_yaxis_threshold)
        plot_obj.animate_obstacles(polygons)
        return plot_obj.get_sim_artists()

    # multiprocessing setup
    lifo_queue = Queue(maxsize=1)  # LIFO queue to send state information to A*
    conn_recv, conn_send = Pipe(duplex=False)  # pipe to send new path to controller and for plotting
    shutdown_event = Event()
    # setup a process to run A*
    print('\nStart process...')
    gen_path_process = Process(
        target=gen_path,
        args=(lifo_queue, conn_send, shutdown_event, ship, prim, costmap_obj,
              swath_dict, a_star, goal_pos, horizon, smooth_path)
    )
    gen_path_process.start()
    # init vars used in animation methods
    at_goal = False
    # start animation in main process
    anim = animation.FuncAnimation(plot_obj.sim_fig,
                                   animate,
                                   frames=gen,
                                   fargs=(lifo_queue, conn_recv,),
                                   interval=20,
                                   blit=False,
                                   repeat=False,
                                   )
    if save_animation:
        anim.save(os.path.join('gifs', file_name), writer=animation.PillowWriter(fps=30))
    plt.show()
    total_dist_moved = 0
    # Compare cost maps
    for i, j in zip(orig_obstacles, polygons):
        pos = (j.body.position.x, j.body.position.y)
        area = j.area
        if area > 4:
            total_dist_moved = a_star.dist(i['centre'], pos) * (area/2) + total_dist_moved
    print('TOTAL DIST MOVED', total_dist_moved)
    print('Old/new path counts', '\n\told path', path.old_path_cnt, '\n\tnew path', path.new_path_cnt)
    shutdown_event.set()
    print('\n...done with process')
    gen_path_process.join()
    print('Completed multiprocessing')
    # get response from user for saving costmap
    if save_costmap:
        costmap_obj.save_to_disk()
    return total_dist_moved, init_plan_time
'no_preview' : arguments.no_preview, 'debug' : arguments.debug, 'execute_programs' : [ [int(x[0]), x[1] ] for x in arguments.execute_program ] } device_args = {'cpu_only' : arguments.cpu_only, 'force_gpu_idx' : arguments.force_gpu_idx, } from mainscripts import Trainer Trainer.main(args, device_args) p = subparsers.add_parser( "train", help="Trainer") p.add_argument('--training-data-src-dir', required=True, action=fixPathAction, dest="training_data_src_dir", help="Dir of extracted SRC faceset.") p.add_argument('--training-data-dst-dir', required=True, action=fixPathAction, dest="training_data_dst_dir", help="Dir of extracted DST faceset.") p.add_argument('--pretraining-data-dir', action=fixPathAction, dest="pretraining_data_dir", default=None, help="Optional dir of extracted faceset that will be used in pretraining mode.") p.add_argument('--model-dir', required=True, action=fixPathAction, dest="model_dir", help="Model dir.") p.add_argument('--model', required=True, dest="model_name", choices=Path_utils.get_all_dir_names_startswith ( Path(__file__).parent / 'models' , 'Model_'), help="Type of model") p.add_argument('--no-preview', action="store_true", dest="no_preview", default=False, help="Disable preview window.") p.add_argument('--debug', action="store_true", dest="debug", default=False, help="Debug samples.") p.add_argument('--cpu-only', action="store_true", dest="cpu_only", default=False, help="Train on CPU.") p.add_argument('--force-gpu-idx', type=int, dest="force_gpu_idx", default=-1, help="Force to choose this GPU idx.") p.add_argument('--execute-program', dest="execute_program", default=[], action='append', nargs='+') p.set_defaults (func=process_train) def process_convert(arguments): os_utils.set_process_lowest_prio() args = {'training_data_src_dir' : arguments.training_data_src_dir, 'input_dir' : arguments.input_dir, 'output_dir' : arguments.output_dir, 'aligned_dir' : arguments.aligned_dir, 'model_dir' : arguments.model_dir, 'model_name' : 
arguments.model_name,
def load(self):
    """Parse the project description and a per-host config from disk.

    Populates self.args / description / user / group / hosts from the
    description file, then parses the config for the current host — or,
    with self.autofix set, the first available config so the project can
    be reconfigured for this host.

    :return: True when the project was loaded; False when no description
        file exists, or when self.nocreate is set and the local host is
        not hooked.
    """
    localhost_hooked = False
    # parse project description
    if not self.descrpath.isfile():
        return False
    descr = open(self.descrpath.abs()).readlines()
    for line in descr:
        ltokens = line.split()
        if ltokens[0] == 'project':
            # some paranoia
            if self.name != ltokens[1]:
                die('load: names do not match (%s and %s)\n' % (self.name, ltokens[1]))
        else:
            # every other key -> its remaining tokens
            self.args[ltokens[0]] = ltokens[1:]
    self.description = self.args['description'][0]
    self.user = self.args['cred'][0]
    self.group = self.args['cred'][1]
    self.hosts = self.args['hosts']
    # find project config for current host
    self.confpathlist = []
    for host in self.hosts:
        cfname = Path(self, PROJECTCONF_HOSTFMT % host)
        if cfname.isfile():
            self.confpathlist.append(cfname.rel())
        else:
            # die?
            print 'ERROR: config %s for host %s not found' % (cfname, host)
    self.deployattrs = {}
    confpath = self.confpath
    # if not found, schedule for reconfiguration
    if HOSTNAME in self.hosts and self.confpath.rel() in self.confpathlist:
        localhost_hooked = True
    if not localhost_hooked and self.autofix:
        print 'NOTE: project is not hooked to %s' % HOSTNAME
        print 'reconfiguring project'
        # fall back to the first host's config as a template
        confpath = Path(self, self.confpathlist[0])
    if self.nocreate and not localhost_hooked:
        #return True
        return False
    # if found, parse config for local host
    # if not, parse first available config for generation of local one
    conf = open(confpath.abs()).readlines()
    for line in conf:
        ltokens = line.split()
        if len(ltokens) < 1:
            continue  # skip empties
        if ltokens[0] == 'ip':
            self.deployattrs['ip'] = ltokens[1]
        else:
            # import project args to args dict
            nodeploy = False
            # a leading '!' marks a feature that must not be deployed
            if ltokens[0][0] == '!':
                nodeploy = True
                ltokens[0] = ltokens[0][1:]
            self.args[ltokens[0]] = ltokens[1:]
            # add feature to project features list if used
            for ft in features.FEATURES:
                if ft.fid == ltokens[0]:
                    #self._featureappend(ft, ltokens[1:])
                    # XXX: feature has args in self.project.args on load()
                    self._featureappend(ft, nodeploy=nodeploy)
    # reconfigure features for localhost if scheduled
    if not localhost_hooked and self.autofix:
        self.hosts.append(HOSTNAME)
        # rewrite project config
        self._writeconf(felist=[fe.create() for fe in self.features], hostname=HOSTNAME)
        # rewrite project description
        self._writedescr()
    # test if the project uses subversion
    if os.path.isdir(os.path.join(self.treepath, '.svn')):
        self.svn = True
    return True
class Project:
    """A deployable project rooted at PROJECTROOT/<name>.

    Persistent state lives in two plain-text files inside the project
    tree: a description file (name, description, credentials, host list)
    and one config file per host (hook scripts, ip, feature lines).
    Features from features.FEATURES are re-attached when a project is
    loaded from disk.
    """

    # %-template for the description file.  load() parses it back with
    # str.split(), so every directive must stay on its own line.
    DESCR_FMT = """\
project %(name)s
description %(descr)s
cred %(user)s %(group)s
hosts %(hosts)s
"""

    # %-template for the per-host config file header written by _writeconf().
    CONF_FMT = """\
hook_up %(scriptdir)s/%(hostname)s_up
hook_down %(scriptdir)s/%(hostname)s_down
ip %(ip)s
"""

    def __init__(self, name, *args, **kwargs):
        """Load an existing project, or prepare a new one when 'create'
        is present in kwargs.

        Recognized kwargs: ip, autofix, nocreate, create, descr, user,
        group, features.
        """
        # '.' is shorthand for "project named after the current directory"
        if name == '.':
            name = os.getcwd().split('/')[-1]
        self.name = name
        self.treepath = os.path.join(PROJECTROOT, self.name)
        #self.descrpath = os.path.join(self.treepath, PROJECTDESCR)
        #self.confpath = os.path.join(self.treepath, PROJECTCONF)
        self.descrpath = Path(self, PROJECTDESCR)
        self.confpath = Path(self, PROJECTCONF)
        self.confpathlist = [self.confpath.rel()]
        self.args = {}
        self.features = []
        self.tree = TREE
        self.deployattrs = {
            # explicit 'ip' kwarg wins over the autodetected address
            'ip': 'ip' in kwargs and kwargs['ip'] or utils.ifconfig_ip(),
        }
        self.hosts = []
        self.fqdnpool = []  # used to keep track of used fqdn to
                            # not make them overlap in different features
        # set to True whether you want to control project automatically,
        # i.e. reconfigure features for local host
        self.autofix = 'autofix' in kwargs and kwargs['autofix'] or False
        # set to False if load()/__init__() do not need to perform
        # additional stuff
        self.nocreate = 'nocreate' in kwargs and True or False
        self.svn = False
        self.loaded = self.load()
        if not 'create' in kwargs:
            # found an existing project or just requested to pass
            return
        else:
            # creating a new one
            self.description = kwargs['descr']
            self.user = kwargs['user']
            self.group = kwargs['group']
            self.featurelist = kwargs['features']
            self.hosts.append(HOSTNAME)

    def __str__(self):
        return 'project %s (%s)' % (self.name, self.description)

    def describe(self):
        """Return a human-readable multi-line summary of the project."""
        return """
name = %s
ip = %s
descr = %s
user = %s
group = %s
features = %s
hosts = %s
""" % (self.name, self.deployattrs['ip'], self.description, self.user,
       self.group, self.features, self.hosts)

    def _featureappend(self, feature, args=None, priority=-1, nodeploy=False):
        """Instantiate *feature* for this project and register it.

        A negative *priority* appends; otherwise the feature is inserted
        at that index.  Returns the new feature instance.
        """
        #if not feature.__class__ in features.FEATURES:
        #    die('undefined feature')
        #if DEBUG:
        #    print 'featureappend: %s %s' % (feature, args)
        #softlist = [ft.software for ft in self.features]
        #if feature.software in softlist:
        #    die('feature %s: software %s is used by <%s>\n', feature, feature.software, feature)
        nfeature = feature(project=self, fargs=args, nodeploy=nodeploy)
        if priority < 0:
            self.features.append(nfeature)
        else:
            self.features.insert(priority, nfeature)
        return nfeature

    def _writedescr(self):
        """Write the description file from current state and echo it."""
        descr = open(self.descrpath.abs(), 'w')
        descr.write(Project.DESCR_FMT % {
            'name': self.name,
            'descr': self.description,
            'user': self.user,
            'group': self.group,
            'hosts': ' '.join(self.hosts),
            #'features': ' '.join([fe.split()[0] for fe in felist]),
        })
        descr.close()
        print '\n> %s' % self.descrpath
        print open(self.descrpath.abs()).read()

    def _writeconf(self, felist=[], hostname=HOSTNAME):
        """Write the host config file (header plus one line per feature)
        and echo it.  NOTE: the mutable default is only iterated, never
        mutated, so it is safe here.
        """
        conf = open(self.confpath.abs(), 'w')
        conf.write(Project.CONF_FMT % {
            'hostname': hostname,
            'scriptdir': self.getpath('scripts').rel(),
            'ip': self.deployattrs['ip'],
        })
        # add features to config
        for fe in felist:
            conf.write('%s\n' % fe)
        conf.close()
        print '\n> %s' % self.confpath.abs()
        print open(self.confpath.abs()).read()

    def getpath(self, node, trail=''):
        """Build a Path for tree node *node*, optionally appending *trail*."""
        return Path(self, self.tree[node] + trail)

    def load(self):
        """Load project state from the description and per-host config
        files; re-attach features; optionally reconfigure for HOSTNAME
        when autofix is set.  Returns True on success.
        """
        localhost_hooked = False
        # parse project description
        if not self.descrpath.isfile():
            return False
        descr = open(self.descrpath.abs()).readlines()
        for line in descr:
            ltokens = line.split()
            if ltokens[0] == 'project':
                # some paranoia: directory name must match the recorded name
                if self.name != ltokens[1]:
                    die('load: names do not match (%s and %s)\n' % (self.name, ltokens[1]))
            else:
                self.args[ltokens[0]] = ltokens[1:]
        self.description = self.args['description'][0]
        self.user = self.args['cred'][0]
        self.group = self.args['cred'][1]
        self.hosts = self.args['hosts']
        # find project config for current host
        self.confpathlist = []
        for host in self.hosts:
            cfname = Path(self, PROJECTCONF_HOSTFMT % host)
            if cfname.isfile():
                self.confpathlist.append(cfname.rel())
            else:
                # die?  kept non-fatal: just report the missing config
                print 'ERROR: config %s for host %s not found' % (cfname, host)
        self.deployattrs = {}
        confpath = self.confpath
        # if not found, schedule for reconfiguration
        if HOSTNAME in self.hosts and self.confpath.rel() in self.confpathlist:
            localhost_hooked = True
        if not localhost_hooked and self.autofix:
            print 'NOTE: project is not hooked to %s' % HOSTNAME
            print 'reconfiguring project'
            confpath = Path(self, self.confpathlist[0])
        if self.nocreate and not localhost_hooked:
            #return True
            return False
        # if found, parse config for local host
        # if not, parse first available config for generation of local one
        conf = open(confpath.abs()).readlines()
        for line in conf:
            ltokens = line.split()
            if len(ltokens) < 1:
                continue  # skip empties
            if ltokens[0] == 'ip':
                self.deployattrs['ip'] = ltokens[1]
            else:
                # import project args to args dict
                nodeploy = False
                # leading '!' marks a feature that must not be deployed
                if ltokens[0][0] == '!':
                    nodeploy = True
                    ltokens[0] = ltokens[0][1:]
                self.args[ltokens[0]] = ltokens[1:]
                # add feature to project features list if used
                for ft in features.FEATURES:
                    if ft.fid == ltokens[0]:
                        #self._featureappend(ft, ltokens[1:])
                        # XXX: feature has args in self.project.args on load()
                        self._featureappend(ft, nodeploy=nodeploy)
        # reconfigure features for localhost if scheduled
        if not localhost_hooked and self.autofix:
            self.hosts.append(HOSTNAME)
            # rewrite project config
            self._writeconf(felist=[fe.create() for fe in self.features], hostname=HOSTNAME)
            # rewrite project description
            self._writedescr()
        # test if the project uses subversion
        if os.path.isdir(os.path.join(self.treepath, '.svn')):
            self.svn = True
        return True

    def create(self):
        """Create the project on disk: tree, credentials, features, hook
        scripts, description and config files.  Dies if the project
        already exists.
        """
        if self.loaded:
            die('create: project already loaded/exists')
        if os.path.exists(self.treepath):
            die('create: project root exists on %s\n' % HOSTNAME)
        # create internal list of requested features
        for fe in self.featurelist:
            for ft in features.FEATURES:
                if ft.fid == fe[0]:
                    self._featureappend(ft, fe[1:])
        # resolve feature dependencies
        for fe in self.features:
            fe.depresolve()
        # create tree
        os.mkdir(self.treepath, 0775)
        for node in self.tree:
            os.mkdir(os.path.join(self.treepath, self.tree[node]), 0775)
        # check credentials existance on localhost
        if not self.group in [g[0] for g in grp.getgrall()]:
            cmd = '/usr/sbin/groupadd %s' % self.group
            utils.cmd(cmd)
        if not self.user in [p[0] for p in pwd.getpwall()]:
            cmd = '/usr/sbin/useradd -d %s -g %s %s' % (self.treepath, self.group, self.user)
            utils.cmd(cmd)
        # create features
        # create() should return config string, so keep them in a list
        felist = [fe.create() for fe in self.features]
        # create hook scripts (empty shell stubs, one per direction)
        hook_up = os.path.join(self.getpath('scripts').abs(), '%s_up' % HOSTNAME)
        open(hook_up, 'w').write('#!/bin/sh\n')
        os.chmod(hook_up, 0774)
        hook_down = os.path.join(self.getpath('scripts').abs(), '%s_down' % HOSTNAME)
        open(hook_down, 'w').write('#!/bin/sh\n')
        os.chmod(hook_down, 0774)
        # write project description
        self._writedescr()
        # write project config
        self._writeconf(felist)
        return

    def touch(self):
        """Re-apply ownership and group-write permissions on the tree."""
        utils.cmd('chown -R %s:%s %s' % (self.user, self.group, self.treepath))
        utils.cmd('chmod -R g+rw %s' % self.treepath)
        return

    def delete(self):
        # not implemented
        pass

    def deploy(self):
        """Deploy all features; returns False when already deployed."""
        self.deployctl = DeployCtl(self)
        if self.deployctl.deployed:
            return False
        for fe in self.features:
            fe.deploy()
        return self.deployctl.dump()

    def unlink(self):
        """Undeploy all features; returns False when not deployed."""
        self.deployctl = DeployCtl(self)
        if not self.deployctl.deployed:
            return False
        for fe in self.features:
            fe.unlink()
        return self.deployctl.dump()

    @classmethod
    def svncheckout(cls, name):
        """Check out project *name* from subversion into PROJECTROOT and
        return the loaded Project (or False on failure).
        """
        if Project(name, nocreate=True).loaded:
            print 'project %s exists' % name
            return False
        rc = utils.cmd('svn checkout %s/%s %s' % (SUBVERSION_PATH, name,
            os.path.join(PROJECTROOT, name)), system=True)
        if rc[0] != 0:
            die('svn error')
        nproj = Project(name, nocreate=True)
        if not nproj.loaded:
            print 'project %s does not seem like a valid one' % name
            return False
        # NOTE(review): 'self' is undefined in a classmethod -- this
        # assert would raise NameError if reached; probably meant nproj.svn
        assert self.svn == True
        return nproj

    def svnimport(self):
        """Import the project tree into subversion and switch the tree
        to a working copy.
        """
        if not self.loaded:
            # NOTE(review): 'name' is undefined here -- probably self.name
            die('%s not loaded (it does not exist?)' % name)
        rc = utils.cmd('svn import -m \'%s initial import\' %s %s/%s' % (self.name, self.treepath, SUBVERSION_PATH, self.name), system=True)
        if rc[0] != 0:
            die('svn error')
        rc = utils.cmd('svn checkout --force %s/%s %s' % (SUBVERSION_PATH, self.name, self.treepath), system=True)
        if rc[0] != 0:
            die('svn error')
        rc = utils.cmd('cd %s; svn propset svn:ignore * logs;' \
            'svn commit -m \'ignore logs\'' % self.treepath)
        #assert self.svn == True

    def featureadd(self, featuredesc):
        """Add a feature described by [fid, args...]; returns False when
        a similar feature already exists or creation fails.
        """
        fe = None
        # test if we have similar feature
        for fe in self.features:
            if fe.cmp(featuredesc):
                return False
        # add it
        for ft in features.FEATURES:
            if ft.fid == featuredesc[0]:
                fe = self._featureappend(ft, featuredesc[1:])
                break
        fe.depresolve()
        rc = fe.create()
        if not rc:
            return False
        # sync tree: make sure every tree node exists on disk
        for node in self.tree:
            newpath = os.path.join(self.treepath, self.tree[node])
            if not os.path.exists(newpath):
                os.mkdir(newpath, 0775)
        # update host config (append the feature's config line)
        conf = open(self.confpath.abs(), 'a')
        conf.write('%s\n' % rc)
        conf.close()
        print '\n> %s' % self.confpath.abs()
        print open(self.confpath.abs()).read()
        return True

    def featuredrop(self, featuredesc):
        """Delete the feature matching *featuredesc* and strip its line
        from the host config; returns True on success.
        """
        fe = None
        for fe in self.features:
            if fe.cmp(featuredesc):
                break
        if not fe:
            return False
        # drop
        tok = fe.delete()
        if not tok:
            return False
        # update host config
        conf = open(self.confpath.abs()).readlines()
        # NOTE(review): this aliases conf instead of copying it, so the
        # loop mutates the list it iterates -- likely meant conf[:]
        confc = conf
        for line in confc:
            if line.startswith(tok):
                conf.remove(line)
        open(self.confpath.abs(), 'w').write(''.join(conf))
        print '\n> %s' % self.confpath.abs()
        print open(self.confpath.abs()).read()
        return True

    def up(self):
        """Run the host's hook_up script as the project user, if any."""
        if 'hook_up' in self.args:
            script = Path(self, self.args['hook_up'][0])
            utils.cmd('sudo -u %s -H %s' % (self.user, script.abs()), system=True)

    def down(self):
        """Run the host's hook_down script as the project user, if any."""
        if 'hook_down' in self.args:
            script = Path(self, self.args['hook_down'][0])
            utils.cmd('sudo -u %s -H %s' % (self.user, script.abs()), system=True)
def _on_change_sg(self, *_):
    """Persist the Saved Games path from the text field into the config."""
    normalized = Path(self.sg.text()).abspath()
    Config().saved_games_path = str(normalized)
def down(self):
    """Run the configured hook_down script as the project user, if any."""
    if 'hook_down' not in self.args:
        return
    script = Path(self, self.args['hook_down'][0])
    utils.cmd('sudo -u %s -H %s' % (self.user, script.abs()), system=True)
class Miz:
    """Read/write wrapper around a DCS ``.miz`` mission archive.

    A ``.miz`` is a zip archive containing serialized Lua tables
    (``mission``, the l10n ``dictionary`` and ``mapResource``).  This
    class unzips the archive into a private temporary directory, decodes
    those tables with SLTP, and can re-encode and re-zip them.

    Intended for use as a context manager::

        with Miz('some.miz') as miz:
            ... miz.mission ...
    """

    def __init__(self, path_to_miz_file, temp_dir=None, keep_temp_dir: bool = False):
        """
        :param path_to_miz_file: path to an existing ``*.miz`` file
        :param temp_dir: deprecated, must be left as None
        :param keep_temp_dir: when True, the temp dir is NOT removed on
            context exit (useful for debugging)
        :raises FileNotFoundError: the path does not exist
        :raises TypeError: the path is not a regular file
        :raises ValueError: the extension is not ``.miz``
        """
        self.miz_path = Path(path_to_miz_file)
        if not self.miz_path.exists():
            raise FileNotFoundError(path_to_miz_file)
        if not self.miz_path.isfile():
            raise TypeError(path_to_miz_file)
        if not self.miz_path.ext == '.miz':
            raise ValueError(path_to_miz_file)
        if temp_dir is not None:
            raise PendingDeprecationWarning()
        self.keep_temp_dir = keep_temp_dir
        self.tmpdir = Path(tempfile.mkdtemp('EMFT_'))
        logger.debug('temporary directory: {}'.format(self.tmpdir.abspath()))
        self.zip_content = None
        # decoded Lua tables and their SLTP qualifier counterparts
        self._mission = None
        self._mission_qual = None
        self._l10n = None
        self._l10n_qual = None
        self._map_res = None
        self._map_res_qual = None

    def __enter__(self):
        logger.debug('instantiating new Mission object as a context')
        self.unzip()
        self._decode()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type:
            # keep the temp dir around so the failure can be inspected
            logger.error('there were error with this mission, keeping temp dir at "{}" and re-raising'.format(
                self.tmpdir.abspath()))
            logger.error('{}\n{}'.format(exc_type, exc_val))
            return False
        else:
            logger.debug('closing Mission object context')
            if not self.keep_temp_dir:
                logger.debug('removing temp dir: {}'.format(self.tmpdir.abspath()))
                self.tmpdir.rmtree()

    @property
    def mission_file(self):
        """Path of the unzipped ``mission`` Lua file."""
        return self.tmpdir.joinpath('mission')

    @property
    def dictionary_file(self):
        """Path of the unzipped l10n ``dictionary`` Lua file."""
        return self.tmpdir.joinpath('l10n', 'DEFAULT', 'dictionary')

    @property
    def map_res_file(self):
        """Path of the unzipped ``mapResource`` Lua file."""
        return self.tmpdir.joinpath('l10n', 'DEFAULT', 'mapResource')

    @property
    def mission(self) -> Mission:
        """Decoded Mission object; only available after :meth:`_decode`."""
        if self._mission is None:
            raise RuntimeError('mission has not been decoded yet')
        return self._mission

    @property
    def l10n(self) -> dict:
        """Decoded l10n dictionary; only available after :meth:`_decode`."""
        if self._l10n is None:
            raise RuntimeError('l10n dictionary has not been decoded yet')
        return self._l10n

    @property
    def map_res(self) -> dict:
        """Decoded map resource table; only available after :meth:`_decode`."""
        if self._map_res is None:
            raise RuntimeError('map resource has not been decoded yet')
        return self._map_res

    @staticmethod
    def reorder(miz_file_path, target_dir, skip_options_file):
        """Unzip, decode and re-encode *miz_file_path*, then mirror the
        normalized content into *target_dir*.

        :param miz_file_path: source miz archive
        :param target_dir: directory to synchronize with the miz content
        :param skip_options_file: when True, the per-user 'options' file
            is excluded from the mirror (avoids spurious diffs)
        """
        with Miz(miz_file_path) as m:

            ignore = ['options'] if skip_options_file else []

            def mirror_dir(src, dst):
                """Recursively copy new or changed entries from src to dst."""
                logger.debug('{} -> {}'.format(src, dst))
                diff_ = dircmp(src, dst, ignore)
                diff_list = diff_.left_only + diff_.diff_files
                logger.debug('differences: {}'.format(diff_list))
                for x in diff_list:
                    source = Path(diff_.left).joinpath(x)
                    target = Path(diff_.right).joinpath(x)
                    logger.debug('looking at: {}'.format(x))
                    if source.isdir():
                        logger.debug('isdir: {}'.format(x))
                        if not target.exists():
                            logger.debug('creating: {}'.format(x))
                            target.mkdir()
                        mirror_dir(source, target)
                    else:
                        logger.debug('copying: {}'.format(x))
                        source.copy2(diff_.right)
                for sub in diff_.subdirs.values():
                    assert isinstance(sub, dircmp)
                    mirror_dir(sub.left, sub.right)

            m.unzip(overwrite=True)
            m._decode()
            m._encode()

            mirror_dir(m.tmpdir, target_dir)

    def _decode(self):
        """Parse the unzipped Lua files into Python objects (map resource,
        l10n dictionary, then the mission itself)."""
        logger.info('decoding lua tables')
        if not self.zip_content:
            self.unzip(overwrite=False)
        Progress.start('Decoding MIZ file', length=3)
        Progress.set_label('Decoding map resource')
        logger.debug('reading map resource file')
        with open(self.map_res_file, encoding=ENCODING) as f:
            self._map_res, self._map_res_qual = SLTP().decode(f.read())
        Progress.set_value(1)
        Progress.set_label('Decoding dictionary file')
        logger.debug('reading l10n file')
        with open(self.dictionary_file, encoding=ENCODING) as f:
            self._l10n, self._l10n_qual = SLTP().decode(f.read())
        Progress.set_value(2)
        Progress.set_label('Decoding mission file')
        logger.debug('reading mission file')
        with open(self.mission_file, encoding=ENCODING) as f:
            mission_data, self._mission_qual = SLTP().decode(f.read())
            # the Mission object needs the l10n table to resolve strings
            self._mission = Mission(mission_data, self._l10n)
        Progress.set_value(3)
        logger.info('decoding done')

    def _encode(self):
        """Serialize the in-memory tables back into the unzipped files."""
        logger.info('encoding lua tables')
        # FIX: progress title said 'Decoding MIZ file' during encoding
        Progress.start('Encoding MIZ file', length=3)
        Progress.set_label('Encoding map resource')
        logger.debug('encoding map resource')
        with open(self.map_res_file, mode='w', encoding=ENCODING) as f:
            f.write(SLTP().encode(self._map_res, self._map_res_qual))
        Progress.set_value(1)
        # FIX: label previously repeated 'Encoding map resource'
        Progress.set_label('Encoding dictionary file')
        logger.debug('encoding l10n dictionary')
        with open(self.dictionary_file, mode='w', encoding=ENCODING) as f:
            f.write(SLTP().encode(self.l10n, self._l10n_qual))
        Progress.set_value(2)
        Progress.set_label('Encoding mission file')
        logger.debug('encoding mission dictionary')
        with open(self.mission_file, mode='w', encoding=ENCODING) as f:
            f.write(SLTP().encode(self.mission.d, self._mission_qual))
        Progress.set_value(3)
        logger.info('encoding done')

    def unzip(self, overwrite: bool = False):
        """Extract the miz archive into the temp dir and sanity-check it.

        :param overwrite: allow re-extracting over a previous extraction
        :raises FileExistsError: already unzipped and overwrite is False
        :raises BadZipFile: the miz is not a valid zip archive
        :raises FileNotFoundError: a mandatory member is missing on disk
        """
        if self.zip_content and not overwrite:
            raise FileExistsError(self.tmpdir.abspath())
        logger.debug('unzipping miz to temp dir')
        try:
            with ZipFile(self.miz_path.abspath()) as zip_file:
                logger.debug('reading infolist')
                self.zip_content = [f.filename for f in zip_file.infolist()]
                for item in zip_file.infolist():
                    # not using ZipFile.extractall() for security reasons
                    assert isinstance(item, ZipInfo)
                    logger.debug('unzipping item: {}'.format(item))
                    try:
                        zip_file.extract(item, self.tmpdir.abspath())
                    except Exception:  # narrowed from bare except; still re-raised
                        logger.error('failed to extract archive member: {}'.format(item))
                        raise
        except BadZipFile:
            raise BadZipFile(self.miz_path.abspath())
        except Exception:  # narrowed from bare except; still re-raised
            logger.exception('error while unzipping miz file: {}'.format(self.miz_path.abspath()))
            raise
        logger.debug('checking miz content')
        # FIX: the original used map(join, [tmpdir], [5 names]), which stops
        # at the shorter iterable and therefore only ever checked 'mission'
        for relative_name in (
                'mission',
                'options',
                'warehouses',
                'l10n/DEFAULT/dictionary',
                'l10n/DEFAULT/mapResource'):
            miz_item = join(self.tmpdir.abspath(), relative_name)
            if not exists(miz_item):
                logger.error('missing file in miz: {}'.format(miz_item))
                raise FileNotFoundError(miz_item)
        # every archive member must have landed on disk
        for filename in self.zip_content:
            p = self.tmpdir.joinpath(filename)
            if not p.exists():
                raise FileNotFoundError(p.abspath())
        logger.debug('all files have been found, miz successfully unzipped')

    def zip(self, destination=None):
        """Re-encode the Lua tables and write a fresh miz archive.

        :param destination: target path; defaults to
            '<name>_EMFT.miz' next to the source miz
        :return: absolute path of the written archive
        """
        self._encode()
        if destination is None:
            destination = Path(self.miz_path.dirname()).joinpath('{}_EMFT.miz'.format(self.miz_path.namebase))
        destination = Path(destination).abspath()
        logger.debug('zipping mission to: {}'.format(destination))
        # seed the target file with a minimal valid miz before rewriting it
        with open(destination, mode='wb') as f:
            f.write(dummy_miz)
        with ZipFile(destination, mode='w', compression=8) as _z:  # 8 == ZIP_DEFLATED
            for f in self.zip_content:
                abs_path = self.tmpdir.joinpath(f).abspath()
                logger.debug('injecting in zip file: {}'.format(abs_path))
                _z.write(abs_path, arcname=f)
        return destination
def _show_skin_in_model_viewer(self, row):
    """Open the DCS ModelViewer preloaded with the skin selected at *row*.

    Rewrites ModelViewer's autoexec.lua (backing it up first) so that it
    loads the right model/livery and, when allowed by config, mounts the
    user's VFS texture paths.
    """
    skin_name = self.proxy.data(self.proxy.index(row, 0))
    ac_name = self.proxy.data(self.proxy.index(row, 1))
    install_root = Path(self._active_dcs_install.install_path)
    mv_autoexec_cfg = install_root.joinpath('Config', 'ModelViewer', 'autoexec.lua')
    mv_exe = install_root.joinpath('bin', 'ModelViewer.exe')
    # both files must exist before touching anything
    for required in (mv_autoexec_cfg, mv_exe):
        if not required.exists():
            logger.error('file not found: {}'.format(required.abspath()))
            return
    # collect VFS texture mounts from the main DCS autoexec, if present
    mount_lines = set()
    if self._active_dcs_install.autoexec_cfg:
        for vfs_path in self._active_dcs_install.autoexec_cfg.mounted_vfs_paths:
            mount_lines.add('mount_vfs_texture_path("{}")\n'.format(vfs_path))
    # one-time backup of the original autoexec.lua
    backup_path = mv_autoexec_cfg.dirname().joinpath('autoexec.lua_EMFT_BACKUP')
    if not backup_path.exists():
        logger.info('backing up "{}" -> "{}"'.format(mv_autoexec_cfg.abspath(), backup_path.abspath()))
        mv_autoexec_cfg.copy2(backup_path.abspath())
    # strip previously-injected mount/load lines (only when changes are allowed)
    lines = []
    for line in mv_autoexec_cfg.lines():
        injected = Config().allow_mv_autoexec_changes and (
            RE_MOUNT_LINE.match(line)
            or RE_LOAD_MODEL_LINE.match(line)
            or RE_LOAD_LIVERY_LINE.match(line))
        if not injected:
            lines.append(line)
    # model_path = 'LoadModel("Bazar/World/Shapes/{}.edm")'.format(self._active_dcs_install.get_object_model(ac_name))
    lines.insert(0, 'LoadLivery("{ac_name}","{skin_name}")'.format(ac_name=ac_name, skin_name=skin_name))
    lines.insert(0, 'LoadModel("Bazar/World/Shapes/{ac_name}.edm")'.format(ac_name=ac_name))
    if Config().allow_mv_autoexec_changes:
        for line in mount_lines:
            lines.insert(0, line)
    mv_autoexec_cfg.write_lines(lines)
    os.startfile(mv_exe.abspath())
""" import re import textwrap from utils import Path import os.path as path __all__ = ( "Ninja", ) def escape_path(word): return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:') home = Path.joined("/", "home", "istvan") class Ninja(object): def __init__(self, output, width=78): self.output = output self.width = width @classmethod def in_path(cls, *args, **kwargs): p = path.join(*args, "build.ninja") return cls(open(p, "w"), **kwargs) def __del__(self): self.close() def newline(self):
from sys import path as sys_path, argv

from utils import Path

# make the script's own directory importable so the analyzer modules resolve
main_path = str(Path().script_dir())
sys_path.append(main_path)

from lexicalanalyzer import Lexical
from syntaticanalyzer import Syntatic
from semanthicanalyzer import Semanthic

# normalize to a trailing slash so default file paths concatenate cleanly
if main_path[len(main_path) - 1] != '/':
    main_path += '/'


def main(file_path=main_path + 'teste.pas', output_early=True):
    """Run the full compiler pipeline (lexical -> syntactic -> semantic)
    on the file given as first CLI argument, or *file_path* by default.

    When *output_early* is True, output is generated before the semantic
    pass.  NOTE(review): generate_output is defined elsewhere -- confirm
    it is in scope when this module runs.
    """
    file = argv[1] if len(argv) > 1 else file_path
    with open(file, 'r') as file:
        tokens = Lexical(file).split()
    print(
        'Análise Léxica bem sucedida. A lista de tokens gerados está disponível no arquivo tokens.log'
    )
    scope_manager = Syntatic(tokens).parse()
    print(
        'Análise Sintática bem sucedida. A tabela de símbolos está disponível no arquivo symbols-table.log'
    )
    if output_early:
        generate_output(tokens, scope_manager)
    Semanthic(tokens, scope_manager).analyze()
def path(self, *pargs):
    """Helper that builds a Path rooted at this project's directory."""
    base_dir = self.project_dir
    return Path(*pargs, base=base_dir)
def _sg_browse(self):
    """Ask the user for a Saved Games directory and update the field."""
    start_dir = Path(self.sg.text()).dirname()
    chosen = BrowseDialog.get_directory(self, 'Saved Games directory', start_dir)
    if not chosen:
        return
    self.sg.setText(str(Path(chosen).abspath()))