def validate_patch(patch64, patch32):
    """Validate that both patch URLs live under the repo base URL and map to
    existing, non-empty files in the working copy.

    Args:
        patch64: URL of the 64-bit patch file.
        patch32: URL of the 32-bit patch file.

    Raises:
        Exception: if either URL's scheme/netloc/path does not match the repo
            base, or the corresponding local file is missing or empty.
    """
    # Working-copy root is two levels above this module's directory.
    wc_base = os.path.abspath(os.path.join(BASE_PATH, "..", ".."))
    base_parse = urllib.parse.urlsplit(REPO_BASE, scheme='http')
    p64_parse = urllib.parse.urlsplit(patch64, scheme='http')
    p32_parse = urllib.parse.urlsplit(patch32, scheme='http')
    # Index 0 = scheme, 1 = netloc, 2 = path (urlsplit tuple layout).
    if not (p64_parse[0] == p32_parse[0] == base_parse[0]):
        raise Exception("URL scheme doesn't match repo base URL scheme")
    if not (p64_parse[1] == p32_parse[1] == base_parse[1]):
        raise Exception(
            "URL network location doesn't match repo base URL network location"
        )
    # commonpath is segment-aware, so "/base-evil" cannot pass for "/base".
    if posixpath.commonpath((base_parse[2], p64_parse[2], p32_parse[2])) != \
            posixpath.commonpath((base_parse[2],)):
        raise Exception("URL is not subpath of repo base path")
    p64_posix_relpath = posixpath.relpath(p64_parse[2], base_parse[2])
    p32_posix_relpath = posixpath.relpath(p32_parse[2], base_parse[2])
    # Convert POSIX URL components into native filesystem paths.
    p64_comp = posixpath_components(p64_posix_relpath)
    p32_comp = posixpath_components(p32_posix_relpath)
    p64_filepath = os.path.join(wc_base, *p64_comp)
    p32_filepath = os.path.join(wc_base, *p32_comp)
    if not os.path.exists(p64_filepath):
        raise Exception("File %s not found!" % p64_filepath)
    if not os.path.exists(p32_filepath):
        raise Exception("File %s not found!" % p32_filepath)
    if os.path.getsize(p64_filepath) == 0:
        raise Exception("File %s empty!" % p64_filepath)
    # BUG FIX: original tested os.path.exists(...) == 0 here, which was
    # unreachable (existence already verified above) and never caught an
    # empty 32-bit patch file. Use getsize, mirroring the p64 check.
    if os.path.getsize(p32_filepath) == 0:
        raise Exception("File %s empty!" % p32_filepath)
def _open_file(self, filename):
    """Open a file from the emulated VFS and register a descriptor for it.

    Returns the stored fd handle, or -1 when the file does not exist.
    Raises RuntimeError if the path escapes the vfs root.
    """
    requested = filename
    # /dev/urandom is emulated specially rather than mapped into the tree.
    if filename == '/dev/urandom':
        logger.info("File opened '%s'" % filename)
        return self._store_fd('/dev/urandom', 'urandom')
    # Drop a single leading slash so the path joins under the vfs root.
    relative = filename[1:] if filename.startswith("/") else filename
    resolved = posixpath.normpath(posixpath.join(self._root_path, relative))
    # Segment-aware containment check: reject anything that normalizes
    # to a location outside the jail root.
    if posixpath.commonpath([resolved, self._root_path]) != self._root_path:
        raise RuntimeError("Emulated binary tried to escape vfs jail.")
    if not os.path.isfile(resolved):
        logger.info("File does not exist %s" % resolved)
        return -1
    logger.info("File opened '%s'" % requested)
    flags = os.O_RDWR
    # O_BINARY only exists on Windows; add it when available.
    if hasattr(os, "O_BINARY"):
        flags |= os.O_BINARY
    return self._store_fd(requested, os.open(resolved, flags=flags))
def _list(cls, client, spec, first_level=False):
    """
    List tree using recursive then non recursive API. Yields raw results.

    Args:
        client (airfs.storage.github._api.ApiV3): Client.
        spec (dict): Item spec.
        first_level (bool): It True, returns only first level objects.

    Yields:
        tuple: Relative path, Absolute path, spec, headers, has content bool
    """
    # Lazily resolve and cache the tree SHA on the spec so later calls
    # (and recursive listings) can reuse it.
    if "tree_sha" not in spec:
        parent = spec["parent"] if spec["object"] == cls else spec["object"]
        spec["tree_sha"] = parent.head(client, spec)["tree_sha"]
    cwd = spec.get("path", "").rstrip("/")
    cwd_index = len(cwd)
    # When there is no cwd, the root itself trivially "exists".
    cwd_seen = not cwd
    if cwd_index:
        # Include the ending "/"
        cwd_index += 1
    # Recursive listing is required when filtering under a subdirectory
    # or when the caller wants the full tree (not just the first level).
    response = client.get(
        cls.LIST.format(**spec),
        never_expire=True,
        params=dict(recursive=cwd or not first_level),
    )[0]
    for headers in response["tree"]:
        abspath = headers["path"]
        if cwd:
            # Skip entries outside the requested subdirectory
            # (commonpath is segment-aware, unlike startswith).
            if commonpath((abspath, cwd)) != cwd:
                continue
            relpath = abspath[cwd_index:]
        else:
            relpath = abspath
        if not relpath:
            # Entry is the cwd itself; it must be a directory ("tree").
            cls._raise_if_not_dir(headers["type"] == "tree", spec)
            # Do not yield current working directory itself
            cwd_seen = True
            continue
        yield relpath, abspath, spec, headers, False
    # If the requested path never appeared in the listing, it does not exist.
    if not cwd_seen:
        raise ObjectNotFoundError(path=spec["full_path"])
def translate_path(self, filename):
    """Map a guest path onto the host vfs root, refusing jail escapes.

    Returns the normalized host path; raises RuntimeError if the path
    would resolve outside ``self._root_path``.
    """
    # Strip a single leading slash so the join lands under the root.
    rel = filename[1:] if filename.startswith("/") else filename
    if os.name == 'nt':
        # Colons are reserved on Windows filesystems; neutralize them.
        rel = rel.replace(':', '_')
    candidate = posixpath.normpath(posixpath.join(self._root_path, rel))
    # commonpath compares whole segments, so "/jailbreak" cannot pass
    # for a root of "/jail", and "../" tricks are caught after normpath.
    if posixpath.commonpath([candidate, self._root_path]) != self._root_path:
        raise RuntimeError("Emulated binary tried to escape vfs jail.")
    return candidate
def files_for_receive(cli_opts: TransferCLIOptions, dest: str, files: List[File], remote_home: str, specs: List[str]) -> Iterator[File]:
    """Yield transfer entries for received files, grouped by originating spec.

    In 'mirror' mode paths are recreated relative to their remote location
    (rewritten under '~' when they all live below the remote home); otherwise
    files land in/at ``dest``.
    """
    # Bucket files by the index of the spec that matched them.
    spec_map: Dict[int, List[File]] = {i: [] for i in range(len(specs))}
    for f in files:
        spec_map[f.spec_id].append(f)
    # The first remote path of each spec stands in for the spec itself.
    # NOTE(review): assumes every spec matched at least one file — an empty
    # bucket would raise IndexError here; confirm callers guarantee this.
    spec_paths = [spec_map[i][0].remote_path for i in range(len(specs))]
    if cli_opts.mode == 'mirror':
        try:
            common_path = posixpath.commonpath(spec_paths)
        except ValueError:
            # commonpath raises on empty input or mixed abs/relative paths.
            common_path = ''
        home = remote_home.rstrip('/')
        if common_path and common_path.startswith(home + '/'):
            # All specs live under the remote home: rewrite them as
            # '~'-relative so they mirror into the local home.
            spec_paths = [
                posixpath.join('~', posixpath.relpath(x, home))
                for x in spec_paths
            ]
        for spec_id, files_for_spec in spec_map.items():
            spec = spec_paths[spec_id]
            tree = make_tree(files_for_spec, os.path.dirname(expand_home(spec)))
            for x in tree:
                yield x.entry
    else:
        number_of_source_files = sum(map(len, spec_map.values()))
        # dest is treated as a directory when it ends in a separator, when
        # multiple sources are being received, or when it already exists
        # as a directory. (On POSIX os.altsep is None; a path never ends
        # in None, so the membership test is safe.)
        dest_is_dir = dest[-1] in (
            os.sep, os.altsep) or number_of_source_files > 1 or os.path.isdir(dest)
        for spec_id, files_for_spec in spec_map.items():
            if dest_is_dir:
                # Place each spec's files under dest, keyed by basename.
                dest_path = os.path.join(
                    dest, posixpath.basename(files_for_spec[0].remote_path))
                tree = make_tree(files_for_spec, os.path.dirname(expand_home(dest_path)))
                for x in tree:
                    yield x.entry
            else:
                # Single file to a non-directory dest: rename onto dest.
                f = files_for_spec[0]
                f.expanded_local_path = dest
                yield f
def find(self, path, detail=False, **kwargs):
    """Breadth-first walk of the cached GDrive folder ids, collecting all
    files located under ``path``.

    Returns a list of names, or a name->info dict when ``detail`` is true.
    """
    bucket, base = self.split_path(path)
    visited = set()
    frontier = [self._ids_cache["ids"].copy()]
    found = []
    while frontier:
        batch = frontier.pop()
        query_ids = {}
        for folder_id, folder_name in batch.items():
            # Only descend into folders located under the requested base,
            # and never revisit a folder id.
            if posixpath.commonpath([base, folder_name]) != base:
                continue
            if folder_id in visited:
                continue
            query_ids[folder_id] = folder_name
        if not query_ids:
            continue
        visited |= query_ids.keys()
        next_batch = {}
        frontier.append(next_batch)
        for item in self._gdrive_list_ids(query_ids):
            parent_id = item["parents"][0]["id"]
            item_path = posixpath.join(query_ids[parent_id], item["title"])
            if item["mimeType"] == FOLDER_MIME_TYPE:
                # Folder: queue for the next round and cache its id.
                next_batch[item["id"]] = item_path
                self._cache_path_id(item_path, item["id"])
            else:
                found.append({
                    "name": posixpath.join(bucket, item_path),
                    "type": "file",
                    "size": int(item["fileSize"]),
                    "checksum": item["md5Checksum"],
                })
    if detail:
        return {entry["name"]: entry for entry in found}
    return [entry["name"] for entry in found]
def __call__(self, environ, start_response):
    """WSGI dispatch: hand the request to the app registered under the
    longest prefix that covers the request path."""
    # SCRIPT_NAME + PATH_INFO reconstitutes the full request path.
    urlpath = (environ['SCRIPT_NAME'] + environ['PATH_INFO']) or '/'
    # commonpath is segment-aware, so "/app2" never matches prefix "/app".
    matches = [
        prefix for prefix in self.keys()
        if posixpath.commonpath([prefix, urlpath]) == prefix
    ]
    if not matches:
        return self.no_route_found(environ, start_response)
    match = max(matches, key=len)
    logger.debug("For %r found %r routes, selected %r",
                 urlpath, matches, match)
    app = self[match]
    # Shift the matched prefix out of PATH_INFO into SCRIPT_NAME so the
    # sub-application sees paths relative to its mount point.
    environ['SCRIPT_NAME'] = urlpath[:len(match)]
    environ['PATH_INFO'] = urlpath[len(match):]
    return app(environ, start_response)
def _ls_recursive(self, path_info, detail=False):
    """Lazily walk cached GDrive folder ids, yielding every file found
    under ``path_info.path`` (as dicts when ``detail`` is true)."""
    root = path_info.path
    visited = set()
    pending = [self._ids_cache["ids"].copy()]
    while pending:
        candidates = pending.pop()
        # Keep only unvisited folders that live under the requested root.
        query_ids = {
            fid: fname
            for fid, fname in candidates.items()
            if posixpath.commonpath([root, fname]) == root
            and fid not in visited
        }
        if not query_ids:
            continue
        visited.update(query_ids)
        children = {}
        pending.append(children)
        for item in self._gdrive_list_ids(query_ids):
            parent = item["parents"][0]["id"]
            item_path = posixpath.join(query_ids[parent], item["title"])
            if item["mimeType"] == FOLDER_MIME_TYPE:
                # Folder: schedule for the next pass and cache its id.
                children[item["id"]] = item_path
                self._cache_path_id(item_path, item["id"])
            elif detail:
                yield {
                    "type": "file",
                    "name": item_path,
                    "size": item["fileSize"],
                    "checksum": item["md5Checksum"],
                }
            else:
                yield item_path
def check(paths, expected):
    # Assert commonpath agrees with `expected` for the str inputs, then
    # again for the os.fsencode'd bytes form of the same paths.
    self.assertEqual(posixpath.commonpath(paths), expected)
    encoded = [os.fsencode(p) for p in paths]
    self.assertEqual(posixpath.commonpath(encoded), os.fsencode(expected))
def check(paths, expected):
    # Verify commonpath for both the str inputs and their fsencoded
    # bytes equivalents, in that order.
    for convert in (lambda p: p, os.fsencode):
        self.assertEqual(
            posixpath.commonpath([convert(p) for p in paths]),
            convert(expected))
def get_base_url() -> str:
    """Return the URL path prefix shared by the upload-initialize and
    upload-finalize endpoints."""
    endpoint_paths = [
        reverse('s3_file_field:upload-initialize'),
        reverse('s3_file_field:upload-finalize'),
    ]
    # Use posixpath so URL paths are always parsed with forward slashes,
    # independent of the host OS path separator.
    return posixpath.commonpath(endpoint_paths)
def update_event(self, inp=-1):
    # Read the path sequence from input 0 and publish its common path
    # on output 0.
    paths = self.input(0)
    self.set_output_val(0, posixpath.commonpath(paths))