def _build_presentation_stage(config, topic_set_id):
    print("Starting presentation stage for " + topic_set_id)
    for presentation_type in [item[1] for item in config.build_scripts[topic_set_id]
                              if item[0] == 'present']:
        link_output_dir = posixpath.join(
            posixpath.dirname(config.build_scripts[topic_set_id][('link', presentation_type)]),
            'out')
        _build_link_step(
            config=config,
            topic_set_id=topic_set_id,
            script=config.build_scripts[topic_set_id][('link', presentation_type)],
            output_dir=link_output_dir,
            synthesis_files=[x.replace('\\', '/') for x in glob(
                posixpath.join(
                    posixpath.dirname(config.build_scripts[topic_set_id][('resolve', None)]),
                    'out') + '/*')],
            catalog_files=glob(
                posixpath.join(config.content_set_build_dir, 'catalogs') + '/*'),
            object_files=[x.replace('\\', '/') for x in glob(
                posixpath.join(config.content_set_build_dir, 'objects') + '/*/*')])
        present_output_dir = posixpath.join(
            posixpath.dirname(config.build_scripts[topic_set_id][('present', presentation_type)]),
            'out')
        _build_present_step(
            config=config,
            topic_set_id=topic_set_id,
            script=config.build_scripts[topic_set_id][('present', presentation_type)],
            output_dir=present_output_dir,
            synthesis_files=[x.replace('\\', '/') for x in glob(
                posixpath.join(
                    posixpath.dirname(config.build_scripts[topic_set_id][('link', presentation_type)]),
                    'out') + '/*')],
            toc_files=glob(posixpath.join(config.content_set_build_dir, 'tocs') + '/*'),
            object_files=[x.replace('\\', '/') for x in glob(
                posixpath.join(config.content_set_build_dir, 'objects') + '/*/*')])
def parent_dir(self):
    if self.file_name:
        return posixpath.dirname(self._resource_path) + "/"
    elif self.is_root:
        return self._resource_path
    else:
        return posixpath.dirname(posixpath.dirname(self._resource_path)) + "/"
def request_in_scope(request):
    url = request.url
    purl = urlsplit(url)
    spurl = urlsplit(Shared.starturl)
    scope = Shared.options['scope']
    in_scope = False

    # check for scopes
    if scope == CRAWLSCOPE_DOMAIN:
        for pattern in Shared.allowed_domains:
            if re.match(pattern, purl.hostname):
                in_scope = True
                break

    elif scope == CRAWLSCOPE_DIRECTORY:
        if purl.hostname != spurl.hostname:
            in_scope = False
        else:
            path = [p for p in posixpath.dirname(purl.path).split("/") if p]
            spath = [p for p in posixpath.dirname(spurl.path).split("/") if p]
            in_scope = path[:len(spath)] == spath

    elif scope == CRAWLSCOPE_URL:
        in_scope = url == Shared.starturl

    # check for excluded urls
    for pattern in Shared.excluded_urls:
        if re.match(pattern, request.url):
            in_scope = False
            break

    return in_scope
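# A minimal, self-contained sketch (not part of the crawler above) of its
# CRAWLSCOPE_DIRECTORY check: both paths are split into components, and a
# candidate is in scope iff the start URL's directory components are a
# prefix of the candidate's.
def _dir_in_scope(candidate_path, start_path):
    path = [p for p in posixpath.dirname(candidate_path).split("/") if p]
    spath = [p for p in posixpath.dirname(start_path).split("/") if p]
    return path[:len(spath)] == spath

assert _dir_in_scope("/app/admin/users.php", "/app/index.php")
assert not _dir_in_scope("/other/index.php", "/app/index.php")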
def adjust_uri(self, uri, relativeto):
    """Called from within a Mako template, avoids adjusting the uri
    if it looks like an asset specification"""
    # Don't adjust asset spec names
    isabs = os.path.isabs(uri)
    if (not isabs) and (':' in uri):
        return uri
    if not(isabs) and ('$' in uri):
        return uri.replace('$', ':')
    if relativeto is not None:
        relativeto = relativeto.replace('$', ':')
        if not(':' in uri) and (':' in relativeto):
            if uri.startswith('/'):
                return uri
            pkg, relto = relativeto.split(':')
            _uri = posixpath.join(posixpath.dirname(relto), uri)
            return '{0}:{1}'.format(pkg, _uri)
        if not(':' in uri) and not(':' in relativeto):
            return posixpath.join(posixpath.dirname(relativeto), uri)
    # note: TemplateLookup.adjust_uri takes only (uri, relativeto); the stray
    # input_encoding keyword that had crept in here would raise a TypeError
    return TemplateLookup.adjust_uri(self, uri, relativeto)
def iso_country_to_countryball(isocode):
    """returns the countryball for given isocode

    omsk if file not found
    """
    if isocode is None:
        return 'unknown.png'
    if isocode == 'BAY':
        isocode = 'bavaria'
    elif isocode == 'TEX':
        isocode = 'texas'
    isocode = isocode.lower()
    # rather dirty hack to get the path
    basepath = os.path.join(dirname(dirname(__file__)), 'static', 'img', 'cb')
    if rfk.CONFIG.has_option('site', 'cbprefix'):
        prebasepath = os.path.join(basepath, rfk.CONFIG.get('site', 'cbprefix'))
        if os.path.exists(os.path.join(prebasepath, '{}.png'.format(isocode))):
            return '{}{}.png'.format(rfk.CONFIG.get('site', 'cbprefix'), isocode)
    if os.path.exists(os.path.join(basepath, '{}.png'.format(isocode))):
        return '{}.png'.format(isocode)
    else:
        return 'unknown.png'
def mknod(path, mode=None, device=None):
    # import stat explicitly here to fix a namespace issue.
    import stat
    if mode == None:
        mode = 00600 | stat.S_IFREG
    filename = posixpath.basename(path)
    dirname = posixpath.dirname(path)
    destdir = _findFileFromPath(dirname)
    access(posixpath.dirname(path), os.W_OK | os.X_OK)
    if not isinstance(destdir, FakeDir):
        raise OSError(errno.ENOTDIR, '')
    if filename in destdir.getChildren():
        raise OSError(errno.EEXIST, '')
    if mode & stat.S_IFREG:
        node = FakeFile(filename, mode)
    elif mode & stat.S_IFCHR:
        node = FakeDevice(filename, 'char', mode, device['major'], device['minor'])
    elif mode & stat.S_IFBLK:
        node = FakeDevice(filename, 'block', mode, device['major'], device['minor'])
    elif mode & stat.S_IFIFO:
        node = FakeFifo(filename, mode)
    else:
        raise OSError(errno.EINVAL, 'Invalid argument')
    destdir.linkChild(filename, node)
def main():
    chapter_files, other_files = get_filenames()

    # make previous of first file and next of last file to just bring
    # back to README
    prevs = ['README.md'] + chapter_files[:-1]
    nexts = chapter_files[1:] + ['README.md']

    print("Chapter files:")
    for prevpath, thispath, nextpath in zip(prevs, chapter_files, nexts):
        # all paths should be like 'section/file.md'
        where = posixpath.dirname(thispath)
        prev = posixpath.relpath(prevpath, where)
        next_ = posixpath.relpath(nextpath, where)
        extralinks = "[Previous](%s) | [Next](%s) |\n" % (prev, next_)
        end = END_TEMPLATE.format(
            toplevel='..', extralinks=extralinks, readmeheader=where)
        update_end(thispath, end)

    print()
    print("Other files:")
    for filename in other_files:
        where = posixpath.dirname(filename)
        end = END_TEMPLATE.format(
            toplevel=posixpath.relpath('.', where),
            extralinks="", readmeheader='list-of-contents')
        update_end(filename, end)
def validate_link(self, url, link):
    # Remove anchor
    link = re.sub(r'#[^#]*$', '', link)

    # Skip prefix
    if re.search(r'^(#|javascript:|mailto:|tel:)', link):
        return None

    # validate URL
    rx_url = re.match(r'(https?://)([^/:]+)(:[0-9]+)?([^\?]*)(\?.*)?', url)
    url_protocol = rx_url.group(1)
    url_host = rx_url.group(2)
    url_port = rx_url.group(3) if rx_url.group(3) else ''
    url_path = rx_url.group(4) if len(rx_url.group(4)) > 0 else '/'
    url_dir_path = dirname(url_path)

    # validate link and create a full url using above 'url'
    rx_link = re.match(r'((https?://)([^/:]+)(:[0-9]+)?)?([^\?]*)(\?.*)?', link)
    link_full_url = rx_link.group(1) is not None
    link_protocol = rx_link.group(2) if rx_link.group(2) else url_protocol
    link_host = rx_link.group(3) if rx_link.group(3) else url_host
    link_port = rx_link.group(4) if rx_link.group(4) else url_port
    link_path = quote(rx_link.group(5), '/%') if rx_link.group(5) else url_path
    link_query = quote(rx_link.group(6), '?=&%') if rx_link.group(6) else ''
    link_dir_path = dirname(link_path)

    if not link_full_url and not link.startswith('/'):
        link_path = normpath(join(url_dir_path, link_path))

    link_url = link_protocol + link_host + link_port + link_path + link_query
    return link_url
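# A hedged usage sketch of the resolution step above (validate_link itself
# needs its surrounding class): a relative link is joined onto the dirname
# of the page URL's path and normalized, much like a browser would do.
from posixpath import dirname, join, normpath

page_path = "/blog/2020/post.html"
assert normpath(join(dirname(page_path), "../images/a.png")) == "/blog/images/a.png"
assert normpath(join(dirname(page_path), "other.html")) == "/blog/2020/other.html"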
def __init__(self, json_data):
    build_settings = json_data["build_settings"]
    self.root_path = build_settings["root_path"]
    self.build_dir = build_settings["build_dir"]
    self.default_toolchain = build_settings["default_toolchain"]
    self.targets = {}
    for key, value in json_data["targets"].items():
        self.targets[key] = Target(key, value, self)

    # Build files are a bit special; we load all build files that gn knows about.
    # Build files belonging to target folders will be added to respective target
    # sources. For build files in the //build/ folder we load all surrounding
    # files, as the .gn and .gni files may include scripts and resources gn does
    # not know about.
    self.build_files = []
    with open(self.get_absolute_build_path() + "build.ninja.d", "r") as f:
        l = f.readline()
    args = l.split(" ")
    root_path = self.root_path
    if not root_path.endswith("/"):
        root_path += "/"
    known_build_files = set(args)
    processed_dirs = set()
    for arg in args:
        if arg.startswith(self.root_path):
            short_name = "//" + arg[len(root_path):]
            self.build_files.append(short_name)
            dir = posixpath.dirname(arg)
            if short_name.startswith("//build/") and dir not in processed_dirs:
                processed_dirs.add(dir)
                for file in os.listdir(dir):
                    ext = posixpath.splitext(file)[1]
                    if ext == ".pyc":
                        continue
                    path = dir + "/" + file
                    if path not in known_build_files and os.path.isfile(path):
                        short_name = "//" + path[len(root_path):]
                        self.build_files.append(short_name)
                        known_build_files.add(path)

    # Go through targets and add build files belonging to those
    for target_name, target in self.targets.items():
        source_dir = target.get_source_dir()
        for file in self.build_files:
            path = posixpath.dirname(file) + "/"
            if source_dir == path:
                target.sources.append(file)

    # create build target
    build_target = Target("//build:build",
                          {"type": "build_dir", "toolchain": self.default_toolchain},
                          self)
    for build_file in self.build_files:
        if (build_file.startswith(build_target.get_source_dir()) or
                posixpath.dirname(build_file) == "//" or  # Also add root files to build dir
                build_file == self.build_dir + "args.gn"):
            build_target.sources.append(build_file)
    self.targets[build_target.name] = build_target
def rename(self, src, dst):
    """
    Rename a file/directory from src to dst.

    Raises OSError on error.
    """
    src = self.abspath(src)
    dst = self.abspath(dst)
    logging.debug("rename %r -> %r" % (src, dst))
    self._listdir_cache.flush()
    # Check not renaming to itself
    if src == dst:
        logging.debug("Renaming %r to itself - doing nothing" % src)
        return
    # If dst is an existing directory, copy src inside it
    if self.isdir(dst):
        if dst:
            dst += "/"
        dst += posixpath.basename(src)
    # Check constraints for renaming a directory
    if self.isdir(src):
        if self.listdir(src):
            raise IOSError(ENOTEMPTY, "Can't rename non-empty directory: %s" % src)
        if self.isfile(dst):
            raise IOSError(ENOTDIR, "Can't rename directory to file")
    # Check not renaming to itself
    if src == dst:
        logging.debug("Renaming %r to itself - doing nothing" % src)
        return
    # Parse the paths now
    src_container_name, src_path = parse_fspath(src)
    dst_container_name, dst_path = parse_fspath(dst)
    logging.debug("`.. %r/%r -> %r/%r" % (src_container_name, src_path,
                                          dst_container_name, dst_path))
    # Check if we are renaming containers
    if not src_path and not dst_path and src_container_name and dst_container_name:
        return self._rename_container(src_container_name, dst_container_name)
    # ...otherwise can't deal with root stuff
    if not src_container_name or not src_path or not dst_container_name or not dst_path:
        raise IOSError(EACCES, "Can't rename to / from root")
    # Check destination directory exists
    if not self.isdir(posixpath.split(dst)[0]):
        raise IOSError(ENOENT,
                       "Can't copy %r to %r, destination directory doesn't exist" % (src, dst))
    # check dst container
    self._container_exists(dst_container_name)
    # Do the rename of the file/dir
    meta = self.conn.head_object(src_container_name, src_path)
    if 'x-object-manifest' in meta:
        # a manifest file
        headers = {'x-object-manifest': quote(meta['x-object-manifest'])}
    else:
        # regular file
        headers = {'x-copy-from': quote("/%s/%s" % (src_container_name, src_path))}
    self.conn.put_object(dst_container_name, dst_path, headers=headers, contents=None)
    # Delete src
    self.conn.delete_object(src_container_name, src_path)
    self._listdir_cache.flush(posixpath.dirname(src))
    self._listdir_cache.flush(posixpath.dirname(dst))
def create_file(self, request=None):
    # Build an index page
    import tempfile
    fd, tempname = tempfile.mkstemp()

    # FIXME: Shouldn't hardcode this
    urlroot = "/music/"

    if request:
        inet, addr, port = request.getHost()
        if port == 80:
            hostport = ''
        else:
            hostport = ':%d' % port
        import urllib
        server = urllib.quote('http%s://%s%s' % (
            request.isSecure() and 's' or '',
            request.getRequestHostname(),
            hostport), "/:")
        path = request.path[len(urlroot):]
    else:
        server = "http://unknown.org"
        path = self.cwd()

    os.write(fd, '<?xml version="1.0" encoding="utf-8"?>\n')
    cnt = self.select()
    os.write(fd, '<%s length="%s">\n' % (self.document_type, cnt))

    # Strip away "list/<stylesheet>", if present
    if posixpath.basename(posixpath.dirname(path)) == "list":
        path = posixpath.dirname(posixpath.dirname(path)) + "/"
    path = xml_fix_string(path).replace("%20", ' ') or '/'
    if path != '/':
        path = "".join([(elem and "<d>%s</d>" % elem)
                        for elem in path.split("/")])
    os.write(fd, ' <path>%s</path>\n' % path)

    self.write_body(fd)

    os.write(fd, "</%s>\n" % self.document_type)
    os.close(fd)
    self.cursor.close()
    self.cursor = None

    # Perform xslt transformation on the file
    from commands import getstatusoutput
    params = "--stringparam audiostore.root %s" % urlroot
    params += " --stringparam audiostore.url %s%s" % (server, urlroot)
    logger.info("xsltproc %s %s %s" % (params, self.xsltfile, tempname))
    st, output = getstatusoutput("xsltproc %s %s %s" %
                                 (params, self.xsltfile, tempname))
    self.file = StringIO(output)
def test_dirname(self):
    self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
    self.assertEqual(posixpath.dirname("/"), "/")
    self.assertEqual(posixpath.dirname("foo"), "")
    self.assertEqual(posixpath.dirname("////foo"), "////")
    self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
    self.assertRaises(TypeError, posixpath.dirname)
def _gitlsdirs(files, prefix_length):
    # Return directories managed by Git
    dirs = set()
    for file in files:
        dir = posixpath.dirname(file)
        while len(dir) > prefix_length:
            dirs.add(dir)
            dir = posixpath.dirname(dir)
    return dirs
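# Usage example for _gitlsdirs above: with prefix_length=0 every ancestor
# directory of each file is collected by repeatedly applying
# posixpath.dirname until the path shrinks to the prefix.
assert _gitlsdirs(["src/pkg/mod.py", "docs/index.md"], 0) == \
    {"src", "src/pkg", "docs"}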
def am_gem(fd, var, gem, am):
    gemre = re.compile(r'\.files *= *\[ *(.*[^ ]) *\]')
    rd = 'RUBY_DIR'
    if 'DIR' in gem:
        rd = gem['DIR'][0]
    rd = am_translate_dir(rd, am)
    fd.write('if HAVE_RUBYGEM\n')
    fd.write('all-local-%s:' % var)
    am['ALL'].append(var)
    for f in gem['FILES']:
        fd.write(' %s' % f[:-4])
    fd.write('\n')
    for f in gem['FILES']:
        srcs = list(map(lambda x: x.strip('" '),
                        gemre.search(open(os.path.join(am['CWDRAW'], f)).read())
                             .group(1).split(', ')))
        srcs.append(f)
        sf = f.replace('.', '_')
        am['INSTALL'].append(sf)
        am['UNINSTALL'].append(sf)
        fd.write('%s: %s\n' % (f[:-4], ' '.join(srcs)))
        dirs = []
        for src in srcs:
            if '/' in src:
                d = posixpath.dirname(src)
                if d not in dirs:
                    fd.write("\t[ '$(srcdir)' -ef . ] || mkdir -p '%s'\n" % posixpath.dirname(src))
                    dirs.append(d)
                    while '/' in d:
                        d = posixpath.dirname(d)
                        dirs.append(d)
            fd.write("\t[ '$(srcdir)' -ef . ] || cp -p '$(srcdir)/%s' '%s'\n" % (src, src))
        fd.write("\tgem build '%s'\n" % f)
        # use deprecated --rdoc and --ri options instead of --document=rdoc,ri
        # since we're still building on systems with old gem
        fd.write("\tgem install --local --install-dir ./'%s' --bindir .'%s' --force --rdoc --ri %s\n"
                 % (rd, am_translate_dir('bindir', am), f[:-4]))
        fd.write('mostlyclean-local: mostlyclean-local-%s\n' % sf)
        fd.write('.PHONY: mostlyclean-local-%s\n' % sf)
        fd.write('mostlyclean-local-%s:\n' % sf)
        for src in srcs:
            fd.write("\t[ '$(srcdir)' -ef . ] || rm -f '%s'\n" % src)
        for d in sorted(dirs, reverse=True):
            fd.write("\t[ '$(srcdir)' -ef . -o ! -d '%s' ] || rmdir '%s'\n" % (d, d))
        fd.write("install-exec-local-%s: %s\n" % (sf, f[:-4]))
        fd.write("\tmkdir -p $(DESTDIR)'%s'\n" % rd)
        fd.write("\tcp -a ./'%s'/* $(DESTDIR)'%s'\n" % (rd, rd))
        fd.write("uninstall-local-%s: %s\n" % (sf, f[:-4]))
        # remove "-0.1.gemspec" from end of `f'
        fd.write("\tgem uninstall --install-dir $(DESTDIR)'%s' '%s'\n" % (rd, f[:-12]))
        am['BUILT_SOURCES'].append(f[:-4])
        am['CLEAN'].append(f[:-4])
    fd.write('else\n')
    for f in gem['FILES']:
        sf = f.replace('.', '_')
        fd.write("install-exec-local-%s:\n" % sf)
        fd.write('uninstall-local-%s:\n' % sf)
    fd.write('endif\n')
def _ensure_parent_dirs(self):
    curdir = vcspath.dirname(self.node['path'])
    dirs_to_create = []
    while not self._svn_path_exists(curdir):
        dirs_to_create.append(curdir)
        curdir = vcspath.dirname(curdir)

    for curdir in reversed(dirs_to_create):
        log.debug('Creating missing directory "%s"', curdir)
        svn.fs.make_dir(self.txn_root, curdir)
def getEntryName(path):
    """
    Retrieve the top level name (not h5py object) associated to a given
    path despite being or not an NXentry group.
    """
    entry = path
    candidate = posixpath.dirname(entry)
    while len(candidate) > 1:
        entry = candidate
        candidate = posixpath.dirname(entry)
    return entry
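# Usage example for getEntryName above: repeated dirname calls walk up the
# HDF5 path until only the top-level component below the root remains.
assert getEntryName("/entry_1/instrument/detector/data") == "/entry_1"
assert getEntryName("/entry_1") == "/entry_1"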
def create_udf_data(con):
    ibis_home = posixpath.dirname(posixpath.dirname(os.path.abspath(__file__)))
    sep = os.sep
    path_list = ibis_home.split(sep)
    path_list += ['testing', 'udf']
    udf_dir = sep.join(path_list)
    build_list = path_list + ['build']
    build_dir = sep.join(build_list)
    subprocess.check_call('cmake . && make', shell=True, cwd=udf_dir)
    so_dir = pjoin(ENV.test_data_dir, 'udf')
    con.hdfs.put(so_dir, build_dir, verbose=True)
def _url_dirname(url_or_path):
    """Like posixpath.dirname, but preserves scheme:// prefix.

    Args:
      url_or_path: A string in the form of scheme://some/path OR /some/path.
    """
    match = re.match(r'([a-z]+://)(.*)', url_or_path)
    if match is None:
        return posixpath.dirname(url_or_path)
    url_prefix, path = match.groups()
    return url_prefix + posixpath.dirname(path)
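# Usage example for _url_dirname above: the scheme prefix survives, while a
# plain path falls back to ordinary posixpath.dirname behavior.
assert _url_dirname("gs://bucket/dir/file.txt") == "gs://bucket/dir"
assert _url_dirname("/some/local/path") == "/some/local"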
def iso_country_to_countryball(isocode):
    """returns the countryball for given isocode

    omsk if file not found
    """
    if isocode is None:
        return 'unknown.png'
    isocode = isocode.lower()
    # rather dirty hack to get the path
    basepath = os.path.join(dirname(dirname(__file__)), 'static', 'img', 'cb')
    if os.path.exists(os.path.join(basepath, '{}.png'.format(isocode))):
        return '{}.png'.format(isocode)
    else:
        return 'unknown.png'
def test_dirname(self):
    self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
    self.assertEqual(posixpath.dirname("/"), "/")
    self.assertEqual(posixpath.dirname("foo"), "")
    self.assertEqual(posixpath.dirname("////foo"), "////")
    self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")

    self.assertEqual(posixpath.dirname(b"/foo/bar"), b"/foo")
    self.assertEqual(posixpath.dirname(b"/"), b"/")
    self.assertEqual(posixpath.dirname(b"foo"), b"")
    self.assertEqual(posixpath.dirname(b"////foo"), b"////")
    self.assertEqual(posixpath.dirname(b"//foo//bar"), b"//foo")
def join_path(self, uri, parent):
    if os.path.isabs(uri) or ':' in uri:
        # we have an asset spec or absolute path
        return uri

    # make template lookup parent-relative
    if not os.path.isabs(parent) and ':' in parent:
        # parent is an asset spec
        ppkg, ppath = parent.split(':', 1)
        reluri = posixpath.join(posixpath.dirname(ppath), uri)
        return '{0}:{1}'.format(ppkg, reluri)

    # parent is just a normal path
    return posixpath.join(posixpath.dirname(parent), uri)
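# Usage sketch for join_path above (the logic never touches self, so None
# stands in for illustration): asset-spec parents keep their package prefix,
# plain paths resolve relative to the parent's directory.
assert join_path(None, "helper.mako", "mypkg:templates/base.mako") == \
    "mypkg:templates/helper.mako"
assert join_path(None, "helper.mako", "templates/base.mako") == \
    "templates/helper.mako"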
def rename(self, src, dst):
    '''Rename a file/directory from src to dst, raise OSError on error'''
    src = self.abspath(src)
    dst = self.abspath(dst)
    logging.debug("rename %r -> %r" % (src, dst))
    self._listdir_cache.flush()
    # Check not renaming to itself
    if src == dst:
        logging.debug("Renaming %r to itself - doing nothing" % src)
        return
    # If dst is an existing directory, copy src inside it
    if self.isdir(dst):
        if dst:
            dst += "/"
        dst += posixpath.basename(src)
    # Check constraints for renaming a directory
    if self.isdir(src):
        if self.listdir(src):
            raise IOSError(ENOTEMPTY, "Can't rename non-empty directory: '%s'" % src)
        if self.isfile(dst):
            raise IOSError(ENOTDIR, "Can't rename directory to file")
    # Check not renaming to itself
    if src == dst:
        logging.debug("Renaming %r to itself - doing nothing" % src)
        return
    # Parse the paths now
    src_container_name, src_path = parse_fspath(src)
    dst_container_name, dst_path = parse_fspath(dst)
    logging.debug("`.. %r/%r -> %r/%r" % (src_container_name, src_path,
                                          dst_container_name, dst_path))
    # Check if we are renaming containers
    if not src_path and not dst_path and src_container_name and dst_container_name:
        return self._rename_container(src_container_name, dst_container_name)
    # ...otherwise can't deal with root stuff
    if not src_container_name or not src_path or not dst_container_name or not dst_path:
        logging.info("Can't rename %r -> %r" % (src, dst))
        raise IOSError(EACCES, "Can't rename to / from root")
    # Check destination directory exists
    if not self.isdir(posixpath.split(dst)[0]):
        logging.info("Can't copy %r -> %r dst directory doesn't exist" % (src, dst))
        raise IOSError(ENOENT, "Can't copy %r -> %r dst directory doesn't exist" % (src, dst))
    # Do the rename of the file/dir
    src_container = self._get_container(src_container_name)
    dst_container = self._get_container(dst_container_name)
    src_obj = src_container.get_object(src_path)
    # Copy src -> dst
    src_obj.copy_to(dst_container_name, dst_path)
    # Delete src (the copy above already created dst)
    src_container.delete_object(src_path)
    self._listdir_cache.flush(posixpath.dirname(src))
    self._listdir_cache.flush(posixpath.dirname(dst))
def ensure_known_host(host_key, known_hosts='/root/.ssh/known_hosts'):
    """Make sure a host key exists in the known_hosts file.

    This is idempotent: running it again won't add the same key again.
    """
    if not exists(known_hosts, use_sudo=True):
        if not exists(posixpath.dirname(known_hosts), use_sudo=True):
            sudo('install -d -m700 %s' % posixpath.dirname(known_hosts))
        sudo('touch %s' % known_hosts)
    # Must use shell=True to work around Fabric bug, where it would fall
    # flat in contains() with an error ("sudo: export: command not
    # found") that is silently suppressed, resulting in always appending
    # the ssh key to /root/.ssh/known_hosts.  Probably because I use
    # `with settings(shell_env(LC_ALL='C.UTF-8')):`.
    append(known_hosts, host_key, use_sudo=True, shell=True)
def test_realpath_resolve_before_normalizing(self):
    # Bug #990669: Symbolic links should be resolved before we
    # normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
    # in the following hierarchy:
    # a/k/y
    #
    # and a symbolic link 'link-y' pointing to 'y' in directory 'a',
    # then realpath("link-y/..") should return 'k', not 'a'.
    try:
        os.mkdir(ABSTFN)
        os.mkdir(ABSTFN + "/k")
        os.mkdir(ABSTFN + "/k/y")
        os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")

        # Absolute path.
        self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
        # Relative path.
        with support.change_cwd(dirname(ABSTFN)):
            self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
                             ABSTFN + "/k")
    finally:
        test_support.unlink(ABSTFN + "/link-y")
        safe_rmdir(ABSTFN + "/k/y")
        safe_rmdir(ABSTFN + "/k")
        safe_rmdir(ABSTFN)
def get_cover(opf, opf_path, stream, reader=None):
    raster_cover = opf.raster_cover
    stream.seek(0)
    try:
        zf = ZipFile(stream)
    except:
        stream.seek(0)
        zf = LocalZipFile(stream)

    if raster_cover:
        base = posixpath.dirname(opf_path)
        cpath = posixpath.normpath(posixpath.join(base, raster_cover))
        if reader is not None and \
                reader.encryption_meta.is_encrypted(cpath):
            return
        try:
            member = zf.getinfo(cpath)
        except:
            pass
        else:
            f = zf.open(member)
            data = f.read()
            f.close()
            zf.close()
            return data

    return render_cover(opf, opf_path, zf, reader=reader)
def resolve_path(self, name, parent_path=None):
    if parent_path and not parent_path.startswith("<") and \
            not parent_path.startswith("/") and \
            not name.startswith("/"):
        file_dir = posixpath.dirname(parent_path)
        name = posixpath.normpath(posixpath.join(file_dir, name))
    return name
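# Usage sketch for resolve_path above (None stands in for the unused self):
# relative template names resolve against the parent template's directory,
# absolute names pass through untouched.
assert resolve_path(None, "footer.html", parent_path="pages/index.html") == \
    "pages/footer.html"
assert resolve_path(None, "/abs/footer.html", parent_path="pages/index.html") == \
    "/abs/footer.html"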
def run(self):
    self.env = env = self.state.document.settings.env
    self.genopt = {}
    self.warnings = []

    names = [x.strip().split()[0] for x in self.content
             if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])]
    items = self.get_items(names)
    nodes = self.get_table(items)

    if 'toctree' in self.options:
        suffix = env.config.source_suffix
        dirname = posixpath.dirname(env.docname)

        tree_prefix = self.options['toctree'].strip()
        docnames = []
        for name, sig, summary, real_name in items:
            docname = posixpath.join(tree_prefix, real_name)
            docname = posixpath.normpath(posixpath.join(dirname, docname))
            if docname not in env.found_docs:
                self.warn('toctree references unknown document %r' % docname)
            docnames.append(docname)

        tocnode = addnodes.toctree()
        tocnode['includefiles'] = docnames
        tocnode['entries'] = [(None, docname) for docname in docnames]
        tocnode['maxdepth'] = -1
        tocnode['glob'] = None

        tocnode = autosummary_toc('', '', tocnode)
        nodes.append(tocnode)

    return self.warnings + nodes
def postScore(self, submission, params):
    # Ensure admin access on the containing challenge phase
    phase = self.model('phase', 'challenge').load(
        submission['phaseId'], user=self.getCurrentUser(), exc=True,
        level=AccessType.ADMIN)

    submission['score'] = json.loads(cherrypy.request.body.read())
    submission = self.model('submission', 'covalic').save(submission)

    # Delete the scoring user's job token since the job is now complete.
    token = self.getCurrentToken()
    self.model('token').remove(token)

    user = self.model('user').load(submission['creatorId'], force=True)
    challenge = self.model('challenge', 'challenge').load(
        phase['challengeId'], force=True)
    covalicHost = posixpath.dirname(mail_utils.getEmailUrlPrefix())

    html = mail_utils.renderTemplate('covalic.submissionComplete.mako', {
        'phase': phase,
        'challenge': challenge,
        'submission': submission,
        'host': covalicHost
    })
    mail_utils.sendEmail(
        to=user['email'], subject='Your submission has been scored',
        text=html)

    return submission
def unpackFile(self, file_path, dest_dir=None):
    """
    Unzips a remote bundle to a remote location.

    If dest_dir is not specified, the bundle is extracted in the same
    directory.

    returns:
      success: output of unzip command
      failure: None
    """
    devroot = self.getDeviceRoot()
    if devroot == None:
        return None

    # if no dest_dir is passed in just set it to file_path's folder
    if not dest_dir:
        dest_dir = posixpath.dirname(file_path)

    if dest_dir[-1] != '/':
        dest_dir += '/'

    try:
        data = self._runCmds([{'cmd': 'unzp %s %s' % (file_path, dest_dir)}])
    except AgentError:
        return None

    return data
def _ExtractSources(target, target_dict, toplevel_dir):
    # |target| is either absolute or relative and in the format of the OS. Gyp
    # source paths are always posix. Convert |target| to a posix path relative
    # to |toplevel_dir_|. This is done to make it easy to build source paths.
    base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
    base_path_components = base_path.split('/')

    # Add a trailing '/' so that _AddSources() can easily build paths.
    if len(base_path):
        base_path += '/'

    if debug:
        print 'ExtractSources', target, base_path

    results = []
    if 'sources' in target_dict:
        _AddSources(target_dict['sources'], base_path, base_path_components,
                    results)
    # Include the inputs from any actions. Any changes to these affect the
    # resulting output.
    if 'actions' in target_dict:
        for action in target_dict['actions']:
            _ExtractSourcesFromAction(action, base_path, base_path_components,
                                      results)
    if 'rules' in target_dict:
        for rule in target_dict['rules']:
            _ExtractSourcesFromAction(rule, base_path, base_path_components,
                                      results)

    return results
def WriteActionForEach(out, target, project, sources, synthetic_dependencies):
    all_outputs = target.properties.get('outputs', [])
    inputs = target.properties.get('sources', [])
    # TODO: consider expanding 'output_patterns' instead.
    outputs_per_input = len(all_outputs) / len(inputs)
    for count, source in enumerate(inputs):
        source_abs_path = project.GetAbsolutePath(source)

        outputs = []
        output_directories = set()
        for output in all_outputs[outputs_per_input * count:
                                  outputs_per_input * (count + 1)]:
            output_abs_path = project.GetAbsolutePath(output)
            outputs.append(output_abs_path)
            output_directory = posixpath.dirname(output_abs_path)
            if output_directory:
                output_directories.add(output_directory)

        outputs_name = '${target}__output_' + str(count)
        SetVariableList(out, outputs_name, outputs)

        out.write('add_custom_command(OUTPUT ')
        WriteVariable(out, outputs_name)
        out.write('\n')

        if output_directories:
            out.write('  COMMAND ${CMAKE_COMMAND} -E make_directory "')
            out.write('" "'.join([CMakeStringEscape(d) for d in output_directories]))
            out.write('"\n')

        script = target.properties['script']
        # TODO: need to expand {{xxx}} in arguments
        arguments = target.properties['args']
        out.write('  COMMAND python "')
        out.write(CMakeStringEscape(project.GetAbsolutePath(script)))
        out.write('"')
        if arguments:
            out.write('\n    "')
            expand = functools.partial(ExpandPlaceholders, source_abs_path)
            out.write('"\n    "'.join([CMakeStringEscape(expand(a)) for a in arguments]))
            out.write('"')
        out.write('\n')

        out.write('  DEPENDS')
        if 'input' in sources:
            WriteVariable(out, sources['input'], ' ')
        out.write(' "')
        out.write(CMakeStringEscape(source_abs_path))
        out.write('"\n')

        # TODO: CMake 3.7 is introducing DEPFILE

        out.write('  WORKING_DIRECTORY "')
        out.write(CMakeStringEscape(project.build_path))
        out.write('"\n')

        out.write('  COMMENT "Action ${target} on ')
        out.write(CMakeStringEscape(source_abs_path))
        out.write('"\n')

        out.write('  VERBATIM)\n')

        synthetic_dependencies.add(outputs_name)
def parent(self):
    return DatastorePath(self.datastore, posixpath.dirname(self._rel_path))
def _is_child(self, path):
    return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
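# Illustration of the test above, assuming `at` holds zipfile.Path-style
# location strings: a path is a direct child when the dirname of its
# rstripped location equals this object's rstripped location.
assert posixpath.dirname("a/b/c/".rstrip("/")) == "a/b"         # child of "a/b/"
assert posixpath.dirname("a/b/c/d.txt".rstrip("/")) == "a/b/c"  # not a child of "a/b/"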
def EstablishConnection(self):
    """Finds a connection to the server and initializes the client.

    This method tries all pairs of location urls and known proxies until it
    finds one that works. It does so by downloading the server.pem from the
    GRR server and verifying it directly. Note that this also refreshes the
    server certificate.

    Returns:
      A boolean indicating success.
    """
    # This gets proxies from the platform specific proxy settings.
    proxies = client_utils.FindProxies()

    # Also try to connect directly if all proxies fail.
    proxies.append("")

    # Also try all proxies configured in the config system.
    proxies.extend(config_lib.CONFIG["Client.proxy_servers"])

    for server_url in config_lib.CONFIG["Client.control_urls"]:
        for proxy in proxies:
            try:
                proxydict = {}
                if proxy:
                    proxydict["http"] = proxy
                proxy_support = urllib2.ProxyHandler(proxydict)
                opener = urllib2.build_opener(proxy_support)
                urllib2.install_opener(opener)

                cert_url = "/".join((posixpath.dirname(server_url), "server.pem"))
                request = urllib2.Request(cert_url, None,
                                          {"Cache-Control": "no-cache"})
                handle = urllib2.urlopen(request, timeout=10)
                server_pem = handle.read()

                if "BEGIN CERTIFICATE" in server_pem:
                    # Now we know that this proxy is working. We still have
                    # to verify the certificate.
                    self.communicator.LoadServerCertificate(
                        server_certificate=server_pem,
                        ca_certificate=self.ca_cert)

                    # If we reach this point, the server can be reached and
                    # the certificate is valid.
                    self.server_certificate = server_pem
                    self.active_server_url = server_url
                    handle.close()
                    return True
            except urllib2.URLError:
                pass
            except Exception as e:  # pylint: disable=broad-except
                client_utils_common.ErrorOnceAnHour(
                    "Unable to verify server certificate at %s: %s", cert_url, e)
                logging.info("Unable to verify server certificate at %s: %s",
                             cert_url, e)

    # No connection is possible at all.
    logging.info("Could not connect to GRR server.")
    return False
def prepare(vobject_items, path, content_type, permissions, parent_permissions,
            tag=None, write_whole_collection=None):
    if (write_whole_collection or permissions and not parent_permissions):
        write_whole_collection = True
        tag = radicale_item.predict_tag_of_whole_collection(
            vobject_items, MIMETYPE_TAGS.get(content_type))
        if not tag:
            raise ValueError("Can't determine collection tag")
        collection_path = pathutils.strip_path(path)
    elif (write_whole_collection is not None and not write_whole_collection or
            not permissions and parent_permissions):
        write_whole_collection = False
        if tag is None:
            tag = radicale_item.predict_tag_of_parent_collection(vobject_items)
        collection_path = posixpath.dirname(pathutils.strip_path(path))
    props = None
    stored_exc_info = None
    items = []
    try:
        if tag:
            radicale_item.check_and_sanitize_items(
                vobject_items, is_collection=write_whole_collection, tag=tag)
            if write_whole_collection and tag == "VCALENDAR":
                vobject_components = []
                vobject_item, = vobject_items
                for content in ("vevent", "vtodo", "vjournal"):
                    vobject_components.extend(
                        getattr(vobject_item, "%s_list" % content, []))
                vobject_components_by_uid = itertools.groupby(
                    sorted(vobject_components, key=radicale_item.get_uid),
                    radicale_item.get_uid)
                for _, components in vobject_components_by_uid:
                    vobject_collection = vobject.iCalendar()
                    for component in components:
                        vobject_collection.add(component)
                    item = radicale_item.Item(collection_path=collection_path,
                                              vobject_item=vobject_collection)
                    item.prepare()
                    items.append(item)
            elif write_whole_collection and tag == "VADDRESSBOOK":
                for vobject_item in vobject_items:
                    item = radicale_item.Item(collection_path=collection_path,
                                              vobject_item=vobject_item)
                    item.prepare()
                    items.append(item)
            elif not write_whole_collection:
                vobject_item, = vobject_items
                item = radicale_item.Item(collection_path=collection_path,
                                          vobject_item=vobject_item)
                item.prepare()
                items.append(item)

        if write_whole_collection:
            props = {}
            if tag:
                props["tag"] = tag
            if tag == "VCALENDAR" and vobject_items:
                if hasattr(vobject_items[0], "x_wr_calname"):
                    calname = vobject_items[0].x_wr_calname.value
                    if calname:
                        props["D:displayname"] = calname
                if hasattr(vobject_items[0], "x_wr_caldesc"):
                    caldesc = vobject_items[0].x_wr_caldesc.value
                    if caldesc:
                        props["C:calendar-description"] = caldesc
            radicale_item.check_and_sanitize_props(props)
    except Exception:
        stored_exc_info = sys.exc_info()

    # Use generator for items and delete references to free memory early
    def items_generator():
        while items:
            yield items.pop(0)
    return (items_generator(), tag, write_whole_collection, props,
            stored_exc_info)
def Dirname(self):
    return posixpath.dirname(self._string_urn)
def _locate_code(self, zf, path_to_zip_file):
    names = [x if isinstance(x, unicode) else x.decode('utf-8')
             for x in zf.namelist()]
    names = [x[1:] if x[0] == '/' else x for x in names]

    plugin_name = None
    for name in names:
        name, ext = posixpath.splitext(name)
        if name.startswith('plugin-import-name-') and ext == '.txt':
            plugin_name = name.rpartition('-')[-1]

    if plugin_name is None:
        c = 0
        while True:
            c += 1
            plugin_name = 'dummy%d' % c
            if plugin_name not in self.loaded_plugins:
                break
    else:
        if self._identifier_pat.match(plugin_name) is None:
            raise InvalidPlugin(
                'The plugin at %r uses an invalid import name: %r' %
                (path_to_zip_file, plugin_name))

    pynames = [x for x in names if x.endswith('.py')]

    candidates = [posixpath.dirname(x) for x in pynames
                  if x.endswith('/__init__.py')]
    candidates.sort(key=lambda x: x.count('/'))
    valid_packages = set()
    for candidate in candidates:
        parts = candidate.split('/')
        parent = '.'.join(parts[:-1])
        if parent and parent not in valid_packages:
            continue
        valid_packages.add('.'.join(parts))

    names = OrderedDict()
    for candidate in pynames:
        parts = posixpath.splitext(candidate)[0].split('/')
        package = '.'.join(parts[:-1])
        if package and package not in valid_packages:
            continue
        name = '.'.join(parts)
        names[name] = zf.getinfo(candidate)

    # Legacy plugins
    if '__init__' not in names:
        for name in list(names.iterkeys()):
            if '.' not in name and name.endswith('plugin'):
                names['__init__'] = names[name]
                break

    if '__init__' not in names:
        raise InvalidPlugin(
            ('The plugin in %r is invalid. It does not '
             'contain a top-level __init__.py file') % path_to_zip_file)

    with self._lock:
        self.loaded_plugins[plugin_name] = (path_to_zip_file, names)

    return plugin_name
def files_data(container, *args):
    for name, path in iteritems(container.name_path_map):
        yield File(name, posixpath.dirname(name), posixpath.basename(name),
                   safe_size(container, name),
                   get_category(name, container.mime_map.get(name, '')))
def _create_parent_directories(project_path, project_id, object_client):
    parent_path = posixpath.dirname(project_path)
    object_client.create_directory(project_id, parent_path, parents=True)
def write_RV(EVENT_input_path):
    # open the event file and get the list of events
    with open(EVENT_input_path, 'r') as f:
        EVENT_in = json.load(f)

    event_list = EVENT_in['randomVariables'][0]['elements']
    evt = EVENT_in['Events'][0]
    data_dir = evt['data_dir']
    f_scale = evt['unitScaleFactor']

    file_sample_dict = {}
    for e_i, event in enumerate(event_list):
        filename, sample_id, __ = event.split('x')
        if filename not in file_sample_dict.keys():
            file_sample_dict.update({filename: [[], []]})
        file_sample_dict[filename][0].append(e_i)
        file_sample_dict[filename][1].append(int(sample_id))

    EDP_output = None

    for filename in file_sample_dict.keys():
        # get the header
        header_data = np.genfromtxt(posixpath.join(data_dir, filename),
                                    delimiter=',', names=True, max_rows=1)
        header = header_data.dtype.names

        data = np.genfromtxt(posixpath.join(data_dir, filename),
                             delimiter=',', skip_header=1)

        # get the number of columns and reshape the data
        col_count = len(header)
        if col_count > 1:
            data = data.reshape((data.size // col_count, col_count))
        else:
            data = np.atleast_1d(data)

        # choose the right samples
        samples = data[file_sample_dict[filename][1]]

        if EDP_output is None:
            if len(samples.shape) > 1:
                EDP_output = np.zeros((len(event_list), samples.shape[1]))
            else:
                EDP_output = np.zeros(len(event_list))

        EDP_output[file_sample_dict[filename][0]] = samples

    if len(EDP_output.shape) == 1:
        EDP_output = np.reshape(EDP_output, (EDP_output.shape[0], 1))

    EDP_output = EDP_output * f_scale

    index = np.reshape(np.arange(EDP_output.shape[0]), (EDP_output.shape[0], 1))
    EDP_output = np.concatenate([index, EDP_output], axis=1)

    working_dir = posixpath.dirname(EVENT_input_path)

    # prepare the header
    header_out = []
    for h_label in header:
        h_label = h_label.strip()
        if h_label.endswith('_h'):
            header_out.append(f'1-{h_label[:-2]}-1-1')
        elif h_label.endswith('_v'):
            header_out.append(f'1-{h_label[:-2]}-1-3')
        elif h_label.endswith('_x'):
            header_out.append(f'1-{h_label[:-2]}-1-1')
        elif h_label.endswith('_y'):
            header_out.append(f'1-{h_label[:-2]}-1-2')
        else:
            header_out.append(f'1-{h_label.strip()}-1-1')

    # join with posixpath so a missing trailing separator in working_dir
    # cannot silently produce a path like "/some/dirresponse.csv"
    np.savetxt(posixpath.join(working_dir, 'response.csv'), EDP_output,
               delimiter=',',
               header=',' + ', '.join(header_out),
               comments='')
def cp(self, from_info, to_info):
    self.hadoop_fs('mkdir -p {}'.format(posixpath.dirname(to_info['url'])),
                   user=to_info['user'])
    self.hadoop_fs('cp -f {} {}'.format(from_info['url'], to_info['url']),
                   user=to_info['user'])
def _path_to_checksum(self, path):
    relpath = posixpath.relpath(path, self.url)
    return posixpath.dirname(relpath) + posixpath.basename(relpath)
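# Hedged usage note for _path_to_checksum above: remote caches shard objects
# as <url>/<2-char prefix>/<rest>, so concatenating dirname and basename of
# the relative path with no separator reconstructs the flat checksum, e.g.
# (hypothetical values) with self.url == "ssh://host/cache":
#   path    -> "ssh://host/cache/d3/b07384d113edec49eaa6238ad5ff00"
#   relpath -> "d3/b07384d113edec49eaa6238ad5ff00"
#   result  -> "d3b07384d113edec49eaa6238ad5ff00"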
def dirname(path):
    return posixpath.dirname(normsep(path))
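# Usage sketch, assuming normsep rewrites OS-specific separators to "/":
# dirname then handles Windows-style input uniformly.
#   dirname("a\\b\\c.txt") -> "a/b"   (backslashes normalized first)
#   dirname("a/b/c.txt")   -> "a/b"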
def __init__(self, options):
    MochitestDesktop.__init__(self, options.flavor, vars(options))

    verbose = False
    if options.log_tbpl_level == 'debug' or options.log_mach_level == 'debug':
        verbose = True
    if hasattr(options, 'log'):
        delattr(options, 'log')

    self.certdbNew = True
    self.chromePushed = False
    self.mozLogName = "moz.log"

    self.device = ADBAndroid(adb=options.adbPath or 'adb',
                             device=options.deviceSerial,
                             test_root=options.remoteTestRoot,
                             verbose=verbose)

    if options.remoteTestRoot is None:
        options.remoteTestRoot = self.device.test_root
    options.dumpOutputDirectory = options.remoteTestRoot
    self.remoteLogFile = posixpath.join(options.remoteTestRoot, "logs",
                                        "mochitest.log")
    logParent = posixpath.dirname(self.remoteLogFile)
    self.device.rm(logParent, force=True, recursive=True)
    self.device.mkdir(logParent)

    self.remoteProfile = posixpath.join(options.remoteTestRoot, "profile/")
    self.device.rm(self.remoteProfile, force=True, recursive=True)

    self.counts = dict()
    self.message_logger = MessageLogger(logger=None)
    self.message_logger.logger = self.log
    process_args = {'messageLogger': self.message_logger, 'counts': self.counts}
    self.automation = RemoteAutomation(self.device, options.remoteappname,
                                       self.remoteProfile, self.remoteLogFile,
                                       processArgs=process_args)
    self.environment = self.automation.environment

    # Check that Firefox is installed
    expected = options.app.split('/')[-1]
    if not self.device.is_app_installed(expected):
        raise Exception("%s is not installed on this device" % expected)

    self.automation.deleteANRs()
    self.automation.deleteTombstones()
    self.device.clear_logcat()

    self.remoteModulesDir = posixpath.join(options.remoteTestRoot, "modules/")

    self.remoteCache = posixpath.join(options.remoteTestRoot, "cache/")
    self.device.rm(self.remoteCache, force=True, recursive=True)

    # move necko cache to a location that can be cleaned up
    options.extraPrefs += ["browser.cache.disk.parent_directory=%s" %
                           self.remoteCache]

    self.remoteMozLog = posixpath.join(options.remoteTestRoot, "mozlog")
    self.device.rm(self.remoteMozLog, force=True, recursive=True)
    self.device.mkdir(self.remoteMozLog)

    self.remoteChromeTestDir = posixpath.join(options.remoteTestRoot, "chrome")
    self.device.rm(self.remoteChromeTestDir, force=True, recursive=True)
    self.device.mkdir(self.remoteChromeTestDir)

    procName = options.app.split('/')[-1]
    self.device.stop_application(procName)
    if self.device.process_exist(procName):
        self.log.warning("unable to kill %s before running tests!" % procName)

    # Add Android version (SDK level) to mozinfo so that manifest entries
    # can be conditional on android_version.
    self.log.info("Android sdk version '%s'; will use this to filter manifests" %
                  str(self.device.version))
    mozinfo.info['android_version'] = str(self.device.version)
def authorized_item(self, user, path, permission):
    """Check if the user is allowed to read or write the item."""
    path = storage.sanitize_path(path)
    parent_path = storage.sanitize_path(
        "/%s/" % posixpath.dirname(path.strip("/")))
    return self.authorized(user, parent_path, permission)
def dirname(p):
    return posixpath.dirname(p)
                        '<td class="Button"><a href="report/%(report)s">Report Bug</a></td>'))

# Insert report crashes link.

# Disabled for the time being until we decide exactly when this should
# be enabled. Also the radar reporter needs to be fixed to report
# multiple files.
#kReportReplacements.append((re.compile('<!-- REPORTCRASHES -->'),
#                            '<br>These files will automatically be attached to ' +
#                            'reports filed here: <a href="report_crashes">Report Crashes</a>.'))

###
# Other simple parameters

kResources = posixpath.join(posixpath.dirname(__file__), 'Resources')
kConfigPath = os.path.expanduser('~/.scanview.cfg')

###

__version__ = "0.1"

__all__ = ["create_server"]


class ReporterThread(threading.Thread):
    def __init__(self, report, reporter, parameters, server):
        threading.Thread.__init__(self)
        self.report = report
        self.server = server
        self.reporter = reporter
def set_current_url(self, current_url):
    self.base_path = posixpath.dirname(current_url)
def do_PUT(self, environ, base_prefix, path, user):
    """Manage PUT request."""
    if not self._access(user, path, "w"):
        return httputils.NOT_ALLOWED
    try:
        content = self._read_content(environ)
    except RuntimeError as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    except socket.timeout:
        logger.debug("client timed out", exc_info=True)
        return httputils.REQUEST_TIMEOUT
    # Prepare before locking
    content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
    parent_path = pathutils.unstrip_path(
        posixpath.dirname(pathutils.strip_path(path)), True)
    permissions = rights.intersect(
        self._rights.authorization(user, path), "Ww")
    parent_permissions = rights.intersect(
        self._rights.authorization(user, parent_path), "w")
    try:
        vobject_items = tuple(vobject.readComponents(content or ""))
    except Exception as e:
        logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
        return httputils.BAD_REQUEST
    (prepared_items, prepared_tag, prepared_write_whole_collection,
     prepared_props, prepared_exc_info) = prepare(
        vobject_items, path, content_type, permissions, parent_permissions)
    with self._storage.acquire_lock("w", user):
        item = next(self._storage.discover(path), None)
        parent_item = next(self._storage.discover(parent_path), None)
        if not parent_item:
            return httputils.CONFLICT
        write_whole_collection = (
            isinstance(item, storage.BaseCollection) or
            not parent_item.get_meta("tag"))
        if write_whole_collection:
            tag = prepared_tag
        else:
            tag = parent_item.get_meta("tag")
        if write_whole_collection:
            if ("w" if tag else "W") not in self._rights.authorization(
                    user, path):
                return httputils.NOT_ALLOWED
        elif "w" not in self._rights.authorization(user, parent_path):
            return httputils.NOT_ALLOWED
        etag = environ.get("HTTP_IF_MATCH", "")
        if not item and etag:
            # Etag asked but no item found: item has been removed
            return httputils.PRECONDITION_FAILED
        if item and etag and item.etag != etag:
            # Etag asked but item not matching: item has changed
            return httputils.PRECONDITION_FAILED
        match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
        if item and match:
            # Creation asked but item found: item can't be replaced
            return httputils.PRECONDITION_FAILED
        if (tag != prepared_tag or
                prepared_write_whole_collection != write_whole_collection):
            (prepared_items, prepared_tag, prepared_write_whole_collection,
             prepared_props, prepared_exc_info) = prepare(
                vobject_items, path, content_type, permissions,
                parent_permissions, tag, write_whole_collection)
        props = prepared_props
        if prepared_exc_info:
            logger.warning("Bad PUT request on %r: %s", path,
                           prepared_exc_info[1], exc_info=prepared_exc_info)
            return httputils.BAD_REQUEST
        if write_whole_collection:
            try:
                etag = self._storage.create_collection(
                    path, prepared_items, props).etag
            except ValueError as e:
                logger.warning("Bad PUT request on %r: %s", path, e,
                               exc_info=True)
                return httputils.BAD_REQUEST
        else:
            prepared_item, = prepared_items
            if (item and item.uid != prepared_item.uid or
                    not item and parent_item.has_uid(prepared_item.uid)):
                return self._webdav_error_response("%s:no-uid-conflict" % (
                    "C" if tag == "VCALENDAR" else "CR"))
            href = posixpath.basename(pathutils.strip_path(path))
            try:
                etag = parent_item.upload(href, prepared_item).etag
            except ValueError as e:
                logger.warning("Bad PUT request on %r: %s", path, e,
                               exc_info=True)
                return httputils.BAD_REQUEST
        headers = {"ETag": etag}
        return client.CREATED, headers, None
# Configure logging
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)7s %(name)34s | %(message)s"
)

logger = logging.getLogger(__name__)

# Initialize emulator
emulator = Emulator(
    vfp_inst_set=True,
    vfs_root=posixpath.join(posixpath.dirname(__file__), "vfs")
)

# Register Java class.
# emulator.java_classloader.add_class(MainActivity)
emulator.java_classloader.add_class(XGorgen)
emulator.java_classloader.add_class(secuni_b)
emulator.java_classloader.add_class(UserInfo)

emulator.java_classloader.add_class(java_lang_System)
emulator.java_classloader.add_class(java_lang_Thread)
emulator.java_classloader.add_class(java_lang_StackTraceElement)

# Load all libraries.
emulator.load_library("samples/example_binaries/libdl.so")
emulator.load_library("samples/example_binaries/libc.so")
emulator.load_library("samples/example_binaries/libstdc++.so")
def test_dirname(self):
    self.assertEqual(posixpath.dirname("/foo/bar"), "/foo")
    self.assertEqual(posixpath.dirname("/"), "/")
    self.assertEqual(posixpath.dirname("foo"), "")
    self.assertEqual(posixpath.dirname("////foo"), "////")
    self.assertEqual(posixpath.dirname("//foo//bar"), "//foo")
def dirname(self):
    return posixpath.dirname(self._rel_path)
def parentdir(path):
    return posixpath.dirname(path) if '/' in path else ntpath.dirname(path)
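# Usage example for parentdir above: paths containing a forward slash take
# the POSIX branch; pure backslash paths fall through to ntpath.
assert parentdir("a/b/c.txt") == "a/b"
assert parentdir("a\\b\\c.txt") == "a\\b"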
def _path_to_etag(self, path):
    relpath = posixpath.relpath(path, self.prefix)
    return posixpath.dirname(relpath) + posixpath.basename(relpath)
def _stackSignal(self, index=-1, load=False):
    ddict = self._lastDatasetDict
    filename = ddict['file']
    name = ddict['name']
    sel = {}
    sel['SourceName'] = self.data.sourceName * 1
    sel['SourceType'] = "HDF5"
    fileIndex = self.data.sourceName.index(filename)
    phynxFile = self.data._sourceObjectList[fileIndex]
    title = filename + " " + name
    sel['selection'] = {}
    sel['selection']['sourcename'] = filename
    # single dataset selection
    scanlist = None
    sel['selection']['x'] = []
    sel['selection']['y'] = [name]
    sel['selection']['m'] = []
    sel['selection']['index'] = index
    self._checkWidgetDict()
    widget = QStackWidget.QStackWidget()
    widget.setWindowTitle(title)
    widget.notifyCloseEventToWidget(self)

    # different ways to fill the stack
    if h5py.version.version < '2.0':
        useInstance = True
    else:
        useInstance = False
    groupName = posixpath.dirname(name)
    if useInstance:
        # this crashes with h5py 1.x
        # this way it is not loaded into memory unless requested
        # and cannot crash because same instance is used
        stack = phynxFile[name]
    else:
        # create a new instance
        phynxFile = h5py.File(filename, 'r')
        stack = phynxFile[name]

    # try to find out the "energy" axis
    axesList = []
    xData = None
    try:
        group = phynxFile[groupName]
        if 'axes' in stack.attrs.keys():
            axes = stack.attrs['axes']
            if sys.version > '2.9':
                try:
                    axes = axes.decode('utf-8')
                except:
                    print("WARNING: Cannot decode axes")
            axes = axes.split(":")
            for axis in axes:
                if axis in group.keys():
                    axesList.append(posixpath.join(groupName, axis))
            if len(axesList):
                xData = phynxFile[axesList[index]].value
    except:
        # I cannot afford this Nexus specific things
        # to break the generic HDF5 functionality
        if DEBUG:
            raise
        axesList = []

    # the only problem is that, if the shape is not of type (a, b, c),
    # it will not be possible to reshape it. In that case I have to
    # actually read the values
    nDim = len(stack.shape)
    if (load) or (nDim != 3):
        stack = stack.value
    shape = stack.shape
    if index == 0:
        # Stack of images
        n = 1
        for dim in shape[:-2]:
            n = n * dim
        stack.shape = n, shape[-2], shape[-1]
        if len(axesList):
            if xData.size != n:
                xData = None
    else:
        # stack of mca
        n = 1
        for dim in shape[:-1]:
            n = n * dim
        if nDim != 3:
            stack.shape = 1, n, shape[-1]
        if len(axesList):
            if xData.size != shape[-1]:
                xData = None
        # index equal -1 should be able to handle it
        # if not, one would have to uncomment next line
        # index = 2
    actualStack = DataObject.DataObject()
    actualStack.data = stack
    if xData is not None:
        actualStack.x = [xData]
    widget.setStack(actualStack, mcaindex=index)
    wid = id(widget)
    self._lastWidgetId = wid
    self._widgetDict[wid] = widget
    widget.show()
def dirname(self, path):
    return posixpath.dirname(path)
def test_realpath_pardir(self):
    self.assertEqual(realpath('..'), dirname(os.getcwd()))
    self.assertEqual(realpath('../..'), dirname(dirname(os.getcwd())))
    self.assertEqual(realpath('/'.join(['..'] * 100)), '/')
def _get_opf_resources(self, opf_path, opf_elem, epub_zip):
    """
    Get all relevant files inside an epub file, using the opf file as
    reference.

    :param opf_path: The original OPF file path
    :param opf_elem: A lxml.etree object
    :param epub_zip: A zipfile.ZipFile object
    :return: A tuple containing two dictionaries, the first one containing
             all epub resources by path {path: (id, …)} and the second
             containing all resources paths by id {id: path}
    """
    # manifest
    # TODO: Check OPS Core Media Types
    # MIME Media Type          Description
    # image/gif                Used for raster graphics
    # image/jpeg               Used for raster graphics
    # image/png                Used for raster graphics
    # image/svg+xml            Used for vector graphics
    # application/xhtml+xml    Used for OPS Content Documents
    # application/x-dtbook+xml Used for OPS Content Documents
    # text/css                 Used for OPS CSS-subset style sheets
    # application/xml          Used for Out-Of-Line XML Islands
    # text/x-oeb1-document     Deprecated; OEBPS 1.0.1 and 1.2 Documents
    # text/x-oeb1-css          Deprecated; OEBPS 1.0.1 and 1.2 CSS
    # application/x-dtbncx+xml The NCX
    #
    # Do not render img or object elements of unsupported media types,
    # in the absence of fallbacks.
    resources = {}
    resources_by_id = {}
    opf_dir_path = posixpath.dirname(opf_path)
    manifest_elem = opf_elem.find(OPF + 'manifest')
    for child in manifest_elem:
        if not etree.iselement(child) and child.tag != OPF + 'item':
            continue
        if isinstance(child, etree._Comment):
            continue
        props = child.get('properties', '')
        if props:
            res_props = props.split(' ')
        else:
            res_props = []
        res_type = child.get('media-type')
        res_id = child.get('id')
        res_inner_path = Soup.URI.decode(child.get('href'))
        res_path = posixpath.join(opf_dir_path, res_inner_path)
        res_content = self._read_inner_zip_path(epub_zip, res_path)
        resources[res_path] = {
            'id': res_id,
            'content': res_content,
            'mimetype': res_type,
            'properties': res_props
        }
        resources_by_id[res_id] = res_path
    return resources, resources_by_id
def __opfpath(self, path):
    return posixpath.join(posixpath.dirname(self._opfpath), path)
#!/usr/bin/env python
#
# Script for automatically updating share.qbs
# Usage: Just call it without arguments.
#

import os
import posixpath as path
import inspect

scriptFileName = path.basename(inspect.getfile(inspect.currentframe()))
shareDirPath = path.dirname(inspect.getfile(inspect.currentframe()))
print "updating " + shareDirPath + "/share.qbs"
os.chdir(shareDirPath)

try:
    f = open('share.qbs', 'w')
except:
    print "Could not open share.qbs"
    quit(1)

def writeln(line):
    f.write(line)
    f.write("\n")

writeln("import qbs.base 1.0")
writeln("")
writeln("Product {")
writeln("    type: [\"installed_content\"]")