def test_log_artifacts():
    for artifact_path in [None, "sub_dir", "very/nested/sub/dir"]:
        file_content_1 = "A simple test artifact\nThe artifact is located in: " + str(
            artifact_path)
        file_content_2 = os.urandom(300)

        file1 = "meta.yaml"
        directory = "saved_model"
        file2 = "sk_model.pickle"

        with TempDir() as local, TempDir() as remote:
            with open(os.path.join(local.path(), file1), "w") as f:
                f.write(file_content_1)
            os.mkdir(os.path.join(local.path(), directory))
            with open(os.path.join(local.path(), directory, file2), "wb") as f:
                f.write(file_content_2)

            sftp_path = "sftp://" + remote.path()
            store = SFTPArtifactRepository(sftp_path)
            store.log_artifacts(local.path(), artifact_path)

            remote_dir = posixpath.join(
                remote.path(), "." if artifact_path is None else artifact_path)
            assert posixpath.isdir(remote_dir)
            assert posixpath.isdir(posixpath.join(remote_dir, directory))
            assert posixpath.isfile(posixpath.join(remote_dir, file1))
            assert posixpath.isfile(posixpath.join(remote_dir, directory, file2))

            with open(posixpath.join(remote_dir, file1), "r") as remote_content:
                assert remote_content.read() == file_content_1

            with open(posixpath.join(remote_dir, directory, file2), "rb") as remote_content:
                assert remote_content.read() == file_content_2
def markdown_page(request, path='', base=''):
    context = {}
    context['path'] = path
    template = 'markdown_page.html'
    absbase = posixpath.split(__file__)[0]
    print base, path, posixpath.join(base, path)
    if posixpath.splitext(path)[1] in ['.jpg', '.png', '.gif']:
        fullpath = posixpath.join(absbase, base, path)
        if posixpath.isfile(fullpath):
            filetype = mimetypes.guess_type(fullpath)[0]
            return HttpResponse(FileWrapper(file(fullpath)),
                                content_type=filetype)
        else:
            return HttpResponse('No such file: %s' % path)
    for ext in ['', '.reveal.md', '.md', '.markdown', '.reveal.html', '.html']:
        newpath = posixpath.join(absbase, base, path + ext)
        #print absbase, newpath
        if posixpath.isfile(newpath):
            if not ext:
                ext = posixpath.splitext(path)[1]
            with open(newpath, 'r') as f:
                text = f.read()
            if ext == '.reveal.html':
                html = text
                template = 'reveal.html'
            elif ext == '.reveal.md':
                html = text
                template = 'reveal.html'
                context['markdown'] = True
            elif ext == '.html':
                # TODO: extract some metadata from HTML
                html = text
                if '<html' in html:
                    # Serve plain HTML
                    return HttpResponse(html, content_type="text/html")
            else:
                md = markdown.Markdown(extensions=[
                    'markdown.extensions.meta',
                    'markdown.extensions.attr_list'
                ], safe_mode='escape')
                html = md.convert(text.decode('utf-8'))
                context['meta'] = md.Meta
            context['html'] = html
            break
    return TemplateResponse(request, template, context=context)
def test_isfile(self):
    self.assertIs(posixpath.isfile(test_support.TESTFN), False)
    f = open(test_support.TESTFN, "wb")
    try:
        f.write("foo")
        f.close()
        self.assertIs(posixpath.isfile(test_support.TESTFN), True)
        os.remove(test_support.TESTFN)
        os.mkdir(test_support.TESTFN)
        self.assertIs(posixpath.isfile(test_support.TESTFN), False)
        os.rmdir(test_support.TESTFN)
    finally:
        if not f.close():
            f.close()
        try:
            os.remove(test_support.TESTFN)
        except os.error:
            pass
        try:
            os.rmdir(test_support.TESTFN)
        except os.error:
            pass

    self.assertRaises(TypeError, posixpath.isdir)

def test_samefile(self):
    f = open(test_support.TESTFN + "1", "wb")
    try:
        f.write("foo")
        f.close()
        self.assertIs(
            posixpath.samefile(test_support.TESTFN + "1",
                               test_support.TESTFN + "1"),
            True)
        # If we don't have links, assume that os.stat doesn't return
        # reasonable inode information and thus, that samefile() doesn't work
        if hasattr(os, "symlink"):
            os.symlink(test_support.TESTFN + "1", test_support.TESTFN + "2")
            self.assertIs(
                posixpath.samefile(test_support.TESTFN + "1",
                                   test_support.TESTFN + "2"),
                True)
            os.remove(test_support.TESTFN + "2")
            f = open(test_support.TESTFN + "2", "wb")
            f.write("bar")
            f.close()
            self.assertIs(
                posixpath.samefile(test_support.TESTFN + "1",
                                   test_support.TESTFN + "2"),
                False)
    finally:
        if not f.close():
            f.close()
        try:
            os.remove(test_support.TESTFN + "1")
        except os.error:
            pass
        try:
            os.remove(test_support.TESTFN + "2")
        except os.error:
            pass

    self.assertRaises(TypeError, posixpath.samefile)
def find_cmd(name):
    if posix.environ.has_key("PATH"):
        pathlist = string.split(posix.environ["PATH"], ":") + ["."]
    else:
        pathlist = ["."]
    pathlist = pathlist + sys.path

    # This covers the case where an absolute path has been given!
    if posixpath.isfile(name):
        return name

    for p in pathlist:
        if posixpath.isfile(p + "/" + name):
            return p + "/" + name
    return None
def test_isfile(self):
    self.assertIs(posixpath.isfile(test_support.TESTFN), False)
    f = open(test_support.TESTFN, "wb")
    try:
        f.write("foo")
        f.close()
        self.assertIs(posixpath.isfile(test_support.TESTFN), True)
        os.remove(test_support.TESTFN)
        os.mkdir(test_support.TESTFN)
        self.assertIs(posixpath.isfile(test_support.TESTFN), False)
        os.rmdir(test_support.TESTFN)
    finally:
        if not f.close():
            f.close()

    self.assertRaises(TypeError, posixpath.isdir)
def HandleEvent(self, event):
    """Unified FAM event handler for DirShadow."""
    action = event.code2str()
    if event.filename[0] == '/':
        return
    epath = "".join([self.data, self.handles[event.requestID],
                     event.filename])
    if posixpath.isdir(epath):
        ident = self.handles[event.requestID] + event.filename
    else:
        ident = self.handles[event.requestID][:-1]
    if action in ['exists', 'created']:
        if posixpath.isdir(epath):
            self.AddDirectoryMonitor(epath[len(self.data):])
        if ident not in self.entries and posixpath.isfile(epath):
            dirpath = "".join([self.data, ident])
            self.entries[ident] = self.es_cls(self.filename_pattern,
                                              dirpath,
                                              self.es_child_cls,
                                              self.encoding)
            self.Entries['Path'][ident] = self.entries[ident].bind_entry
        if not posixpath.isdir(epath):
            # do not pass through directory events
            self.entries[ident].handle_event(event)
    if action == 'changed' and ident in self.entries:
        self.entries[ident].handle_event(event)
    elif action == 'deleted':
        fbase = self.handles[event.requestID] + event.filename
        if fbase in self.entries:
            # a directory was deleted
            del self.entries[fbase]
            del self.Entries['Path'][fbase]
        else:
            self.entries[ident].handle_event(event)
def create_wotmod_package(self):
    """
    Inserts files from bdist-dir to .wotmod package and stores it to dist-dir.

    :return: path to wotmod package file
    """
    zip_filename = self.get_output_file_path()

    mkpath(os.path.dirname(zip_filename))

    log.info("creating '%s' and adding '%s' to it",
             zip_filename, self.bdist_dir)

    archive_root = to_posix_separators(self.bdist_dir)

    with zipfile.ZipFile(zip_filename, 'w') as zip:
        for dirpath, dirnames, filenames in os.walk(archive_root):
            dirpath = to_posix_separators(dirpath)

            # Build relative path from bdist_dir forward
            archive_dirpath = dirpath.replace(posixpath.commonprefix(
                [dirpath, archive_root]), '').strip('/')

            # Create files
            for name in filenames:
                archive_path = posixpath.join(archive_dirpath, name)
                path = posixpath.normpath(posixpath.join(dirpath, name))
                if posixpath.isfile(path):
                    log.info("adding '%s'" % archive_path)
                    zip.write(path, archive_path)

            # Set correct flags for directories
            for name in dirnames:
                archive_path = posixpath.join(archive_dirpath, name) + '/'
                log.info("adding '%s'" % archive_path)
                zip.writestr(archive_path, '')

    return zip_filename
def navigate_essays(tmppath):
    for item in os.listdir(tmppath):
        if exclude_item(item):
            continue
        filepath = posixpath.join(tmppath, item)
        if posixpath.isfile(filepath) and item.endswith(conf['ESSAYS_EXT']):
            # It's an item. Let's generate it and add it to the index
            print 'Generating {0}'.format(filepath)
            contents = []
            pip = subprocess.Popen(['perl', conf['ESSAY_PROCESSOR'], filepath],
                                   stdout=subprocess.PIPE)
            filehtml = pip.stdout.read()
            contents.append(filehtml)
            htmlname = item.replace(conf['ESSAYS_EXT'], '.html')
            properties = parse_essay_properties(filepath)
            properties['htmlname'] = htmlname
            properties['url'] = conf['ESSAYS_DIR']
            insert_to_essays_index(properties)
            pagehtml = html_from_tpl(properties['title'], ''.join(contents))
            with open(posixpath.join(conf['PUBLICATION_DIR'],
                                     conf['ESSAYS_DIR'], htmlname), 'w') as f:
                f.write(pagehtml)
def test_stale_pidfile(pyscript):
    script = pyscript("""
        import sys
        import time
        from daemonocle import Daemon

        def worker():
            time.sleep(10)

        daemon = Daemon(worker=worker, name='foo', pid_file='foo.pid')
        daemon.do_action(sys.argv[1])
    """)
    pid_file = posixpath.realpath(posixpath.join(script.dirname, 'foo.pid'))

    script.run('start')

    with open(pid_file, 'rb') as f:
        pid = int(f.read())

    os.kill(pid, signal.SIGKILL)

    result = script.run('status')
    assert result.returncode == 1
    assert result.stdout == b'foo -- not running\n'
    assert result.stderr == b''

    assert not posixpath.isfile(pid_file)

    result = script.run('stop')
    assert result.returncode == 0
    assert result.stdout == b''
    assert result.stderr == b'WARNING: foo is not running\n'
def blind_match(filename, outname=None):
    # dir = tempfile.mkdtemp(prefix='match')
    # wcs = None
    # binname = None

    f = Favor2()

    if not outname:
        # outname = posixpath.splitext(filename)[0] + '.new'
        # if outname == filename:
        #     outname = filename + '.new'
        fd, outname = tempfile.mkstemp(prefix='match', suffix='.new')
        os.close(fd)

    f.blind_match_file(filename, outfile=outname)

    # for path in ['.', '/usr/local', '/opt/local']:
    #     if os.path.isfile(posixpath.join(path, 'astrometry', 'bin', 'solve-field')):
    #         binname = posixpath.join(path, 'astrometry', 'bin', 'solve-field')
    #         break

    # if binname:
    #     command = "%s -D %s --overwrite --no-fits2fits --no-plots --no-verify --use-sextractor -t 2 %s --new-fits %s" % (binname, dir, filename, outname)
    #     #os.system("%s >/dev/null 2>/dev/null" % (command))
    #     os.system("%s" % (command))
    #     shutil.rmtree(dir, ignore_errors=True)

    if not posixpath.isfile(outname):
        outname = None

    return outname
def handler(self, args):
    access_token = args.access_token
    filepath = args.filepath

    project = get_active_project()
    oauth2_section = DEFAULT_OAUTH2_SECTION
    token_section = DEFAULT_TOKEN_SECTION

    config = read_config(project)
    if not access_token:
        access_token = config_get(config.get, token_section, 'access_token')
    endpoint = config_get(config.get, oauth2_section, 'endpoint')
    api = API(access_token, endpoint=endpoint, timeout=18.)

    if not posixpath.isfile(filepath):
        output('[red]只能上传文件[/red]')  # "Only files can be uploaded"
        sys.exit(1)

    filename = args.filename
    cmk_id = args.cmk_id
    if not filename:
        filename = posixpath.split(filepath)[1]

    message = '正在上传文件 {}'.format(filepath)  # "Uploading file ..."
    with console.status(message, spinner='earth'):
        try:
            with open(filepath, 'rb') as fp:
                object_name = api.upload(filename, fp, cmk_id=cmk_id)
        except APIError as e:
            output('[red]请求失败:[/red]')  # "Request failed:"
            output_json(e.result)
            sys.exit(1)

    # "File uploaded successfully, stored at: <object_name>"
    output('[green]文件上传成功,存储位置:[/green]{}'.format(object_name))
def resolve_path(self, path, normpath=normpath):
    """
    Resolve ``path`` to a source and physical path if possible.

    :param path: a path of the form `<source>/<fspath>` or `<fspath>`
    :returns: a tuple of `(source, mapped_path, cache_max_age)`.
        `source` may be `None` if no source was identified.
    """
    path = normpath(path.lstrip('/'))
    try:
        source, remaining = path.split('/', 1)
    except ValueError:
        source = None
        remaining = path
    if source and source in self.sources:
        map_path, d, cache_max_age = self.sources[source]
        path = map_path(self.resource_manager, join(d, remaining))
        return source, path, cache_max_age
    for s, p, cache_max_age in self.map_path_all_sources(path):
        if isfile(p):
            return s, p, cache_max_age
    return None, path, self.cache_max_age
def do_GET(self):
    self.path = self.path.lstrip('/').rstrip('/')
    path_chars = list(self.path)
    path_chars.insert(0, '/')
    self.path = ''.join(path_chars)
    if self.path == '/':
        self.path = '.'
    if posixpath.isdir(self.path):
        data = directory(self.path)
        self.send_response(200)
        self.send_header('content-type', 'text/html')
        self.end_headers()
        self.copyfile(data, self.wfile)
        data.close()
        return
    if posixpath.isfile(self.path):
        data = self.get_head()
        prinn(data)
        data = self.send_head()
        if data:
            try:
                self.copyfile(data, self.wfile)
            except Exception as error:
                print(error)
            finally:
                data.close()
def UpdateListBoxes(self):
    from os import listdir
    from posixpath import isfile, isdir, join, basename
    from commands import getoutput
    from string import splitfields
    cwd = self.cwd
    self.fileLb.delete(0, self.fileLb.size())
    filter = self.filterEntry.get()
    # '*' will list recursively, we don't want that.
    if filter == '*':
        filter = ''
    cmd = "/bin/ls " + join(cwd, filter)
    cmdOutput = getoutput(cmd)
    files = splitfields(cmdOutput, "\n")
    files.sort()
    for i in range(len(files)):
        if isfile(join(cwd, files[i])):
            self.fileLb.insert('end', basename(files[i]))
    self.dirLb.delete(0, self.dirLb.size())
    files = listdir(cwd)
    files.sort()
    for i in range(len(files)):
        if isdir(join(cwd, files[i])):
            self.dirLb.insert('end', files[i])
    self.dirLabel['text'] = "Directory:" + self.cwd_print()
def scheduler_target_rgb_image(request, id=0):
    base = posixpath.join("/tmp/fweb/targets", str(id))

    try:
        os.makedirs(base)
    except:
        pass

    rgbname = posixpath.join(base, "rgb.jpg")

    if not posixpath.isfile(rgbname):
        # No cached file
        try:
            target = SchedulerTargets.objects.get(id=id)
            images = Images.objects.raw(
                "select *,get_filter_name(filter) as filter_string from images where keywords->'TARGET UUID'='%s' and q3c_radial_query(ra0, dec0, %s, %s, 2.0) order by channel_id"
                % (target.uuid, target.ra, target.dec))

            files_b, files_v, files_r = [], [], []

            for image in images:
                filename = posixpath.join(settings.BASE, image.filename)
                filename = fix_remote_path(filename, image.channel_id)

                if image.filter_string == 'B':
                    files_b.append(filename)
                if image.filter_string == 'V':
                    files_v.append(filename)
                if image.filter_string == 'R':
                    files_r.append(filename)

            if len(files_b) and len(files_v) and len(files_r):
                print files_b[0], files_v[0], files_r[0]
                coadd_rgb(name_blue=files_b[0], name_green=files_v[0],
                          name_red=files_r[0], out=rgbname)
        except:
            pass

    if not posixpath.isfile(rgbname):
        return HttpResponse("Can't create RGB image for target %s" % str(id))

    response = HttpResponse(FileWrapper(file(rgbname)),
                            content_type='image/jpeg')
    response['Content-Length'] = posixpath.getsize(rgbname)
    return response
def delete_empty_folders(self, path):
    real_path = self.real_path(path)
    if posixpath.isfile(real_path):
        return
    for f in os.listdir(real_path):
        self.delete_empty_folders(posixpath.join(path, f))
    if len(os.listdir(real_path)) == 0:
        os.rmdir(real_path)
def findInPath(fileName):
    """
    Searches the current directory and then the system path for the
    specified file. Returns the full path or None.
    """
    import posixpath
    for pathdir in (["."] + sys.path):
        if posixpath.isfile(pathdir + "/" + fileName):
            return pathdir + "/" + fileName
def HandleEvent(self, event=None):
    """
    Updates which files this plugin handles based upon filesystem events.
    Allows configuration items to be added/removed without server restarts.
    """
    action = event.code2str()
    if event.filename[0] == "/":
        return
    epath = "".join([self.data, self.handles[event.requestID],
                     event.filename])
    if posixpath.isdir(epath):
        ident = self.handles[event.requestID] + event.filename
    else:
        ident = self.handles[event.requestID][:-1]

    fname = "".join([ident, "/", event.filename])

    if event.filename.endswith(".xml"):
        if action in ["exists", "created", "changed"]:
            if event.filename.endswith("key.xml"):
                key_spec = dict(list(lxml.etree.parse(epath).find("Key").items()))
                self.key_specs[ident] = {
                    "bits": key_spec.get("bits", 2048),
                    "type": key_spec.get("type", "rsa"),
                }
                self.Entries["Path"][ident] = self.get_key
            elif event.filename.endswith("cert.xml"):
                cert_spec = dict(list(lxml.etree.parse(epath).find("Cert").items()))
                ca = cert_spec.get("ca", "default")
                self.cert_specs[ident] = {
                    "ca": ca,
                    "format": cert_spec.get("format", "pem"),
                    "key": cert_spec.get("key"),
                    "days": cert_spec.get("days", 365),
                    "C": cert_spec.get("c"),
                    "L": cert_spec.get("l"),
                    "ST": cert_spec.get("st"),
                    "OU": cert_spec.get("ou"),
                    "O": cert_spec.get("o"),
                    "emailAddress": cert_spec.get("emailaddress"),
                }
                cp = ConfigParser()
                cp.read(self.core.cfile)
                self.CAs[ca] = dict(cp.items("sslca_" + ca))
                self.Entries["Path"][ident] = self.get_cert
        if action == "deleted":
            if ident in self.Entries["Path"]:
                del self.Entries["Path"][ident]
    else:
        if action in ["exists", "created"]:
            if posixpath.isdir(epath):
                self.AddDirectoryMonitor(epath[len(self.data):])
            if ident not in self.entries and posixpath.isfile(epath):
                self.entries[fname] = self.__child__(epath)
                self.entries[fname].HandleEvent(event)
        if action == "changed":
            self.entries[fname].HandleEvent(event)
        elif action == "deleted":
            if fname in self.entries:
                del self.entries[fname]
            else:
                self.entries[fname].HandleEvent(event)
def music_search(self, path, res):
    real_path = self.real_path(path)
    if posixpath.isfile(real_path):
        if real_path.endswith('.mp3'):
            res.append(path)
        else:
            print('Bad extension:', real_path)
        return
    for f in sorted(os.listdir(real_path)):
        self.music_search(posixpath.join(path, f), res)
def get_recipes(category):
    '''
    For a given category, get the list of recipe titles.
    '''
    recipe_files = [x for x in os.listdir(category)]
    titles = []
    for filename in recipe_files:
        fullname = posixpath.join(category, filename)
        # here lies the issue
        if (posixpath.isfile(fullname)) and fullname.endswith('.md'):
            title = get_title(fullname)
            titles.append((title, fullname))
    return titles
def test_delete_artifacts(artifact_path):
    file_content_1 = f"A simple test artifact\nThe artifact is located in: {artifact_path}"
    file_content_2 = os.urandom(300)

    file1 = "meta.yaml"
    directory = "saved_model"
    file2 = "sk_model.pickle"

    with TempDir() as local, TempDir() as remote:
        with open(os.path.join(local.path(), file1), "w", encoding="utf8") as f:
            f.write(file_content_1)
        os.mkdir(os.path.join(local.path(), directory))
        with open(os.path.join(local.path(), directory, file2), "wb") as f:
            f.write(file_content_2)

        sftp_path = f"sftp://{remote.path()}"
        store = SFTPArtifactRepository(sftp_path)
        store.log_artifacts(local.path(), artifact_path)

        remote_dir = posixpath.join(
            remote.path(), "." if artifact_path is None else artifact_path)
        assert posixpath.isdir(remote_dir)
        assert posixpath.isdir(posixpath.join(remote_dir, directory))
        assert posixpath.isfile(posixpath.join(remote_dir, file1))
        assert posixpath.isfile(posixpath.join(remote_dir, directory, file2))

        with open(posixpath.join(remote_dir, file1), "r", encoding="utf8") as remote_content:
            assert remote_content.read() == file_content_1

        with open(posixpath.join(remote_dir, directory, file2), "rb") as remote_content:
            assert remote_content.read() == file_content_2

        store.delete_artifacts(remote.path())

        assert not posixpath.exists(posixpath.join(remote_dir, directory))
        assert not posixpath.exists(posixpath.join(remote_dir, file1))
        assert not posixpath.exists(posixpath.join(remote_dir, directory, file2))
        assert not posixpath.exists(remote_dir)
        assert not posixpath.exists(remote.path())
def getExtraInfo(self):
    if not hasattr(DevPath, 'ExtraInfo'):
        extra = os.path.join(os.environ['TMP'], 'Extra')
        if posixpath.isfile(extra):
            os.unlink(extra)
        mod = Module('Extra')
        mod.Incopy(extra)
        fd = open(extra, 'r')
        extra_regex = re.compile('\.\w+\s+\w+')
        # create static member
        DevPath.ExtraInfo = filter(extra_regex.match, fd.readlines())
        fd.close()
def __init__(self, path, mappedpath, agent, metadataid, title):
    self.path = path
    self.mappedpath = mappedpath
    if posixpath.isfile(self.mappedpath):
        self.fullpath = posixpath.dirname(
            posixpath.abspath(self.mappedpath))
    else:
        self.fullpath = self.mappedpath
    self.id = agent
    self.metadataid = metadataid
    self.title = title
def load_yamls_with_paths(path):
    yamls = [
        join(path, f) for f in listdir(path)
        if isfile(join(path, f))
        if f.endswith('.yaml') or f.endswith('.yml')
    ]
    result = []
    for yaml in yamls:
        try:
            result.append(REACTutils.read_yaml_file(yaml))
        except ScannerError:
            raise ScannerError('yaml is bad! %s' % yaml)
    return (result, yamls)
def _read_dir(self):
    "Read the directory"

    def is_thumb(name, file_name):
        "Small helper to filter out thumbnails"
        for suffix in cfg.thumb_suffix:
            if name[-len(suffix):] == suffix:
                # The current file is a thumbnail!
                clean_name = name[:-len(suffix)]
                self.thumbnails[clean_name] = file_name
                return True
        return False

    if cfg.debug:
        print "read '%s'..." % self.absolute_path

    files = []
    dirs = []
    for item in os.listdir(self.absolute_path):
        abs_path = posixpath.join(self.absolute_path, item)
        if posixpath.isfile(abs_path):
            # Handle files
            if item in cfg.file_filter:
                # File should not be displayed
                continue

            name, ext = posixpath.splitext(item)

            # Filter out thumbnails
            if is_thumb(name, item):
                # It is a thumbnail -> must not go into the files list!
                continue

            if ext in cfg.ext_whitelist:
                files.append(item)
        else:
            # Handle directories
            if cfg.allow_subdirs:
                # Subdirectories should be displayed
                if not item in cfg.dir_filter:
                    dirs.append(item)

    files.sort()
    dirs.sort()

    if self.relativ_path != ".":
        # Only add this when we are not already in the root directory
        dirs.insert(0, "..")

    return files, dirs
def load(self, filepath):
    filepath = posixpath.expanduser(filepath)
    if not posixpath.isfile(filepath):
        raise PersistError("File not found: %s" % filepath)
    if posixpath.getsize(filepath) == 0:
        return
    try:
        self._hardmap = self._backend.load(filepath)
    except:
        filepathold = filepath + ".old"
        if (posixpath.isfile(filepathold) and
                posixpath.getsize(filepathold) > 0):
            # warning("Broken configuration file at %s" % filepath)
            # warning("Trying backup at %s" % filepathold)
            try:
                self._hardmap = self._backend.load(filepathold)
            except:
                raise PersistError("Broken configuration file at %s"
                                   % filepathold)
        else:
            raise PersistError("Broken configuration file at %s" % filepath)
def add_entry(self, event):
    epath = self.event_path(event)
    ident = self.event_id(event)
    if posixpath.isdir(epath):
        self.AddDirectoryMonitor(epath[len(self.data):])
    if ident not in self.entries and posixpath.isfile(epath):
        dirpath = "".join([self.data, ident])
        self.entries[ident] = self.es_cls(self.filename_pattern,
                                          dirpath,
                                          self.es_child_cls,
                                          self.encoding)
        self.Entries['Path'][ident] = self.entries[ident].bind_entry
    if not posixpath.isdir(epath):
        # do not pass through directory events
        self.entries[ident].handle_event(event)
def refineWCS(self):
    fd, tmpname = tempfile.mkstemp(suffix='.fits')
    os.close(fd)

    self.wcs = self.favor2.blind_match_file(
        posixpath.join(self.out_path, 'median.fits'),
        outfile=tmpname, order=self.wcs_order)

    if posixpath.isfile(tmpname):
        print "Successfully refined WCS using median frame"
        self.header = pyfits.getheader(tmpname, -1)
        os.unlink(tmpname)
        return True
    else:
        print "WCS refinement failed, using original WCS"
        return False
def parseFile(company, file):
    print("Parsing %s,%s " % (company, file))
    parser = SVDParser.for_packaged_svd(company, file + ".svd", 1)
    extention = []
    jsonPath = posixpath.join(posixpath.dirname(posixpath.abspath(__file__)),
                              "Extention", company, file + ".json")
    #print(jsonPath)
    device = parser.get_device()
    if posixpath.isfile(jsonPath):
        extFile = open(jsonPath, "r", encoding='utf-8')
        extention = json.load(extFile)
    subdir = Ft.formatCpuName(Ft.getKey(extention, ["device", "cpu", "name"]), device)
    chipDir = posixpath.join('..', '..', 'Lib', 'Chip')
    subdir = posixpath.join(chipDir, subdir)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    subdir = posixpath.join(subdir, company)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    subdir = posixpath.join(subdir, file)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    chipText = "#pragma once \n"
    chipText += "#include <cstdint>\n"
    incDir = subdir[10:]
    if Ft.getKey(extention, ["kvasir", "io"]):
        parseIo(extention, device, subdir)
        chipText += "#include <%s>\n" % (posixpath.join(incDir, "Io.hpp"))
    for peripheral in device.peripherals:
        if peripheral.name is not None:
            chipText += "#include <%s>\n" % (posixpath.join(
                incDir, peripheral.name + ".hpp"))
            out = "#pragma once \n#include <Register/Utility.hpp>\n"
            out += "namespace Kvasir {\n"
            out += parsePeripheral(
                peripheral, Ft.getKey(extention, ['.' + peripheral.name]))
            out += "}\n"
            outFile = open(posixpath.join(subdir, peripheral.name + ".hpp"),
                           'w', encoding='utf-8')
            outFile.write(out)
        else:
            print("error no name in %s" % (file))
    outFile = open(posixpath.join(chipDir, file + ".hpp"), 'w', encoding='utf-8')
    outFile.write(chipText)
def get_resource():
    args = flask.request.args
    resourceType = args["resourceType"]
    resource = args["resource"]
    relativePath = posixpath.normpath("/".join(
        ["static", resourceTypes[resourceType], resource]))
    return json.dumps({
        "path": "/" + relativePath,
        "exists": posixpath.isfile(posixpath.abspath(relativePath))
    })
def appname(p):
    """Given an etcdir or etcfile, returns the inferred appname."""
    if pp.isfile(p):
        p = pp.dirname(p)

    # Strip away 'etc'
    assert pp.basename(p) == 'etc', p
    p = pp.dirname(p)

    prefix = pp.commonprefix([env.directory, p])
    p = p[len(prefix):]
    if p[0] == '/':
        p = p[1:]
    return p
def CheckDeletedFiles(self):
    '''
    The database is "walked" looking for missing files.
    '''
    with self._db as c:
        cur = self._db.execute(
            'select * from files order by path collate wincase')
        for row in cur:
            if row['deleted']:
                continue
            remotepath = posixpath.join(self._remotepath, row['path'])
            filepath = posixpath.join(self._localpath, row['path'], row['file'])
            if row['isfolder']:
                if not posixpath.isdir(filepath):
                    self._rmdir(row['path'], row['file'], remotepath, c)
            elif not posixpath.isfile(filepath):
                self._rm(row['path'], row['file'], remotepath, c)
def find_library(name):
    import ctypes.util
    where = ctypes.util.find_library(name)
    if where is not None:
        return where

    import os, posixpath
    root = 'lib' + name
    dn = posixpath.dirname(__file__)
    base = posixpath.abspath(dn)
    for fn in os.listdir(base):
        items = fn.split('.')
        if len(items) > 1 and items[0] == root and \
           items[1] in ('so', 'dll', 'dylib'):
            where = posixpath.join(base, fn)
            if posixpath.isfile(where):
                return where
def mput(self, src_dir, pat, verbose=False):
    try:
        find = re.compile(pat)
        files = os.listdir(src_dir)
        for f in files:
            if posixpath.isfile(f) and find.match(f):
                if verbose:
                    print(" * put %s" % f)
                self.put(posixpath.join(src_dir, f), f)
    except sre_constants.error as e:
        raise RemoteIOError("Error in regular expression: %s" % e)
def items(self):
    items = []
    paths = path_expand(self.path)
    for path in paths:
        key = posixpath.basename(path)
        if posixpath.isfile(path):
            value = FilenameRegistry(path)
        elif posixpath.isdir(path):
            value = RecursiveDirectoryRegistry(path)
        else:
            logging.info("Unknown sysctl path: %s", path)
            continue
        items.append((key, value))
    return items
def file_exists(file_name, type='file'):
    if len(file_name) == 0:
        return 0
    else:
        if exists(file_name):
            if type == 'file' and isfile(file_name):
                return 1
            elif type == 'dir' and isdir(file_name):
                return 1
            else:
                if islink(file_name):
                    print "INFO: using '%s' which is a link" % file_name
                    return 1
                else:
                    return 0
        else:
            return 0
def save(self, filepath=None):
    """Save the persist to the given C{filepath}.

    If None is specified, then the filename passed during construction
    will be used.
    """
    if filepath is None:
        if self.filename is None:
            return
        filepath = self.filename
    filepath = posixpath.expanduser(filepath)
    if posixpath.isfile(filepath):
        os.rename(filepath, filepath + ".old")
    dirname = posixpath.dirname(filepath)
    if dirname and not posixpath.isdir(dirname):
        os.makedirs(dirname)
    self._backend.save(filepath, self._hardmap)
def copy_hdu(fromfile, tofile, fromhdu=-1, verbose=0):
    #####################################################################
    # Copies the fromhdu of fromfile to the end of tofile               #
    #####################################################################
    import posixpath
    if verbose: print "Opening ", fromfile,
    p = pcfitsio.fits_open_file(fromfile, 0)
    if verbose: print ". Done."
    if fromhdu != -1:
        pcfitsio.fits_movabs_hdu(p, fromhdu)
    if posixpath.isfile(tofile):
        if verbose: print "Opening old file ", tofile
        t = pcfitsio.fits_open_file(tofile, 1)
    else:
        if verbose: print "Creating new file ", tofile
        t = pcfitsio.fits_create_file(tofile)
    pcfitsio.fits_copy_hdu(p, t, 0)
    pcfitsio.fits_close_file(p)
    pcfitsio.fits_close_file(t)
def parseFile(company, file):
    print("Parsing %s,%s " % (company, file))
    parser = SVDParser.for_packaged_svd(company, file + ".svd", 1)
    extention = []
    jsonPath = posixpath.join(posixpath.dirname(posixpath.abspath(__file__)),
                              "Extention", company, file + ".json")
    # print(jsonPath)
    device = parser.get_device()
    if posixpath.isfile(jsonPath):
        extFile = open(jsonPath, "r", encoding="utf-8")
        extention = json.load(extFile)
    subdir = Ft.formatCpuName(Ft.getKey(extention, ["device", "cpu", "name"]), device)
    chipDir = posixpath.join("..", "..", "Lib", "Chip")
    subdir = posixpath.join(chipDir, subdir)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    subdir = posixpath.join(subdir, company)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    subdir = posixpath.join(subdir, file)
    if not posixpath.exists(subdir):
        os.makedirs(subdir)
    chipText = "#pragma once \n"
    incDir = subdir[10:]
    if Ft.getKey(extention, ["kvasir", "io"]):
        parseIo(extention, device, subdir)
        chipText += "#include <%s>\n" % (posixpath.join(incDir, "Io.hpp"))
    for peripheral in device.peripherals:
        if peripheral.name is not None:
            chipText += "#include <%s>\n" % (posixpath.join(incDir, peripheral.name + ".hpp"))
            out = "#pragma once \n#include <Register/Utility.hpp>\n"
            out += "namespace Kvasir {\n"
            out += parsePeripheral(peripheral, Ft.getKey(extention, ["." + peripheral.name]))
            out += "}\n"
            outFile = open(posixpath.join(subdir, peripheral.name + ".hpp"), "w", encoding="utf-8")
            outFile.write(out)
        else:
            print("error no name in %s" % (file))
    outFile = open(posixpath.join(chipDir, file + ".hpp"), "w", encoding="utf-8")
    outFile.write(chipText)
def write_data(filename, data, bitpix=-1, hdu=-1, append=0, verbose=0):
    #############################################################################
    # Write an array, or a list, to the file filename                           #
    # The type of the array determines the value of bitpix, which can be        #
    # overwritten if necessary. Data will be written to the extension           #
    # indicated by the parameter hdu=. If that extension does not exist but the #
    # parameter append= is set to 1, then a new hdu is appended to the data     #
    # Setting verbose=1 turns on the verbose mode                               #
    #############################################################################
    import posixpath
    if type(filename) != type(" "):
        raise InvalidFilename
    try:
        data = numarray.asarray(data)
        # data = numarray.transpose(data)
        naxes = numarray.asarray(numarray.shape(data)).tolist()
        l = []
        for i in range(len(naxes)):
            l.append(naxes[len(naxes) - i - 1])
        naxes = l
    except:
        raise InvalidData
    if bitpix == -1:
        if data.typecode() == 'l': bitpix = 32
        if data.typecode() == 'd': bitpix = -32
    if not (bitpix in [-32, 8, 16, 32]):
        print "Invalid BITPIX: ", bitpix
        return
    ok = 0
    new = 1
    if (posixpath.isfile(filename)):
        p = pcfitsio.fits_open_file(filename, 1)
        ok = 1
        new = 0
        if hdu != -1:
            try:
                if verbose: print "Trying to move to extension #" + `hdu`
                pcfitsio.fits_movabs_hdu(p, hdu)
                append = 0
                ok = 1
            except:
                if verbose: print "Failed to move to extension #" + `hdu`
        if append:
            # Create a new image extension
            if verbose: print "Appending an image extension.",
            pcfitsio.fits_create_img(p, bitpix, naxes)
            if verbose: print "Done.",
            # Find out the number of extensions
            # hdunum = pcfitsio.fits_get_num_hdus(p)
            # if verbose: print "Total #hdu:", hdunum
            # pcfitsio.fits_movabs_hdu(p, hdunum)
            ok = 1
    else:
        if verbose: print "File not found."
        if verbose: print "Creating:", filename,
        p = pcfitsio.fits_create_file(filename)
        new = 1
        if verbose: print "Done."
        if verbose: print "Appending an image extension.",
        pcfitsio.fits_create_img(p, bitpix, naxes)
        pcfitsio.fits_close_file(p)
        p = pcfitsio.fits_open_file(filename, 1)
        if verbose: print "Done.",
        # Find out the number of extensions
        hdunum = pcfitsio.fits_get_num_hdus(p)
        if verbose: print "Total #hdu:", hdunum
        pcfitsio.fits_movabs_hdu(p, hdunum)
        ok = 1
    # Write the data to the file which is now opened/created
    if ok:
        if new == 0:
            if verbose: print "Resizing img ext..",
            pcfitsio.fits_resize_img(p, bitpix, naxes)
        if verbose: print "Writing data.", naxes,
        if verbose: print numarray.multiply.reduce(naxes)
        pcfitsio.fits_write_img(p, 1, numarray.multiply.reduce(naxes), data)
        if verbose: print "Done."
    if verbose: print "Closing", filename,
    pcfitsio.fits_close_file(p)
    if verbose: print "Done."
def HandleEvent(self, event=None):
    """
    Updates which files this plugin handles based upon filesystem events.
    Allows configuration items to be added/removed without server restarts.
    """
    action = event.code2str()
    if event.filename[0] == '/':
        return
    epath = "".join([self.data, self.handles[event.requestID],
                     event.filename])
    if posixpath.isdir(epath):
        ident = self.handles[event.requestID] + event.filename
    else:
        ident = self.handles[event.requestID][:-1]

    fname = "".join([ident, '/', event.filename])

    if event.filename.endswith('.xml'):
        if action in ['exists', 'created', 'changed']:
            if event.filename.endswith('key.xml'):
                key_spec = dict(list(lxml.etree.parse(
                    epath, parser=Bcfg2.Server.XMLParser).find('Key').items()))
                self.key_specs[ident] = {
                    'bits': key_spec.get('bits', 2048),
                    'type': key_spec.get('type', 'rsa')
                }
                self.Entries['Path'][ident] = self.get_key
            elif event.filename.endswith('cert.xml'):
                cert_spec = dict(list(lxml.etree.parse(
                    epath, parser=Bcfg2.Server.XMLParser).find('Cert').items()))
                ca = cert_spec.get('ca', 'default')
                self.cert_specs[ident] = {
                    'ca': ca,
                    'format': cert_spec.get('format', 'pem'),
                    'key': cert_spec.get('key'),
                    'days': cert_spec.get('days', 365),
                    'C': cert_spec.get('c'),
                    'L': cert_spec.get('l'),
                    'ST': cert_spec.get('st'),
                    'OU': cert_spec.get('ou'),
                    'O': cert_spec.get('o'),
                    'emailAddress': cert_spec.get('emailaddress')
                }
                cp = ConfigParser.ConfigParser()
                cp.read(self.core.cfile)
                self.CAs[ca] = dict(cp.items('sslca_' + ca))
                self.Entries['Path'][ident] = self.get_cert
        if action == 'deleted':
            if ident in self.Entries['Path']:
                del self.Entries['Path'][ident]
    else:
        if action in ['exists', 'created']:
            if posixpath.isdir(epath):
                self.AddDirectoryMonitor(epath[len(self.data):])
            if ident not in self.entries and posixpath.isfile(epath):
                self.entries[fname] = self.__child__(epath)
                self.entries[fname].HandleEvent(event)
        if action == 'changed':
            self.entries[fname].HandleEvent(event)
        elif action == 'deleted':
            if fname in self.entries:
                del self.entries[fname]
            else:
                self.entries[fname].HandleEvent(event)
def main(args):
    try:
        print 'Init generate wiki...'
        print 'Reading configuration'
        if posixpath.isfile(propertiesfile):
            read_configuration()
        else:
            print '[ERROR] The configuration file doesnt exist'
            exit()

        template_file = None
        if posixpath.isdir(conf['TEMPLATE_DIR']) and posixpath.isfile(conf['TEMPLATE_FILE']):
            print 'Reading the template...'
            with open(conf['TEMPLATE_FILE'], 'r') as f:
                template_file = f.read()
        else:
            print '[ERROR] The template folder/file doesnt exist'
            exit()

        if posixpath.isdir(conf['PUBLICATION_DIR']):
            print 'Cleaning web folder...'
            shutil.rmtree(conf['PUBLICATION_DIR'])

        # Copy the data from the template
        shutil.copytree(conf['TEMPLATE_DIR'], conf['PUBLICATION_DIR'],
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))

        set_tpl(template_file)

        print 'Generating essays html...'
        os.mkdir(posixpath.join(conf['PUBLICATION_DIR'], conf['ESSAYS_DIR']))
        navigate_essays(conf['ESSAYS_DIR'])

        print 'Generating articles html...'
        os.mkdir(posixpath.join(conf['PUBLICATION_DIR'], conf['ARTICLES_DIR']))
        navigate_articles(conf['ARTICLES_DIR'], '', [])

        print 'Generating index ...'
        gindexpath = posixpath.join(conf['PUBLICATION_DIR'],
                                    'index' + conf['ARTICLES_EXT'])
        contents = []
        if posixpath.isfile(gindexpath):
            pip = subprocess.Popen(['perl', conf['ESSAY_PROCESSOR'], gindexpath],
                                   stdout=subprocess.PIPE)
            mainhtml = pip.stdout.read()
            contents.append(mainhtml)

        print 'Render index ...'
        contents.append(render_essays_index())
        contents.append(render_articles_index(articles_index))
        pagehtml = html_from_tpl('Index', ''.join(contents), '.')
        with open(posixpath.join(conf['PUBLICATION_DIR'], 'index.html'), 'w') as f:
            f.write(pagehtml)

        print 'End of generation...'
    except:
        print "Unexpected error:", sys.exc_info()
        raise
        return 1
    else:
        return 0
#
# Sean Frischmann
# Time Tracker
# ===========================================================================
import sqlite3 as lite
import posixpath as pos
import sys
import createProject
import selectProject
import sys
import scripts
import timerFunctions
import errors

create_database = pos.isfile('project_tracker.db')
con = lite.connect('project_tracker.db')
cur = con.cursor()
if not create_database:
    createProject.list_of_projects(cur)
    createProject.create_worksheet(cur)
    print 'creating database'
scripts.program_message()
while True:
    try:
        scripts.main_menu()
        user_input = raw_input('>> ')
def open(self, receiver, context, m, mode="r"):
    if not posixpath.isfile(receiver.value):
        raise TypeError(receiver.value + " is not a file")

    mode = mode if mode == "r" else str(mode.eval(context))

    return runtime.state.eval(
        """File clone() open("{0:s}", "{1:s}")""".format(receiver.value, mode))
def safe_remove_file(path):
    if posixpath.exists(path):
        if not posixpath.isfile(path):
            raise Exception, "Path is not a file: %s" % path
        os.remove(path)