def query(files):
    """Build and send a 'test' command packet for one submission.

    :param files: collection of file hashes the remote side already has;
        files whose hash is in this collection are sent with payload
        appended, others as metadata only.

    NOTE(review): this function references ``self`` and several names
    (``callback``, ``binary``, ``test``, ``answer``, ``input``, ``output``,
    ``time_limit``, ``memory_limit``, ``checker``, ``run``,
    ``checker_run``) that are not parameters -- it appears to have been
    extracted from a method; confirm the enclosing scope supplies them.
    """
    # msg_id: renamed from 'id', which shadowed the builtin.
    msg_id = ('id_%08d' % self.__message_id).encode('ascii')
    self.__message_id += 1

    def path(f):
        # Files the peer already knows get their payload appended after a
        # '|\r' delimiter; unknown files are announced as metadata only.
        meta = '%s\\%s>%d' % (f.hash, f.name, f.time)
        if f.hash in files:
            return (meta + '|\r').encode('utf-8') + f.load()
        return meta.encode('utf-8')

    self.__response[msg_id] = self.__tested(callback)
    mcn = os.path.splitext(binary.name)[0]
    data = {
        b'ID': msg_id,
        b'Command': b'test',
        b'Source': mcn.encode('utf-8'),
        b'ExeFile': path(binary),
        b'TestPath': path(test),
        b'AnswerPath': path(answer),
        b'InputName': input if input is not None else 'stdin',
        # BUG FIX: the original tested ``input`` here, so OutputName fell
        # back to 'stdout' based on the wrong variable.
        b'OutputName': output if output is not None else 'stdout',
        b'TimeLimit': ('%d' % int(1000 * time_limit)).encode('utf-8'),
        b'MemoryLimit': ('%d' % memory_limit).encode('utf-8'),
        b'CheckerPath': path(checker),
    }
    if run is not None:
        data[b'RunString'] = run.encode('utf-8')
    if checker_run is not None:
        data[b'CheckerRun'] = checker_run.encode('utf-8')
    self.__socket.send(Packet(data)())
def install_bundle(self, bundle_path, target_path=None):
    """Create new installation from bundle.

    :param bundle_path: Path to bundle to install from
    :param target_path: Path to install to (will install to system default)
    """
    unpacked = self.unpack_bundle(bundle_path)
    if target_path:
        destination = path(target_path)
    else:
        destination = path(self.config["path"])
    installation = TBBInstallation(destination)
    if installation.exists():
        # Keep the previous installation around as <path>.OLD.
        backup = installation.path.normpath() + ".OLD"
        if backup.exists():
            self.as_root(["rm", "-rf", backup])
        self.as_root(["mv", installation.path.normpath(), backup])
    self.as_root(["cp", "-R",  # Recursive
                  str(unpacked),
                  str(installation.path)])
    # Being owned by root creates strange problems opening the
    # application and "Firefox is already running" errors.
    self.as_root(["chown", "-R", getpass.getuser(),
                  str(installation.path)])
    return TBBInstallation(destination)
def write_output(self, output, messageobj): ''' Given output text and a message object, chooses a path for writing the log message and appends the output to that file. ''' convo = messageobj.conversation proto = convo.protocol # Convert THIS timestamp to the local timezone so filenames are chosen based # on the local date. if convo.ischat: p = self.get_path_for_chat(convo) else: datefilename = fromutc(messageobj.timestamp).date().isoformat() # 2007-17-5 pathelems = (buddy_path(proto, convo.buddy), datefilename) p = path(path(self.OutputDir).joinpath(*pathelems) + '.' + self.OutputType) # assure path exists ( aim/digsby01/dotsyntax1337.html ) if not p.parent.isdir(): try: p.parent.makedirs() except WindowsError, e: # for race condition between exists check and makedirs if e.winerror == 183: pass else: raise
def prompt_patch_select(intro='Available Patches'): ''' Display a list of available patches in specified directory and prompt user to select a patch by entering it's number in the list. ''' selected = None while selected is None: print intro pos = 0 patches = [d for d in os.listdir(path('')) if os.path.isdir(path(d))] for patch in patches: pos += 1 print ' %s - %s'%(pos, patch) input = prompt("Enter patch number: ") if input: def valid(input): if not input.isdigit(): print '%s is not numeric' return False if not int(input)-1 in range(0, len(patches)): print '%s is not a valid patch number' % str(int(input)-1) return False return True if valid(input): selected = patches[int(input)-1] print 'Selected %s'%selected return selected
def set_val(name, value, section="DEFAULT", encoding=None):
    """Set a value in the per-user config file.

    Parameters
    ----------
    name : str
        The name of the value to set.
    value : str or None
        The new value to set (or None to delete the value).
    section : str
        The section to store the name/value in.
    encoding : str
        The config file's encoding, defaults to
        :py:data:`default_encoding`.
    """
    if encoding is None:  # was '== None'
        encoding = default_encoding
    config = ConfigParser.ConfigParser()
    if not os.path.exists(path()):  # was '== False'
        # touch the file: readfp chokes on a missing file
        open(path(), 'w').close()
    # 'with' guarantees the handles are closed even if parsing/writing
    # raises (the original leaked them on error).
    with codecs.open(path(), 'r', encoding) as f:
        config.readfp(f, path())
    if value is not None:
        config.set(section, name, value)
    else:
        config.remove_option(section, name)
    with codecs.open(path(), 'w', encoding) as f:
        config.write(f)
def main():
    """Build per-alignment PAML working directories and a batch script.

    Walks the --nuc directory for *.nuc alignments, writes a customised
    codeml control file per alignment, and appends the codeml invocation
    for each one to the --ous output script.
    """
    args = opt()
    if args.nuc is None or args.tre is None or args.ctl is None \
            or args.model is None or args.ous is None:
        os.system("python {0} -h".format(sys.argv[0]))
        exit("Error: Incomplete Options")
    nuc, tre, ctl = args.nuc, args.tre, args.ctl
    model, ous = args.model, args.ous
    paml_dir = "{0}/paml".format(os.getcwd())
    if not os.path.isdir(paml_dir):
        os.mkdir(paml_dir)
    # 'with' closes the script file (the original never closed it).
    with open(ous, "w") as out:
        for p, ds, fs in os.walk(nuc):
            for f in fs:
                if f.endswith("nuc"):
                    opath = "{0}/paml/{1}".format(
                        os.getcwd(), os.path.splitext(f)[0])
                    # BUG FIX: the original called os.path(opath) inside a
                    # try -- os.path is a module, so that always raised
                    # TypeError and mkdir ran unconditionally (crashing on
                    # an existing directory).  An existence check was
                    # intended.
                    if not os.path.isdir(opath):
                        os.mkdir(opath)
                    change_ctl(ctl, model, opath,
                               "{0}/{1}".format(p, f), tre)
                    if model in ["free", "site"]:
                        out.write("cd {0} && codeml\n".format(opath))
                    else:
                        # BUG FIX: the original omitted .format(opath) and
                        # wrote the literal '{0}' placeholders to the script.
                        out.write("cd {0}/modela && codeml && "
                                  "cd {0}/modelanull && codeml\n".format(opath))
def __setSyntax(self, view, file_name):
    """Point *view* at the bundled syntax file, preferring the
    'SublimeScalex' package directory when it is present."""
    def syntax_path(package_name):
        return 'Packages{0}{1}{0}{2}'.format(os.sep, package_name, file_name)

    # NOTE(review): os.path.isfile() is applied to what looks like a
    # package *directory* -- confirm this should not be os.path.isdir().
    if os.path.isfile(os.path.join(sublime.packages_path(), "SublimeScalex")):
        view.set_syntax_file(syntax_path('SublimeScalex'))
    else:
        view.set_syntax_file(syntax_path('Scalex Documentation Search'))
def __init__(self):
    """Load the snake sprite images and precompute rotated variants.

    A rotated copy of each sprite is cached for every 4-degree step so no
    rotation work has to happen per frame.
    """
    self.headImage = pygame.image.load(path("head.png"))
    self.middleImage = pygame.image.load(path("middle.png"))
    self.eaterImage = pygame.image.load(path("eater.png"))

    def build_rotations(image):
        # One rotated (and conditionally flipped) copy per 4-degree step.
        # NOTE(review): 'degree < 90 or degree < 270' is equivalent to just
        # 'degree < 270'; the intent was probably 'degree < 90 or
        # degree > 270'.  Behavior preserved pending confirmation.
        rotations = {}
        for degree in xrange(0, 360, 4):
            rotations[degree] = pygame.transform.rotate(image, degree)
            if degree < 90 or degree < 270:
                rotations[degree] = pygame.transform.flip(
                    rotations[degree], True, False)
            elif degree > 270:
                flipped = pygame.transform.flip(
                    rotations[degree], True, False)
                rotations[degree] = pygame.transform.rotate(flipped, degree)
        return rotations

    # head and eater share identical flip/rotate rules (the original
    # duplicated this loop verbatim for both).
    self.headImages = build_rotations(self.headImage)
    self.eaterImages = build_rotations(self.eaterImage)

    # middle segments differ: the >270 branch only flips (no re-rotation).
    self.middleImages = {}
    for degree in xrange(0, 360, 4):
        self.middleImages[degree] = pygame.transform.rotate(
            self.middleImage, degree)
        if degree < 90 or degree < 270:
            self.middleImages[degree] = pygame.transform.flip(
                self.middleImages[degree], True, False)
        elif degree > 270:
            self.middleImages[degree] = pygame.transform.flip(
                self.middleImages[degree], False, True)
def coverage():
    """ Generate test coverage report """
    import subprocess
    import os.path

    # Remove the *.pyc because sometimes coverage gets confused with old
    # versions.
    subprocess.call(['rm $(find . -name *.pyc)'], shell=True)

    nose_options = ['--with-coverage',
                    '--cover-package=emergent,emergent.test',
                    '--cover-erase']

    coverage_results = path('dist') / 'test_coverage'
    coverage_options = ['-a', '-d', str(coverage_results)]
    # Extend coverage_options with the paths to emergent/*.py
    coverage_options.extend(
        str(name) for name in path('emergent').walk()
        if os.path.splitext(str(name))[1] == '.py')

    subprocess.call(['nosetests'] + nose_options)
    coverage_results.rmtree()
    coverage_results.makedirs()
    subprocess.call(['coverage'] + coverage_options)
def main(): p = "" savefile = "" if len(sys.argv) > 1: p = path(sys.argv[1]) if not p.exists(): p = "" if len(sys.argv) > 2: savefile = path(sys.argv[2]).expand() while not p: p = raw_input("Enter the path to the project: ") if p[0] == '"' and p[-1] == '"': p = p[1:-1] p = path(p).expand() if not p.exists(): p = "" proj = Project(p) if not proj.valid: log_warning("The project directory doesn't have the valid structure.") a = raw_input("Do you want to convert it?").lower() if a == "y" or a == "yes": proj.create() else: log_debug("Ok, aborting.") return proj.use() d = Data(proj) for i in range(len(d.list_imgs)): d.process(i) if not savefile: savefile = raw_input("Please enter the name of the file to save: [default:tracking.csv]") if not savefile: savefile = path("tracking.csv") savefile = path(savefile).expand() d.save(savefile)
def createFile(filename,wrtMode,info): file = open(filename,wrtMode) if(info == 1): print 'file:', file print 'filename:', file print os.path(file) return file
def load_html_templates():
    """Read the four HTML templates from the templates directory and return
    them bundled in an HTMLTemplates named tuple."""
    html = namedtuple('HTMLTemplates', 'entry index_page tags progress')
    filenames = ('entry_template.html', 'index_page_template.html',
                 'tags_template.html', 'progress_template.html')
    return html(*(read_file(local_path(join('templates', name)))
                  for name in filenames))
def main():
    """Collect command-line parameters, propagate the debug flag into the
    global settings, and hand the parameters to path()."""
    params = {}
    check_args(params)
    settings['debug'] = params['debug']
    path(params)
def run(self): link = "/etc/simpleseer" if os.path.lexists(link): os.remove(link) supervisor_link = "/etc/supervisor/conf.d/simpleseer.conf" if os.path.lexists(supervisor_link): os.remove(supervisor_link) print "Linking %s to %s" % (self.options.directory, link) os.symlink(self.options.directory, link) hostname = gethostname() hostname_supervisor_filename = hostname + "_supervisor.conf" src_host_specific_supervisor = path(self.options.directory) / 'etc' / hostname_supervisor_filename regular_supervisor = "supervisor.conf" src_supervisor = path(self.options.directory) / 'etc' / regular_supervisor if os.path.exists(src_host_specific_supervisor): src_supervisor = src_host_specific_supervisor print "Linking %s to %s" % (src_supervisor, supervisor_link) os.symlink(src_supervisor, supervisor_link) print "Reloading supervisord" subprocess.check_output(['supervisorctl', 'reload'])
def setup(self, modes):
    """
    Define the available modes and ensure there's a default working
    directory with the PyGameZero assets copied into it.
    """
    self.modes = modes
    logger.info('Available modes: {}'.format(', '.join(self.modes.keys())))
    # Ensure there is a workspace directory.
    wd = self.modes['python'].workspace_dir()
    if not os.path.exists(wd):
        logger.debug('Creating directory: {}'.format(wd))
        os.makedirs(wd)
    # Ensure PyGameZero assets are copied over.  One (subdir, filenames)
    # pair per asset class keeps the copy logic in a single loop instead of
    # the duplicated per-directory blocks it replaces.
    assets = (
        ('images', ('alien.png', 'alien_hurt.png')),
        ('sounds', ('eep.wav',)),
    )
    for subdir, filenames in assets:
        target = os.path.join(wd, subdir)
        if not os.path.exists(target):
            logger.debug('Creating directory: {}'.format(target))
            os.makedirs(target)
            # As before, assets are only copied when the directory is new.
            for filename in filenames:
                shutil.copy(path(filename, 'pygamezero/'),
                            os.path.join(target, filename))
    # Start the timer to poll every second for an attached or removed
    # USB device.
    self._view.set_usb_checker(1, self.check_usb)
def bfs(lab, doors, x, y, w, h, xfinish, yfinish):
    """Breadth-first search through the labyrinth.

    Returns [solved, GeneratedNodes, name] where *name* is the sequence of
    moves reaching (xfinish, yfinish)."""
    solved = False
    # init explored and frontier
    explored = []
    frontier = []
    if x != xfinish or y != yfinish:
        frontier.append(path('', x, y, x, y, doors, None, None))
    else:
        name = ''
        solved = True
    # do the search
    GeneratedNodes = len(frontier)
    while frontier and not solved:
        # pop the oldest node and unpack its fields
        node = frontier[0]
        explored.append(node)
        frontier.pop(0)
        name = node.name
        x, y = node.x, node.y
        prevx, prevy = node.prevx, node.prevy
        doors = dict(node.doors)
        succ = successor(doors, lab, x, y, w, h)
        for move in succ:
            [newx, newy] = getnewpos(move, x, y)
            # Check if labyrinth is solved
            if newx == xfinish and newy == yfinish and not solved:
                name += move
                solved = True
            if not solved and (newx != prevx or newy != prevy):
                GeneratedNodes += 1
                doorsaux = copy.deepcopy(doors)
                if move == 'P':
                    # stepping on a plate toggles the matching door group
                    sval = lab[newx][newy]
                    for var in doorsaux[sval % 100]:
                        doorsaux[sval % 100][var] = 1 - doorsaux[sval % 100][var]
                # skip states already present in explored or frontier
                seen = any(newx == e.x and newy == e.y and doorsaux == e.doors
                           for e in explored)
                if not seen:
                    seen = any(newx == f.x and newy == f.y
                               and doorsaux == f.doors
                               for f in frontier)
                if not seen:
                    frontier.append(path(name + move, newx, newy, x, y,
                                         doorsaux, None, None))
    return [solved, GeneratedNodes, name]
def tags(options):
    """Generates tags for Bespin using jsctags."""
    args = ["jsctags"]
    # Each plugin directory is both a -L library root and a scan target.
    for directory in path("plugins").dirs():
        args.extend(["-L", directory, directory])
    for script in path("static").files("*.js"):
        args.append(script)
    sh(" ".join(args))
def __init__(self, install_path=None, debug=False):
    """
    :param install_path: path to installation
    :param debug: enable debug output
    """
    self.config = SystemConfiguration()
    if install_path:
        self.path = path(install_path)
    else:
        # fall back to the system-wide configured location
        self.path = path(self.config["path"])
    self._debug = debug
def install_path(self):
    """Compute install path."""
    destination = self.data.INSTALL_TO
    if destination:
        return path(destination)
    # default: <PREFIX>/share/<PROJECT>
    return path(self.data.PREFIX) / "share" / self.data.PROJECT
def assert_pth_and_source_work_together(
    self, dashm, package, source
):                                                  # pragma: not covered
    """Run a sub-process scenario and check pth startup + source filtering
    cooperate: exactly the configured source module is measured."""
    if env.METACOV:
        self.skip(
            "Can't test sub-process pth file suppport during metacoverage"
        )

    def fullname(modname):
        # dotted name when running inside a package with -m, plain otherwise
        if package and dashm:
            return '.'.join((package, modname))
        return modname

    def path(basename):
        return os.path.join(package, basename)

    # Main will run sub.py.
    self.make_file(path("main.py"), """\
        import %s
        if True: pass
        """ % fullname('sub'))
    if package:
        self.make_file(path("__init__.py"), "")
    # sub.py will write a few lines.
    self.make_file(path("sub.py"), """\
        with open("out.txt", "w") as f:
            f.write("Hello, world!")
        """)
    self.make_file("coverage.ini", """\
        [run]
        source = %s
        """ % fullname(source))

    self.set_environ("COVERAGE_PROCESS_START", "coverage.ini")

    if dashm:
        cmd = (sys.executable, dashm, fullname('main'))
    else:
        cmd = (sys.executable, path('main.py'))

    # TODO: can we use run_command here instead of Popen?
    from subprocess import Popen
    Popen(cmd).wait()

    with open("out.txt") as f:
        self.assertEqual(f.read(), "Hello, world!")

    # Read the data from .coverage
    self.assert_exists(".coverage")
    data = coverage.CoverageData()
    data.read_file(".coverage")
    summary = data.summary()
    print(summary)
    self.assertEqual(summary[source + '.py'], 2)
    self.assertEqual(len(summary), 1)
def install_tiki(options):
    """Installs the versions of Tiki that are required. Can optionally
    download using git, so that you can keep up to date easier."""
    snapshot = path("static") / "tiki.js"
    if snapshot.exists():
        if options.force:
            snapshot.unlink()
        else:
            info("Tiki snapshot installed already.")
            return

    if not options.git:
        # Non-git mode: fetch the three pieces straight from GitHub and
        # assemble the snapshot immediately.
        info("Downloading Tiki Snapshot")
        preamble = urllib2.urlopen(
            "http://github.com/pcwalton/tiki/raw/master/__preamble__.js"
        ).read().decode("utf8")
        body = urllib2.urlopen(
            "http://github.com/pcwalton/tiki/raw/master/lib/tiki.js"
        ).read().decode("utf8")
        postamble = urllib2.urlopen(
            "http://github.com/pcwalton/tiki/raw/master/__postamble__.js"
        ).read().decode("utf8")
        TIKI_VERSION = u"1.0.0"
        package_id = u"::tiki/%s" % (TIKI_VERSION)
        snapshot.write_text(TIKI_TEMPLATE % locals(), "utf8")
        return

    def get_component(base_name, dest_name, dest_path=".", branch=None,
                      account="pcwalton"):
        # Fetch one GitHub component, either as a tarball or a git clone.
        dest_complete = path(dest_path) / dest_name
        if dest_complete.exists():
            info("%s is already here, no action being taken", base_name)
            return
        if not options.git:
            if branch is None:
                branch = "master"
            info("Downloading %s/%s as a tarball", base_name, branch)
            tarball = urllib2.urlopen(
                "http://github.com/%s/%s/tarball/%s"
                % (account, base_name, branch))
            dirname = tarball.url.split('/')[-1].split('.')[0]
            tar = tarfile.open(name=("%s.tgz" % base_name),
                               fileobj=StringIO(tarball.read()))
            tar.extractall(dest_path)
            tar.close()
            os.rename(os.path.join(dest_path, dirname),
                      os.path.join(dest_path, dest_name))
            return
        info("Checking out %s/%s", base_name, branch)
        sh("git clone -q git://github.com/%s/%s.git %s"
           % (account, base_name, dest_name), cwd=dest_path)
        if branch:
            sh("git checkout --track origin/%s" % branch,
               cwd=os.path.join(dest_path, dest_name))

    get_component("tiki", "tiki", dest_path="frameworks")
    get_component("core_test", "core_test", dest_path="frameworks")

    # Assemble the snapshot from the checked-out working copies.
    preamble = path("frameworks/tiki/__preamble__.js").text('utf8')
    postamble = path("frameworks/tiki/__postamble__.js").text('utf8')
    body = path("frameworks/tiki/lib/tiki.js").text('utf8')
    TIKI_VERSION = "1.0.0"
    package_id = "::tiki/%s" % (TIKI_VERSION)
    snapshot.write_text(TIKI_TEMPLATE % locals(), 'utf8')
def derive_filename(config, hash, newext):
    "Build target filename based on identifying pieces"
    # drop a single leading dot from the extension, if present
    if newext[0] == '.':
        newext = newext[1:]
    source = path(config['filename'])
    if 'outputdir' in config:
        outputdir = path(config['outputdir'])
    else:
        outputdir = source.dirname()
    return outputdir / ('%s-%s.%s' % (source.namebase, hash, newext))
def get_webkit_dir():
    """Locate the WebKit checkout from a --webkit=PATH argument or the
    WEBKITDIR environment variable; raise if it is not a directory."""
    from path import path
    for arg in sys.argv[:]:
        if arg.startswith('--webkit='):
            webkit_dir = path(arg[len('--webkit='):])
            break
    else:
        webkit_dir = path(os.environ.get('WEBKITDIR', 'webkit'))
    if not webkit_dir.isdir():
        raise Exception(
            '%r is not a valid path\nplease set WEBKITDIR in the environment'
            ' or pass --webkit=PATH to this script' % str(webkit_dir))
    return webkit_dir
def detect_format(config):
    """Sniff the sequence file named by config['filename'] and record its
    format in config ('genbank', 'fasta' or 'raw'), plus id/description
    for the structured formats, the sequence length, and the derived
    .ddna cache file.  On any parse error config['format'] is set to None.
    """
    if 'format' in config:
        return
    try:
        filename = path(config['filename'])
        with filename.open('r') as f:
            header = f.read(5)
        if filename.ext in ('.gb', '.gbk') or header.startswith('LOCUS'):
            log.debug("Attempting %s as genbank", filename)
            seqrec = genbank.parse_seq_rec(config['filename'])
            config['format'] = 'genbank'
            config['id'] = seqrec.id
            config['description'] = seqrec.description
            seq = str(seqrec.seq)
        elif filename.ext in ('.fna', '.fasta') or header.startswith('>'):
            seqrec = SeqIO.read(filename, 'fasta')
            config['format'] = 'fasta'
            config['id'] = seqrec.id
            config['description'] = seqrec.description
            seq = str(seqrec.seq)
        else:
            with filename.open('r') as f:
                seq = f.read()
            seq = re.sub(r'\s', '', seq)
            config['format'] = 'raw'
        config['length'] = len(seq)
        ddna = derive_filename(config, filename.getmtime(), 'ddna')
        if not ddna.exists():
            with mkstemp_rename(ddna) as f:
                f.write(seq.upper())
        config['ddna'] = ddna
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit
        # and KeyboardInterrupt.
        log.exception("Error detecting format")
        config['format'] = None
def get_scms_for_path(path):
    """
    Returns all scm's found at the given path. If no scm is recognized -
    empty list is returned.

    :param path: path to directory which should be checked. May be callable.

    :raises VCSError: if given ``path`` is not a directory
    """
    from vcs.backends import get_backend
    if hasattr(path, '__call__'):
        path = path()
    if not os.path.isdir(path):
        raise VCSError("Given path %r is not a directory" % path)

    found = []
    for alias in ALIASES:
        if os.path.isdir(os.path.join(path, '.' + alias)):
            found.append(alias)
            continue
        # No dotdir -- but it may still be a bare repository (no working
        # directory), so let the backend itself decide.
        try:
            get_backend(alias)(path)
            found.append(alias)
        except RepositoryError:
            # Wrong backend
            pass
        except VCSError:
            # No backend at all
            pass
    return found
def test(self):
    """
    Test whether is possible to write PDF file to spool.

    :raises PDFPrinterTestError: when neither the spool directory nor the
        target file itself is writable.
    """
    # BUG FIX: the second check called os.path(self.pdf_path, os.W_OK) --
    # os.path is a module, not callable, so it raised TypeError whenever
    # the directory was unwritable.  os.access was intended.
    if not os.access(os.path.dirname(self.pdf_path), os.W_OK) \
            and not os.access(self.pdf_path, os.W_OK):
        raise PDFPrinterTestError('Cannot write PDF to {}'.format(
            self.pdf_path))
def setUp(self):
    """Build two feature tables: ft, and a deliberately rt-shifted copy
    ft2 with one row removed (so ft2 must be treated as a reference map)."""
    def data_path(*parts):
        # join *parts* under the directory containing this test module
        import os.path
        here = os.path.dirname(os.path.abspath(__file__))
        return os.path.join(here, *parts)

    ft = loadTable(data_path("data", "features.table"))

    # make copy and shift
    ft2 = ft.copy()

    def shift(t, col):
        ix = t.getIndex(col)
        for r in t.rows:
            r[ix] += 2.0 + 0.1 * r[ix] - 0.005 * r[ix] * r[ix]

    shift(ft2, "rt")
    shift(ft2, "rtmin")
    shift(ft2, "rtmax")

    pms = set(ft2.getValue(row, "peakmap") for row in ft2.rows)
    pmrtsbefore = []
    assert len(pms) == 1
    for pm in pms:
        # apply the same quadratic shift to the spectra themselves
        for spec in pm.spectra:
            pmrtsbefore.append(spec.rt)
            spec.rt += 2.0 + 0.1 * spec.rt - 0.005 * spec.rt * spec.rt

    # delete one row, so ft should become reference map !
    del ft2.rows[-1]
    self.ft = ft
    self.ft2 = ft2
def install(hostname=None, device=None, hostaddr=None, gateway=None,
            interface=None, dns="8.8.8.8,8.8.4.4"):
    """Install and configure CoreOS on a host waiting in the CoreOS ramdisk"""
    if hostname is None:
        abort("Hostname is required.")
    if device is None:
        abort("Installation device is required.")
    if hostaddr is not None:
        if (gateway is None) or (dns is None):
            abort("Gateway and DNS must be specified if hostaddr is specified")
        hostaddr = ipaddr.IPv4Network(hostaddr)
        gateway = ipaddr.IPv4Address(gateway)
        dns = dns.split(",")

    # When a local mirror exists, stage it (plus a wget shim) in /tmp so
    # coreos-install reads from it instead of the network.
    if os.path.isdir("coreos-mirror"):
        fake_wget = os.path.join(os.path.dirname(env.real_fabfile),
                                 "fake_wget.sh")
        put(fake_wget, "/tmp/wget", mode=777, use_sudo=True)
        put("coreos-mirror/*", "/tmp")

    with path("/tmp", behavior="prepend"):
        sudo("coreos-install -d %s" % device)

    @_contextmanager
    def _mount():
        # bind /usr and /dev so chrooted configuration tools work
        sudo("mount %s9 /mnt" % device)
        sudo("mount -o bind /usr /mnt/usr")
        sudo("mount -o bind /dev /mnt/dev")
        yield
        sudo("umount -R /mnt")

    with _mount(), cd("/mnt"):
        install_configure(hostname=hostname, hostaddr=hostaddr,
                          gateway=gateway, interface=interface, dns=dns,
                          path_prefix="/mnt")
    reboot()
def InitializeNewProfile(browser_path, process, browser_wait, extra_args, profile_dir, init_url, log): """Runs browser with the new profile directory, to negate any performance hit that could occur as a result of starting up with a new profile. Also kills the "extra" browser that gets spawned the first time browser is run with a new profile. Args: browser_path: String containing the path to the browser exe profile_dir: The full path to the profile directory to load """ PROFILE_REGEX = re.compile('__metrics(.*)__metrics', re.DOTALL|re.MULTILINE) command_line = ffprocess.GenerateBrowserCommandLine(browser_path, extra_args, profile_dir, init_url) bcontroller = path('talos/bcontroller.py') process = subprocess.Popen('python %s --command "%s" --name %s --timeout %d --log %s' % (bcontroller, command_line, process, browser_wait, log), universal_newlines=True, shell=True, bufsize=0, env=os.environ) res = 0 total_time = 0 while total_time < 600: #10 minutes time.sleep(1) if process.poll() != None: #browser_controller completed, file now full if not os.path.isfile(log): raise talosError("no output from browser") results_file = open(log, "r") results_raw = results_file.read() results_file.close() match = PROFILE_REGEX.search(results_raw) if match: res = 1 print match.group(1) break total_time += 1 return res
def test_untrusted_client(self):
    """An untrusted client must be shown the consent form."""
    response = self.login_and_authorize()
    expected = 'action="{}"'.format(path(reverse("oauth2:authorize")))
    # Check that the consent form is presented
    self.assertContains(response, expected, status_code=200)
Python 3.6.3 (v3.6.3:2c5fed8, Oct  3 2017, 17:26:49) [MSC v.1900 32 bit (Intel)] on win32
Type "copyright", "credits" or "license()" for more information.
>>> import os
>>> os.path.basename('E:\\A\\B\\C')
'C'
>>> os.path.dirname('E:\\A\\B\\C')
'E:\\A\\B'
>>> os.path.join('E:\\','A')
'E:\\A'
>>> os.path.split('E:\\a\\b\\c')
('E:\\a\\b', 'c')
>>> os.path.splitext('E:\\a\\b\\modern.family')
('E:\\a\\b\\modern', '.family')
>>> os.path.splitext('E:\\a\\b\\modern_family.mp4')
('E:\\a\\b\\modern_family', '.mp4')
>>> os.path.dirname('E:\\a\\b\\modern_family.mp4')
'E:\\a\\b'
def get_urls(self):
    """Prepend the CSV-import route to the default admin URLs."""
    extra = [path('import/', self.import_csv)]
    return extra + super().get_urls()
from ._version import version
import os.path
from importlib.resources import path

import magnet
from magnet.utils.config import Config

__version__ = version

# Always start from the magnet.ini packaged inside magnet itself.
with path(magnet, 'magnet.ini') as ini_file:
    filenames = [ini_file]
    # If a magnet.ini file exists where we were imported from, use it after
    # the pre-packaged .ini file.  This allows overriding of values without
    # having to alter the pre-packaged .ini file, which may be buried deep
    # inside the site-packages folder.
    if os.path.exists('magnet.ini'):
        filenames.append('magnet.ini')
    config = Config('magnet', filenames)
This is how the PyAutoFit source code stores the path to different components of the lens model, but it is not
in-line with the PyAutoLens API used to compose a lens model.

We can alternatively use the following API:
"""
samples = list(agg.values("samples"))[0]
samples = samples.with_paths([
    "galaxies.lens.mass.einstein_radius",
    "galaxies.source.bulge.sersic_index"
])
print(
    "All parameters of the very first sample (containing only the lens mass's einstein radius and "
    "source bulge's sersic index).")
"""
Above, we filtered the `Samples` by asking for all parameters which included the path
("galaxies", "lens", "mass", "einstein_radius").

We can alternatively filter the `Samples` object by removing all parameters with a certain path. Below, we remove
the centres of the mass model to be left with 10 parameters.
"""
samples = list(agg.values("samples"))[0]
print("Parameter paths in the model which are used for filtering:")
print(samples.model.paths)
print("Parameters of first sample")
print(samples.parameter_lists[0])
print(samples.model.prior_count)
def location(self):
    """Return the directory containing this module.

    BUG FIX: the previous implementation called os.path(...) -- os.path is
    a module, not a callable -- so every call raised TypeError, exactly as
    the pylint 'not-callable' warning noted in the old comment indicated.
    """
    return os.path.dirname(__file__)
final_test[i] = (scipy.special.gamma((v + D) / 2) * ( (1 + (d1[i] / v))**(-(v + D) / 2))) / scipy.special.gamma(v / 2) return final_test def Posterior(test_face, test_non): Posterior_face = test_face / (test_face + test_non) Posterior_non = test_non / (test_face + test_non) return Posterior_face, Posterior_non # ############################################################################# # ----------------------------------------------------------------------------- iteration = 7 flattened_space_face = path(1) flattened_space_non = path(2) flattened_space_test_face = path(3) flattened_space_test_non = path(4) Mean, Cov, v, D = Initial_parameters() Mean_non, Cov_non, v_non, D_non = Initial_parameters() print "initial Cov face:", Cov print "initial Cov non face:", Cov_non for iter in range(iteration): if iter < iteration: print "iter: ", iter expectation_h, expectation_logh, d1 = Estep(flattened_space_face, Mean, Cov, v, D) Mean, Cov, v = Mstep(expectation_h, expectation_logh,
def resolvePath(relativePath):
    """Return *relativePath* resolved against the directory containing
    this module, as an absolute path string."""
    from pathlib import Path as path
    base = path(__file__).parent.absolute()
    return str(base / relativePath)
import signal import os.path import tempfile import subprocess from paver import easy from paver.path import path from paver.options import Bunch CWD = os.path.abspath(os.curdir) BASKET = os.environ.get("BASKET", "") if BASKET: sys.stdout.write("Using Environment BASKET '%s'." % BASKET) MODELDIR = path(os.path.abspath(os.path.join(CWD, "model"))) ADMINCTLDIR = path(os.path.abspath(os.path.join(CWD, "adminctl"))) SERVICEDIR = path(os.path.abspath(os.path.join(CWD, "service"))) CLIENTDIR = path(os.path.abspath(os.path.join(CWD, "client"))) # Paver global options we'll add to: easy.options( # Defaults for environment: develop=Bunch( basket=None, target_dir=None, ),
# -*- coding: utf-8 -*- """ test_list_enumerated ~~~~~~~~~~~~~~~~~~~~ Test the ``:list: enumerated`` option. """ import os.path import re from util import path, with_app srcdir = path(__file__).parent.joinpath('list_enumerated').abspath() def teardown_module(): (srcdir / '_build').rmtree(True) @with_app(srcdir=srcdir, warningiserror=True) def test_encoding(app): app.builder.build_all() with open(os.path.join(app.outdir, "index.html")) as stream: assert re.search( '<ol .*id="bibtex-bibliography-index-0".* start="1".*>' '.*<li>.*Akkerdju.*</li>' '.*<li>.*Bro.*</li>' '.*<li>.*Chap.*</li>' '.*<li>.*Dude.*</li>' '.*</ol>'
__all__ = ["urlpatterns"] THALIA_SITEMAP = { "main-static": StaticViewSitemap, } THALIA_SITEMAP.update(activemembers_sitemap) THALIA_SITEMAP.update(members_sitemap) THALIA_SITEMAP.update(documents_sitemap) THALIA_SITEMAP.update(thabloid_sitemap) THALIA_SITEMAP.update(partners_sitemap) THALIA_SITEMAP.update(education_sitemap) THALIA_SITEMAP.update(events_sitemap) THALIA_SITEMAP.update(singlepages_sitemap) urlpatterns = [ path("admin/", admin.site.urls), path("", IndexView.as_view(), name="index"), # Default helpers path( "user/", include([ path( "oauth/", include( ( base_urlpatterns + [ path( "authorised-apps/", AuthorizedTokensListView.as_view(), name="authorized-token-list", ),
# -*- coding: utf-8 -*- """ test_list_bullet ~~~~~~~~~~~~~~~~ Test the ``:list: bullet`` option. """ import os.path import re from util import path, with_app srcdir = path(__file__).parent.joinpath('list_bullet').abspath() def teardown_module(): (srcdir / '_build').rmtree(True) @with_app(srcdir=srcdir, warningiserror=True) def test_encoding(app): app.builder.build_all() with open(os.path.join(app.outdir, "index.html")) as stream: assert re.search( '<ul id="bibtex-bibliography-index-0">' '.*<li>.*Akkerdju.*</li>' '.*<li>.*Bro.*</li>' '.*<li>.*Chap.*</li>' '.*<li>.*Dude.*</li>' '.*</ul>',
def __init__(self):
    """Open the stocks database and make sure its tables exist."""
    db_file = path() + r"/db/stocks.db"
    self.conn = sqlite3.connect(db_file)
    self.create_tables()
def Check_path(path):
    """Create directory ``path`` (and any missing parents) if it does not
    already exist.

    BUG FIX: the original tested ``os.path(path)`` -- os.path is a module,
    not a callable -- so the check raised TypeError on every call.
    ``os.path.exists`` was intended.
    """
    if not os.path.exists(path):
        os.makedirs(path)
'TEST_CHARSET': 'utf8', 'TEST_COLLATION': 'utf8_general_ci', }, } DATABASE_ROUTERS = ('multidb.PinningMasterSlaveRouter', ) SLAVE_DATABASES = [] # L10n TIME_ZONE = 'America/Los_Angeles' USE_I18N = True USE_L10N = True TEXT_DOMAIN = 'django' STANDALONE_DOMAINS = [TEXT_DOMAIN, 'djangojs'] LANGUAGE_CODE = 'en-US' LOCALE_PATHS = [path('locale')] # Tells the extract script what files to parse for strings and what functions to use. PUENTE = { 'BASE_DIR': ROOT, 'DOMAIN_METHODS': { 'django': [('mozillians/**.py', 'python'), ('mozillians/**/templates/**.html', 'django'), ('mozillians/**/jinja2/**.html', 'jinja2')] } } # Tells the product_details module where to find our local JSON files. # This ultimately controls how LANGUAGES are constructed. PROD_DETAILS_DIR = path('lib/product_details_json')
# Resolve the source and dependency-output directories from the command line.
src_dir = sys.argv[1]
dep_dir = sys.argv[2]

# NOTE(review): f90_pkl / c_pkl / f90_dep / c_dep must be defined earlier in
# this file; they are rebased onto dep_dir here -- confirm.
f90_pkl = os.path.join(dep_dir, f90_pkl)
c_pkl = os.path.join(dep_dir, c_pkl)
f90_dep = os.path.join(dep_dir, f90_dep)
c_dep = os.path.join(dep_dir, c_dep)

# main script
f90_names = [
    get_name(src_file)
    for src_file in glob.glob(os.path.join(src_dir, "*.f90"))
]
c_names = [
    get_name(src_file)
    # BUG FIX: the original called os.path(src_dir, "*.c") -- os.path is a
    # module, not callable, which raised TypeError.  os.path.join intended.
    for src_file in glob.glob(os.path.join(src_dir, "*.c"))
]
src_names = f90_names + c_names

# module files
check_compiled_files(MOD_EXT, f90_names)

# object files and library
if check_compiled_files(OBJ_EXT, src_names) > 0:
    libfile = glob.glob("*." + LIB_EXT)
    if len(libfile) > 0:
        remove_file(libfile[0])

# dictionaries
if dictionary_is_obsolete(f90_pkl, f90_names):
    gendepend_f90.gendepend(f90_pkl)
import os.path

from django.conf import settings
from django.conf.urls.static import static
from django.urls import path

from . import views

app_name = 'catalogue'

urlpatterns = [
    # landing page: full catalogue listing
    path('', views.IndexView.as_view(), name='index'),
    # detail page for a single catalogue item
    path('<int:pk>/', views.DetailView.as_view(), name='detail'),
]
import os  # moved to the top: os was used before its (mid-file) import
import subprocess

subprocess.call("ls")

os.name
'''
posix: linux unix Mac
nt: windows
'''

# Inspect environment variables.
os.environ
os.environ.get("PATH")
# BUG FIX: os.path is a module, not callable -- os.path() raised TypeError.
os.path.abspath('.')

# Run linux commandline
# cmd = ("mv genePi.out {0}").format(newname)
cmd = ("mv genePi.out {0}").format(outputfile)
os.system(cmd)

# create folder
os.mkdir('/Users/michael/testdir')
# remove folder
os.rmdir('/Users/michael/testdir')
def main():
    """Write every network's subnet definition to subnet.conf next to this
    module.

    BUG FIX: the original called os.path(...) (a module, not a callable,
    so it raised TypeError) and then passed three positional arguments to
    open(path, 'subnet.conf', 'w'); the directory and filename must be
    joined into a single path.
    """
    conf_dir = os.path.dirname(__file__)
    with open(os.path.join(conf_dir, 'subnet.conf'), 'w') as f:
        for network in Network.objects.all():
            f.write(network.build_subnet())
import sqlite3 from os import path ROOT = path.dirname(path(__file__)) conn = sqlite3.connect(ROOT + "/sample.db") c = conn.cursor() c.execute('SELECT email FROM users') for row in c.fetchall(): print row[0]
import os.path PROJECT_MODULE = '.'.join(__name__.split('.')[:-2]) def path(location): return os.path.join(PROJECT_MODULE, location) TEMPLATE_DIRS = (path('main/templates')) STATICFILES_DIRS = (path('main/static'), ) TIME_ZONE = 'America/New_York' LANGUAGE_CODE = 'en-us' USE_I18N = False USE_L10N = False USE_TZ = True STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader',
3] # Selecting the displaybox of .gallery-category .wallpaper displayBoxVC = soup.body.section.contents[5].contents[ 3] # Selecting the displaybox of .gallery-category .vector displayBoxPA = soup.body.section.contents[7].contents[ 3] # Selecting the displaybox of .gallery-category .pixelart displayBoxTR = soup.body.section.contents[9].contents[ 3] # Selecting the displaybox of .gallery-category .traditional displayBoxIC = soup.body.section.contents[11].contents[ 3] # Selecting the displaybox of .gallery-category .icon displayBoxBN = soup.body.section.contents[13].contents[ 3] # Selecting the displaybox of .gallery-category .banner # print(displayBoxWP) # Loop over images in the working dir and uploads everything to imgur directory = os.path('.', 'uploadHelper') for filename in os.listdir(directory): mo = namePattern.search(filename) if mo is None: #skips files that don't match regex continue # Open image file and encode to base64 with open(os.path.join(directory, filename), 'rb') as f: data = f.read() encodedData = base64.b64encode(data) f.close() category = mo.group(1) #get the category of the file via regex extension = mo.group(3) #get the extension of the file via regex
return (enc, dec) if __name__ == "__main__": import os import argparse os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1' parser = argparse.ArgumentParser() parser.add_argument('--exp_dir', type=str, default='./saved_models/wae_mmd/') args = parser.parse_args() logdir = args.exp_dir with open(os.path(logdir, "config.json")) as json_file: config = json.load(json_file) enc_chkpt = os.path.join(logdir, "models", "encoder", "encoder") dec_chkpt = os.path.join(logdir, "models", "decoder", "decoder") enc, dec = load_models(config, enc_chkpt, dec_chkpt) n_random_batches = 10 _, test_dataset = get_dataset(batch_size=config["batch_size"]) # These two following blocks of code are responsible for: ## Sampling the latent space and decoding a few images, then stacking these ## decoded samples into a png and saving it.
import plone.z3cform.templates
import z3c.form.interfaces


def path(filepart):
    """Return the absolute path of *filepart* inside this package's
    templates/ directory."""
    return os.path.join(
        os.path.dirname(plone.app.z3cform.__file__), 'templates', filepart
    )


# Override the layout wrapper view default template with a more Plone-looking
# one
layout_factory = plone.z3cform.templates.ZopeTwoFormTemplateFactory(
    path('layout.pt'),
    form=plone.z3cform.interfaces.IFormWrapper,
    request=plone.app.z3cform.interfaces.IPloneFormLayer)

# Override the form for the standard full-page form rendering
form_factory = plone.z3cform.templates.ZopeTwoFormTemplateFactory(
    path('form.pt'),
    form=z3c.form.interfaces.IForm,
    request=plone.app.z3cform.interfaces.IPloneFormLayer)


# The ploneform-macros view (body continues in the next chunk of the file)
class Macros(BrowserView):
    def __getitem__(self, key):
import os

# BUG FIX: os.path is a module, not a callable — os.path() raised
# "TypeError: 'module' object is not callable".  Print the module object
# itself (which shows the platform implementation in use, e.g. posixpath).
print(os.path)
try:
    # import pya
    from klayout import pya
    if ("Application" in str(dir(pya))):
        from SiEPIC.utils import get_technology_by_name
        op_tag = "GUI"  # import pya functions
    else:
        raise ImportError
# Narrowed from a bare `except:` — only the ImportError raised above (or
# by the imports) should trigger the scripted fallback.
except ImportError:
    import klayout.db as pya
    from zeropdk import Tech
    op_tag = "script"

# BUG FIX: os.path is a module, not a callable, and it has no `.parent`
# attribute (that is pathlib.Path).  Take the parent of this file's
# directory with two os.path.dirname calls instead.
# NOTE(review): relies on `os` being imported earlier in the file — confirm.
lyp_filepath = os.path.join(
    os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
    "klayout_Layers_GSiP.lyp")
print(lyp_filepath)

from pya import Box, Point, Polygon, Text, Trans, LayerInfo, \
    PCellDeclarationHelper, DPoint, DPath, Path, ShapeProcessor, \
    Library, CellInstArray

# Directory containing this module (note: rebinds the name `path`).
path = os.path.dirname(os.path.abspath(__file__))


def linspace_without_numpy(low, high, length):
    """Return `length` evenly spaced values starting at `low` (numpy-free).

    Steps by (high - low) / length, so `high` itself is excluded.
    """
    step = ((high - low) * 1.0 / length)
    return [low + i * step for i in range(length)]
def location(self):
    """Return the directory containing this module.

    BUG FIX: the original wrapped the result in os.path(...) — os.path is
    a module, not a callable — so every call raised TypeError.
    """
    return os.path.dirname(__file__)
def build_path(p):
    """Rebase *p* so that it is expressed relative to the build directory."""
    candidate = path(p)
    relative_parent = OUTPUT_DIR.relpathto(candidate.parent)
    return relative_parent / candidate.name
    # (Tail of a function whose `def` line is outside this chunk.)
    # Require an explicit wxWidgets checkout; bail out otherwise.
    if wxdir is None:
        print >> sys.stderr, 'Please specifiy --wx=PATH_TO_WX on the command line'
        sys.exit(-1)
    # Normalize to an absolute path object and sanity-check it exists.
    wxdir = path(wxdir).abspath()
    if not wxdir.isdir():
        raise AssertionError('cannot find WXWIN at %s' % wxdir)
    return wxdir


# Path to wxWidgets source and include files
WXWIN = wx_path()

from wxpyfeatures import emit_features_file

VERBOSE = True
BAKEFILES_VERBOSE = False

OUTPUT_DIR = path('build')
SRC_DIR = path('src')
GENERATED_SRC_DIR = SRC_DIR / 'generated'

# find out if we're running with a debug build of python
DEBUG = hasattr(sys, 'gettotalrefcount')

try:
    import sipconfig
except ImportError:
    sys.modules.pop('sipconfig', None)
    # Allow pointing at a SIP install via the SIP_DIR environment variable.
    sip_dir = os.environ.get('SIP_DIR')
    if sip_dir is not None:
        sys.path.append(sip_dir)
        import sipconfig
    else:
def __init__(self, cli_exec):
    """Remember the CLI executable and expose its bare filename as .name."""
    self._executable = cli_exec
    # namebase strips both the directory part and the extension.
    self.name = path(cli_exec).namebase
import os
import zipfile
from zipfile import ZipFile

# Directory this module lives in, and the ./zip folder holding archives.
mod_path = __file__
mod_dir = os.path.split(mod_path)[0]
zdir = os.path.join(".", "zip")

# (A bare os.listdir() call whose result was discarded was removed — it
# had no effect.)
zfiles = os.listdir(zdir)
for fname in zfiles:
    # BUG FIX: os.path is a module, not a callable — os.path(zdir, fname)
    # raised TypeError.  os.path.join builds the full file path.
    fpath = os.path.join(zdir, fname)
    print(fpath)

if __name__ == '__main__':
    pass