def get_sys_path(rcpath, app_name, section_name=None):
    """Return a folder path if it exists.

    First checks whether `rcpath` is an existing system path; if it is,
    returns it expanded and made absolute. Otherwise looks up `rcpath` as a
    variable name in the `app_name` rcfiles, or exclusively within the given
    `section_name`, if any.

    Parameters
    ----------
    rcpath: str
        Existing folder path, or variable name in the `app_name` rcfile that
        holds an existing one.

    app_name: str
        Name of the application whose rcfile configuration files are searched.

    section_name: str
        Name of a section in the `app_name` rcfile to look in exclusively for
        variable names.

    Returns
    -------
    sys_path: str
        An expanded, absolute file or folder path, if the path exists.

    Raises
    ------
    IOError
        If the proposed sys_path does not exist.
    """
    # first check if it is an existing path
    if op.exists(rcpath):
        return op.realpath(op.expanduser(rcpath))

    # look for the rcfile
    settings = rcfile(app_name, section_name)

    # look for the variable within the rcfile configurations
    try:
        sys_path = op.expanduser(settings[rcpath])
    except KeyError:
        raise IOError('Could not find an existing variable with name {0} in'
                      ' section {1} of {2}rc config setup. Maybe it is a'
                      ' folder that could not be found.'.format(rcpath, section_name, app_name))
    # found the variable, now check if it is an existing path
    else:
        if not op.exists(sys_path):
            raise IOError('Could not find the path {3} indicated by the '
                          'variable {0} in section {1} of {2}rc config '
                          'setup.'.format(rcpath, section_name, app_name, sys_path))

        # expand the path and return
        return op.realpath(op.expanduser(sys_path))
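A minimal usage sketch for the function above, assuming an `app` rcfile with a `[paths]` section whose `data_dir` entry points at an existing folder (the application name, section, and variable here are hypothetical; `op` and `rcfile` come from the same module as the function):

import os.path as op

# Existing path: returned expanded and absolute.
print(get_sys_path('/tmp', 'app'))
# Variable lookup: resolved through the rcfile section.
print(get_sys_path('data_dir', 'app', section_name='paths'))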
def doOutputDirectoryCreation(configFile):
    """
    Create all the necessary output folders.

    :param str configFile: Name of configuration file.
    :raises OSError: If the directory tree cannot be created.
    """
    config = ConfigParser()
    config.read(configFile)

    outputPath = config.get('Output', 'Path')

    log.info('Output will be stored under %s', outputPath)

    subdirs = ['tracks', 'windfield', 'plots', 'plots/timeseries',
               'log', 'process', 'process/timeseries']

    if not isdir(outputPath):
        try:
            os.makedirs(outputPath)
        except OSError:
            raise

    for subdir in subdirs:
        if not isdir(realpath(pjoin(outputPath, subdir))):
            try:
                os.makedirs(realpath(pjoin(outputPath, subdir)))
            except OSError:
                raise
def requirements(filename, module):
    '''Collect the names of existing .js files under the module root that
    `filename` references.'''
    head, tail = split(realpath(filename))
    while tail and tail != module:
        head, tail = split(head)
    with open(filename) as fp:
        content = fp.read()
    requirements, seen = set(), set()
    for match in modules(content):
        parts = match.group().split('.')
        for index in range(1, len(parts)):
            reqname = sep.join(parts[:index + 1]) + '.js'
            reqpath = join(head, reqname)
            if reqpath in seen:
                continue
            seen.add(reqpath)
            if not exists(reqpath):
                continue
            if realpath(filename) == realpath(reqpath):
                continue
            requirements.add(reqname)
    return list(requirements)
def iter_files(self):
    """Yield the real path of every tracked file, each at most once."""
    seen_paths_ = []
    files = self.filepaths
    dirs = self.dirpaths
    exclude_dirs = self.ignored_dirs

    for filepath in files:
        pth = path.realpath(path.abspath(filepath))
        if pth not in seen_paths_:
            seen_paths_.append(pth)
            yield pth

    for dirpath in dirs:
        dirpath = path.abspath(dirpath)
        for dirpath, dirnames, filenames in walk(dirpath):
            ## remove excluded dirs
            for dir in [dir for dir in exclude_dirs if dir in dirnames]:
                self.log.debug('Ignoring dir: {0}'.format(dir))
                dirnames.remove(dir)
            for filepath in filenames:
                pth = path.join(dirpath, filepath)
                pth = path.realpath(path.abspath(pth))
                if pth not in seen_paths_:
                    seen_paths_.append(pth)
                    yield pth
def test_knownValues_file_from_modpath_2(self):
    from os import path
    self.assertEqual(
        path.realpath(modutils.file_from_modpath(["os", "path"]).replace(".pyc", ".py")),
        path.realpath(path.__file__.replace(".pyc", ".py")),
    )
def main(args):
    emacs_dir = path.dirname(path.realpath(__file__))
    build_dir = path.join(emacs_dir, "build")
    em = Manager(build_dir, emacs_dir)
    if args["--remove"] or args["--all"]:
        em.remove()
    if args["--depends"] or args["--all"]:
        em.depends()
    if args["--get"] or args["--all"]:
        em.get()
    if args["--update"] or args["--all"]:
        em.update()
    if args["--configure"] or args["--all"]:
        em.configure()
    if args["--build"] or args["--all"]:
        em.build()
    if args["--pin"] or args["--all"]:
        em.pin()
def full_path_permission_for_user(prefix, path, username, skip_prefix=False):
    """
    Assuming username is identical to the os username, this checks that the
    given user can read the specified path by checking the file permission
    and each parent directory permission.

    :type prefix: string
    :param prefix: a directory under which ``path`` is to be checked

    :type path: string
    :param path: a filename to check

    :type username: string
    :param username: a username matching the systems username

    :type skip_prefix: bool
    :param skip_prefix: skip the given prefix from being checked for permissions
    """
    full_path = realpath(join(prefix, path))
    top_path = realpath(prefix) if skip_prefix else None
    can_read = __path_permission_for_user(full_path, username)
    if can_read:
        depth = 0
        max_depth = full_path.count(separator)
        parent_path = dirname(full_path)
        while can_read and depth != max_depth:
            if parent_path in [separator, top_path]:
                break
            if not __path_permission_for_user(parent_path, username):
                can_read = False
            depth += 1
            parent_path = dirname(parent_path)
    return can_read
def ProcessFlags(env, flags):
    for f in flags:
        if not f:
            continue
        parsed_flags = env.ParseFlags(str(f))
        for flag in parsed_flags.pop("CPPDEFINES"):
            if not isinstance(flag, list):
                env.Append(CPPDEFINES=flag)
                continue
            if '\"' in flag[1]:
                flag[1] = flag[1].replace('\"', '\\\"')
            env.Append(CPPDEFINES=[flag])
        env.Append(**parsed_flags)

    # fix relative CPPPATH & LIBPATH
    for k in ("CPPPATH", "LIBPATH"):
        for i, p in enumerate(env.get(k, [])):
            if isdir(p):
                env[k][i] = realpath(p)

    # fix relative path for "-include"
    for i, f in enumerate(env.get("CCFLAGS", [])):
        if isinstance(f, tuple) and f[0] == "-include":
            env['CCFLAGS'][i] = (f[0], env.File(realpath(f[1].get_path())))

    # Cancel any previous definition of name, either built in or
    # provided with a -D option // Issue #191
    undefines = [u for u in env.get("CCFLAGS", [])
                 if isinstance(u, basestring) and u.startswith("-U")]
    if undefines:
        for undef in undefines:
            env['CCFLAGS'].remove(undef)
        env.Append(_CPPDEFFLAGS=" %s" % " ".join(undefines))
def get_absolute_path(path):
    if path_isabs(path):
        abs_dir = realpath(path)
    else:
        abs_dir = realpath(expanduser(path_join(getcwd(), path)))
    return abs_dir
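A quick standalone check of the behavior above, assuming `path_isabs`, `path_join`, `expanduser`, `getcwd`, and `realpath` are the usual `os`/`os.path` functions imported under those aliases:

from os import getcwd
from os.path import isabs as path_isabs, join as path_join, expanduser, realpath

print(get_absolute_path('/etc/hosts'))   # already absolute -> realpath only
print(get_absolute_path('docs/readme'))  # resolved against the current directory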
def mount(self):
    # try mounting through fstab first
    if self.mount_device is None:
        dev = self.partitionPath("1")
    else:
        # if previously mounted, use the same spot
        dev = self.mount_device
    try:
        fstab = open("/etc/fstab")
        lines = fstab.readlines()
    except IOError:
        return -1
    fstab.close()
    for line in lines:
        parts = line.strip().split(" ")
        fspath = path.realpath(parts[0])
        if path.realpath(fspath) == dev:
            print "[Harddisk] mounting:", fspath
            cmd = "mount -t ext3 " + fspath
            res = system(cmd)
            return (res >> 8)
    # device is not in fstab
    res = -1
    if self.type == DEVTYPE_UDEV:
        # we can let udev do the job, re-read the partition table
        res = system('sfdisk -R ' + self.disk_path)
        # give udev some time to make the mount, which it will do asynchronously
        from time import sleep
        sleep(3)
    return (res >> 8)
def fetch_and_archive(service, email, archive_path, mid_list):
    logger.info(
        'fetch_and_archive started. email: %s, archive_path: %s, mid_list: %d message(s)'
        % (email, archive_path, len(mid_list))
    )
    if path_isabs(archive_path):
        output_dir = realpath(archive_path)
    else:
        output_dir = realpath(expanduser(path_join(getcwd(), archive_path)))
    count = 0
    error = 0
    for mid in mid_list:
        file_name = path_join(output_dir, ('%x.gz' % mid))
        message = fetch_mail(service, email, mid)
        if not message:
            error += 1
            continue
        with gzip_open(file_name, 'wb') as f:
            f.write(urlsafe_b64decode(message['raw']))
        logger.debug('Message id %x gzipped to %s.' % (mid, file_name))
        count += 1
    logger.info('fetch_and_archive completed. Total %d item(s) saved. Error %d item(s).'
                % (count, error))
def test_remove_file_handle_only(path):
    ds = Dataset(path).create(force=True)
    ds.save()
    ok_clean_git(ds.path)
    # make sure there is any key
    ok_(len(ds.repo.get_file_key('one')))
    # both files link to the same key
    eq_(ds.repo.get_file_key('one'), ds.repo.get_file_key('two'))
    rpath_one = realpath(opj(ds.path, 'one'))
    eq_(rpath_one, realpath(opj(ds.path, 'two')))
    path_two = opj(ds.path, 'two')
    ok_(exists(path_two))
    # remove one handle, should not affect the other
    ds.remove('two', check=False, message="custom msg")
    eq_(ds.repo.repo.head.commit.message.rstrip(), "custom msg")
    eq_(rpath_one, realpath(opj(ds.path, 'one')))
    ok_(exists(rpath_one))
    ok_(not exists(path_two))
    # remove file without specifying the dataset -- shouldn't fail
    with chpwd(path):
        remove('one', check=False)
        ok_(not exists("one"))
    # and we should be able to remove without saving
    ds.remove('three', check=False, save=False)
    ok_(ds.repo.dirty)
def addons(self, token_filter=None, model_filter=None, inherited_filter=None,
           data_filter=None, field_filter=None):
    config_filename = self.addon_config_filename
    addonsourcepath = self.get_addonsourcepath()
    filter_re = re.compile(token_filter) if token_filter else None

    def filter_name(p):
        return filter_re.search(p) is not None if token_filter else True

    def filter_addon(a):
        return (
            (model_filter in a.models[0] if model_filter else True) and
            (data_filter in a.data if data_filter else True) and
            (inherited_filter in a.models[1] if inherited_filter else True) and
            (field_filter in [f for fn, cl, f in a.fields] if field_filter else True)
        )

    for path, ds, fs in walk(self.sources_path, followlinks=True):
        if (
            config_filename in fs and
            '__init__.py' in fs and
            filter_name(basename(path)) and
            realpath(path) != realpath(addonsourcepath)
        ):
            addon = Addon(join(path, config_filename))
            if filter_addon(addon):
                yield addon
def test_common_and_host_file_collision(self):
    '''
    What happens if a file with the same name exists in the common dir and
    in the "hostname" dir?

    Desired behavior is that common is linked first and hostname overwrites
    it, so that common can have a config that applies to all machines, but
    it gets overwritten on a machine by machine basis.
    '''
    real_bothfile = path.join(self.indir, gethostname(), 'bothfile')
    real_bothfile_back = path.join(self.indir, 'common', 'bothfile')
    self.touch(real_bothfile)
    self.touch(real_bothfile_back)
    linker = Linker(self.indir, self.outdir)
    linker.make_links()
    bothfile = path.join(self.outdir, 'bothfile')
    bothfile_back = path.join(self.outdir, 'bothfile.back')
    try:
        self.assertTrue(path.exists(bothfile))
        self.assertTrue(path.exists(bothfile_back))
        self.assertTrue(path.islink(bothfile))
        self.assertTrue(path.islink(bothfile_back))
        self.assertEqual(real_bothfile, path.realpath(bothfile))
        self.assertEqual(real_bothfile_back, path.realpath(bothfile_back))
    finally:
        remove(real_bothfile)
        remove(real_bothfile_back)
def test_on_add(self):
    project = Project('pj')
    with temppath() as path:
        with open(path, 'w') as writer:
            writer.write('-- pig script')
        project.add_job('foo', PigJob({'pig.script': path}))
        eq_(
            project._files,
            {realpath(path).lstrip('/'): (realpath(path), False)}
        )
def create_symlinks(origin, destin, display_passage=True):
    """Walk the files and folders in origin and create a symlink for each in
    destin; folders that do not exist yet are created explicitly, all other
    files are symlinked."""
    origin = realpath(origin)
    destin = abspath(destin)
    os.chdir(origin)

    # make sure destin exists
    if display_passage is False and os.path.isdir(destin) is False:
        os.makedirs(destin)

    for root, dirs, files in os.walk(os.curdir):
        for dirn in dirs:
            #dir_orig = join(join(origin, root), dirn)
            dir_dest = join(join(destin, root), dirn)
            if os.path.exists(dir_dest) is False:
                if display_passage is True:
                    sys.stdout.write('%s does not exist, creating it.\n' % dir_dest)
                else:
                    os.makedirs(dir_dest)
        for filen in files:
            file_orig = realpath(join(join(origin, root), filen))
            file_dest = abspath(join(join(destin, root), filen))
            if display_passage is True:
                msg = '%35s ---> %s\n' % (file_orig, file_dest)
                sys.stdout.write(msg)
            else:
                force_symlink(file_orig, file_dest)

    # first pass only lists the planned actions; confirm, then recurse to
    # actually perform them
    if display_passage is True:
        msg = """\n\n Will perform the previous listed actions.
 Do you wish to continue? <ctrl-c> to abort"""
        raw_input(msg)
        create_symlinks(origin, destin, display_passage=False)
def __init__(self, conf_file=config_file_name, nullconfig=False):
    if nullconfig:
        return

    main_dir = path.join(path.dirname(path.realpath(__file__)), path.pardir)
    main_dir = path.realpath(main_dir)
    json_file = path.join(main_dir, conf_file)

    if path.exists(json_file):
        try:
            with open(json_file) as json_config:
                config_data = json.load(json_config)
        except IOError as e:
            logger.error('Can not open config file: %s', str(e))
        except ValueError as e:
            logger.error('Can not load json config file: %s', str(e))

    def split_path(config_var, strpath):
        for key, value in config_var.items():
            if strpath:
                new_strpath = '.'.join([strpath, key])
            else:
                new_strpath = key
            if type(value) is dict:
                split_path(value, new_strpath)
            else:
                self.set(new_strpath, value)

    if 'config_data' in locals():
        split_path(locals()['config_data'], None)

    self.set('main.dir', main_dir)
    self.set('main.start_time', datetime.now())
    self.set('version', VERSION)
def dirsplit(path, basepath, maxdepth=10):
    """splits /home/hugo/foo/bar/baz into foo, bar, baz,
    assuming /home/hugo is the basepath
    """
    path = realpath(path)
    basepath = realpath(basepath)
    return _dirsplit(path, basepath, maxdepth=maxdepth)
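The private helper `_dirsplit` is not shown in the snippet; a standalone sketch of the same idea using os.path.relpath (an equivalent reimplementation under that assumption, not the module's own helper):

from os import sep
from os.path import realpath, relpath

def dirsplit_sketch(path, basepath):
    # '/home/hugo/foo/bar/baz' relative to '/home/hugo' -> 'foo/bar/baz'
    rel = relpath(realpath(path), realpath(basepath))
    return rel.split(sep)

print(dirsplit_sketch('/home/hugo/foo/bar/baz', '/home/hugo'))  # ['foo', 'bar', 'baz']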
def settings_save(self, config_file=r"../settings.ini"):
    """Save settings into the ini file."""
    config = SafeConfigParser()
    config.read(path.realpath(config_file))
    # new values
    config.set('auth', 'app_id', self.app_id)
    config.set('auth', 'app_secret', self.app_secret)
    config.set('basics', 'out_folder', path.realpath(self.out_fold_path.get()))
    config.set('basics', 'excel_out', self.output_xl.get())
    config.set('basics', 'excel_opt', str(self.opt_excel.get()))
    config.set('basics', 'word_opt', str(self.opt_word.get()))
    config.set('basics', 'word_tpl', self.tpl_input.get())
    config.set('basics', 'word_out_prefix', str(self.out_word_prefix.get()))
    config.set('basics', 'word_opt_id', str(self.word_opt_id.get()))
    config.set('basics', 'word_opt_date', str(self.word_opt_date.get()))
    config.set('basics', 'xml_opt', str(self.opt_xml.get()))
    config.set('basics', 'xml_out_prefix', str(self.out_xml_prefix.get()))
    config.set('basics', 'xml_opt_id', str(self.xml_opt_id.get()))
    config.set('basics', 'xml_opt_date', str(self.xml_opt_date.get()))
    # writing
    with open(path.realpath(config_file), 'wb') as configfile:
        config.write(configfile)
    logging.info("Settings saved into: {}".format(config_file))
    # end of method
    return
def __posix_places(self):
    result = []
    labels = {}
    titles = {}
    by_label = "/dev/disk/by-label"
    if isdir(by_label):
        for label in listdir(by_label):
            labels[realpath(join(by_label, label))] = label
    with open("/proc/mounts") as fp:
        for line in fp:
            device, path, dummy = line.split(" ", 2)
            if "/" not in device:
                continue
            # /proc/mounts escapes special characters in mount paths as
            # backslash-octal sequences (e.g. "\040" for a space); decode them.
            spl = path.split("\\")
            path = spl[0]
            for i in spl[1:]:
                path += chr(int(i[:3], 8)) + i[3:]
            if device.startswith("/"):
                device = realpath(device)
            device = labels.get(device, device)
            times = titles.get(device, 0)
            if times:
                title = "(%d) %s" % (times + 1, device)
            else:
                title = device
            titles[device] = times + 1
            result.append((device, path))
    return result
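The octal unescaping loop above can be tried in isolation; this is the same logic lifted into a standalone function (the helper name is ours, not the module's):

def unescape_mount_path(path):
    # the kernel writes "\040" for a space, "\011" for a tab, etc.
    spl = path.split("\\")
    out = spl[0]
    for i in spl[1:]:
        out += chr(int(i[:3], 8)) + i[3:]
    return out

print(unescape_mount_path("/mnt/my\\040disk"))  # -> /mnt/my disk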
def walk(argz, target, followlinks=True):
    selfreplace = False
    if not exists(target):
        yield None, target
    else:
        if argz == [target]:
            selfreplace = True
            target = dirname(target)
        assert isdir(target)
    for isrc in argz:
        src = realpath(isrc)
        if not isdir(src):
            yield src, join(target, basename(isrc))
        else:
            srcdepth = len(levels(src))
            for path, dirs, files in os.walk(src, followlinks=followlinks):
                if not included(path, optz.dirinclude, optz.direxclude):
                    dirs[:] = []
                    continue
                #dirs[:] = [d for d in dirs if included(d, optz.dirinclude, optz.direxclude)]
                pathdeeper = levels(path)[srcdepth:]
                targdeeper = join(target, *pathdeeper)
                mkdir = False
                for f in files:
                    if not included(f, optz.include, optz.exclude):
                        continue
                    fsrc = realpath(join(path, f))
                    if not mkdir:
                        yield None, targdeeper
                        mkdir = True
                    yield fsrc, join(targdeeper, f)
def check_symlinks(files):
    if readlink is False:
        return  # Not on Unix system
    msgs = []
    real_build_prefix = realpath(config.build_prefix)
    for f in files:
        path = join(real_build_prefix, f)
        if islink(path):
            link_path = readlink(path)
            real_link_path = realpath(path)
            if real_link_path.startswith(real_build_prefix):
                # If the path is in the build prefix, this is fine, but
                # the link needs to be relative
                if not link_path.startswith('.'):
                    # Don't change the link structure if it is already a
                    # relative link. It's possible that ..'s later in the path
                    # can result in a broken link still, but we'll assume that
                    # such crazy things don't happen.
                    print("Making absolute symlink %s -> %s relative" % (f, link_path))
                    os.unlink(path)
                    os.symlink(relpath(real_link_path, dirname(path)), path)
            else:
                # Symlinks to absolute paths on the system (like /usr) are fine.
                if real_link_path.startswith(config.croot):
                    msgs.append("%s is a symlink to a path that may not "
                                "exist after the build is completed (%s)" % (f, link_path))
    if msgs:
        for msg in msgs:
            print("Error: %s" % msg, file=sys.stderr)
        sys.exit(1)
def _list_outputs(self):
    outputs = self._outputs().get()
    outputs["seg_file"] = op.realpath(self._seg_fname)
    outputs["peak_file"] = op.realpath(self._peak_fname)
    outputs["lut_file"] = op.realpath(self._lut_fname)
    return outputs
def main():
    """
    Creates an instance of the plugin manager and configures it appropriately.
    After configuration the plugin manager is executed.
    """
    if 'help' in argv:
        print ('\n\n########################################################################################\n'
               '#                                      myProject                                       #\n'
               '########################################################################################\n')
        print 'A program for '
        exit()
    argvs = argv[0:1] + ['-myProject'] + argv[1:]
    config_file = expanduser(join('~', '.myProject.ini'))
    if not isfile(config_file):
        from lauescript.makeconfig import run
        run(outputName='~/.myProject.ini',
            data_path=join(dirname(dirname(dirname(dirname(realpath(__file__))))),
                           join('lauescript', 'data')),
            plugin_path=join(dirname(dirname(realpath(__file__))),
                             join('myproject', 'src')))
    pm = pluginmanager.PluginManager(argvs=argvs,
                                     headline=' myProject ',
                                     bottomline=' Exiting myProject ',
                                     headlines=False,
                                     config=config_file,
                                     macro_file=False)
    pm.execute()
    exit()
def get_app_dir():
    """Get the configured JupyterLab app directory.
    """
    # Default to the override environment variable.
    if os.environ.get('JUPYTERLAB_DIR'):
        return osp.realpath(os.environ['JUPYTERLAB_DIR'])

    # Use the default locations for data_files.
    app_dir = pjoin(sys.prefix, 'share', 'jupyter', 'lab')

    # Check for a user level install.
    # Ensure that USER_BASE is defined
    if hasattr(site, 'getuserbase'):
        site.getuserbase()
    userbase = getattr(site, 'USER_BASE', None)
    if HERE.startswith(userbase) and not app_dir.startswith(userbase):
        app_dir = pjoin(userbase, 'share', 'jupyter', 'lab')

    # Check for a system install in '/usr/local/share'.
    elif (sys.prefix.startswith('/usr') and not osp.exists(app_dir) and
          osp.exists('/usr/local/share/jupyter/lab')):
        app_dir = '/usr/local/share/jupyter/lab'

    return osp.realpath(app_dir)
def _scrub_path(path):
    if isabs(path):
        path = realpath(path)
    else:
        cwd = abspath(getcwd())
        path = realpath(join(cwd, path))
    return path
def test(net_file, model_file, predict_file, gpunum, outdir, outputlayer):
    caffe.set_device(gpunum)
    caffe.set_mode_gpu()
    if not exists(outdir):
        makedirs(outdir)
    outfile = os.path.join(outdir, 'bestiter.pred')
    outputlayer_split = outputlayer.split('_')
    outputlayer_cnt = len(outputlayer_split)
    flag = False
    outdata = []
    net = caffe.Net(realpath(net_file), realpath(model_file), caffe.TEST)
    with open(predict_file, 'r') as f:
        files = [x.strip() for x in f]
    with open(outfile, 'w') as f:
        for batchfile in files:
            fi = h5py.File(batchfile, 'r')
            dataset = np.asarray(fi['data'])
            out = net.forward_all(data=dataset, blobs=outputlayer_split)
            for i in range(outputlayer_cnt):
                if not flag:
                    outdata.append(np.vstack(np.asarray(out[outputlayer_split[i]])))
                else:
                    outdata[i] = np.vstack((outdata[i],
                                            np.vstack(np.asarray(out[outputlayer_split[i]]))))
            flag = True
        for out in outdata[0]:
            f.write('%s\n' % '\t'.join([str(x) for x in out]))
    with open(join(outdir, 'bestiter.pred.params.pkl'), 'wb') as f:
        cPickle.dump((outdata, outputlayer_split), f, protocol=cPickle.HIGHEST_PROTOCOL)
def writeToFile(self, format=None, filename=None, options={}):
    """GraphContext.writeToFile(filename = None) -> void

    Writes the graph in this context out to a file, autodetecting the
    filetype from the name of the file. If filename == None, will attempt
    to use self.path. If self.path == None and filename == None, raises
    ExportError."""
    # Detect what kind of file we're loading and
    # call a specialized loader
    if filename is None or format is None:
        if self.path is not None:
            filename = self.path
            filepath = path.realpath(filename)
            (dir, basename) = path.split(filepath)
            (prefix, ext) = path.splitext(basename)
            if not ext == '.mg' and not ext == '.mg2':
                raise ExportError, "Couldn't save into exported type."
            format = formatmanager.formats[ext[1:]]
        else:
            raise ExportError, "Couldn't save new file without filename."
    filepath = path.realpath(filename)
    format.write(self.graph, filepath, options=options)
    self.path = filename
    self.graph.change(False)
def iter_files(self):
    """Yield the real path of every tracked file, each at most once."""
    seen_paths_ = []
    files = self.filepaths
    dirs = self.dirpaths
    exclude_dirs = self.ignored_dirs

    for filepath in files:
        pth = path.realpath(path.abspath(filepath))
        if pth not in seen_paths_:
            seen_paths_.append(pth)
            yield pth

    for dirpath in dirs:
        dirpath = path.abspath(dirpath)
        for dirpath, dirnames, filenames in walk(dirpath):
            ## remove excluded dirs
            # TODO: These are not patterns. Consider making them glob
            # patterns
            for dir in exclude_dirs:
                if dir in dirnames:
                    self.log.debug(u'SublimeTODO ignoring dir: {0}'.format(dir))
                    dirnames.remove(dir)
            for filepath in filenames:
                pth = path.join(dirpath, filepath)
                pth = path.realpath(path.abspath(pth))
                if pth not in seen_paths_:
                    seen_paths_.append(pth)
                    yield pth
def _list_outputs(self):
    outputs = self._outputs().get()
    for ftype in ["cope", "varcope", "dof"]:
        outputs[ftype + "_file"] = op.realpath(ftype + "_merged.nii.gz")
    outputs["mask_file"] = op.realpath("group_mask.nii.gz")
    return outputs
#!/usr/bin/env python3
from os.path import dirname, realpath

dir_path = dirname(realpath(__file__))

with open(f'{dir_path}/input') as f:
    puzzle_input = f.read().split('\n\n')


def parse_input():
    return puzzle_input[:]


def get_anyone_count_for_group(group):
    work = group.split()
    answers = set()
    for line in work:
        answers.update(line)
    return len(answers)


def get_everyone_count_for_group(group):
    work = group.split()
    answers = set(work[0])
    for line in work[1:]:
        answers = answers.intersection(line)
    return len(answers)
import os
from os import path

import pytest

from nlp_architect.pipelines.spacy_np_annotator import (
    NPAnnotator,
    get_noun_phrases,
    SpacyNPAnnotator,
)
from nlp_architect.utils.io import download_unlicensed_file
from nlp_architect.utils.text import SpacyInstance, try_to_load_spacy

MODEL_URL = "https://d2zs9tzlek599f.cloudfront.net/models/chunker/"
MODEL_FILE = "model.h5"
MODEL_INFO = "model_info.dat.params"
local_models_path = path.join(path.dirname(path.realpath(__file__)), "fixtures/data/chunker")

if not try_to_load_spacy("en"):
    pytest.skip(
        "\n\nSkipping test_spacy_np_annotator.py. Reason: 'spacy en' model not installed."
        "Please see https://spacy.io/models/ for installation instructions.\n"
        "The terms and conditions of the data set and/or model license apply.\n"
        "Intel does not grant any rights to the data and/or model files.\n",
        allow_module_level=True,
    )


def check_dir():
    if not os.path.exists(local_models_path):
        try:
            os.makedirs(local_models_path)
def inParentDirs(self, dir, parents):
    dir = os_path.realpath(dir)
    for p in parents:
        if dir.startswith(p):
            return True
    return False
def getMountpoint(self, file):
    file = os_path.join(os_path.realpath(file), "")
    for m in self.mountpoints:
        if file.startswith(m):
            return m
    return False
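The `join(..., "")` above is doing real work: joining with an empty component appends a trailing separator, so the prefix test compares whole path components (assuming the entries in self.mountpoints are themselves stored with trailing slashes, e.g. "/media/hdd/"):

from os import path as os_path

print(os_path.join("/media/hdd", ""))             # -> "/media/hdd/"
print("/media/hdd/".startswith("/media/hdd/"))    # True: the mountpoint itself matches
print("/media/hdd2/".startswith("/media/hdd/"))   # False: a sibling dir no longer matches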
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
@author: Felipe Espic
"""
from shutil import copytree, copy2
import scripts.label_st_align_to_var_rate as ltvr
from os.path import join, dirname, realpath, isdir
import sys

this_dir = dirname(realpath(__file__))
sys.path.append(realpath(this_dir + '/../../../tools/magphase/src'))
import libutils as lu
import magphase as mp
import configparser  # Install it with pip (it's not the same as 'ConfigParser' (old version))
from subprocess import call


def feat_extraction(in_wav_dir, file_name_token, out_feats_dir, d_opts):
    # Display:
    print("\nAnalysing file: " + file_name_token + '.wav............................')

    # File setup:
    wav_file = join(in_wav_dir, file_name_token + '.wav')
    mp.analysis_for_acoustic_modelling(wav_file, out_feats_dir,
                                       mag_dim=d_opts['mag_dim'],
                                       phase_dim=d_opts['phase_dim'],
    # TODO: add docstring and implement metadata database
    '''
    '''
    # Refuse to queue more work if the number of running containers already
    # exceeds the allotted maximum
    if len(self.docker_client.containers.list()) > self._MAX_JOBS:
        raise Exception('System already at set MAX_JOBS quota.')
    running_containers_list = self.docker_client.containers.list()
    while len(self._jobQ) != 0:
        if len(running_containers_list) < self._MAX_JOBS:
            job = self._jobQ.pop()
            self.setupJob(job)
            running_job = self.runJob(job, self.image_tag, '0-3')
            running_containers_list = self.docker_client.containers.list()


if __name__ == '__main__':
    from os.path import realpath

    primary_path = realpath('../pocono_test_case')
    altered_domain_files = [
        "../pocono_test_case/Route_Link.nc",
        "../pocono_test_case/Route_Link_1.nc",
        "../pocono_test_case/Route_Link_2.nc",
        "../pocono_test_case/Route_Link_3.nc",
        "../pocono_test_case/Route_Link_4.nc"
    ]
    # Map full path name to altered domain files
    altered_domain_files = map(lambda f: realpath(f), altered_domain_files)
    schedule = Scheduler.fromList(primary_path, altered_domain_files)
    schedule.startJobs()
data_dir = testing.data_path(download=False)
subjects_dir = op.join(data_dir, 'subjects')
report_dir = op.join(data_dir, 'MEG', 'sample')

raw_fname = op.join(report_dir, 'sample_audvis_trunc_raw.fif')
ms_fname = op.join(data_dir, 'SSS', 'test_move_anon_raw.fif')
event_fname = op.join(report_dir, 'sample_audvis_trunc_raw-eve.fif')
cov_fname = op.join(report_dir, 'sample_audvis_trunc-cov.fif')
proj_fname = op.join(report_dir, 'sample_audvis_ecg-proj.fif')
fwd_fname = op.join(report_dir, 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
trans_fname = op.join(report_dir, 'sample_audvis_trunc-trans.fif')
inv_fname = op.join(report_dir, 'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
mri_fname = op.join(subjects_dir, 'sample', 'mri', 'T1.mgz')
bdf_fname = op.realpath(
    op.join(op.dirname(__file__), '..', 'io', 'edf', 'tests', 'data', 'test.bdf'))
edf_fname = op.realpath(
    op.join(op.dirname(__file__), '..', 'io', 'edf', 'tests', 'data', 'test.edf'))
base_dir = op.realpath(
    op.join(op.dirname(__file__), '..', 'io', 'tests', 'data'))
evoked_fname = op.join(base_dir, 'test-ave.fif')


def _get_example_figures():
    """Create two example figures."""
    fig1 = plt.plot([1, 2], [1, 2])[0].figure
    fig2 = plt.plot([3, 4], [3, 4])[0].figure
    return [fig1, fig2]
inp_dim = int(model.net_info["height"])
assert inp_dim % 32 == 0
assert inp_dim > 32

# If there's a GPU available, put the model on GPU
if CUDA:
    model.cuda()

# Set the model in evaluation mode
model.eval()

read_dir = time.time()
# Detection phase
try:
    imlist = [
        osp.join(osp.realpath('.'), images, img)
        for img in os.listdir(images)
    ]
except NotADirectoryError:
    imlist = []
    imlist.append(osp.join(osp.realpath('.'), images))
except FileNotFoundError:
    print("No file or directory with the name {}".format(images))
    exit()

if not os.path.exists(args.det):
    os.makedirs(args.det)

load_batch = time.time()
loaded_ims = [cv2.imread(x) for x in imlist]

# PyTorch Variables for images
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.

(C) 2013- by Adam Tauber, <*****@*****.**>
'''

import sys

if sys.version_info[0] < 3:
    print('\033[1;31m Python2 is no longer supported\033[0m')
    exit(1)

if __name__ == '__main__':
    from os.path import realpath, dirname
    sys.path.append(realpath(dirname(realpath(__file__)) + '/../'))

# set Unix thread name
try:
    import setproctitle
except ImportError:
    pass
else:
    import threading

    old_thread_init = threading.Thread.__init__

    def new_thread_init(self, *args, **kwargs):
        old_thread_init(self, *args, **kwargs)
        setproctitle.setthreadtitle(self._name)

    threading.Thread.__init__ = new_thread_init
def _safe_realpath(path):
    try:
        return realpath(path)
    except OSError:
        return path
def is_parent(parent, path):
    path = op.realpath(path)  # In case it's a symlink
    parent = op.realpath(parent)
    return path.startswith(parent)
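One caveat worth noting: a plain startswith matches partial path components, so is_parent('/srv/data', '/srv/database') is True. A component-aware sketch using os.path.commonpath (Python 3.4+); `safe_is_parent` is a hypothetical helper, not part of the original module:

import os.path as op

def safe_is_parent(parent, path):
    parent = op.realpath(parent)
    path = op.realpath(path)
    # commonpath compares whole components, so '/srv/database' is
    # not treated as living under '/srv/data'
    return op.commonpath([parent, path]) == parent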
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------

from datetime import date, datetime, timedelta
import os
from os.path import dirname, pardir, join, realpath
import unittest

import sys

cwd = dirname(realpath(__file__))
log_level = int(os.environ.get('PythonLogLevel', 30))

tests = realpath(join(cwd, pardir, "Expected", "AcceptanceTests"))
sys.path.append(join(tests, "ExtensibleEnums"))

from msrest.serialization import Deserializer

from extensibleenumsswagger.aio import PetStoreInc
from extensibleenumsswagger.models import (
    Pet,
    DaysOfWeekExtensibleEnum,
    IntEnum,
)

import pytest
#!/usr/bin/env python3
"""
Created on Fri May 26 12:50:08 2017

@author: Jackson
"""

import matplotlib.pyplot as plt
from os.path import join, dirname, realpath
from context import models as lf
from context import data as hd

plt.close('all')

sampledir = join(dirname(realpath(__file__)), 'testData', 'RT WhiteA')
RTfreqDir = join(sampledir, 'RTWhiteAFreq')

RTfreqFiles = hd.dir_read(RTfreqDir)
RTfreqData = hd.list_read(RTfreqFiles)

RTfreq100hz = join(RTfreqDir, 'RT WhiteA 100Hz 8V 1Average Table1.tsv')
RT100data = hd.HysteresisData()
RT100data.tsv_read(RTfreq100hz)
RT100data.hyst_plot()

RTWhiteFilm = lf.LandauSimple(thickness=255E-7, area=1E-4)
RTWhiteFilm.c = RTWhiteFilm.c_calc(RTfreqData, plot=1)
RT100compensated, RTWhiteFilm.pr = RTWhiteFilm.c_compensation(RT100data, plot=1)

# Following code plots a series of diff freq hystdata files on same plot
def test_add_platforms():
    # Check adding platform to wheel name and tag section
    exp_items = [('Generator', 'bdist_wheel (0.23.0)'),
                 ('Root-Is-Purelib', 'false'),
                 ('Tag', 'cp27-none-macosx_10_6_intel'),
                 ('Wheel-Version', '1.0')]
    assert_equal(get_winfo(PLAT_WHEEL, drop_version=False), exp_items)
    with InTemporaryDirectory() as tmpdir:
        # First wheel needs proper wheel filename for later unpack test
        out_fname = basename(PURE_WHEEL)
        plats = ('macosx_10_9_intel', 'macosx_10_9_x86_64')
        # Can't add platforms to a pure wheel
        assert_raises(WheelToolsError,
                      add_platforms, PURE_WHEEL, plats, tmpdir)
        assert_false(exists(out_fname))
        out_fname = ('fakepkg1-1.0-cp27-none-macosx_10_6_intel.'
                     'macosx_10_9_intel.macosx_10_9_x86_64.whl')
        assert_equal(realpath(add_platforms(PLAT_WHEEL, plats, tmpdir)),
                     realpath(out_fname))
        assert_true(isfile(out_fname))
        # Expected output minus wheel-version (that might change)
        extra_exp = [('Generator', 'bdist_wheel (0.23.0)'),
                     ('Root-Is-Purelib', 'false'),
                     ('Tag', 'cp27-none-macosx_10_6_intel'),
                     ('Tag', 'cp27-none-macosx_10_9_intel'),
                     ('Tag', 'cp27-none-macosx_10_9_x86_64')]
        assert_equal(get_winfo(out_fname), extra_exp)
        # If wheel exists (as it does) then raise error
        assert_raises(WheelToolsError,
                      add_platforms, PLAT_WHEEL, plats, tmpdir)
        # Unless clobber is set, no error
        add_platforms(PLAT_WHEEL, plats, tmpdir, clobber=True)
        # Assemble platform tags in two waves to check tags are not being
        # multiplied
        out_1 = 'fakepkg1-1.0-cp27-none-macosx_10_6_intel.macosx_10_9_intel.whl'
        assert_equal(realpath(add_platforms(PLAT_WHEEL, plats[0:1], tmpdir)),
                     realpath(out_1))
        assert_equal(get_winfo(out_1), extra_exp[:-1])
        out_2 = splitext(out_1)[0] + '.macosx_10_9_x86_64.whl'
        assert_equal(realpath(add_platforms(out_1, plats[1:], tmpdir, True)),
                     realpath(out_2))
        assert_equal(get_winfo(out_2), extra_exp)
        # Default is to write into directory of wheel
        os.mkdir('wheels')
        shutil.copy2(PLAT_WHEEL, 'wheels')
        local_plat = pjoin('wheels', basename(PLAT_WHEEL))
        local_out = pjoin('wheels', out_fname)
        add_platforms(local_plat, plats)
        assert_true(exists(local_out))
        assert_raises(WheelToolsError, add_platforms, local_plat, plats)
        add_platforms(local_plat, plats, clobber=True)
        # If platforms already present, don't write more
        res = sorted(os.listdir('wheels'))
        assert_equal(add_platforms(local_out, plats, clobber=True), None)
        assert_equal(sorted(os.listdir('wheels')), res)
        assert_equal(get_winfo(out_fname), extra_exp)
        # But WHEEL tags if missing, even if file name is OK
        shutil.copy2(local_plat, local_out)
        add_platforms(local_out, plats, clobber=True)
        assert_equal(sorted(os.listdir('wheels')), res)
        assert_equal(get_winfo(out_fname), extra_exp)
def run(self):
    if not self.info:
        if not self.is_enabled():
            return
    args = {"dir": realpath(self.info.build_folder)}
    self.window.run_command("open_dir", args=args)
#!/usr/bin/env python
import math
import time
from os.path import (join, realpath)
import sys

sys.path.insert(0, realpath(join(__file__, "../../../")))

from hummingbot.core.event.event_logger import EventLogger
from hummingbot.core.event.events import (
    OrderBookTradeEvent,
    TradeType,
    OrderBookEvent,
)
import asyncio
import logging
import unittest
from typing import (
    Dict,
    Optional,
    List,
)
from hummingbot.market.ddex.ddex_order_book_tracker import DDEXOrderBookTracker
from hummingbot.core.data_type.order_book import OrderBook
from hummingbot.core.data_type.order_book_tracker import (
    OrderBookTrackerDataSourceType
)
from hummingbot.core.utils.async_utils import (
    safe_ensure_future,
    safe_gather,
)


class DDEXOrderBookTrackerUnitTest(unittest.TestCase):
    order_book_tracker: Optional[DDEXOrderBookTracker] = None
    events: List[OrderBookEvent] = [OrderBookEvent.TradeEvent]
    trading_pairs: List[str] = [
        "USDC-DAI",
        "WETH-DAI",
        "QKC-WETH",
        "ECOREAL-TUSD"
    ]

    @classmethod
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os.path as path
import sys

sys.path.insert(0, path.join(path.realpath('..'), "Dlib"))

import Dpowers, Dhelpers
from Dpowers import keyb
from Dpowers.events.sending import event_sender

# -- Project information -----------------------------------------------------

project = 'Dpowers'
copyright = '2021, dp0s'
author = 'dp0s'
version = Dpowers.__version__

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
#!/usr/bin/env python3
from __future__ import print_function

from collections import defaultdict
from os import listdir
from os.path import abspath, basename, dirname, isdir, isfile, join, realpath, relpath, splitext
import re
from subprocess import Popen, PIPE
import sys

# Runs the tests.
REPO_DIR = dirname(realpath(__file__))

OUTPUT_EXPECT = re.compile(r'// expect: ?(.*)')
ERROR_EXPECT = re.compile(r'// (Error.*)')
ERROR_LINE_EXPECT = re.compile(r'// \[((java|c|swift) )?line (\d+)\] (Error.*)')
RUNTIME_ERROR_EXPECT = re.compile(r'// expect runtime error: (.+)')
SYNTAX_ERROR_RE = re.compile(r'\[.*line (\d+)\] (Error.+)')
STACK_TRACE_RE = re.compile(r'\[line (\d+)\]')
NONTEST_RE = re.compile(r'// nontest')

passed = 0
failed = 0
num_skipped = 0
expectations = 0

interpreter = None
filter_path = None

INTERPRETERS = {}
def test_eval_hook():
    with pytest.raises(AssertionError):
        # `save_best` should be a str
        test_dataset = Model()
        data_loader = DataLoader(test_dataset)
        EvalHook(data_loader, save_best=True)

    with pytest.raises(TypeError):
        # dataloader must be a pytorch DataLoader
        test_dataset = Model()
        data_loader = [DataLoader(test_dataset)]
        EvalHook(data_loader)

    with pytest.raises(ValueError):
        # save_best must be valid when rule_map is None
        test_dataset = ExampleDataset()
        data_loader = DataLoader(test_dataset)
        EvalHook(data_loader, save_best='unsupport')

    with pytest.raises(KeyError):
        # rule must be in keys of rule_map
        test_dataset = Model()
        data_loader = DataLoader(test_dataset)
        EvalHook(data_loader, save_best='auto', rule='unsupport')

    test_dataset = ExampleDataset()
    loader = DataLoader(test_dataset)
    model = Model()
    data_loader = DataLoader(test_dataset)
    eval_hook = EvalHook(data_loader, save_best=None)

    with tempfile.TemporaryDirectory() as tmpdir:
        # total_epochs = 1
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 1)
        test_dataset.evaluate.assert_called_with(
            test_dataset, [torch.tensor([1])], logger=runner.logger)
        assert runner.meta is None or 'best_score' not in runner.meta['hook_msgs']
        assert runner.meta is None or 'best_ckpt' not in runner.meta['hook_msgs']

    # when `save_best` is set to 'auto', first metric will be used.
    loader = DataLoader(EvalDataset())
    model = Model()
    data_loader = DataLoader(EvalDataset())
    eval_hook = EvalHook(data_loader, interval=1, save_best='auto')

    with tempfile.TemporaryDirectory() as tmpdir:
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 8)

        ckpt_path = osp.join(tmpdir, 'best_acc_epoch_4.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == 7

    # total_epochs = 8, return the best acc and corresponding epoch
    loader = DataLoader(EvalDataset())
    model = Model()
    data_loader = DataLoader(EvalDataset())
    eval_hook = EvalHook(data_loader, interval=1, save_best='acc')

    with tempfile.TemporaryDirectory() as tmpdir:
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 8)

        ckpt_path = osp.join(tmpdir, 'best_acc_epoch_4.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == 7

    # total_epochs = 8, return the best score and corresponding epoch
    data_loader = DataLoader(EvalDataset())
    eval_hook = EvalHook(
        data_loader, interval=1, save_best='score', rule='greater')
    with tempfile.TemporaryDirectory() as tmpdir:
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 8)

        ckpt_path = osp.join(tmpdir, 'best_score_epoch_4.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == 7

    # total_epochs = 8, return the best score using less compare func
    # and indicate corresponding epoch
    data_loader = DataLoader(EvalDataset())
    eval_hook = EvalHook(data_loader, save_best='acc', rule='less')
    with tempfile.TemporaryDirectory() as tmpdir:
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 8)

        ckpt_path = osp.join(tmpdir, 'best_acc_epoch_6.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == -3

    # Test the EvalHook when resume happened
    data_loader = DataLoader(EvalDataset())
    eval_hook = EvalHook(data_loader, save_best='acc')
    with tempfile.TemporaryDirectory() as tmpdir:
        logger = get_logger('test_eval')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.run([loader], [('train', 1)], 2)

        ckpt_path = osp.join(tmpdir, 'best_acc_epoch_2.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == 4

        resume_from = osp.join(tmpdir, 'latest.pth')
        loader = DataLoader(ExampleDataset())
        eval_hook = EvalHook(data_loader, save_best='acc')
        runner = EpochBasedRunner(model=model, work_dir=tmpdir, logger=logger)
        runner.register_checkpoint_hook(dict(interval=1))
        runner.register_hook(eval_hook)
        runner.resume(resume_from)
        runner.run([loader], [('train', 1)], 8)

        ckpt_path = osp.join(tmpdir, 'best_acc_epoch_4.pth')
        assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(ckpt_path)
        assert osp.exists(ckpt_path)
        assert runner.meta['hook_msgs']['best_score'] == 7
#!/usr/bin/env python
import sys
import django
from django.conf import settings, global_settings as default_settings
from django.core.management import execute_from_command_line
from os import path

# Give feedback on used versions
sys.stderr.write('Using Python version {0} from {1}\n'.format(sys.version[:5], sys.executable))
sys.stderr.write('Using Django version {0} from {1}\n'.format(
    django.get_version(),
    path.dirname(path.abspath(django.__file__)))
)

if not settings.configured:
    module_root = path.dirname(path.realpath(__file__))
    sys.path.insert(0, path.join(module_root, 'example'))

    MIDDLEWARE = (
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.locale.LocaleMiddleware',  # / will be redirected to /<locale>/
    )

    settings.configure(
        DEBUG = False,  # will be False anyway by DjangoTestRunner.
        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
def test_get_workdir(self):
    directory = realpath(self.repo.workdir)
    expected = realpath(self.repo_path)
    self.assertEqual(directory, expected)
def _get_timelog_path():
    log_dir = path.dirname(path.realpath(__file__))
    return "{}/{}".format(log_dir, TIME_LOG)
try:
    res[m][seed] = {l: {}}
except:
    res[m] = {seed: {l: {}}}

torch.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
os.environ['PYTHONHASHSEED'] = str(args.seed)

dataset = args.dataset
path = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', dataset)
dataset = Planetoid(path, dataset, 'public', T.NormalizeFeatures())
data = dataset[0]

if args.use_gdc:
    gdc = T.GDC(self_loop_weight=1,
                normalization_in='sym',
                normalization_out='col',
                diffusion_kwargs=dict(method='ppr', alpha=0.05),
                sparsification_kwargs=dict(method='topk', k=128, dim=0),
                exact=True)
    data = gdc(data)

labels = data.y.cuda()
edge_index, edge_weight = data.edge_index.cuda(), data.edge_attr
print(labels.size())
def test_set_workdir(self):
    directory = tempfile.mkdtemp()
    self.repo.workdir = directory
    self.assertEqual(realpath(self.repo.workdir), realpath(directory))
def add_libdir_to_path():
    from os.path import dirname, exists, join, realpath
    js_src_dir = dirname(dirname(realpath(sys.argv[0])))
    assert exists(join(js_src_dir, 'jsapi.h'))
    sys.path.append(join(js_src_dir, 'lib'))
    sys.path.append(join(js_src_dir, 'tests', 'lib'))
def test_get_path(self):
    directory = realpath(self.repo.path)
    expected = realpath(join(self.repo_path, '.git'))
    self.assertEqual(directory, expected)
def __init__(self):
    global memory_service
    global session
    self.memory_service = memory_service
    self.session = session
    webnsock.WebServer.__init__(self)
    TEMPLATE_DIR = path.realpath(path.join(path.dirname(__file__), 'www'))
    print TEMPLATE_DIR, __file__
    os.chdir(TEMPLATE_DIR)
    render = web.template.render(TEMPLATE_DIR, base='base', globals=globals())
    serv_self = self

    class Index(self.page):
        path = '/'

        def GET(self):
            plans = serv_self.find_plans()
            ip = web.ctx.host.split(':')[0]
            return render.index(plans.keys(), ip)

    class tmux(self.page):
        path = '/tmux'

        def GET(self):
            ip = web.ctx.host.split(':')[0]
            return render.tmux(ip)

    class blockly(self.page):
        path = '/blockly'

        def GET(self):
            ip = web.ctx.host.split(':')[0]
            return render.blockly(ip)

    class admin(self.page):
        path = '/admin'

        def GET(self):
            ip = web.ctx.host.split(':')[0]
            plans = serv_self.find_plans()
            actions = serv_self.find_actions()
            return render.admin(plans, actions, ip)

    class modim(self.page):
        path = '/modim'

        def GET(self):
            ip = web.ctx.host.split(':')[0]
            return render.modim(ip)

    class spqrel(self.page):
        path = '/spqrel'

        def GET(self):
            return render.spqrel()
import glob
import torch
from os import path as osp
from torch.utils.ffi import create_extension

abs_path = osp.dirname(osp.realpath(__file__))

sources = []
headers = []

sources += ['src/binop_cpu_comp.c']
sources += ['src/binop_cpu_comp_kernel.c']
headers += ['include/binop_cpu_comp.h']
headers += ['include/binop_cpu_comp_kernel.h']

ffi = create_extension(
    'binop_cpu_comp',
    headers=headers,
    sources=sources,
    relative_to=__file__,
    include_dirs=[osp.join(abs_path, 'include')],
    extra_compile_args=[
        "-std=c99", "-Ofast", "-fopenmp", "-mtune=native", "-march=x86-64"
    ])

if __name__ == '__main__':
    ffi.build()
    print('problem finding hardware info')
    exit()
else:
    print('Hardware info found')

if hardware_id in hardwareinfo:
    print("Initialising device")
    device, endpoint = init_device()
    devices = list()
    while True:
        data = device.read(endpoint.bEndpointAddress, endpoint.wMaxPacketSize)
        if data is not None:
            datac = data.strip('\n')[1:]
            devices.append(datac)
            print(devices)
else:
    print('hardware id not found')


if __name__ == "__main__":
    ldir = dirname(realpath(__file__))
    parser = argparse.ArgumentParser()
    parser.add_argument("--file", "-f", default="hardware.json",
                        help="I/O hardware filename, including extension")
    parser.add_argument("--hardware_id", "-hid",
                        help="Final name of time index")
    parser.add_argument("--numsensors", "-n", default=4,
                        help="Number of sensors")
    args = parser.parse_args()
    input_data(join(ldir, args.file), args.hardware_id, args.numsensors)
from os import path

import pytest

from aerofiles.errors import ParserError
from aerofiles.seeyou import Reader, Converter
from tests import assert_waypoint

FOLDER = path.dirname(path.realpath(__file__))
DATA_PATH = path.join(FOLDER, 'data', 'SEEYOU.CUP')
SIMPLE_CUPFILE = path.join(FOLDER, 'data', 'simple.cup')

if_data_available = pytest.mark.skipif(
    not path.exists(DATA_PATH),
    reason="requires SEEYOU.CUP")

WAYPOINTS = [
    ('"Meiersberg","MEIER",DE,5117.983N,00657.383E,164m,4,130,800m,130.125,"Flugplatz"', {  # noqa
        'name': 'Meiersberg',
        'code': 'MEIER',
        'country': 'DE',
        'latitude': 51.29972222222222,
        'longitude': 6.956388888888889,
        'elevation': {
            'value': 164,
            'unit': 'm',
        },
        'style': 4,
        'runway_direction': 130,
        'runway_length': {
            'value': 800,