def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

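A minimal, self-contained sketch of the URL-splitting step above, using only the standard library; the config/opts handling and the module's parsers table are omitted, and the helper name and sample URL are invented for illustration.

    # Sketch of the URL-splitting step only (hypothetical helper name).
    from os import path

    def split_source_url(url):
        colon = url.find(':')
        if url[colon + 1:colon + 3] != '//':
            raise ValueError('malformed URL: %s' % (url))
        source = {'url': url}
        source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
        source['file'] = path.basename(url)
        source['name'], source['ext'] = path.splitext(source['file'])
        if source['name'].endswith('.tar'):
            source['name'] = source['name'][:-4]
            source['ext'] = '.tar' + source['ext']
        return source

    # Hypothetical tarball URL:
    print(split_source_url('https://example.org/pub/hello-1.0.tar.gz'))
    # -> path: 'https://example.org/pub', file: 'hello-1.0.tar.gz',
    #    name: 'hello-1.0', ext: '.tar.gz'
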
def __init__(self, name = None, original = None, rtdir = '.'):
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.macros['global'] = {}
        self.macros['global']['nil'] = ('none', 'none', '')
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_rtdir'] = ('dir', 'required', path.abspath(rtdir))
        self.macros['global']['_rttop'] = ('dir', 'required', path.abspath(path.dirname(rtdir)))
    else:
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)

def __init__(self, name=None, original=None, rtdir="."):
    self.files = []
    self.macro_filter = re.compile(r"%{[^}]+}")
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = "global"
        self.rtpath = path.abspath(path.dirname(inspect.getfile(macros)))
        if path.dirname(self.rtpath).endswith("/share/rtems"):
            self.prefix = path.dirname(self.rtpath)[:-len("/share/rtems")]
        else:
            self.prefix = "."
        self.macros["global"] = {}
        self.macros["global"]["nil"] = ("none", "none", "")
        self.macros["global"]["_cwd"] = ("dir", "required", path.abspath(os.getcwd()))
        self.macros["global"]["_prefix"] = ("dir", "required", self.prefix)
        self.macros["global"]["_rtdir"] = ("dir", "required", path.abspath(self.expand(rtdir)))
        self.macros["global"]["_rttop"] = ("dir", "required", self.prefix)
    else:
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source["url"] = url
    colon = url.find(":")
    if url[colon + 1:colon + 3] != "//":
        raise error.general("malformed URL: %s" % (url))
    source["path"] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source["file"] = path.basename(url)
    source["name"], source["ext"] = path.splitext(source["file"])
    if source["name"].endswith(".tar"):
        source["name"] = source["name"][:-4]
        source["ext"] = ".tar" + source["ext"]
    #
    # Get the file. Checks the local source directory first.
    #
    source["local"] = None
    for p in config.define(pathkey).split(":"):
        local = path.join(path.abspath(p), source["file"])
        if source["local"] is None:
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
        if path.exists(local):
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
            break
    source["script"] = ""
    for p in parsers:
        if url.startswith(p):
            source["type"] = p
            if parsers[p](source, config, opts):
                break
    return source

def __init__(self, name = None, original = None, rtdir = '.'):
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.rtpath = path.abspath(path.dirname(inspect.getfile(macros)))
        if path.dirname(self.rtpath).endswith('/share/rtems'):
            self.prefix = path.dirname(self.rtpath)[:-len('/share/rtems')]
        else:
            self.prefix = '.'
        self.macros['global'] = {}
        self.macros['global']['nil'] = ('none', 'none', '')
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_prefix'] = ('dir', 'required', self.prefix)
        self.macros['global']['_rtdir'] = ('dir', 'required', path.abspath(self.expand(rtdir)))
        self.macros['global']['_rttop'] = ('dir', 'required', self.prefix)
    else:
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = copy.copy(original.macros[m][k])
        self.read_maps = sorted(copy.copy(original.read_maps))
        self.read_map_locked = copy.copy(original.read_map_locked)
        self.write_map = copy.copy(original.write_map)
    if name is not None:
        self.load(name)

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

def __init__(self, argv, argopts, defaults):
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    extras = ['--dry-run',
              '--with-download',
              '--quiet',
              '--without-log',
              '--without-error-report',
              '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.load_overrides()
    self.opts = { 'params' : extras }
    self.sb_git()
    self.rtems_bsp()
    if argopts.download_dir is not None:
        self.defaults['_sourcedir'] = ('dir',
                                       'optional',
                                       path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir',
                                      'optional',
                                      path.abspath(argopts.download_dir))

def __init__(self, name=None, original=None, sbdir='.'):
    self.files = []
    self.macro_filter = re.compile(r'%{[^}]+}')
    if original is None:
        self.macros = {}
        self.read_maps = []
        self.read_map_locked = False
        self.write_map = 'global'
        self.macros['global'] = {}
        self.macros['global']['_cwd'] = ('dir', 'required', path.abspath(os.getcwd()))
        self.macros['global']['_sbdir'] = ('dir', 'required', path.abspath(sbdir))
        self.macros['global']['_sbtop'] = ('dir', 'required', path.abspath(path.dirname(sbdir)))
    else:
        self.macros = {}
        for m in original.macros:
            if m not in self.macros:
                self.macros[m] = {}
            for k in original.macros[m]:
                self.macros[m][k] = original.macros[m][k]
        self.read_maps = sorted(original.read_maps)
        self.read_map_locked = original.read_map_locked
        self.write_map = original.write_map
    if name is not None:
        self.load(name)

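A small, self-contained sketch of the default-macro table the constructor above seeds, assuming only the standard library; the helper name is invented, and the real class adds map handling and a load() method on top of this.

    # Mirrors the (type, requirement, value) triples seeded above.
    from os import path, getcwd

    def default_global_macros(sbdir='.'):
        return {
            '_cwd':   ('dir', 'required', path.abspath(getcwd())),
            '_sbdir': ('dir', 'required', path.abspath(sbdir)),
            '_sbtop': ('dir', 'required', path.abspath(path.dirname(sbdir))),
        }

    print(default_global_macros('source-builder'))
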
def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break

def files(self, package):
    if self.create_tar_files and not self.macros.get("%{_disable_packaging}"):
        self.script.append('echo "==> %files:"')
        inpath = path.abspath(self.config.expand("%{buildroot}"))
        tardir = path.abspath(self.config.expand("%{_tardir}"))
        self.script.append(self.config.expand("if test -d %s; then" % (inpath)))
        self.script.append(self.config.expand(" %%{__mkdir_p} %s" % tardir))
        self.script.append(self.config.expand(" cd " + inpath))
        tar = path.join(tardir, package.long_name() + ".tar.bz2")
        cmd = self.config.expand(" %{__tar} -cf - . " + "| %{__bzip2} > " + tar)
        self.script.append(cmd)
        self.script.append(self.config.expand(" cd %{_builddir}"))
        self.script.append("fi")

def files(self, package):
    if self.create_tar_files \
       and not self.macros.get('%{_disable_packaging}'):
        self.script.append('echo "==> %files:"')
        inpath = path.abspath(self.config.expand('%{buildroot}'))
        tardir = path.abspath(self.config.expand('%{_tardir}'))
        self.script.append(self.config.expand('if test -d %s; then' % (inpath)))
        self.script.append(self.config.expand(' %%{__mkdir_p} %s' % tardir))
        self.script.append(self.config.expand(' cd ' + inpath))
        tar = path.join(tardir, package.long_name() + '.tar.bz2')
        cmd = self.config.expand(' %{__tar} -cf - . ' +
                                 '| %{__bzip2} > ' + tar)
        self.script.append(cmd)
        self.script.append(self.config.expand(' cd %{_builddir}'))
        self.script.append('fi')

def parse(self, bset):

    def _clean(line):
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            line = line[1:b]
        return line.strip()

    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
        if bsetname is None:
            raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))

def get(src_hdfs_path, dest_path, **kwargs):
    """
    Copy the contents of ``src_hdfs_path`` to ``dest_path``.

    ``dest_path`` is forced to be interpreted as an ordinary local path
    (see :func:`~path.abspath`). Additional keyword arguments, if any,
    are handled like in :func:`open`.
    """
    cp(src_hdfs_path, path.abspath(dest_path, local=True), **kwargs)

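A hedged usage sketch for the helper above; it assumes the surrounding HDFS module provides get() and a reachable cluster, and both paths are hypothetical.

    # Hypothetical paths; requires the surrounding HDFS module and a cluster.
    get('hdfs://namenode:8020/user/alice/report.csv', 'report_local.csv')
    # The destination goes through path.abspath(..., local=True), so it is
    # always treated as a local filesystem path, never as an HDFS URI.
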
def stageinvisitor(data, dirname, fileindir): #IGNORE:W0613
    if len(fileindir) > 0:
        stcmd = "stagein -A deferred --nowait --rdonly "
        stcmd += " -p %s " % stagepool
        for f in fileindir:
            fulname = dirname + "/" + f
            if getstatus(fulname) == "NotPresent" and ismigrated(fulname):
                stcmd += " -M " + path.abspath(fulname)
        print stcmd
        os.system(stcmd)

def stagedir(pathname):
    if isdir(pathname):
        data = None
        path.walk(pathname, stageinvisitor, data)
    else:
        stcmd = "stagein -A deferred --nowait --rdonly "
        stcmd += " -p %s " % stagepool
        stcmd += " -M " + path.abspath(pathname)
        print stcmd
        os.system(stcmd)

def getstatus(filename):
    status = ""
    if isfile(filename):
        status = "NotPresent"
        stcmd = "stageqry --noheader"
        stcmd += " -p %s " % stagepool
        stcmd += "-M " + path.abspath(filename)
        out = os.popen(stcmd).read().split()
        if out:
            status = out[7]
    return status

def __init__(self, argv, argopts, defaults):
    command_path = path.dirname(path.abspath(argv[1]))
    if len(command_path) == 0:
        command_path = '.'
    self.command_path = command_path
    self.command_name = path.basename(argv[0])
    extras = ['--dry-run',
              '--with-download',
              '--quiet',
              '--without-log',
              '--without-error-report',
              '--without-release-url']
    self.argv = argv
    self.args = argv[1:] + extras
    self.defaults = macros.macros(name = defaults, sbdir = command_path)
    self.opts = { 'params' : extras }
    self.sb_git()
    if argopts.download_dir is not None:
        self.defaults['_sourcedir'] = ('dir',
                                       'optional',
                                       path.abspath(argopts.download_dir))
        self.defaults['_patchdir'] = ('dir',
                                      'optional',
                                      path.abspath(argopts.download_dir))

def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source

def get_configs(opts):

    def _scan(_path, ext):
        configs = []
        for root, dirs, files in os.walk(_path):
            prefix = root[len(_path) + 1:]
            for file in files:
                for e in ext:
                    if file.endswith(e):
                        configs += [path.join(prefix, file)]
        return configs

    configs = {'paths': [], 'files': []}
    for cp in opts.defaults.expand('%{_configdir}').split(':'):
        hcp = path.host(path.abspath(cp))
        configs['paths'] += [hcp]
        configs['files'] += _scan(hcp, ['.cfg', '.bset'])
    configs['files'] = sorted(set(configs['files']))
    return configs

def get_configs(opts):

    def _scan(_path, ext):
        configs = []
        for root, dirs, files in os.walk(_path):
            prefix = root[len(_path) + 1:]
            for file in files:
                for e in ext:
                    if file.endswith(e):
                        configs += [path.join(prefix, file)]
        return configs

    configs = {"paths": [], "files": []}
    for cp in opts.defaults.expand("%{_configdir}").split(":"):
        hcp = path.host(path.abspath(cp))
        configs["paths"] += [hcp]
        configs["files"] += _scan(hcp, [".cfg", ".bset"])
    configs["files"] = sorted(configs["files"])
    return configs

def get_configs(opts):

    def _scan(_path, ext):
        configs = []
        for root, dirs, files in os.walk(_path):
            prefix = root[len(_path) + 1:]
            for file in files:
                for e in ext:
                    if file.endswith(e):
                        configs += [path.join(prefix, file)]
        return configs

    configs = { 'paths': [], 'files': [] }
    for cp in opts.defaults.expand('%{_configdir}').split(':'):
        hcp = path.host(path.abspath(cp))
        configs['paths'] += [hcp]
        configs['files'] += _scan(hcp, ['.cfg', '.bset'])
    configs['files'] = sorted(configs['files'])
    return configs

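A self-contained sketch of the _scan() helper used above, runnable against any directory with only the standard library; the function name is invented and the extension pair matches the .cfg/.bset filter above.

    import os
    from os import path

    def scan(top, exts=('.cfg', '.bset')):
        # Collect files under 'top' ending with one of the extensions,
        # recorded relative to 'top' (mirrors _scan() above).
        found = []
        for root, dirs, files in os.walk(top):
            prefix = root[len(top) + 1:]
            for name in files:
                if name.endswith(tuple(exts)):
                    found.append(path.join(prefix, name))
        return sorted(found)

    print(scan(path.abspath('.')))
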
def parse(self, bset):
    #
    # Ouch, this is a copy of the setbuilder.py code.
    #

    def _clean(line):
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            line = line[1:b]
        return line.strip()

    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
        if bsetname is None:
            raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))
    configs = []
    try:
        lc = 0
        for l in bset:
            lc += 1
            l = _clean(l)
            if len(l) == 0:
                continue
            log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
            ls = l.split()
            if ls[0][-1] == ':' and ls[0][:-1] == 'package':
                self.bset_pkg = ls[1].strip()
                self.macros['package'] = self.bset_pkg
            elif ls[0][0] == '%':

                def err(msg):
                    raise error.general('%s:%d: %s' % (self.bset, lc, msg))

                if ls[0] == '%define':
                    if len(ls) > 2:
                        self.macros.define(ls[1].strip(),
                                           ' '.join([f.strip() for f in ls[2:]]))
                    else:
                        self.macros.define(ls[1].strip())
                elif ls[0] == '%undefine':
                    if len(ls) > 2:
                        raise error.general('%s:%d: %%undefine requires just the name' % \
                                            (self.bset, lc))
                    self.macros.undefine(ls[1].strip())
                elif ls[0] == '%include':
                    configs += self.parse(ls[1].strip())
                elif ls[0] in ['%patch', '%source']:
                    sources.process(ls[0][1:], ls[1:], self.macros, err)
                elif ls[0] == '%hash':
                    sources.hash(ls[1:], self.macros, err)
            else:
                l = l.strip()
                c = build.find_config(l, self.configs)
                if c is None:
                    raise error.general('%s:%d: cannot find file: %s' % (self.bset, lc, l))
                configs += [c]
    except:
        bset.close()
        raise
    bset.close()
    return configs

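The parser above implies the build-set file format; here is a hypothetical .bset file (package, macro and config names are invented) using only directives the code above recognises:

    # A hypothetical build set exercising the directives parsed above.
    package: example-tools

    %define example_version 1.0
    %include base.bset

    # Plain lines name configuration files resolved via build.find_config().
    tools/example-1.0-1.cfg
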
def __init__(self, path_or_host, port=None, memory='4g', lang='en', timeout=1500, quiet=True,
             logging_level=logging.WARNING, max_retries=5):
    self.path_or_host = path_or_host
    self.port = port
    self.memory = memory
    self.lang = lang
    self.timeout = timeout
    self.quiet = quiet
    self.logging_level = logging_level

    logging.basicConfig(level=self.logging_level)

    here = path.abspath(path.dirname(__file__))

    # Check args
    self._check_args()

    if self.path_or_host.startswith('http'):
        self.url = self.path_or_host + ':' + str(port)
        logging.info('Using an existing server {}'.format(self.url))
    else:
        # Check Java
        if not subprocess.call(['java', '-version'], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT) == 0:
            raise RuntimeError('Java not found.')

        # Check if the dir exists
        if not os.path.isdir(self.path_or_host):
            raise IOError(str(self.path_or_host) + ' is not a directory.')
        directory = os.path.normpath(self.path_or_host) + os.sep
        self.class_path_dir = directory

        # Check if the language specific model file exists
        switcher = {
            'en': 'stanford-corenlp-[0-9].[0-9].[0-9]-models.jar',
            'zh': 'stanford-chinese-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
            'ar': 'stanford-arabic-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
            'fr': 'stanford-french-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
            'de': 'stanford-german-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar',
            'es': 'stanford-spanish-corenlp-[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]-models.jar'
        }
        jars = {
            'en': 'stanford-corenlp-x.x.x-models.jar',
            'zh': 'stanford-chinese-corenlp-yyyy-MM-dd-models.jar',
            'ar': 'stanford-arabic-corenlp-yyyy-MM-dd-models.jar',
            'fr': 'stanford-french-corenlp-yyyy-MM-dd-models.jar',
            'de': 'stanford-german-corenlp-yyyy-MM-dd-models.jar',
            'es': 'stanford-spanish-corenlp-yyyy-MM-dd-models.jar'
        }
        if len(glob.glob(directory + switcher.get(self.lang))) <= 0:
            raise IOError(jars.get(self.lang) +
                          ' does not exist. You should download and place it in the ' +
                          directory + ' first.')

        # If port not set, auto select
        if self.port is None:
            for port_candidate in range(9000, 65535):
                if port_candidate not in [conn.laddr[1] for conn in psutil.net_connections()]:
                    self.port = port_candidate
                    break

        # Check if the port is in use
        if self.port in [conn.laddr[1] for conn in psutil.net_connections()]:
            raise IOError('Port ' + str(self.port) + ' is already in use.')

        # Start native server
        logging.info('Initializing native server...')
        cmd = "java"
        java_args = "-Xmx{}".format(self.memory)
        java_class = "edu.stanford.nlp.pipeline.StanfordCoreNLPServer"
        class_path = '"{}*"'.format(directory)

        args = [cmd, java_args, '-cp', class_path, java_class, '-port', str(self.port)]
        args = ' '.join(args)
        logging.info(args)

        # Silence
        with open(os.devnull, 'w') as null_file:
            out_file = None
            if self.quiet:
                out_file = null_file

            self.p = subprocess.Popen(args, shell=True, stdout=out_file,
                                      stderr=subprocess.STDOUT)
            logging.info('Server shell PID: {}'.format(self.p.pid))

        self.url = 'http://localhost:' + str(self.port)

    # Wait until server starts
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    host_name = urlparse(self.url).hostname
    time.sleep(1)  # OSX, not tested
    trial = 1
    while sock.connect_ex((host_name, self.port)):
        if trial > max_retries:
            raise ValueError('Corenlp server is not available')
        logging.info('Waiting until the server is available.')
        trial += 1
        time.sleep(1)
    logging.info('The server is available.')

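A hedged usage sketch for the constructor above; the class name StanfordCoreNLP and the local CoreNLP directory are assumptions, not shown in the snippet.

    # Assumes the class above is exported as StanfordCoreNLP and that a CoreNLP
    # distribution has been unpacked at the (hypothetical) directory below.
    nlp = StanfordCoreNLP('/opt/stanford-corenlp', memory='4g', lang='en', quiet=True)
    # ... use the wrapper, then shut the spawned server down with whatever
    # close/cleanup method the class provides.

    # Alternatively, attach to an already running server (http branch above):
    nlp = StanfordCoreNLP('http://localhost', port=9000)
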
def abspath(self, rpath):
    return path.abspath(self.define(rpath))

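A minimal, self-contained sketch of what the one-line helper above does, assuming define() maps a name to a raw path; the stand-in class and its table are hypothetical, the real object resolves names from its defaults.

    from os import path

    class _Defaults(object):
        # Stand-in for the real options/defaults object (hypothetical table).
        def __init__(self):
            self._table = {'_sourcedir': 'sources', '_builddir': 'build'}

        def define(self, rpath):
            return self._table.get(rpath, rpath)

        def abspath(self, rpath):
            return path.abspath(self.define(rpath))

    d = _Defaults()
    print(d.abspath('_sourcedir'))   # absolute path of 'sources' under the cwd
    print(d.abspath('tmp/work'))     # unknown names fall through unchanged
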
def load(args, optargs = None, defaults = '%{_sbdir}/defaults.mc', logfile = True):
    """
    Copy the defaults, get the host specific values and merge them
    overriding any matching defaults, then create an options object to
    handle the command line merging in any command line overrides.
    Finally post process the command line.
    """

    global host_windows
    global host_posix

    #
    # Adjust the args to remove the wrapper.
    #
    args = args[1:]

    #
    # The path to this command.
    #
    command_path = path.dirname(path.abspath(args[0]))
    if len(command_path) == 0:
        command_path = '.'

    #
    # The command line contains the base defaults object all build objects copy
    # and modify by loading a configuration.
    #
    o = command_line(args,
                     optargs,
                     macros.macros(name = defaults, sbdir = command_path),
                     command_path)

    overrides = None
    if os.name == 'nt':
        try:
            import windows
            overrides = windows.load()
            host_windows = True
            host_posix = False
        except:
            raise error.general('failed to load Windows host support')
    elif os.name == 'posix':
        uname = os.uname()
        try:
            if uname[0].startswith('MINGW64_NT'):
                import windows
                overrides = windows.load()
                host_windows = True
            elif uname[0].startswith('CYGWIN_NT'):
                import windows
                overrides = windows.load()
            elif uname[0] == 'Darwin':
                import darwin
                overrides = darwin.load()
            elif uname[0] == 'FreeBSD':
                import freebsd
                overrides = freebsd.load()
            elif uname[0] == 'NetBSD':
                import netbsd
                overrides = netbsd.load()
            elif uname[0] == 'Linux':
                import linux
                overrides = linux.load()
            elif uname[0] == 'SunOS':
                import solaris
                overrides = solaris.load()
        except error.general as ge:
            raise error.general('failed to load %s host support: %s' % (uname[0], ge))
        except:
            raise error.general('failed to load %s host support' % (uname[0]))
    else:
        raise error.general('unsupported host type; please add')
    if overrides is None:
        raise error.general('no hosts defaults found; please add')
    for k in overrides:
        o.defaults[k] = overrides[k]

    o.sb_released()
    o.sb_git()
    o.rtems_options()
    o.pre_process()
    o.process()
    o.post_process(logfile)

    #
    # Load the release settings
    #
    version.load_release_settings(o.defaults)

    return o

"""Setup.""" import path from setuptools import setup, find_packages base_dir = path.abspath(path.dirname(__file__)) with open(path.join(base_dir, 'README.md')) as desc: long_description = desc.read() with open(path.join(base_dir, 'LICENSE')) as desc: license = desc.read() with open(path.join(base_dir, 'version.py')) as version: exec(version.read()) setup( name='angelos_lab', version=__version__, # noqa F821 description='A laboratory environment for the Angelos project.', long_description=long_description, author=__author__, # noqa F821 author_email=__author_email__, # noqa F821 url=__url__, # noqa F821 license=license, packages=find_packages(exclude=('tests', 'docs')))
def load(self, name):

    def common_end(left, right):
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}
        self._packages = {}
        self.package = 'main'
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)

    self.load_depth += 1

    save_name = self.name
    save_lc = self.lc

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))

def load(self, name):

    def common_end(left, right):
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}

    self.load_depth += 1

    save_name = self.name
    save_lc = self.lc

    self.name = name
    self.lc = 0

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.init_name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))

    self.configpath += [configname]
    self._includes += [configname]

    try:
        dir = None
        info = None
        data = []
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        config.close()
        raise
    config.close()

    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1

def load(args, optargs=None, defaults='%{_sbdir}/defaults.mc', logfile=True):
    """
    Copy the defaults, get the host specific values and merge them
    overriding any matching defaults, then create an options object to
    handle the command line merging in any command line overrides.
    Finally post process the command line.
    """

    global host_windows
    global host_posix

    #
    # Adjust the args to remove the wrapper.
    #
    args = args[1:]

    #
    # The path to this command.
    #
    command_path = path.dirname(path.abspath(args[0]))
    if len(command_path) == 0:
        command_path = '.'

    #
    # The command line contains the base defaults object all build objects copy
    # and modify by loading a configuration.
    #
    o = command_line(args,
                     optargs,
                     macros.macros(name=defaults, sbdir=command_path),
                     command_path)

    overrides = None
    if os.name == 'nt':
        try:
            import windows
            overrides = windows.load()
            host_windows = True
            host_posix = False
        except:
            raise error.general('failed to load Windows host support')
    elif os.name == 'posix':
        uname = os.uname()
        try:
            if uname[0].startswith('MINGW64_NT'):
                import windows
                overrides = windows.load()
                host_windows = True
            elif uname[0].startswith('CYGWIN_NT'):
                import windows
                overrides = windows.load()
            elif uname[0] == 'Darwin':
                import darwin
                overrides = darwin.load()
            elif uname[0] == 'FreeBSD':
                import freebsd
                overrides = freebsd.load()
            elif uname[0] == 'NetBSD':
                import netbsd
                overrides = netbsd.load()
            elif uname[0] == 'Linux':
                import linux
                overrides = linux.load()
            elif uname[0] == 'SunOS':
                import solaris
                overrides = solaris.load()
        except error.general as ge:
            raise error.general('failed to load %s host support: %s' % (uname[0], ge))
        except:
            raise error.general('failed to load %s host support' % (uname[0]))
    else:
        raise error.general('unsupported host type; please add')
    if overrides is None:
        raise error.general('no hosts defaults found; please add')
    for k in overrides:
        o.defaults[k] = overrides[k]

    o.sb_released()
    o.sb_git()
    o.rtems_options()
    o.pre_process()
    o.process()
    o.post_process(logfile)

    #
    # Load the release settings
    #
    version.load_release_settings(o.defaults)

    return o

# will be used to describe the database
from os import path

from sqlalchemy import (create_engine, Column, String, Integer,
                        Boolean, Table, ForeignKey)
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base

database_filename = 'twitter.sqlite3'
directory = path.abspath(path.dirname(__file__))
database_filepath = path.join(directory, database_filename)

# create engine using the file path
engine_url = 'sqlite:///{}'.format(database_filepath)
engine = create_engine(engine_url)

# the database class objects are going to inherit from this class
Base = declarative_base(bind=engine)

# create a configured "Session" class
Session = sessionmaker(bind=engine, autoflush=False)

# create a session

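A hedged sketch of how the engine/session scaffolding above is typically used, with a hypothetical Tweet model that is not part of the original module; it assumes the Base, Session and imports defined above (pre-2.0 SQLAlchemy, matching the bind= usage).

    # Hypothetical model and usage; relies on Base, Session and Column/Integer/
    # String imported above.
    class Tweet(Base):
        __tablename__ = 'tweets'
        id = Column(Integer, primary_key=True)
        text = Column(String(280))

    Base.metadata.create_all()          # create tables on the bound engine

    session = Session()
    session.add(Tweet(text='hello'))
    session.commit()
    print(session.query(Tweet).count())
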
def load(self, name):

    def common_end(left, right):
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        self._reset(name)
        self._packages[self.package] = package(self.package,
                                               self.define('%{_arch}'),
                                               self)

    self.load_depth += 1

    save_name = self.name
    save_lc = self.lc

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))

def _lo_path(self, opt, macro, value):
    if value is None:
        raise error.general('option requires a path: %s' % (opt))
    value = path.abspath(value)
    self.opts[opt[2:]] = value
    self.defaults[macro] = value

def load(self, name):

    def common_end(left, right):
        end = ''
        while len(left) and len(right):
            if left[-1] != right[-1]:
                return end
            end = left[-1] + end
            left = left[:-1]
            right = right[:-1]
        return end

    if self.load_depth == 0:
        self.in_error = False
        self.lc = 0
        self.name = name
        self.conditionals = {}

    self.load_depth += 1

    save_name = self.name
    save_lc = self.lc

    self.name = name
    self.lc = 0

    #
    # Locate the config file. Expand any macros then add the
    # extension. Check if the file exists, therefore directly
    # referenced. If not see if the file contains ':' or the path
    # separator. If it does split the path else use the standard config dir
    # path in the defaults.
    #
    exname = self.expand(name)

    #
    # Macro could add an extension.
    #
    if exname.endswith('.cfg'):
        configname = exname
    else:
        configname = '%s.cfg' % (exname)
        name = '%s.cfg' % (name)

    if ':' in configname:
        cfgname = path.basename(configname)
    else:
        cfgname = common_end(configname, name)

    if not path.exists(configname):
        if ':' in configname:
            configdirs = path.dirname(configname).split(':')
        else:
            configdirs = self.define('_configdir').split(':')
        for cp in configdirs:
            configname = path.join(path.abspath(cp), cfgname)
            if path.exists(configname):
                break
            configname = None
        if configname is None:
            raise error.general('no config file found: %s' % (cfgname))

    try:
        log.trace('config: %s: _open: %s' % (self.init_name, path.host(configname)))
        config = open(path.host(configname), 'r')
    except IOError as err:
        raise error.general('error opening config file: %s' % (path.host(configname)))

    self.configpath += [configname]
    self._includes += [configname]

    try:
        dir = None
        info = None
        data = []
        while True:
            r = self._parse(config, dir, info)
            if r[0] == 'control':
                if r[1] == '%end':
                    break
                log.warning("unexpected '%s'" % (r[1]))
            elif r[0] == 'directive':
                if r[1] == '%include':
                    self.load(r[2][0])
                    continue
                dir, info, data = self._process_directive(r, dir, info, data)
            elif r[0] == 'data':
                dir, info, data = self._process_data(r, dir, info, data)
            else:
                self._error("%d: invalid parse state: '%s" % (self.lc, r[0]))
        if dir is not None:
            self._directive_extend(dir, data)
    except:
        config.close()
        raise
    config.close()

    self.name = save_name
    self.lc = save_lc
    self.load_depth -= 1

def parse(self, bset):

    def _clean(line):
        line = line[0:-1]
        b = line.find('#')
        if b >= 0:
            line = line[1:b]
        return line.strip()

    bsetname = bset
    if not path.exists(bsetname):
        for cp in self.macros.expand('%{_configdir}').split(':'):
            configdir = path.abspath(cp)
            bsetname = path.join(configdir, bset)
            if path.exists(bsetname):
                break
            bsetname = None
        if bsetname is None:
            raise error.general('no build set file found: %s' % (bset))
    try:
        log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
        bset = open(path.host(bsetname), 'r')
    except IOError as err:
        raise error.general('error opening bset file: %s' % (bsetname))
    configs = []
    try:
        lc = 0
        for l in bset:
            lc += 1
            l = _clean(l)
            if len(l) == 0:
                continue
            log.trace('_bset: %s: %03d: %s' % (self.bset, lc, l))
            ls = l.split()
            if ls[0][-1] == ':' and ls[0][:-1] == 'package':
                self.bset_pkg = ls[1].strip()
                self.macros['package'] = self.bset_pkg
            elif ls[0][0] == '%':

                def err(msg):
                    raise error.general('%s:%d: %s' % (self.bset, lc, msg))

                if ls[0] == '%define':
                    if len(ls) > 2:
                        self.macros.define(ls[1].strip(),
                                           ' '.join([f.strip() for f in ls[2:]]))
                    else:
                        self.macros.define(ls[1].strip())
                elif ls[0] == '%undefine':
                    if len(ls) > 2:
                        raise error.general('%s:%d: %%undefine requires just the name' % \
                                            (self.bset, lc))
                    self.macros.undefine(ls[1].strip())
                elif ls[0] == '%include':
                    configs += self.parse(ls[1].strip())
                elif ls[0] in ['%patch', '%source']:
                    sources.process(ls[0][1:], ls[1:], self.macros, err)
                elif ls[0] == '%hash':
                    sources.hash(ls[1:], self.macros, err)
            else:
                l = l.strip()
                c = build.find_config(l, self.configs)
                if c is None:
                    raise error.general('%s:%d: cannot find file: %s' % (self.bset, lc, l))
                configs += [c]
    except:
        bset.close()
        raise
    bset.close()
    return configs

from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

with open(path.join(here, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

setup(
    name="fritzhome",
    version="1.0.0",
    description="Query information from your FRITZ!Box (mostly energy)",
    long_description=long_description,
    url="https://github.com/DerMitch/fritzbox-smarthome",
    author="Michael Mayr",
    author_email="*****@*****.**",
    license="MIT",
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
    ],
    keywords="fritzbox smarthome avm energy",
    packages=["fritzhome"],
    install_requires=[
        'requests>=2.7.0',
        'click>=4.0.0',
    ],
    entry_points={
        'console_scripts': [
            'fritzhome=fritzhome.__main__:cli',
        ],
    })

$NetBSD$

Pick up `cvss' module from the parent directory in order to properly
run the tests without having the cvss module installed.

--- tests/test_cvss2.py.orig	2017-01-11 09:11:40.000000000 +0000
+++ tests/test_cvss2.py
@@ -2,6 +2,8 @@
 from os import path
 import sys
 import unittest
+sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
+
 from cvss import CVSS2
 from cvss.exceptions import CVSS2MalformedError, CVSS2MandatoryError, CVSS2RHScoreDoesNotMatch, \
     CVSS2RHMalformedError
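The inserted line is the usual idiom for importing an uninstalled package from a checkout: prepend the repository root (two directories up from the test file) to sys.path before importing it. A generic, self-contained version of the same idiom:

    # Generic form of the patch above: make the parent of this file's directory
    # importable so an in-tree, uninstalled package can be picked up by tests.
    import sys
    from os import path

    sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
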