Example #1
 def post_process(self):
     # Handle the log first.
     log.default = log.log(self.logfiles())
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' %
                                 (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
Example #2
    def parse(self, bset):

        def _clean(line):
            line = line[0:-1]
            b = line.find('#')
            if b >= 0:
                line = line[1:b]
            return line.strip()

        bsetname = bset

        if not path.exists(bsetname):
            for cp in self.macros.expand('%{_configdir}').split(':'):
                configdir = path.abspath(cp)
                bsetname = path.join(configdir, bset)
                if path.exists(bsetname):
                    break
                bsetname = None
            if bsetname is None:
                raise error.general('no build set file found: %s' % (bset))
        try:
            log.trace('_bset: %s: open: %s' % (self.bset, bsetname))
            bset = open(path.host(bsetname), 'r')
        except IOError as err:
            raise error.general('error opening bset file: %s' % (bsetname))
Example #3
 def __init__(self, opts, prefix, arch_bsp):
     self.opts = opts
     self.prefix = prefix
     if not path.exists(prefix):
         raise error.general('RTEMS prefix path not found: %s' % (prefix))
     self.makefile_inc = None
     if '/' in arch_bsp:
         arch, bsp = arch_bsp.split('/', 1)
     else:
         arch = None
         bsp = arch_bsp
     makefile_incs = _collect(prefix, 'Makefile.inc')
     for mi in makefile_incs:
         found = True
         if arch is not None and arch not in mi:
             found = False
         if bsp not in mi:
             found = False
         if found:
             self.makefile_inc = mi
             break
     if self.makefile_inc is None:
         raise error.general('RTEMS BSP not found: %s' % (arch_bsp))
     if not path.exists(self.makefile_inc):
         raise error.general('RTEMS BSP configuration not found: %s: %s' % \
                                 (arch_bsp, self.makefile_inc))
     self.command = command(opts, [
         '%{__make}', '-f'
         '%{_sbdir}/sb/rtemsconfig.mk',
         'makefile_inc=%s' % (self.makefile_inc)
     ])
     self.command.run()
     self.parse(self.command.output)
Example #4
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    log.notice('download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
Example #5
def _http_downloader(url, local, config, opts):
    if path.exists(local):
        return True
    #
    # Hack for GitHub.
    #
    if url.startswith('https://api.github.com'):
        url = urlparse.urljoin(url, config.expand('tarball/%{version}'))
    log.notice('download: %s -> %s' % (url, os.path.relpath(path.host(local))))
    failed = False
    if not opts.dry_run():
        _in = None
        _out = None
        try:
            _in = urllib2.urlopen(url)
            _out = open(path.host(local), 'wb')
            _out.write(_in.read())
        except IOError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
        except ValueError as err:
            log.notice('download: %s: error: %s' % (url, str(err)))
            if path.exists(local):
                os.remove(path.host(local))
            failed = True
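The two _http_downloader examples above are Python 2 code (urllib2, comma-style except). A rough Python 3 equivalent of the core fetch-and-clean-up pattern, with the RSB path/log/opts wrappers stripped out, could look like this sketch:

import os
import urllib.request

def http_download(url, local):
    # Skip the download if the file is already present.
    if os.path.exists(local):
        return True
    try:
        with urllib.request.urlopen(url) as _in, open(local, 'wb') as _out:
            _out.write(_in.read())
    except (OSError, ValueError) as err:
        print('download: %s: error: %s' % (url, err))
        # Remove any partial file so a retry starts clean.
        if os.path.exists(local):
            os.remove(local)
        return False
    return True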
Example #6
 def post_process(self):
     # Handle the log first.
     log.default = log.log(self.logfiles())
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' % (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
Example #7
 def __init__(self, opts, prefix, arch_bsp):
     self.opts = opts
     self.prefix = prefix
     if not path.exists(prefix):
         raise error.general('RTEMS prefix path not found: %s' % (prefix))
     self.makefile_inc = None
     if '/' in arch_bsp:
         arch, bsp = arch_bsp.split('/', 1)
     else:
         arch = None
         bsp = arch_bsp
     makefile_incs = _collect(prefix, 'Makefile.inc')
     for mi in makefile_incs:
         found = True
         if arch is not None and arch not in mi:
             found = False
         if bsp not in mi:
             found = False
         if found:
             self.makefile_inc = mi
             break
     if self.makefile_inc is None:
         raise error.general('RTEMS BSP not found: %s' % (arch_bsp))
     if not path.exists(self.makefile_inc):
         raise error.general('RTEMS BSP configuration not found: %s: %s' % \
                                 (arch_bsp, self.makefile_inc))
     self.command = command(opts, ['%{__make}',
                                   '-f' '%{_sbdir}/sb/rtemsconfig.mk',
                                   'makefile_inc=%s' % (self.makefile_inc)])
     self.command.run()
     self.parse(self.command.output)
Example #8
 def post_process(self, logfile=True):
     # Handle the log first.
     logctrl = self.parse_args('--without-log')
     if logctrl is None:
         if logfile:
             logfiles = self.logfiles()
         else:
             logfiles = None
         log.default = log.log(streams=logfiles)
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Must have a host
     if self.defaults['_host'] == self.defaults['nil']:
         raise error.general('--host not set')
     # Must have a host
     if self.defaults['_build'] == self.defaults['nil']:
         raise error.general('--build not set')
     # Default prefix
     prefix = self.parse_args('--prefix')
     if prefix is None:
         value = path.join(self.defaults['_prefix'], 'rtems',
                           str(self.defaults['rtems_version']))
         self.opts['prefix'] = value
         self.defaults['_prefix'] = value
     # Manage the regression option
     if self.opts['regression'] != '0':
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
         self.opts['keep-going'] = '1'
         self.defaults['_keep_going'] = '1'
         self.opts['always-clean'] = '1'
         self.defaults['_always_clean'] = '1'
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' % \
                                 (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
Example #9
 def post_process(self, logfile = True):
     # Handle the log first.
     logctrl = self.parse_args('--without-log')
     if logctrl is None:
         if logfile:
             logfiles = self.logfiles()
         else:
             logfiles = None
         log.default = log.log(streams = logfiles)
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Must have a host
     if self.defaults['_host'] == self.defaults['nil']:
         raise error.general('--host not set')
     # Must have a host
     if self.defaults['_build'] == self.defaults['nil']:
         raise error.general('--build not set')
     # Default prefix
     prefix = self.parse_args('--prefix')
     if prefix is None:
         value = path.join(self.defaults['_prefix'], 'rtems', str(self.defaults['rtems_version']))
         self.opts['prefix'] = value
         self.defaults['_prefix'] = value
     # Manage the regression option
     if self.opts['regression'] != '0':
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
         self.opts['keep-going'] = '1'
         self.defaults['_keep_going'] = '1'
         self.opts['always-clean'] = '1'
         self.defaults['_always_clean'] = '1'
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' % (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
Example #10
 def post_process(self):
     # Handle the log first.
     log.default = log.log(self.logfiles())
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Must have a host
     if self.defaults['_host'] == self.defaults['nil']:
         raise error.general('--host not set')
     # Must have a host
     if self.defaults['_build'] == self.defaults['nil']:
         raise error.general('--build not set')
     # Manage the regression option
     if self.opts['regression'] != '0':
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
         self.opts['keep-going'] = '1'
         self.defaults['_keep_going'] = '1'
         self.opts['always-clean'] = '1'
         self.defaults['_always_clean'] = '1'
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' %
                                 (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
     # If a Cxc build disable installing.
     if self.canadian_cross():
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
Example #11
 def post_process(self):
     # Handle the log first.
     log.default = log.log(self.logfiles())
     if self.trace():
         log.tracing = True
     if self.quiet():
         log.quiet = True
     # Must have a host
     if self.defaults['_host'] == self.defaults['nil']:
         raise error.general('--host not set')
     # Must have a host
     if self.defaults['_build'] == self.defaults['nil']:
         raise error.general('--build not set')
     # Manage the regression option
     if self.opts['regression'] != '0':
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
         self.opts['keep-going'] = '1'
         self.defaults['_keep_going'] = '1'
         self.opts['always-clean'] = '1'
         self.defaults['_always_clean'] = '1'
     # Handle the jobs for make
     if '_ncpus' not in self.defaults:
         raise error.general('host number of CPUs not set')
     ncpus = self.jobs(self.defaults['_ncpus'])
     if ncpus > 1:
         self.defaults['_smp_mflags'] = '-j %d' % (ncpus)
     else:
         self.defaults['_smp_mflags'] = self.defaults['nil']
     # Load user macro files
     um = self.user_macros()
     if um:
         checked = path.exists(um)
         if False in checked:
             raise error.general('macro file not found: %s' % (um[checked.index(False)]))
         for m in um:
             self.defaults.load(m)
     # Check if the user has a private set of macros to load
     if 'RSB_MACROS' in os.environ:
         if path.exists(os.environ['RSB_MACROS']):
             self.defaults.load(os.environ['RSB_MACROS'])
     if 'HOME' in os.environ:
         rsb_macros = path.join(os.environ['HOME'], '.rsb_macros')
         if path.exists(rsb_macros):
             self.defaults.load(rsb_macros)
     # If a Cxc build disable installing.
     if self.canadian_cross():
         self.opts['no-install'] = '1'
         self.defaults['_no_install'] = '1'
Example #12
 def __init__(self, infile, outdir):
     self.infile = infile
     self.outdir = outdir
     if not path.exists(outdir):
         path.mkdir(outdir)
     self.mobi7dir = os.path.join(outdir,'mobi7')
     if not path.exists(self.mobi7dir):
         path.mkdir(self.mobi7dir)
     self.imgdir = os.path.join(self.mobi7dir, 'Images')
     if not path.exists(self.imgdir):
         path.mkdir(self.imgdir)
     self.hdimgdir = os.path.join(outdir,'HDImages')
     if not path.exists(self.hdimgdir):
         path.mkdir(self.hdimgdir)
     self.outbase = os.path.join(outdir, os.path.splitext(os.path.split(infile)[1])[0])
Example #13
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
Example #14
def get_file(url, local, opts, config):
    if local is None:
        raise error.general("source/patch path invalid")
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice("Creating source directory: %s" % (os.path.relpath(path.host(path.dirname(local)))))
    log.output("making dir: %s" % (path.host(path.dirname(local))))
    if not opts.dry_run():
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general("source not found: %s" % (path.host(local)))
    #
    # Check if a URL has been provided on the command line.
    #
    url_bases = opts.urls()
    urls = []
    if url_bases is not None:
        for base in url_bases:
            if base[-1:] != "/":
                base += "/"
            url_path = urlparse.urlsplit(url)[2]
            slash = url_path.rfind("/")
            if slash < 0:
                url_file = url_path
            else:
                url_file = url_path[slash + 1 :]
            urls.append(urlparse.urljoin(base, url_file))
    urls += url.split()
    log.trace("_url: %s -> %s" % (",".join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if not opts.dry_run():
        raise error.general("downloading %s: all paths have failed, giving up" % (url))
Example #15
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source["url"] = url
    colon = url.find(":")
    if url[colon + 1 : colon + 3] != "//":
        raise error.general("malforned URL: %s" % (url))
    source["path"] = url[: colon + 3] + path.dirname(url[colon + 3 :])
    source["file"] = path.basename(url)
    source["name"], source["ext"] = path.splitext(source["file"])
    if source["name"].endswith(".tar"):
        source["name"] = source["name"][:-4]
        source["ext"] = ".tar" + source["ext"]
    #
    # Get the file. Checks the local source directory first.
    #
    source["local"] = None
    for p in config.define(pathkey).split(":"):
        local = path.join(path.abspath(p), source["file"])
        if source["local"] is None:
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
        if path.exists(local):
            source["local_prefix"] = path.abspath(p)
            source["local"] = local
            break
    source["script"] = ""
    for p in parsers:
        if url.startswith(p):
            source["type"] = p
            if parsers[p](source, config, opts):
                break
    return source
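The '.tar' special case in the parse_url example above keeps compound extensions such as '.tar.xz' together. A standalone sketch of just that split, using os.path instead of the RSB path module:

import os.path

def split_source_name(url):
    # 'pkg-1.0.tar.xz' -> ('pkg-1.0', '.tar.xz'); 'pkg-1.0.zip' -> ('pkg-1.0', '.zip')
    file_ = os.path.basename(url)
    name, ext = os.path.splitext(file_)
    if name.endswith('.tar'):
        name = name[:-4]
        ext = '.tar' + ext
    return name, ext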
Example #16
    def from_address(self):
        def _clean(l):
            if '#' in l:
                l = l[:l.index('#')]
            if '\r' in l:
                l = l[:l.index('\r')]
            if '\n' in l:
                l = l[:l.index('\n')]
            return l.strip()

        addr = self.opts.get_arg('--mail-from')
        if addr is not None:
            return addr[1]
        mailrc = None
        if 'MAILRC' in os.environ:
            mailrc = os.environ['MAILRC']
        if mailrc is None and 'HOME' in os.environ:
            mailrc = path.join(os.environ['HOME'], '.mailrc')
        if mailrc is not None and path.exists(mailrc):
            # set from="Joe Blow <*****@*****.**>"
            try:
                mrc = open(mailrc, 'r')
                lines = mrc.readlines()
                mrc.close()
            except IOError as err:
                raise error.general('error reading: %s' % (mailrc))
            for l in lines:
                l = _clean(l)
                if 'from' in l:
                    fa = l[l.index('from') + len('from'):]
                    if '=' in fa:
                        addr = fa[fa.index('=') + 1:].replace('"', ' ').strip()
            if addr is not None:
                return addr
Example #17
    def from_address(self):

        def _clean(l):
            if '#' in l:
                l = l[:l.index('#')]
            if '\r' in l:
                l = l[:l.index('\r')]
            if '\n' in l:
                l = l[:l.index('\n')]
            return l.strip()

        addr = self.opts.get_arg('--mail-from')
        if addr is not None:
            return addr[1]
        mailrc = None
        if 'MAILRC' in os.environ:
            mailrc = os.environ['MAILRC']
        if mailrc is None and 'HOME' in os.environ:
            mailrc = path.join(os.environ['HOME'], '.mailrc')
        if mailrc is not None and path.exists(mailrc):
            # set from="Joe Blow <*****@*****.**>"
            try:
                mrc = open(mailrc, 'r')
                lines = mrc.readlines()
                mrc.close()
            except IOError as err:
                raise error.general('error reading: %s' % (mailrc))
            for l in lines:
                l = _clean(l)
                if 'from' in l:
                    fa = l[l.index('from') + len('from'):]
                    if '=' in fa:
                        addr = fa[fa.index('=') + 1:].replace('"', ' ').strip()
            if addr is not None:
                return addr
Example #18
 def status(self):
     _status = {}
     if path.exists(self.path):
         ec, output = self._run(['status'])
         if ec == 0:
             state = 'none'
             for l in output.split('\n'):
                 if l.startswith('# '):
                     l = l[2:]
                 if l.startswith('On branch '):
                     _status['branch'] = l[len('On branch '):]
                 elif l.startswith('Changes to be committed:'):
                     state = 'staged'
                 elif l.startswith('Changes not staged for commit:'):
                     state = 'unstaged'
                 elif l.startswith('Untracked files:'):
                     state = 'untracked'
                 elif l.startswith('HEAD detached'):
                     state = 'detached'
                 elif state != 'none' and len(l.strip()) != 0:
                     if l[0].isspace():
                         l = l.strip()
                         if l[0] != '(':
                             if state not in _status:
                                 _status[state] = []
                             l = l[1:]
                             if ':' in l:
                                 l = l.split(':')[1]
                             _status[state] += [l.strip()]
     return _status
Example #19
 def patch(self, package, args):
     #
     # Scan the patches found in the config file for the one we are
     # after. Infos or tags are lists.
     #
     patches = package.patches()
     url = None
     for p in patches:
         if args[0][1:].lower() == p:
             url = patches[p][0]
             break
     if url is None:
         raise error.general('patch tag not found: %s' % (args[0]))
     #
     # Parse the URL first in the source builder's patch directory.
     #
     patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     #
     # If not in the source builder package check the source directory.
     #
     if not path.exists(patch['local']):
         patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     download.get_file(patch['url'], patch['local'], self.opts, self.config)
     if 'compressed' in patch:
         patch['script'] = patch['compressed'] + ' ' +  patch['local']
     else:
         patch['script'] = '%{__cat} ' + patch['local']
     patch['script'] += ' | %{__patch} ' + ' '.join(args[1:])
     self.script.append(self.config.expand(patch['script']))
Example #20
def path_check(opts, silent=False):
    if 'PATH' in os.environ:
        paths = os.environ['PATH'].split(os.pathsep)
        for p in paths:
            try:
                if len(p.strip()) == 0:
                    if not silent:
                        log.notice(
                            'error: environment PATH contains an empty path')
                    return False
                elif not options.host_windows and (p.strip() == '.'
                                                   or p.strip() == '..'):
                    if not silent:
                        log.notice('error: environment PATH invalid path: %s' %
                                   (p))
                    return False
                elif not path.exists(p):
                    if not silent and opts.warn_all():
                        log.notice('warning: environment PATH not found: %s' %
                                   (p))
                elif not path.isdir(p):
                    if not silent and opts.warn_all():
                        log.notice(
                            'warning: environment PATH not a directory: %s' %
                            (p))
            except Exception as e:
                if not silent:
                    log.notice(
                        'warning: environment PATH suspicious path: %s' % (e))
    return True
Example #21
def _hash_check(file_, absfile, macros, remove=True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
            ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' %
                                (file_, hash[0]))
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' %
                   (file_, hash_hex, hash_base64, hash[1]))
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' %
                                        (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
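The heart of the _hash_check example is the hashlib comparison, which accepts the expected digest in either hex or base64 form. Reduced to a standalone helper (illustrative; the real function also resolves RSB macros, logs failures and removes bad files):

import base64
import hashlib

def verify_checksum(algorithm, filename, expected):
    # Hash the whole file and accept either a hex or a base64 encoding of the digest.
    hasher = hashlib.new(algorithm)
    with open(filename, 'rb') as f:
        hasher.update(f.read())
    return expected in (hasher.hexdigest(),
                        base64.b64encode(hasher.digest()).decode('utf-8'))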
Example #22
 def deletefile(self, id):
     id = utf8_str(id)
     filepath = self.id_to_filepath.get(id, None)
     if filepath is None:
         raise WrapperException('id does not exist in manifest')
     add_to_deleted = True
     # if file was added or modified, delete file from outdir
     if id in self.added or id in self.modified.keys():
         filepath = os.path.join(self.outdir, filepath)
         if path.exists(filepath) and path.isfile(filepath):
             os.remove(pathof(filepath))
         if id in self.added:
             self.added.remove(id)
             add_to_deleted = False
         if id in self.modified.keys():
             del self.modified[id]
     # remove from manifest
     href = self.id_to_href[id]
     del self.id_to_href[id]
     del self.id_to_mime[id]
     del self.href_to_id[href]
     # remove from spine
     new_spine = []
     was_modified = False
     for sid, linear in self.spine:
         if sid != id:
             new_spine.append((sid, linear))
         else:
             was_modified = True
     if was_modified:
         setspine(new_spine)
     if add_to_deleted:
         self.deleted.append(id)
         self.modified['OEBPS/content.opf'] = 'file'
     del self.id_to_filepath[id]
Example #23
	def test_depth_one(self):
		foo = path.Node("foo")
		path.mount("/", foo)
		self.assertEqual(path.select("/"), ("foo",))
		self.assertTrue(path.exists("/foo"))
		path.umount("/foo")
		self.assertEqual(path.select("/"), ())
Example #24
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    colon = url.find(':')
    if url[colon + 1:colon + 3] != '//':
        raise error.general('malformed URL: %s' % (url))
    source['path'] = url[:colon + 3] + path.dirname(url[colon + 3:])
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    if source['name'].endswith('.tar'):
        source['name'] = source['name'][:-4]
        source['ext'] = '.tar' + source['ext']
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
Example #25
def renames(old, new):
    """renames(old, new)

    Super-rename; create directories as necessary and delete any left
    empty.  Works like rename, except creation of any intermediate
    directories needed to make the new pathname good is attempted
    first.  After the rename, directories corresponding to rightmost
    path segments of the old name will be pruned way until either the
    whole path is consumed or a nonempty directory is found.

    Note: this function can fail with the new directory structure made
    if you lack permissions needed to unlink the leaf directory or
    file.

    """
    head, tail = path.split(new)
    if head and tail and not path.exists(head):
        makedirs(head)
    rename(old, new)
    head, tail = path.split(old)
    if head and tail:
        try:
            removedirs(head)
        except error:
            pass
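Usage sketch for renames(): the target directory tree is created on demand and the emptied source directories are pruned afterwards (the file names below are made up for illustration):

renames('build/tmp/output.log', 'archive/2024/logs/output.log')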
Example #26
def download(language, location, force=False, token=False):
    Path(location).mkdir(parents=True, exist_ok=True)
    print(f"{location}/{language}")
    if not path.exists(f"{location}/{language}") or force:
        try:
            if force:
                shutil.rmtree(f"{location}/{language}")
            Path(f"{location}/{language}").mkdir(parents=True, exist_ok=True)
            os.chdir(f"{location}/{language}")
            if token:
                download_path = f"http://opus.nlpl.eu/download.php?f=OpenSubtitles/v2018/mono/OpenSubtitles.{language}.gz"
            else:
                download_path = f"http://opus.nlpl.eu/download.php?f=OpenSubtitles/v2018/mono/OpenSubtitles.raw.{language}.gz"
            subprocess.check_call(
                f"wget -O {language}.txt.gz {download_path}".split())
            local_filename = f"{location}/{language}/{language}.txt.gz"
            with gzip.open(local_filename, "rb") as f_in:
                with open(local_filename.replace(".gz", ""), "wb") as f_out:
                    shutil.copyfileobj(f_in, f_out)
            os.remove(local_filename)
        except KeyboardInterrupt:
            shutil.rmtree(f"{location}/{language}")
        except subprocess.CalledProcessError:
            shutil.rmtree(f"{location}/{language}")
    else:
        click.echo(f"Language {language} already downloaded.")
Example #27
 def valid(self):
     if path.exists(self.path):
         ec, output = self._run(['-n', 'up', '-l'])
         if ec == 0:
             if not output.startswith('cvs status: No CVSROOT specified'):
                 return True
     return False
Example #28
 def status(self):
     _status = {}
     if path.exists(self.path):
         ec, output = self._run(['status'])
         if ec == 0:
             state = 'none'
             for l in output.split('\n'):
                 if l.startswith('# '):
                     l = l[2:]
                 if l.startswith('On branch '):
                     _status['branch'] = l[len('On branch '):]
                 elif l.startswith('Changes to be committed:'):
                     state = 'staged'
                 elif l.startswith('Changes not staged for commit:'):
                     state = 'unstaged'
                 elif l.startswith('Untracked files:'):
                     state = 'untracked'
                 elif l.startswith('HEAD detached'):
                     state = 'detached'
                 elif state != 'none' and len(l.strip()) != 0:
                     if l[0].isspace():
                         l = l.strip()
                         if l[0] != '(':
                             if state not in _status:
                                 _status[state] = []
                             l = l[1:]
                             if ':' in l:
                                 l = l.split(':')[1]
                             _status[state] += [l.strip()]
     return _status
Example #29
 def deletefile(self, id):
     id = utf8_str(id)
     filepath = self.id_to_filepath.get(id, None)
     if filepath is None:
         raise WrapperException('id does not exist in manifest')
     add_to_deleted = True
     # if file was added or modified, delete file from outdir
     if id in self.added or id in self.modified.keys():
         filepath = os.path.join(self.outdir,filepath)
         if path.exists(filepath) and path.isfile(filepath):
             os.remove(pathof(filepath))
         if id in self.added:
             self.added.remove(id)
             add_to_deleted = False
         if id in self.modified.keys():
             del self.modified[id]
     # remove from manifest
     href = self.id_to_href[id]
     del self.id_to_href[id]
     del self.id_to_mime[id]
     del self.href_to_id[href]
     # remove from spine
     new_spine = []
     was_modified = False
     for sid, linear in self.spine:
         if sid != id:
             new_spine.append((sid, linear))
         else:
             was_modified = True
     if was_modified:
         setspine(new_spine)
     if add_to_deleted:
         self.deleted.append(id)
         self.modified['OEBPS/content.opf'] = 'file'
     del self.id_to_filepath[id]
Example #30
 def patch(self, package, args):
     #
     # Scan the patches found in the config file for the one we are
     # after. Infos or tags are lists.
     #
     patches = package.patches()
     url = None
     for p in patches:
         if args[0][1:].lower() == p:
             url = patches[p][0]
             break
     if url is None:
         raise error.general('patch tag not found: %s' % (args[0]))
     #
     # Parse the URL first in the source builder's patch directory.
     #
     patch = download.parse_url(url, '_patchdir', self.config, self.opts)
     #
     # If not in the source builder package check the source directory.
     #
     if not path.exists(patch['local']):
         patch = download.parse_url(url, '_patchdir', self.config,
                                    self.opts)
     download.get_file(patch['url'], patch['local'], self.opts, self.config)
     if 'compressed' in patch:
         patch['script'] = patch['compressed'] + ' ' + patch['local']
     else:
         patch['script'] = '%{__cat} ' + patch['local']
     patch['script'] += ' | %{__patch} ' + ' '.join(args[1:])
     self.script.append(self.config.expand(patch['script']))
Example #31
def parse_url(url, pathkey, config, opts):
    #
    # Split the source up into the parts we need.
    #
    source = {}
    source['url'] = url
    source['path'] = path.dirname(url)
    source['file'] = path.basename(url)
    source['name'], source['ext'] = path.splitext(source['file'])
    #
    # Get the file. Checks the local source directory first.
    #
    source['local'] = None
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            break
    source['script'] = ''
    for p in parsers:
        if url.startswith(p):
            source['type'] = p
            if parsers[p](source, config, opts):
                break
    return source
Example #32
 def write_opf(self):
     if self.op is not None:
         filepath = utf8_str(os.path.join(self.outdir, 'OEBPS', self.opfname))
         base = os.path.dirname(filepath)
         if not path.exists(base):
             os.makedirs(pathof(base))
         with open(pathof(filepath),'wb') as fp:
             fp.write(self.build_opf())
Example #33
 def __init__(self, infile, outdir):
     self.infile = infile
     self.outdir = outdir
     if not path.exists(outdir):
         path.mkdir(outdir)
     self.mobi7dir = os.path.join(outdir, 'mobi7')
     if not path.exists(self.mobi7dir):
         path.mkdir(self.mobi7dir)
     self.imgdir = os.path.join(self.mobi7dir, 'Images')
     if not path.exists(self.imgdir):
         path.mkdir(self.imgdir)
     self.hdimgdir = os.path.join(outdir, 'HDImages')
     if not path.exists(self.hdimgdir):
         path.mkdir(self.hdimgdir)
     self.outbase = os.path.join(
         outdir,
         os.path.splitext(os.path.split(infile)[1])[0])
Example #34
 def write_opf(self):
     if self.op is not None:
         filepath = utf8_str(
             os.path.join(self.outdir, 'OEBPS', self.opfname))
         base = os.path.dirname(filepath)
         if not path.exists(base):
             os.makedirs(pathof(base))
         with open(pathof(filepath), 'wb') as fp:
             fp.write(self.build_opf())
Example #35
def _load_released_version_config():
    top = _top()
    for ver in [top, '..']:
        if path.exists(path.join(ver, 'VERSION')):
            import ConfigParser
            v = ConfigParser.SafeConfigParser()
            v.read(path.join(ver, 'VERSION'))
            return v
    return None
Example #36
def _load_released_version_config():
    top = _top()
    for ver in [top, '..']:
        if path.exists(path.join(ver, 'VERSION')):
            import ConfigParser
            v = ConfigParser.SafeConfigParser()
            v.read(path.join(ver, 'VERSION'))
            return v
    return None
Example #37
def _file_downloader(url, local, config, opts):
    if not path.exists(local):
        try:
            src = url[7:]
            dst = local
            log.notice('download: copy %s -> %s' % (src, dst))
            path.copy(src, dst)
        except:
            return False
    return True
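In the _file_downloader example above the slice url[7:] strips the 'file://' scheme so the remainder can be copied as a local path. The same idea with only the standard library (a sketch, without the RSB logging and dry-run handling):

import os
import shutil

def file_download(url, local):
    # Copy a file:// source to the local cache unless it is already there.
    if not os.path.exists(local):
        try:
            shutil.copy(url[len('file://'):], local)
        except OSError:
            return False
    return True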
Example #38
def get_file(url, local, opts, config):
    if local is None:
        raise error.general('source/patch path invalid')
    if not path.isdir(path.dirname(local)) and not opts.download_disabled():
        log.notice('Creating source directory: %s' % \
                       (os.path.relpath(path.host(path.dirname(local)))))
    log.output('making dir: %s' % (path.host(path.dirname(local))))
    if _do_download(opts):
        path.mkdir(path.dirname(local))
    if not path.exists(local) and opts.download_disabled():
        raise error.general('source not found: %s' % (path.host(local)))
    #
    # Check if a URL has been provided on the command line. If the package is
    # release push to the start the RTEMS URL.
    #
    url_bases = opts.urls()
    if version.released():
        rtems_release_url = config.macros.expand('%{rtems_release_url}/%{rsb_version}/sources')
        log.trace('release url: %s' % (rtems_release_url))
        #
        # If the URL being fetched is under the release path do not add the
        # sources release path because it is already there.
        #
        if not url.startswith(rtems_release_url):
            if url_bases is None:
                url_bases = [rtems_release_url]
            else:
                url_bases.append(rtems_release_url)
    urls = []
    if url_bases is not None:
        #
        # Split up the URL we are being asked to download.
        #
        url_path = urlparse.urlsplit(url)[2]
        slash = url_path.rfind('/')
        if slash < 0:
            url_file = url_path
        else:
            url_file = url_path[slash + 1:]
        log.trace('url_file: %s' %(url_file))
        for base in url_bases:
            if base[-1:] != '/':
                base += '/'
            next_url = urlparse.urljoin(base, url_file)
            log.trace('url: %s' %(next_url))
            urls.append(next_url)
    urls += url.split()
    log.trace('_url: %s -> %s' % (','.join(urls), local))
    for url in urls:
        for dl in downloaders:
            if url.startswith(dl):
                if downloaders[dl](url, local, config, opts):
                    return
    if _do_download(opts):
        raise error.general('downloading %s: all paths have failed, giving up' % (url))
Example #39
def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
Example #40
	def test_depth_two(self):
		foo = path.Node("foo")
		path.mount("/", foo)
		bar = path.Node("bar")
		path.mount("/foo", bar)
		self.assertTrue(path.exists("/foo/bar"))
		self.assertEqual(path.select("/foo"), ("bar",))
		self.assertEqual(path.get("/foo/bar"), bar)
		path.umount("/foo/bar")
		self.assertEqual(path.select("/foo"), ())
		self.assertEqual(path.select("/"), ("foo",))
Example #41
def _local_path(source, pathkey, config):
    for p in config.define(pathkey).split(':'):
        local = path.join(path.abspath(p), source['file'])
        if source['local'] is None:
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
        if path.exists(local):
            source['local_prefix'] = path.abspath(p)
            source['local'] = local
            _hash_check(source['file'], local, config.macros)
            break
Example #42
 def _find(name, opts):
     ename = opts.defaults.expand(name)
     if ':' in ename:
         paths = path.dirname(ename).split(':')
         name = path.basename(name)
     else:
         paths = opts.defaults.get_value('_configdir').split(':')
     for p in paths:
         n = path.join(opts.defaults.expand(p), name)
         if path.exists(n):
             return n
     return None
Example #43
    def __call__(self, env, start_response):
        path = self.config.output + env.get('PATH_INFO')
        path_index = path + 'index.html'

        if not path.exists():
            return self._not_found(start_response)
        if path.isfile():
            return self._serve_file(path, start_response)
        if path_index.exists():
            return self._serve_file(path_index, start_response)

        return self._list_dir(path, start_response)
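Unlike the module-level helpers elsewhere on this page, Examples #43 and #49 call exists()/isfile() as methods on a path object built from config.output. A comparable lookup with the standard library's pathlib (a sketch; _serve_file/_not_found/_list_dir are the example's own helpers and are omitted here):

from pathlib import Path

def resolve_request(output_root, path_info):
    # Map the request path onto the output tree; prefer index.html for directories.
    target = Path(output_root) / path_info.lstrip('/')
    index = target / 'index.html'
    if not target.exists():
        return None
    if target.is_file():
        return target
    if index.exists():
        return index
    return target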
Example #44
 def _find(name, opts):
     ename = opts.defaults.expand(name)
     if ':' in ename:
         paths = path.dirname(ename).split(':')
         name = path.basename(name)
     else:
         paths = opts.defaults.get_value('_configdir').split(':')
     for p in paths:
         n = path.join(opts.defaults.expand(p), name)
         if path.exists(n):
             return n
     return None
Example #45
 def user_macros(self):
     #
     # Return something even if it does not exist.
     #
     if self.opts['macros'] is None:
         return None
     um = []
     configs = self.defaults.expand('%{_configdir}').split(':')
     for m in self.opts['macros'].split(','):
         if path.exists(m):
             um += [m]
         else:
             # Get the expanded config macros then check them.
             cm = path.expand(m, configs)
             ccm = path.exists(cm)
             if True in ccm:
                 # Pick the first found
                 um += [cm[ccm.index(True)]]
             else:
                 um += [m]
     return um if len(um) else None
Example #46
 def load(self, name):
     names = self.expand(name).split(':')
     for n in names:
         if path.exists(n):
             try:
                 mc = open(path.host(n), 'r')
                 macros = self.parse(mc)
                 mc.close()
                 self.files += [n]
                 return
             except IOError as err:
                 pass
Example #47
 def user_macros(self):
     #
     # Return something even if it does not exist.
     #
     if self.opts['macros'] is None:
         return None
     um = []
     configs = self.defaults.expand('%{_configdir}').split(':')
     for m in self.opts['macros'].split(','):
         if path.exists(m):
             um += [m]
         else:
             # Get the expanded config macros then check them.
             cm = path.expand(m, configs)
             ccm = path.exists(cm)
             if True in ccm:
                 # Pick the first found
                 um += [cm[ccm.index(True)]]
             else:
                 um += [m]
     return um if len(um) else None
Example #48
 def load(self, name):
     names = self.expand(name).split(':')
     for n in names:
         if path.exists(n):
             try:
                 mc = open(path.host(n), 'r')
                 macros = self.parse(mc)
                 mc.close()
                 self.files += [n]
                 return
             except IOError as err:
                 pass
Example #49
    def __call__(self, env, start_response):
        path = self.config.output + env.get('PATH_INFO')
        path_index = path + 'index.html'

        if not path.exists():
            return self._not_found(start_response)
        if path.isfile():
            return self._serve_file(path, start_response)
        if path_index.exists():
            return self._serve_file(path_index, start_response)

        return self._list_dir(path, start_response)
Example #50
def _hash_check(file_, absfile, macros, remove = True):
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
Example #51
def read_file(filename):
    # Build the absolute path to the file
    filepath = path.join(path.dirname(__file__), filename)

    # Make sure the file exists
    assert_msg(path.exists(filepath), 'file not exist')

    # Read the CSV file and return it
    return pd.read_csv(filepath,
                        index_col=0,
                        parse_dates=True,
                        infer_datetime_format=True)
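Usage sketch for read_file() (the file name below is made up; any CSV with a parseable date index in its first column placed next to the module would do):

prices = read_file('prices.csv')
print(prices.head())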
Example #52
    def from_file(cls, location):
        '''
        Loads authentication state from a given location.
        '''
        if not exists(location):
            print('No serialized state.')
            return None

        try:
            with open(location, 'r') as handle:
                payload = json.load(handle)
                client_id = payload['client_id']
                client_secret = payload['client_secret']
                discovery_endpoint = payload['discovery_endpoint']
                scopes = payload['scopes']
                device_auth = DeviceAuth(client_id=client_id,
                                         client_secret=client_secret,
                                         discovery_endpoint=discovery_endpoint,
                                         scopes=scopes)

                if 'saved_location' in payload:
                    saved_location = payload['saved_location']
                    device_auth.saved_location = saved_location

                if 'discovered' in payload:
                    device_auth_endpoint = payload['device_auth_endpoint']
                    token_endpoint = payload['token_endpoint']
                    device_auth._discovered = True
                    device_auth._device_auth_endpoint = device_auth_endpoint
                    device_auth._token_endpoint = token_endpoint

                if 'authorized' in payload:
                    refresh_token = payload['refresh_token']
                    access_token = payload['access_token']
                    token_acquired_at = payload['token_acquired_at']
                    token_expires_in = payload['token_expires_in']
                    device_auth._authorization_completed = True
                    device_auth._refresh_token = refresh_token
                    device_auth._access_token = access_token
                    device_auth._token_acquired_at = token_acquired_at
                    device_auth._token_expires_in = token_expires_in

                return device_auth
        except Exception as error:
            print('Unable to create an instance of DeviceAuth.', error)
            try:
                os.remove(location)
            except OSError as error:
                # Do nothing
                pass

            return None
Example #53
 def _run(self, args, check = False):
     e = execute.capture_execution()
     if path.exists(self.path):
         cwd = self.path
     else:
         cwd = None
     cmd = [self.git] + args
     log.trace('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
     exit_code, proc, output = e.spawn(cmd, cwd = path.host(cwd))
     log.trace(output)
     if check:
         self._git_exit_code(exit_code)
     return exit_code, output
Example #54
 def _run(self, args, check=False):
     e = execute.capture_execution()
     if path.exists(self.path):
         cwd = self.path
     else:
         cwd = None
     cmd = [self.git] + args
     log.trace('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
     exit_code, proc, output = e.spawn(cmd, cwd=path.host(cwd))
     log.trace(output)
     if check:
         self._git_exit_code(exit_code)
     return exit_code, output
Example #55
 def _run(self, args, check = False, cwd = None):
     e = execute.capture_execution()
     if cwd is None:
         cwd = path.join(self.path, self.prefix)
     if not path.exists(cwd):
         raise error.general('cvs path needs to exist: %s' % (cwd))
     cmd = [self.cvs, '-z', '9', '-q'] + args
     log.output('cmd: (%s) %s' % (str(cwd), ' '.join(cmd)))
     exit_code, proc, output = e.spawn(cmd, cwd = path.host(cwd))
     log.trace(output)
     if check:
         self._cvs_exit_code(cmd, exit_code, output)
     return exit_code, output
Example #56
 def writeotherfile(self, book_href, data):
     id = utf8_str(book_href)
     filepath = self.id_to_filepath.get(id, None)
     if filepath is None:
         raise WrapperException('book href does not exist')
     if id in PROTECTED_FILES:
         raise WrapperException('Attempt to modify protected file')
     filepath = os.path.join(self.outdir, filepath)
     base = os.path.dirname(filepath)
     if not path.exists(base):
         os.makedirs(pathof(base))
     with open(pathof(filepath), 'wb') as fp:
         fp.write(data)
     self.modified[id] = 'file'
Example #57
 def load(self, name):
     names = self.expand(name).split(':')
     for n in names:
         if path.exists(n):
             try:
                 mc = open(path.host(n), 'r')
                 macros = self.parse(mc)
                 mc.close()
                 self.files += [n]
                 return
             except IOError as err:
                 pass
     raise error.general('opening macro file: %s' % \
                             (path.host(self.expand(name))))
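This load() example (like Examples #46 and #48) implements a "first readable file on a colon-separated search path wins" pattern. A standalone sketch of that pattern with os.path, leaving out the RSB macro expansion:

import os.path

def load_first(names, parse):
    # Try each candidate in order and parse the first one that exists.
    for n in names.split(':'):
        if os.path.exists(n):
            with open(n, 'r') as mc:
                return parse(mc)
    raise IOError('no macro file found: %s' % (names))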