def _hash_check(file_, absfile, macros, remove=True):
    """Verify the checksum of a downloaded file.

    Looks up the expected '<algorithm> <digest>' pair for 'file_' in the
    sources database and compares it against the file's actual digest,
    accepting either a hex or a base64 encoding of the digest.

    file_   : file name key used to look up the recorded hash (lower cased).
    absfile : path of the file on disk to be hashed.
    macros  : macros passed to the sources hash lookup.
    remove  : delete the file when the checksum does not match.

    Returns True when the checksum matches (or no hash is recorded and this
    is not a released RSB); False when the check failed.
    Raises error.internal/error.general on malformed or insecure hashes.
    """
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        # Recorded format is '<algorithm> <digest>'.
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms only exists on some Python versions;
            # fall back to a fixed list when it is absent.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = [
                'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
            ]
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' %
                                (file_, hash[0]))
        # Weak digests are rejected outright.
        if hash[0] in ['md5', 'sha1']:
            raise error.general('hash: %s: insecure: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            # A read failure is a checksum failure, not a fatal error.
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error: log, close the file and re-raise.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        hash_hex = hasher.hexdigest()
        hash_base64 = base64.b64encode(hasher.digest()).decode('utf-8')
        log.output('checksums: %s: (hex: %s) (b64: %s) => %s' %
                   (file_, hash_hex, hash_base64, hash[1]))
        # The recorded digest may be in either encoding; both must miss
        # for the check to fail.
        if hash_hex != hash[1] and hash_base64 != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # Remove the corrupt download so a retry fetches it again.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' %
                                        (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No recorded hash: fatal on a released RSB, warning otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def patch_setup(self, package, args):
    """Set up the patches of a patch macro map.

    args[1] is the patch set name and the remaining arguments are the
    default patch options. Each patch macro is parsed into options and a
    URL, the patch is downloaded and a patch-apply script fragment is
    appended to the build script.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    default_opts = ' '.join(args)
    # 'setup' is a control key, not a patch entry.
    patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
    patches = []  # NOTE(review): never appended to; appears unused.
    for p in patch_keys:
        pm = self.macros.get(p, globals = False, maps = _map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general('patch URL not found: %s' % (' '.join(opts)))
    #
    # Look for --rsb-file as an option we use as a local file name.
    # This can be used if a URL has no reasonable file name the
    # download URL parser can figure out.
    #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    file_override = os_[1]
            # RSB-internal options are not passed to the patch command.
            opts = [o for o in opts if not o.startswith('--rsb-')]
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config, self.opts,
                                   file_override)
        #
        # Download the patch
        #
        download.get_file(patch['url'], patch['local'], self.opts, self.config)
        # Compressed patches are decompressed on the fly; plain ones are cat'ed.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script_build.append(self.config.expand(patch['script']))
def source(self, name):
    """Return the list of sources for 'name'.

    Merge in any macro defined sources as these may be overridden by user
    loaded macros. Each source is downloaded and given an unpack script.
    """
    _map = 'source-%s' % (name)
    # Drop the 'setup' control key; everything else is a source entry.
    src_keys = [k for k in self.macros.map_keys(_map) if k != 'setup']
    if not src_keys:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for key in src_keys:
        entry = self.macros.get(key, globals = False, maps = _map)
        if entry is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (key, name, _map))
        url = self.config.expand(entry[2])
        src = download.parse_url(url, '_sourcedir', self.config, self.opts)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if 'symlink' in src:
            sname = name.replace('-', '_')
            script = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], sname)
        elif 'compressed' in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            script = '%s %s' % (src['compressed'], src['local'])
            if src['compressed-type'] != 'zip':
                script += ' | %{__tar_extract} -'
        else:
            script = '%%{__tar_extract} %s' % (src['local'])
        src['script'] = script
        srcs.append(src)
    return srcs
def source(self, name):
    """Return the list of sources for 'name', downloading each one and
    attaching the script used to unpack it."""
    #
    # Return the list of sources. Merge in any macro defined sources as
    # these may be overridden by user loaded macros.
    #
    _map = "source-%s" % (name)
    src_keys = self.macros.map_keys(_map)
    if len(src_keys) == 0:
        raise error.general("no source set: %s (%s)" % (name, _map))
    srcs = []
    for s in src_keys:
        sm = self.macros.get(s, globals=False, maps=_map)
        if sm is None:
            raise error.internal("source macro not found: %s in %s (%s)" % (s, name, _map))
        # sm is (type, attribute, value); the value holds the URL.
        url = self.config.expand(sm[2])
        src = download.parse_url(url, "_sourcedir", self.config, self.opts)
        download.get_file(src["url"], src["local"], self.opts, self.config)
        if "symlink" in src:
            # NOTE(review): uses 'name' directly in the shell variable; other
            # variants map '-' to '_' first — confirm which is intended.
            src["script"] = "%%{__ln_s} %s ${source_dir_%s}" % (src["symlink"], name)
        elif "compressed" in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            src["script"] = "%s %s" % (src["compressed"], src["local"])
            if src["compressed-type"] != "zip":
                src["script"] += " | %{__tar_extract} -"
        else:
            src["script"] = "%%{__tar_extract} %s" % (src["local"])
        srcs += [src]
    return srcs
def _pkgconfig_check(self, test):
    """Evaluate a pkg-config check expression.

    test: either 'pkg' or 'pkg op version' (string or pre-split list).
    Returns '1' when the package satisfies the check, else '0'.
    """
    # Hack to by pass pkgconfig checks when just wanting to download the
    # source.
    if self.macros['_dry_run'] == '1' and self.macros['with_download'] == '1':
        return '0'
    ok = False
    if type(test) == str:
        test = test.split()
    # pkg-config is only consulted for native builds unless cross
    # compiling with pkg-config is explicitly enabled.
    if not self._cross_compile() or self.pkgconfig_crosscompile:
        try:
            pkg = pkgconfig.package(test[0],
                                    prefix = self.pkgconfig_prefix,
                                    output = self._output,
                                    src = log.trace)
            # Valid forms: just the package, or package + operator + version.
            if len(test) != 1 and len(test) != 3:
                self._error('malformed check: %s' % (' '.join(test)))
            else:
                op = '>='
                ver = '0'
                if len(test) == 3:
                    op = test[1]
                    ver = self.macros.expand(test[2])
                ok = pkg.check(op, ver)
        except pkgconfig.error as pe:
            self._error('pkgconfig: check: %s' % (pe))
        except:
            raise error.internal('pkgconfig failure')
    if ok:
        return '1'
    return '0'
def _pkgconfig_check(self, test): # Hack to by pass pkgconfig checks when just wanting to download the # source. if self.macros['_dry_run'] == '1' and self.macros[ 'with_download'] == '1': return '0' ok = False if type(test) == str: test = test.split() if not self._cross_compile() or self.pkgconfig_crosscompile: try: pkg = pkgconfig.package(test[0], prefix=self.pkgconfig_prefix, output=self._output, src=log.trace) if len(test) != 1 and len(test) != 3: self._error('malformed check: %s' % (' '.join(test))) else: op = '>=' ver = '0' if len(test) == 3: op = test[1] ver = self.macros.expand(test[2]) ok = pkg.check(op, ver) except pkgconfig.error as pe: self._error('pkgconfig: check: %s' % (pe)) except: raise error.internal('pkgconfig failure') if ok: return '1' return '0'
def source(self, name):
    """Return the list of sources for 'name'.

    Merge in any macro defined sources as these may be overridden by user
    loaded macros. Downloads each source and attaches its unpack script.
    """
    _map = 'source-%s' % (name)
    src_keys = self.macros.map_keys(_map)
    if not src_keys:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for key in src_keys:
        entry = self.macros.get(key, globals = False, maps = _map)
        if entry is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (key, name, _map))
        url = self.config.expand(entry[2])
        src = download.parse_url(url, '_sourcedir', self.config, self.opts)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if 'symlink' in src:
            script = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'], name)
        elif 'compressed' in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            script = '%s %s' % (src['compressed'], src['local'])
            if src['compressed-type'] != 'zip':
                script += ' | %{__tar_extract} -'
        else:
            script = '%%{__tar_extract} %s' % (src['local'])
        src['script'] = script
        srcs.append(src)
    return srcs
def jobs(self, cpus):
    """Return the number of build jobs from the 'jobs' option.

    cpus is the host CPU count. The option may be 'none' (0 jobs, i.e. no
    parallelism), 'max' (all cpus), 'half', an integer count, or a float
    scale factor applied to the cpu count.
    """
    cpus = int(cpus)
    jobs_opt = self.opts['jobs']
    if jobs_opt == 'none':
        # Explicit request for no parallel jobs; do not clamp to 1.
        return 0
    if jobs_opt == 'max':
        pass
    elif jobs_opt == 'half':
        # fix: integer division; '/' produced a float under Python 3
        cpus = cpus // 2
    else:
        ok = False
        try:
            cpus = int(jobs_opt)
            ok = True
        except:
            pass
        if not ok:
            try:
                # A float is a scale factor of the cpu count.
                cpus = int(float(jobs_opt) * cpus)
                ok = True
            except:
                pass
        if not ok:
            raise error.internal('bad jobs option: %s' % (jobs_opt))
    if cpus <= 0:
        # fix: the original assigned 'cpu = 1', leaving this guard dead
        cpus = 1
    return cpus
def jobs(self, cpus):
    """Return the number of build jobs from the 'jobs' option.

    cpus is the host CPU count. The option may be 'none' (0 jobs, i.e. no
    parallelism), 'max' (all cpus), 'half', an integer count, or a float
    scale factor applied to the cpu count.
    """
    cpus = int(cpus)
    jobs_opt = self.opts['jobs']
    if jobs_opt == 'none':
        # Explicit request for no parallel jobs; do not clamp to 1.
        return 0
    if jobs_opt == 'max':
        pass
    elif jobs_opt == 'half':
        # fix: integer division; '/' produced a float under Python 3
        cpus = cpus // 2
    else:
        ok = False
        try:
            cpus = int(jobs_opt)
            ok = True
        except:
            pass
        if not ok:
            try:
                # A float is a scale factor of the cpu count.
                cpus = int(float(jobs_opt) * cpus)
                ok = True
            except:
                pass
        if not ok:
            raise error.internal('bad jobs option: %s' % (jobs_opt))
    if cpus <= 0:
        # fix: the original assigned 'cpu = 1', leaving this guard dead
        cpus = 1
    return cpus
def _hash_check(file_, absfile, macros, remove=True): failed = False hash = sources.get_hash(file_.lower(), macros) if hash is not None: hash = hash.split() if len(hash) != 2: raise error.internal('invalid hash format: %s' % (file_)) try: hashlib_algorithms = hashlib.algorithms except: hashlib_algorithms = [ 'md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512' ] if hash[0] not in hashlib_algorithms: raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0])) hasher = None _in = None try: hasher = hashlib.new(hash[0]) _in = open(path.host(absfile), 'rb') hasher.update(_in.read()) except IOError, err: log.notice('hash: %s: read error: %s' % (file_, str(err))) failed = True except:
def find_arg(self, arg):
    """Find 'arg' among the command line arguments.

    Returns the ['name', 'value', ...] split of the first matching
    argument, or None when the option is not present. Raises
    error.internal for an option not declared in optargs.
    """
    if self.optargs is None or arg not in self.optargs:
        raise error.internal('bad arg: %s' % (arg))
    for candidate in self.args:
        parts = candidate.split('=')
        if parts[0].startswith(arg):
            return parts
    return None
def find_arg(self, arg):
    """Return the '='-split of the first command line argument whose name
    starts with 'arg', or None; 'arg' must be a declared option."""
    if self.optargs is None or arg not in self.optargs:
        raise error.internal('bad arg: %s' % (arg))
    splits = (a.split('=') for a in self.args)
    return next((sa for sa in splits if sa[0].startswith(arg)), None)
def patch_setup(self, package, args):
    """Set up the patches of a patch macro map.

    args[1] is the patch set name, the rest are default patch options.
    Each patch macro is split into options and a URL, the patch is
    downloaded and an apply-script fragment is appended to self.script.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    default_opts = ' '.join(args)
    # 'setup' is a control key, not a patch entry.
    patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
    patches = []  # NOTE(review): never appended to; appears unused.
    for p in patch_keys:
        pm = self.macros.get(p, globals = False, maps = _map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            # NOTE(review): reports the default options (args), not this
            # macro's own options — confirm the intended message content.
            raise error.general('patch URL not found: %s' % (' '.join(args)))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(args)))
                    file_override = os_[1]
            # RSB-internal options are not passed to the patch command.
            opts = [o for o in opts if not o.startswith('--rsb-')]
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config, self.opts,
                                   file_override)
        #
        # Download the patch
        #
        download.get_file(patch['url'], patch['local'], self.opts, self.config)
        # Compressed patches are decompressed on the fly; plain ones cat'ed.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script.append(self.config.expand(patch['script']))
def source(self, name):
    """Return the list of sources for 'name'.

    Merge in any macro defined sources as these may be overridden by user
    loaded macros. Each source macro may carry leading options (including
    --rsb-file=<name> to force the local file name) before its URL. The
    source is downloaded and its unpack script attached.
    """
    _map = 'source-%s' % (name)
    src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
    if len(src_keys) == 0:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for s in src_keys:
        sm = self.macros.get(s, globals = False, maps = _map)
        if sm is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (s, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for sp in sm[2].split():
            if len(url) == 0 and sp[0] == '-':
                opts += [sp]
            else:
                url += [sp]
        if len(url) == 0:
            # fix: message referenced undefined name 'args' (NameError);
            # report the options that were parsed instead.
            raise error.general('source URL not found: %s' % (' '.join(opts)))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        # fix: 'args' was undefined here too
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(opts)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % (' '.join(opts)))
                    file_override = os_[1]
            # RSB-internal options are consumed here, not passed on.
            opts = [o for o in opts if not o.startswith('--rsb-')]
        url = self.config.expand(' '.join(url))
        src = download.parse_url(url, '_sourcedir', self.config, self.opts,
                                 file_override)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if 'symlink' in src:
            sname = name.replace('-', '_')
            src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % (src['symlink'],
                                                                sname)
        elif 'compressed' in src:
            #
            # Zip files unpack as well so do not use tar.
            #
            src['script'] = '%s %s' % (src['compressed'], src['local'])
            if src['compressed-type'] != 'zip':
                src['script'] += ' | %{__tar_extract} -'
        else:
            src['script'] = '%%{__tar_extract} %s' % (src['local'])
        srcs += [src]
    return srcs
def row(cols, data, indent=0, marker='|', linesep=os.linesep):
    """Format one table row.

    cols is the list of column widths and data the matching cell values.
    Interior cell separators use 'marker'; the row is bracketed by '|'.
    Returns the rendered row terminated by 'linesep'.
    """
    if len(cols) != len(data):
        raise error.internal('data size (%d) does not' \
                             ' match columns (%d)' % (len(data), len(cols)))
    last = len(cols) - 1
    pieces = [' ' * indent + '|']
    for i, (width, value) in enumerate(zip(cols, data)):
        # The final column always closes with '|' regardless of marker.
        end = marker if i < last else '|'
        pieces.append('%-*s%s' % (width - 1, str(value), end))
    return ''.join(pieces) + linesep
def _hash_check(file_, absfile, macros, remove = True):
    """Verify the checksum of a downloaded file.

    Looks up the expected '<algorithm> <digest>' pair for 'file_' in the
    sources database and compares it against the hex digest of 'absfile'.

    remove: delete the file when the checksum does not match.
    Returns True when the check passed (or no hash is recorded and this is
    not a released RSB); False otherwise.
    """
    failed = False
    hash = sources.get_hash(file_.lower(), macros)
    if hash is not None:
        # Recorded format is '<algorithm> <digest>'.
        hash = hash.split()
        if len(hash) != 2:
            raise error.internal('invalid hash format: %s' % (file_))
        try:
            # hashlib.algorithms only exists on some Python versions;
            # fall back to a fixed list when it is absent.
            hashlib_algorithms = hashlib.algorithms
        except:
            hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']
        if hash[0] not in hashlib_algorithms:
            raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0]))
        hasher = None
        _in = None
        try:
            hasher = hashlib.new(hash[0])
            _in = open(path.host(absfile), 'rb')
            hasher.update(_in.read())
        except IOError as err:
            # A read failure counts as a checksum failure.
            log.notice('hash: %s: read error: %s' % (file_, str(err)))
            failed = True
        except:
            # Unexpected error: log, close and re-raise.
            msg = 'hash: %s: error' % (file_)
            log.stderr(msg)
            log.notice(msg)
            if _in is not None:
                _in.close()
            raise
        if _in is not None:
            _in.close()
        log.output('checksums: %s: %s => %s' % (file_, hasher.hexdigest(), hash[1]))
        if hasher.hexdigest() != hash[1]:
            log.warning('checksum error: %s' % (file_))
            failed = True
        if failed and remove:
            # Remove the corrupt download so a retry fetches it again.
            log.warning('removing: %s' % (file_))
            if path.exists(absfile):
                try:
                    os.remove(path.host(absfile))
                except IOError as err:
                    raise error.general('hash: %s: remove: %s' % (absfile, str(err)))
                except:
                    raise error.general('hash: %s: remove error' % (file_))
        if hasher is not None:
            del hasher
    else:
        # No recorded hash: fatal on a released RSB, warning otherwise.
        if version.released():
            raise error.general('%s: no hash found in released RSB' % (file_))
        log.warning('%s: no hash found' % (file_))
    return not failed
def jobs(self, cpus):
    """Return the number of build jobs.

    cpus is the host CPU count. The --jobs option (or the %{jobs} default
    when the option is 'default') may be 'none' (0 jobs), 'max', 'half',
    an integer count, or a float scale factor of the cpu count.
    """
    try:
        cpus = int(cpus)
    except:
        raise error.general('invalid host cpu value')
    # Track an explicit 'none' so the final clamp does not override it.
    no_jobs = False
    opt_jobs = self.opts['jobs']
    if opt_jobs == 'default':
        _jobs = self.defaults.get_value('jobs')
        if _jobs is not None:
            if _jobs == 'none':
                cpus = 0
                no_jobs = True
            elif _jobs == 'max':
                pass
            elif _jobs == 'half':
                # fix: integer division; '/' produced a float under Python 3
                cpus = cpus // 2
            else:
                try:
                    cpus = int(_jobs)
                except:
                    raise error.general('invalid %%{jobs} value: %s' % (_jobs))
        else:
            opt_jobs = 'max'
    if opt_jobs != 'default':
        if opt_jobs == 'none':
            cpus = 0
            no_jobs = True
        elif opt_jobs == 'max':
            pass
        elif opt_jobs == 'half':
            cpus = cpus // 2
        else:
            ok = False
            try:
                cpus = int(opt_jobs)
                ok = True
            except:
                pass
            if not ok:
                try:
                    # A float is a scale factor of the cpu count.
                    cpus = int(float(opt_jobs) * cpus)
                    ok = True
                except:
                    pass
            if not ok:
                raise error.internal('bad jobs option: %s' % (opt_jobs))
    if not no_jobs and cpus <= 0:
        # fix: the original assigned 'cpu = 1', leaving this guard dead
        cpus = 1
    return cpus
def jobs(self, cpus):
    """Return the number of build jobs.

    cpus is the host CPU count. The --jobs option (or the %{jobs} default
    when the option is 'default') may be 'none' (0 jobs), 'max', 'half',
    an integer count, or a float scale factor of the cpu count.
    """
    try:
        cpus = int(cpus)
    except:
        raise error.general('invalid host cpu value')
    # Track an explicit 'none' so the final clamp does not override it.
    no_jobs = False
    opt_jobs = self.opts['jobs']
    if opt_jobs == 'default':
        _jobs = self.defaults.get_value('jobs')
        if _jobs is not None:
            if _jobs == 'none':
                cpus = 0
                no_jobs = True
            elif _jobs == 'max':
                pass
            elif _jobs == 'half':
                # fix: integer division; '/' produced a float under Python 3
                cpus = cpus // 2
            else:
                try:
                    cpus = int(_jobs)
                except:
                    raise error.general('invalid %%{jobs} value: %s' % (_jobs))
        else:
            opt_jobs = 'max'
    if opt_jobs != 'default':
        if opt_jobs == 'none':
            cpus = 0
            no_jobs = True
        elif opt_jobs == 'max':
            pass
        elif opt_jobs == 'half':
            cpus = cpus // 2
        else:
            ok = False
            try:
                cpus = int(opt_jobs)
                ok = True
            except:
                pass
            if not ok:
                try:
                    # A float is a scale factor of the cpu count.
                    cpus = int(float(opt_jobs) * cpus)
                    ok = True
                except:
                    pass
            if not ok:
                raise error.internal('bad jobs option: %s' % (opt_jobs))
    if not no_jobs and cpus <= 0:
        # fix: the original assigned 'cpu = 1', leaving this guard dead
        cpus = 1
    return cpus
def patch_setup(self, package, args):
    """Set up the patches of a patch macro map.

    args[1] is the patch set name, the rest are default patch options.
    Each patch macro is split into options and a URL, the patch is
    downloaded and an apply-script fragment is appended to self.script.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    default_opts = ' '.join(args)
    # 'setup' is a control key, not a patch entry.
    patch_keys = [p for p in self.macros.map_keys(_map) if p != 'setup']
    patches = []  # NOTE(review): never appended to; appears unused.
    for p in patch_keys:
        pm = self.macros.get(p, globals=False, maps=_map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general('patch URL not found: %s' % (' '.join(args)))
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config, self.opts)
        #
        # If not in the source builder package check the source directory.
        #
        # NOTE(review): this re-parse uses the same '_patchdir' arguments as
        # above so it yields the same result — confirm whether a different
        # directory (e.g. '_sourcedir') was intended.
        if not path.exists(patch['local']):
            patch = download.parse_url(url, '_patchdir', self.config, self.opts)
        download.get_file(patch['url'], patch['local'], self.opts, self.config)
        # Compressed patches are decompressed on the fly; plain ones cat'ed.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script.append(self.config.expand(patch['script']))
def patch_setup(self, package, args):
    """Set up the patches of a patch macro map.

    args[1] is the patch set name, the rest are default patch options.
    Each patch macro is split into options and a URL, the patch is
    downloaded and an apply-script fragment is appended to self.script.
    """
    name = args[1]
    args = args[2:]
    _map = 'patch-%s' % (name)
    default_opts = ' '.join(args)
    patch_keys = self.macros.map_keys(_map)
    patches = []  # NOTE(review): never appended to; appears unused.
    for p in patch_keys:
        pm = self.macros.get(p, globals = False, maps = _map)
        if pm is None:
            raise error.internal('patch macro not found: %s in %s (%s)' % \
                                 (p, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == '-':
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general('patch URL not found: %s' % (' '.join(args)))
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = ' '.join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(' '.join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, '_patchdir', self.config, self.opts)
        #
        # If not in the source builder package check the source directory.
        #
        # NOTE(review): this re-parse repeats the identical '_patchdir' call
        # above and so cannot find a different location — confirm intent.
        if not path.exists(patch['local']):
            patch = download.parse_url(url, '_patchdir', self.config, self.opts)
        download.get_file(patch['url'], patch['local'], self.opts, self.config)
        # Compressed patches are decompressed on the fly; plain ones cat'ed.
        if 'compressed' in patch:
            patch['script'] = patch['compressed'] + ' ' + patch['local']
        else:
            patch['script'] = '%{__cat} ' + patch['local']
        patch['script'] += ' | %%{__patch} %s' % (opts)
        self.script.append(self.config.expand(patch['script']))
def patch_setup(self, package, args):
    """Set up the patches of a patch macro map.

    args[1] is the patch set name, the rest are default patch options.
    Each patch macro is split into options and a URL, the patch is
    downloaded and an apply-script fragment is appended to self.script.
    """
    name = args[1]
    args = args[2:]
    _map = "patch-%s" % (name)
    default_opts = " ".join(args)
    patch_keys = self.macros.map_keys(_map)
    patches = []  # NOTE(review): never appended to; appears unused.
    for p in patch_keys:
        pm = self.macros.get(p, globals=False, maps=_map)
        if pm is None:
            raise error.internal("patch macro not found: %s in %s (%s)" % (p, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for pp in pm[2].split():
            if len(url) == 0 and pp[0] == "-":
                opts += [pp]
            else:
                url += [pp]
        if len(url) == 0:
            raise error.general("patch URL not found: %s" % (" ".join(args)))
        if len(opts) == 0:
            opts = default_opts
        else:
            opts = " ".join(opts)
        opts = self.config.expand(opts)
        url = self.config.expand(" ".join(url))
        #
        # Parse the URL first in the source builder's patch directory.
        #
        patch = download.parse_url(url, "_patchdir", self.config, self.opts)
        #
        # If not in the source builder package check the source directory.
        #
        # NOTE(review): this re-parse repeats the identical '_patchdir' call
        # above and so cannot find a different location — confirm intent.
        if not path.exists(patch["local"]):
            patch = download.parse_url(url, "_patchdir", self.config, self.opts)
        download.get_file(patch["url"], patch["local"], self.opts, self.config)
        # Compressed patches are decompressed on the fly; plain ones cat'ed.
        if "compressed" in patch:
            patch["script"] = patch["compressed"] + " " + patch["local"]
        else:
            patch["script"] = "%{__cat} " + patch["local"]
        patch["script"] += " | %%{__patch} %s" % (opts)
        self.script.append(self.config.expand(patch["script"]))
def _pkgconfig_flags(self, package, flags): pkg_flags = None if not self._cross_compile() or self.pkgconfig_crosscompile: try: pkg = pkgconfig.package(package, prefix = self.pkgconfig_prefix, output = self._output, src = log.trace) pkg_flags = pkg.get(flags) if pkg_flags and self.pkgconfig_filter_flags: fflags = [] for f in pkg_flags.split(): if not f.startswith('-W'): fflags += [f] pkg_flags = ' '.join(fflags) log.trace('pkgconfig: %s: %s' % (flags, pkg_flags)) except pkgconfig.error as pe: self._error('pkgconfig: %s: %s' % (flags, pe)) except: raise error.internal('pkgconfig failure') if pkg_flags is None: pkg_flags = '' return pkg_flags
def _hash_check(file_, absfile, macros, remove = True): failed = False hash = sources.get_hash(file_.lower(), macros) if hash is not None: hash = hash.split() if len(hash) != 2: raise error.internal('invalid hash format: %s' % (file_)) try: hashlib_algorithms = hashlib.algorithms except: hashlib_algorithms = ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'] if hash[0] not in hashlib_algorithms: raise error.general('invalid hash algorithm for %s: %s' % (file_, hash[0])) hasher = None _in = None try: hasher = hashlib.new(hash[0]) _in = open(path.host(absfile), 'rb') hasher.update(_in.read()) except IOError, err: log.notice('hash: %s: read error: %s' % (file_, str(err))) failed = True except:
def _pkgconfig_flags(self, package, flags):
    """Return the pkg-config 'flags' (e.g. cflags, libs) for 'package'.

    Warning flags (-W...) are stripped when flag filtering is enabled.
    Returns an empty string when pkg-config is unavailable or fails.
    """
    pkg_flags = None
    # pkg-config is only consulted for native builds unless cross
    # compiling with pkg-config is explicitly enabled.
    if not self._cross_compile() or self.pkgconfig_crosscompile:
        try:
            pkg = pkgconfig.package(package,
                                    prefix=self.pkgconfig_prefix,
                                    output=self._output,
                                    src=log.trace)
            pkg_flags = pkg.get(flags)
            if pkg_flags and self.pkgconfig_filter_flags:
                # Drop compiler warning options from the reported flags.
                fflags = []
                for f in pkg_flags.split():
                    if not f.startswith('-W'):
                        fflags += [f]
                pkg_flags = ' '.join(fflags)
            log.trace('pkgconfig: %s: %s' % (flags, pkg_flags))
        except pkgconfig.error as pe:
            self._error('pkgconfig: %s: %s' % (flags, pe))
        except:
            raise error.internal('pkgconfig failure')
    if pkg_flags is None:
        pkg_flags = ''
    return pkg_flags
prefix=self.pkgconfig_prefix, output=self._output, src=log.trace) if len(test) != 1 and len(test) != 3: self._error('malformed check: %s' % (' '.join(test))) else: op = '>=' ver = '0' if len(test) == 3: op = test[1] ver = self.macros.expand(test[2]) ok = pkg.check(op, ver) except pkgconfig.error, pe: self._error('pkgconfig: check: %s' % (pe)) except: raise error.internal('pkgconfig failure') if ok: return '1' return '0' def _pkgconfig_flags(self, package, flags): pkg_flags = None if not self._cross_compile() or self.pkgconfig_crosscompile: try: pkg = pkgconfig.package(package, prefix=self.pkgconfig_prefix, output=self._output, src=log.trace) pkg_flags = pkg.get(flags) if pkg_flags and self.pkgconfig_filter_flags: fflags = []
prefix = self.pkgconfig_prefix, output = self._output, src = log.trace) if len(test) != 1 and len(test) != 3: self._error('malformed check: %s' % (' '.join(test))) else: op = '>=' ver = '0' if len(test) == 3: op = test[1] ver = self.macros.expand(test[2]) ok = pkg.check(op, ver) except pkgconfig.error, pe: self._error('pkgconfig: check: %s' % (pe)) except: raise error.internal('pkgconfig failure') if ok: return '1' return '0' def _pkgconfig_flags(self, package, flags): pkg_flags = None if not self._cross_compile() or self.pkgconfig_crosscompile: try: pkg = pkgconfig.package(package, prefix = self.pkgconfig_prefix, output = self._output, src = log.trace) pkg_flags = pkg.get(flags) if pkg_flags and self.pkgconfig_filter_flags: fflags = []
def _process_block(self, results, directive, info, data):
    """Fallback handler reached for a block directive with no specific
    processor; always raises error.internal naming the block type."""
    # fix: the message read 'known block type' but this path is only
    # reached for an unrecognised block type.
    raise error.internal('unknown block type: %s' % (results[0]))
def get_arg(self, arg):
    """Return the parsed value of the declared option 'arg'.

    Raises error.internal when 'arg' is not a declared option.
    """
    known = self.optargs is not None and arg in self.optargs
    if not known:
        raise error.internal('bad arg: %s' % (arg))
    return self.parse_args(arg)
def parse(self, lines):
    """Parse macro definition lines into self.macros.

    A character state machine handling '[map]' sections, '%include'
    directives and "key: attrib, attrib, 'value'" entries with single or
    multi-line quoted values. On a parse error the previous macros are
    restored before raising.
    """

    def _clean(l):
        # Strip a comment and any line termination, then whitespace.
        if "#" in l:
            l = l[: l.index("#")]
        if "\r" in l:
            # fix: was l.index("r") which truncated at the first letter 'r'
            l = l[: l.index("\r")]
        if "\n" in l:
            l = l[: l.index("\n")]
        return l.strip()

    trace_me = False
    if trace_me:
        print("[[[[]]]] parsing macros")
    # Saved so a malformed file cannot leave the macros half updated.
    orig_macros = copy.copy(self.macros)
    map = "global"
    lc = 0
    state = "key"
    token = ""
    macro = []
    for l in lines:
        lc += 1
        # print 'l:%s' % (l[:-1])
        if len(l) == 0:
            continue
        l_remaining = l
        for c in l:
            if trace_me:
                print(']]]]]]]] c:%s(%d) s:%s t:"%s" m:%r M:%s' %
                      (c, ord(c), state, token, macro, map))
            l_remaining = l_remaining[1:]
            # fix: all string comparisons below used 'is' (identity) which
            # is unreliable for str values; replaced with '=='.
            if c == "#" and not state.startswith("value"):
                break
            if c == "\n" or c == "\r":
                if not (state == "key" and len(token) == 0) and \
                   not state.startswith("value-multiline"):
                    self.macros = orig_macros
                    raise error.general("malformed macro line:%d: %s" % (lc, l))
            if state == "key":
                if c not in string.whitespace:
                    if c == "[":
                        state = "map"
                    elif c == "%":
                        state = "directive"
                    elif c == ":":
                        macro += [token]
                        token = ""
                        state = "attribs"
                    elif c == "#":
                        break
                    else:
                        token += c
            elif state == "map":
                if c == "]":
                    if token not in self.macros:
                        self.macros[token] = {}
                    map = token
                    token = ""
                    state = "key"
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    self.macros = orig_macros
                    raise error.general("invalid macro map:%d: %s" % (lc, l))
            elif state == "directive":
                if c in string.whitespace:
                    if token == "include":
                        self.load(_clean(l_remaining))
                        token = ""
                        state = "key"
                        break
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    self.macros = orig_macros
                    raise error.general("invalid macro directive:%d: %s" % (lc, l))
            elif state == "include":
                # NOTE(review): this state is never entered by any
                # transition above; kept for behavioral parity.
                # fix: 'c is string.whitespace' compared a char to the
                # module attribute and was always False.
                if c in string.whitespace:
                    if token == "include":
                        state = "include"
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    self.macros = orig_macros
                    raise error.general("invalid macro directive:%d: %s" % (lc, l))
            elif state == "attribs":
                if c not in string.whitespace:
                    if c == ",":
                        macro += [token]
                        token = ""
                        # key + two attributes collected: the value follows.
                        if len(macro) == 3:
                            state = "value-start"
                    else:
                        token += c
            elif state == "value-start":
                if c == "'":
                    state = "value-line-start"
            elif state == "value-line-start":
                if c == "'":
                    state = "value-multiline-start"
                else:
                    state = "value-line"
                    token += c
            elif state == "value-multiline-start":
                if c == "'":
                    state = "value-multiline"
                else:
                    # '' was an empty single-line value.
                    macro += [token]
                    state = "macro"
            elif state == "value-line":
                if c == "'":
                    macro += [token]
                    state = "macro"
                else:
                    token += c
            elif state == "value-multiline":
                if c == "'":
                    state = "value-multiline-end"
                else:
                    token += c
            elif state == "value-multiline-end":
                if c == "'":
                    state = "value-multiline-end-end"
                else:
                    state = "value-multiline"
                    token += "'" + c
            elif state == "value-multiline-end-end":
                if c == "'":
                    macro += [token]
                    state = "macro"
                else:
                    state = "value-multiline"
                    token += "''" + c
            else:
                self.macros = orig_macros
                raise error.internal("bad state: %s" % (state))
            if state == "macro":
                # A complete macro: commit it and reset for the next key.
                self.macros[map][macro[0].lower()] = (macro[1], macro[2], macro[3])
                macro = []
                token = ""
                state = "key"
def source(self, name, strip_components, download_only):
    """Return the list of sources for 'name'.

    Merge in any macro defined sources as these may be overridden by user
    loaded macros. Each source macro may carry leading options (including
    --rsb-file=<name> to force the local file name) before its URL.

    strip_components: passed to tar when unpacking.
    download_only   : skip generating the unpack scripts.
    """
    _map = 'source-%s' % (name)
    src_keys = [s for s in self.macros.map_keys(_map) if s != 'setup']
    if len(src_keys) == 0:
        raise error.general('no source set: %s (%s)' % (name, _map))
    srcs = []
    for s in src_keys:
        sm = self.macros.get(s, globals=False, maps=_map)
        if sm is None:
            raise error.internal('source macro not found: %s in %s (%s)' % \
                                 (s, name, _map))
        # Split the macro body into leading options ('-...') and the URL.
        opts = []
        url = []
        for sp in sm[2].split():
            if len(url) == 0 and sp[0] == '-':
                opts += [sp]
            else:
                url += [sp]
        if len(url) == 0:
            # fix: message referenced undefined name 'args' (NameError);
            # report the options that were parsed instead.
            raise error.general('source URL not found: %s' % (' '.join(opts)))
        #
        # Look for --rsb-file as an option we use as a local file name.
        # This can be used if a URL has no reasonable file name the
        # download URL parser can figure out.
        #
        file_override = None
        if len(opts) > 0:
            for o in opts:
                if o.startswith('--rsb-file'):
                    os_ = o.split('=')
                    if len(os_) != 2:
                        # fix: 'args' was undefined here too
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    if os_[0] != '--rsb-file':
                        raise error.general('invalid --rsb-file option: %s' % \
                                            (' '.join(opts)))
                    file_override = os_[1]
            # RSB-internal options are consumed here, not passed on.
            opts = [o for o in opts if not o.startswith('--rsb-')]
        url = self.config.expand(' '.join(url))
        src = download.parse_url(url, '_sourcedir', self.config, self.opts,
                                 file_override)
        download.get_file(src['url'], src['local'], self.opts, self.config)
        if not download_only:
            if strip_components > 0:
                tar_extract = '%%{__tar_extract} --strip-components %d' % \
                              (strip_components)
            else:
                tar_extract = '%{__tar_extract}'
            if 'symlink' in src:
                sname = name.replace('-', '_')
                src['script'] = '%%{__ln_s} %s ${source_dir_%s}' % \
                                (src['symlink'], sname)
            elif 'compressed' in src:
                #
                # Zip files unpack as well so do not use tar.
                #
                src['script'] = '%s %s' % (src['compressed'], src['local'])
                if src['compressed-type'] != 'zip':
                    src['script'] += ' | %s -f -' % (tar_extract)
            else:
                src['script'] = '%s -f %s' % (tar_extract, src['local'])
        srcs += [src]
    return srcs
def _process_block(self, results, directive, info, data):
    """Fallback handler reached for a block directive with no specific
    processor; always raises error.internal naming the block type."""
    # fix: the message read 'known block type' but this path is only
    # reached for an unrecognised block type.
    raise error.internal('unknown block type: %s' % (results[0]))
def parse(self, lines):
    """Parse macro definition lines into self.macros.

    A character driven state machine. Input lines have the form:

        name: attrib1, attrib2, 'value'

    with '[map]' section headers, '%include file' directives, '#'
    comments and either single quoted one line values or triple
    quoted (''') multiline values.

    :param lines: iterable of text lines to parse.
    :raises error.general: a malformed line, map or directive.
    :raises error.internal: the state machine reaches a bad state.
    """

    def _clean(l):
        # Drop comments and line terminators then strip whitespace.
        if '#' in l:
            l = l[:l.index('#')]
        if '\r' in l:
            # Fix: this previously used l.index('r') which cut the
            # line at the first letter 'r' rather than at the
            # carriage return.
            l = l[:l.index('\r')]
        if '\n' in l:
            l = l[:l.index('\n')]
        return l.strip()

    trace_me = False
    if trace_me:
        print('[[[[]]]] parsing macros')
    macros = {'global': {}}
    map = 'global'
    lc = 0
    state = 'key'
    token = ''
    macro = []
    for l in lines:
        lc += 1
        #print 'l:%s' % (l[:-1])
        if len(l) == 0:
            continue
        l = self._unicode_to_str(l)
        l_remaining = l
        for c in l:
            if trace_me:
                print(']]]]]]]] c:%s(%d) s:%s t:"%s" m:%r M:%s' % \
                      (c, ord(c), state, token, macro, map))
            l_remaining = l_remaining[1:]
            # A comment outside of a value ends the line.
            if c == '#' and not state.startswith('value'):
                break
            if c == '\n' or c == '\r':
                # Only a blank key or a multiline value may span a
                # line ending.
                if not (state == 'key' and len(token) == 0) and \
                   not state.startswith('value-multiline'):
                    raise error.general('malformed macro line:%d: %s' % (lc, l))
            if state == 'key':
                if c not in string.whitespace:
                    if c == '[':
                        state = 'map'
                    elif c == '%':
                        state = 'directive'
                    elif c == ':':
                        macro += [token]
                        token = ''
                        state = 'attribs'
                    elif c == '#':
                        break
                    else:
                        token += c
            elif state == 'map':
                if c == ']':
                    if token not in macros:
                        macros[token] = {}
                    map = token
                    token = ''
                    state = 'key'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro map:%d: %s' % (lc, l))
            elif state == 'directive':
                if c in string.whitespace:
                    if token == 'include':
                        self.load(_clean(l_remaining))
                        token = ''
                        state = 'key'
                        break
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state == 'include':
                # NOTE(review): this state is never entered ('include'
                # is handled by the 'directive' state above) and the
                # 'c is string.whitespace' test is always False; kept
                # as-is to preserve behavior.
                if c is string.whitespace:
                    if token == 'include':
                        state = 'include'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state == 'attribs':
                if c not in string.whitespace:
                    if c == ',':
                        macro += [token]
                        token = ''
                        # Key plus two attributes seen; the value is
                        # next.
                        if len(macro) == 3:
                            state = 'value-start'
                    else:
                        token += c
            elif state == 'value-start':
                if c == "'":
                    state = 'value-line-start'
            elif state == 'value-line-start':
                if c == "'":
                    state = 'value-multiline-start'
                else:
                    state = 'value-line'
                    token += c
            elif state == 'value-multiline-start':
                if c == "'":
                    state = 'value-multiline'
                else:
                    # Two quotes then another character: an empty ''
                    # value.
                    macro += [token]
                    state = 'macro'
            elif state == 'value-line':
                if c == "'":
                    macro += [token]
                    state = 'macro'
                else:
                    token += c
            elif state == 'value-multiline':
                if c == "'":
                    state = 'value-multiline-end'
                else:
                    token += c
            elif state == 'value-multiline-end':
                if c == "'":
                    state = 'value-multiline-end-end'
                else:
                    # A lone quote inside a multiline value.
                    state = 'value-multiline'
                    token += "'" + c
            elif state == 'value-multiline-end-end':
                if c == "'":
                    macro += [token]
                    state = 'macro'
                else:
                    # Two quotes inside a multiline value.
                    state = 'value-multiline'
                    token += "''" + c
            else:
                raise error.internal('bad state: %s' % (state))
            if state == 'macro':
                # A macro is complete; record it in the current map.
                macros[map][self._unicode_to_str(macro[0].lower())] = \
                    (self._unicode_to_str(macro[1]),
                     self._unicode_to_str(macro[2]),
                     self._unicode_to_str(macro[3]))
                macro = []
                token = ''
                state = 'key'
    # Merge the parsed maps into the object's macros.
    for m in macros:
        if m not in self.macros:
            self.macros[m] = {}
        for mm in macros[m]:
            self.macros[m][mm] = macros[m][mm]
# NOTE(review): this fragment begins mid-method; the enclosing 'def'
# (a pkgconfig check returning "1"/"0") is not visible in this chunk,
# and it uses Python 2 only 'except X, e' syntax.
if not self._cross_compile() or self.pkgconfig_crosscompile:
    try:
        pkg = pkgconfig.package(test[0],
                                prefix=self.pkgconfig_prefix,
                                output=self._output,
                                src=log.trace)
        # A check is either a bare package name or a
        # package-operator-version triple.
        if len(test) != 1 and len(test) != 3:
            self._error("malformed check: %s" % (" ".join(test)))
        else:
            op = ">="
            ver = "0"
            if len(test) == 3:
                op = test[1]
                ver = self.macros.expand(test[2])
            ok = pkg.check(op, ver)
    # NOTE(review): Python 2 only except syntax; does not parse under
    # Python 3.
    except pkgconfig.error, pe:
        self._error("pkgconfig: check: %s" % (pe))
    except:
        raise error.internal("pkgconfig failure")
if ok:
    return "1"
return "0"

def _pkgconfig_flags(self, package, flags):
    # Query pkgconfig for a package's flags (e.g. cflags/libs) and,
    # when flag filtering is enabled, collect only flags that are not
    # '-f' or '-W' compiler specific options.
    # NOTE(review): this definition is truncated here; the remainder
    # is not visible in this chunk.
    pkg_flags = None
    if not self._cross_compile() or self.pkgconfig_crosscompile:
        try:
            pkg = pkgconfig.package(package,
                                    prefix=self.pkgconfig_prefix,
                                    output=self._output,
                                    src=log.trace)
            pkg_flags = pkg.get(flags)
            if pkg_flags and self.pkgconfig_filter_flags:
                fflags = []
                for f in pkg_flags.split():
                    if not f.startswith("-f") and not f.startswith("-W"):
                        fflags += [f]
def parse(self, lines):
    # Parse macro definition lines into self.macros.
    #
    # Python 2 variant of the macro parser: a character driven state
    # machine handling '[map]' sections, '%include' directives, '#'
    # comments and single quoted one line or triple quoted multiline
    # values.
    #
    # NOTE(review): character and state comparisons use 'is' rather
    # than '=='; this relies on CPython interning of short string
    # literals and is fragile — confirm before porting.

    def _clean(l):
        # Drop comments and line terminators then strip whitespace.
        if '#' in l:
            l = l[:l.index('#')]
        if '\r' in l:
            # NOTE(review): indexes 'r' rather than '\r', so the line
            # is cut at the first letter 'r' — looks like a bug.
            l = l[:l.index('r')]
        if '\n' in l:
            l = l[:l.index('\n')]
        return l.strip()

    trace_me = False
    if trace_me:
        print '[[[[]]]] parsing macros'
    macros = { 'global': {} }
    map = 'global'
    lc = 0
    state = 'key'
    token = ''
    macro = []
    for l in lines:
        lc += 1
        #print 'l:%s' % (l[:-1])
        if len(l) == 0:
            continue
        l_remaining = l
        for c in l:
            if trace_me:
                print ']]]]]]]] c:%s(%d) s:%s t:"%s" m:%r M:%s' % \
                    (c, ord(c), state, token, macro, map)
            l_remaining = l_remaining[1:]
            # A comment outside of a value ends the line.
            if c is '#' and not state.startswith('value'):
                break
            if c == '\n' or c == '\r':
                # Only a blank key or a multiline value may span a
                # line ending.
                if not (state is 'key' and len(token) == 0) and \
                        not state.startswith('value-multiline'):
                    raise error.general('malformed macro line:%d: %s' % (lc, l))
            if state is 'key':
                if c not in string.whitespace:
                    if c is '[':
                        state = 'map'
                    elif c is '%':
                        state = 'directive'
                    elif c is ':':
                        macro += [token]
                        token = ''
                        state = 'attribs'
                    elif c is '#':
                        break
                    else:
                        token += c
            elif state is 'map':
                if c is ']':
                    if token not in macros:
                        macros[token] = {}
                    map = token
                    token = ''
                    state = 'key'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro map:%d: %s' % (lc, l))
            elif state is 'directive':
                if c in string.whitespace:
                    if token == 'include':
                        self.load(_clean(l_remaining))
                        token = ''
                        state = 'key'
                        break
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state is 'include':
                # NOTE(review): this state is never entered and the
                # 'c is string.whitespace' test is always False.
                if c is string.whitespace:
                    if token == 'include':
                        state = 'include'
                elif c in string.printable and c not in string.whitespace:
                    token += c
                else:
                    raise error.general('invalid macro directive:%d: %s' % (lc, l))
            elif state is 'attribs':
                if c not in string.whitespace:
                    if c is ',':
                        macro += [token]
                        token = ''
                        # Key plus two attributes seen; the value is
                        # next.
                        if len(macro) == 3:
                            state = 'value-start'
                    else:
                        token += c
            elif state is 'value-start':
                if c is "'":
                    state = 'value-line-start'
            elif state is 'value-line-start':
                if c is "'":
                    state = 'value-multiline-start'
                else:
                    state = 'value-line'
                    token += c
            elif state is 'value-multiline-start':
                if c is "'":
                    state = 'value-multiline'
                else:
                    # Two quotes then another character: an empty ''
                    # value.
                    macro += [token]
                    state = 'macro'
            elif state is 'value-line':
                if c is "'":
                    macro += [token]
                    state = 'macro'
                else:
                    token += c
            elif state is 'value-multiline':
                if c is "'":
                    state = 'value-multiline-end'
                else:
                    token += c
            elif state is 'value-multiline-end':
                if c is "'":
                    state = 'value-multiline-end-end'
                else:
                    # A lone quote inside a multiline value.
                    state = 'value-multiline'
                    token += "'" + c
            elif state is 'value-multiline-end-end':
                if c is "'":
                    macro += [token]
                    state = 'macro'
                else:
                    # Two quotes inside a multiline value.
                    state = 'value-multiline'
                    token += "''" + c
            else:
                raise error.internal('bad state: %s' % (state))
            if state is 'macro':
                # A macro is complete; record it in the current map.
                macros[map][macro[0].lower()] = (macro[1], macro[2], macro[3])
                macro = []
                token = ''
                state = 'key'
    # Merge the parsed maps into the object's macros.
    for m in macros:
        if m not in self.macros:
            self.macros[m] = {}
        for mm in macros[m]:
            self.macros[m][mm] = macros[m][mm]
def get_arg(self, arg):
    # Return the parsed value of a command line option argument. The
    # argument must be one of the known optional arguments.
    #
    # Raises error.internal when the argument is not a known option.
    known = self.optargs is not None and arg in self.optargs
    if not known:
        raise error.internal('bad arg: %s' % (arg))
    return self.parse_args(arg)