def process(self, srctree, classes, lines_before, lines_after, handled):
    """Detect an autotools-based build system and configure the recipe for it.

    Checks for configure.ac/configure.in (real autotools input) or a
    pre-generated GNU Autoconf configure script. On success appends the
    relevant classes/lines and records 'buildsystem' in handled.

    Returns True if the build system was handled, False otherwise.
    """
    if 'buildsystem' in handled:
        return False
    autoconf = False
    if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
        autoconf = True
        values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree)
        classes.extend(values.pop('inherit', '').split())
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
    else:
        conffile = RecipeHandler.checkfiles(srctree, ['configure'])
        if conffile:
            # Check if this is just a pre-generated autoconf configure script
            with open(conffile[0], 'r') as f:
                for i in range(1, 10):
                    if 'Generated by GNU Autoconf' in f.readline():
                        autoconf = True
                        break
    if autoconf:
        lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
        lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
        lines_before.append('# inherit line')
        classes.append('autotools')
        lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
        lines_after.append('EXTRA_OECONF = ""')
        lines_after.append('')
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect an autotools-based build system and configure the recipe for it.

    Checks for configure.ac/configure.in or a pre-generated GNU Autoconf
    configure script. For pre-generated scripts, falls back to scraping
    VERSION/PACKAGE_* assignments out of the script to fill in PV/PN.

    Returns True if the build system was handled, False otherwise.
    """
    if "buildsystem" in handled:
        return False
    autoconf = False
    if RecipeHandler.checkfiles(srctree, ["configure.ac", "configure.in"]):
        autoconf = True
        values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
        classes.extend(values.pop("inherit", "").split())
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
    else:
        conffile = RecipeHandler.checkfiles(srctree, ["configure"])
        if conffile:
            # Check if this is just a pre-generated autoconf configure script
            with open(conffile[0], "r") as f:
                for i in range(1, 10):
                    if "Generated by GNU Autoconf" in f.readline():
                        autoconf = True
                        break

    if autoconf and not ("PV" in extravalues and "PN" in extravalues):
        # Last resort: scrape name/version assignments from the generated script
        conffile = RecipeHandler.checkfiles(srctree, ["configure"])
        if conffile:
            with open(conffile[0], "r") as f:
                for line in f:
                    line = line.strip()
                    if line.startswith("VERSION=") or line.startswith("PACKAGE_VERSION="):
                        pv = line.split("=")[1].strip("\"'")
                        if pv and not "PV" in extravalues and validate_pv(pv):
                            extravalues["PV"] = pv
                    elif line.startswith("PACKAGE_NAME=") or line.startswith("PACKAGE="):
                        pn = line.split("=")[1].strip("\"'")
                        if pn and not "PN" in extravalues:
                            extravalues["PN"] = pn

    if autoconf:
        lines_before.append("")
        lines_before.append("# NOTE: if this software is not capable of being built in a separate build directory")
        lines_before.append("# from the source, you should replace autotools with autotools-brokensep in the")
        lines_before.append("# inherit line")
        classes.append("autotools")
        lines_after.append("# Specify any options you want to pass to the configure script using EXTRA_OECONF:")
        lines_after.append('EXTRA_OECONF = ""')
        lines_after.append("")
        handled.append("buildsystem")
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect an autotools-based build system and configure the recipe for it.

    Checks for configure.ac/configure.in or a pre-generated GNU Autoconf
    configure script. For pre-generated scripts, falls back to scraping
    VERSION/PACKAGE_* assignments out of the script to fill in PV/PN.

    Returns True if the build system was handled, False otherwise.
    """
    if 'buildsystem' in handled:
        return False
    autoconf = False
    if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
        autoconf = True
        values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
        classes.extend(values.pop('inherit', '').split())
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
    else:
        conffile = RecipeHandler.checkfiles(srctree, ['configure'])
        if conffile:
            # Check if this is just a pre-generated autoconf configure script
            with open(conffile[0], 'r') as f:
                for i in range(1, 10):
                    if 'Generated by GNU Autoconf' in f.readline():
                        autoconf = True
                        break

    if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
        # Last resort: scrape name/version assignments from the generated script
        conffile = RecipeHandler.checkfiles(srctree, ['configure'])
        if conffile:
            with open(conffile[0], 'r') as f:
                for line in f:
                    line = line.strip()
                    if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
                        pv = line.split('=')[1].strip('"\'')
                        if pv and not 'PV' in extravalues and validate_pv(pv):
                            extravalues['PV'] = pv
                    elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
                        pn = line.split('=')[1].strip('"\'')
                        if pn and not 'PN' in extravalues:
                            extravalues['PN'] = pn

    if autoconf:
        lines_before.append('')
        lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
        lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
        lines_before.append('# inherit line')
        classes.append('autotools')
        lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
        lines_after.append('EXTRA_OECONF = ""')
        lines_after.append('')
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect qmake projects and configure the recipe for qmake5.

    Parses each .pro file for Qt module dependencies (via parse_qt_pro) and
    emits a DEPENDS line plus a note about any unmapped Qt modules.
    """
    # There's not a conclusive way to tell a Qt2/3/4/5 .pro file apart, so we
    # just assume that qmake5 is a reasonable default if you have this layer
    # enabled
    if 'buildsystem' in handled:
        return False
    unmappedqt = []
    files = RecipeHandler.checkfiles(srctree, ['*.pro'])
    deps = []
    if files:
        for fn in files:
            self.parse_qt_pro(fn, deps, unmappedqt)
        classes.append('qmake5')
        if unmappedqt:
            # BUGFIX: 'outlines' was not defined in this scope (NameError when
            # unmapped Qt modules were found); emit the note alongside the
            # DEPENDS line it refers to
            lines_before.append('# NOTE: the following QT dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedqt))))
        if deps:
            lines_before.append('DEPENDS = "%s"' % ' '.join(list(set(deps))))
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Scrape recipe metadata (PN/PV/SUMMARY/HOMEPAGE/LICENSE) out of any
    RPM .spec files found in the source tree, storing results in extravalues.
    A license found in the spec file is surfaced as a comment next to the
    generated LICENSE line rather than set directly."""
    if 'PV' in extravalues and 'PN' in extravalues:
        return
    specfiles = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
    tag_to_var = {'Name': 'PN', 'Version': 'PV', 'Summary': 'SUMMARY', 'Url': 'HOMEPAGE', 'License': 'LICENSE'}
    found = {}
    for specfile in specfiles:
        with open(specfile, 'r', errors='surrogateescape') as specfh:
            for specline in specfh:
                for tag, var in tag_to_var.items():
                    if specline.startswith(tag + ':') and var not in found:
                        found[var] = specline.split(':', 1)[1].strip()
                        break
                if len(found) == len(tag_to_var):
                    # All tags located; stop scanning this file
                    break
    if 'PV' in found and not validate_pv(found['PV']):
        del found['PV']
    license = found.pop('LICENSE', None)
    if license:
        liccomment = '# NOTE: spec file indicates the license may be "%s"' % license
        for idx, existing in enumerate(lines_before):
            if existing.startswith('LICENSE ='):
                lines_before.insert(idx, liccomment)
                break
        else:
            lines_before.append(liccomment)
    extravalues.update(found)
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Claim the build system as qmake2 when any qmake project file exists."""
    if 'buildsystem' in handled:
        return False
    if not RecipeHandler.checkfiles(srctree, ['*.pro']):
        return False
    classes.append('qmake2')
    handled.append('buildsystem')
    return True
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Handle qmake-based projects by inheriting the qmake2 class."""
    if "buildsystem" in handled:
        return False
    profiles = RecipeHandler.checkfiles(srctree, ["*.pro"])
    if profiles:
        classes.append("qmake2")
        handled.append("buildsystem")
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect SCons builds (any SConstruct spelling) and inherit the scons class."""
    if "buildsystem" in handled:
        return False
    if not RecipeHandler.checkfiles(srctree, ["SConstruct", "Sconstruct", "sconstruct"]):
        return False
    classes.append("scons")
    lines_after.extend([
        "# Specify any options you want to pass to scons using EXTRA_OESCONS:",
        'EXTRA_OESCONS = ""',
        "",
    ])
    handled.append("buildsystem")
    return True
def process(self, srctree, classes, lines_before, lines_after, handled):
    """Detect a CMake build system via a top-level CMakeLists.txt."""
    if 'buildsystem' in handled:
        return False
    if not RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
        return False
    classes.append('cmake')
    lines_after.extend([
        '# Specify any options you want to pass to cmake using EXTRA_OECMAKE:',
        'EXTRA_OECMAKE = ""',
        '',
    ])
    handled.append('buildsystem')
    return True
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect SCons-based builds and inherit the scons class."""
    if 'buildsystem' in handled:
        return False
    sconsfiles = RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct'])
    if sconsfiles:
        classes.append('scons')
        for outline in ('# Specify any options you want to pass to scons using EXTRA_OESCONS:',
                        'EXTRA_OESCONS = ""',
                        ''):
            lines_after.append(outline)
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect a CMake build system and extract dependency information.

    Inherits the cmake class and appends any variable assignments returned
    by extract_cmake_deps to the recipe.
    """
    if 'buildsystem' in handled:
        return False
    if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
        classes.append('cmake')
        values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
        lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
        lines_after.append('EXTRA_OECMAKE = ""')
        lines_after.append('')
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect a CMake build system, extract dependency information and any
    extra classes to inherit (returned under the 'inherit' key)."""
    if "buildsystem" in handled:
        return False
    if RecipeHandler.checkfiles(srctree, ["CMakeLists.txt"]):
        classes.append("cmake")
        values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
        classes.extend(values.pop("inherit", "").split())
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
        lines_after.append("# Specify any options you want to pass to cmake using EXTRA_OECMAKE:")
        lines_after.append('EXTRA_OECMAKE = ""')
        lines_after.append("")
        handled.append("buildsystem")
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect a CMake build system, extract dependency information and any
    extra classes to inherit (returned under the 'inherit' key)."""
    if 'buildsystem' in handled:
        return False
    if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
        classes.append('cmake')
        values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
        classes.extend(values.pop('inherit', '').split())
        # BUGFIX: dict.iteritems() does not exist on Python 3; items() behaves
        # identically for this purpose on both Python 2 and 3
        for var, value in values.items():
            lines_before.append('%s = "%s"' % (var, value))
        lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
        lines_after.append('EXTRA_OECMAKE = ""')
        lines_after.append('')
        handled.append('buildsystem')
        return True
    return False
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
    """Extract recipe information from a CMakeLists.txt file.

    Currently only picks out the project name from a project() statement
    and stores it as extravalues['PN'].

    Returns a dict of recipe variable assignments (currently always empty).
    """
    values = {}
    if cmakelistsfile:
        srcfiles = [cmakelistsfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])
    # BUGFIX: use a raw string so \( is an escaped literal paren for the regex
    # engine rather than an invalid Python string escape (SyntaxWarning on
    # modern Python)
    proj_re = re.compile(r'project\(([^)]*)\)', re.IGNORECASE)
    with open(srcfiles[0], 'r') as f:
        for line in f:
            res = proj_re.match(line.strip())
            if res:
                # First argument of project() is the project name
                extravalues['PN'] = res.group(1).split()[0]
    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Scrape recipe metadata (PN/PV/SUMMARY/HOMEPAGE/LICENSE) out of any
    RPM .spec files in the source tree. Values containing unexpanded RPM
    macros are discarded; a spec-file license is surfaced as a comment next
    to the generated LICENSE line rather than set directly."""
    if 'PV' in extravalues and 'PN' in extravalues:
        return
    specfiles = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
    tag_to_var = {
        'Name': 'PN',
        'Version': 'PV',
        'Summary': 'SUMMARY',
        'Url': 'HOMEPAGE',
        'License': 'LICENSE',
    }
    found = {}
    for specfile in specfiles:
        with open(specfile, 'r', errors='surrogateescape') as specfh:
            for specline in specfh:
                for tag, var in tag_to_var.items():
                    if specline.startswith(tag + ':') and var not in found:
                        found[var] = specline.split(':', 1)[1].strip()
                        break
                if len(found) == len(tag_to_var):
                    # All tags located; stop scanning this file
                    break
    # Drop values containing unexpanded RPM macros
    for key in list(found.keys()):
        if '%' in found[key]:
            del found[key]
    if 'PV' in found and not validate_pv(found['PV']):
        del found['PV']
    license = found.pop('LICENSE', None)
    if license:
        liccomment = '# NOTE: spec file indicates the license may be "%s"' % license
        for idx, existing in enumerate(lines_before):
            if existing.startswith('LICENSE ='):
                lines_before.insert(idx, liccomment)
                break
        else:
            lines_before.append(liccomment)
    extravalues.update(found)
def parse_qt_pro(self, fn, deps, unmappedqt):
    """Parse a qmake .pro file, appending mapped Qt dependencies to deps and
    unknown QT module names to unmappedqt. Recurses into SUBDIRS projects.
    Any mention of qml pulls in qtdeclarative."""
    with open(fn, "r") as f:
        for line in f:
            if re.match(r"^QT\s*[+=]+", line):
                if "=" in line:
                    for item in line.split("=")[1].split():
                        dep = Qmake5RecipeHandler.qt_map.get(item, None)
                        if dep:
                            deps.append(dep)
                        elif dep is None:
                            # BUGFIX: condition was inverted ('is not None'),
                            # which silently dropped genuinely unknown modules
                            # and reported deliberately-ignored (empty-mapped)
                            # ones as unknown
                            unmappedqt.append(item)
            elif re.match(r"^SUBDIRS\s*[+=]+", line):
                if "=" in line:
                    for item in line.split("=")[1].split():
                        subfiles = RecipeHandler.checkfiles(os.path.join(os.path.dirname(fn), item), ["*.pro"])
                        for subfn in subfiles:
                            self.parse_qt_pro(subfn, deps, unmappedqt)
            elif "qml" in line.lower():
                deps.append("qtdeclarative")
def parse_qt_pro(self, fn, deps, unmappedqt):
    """Parse a qmake .pro file, appending mapped Qt dependencies to deps and
    unknown QT module names to unmappedqt. Recurses into SUBDIRS projects.
    Any mention of qml pulls in qtdeclarative."""
    with open(fn, 'r') as f:
        for line in f:
            if re.match(r'^QT\s*[+=]+', line):
                if '=' in line:
                    for item in line.split('=')[1].split():
                        dep = Qmake5RecipeHandler.qt_map.get(item, None)
                        if dep:
                            deps.append(dep)
                        elif dep is None:
                            # BUGFIX: condition was inverted ('is not None'),
                            # which silently dropped genuinely unknown modules
                            # and reported deliberately-ignored (empty-mapped)
                            # ones as unknown
                            unmappedqt.append(item)
            elif re.match(r'^SUBDIRS\s*[+=]+', line):
                if '=' in line:
                    for item in line.split('=')[1].split():
                        subfiles = RecipeHandler.checkfiles(os.path.join(os.path.dirname(fn), item), ['*.pro'])
                        for subfn in subfiles:
                            self.parse_qt_pro(subfn, deps, unmappedqt)
            elif 'qml' in line.lower():
                deps.append('qtdeclarative')
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Fall back to any RPM .spec file in the tree for the package name (PN)
    and version (PV), stopping at the first spec file that yields either."""
    if 'PV' in extravalues and 'PN' in extravalues:
        return
    specfiles = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
    name = None
    version = None
    for specfile in specfiles:
        with open(specfile, 'r') as specfh:
            for specline in specfh:
                if specline.startswith('Name:') and not name:
                    name = specline.split(':')[1].strip()
                if specline.startswith('Version:') and not version:
                    version = specline.split(':')[1].strip()
        if version or name:
            if version and 'PV' not in extravalues and validate_pv(version):
                extravalues['PV'] = version
            if name and 'PN' not in extravalues:
                extravalues['PN'] = name
            break
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Recognise an npm package via package.json and pull out the basic
    metadata (name, version, description, homepage)."""
    if 'buildsystem' in handled:
        return False
    pkgfiles = RecipeHandler.checkfiles(srctree, ['package.json'])
    if not pkgfiles:
        return False
    with open(pkgfiles[0], 'r') as pkgfh:
        data = json.loads(pkgfh.read())
    if 'name' in data and 'version' in data:
        extravalues['PN'] = data['name']
        extravalues['PV'] = data['version']
        classes.append('npm')
        handled.append('buildsystem')
        if 'description' in data:
            lines_before.append('SUMMARY = "%s"' % data['description'])
        if 'homepage' in data:
            lines_before.append('HOMEPAGE = "%s"' % data['homepage'])
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Fall back to any RPM .spec file in the tree for the package name (PN)
    and version (PV), stopping at the first spec file that yields either."""
    if "PV" in extravalues and "PN" in extravalues:
        return
    specfiles = RecipeHandler.checkfiles(srctree, ["*.spec"], recursive=True)
    name = None
    version = None
    for specfile in specfiles:
        with open(specfile, "r") as specfh:
            for specline in specfh:
                if specline.startswith("Name:") and not name:
                    name = specline.split(":")[1].strip()
                if specline.startswith("Version:") and not version:
                    version = specline.split(":")[1].strip()
        if version or name:
            if version and "PV" not in extravalues and validate_pv(version):
                extravalues["PV"] = version
            if name and "PN" not in extravalues:
                extravalues["PN"] = name
            break
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect qmake projects and configure the recipe for qmake5.

    Parses each .pro file for Qt module dependencies (via parse_qt_pro) and
    emits a DEPENDS line plus a note about any unmapped Qt modules.
    """
    # There's not a conclusive way to tell a Qt2/3/4/5 .pro file apart, so we
    # just assume that qmake5 is a reasonable default if you have this layer
    # enabled
    if 'buildsystem' in handled:
        return False
    unmappedqt = []
    files = RecipeHandler.checkfiles(srctree, ['*.pro'])
    deps = []
    if files:
        for fn in files:
            self.parse_qt_pro(fn, deps, unmappedqt)
        classes.append('qmake5')
        if unmappedqt:
            # BUGFIX: 'outlines' was not defined in this scope (NameError when
            # unmapped Qt modules were found); emit the note alongside the
            # DEPENDS line it refers to
            lines_before.append('# NOTE: the following QT dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedqt))))
        if deps:
            lines_before.append('DEPENDS = "%s"' % ' '.join(list(set(deps))))
        handled.append('buildsystem')
        return True
    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Set PV from a VERSION/version file whose single non-empty line is a
    valid version number."""
    if 'PV' not in extravalues:
        # Look for a VERSION or version file containing a single line consisting
        # only of a version number
        candidates = RecipeHandler.checkfiles(srctree, ['VERSION', 'version'])
        version = None
        for candidate in candidates:
            count = 0
            with open(candidate, 'r') as fh:
                for raw in fh:
                    text = raw.rstrip().strip('"\'')
                    count += 1
                    if not text:
                        continue
                    if count > 1:
                        # More than one line: not a plain version file
                        version = None
                        break
                    if validate_pv(text):
                        version = text
            if version:
                extravalues['PV'] = version
                break
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Full npm package handler.

    Reads package.json for metadata, resolves dependencies, generates
    shrinkwrap/lockdown files, splits bundled npm modules into their own
    packages and computes per-package LICENSE values.

    Returns True if the build system was handled, False otherwise.
    """
    import bb.utils
    import oe
    from collections import OrderedDict

    if 'buildsystem' in handled:
        return False

    def read_package_json(fn):
        # surrogateescape so undecodable bytes in package.json don't abort us
        with open(fn, 'r', errors='surrogateescape') as f:
            return json.loads(f.read())

    files = RecipeHandler.checkfiles(srctree, ['package.json'])
    if files:
        check_npm(tinfoil.config_data)

        data = read_package_json(files[0])
        if 'name' in data and 'version' in data:
            extravalues['PN'] = data['name']
            extravalues['PV'] = data['version']
            classes.append('npm')
            handled.append('buildsystem')
            if 'description' in data:
                extravalues['SUMMARY'] = data['description']
            if 'homepage' in data:
                extravalues['HOMEPAGE'] = data['homepage']

            deps = data.get('dependencies', {})
            updated = self._handle_dependencies(tinfoil.config_data, deps, lines_before, srctree)
            if updated:
                # We need to redo the license stuff
                self._replace_license_vars(srctree, lines_before, handled, extravalues, tinfoil.config_data)

            # Shrinkwrap
            localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
            self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before)

            # Lockdown
            self._lockdown(srctree, localfilesdir, extravalues, lines_before)

            # Split each npm module out to is own package
            npmpackages = oe.package.npm_split_package_dirs(srctree)
            # BUGFIX: licvalues was referenced before assignment (NameError)
            # when no ('license', ...) tuple was present in handled
            licvalues = None
            for item in handled:
                if isinstance(item, tuple):
                    if item[0] == 'license':
                        licvalues = item[1]
                        break
            if licvalues:
                # Augment the license list with information we have in the packages
                licenses = {}
                license = self._handle_license(data)
                if license:
                    licenses['${PN}'] = license
                for pkgname, pkgitem in npmpackages.items():
                    _, pdata = pkgitem
                    license = self._handle_license(pdata)
                    if license:
                        licenses[pkgname] = license
                # Now write out the package-specific license values
                # We need to strip out the json data dicts for this since split_pkg_licenses
                # isn't expecting it
                packages = OrderedDict((x, y[0]) for x, y in npmpackages.items())
                packages['${PN}'] = ''
                pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
                all_licenses = list(set([item for pkglicense in pkglicenses.values() for item in pkglicense]))
                # Go back and update the LICENSE value since we have a bit more
                # information than when that was written out (and we know all apply
                # vs. there being a choice, so we can join them with &)
                for i, line in enumerate(lines_before):
                    if line.startswith('LICENSE = '):
                        lines_before[i] = 'LICENSE = "%s"' % ' & '.join(all_licenses)
                        break

            # Need to move S setting after inherit npm
            for i, line in enumerate(lines_before):
                if line.startswith('S ='):
                    lines_before.pop(i)
                    lines_after.insert(0, '# Must be set after inherit npm since that itself sets S')
                    lines_after.insert(1, line)
                    break

            return True

    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Process the Catkin recipe.

    Read the key tags from the package.xml ROS file and generate the
    corresponding recipe variables for the recipe file.
    """
    package_list = RecipeHandler.checkfiles(srctree, ['package.xml'], recursive=False)
    if len(package_list) > 0:
        handled.append('buildsystem')
        # NOTE(review): when multiple package.xml files are found, the loop
        # appends metadata lines for each one into the same recipe
        for package_file in package_list:
            LOGGER.info("Found package_file: " + package_file)
            xml = RosXmlParser(package_file)
            classes.append('catkin')
            extravalues['PN'] = xml.get_name()  # Ignored if set
            extravalues['PV'] = xml.get_version()
            licenses = xml.get_licenses()
            if len(licenses) < 1:
                # package.xml requires at least one <license> tag
                LOGGER.error("package.xml missing required LICENSE field!")
            else:
                self.process_license(srctree, classes, lines_before, lines_after, handled, extravalues, licenses, package_file)
            lines_after.append('# This is a Catkin (ROS) based recipe')
            lines_after.append('# ROS package.xml format version ' + xml.get_format())
            lines_after.append('')
            lines_after.append("SUMMARY = \"" + "ROS package " + xml.get_name() + "\"")
            lines_after.append("DESCRIPTION = \"" + xml.get_description() + "\"")
            # Map the Catkin URLs to BitBake
            urls = xml.get_urls()
            if 'website' in urls:
                lines_after.append("HOMEPAGE = \"" + urls['website'] + "\"")
            else:
                # Fall back to a URL with no declared type, if present
                if '' in urls:
                    lines_after.append("HOMEPAGE = \"" + urls[''] + "\"")
            if 'bugtracker' in urls:
                lines_after.append("# ROS_BUGTRACKER = \"" + urls['bugtracker'] + "\"")
            if 'repository' in urls:
                lines_after.append("# SRC_URI = \"" + urls['repository'] + "\"")
            # First author/maintainer gets the '=' assignment, the rest '+='
            authors = xml.get_authors()
            if len(authors) > 0:
                lines_after.append("# ROS_AUTHOR = \"" + authors[0] + "\"")
                del authors[0]
                for author in authors:
                    lines_after.append("# ROS_AUTHOR += \"" + author + "\"")
            maintainers = xml.get_maintainers()
            if len(maintainers) > 0:
                lines_after.append("# ROS_MAINTAINER = \"" + maintainers[0] + "\"")
                del maintainers[0]
                for maintainer in maintainers:
                    lines_after.append("# ROS_MAINTAINER += \"" + maintainer + "\"")
            lines_after.append("SECTION = \"devel\"")
            dependencies = xml.get_build_dependencies()
            if len(dependencies) > 0:
                lines_after.append("DEPENDS = \"" + dependencies[0] + "\"")
                del dependencies[0]
                for dependency in dependencies:
                    lines_after.append("DEPENDS += \"" + dependency + "\"")
            dependencies = xml.get_runtime_dependencies()
            if len(dependencies) > 0:
                # NOTE(review): runtime dependencies are attached to the -dev
                # package here; confirm this is intended rather than
                # RDEPENDS_${PN}
                lines_after.append("RDEPENDS_${PN}-dev = \"" + dependencies[0] + "\"")
                del dependencies[0]
                for dependency in dependencies:
                    lines_after.append("RDEPENDS_${PN}-dev += \"" + dependency + "\"")
        return True
    return False
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
    """Extract recipe information from CMakeLists.txt files.

    Recursively follows include() and add_subdirectory() statements, picking
    up the project name (stored into extravalues['PN']), pkg-config module
    dependencies, find_package()/check_library_exists() dependencies, and
    classes to inherit.

    Returns a dict of recipe variable values, with any classes to inherit
    joined under the 'inherit' key.
    """
    values = {}
    inherits = []

    if cmakelistsfile:
        srcfiles = [cmakelistsfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])

    # Note that some of these are non-standard, but probably better to
    # be able to map them anyway if we see them
    # (an empty-string mapping means "known, deliberately ignored")
    cmake_pkgmap = {'alsa': 'alsa-lib',
                    'aspell': 'aspell',
                    'atk': 'atk',
                    'bison': 'bison-native',
                    'boost': 'boost',
                    'bzip2': 'bzip2',
                    'cairo': 'cairo',
                    'cups': 'cups',
                    'curl': 'curl',
                    'curses': 'ncurses',
                    'cvs': 'cvs',
                    'drm': 'libdrm',
                    'dbus': 'dbus',
                    'dbusglib': 'dbus-glib',
                    'egl': 'virtual/egl',
                    'expat': 'expat',
                    'flex': 'flex-native',
                    'fontconfig': 'fontconfig',
                    'freetype': 'freetype',
                    'gettext': '',
                    'git': '',
                    'gio': 'glib-2.0',
                    'giounix': 'glib-2.0',
                    'glew': 'glew',
                    'glib': 'glib-2.0',
                    'glib2': 'glib-2.0',
                    'glu': 'libglu',
                    'glut': 'freeglut',
                    'gobject': 'glib-2.0',
                    'gperf': 'gperf-native',
                    'gnutls': 'gnutls',
                    'gtk2': 'gtk+',
                    'gtk3': 'gtk+3',
                    'gtk': 'gtk+3',
                    'harfbuzz': 'harfbuzz',
                    'icu': 'icu',
                    'intl': 'virtual/libintl',
                    'jpeg': 'jpeg',
                    'libarchive': 'libarchive',
                    'libiconv': 'virtual/libiconv',
                    'liblzma': 'xz',
                    'libxml2': 'libxml2',
                    'libxslt': 'libxslt',
                    'opengl': 'virtual/libgl',
                    'openmp': '',
                    'openssl': 'openssl',
                    'pango': 'pango',
                    'perl': '',
                    'perllibs': '',
                    'pkgconfig': '',
                    'png': 'libpng',
                    'pthread': '',
                    'pythoninterp': '',
                    'pythonlibs': '',
                    'ruby': 'ruby-native',
                    'sdl': 'libsdl',
                    'sdl2': 'libsdl2',
                    'subversion': 'subversion-native',
                    'swig': 'swig-native',
                    'tcl': 'tcl-native',
                    'threads': '',
                    'tiff': 'tiff',
                    'wget': 'wget',
                    'x11': 'libx11',
                    'xcb': 'libxcb',
                    'xext': 'libxext',
                    'xfixes': 'libxfixes',
                    'zlib': 'zlib',
                    }

    pcdeps = []
    libdeps = []
    deps = []
    unmappedpkgs = []

    # NOTE(review): these patterns use non-raw strings containing regex
    # escapes like \s and \( — consider converting to raw strings
    proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
    pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
    pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
    findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
    checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
    include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
    subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
    dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')

    def interpret_value(value):
        # Strip surrounding double quotes from a CMake argument
        return value.strip('"')

    def parse_cmake_file(fn, paths=None):
        # Parse one CMake file, recursing into included files / subdirectories
        searchpaths = (paths or []) + [os.path.dirname(fn)]
        logger.debug('Parsing file %s' % fn)
        with open(fn, 'r') as f:
            for line in f:
                line = line.strip()
                res = include_re.match(line)
                if res:
                    includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
                    if includefn:
                        parse_cmake_file(includefn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into include file %s' % res.group(1))
                    continue
                res = subdir_re.match(line)
                if res:
                    subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
                    if os.path.exists(subdirfn):
                        parse_cmake_file(subdirfn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
                    continue
                res = proj_re.match(line)
                if res:
                    # First argument of project() is the project name
                    extravalues['PN'] = interpret_value(res.group(1).split()[0])
                    continue
                res = pkgcm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        pcdeps.extend([interpret_value(x[0]) for x in res])
                    inherits.append('pkgconfig')
                    continue
                res = pkgsm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        # Note: appending a tuple here!
                        item = tuple((interpret_value(x[0]) for x in res))
                        if len(item) == 1:
                            item = item[0]
                        pcdeps.append(item)
                    inherits.append('pkgconfig')
                    continue
                res = findpackage_re.match(line)
                if res:
                    origpkg = res.group(1)
                    pkg = interpret_value(origpkg.lower())
                    # Some packages translate to an inherit rather than a DEPENDS
                    if pkg == 'gettext':
                        inherits.append('gettext')
                    elif pkg == 'perl':
                        inherits.append('perlnative')
                    elif pkg == 'pkgconfig':
                        inherits.append('pkgconfig')
                    elif pkg == 'pythoninterp':
                        inherits.append('pythonnative')
                    elif pkg == 'pythonlibs':
                        inherits.append('python-dir')
                    else:
                        # dep == '' means known-but-ignored; dep is None means unknown
                        dep = cmake_pkgmap.get(pkg, None)
                        if dep:
                            deps.append(dep)
                        elif dep is None:
                            unmappedpkgs.append(origpkg)
                    continue
                res = checklib_re.match(line)
                if res:
                    lib = interpret_value(res.group(1))
                    if not lib.startswith('$'):
                        # Skip unexpanded CMake variable references
                        libdeps.append(lib)
                if line.lower().startswith('useswig'):
                    deps.append('swig-native')
                    continue

    parse_cmake_file(srcfiles[0])

    if unmappedpkgs:
        outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(unmappedpkgs))

    RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Fallback handler for plain Makefile-based projects.

    Runs autoscan to attempt dependency discovery, then generates skeleton
    do_configure/do_compile/do_install tasks for the user to fill in. If no
    Makefile is present at all, emits fully-empty skeleton tasks.
    """
    if 'buildsystem' in handled:
        return False

    makefile = RecipeHandler.checkfiles(srctree, ['Makefile'])
    if makefile:
        lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
        lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
        lines_after.append('# that the appropriate arguments are passed in.')
        lines_after.append('')

        scanfile = os.path.join(srctree, 'configure.scan')
        skipscan = False
        try:
            stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
        except bb.process.ExecutionError as e:
            # autoscan failure is tolerated; we simply won't have configure.scan
            skipscan = True
        if scanfile and os.path.exists(scanfile):
            values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
            classes.extend(values.pop('inherit', '').split())
            # BUGFIX: dict.iteritems() does not exist on Python 3; items()
            # behaves identically for this purpose on both Python 2 and 3
            for var, value in values.items():
                if var == 'DEPENDS':
                    lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
                lines_before.append('%s = "%s"' % (var, value))
            lines_before.append('')
            # Clean up autoscan's droppings
            for f in ['configure.scan', 'autoscan.log']:
                fp = os.path.join(srctree, f)
                if os.path.exists(fp):
                    os.remove(fp)

        self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])

        func = []
        func.append('# You will almost certainly need to add additional arguments here')
        func.append('oe_runmake')
        self.genfunction(lines_after, 'do_compile', func)

        # Probe for an install target with a dry run; exit code 1 is the
        # conventional "no rule to make target" failure
        installtarget = True
        try:
            stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True)
        except bb.process.ExecutionError as e:
            if e.exitcode != 1:
                installtarget = False
        func = []
        if installtarget:
            func.append('# This is a guess; additional arguments may be required')
            makeargs = ''
            # Only scan the first ~100 lines of the Makefile for DESTDIR usage
            with open(makefile[0], 'r') as f:
                for i in range(1, 100):
                    if 'DESTDIR' in f.readline():
                        makeargs += " 'DESTDIR=${D}'"
                        break
            func.append('oe_runmake install%s' % makeargs)
        else:
            func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
            func.append('# target named "install", so you will need to define this yourself')
        self.genfunction(lines_after, 'do_install', func)

        handled.append('buildsystem')
    else:
        lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
        lines_after.append('')
        self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
        self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
        self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Handle Python distutils/setuptools projects (setup.py).

    Gathers package metadata from egg-info, PKG-INFO and/or a parsed
    setup.py, maps it to BitBake variables via self.bbvar_map, resolves
    license classifiers, and emits RDEPENDS/PACKAGECONFIG lines derived
    from install_requires/extras_require and source analysis.
    """
    if 'buildsystem' in handled:
        return False

    if not RecipeHandler.checkfiles(srctree, ['setup.py']):
        return

    # setup.py is always parsed to get at certain required information, such as
    # distutils vs setuptools
    #
    # If egg info is available, we use it for both its PKG-INFO metadata
    # and for its requires.txt for install_requires.
    # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
    # the parsed setup.py, but use the install_requires info from the
    # parsed setup.py.

    setupscript = os.path.join(srctree, 'setup.py')
    try:
        setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
    except Exception:
        # Fall back to conservative defaults if setup.py cannot be parsed
        logger.exception("Failed to parse setup.py")
        setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []

    egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
    if egginfo:
        info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
        requires_txt = os.path.join(egginfo[0], 'requires.txt')
        if os.path.exists(requires_txt):
            with codecs.open(requires_txt) as f:
                inst_req = []
                extras_req = collections.defaultdict(list)
                current_feature = None
                for line in f.readlines():
                    line = line.rstrip()
                    if not line:
                        continue
                    # '[feature]' section headers separate extras from base requires
                    if line.startswith('['):
                        current_feature = line[1:-1]
                    elif current_feature:
                        extras_req[current_feature].append(line)
                    else:
                        inst_req.append(line)
                info['Install-requires'] = inst_req
                info['Extras-require'] = extras_req
    elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
        info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))
        if setup_info:
            if 'Install-requires' in setup_info:
                info['Install-requires'] = setup_info['Install-requires']
            if 'Extras-require' in setup_info:
                info['Extras-require'] = setup_info['Extras-require']
    else:
        if setup_info:
            info = setup_info
        else:
            info = self.get_setup_args_info(setupscript)

    # Grab the license value before applying replacements
    license_str = info.get('License', '').strip()

    self.apply_info_replacements(info)

    if uses_setuptools:
        classes.append('setuptools')
    else:
        classes.append('distutils')

    if license_str:
        for i, line in enumerate(lines_before):
            if line.startswith('LICENSE = '):
                lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
                break

    if 'Classifier' in info:
        # Refine generic classifier licenses using the declared License field
        existing_licenses = info.get('License', '')
        licenses = []
        for classifier in info['Classifier']:
            if classifier in self.classifier_license_map:
                license = self.classifier_license_map[classifier]
                if license == 'Apache' and 'Apache-2.0' in existing_licenses:
                    license = 'Apache-2.0'
                elif license == 'GPL':
                    if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
                        license = 'GPL-2.0'
                    elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
                        license = 'GPL-3.0'
                elif license == 'LGPL':
                    if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
                        license = 'LGPL-2.1'
                    elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
                        license = 'LGPL-2.0'
                    elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
                        license = 'LGPL-3.0'
                licenses.append(license)

        if licenses:
            info['License'] = ' & '.join(licenses)

    # Map PKG-INFO & setup.py fields to bitbake variables
    for field, values in info.items():
        if field in self.excluded_fields:
            continue

        if field not in self.bbvar_map:
            continue

        if isinstance(values, str):
            value = values
        else:
            value = ' '.join(str(v) for v in values if v)

        bbvar = self.bbvar_map[field]
        if bbvar not in extravalues and value:
            extravalues[bbvar] = value

    mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)

    extras_req = set()
    if 'Extras-require' in info:
        extras_req = info['Extras-require']

    if extras_req:
        lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
        lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
        lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
        lines_after.append('#')
        lines_after.append('# Uncomment this line to enable all the optional features.')
        lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
        for feature, feature_reqs in extras_req.items():
            unmapped_deps.difference_update(feature_reqs)

            feature_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
            lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))

    inst_reqs = set()
    if 'Install-requires' in info:
        if extras_req:
            lines_after.append('')
        inst_reqs = info['Install-requires']
        if inst_reqs:
            unmapped_deps.difference_update(inst_reqs)

            inst_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
            lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
            lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
            lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(inst_req_deps)))

    if mapped_deps:
        name = info.get('Name')
        # NOTE(review): name[0] is indexed here — presumably 'Name' is stored
        # as a list; confirm against get_pkginfo/parse_setup_py
        if name and name[0] in mapped_deps:
            # Attempt to avoid self-reference
            mapped_deps.remove(name[0])
        mapped_deps -= set(self.excluded_pkgdeps)
        if inst_reqs or extras_req:
            lines_after.append('')
        lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
        lines_after.append('# python sources, and might not be 100% accurate.')
        lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))

    unmapped_deps -= set(extensions)
    unmapped_deps -= set(self.assume_provided)
    if unmapped_deps:
        if mapped_deps:
            lines_after.append('')
        lines_after.append('# WARNING: We were unable to map the following python package/module')
        lines_after.append('# dependencies to the bitbake packages which include them:')
        lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))

    handled.append('buildsystem')
def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
    """Scan autoconf input files for macros that imply build dependencies.

    Parses acinclude.m4 / configure.ac / configure.in (or the explicit
    acfile) and returns a dict of recipe variable values produced by
    RecipeHandler.handle_depends(); an 'inherit' entry lists classes to
    inherit. When extravalues is a dict, PN/PV extracted from AC_INIT /
    AM_INIT_AUTOMAKE are stored into it.
    """
    import shlex

    values = {}
    inherits = []

    # FIXME this mapping is very thin
    progmap = {'flex': 'flex-native', 'bison': 'bison-native', 'm4': 'm4-native', 'tar': 'tar-native', 'ar': 'binutils-native'}
    progclassmap = {'gconftool-2': 'gconf', 'pkg-config': 'pkgconfig'}

    # Raw strings so the regex escapes are not (mis)interpreted by Python
    pkg_re = re.compile(r'PKG_CHECK_MODULES\(\[?[a-zA-Z0-9_]*\]?, *\[?([^,\]]*)\]?[),].*')
    pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\[?([^,\]]*)\]?[),].*')
    lib_re = re.compile(r'AC_CHECK_LIB\(\[?([^,\]]*)\]?,.*')
    libx_re = re.compile(r'AX_CHECK_LIBRARY\(\[?[^,\]]*\]?, *\[?([^,\]]*)\]?, *\[?([a-zA-Z0-9-]*)\]?,.*')
    progs_re = re.compile(r'_PROGS?\(\[?[a-zA-Z0-9_]*\]?, \[?([^,\]]*)\]?[),].*')
    dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
    ac_init_re = re.compile(r'AC_INIT\(([^,]+), *([^,]+)[,)].*')
    am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(([^,]+), *([^,]+)[,)].*')
    define_re = re.compile(r' *(m4_)?define\(([^,]+), *([^,]+)\)')

    defines = {}

    def subst_defines(value):
        """Recursively substitute values of m4/define()s into value."""
        newvalue = value
        # NOTE(review): .iteritems() is Python 2-only; this file appears to
        # target py2 throughout - confirm before porting
        for define, defval in defines.iteritems():
            newvalue = newvalue.replace(define, defval)
        if newvalue != value:
            return subst_defines(newvalue)
        return value

    def process_value(value):
        """Resolve an autoconf macro argument to a plain string (or None)."""
        value = value.replace('[', '').replace(']', '')
        if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
            cmd = subst_defines(value[value.index('(')+1:-1])
            try:
                if '|' in cmd:
                    cmd = 'set -o pipefail; ' + cmd
                stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
                ret = stdout.rstrip()
            except bb.process.ExecutionError:
                ret = ''
        elif value.startswith('m4_'):
            # Unhandled m4 construct - caller treats None as "no value"
            return None
        else:
            # BUGFIX: this was previously executed unconditionally, clobbering
            # the result of the m4_esyscmd branch above
            ret = subst_defines(value)
        if ret:
            ret = ret.strip('"\'')
        return ret

    # Since a configure.ac file is essentially a program, this is only ever going to be
    # a hack unfortunately; but it ought to be enough of an approximation
    if acfile:
        srcfiles = [acfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])

    pcdeps = []
    libdeps = []
    deps = []
    unmapped = []

    def process_macro(keyword, value):
        """Map a single parsed macro invocation onto deps/inherits."""
        if keyword == 'PKG_CHECK_MODULES':
            res = pkg_re.search(value)
            if res:
                res = dep_re.findall(res.group(1))
                if res:
                    pcdeps.extend([x[0] for x in res])
            inherits.append('pkgconfig')
        elif keyword == 'PKG_CHECK_EXISTS':
            res = pkgce_re.search(value)
            if res:
                res = dep_re.findall(res.group(1))
                if res:
                    pcdeps.extend([x[0] for x in res])
            inherits.append('pkgconfig')
        elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
            inherits.append('gettext')
        elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
            deps.append('intltool-native')
        elif keyword == 'AM_PATH_GLIB_2_0':
            deps.append('glib-2.0')
        elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
            res = progs_re.search(value)
            if res:
                for prog in shlex.split(res.group(1)):
                    prog = prog.split()[0]
                    progclass = progclassmap.get(prog, None)
                    if progclass:
                        inherits.append(progclass)
                    else:
                        progdep = progmap.get(prog, None)
                        if progdep:
                            deps.append(progdep)
                        else:
                            # $-prefixed names are unexpanded variables; skip
                            if not prog.startswith('$'):
                                unmapped.append(prog)
        elif keyword == 'AC_CHECK_LIB':
            res = lib_re.search(value)
            if res:
                lib = res.group(1)
                if not lib.startswith('$'):
                    libdeps.append(lib)
        elif keyword == 'AX_CHECK_LIBRARY':
            res = libx_re.search(value)
            if res:
                lib = res.group(2)
                if not lib.startswith('$'):
                    header = res.group(1)
                    # BUGFIX: libdeps is a list - .add() raised AttributeError
                    libdeps.append((lib, header))
        elif keyword == 'AC_PATH_X':
            deps.append('libx11')
        elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
            deps.append('boost')
        elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
            deps.append('flex-native')
        elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
            deps.append('bison-native')
        elif keyword == 'AX_CHECK_ZLIB':
            deps.append('zlib')
        elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
            deps.append('openssl')
        elif keyword == 'AX_LIB_CURL':
            deps.append('curl')
        elif keyword == 'AX_LIB_BEECRYPT':
            deps.append('beecrypt')
        elif keyword == 'AX_LIB_EXPAT':
            deps.append('expat')
        elif keyword == 'AX_LIB_GCRYPT':
            deps.append('libgcrypt')
        elif keyword == 'AX_LIB_NETTLE':
            deps.append('nettle')
        elif keyword == 'AX_LIB_READLINE':
            deps.append('readline')
        elif keyword == 'AX_LIB_SQLITE3':
            deps.append('sqlite3')
        elif keyword == 'AX_LIB_TAGLIB':
            deps.append('taglib')
        elif keyword == 'AX_PKG_SWIG':
            deps.append('swig')
        elif keyword == 'AX_PROG_XSLTPROC':
            deps.append('libxslt-native')
        elif keyword == 'AX_WITH_CURSES':
            deps.append('ncurses')
        elif keyword == 'AX_PATH_BDB':
            deps.append('db')
        elif keyword == 'AX_PATH_LIB_PCRE':
            deps.append('libpcre')
        elif keyword == 'AC_INIT':
            if extravalues is not None:
                res = ac_init_re.match(value)
                if res:
                    extravalues['PN'] = process_value(res.group(1))
                    pv = process_value(res.group(2))
                    if validate_pv(pv):
                        extravalues['PV'] = pv
        elif keyword == 'AM_INIT_AUTOMAKE':
            if extravalues is not None:
                # AC_INIT takes precedence if it already set PN
                if 'PN' not in extravalues:
                    res = am_init_re.match(value)
                    if res:
                        if res.group(1) != 'AC_PACKAGE_NAME':
                            extravalues['PN'] = process_value(res.group(1))
                        pv = process_value(res.group(2))
                        if validate_pv(pv):
                            extravalues['PV'] = pv
        elif keyword == 'define(':
            res = define_re.match(value)
            if res:
                key = res.group(2).strip('[]')
                value = process_value(res.group(3))
                if value is not None:
                    defines[key] = value

    # BUGFIX: a comma was missing between AX_LIB_READLINE and AX_LIB_SQLITE3,
    # so implicit string concatenation silently disabled both keywords
    keywords = ['PKG_CHECK_MODULES',
                'PKG_CHECK_EXISTS',
                'AM_GNU_GETTEXT',
                'AM_GLIB_GNU_GETTEXT',
                'GETTEXT_PACKAGE',
                'AC_PROG_INTLTOOL',
                'IT_PROG_INTLTOOL',
                'AM_PATH_GLIB_2_0',
                'AC_CHECK_PROG',
                'AC_PATH_PROG',
                'AX_WITH_PROG',
                'AC_CHECK_LIB',
                'AX_CHECK_LIBRARY',
                'AC_PATH_X',
                'AX_BOOST',
                'BOOST_REQUIRE',
                'AC_PROG_LEX',
                'AM_PROG_LEX',
                'AX_PROG_FLEX',
                'AC_PROG_YACC',
                'AX_PROG_BISON',
                'AX_CHECK_ZLIB',
                'AX_CHECK_OPENSSL',
                'AX_LIB_CRYPTO',
                'AX_LIB_CURL',
                'AX_LIB_BEECRYPT',
                'AX_LIB_EXPAT',
                'AX_LIB_GCRYPT',
                'AX_LIB_NETTLE',
                'AX_LIB_READLINE',
                'AX_LIB_SQLITE3',
                'AX_LIB_TAGLIB',
                'AX_PKG_SWIG',
                'AX_PROG_XSLTPROC',
                'AX_WITH_CURSES',
                'AX_PATH_BDB',
                'AX_PATH_LIB_PCRE',
                'AC_INIT',
                'AM_INIT_AUTOMAKE',
                'define(',
                ]
    # Accumulate multi-line macro invocations (tracked via paren nesting)
    # before handing each complete one to process_macro()
    for srcfile in srcfiles:
        nesting = 0
        in_keyword = ''
        partial = ''
        with open(srcfile, 'r') as f:
            for line in f:
                if in_keyword:
                    partial += ' ' + line.strip()
                    if partial.endswith('\\'):
                        partial = partial[:-1]
                    nesting = nesting + line.count('(') - line.count(')')
                    if nesting == 0:
                        process_macro(in_keyword, partial)
                        partial = ''
                        in_keyword = ''
                else:
                    for keyword in keywords:
                        if keyword in line:
                            nesting = line.count('(') - line.count(')')
                            if nesting > 0:
                                partial = line.strip()
                                if partial.endswith('\\'):
                                    partial = partial[:-1]
                                in_keyword = keyword
                            else:
                                process_macro(keyword, line.strip())
                            break

        if in_keyword:
            process_macro(in_keyword, partial)

    if extravalues:
        # Iterate over a copy since entries may be deleted below
        for k, v in list(extravalues.items()):
            if v:
                if v.startswith('$') or v.startswith('@') or v.startswith('%'):
                    # Unresolved substitution - drop rather than emit garbage
                    del extravalues[k]
                else:
                    extravalues[k] = v.strip('"\'').rstrip('()')

    if unmapped:
        outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))

    RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect an out-of-tree Linux kernel module source tree and set up the
    recipe to build it via module.bbclass.

    Detection: any .c/.h file that #includes <linux/module.h> and has a
    Makefile in its own or parent directory. Returns True (and appends
    'buildsystem' to handled) when recognised, False otherwise.
    """
    # NOTE(review): bb.process is not referenced in this function body -
    # confirm whether the import is still needed
    import bb.process
    if 'buildsystem' in handled:
        return False

    module_inc_re = re.compile(r'^#include\s+<linux/module.h>$')
    makefiles = []
    is_module = False

    # NOTE(review): duplicate initialization of makefiles - harmless
    makefiles = []

    files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True)
    if files:
        for cfile in files:
            # Look in same dir or parent for Makefile
            for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
                if makefile in makefiles:
                    break
                else:
                    if os.path.exists(makefile):
                        makefiles.append(makefile)
                        break
            else:
                # No Makefile found for this source file - skip scanning it
                continue
            # Only scan sources that have an associated Makefile
            with open(cfile, 'r') as f:
                for line in f:
                    if module_inc_re.match(line.strip()):
                        is_module = True
                        break
            if is_module:
                break
    if is_module:
        classes.append('module')
        handled.append('buildsystem')
        # module.bbclass and the classes it inherits do most of the hard
        # work, but we need to tweak it slightly depending on what the
        # Makefile does (and there is a range of those)
        # Check the makefile for the appropriate install target
        install_lines = []
        compile_lines = []
        in_install = False
        in_compile = False
        install_target = None
        # Collect the recipe (tab-indented) lines under the install and
        # compile targets; modules_install/modules take precedence over
        # plain install/all/default
        with open(makefile, 'r') as f:
            for line in f:
                if line.startswith('install:'):
                    if not install_lines:
                        in_install = True
                        install_target = 'install'
                elif line.startswith('modules_install:'):
                    install_lines = []
                    in_install = True
                    install_target = 'modules_install'
                elif line.startswith('modules:'):
                    compile_lines = []
                    in_compile = True
                elif line.startswith(('all:', 'default:')):
                    if not compile_lines:
                        in_compile = True
                elif line:
                    if line[0] == '\t':
                        # Recipe line belonging to the current target
                        if in_install:
                            install_lines.append(line)
                        elif in_compile:
                            compile_lines.append(line)
                    elif ':' in line:
                        # A new rule header ends the current target's recipe
                        in_install = False
                        in_compile = False

        def check_target(lines, install):
            # Returns (kdirpath, manual_install): the `make -C <dir>` kernel
            # directory argument, and whether .ko files are installed via a
            # plain `install` command
            # NOTE(review): a whitespace-only recipe line would make
            # splitline empty and raise IndexError on splitline[0] - confirm
            kdirpath = ''
            manual_install = False
            for line in lines:
                splitline = line.split()
                if splitline[0] in ['make', 'gmake', '$(MAKE)']:
                    if '-C' in splitline:
                        idx = splitline.index('-C') + 1
                        if idx < len(splitline):
                            kdirpath = splitline[idx]
                            break
                elif install and splitline[0] == 'install':
                    if '.ko' in line:
                        manual_install = True
            return kdirpath, manual_install

        kdirpath = None
        manual_install = False
        if install_lines:
            kdirpath, manual_install = check_target(install_lines, install=True)
        if compile_lines and not kdirpath:
            kdirpath, _ = check_target(compile_lines, install=False)

        if manual_install or not install_lines:
            # No usable install target - drive the kernel build system directly
            lines_after.append('EXTRA_OEMAKE_append_task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
        elif install_target and install_target != 'modules_install':
            lines_after.append('MODULES_INSTALL_TARGET = "install"')

        warnmsg = None
        kdirvar = None
        if kdirpath:
            # If the kernel dir is passed as a make variable, pass it in via
            # EXTRA_OEMAKE (KERNEL_SRC is already handled by module.bbclass)
            res = re.match(r'\$\(([^$)]+)\)', kdirpath)
            if res:
                kdirvar = res.group(1)
                if kdirvar != 'KERNEL_SRC':
                    lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
            elif kdirpath.startswith('/lib/'):
                warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
        if not kdirvar and not warnmsg:
            warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
        if warnmsg:
            warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
            logger.warn(warnmsg)
            lines_after.append('# %s' % warnmsg)

        return True

    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Handle the npm recipe creation.

    Reads package.json for PN/PV/SUMMARY/HOMEPAGE, generates an
    npm-shrinkwrap.json (installing dependencies with the native npm),
    rewrites SRC_URI to use the npmsw fetcher, re-fetches dependencies to
    compute licence checksums, and inherits the npm class. Returns True
    when handled, False otherwise.
    """
    if "buildsystem" in handled:
        return False

    files = RecipeHandler.checkfiles(srctree, ["package.json"])
    if not files:
        return False

    with open(files[0], "r") as f:
        data = json.load(f)

    # name and version are both mandatory to build a recipe
    if "name" not in data or "version" not in data:
        return False

    extravalues["PN"] = self._npm_name(data["name"])
    extravalues["PV"] = data["version"]

    if "description" in data:
        extravalues["SUMMARY"] = data["description"]

    if "homepage" in data:
        extravalues["HOMEPAGE"] = data["homepage"]

    dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
    registry = self._get_registry(lines_before)

    bb.note("Checking if npm is available ...")
    # The native npm is used here (and not the host one) to ensure that the
    # npm version is high enough to ensure an efficient dependency tree
    # resolution and avoid issue with the shrinkwrap file format.
    # Moreover the native npm is mandatory for the build.
    bindir = self._ensure_npm()

    d = bb.data.createCopy(TINFOIL.config_data)
    d.prependVar("PATH", bindir + ":")
    d.setVar("S", srctree)

    bb.note("Generating shrinkwrap file ...")
    # To generate the shrinkwrap file the dependencies have to be installed
    # first. During the generation process some files may be updated /
    # deleted. By default devtool tracks the diffs in the srctree and raises
    # errors when finishing the recipe if some diffs are found.
    git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
    if os.path.exists(git_exclude_file):
        with open(git_exclude_file, "r+") as f:
            lines = f.readlines()
            for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
                # BUGFIX: readlines() keeps the trailing newline, so the
                # membership test must use the newline-terminated form -
                # otherwise the patterns were re-appended on every run
                if line + "\n" not in lines:
                    f.write(line + "\n")

    # Preserve any existing package-lock.json across the install
    lock_file = os.path.join(srctree, "package-lock.json")
    lock_copy = lock_file + ".copy"
    if os.path.exists(lock_file):
        bb.utils.copyfile(lock_file, lock_copy)

    self._run_npm_install(d, srctree, registry, dev)
    shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)

    if os.path.exists(lock_copy):
        bb.utils.movefile(lock_copy, lock_file)

    # Add the shrinkwrap file as 'extrafiles'
    shrinkwrap_copy = shrinkwrap_file + ".copy"
    bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
    extravalues.setdefault("extrafiles", {})
    extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy

    url_local = "npmsw://%s" % shrinkwrap_file
    url_recipe = "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"

    if dev:
        url_local += ";dev=1"
        url_recipe += ";dev=1"

    # Add the npmsw url in the SRC_URI of the generated recipe
    def _handle_srcuri(varname, origvalue, op, newlines):
        """Update the version value and add the 'npmsw://' url"""
        value = origvalue.replace("version=" + data["version"], "version=${PV}")
        value = value.replace("version=latest", "version=${PV}")
        values = [line.strip() for line in value.strip('\n').splitlines()]
        values.append(url_recipe)
        return values, None, 4, False

    (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
    lines_before[:] = [line.rstrip('\n') for line in newlines]

    # In order to generate correct licence checksums in the recipe the
    # dependencies have to be fetched again using the npmsw url
    bb.note("Fetching npm dependencies ...")
    bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
    fetcher = bb.fetch2.Fetch([url_local], d)
    fetcher.download()
    fetcher.unpack(srctree)

    bb.note("Handling licences ...")
    (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
    extravalues["LIC_FILES_CHKSUM"] = licfiles
    split_pkg_licenses(guess_license(srctree, d), packages, lines_after, [])

    classes.append("npm")
    handled.append("buildsystem")

    return True
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
    """Extract build dependencies from a CMake-based source tree.

    Parses CMakeLists.txt (or the explicit cmakelistsfile), recursing
    through include() and add_subdirectory() directives, and maps
    pkg_check_modules / find_package / find_library / check_library_exists
    usages onto recipe dependencies. Sets extravalues['PN'] from project().
    Returns a dict of recipe variable values; an 'inherit' entry lists
    classes to inherit.
    """
    # Find all plugins that want to register handlers
    logger.debug('Loading cmake handlers')
    handlers = []
    for plugin in plugins:
        if hasattr(plugin, 'register_cmake_handlers'):
            plugin.register_cmake_handlers(handlers)

    values = {}
    inherits = []

    if cmakelistsfile:
        srcfiles = [cmakelistsfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])

    # Note that some of these are non-standard, but probably better to
    # be able to map them anyway if we see them
    # An empty-string value means "known, but maps to no recipe dependency"
    cmake_pkgmap = {'alsa': 'alsa-lib',
                    'aspell': 'aspell',
                    'atk': 'atk',
                    'bison': 'bison-native',
                    'boost': 'boost',
                    'bzip2': 'bzip2',
                    'cairo': 'cairo',
                    'cups': 'cups',
                    'curl': 'curl',
                    'curses': 'ncurses',
                    'cvs': 'cvs',
                    'drm': 'libdrm',
                    'dbus': 'dbus',
                    'dbusglib': 'dbus-glib',
                    'egl': 'virtual/egl',
                    'expat': 'expat',
                    'flex': 'flex-native',
                    'fontconfig': 'fontconfig',
                    'freetype': 'freetype',
                    'gettext': '',
                    'git': '',
                    'gio': 'glib-2.0',
                    'giounix': 'glib-2.0',
                    'glew': 'glew',
                    'glib': 'glib-2.0',
                    'glib2': 'glib-2.0',
                    'glu': 'libglu',
                    'glut': 'freeglut',
                    'gobject': 'glib-2.0',
                    'gperf': 'gperf-native',
                    'gnutls': 'gnutls',
                    'gtk2': 'gtk+',
                    'gtk3': 'gtk+3',
                    'gtk': 'gtk+3',
                    'harfbuzz': 'harfbuzz',
                    'icu': 'icu',
                    'intl': 'virtual/libintl',
                    'jpeg': 'jpeg',
                    'libarchive': 'libarchive',
                    'libiconv': 'virtual/libiconv',
                    'liblzma': 'xz',
                    'libxml2': 'libxml2',
                    'libxslt': 'libxslt',
                    'opengl': 'virtual/libgl',
                    'openmp': '',
                    'openssl': 'openssl',
                    'pango': 'pango',
                    'perl': '',
                    'perllibs': '',
                    'pkgconfig': '',
                    'png': 'libpng',
                    'pthread': '',
                    'pythoninterp': '',
                    'pythonlibs': '',
                    'ruby': 'ruby-native',
                    'sdl': 'libsdl',
                    'sdl2': 'libsdl2',
                    'subversion': 'subversion-native',
                    'swig': 'swig-native',
                    'tcl': 'tcl-native',
                    'threads': '',
                    'tiff': 'tiff',
                    'wget': 'wget',
                    'x11': 'libx11',
                    'xcb': 'libxcb',
                    'xext': 'libxext',
                    'xfixes': 'libxfixes',
                    'zlib': 'zlib',
                    }

    pcdeps = []
    libdeps = []
    deps = []
    unmappedpkgs = []

    proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
    pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
    pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
    findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
    findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
    checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
    include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
    subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
    dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')

    def find_cmake_package(pkg):
        # Look in pkgdata for a recipe shipping a matching
        # *-config.cmake / *Config.cmake / Find*.cmake file
        RecipeHandler.load_devel_filemap(tinfoil.config_data)
        # NOTE(review): .iteritems() is Python 2-only - confirm target version
        for fn, pn in RecipeHandler.recipecmakefilemap.iteritems():
            splitname = fn.split('/')
            if len(splitname) > 1:
                if splitname[0].lower().startswith(pkg.lower()):
                    if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
                        return pn
        return None

    def interpret_value(value):
        # Strip surrounding double quotes, if any
        return value.strip('"')

    def parse_cmake_file(fn, paths=None):
        searchpaths = (paths or []) + [os.path.dirname(fn)]
        logger.debug('Parsing file %s' % fn)
        with open(fn, 'r') as f:
            for line in f:
                line = line.strip()
                # Give plugin handlers first crack at each line
                for handler in handlers:
                    if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
                        continue
                res = include_re.match(line)
                if res:
                    includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
                    if includefn:
                        parse_cmake_file(includefn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into include file %s' % res.group(1))
                    continue
                res = subdir_re.match(line)
                if res:
                    subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
                    if os.path.exists(subdirfn):
                        parse_cmake_file(subdirfn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
                    continue
                res = proj_re.match(line)
                if res:
                    extravalues['PN'] = interpret_value(res.group(1).split()[0])
                    continue
                res = pkgcm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        pcdeps.extend([interpret_value(x[0]) for x in res])
                    inherits.append('pkgconfig')
                    continue
                res = pkgsm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        # Note: appending a tuple here!
                        item = tuple((interpret_value(x[0]) for x in res))
                        if len(item) == 1:
                            item = item[0]
                        pcdeps.append(item)
                    inherits.append('pkgconfig')
                    continue
                res = findpackage_re.match(line)
                if res:
                    origpkg = res.group(1)
                    pkg = interpret_value(origpkg)
                    found = False
                    for handler in handlers:
                        if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
                            logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
                            found = True
                            break
                    if found:
                        continue
                    elif pkg == 'Gettext':
                        inherits.append('gettext')
                    elif pkg == 'Perl':
                        inherits.append('perlnative')
                    elif pkg == 'PkgConfig':
                        inherits.append('pkgconfig')
                    elif pkg == 'PythonInterp':
                        inherits.append('pythonnative')
                    elif pkg == 'PythonLibs':
                        inherits.append('python-dir')
                    else:
                        # Try to map via looking at installed CMake packages in pkgdata
                        dep = find_cmake_package(pkg)
                        if dep:
                            logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
                            deps.append(dep)
                        else:
                            dep = cmake_pkgmap.get(pkg.lower(), None)
                            if dep:
                                logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
                                deps.append(dep)
                            elif dep is None:
                                # Empty string means "known, no dep" - only
                                # None (not found at all) is reported
                                unmappedpkgs.append(origpkg)
                    continue
                res = checklib_re.match(line)
                if res:
                    lib = interpret_value(res.group(1))
                    if not lib.startswith('$'):
                        libdeps.append(lib)
                res = findlibrary_re.match(line)
                if res:
                    libs = res.group(2).split()
                    for lib in libs:
                        # Stop at option keywords that follow the library names
                        if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
                            break
                        lib = interpret_value(lib)
                        if not lib.startswith('$'):
                            libdeps.append(lib)
                if line.lower().startswith('useswig'):
                    deps.append('swig-native')
                    continue

    parse_cmake_file(srcfiles[0])

    if unmappedpkgs:
        outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))

    RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

    for handler in handlers:
        handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
    """Derive build dependencies from a CMake-based project.

    Walks the top-level CMakeLists.txt (or the explicitly supplied
    cmakelistsfile), recursing through include() and add_subdirectory()
    directives, and maps discovered packages/modules/libraries onto recipe
    dependencies. extravalues['PN'] is filled in from project(). Returns a
    dict of recipe variable values; 'inherit' lists classes to inherit.
    """
    # Allow plugins to register their own cmake line/package handlers first
    logger.debug("Loading cmake handlers")
    cmake_handlers = []
    for plugin in plugins:
        if hasattr(plugin, "register_cmake_handlers"):
            plugin.register_cmake_handlers(cmake_handlers)

    extracted = {}
    inherit_list = []

    if cmakelistsfile:
        srcfiles = [cmakelistsfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ["CMakeLists.txt"])

    # Note that some of these are non-standard, but probably better to
    # be able to map them anyway if we see them. An empty value means the
    # package is recognised but needs no recipe dependency.
    cmake_pkgmap = {
        "alsa": "alsa-lib",
        "aspell": "aspell",
        "atk": "atk",
        "bison": "bison-native",
        "boost": "boost",
        "bzip2": "bzip2",
        "cairo": "cairo",
        "cups": "cups",
        "curl": "curl",
        "curses": "ncurses",
        "cvs": "cvs",
        "drm": "libdrm",
        "dbus": "dbus",
        "dbusglib": "dbus-glib",
        "egl": "virtual/egl",
        "expat": "expat",
        "flex": "flex-native",
        "fontconfig": "fontconfig",
        "freetype": "freetype",
        "gettext": "",
        "git": "",
        "gio": "glib-2.0",
        "giounix": "glib-2.0",
        "glew": "glew",
        "glib": "glib-2.0",
        "glib2": "glib-2.0",
        "glu": "libglu",
        "glut": "freeglut",
        "gobject": "glib-2.0",
        "gperf": "gperf-native",
        "gnutls": "gnutls",
        "gtk2": "gtk+",
        "gtk3": "gtk+3",
        "gtk": "gtk+3",
        "harfbuzz": "harfbuzz",
        "icu": "icu",
        "intl": "virtual/libintl",
        "jpeg": "jpeg",
        "libarchive": "libarchive",
        "libiconv": "virtual/libiconv",
        "liblzma": "xz",
        "libxml2": "libxml2",
        "libxslt": "libxslt",
        "opengl": "virtual/libgl",
        "openmp": "",
        "openssl": "openssl",
        "pango": "pango",
        "perl": "",
        "perllibs": "",
        "pkgconfig": "",
        "png": "libpng",
        "pthread": "",
        "pythoninterp": "",
        "pythonlibs": "",
        "ruby": "ruby-native",
        "sdl": "libsdl",
        "sdl2": "libsdl2",
        "subversion": "subversion-native",
        "swig": "swig-native",
        "tcl": "tcl-native",
        "threads": "",
        "tiff": "tiff",
        "wget": "wget",
        "x11": "libx11",
        "xcb": "libxcb",
        "xext": "libxext",
        "xfixes": "libxfixes",
        "zlib": "zlib",
    }

    pc_deps = []
    lib_deps = []
    dep_recipes = []
    unmapped_packages = []

    proj_re = re.compile(r"project\s*\(([^)]*)\)", re.IGNORECASE)
    pkgcm_re = re.compile(r"pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)", re.IGNORECASE)
    pkgsm_re = re.compile(r"pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)", re.IGNORECASE)
    findpackage_re = re.compile(r"find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*", re.IGNORECASE)
    findlibrary_re = re.compile(r"find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*")
    checklib_re = re.compile(r"check_library_exists\s*\(\s*([^\s)]+)\s*.*", re.IGNORECASE)
    include_re = re.compile(r"include\s*\(\s*([^)\s]*)\s*\)", re.IGNORECASE)
    subdir_re = re.compile(r"add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)", re.IGNORECASE)
    dep_re = re.compile(r"([^ ><=]+)( *[<>=]+ *[^ ><=]+)?")

    def find_cmake_package(pkg):
        # Search the pkgdata cmake file map for a recipe that ships a
        # matching *-config.cmake / *Config.cmake / Find*.cmake file
        RecipeHandler.load_devel_filemap(tinfoil.config_data)
        for path, pn in RecipeHandler.recipecmakefilemap.iteritems():
            parts = path.split("/")
            if len(parts) > 1:
                if parts[0].lower().startswith(pkg.lower()):
                    candidates = ("%s-config.cmake" % pkg.lower(),
                                  "%sConfig.cmake" % pkg,
                                  "Find%s.cmake" % pkg)
                    if parts[1] in candidates:
                        return pn
        return None

    def interpret_value(value):
        # Drop any surrounding double quotes
        return value.strip('"')

    def parse_cmake_file(path, parent_paths=None):
        search_dirs = (parent_paths or []) + [os.path.dirname(path)]
        logger.debug("Parsing file %s" % path)
        with open(path, "r") as f:
            for rawline in f:
                line = rawline.strip()
                # Plugin handlers get first crack at every line
                for handler in cmake_handlers:
                    if handler.process_line(srctree, path, line, lib_deps, pc_deps, dep_recipes, outlines, inherit_list, extracted):
                        continue
                m = include_re.match(line)
                if m:
                    includefn = bb.utils.which(":".join(search_dirs), m.group(1))
                    if includefn:
                        parse_cmake_file(includefn, search_dirs)
                    else:
                        logger.debug("Unable to recurse into include file %s" % m.group(1))
                    continue
                m = subdir_re.match(line)
                if m:
                    subdirfn = os.path.join(os.path.dirname(path), m.group(1), "CMakeLists.txt")
                    if os.path.exists(subdirfn):
                        parse_cmake_file(subdirfn, search_dirs)
                    else:
                        logger.debug("Unable to recurse into subdirectory file %s" % subdirfn)
                    continue
                m = proj_re.match(line)
                if m:
                    extracted_name = interpret_value(m.group(1).split()[0])
                    extravalues["PN"] = extracted_name
                    continue
                m = pkgcm_re.match(line)
                if m:
                    found_deps = dep_re.findall(m.group(2))
                    if found_deps:
                        pc_deps.extend([interpret_value(x[0]) for x in found_deps])
                    inherit_list.append("pkgconfig")
                    continue
                m = pkgsm_re.match(line)
                if m:
                    found_deps = dep_re.findall(m.group(2))
                    if found_deps:
                        # Note: appending a tuple here!
                        entry = tuple(interpret_value(x[0]) for x in found_deps)
                        if len(entry) == 1:
                            entry = entry[0]
                        pc_deps.append(entry)
                    inherit_list.append("pkgconfig")
                    continue
                m = findpackage_re.match(line)
                if m:
                    pkg_raw = m.group(1)
                    pkg = interpret_value(pkg_raw)
                    handled_by_plugin = False
                    for handler in cmake_handlers:
                        if handler.process_findpackage(srctree, path, pkg, dep_recipes, outlines, inherit_list, extracted):
                            logger.debug("Mapped CMake package %s via handler %s" % (pkg, handler.__class__.__name__))
                            handled_by_plugin = True
                            break
                    if handled_by_plugin:
                        continue
                    elif pkg == "Gettext":
                        inherit_list.append("gettext")
                    elif pkg == "Perl":
                        inherit_list.append("perlnative")
                    elif pkg == "PkgConfig":
                        inherit_list.append("pkgconfig")
                    elif pkg == "PythonInterp":
                        inherit_list.append("pythonnative")
                    elif pkg == "PythonLibs":
                        inherit_list.append("python-dir")
                    else:
                        # Try to map via looking at installed CMake packages in pkgdata
                        dep = find_cmake_package(pkg)
                        if dep:
                            logger.debug("Mapped CMake package %s to recipe %s via pkgdata" % (pkg, dep))
                            dep_recipes.append(dep)
                        else:
                            dep = cmake_pkgmap.get(pkg.lower(), None)
                            if dep:
                                logger.debug("Mapped CMake package %s to recipe %s via internal list" % (pkg, dep))
                                dep_recipes.append(dep)
                            elif dep is None:
                                # '' means deliberately ignored; only report
                                # packages that are entirely unknown (None)
                                unmapped_packages.append(pkg_raw)
                    continue
                m = checklib_re.match(line)
                if m:
                    lib = interpret_value(m.group(1))
                    if not lib.startswith("$"):
                        lib_deps.append(lib)
                m = findlibrary_re.match(line)
                if m:
                    for lib in m.group(2).split():
                        # Option keywords terminate the NAMES list
                        if lib in ("HINTS", "PATHS", "PATH_SUFFIXES", "DOC", "NAMES_PER_DIR") or lib.startswith(("NO_", "CMAKE_", "ONLY_CMAKE_")):
                            break
                        lib = interpret_value(lib)
                        if not lib.startswith("$"):
                            lib_deps.append(lib)
                if line.lower().startswith("useswig"):
                    dep_recipes.append("swig-native")
                    continue

    parse_cmake_file(srcfiles[0])

    if unmapped_packages:
        outlines.append("# NOTE: unable to map the following CMake package dependencies: %s" % " ".join(list(set(unmapped_packages))))

    RecipeHandler.handle_depends(lib_deps, pc_deps, dep_recipes, outlines, extracted, tinfoil.config_data)

    for handler in cmake_handlers:
        handler.post_process(srctree, lib_deps, pc_deps, dep_recipes, outlines, inherit_list, extracted)

    if inherit_list:
        extracted["inherit"] = " ".join(list(set(inherit_list)))

    return extracted
def extract_autotools_deps(outlines, srctree, acfile=None):
    """Extract build dependencies from an autoconf configure script source.

    Scans configure.ac / configure.in (or the explicit acfile) line by
    line for PKG_CHECK_MODULES / AM_GNU_GETTEXT / AC_CHECK_PROG /
    AC_PATH_PROG / AC_CHECK_LIB / AC_PATH_X usages and maps them to recipe
    dependencies using pkgdata from previously built recipes. Returns a
    dict of recipe variable values (DEPENDS, and an 'inherit' entry
    listing classes to inherit); unmappable items are noted as comments
    appended to outlines.
    """
    import shlex
    import oe.package

    values = {}
    inherits = []

    # FIXME this mapping is very thin
    progmap = {'flex': 'flex-native', 'bison': 'bison-native', 'm4': 'm4-native'}
    progclassmap = {'gconftool-2': 'gconf', 'pkg-config': 'pkgconfig'}

    # Dependencies satisfied implicitly by the toolchain/libc
    ignoredeps = ['gcc-runtime', 'glibc', 'uclibc']

    pkg_re = re.compile('PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
    lib_re = re.compile('AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
    progs_re = re.compile('_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
    dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')

    # Build up lib library->package mapping
    shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
    # NOTE(review): .iteritems() / getVar(..., True) are Python 2-era /
    # old-bitbake APIs - this is the legacy variant of this function
    libdir = tinfoil.config_data.getVar('libdir', True)
    base_libdir = tinfoil.config_data.getVar('base_libdir', True)
    libpaths = list(set([base_libdir, libdir]))
    libname_re = re.compile('^lib(.+)\.so.*$')
    pkglibmap = {}
    for lib, item in shlib_providers.iteritems():
        for path, pkg in item.iteritems():
            if path in libpaths:
                res = libname_re.match(lib)
                if res:
                    libname = res.group(1)
                    if not libname in pkglibmap:
                        pkglibmap[libname] = pkg[0]
                else:
                    logger.debug('unable to extract library name from %s' % lib)

    # Now turn it into a library->recipe mapping
    recipelibmap = {}
    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
    for libname, pkg in pkglibmap.iteritems():
        try:
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                for line in f:
                    if line.startswith('PN:'):
                        recipelibmap[libname] = line.split(':', 1)[-1].strip()
                        break
        except IOError as ioe:
            # errno 2 == ENOENT: pkgdata file missing - warn but carry on
            if ioe.errno == 2:
                logger.warn('unable to find a pkgdata file for package %s' % pkg)
            else:
                raise

    # Since a configure.ac file is essentially a program, this is only ever going to be
    # a hack unfortunately; but it ought to be enough of an approximation
    if acfile:
        srcfiles = [acfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in'])
    pcdeps = []
    deps = []
    unmapped = []
    unmappedlibs = []
    with open(srcfiles[0], 'r') as f:
        for line in f:
            if 'PKG_CHECK_MODULES' in line:
                res = pkg_re.search(line)
                if res:
                    res = dep_re.findall(res.group(1))
                    if res:
                        pcdeps.extend([x[0] for x in res])
                inherits.append('pkgconfig')
            if line.lstrip().startswith('AM_GNU_GETTEXT'):
                inherits.append('gettext')
            elif 'AC_CHECK_PROG' in line or 'AC_PATH_PROG' in line:
                res = progs_re.search(line)
                if res:
                    for prog in shlex.split(res.group(1)):
                        prog = prog.split()[0]
                        progclass = progclassmap.get(prog, None)
                        if progclass:
                            inherits.append(progclass)
                        else:
                            progdep = progmap.get(prog, None)
                            if progdep:
                                deps.append(progdep)
                            else:
                                # $-prefixed names are unexpanded variables
                                if not prog.startswith('$'):
                                    unmapped.append(prog)
            elif 'AC_CHECK_LIB' in line:
                res = lib_re.search(line)
                if res:
                    lib = res.group(1)
                    libdep = recipelibmap.get(lib, None)
                    if libdep:
                        deps.append(libdep)
                    else:
                        if libdep is None:
                            if not lib.startswith('$'):
                                unmappedlibs.append(lib)
            elif 'AC_PATH_X' in line:
                deps.append('libx11')

    if unmapped:
        outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(unmapped))

    if unmappedlibs:
        outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(unmappedlibs))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    # Map the collected pkg-config module names to providing recipes
    recipemap = read_pkgconfig_provides(tinfoil.config_data)
    unmapped = []
    for pcdep in pcdeps:
        recipe = recipemap.get(pcdep, None)
        if recipe:
            deps.append(recipe)
        else:
            if not pcdep.startswith('$'):
                unmapped.append(pcdep)

    deps = set(deps).difference(set(ignoredeps))

    if unmapped:
        outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    if deps:
        values['DEPENDS'] = ' '.join(deps)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
    """Scan CMakeLists.txt (recursively, via include()/add_subdirectory())
    and derive recipe dependency and metadata information.

    Recognises project(), pkg_check_modules(), pkg_search_module(),
    find_package(), find_library(), check_library_exists() and UseSWIG,
    and gives registered plugin cmake handlers first refusal on each line.

    outlines: list of recipe lines; NOTE comments are appended to it.
    srctree: path to the unpacked source tree.
    extravalues: dict of extra recipe values; 'PN' may be set from project().
    cmakelistsfile: optional explicit path to the top-level CMakeLists.txt.

    Returns a dict of variable values to set in the recipe (e.g. 'inherit';
    RecipeHandler.handle_depends fills in dependency variables).
    """
    # Find all plugins that want to register handlers
    logger.debug('Loading cmake handlers')
    handlers = []
    for plugin in plugins:
        if hasattr(plugin, 'register_cmake_handlers'):
            plugin.register_cmake_handlers(handlers)

    values = {}
    inherits = []

    if cmakelistsfile:
        srcfiles = [cmakelistsfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])

    # Note that some of these are non-standard, but probably better to
    # be able to map them anyway if we see them
    # (an empty value means "known, but no dependency needs adding")
    cmake_pkgmap = {'alsa': 'alsa-lib',
                    'aspell': 'aspell',
                    'atk': 'atk',
                    'bison': 'bison-native',
                    'boost': 'boost',
                    'bzip2': 'bzip2',
                    'cairo': 'cairo',
                    'cups': 'cups',
                    'curl': 'curl',
                    'curses': 'ncurses',
                    'cvs': 'cvs',
                    'drm': 'libdrm',
                    'dbus': 'dbus',
                    'dbusglib': 'dbus-glib',
                    'egl': 'virtual/egl',
                    'expat': 'expat',
                    'flex': 'flex-native',
                    'fontconfig': 'fontconfig',
                    'freetype': 'freetype',
                    'gettext': '',
                    'git': '',
                    'gio': 'glib-2.0',
                    'giounix': 'glib-2.0',
                    'glew': 'glew',
                    'glib': 'glib-2.0',
                    'glib2': 'glib-2.0',
                    'glu': 'libglu',
                    'glut': 'freeglut',
                    'gobject': 'glib-2.0',
                    'gperf': 'gperf-native',
                    'gnutls': 'gnutls',
                    'gtk2': 'gtk+',
                    'gtk3': 'gtk+3',
                    'gtk': 'gtk+3',
                    'harfbuzz': 'harfbuzz',
                    'icu': 'icu',
                    'intl': 'virtual/libintl',
                    'jpeg': 'jpeg',
                    'libarchive': 'libarchive',
                    'libiconv': 'virtual/libiconv',
                    'liblzma': 'xz',
                    'libxml2': 'libxml2',
                    'libxslt': 'libxslt',
                    'opengl': 'virtual/libgl',
                    'openmp': '',
                    'openssl': 'openssl',
                    'pango': 'pango',
                    'perl': '',
                    'perllibs': '',
                    'pkgconfig': '',
                    'png': 'libpng',
                    'pthread': '',
                    'pythoninterp': '',
                    'pythonlibs': '',
                    'ruby': 'ruby-native',
                    'sdl': 'libsdl',
                    'sdl2': 'libsdl2',
                    'subversion': 'subversion-native',
                    'swig': 'swig-native',
                    'tcl': 'tcl-native',
                    'threads': '',
                    'tiff': 'tiff',
                    'wget': 'wget',
                    'x11': 'libx11',
                    'xcb': 'libxcb',
                    'xext': 'libxext',
                    'xfixes': 'libxfixes',
                    'zlib': 'zlib',
                    }

    pcdeps = []
    libdeps = []
    deps = []
    unmappedpkgs = []

    proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
    pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
    pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
    findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
    findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
    checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
    include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
    subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
    dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')

    def find_cmake_package(pkg):
        # Map a CMake package name to a recipe via the cmake config/module
        # files recorded in pkgdata from previously-built recipes
        RecipeHandler.load_devel_filemap(tinfoil.config_data)
        for fn, pn in RecipeHandler.recipecmakefilemap.items():
            splitname = fn.split('/')
            if len(splitname) > 1:
                if splitname[0].lower().startswith(pkg.lower()):
                    if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
                        return pn
        return None

    def interpret_value(value):
        # Strip CMake quoting from an argument value
        return value.strip('"')

    def parse_cmake_file(fn, paths=None):
        # Recursively parse a CMake file, following include() and
        # add_subdirectory() references
        searchpaths = (paths or []) + [os.path.dirname(fn)]
        logger.debug('Parsing file %s' % fn)
        with open(fn, 'r', errors='surrogateescape') as f:
            for line in f:
                line = line.strip()
                for handler in handlers:
                    if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
                        # NOTE(review): this `continue` only advances the inner
                        # handler loop, not the line loop — a handled line is
                        # still processed by the built-in matchers below; confirm
                        # whether that is intended.
                        continue
                res = include_re.match(line)
                if res:
                    includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
                    if includefn:
                        parse_cmake_file(includefn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into include file %s' % res.group(1))
                    continue
                res = subdir_re.match(line)
                if res:
                    subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
                    if os.path.exists(subdirfn):
                        parse_cmake_file(subdirfn, searchpaths)
                    else:
                        logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
                    continue
                res = proj_re.match(line)
                if res:
                    extravalues['PN'] = interpret_value(res.group(1).split()[0])
                    continue
                res = pkgcm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        pcdeps.extend([interpret_value(x[0]) for x in res])
                    inherits.append('pkgconfig')
                    continue
                res = pkgsm_re.match(line)
                if res:
                    res = dep_re.findall(res.group(2))
                    if res:
                        # Note: appending a tuple here!
                        item = tuple((interpret_value(x[0]) for x in res))
                        if len(item) == 1:
                            item = item[0]
                        pcdeps.append(item)
                    inherits.append('pkgconfig')
                    continue
                res = findpackage_re.match(line)
                if res:
                    origpkg = res.group(1)
                    pkg = interpret_value(origpkg)
                    found = False
                    for handler in handlers:
                        if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
                            logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
                            found = True
                            break
                    if found:
                        continue
                    elif pkg == 'Gettext':
                        inherits.append('gettext')
                    elif pkg == 'Perl':
                        inherits.append('perlnative')
                    elif pkg == 'PkgConfig':
                        inherits.append('pkgconfig')
                    elif pkg == 'PythonInterp':
                        inherits.append('pythonnative')
                    elif pkg == 'PythonLibs':
                        inherits.append('python-dir')
                    else:
                        # Try to map via looking at installed CMake packages in pkgdata
                        dep = find_cmake_package(pkg)
                        if dep:
                            logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
                            deps.append(dep)
                        else:
                            dep = cmake_pkgmap.get(pkg.lower(), None)
                            if dep:
                                logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
                                deps.append(dep)
                            elif dep is None:
                                unmappedpkgs.append(origpkg)
                    continue
                res = checklib_re.match(line)
                if res:
                    lib = interpret_value(res.group(1))
                    if not lib.startswith('$'):
                        libdeps.append(lib)
                res = findlibrary_re.match(line)
                if res:
                    libs = res.group(2).split()
                    for lib in libs:
                        # Stop at option keywords that follow the library names
                        if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
                            break
                        lib = interpret_value(lib)
                        if not lib.startswith('$'):
                            libdeps.append(lib)
                if line.lower().startswith('useswig'):
                    deps.append('swig-native')
                    continue

    parse_cmake_file(srcfiles[0])

    if unmappedpkgs:
        outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))

    RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

    for handler in handlers:
        handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect an out-of-tree kernel module source tree and set up the recipe.

    Looks for C files including <linux/module.h>; if found, inherits the
    module bbclass and inspects the accompanying Makefile to work out how
    the kernel source path is passed in and which install target to use.

    Returns True if the buildsystem was handled, False otherwise.
    Side effects: appends to classes/handled and lines_after.
    """
    import bb.process
    if 'buildsystem' in handled:
        return False

    module_inc_re = re.compile(r'^#include\s+<linux/module.h>$')
    is_module = False
    # Fix: the original initialized makefiles twice; once is enough
    makefiles = []

    files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True)
    if files:
        for cfile in files:
            # Look in same dir or parent for Makefile
            for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
                if makefile in makefiles:
                    break
                else:
                    if os.path.exists(makefile):
                        makefiles.append(makefile)
                        break
            else:
                # No Makefile near this source file; skip scanning it
                continue
            with open(cfile, 'r') as f:
                for line in f:
                    if module_inc_re.match(line.strip()):
                        is_module = True
                        break
            if is_module:
                break

    if is_module:
        classes.append('module')
        handled.append('buildsystem')
        # module.bbclass and the classes it inherits do most of the hard
        # work, but we need to tweak it slightly depending on what the
        # Makefile does (and there is a range of those)
        # Check the makefile for the appropriate install target
        install_lines = []
        compile_lines = []
        in_install = False
        in_compile = False
        install_target = None
        with open(makefile, 'r') as f:
            for line in f:
                if line.startswith('install:'):
                    if not install_lines:
                        in_install = True
                        install_target = 'install'
                elif line.startswith('modules_install:'):
                    # modules_install takes precedence over a plain install target
                    install_lines = []
                    in_install = True
                    install_target = 'modules_install'
                elif line.startswith('modules:'):
                    compile_lines = []
                    in_compile = True
                elif line.startswith(('all:', 'default:')):
                    if not compile_lines:
                        in_compile = True
                elif line:
                    if line[0] == '\t':
                        # Recipe line belonging to the current target
                        if in_install:
                            install_lines.append(line)
                        elif in_compile:
                            compile_lines.append(line)
                    elif ':' in line:
                        # A new target terminates the one we were collecting
                        in_install = False
                        in_compile = False

        def check_target(lines, install):
            # Scan collected recipe lines for a make -C <kdir> invocation and
            # (for install targets) a manual "install ... .ko" command
            kdirpath = ''
            manual_install = False
            for line in lines:
                splitline = line.split()
                if not splitline:
                    # Fix: a tab-only/blank recipe line would raise IndexError below
                    continue
                if splitline[0] in ['make', 'gmake', '$(MAKE)']:
                    if '-C' in splitline:
                        idx = splitline.index('-C') + 1
                        if idx < len(splitline):
                            kdirpath = splitline[idx]
                            break
                elif install and splitline[0] == 'install':
                    if '.ko' in line:
                        manual_install = True
            return kdirpath, manual_install

        kdirpath = None
        manual_install = False
        if install_lines:
            kdirpath, manual_install = check_target(install_lines, install=True)
        if compile_lines and not kdirpath:
            kdirpath, _ = check_target(compile_lines, install=False)

        if manual_install or not install_lines:
            lines_after.append('EXTRA_OEMAKE_append_task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
        elif install_target and install_target != 'modules_install':
            lines_after.append('MODULES_INSTALL_TARGET = "install"')

        warnmsg = None
        kdirvar = None
        if kdirpath:
            res = re.match(r'\$\(([^$)]+)\)', kdirpath)
            if res:
                kdirvar = res.group(1)
                if kdirvar != 'KERNEL_SRC':
                    lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
            elif kdirpath.startswith('/lib/'):
                warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
        if not kdirvar and not warnmsg:
            warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
        if warnmsg:
            warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
            logger.warn(warnmsg)
            lines_after.append('# %s' % warnmsg)

        return True

    return False
def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
    """Scan autoconf input files and derive recipe dependency/metadata info.

    Handles multi-line macros by tracking parenthesis nesting, expands
    m4_define()d values (including m4_esyscmd) and extracts PN/PV from
    AC_INIT / AM_INIT_AUTOMAKE into *extravalues* when possible.

    outlines: list of recipe lines; NOTE comments are appended to it.
    srctree: path to the unpacked source tree.
    extravalues: optional dict to receive 'PN'/'PV'.
    acfile: optional explicit path to the autoconf input file.

    Returns a dict of variable values to set in the recipe; may contain
    'DEPENDS' and 'inherit' keys.
    """
    import shlex
    import oe.package

    values = {}
    inherits = []

    # FIXME this mapping is very thin
    progmap = {'flex': 'flex-native', 'bison': 'bison-native', 'm4': 'm4-native', 'tar': 'tar-native', 'ar': 'binutils-native'}
    progclassmap = {'gconftool-2': 'gconf', 'pkg-config': 'pkgconfig'}

    # Dependencies satisfied implicitly by the toolchain/libc
    ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'tar-native', 'binutils-native']
    ignorelibs = ['socket']

    pkg_re = re.compile('PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
    lib_re = re.compile('AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
    progs_re = re.compile('_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
    dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
    ac_init_re = re.compile('AC_INIT\(([^,]+), *([^,]+)[,)].*')
    am_init_re = re.compile('AM_INIT_AUTOMAKE\(([^,]+), *([^,]+)[,)].*')
    define_re = re.compile(' *(m4_)?define\(([^,]+), *([^,]+)\)')

    # Build up lib library->package mapping
    shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
    libdir = tinfoil.config_data.getVar('libdir', True)
    base_libdir = tinfoil.config_data.getVar('base_libdir', True)
    libpaths = list(set([base_libdir, libdir]))
    libname_re = re.compile('^lib(.+)\.so.*$')
    pkglibmap = {}
    for lib, item in shlib_providers.iteritems():
        for path, pkg in item.iteritems():
            if path in libpaths:
                res = libname_re.match(lib)
                if res:
                    libname = res.group(1)
                    # First provider wins; later duplicates are ignored
                    if not libname in pkglibmap:
                        pkglibmap[libname] = pkg[0]
                else:
                    logger.debug('unable to extract library name from %s' % lib)

    # Now turn it into a library->recipe mapping
    recipelibmap = {}
    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
    for libname, pkg in pkglibmap.iteritems():
        try:
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                for line in f:
                    if line.startswith('PN:'):
                        recipelibmap[libname] = line.split(':', 1)[-1].strip()
                        break
        except IOError as ioe:
            # errno 2 == ENOENT: missing pkgdata file is survivable
            if ioe.errno == 2:
                logger.warn('unable to find a pkgdata file for package %s' % pkg)
            else:
                raise

    defines = {}

    def subst_defines(value):
        # Recursively substitute any known m4_define()d values into value
        newvalue = value
        for define, defval in defines.iteritems():
            newvalue = newvalue.replace(define, defval)
        if newvalue != value:
            return subst_defines(newvalue)
        return value

    def process_value(value):
        # Resolve a macro argument to a plain string (or None if it is an
        # m4 construct we cannot evaluate)
        value = value.replace('[', '').replace(']', '')
        if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
            cmd = subst_defines(value[value.index('(')+1:-1])
            try:
                if '|' in cmd:
                    cmd = 'set -o pipefail; ' + cmd
                stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
                ret = stdout.rstrip()
            except bb.process.ExecutionError as e:
                ret = ''
        elif value.startswith('m4_'):
            return None
        else:
            # Fix: this assignment was unconditional, which clobbered the
            # result computed by the m4_esyscmd branch above
            ret = subst_defines(value)
        if ret:
            ret = ret.strip('"\'')
        return ret

    # Since a configure.ac file is essentially a program, this is only ever going to be
    # a hack unfortunately; but it ought to be enough of an approximation
    if acfile:
        srcfiles = [acfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])

    pcdeps = []
    deps = []
    unmapped = []
    unmappedlibs = []

    def process_macro(keyword, value):
        # Dispatch a single (possibly reassembled multi-line) macro call
        if keyword == 'PKG_CHECK_MODULES':
            res = pkg_re.search(value)
            if res:
                res = dep_re.findall(res.group(1))
                if res:
                    pcdeps.extend([x[0] for x in res])
            inherits.append('pkgconfig')
        elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
            inherits.append('gettext')
        elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
            deps.append('intltool-native')
        elif keyword == 'AM_PATH_GLIB_2_0':
            deps.append('glib-2.0')
        elif keyword == 'AC_CHECK_PROG' or keyword == 'AC_PATH_PROG':
            res = progs_re.search(value)
            if res:
                for prog in shlex.split(res.group(1)):
                    prog = prog.split()[0]
                    progclass = progclassmap.get(prog, None)
                    if progclass:
                        inherits.append(progclass)
                    else:
                        progdep = progmap.get(prog, None)
                        if progdep:
                            deps.append(progdep)
                        else:
                            if not prog.startswith('$'):
                                unmapped.append(prog)
        elif keyword == 'AC_CHECK_LIB':
            res = lib_re.search(value)
            if res:
                lib = res.group(1)
                if lib in ignorelibs:
                    logger.debug('Ignoring library dependency %s' % lib)
                else:
                    libdep = recipelibmap.get(lib, None)
                    if libdep:
                        deps.append(libdep)
                    else:
                        # libdep == '' means deliberately ignored; only None is unmapped
                        if libdep is None:
                            if not lib.startswith('$'):
                                unmappedlibs.append(lib)
        elif keyword == 'AC_PATH_X':
            deps.append('libx11')
        elif keyword == 'AC_INIT':
            if extravalues is not None:
                res = ac_init_re.match(value)
                if res:
                    extravalues['PN'] = process_value(res.group(1))
                    pv = process_value(res.group(2))
                    if validate_pv(pv):
                        extravalues['PV'] = pv
        elif keyword == 'AM_INIT_AUTOMAKE':
            if extravalues is not None:
                if 'PN' not in extravalues:
                    res = am_init_re.match(value)
                    if res:
                        if res.group(1) != 'AC_PACKAGE_NAME':
                            extravalues['PN'] = process_value(res.group(1))
                        pv = process_value(res.group(2))
                        if validate_pv(pv):
                            extravalues['PV'] = pv
        elif keyword == 'define(':
            res = define_re.match(value)
            if res:
                key = res.group(2).strip('[]')
                value = process_value(res.group(3))
                if value is not None:
                    defines[key] = value

    keywords = ['PKG_CHECK_MODULES',
                'AM_GNU_GETTEXT',
                'AM_GLIB_GNU_GETTEXT',
                'GETTEXT_PACKAGE',
                'AC_PROG_INTLTOOL',
                'IT_PROG_INTLTOOL',
                'AM_PATH_GLIB_2_0',
                'AC_CHECK_PROG',
                'AC_PATH_PROG',
                'AC_CHECK_LIB',
                'AC_PATH_X',
                'AC_INIT',
                'AM_INIT_AUTOMAKE',
                'define(',
                ]
    for srcfile in srcfiles:
        # Reassemble macro calls that span multiple lines by tracking
        # parenthesis nesting before dispatching to process_macro()
        nesting = 0
        in_keyword = ''
        partial = ''
        with open(srcfile, 'r') as f:
            for line in f:
                if in_keyword:
                    partial += ' ' + line.strip()
                    if partial.endswith('\\'):
                        partial = partial[:-1]
                    nesting = nesting + line.count('(') - line.count(')')
                    if nesting == 0:
                        process_macro(in_keyword, partial)
                        partial = ''
                        in_keyword = ''
                else:
                    for keyword in keywords:
                        if keyword in line:
                            nesting = line.count('(') - line.count(')')
                            if nesting > 0:
                                partial = line.strip()
                                if partial.endswith('\\'):
                                    partial = partial[:-1]
                                in_keyword = keyword
                            else:
                                process_macro(keyword, line.strip())
                            break

        if in_keyword:
            process_macro(in_keyword, partial)

    if extravalues:
        # Fix: snapshot the items since we may delete keys during iteration
        # (mutating a dict while iterating it raises RuntimeError on Python 3)
        for k, v in list(extravalues.items()):
            if v:
                if v.startswith('$') or v.startswith('@') or v.startswith('%'):
                    # Unresolved shell/m4/make variable reference - drop it
                    del extravalues[k]
                else:
                    extravalues[k] = v.strip('"\'').rstrip('()')

    if unmapped:
        outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))

    if unmappedlibs:
        outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    recipemap = read_pkgconfig_provides(tinfoil.config_data)
    unmapped = []
    for pcdep in pcdeps:
        recipe = recipemap.get(pcdep, None)
        if recipe:
            deps.append(recipe)
        else:
            if not pcdep.startswith('$'):
                unmapped.append(pcdep)

    deps = set(deps).difference(set(ignoredeps))

    if unmapped:
        outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    if deps:
        values['DEPENDS'] = ' '.join(deps)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Handle a Python distutils/setuptools source tree (setup.py).

    Extracts metadata from egg-info/PKG-INFO/setup.py, maps license
    classifiers and dependency names to bitbake conventions, and writes
    PACKAGECONFIG/RDEPENDS suggestions into the generated recipe.

    Side effects: appends to classes/handled, inserts into lines_before,
    appends to lines_after and fills extravalues with mapped variables.
    """
    if 'buildsystem' in handled:
        return False

    if not RecipeHandler.checkfiles(srctree, ['setup.py']):
        # NOTE(review): other exits return True/False; this bare return
        # yields None (falsy, so callers behave the same)
        return

    # setup.py is always parsed to get at certain required information, such as
    # distutils vs setuptools
    #
    # If egg info is available, we use it for both its PKG-INFO metadata
    # and for its requires.txt for install_requires.
    # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
    # the parsed setup.py, but use the install_requires info from the
    # parsed setup.py.

    setupscript = os.path.join(srctree, 'setup.py')
    try:
        setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
    except Exception:
        logger.exception("Failed to parse setup.py")
        # Fall back to safe defaults so recipe generation can continue
        setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []

    egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
    if egginfo:
        info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
        requires_txt = os.path.join(egginfo[0], 'requires.txt')
        if os.path.exists(requires_txt):
            with codecs.open(requires_txt) as f:
                inst_req = []
                extras_req = collections.defaultdict(list)
                current_feature = None
                for line in f.readlines():
                    line = line.rstrip()
                    if not line:
                        continue

                    if line.startswith('['):
                        # "[feature]" section header: subsequent lines are extras
                        current_feature = line[1:-1]
                    elif current_feature:
                        extras_req[current_feature].append(line)
                    else:
                        inst_req.append(line)
                info['Install-requires'] = inst_req
                info['Extras-require'] = extras_req
    elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
        info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))

        if setup_info:
            if 'Install-requires' in setup_info:
                info['Install-requires'] = setup_info['Install-requires']
            if 'Extras-require' in setup_info:
                info['Extras-require'] = setup_info['Extras-require']
    else:
        if setup_info:
            info = setup_info
        else:
            info = self.get_setup_args_info(setupscript)

    # Grab the license value before applying replacements
    license_str = info.get('License', '').strip()

    self.apply_info_replacements(info)

    if uses_setuptools:
        classes.append('setuptools')
    else:
        classes.append('distutils')

    if license_str:
        for i, line in enumerate(lines_before):
            if line.startswith('LICENSE = '):
                lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
                break

    if 'Classifier' in info:
        existing_licenses = info.get('License', '')
        licenses = []
        for classifier in info['Classifier']:
            if classifier in self.classifier_license_map:
                license = self.classifier_license_map[classifier]
                # Disambiguate generic classifier licenses using the
                # free-text License field where possible
                if license == 'Apache' and 'Apache-2.0' in existing_licenses:
                    license = 'Apache-2.0'
                elif license == 'GPL':
                    if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
                        license = 'GPL-2.0'
                    elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
                        license = 'GPL-3.0'
                elif license == 'LGPL':
                    if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
                        license = 'LGPL-2.1'
                    elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
                        license = 'LGPL-2.0'
                    elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
                        license = 'LGPL-3.0'
                licenses.append(license)

        if licenses:
            info['License'] = ' & '.join(licenses)

    # Map PKG-INFO & setup.py fields to bitbake variables
    for field, values in info.items():
        if field in self.excluded_fields:
            continue

        if field not in self.bbvar_map:
            continue

        if isinstance(values, str):
            value = values
        else:
            value = ' '.join(str(v) for v in values if v)

        bbvar = self.bbvar_map[field]
        if bbvar not in extravalues and value:
            extravalues[bbvar] = value

    mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)

    extras_req = set()
    if 'Extras-require' in info:
        extras_req = info['Extras-require']

    if extras_req:
        lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
        lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
        lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
        lines_after.append('#')
        lines_after.append('# Uncomment this line to enable all the optional features.')
        lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
        for feature, feature_reqs in extras_req.items():
            unmapped_deps.difference_update(feature_reqs)

            feature_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
            lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))

    inst_reqs = set()
    if 'Install-requires' in info:
        if extras_req:
            lines_after.append('')
        inst_reqs = info['Install-requires']
        if inst_reqs:
            unmapped_deps.difference_update(inst_reqs)

            inst_req_deps = ('python-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
            lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
            lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
            lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(inst_req_deps)))

    if mapped_deps:
        name = info.get('Name')
        if name and name[0] in mapped_deps:
            # Attempt to avoid self-reference
            mapped_deps.remove(name[0])
        mapped_deps -= set(self.excluded_pkgdeps)
        if inst_reqs or extras_req:
            lines_after.append('')
        lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
        lines_after.append('# python sources, and might not be 100% accurate.')
        lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))

    unmapped_deps -= set(extensions)
    unmapped_deps -= set(self.assume_provided)
    if unmapped_deps:
        if mapped_deps:
            lines_after.append('')
        lines_after.append('# WARNING: We were unable to map the following python package/module')
        lines_after.append('# dependencies to the bitbake packages which include them:')
        lines_after.extend('#    {}'.format(d) for d in sorted(unmapped_deps))

    handled.append('buildsystem')
def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
    """Scan autoconf input files and derive recipe dependency/metadata info.

    Handles multi-line macros by tracking parenthesis nesting, expands
    m4_define()d values (including m4_esyscmd) and extracts PN/PV from
    AC_INIT / AM_INIT_AUTOMAKE into *extravalues* when possible.

    outlines: list of recipe lines; NOTE comments are appended to it.
    srctree: path to the unpacked source tree.
    extravalues: optional dict to receive 'PN'/'PV'.
    acfile: optional explicit path to the autoconf input file.

    Returns a dict of variable values to set in the recipe; may contain
    'DEPENDS' and 'inherit' keys.
    """
    import shlex
    import oe.package

    values = {}
    inherits = []

    # FIXME this mapping is very thin
    progmap = {'flex': 'flex-native', 'bison': 'bison-native', 'm4': 'm4-native', 'tar': 'tar-native', 'ar': 'binutils-native'}
    progclassmap = {'gconftool-2': 'gconf', 'pkg-config': 'pkgconfig'}

    # Dependencies satisfied implicitly by the toolchain/libc
    ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'tar-native', 'binutils-native']
    ignorelibs = ['socket']

    pkg_re = re.compile('PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
    lib_re = re.compile('AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
    progs_re = re.compile('_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
    dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
    ac_init_re = re.compile('AC_INIT\(([^,]+), *([^,]+)[,)].*')
    am_init_re = re.compile('AM_INIT_AUTOMAKE\(([^,]+), *([^,]+)[,)].*')
    define_re = re.compile(' *(m4_)?define\(([^,]+), *([^,]+)\)')

    # Build up lib library->package mapping
    shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
    libdir = tinfoil.config_data.getVar('libdir', True)
    base_libdir = tinfoil.config_data.getVar('base_libdir', True)
    libpaths = list(set([base_libdir, libdir]))
    libname_re = re.compile('^lib(.+)\.so.*$')
    pkglibmap = {}
    for lib, item in shlib_providers.iteritems():
        for path, pkg in item.iteritems():
            if path in libpaths:
                res = libname_re.match(lib)
                if res:
                    libname = res.group(1)
                    # First provider wins; later duplicates are ignored
                    if not libname in pkglibmap:
                        pkglibmap[libname] = pkg[0]
                else:
                    logger.debug('unable to extract library name from %s' % lib)

    # Now turn it into a library->recipe mapping
    recipelibmap = {}
    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
    for libname, pkg in pkglibmap.iteritems():
        try:
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                for line in f:
                    if line.startswith('PN:'):
                        recipelibmap[libname] = line.split(':', 1)[-1].strip()
                        break
        except IOError as ioe:
            # errno 2 == ENOENT: missing pkgdata file is survivable
            if ioe.errno == 2:
                logger.warn('unable to find a pkgdata file for package %s' % pkg)
            else:
                raise

    defines = {}

    def subst_defines(value):
        # Recursively substitute any known m4_define()d values into value
        newvalue = value
        for define, defval in defines.iteritems():
            newvalue = newvalue.replace(define, defval)
        if newvalue != value:
            return subst_defines(newvalue)
        return value

    def process_value(value):
        # Resolve a macro argument to a plain string (or None if it is an
        # m4 construct we cannot evaluate)
        value = value.replace('[', '').replace(']', '')
        if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
            cmd = subst_defines(value[value.index('(')+1:-1])
            try:
                if '|' in cmd:
                    cmd = 'set -o pipefail; ' + cmd
                stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
                ret = stdout.rstrip()
            except bb.process.ExecutionError as e:
                ret = ''
        elif value.startswith('m4_'):
            return None
        else:
            # Fix: this assignment was unconditional, which clobbered the
            # result computed by the m4_esyscmd branch above
            ret = subst_defines(value)
        if ret:
            ret = ret.strip('"\'')
        return ret

    # Since a configure.ac file is essentially a program, this is only ever going to be
    # a hack unfortunately; but it ought to be enough of an approximation
    if acfile:
        srcfiles = [acfile]
    else:
        srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])

    pcdeps = []
    deps = []
    unmapped = []
    unmappedlibs = []

    def process_macro(keyword, value):
        # Dispatch a single (possibly reassembled multi-line) macro call
        if keyword == 'PKG_CHECK_MODULES':
            res = pkg_re.search(value)
            if res:
                res = dep_re.findall(res.group(1))
                if res:
                    pcdeps.extend([x[0] for x in res])
            inherits.append('pkgconfig')
        elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
            inherits.append('gettext')
        elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
            deps.append('intltool-native')
        elif keyword == 'AM_PATH_GLIB_2_0':
            deps.append('glib-2.0')
        elif keyword == 'AC_CHECK_PROG' or keyword == 'AC_PATH_PROG':
            res = progs_re.search(value)
            if res:
                for prog in shlex.split(res.group(1)):
                    prog = prog.split()[0]
                    progclass = progclassmap.get(prog, None)
                    if progclass:
                        inherits.append(progclass)
                    else:
                        progdep = progmap.get(prog, None)
                        if progdep:
                            deps.append(progdep)
                        else:
                            if not prog.startswith('$'):
                                unmapped.append(prog)
        elif keyword == 'AC_CHECK_LIB':
            res = lib_re.search(value)
            if res:
                lib = res.group(1)
                if lib in ignorelibs:
                    logger.debug('Ignoring library dependency %s' % lib)
                else:
                    libdep = recipelibmap.get(lib, None)
                    if libdep:
                        deps.append(libdep)
                    else:
                        # libdep == '' means deliberately ignored; only None is unmapped
                        if libdep is None:
                            if not lib.startswith('$'):
                                unmappedlibs.append(lib)
        elif keyword == 'AC_PATH_X':
            deps.append('libx11')
        elif keyword == 'AC_INIT':
            if extravalues is not None:
                res = ac_init_re.match(value)
                if res:
                    extravalues['PN'] = process_value(res.group(1))
                    pv = process_value(res.group(2))
                    if validate_pv(pv):
                        extravalues['PV'] = pv
        elif keyword == 'AM_INIT_AUTOMAKE':
            if extravalues is not None:
                if 'PN' not in extravalues:
                    res = am_init_re.match(value)
                    if res:
                        if res.group(1) != 'AC_PACKAGE_NAME':
                            extravalues['PN'] = process_value(res.group(1))
                        pv = process_value(res.group(2))
                        if validate_pv(pv):
                            extravalues['PV'] = pv
        elif keyword == 'define(':
            res = define_re.match(value)
            if res:
                key = res.group(2).strip('[]')
                value = process_value(res.group(3))
                if value is not None:
                    defines[key] = value

    keywords = ['PKG_CHECK_MODULES',
                'AM_GNU_GETTEXT',
                'AM_GLIB_GNU_GETTEXT',
                'GETTEXT_PACKAGE',
                'AC_PROG_INTLTOOL',
                'IT_PROG_INTLTOOL',
                'AM_PATH_GLIB_2_0',
                'AC_CHECK_PROG',
                'AC_PATH_PROG',
                'AC_CHECK_LIB',
                'AC_PATH_X',
                'AC_INIT',
                'AM_INIT_AUTOMAKE',
                'define(',
                ]
    for srcfile in srcfiles:
        # Reassemble macro calls that span multiple lines by tracking
        # parenthesis nesting before dispatching to process_macro()
        nesting = 0
        in_keyword = ''
        partial = ''
        with open(srcfile, 'r') as f:
            for line in f:
                if in_keyword:
                    partial += ' ' + line.strip()
                    if partial.endswith('\\'):
                        partial = partial[:-1]
                    nesting = nesting + line.count('(') - line.count(')')
                    if nesting == 0:
                        process_macro(in_keyword, partial)
                        partial = ''
                        in_keyword = ''
                else:
                    for keyword in keywords:
                        if keyword in line:
                            nesting = line.count('(') - line.count(')')
                            if nesting > 0:
                                partial = line.strip()
                                if partial.endswith('\\'):
                                    partial = partial[:-1]
                                in_keyword = keyword
                            else:
                                process_macro(keyword, line.strip())
                            break

        if in_keyword:
            process_macro(in_keyword, partial)

    if extravalues:
        # Fix: snapshot the items since we may delete keys during iteration
        # (mutating a dict while iterating it raises RuntimeError on Python 3)
        for k, v in list(extravalues.items()):
            if v:
                if v.startswith('$') or v.startswith('@') or v.startswith('%'):
                    # Unresolved shell/m4/make variable reference - drop it
                    del extravalues[k]
                else:
                    extravalues[k] = v.strip('"\'').rstrip('()')

    if unmapped:
        outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))

    if unmappedlibs:
        outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    recipemap = read_pkgconfig_provides(tinfoil.config_data)
    unmapped = []
    for pcdep in pcdeps:
        recipe = recipemap.get(pcdep, None)
        if recipe:
            deps.append(recipe)
        else:
            if not pcdep.startswith('$'):
                unmapped.append(pcdep)

    deps = set(deps).difference(set(ignoredeps))

    if unmapped:
        outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
        outlines.append('# (this is based on recipes that have previously been built and packaged)')

    if deps:
        values['DEPENDS'] = ' '.join(deps)

    if inherits:
        values['inherit'] = ' '.join(list(set(inherits)))

    return values
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Handle an npm source tree (identified by package.json).

    Reads package metadata into extravalues, fetches/handles dependencies,
    shrinkwraps and lockdowns the module set, splits out per-module
    packages and computes per-package LICENSE values.

    Returns True if the buildsystem was handled, False otherwise.
    Side effects: appends to classes/handled, mutates lines_before and
    lines_after, fills extravalues; exits the process (code 14) if npm
    cannot be provisioned.
    """
    import bb.utils
    import oe.package
    from collections import OrderedDict

    if 'buildsystem' in handled:
        return False

    def read_package_json(fn):
        with open(fn, 'r', errors='surrogateescape') as f:
            return json.loads(f.read())

    files = RecipeHandler.checkfiles(srctree, ['package.json'])
    if files:
        d = bb.data.createCopy(tinfoil.config_data)
        npm_bindir = self._ensure_npm()
        if not npm_bindir:
            # Cannot proceed at all without npm available
            sys.exit(14)
        d.prependVar('PATH', '%s:' % npm_bindir)

        data = read_package_json(files[0])
        if 'name' in data and 'version' in data:
            extravalues['PN'] = data['name']
            extravalues['PV'] = data['version']
            classes.append('npm')
            handled.append('buildsystem')
            if 'description' in data:
                extravalues['SUMMARY'] = data['description']
            if 'homepage' in data:
                extravalues['HOMEPAGE'] = data['homepage']

            # NOTE(review): assumes the caller always seeds
            # extravalues['fetchdev'] - a plain [] lookup would raise
            # KeyError otherwise; confirm against the caller.
            fetchdev = extravalues['fetchdev'] or None
            deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev)
            self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree)

            # Shrinkwrap
            localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm')
            self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before, d)

            # Lockdown
            self._lockdown(srctree, localfilesdir, extravalues, lines_before, d)

            # Split each npm module out to its own package
            npmpackages = oe.package.npm_split_package_dirs(srctree)
            licvalues = None
            for item in handled:
                if isinstance(item, tuple):
                    if item[0] == 'license':
                        licvalues = item[1]
                        break
            if not licvalues:
                licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
            if licvalues:
                # Augment the license list with information we have in the packages
                licenses = {}
                license = self._handle_license(data)
                if license:
                    licenses['${PN}'] = license
                for pkgname, pkgitem in npmpackages.items():
                    _, pdata = pkgitem
                    license = self._handle_license(pdata)
                    if license:
                        licenses[pkgname] = license
                # Now write out the package-specific license values
                # We need to strip out the json data dicts for this since split_pkg_licenses
                # isn't expecting it
                packages = OrderedDict((x, y[0]) for x, y in npmpackages.items())
                packages['${PN}'] = ''
                pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses)
                all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense]))
                if '&' in all_licenses:
                    all_licenses.remove('&')
                extravalues['LICENSE'] = ' & '.join(all_licenses)

            # Need to move S setting after inherit npm
            for i, line in enumerate(lines_before):
                if line.startswith('S ='):
                    lines_before.pop(i)
                    lines_after.insert(0, '# Must be set after inherit npm since that itself sets S')
                    lines_after.insert(1, line)
                    break

            return True

    return False
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
    """Detect a distutils/setuptools project (setup.py) and populate the recipe.

    Parses setup.py (and egg-info / PKG-INFO metadata when present) to fill
    in recipe variables, selects the 'setuptools' or 'distutils' class, and
    emits RDEPENDS/PACKAGECONFIG suggestions derived from install_requires,
    extras_require and a scan of the python sources.  Mutates classes,
    lines_before, lines_after and handled in place.  Returns True when the
    tree was handled, False otherwise.
    """
    if "buildsystem" in handled:
        return False

    # Not a python project at all
    if not RecipeHandler.checkfiles(srctree, ["setup.py"]):
        return False

    # setup.py is always parsed to get at certain required information, such as
    # distutils vs setuptools
    #
    # If egg info is available, we use it for both its PKG-INFO metadata
    # and for its requires.txt for install_requires.
    # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
    # the parsed setup.py, but use the install_requires info from the
    # parsed setup.py.

    setupscript = os.path.join(srctree, "setup.py")
    try:
        setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
    except Exception:
        # Best effort: fall back to empty metadata and assume setuptools
        logger.exception("Failed to parse setup.py")
        setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []

    egginfo = glob.glob(os.path.join(srctree, "*.egg-info"))
    if egginfo:
        info = self.get_pkginfo(os.path.join(egginfo[0], "PKG-INFO"))
        requires_txt = os.path.join(egginfo[0], "requires.txt")
        if os.path.exists(requires_txt):
            with codecs.open(requires_txt) as f:
                inst_req = []
                extras_req = collections.defaultdict(list)
                current_feature = None
                for line in f.readlines():
                    line = line.rstrip()
                    if not line:
                        continue
                    # requires.txt format: bare requirement lines, optionally
                    # grouped under [feature] section headers (extras)
                    if line.startswith("["):
                        current_feature = line[1:-1]
                    elif current_feature:
                        extras_req[current_feature].append(line)
                    else:
                        inst_req.append(line)
                info["Install-requires"] = inst_req
                info["Extras-require"] = extras_req
    elif RecipeHandler.checkfiles(srctree, ["PKG-INFO"]):
        info = self.get_pkginfo(os.path.join(srctree, "PKG-INFO"))
        # PKG-INFO has no dependency info; take that from the parsed setup.py
        if setup_info:
            if "Install-requires" in setup_info:
                info["Install-requires"] = setup_info["Install-requires"]
            if "Extras-require" in setup_info:
                info["Extras-require"] = setup_info["Extras-require"]
    else:
        if setup_info:
            info = setup_info
        else:
            info = self.get_setup_args_info(setupscript)

    self.apply_info_replacements(info)

    if uses_setuptools:
        classes.append("setuptools")
    else:
        classes.append("distutils")

    # Derive a LICENSE value from trove classifiers when possible
    if "Classifier" in info:
        licenses = []
        for classifier in info["Classifier"]:
            if classifier in self.classifier_license_map:
                lic = self.classifier_license_map[classifier]
                licenses.append(lic)
        if licenses:
            info["License"] = " & ".join(licenses)

    # Map PKG-INFO & setup.py fields to bitbake variables
    bbinfo = {}
    # FIX: iteritems()/basestring are Python 2 only and raise under
    # Python 3, which the rest of this file targets; use items()/str
    for field, values in info.items():
        if field in self.excluded_fields:
            continue
        if field not in self.bbvar_map:
            continue
        if isinstance(values, str):
            value = values
        else:
            # presumably a list of metadata values - join non-empty ones
            value = " ".join(str(v) for v in values if v)
        bbvar = self.bbvar_map[field]
        if bbvar not in bbinfo and value:
            bbinfo[bbvar] = value

    # Replace a placeholder LICENSE line emitted earlier in the recipe with
    # the detected value, adding a warning when overriding "CLOSED"
    comment_lic_line = None
    for pos, line in enumerate(list(lines_before)):
        if line.startswith("#") and "LICENSE" in line:
            comment_lic_line = pos
        elif line.startswith("LICENSE =") and "LICENSE" in bbinfo:
            if line in ('LICENSE = "Unknown"', 'LICENSE = "CLOSED"'):
                lines_before[pos] = 'LICENSE = "{}"'.format(bbinfo["LICENSE"])
                # FIX: compare against None - a LICENSE comment at index 0
                # is a valid match that the old truthiness test skipped
                if line == 'LICENSE = "CLOSED"' and comment_lic_line is not None:
                    lines_before[comment_lic_line:pos] = [
                        "# WARNING: the following LICENSE value is a best guess - it is your",
                        "# responsibility to verify that the value is complete and correct.",
                    ]
                del bbinfo["LICENSE"]

    # Insert remaining metadata just before the SRC_URI line when present
    src_uri_line = None
    for pos, line in enumerate(lines_before):
        if line.startswith("SRC_URI ="):
            src_uri_line = pos

    if bbinfo:
        mdinfo = [""]
        for k in sorted(bbinfo):
            v = bbinfo[k]
            mdinfo.append('{} = "{}"'.format(k, v))
        # NOTE: a SRC_URI at index 0 falls through to extend() - inserting
        # at slice index -1 would misplace the lines
        if src_uri_line:
            lines_before[src_uri_line - 1 : src_uri_line - 1] = mdinfo
        else:
            lines_before.extend(mdinfo)

    mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)

    extras_req = set()
    if "Extras-require" in info:
        extras_req = info["Extras-require"]
        if extras_req:
            lines_after.append("# The following configs & dependencies are from setuptools extras_require.")
            lines_after.append("# These dependencies are optional, hence can be controlled via PACKAGECONFIG.")
            lines_after.append("# The upstream names may not correspond exactly to bitbake package names.")
            lines_after.append("#")
            lines_after.append("# Uncomment this line to enable all the optional features.")
            # FIX: iterkeys()/iteritems() do not exist in Python 3
            lines_after.append('#PACKAGECONFIG ?= "{}"'.format(" ".join(k.lower() for k in extras_req)))
            for feature, feature_reqs in extras_req.items():
                unmapped_deps.difference_update(feature_reqs)
                feature_req_deps = ("python-" + r.replace(".", "-").lower() for r in sorted(feature_reqs))
                lines_after.append(
                    'PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), " ".join(feature_req_deps))
                )

    inst_reqs = set()
    if "Install-requires" in info:
        if extras_req:
            lines_after.append("")
        inst_reqs = info["Install-requires"]
        if inst_reqs:
            unmapped_deps.difference_update(inst_reqs)
            inst_req_deps = ("python-" + r.replace(".", "-").lower() for r in sorted(inst_reqs))
            lines_after.append("# WARNING: the following rdepends are from setuptools install_requires. These")
            lines_after.append("# upstream names may not correspond exactly to bitbake package names.")
            lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(" ".join(inst_req_deps)))

    if mapped_deps:
        name = info.get("Name")
        # name is presumably a list of values from the metadata parser, so
        # name[0] is the package name - TODO confirm against get_pkginfo
        if name and name[0] in mapped_deps:
            # Attempt to avoid self-reference
            mapped_deps.remove(name[0])
        mapped_deps -= set(self.excluded_pkgdeps)
        if inst_reqs or extras_req:
            lines_after.append("")
        lines_after.append("# WARNING: the following rdepends are determined through basic analysis of the")
        lines_after.append("# python sources, and might not be 100% accurate.")
        lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(" ".join(sorted(mapped_deps))))

    # C extensions and assumed-provided modules are expected to be unmapped
    unmapped_deps -= set(extensions)
    unmapped_deps -= set(self.assume_provided)
    if unmapped_deps:
        if mapped_deps:
            lines_after.append("")
        lines_after.append("# WARNING: We were unable to map the following python package/module")
        lines_after.append("# dependencies to the bitbake packages which include them:")
        lines_after.extend("# {}".format(d) for d in sorted(unmapped_deps))

    handled.append("buildsystem")
    # Explicit return for consistency with the other buildsystem handlers
    return True