Example 1
 def _load_main_config(self):
     if os.path.exists(DEFAULT_CONFIG_FILE):
         self._parse(DEFAULT_CONFIG_FILE)
     else:
         msg = _('Using default configuration because %s is missing') % \
             DEFAULT_CONFIG_FILE
         m.warning(msg)
Example 2
    def _search_libraries(self, files):
        '''
        Search libraries in the prefix. Unfortunately the filename might vary
        depending on the platform and we need to match the library name and
        its extension. There is a corner case on Windows where a library might
        be named foo.dll, foo-1.dll, libfoo.dll, or libfoo-1.dll

        NOTE: Unlike other searchfuncs which return lists, this returns a dict
              with a mapping from the libname to the actual on-disk file. We use
              the libname (the key) in gen_library_file so we don't have to
              guess (sometimes incorrectly) based on the dll filename.
        '''
        libdir = self.extensions['sdir']
        libext = self.extensions['srext']
        libregex = self.extensions['sregex']

        libsmatch = {}
        notfound = []
        for f in files:
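            # entries are expected to look like 'libfoo'; f[3:] strips the 'lib' prefix before matching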
            libsmatch[f] = list(
                find_shlib_regex(f[3:], self.config.prefix, libdir, libext,
                                 libregex))
            if not libsmatch[f]:
                notfound.append(f)

        if notfound:
            msg = "Some libraries weren't found while searching!"
            for each in notfound:
                msg += '\n' + each
            m.warning(msg)
        return libsmatch
Example 4
    def pack(self, output_dir, devel=True, force=False, keep_temp=False,
             split=True, package_prefix=''):
        try:
            dist_files = self.files_list(PackageType.RUNTIME, force)
        except EmptyPackageError:
            m.warning(_("The runtime package is empty"))
            dist_files = []

        if devel:
            try:
                devel_files = self.files_list(PackageType.DEVEL, force)
            except EmptyPackageError:
                m.warning(_("The development package is empty"))
                devel_files = []
        else:
            devel_files = []

        if not split:
            dist_files += devel_files

        if not dist_files and not devel_files:
            raise EmptyPackageError(self.package.name)

        filenames = []
        if dist_files:
            runtime = self._create_tarball(output_dir, PackageType.RUNTIME,
                                           dist_files, force, package_prefix)
            filenames.append(runtime)

        if split and devel and len(devel_files) != 0:
            devel = self._create_tarball(output_dir, PackageType.DEVEL,
                                         devel_files, force, package_prefix)
            filenames.append(devel)
        return filenames
Example 5
    def pack(self, output_dir, devel=True, force=False, keep_temp=False, split=True, package_prefix=""):
        try:
            dist_files = self.files_list(PackageType.RUNTIME, force)
        except EmptyPackageError:
            m.warning(_("The runtime package is empty"))
            dist_files = []

        if devel:
            try:
                devel_files = self.files_list(PackageType.DEVEL, force)
            except EmptyPackageError:
                m.warning(_("The development package is empty"))
                devel_files = []
        else:
            devel_files = []

        if not split:
            dist_files += devel_files

        if not dist_files and not devel_files:
            raise EmptyPackageError(self.package.name)

        filenames = []
        if dist_files:
            runtime = self._create_tarball(output_dir, PackageType.RUNTIME, dist_files, force, package_prefix)
            filenames.append(runtime)

        if split and devel and len(devel_files) != 0:
            devel = self._create_tarball(output_dir, PackageType.DEVEL, devel_files, force, package_prefix)
            filenames.append(devel)
        return filenames
Example 6
    def run(self, config, args):
        cookbook = CookBook(config)
        failed = []
        p_name = args.package[0]

        store = PackagesStore(config)
        p = store.get_package(p_name)
        ordered_recipes = [cookbook.get_recipe(x)
                           for x in p.recipes_dependencies()]

        for recipe in ordered_recipes:
            if cookbook.recipe_needs_build(recipe.name):
                raise CommandError(
                    _("Recipe %s is not built yet" % recipe.name))

        for recipe in ordered_recipes:
            # call step function
            stepfunc = None
            try:
                stepfunc = getattr(recipe, 'check')
            except AttributeError:
                m.message('%s has no check step, skipped' % recipe.name)

            if stepfunc:
                try:
                    m.message('Running checks for recipe %s' % recipe.name)
                    stepfunc()
                except Exception as ex:
                    failed.append(recipe.name)
                    m.warning(_("%s checks failed: %s") % (recipe.name, ex))
        if failed:
            raise CommandError(
                _("Error running %s checks on:\n    " + "\n    ".join(failed))
                % p_name)
Example 7
 def __init__(self, variants):
     # Set default values
     for v in self.__enabled_variants:
         setattr(self, v, True)
     for v in self.__disabled_variants:
         setattr(self, v, False)
     for v, choices in self.__mapping_variants.items():
         setattr(self, v, choices[0])
     # Set the configured values
     for v in variants:
         if '=' in v:
             key, value = v.split('=', 1)
             key = key.replace('-', '_')
             if key not in self.__mapping_variants:
                 raise AttributeError(
                     'Mapping variant {!r} is unknown'.format(key))
             if value not in self.__mapping_variants[key]:
                 raise AttributeError(
                     'Mapping variant {!r} value {!r} is unknown'.format(
                         key, value))
             setattr(self, key, value)
         elif v.startswith('no'):
             if v[2:] not in self.__bool_variants:
                 m.warning('Variant {!r} is unknown or obsolete'.format(
                     v[2:]))
             setattr(self, v[2:], False)
         else:
             if v not in self.__bool_variants:
                 m.warning('Variant {!r} is unknown or obsolete'.format(v))
             setattr(self, v, True)
     # Set auto mapping values based on other values
     if self.vscrt == 'auto':
         self.vscrt = 'md'
         if self.debug and not self.optimization:
             self.vscrt = 'mdd'
Example 8
    async def fetch_dep(self, config, dep, namespace):
        try:
            dep_path = os.path.join(config.home_dir,
                                    os.path.basename(dep['url']))
            await shell.download(dep['url'],
                                 dep_path,
                                 check_cert=True,
                                 overwrite=True)
            if dep['checksum'] == self.checksum(dep_path):
                await shell.unpack(dep_path, config.home_dir)
            else:
                m.warning("Corrupted dependency file, ignoring.")
            os.remove(dep_path)

            # Don't need to relocate on Windows and macOS since we build
            # pkg-config with --enable-define-prefix.
            # In case this needs to be re-enabled at some point, note that the
            # current self.build_dir value is hard-coded and is wrong on macOS
            # and Windows. It should instead be derived from CI env vars.
            if config.platform == Platform.LINUX:
                origin = self.build_dir % namespace
                m.message("Relocating from %s to %s" %
                          (origin, config.home_dir))
                # FIXME: Just a quick hack for now
                shell.call(("grep -lnrIU %(origin)s | xargs "
                            "sed \"s#%(origin)s#%(dest)s#g\" -i") % {
                                'origin': origin,
                                'dest': config.home_dir
                            }, config.home_dir)
        except FatalError as e:
            m.warning("Could not retrieve dependencies for commit %s: %s" %
                      (dep['commit'], e.msg))
Example 9
    def run(self, config, args):
        cookbook = CookBook(config)
        failed = []
        p_name = args.package[0]

        store = PackagesStore(config)
        p = store.get_package(p_name)
        ordered_recipes = [cookbook.get_recipe(x)
                           for x in p.recipes_dependencies()]

        for recipe in ordered_recipes:
            if cookbook.recipe_needs_build(recipe.name):
                raise CommandError(_("Recipe %s is not built yet" % recipe.name))

        for recipe in ordered_recipes:
            # call step function
            stepfunc = None
            try:
                stepfunc = getattr(recipe, 'check')
            except AttributeError:
                m.message('%s has no check step, skipped' % recipe.name)

            if stepfunc:
                try:
                    m.message('Running checks for recipe %s' % recipe.name)
                    stepfunc()
                except Exception as ex:
                    failed.append(recipe.name)
                    m.warning(_("%s checks failed: %s") % (recipe.name, ex))
Example 10
    def create(self, libname, dllpath, arch, outputdir):
        bindir, dllname = os.path.split(dllpath)

        # Create the .def file
        shell.call('gendef %s' % dllpath, outputdir)

        defname = dllname.replace('.dll', '.def')
        implib = '%s.lib' % libname[3:]

        # Create the import library
        vc_path = self._get_vc_tools_path()

        # Prefer LIB.exe over dlltool:
        # http://sourceware.org/bugzilla/show_bug.cgi?id=12633
        if vc_path is not None:
            # Spaces msys and shell are a beautiful combination
            lib_path = to_unixpath(os.path.join(vc_path, 'lib.exe'))
            lib_path = lib_path.replace('\\', '/')
            lib_path = lib_path.replace('(', '\\\(').replace(')', '\\\)')
            lib_path = lib_path.replace(' ', '\\\\ ')
            if arch == Architecture.X86:
                arch = 'x86'
            else:
                arch = 'x64'
            shell.call(self.LIB_TPL % (lib_path, defname, implib, arch), outputdir)
        else:
            m.warning("Using dlltool instead of lib.exe! Resulting .lib files"
                " will have problems with Visual Studio, see "
                " http://sourceware.org/bugzilla/show_bug.cgi?id=12633")
            shell.call(self.DLLTOOL_TPL % (defname, implib, dllname), outputdir)
        return os.path.join(outputdir, implib)
Example 11
    def __new__(klass, config, build_tools_only):
        bs = []

        bs.append(BuildTools(config))
        if build_tools_only:
            return bs

        target_distro = config.target_distro
        distro = config.distro
        target_distro_version = config.target_distro_version
        distro_version = config.distro_version

        # Try to find a bootstrapper for the distro-distro_version combination,
        # both for the target host and the build one. For instance, when
        # bootstrapping to cross-compile for Windows we also need to bootstrap
        # the build host.
        target = (target_distro, target_distro_version)
        build = (distro, distro_version)

        if target == build:
            blist = [target]
        else:
            blist = [target, build]

        for d, v in blist:
            if d not in bootstrappers:
                raise FatalError(_("No bootstrapper for the distro %s" % d))
            if v not in bootstrappers[d]:
                # Be tolerant with the distro version
                m.warning(_("No bootstrapper for the distro version %s" % v))
                v = None

            bs.insert(0, bootstrappers[d][v](config))

        return bs
Example 12
    def _make_paths_relative(self):
        sofiles = shell.find_files('*.so', self.tmp_install_dir)
        for sof in sofiles:
            try:
                shell.call("chrpath -d %s" % sof, self.tmp_install_dir,
                           fail=False)
            except FatalError:
                m.warning("Could not 'chrpath' %s" % sof)

        shell.call("ln -s . usr", self.tmp_install_dir, fail=False)

        # Make the gdk-pixbuf loaders.cache file use relative paths
        cache = os.path.join(self.tmp_install_dir, 'lib', 'gdk-pixbuf-2.0',
            '2.10.0', 'loaders.cache')
        shell.replace(cache, {self.config.install_dir: '.'})

        # os.listdir() returns bare names, so build the full path to each
        # icon theme directory before checking for an index.theme file
        icons_dir = os.path.join(self.tmp_install_dir, "share/icons/")
        for icondir in os.listdir(icons_dir):
            icondir = os.path.join(icons_dir, icondir)
            if os.path.exists(os.path.join(icondir, "index.theme")):
                shell.call("gtk-update-icon-cache %s" % icondir, fail=False)

        shell.call("update-mime-database %s" % os.path.join(self.tmp_install_dir, "share", "mime"), fail=False)

        # Use system wide applications in case the bundle needs to open apps not included in
        # the bundle (to show the documentation most probably)
        shell.call("rm -rf %s" % os.path.join(self.tmp_install_dir, "share", "applications"), fail=False)
        shell.call("ln -s %s %s" % (os.path.join("/usr", "share", "applications"),
                                    os.path.join(self.tmp_install_dir, "share", "applications")),
                   fail=False)
Example 13
def find_dll_implib(config, libname, prefix, libdir, ext, regex):
    implibdir = 'lib'
    implibs = [
        'lib{}.dll.a'.format(libname), libname + '.lib',
        'lib{}.lib'.format(libname)
    ]
    implib_notfound = []
    for implib in implibs:
        path = os.path.join(prefix, implibdir, implib)
        if not os.path.exists(path):
            implib_notfound.append(implib)
            continue
        dllname = get_implib_dllname(config, path)
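        # get_implib_dllname() yields 0 or an empty string when the DLL name
        # cannot be read from the import library; skip those entries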
        if dllname == 0:
            continue
        dllname = dllname.strip()
        if dllname == '':
            continue
        return [os.path.join(libdir, dllname)]
    # If import libraries aren't found, look for a DLL by exactly the specified
    # name. This is to cover cases like libgcc_s_sjlj-1.dll which don't have an
    # import library since they're only used at runtime.
    dllname = 'lib{}.dll'.format(libname)
    path = os.path.join(prefix, libdir, dllname)
    if os.path.exists(path):
        return [os.path.join(libdir, dllname)]
    if len(implib_notfound) == len(implibs):
        m.warning("No import libraries found for {!r}".format(libname))
    else:
        implibs = ', '.join(set(implibs) - set(implib_notfound))
        m.warning("No dllname found from implibs: {}".format(implibs))
    # This will trigger an error in _search_libraries()
    return []
Example 14
    def insert_python_site(self):
        try:
            import setuptools.version as stv
        except ImportError:
            return

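        # only the major version component matters for the >= 49 check below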
        version = stv.__version__.split('.', 1)
        if len(version) < 1 or int(version[0]) < 49:
            return

        m.warning('detected setuptools >= 49.0.0, installing fallback site.py file. '
            'See https://github.com/pypa/setuptools/issues/2295')

        # Since python-setuptools 49.0.0, site.py is not installed by
        # easy_install/setup.py anymore which breaks python installs outside
        # the system prefix.
        # https://github.com/pypa/setuptools/issues/2295
        #
        # Install the previously installed site.py ourselves as a workaround
        config = self.cookbook.get_config()

        py_prefix = sysconfig.get_path('purelib', vars={'base': ''})
        # Must strip \/ to ensure that the path is relative
        py_prefix = PurePath(config.prefix) / PurePath(py_prefix.strip('\\/'))
        src_file = os.path.join(os.path.dirname(__file__), 'site-patch.py')
        shutil.copy(src_file, py_prefix / 'site.py')
Example 15
    def _create_bundle(self, files, package_type):
        '''
        Moves all the files that are going to be packaged to a temporary
        directory to create the bundle
        '''
        tmp = tempfile.mkdtemp()
        root = os.path.join(tmp, 'Root')
        resources = os.path.join(tmp, 'Resources')
        for f in files:
            in_path = os.path.join(self.config.prefix, f)
            if not os.path.exists(in_path):
                m.warning("File %s is missing and won't be added to the "
                          "package" % in_path)
                continue
            out_path = os.path.join(root, f)
            out_dir = os.path.split(out_path)[0]
            if not os.path.exists(out_dir):
                os.makedirs(out_dir)
            shutil.copy(in_path, out_path)
        if package_type == PackageType.DEVEL:
            self._create_framework_headers(self.config.prefix, self.include_dirs, root)

        # Copy scripts to the Resources directory
        os.makedirs(resources)
        if os.path.exists(self.package.resources_preinstall):
            shutil.copy(os.path.join(self.package.resources_preinstall),
                        os.path.join(resources, 'preinstall'))
        if os.path.exists(self.package.resources_postinstall):
            shutil.copy(os.path.join(self.package.resources_postinstall),
                        os.path.join(resources, 'postinstall'))
        return root, resources
Example 16
    def create(self, libname, dllpath, platform, target_arch, outputdir):
        # foo.lib must not start with 'lib'
        if libname.startswith('lib'):
            self.filename = libname[3:] + '.lib'
        else:
            self.filename = libname + '.lib'

        bindir, dllname = os.path.split(dllpath)

        # Create the .def file
        defname = self.gendef(dllpath, outputdir, libname)

        # Create the import library
        lib_path, paths = self._get_lib_exe_path(target_arch, platform)

        # Prefer LIB.exe over dlltool:
        # http://sourceware.org/bugzilla/show_bug.cgi?id=12633
        if lib_path is not None:
            if target_arch == Architecture.X86:
                arch = 'x86'
            else:
                arch = 'x64'
            env = self.config.env.copy()
            env['PATH'] = paths + ';' + env['PATH']
            cmd = [
                lib_path, '/DEF:' + defname, '/OUT:' + self.filename,
                '/MACHINE:' + arch
            ]
            shell.new_call(cmd, outputdir, logfile=self.logfile, env=env)
        else:
            m.warning("Using dlltool instead of lib.exe! Resulting .lib files"
                      " will have problems with Visual Studio, see "
                      " http://sourceware.org/bugzilla/show_bug.cgi?id=12633")
            self.dlltool(defname, dllname, outputdir)
        return os.path.join(outputdir, self.filename)
Example 17
    def run(self, config, args):
        cookbook = CookBook(config)
        if args.recipe == 'all':
            recipes = cookbook.get_recipes_list()
        else:
            recipes = [cookbook.get_recipe(args.recipe)]
        if len(recipes) == 0:
            m.message(_("No recipes found"))
        tagname = args.tagname
        tagdescription = args.tagdescription
        force = args.force
        for recipe in recipes:
            if recipe.stype != SourceType.GIT and \
               recipe.stype != SourceType.GIT_TARBALL:
                m.message(
                    _("Recipe '%s' has a custom source repository, "
                      "skipping") % recipe.name)
                continue

            recipe.fetch(checkout=False)

            tags = git.list_tags(recipe.repo_dir)
            exists = (tagname in tags)
            if exists:
                if not force:
                    m.warning(
                        _("Recipe '%s' tag '%s' already exists, "
                          "not updating" % (recipe.name, tagname)))
                    continue
                git.delete_tag(recipe.repo_dir, tagname)

            commit = 'origin/sdk-%s' % recipe.version
            git.create_tag(recipe.repo_dir, tagname, tagdescription, commit)
Example 18
    def _create_bundle(self, files, package_type):
        '''
        Moves all the files that are going to be packaged to a temporary
        directory to create the bundle
        '''
        tmp = tempfile.mkdtemp()
        root = os.path.join(tmp, 'Root')
        resources = os.path.join(tmp, 'Resources')
        for f in files:
            in_path = os.path.join(self.config.prefix, f)
            if not os.path.exists(in_path):
                m.warning("File %s is missing and won't be added to the "
                          "package" % in_path)
                continue
            out_path = os.path.join(root, f)
            out_dir = os.path.split(out_path)[0]
            if not os.path.exists(out_dir):
                os.makedirs(out_dir)
            shutil.copy(in_path, out_path)
        if package_type == PackageType.DEVEL:
            self._create_framework_headers(self.config.prefix,
                                           self.include_dirs, root)

        # Copy scripts to the Resources directory
        os.makedirs(resources)
        if os.path.exists(self.package.resources_preinstall):
            shutil.copy(os.path.join(self.package.resources_preinstall),
                        os.path.join(resources, 'preinstall'))
        if os.path.exists(self.package.resources_postinstall):
            shutil.copy(os.path.join(self.package.resources_postinstall),
                        os.path.join(resources, 'postinstall'))
        return tmp, root, resources
Example 19
    def create(self, libname, dllpath, arch, outputdir):
        bindir, dllname = os.path.split(dllpath)

        # Create the .def file
        shell.call('gendef %s' % dllpath, outputdir)

        defname = dllname.replace('.dll', '.def')
        implib = '%s.lib' % libname[3:]

        # Create the import library
        vc_path = self._get_vc_tools_path()

        # Prefer LIB.exe over dlltool:
        # http://sourceware.org/bugzilla/show_bug.cgi?id=12633
        if vc_path is not None:
            # Spaces msys and shell are a beautiful combination
            lib_path = to_unixpath(os.path.join(vc_path, 'lib.exe'))
            lib_path = lib_path.replace('\\', '/')
            lib_path = lib_path.replace('(', '\\\(').replace(')', '\\\)')
            lib_path = lib_path.replace(' ', '\\\\ ')
            if arch == Architecture.X86:
                arch = 'x86'
            else:
                arch = 'x64'
            shell.call(self.LIB_TPL % (lib_path, defname, implib, arch),
                       outputdir)
        else:
            m.warning("Using dlltool instead of lib.exe! Resulting .lib files"
                      " will have problems with Visual Studio, see "
                      " http://sourceware.org/bugzilla/show_bug.cgi?id=12633")
            shell.call(self.DLLTOOL_TPL % (defname, implib, dllname),
                       outputdir)
        return os.path.join(outputdir, implib)
Example 20
 def generate_gst_la_files(self):
     '''
     Generate .la files for all libraries and plugins packaged by this Meson
     recipe using the pkg-config files installed by our Meson build files.
     '''
     pluginpcdir = os.path.join(self.config.libdir, 'gstreamer-1.0', 'pkgconfig')
     env = os.environ.copy()
     env['PKG_CONFIG_LIBDIR'] += os.pathsep + pluginpcdir
     if self.use_system_libs:
         add_system_libs(self.config, env)
     # Get la file -> pkg-config name mapping
     libs_la_files = {}
     plugin_la_files = {}
     for f in self.devel_files_list():
         if not f.endswith('.a') or not f.startswith('lib/'):
             continue
         if f.startswith('lib/gstreamer-1.0/'):
             libtype = 'plugin'
         else:
             libtype = 'library'
         fpath = os.path.join(self._get_arch_prefix(), f)
         if not os.path.isfile(fpath):
             arch = self.config.target_arch
             m.warning('{} {} {!r} not found'.format(arch, libtype, fpath))
             continue
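         # e.g. 'lib/libgstvideo-1.0.a' -> 'gstvideo-1.0' (strip the 'lib' prefix and '.a' suffix)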
         pcname = os.path.basename(f)[3:-2]
         la = os.path.splitext(f)[0] + '.la'
         if libtype == 'plugin':
             self._write_gst_la_file(la, pcname, None, None, None, env)
         else:
             pcname = pcname.replace('gst', 'gstreamer-')
             # Same versioning as gstreamer
             minor, micro = (map(int, self.version.split('.')[1:3]))
             minor = minor * 100 + micro
             self._write_gst_la_file(la, pcname, 0, minor, 0, env)
Example 21
 def _load_packages_from_file(self, filepath, custom=None):
     packages = []
     d = {
         'Platform': Platform,
         'Architecture': Architecture,
         'Distro': Distro,
         'DistroVersion': DistroVersion,
         'License': License,
         'package': package,
         'PackageType': PackageType,
         'custom': custom
     }
     d_keys = set(list(d.keys()))
     try:
         new_d = d.copy()
         parse_file(filepath, new_d)
         # List new objects parsed added to the globals dict
         diff_vals = [new_d[x] for x in set(new_d.keys()) - d_keys]
         # Find all objects inheriting from Package
         for package_cls in [
                 x for x in diff_vals if self._is_package_class(x)
         ]:
             pkg = self._load_package_from_file(package_cls, filepath,
                                                custom)
             if pkg is not None:
                 packages.append(pkg)
     except Exception:
         m.warning("Error loading package from file %s" % filepath)
         traceback.print_exc()
     return packages
Example 22
    def _load_release(self):
        rls = os.path.join(self.args.repo, 'bundle.yaml')

        if os.path.exists(self.args.repo) and not os.path.exists(rls):
            pkgs = Package(self.config).packages()
            pkgs['build-tools'] = BuildTools(self.config).get()
            for name, profile in pkgs.items():
                filename = '%(name)s-%(platform)s-%(arch)s-%(version)s.yaml' % profile
                path = os.path.join(self.args.repo, filename)
                if os.path.exists(path):
                    pro = yaml.safe_load(open(path))
                    pro['__file__'] = path

                    if pro['version'] != profile['version']:
                        m.warning('skip %s since %s != %s' %
                                  (name, pro['version'], profile['version']))
                    else:
                        self.profile[profile['name']] = pro
                else:
                    self.profile[profile['name']] = profile
        else:

            url = os.path.join(self.args.repo, 'bundle.yaml')
            path = cache(url, self.args.cache_dir)
            self._cache_bundle(path)
Example 23
    def _load_package_from_file(self, filepath):
        mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1])

        try:
            d = {'Platform': Platform, 'Architecture': Architecture,
                 'Distro': Distro, 'DistroVersion': DistroVersion,
                 'License': License, 'package': package,
                 'PackageType': PackageType}
            with open(filepath) as f:
                exec(f.read(), d)
            if 'Package' in d:
                p = d['Package'](self._config, self, self.cookbook)
            elif 'SDKPackage' in d:
                p = d['SDKPackage'](self._config, self)
            elif 'InstallerPackage' in d:
                p = d['InstallerPackage'](self._config, self)
            elif 'App' in d:
                p = d['App'](self._config, self, self.cookbook)
            else:
                raise Exception('Package, SDKPackage, InstallerPackage or App '
                                'class not found')
            p.prepare()
            # reload files from package now that we called prepare that
            # may have changed it
            p.load_files()
            return p
        except Exception as ex:
            import traceback
            traceback.print_exc()
            m.warning("Error loading package %s" % ex)
Example 24
    def _search_libraries(self, files):
        """
        Search libraries in the prefix. Unfortunately the filename might vary
        depending on the platform and we need to match the library name and
        its extension. There is a corner case on Windows where a library might
        be named foo.dll, foo-1.dll, libfoo.dll, or libfoo-1.dll

        NOTE: Unlike other searchfuncs which return lists, this returns a dict
              with a mapping from the libname to the actual on-disk file. We use
              the libname (the key) in gen_library_file so we don't have to
              guess (sometimes incorrectly) based on the dll filename.
        """
        libdir = self.extensions["sdir"]
        libext = self.extensions["srext"]
        libregex = self.extensions["sregex"]

        libsmatch = {}
        notfound = []
        for f in files:
            libsmatch[f] = list(find_shlib_regex(f[3:], self.config.prefix, libdir, libext, libregex))
            if not libsmatch[f]:
                notfound.append(f)

        if notfound:
            msg = "Some libraries weren't found while searching!"
            for each in notfound:
                msg += "\n" + each
            m.warning(msg)
        return libsmatch
Example 25
 def _restore_cache(self):
     try:
         with open(self._cache_file(self.get_config()), 'rb') as f:
             self.status = pickle.load(f)
     except Exception:
         self.status = {}
         m.warning(_("Could not recover status"))
Example 26
 def override(self, variants):
     if not isinstance(variants, list):
         variants = [variants]
     # Set the configured values
     for v in variants:
         if '=' in v:
             key, value = v.split('=', 1)
             key = key.replace('-', '_')
             if key not in self.__mapping_variants:
                 raise AttributeError(
                     'Mapping variant {!r} is unknown'.format(key))
             if value not in self.__mapping_variants[key]:
                 raise AttributeError(
                     'Mapping variant {!r} value {!r} is unknown'.format(
                         key, value))
             setattr(self, key, value)
         elif v.startswith('no'):
             if v[2:] not in self.__bool_variants:
                 m.warning('Variant {!r} is unknown or obsolete'.format(
                     v[2:]))
             setattr(self, v[2:], False)
         else:
             if v not in self.__bool_variants:
                 m.warning('Variant {!r} is unknown or obsolete'.format(v))
             setattr(self, v, True)
     # Auto-set vscrt variant if it wasn't set explicitly
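     # vscrt selects the MSVC C runtime: 'md' corresponds to /MD (release CRT), 'mdd' to /MDd (debug CRT)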
     if self.vscrt == 'auto':
         self.vscrt = 'md'
         if self.debug and not self.optimization:
             self.vscrt = 'mdd'
Example 27
    def _make_paths_relative(self):
        sofiles = shell.find_files('*.so', self.tmp_install_dir)
        for sof in sofiles:
            try:
                shell.call("chrpath -d %s" % sof,
                           self.tmp_install_dir,
                           fail=False)
            except FatalError:
                m.warning("Could not 'chrpath' %s" % sof)

        shell.call("ln -s . usr", self.tmp_install_dir, fail=False)

        # os.listdir() returns bare names, so build the full path to each
        # icon theme directory before checking for an index.theme file
        icons_dir = os.path.join(self.tmp_install_dir, "share/icons/")
        for icondir in os.listdir(icons_dir):
            icondir = os.path.join(icons_dir, icondir)
            if os.path.exists(os.path.join(icondir, "index.theme")):
                shell.call("gtk-update-icon-cache %s" % icondir, fail=False)

        shell.call("update-mime-database %s" %
                   os.path.join(self.tmp_install_dir, "share", "mime"),
                   fail=False)
        shell.call("glib-compile-schemas %s/share/glib-2.0/schemas" %
                   self.tmp_install_dir)

        # Use system wide applications in case the bundle needs to open apps not included in
        # the bundle (to show the documentation most probably)
        shell.call("rm -rf %s" %
                   os.path.join(self.tmp_install_dir, "share", "applications"),
                   fail=False)
        shell.call(
            "ln -s %s %s" %
            (os.path.join("/usr", "share", "applications"),
             os.path.join(self.tmp_install_dir, "share", "applications")),
            fail=False)
Example 29
    def _load_recipes_from_file(self, filepath, custom=None):
        recipes = []
        d = {'Platform': Platform, 'Architecture': Architecture,
                'BuildType': BuildType, 'SourceType': SourceType,
                'Distro': Distro, 'DistroVersion': DistroVersion,
                'License': License, 'recipe': crecipe, 'os': os,
                'BuildSteps': crecipe.BuildSteps,
                'InvalidRecipeError': InvalidRecipeError,
                'FatalError': FatalError,
                'custom': custom, '_': _, 'shell': shell}
        d_keys = set(list(d.keys()))
        try:
            new_d = d.copy()
            parse_file(filepath, new_d)
            # List new objects parsed added to the globals dict
            diff_keys = [x for x in set(new_d.keys()) - d_keys]
            # Find all objects inheriting from Recipe
            for recipe_cls_key in [x for x in diff_keys if self._is_recipe_class(new_d[x])]:
                if self._config.target_arch != Architecture.UNIVERSAL:
                    recipe = self._load_recipe_from_class(
                        new_d[recipe_cls_key], self._config, filepath, custom)
                else:
                    recipe = self._load_universal_recipe(d, new_d[recipe_cls_key],
                        recipe_cls_key, filepath)

                if recipe is not None:
                    recipes.append(recipe)
        except Exception:
            m.warning("Error loading recipe in file %s" % (filepath))
            print(traceback.format_exc())
        return recipes
Example 30
    def run(self, config, args):
        cookbook = CookBook(config)
        if args.recipe == 'all':
            recipes = cookbook.get_recipes_list()
        else:
            recipes = [cookbook.get_recipe(args.recipe)]
        if len(recipes) == 0:
            m.message(_("No recipes found"))
        tagname = args.tagname
        tagdescription = args.tagdescription
        force = args.force
        for recipe in recipes:
            if recipe.stype != SourceType.GIT and \
               recipe.stype != SourceType.GIT_TARBALL:
                m.message(_("Recipe '%s' has a custom source repository, "
                        "skipping") % recipe.name)
                continue

            recipe.fetch(checkout=False)

            tags = git.list_tags(recipe.repo_dir)
            exists = (tagname in tags)
            if exists:
                if not force:
                    m.warning(_("Recipe '%s' tag '%s' already exists, "
                            "not updating" % (recipe.name, tagname)))
                    continue
                git.delete_tag(recipe.repo_dir, tagname)

            commit = 'origin/sdk-%s' % recipe.version
            git.create_tag(recipe.repo_dir, tagname, tagdescription,
                    commit)
Example 31
 def _load_recipes_from_dir(self, repo, skip_errors):
     recipes = {}
     recipes_files = shell.find_files('*%s' % self.RECIPE_EXT, repo)
     recipes_files.extend(shell.find_files('*/*%s' % self.RECIPE_EXT, repo))
     custom = None
     # If a manifest is being used or if recipes_commits is defined, disable
     # usage of tarballs when tagged for release. We need to do this before
     # `custom.py` is loaded, so we can't set it on the module afterwards.
     # We need to set it on the parent class.
     if self._config.manifest or self._config.recipes_commits:
         crecipe.Recipe._using_manifest_force_git = True
     m_path = os.path.join(repo, 'custom.py')
     if os.path.exists(m_path):
         custom = imp.load_source('custom', m_path)
     for f in recipes_files:
         # Try to load recipes with the custom.py module located in the
         # recipes dir which can contain private classes and methods with
         # common code for gstreamer recipes.
         try:
             recipes_from_file = self._load_recipes_from_file(
                 f, skip_errors, custom)
         except RecipeNotFoundError:
             recipes_from_file = None
             m.warning(_("Could not find a valid recipe in %s") % f)
         if recipes_from_file is None:
             continue
         for recipe in recipes_from_file:
             recipes[recipe.name] = recipe
     return recipes
Example 32
def _get_vswhere_vs_install(vswhere, vs_versions):
    import json
    vswhere_exe = str(vswhere)
    # Get a list of installation paths for all installed Visual Studio
    # instances, from VS 2013 to the latest one, sorted from newest to
    # oldest, and including preview releases.
    # Will not include BuildTools installations.
    out = check_output([vswhere_exe, '-legacy', '-prerelease', '-format',
                        'json', '-utf8'])
    installs = _sort_vs_installs(json.loads(out))
    program_files = get_program_files_dir()
    for install in installs:
        version = install['installationVersion']
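        # installationVersion is a dotted string (e.g. '16.x.y.z'); keep only the major number to get 'vs16'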
        vs_version = 'vs' + version.split('.', maxsplit=1)[0]
        if vs_version not in vs_versions:
            continue
        prefix = install['installationPath']
        suffix = VCVARSALLS[vs_version][1]
        path = program_files / prefix / suffix
        # Find the location of the Visual Studio installation
        if path.is_file():
            return path.as_posix(), vs_version
    m.warning('vswhere.exe could not find Visual Studio version(s) {}. Falling '
              'back to manual searching...'.format(', '.join(vs_versions)))
    return None
Example 33
    def find_dep(self, deps, sha):
        for dep in deps:
            if dep['commit'] == sha:
                return dep

        m.warning("Did not find cache for commit %s" % sha)
        return None
Example 34
    def run(self, config, args):
        self.store = PackagesStore(config, offline=args.offline)
        p = self.store.get_package(args.package[0])

        if args.skip_deps_build and args.only_build_deps:
            raise UsageError(
                _("Cannot use --skip-deps-build together with "
                  "--only-build-deps"))

        if not args.skip_deps_build:
            self._build_deps(config, p, args.no_devel, args.offline,
                             args.dry_run, args.jobs)

        if args.only_build_deps or args.dry_run:
            return

        if args.compress_method != 'default':
            m.message('Forcing tarball compression method as ' +
                      args.compress_method)
            config.package_tarball_compression = args.compress_method

        if p is None:
            raise PackageNotFoundError(args.package[0])

        p.pre_package()
        packager_class = Packager
        if args.tarball:
            if config.target_platform == Platform.ANDROID and \
               config.target_arch == Architecture.UNIVERSAL:
                packager_class = AndroidPackager
            else:
                packager_class = DistTarball
        elif config.variants.uwp:
            # Split devel/runtime packages are useless for UWP since we will
            # need both when building the package, and all needed runtime DLLs
            # are packaged with the app as assets.
            m.warning('Forcing single-tarball output for UWP package')
            args.no_split = True
            packager_class = DistTarball

        m.action(_("Creating package for %s") % p.name)
        pkg = packager_class(config, p, self.store)
        output_dir = os.path.abspath(args.output_dir)
        if isinstance(pkg, DistTarball):
            paths = pkg.pack(output_dir,
                             args.no_devel,
                             args.force,
                             args.keep_temp,
                             split=not args.no_split,
                             strip_binaries=p.strip)
        else:
            paths = pkg.pack(output_dir, args.no_devel, args.force,
                             args.keep_temp)
        if None in paths:
            paths.remove(None)
        paths = p.post_package(paths, output_dir) or paths
        m.action(
            _("Package successfully created in %s") %
            ' '.join([os.path.abspath(x) for x in paths]))
Example 35
 def install_xz(self):
     msys_xz = shutil.which('xz')
     if not msys_xz:
         m.warning('xz not found, are you not using an MSYS shell?')
         return
     msys_bindir = os.path.dirname(msys_xz)
     src = os.path.join(self.xz_tmp_prefix.name, 'bin_x86-64')
     for b in ('xz.exe', 'xzdec.exe', 'lzmadec.exe', 'lzmainfo.exe'):
         shutil.copy2(os.path.join(src, b), os.path.join(msys_bindir, b))
Example 36
 def save(self):
     try:
         cache_file = self._cache_file(self.get_config())
         if not os.path.exists(os.path.dirname(cache_file)):
             os.makedirs(os.path.dirname(cache_file))
         with open(cache_file, 'wb') as f:
             pickle.dump(self.status, f)
     except IOError as ex:
         m.warning(_("Could not cache the CookBook: %s") % ex)
Example 38
def _qmake_or_pkgdir(qmake):
    qmake_path = Path(qmake)
    if not qmake_path.is_file():
        m.warning('QMAKE={!r} does not exist'.format(str(qmake_path)))
        return (None, None)
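    # qmake normally lives in <prefix>/bin, so pkg-config files are assumed to sit in <prefix>/lib/pkgconfig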
    pkgdir = (qmake_path.parent.parent / 'lib/pkgconfig')
    if pkgdir.is_dir():
        return (pkgdir.as_posix(), qmake_path.as_posix())
    return (None, qmake_path.as_posix())
Example 39
    def strip_dir(self, dir_path):
        if not self.strip_cmd:
            m.warning('Strip command is not defined')
            return

        tasks = []
        for dirpath, dirnames, filenames in os.walk(dir_path):
            for f in filenames:
                tasks.append(self._async_strip_file(os.path.join(dirpath, f)))
        run_until_complete(tasks)
Example 40
    def get_deps(self, config, args):
        url = self.make_url(config, args, self.log_filename)
        deps = []

        try:
            deps = self.json_get(url)
        except FatalError as e:
            m.warning("Could not get cache list: %s" % e.msg)

        return deps
Example 41
    def __init__(self, args):
        if user_is_root():
            m.warning(_("Running as root"))

        self.check_in_cerbero_shell()
        self.init_logging()
        self.create_parser()
        self.load_commands()
        self.parse_arguments(args)
        self.load_config()
        self.run_command()
Example 42
 def _load_packages_from_dir(self, repo):
     packages_dict = {}
     packages = shell.find_files('*%s' % self.PKG_EXT, repo)
     packages.extend(shell.find_files('*/*%s' % self.PKG_EXT, repo))
     for f in packages:
         p = self._load_package_from_file(f)
         if p is None:
             m.warning(_("Could not find a valid package in %s") % f)
             continue
         packages_dict[p.name] = p
     return packages_dict
Example 43
def load_commands(subparsers):
    import os
    commands_dir = os.path.abspath(os.path.dirname(__file__))

    for name in os.listdir(commands_dir):
        name, extension = os.path.splitext(name)
        if extension != '.py':
            continue
        try:
            __import__('cerbero.commands.%s' % name)
        except ImportError as e:
            m.warning("Error importing command %s:\n %s" % (name, e))
Example 44
 def gen_library_file(self, output_dir=None):
     '''
     Generates library files (.lib) for the DLLs provided by this recipe
     '''
     genlib = GenLib()
     for dllpath in self.libraries():
         try:
             implib = genlib.create(os.path.join(self.config.prefix, dllpath),
                     os.path.join(self.config.prefix, 'lib'))
             logging.debug('Created %s' % implib)
         except Exception:
             m.warning("Could not create .lib, gendef might be missing")
Example 45
    def run(self, config, args):
        name = args.name[0]
        version = args.version[0]
        filename = os.path.join(config.recipes_dir, '%s.recipe' % name)
        if not args.force and os.path.exists(filename):
            m.warning(_("Recipe '%s' (%s) already exists, "
                "use -f to replace" % (name, filename)))
            return

        template_args = {}

        template = RECEIPT_TPL
        template_args['name'] = name
        template_args['version'] = version

        if args.licenses:
            licenses = args.licenses.split(',')
            self.validate_licenses(licenses)
            template += LICENSES_TPL
            template_args['licenses'] = ', '.join(
                    ['License.' + self.supported_licenses[l] \
                        for l in licenses])

        if args.commit:
            template += COMMIT_TPL
            template_args['commit'] = args.commit

        if args.origin:
            template += ORIGIN_TPL
            template_args['origin'] = args.origin

        if args.deps:
            template += DEPS_TPL
            deps = args.deps.split(',')
            cookbook = CookBook(config)
            for dname in deps:
                try:
                    recipe = cookbook.get_recipe(dname)
                except RecipeNotFoundError as ex:
                    raise UsageError(_("Error creating recipe: "
                            "dependent recipe %s does not exist") % dname)
            template_args['deps'] = deps

        try:
            f = open(filename, 'w')
            f.write(template % template_args)
            f.close()

            m.action(_("Recipe '%s' successfully created in %s") %
                    (name, filename))
        except IOError as ex:
            raise FatalError(_("Error creating recipe: %s") % ex)
Example 46
    def __init__(self, args):
        if user_is_root():
            m.warning(_("Running as root"))

        self.check_in_cerbero_shell()
        self.init_logging()

        # Create the argparse-based argument parser before any .cbc config is loaded
        self.create_parser()
        self.load_commands()
        self.parse_arguments(args)
        self.load_config()
        self.run_command()
Example 47
    def __new__(klass, config, package, store):
        d = config.target_distro
        v = config.target_distro_version

        if d not in _packagers:
            raise FatalError(_("No packager available for the distro %s" % d))
        if v not in _packagers[d]:
            # Be tolerant with the distro version
            m.warning(_("No specific packager available for the distro "
                "version %s, using generic packager for distro %s" % (v, d)))
            v = None

        return _packagers[d][v](config, package, store)
Example 48
    def run(self, config, args):
        name = args.name[0]
        version = args.version[0]
        store = PackagesStore(config)
        filename = os.path.join(config.packages_dir, '%s.package' % name)
        if not args.force and os.path.exists(filename):
            m.warning(_("Package '%s' (%s) already exists, "
                "use -f to replace" % (name, filename)))
            return

        template_args = {}

        template = RECEIPT_TPL
        template_args['name'] = name
        template_args['version'] = version

        if args.short_desc:
            template_args['shortdesc'] = args.short_desc
        else:
            template_args['shortdesc'] = name

        if args.codename:
            template += CODENAME_TPL
            template_args['codename'] = args.codename

        if args.vendor:
            template += VENDOR_TPL
            template_args['vendor'] = args.vendor

        if args.url:
            template += URL_TPL
            template_args['url'] = args.url

        if args.license:
            self.validate_licenses([args.license])
            template += LICENSE_TPL
            template_args['license'] = \
                'License.' + self.supported_licenses[args.license]

        deps = []
        if args.deps:
            template += DEPS_TPL
            deps = args.deps.split(',')
            for dname in deps:
                try:
                    package = store.get_package(dname)
                except Exception as ex:
                    raise UsageError(_("Error creating package: "
                            "dependent package %s does not exist") % dname)
            template_args['deps'] = deps
Example 49
 def _create_merge_modules(self, package_type):
     packagedeps = {}
     for package in self.packagedeps:
         package.set_mode(package_type)
         m.action("Creating Merge Module for %s" % package)
         packager = MergeModulePackager(self.config, package, self.store)
         try:
             path = packager.create_merge_module(self.output_dir,
                        package_type, self.force, self.package.version,
                        self.keep_temp)
             packagedeps[package] = path
         except EmptyPackageError:
             m.warning("Package %s is empty" % package)
     self.packagedeps = packagedeps
     self.merge_modules[package_type] = packagedeps.values()
Example 50
 def _copy_installdir(self):
     '''
     Copy all the files that are going to be packaged to the bundle's
     temporary directory
     '''
     os.makedirs(self.tmp_install_dir)
     for f in set(self.package.files_list()):
         in_path = os.path.join(self.config.prefix, f)
         if not os.path.exists(in_path):
             m.warning("File %s is missing and won't be added to the "
                       "package" % in_path)
             continue
         out_path = os.path.join(self.tmp_install_dir, f)
         odir = os.path.split(out_path)[0]
         if not os.path.exists(odir):
             os.makedirs(odir)
         shutil.copy(in_path, out_path)
Example 51
 def _create_bundle(self):
     """
     Moves all the files that are going to be packaged to the bundle's
     temporary directory
     """
     out_dir = os.path.join(self.appdir, "Contents", "Home")
     os.makedirs(out_dir)
     for f in self.package.files_list():
         in_path = os.path.join(self.config.prefix, f)
         if not os.path.exists(in_path):
             m.warning("File %s is missing and won't be added to the " "package" % in_path)
             continue
         out_path = os.path.join(out_dir, f)
         odir = os.path.split(out_path)[0]
         if not os.path.exists(odir):
             os.makedirs(odir)
         shutil.copy(in_path, out_path)
Example 52
def copy_files(origdir, destdir, files, extensions, target_platform):
    for f in files:
        f = f % extensions
        install_dir = os.path.dirname(os.path.join(destdir, f))
        if not os.path.exists(install_dir):
            os.makedirs(install_dir)
        if destdir[1] == ':':
            # windows path
            relprefix = to_unixpath(destdir)[2:]
        else:
            relprefix = destdir[1:]
        orig = os.path.join(origdir, relprefix, f)
        dest = os.path.join(destdir, f)
        m.action("copying %s to %s" % (orig, dest))
        try:
            shutil.copy(orig, dest)
        except IOError:
            m.warning("Could not copy %s to %s" % (orig, dest))
Example 53
    def __new__(klass, config, package, store):
        d = config.target_distro
        v = config.target_distro_version

        if d not in _packagers:
            raise FatalError(_("No packager available for the distro %s" % d))

        if v not in _packagers[d]:
            # Be tolerant with the distro version
            m.warning(_("No specific packager available for the distro "
                "version %s, using generic packager for distro %s" % (v, d)))
            v = None

        if (d == Distro.WINDOWS and config.platform == Platform.LINUX):
            m.warning("Cross-compiling for Windows, overriding Packager")
            d = Distro.NONE

        return _packagers[d][v](config, package, store)
Example 54
 def _create_merge_modules(self, package_type, wix_use_fragment):
     packagedeps = {}
     for package in self.packagedeps:
         package.set_mode(package_type)
         package.wix_use_fragment = wix_use_fragment
         m.action("Creating Merge Module for %s" % package)
         packager = MergeModulePackager(self.config, package, self.store)
         if self.wix_wine_prefix:
             packager.wix_wine_prefix = self.wix_wine_prefix
         try:
             path = packager.create_merge_module(self.output_dir,
                        package_type, self.force, self.package.version,
                        self.keep_temp)
             packagedeps[package] = path
         except EmptyPackageError:
             m.warning("Package %s is empty" % package)
         self.packager_tmpdirs.append(packager._tmpdir())
     self.packagedeps = packagedeps
     self.merge_modules[package_type] = packagedeps.values()
Example 55
 def __init__(self):
     Source.__init__(self)
     self.remotes = {} if self.remotes is None else self.remotes.copy()
     if 'origin' in self.remotes:
         url = self.replace_name_and_version(self.remotes['origin'])
         o = urllib.parse.urlparse(url)
         if o.scheme in ('http', 'git'):
             raise FatalError('git remote origin URL {!r} must use HTTPS not {!r}'
                              ''.format(url, o.scheme))
         if o.scheme in ('file', 'ssh'):
             m.warning('git remote origin URL {!r} uses {!r}, please only use this '
                       'for testing'.format(url, o.scheme))
         self.remotes['origin'] = url
     else:
         # XXX: When is this used?
         self.remotes['origin'] = '%s/%s.git' % \
                                  (self.config.git_root, self.name)
     self.repo_dir = os.path.join(self.config.local_sources, self.name)
     self._previous_env = None
Example 56
 def files_list(self, package_type, force):
     if package_type == PackageType.DEVEL:
         files = self.package.devel_files_list()
     else:
         files = self.package.files_list()
     real_files = []
     for f in files:
         if os.path.exists(os.path.join(self.config.prefix, f)):
             real_files.append(f)
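     # files declared by the package but not actually present in the prefix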
     diff = list(set(files) - set(real_files))
     if len(diff) != 0:
         if force:
             m.warning(_("Some files required by this package are missing "
                         "in the prefix:\n%s" % '\n'.join(diff)))
         else:
             raise MissingPackageFilesError(diff)
     if len(real_files) == 0:
         raise EmptyPackageError(self.package.name)
     return real_files
Example 57
 def _load_packages(self):
     self._packages = {}
     packages = defaultdict(dict)
     repos = self._config.get_packages_repos()
     for reponame, (repodir, priority) in repos.items():
         packages[int(priority)].update(
                 self._load_packages_from_dir(repodir))
     # Add packages by ascending priority
     for key in sorted(packages.keys()):
         self._packages.update(packages[key])
     # Add a package for every recipe
     for recipe in self.cookbook.get_recipes_list():
         if not recipe.allow_package_creation:
             continue
         p = self._package_from_recipe(recipe)
         if p.name in self._packages.keys():
             m.warning("Package with name '%s' already exists, not including it" % p.name)
         else:
             self._packages[p.name] = p