def configure(self):
    """Prepare and run the autotools configure step for this recipe.

    Adjusts the configure template/script, refreshes outdated
    config.guess/config.sub helper scripts, propagates autoconf cache
    variables on Windows, and finally delegates to
    MakefilesBase.configure().
    """
    if self.supports_non_src_build:
        # Out-of-source build: invoke configure via its path in the repo.
        self.config_sh = os.path.join(self.repo_dir, self.config_sh)

    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.make_dir, 'configure.in')) or \
            os.path.exists(os.path.join(self.make_dir, 'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "

    if self.autoreconf:
        shell.call(self.autoreconf_sh, self.make_dir)

    # Overwrite every config.guess/config.sub in the tree with our
    # up-to-date copies so newer architectures are recognized.  The two
    # original copy loops were identical except for the file name, so
    # they are folded into one.
    for name in ('config.guess', 'config.sub'):
        found = shell.check_call('find %s -type f -name %s' %
                                 (self.make_dir, name)).split('\n')
        ours = os.path.join(self.config._relative_path('data'), 'autotools',
                            name)
        for f in found:
            if not f:
                # 'find' output ends with a newline; skip the empty entry
                # (the original list.remove('') raised if it was missing).
                continue
            m.action("copying %s to %s" % (ours, f))
            shutil.copy(ours, f)

    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On windows, environment variables are uppercase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh.
        # BUGFIX: os.environ.iteritems() exists only on Python 2;
        # items() behaves identically on both Python 2 and 3.
        for k, v in os.environ.items():
            if k[2:6] == '_cv_':
                self.configure_tpl += ' %s="%s"' % (k, v)

    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'

    # The configure cache cannot be reused when the environment changed
    # or when system libraries are used: cached results would be stale.
    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False

    if self.new_env or self.append_env:
        use_configure_cache = False

    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.config_sh += ' --cache-file=%s' % cache

    MakefilesBase.configure(self)
Esempio n. 2
0
def generate_gir_h_from_gir(gir_file, gir_h_file):
    """
    Generate a .gir.h file from the specified .gir file, and write to the
    specified gir.h file location

    @gir_file: The .gir file
    @gir_h_file: The location to write the generated .gir.h file to
    """
    # FIXME: xxd is provided by vim-common, and not installed by
    # bootstrap/build-tools
    hexdump = shell.check_call('xxd -i ' + gir_file, shell=True, split=False)
    # shasum output starts with the 40-hex-digit digest; keep only that.
    shasum = shell.check_call('shasum -a 1 -b < ' + gir_file, shell=True,
                              split=False)[:40]
    sha1fname = gir_file + '.sha1'
    # BUGFIX: use context managers so file objects are closed even when a
    # subprocess call raises (the original leaked open handles).
    with open(sha1fname, 'w') as sha1f:
        sha1f.write(shasum)
    sha1dump = shell.check_call('xxd -i ' + sha1fname, shell=True,
                                split=False)
    with open(gir_h_file, 'w') as outf:
        outf.write(hexdump)
        # Append checksum to .gir.h file
        outf.write(sha1dump)
    os.unlink(sha1fname)
Esempio n. 3
0
def generate_gir_h_from_gir(gir_file, gir_h_file):
    """
    Generate a .gir.h file from the specified .gir file, and write to the
    specified gir.h file location

    @gir_file: The .gir file
    @gir_h_file: The location to write the generated .gir.h file to
    """
    # FIXME: xxd is provided by vim-common, and not installed by
    # bootstrap/build-tools
    hexdump = shell.check_call('xxd -i ' + gir_file, shell=True, split=False)
    # shasum output starts with the 40-hex-digit digest; keep only that.
    shasum = shell.check_call('shasum -a 1 -b < ' + gir_file,
                              shell=True,
                              split=False)[:40]
    sha1fname = gir_file + '.sha1'
    # BUGFIX: use context managers so file objects are closed even when a
    # subprocess call raises (the original leaked open handles).
    with open(sha1fname, 'w') as sha1f:
        sha1f.write(shasum)
    sha1dump = shell.check_call('xxd -i ' + sha1fname, shell=True, split=False)
    with open(gir_h_file, 'w') as outf:
        outf.write(hexdump)
        # Append checksum to .gir.h file
        outf.write(sha1dump)
    os.unlink(sha1fname)
Esempio n. 4
0
    def configure(self):
        """Prepare and run the autotools configure step for this recipe.

        Adjusts the configure template/script, refreshes outdated
        config.guess/config.sub helper scripts, propagates autoconf cache
        variables on Windows, and finally delegates to
        MakefilesBase.configure().
        """
        if self.supports_non_src_build:
            # Out-of-source build: invoke configure via its path in the repo.
            self.config_sh = os.path.join(self.repo_dir, self.config_sh)

        # Only use --disable-maintainer mode for real autotools based projects
        if os.path.exists(os.path.join(self.make_dir, 'configure.in')) or \
                os.path.exists(os.path.join(self.make_dir, 'configure.ac')):
            self.configure_tpl += " --disable-maintainer-mode "
            self.configure_tpl += " --disable-silent-rules "

        if self.autoreconf:
            shell.call(self.autoreconf_sh, self.make_dir)

        # Overwrite every config.guess/config.sub in the tree with our
        # up-to-date copies so newer architectures are recognized.  The two
        # original copy loops were identical except for the file name, so
        # they are folded into one.
        for name in ('config.guess', 'config.sub'):
            found = shell.check_call('find %s -type f -name %s' %
                                     (self.make_dir, name)).split('\n')
            ours = os.path.join(self.config._relative_path('data'),
                                'autotools', name)
            for f in found:
                if not f:
                    # 'find' output ends with a newline; skip the empty
                    # entry (list.remove('') raised if it was missing).
                    continue
                m.action("copying %s to %s" % (ours, f))
                shutil.copy(ours, f)

        if self.config.platform == Platform.WINDOWS and \
                self.supports_cache_variables:
            # On windows, environment variables are uppercase, but we still
            # need to pass things like am_cv_python_platform in lowercase for
            # configure and autogen.sh.
            # BUGFIX: os.environ.iteritems() exists only on Python 2;
            # items() behaves identically on both Python 2 and 3.
            for k, v in os.environ.items():
                if k[2:6] == '_cv_':
                    self.configure_tpl += ' %s="%s"' % (k, v)

        if self.add_host_build_target:
            if self.config.host is not None:
                self.configure_tpl += ' --host=%(host)s'
            if self.config.build is not None:
                self.configure_tpl += ' --build=%(build)s'
            if self.config.target is not None:
                self.configure_tpl += ' --target=%(target)s'

        # The configure cache cannot be reused when the environment changed
        # or when system libraries are used: cached results would be stale.
        use_configure_cache = self.config.use_configure_cache
        if self.use_system_libs and self.config.allow_system_libs:
            use_configure_cache = False

        if self.new_env or self.append_env:
            use_configure_cache = False

        if use_configure_cache and self.can_use_configure_cache:
            cache = os.path.join(self.config.sources, '.configure.cache')
            self.config_sh += ' --cache-file=%s' % cache

        MakefilesBase.configure(self)
Esempio n. 5
0
    def extract(self):
        """Check out self.commit from the local repo into self.build_dir.

        Returns False (no-op) when the existing checkout already matches
        the requested commit; returns None otherwise.  For recipes that
        support non-source builds only the build directory is created.
        """
        # For Git with LFS there's a bug where we can not fetch from
        # a local folder. We overcome that by using the --reference arg
        # https://github.com/git-lfs/git-lfs/issues/1207#issuecomment-217455331
        if os.path.exists(self.build_dir):
            # fix read-only permissions
            if self.config.platform == Platform.WINDOWS:
                shell.call('chmod -R +w .git/', self.build_dir, fail=False)
            try:
                commit_hash = git.get_hash(self.repo_dir, self.commit)
                checkout_hash = git.get_hash(self.build_dir, 'HEAD')
                if commit_hash == checkout_hash:
                    # Already at the wanted commit: nothing to extract.
                    return False
            except Exception:
                # Hash lookup failed (e.g. broken checkout); fall through
                # and recreate the build dir from scratch.
                pass
            shutil.rmtree(self.build_dir)
        if not os.path.exists(self.build_dir):
            os.mkdir(self.build_dir)
        if self.supports_non_src_build:
            # The build runs directly from repo_dir; no clone needed.
            return

        # get the remote this commit belongs to
        branch = shell.check_call(
            'git branch -r --contains %s' % (self.commit), self.repo_dir)
        remote = branch.strip().split('/')[0]
        # this one appears on git 2.7
        remote_url = shell.check_call('git remote get-url %s' % (remote),
                                      self.repo_dir).rstrip()
        shell.call(
            'git clone --no-checkout --reference %s %s .' %
            (self.repo_dir, remote_url), self.build_dir)
        shell.call('git checkout -b build %s' % (self.commit), self.build_dir)
        shell.call('git submodule update --init --recursive', self.build_dir)
Esempio n. 6
0
    def extract(self):
        """Check out self.commit into self.build_dir from the local repo.

        Returns False when the existing checkout already matches the
        requested commit; otherwise the build dir is recreated and cloned.
        """
        # For Git with LFS there's a bug where we can not fetch from
        # a local folder. We overcome that by using the --reference arg
        # https://github.com/git-lfs/git-lfs/issues/1207#issuecomment-217455331
        if os.path.exists(self.build_dir):
            # fix read-only permissions
            if self.config.platform == Platform.WINDOWS:
                shell.call('chmod -R +w .git/', self.build_dir, fail=False)
            try:
                commit_hash = git.get_hash(self.repo_dir, self.commit)
                checkout_hash = git.get_hash(self.build_dir, 'HEAD')
                if commit_hash == checkout_hash:
                    # Already at the wanted commit: nothing to extract.
                    return False
            except Exception:
                # Hash lookup failed; rebuild the checkout from scratch.
                pass
            shutil.rmtree(self.build_dir)
        if not os.path.exists(self.build_dir):
            os.mkdir(self.build_dir)
        if self.supports_non_src_build:
            # The build runs directly from repo_dir; no clone needed.
            return

        # get the remote this commit belongs to
        branch = shell.check_call('git branch -r --contains %s' % (self.commit), self.repo_dir)
        remote = branch.strip().split('/')[0]
        # this one appears on git 2.7
        remote_url = shell.check_call('git remote get-url %s' % (remote), self.repo_dir).rstrip()
        shell.call('git clone --no-checkout --reference %s %s .' % (self.repo_dir, remote_url), self.build_dir)
        shell.call('git checkout -b build %s' % (self.commit), self.build_dir)
        shell.call('git submodule update --init --recursive', self.build_dir)
Esempio n. 7
0
    def commit(self):
        """Return the current git commit, branch and tag of this checkout
        as a dict with keys 'commit', 'branch' and 'tag'."""
        # The repository root lives two directories above this file.
        rootd = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../..'))

        sha = shell.check_call('git rev-parse HEAD', rootd).strip()
        tag = shell.check_call('git tag --contain %s' % sha, rootd).strip()
        branch = shell.check_call('git branch', rootd).strip('*').strip()
        return {'commit': sha, 'branch': branch, 'tag': tag}
Esempio n. 8
0
def parse_dir(dirpath, extension=None):
    """Return the list of files under dirpath.

    Uses 'git ls-files' when run inside a git checkout, 'find' otherwise.
    When @extension is given, only files ending with it are returned.
    """
    if os.path.exists(".git"):
        files = shell.check_call("git ls-files %s" % dirpath).split("\n")
    else:
        files = shell.check_call("find %s -type f" % dirpath).split("\n")
    # BUGFIX: drop all empty entries instead of list.remove(""), which
    # raises ValueError when the output has no trailing newline.
    files = [f for f in files if f]
    if extension is None:
        return files
    return [f for f in files if f.endswith(extension)]
Esempio n. 9
0
def parse_dir(dirpath, extension=None):
    """List the files below dirpath, optionally filtered by extension.

    Inside a git checkout 'git ls-files' is used; elsewhere 'find'.
    """
    if os.path.exists('.git'):
        cmd = 'git ls-files %s' % dirpath
    else:
        cmd = 'find %s -type f' % dirpath
    files = shell.check_call(cmd).split('\n')
    # The command output ends with a newline, leaving one empty entry.
    files.remove('')
    if extension is None:
        return files
    return [f for f in files if f.endswith(extension)]
Esempio n. 10
0
 def check_arch():
     """Verify dpkg knows the target architecture.

     Raises ConfigurationError with instructions when @arch is neither
     the native nor a configured foreign dpkg architecture.
     """
     native_arch = shell.check_call('dpkg --print-architecture')
     if native_arch == arch:
         return
     foreign_archs = shell.check_call('dpkg --print-foreign-architectures')
     if arch in foreign_archs.split():
         return
     # BUGFIX: the suggested command was misspelled '--add-architeture';
     # the correct dpkg option is '--add-architecture'.
     raise ConfigurationError(('Architecture %s is missing from your setup. ' + \
                               'You can add it with: "dpkg --add-architecture %s",' + \
                               ' then run "apt-get update."') \
                               % (arch, arch))
Esempio n. 11
0
 def check_arch():
     """Verify dpkg knows the target architecture; raise otherwise.

     Passes when @arch is the native or a configured foreign dpkg
     architecture, else raises ConfigurationError with instructions.
     """
     native_arch = shell.check_call('dpkg --print-architecture')
     if native_arch == arch:
         return
     foreign_archs = shell.check_call('dpkg --print-foreign-architectures')
     if arch in foreign_archs.split():
         return
     msg = ('Architecture %s is missing from your setup. '
            'You can add it with: "dpkg --add-architecture %s",'
            ' then run "apt-get update."')
     raise ConfigurationError(msg % (arch, arch))
Esempio n. 12
0
 def _perl_version(self):
     """Return the installed perl version as 'major.minor.revision'.

     perl's $] prints e.g. '5.026002': the major version, a dot, then a
     3-digit zero-padded minor and a 3-digit revision.
     """
     version = shell.check_call("perl -e 'print \"$]\";'")
     # Split on '.' instead of indexing fixed positions so a future
     # multi-digit major version (>= 10) is handled too (resolves the
     # old FIXME about that case).
     mayor, frac = version.split('.', 1)
     minor = str(int(frac[0:3]))
     revision = str(int(frac[3:6]))
     return '.'.join([mayor, minor, revision])
Esempio n. 13
0
def submodules_update(git_dir, src_dir=None, fail=True):
    '''
    Update submodules from a local directory

    @param git_dir: path of the git repository
    @type git_dir: str
    @param src_dir: path or base URI of the source directory
    @type src_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    '''
    if src_dir:
        # Rewrite each submodule URL in .gitmodules to point below src_dir
        # so the update fetches from the local mirror.
        config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
                                  git_dir)
        config_array = [s.split('=', 1) for s in config.split('\n')]
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.path'):
                submodule = c[0][len('submodule.'):-len('.path')]
                shell.call(
                    "%s config --file=.gitmodules submodule.%s.url %s" %
                    (GIT, submodule, os.path.join(src_dir, c[1])), git_dir)
    shell.call("%s submodule init" % GIT, git_dir)
    shell.call("%s submodule sync" % GIT, git_dir)
    shell.call("%s submodule update" % GIT, git_dir, fail=fail)
    if src_dir:
        # Restore the original URLs that were captured in config_array
        # before the rewrite above.
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.url'):
                shell.call(
                    "%s config --file=.gitmodules %s  %s" % (GIT, c[0], c[1]),
                    git_dir)
        shell.call("%s submodule sync" % GIT, git_dir)
Esempio n. 14
0
 def _perl_version(self):
     """Return the installed perl version as 'major.minor.revision'."""
     version = shell.check_call("perl -e 'print \"$]\";'")
     # FIXME: when perl's mayor is >= 10
     # $] prints e.g. '5.026002': char 0 is the major version, chars 2-4
     # the zero-padded minor and chars 5-7 the revision.
     mayor = version[0]
     minor = str(int(version[2:5]))
     revision = str(int(version[5:8]))
     return '.'.join([mayor, minor, revision])
Esempio n. 15
0
async def submodules_update(git_dir, src_dir=None, fail=True, offline=False, logfile=None):
    '''
    Update submodules asynchronously from a local directory

    @param git_dir: path of the git repository
    @type git_dir: str
    @param src_dir: path or base URI of the source directory
    @type src_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    @param offline: don't use the network
    @type offline: bool
    @param logfile: file object to log command output to, or None
    '''
    if src_dir:
        # Rewrite each submodule URL in .gitmodules to point below src_dir
        # so the update fetches from the local mirror.
        config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
                                  git_dir)
        config_array = [s.split('=', 1) for s in config.split('\n')]
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.path'):
                submodule = c[0][len('submodule.'):-len('.path')]
                shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
                           (GIT, submodule, os.path.join(src_dir, c[1])),
                           git_dir, logfile=logfile)
    shell.call("%s submodule init" % GIT, git_dir, logfile=logfile)
    if src_dir or not offline:
        await shell.async_call("%s submodule sync" % GIT, git_dir, logfile=logfile, cpu_bound=False)
        await shell.async_call("%s submodule update" % GIT, git_dir, fail=fail, logfile=logfile, cpu_bound=False)
    else:
        # Offline and no local mirror: update without fetching.
        await shell.async_call("%s submodule update --no-fetch" % GIT, git_dir, fail=fail, logfile=logfile, cpu_bound=False)
    if src_dir:
        # Restore the original URLs that were captured in config_array
        # before the rewrite above.
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.url'):
                shell.call("%s config --file=.gitmodules %s  %s" %
                           (GIT, c[0], c[1]), git_dir, logfile=logfile)
        await shell.async_call("%s submodule sync" % GIT, git_dir, logfile=logfile, cpu_bound=False)
Esempio n. 16
0
def submodules_update(git_dir, src_dir=None, fail=True):
    '''
    Update submodules from a local directory

    @param git_dir: path of the git repository
    @type git_dir: str
    @param src_dir: path or base URI of the source directory
    @type src_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    '''
    if src_dir:
        # Rewrite each submodule URL in .gitmodules to point below src_dir
        # so the update fetches from the local mirror.
        config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
                                  git_dir)
        config_array = [s.split('=', 1) for s in config.split('\n')]
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.path'):
                submodule = c[0][len('submodule.'):-len('.path')]
                shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
                           (GIT, submodule, os.path.join(src_dir, c[1])),
                           git_dir)
    shell.call("%s submodule init" % GIT, git_dir)
    shell.call("%s submodule sync" % GIT, git_dir)
    shell.call("%s submodule update" % GIT, git_dir, fail=fail)
    if src_dir:
        # Restore the original URLs that were captured in config_array
        # before the rewrite above.
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.url'):
                shell.call("%s config --file=.gitmodules %s  %s" %
                           (GIT, c[0], c[1]), git_dir)
        shell.call("%s submodule sync" % GIT, git_dir)
Esempio n. 17
0
 def _get_la_deps_from_pc(self, laname, pcname, env):
     """Return 'libX' dependency names for @laname derived from
     'pkg-config --libs-only-l --static' output for @pcname."""
     out = shell.check_call('pkg-config --libs-only-l --static ' + pcname,
                            env=env)
     deps = []
     for flag in self._get_unique_ordered(out.split()):
         # Don't add the library itself to the list of dependencies
         if flag[2:] == laname[3:]:
             continue
         deps.append('lib' + flag[2:])
     return deps
Esempio n. 18
0
    def _create_framework_library(self, libraries):
        """Merge the static @libraries into one framework library.

        Per architecture, each input lib is split into its object files,
        deduplicated by (md5, size), and appended to an 'ar' archive;
        when more than one architecture is involved the per-arch archives
        are lipo'd into a fat binary at self.install_name.
        """
        tmpdir = tempfile.mkdtemp()

        libname = os.path.basename(self.libname)  # just to make sure

        if self.arch == Architecture.UNIVERSAL:
            archs = self.universal_archs
        else:
            archs = [self.arch]

        # The apple tools expect 'i386', not the project's X86 name.
        archs = [a if a != Architecture.X86 else 'i386' for a in archs]

        for thin_arch in archs:
            object_files_md5 = []
            shell.call('mkdir -p %s' % thin_arch, tmpdir)
            tmpdir_thinarch = os.path.join(tmpdir, thin_arch)

            for lib in libraries:
                # Prefix object names with the source lib to avoid clashes.
                libprefix = os.path.split(lib)[-1].replace('.', '_')

                if len(
                        archs
                ) > 1:  #should be a fat file, split only to the arch we want
                    libprefix += '_%s_' % thin_arch
                    lib_tmpdir = self._split_static_lib(lib, thin_arch)
                else:
                    lib_tmpdir = self._split_static_lib(lib)

                obj_files = shell.ls_files(['*.o'], lib_tmpdir)
                for obj_f in obj_files:
                    obj_path = os.path.join(lib_tmpdir, obj_f)
                    # Deduplicate identical objects by md5 digest + size.
                    md5 = shell.check_call('md5 -q %s' %
                                           obj_path).split('\n')[0]
                    md5 = '%s-%s' % (md5, os.path.getsize(obj_path))
                    if md5 not in object_files_md5:
                        shell.call(
                            'cp %s %s' % (obj_path, '%s-%s' %
                                          (libprefix, obj_f)), tmpdir_thinarch)
                        # -S skips the index; it is rebuilt once below.
                        shell.call(
                            'ar -cqS %s %s-%s' % (libname, libprefix, obj_f),
                            tmpdir_thinarch)
                        object_files_md5.append(md5)
                shutil.rmtree(lib_tmpdir)
            # Rebuild the archive symbol index after all -S appends.
            shell.call('ar -s %s' % (libname), tmpdir_thinarch)

        files = [os.path.join(tmpdir, arch, libname) for arch in archs]
        self._check_duplicated_symbols(files, tmpdir)
        if len(archs) > 1:
            #merge the final libs into a fat file again
            shell.call(
                'lipo %s -create -output %s' %
                (' '.join(files), self.install_name), tmpdir)
        else:
            # NOTE(review): this uses self.arch for the dir name while the
            # paths above use 'i386' when the arch is X86 — confirm the
            # single-arch X86 case is handled by callers.
            shell.call(
                'cp %s %s' %
                (os.path.join(tmpdir, self.arch, libname), self.install_name),
                tmpdir)
        shutil.rmtree(tmpdir)
Esempio n. 19
0
def revision(repo):
    '''
    Get the current revision of a repository with svn log

    @param repo: the path to the repository
    @type  repo: str
    '''
    log_output = shell.check_call('svn log', repo)
    # The second line of 'svn log' output looks like 'r123 | author | ...';
    # the first whitespace-separated token is the revision.
    header = log_output.split('\n')[1]
    return header.split(' ')[0]
Esempio n. 20
0
 def list_file_deps(self, prefix, path):
     """Return dependencies of the Mach-O file at @path, resolved under
     @prefix (otool -L output, with @rpath entries rewritten)."""
     lines = shell.check_call('otool -L %s' % path).split('\n')[1:]
     # Shared libraries might be relocated, we look for files with the
     # prefix or starting with @rpath
     deps = []
     for line in lines:
         if prefix not in line and "@rpath" not in line:
             continue
         deps.append(line.strip().split(' ')[0])
     return [d.replace("@rpath/", prefix) for d in deps]
Esempio n. 21
0
    def _split_static_lib(self, lib, thin_arch=None):
        '''Splits the static lib @lib into its object files

           Splits the static lib @lib into its object files and returns
           a new temporary directory where the .o files should be found.

           if @thin_arch was provided, it considers the @lib to be a fat
           binary and takes its thin version for the @thin_arch specified
           before retrieving the object files.

           Returns None when @thin_arch is given but @lib does not
           contain that architecture.
        '''
        lib_tmpdir = tempfile.mkdtemp()
        shutil.copy(lib, lib_tmpdir)
        tmplib = os.path.join(lib_tmpdir, os.path.basename(lib))

        if thin_arch:  #should be a fat file, split only to the arch we want
            newname = '%s_%s' % (thin_arch, os.path.basename(lib))
            cmd = ['lipo', tmplib, '-thin', thin_arch, '-output', newname]
            try:
                subprocess.check_output(cmd,
                                        cwd=lib_tmpdir,
                                        stderr=subprocess.STDOUT,
                                        universal_newlines=True)
            except subprocess.CalledProcessError as e:
                # The lib simply lacks this arch: signal with None rather
                # than failing the whole packaging run.
                if 'does not contain the specified architecture' in e.output:
                    return None
                raise
            tmplib = os.path.join(lib_tmpdir, newname)

        shell.call('ar -x %s' % tmplib, lib_tmpdir)

        # object files with the same name in an archive are overwritten
        # when they are extracted. osx's ar does not support the N count
        # modifier so after extracting all the files we remove them from
        # the archive to extract those with duplicated names.
        # eg:
        # ar t libavcodec.a -> mlpdsp.o mlpdsp.o (2 objects with the same name)
        # ar d libavcodec.a mlpdsp.o (we remove the first one)
        # ar t libavcodec.a -> mlpdsp.o (we only have the second one now)
        files = shell.check_call('ar -t %s' % tmplib, lib_tmpdir).split('\n')
        # FIXME: We should use collections.Counter but it's only available in
        # python 2.7+
        dups = defaultdict(int)
        for f in files:
            dups[f] += 1
        for f in dups:
            if dups[f] <= 1:
                continue
            for x in range(dups[f]):
                path = os.path.join(lib_tmpdir, f)
                new_path = os.path.join(lib_tmpdir, 'dup%d_' % x + f)
                # The duplicated overwrote the first one, so extract it again
                shell.call('ar -x %s %s' % (tmplib, f), lib_tmpdir)
                shutil.move(path, new_path)
                shell.call('ar -d %s %s' % (tmplib, f), lib_tmpdir)

        return lib_tmpdir
Esempio n. 22
0
 def list_shared_libraries(object_file):
     """Return the shared libraries linked by @object_file, as reported
     by 'otool -L' (header line and version info stripped)."""
     cmd = '%s -L "%s"' % (OTOOL_CMD, object_file)
     lines = shell.check_call(cmd).split('\n')
     # Skip the header line, strip the leading tab character and keep
     # only the library path (drop the trailing version info).
     return [line[1:].split(' ', 1)[0] for line in lines[1:]]
Esempio n. 23
0
 def list_shared_libraries(object_file):
     """Return the shared libraries linked by @object_file, as reported
     by 'otool -L' (header line and version info stripped)."""
     # BUGFIX: quote the path so files with spaces work (matches the
     # quoted variant of this helper used elsewhere in the codebase).
     cmd = '%s -L "%s"' % (OTOOL_CMD, object_file)
     res = shell.check_call(cmd).split('\n')
     # We don't use the first line
     libs = res[1:]
     # Remove the first character tabulation
     libs = [x[1:] for x in libs]
     # Remove the version info
     libs = [x.split(' ', 1)[0] for x in libs]
     return libs
 def _perl_version(self):
     """Return the installed perl version as 'major.minor.revision',
     falling back to '0.0.0' when perl is not available."""
     try:
         version = shell.check_call("perl -e 'print \"$]\";'")
     # BUGFIX: a bare 'except:' also swallowed SystemExit and
     # KeyboardInterrupt; only command failures should be tolerated.
     except Exception:
         m.warning(_("Perl not found, you may need to run bootstrap."))
         version = '0.000000'
     # FIXME: when perl's mayor is >= 10
     mayor = str(version[0])
     minor = str(int(version[2:5]))
     revision = str(int(version[5:8]))
     return '.'.join([mayor, minor, revision])
Esempio n. 25
0
File: svn.py Progetto: osdrv/cerbero
def revision(repo):
    '''
    Get the current revision of a repository with svnversion

    @param repo: the path to the repository
    @type  repo: str
    '''
    rev = shell.check_call('svnversion', repo, env=CLEAN_ENV).split('\n')[0]
    # BUGFIX: use endswith() so an empty output line can't raise
    # IndexError.  A trailing 'M' marks a modified working copy.
    if rev.endswith('M'):
        rev = rev[:-1]
    return rev
 def testCreateBundle(self):
     """Check that the runtime bundle contains exactly the package files."""
     self._add_files()
     p = self.store.get_package('gstreamer-test1')
     self.files = p.files_list()
     packager = OSXPackage(self.config, p, self.store)
     files = OSXPackage.files_list(packager, PackageType.RUNTIME, False)
     tmpdest = packager._create_bundle(files, PackageType.RUNTIME)[0]
     bundlefiles = shell.check_call('find . -type f ', tmpdest).split('\n')
     # Strip the leading './' and drop the first (empty) entry.
     bundlefiles = sorted([f[2:] for f in bundlefiles])[1:]
     # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12.
     self.assertEqual(bundlefiles, self.files)
     shutil.rmtree(tmpdest)
Esempio n. 27
0
def revision(repo):
    """
    Get the current revision of a repository with svnversion

    @param repo: the path to the repository
    @type  repo: str
    """
    rev = shell.check_call("svnversion", repo, env=CLEAN_ENV).split("\n")[0]
    # BUGFIX: use endswith() so an empty output line can't raise
    # IndexError.  A trailing 'M' marks a modified working copy.
    if rev.endswith("M"):
        rev = rev[:-1]
    return rev
Esempio n. 28
0
def revision(repo):
    '''
    Get the current revision of a repository with svnversion

    @param repo: the path to the repository
    @type  repo: str
    '''
    output = shell.check_call('svnversion', repo, env=CLEAN_ENV)
    rev = output.split('\n')[0]
    # A trailing 'M' flags local modifications; drop it.
    return rev[:-1] if rev[-1] == 'M' else rev
Esempio n. 29
0
 def testCreateBundle(self):
     """Check that the runtime bundle contains exactly the package files."""
     self._add_files()
     p = self.store.get_package('gstreamer-test1')
     self.files = p.files_list()
     packager = OSXPackage(self.config, p, self.store)
     files = OSXPackage.files_list(packager, PackageType.RUNTIME, False)
     tmpdest = packager._create_bundle(files, PackageType.RUNTIME)[0]
     bundlefiles = shell.check_call('find . -type f ', tmpdest).split('\n')
     # Strip the leading './' and drop the first (empty) entry.
     bundlefiles = sorted([f[2:] for f in bundlefiles])[1:]
     # BUGFIX: assertEquals is a deprecated alias removed in Python 3.12.
     self.assertEqual(bundlefiles, self.files)
     shutil.rmtree(tmpdest)
Esempio n. 30
0
 def list_file_deps(self, prefix, path):
     """Return the existing, resolved DLL dependencies of the PE binary
     at @path, looked up under @prefix/bin."""
     output = shell.check_call('LC_ALL=C objdump -xw %s' % path).splitlines()
     prog = re.compile(r"(?i)^.*DLL[^:]*: (\S+\.dll)$")
     # Keep only the DLL name from each matching objdump line.
     dlls = []
     for line in output:
         if prog.match(line) is not None:
             dlls.append(prog.sub(r"\1", line))
     candidates = [
         os.path.join(prefix, 'bin', d) for d in dlls
         if d.lower().endswith('dll')
     ]
     return [os.path.realpath(c) for c in candidates if os.path.exists(c)]
Esempio n. 31
0
def get_hash(git_dir, commit):
    '''
    Get a commit hash from a valid commit.
    Can be used to check if a commit exists

    @param git_dir: path of the git repository
    @type git_dir: str
    @param commit: the commit to log
    @type commit: str
    '''
    cmd = '%s show -s --pretty=%%H %s' % (GIT, commit)
    return shell.check_call(cmd, git_dir)
Esempio n. 32
0
def get_hash(git_dir, commit):
    '''
    Get a commit hash from a valid commit.
    Can be used to check if a commit exists

    @param git_dir: path of the git repository
    @type git_dir: str
    @param commit: the commit to log
    @type commit: str
    '''
    # 'git show -s --pretty=%H' prints only the commit's full hash.
    command = '%s show -s --pretty=%%H %s' % (GIT, commit)
    return shell.check_call(command, git_dir)
Esempio n. 33
0
def get_hash(git_dir, commit):
    '''
    Get a commit hash from a valid commit.
    Can be used to check if a commit exists

    @param git_dir: path of the git repository
    @type git_dir: str
    @param commit: the commit to log
    @type commit: str
    '''
    output = shell.check_call('%s rev-parse %s' % (GIT, commit), git_dir)
    # Keep only the first line; command output ends with a newline.
    return output.split('\n')[0]
Esempio n. 34
0
    def list_file_deps(self, prefix, path):
        """Return the existing shared-library dependencies of the ELF
        binary at @path, resolved under @prefix/lib.

        Symlink targets are appended too so both names are shipped.
        """
        files = shell.check_call('objdump -x %s' % path).split('\n')
        # BUGFIX: split on any whitespace run and take the last token
        # instead of indexing field 17 of a single-space split, which
        # silently depended on objdump's exact column padding.
        # 'NEEDED' lines look like '  NEEDED   libfoo.so'.
        files = [x.split()[-1] for x in files if 'NEEDED ' in x]
        files = [os.path.join(prefix, 'lib', x) for x in files]
        final_files = []
        for f in files:
            if os.path.exists(f):
                final_files.append(f)
                real_f = os.path.realpath(f)
                if real_f != f:
                    final_files.append(real_f)

        return final_files
Esempio n. 35
0
    def find_orphan_files(self, allfiles, prefix, excludes=()):
        """Report files under @prefix that are not listed in @allfiles.

        @excludes contains 'find -name' patterns to skip.  Matches are
        printed via m.message; nothing is returned.
        """
        # BUGFIX: the default was a mutable list ([]); use an immutable
        # tuple to avoid the shared-mutable-default pitfall.
        cmd = 'find . -type f %s'
        exc = ["\\( ! -name '%s' \\)" % x for x in excludes]
        cmd = cmd % ' '.join(exc)

        distfiles = shell.check_call(cmd, prefix).split('\n')
        # remove './' from the list of files
        distfiles = [f[2:] for f in distfiles]
        orphan = sorted(set(distfiles) - set(allfiles))

        if len(orphan) > 0:
            m.message("Found orphan files:")
            m.message('\n'.join(orphan))
Esempio n. 36
0
    def find_orphan_files(self, allfiles, prefix, excludes=()):
        """Report files under @prefix that are not listed in @allfiles.

        @excludes contains 'find -name' patterns to skip.  Matches are
        printed via m.message; nothing is returned.
        """
        # BUGFIX: the default was a mutable list ([]); use an immutable
        # tuple to avoid the shared-mutable-default pitfall.
        cmd = 'find . -type f %s'
        exc = ["\\( ! -name '%s' \\)" % x for x in excludes]
        cmd = cmd % ' '.join(exc)

        distfiles = shell.check_call(cmd, prefix).split('\n')
        # remove './' from the list of files
        distfiles = [f[2:] for f in distfiles]
        orphan = sorted(set(distfiles) - set(allfiles))

        if len(orphan) > 0:
            m.message("Found orphan files:")
            m.message('\n'.join(orphan))
Esempio n. 37
0
    def list_file_deps(self, prefix, path):
        """Return the existing shared-library dependencies of the ELF
        binary at @path, resolved under @prefix/lib.

        Symlink targets are appended too so both names are shipped.
        """
        files = shell.check_call('objdump -x %s' % path).split('\n')
        # BUGFIX: split on any whitespace run and take the last token
        # instead of indexing field 17 of a single-space split, which
        # silently depended on objdump's exact column padding.
        files = [x.split()[-1] for x in files if 'NEEDED ' in x]
        files = [os.path.join(prefix, 'lib', x) for x in files]
        final_files = []
        for f in files:
            if os.path.exists(f):
                final_files.append(f)
                real_f = os.path.realpath(f)
                if real_f != f:
                    final_files.append(real_f)

        return final_files
Esempio n. 38
0
    def _split_static_lib(self, lib, thin_arch=None):
        '''Splits the static lib @lib into its object files

           Splits the static lib @lib into its object files and returns
           a new temporary directory where the .o files should be found.

           if @thin_arch was provided, it considers the @lib to be a fat
           binary and takes its thin version for the @thin_arch specified
           before retrieving the object files.
        '''
        lib_tmpdir = tempfile.mkdtemp()
        shutil.copy(lib, lib_tmpdir)
        tmplib = os.path.join(lib_tmpdir, os.path.basename(lib))

        if thin_arch: #should be a fat file, split only to the arch we want
            newname = '%s_%s' % (thin_arch, os.path.basename(lib))
            shell.call('lipo %s -thin %s -output %s' % (tmplib,
                           thin_arch, newname), lib_tmpdir)
            tmplib = os.path.join (lib_tmpdir, newname)

        shell.call('ar -x %s' % tmplib, lib_tmpdir)

        # object files with the same name in an archive are overwritten
        # when they are extracted. osx's ar does not support the N count
        # modifier so after extracting all the files we remove them from
        # the archive to extract those with duplicated names.
        # eg:
        # ar t libavcodec.a -> mlpdsp.o mlpdsp.o (2 objects with the same name)
        # ar d libavcodec.a mlpdsp.o (we remove the first one)
        # ar t libavcodec.a -> mlpdsp.o (we only have the second one now)
        files = shell.check_call('ar -t %s' % tmplib, lib_tmpdir).split('\n')
        # FIXME: We should use collections.Counter but it's only available in
        # python 2.7+
        dups = defaultdict(int)
        for f in files:
            dups[f] += 1
        for f in dups:
            if dups[f] <= 1:
                continue
            for x in range(dups[f]):
                path = os.path.join(lib_tmpdir, f)
                new_path = os.path.join(lib_tmpdir, 'dup%d_' % x + f)
                # The duplicated overwrote the first one, so extract it again
                shell.call('ar -x %s %s' % (tmplib, f), lib_tmpdir)
                shutil.move (path, new_path)
                shell.call('ar -d %s %s' % (tmplib, f), lib_tmpdir)

        return lib_tmpdir
Esempio n. 39
0
 def _write_gst_la_file(self, la_path, pcname, major, minor, micro, env):
     """Write a libtool .la file for the library at @la_path.

     Dependencies are derived from 'pkg-config --libs-only-l --static'
     output for @pcname.  NOTE(review): iterating set(...) makes the
     dependency order non-deterministic between runs — confirm nothing
     relies on a stable ordering in the generated .la file.
     """
     ladir, laname = os.path.split(la_path)
     ladir = os.path.join(self._get_arch_prefix(), ladir)
     dep_libs = []
     ret = shell.check_call('pkg-config --libs-only-l --static ' + pcname, env=env)
     for lib in set(ret.split()):
         # Don't add the library itself to the list of dependencies
         if lib[2:] == laname[3:-3]:
             continue
         lafile = os.path.join(self.config.libdir, 'lib' + lib[2:] + '.la')
         if os.path.isfile(lafile):
             # A matching .la exists in libdir: record the bare name.
             dep_libs.append(lib[2:])
         else:
             dep_libs.append(lib)
     LibtoolLibrary(laname[:-3], major, minor, micro, ladir,
                    self.config.target_platform, deps=dep_libs).save()
Esempio n. 40
0
def list_tags(git_dir, fail=True):
    '''
    List all tags

    @param git_dir: path of the git repository
    @type git_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    @return: list of tag names (str)
    @rtype: list
    '''
    tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail).strip()
    # Always return a list as documented: previously an empty repository
    # yielded the empty string '' instead of [].
    return tags.split('\n') if tags else []
Esempio n. 41
0
def list_tags(git_dir, fail=True):
    '''
    List all tags

    @param git_dir: path of the git repository
    @type git_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    @return: list of tag names (str)
    @rtype: list
    '''
    tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail).strip()
    # Always return a list as documented: previously an empty repository
    # yielded the empty string '' instead of [].
    return tags.split('\n') if tags else []
Esempio n. 42
0
def check_line_endings(platform):
    '''
    Checks if on windows we don't use the automatic line endings conversion
    as it breaks everything

    @param platform: the host platform
    @type platform: L{cerbero.config.Platform}
    @return: true if git config is core.autocrlf=false
    @rtype: bool
    '''
    # Line-ending conversion is only a problem on Windows
    if platform != Platform.WINDOWS:
        return True
    val = shell.check_call('git config --get core.autocrlf')
    return 'false' in val.lower()
Esempio n. 43
0
def check_line_endings(platform):
    '''
    Checks if on windows we don't use the automatic line endings conversion
    as it breaks everything

    @param platform: the host platform
    @type platform: L{cerbero.config.Platform}
    @return: true if git config is core.autocrlf=false
    @rtype: bool
    '''
    # Line-ending conversion is only a problem on Windows
    if platform != Platform.WINDOWS:
        return True
    val = shell.check_call('git config --get core.autocrlf')
    return 'false' in val.lower()
Esempio n. 44
0
    def _create_framework_library(self, libraries):
        """Merge the static *libraries* into one per-arch library and then
        into a single (possibly fat) file at self.install_name.

        Object files are deduplicated across libraries by (md5, size) so
        the same object contributed by two inputs is only archived once.
        """
        tmpdir = tempfile.mkdtemp()

        libname = os.path.basename (self.libname) # just to make sure

        if self.arch == Architecture.UNIVERSAL:
            archs = self.universal_archs
        else:
            archs = [self.arch]

        # Apple toolchains name the 32-bit x86 slice 'i386'
        archs = [a if a != Architecture.X86 else 'i386' for a in archs]

        for thin_arch in archs:
            object_files_md5 = []
            shell.call ('mkdir -p %s' % thin_arch, tmpdir)
            tmpdir_thinarch = os.path.join(tmpdir, thin_arch)

            for lib in libraries:
                libprefix = os.path.split(lib)[-1].replace('.', '_')

                if len(archs) > 1: #should be a fat file, split only to the arch we want
                    libprefix += '_%s_' % thin_arch
                    lib_tmpdir = self._split_static_lib(lib, thin_arch)
                else:
                    lib_tmpdir = self._split_static_lib(lib)

                obj_files = shell.ls_files(['*.o'], lib_tmpdir)
                for obj_f in obj_files:
                    obj_path = os.path.join(lib_tmpdir, obj_f)
                    # Key on content hash + size to skip identical objects
                    md5 = shell.check_call('md5 -q %s' % obj_path).split('\n')[0]
                    md5 = '%s-%s' % (md5, os.path.getsize(obj_path))
                    if md5 not in object_files_md5:
                        # Prefix with the source library name to avoid
                        # clashes between same-named objects
                        shell.call('cp %s %s' % (obj_path, '%s-%s' % (libprefix, obj_f)), tmpdir_thinarch)
                        shell.call('ar -cqS %s %s-%s' % (libname, libprefix, obj_f), tmpdir_thinarch)
                        object_files_md5.append(md5)
                shutil.rmtree(lib_tmpdir)
            # Build the symbol table once all objects are in ('-S' above
            # skipped it on every append)
            shell.call('ar -s %s' % (libname), tmpdir_thinarch)

        files = [os.path.join(tmpdir, arch, libname) for arch in archs]
        self._check_duplicated_symbols(files, tmpdir)
        if len(archs) > 1:
            #merge the final libs into a fat file again
            shell.call('lipo %s -create -output %s' % (' '.join(files), self.install_name), tmpdir)
        else:
            # Use the mapped arch name (archs[0]): when self.arch is X86 the
            # thin directory is named 'i386', so self.arch would not exist.
            shell.call('cp %s %s' % (os.path.join(tmpdir, archs[0], libname), self.install_name), tmpdir)
        shutil.rmtree(tmpdir)
Esempio n. 45
0
def get_hash(git_dir, commit):
    '''
    Get a commit hash from a valid commit.
    Can be used to check if a commit exists

    @param git_dir: path of the git repository
    @type git_dir: str
    @param commit: the commit to log
    @type commit: str
    '''
    # A missing .git dir means the recipe switched source type (e.g. from
    # tarball to git) and built_version() called us on a non-git checkout;
    # return a unique marker string to trigger a full fetch.
    if not os.path.isdir(os.path.join(git_dir, '.git')):
        return 'not-git-' + str(time.time())
    cmd = '%s show -s --pretty=%%H %s' % (GIT, commit)
    return shell.check_call(cmd, git_dir)
Esempio n. 46
0
def find_dll_implib(libname, prefix, libdir, ext, regex):
    """Return the DLL for *libname* as a one-element list of paths relative
    to *libdir*, or an empty list if it cannot be determined.

    Resolution order: query dlltool for the DLL name recorded in one of the
    candidate import libraries; otherwise fall back to a DLL named exactly
    'lib<libname>.dll'.  *ext* and *regex* are accepted but unused here.
    """
    implibdir = 'lib'
    # Candidate import-library names (MinGW and MSVC naming conventions)
    implibs = [
        'lib{}.dll.a'.format(libname), libname + '.lib',
        'lib{}.lib'.format(libname)
    ]
    dlltool = os.environ.get('DLLTOOL', None)
    if not dlltool:
        raise FatalError('dlltool was not found, check cerbero configuration')
    implib_notfound = []
    for implib in implibs:
        path = os.path.join(prefix, implibdir, implib)
        if not os.path.exists(path):
            implib_notfound.append(implib)
            continue
        try:
            # 'dlltool -I' prints the DLL name stored in the import library
            dllname = shell.check_call([dlltool, '-I', path])
        except FatalError:
            continue
        dllname = dllname.strip()
        if dllname == '':
            continue
        return [os.path.join(libdir, dllname)]
    # If import libraries aren't found, look for a DLL by exactly the specified
    # name. This is to cover cases like libgcc_s_sjlj-1.dll which don't have an
    # import library since they're only used at runtime.
    dllname = 'lib{}.dll'.format(libname)
    path = os.path.join(prefix, libdir, dllname)
    if os.path.exists(path):
        return [os.path.join(libdir, dllname)]
    # libvpx's build system does not build DLLs on Windows, so it's expected
    # that the DLL can't be found. Similar code exists in _search_libraries()
    # XXX: Remove this when libvpx is ported to Meson.
    if libname == 'vpx':
        return []
    if len(implib_notfound) == len(implibs):
        m.warning("No import libraries found for {!r}".format(libname))
    else:
        implibs = ', '.join(set(implibs) - set(implib_notfound))
        m.warning("No dllname found from implibs: {}".format(implibs))
    # This will trigger an error in _search_libraries()
    return []
Esempio n. 47
0
 def _check_duplicated_symbols(self, files, tmpdir):
     """Warn about exported ('T') symbols defined more than once in any of
     the static libraries in *files*, listing the offending object files."""
     for lib in files:
         # nm output is: test.o: 00000000 T _gzwrite
         # (filename, address, symbol type, symbol name)
         nm_lines = shell.check_call('nm -UA %s' % lib, tmpdir).split('\n')
         defs_by_symbol = defaultdict(list)
         for line in nm_lines:
             fields = line.split(' ')
             if len(fields) == 4 and fields[2] == 'T':
                 defs_by_symbol[fields[3]].append(fields)
         duplicates = {}
         for name, defs in defs_by_symbol.items():
             if len(defs) > 1:
                 duplicates[name] = defs
         if duplicates:
             m.warning("The static library contains duplicated symbols")
         for name, defs in duplicates.items():
             m.message(name)  # symbol name
             for entry in defs:
                 m.message("     %s" % entry[0])  # file
Esempio n. 48
0
def find_dll_implib(libname, prefix, libdir, ext, regex):
    """Resolve the DLL for *libname* by querying its import library.

    Returns a one-element list with the DLL path relative to *libdir*, or
    an empty list (with a warning) when no usable import library is found.
    *ext* and *regex* are accepted but unused here.
    """
    dlltool = os.environ.get('DLLTOOL', None)
    if not dlltool:
        raise FatalError('dlltool was not found, check cerbero configuration')
    candidates = ['lib{}.dll.a'.format(libname), libname + '.lib']
    for candidate in candidates:
        implib_path = os.path.join(prefix, 'lib', candidate)
        if not os.path.exists(implib_path):
            continue
        try:
            # 'dlltool -I' prints the DLL name stored in the import library
            output = shell.check_call([dlltool, '-I', implib_path])
        except FatalError:
            continue
        dllname = output.strip()
        if dllname:
            return [os.path.join(libdir, dllname)]
    m.warning("No dllname from implibs {}".format(candidates))
    return []
Esempio n. 49
0
 def _check_duplicated_symbols(self, files, tmpdir):
     """Warn when any static library in *files* defines the same exported
     ('T') symbol more than once, listing the offending object files."""
     for f in files:
         syms = defaultdict(list)
         # -U: skip undefined symbols; -A: prefix each line with the file name
         symbols = shell.check_call('nm -UA %s' % f, tmpdir).split('\n')
         # nm output is: test.o: 00000000 T _gzwrite
         # (filename, address, symbol type, symbols_name)
         for s in symbols:
             s = s.split(' ')
             if len(s) == 4 and s[2] == 'T':
                 syms[s[3]].append(s)
         # Keep only the symbols that were defined more than once
         dups = {}
         for k,v in syms.items():
             if len(v) > 1:
                 dups[k] = v
         if dups:
             m.warning ("The static library contains duplicated symbols")
         for k, v in dups.items():
             m.message (k)  # symbol name
             for l in v:
                 m.message ("     %s" % l[0])  # file
Esempio n. 50
0
def find_dll_implib(libname, prefix, libdir, ext, regex):
    """Find the DLL that *libname*'s import library points to.

    Returns a one-element list with the DLL path relative to *libdir*, or
    an empty list when nothing usable is found (which makes the caller
    report an error). *ext* and *regex* are accepted but unused here.
    """
    dlltool = os.environ.get('DLLTOOL', None)
    if not dlltool:
        raise FatalError('dlltool was not found, check cerbero configuration')
    implibs = ['lib{}.dll.a'.format(libname), libname + '.lib']
    missing = []
    for implib in implibs:
        implib_path = os.path.join(prefix, 'lib', implib)
        if not os.path.exists(implib_path):
            missing.append(implib)
            continue
        try:
            # 'dlltool -I' prints the DLL name stored in the import library
            output = shell.check_call([dlltool, '-I', implib_path])
        except FatalError:
            continue
        dllname = output.strip()
        if dllname:
            return [os.path.join(libdir, dllname)]
    # If import libraries aren't found, look for a DLL by exactly the specified
    # name. This is to cover cases like libgcc_s_sjlj-1.dll which don't have an
    # import library since they're only used at runtime.
    dllname = 'lib{}.dll'.format(libname)
    if os.path.exists(os.path.join(prefix, libdir, dllname)):
        return [os.path.join(libdir, dllname)]
    # libvpx's build system does not build DLLs on Windows, so it's expected
    # that the DLL can't be found. Similar code exists in _search_libraries()
    # XXX: Remove this when libvpx is ported to Meson.
    if libname == 'vpx':
        return []
    if len(missing) == len(implibs):
        m.warning("No import libraries found for {!r}".format(libname))
    else:
        m.warning("No dllname found from implibs: {}".format(
            ', '.join(set(implibs) - set(missing))))
    # This will trigger an error in _search_libraries()
    return []
Esempio n. 51
0
 def _list_bom_dirs(self):
     """Return lsbom's listing of the directories in this package's BOM."""
     cmd = 'lsbom %s -s -d' % self.bom_path
     return shell.check_call(cmd)
Esempio n. 52
0
 def _list_bom_files(self):
     """Return lsbom's listing of the files (with modes) in this BOM."""
     cmd = 'lsbom %s -p fm' % self.bom_path
     return shell.check_call(cmd)
Esempio n. 53
0
 def list_deps(self, prefix,  path):
     """List shared-library dependencies of *path* that live under *prefix*.

     Parses ldd output lines ('libfoo.so => /path/libfoo.so (0x...)') and
     returns the third whitespace-separated field of each matching line.
     """
     deps = []
     for line in shell.check_call('ldd %s' % path).split('\n'):
         if prefix in line:
             deps.append(line.split(' ')[2])
     return deps
Esempio n. 54
0
 def list_file_deps(self, prefix, path):
     """List libraries linked by *path* (via 'otool -L') under *prefix*."""
     output = shell.check_call('otool -L %s' % path)
     deps = []
     # Skip the first line (the file's own name) and keep only lines that
     # mention the prefix; drop the leading indent character of each entry.
     for line in output.split('\n')[1:]:
         if prefix in line:
             deps.append(line.split(' ')[0][1:])
     return deps
 def _get_file_type(self, path):
     """Return the `file -bh` description of *path*, minus the trailing
     newline character."""
     output = shell.check_call('file -bh %s' % path)
     return output[:-1]
Esempio n. 56
0
 def library_id_name(object_file):
     """Return the library id of *object_file* as reported by 'otool -D'."""
     output = shell.check_call('%s -D %s' % (OTOOL_CMD, object_file))
     first_line = output.split('\n')[0]
     # the library name ends with ':'
     return first_line[:-1]