Example #1
    def grab(self, url, filename, user=None, passwd=None):
        """Grab url to file."""

        log.debug("fetching %s => %s" % (url, filename))

        with open(filename, 'w') as outfile:
            self.change_url(url, outfile, user, passwd)
            self.perform()
Example #2
    def grab(self, url, filename, user=None, passwd=None, no_cache=False):
        """Grab url to file."""

        log.debug("fetching %s => %s" % (url, filename))

        with open(filename, 'w') as outfile:
            self.change_url(url, outfile, user, passwd, no_cache)
            self.perform()
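
Hedged usage sketch: the two grab() variants above are methods of a fetcher object whose class is not shown here, so the class name below is an assumption, and the URL, paths and credentials are placeholders. It fetches a URL into a local file, optionally with HTTP basic auth and with caching disabled.

# Assumed class name and placeholder values, for illustration only.
grabber = URLGrabber()
grabber.grab('https://download.example.org/repodata/repomd.xml',
             '/tmp/repomd.xml',
             user='builder', passwd='secret',
             no_cache=True)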
Example #3
def show_file_from_rev(git_path, relative_path, commit_id):
    """Get a single file content from given git revision."""
    args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
    try:
        with Workdir(git_path):
            return subprocess.Popen(args, stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError), err:
        log.debug('failed to checkout %s from %s:%s' % (relative_path,
                                                        commit_id, str(err)))
Example #4
def show_file_from_rev(git_path, relative_path, commit_id):
    """Get a single file content from given git revision."""
    args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
    try:
        with Workdir(git_path):
            return subprocess.Popen(args,
                                    stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError), err:
        log.debug('failed to checkout %s from %s:%s' %
                  (relative_path, commit_id, str(err)))
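
A minimal usage sketch with a placeholder repository path, file path and commit id. show_file_from_rev() returns the raw file content, or falls through to an implicit None after logging a debug message when the git call fails.

# Placeholder values; log is the same assumed module-level logger as above.
content = show_file_from_rev('/srv/git/project', 'packaging/project.spec',
                             'a1b2c3d')
if content is None:
    log.warning('could not read the spec file from that revision')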
Example #5
def createimage(args, ks_file):
    '''create image using mic'''
    extra_mic_opts = []
    if args.outdir:
        extra_mic_opts = ['--outdir=%s' % args.outdir]
    if args.tmpfs:
        extra_mic_opts += ['--tmpfs']
    extra_mic_opts += ['--record-pkgs=name']
    mic_cmd = 'sudo mic create auto %s %s' % (ks_file, ' '.join(extra_mic_opts))
    log.debug(mic_cmd)
    return os.system(mic_cmd)
Example #6
def createimage(args, ks_file):
    '''create image using mic'''
    extra_mic_opts = []
    if args.outdir:
        extra_mic_opts = ['--outdir=%s' % args.outdir]
    if args.tmpfs:
        extra_mic_opts += ['--tmpfs']
    extra_mic_opts += ['--record-pkgs=name']
    mic_cmd = 'sudo mic create auto %s %s' % (ks_file,
                                              ' '.join(extra_mic_opts))
    log.debug(mic_cmd)
    return os.system(mic_cmd)
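
A hedged sketch of calling createimage(): it only reads the outdir and tmpfs attributes from args, so an argparse-style namespace with just those two fields is enough for illustration (all values are placeholders).

from argparse import Namespace

# Placeholder values; createimage() shells out to 'sudo mic create auto ...'
# and returns the exit status from os.system().
args = Namespace(outdir='/var/tmp/mic-output', tmpfs=False)
status = createimage(args, 'image-configs/minimal.ks')
if status != 0:
    log.error('mic exited with status %d' % status)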
Example #7
    def perform(self):
        '''do the real Curl perform work'''

        curl = self.curl

        stop = [False]

        def progressing(*_args):
            '''Returning a non-zero value from this callback will cause libcurl
            to abort the transfer and return CURLE_ABORTED_BY_CALLBACK.'''
            return -1 if stop[0] else 0

        def handler(_signum, _frame):
            '''set stop flag if catch SIGINT,
            if not catch SIGINT, pycurl will print traceback'''
            stop[0] = True

        curl.setopt(pycurl.PROGRESSFUNCTION, progressing)
        curl.setopt(pycurl.NOPROGRESS, False)
        original_handler = signal.signal(signal.SIGINT, handler)
        try:
            curl.perform()
        except pycurl.error, err:
            log.debug('fetching error:%s' % str(err))

            errcode, errmsg = err.args
            http_code = curl.getinfo(pycurl.HTTP_CODE)

            if errcode == pycurl.E_OPERATION_TIMEOUTED or http_code == 503:
                proxies = ['Detected proxies set in system environment:']
                env = os.environ
                for key in [
                        'HTTPS_PROXY', 'HTTP_PROXY', 'FTP_PROXY',
                        'https_proxy', 'http_proxy', 'ftp_proxy', 'NO_PROXY',
                        'no_proxy'
                ]:
                    proxies.append('%s=%s' % (key, env.get(key, '')))
                raise UrlError("connect timeout to %s, maybe it's caused by "
                               "proxy settings, please check. %s" %
                               (curl.url, '\n  '.join(proxies)))
            elif errcode == pycurl.E_ABORTED_BY_CALLBACK:
                raise KeyboardInterrupt(err)
            elif http_code in (401, 403):
                raise UrlError('authenticate failed on: %s' % curl.url)
            elif http_code == 404:
                raise PageNotFound(err)
            else:
                raise UrlError('URL error on %s: (%s: "%s")' %
                               (curl.url, errcode, errmsg))
Example #8
    def perform(self):
        '''do the real Curl perform work'''

        curl = self.curl

        stop = [False]
        def progressing(*_args):
            '''Returning a non-zero value from this callback will cause libcurl
            to abort the transfer and return CURLE_ABORTED_BY_CALLBACK.'''
            return -1 if stop[0] else 0

        def handler(_signum, _frame):
            '''set stop flag if catch SIGINT,
            if not catch SIGINT, pycurl will print traceback'''
            stop[0] = True

        curl.setopt(pycurl.PROGRESSFUNCTION, progressing)
        curl.setopt(pycurl.NOPROGRESS, False)
        original_handler = signal.signal(signal.SIGINT, handler)
        try:
            curl.perform()
        except pycurl.error, err:
            log.debug('fetching error:%s' % str(err))

            errcode, errmsg = err.args
            http_code = curl.getinfo(pycurl.HTTP_CODE)

            if errcode == pycurl.E_OPERATION_TIMEOUTED or http_code == 503:
                proxies = ['Detected proxies set in system environment:']
                env = os.environ
                for key in ['HTTPS_PROXY', 'HTTP_PROXY', 'FTP_PROXY',
                            'https_proxy', 'http_proxy', 'ftp_proxy',
                            'NO_PROXY', 'no_proxy']:
                    proxies.append('%s=%s' % (key, env.get(key, '')))
                raise UrlError("connect timeout to %s, maybe it's caused by "
                               "proxy settings, please check. %s" % (curl.url,
                               '\n  '.join(proxies)))
            elif errcode == pycurl.E_ABORTED_BY_CALLBACK:
                raise KeyboardInterrupt(err)
            elif http_code in (401, 403):
                raise UrlError('authenticate failed on: %s' % curl.url)
            elif http_code == 404:
                raise PageNotFound(err)
            else:
                raise UrlError('URL error on %s: (%s: "%s")' %
                               (curl.url, errcode, errmsg))
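
Both perform() variants save the previous SIGINT handler in original_handler, but the excerpts end before it is restored; presumably that happens in code not shown here. A minimal sketch of the usual restore pattern, using only the standard signal module:

# Sketch only: restore the saved handler even if curl.perform() raises.
original_handler = signal.signal(signal.SIGINT, handler)
try:
    curl.perform()
finally:
    signal.signal(signal.SIGINT, original_handler)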
Example #9
    def change_url(self, url, outfile, user, passwd, no_cache=False):
        '''change options for individual url'''

        curl = self.curl
        curl.url = url
        curl.setopt(pycurl.URL, url)
        curl.setopt(pycurl.WRITEDATA, outfile)
        if user:
            userpwd = user
            if passwd:
                userpwd = '%s:%s' % (user, passwd)
            curl.setopt(pycurl.USERPWD, userpwd)
        httpheader = []
        if no_cache:
            httpheader.append('Pragma: no-cache')
            httpheader.append('Cache-Control: no-cache')
            log.debug("disable HTTP caching")
        curl.setopt(pycurl.HTTPHEADER, httpheader)
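
change_url() sets HTTPHEADER on every call, even with an empty list, which replaces any Pragma/Cache-Control headers left from a previous request on the same handle. A hedged sketch of reusing one handle for two downloads (grabber is the same assumed fetcher object as in the earlier sketch; URLs and file names are placeholders):

with open('/tmp/first.xml', 'w') as out:
    grabber.change_url('https://example.org/first.xml', out, None, None,
                       no_cache=True)   # sends the no-cache headers
    grabber.perform()
with open('/tmp/second.xml', 'w') as out:
    grabber.change_url('https://example.org/second.xml', out, None, None)
    grabber.perform()                   # header list reset to []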
Example #10
def main(args):
    """gbs build entry point."""

    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '\
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '\
                    '--clean or --clean-once')
    workdir = args.gitdir

    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")

    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)

    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' % \
                       (buildarch, ','.join(SUPPORTEDARCHS)))

    if buildarch not in CAN_ALSO_BUILD.get(hostarch, []):
        if buildarch not in QEMU_CAN_BUILD:
            raise GbsError("hostarch: %s can't build target arch %s" %
                            (hostarch, buildarch))

    profile = get_profile(args)
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)

    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']

    cmd += ['--arch=%s' % buildarch]

    if args.clean:
        cmd += ['--clean']

    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']

    cmd += ['--path=%s' % workdir]

    if args.ccache:
        cmd += ['--ccache']

    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]

    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [ CHANGE_PERSONALITY[buildarch] ] + cmd

    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)

    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]

    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')
    cmd += ['--upstream-branch=%s' % upstream_branch]

    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']

    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]

    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('rpmbuild fails')
    else:
        log.info('Done')
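
The build_root handling above leans on a small regex that turns shell-style ${xxx} placeholders into Python %(xxx)s ones. A self-contained worked example of that transform (the sample path is made up):

import re

raw = '${tmpdir}/${profile}/buildroot'               # made-up sample value
converted = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', raw)
# converted == '%(tmpdir)s/%(profile)s/buildroot'
print(converted % {'tmpdir': '/var/tmp', 'profile': 'profile_tizen'})
# -> /var/tmp/profile_tizen/buildroot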
Example #11
def create_gbp_export_args(repo, commit, export_dir, tmp_dir, spec, args,
                           force_native=False):
    """
    Construct the cmdline argument list for git-buildpackage export
    """
    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')
    if args.upstream_tag:
        upstream_tag = args.upstream_tag
    else:
        upstream_tag = configmgr.get('upstream_tag', 'general')
        # transform variables from shell to python convention ${xxx} -> %(xxx)s
        upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)

    log.debug("Using upstream branch: %s" % upstream_branch)
    log.debug("Using upstream tag format: '%s'" % upstream_tag)

    # Get patch squashing option
    if args.squash_patches_until:
        squash_patches_until = args.squash_patches_until
    else:
        squash_patches_until = configmgr.get('squash_patches_until', 'general')

    # Determine the remote repourl
    reponame = ""
    remotes = repo.get_remote_repos()
    if remotes:
        remotename = 'origin' if 'origin' in remotes else remotes.keys()[0]
        # Take the remote repo of current branch, if available
        try:
            remote_branch = repo.get_upstream_branch(repo.branch)
            if remote_branch:
                remotename = remote_branch.split("/")[0]
        except GitRepositoryError:
            pass
        reponame = urlparse(remotes[remotename][0]).path.lstrip('/')

    packaging_dir = get_packaging_dir(args)
    # Now, start constructing the argument list
    argv = ["argv[0] placeholder",
            "--git-color-scheme=magenta:green:yellow:red",
            "--git-ignore-new",
            "--git-upstream-branch=upstream",
            "--git-export-dir=%s" % export_dir,
            "--git-tmp-dir=%s" % tmp_dir,
            "--git-packaging-dir=%s" % packaging_dir,
            "--git-spec-file=%s" % spec,
            "--git-export=%s" % commit,
            "--git-upstream-branch=%s" % upstream_branch,
            "--git-upstream-tag=%s" % upstream_tag,
            "--git-spec-vcs-tag=%s#%%(tagname)s" % reponame]

    if args.debug:
        argv.append("--git-verbose")
    if force_native or is_native_pkg(repo, args) or args.no_patch_export:
        argv.extend(["--git-no-patch-export",
                     "--git-upstream-tree=%s" % commit])
    else:
        argv.extend(["--git-patch-export",
                     "--git-patch-export-compress=100k",
                     "--git-force-create",
                     "--git-patch-export-squash-until=%s" %
                        squash_patches_until,
                     "--git-patch-export-ignore-path=^(%s/.*|.gbs.conf)" %
                        packaging_dir,
                    ])
        if repo.has_branch("pristine-tar"):
            argv.extend(["--git-pristine-tar"])

    if 'source_rpm' in args and args.source_rpm:
        argv.extend(['--git-builder=rpmbuild',
                     '--git-rpmbuild-builddir=.',
                     '--git-rpmbuild-builddir=.',
                     '--git-rpmbuild-rpmdir=.',
                     '--git-rpmbuild-sourcedir=.',
                     '--git-rpmbuild-specdir=.',
                     '--git-rpmbuild-srpmdir=.',
                     '--git-rpmbuild-buildrootdir=.',
                     '--short-circuit', '-bs',
                     ])
    else:
        argv.extend(["--git-builder=osc", "--git-export-only"])

    return argv
Example #12
def create_gbp_export_args(repo, commit, export_dir, tmp_dir, spec, args,
                           create_tarball=True):
    """
    Construct the cmdline argument list for git-buildpackage export
    """
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    upstream_tag = configmgr.get_arg_conf(args, 'upstream_tag')
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)

    log.debug("Using upstream branch: %s" % upstream_branch)
    log.debug("Using upstream tag format: '%s'" % upstream_tag)

    # Get patch squashing option
    squash_patches_until = configmgr.get_arg_conf(args, 'squash_patches_until')

    # Determine the remote repourl
    reponame = ""
    remotes = repo.get_remote_repos()
    if remotes:
        remotename = 'origin' if 'origin' in remotes else remotes.keys()[0]
        # Take the remote repo of current branch, if available
        try:
            config_remote = repo.get_config('branch.%s.remote' % repo.branch)
        except KeyError:
            pass
        else:
            if config_remote in remotes:
                remotename = config_remote
            elif config_remote != '.':
                log.warning("You appear to have non-existent remote '%s' "
                            "configured for branch '%s'. Check your git config!"
                            % (config_remote, repo.branch))
        reponame = urlparse(remotes[remotename][0]).path.lstrip('/')

    packaging_dir = get_packaging_dir(args)
    # Now, start constructing the argument list
    export_rev = commit
    argv = ["argv[0] placeholder",
            "--git-color-scheme=magenta:green:yellow:red",
            "--git-ignore-new",
            "--git-compression-level=6",
            "--git-export-dir=%s" % export_dir,
            "--git-tmp-dir=%s" % tmp_dir,
            "--git-packaging-dir=%s" % packaging_dir,
            "--git-spec-file=%s" % spec,
            "--git-pq-branch=development/%(branch)s/%(upstreamversion)s",
            "--git-upstream-branch=%s" % upstream_branch,
            "--git-upstream-tag=%s" % upstream_tag,
            "--git-spec-vcs-tag=%s#%%(commit)s" % reponame]

    if create_tarball:
        argv.append("--git-force-create")
    else:
        argv.append("--git-no-create-orig")
    if args.debug:
        argv.append("--git-verbose")
    if is_native_pkg(repo, args) or args.no_patch_export:
        argv.extend(["--git-no-patch-export",
                     "--git-upstream-tree=%s" % commit])
    else:
        # Check if the revision seems to be of an orphan development branch
        is_orphan = False
        export_commitish = 'HEAD' if commit == 'WC.UNTRACKED' else commit
        try:
            repo.get_merge_base(export_commitish, upstream_branch)
        except GitRepositoryError:
            is_orphan = True
        # Development branch in orphan packaging model is identified in the conf
        orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')

        if not is_orphan:
            argv.extend(["--git-patch-export",
                         "--git-patch-export-compress=100k",
                         "--git-patch-export-squash-until=%s" %
                            squash_patches_until,
                         "--git-patch-export-ignore-path=^(%s/.*|.gbs.conf)" %
                            packaging_dir,
                        ])

            if orphan_packaging:
                export_rev = orphan_packaging
                argv.extend(["--git-patch-export-rev=%s" % commit])

        if repo.has_branch("pristine-tar"):
            argv.extend(["--git-pristine-tar"])

    argv.append("--git-export=%s" % export_rev)

    if 'source_rpm' in args and args.source_rpm:
        argv.extend(['--git-builder=rpmbuild',
                     '--git-rpmbuild-builddir=.',
                     '--git-rpmbuild-builddir=.',
                     '--git-rpmbuild-rpmdir=.',
                     '--git-rpmbuild-sourcedir=.',
                     '--git-rpmbuild-specdir=.',
                     '--git-rpmbuild-srpmdir=.',
                     '--git-rpmbuild-buildrootdir=.',
                     '--short-circuit', '-bs',
                     ])
    else:
        argv.extend(["--git-builder=osc", "--git-export-only"])

    return argv
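
A hedged sketch of building the export argument list: RpmGitRepository appears in the other snippets here, but the args namespace and whatever finally consumes the returned argv are not shown, so treat this strictly as illustration.

# 'args' is assumed to be the parsed command-line namespace; paths and the
# spec file name are placeholders.
repo = RpmGitRepository('.')
argv = create_gbp_export_args(repo, 'HEAD', '/tmp/gbs-export', '/tmp/gbs-tmp',
                              'packaging/project.spec', args,
                              create_tarball=True)
log.debug('git-buildpackage export args: %s' % ' '.join(argv[1:]))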
Example #13
def main(args):
    """gbs build entry point."""

    global TMPDIR
    TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'),
                          '%s-gbs' % USERID)

    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '\
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '\
                    '--clean or --clean-once')
    workdir = args.gitdir

    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")

    read_localconf(workdir)

    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)

    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' % \
                       (buildarch, ','.join(SUPPORTEDARCHS)))

    profile = get_profile(args)
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {
        'tmpdir': TMPDIR,
        'profile': sanitized_profile_name
    }
    if profile.exclude_packages:
        log.info('the following packages have been excluded build from gbs '
                 'config:\n   %s' % '\n   '.join(profile.exclude_packages))
        if args.exclude:
            args.exclude += ',' + ','.join(profile.exclude_packages)
        else:
            args.exclude = ','.join(profile.exclude_packages)
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)

    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']

    cmd += ['--arch=%s' % buildarch]

    if args.clean:
        cmd += ['--clean']

    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']

    cmd += ['--path=%s' % workdir]

    if args.ccache:
        cmd += ['--ccache']

    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]

    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd

    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)

    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]

    if args.upstream_branch:
        cmd += ['--upstream-branch=%s' % args.upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']

    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]

    # Determine if we're on devel branch
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging:
        cmd += ['--spec-commit=%s' % orphan_packaging]

    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('some packages failed to be built')
    else:
        log.info('Done')
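
The exclude_packages handling above is plain string joining; a small worked example with made-up package names:

# Same logic as the profile.exclude_packages branch above, values made up.
exclude_from_profile = ['acl', 'attr']
args_exclude = 'zlib'
if args_exclude:
    args_exclude += ',' + ','.join(exclude_from_profile)
else:
    args_exclude = ','.join(exclude_from_profile)
# args_exclude == 'zlib,acl,attr'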
Example #14
def main(args):
    """gbs build entry point."""

    global TMPDIR
    TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'), '%s-gbs' % USERID)

    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '\
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '\
                    '--clean or --clean-once')
    workdir = args.gitdir

    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")

    read_localconf(workdir)

    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)

    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' % \
                       (buildarch, ','.join(SUPPORTEDARCHS)))

    profile = get_profile(args)
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    if profile.exclude_packages:
        log.info('the following packages have been excluded build from gbs '
                 'config:\n   %s' % '\n   '.join(profile.exclude_packages))
        if args.exclude:
            args.exclude += ',' + ','.join(profile.exclude_packages)
        else:
            args.exclude = ','.join(profile.exclude_packages)
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)

    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']

    cmd += ['--arch=%s' % buildarch]

    if args.clean:
        cmd += ['--clean']

    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']

    cmd += ['--path=%s' % workdir]

    if args.ccache:
        cmd += ['--ccache']

    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]

    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd

    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)

    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]

    if args.upstream_branch:
        cmd += ['--upstream-branch=%s' % args.upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]

    if args.conf and args.conf != '.gbs.conf':
        fallback = configmgr.get('fallback_to_native')
    else:
        fallback = ''
    if args.fallback_to_native or config_is_true(fallback):
        cmd += ['--fallback-to-native']

    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']

    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]

    # Determine if we're on devel branch
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging:
        cmd += ['--spec-commit=%s' % orphan_packaging]

    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('some packages failed to be built')
    else:
        log.info('Done')