Beispiel #1
0
def main(args):
    """gbs pull entry point."""

    # Resolve the upstream branch from CLI args with config fallback.
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')

    # Base git-buildpackage command line; the first entry is an argv[0]
    # placeholder required by the gbp option parser.
    gbp_args = [
        'dummy argv[0]', '--color-scheme=magenta:green:yellow:red',
        '--pristine-tar',
        '--upstream-branch=%s' % upstream_branch, '--packaging-branch=master'
    ]
    # Optional flags, appended in the same order the options are checked.
    optional_flags = [
        (args.depth, '--depth=%s' % args.depth),
        (args.force, '--force=clean'),
        (args.all, '--all'),
        (args.debug, '--verbose'),
    ]
    for enabled, flag in optional_flags:
        if enabled:
            gbp_args.append(flag)

    # Pull
    log.info('updating from remote')
    ret = do_pull(gbp_args)
    if ret == 2:
        raise GbsError('Failed to update some of the branches!')
    elif ret:
        raise GbsError('Update failed!')

    log.info('finished')
Beispiel #2
0
def main(args):
    """gbs clone entry point."""

    # Branch names come from CLI args with config-file fallback.
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    packaging_branch = configmgr.get_arg_conf(args, 'packaging_branch')

    # Assemble the git-buildpackage command line; argv[0] is a placeholder.
    gbp_args = ['dummy argv[0]',
                '--color-scheme=magenta:green:yellow:red',
                '--pristine-tar',
                '--upstream-branch=%s' % upstream_branch,
                '--packaging-branch=%s' % packaging_branch]
    for enabled, flag in ((args.all, '--all'),
                          (args.depth, '--depth=%s' % args.depth),
                          (args.debug, '--verbose')):
        if enabled:
            gbp_args.append(flag)
    # Positional arguments: source URI, then optional target directory.
    gbp_args.append(args.uri)
    if args.directory:
        gbp_args.append(args.directory)

    # Clone
    log.info('cloning %s' % args.uri)
    if do_clone(gbp_args):
        raise GbsError('Failed to clone %s' % args.uri)

    log.info('finished')
Beispiel #3
0
def main(args):
    """gbs chroot entry point.

    Enters the build root via ``sudo chroot`` as the build user (or root
    with --root), after copying the host resolv.conf into the chroot.
    """
    build_root = args.buildroot

    # The build root is unusable while the builder still holds this lock.
    running_lock = '%s/not-ready' % build_root
    if os.path.exists(running_lock):
        raise GbsError('build root %s is not ready' % build_root)

    log.info('chroot %s' % build_root)
    user = '******'
    if args.root:
        user = '******'
    cmd = ['sudo', 'chroot', build_root, 'su', user]

    # Best effort: give the chroot working DNS; failure is not fatal.
    try:
        subprocess.call(['sudo', 'cp', '/etc/resolv.conf',
                         build_root + '/etc/resolv.conf'])
    except OSError:
        log.warning('failed to setup /etc/resolv.conf')

    try:
        # Copy the environment so setting PS1 does not leak into the
        # parent process (os.environ itself was mutated before).
        build_env = os.environ.copy()
        build_env['PS1'] = r"(tizen-build-env)@\h \W]\$ "
        subprocess.call(cmd, env=build_env)
    except OSError as err:
        raise GbsError('failed to chroot to %s: %s' % (build_root, err))
Beispiel #4
0
def main(args):
    """gbs submit entry point."""

    workdir = args.gitdir

    # Refuse to submit from an orphan-devel development branch: changes
    # must first be exported to the packaging branch.
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging and args.commit == 'HEAD':
        # NOTE: the original message lacked spaces between continuation
        # strings ("...changes tothe...submitfrom there."); fixed here.
        log.error("You seem to be submitting a development branch of an "
                  "(orphan) packaging branch. Please export your changes to "
                  "the packaging branch with 'gbs devel export' and submit "
                  "from there.")
        raise GbsError("Refusing to submit from devel branch")

    message = args.msg
    if message is None:
        message = get_message()

    if not message:
        raise GbsError("tag message is required")

    try:
        repo = RpmGitRepository(workdir)
        commit = repo.rev_parse(args.commit)
        current_branch = repo.get_branch()
    except GitRepositoryError as err:
        raise GbsError(str(err))
Beispiel #5
0
    def _check_passwd(self):
        """Convert plaintext ``*passwd`` options to encoded ``*passwdx``.

        Scans every section of every layered config parser; each option
        ending in 'passwd' is rewritten into its originating file as
        '<key>x' with an encoded value, replacing the plaintext key.
        """
        # Parsers whose backing files need to be rewritten on disk.
        dirty = set()

        # Union of section names across all layered parsers.
        all_sections = set()
        for layer in self._cfgparsers:
            for sec in layer.sections():
                all_sections.add(sec)

        for sec in all_sections:
            for key in self.options(sec):
                if key.endswith('passwd'):
                    # Only rewrite in the parser(s) that actually define
                    # the plaintext option.
                    for cfgparser in self._cfgparsers:
                        if cfgparser.has_option(sec, key):
                            plainpass = cfgparser.get(sec, key)
                            if plainpass is None:
                                # empty string password is acceptable here
                                continue
                            # Write '<key>x' encoded and drop the
                            # plaintext '<key>' from the file.
                            cfgparser.set_into_file(sec, key + 'x',
                                                    encode_passwd(plainpass),
                                                    key)
                            dirty.add(cfgparser)

        if dirty:
            log.warning('plaintext password in config files will '
                        'be replaced by encoded ones')
            self.update(dirty)
Beispiel #6
0
 def update(cfgparsers):
     """Flush pending changes of the given parsers to their files on disk.

     IOErrors are logged as warnings instead of raised, so one
     unwritable file does not abort updating the others.
     """
     for cfgparser in cfgparsers:
         try:
             cfgparser.update()
         except IOError as err:
             log.warning('update config file error: %s' % err)
Beispiel #7
0
    def _check_passwd(self):
        """Convert plaintext ``*passwd`` options to encoded ``*passwdx``.

        Scans every section of every layered config parser; each option
        ending in 'passwd' is rewritten into its originating file as
        '<key>x' with an encoded value, replacing the plaintext key.
        """
        # Parsers whose backing files need to be rewritten on disk.
        dirty = set()

        # Union of section names across all layered parsers.
        all_sections = set()
        for layer in self._cfgparsers:
            for sec in layer.sections():
                all_sections.add(sec)

        for sec in all_sections:
            for key in self.options(sec):
                if key.endswith('passwd'):
                    # Only rewrite in the parser(s) that actually define
                    # the plaintext option.
                    for cfgparser in self._cfgparsers:
                        if cfgparser.has_option(sec, key):
                            plainpass = cfgparser.get(sec, key)
                            if plainpass is None:
                                # empty string password is acceptable here
                                continue
                            # Write '<key>x' encoded and drop the
                            # plaintext '<key>' from the file.
                            cfgparser.set_into_file(sec,
                                                    key + 'x',
                                                    encode_passwd(plainpass),
                                                    key)
                            dirty.add(cfgparser)

        if dirty:
            log.warning('plaintext password in config files will '
                        'be replaced by encoded ones')
            self.update(dirty)
Beispiel #8
0
def check_export_branches(repo, args):
    '''checking export related branches: pristine-tar, upstream.
    give warning if pristine-tar/upstream branch exist in remote
    but have not been checkout to local
    '''
    # Strip the remote prefix (e.g. 'origin/') to get bare branch names.
    remote_branches = [branch.split('/')[-1] for branch in
                       repo.get_remote_branches()]
    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')

    # upstream exists locally, but pristine-tar only exists remotely
    if repo.has_branch(upstream_branch) and \
       not repo.has_branch('pristine-tar') and \
       'pristine-tar' in remote_branches:
        # Fixed typo in the user-facing message: 'upstrean' -> 'upstream'.
        log.warning('pristine-tar branch exist in remote branches, '
                    'you can checkout it to enable exporting upstream '
                    'tarball from pristine-tar branch')

    # neither upstream nor pristine-tar exists locally, both exist remotely
    if not repo.has_branch(upstream_branch) and \
       not repo.has_branch('pristine-tar') and \
       'pristine-tar' in remote_branches and upstream_branch in remote_branches:
        log.warning('pristine-tar and %s branches exist in remote branches, '
                    'you can checkout them to enable upstream tarball and '
                    'patch-generation ' % upstream_branch)
Beispiel #9
0
def main(args):
    """gbs chroot entry point.

    Enters the build root via ``sudo chroot`` as the build user (or root
    with --root), after copying the host resolv.conf into the chroot.
    """
    build_root = args.buildroot

    # The build root is unusable while the builder still holds this lock.
    running_lock = '%s/not-ready' % build_root
    if os.path.exists(running_lock):
        raise GbsError('build root %s is not ready' % build_root)

    log.info('chroot %s' % build_root)
    user = '******'
    if args.root:
        user = '******'
    cmd = ['sudo', 'chroot', build_root, 'su', user]

    # Best effort: give the chroot working DNS; failure is not fatal.
    try:
        subprocess.call(['sudo', 'cp', '/etc/resolv.conf',
                         build_root + '/etc/resolv.conf'])
    except OSError:
        log.warning('failed to setup /etc/resolv.conf')

    try:
        # Copy the environment so setting PS1 does not leak into the
        # parent process (os.environ itself was mutated before).
        build_env = os.environ.copy()
        build_env['PS1'] = r"(tizen-build-env)@\h \W]\$ "
        subprocess.call(cmd, env=build_env)
    except OSError as err:
        raise GbsError('failed to chroot to %s: %s' % (build_root, err))
Beispiel #10
0
def export_sources(repo, commit, export_dir, spec, args):
    """
    Export packaging files using git-buildpackage.

    Falls back to the old single monolithic tarball mode when
    tarball/patch generation fails for a non-native package.
    """
    tmp = utils.Temp(prefix='gbp_', dirn=configmgr.get('tmpdir', 'general'),
                     directory=True)

    gbp_args = create_gbp_export_args(repo, commit, export_dir, tmp.path,
                                      spec, args)
    try:
        ret = gbp_build(gbp_args)
        # Exit status 2 means tarball/patch generation failed; retry in
        # monolithic-archive mode for non-native packages.
        if ret == 2 and not is_native_pkg(repo, args):
            log.warning("Generating upstream tarball and/or generating "
                          "patches failed. GBS tried this as you have "
                          "upstream branch in you git tree. This is a new "
                          "mode introduced in GBS v0.10. "
                          "Consider fixing the problem by either:\n"
                          "  1. Update your upstream branch and/or fix the "
                          "spec file. Also, check the upstream tag format.\n"
                          "  2. Remove or rename the upstream branch")
            log.info("Falling back to the old method of generating one "
                       "monolithic source archive")
            gbp_args = create_gbp_export_args(repo, commit, export_dir,
                                              tmp.path, spec, args,
                                              force_native=True)
            ret = gbp_build(gbp_args)
        if ret:
            raise GbsError("Failed to export packaging files from git tree")
    except GitRepositoryError as excobj:
        raise GbsError("Repository error: %s" % excobj)
Beispiel #11
0
def main(args):
    """gbs clone entry point."""

    # Determine upstream branch
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    packaging_branch = configmgr.get_arg_conf(args, 'packaging_branch')
    # Construct GBP cmdline arguments; the first element is an argv[0]
    # placeholder expected by the gbp option parser.
    gbp_args = ['dummy argv[0]',
                '--color-scheme=magenta:green:yellow:red',
                '--pristine-tar',
                '--upstream-branch=%s' % upstream_branch,
                '--packaging-branch=%s' % packaging_branch]
    if args.all:
        gbp_args.append('--all')
    if args.depth:
        gbp_args.append('--depth=%s' % args.depth)
    if args.debug:
        gbp_args.append("--verbose")
    # Positional arguments: source URI, then optional target directory.
    gbp_args.append(args.uri)
    if args.directory:
        gbp_args.append(args.directory)

    # Clone
    log.info('cloning %s' % args.uri)
    if do_clone(gbp_args):
        raise GbsError('Failed to clone %s' % args.uri)

    log.info('finished')
Beispiel #12
0
def main(args):
    """gbs submit entry point."""

    workdir = args.gitdir

    # Refuse to submit from an orphan-devel development branch: changes
    # must first be exported to the packaging branch.
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging and args.commit == 'HEAD':
        log.error(
            "You seem to be submitting a development branch of an "
            "(orphan) packaging branch. Please export your changes to the "
            "packaging branch with 'gbs devel export' and submit from there.")
        raise GbsError("Refusing to submit from devel branch")

    message = args.msg
    if message is None:
        message = get_message()

    if not message:
        raise GbsError("tag message is required")

    try:
        repo = RpmGitRepository(workdir)
        commit = repo.rev_parse(args.commit)
        current_branch = repo.get_branch()
    except GitRepositoryError as err:
        raise GbsError(str(err))
Beispiel #13
0
def main(args):
    """gbs pull entry point."""

    # Determine upstream branch
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')

    # Construct GBP cmdline arguments; the first element is an argv[0]
    # placeholder expected by the gbp option parser.
    gbp_args = ['dummy argv[0]',
                '--color-scheme=magenta:green:yellow:red',
                '--pristine-tar',
                '--upstream-branch=%s' % upstream_branch,
                '--packaging-branch=master']
    if args.depth:
        gbp_args.append('--depth=%s' % args.depth)
    if args.force:
        gbp_args.append('--force=clean')
    if args.all:
        gbp_args.append('--all')
    if args.debug:
        gbp_args.append("--verbose")

    # Pull from the remote; exit status 2 means a partial failure.
    log.info('updating from remote')
    ret = do_pull(gbp_args)
    if ret == 2:
        raise GbsError('Failed to update some of the branches!')
    elif ret:
        raise GbsError('Update failed!')

    log.info('finished')
Beispiel #14
0
def export_sources(repo, commit, export_dir, spec, args, create_tarball=True):
    """
    Export packaging files using git-buildpackage.

    On failure for a non-native package, prints guidance about fixing
    the upstream branch/tag setup, then raises GbsError.
    """
    tmp = utils.Temp(prefix='gbp_', dirn=configmgr.get('tmpdir', 'general'),
                     directory=True)

    gbp_args = create_gbp_export_args(repo, commit, export_dir, tmp.path,
                                      spec, args, create_tarball=create_tarball)
    try:
        ret = gbp_build(gbp_args)
        # Exit status 2 means tarball/patch generation failed; explain
        # the likely causes before raising below.
        if ret == 2 and not is_native_pkg(repo, args):
            log.error("Generating upstream tarball and/or generating patches "
                      "failed. GBS tried this as you have upstream branch in "
                      "you git tree. Fix the problem by either:\n"
                      "  1. Update your upstream branch and/or fix the spec "
                      "file. Also, check the upstream tag format.\n"
                      "  2. Remove or rename the upstream branch (change the "
                      "package to native)\n"
                      "See https://source.tizen.org/documentation/reference/"
                      "git-build-system/upstream-package for more details.")
        if ret:
            raise GbsError("Failed to export packaging files from git tree")
    except GitRepositoryError as excobj:
        raise GbsError("Repository error: %s" % excobj)
Beispiel #15
0
 def update(cfgparsers):
     """Flush pending changes of the given parsers to their files on disk.

     IOErrors are logged as warnings instead of raised, so one
     unwritable file does not abort updating the others.
     """
     for cfgparser in cfgparsers:
         try:
             cfgparser.update()
         except IOError as err:
             log.warning('update config file error: %s' % err)
Beispiel #16
0
    def grab(self, url, filename, user=None, passwd=None, no_cache=False):
        """Fetch *url* and write the response body to *filename*."""

        log.debug("fetching %s => %s" % (url, filename))

        # Point the curl handle at the target, then run the transfer;
        # the context manager closes the file even if perform() raises.
        with open(filename, 'w') as sink:
            self.change_url(url, sink, user, passwd, no_cache)
            self.perform()
Beispiel #17
0
    def grab(self, url, filename, user=None, passwd=None):
        """Fetch *url* and write the response body to *filename*."""

        log.debug("fetching %s => %s" % (url, filename))

        # Point the curl handle at the target, then run the transfer;
        # the context manager closes the file even if perform() raises.
        with open(filename, 'w') as sink:
            self.change_url(url, sink, user, passwd)
            self.perform()
Beispiel #18
0
def main(args):
    """gbs import entry point.

    Imports either a source rpm / spec file (gbp_import_srpm) or an
    upstream tarball (gbp_import_orig), depending on the path suffix.
    """

    # Propagate author identity to git via the standard environment vars.
    if args.author_name:
        os.environ["GIT_AUTHOR_NAME"] = args.author_name
    if args.author_email:
        os.environ["GIT_AUTHOR_EMAIL"] = args.author_email

    path = args.path

    tmp = Temp(prefix='gbp_',
               dirn=configmgr.get('tmpdir', 'general'),
               directory=True)

    # Upstream branch: CLI argument wins over the config file.
    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')

    params = ["argv[0] placeholder",
              "--color-scheme=magenta:green:yellow:red",
              "--packaging-dir=%s" % get_packaging_dir(args),
              "--upstream-branch=%s" % upstream_branch, path,
              "--tmp-dir=%s" % tmp.path,
              ]
    if args.debug:
        params.append("--verbose")
    # pristine-tar is only usable when the host tool is installed.
    if not args.no_pristine_tar and os.path.exists("/usr/bin/pristine-tar"):
        params.append("--pristine-tar")
    if args.filter:
        params += [('--filter=%s' % f) for f in args.filter]

    if path.endswith('.src.rpm') or path.endswith('.spec'):
        # Source rpm / spec import path.
        if args.allow_same_version:
            params.append("--allow-same-version")
        if args.native:
            params.append("--native")
        if args.no_patch_import:
            params.append("--no-patch-import")
        ret = gbp_import_srpm(params)
        # Exit status 2: the package imported but patch import failed.
        if ret == 2:
            log.warning("Importing of patches into packaging branch failed! "
                        "Please import manually (apply and commit to git, "
                        "remove files from packaging dir and spec) in order "
                        "to enable automatic patch generation.")
        elif ret:
            raise GbsError("Failed to import %s" % path)
    else:
        # Upstream tarball import path.
        if args.upstream_vcs_tag:
            params.append('--upstream-vcs-tag=%s' % args.upstream_vcs_tag)
        if args.merge:
            params.append('--merge')
        else:
            params.append('--no-merge')
        if gbp_import_orig(params):
            raise GbsError('Failed to import %s' % path)

    log.info('done.')
Beispiel #19
0
def show_file_from_rev(git_path, relative_path, commit_id):
    """Get a single file content from given git revision.

    Returns the file content as a byte string, or None when the git
    invocation fails (the failure is only logged at debug level).
    """
    args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
    try:
        with Workdir(git_path):
            return subprocess.Popen(args,
                                    stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError) as err:
        log.debug('failed to checkout %s from %s:%s' % (relative_path,
                                                        commit_id, str(err)))
Beispiel #20
0
def show_file_from_rev(git_path, relative_path, commit_id):
    """Get a single file content from given git revision.

    Returns the file content as a byte string, or None when the git
    invocation fails (the failure is only logged at debug level).
    """
    args = ['git', 'show', '%s:%s' % (commit_id, relative_path)]
    try:
        with Workdir(git_path):
            return subprocess.Popen(args,
                                    stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError) as err:
        log.debug('failed to checkout %s from %s:%s' %
                  (relative_path, commit_id, str(err)))
Beispiel #21
0
    def _new_conf(self):
        """Generate a default config file in the user's home directory."""
        fpath = os.path.expanduser('~/.gbs.conf')

        with open(fpath, 'w') as wfile:
            wfile.write(self.DEFAULT_CONF_TEMPLATE)
        # Owner read/write only: the file may contain credentials.
        # 0o600 is the portable octal literal (0600 is Python-2-only).
        os.chmod(fpath, 0o600)

        log.warning('Created a new config file %s. Please check and edit '
                    'your authentication information.' % fpath)
Beispiel #22
0
    def _new_conf(self):
        """Generate a default config file in the user's home directory."""
        fpath = os.path.expanduser('~/.gbs.conf')

        with open(fpath, 'w') as wfile:
            wfile.write(self.DEFAULT_CONF_TEMPLATE)
        # Owner read/write only: the file may contain credentials.
        # 0o600 is the portable octal literal (0600 is Python-2-only).
        os.chmod(fpath, 0o600)

        log.warning('Created a new config file %s. Please check and edit '
                    'your authentication information.' % fpath)
Beispiel #23
0
 def filter_valid_repo(repos):
     """Keep only usable repo URLs: http(s) remotes or existing local paths."""
     rets = []
     for url in repos:
         is_remote = url.startswith('http://') or url.startswith('https://')
         is_local = url.startswith('/') and os.path.exists(url)
         if is_remote or is_local:
             rets.append(url)
         else:
             log.warning('ignore invalid repo url: %s' % url)
     return rets
Beispiel #24
0
 def filter_valid_repo(repos):
     """Keep only usable repo URLs: http(s) remotes or existing local paths.

     Invalid entries are dropped with a warning rather than raising.
     """
     rets = []
     for url in repos:
         # A repo is valid if it is an http(s) URL or an absolute local
         # path that actually exists on disk.
         if not url.startswith('http://') and \
             not url.startswith('https://') and \
             not (url.startswith('/') and os.path.exists(url)):
             log.warning('ignore invalid repo url: %s' % url)
         else:
             rets.append(url)
     return rets
Beispiel #25
0
def createimage(args, ks_file):
    '''create image using mic'''
    # Build the extra mic option list from the parsed arguments.
    extra_mic_opts = []
    if args.outdir:
        extra_mic_opts = ['--outdir=%s' % args.outdir]
    if args.tmpfs:
        extra_mic_opts.append('--tmpfs')
    extra_mic_opts.append('--record-pkgs=name')
    # NOTE(review): ks_file/outdir are interpolated into a shell command
    # unquoted; verify callers never pass paths with shell metacharacters.
    mic_cmd = 'sudo mic create auto %s %s' % (ks_file, ' '.join(extra_mic_opts))
    log.debug(mic_cmd)
    return os.system(mic_cmd)
Beispiel #26
0
def createimage(args, ks_file):
    '''create image using mic

    Returns the os.system() exit status of the mic invocation.
    '''
    # Build the extra mic option list from the parsed arguments.
    extra_mic_opts = []
    if args.outdir:
        extra_mic_opts = ['--outdir=%s' % args.outdir]
    if args.tmpfs:
        extra_mic_opts += ['--tmpfs']
    extra_mic_opts += ['--record-pkgs=name']
    # NOTE(review): ks_file/outdir are interpolated into a shell command
    # unquoted; verify callers never pass paths with shell metacharacters.
    mic_cmd = 'sudo mic create auto %s %s' % (ks_file,
                                              ' '.join(extra_mic_opts))
    log.debug(mic_cmd)
    return os.system(mic_cmd)
Beispiel #27
0
    def build_profile_by_name(self, name):
        '''return profile object by a given section

        Builds a Profile from a "profile.*" config section: credentials,
        optional OBS section, repo sections, buildroot/buildconf paths
        and the exclude_packages list.
        '''
        # Section names referenced by general.profile must be namespaced.
        if not name.startswith('profile.'):
            raise errors.ConfigError('section name specified by '
                                     ' general.profile must start with string'
                                     ' "profile.": %s' % name)
        if not self.has_section(name):
            raise errors.ConfigError('no such section: %s' % name)

        user = self.get_optional_item(name, 'user')
        password = self.get_optional_item(name, 'passwd')

        profile = Profile(name, user, password)

        # Optional OBS section ("obs.*") attached to this profile.
        obs = self.get_optional_item(name, 'obs')
        if obs:
            if not obs.startswith('obs.'):
                raise errors.ConfigError('obs section name should start '
                                         'with string "obs.": %s' % obs)

            obsconf = SectionConf(profile, obs,
                                  self._get_url_options(obs),
                                  self.get_optional_item(obs, 'base_prj'),
                                  self.get_optional_item(obs, 'target_prj'))
            profile.set_obs(obsconf)

        # Optional comma-separated list of "repo.*" sections.
        repos = self.get_optional_item(name, 'repos')
        if repos:
            for repo in repos.split(','):
                repo = repo.strip()
                if not repo.startswith('repo.'):
                    # Bad names are skipped with a warning, not fatal.
                    log.warning('ignore %s, repo section name should start '
                                'with string "repo."' % repo)
                    continue

                repoconf = SectionConf(profile, repo,
                                       self._get_url_options(repo))
                profile.add_repo(repoconf)

        profile.buildroot = self.get_optional_item(name, 'buildroot')
        # buildconf may contain config interpolation and '~' expansion.
        if self.get_optional_item(name, 'buildconf'):
            profile.buildconf = os.path.expanduser(self._interpolate(
                self.get_optional_item(name, 'buildconf')))
        if self.get_optional_item(name, 'exclude_packages'):
            exclude_val = self.get_optional_item(name, 'exclude_packages')
            for pkg in exclude_val.split(','):
                if pkg.strip():
                    profile.exclude_packages.append(pkg.strip())

        return profile
Beispiel #28
0
    def perform(self):
        '''do the real Curl perform work

        Installs a temporary SIGINT handler so Ctrl-C aborts the
        transfer cleanly through libcurl instead of printing a
        traceback; failures are translated into GBS URL errors.
        '''

        curl = self.curl

        # Mutable cell so the nested callbacks can flag interruption.
        stop = [False]

        def progressing(*_args):
            '''Returning a non-zero value from this callback will cause libcurl
            to abort the transfer and return CURLE_ABORTED_BY_CALLBACK.'''
            return -1 if stop[0] else 0

        def handler(_signum, _frame):
            '''set stop flag if catch SIGINT,
            if not catch SIGINT, pycurl will print traceback'''
            stop[0] = True

        curl.setopt(pycurl.PROGRESSFUNCTION, progressing)
        curl.setopt(pycurl.NOPROGRESS, False)
        original_handler = signal.signal(signal.SIGINT, handler)
        try:
            curl.perform()
        except pycurl.error as err:
            log.debug('fetching error:%s' % str(err))

            errcode, errmsg = err.args
            http_code = curl.getinfo(pycurl.HTTP_CODE)

            # Timeouts and 503s are commonly caused by proxy settings;
            # include the relevant environment variables for diagnosis.
            if errcode == pycurl.E_OPERATION_TIMEOUTED or http_code == 503:
                proxies = ['Detected proxies set in system environment:']
                env = os.environ
                for key in [
                        'HTTPS_PROXY', 'HTTP_PROXY', 'FTP_PROXY',
                        'https_proxy', 'http_proxy', 'ftp_proxy', 'NO_PROXY',
                        'no_proxy'
                ]:
                    proxies.append('%s=%s' % (key, env.get(key, '')))
                raise UrlError("connect timeout to %s, maybe it's caused by "
                               "proxy settings, please check. %s" %
                               (curl.url, '\n  '.join(proxies)))
            elif errcode == pycurl.E_ABORTED_BY_CALLBACK:
                # Our progress callback aborted: propagate as Ctrl-C.
                raise KeyboardInterrupt(err)
            elif http_code in (401, 403):
                raise UrlError('authenticate failed on: %s' % curl.url)
            elif http_code == 404:
                raise PageNotFound(err)
            else:
                raise UrlError('URL error on %s: (%s: "%s")' %
                               (curl.url, errcode, errmsg))
        finally:
            # Restore the previous SIGINT disposition; the original code
            # saved it but never put it back.
            signal.signal(signal.SIGINT, original_handler)
Beispiel #29
0
    def build_profile_by_name(self, name):
        '''return profile object by a given section

        Builds a Profile from a "profile.*" config section: credentials,
        optional OBS section, repo sections, buildroot/buildconf paths
        and the exclude_packages list.
        '''
        # Section names referenced by general.profile must be namespaced.
        if not name.startswith('profile.'):
            raise errors.ConfigError(
                'section name specified by general.profile'
                ' must start with string "profile.": %s' % name)
        if not self.has_section(name):
            raise errors.ConfigError('no such section: %s' % name)

        user = self.get_optional_item(name, 'user')
        password = self.get_optional_item(name, 'passwd')

        profile = Profile(name, user, password)

        # Optional OBS section ("obs.*") attached to this profile.
        obs = self.get_optional_item(name, 'obs')
        if obs:
            if not obs.startswith('obs.'):
                raise errors.ConfigError('obs section name should start '
                                         'with string "obs.": %s' % obs)

            obsconf = SectionConf(profile, obs, self._get_url_options(obs),
                                  self.get_optional_item(obs, 'base_prj'),
                                  self.get_optional_item(obs, 'target_prj'))
            profile.set_obs(obsconf)

        # Optional comma-separated list of "repo.*" sections.
        repos = self.get_optional_item(name, 'repos')
        if repos:
            for repo in repos.split(','):
                repo = repo.strip()
                if not repo.startswith('repo.'):
                    # Bad names are skipped with a warning, not fatal.
                    log.warning('ignore %s, repo section name should start '
                                'with string "repo."' % repo)
                    continue

                repoconf = SectionConf(profile, repo,
                                       self._get_url_options(repo))
                profile.add_repo(repoconf)

        profile.buildroot = self.get_optional_item(name, 'buildroot')
        # buildconf may contain config interpolation and '~' expansion.
        if self.get_optional_item(name, 'buildconf'):
            profile.buildconf = os.path.expanduser(
                self._interpolate(self.get_optional_item(name, 'buildconf')))
        if self.get_optional_item(name, 'exclude_packages'):
            exclude_val = self.get_optional_item(name, 'exclude_packages')
            for pkg in exclude_val.split(','):
                if pkg.strip():
                    profile.exclude_packages.append(pkg.strip())

        return profile
Beispiel #30
0
def update_local_conf(repo, values):
    """Create/update local gbs.conf and commit it to the repository.

    values: mapping of {section: {key: value}} entries to write.
    """
    parser = BrainConfigParser()
    conf_fn = os.path.join(repo.path, '.gbs.conf')
    log.info('Updating local .gbs.conf')
    # 'a+' creates the file when it does not exist yet.
    with open(conf_fn, 'a+') as conf_fp:
        parser.readfp(conf_fp)
    # items() instead of the Python-2-only iteritems().
    for section, items in values.items():
        for key, value in items.items():
            parser.set_into_file(section, key, value)
    parser.update()

    log.info('Committing local .gbs.conf to git')
    repo.add_files(['.gbs.conf'])
    repo.commit_all(msg="Autoupdate local .gbs.conf\n\nGbp-Rpm: Ignore")
Beispiel #31
0
def update_local_conf(repo, values):
    """Create/update local gbs.conf and commit it to the repository.

    values: mapping of {section: {key: value}} entries to write.
    """
    parser = BrainConfigParser()
    conf_fn = os.path.join(repo.path, '.gbs.conf')
    log.info('Updating local .gbs.conf')
    # 'a+' creates the file when it does not exist yet.
    with open(conf_fn, 'a+') as conf_fp:
        parser.readfp(conf_fp)
    # items() instead of the Python-2-only iteritems().
    for section, items in values.items():
        for key, value in items.items():
            parser.set_into_file(section, key, value)
    parser.update()

    log.info('Committing local .gbs.conf to git')
    repo.add_files(['.gbs.conf'])
    repo.commit_all(msg="Autoupdate local .gbs.conf\n\nGbp-Rpm: Ignore")
Beispiel #32
0
    def split_out_local_repo(repos):
        """Divide repos to local and remote parts.

        Local repos that do not exist on disk are dropped with a warning.
        """
        local_repos = []
        remotes = []

        for repo in repos:
            if not repo.is_local():
                remotes.append(repo)
            elif os.path.exists(repo):
                local_repos.append(repo)
            else:
                log.warning('No such repo path:%s' % repo)

        return local_repos, remotes
Beispiel #33
0
    def split_out_local_repo(repos):
        """Divide repos to local and remote parts.

        Local repos that do not exist on disk are dropped with a warning.
        """
        local_repos = []
        remotes = []

        for repo in repos:
            if repo.is_local():
                # Only keep local repos that actually exist on disk.
                if os.path.exists(repo):
                    local_repos.append(repo)
                else:
                    log.warning('No such repo path:%s' % repo)
            else:
                remotes.append(repo)

        return local_repos, remotes
Beispiel #34
0
    def _parse_build_xml(build_xml):
        """
        Parse build.xml.
        Returns: dictionary with buildconf, repos and archs, or None when
        the file is missing, malformed, or in the unsupported new format.
        """
        if not (build_xml and os.path.exists(build_xml)):
            return

        try:
            etree = ET.parse(build_xml)
        except ET.ParseError:
            log.warning('Not well formed xml: %s' % build_xml)
            return

        meta = {}
        root = etree.getroot()

        # Get version of build.xml
        build_version = root.get('version')
        # It's new format of repo structure if 'version' exists
        if build_version:
            # Fixed typo in the user-facing message: 'supportted'.
            log.warning('new format repo structure has not been supported '
                        'well, please upgrade your gbs to latest version')
            return None

        buildelem = root.find('buildconf')
        # Must use None here: find() returns None when the element is
        # absent, while a present-but-childless element is falsy, so
        # "if buildelem" would wrongly skip it.
        if buildelem is not None:
            meta['buildconf'] = buildelem.text.strip()

        repo_items = root.find('repos')
        if repo_items is not None:
            meta['repos'] = [
                repo.text.strip() for repo in repo_items.findall('repo')
            ]

        arch_items = root.find('archs')
        if arch_items is not None:
            meta['archs'] = [
                arch.text.strip() for arch in arch_items.findall('arch')
            ]
        id_item = root.find('id')
        if id_item is not None:
            meta['id'] = id_item.text.strip()

        return meta
Beispiel #35
0
def main(args):
    '''main entrance for createimage'''
    # mic is an optional runtime dependency; fail with a clear hint.
    try:
        import mic
    except ImportError:
        raise GbsError('please install mic manually first')

    ks_file = args.ks_file
    if not os.path.exists(ks_file):
        raise GbsError('specified ks file %s does not exist' % ks_file)

    log.info('creating image for ks file: %s' % ks_file)
    if createimage(args, ks_file) != 0:
        raise GbsError('failed to create image')
    log.info('Done')
Beispiel #36
0
def main(args):
    """Entry point for gbs createimage.

    Checks prerequisites (mic installed, kickstart file present) and
    invokes createimage(); raises GbsError on any failure.
    """
    try:
        # Only the availability check matters here; mic itself is used
        # inside createimage().
        import mic
    except ImportError:
        raise GbsError('please install mic manually first')

    if not os.path.exists(args.ks_file):
        raise GbsError('specified ks file %s does not exist' % args.ks_file)

    log.info('creating image for ks file: %s' % args.ks_file)
    if createimage(args, args.ks_file) == 0:
        log.info('Done')
    else:
        raise GbsError('failed to create image')
Beispiel #37
0
    def perform(self):
        '''Run the actual curl transfer, translating pycurl errors.

        Installs a temporary SIGINT handler so that Ctrl-C aborts the
        transfer cleanly through the progress callback instead of
        letting pycurl print a traceback.  Raises UrlError,
        PageNotFound or KeyboardInterrupt depending on the failure.
        '''

        curl = self.curl

        # Mutable one-element list so the nested callbacks can share and
        # observe the stop flag (no 'nonlocal' in Python 2).
        stop = [False]
        def progressing(*_args):
            '''Returning a non-zero value from this callback will cause libcurl
            to abort the transfer and return CURLE_ABORTED_BY_CALLBACK.'''
            return -1 if stop[0] else 0

        def handler(_signum, _frame):
            '''set stop flag if catch SIGINT,
            if not catch SIGINT, pycurl will print traceback'''
            stop[0] = True

        curl.setopt(pycurl.PROGRESSFUNCTION, progressing)
        curl.setopt(pycurl.NOPROGRESS, False)
        # NOTE(review): the previous SIGINT handler is saved but never
        # restored after perform() returns -- confirm this is intended.
        original_handler = signal.signal(signal.SIGINT, handler)
        try:
            curl.perform()
        except pycurl.error, err:
            log.debug('fetching error:%s' % str(err))

            errcode, errmsg = err.args
            http_code = curl.getinfo(pycurl.HTTP_CODE)

            # Timeouts and 503s are frequently caused by proxy settings,
            # so dump the proxy environment into the error message.
            if errcode == pycurl.E_OPERATION_TIMEOUTED or http_code == 503:
                proxies = ['Detected proxies set in system environment:']
                env = os.environ
                for key in ['HTTPS_PROXY', 'HTTP_PROXY', 'FTP_PROXY',
                            'https_proxy', 'http_proxy', 'ftp_proxy',
                            'NO_PROXY', 'no_proxy']:
                    proxies.append('%s=%s' % (key, env.get(key, '')))
                raise UrlError("connect timeout to %s, maybe it's caused by "
                               "proxy settings, please check. %s" % (curl.url,
                               '\n  '.join(proxies)))
            elif errcode == pycurl.E_ABORTED_BY_CALLBACK:
                # Abort was triggered by the SIGINT handler above.
                raise KeyboardInterrupt(err)
            elif http_code in (401, 403):
                raise UrlError('authenticate failed on: %s' % curl.url)
            elif http_code == 404:
                raise PageNotFound(err)
            else:
                raise UrlError('URL error on %s: (%s: "%s")' %
                               (curl.url, errcode, errmsg))
Beispiel #38
0
    def _parse_build_xml(build_xml):
        """Parse build.xml and return its metadata.

        Returns a dict that may contain 'buildconf', 'repos', 'archs'
        and 'id'; returns None when the file is missing, malformed, or
        uses the unsupported new-format layout.
        """
        if not build_xml or not os.path.exists(build_xml):
            return

        try:
            tree = ET.parse(build_xml)
        except ET.ParseError:
            log.warning('Not well formed xml: %s' % build_xml)
            return

        root = tree.getroot()

        # A 'version' attribute on the root marks the new repo layout,
        # which this code does not handle.
        if root.get('version'):
            log.warning('new format repo structure has not been supportted '
                        'well, please upgrade your gbs to latest version')
            return None

        meta = {}
        # find() returns None for a missing element, and an element with
        # no children is falsy -- so compare against None explicitly.
        conf_elem = root.find('buildconf')
        if conf_elem is not None:
            meta['buildconf'] = conf_elem.text.strip()

        repos_elem = root.find('repos')
        if repos_elem is not None:
            meta['repos'] = [item.text.strip()
                             for item in repos_elem.findall('repo')]

        archs_elem = root.find('archs')
        if archs_elem is not None:
            meta['archs'] = [item.text.strip()
                             for item in archs_elem.findall('arch')]

        id_elem = root.find('id')
        if id_elem is not None:
            meta['id'] = id_elem.text.strip()

        return meta
Beispiel #39
0
    def change_url(self, url, outfile, user, passwd, no_cache=False):
        """Retarget the shared curl handle at a new URL.

        Sets the URL and output stream, applies optional credentials,
        and, when no_cache is True, adds headers that bypass HTTP
        caches.
        """
        handle = self.curl
        handle.url = url
        handle.setopt(pycurl.URL, url)
        handle.setopt(pycurl.WRITEDATA, outfile)

        if user:
            credentials = '%s:%s' % (user, passwd) if passwd else user
            handle.setopt(pycurl.USERPWD, credentials)

        headers = []
        if no_cache:
            headers += ['Pragma: no-cache', 'Cache-Control: no-cache']
            log.debug("disable HTTP caching")
        # Always set HTTPHEADER so headers from a previous URL are cleared.
        handle.setopt(pycurl.HTTPHEADER, headers)
Beispiel #40
0
    def change_url(self, url, outfile, user, passwd, no_cache=False):
        """Point the curl handle at *url* and configure this transfer.

        Associates the output stream, applies optional basic-auth
        credentials and optionally disables HTTP caching via headers.
        """
        self.curl.url = url
        self.curl.setopt(pycurl.URL, url)
        self.curl.setopt(pycurl.WRITEDATA, outfile)

        if user:
            if passwd:
                self.curl.setopt(pycurl.USERPWD, '%s:%s' % (user, passwd))
            else:
                self.curl.setopt(pycurl.USERPWD, user)

        http_headers = []
        if no_cache:
            # Ask caches/intermediaries not to serve stale copies.
            http_headers.append('Pragma: no-cache')
            http_headers.append('Cache-Control: no-cache')
            log.debug("disable HTTP caching")
        # Set unconditionally so stale headers from an earlier URL are reset.
        self.curl.setopt(pycurl.HTTPHEADER, http_headers)
Beispiel #41
0
def track_export_branches(repo, args):
    """Create local tracking branches needed for export.

    For the configured upstream branch and 'pristine-tar', create a
    local branch from the matching remote branch when no local branch
    exists yet.  Returns the list of branch names that were created.
    """
    remote_map = {}
    for remote in repo.get_remote_branches():
        # Strip the remote name: e.g. 'origin/upstream' -> 'upstream'.
        remote_map[remote.split('/', 1)[-1]] = remote

    upstream = configmgr.get_arg_conf(args, 'upstream_branch')

    created = []
    for name in (upstream, 'pristine-tar'):
        if not repo.has_branch(name) and name in remote_map:
            log.info('tracking branch: %s -> %s' % (remote_map[name], name))
            repo.create_branch(name, remote_map[name])
            created.append(name)

    return created
Beispiel #42
0
def prepare_repos_and_build_conf(args, arch, profile):
    '''generate repos and build conf options for depanneur'''
    # NOTE(review): this function is truncated in this view; comments
    # below cover only the visible part.

    cmd_opts = []
    # Per-run cache directory for downloaded repo data.
    cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
                 directory=True)
    cachedir = cache.path
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    log.info('generate repositories ...')

    # Start from the repos configured in the profile, unless skipped.
    if args.skip_conf_repos:
        repos = []
    else:
        repos = [i.url for i in profile.repos]

    # Add repos given on the command line.  Scheme-less entries are
    # treated as local paths: resolved to absolute paths, or skipped
    # with a warning when they do not exist.
    if args.repositories:
        for repo in args.repositories:
            try:
                if not urlparse.urlsplit(repo).scheme:
                    if os.path.exists(repo):
                        repo = os.path.abspath(os.path.expanduser(repo))
                    else:
                        log.warning('local repo: %s does not exist' % repo)
                        continue
                opt_repo = SafeURL(repo)
            except ValueError, err:
                log.warning('Invalid repo %s: %s' % (repo, str(err)))
            else:
                repos.append(opt_repo)
Beispiel #43
0
def prepare_repos_and_build_conf(args, arch, profile):
    '''generate repos and build conf options for depanneur'''
    # NOTE(review): truncated in this view; documentation covers the
    # visible part only.

    cmd_opts = []
    # Temporary cache directory for repo data fetched during the build.
    cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'), directory=True)
    cachedir = cache.path
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    log.info('generate repositories ...')

    # Profile repos are the default set unless explicitly skipped.
    if args.skip_conf_repos:
        repos = []
    else:
        repos = [i.url for i in profile.repos]

    # Command-line repos: scheme-less strings are local paths -- expand
    # them to absolute paths or skip with a warning if missing.
    if args.repositories:
        for repo in args.repositories:
            try:
                if not urlparse.urlsplit(repo).scheme:
                    if os.path.exists(repo):
                        repo = os.path.abspath(os.path.expanduser(repo))
                    else:
                        log.warning('local repo: %s does not exist' % repo)
                        continue
                opt_repo = SafeURL(repo)
            except ValueError, err:
                log.warning('Invalid repo %s: %s' % (repo, str(err)))
            else:
                repos.append(opt_repo)
Beispiel #44
0
    def _fetch_build_conf(self, latest_repo_url, meta):
        """Download the build.conf named in the build.xml metadata.

        No-op when a build conf is already known or when the metadata
        does not name one; otherwise fetches it and records the local
        path (renamed to '<release>.conf') in self.buildconf.
        """
        # A build conf was already located earlier; keep it.
        if self.buildconf:
            return

        if not meta or not meta.get('buildconf'):
            log.warning("No build.conf in build.xml "
                        "of repo: %s" % latest_repo_url)
            return

        conf_url = latest_repo_url.pathjoin('builddata/%s' %
                                            meta['buildconf'])
        fetched = self.fetch(conf_url)
        if fetched:
            # Rename the file after the release part of the repo id,
            # with dashes stripped.
            release, _buildid = meta['id'].split('_')
            release = release.replace('-', '')
            target = os.path.join(os.path.dirname(fetched),
                                  '%s.conf' % release)
            os.rename(fetched, target)
            self.buildconf = target
Beispiel #45
0
    def _fetch_build_conf(self, latest_repo_url, meta):
        """Fetch the build.conf referenced by build.xml metadata.

        Does nothing when a conf is already recorded or none is named;
        otherwise downloads it, renames it to '<release>.conf' and
        stores the path in self.buildconf.
        """
        if self.buildconf:
            return

        named_conf = meta.get('buildconf') if meta else None
        if not named_conf:
            log.warning("No build.conf in build.xml "
                        "of repo: %s" % latest_repo_url)
            return

        url = latest_repo_url.pathjoin('builddata/%s' % meta['buildconf'])
        local = self.fetch(url)
        if not local:
            return

        # Derive the conf name from the release half of the repo id,
        # dropping dashes.
        release, _buildid = meta['id'].split('_')
        release = release.replace('-', '')
        renamed = os.path.join(os.path.dirname(local), '%s.conf' % release)
        os.rename(local, renamed)
        self.buildconf = renamed
Beispiel #46
0
    def _parse_build_xml(build_xml):
        """Extract metadata from a build.xml file.

        Returns a dict possibly containing 'buildconf', 'repos',
        'archs' and 'id'; None if the file is absent or unparsable.
        """
        if not build_xml or not os.path.exists(build_xml):
            return

        try:
            parsed = ET.parse(build_xml)
        except ET.ParseError:
            log.warning('Not well formed xml: %s' % build_xml)
            return

        root = parsed.getroot()
        meta = {}

        # Elements may be absent; ElementTree elements with no children
        # are falsy, so compare against None explicitly.
        node = root.find('buildconf')
        if node is not None:
            meta['buildconf'] = node.text.strip()

        node = root.find('repos')
        if node is not None:
            meta['repos'] = [child.text.strip()
                             for child in node.findall('repo')]

        node = root.find('archs')
        if node is not None:
            meta['archs'] = [child.text.strip()
                             for child in node.findall('arch')]

        node = root.find('id')
        if node is not None:
            meta['id'] = node.text.strip()

        return meta
Beispiel #47
0
    def create_project(self, target, src=None, rewrite=False,
                       description='', linkto='', linkedbuild=''):
        """
        Create new OBS project based on existing project.
        Copy config and repositories from src project to target
        if src exists.

        Args:
            target:      name of the project to create
            src:         existing project to copy repos/config from
            rewrite:     overwrite the target's meta if it already exists
            description: project description text
            linkto:      project to reference with a <link> element
            linkedbuild: value for the repository 'linkedbuild' attribute
        Raises:
            ObsError: when src does not exist or the meta cannot be set.
        """

        if src and not self.exists(src):
            raise ObsError('base project: %s not exists' % src)

        # An existing target is only replaced when rewrite is requested.
        if self.exists(target):
            logger.warning('target project: %s exists' % target)
            if rewrite:
                logger.warning('rewriting target project %s' % target)
            else:
                return

        # Create target meta
        # NOTE(review): target/description are interpolated into XML
        # without escaping -- confirm these values are trusted input.
        meta = '<project name="%s"><title></title>'\
	       '<description>%s</description>'\
               '<person role="maintainer" userid="%s"/>' % \
               (target, description, conf.get_apiurl_usr(self.apiurl))
        if linkto:
            meta += '<link project="%s"/>' % linkto

        # Collect source repos if src project exist
        if src:
            # Copy debuginfo, build, useforbuild and publish meta
            meta += self.get_tags(src, ['debuginfo', 'build',
                                        'useforbuild', 'publish'])
            # Copy repos to target
            repos = self.get_repos_of_project(src)
            for name in repos:
                if linkedbuild:
                    meta += '<repository name="%s" linkedbuild="%s">' % \
                                (name, linkedbuild)
                else:
                    meta += '<repository name="%s">' % name
                meta += '<path project="%s" repository="%s" />' % (src, name)
                for arch in repos[name]:
                    meta += "<arch>%s</arch>\n" % arch
                meta += "</repository>\n"
        else:
            logger.warning('no project repos in target project, please add '
                           'repos from OBS webUI manually, or specify base '
                           'project with -B <base_prj>, then gbs can help to '
                           'set repos using the settings of the specified '
                           'base project.')
        meta += "</project>\n"

        try:
            # Create project and set its meta
            core.edit_meta('prj', path_args=quote_plus(target), data=meta)
        except (urllib2.URLError, M2Crypto.m2urllib2.URLError,
                M2Crypto.SSL.SSLError), err:
            raise ObsError("Can't set meta for %s: %s" % (target, str(err)))
Beispiel #48
0
def prepare_repos_and_build_conf(args, arch, profile):
    '''generate repos and build conf options for depanneur'''
    # NOTE(review): truncated in this view; only the visible part is
    # documented.  Unlike the sibling variants, this one does not
    # resolve local repo paths before wrapping them in SafeURL.

    cmd_opts = []
    # Temporary cache directory for fetched repo data.
    cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
                       directory=True)
    cachedir  = cache.path
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    log.info('generate repositories ...')

    # Profile repos are the default set unless explicitly skipped.
    if args.skip_conf_repos:
        repos = []
    else:
        repos = [i.url for i in profile.repos]

    # Append command-line repos; invalid URLs are warned about and
    # skipped.
    if args.repositories:
        for i in args.repositories:
            try:
                opt_repo = SafeURL(i)
            except ValueError, err:
                log.warning('Invalid repo %s: %s' % (i, str(err)))
            else:
                repos.append(opt_repo)
Beispiel #49
0
    def _fetch_build_conf_new(self, baseurl):
        """Fetch the build conf referenced by a standard repo's repomd.xml.

        Downloads repodata/repomd.xml, looks for the <data type="build">
        entry, fetches the file it points to (decompressing it when
        gzipped) and records the local path in self.buildconf.
        """
        repomd_url = baseurl.pathjoin('repodata/repomd.xml')
        repomd_file = self.fetch(repomd_url)
        if not repomd_file:
            return

        try:
            etree = ET.parse(repomd_file)
        except ET.ParseError:
            log.warning('Not well formed xml: %s' % repomd_file)
            return
        root = etree.getroot()

        # repomd elements are namespaced; derive the namespace prefix
        # from the root tag instead of hard-coding it.
        xmlns = re.sub('repomd$', '', root.tag)
        location_elem = None
        for elem in root.findall('%sdata' % xmlns):
            if elem.attrib['type'] == 'build':
                location_elem = elem.find('%slocation' % xmlns)
                break
        if location_elem is None or 'href' not in location_elem.attrib:
            return

        buildconf_url = baseurl.pathjoin(location_elem.attrib['href'])
        fname = self.fetch(buildconf_url)
        if not fname:
            return

        # Use context managers so both handles are closed even when
        # reading or writing fails (the original leaked them on error).
        opener = gzip.open if fname.endswith('.gz') else open
        with opener(fname, 'r') as src:
            content = src.read()
        buildconf_file = os.path.join(os.path.dirname(fname), 'build.conf')
        with open(buildconf_file, 'w') as dst:
            dst.write(content)
        self.buildconf = buildconf_file
Beispiel #50
0
    def _fetch_build_conf_new(self, baseurl):
        """Fetch the build conf advertised in a standard repo's repomd.xml.

        Downloads repodata/repomd.xml, finds the <data type="build">
        entry, fetches the referenced file (gunzipping when needed) and
        stores the resulting path in self.buildconf.
        """
        repomd_url = baseurl.pathjoin('repodata/repomd.xml')
        repomd_file = self.fetch(repomd_url)
        if not repomd_file:
            return

        try:
            etree = ET.parse(repomd_file)
        except ET.ParseError:
            log.warning('Not well formed xml: %s' % repomd_file)
            return
        root = etree.getroot()

        # Derive the XML namespace prefix from the root tag rather than
        # hard-coding it.
        xmlns = re.sub('repomd$', '', root.tag)
        location_elem = None
        for elem in root.findall('%sdata' % xmlns):
            if elem.attrib['type'] == 'build':
                location_elem = elem.find('%slocation' % xmlns)
                break
        if location_elem is None or 'href' not in location_elem.attrib:
            return

        buildconf_url = baseurl.pathjoin(location_elem.attrib['href'])
        fname = self.fetch(buildconf_url)
        if not fname:
            return

        # 'with' guarantees the handles are closed on error paths too
        # (the original leaked them when read/write raised).
        opener = gzip.open if fname.endswith('.gz') else open
        with opener(fname, 'r') as src:
            content = src.read()
        buildconf_file = os.path.join(os.path.dirname(fname), 'build.conf')
        with open(buildconf_file, 'w') as dst:
            dst.write(content)
        self.buildconf = buildconf_file
Beispiel #51
0
            # NOTE(review): fragment -- enclosing function is not
            # visible in this view.
            status = api.get_results(target_prj, package)

            # Collect available repo/arch pairs for the error message.
            for build_repo in status.keys():
                for arch in status[build_repo]:
                    archlist.append('%-15s%-15s' % (build_repo, arch))
            if not obs_repo or not obs_arch or obs_repo not in status.keys() \
                   or obs_arch not in status[obs_repo].keys():
                raise GbsError('no valid repo / arch specified for buildlog, '\
                               'valid arguments of repo and arch are:\n%s' % \
                               '\n'.join(archlist))
            # A build log only exists for these states.
            if status[obs_repo][obs_arch] not in ['failed', 'succeeded',
                                                  'building', 'finishing']:
                raise GbsError('build status of %s for %s/%s is %s, '\
                               'no build log.' % (package, obs_repo, obs_arch,
                                                  status[obs_repo][obs_arch]))
            log.info('build log for %s/%s/%s/%s' % (target_prj, package,
                                                      obs_repo, obs_arch))
            print api.get_buildlog(target_prj, package, obs_repo, obs_arch)

            return 0

        if args.status:
            results = []

            status = api.get_results(target_prj, package)

            # One row per repo/arch with its current build state.
            for build_repo in status.keys():
                for arch in status[build_repo]:
                    stat = status[build_repo][arch]
                    results.append('%-15s%-15s%-15s' % (build_repo, arch, stat))
            if results:
                log.info('build results from build server:\n%s' \
Beispiel #52
0
        # NOTE(review): fragment -- the enclosing try/function is not
        # visible in this view.
        repo = RpmGitRepository(workdir)
        commit = repo.rev_parse(args.commit)
        current_branch = repo.get_branch()
    except GitRepositoryError, err:
        raise GbsError(str(err))

    try:
        upstream = repo.get_upstream_branch(current_branch)
    except GitRepositoryError:
        # No upstream configured for the current branch.
        upstream = None

    if not args.remote:
        # Default the remote to the branch's upstream, else 'origin'.
        if upstream:
            args.remote = upstream.split('/')[0]
        else:
            log.info("no upstream set for the current branch, using "
                     "'origin' as the remote server")
            args.remote = 'origin'

    if args.tag:
        tagname = args.tag
        # Submission tags must match submit/<target>/<YYYYMMDD.HHMMSS>.
        tag_re = re.compile(r'^submit/\S+/\d{8}\.\d{6}$')
        if not tag_re.match(tagname):
            raise GbsError("invalid tag %s, valid tag format is "
                           "submit/$target/$date.$time. For example:\n      "
                           "submit/trunk/20130128.022439 " % tagname)
    else:
        target = args.target
        if not target:
            # Derive the target from the upstream branch when possible.
            if upstream and upstream.startswith(args.remote):
                target = re.sub('^%s/' % args.remote, '', upstream)
            else:
Beispiel #53
0
def create_gbp_export_args(repo, commit, export_dir, tmp_dir, spec, args,
                           create_tarball=True):
    """
    Construct the cmdline argument list for git-buildpackage export.

    Args:
        repo:           git repository object of the package
        commit:         commit-ish to export ('WC.UNTRACKED' for worktree)
        export_dir:     directory gbp should export into
        tmp_dir:        scratch directory for gbp
        spec:           path of the spec file to use
        args:           parsed gbs command line arguments
        create_tarball: when True, force (re)creation of the orig tarball
    Returns:
        list of argv strings to pass to git-buildpackage.
    """
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    upstream_tag = configmgr.get_arg_conf(args, 'upstream_tag')
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)

    log.debug("Using upstream branch: %s" % upstream_branch)
    log.debug("Using upstream tag format: '%s'" % upstream_tag)

    # Get patch squashing option
    squash_patches_until = configmgr.get_arg_conf(args, 'squash_patches_until')

    # Determine the remote repourl, used to build the spec VCS tag.
    reponame = ""
    remotes = repo.get_remote_repos()
    if remotes:
        remotename = 'origin' if 'origin' in remotes else remotes.keys()[0]
        # Take the remote repo of current branch, if available
        try:
            config_remote = repo.get_config('branch.%s.remote' % repo.branch)
        except KeyError:
            pass
        else:
            if config_remote in remotes:
                remotename = config_remote
            elif config_remote != '.':
                log.warning("You appear to have non-existent remote '%s' "
                            "configured for branch '%s'. Check your git config!"
                            % (config_remote, repo.branch))
        reponame = urlparse(remotes[remotename][0]).path.lstrip('/')

    packaging_dir = get_packaging_dir(args)
    # Now, start constructing the argument list
    export_rev = commit
    argv = ["argv[0] placeholder",
            "--git-color-scheme=magenta:green:yellow:red",
            "--git-ignore-new",
            "--git-compression-level=6",
            "--git-export-dir=%s" % export_dir,
            "--git-tmp-dir=%s" % tmp_dir,
            "--git-packaging-dir=%s" % packaging_dir,
            "--git-spec-file=%s" % spec,
            "--git-pq-branch=development/%(branch)s/%(upstreamversion)s",
            "--git-upstream-branch=%s" % upstream_branch,
            "--git-upstream-tag=%s" % upstream_tag,
            "--git-spec-vcs-tag=%s#%%(commit)s" % reponame]

    if create_tarball:
        argv.append("--git-force-create")
    else:
        argv.append("--git-no-create-orig")
    if args.debug:
        argv.append("--git-verbose")
    if is_native_pkg(repo, args) or args.no_patch_export:
        # Native packages: export the tree as-is, no patch generation.
        argv.extend(["--git-no-patch-export",
                     "--git-upstream-tree=%s" % commit])
    else:
        # Check if the revision seems to be of an orphan development branch
        is_orphan = False
        export_commitish = 'HEAD' if commit == 'WC.UNTRACKED' else commit
        try:
            repo.get_merge_base(export_commitish, upstream_branch)
        except GitRepositoryError:
            is_orphan = True
        # Development branch in orphan packaging model is identified in the conf
        orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')

        if not is_orphan:
            argv.extend(["--git-patch-export",
                         "--git-patch-export-compress=100k",
                         "--git-patch-export-squash-until=%s" %
                            squash_patches_until,
                         "--git-patch-export-ignore-path=^(%s/.*|.gbs.conf)" %
                            packaging_dir,
                        ])

            if orphan_packaging:
                # Export the packaging branch; generate patches from commit.
                export_rev = orphan_packaging
                argv.extend(["--git-patch-export-rev=%s" % commit])

        if repo.has_branch("pristine-tar"):
            argv.extend(["--git-pristine-tar"])

    argv.append("--git-export=%s" % export_rev)

    if 'source_rpm' in args and args.source_rpm:
        # Build a source rpm in place with rpmbuild.  (The original
        # appended '--git-rpmbuild-builddir=.' twice; the duplicate
        # has been removed.)
        argv.extend(['--git-builder=rpmbuild',
                     '--git-rpmbuild-builddir=.',
                     '--git-rpmbuild-rpmdir=.',
                     '--git-rpmbuild-sourcedir=.',
                     '--git-rpmbuild-specdir=.',
                     '--git-rpmbuild-srpmdir=.',
                     '--git-rpmbuild-buildrootdir=.',
                     '--short-circuit', '-bs',
                     ])
    else:
        argv.extend(["--git-builder=osc", "--git-export-only"])

    return argv
Beispiel #54
0
            # NOTE(review): fragment -- enclosing try/function is not
            # visible in this view.
            git.rev_parse(opts.commit)
        is_clean = git.is_clean()[0]
        status = git.status()
    except (GbpError, GitRepositoryError), err:
        raise GbsError(str(err))

    # '??' entries are untracked; every other status code counts as an
    # uncommitted change.
    untracked_files = status['??']
    uncommitted_files = []
    for stat in status:
        if stat == '??':
            continue
        uncommitted_files.extend(status[stat])

    # Without --include-all, warn that local modifications are ignored.
    if not is_clean and not opts.include_all:
        if untracked_files:
            log.warning('the following untracked files would NOT be '
                        'included:\n   %s' % '\n   '.join(untracked_files))
        if uncommitted_files:
            log.warning('the following uncommitted changes would NOT be '
                        'included:\n   %s' % '\n   '.join(uncommitted_files))
        log.warning('you can specify \'--include-all\' option to '
                    'include these uncommitted and untracked files.')
    # With --include-all, list what will be picked up.
    if not is_clean and opts.include_all:
        if untracked_files:
            log.info('the following untracked files would be included'
                     ':\n   %s' % '\n   '.join(untracked_files))
        if uncommitted_files:
            log.info('the following uncommitted changes would be included'
                     ':\n   %s' % '\n   '.join(uncommitted_files))


def hexdigest(fhandle, block_size=4096):
Beispiel #55
0
            # NOTE(review): fragment -- enclosing function/loop is not
            # visible in this view.
            # restore updated spec files
            for spec in glob.glob(os.path.join(specbakd.path, "*.spec")):
                shutil.copy(spec, export_dir)

    # Remove tracked export branches
    if tracked_branches:
        untrack_export_branches(repo, tracked_branches)

    specfile = os.path.basename(main_spec)
    try:
        spec = rpm.SpecFile(os.path.join(export_dir, specfile))
    except GbpError, err:
        raise GbsError('%s' % err)

    # Name and version are required to build the output directory name.
    if not spec.name or not spec.version:
        raise GbsError('can\'t get correct name or version from spec file.')
    else:
        outdir = "%s/%s-%s-%s" % (outdir, spec.name, spec.upstreamversion,
                                  spec.release)
    # Replace any previous export result for the same name-version-release.
    if os.path.exists(outdir):
        if not os.access(outdir, os.W_OK|os.X_OK):
            raise GbsError('no permission to update outdir: %s' % outdir)
        shutil.rmtree(outdir, ignore_errors=True)

    shutil.move(export_dir, outdir)
    if args.source_rpm:
        log.info('source rpm generated to:\n     %s/%s.src.rpm' % \
                   (outdir, os.path.basename(outdir)))

    log.info('package files have been exported to:\n     %s' % outdir)
Beispiel #56
0
def main(args):
    """gbs import entry point.

    Imports a source rpm, spec file or upstream tarball into the git
    tree by delegating to the gbp import tools.
    """
    # Honour explicit author overrides for the commits gbp creates.
    if args.author_name:
        os.environ["GIT_AUTHOR_NAME"] = args.author_name
    if args.author_email:
        os.environ["GIT_AUTHOR_EMAIL"] = args.author_email

    path = args.path

    tmp = Temp(prefix='gbp_',
               dirn=configmgr.get('tmpdir', 'general'),
               directory=True)

    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    upstream_tag = configmgr.get_arg_conf(args, 'upstream_tag')
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)

    gbp_args = ["argv[0] placeholder",
                "--color-scheme=magenta:green:yellow:red",
                "--packaging-dir=%s" % get_packaging_dir(args),
                "--upstream-branch=%s" % upstream_branch, path,
                "--upstream-tag=%s" % upstream_tag,
                "--tmp-dir=%s" % tmp.path,
                ]
    if args.debug:
        gbp_args.append("--verbose")
    if not args.no_pristine_tar and os.path.exists("/usr/bin/pristine-tar"):
        gbp_args.append("--pristine-tar")
    if args.filter:
        gbp_args.extend('--filter=%s' % flt for flt in args.filter)
    if args.upstream_vcs_tag:
        gbp_args.append('--upstream-vcs-tag=%s' % args.upstream_vcs_tag)

    if path.endswith(('.src.rpm', '.spec')):
        # Importing packaging sources (source rpm or spec file).
        gbp_args.append("--create-missing-branches")
        for enabled, option in ((args.allow_same_version,
                                 "--allow-same-version"),
                                (args.native, "--native"),
                                (args.orphan_packaging,
                                 "--orphan-packaging"),
                                (args.no_patch_import,
                                 "--no-patch-import")):
            if enabled:
                gbp_args.append(option)
        ret = gbp_import_srpm(gbp_args)
        if ret == 2:
            log.warning("Importing of patches into packaging branch failed! "
                        "Please import manually (apply and commit to git, "
                        "remove files from packaging dir and spec) in order "
                        "to enable automatic patch generation.")
        elif ret:
            raise GbsError("Failed to import %s" % path)
    else:
        # Importing an upstream tarball.
        gbp_args.append('--merge' if args.merge else '--no-merge')
        if gbp_import_orig(gbp_args):
            raise GbsError('Failed to import %s' % path)

    log.info('done.')