def export_sources(repo, commit, export_dir, spec, args, create_tarball=True):
    """Export packaging files using git-buildpackage.

    Runs gbp export with arguments built by create_gbp_export_args();
    a return code of 2 from gbp means tarball/patch generation failed
    for a non-native package, which gets a detailed hint logged before
    the generic failure is raised.

    :param repo: git repository object to export from
    :param commit: commit-ish to export
    :param export_dir: destination directory for exported files
    :param spec: spec file driving the export
    :param args: parsed command-line arguments
    :param create_tarball: whether gbp should generate the upstream tarball
    :raises GbsError: when gbp export fails or the repository is broken
    """
    # Scratch directory for gbp; cleaned up by utils.Temp.
    tmp = utils.Temp(prefix='gbp_',
                     dirn=configmgr.get('tmpdir', 'general'),
                     directory=True)
    gbp_args = create_gbp_export_args(repo, commit, export_dir, tmp.path,
                                      spec, args,
                                      create_tarball=create_tarball)
    try:
        ret = gbp_build(gbp_args)
        if ret == 2 and not is_native_pkg(repo, args):
            # Return code 2: upstream tarball / patch generation failed.
            # Give the user actionable hints before raising below.
            log.error("Generating upstream tarball and/or generating patches "
                      "failed. GBS tried this as you have upstream branch in "
                      "you git tree. Fix the problem by either:\n"
                      " 1. Update your upstream branch and/or fix the spec "
                      "file. Also, check the upstream tag format.\n"
                      " 2. Remove or rename the upstream branch (change the "
                      "package to native)\n"
                      "See https://source.tizen.org/documentation/reference/"
                      "git-build-system/upstream-package for more details.")
        if ret:
            raise GbsError("Failed to export packaging files from git tree")
    except GitRepositoryError as excobj:  # modernized from py2 comma syntax
        raise GbsError("Repository error: %s" % excobj)
def main(args):
    """gbs submit entry point.

    Validates that the user is not submitting from an orphan-devel
    branch, collects the (mandatory) tag message, and opens the git
    repository at args.gitdir.

    :param args: parsed command-line arguments
    :raises GbsError: on devel-branch submit, missing message, or git errors
    """
    workdir = args.gitdir

    # Refuse to submit directly from the development branch of an
    # orphan-packaging layout; changes must be exported first.
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging and args.commit == 'HEAD':
        log.error(
            "You seem to be submitting a development branch of an "
            "(orphan) packaging branch. Please export your changes to the "
            "packaging branch with 'gbs devel export' and submit from there.")
        raise GbsError("Refusing to submit from devel branch")

    # A tag message is mandatory: take it from -m or prompt via editor.
    message = args.msg
    if message is None:
        message = get_message()
    if not message:
        raise GbsError("tag message is required")

    try:
        repo = RpmGitRepository(workdir)
        commit = repo.rev_parse(args.commit)
        current_branch = repo.get_branch()
    except GitRepositoryError as err:  # modernized from py2 comma syntax
        raise GbsError(str(err))
def get_binary_name_from_git(args, package_dirs):
    """Get binary rpm names from the specified git packages.

    For each package directory, parses every spec file (main + rest)
    at the selected commit and collects the package names.

    :param args: parsed command-line arguments (commit / include_all)
    :param package_dirs: iterable of git package directories
    :returns: list of binary package names parsed from the spec files
    :raises GbsError: when a spec cannot be checked out or parsed
    """
    binary_list = []
    packaging_dir = get_packaging_dir(args)
    # Resolve which revision the specs are read from.
    if args.commit:
        commit = args.commit
    elif args.include_all:
        commit = 'WC.UNTRACKED'
    else:
        commit = 'HEAD'
    for package_dir in package_dirs:
        main_spec, rest_specs = guess_spec(package_dir, packaging_dir,
                                           None, commit)
        rest_specs.append(main_spec)
        for spec in rest_specs:
            if args.include_all:
                # Work copy: read the spec straight from disk.
                spec_to_parse = os.path.join(package_dir, spec)
            else:
                # Committed revision: materialize the spec content
                # into a temp file for the rpm parser.
                content = show_file_from_rev(package_dir, spec, commit)
                if content is None:
                    raise GbsError('failed to checkout %s from commit: %s' %
                                   (spec, commit))
                tmp_spec = Temp(content=content)
                spec_to_parse = tmp_spec.path
            try:
                spec = rpm.SpecFile(spec_to_parse)
            except GbpError as err:  # modernized from py2 comma syntax
                raise GbsError('%s' % err)
            binary_list.append(spec.name)
    # Fix: the collected names were never returned, although callers
    # (prepare_depanneur_opts) join the return value into --binary-list.
    return binary_list
def main(args):
    """gbs pull entry point."""
    # The upstream branch may come from CLI or gbs configuration.
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')

    # Base git-buildpackage command line; argv[0] is a placeholder.
    gbp_args = ['dummy argv[0]',
                '--color-scheme=magenta:green:yellow:red',
                '--pristine-tar',
                '--upstream-branch=%s' % upstream_branch,
                '--packaging-branch=master']

    # Append user-selected options in their canonical order.
    optional = []
    if args.depth:
        optional.append('--depth=%s' % args.depth)
    if args.force:
        optional.append('--force=clean')
    if args.all:
        optional.append('--all')
    if args.debug:
        optional.append('--verbose')
    gbp_args.extend(optional)

    # Fetch and update the tracked branches.
    log.info('updating from remote')
    status = do_pull(gbp_args)
    if status == 2:
        raise GbsError('Failed to update some of the branches!')
    elif status:
        raise GbsError('Update failed!')
    log.info('finished')
def main(args):
    """gbs chroot entry point.

    Chroots into an existing build root via sudo, copying the host's
    resolv.conf so networking works inside.

    :param args: parsed command-line arguments (buildroot, root)
    :raises GbsError: when the build root is not ready or chroot fails
    """
    build_root = args.buildroot
    # A 'not-ready' marker means a build is still initializing this root.
    running_lock = '%s/not-ready' % build_root
    if os.path.exists(running_lock):
        raise GbsError('build root %s is not ready' % build_root)
    log.info('chroot %s' % build_root)
    # NOTE(review): user names appear redacted ('******') in this source;
    # the original presumably selects a normal vs. root user here.
    user = '******'
    if args.root:
        user = '******'
    cmd = ['sudo', 'chroot', build_root, 'su', user]
    # Best effort: make DNS work inside the chroot.
    try:
        subprocess.call(['sudo', 'cp', '/etc/resolv.conf',
                         build_root + '/etc/resolv.conf'])
    except OSError:
        log.warning('failed to setup /etc/resolv.conf')
    try:
        # Mutates the process environment in place (PS1 marks the shell
        # as a tizen build environment prompt).
        build_env = os.environ
        build_env['PS1'] = "(tizen-build-env)@\h \W]\$ "
        subprocess.call(cmd, env=build_env)
    except OSError as err:  # modernized from py2 comma syntax
        raise GbsError('failed to chroot to %s: %s' % (build_root, err))
def main(args):
    """gbs remotebuild entry point.

    Validates OBS configuration and mutually-exclusive options, then
    opens the git repository at args.gitdir.

    :param args: parsed command-line arguments
    :raises GbsError: on missing/incomplete OBS config or git errors
    :raises Usage: when --commit and --include-all are combined
    """
    obsconf = get_profile(args).obs
    if not obsconf or not obsconf.url:
        raise GbsError('no obs api found, please add it to gbs conf '
                       'and try again')
    apiurl = obsconf.url
    # Anonymous remote builds are not supported by OBS.
    if not apiurl.user:
        raise GbsError('empty user is not allowed for remotebuild, please '
                       'add user/passwd to gbs conf, and try again')
    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '
                    '--include-all')
    obs_repo = args.repository
    obs_arch = args.arch
    # Fetching a build log needs both target repository and arch.
    if args.buildlog and None in (obs_repo, obs_arch):
        raise GbsError('please specify arch(-A) and repository(-R)')
    try:
        repo = RpmGitRepository(args.gitdir)
    except GitRepositoryError as err:  # modernized from py2 comma syntax
        raise GbsError(str(err))
def prepare_depanneur_opts(args):
    """Generate extra options for depanneur.

    Translates parsed gbs arguments into depanneur command-line flags,
    expanding package lists into binary package names via git.

    :param args: parsed command-line arguments
    :returns: list of depanneur option strings
    :raises GbsError: when a referenced package list file does not exist
    """
    cmd_opts = []
    # Simple pass-through flags.
    if args.exclude:
        cmd_opts += ['--exclude=%s' % i for i in args.exclude.split(',')]
    if args.exclude_from_file:
        cmd_opts += ['--exclude-from-file=%s' % args.exclude_from_file]
    if args.overwrite:
        cmd_opts += ['--overwrite']
    if args.clean_once:
        cmd_opts += ['--clean-once']
    if args.clean_repos:
        cmd_opts += ['--clean-repos']
    if args.debug:
        cmd_opts += ['--debug']
    if args.incremental:
        cmd_opts += ['--incremental']
    if args.no_configure:
        cmd_opts += ['--no-configure']
    if args.keep_packs:
        cmd_opts += ['--keep-packs']
    if args.baselibs:
        cmd_opts += ['--baselibs']
    #
    # Expand --package-list / --package-from-file into binary names,
    # accumulated onto args.binary_list.
    if args.package_list:
        package_list = args.package_list.split(',')
        binary_list = get_binary_name_from_git(args, package_list)
        args.binary_list += ',' + ','.join(binary_list)
    if args.package_from_file:
        if not os.path.exists(args.package_from_file):
            raise GbsError('specified package list file %s not exists' %
                           args.package_from_file)
        with open(args.package_from_file) as fobj:
            pkglist = [pkg.strip() for pkg in fobj.readlines() if pkg.strip()]
        binary_list = get_binary_name_from_git(args, pkglist)
        args.binary_list += ',' + ','.join(binary_list)
    if args.binary_list:
        # Fix: drop empty entries. The accumulations above prepend ','
        # to an initially-empty args.binary_list, which previously
        # produced '--binary-list=,pkg...' with a spurious empty name.
        blist = [i.strip() for i in args.binary_list.split(',') if i.strip()]
        cmd_opts += ['--binary-list=%s' % ','.join(blist)]
    if args.binary_from_file:
        if not os.path.exists(args.binary_from_file):
            raise GbsError('specified binary list file %s not exists' %
                           args.binary_from_file)
        cmd_opts += ['--binary-from-file=%s' % args.binary_from_file]
    if args.deps:
        cmd_opts += ['--deps']
    if args.rdeps:
        cmd_opts += ['--rdeps']
    cmd_opts += ['--threads=%s' % args.threads]
    cmd_opts += ['--packaging-dir=%s' % get_packaging_dir(args)]
    return cmd_opts
def guess_spec(git_path, packaging_dir, given_spec, commit_id='WC.UNTRACKED'):
    """Guess spec file from project name if not given.

    Returns a two-element list [main_spec, other_specs], both relative
    to git_path. Depending on commit_id, specs are located either in
    the working copy ('WC.UNTRACKED') or in a committed git revision.
    Raises GbsError when the given spec is missing or no spec exists.
    """
    git_path = os.path.abspath(git_path)
    if commit_id == 'WC.UNTRACKED':
        # Working-copy mode: resolve a symlinked packaging dir and use
        # plain filesystem existence/glob checks.
        if os.path.islink(packaging_dir):
            packaging_dir = os.readlink(packaging_dir)
        check = lambda fname, dir_only=False: os.path.exists(
            os.path.join(git_path, fname))
        glob_ = lambda pattern: [
            name.replace(git_path + '/', '')
            for name in reversed(glob.glob(os.path.join(git_path, pattern)))]
        msg = 'No such spec file %s'
    else:
        # Revision mode: ask git whether <commit>:<packaging_dir> is a
        # tree; if not, the packaging dir is itself a symlink whose
        # target is the blob content returned by 'git show'.
        git_object = commit_id + ':' + packaging_dir
        cmd = ['git', 'show', git_object]
        try:
            with Workdir(git_path):
                outp = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        except (subprocess.CalledProcessError, OSError):
            raise GbsError("failed to run %s in %s" % (' '.join(cmd), git_path))
        output = outp.communicate()[0]
        if not output.startswith('tree %s' % git_object):
            # packaging_dir is a symlink
            # NOTE(review): output may carry a trailing newline from git
            # show — confirm downstream path handling tolerates it.
            packaging_dir = output
        # Existence/glob checks are delegated to rev-aware helpers.
        check = lambda fname, dir_only=False: file_exists_in_rev(
            git_path, fname, commit_id, dir_only=dir_only)
        glob_ = lambda pattern: glob_in_rev(git_path, pattern, commit_id)
        msg = "No such spec file %%s in %s" % commit_id
    spec = None
    if given_spec:
        # Explicit spec: just verify it exists in the chosen mode.
        spec = os.path.join(packaging_dir, given_spec)
        if not check(spec):
            raise GbsError(msg % spec)
    specs = glob_(os.path.join(packaging_dir, '*.spec'))
    if not specs:
        raise GbsError("can't find any spec file under packaging dir: "
                       "%s" % packaging_dir)
    project_name = os.path.basename(git_path)
    if not spec:
        # Prefer <project>.spec when present, otherwise the first glob hit.
        spec = os.path.join(packaging_dir, '%s.spec' % project_name)
        spec = spec if spec in specs else specs[0]
    # Main spec is removed from the list; the rest are returned alongside.
    specs.remove(spec)
    return [spec, specs]
def main(args):
    """main entrance for createimage"""
    # mic is an optional runtime dependency; probe for it up front.
    try:
        import mic
    except ImportError:
        raise GbsError('please install mic manually first')

    ks_path = args.ks_file
    if not os.path.exists(ks_path):
        raise GbsError('specified ks file %s does not exist' % ks_path)

    log.info('creating image for ks file: %s' % ks_path)
    if createimage(args, ks_path) != 0:
        raise GbsError('failed to create image')
    log.info('Done')
def deal_with_one_repo(repo):
    """Deal with one repo url.

    Closure capturing ``self`` from an enclosing method (not visible
    here). Classifies ``repo`` and fetches build configuration and
    build metadata accordingly.
    """
    if self.is_standard_repo(repo):
        # Standard repo layout: record it and try the new-style
        # build.conf fetch first.
        self.standardrepos.append(repo)
        self._fetch_build_conf_new(repo)
        if self.buildconf:
            return
        # Fall back: look four levels up for the old-style build meta.
        latest_repo_url = repo.pathjoin('../../../../')
        if latest_repo_url.find('../') >= 0:
            # Could not climb that far — give up on the fallback.
            return
        meta = self._fetch_build_meta(latest_repo_url)
        if meta:
            self._fetch_build_conf(latest_repo_url, meta)
        return
    # Check if it's repo with builddata/build.xml exist
    meta = self._fetch_build_meta(repo)
    if meta:
        # Generate repos from build.xml
        self.build_repos_from_buildmeta(repo, meta)
        self._fetch_build_conf(repo, meta)
    else:
        # Check if it's repo with build.xml exist
        buildxml_url = repo.pathjoin('build.xml')
        if self.fetch(buildxml_url):
            # A bare build.xml indicates the new repo layout, which is
            # not usable directly — ask the user for the real RPM repo.
            raise GbsError('Maybe you are using new designed repo, '
                           'and please specify real RPM repo with '
                           'repodata under it.')
def __init__(self, suffix='', prefix='tmp', dirn=None, directory=False,
             content=None):
    """
    Create file or directory using tempfile.mk[sd]temp. If content
    is provided write it to the file.

    :param suffix: suffix for the temp name
    :param prefix: prefix for the temp name (may carry a directory part)
    :param dirn: parent directory for the temp entry
    :param directory: create a directory instead of a file
    :param content: optional content written into the created file
    :raises GbsError: when the filesystem operations fail
    """
    self.directory = directory
    self.path = None
    try:
        # Make sure the directory component of the target exists
        # (prefix may itself contain path separators).
        if dirn:
            target_dir = os.path.abspath(os.path.join(dirn, prefix))
        else:
            target_dir = os.path.abspath(prefix)
        target_dir = os.path.dirname(target_dir)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        if directory:
            path = tempfile.mkdtemp(suffix, prefix, dirn)
        else:
            (fds, path) = tempfile.mkstemp(suffix, prefix, dirn)
            os.close(fds)
        if content:
            # Fix: open() instead of the py2-only file() builtin.
            with open(path, 'w+') as fobj:
                fobj.write(content)
    except OSError as err:  # modernized from py2 comma syntax
        raise GbsError("Failed to create dir or file on %s: %s" %
                       (target_dir, str(err)))
    # Fix: publish the created path — callers read tmp.path (see
    # export_sources), but the visible code left it at None.
    self.path = path
def main(args):
    """gbs devel entry point.

    Opens the git repository at args.gitdir, wrapping git errors in
    GbsError.

    :param args: parsed command-line arguments
    :raises GbsError: when args.gitdir is not a valid git repository
    """
    try:
        repo = RpmGitRepository(args.gitdir)
    except GitRepositoryError as err:  # modernized from py2 comma syntax
        raise GbsError(str(err))
def main(args):
    """gbs clone entry point."""
    # Branch names may come from the command line or gbs configuration.
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    packaging_branch = configmgr.get_arg_conf(args, 'packaging_branch')

    # Base git-buildpackage command line; argv[0] is a placeholder.
    gbp_args = ['dummy argv[0]',
                '--color-scheme=magenta:green:yellow:red',
                '--pristine-tar',
                '--upstream-branch=%s' % upstream_branch,
                '--packaging-branch=%s' % packaging_branch]

    # Optional flags, appended in their canonical order when enabled.
    for enabled, extra in ((args.all, '--all'),
                           (args.depth, '--depth=%s' % args.depth),
                           (args.debug, '--verbose')):
        if enabled:
            gbp_args.append(extra)

    gbp_args.append(args.uri)
    if args.directory:
        gbp_args.append(args.directory)

    # Clone via gbp.
    log.info('cloning %s' % args.uri)
    if do_clone(gbp_args):
        raise GbsError('Failed to clone %s' % args.uri)
    log.info('finished')
def main(args):
    """gbs export entry point.

    Validates mutually-exclusive options and opens the git repository
    at args.gitdir.

    :param args: parsed command-line arguments
    :raises Usage: when --commit and --include-all are combined
    :raises GbsError: when args.gitdir is not a valid git repository
    """
    if args.commit and args.include_all:
        raise Usage("--commit can't be specified together with --include-all")
    workdir = args.gitdir
    try:
        repo = RpmGitRepository(workdir)
    except GitRepositoryError as err:  # modernized from py2 comma syntax
        raise GbsError(str(err))
def mkdir_p(path):
    """Create directory as in mkdir -p (existing directories are fine)."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # An already-existing path is not an error; anything else is.
        if exc.errno != errno.EEXIST:
            raise GbsError('failed to create %s: %s' % (path, exc.strerror))
def git_status_checker(git, opts):
    """
    Perform git repository status check.
    Warn user if repository is not clean or untracked files are found.

    :param git: git repository object
    :param opts: parsed options (opts.commit is validated if given)
    :raises GbsError: when git operations fail
    """
    try:
        # Validate the requested commit-ish early, before exporting.
        if opts.commit:
            git.rev_parse(opts.commit)
        is_clean = git.is_clean()[0]
        status = git.status()
    except (GbpError, GitRepositoryError) as err:  # modernized comma syntax
        raise GbsError(str(err))
def glob_in_rev(git_path, pattern, commit_id):
    """Glob pattern in given revision.

    Lists the tree of the pattern's directory at commit_id via
    'git ls-tree --name-only'.

    :param git_path: path of the git repository
    :param pattern: glob pattern whose directory part is listed
    :param commit_id: revision to inspect
    :raises GbsError: when git cannot be run
    """
    path = os.path.dirname(pattern)
    args = ['git', 'ls-tree', '--name-only', commit_id, '%s/' % path]
    try:
        with Workdir(git_path):
            output = subprocess.Popen(args,
                                      stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError) as err:  # was comma syntax
        raise GbsError('failed to glob %s in %s:%s' %
                       (pattern, commit_id, str(err)))
    # NOTE(review): the visible code captures 'output' but the matching
    # logic appears truncated in this view — confirm against the full file.
def __call__(self, parser, namespace, value, option_string=None):
    """Validate the package directory argument and load its local config."""
    workdir = value
    if not os.path.exists(workdir):
        raise GbsError("specified package dir %s does not exist" % workdir)
    try:
        # If the path is inside a git repo, use the repo root instead.
        workdir = RpmGitRepository(value).path
    except GitRepositoryError:
        # Not a git repository — keep the plain directory path.
        pass
    read_localconf(workdir)
    setattr(namespace, self.dest, value)
def edit_file(target_fname, initial_content=None):
    """
    Create temporary copy of target_fname with initial_content and
    launch an editor to edit it. Update content back if user changed it.

    :param target_fname: file to update
    :param initial_content: initial text presented in the editor
    :returns: True if content has been changed, False otherwise
    :raises GbsError: when the target file cannot be written
    """
    changes = edit(initial_content)
    if not changes:
        return False
    try:
        with open(target_fname, 'w') as fobj:
            fobj.write(changes)
    except IOError as err:  # modernized from py2 comma syntax
        raise GbsError("Can't update %s: %s" % (target_fname, str(err)))
    # Fix: the docstring promises True on change, but the success path
    # previously fell through and returned None.
    return True
def file_exists_in_rev(git_path, relative_path, commit_id, dir_only=False):
    """Check if file exists in given given revision.

    Uses 'git ls-tree' on the path at commit_id; with dir_only, only
    directory entries are considered (ls-tree -d).

    :param git_path: path of the git repository
    :param relative_path: path to test, relative to the repo root
    :param commit_id: revision to inspect
    :param dir_only: restrict the check to directories
    :raises GbsError: when git cannot be run
    """
    git_opts = ['--name-only']
    if dir_only:
        git_opts += ['-d']
    args = ['git', 'ls-tree', commit_id, relative_path]
    args.extend(git_opts)
    try:
        with Workdir(git_path):
            output = subprocess.Popen(args,
                                      stdout=subprocess.PIPE).communicate()[0]
    except (subprocess.CalledProcessError, OSError) as err:  # was comma syntax
        raise GbsError('failed to check existence of %s in %s:%s' %
                       (relative_path, commit_id, str(err)))
    # NOTE(review): the visible code captures 'output' but the existence
    # test appears truncated in this view — confirm against the full file.
def prepare_depanneur_opts(args):
    """Generate extra options for depanneur.

    Translates parsed gbs arguments into depanneur command-line flags,
    including the KVM virtual-machine build options.

    :param args: parsed command-line arguments
    :returns: list of depanneur option strings
    :raises GbsError: on missing list files, missing kvm images, or
        insufficient loop devices for the requested thread count
    """
    cmd_opts = []
    # Simple pass-through flags.
    if args.exclude:
        cmd_opts += ['--exclude=%s' % i for i in args.exclude.split(',')]
    if args.exclude_from_file:
        cmd_opts += ['--exclude-from-file=%s' % args.exclude_from_file]
    if args.overwrite:
        cmd_opts += ['--overwrite']
    if args.clean_once:
        cmd_opts += ['--clean-once']
    if args.clean_repos:
        cmd_opts += ['--clean-repos']
    if args.debug:
        cmd_opts += ['--debug']
    if args.incremental:
        cmd_opts += ['--incremental']
    if args.no_configure:
        cmd_opts += ['--no-configure']
    if args.keep_packs:
        cmd_opts += ['--keep-packs']
    if args.use_higher_deps:
        cmd_opts += ['--use-higher-deps']
    if args.not_export_source:
        cmd_opts += ['--not-export-source']
    if args.baselibs:
        cmd_opts += ['--baselibs']
    if args.skip_srcrpm:
        cmd_opts += ['--skip-srcrpm']
    if args.fail_fast:
        cmd_opts += ['--fail-fast']
    if args.keepgoing:
        cmd_opts += ['--keepgoing=%s' % args.keepgoing]
    #
    # Expand --package-list / --package-from-file into binary names,
    # accumulated onto args.binary_list.
    if args.package_list:
        package_list = args.package_list.split(',')
        binary_list = get_binary_name_from_git(args, package_list)
        args.binary_list += ',' + ','.join(binary_list)
    if args.package_from_file:
        if not os.path.exists(args.package_from_file):
            raise GbsError('specified package list file %s not exists' %
                           args.package_from_file)
        with open(args.package_from_file) as fobj:
            pkglist = [pkg.strip() for pkg in fobj.readlines() if pkg.strip()]
        binary_list = get_binary_name_from_git(args, pkglist)
        args.binary_list += ',' + ','.join(binary_list)
    if args.binary_list:
        # Fix: drop empty entries. The accumulations above prepend ','
        # to an initially-empty args.binary_list, which previously
        # produced '--binary-list=,pkg...' with a spurious empty name.
        blist = [i.strip() for i in args.binary_list.split(',') if i.strip()]
        cmd_opts += ['--binary-list=%s' % ','.join(blist)]
    if args.binary_from_file:
        if not os.path.exists(args.binary_from_file):
            raise GbsError('specified binary list file %s not exists' %
                           args.binary_from_file)
        cmd_opts += ['--binary-from-file=%s' % args.binary_from_file]
    if args.deps:
        cmd_opts += ['--deps']
    if args.rdeps:
        cmd_opts += ['--rdeps']
    # KVM builds always start from a clean VM with explicit resources.
    if args.kvm:
        cmd_opts += ['--clean']
        cmd_opts += ['--vm-type=kvm']
        cmd_opts += ['--vm-memory=%s' % args.vm_memory]
        cmd_opts += ['--vm-disk=%s' % args.vm_disk]
        cmd_opts += ['--vm-swap=%s' % args.vm_swap]
        cmd_opts += ['--vm-diskfilesystem=%s' % args.vm_diskfilesystem]
        if not os.path.exists(args.vm_initrd):
            raise GbsError("Check file to exists vm-initrd")
        cmd_opts += ['--vm-initrd=%s' % args.vm_initrd]
        if not os.path.exists(args.vm_kernel):
            raise GbsError("Check file to exists vm-kernel")
        cmd_opts += ['--vm-kernel=%s' % args.vm_kernel]
    if args.icecream > 0:
        cmd_opts += ['--icecream=%s' % args.icecream]
    cmd_opts += ['--threads=%s' % args.threads]
    if args.kvm:
        # Each KVM build thread needs its own loop device.
        loopdev = len([name for name in os.listdir('/dev')
                       if bool(re.search("loop[0-9]", name))])
        if not args.threads < loopdev:
            raise GbsError('When using the kvm, loop device should be larger than the threads option.')
    cmd_opts += ['--packaging-dir=%s' % get_packaging_dir(args)]
    return cmd_opts
# Fragment: continuation of a commit-selection chain whose 'if' branch
# is outside this view.
elif args.include_all:
    commit = 'WC.UNTRACKED'
else:
    commit = 'HEAD'
# Locate the main spec file for the chosen revision.
relative_spec = utils.guess_spec(workdir, packaging_dir, args.spec, commit)[0]
if args.include_all:
    # include_all means to use work copy,
    # otherwise use the reversion in git history
    spec_to_parse = os.path.join(workdir, relative_spec)
else:
    # Materialize the committed spec content into a temp file.
    content = utils.show_file_from_rev(workdir, relative_spec, commit)
    if content is None:
        raise GbsError('failed to checkout %s from commit: %s' %
                       (relative_spec, commit))
    tmp_spec = utils.Temp(content=content)
    spec_to_parse = tmp_spec.path
# get 'name' and 'version' from spec file
try:
    spec = gbp.rpm.SpecFile(spec_to_parse)
except GbpError, err:
    raise GbsError('%s' % err)
if not spec.name:
    raise GbsError("can't get correct name.")
package = spec.name
base_prj = None
# Fragment: changelog-file selection; code that populates
# changes_file_list (and presumably fn_changes for the non-empty case)
# is outside this view — TODO confirm against the full file.
if len(changes_file_list) > 1:
    # Multiple candidates: warn and use the first one.
    log.warning("Found more than one changes files, %s is taken "
                % (changes_file_list[0]))
else:
    fn_changes = 'CHANGES'
# Build the gbp rpm-ch command line; argv[0] is a placeholder.
gbp_args = ['dummy argv[0]',
            '--color-scheme=magenta:green:yellow:red',
            '--ignore-branch',
            '--changelog-revision=%(tagname)s',
            '--spawn-editor=always',
            '--git-author',
            '--packaging-dir=%s' % packaging_dir,
            '--spec-file=%s' % specfile,
            '--changelog-file=%s' % fn_changes,
            '--editor-cmd=%s' % get_editor_cmd(),
            ]
if args.since:
    gbp_args.append('--since=%s' % args.since)
if args.all:
    gbp_args.append('--all')
if args.message:
    gbp_args.append('--message=%s' % args.message)
ret = gbp_rpm_ch(gbp_args)
if ret:
    raise GbsError("Change log has not been updated")
else:
    log.info("Change log has been updated.")
# Fragment: devel action dispatch; 'repo', 'tmp', 'specfile' and 'args'
# are defined outside this view.
try:
    current_branch = repo.get_branch()
except GitRepositoryError:
    # Detached HEAD (or similar): no branch name to record.
    current_branch = None
gbp_args = compose_gbp_args(repo, tmp.path, specfile, args)
# Run gbp command
if args.action == 'start':
    ret = gbp_pq_rpm(gbp_args + ['import'])
    if not ret:
        # Remember which packaging branch the devel branch came from.
        update_local_conf(repo,
                          {'orphan-devel':
                           {'packaging_branch': current_branch}})
elif args.action == 'export':
    log.info('Exporting patches to packaging branch')
    ret = gbp_pq_rpm(gbp_args + ['export'])
elif args.action == 'switch':
    ret = gbp_pq_rpm(gbp_args + ['switch'])
elif args.action == 'drop':
    ret = gbp_pq_rpm(gbp_args + ['drop'])
elif args.action == 'convert':
    log.info('Converting package to orphan-packaging git layout')
    ret = gbp_pq_rpm(gbp_args + ['convert'])
    if not ret:
        log.info("You can now create the development branch with "
                 "'gbs devel start'")
if ret:
    raise GbsError('Action failed!')
# Fragment: remote/target resolution for submit.
# NOTE(review): 'upstream' is None here, which makes the 'if upstream'
# branches dead in this view — in the full file upstream is presumably
# assigned from the repo's tracking branch before this point; confirm.
upstream = None
if not args.remote:
    if upstream:
        args.remote = upstream.split('/')[0]
    else:
        log.info("no upstream set for the current branch, using "
                 "'origin' as the remote server")
        args.remote = 'origin'
if args.tag:
    # Explicit tag must follow submit/$target/$date.$time.
    tagname = args.tag
    tag_re = re.compile(r'^submit/\S+/\d{8}\.\d{6}$')
    if not tag_re.match(tagname):
        raise GbsError("invalid tag %s, valid tag format is "
                       "submit/$target/$date.$time. For example:\n "
                       "submit/trunk/20130128.022439 " % tagname)
else:
    # Derive the target from --target, the upstream branch, or the
    # current local branch name as a last resort.
    target = args.target
    if not target:
        if upstream and upstream.startswith(args.remote):
            target = re.sub('^%s/' % args.remote, '', upstream)
        else:
            log.warning(
                "Can't find upstream branch for current branch "
                "%s. Gbs uses the local branch name as the target. "
                "Please consider to use git-branch --set-upstream "
                "to set upstream remote branch." % current_branch)
            target = current_branch
    # Historical naming: the master branch submits to 'trunk'.
    if target == 'master':
        target = 'trunk'
# Fragment: repository URL validation; 'repos', 'cmd_opts', 'cachedir',
# 'arch' and 'TMPDIR' are defined outside this view.
for repo in args.repositories:
    try:
        if not urlparse.urlsplit(repo).scheme:
            # No URL scheme: treat as a local path, which must exist.
            if os.path.exists(repo):
                repo = os.path.abspath(os.path.expanduser(repo))
            else:
                log.warning('local repo: %s does not exist' % repo)
                continue
        opt_repo = SafeURL(repo)
    except ValueError, err:
        log.warning('Invalid repo %s: %s' % (repo, str(err)))
    else:
        repos.append(opt_repo)
if not repos:
    raise GbsError('No package repository specified.')
# Resolve per-arch repo URLs from the validated list.
repoparser = RepoParser(repos, cachedir)
repourls = repoparser.get_repos_by_arch(arch)
if not repourls:
    raise GbsError('no available repositories found for arch %s under the '
                   'following repos:\n%s' % (arch, '\n'.join(repos)))
cmd_opts += [('--repository=%s' % url.full) for url in repourls]
# Derive the build-conf file name from the profile name.
profile = get_profile(args)
profile_name = formalize_build_conf(profile.name.replace('profile.', '', 1))
distconf = os.path.join(TMPDIR, '%s.conf' % profile_name)
if args.dist:
    buildconf = args.dist
def main(args):
    """gbs build entry point."""
    # Per-user temp dir keeps concurrent users from clobbering each other.
    global TMPDIR
    TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'), '%s-gbs' % USERID)
    # Mutually exclusive option checks.
    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '
                    '--clean or --clean-once')
    workdir = args.gitdir
    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        # Building outside a git tree is allowed unless --spec was given.
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")
    read_localconf(workdir)
    # Build arch defaults to the host arch and must be supported.
    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)
    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' %
                       (buildarch, ','.join(SUPPORTEDARCHS)))
    profile = get_profile(args)
    # Build root resolution order: CLI > env > profile > global config.
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    # Profile name is sanitized so it is safe in a filesystem path.
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    # Profile-level package exclusions are merged into --exclude.
    if profile.exclude_packages:
        log.info('the following packages have been excluded build from gbs '
                 'config:\n %s' % '\n '.join(profile.exclude_packages))
        if args.exclude:
            args.exclude += ',' + ','.join(profile.exclude_packages)
        else:
            args.exclude = ','.join(profile.exclude_packages)
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)
    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']
    cmd += ['--arch=%s' % buildarch]
    if args.clean:
        cmd += ['--clean']
    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']
    cmd += ['--path=%s' % workdir]
    if args.ccache:
        cmd += ['--ccache']
    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]
    # Cross-arch builds are wrapped with a personality changer (setarch).
    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd
    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)
    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]
    if args.upstream_branch:
        cmd += ['--upstream-branch=%s' % args.upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']
    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]
    # Determine if we're on devel branch
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging:
        cmd += ['--spec-commit=%s' % orphan_packaging]
    # NOTE(review): command is run through a shell via os.system; the
    # pieces come from args/config, so quoting issues are possible.
    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('some packages failed to be built')
    else:
        log.info('Done')
# Fragment: export preparation; the matching 'if' of this 'else' and
# 'workdir'/'repo'/'args' are defined outside this view.
else:
    commit = 'HEAD'
# In an orphan-packaging layout the spec lives on the packaging branch,
# not on the commit being exported.
orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
spec_commit_id = orphan_packaging if orphan_packaging else commit
packaging_dir = get_packaging_dir(args)
main_spec, rest_specs = utils.guess_spec(workdir, packaging_dir,
                                         args.spec, spec_commit_id)
# Output directory defaults to the packaging dir and must be writable.
if args.outdir:
    outdir = args.outdir
else:
    outdir = os.path.join(workdir, packaging_dir)
outdir = os.path.abspath(outdir)
if os.path.exists(outdir):
    if not os.access(outdir, os.W_OK|os.X_OK):
        raise GbsError('no write permission to outdir: %s' % outdir)
else:
    mkdir_p(outdir)
# Export into a temp dir first; moved to outdir later (not in view).
tmpdir = configmgr.get('tmpdir', 'general')
tempd = utils.Temp(prefix=os.path.join(tmpdir, '.gbs_export_'),
                   directory=True)
export_dir = tempd.path
tracked_branches = track_export_branches(repo, args)
with utils.Workdir(workdir):
    export_sources(repo, commit, export_dir, main_spec, args)
if rest_specs:
    # backup updated spec file
def main(args):
    """gbs import entry point."""
    # Author identity overrides are passed to git via the environment.
    if args.author_name:
        os.environ["GIT_AUTHOR_NAME"] = args.author_name
    if args.author_email:
        os.environ["GIT_AUTHOR_EMAIL"] = args.author_email
    path = args.path
    tmp = Temp(prefix='gbp_',
               dirn=configmgr.get('tmpdir', 'general'),
               directory=True)
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    upstream_tag = configmgr.get_arg_conf(args, 'upstream_tag')
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)
    # Common gbp parameters shared by both import paths below.
    params = ["argv[0] placeholder",
              "--color-scheme=magenta:green:yellow:red",
              "--packaging-dir=%s" % get_packaging_dir(args),
              "--upstream-branch=%s" % upstream_branch, path,
              "--upstream-tag=%s" % upstream_tag,
              "--tmp-dir=%s" % tmp.path,
              ]
    if args.debug:
        params.append("--verbose")
    # pristine-tar only when available and not explicitly disabled.
    if not args.no_pristine_tar and os.path.exists("/usr/bin/pristine-tar"):
        params.append("--pristine-tar")
    if args.filter:
        params += [('--filter=%s' % f) for f in args.filter]
    if args.upstream_vcs_tag:
        params.append('--upstream-vcs-tag=%s' % args.upstream_vcs_tag)
    if path.endswith('.src.rpm') or path.endswith('.spec'):
        # srpm/spec import path (gbp import-srpm).
        params.append("--create-missing-branches")
        if args.allow_same_version:
            params.append("--allow-same-version")
        if args.native:
            params.append("--native")
        if args.orphan_packaging:
            params.append("--orphan-packaging")
        if args.no_patch_import:
            params.append("--no-patch-import")
        ret = gbp_import_srpm(params)
        if ret == 2:
            # Patch import failure is non-fatal: the user can fix it up.
            log.warning("Importing of patches into packaging branch failed! "
                        "Please import manually (apply and commit to git, "
                        "remove files from packaging dir and spec) in order "
                        "to enable automatic patch generation.")
        elif ret:
            raise GbsError("Failed to import %s" % path)
    else:
        # upstream tarball import path (gbp import-orig).
        if args.merge:
            params.append('--merge')
        else:
            params.append('--no-merge')
        if gbp_import_orig(params):
            raise GbsError('Failed to import %s' % path)
    log.info('done.')