def main(args):
    """gbs pull entry point."""
    # The branch carrying upstream sources; CLI value overrides config.
    upstream = configmgr.get_arg_conf(args, 'upstream_branch')

    # Base git-buildpackage argument vector (first entry is a placeholder).
    gbp_args = [
        'dummy argv[0]',
        '--color-scheme=magenta:green:yellow:red',
        '--pristine-tar',
        '--upstream-branch=%s' % upstream,
        '--packaging-branch=master',
    ]
    # Optional flags, forwarded only when requested on the command line.
    optional = [
        (args.depth, '--depth=%s' % args.depth),
        (args.force, '--force=clean'),
        (args.all, '--all'),
        (args.debug, '--verbose'),
    ]
    for enabled, flag in optional:
        if enabled:
            gbp_args.append(flag)

    # Fetch and update the tracked branches via gbp.
    log.info('updating from remote')
    ret = do_pull(gbp_args)
    if ret == 2:
        raise GbsError('Failed to update some of the branches!')
    elif ret:
        raise GbsError('Update failed!')
    log.info('finished')
def main(args):
    """gbs clone entry point."""
    # Branch names come from the command line first, then gbs config.
    upstream = configmgr.get_arg_conf(args, 'upstream_branch')
    packaging = configmgr.get_arg_conf(args, 'packaging_branch')

    # Assemble the git-buildpackage command line (argv[0] is a dummy).
    gbp_args = [
        'dummy argv[0]',
        '--color-scheme=magenta:green:yellow:red',
        '--pristine-tar',
        '--upstream-branch=%s' % upstream,
        '--packaging-branch=%s' % packaging,
    ]
    if args.all:
        gbp_args.append('--all')
    if args.depth:
        gbp_args.append('--depth=%s' % args.depth)
    if args.debug:
        gbp_args.append("--verbose")
    # Positional arguments: source URI, then optional target directory.
    gbp_args.append(args.uri)
    if args.directory:
        gbp_args.append(args.directory)

    # Run the clone and surface any failure as a GbsError.
    log.info('cloning %s' % args.uri)
    if do_clone(gbp_args):
        raise GbsError('Failed to clone %s' % args.uri)
    log.info('finished')
def prepare_repos_and_build_conf(args, arch, profile):
    '''generate repos and build conf options for depanneur'''
    # NOTE(review): this chunk is truncated; cmd_opts is filled in the
    # part of the function that is not visible here.
    cmd_opts = []
    # Per-invocation cache directory for downloaded repo data.
    cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
                 directory=True)
    cachedir = cache.path
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    log.info('generate repositories ...')

    # Start from the repos declared in the profile config, unless the
    # user asked to skip them.
    if args.skip_conf_repos:
        repos = []
    else:
        repos = [i.url for i in profile.repos]

    # Add repos given on the command line; local paths (no URL scheme)
    # must exist on disk.
    if args.repositories:
        for repo in args.repositories:
            try:
                if not urlparse.urlsplit(repo).scheme:
                    if os.path.exists(repo):
                        repo = os.path.abspath(os.path.expanduser(repo))
                    else:
                        # NOTE(review): exists() runs before expanduser(),
                        # so '~/...' style local repos are rejected here -
                        # confirm whether that is intended.
                        log.warning('local repo: %s does not exist' % repo)
                        continue
                opt_repo = SafeURL(repo)
            except ValueError, err:
                log.warning('Invalid repo %s: %s' % (repo, str(err)))
            else:
                repos.append(opt_repo)
def main(args): """gbs chroot entry point.""" build_root = args.buildroot running_lock = '%s/not-ready' % build_root if os.path.exists(running_lock): raise GbsError('build root %s is not ready' % build_root) log.info('chroot %s' % build_root) user = '******' if args.root: user = '******' cmd = ['sudo', 'chroot', build_root, 'su', user] try: subprocess.call(['sudo', 'cp', '/etc/resolv.conf', build_root + \ '/etc/resolv.conf']) except OSError: log.warning('failed to setup /etc/resolv.conf') try: build_env = os.environ build_env['PS1'] = "(tizen-build-env)@\h \W]\$ " subprocess.call(cmd, env=build_env) except OSError, err: raise GbsError('failed to chroot to %s: %s' % (build_root, err))
def main(args):
    """gbs clone entry point."""
    # Branch configuration: command line overrides gbs.conf values.
    up_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    pkg_branch = configmgr.get_arg_conf(args, 'packaging_branch')

    cmdline = [
        'dummy argv[0]',  # argv[0] placeholder required by gbp
        '--color-scheme=magenta:green:yellow:red',
        '--pristine-tar',
        '--upstream-branch=%s' % up_branch,
        '--packaging-branch=%s' % pkg_branch,
    ]
    # Forward optional gbp switches in their canonical order.
    for enabled, option in ((args.all, '--all'),
                            (args.depth, '--depth=%s' % args.depth),
                            (args.debug, "--verbose")):
        if enabled:
            cmdline.append(option)
    cmdline.append(args.uri)
    if args.directory:
        cmdline.append(args.directory)

    log.info('cloning %s' % args.uri)
    if do_clone(cmdline):
        raise GbsError('Failed to clone %s' % args.uri)
    log.info('finished')
def export_sources(repo, commit, export_dir, spec, args):
    """
    Export packaging files using git-buildpackage.

    Runs gbp once; if tarball/patch generation fails for a non-native
    package (gbp exit code 2), retries in forced-native mode to build
    one monolithic source archive instead.
    """
    tmp = utils.Temp(prefix='gbp_',
                     dirn=configmgr.get('tmpdir', 'general'),
                     directory=True)
    gbp_args = create_gbp_export_args(repo, commit, export_dir, tmp.path,
                                      spec, args)
    try:
        ret = gbp_build(gbp_args)
        # Exit code 2 signals tarball/patch generation failure.
        if ret == 2 and not is_native_pkg(repo, args):
            # Try falling back to old logic of one monolithic tarball
            log.warning("Generating upstream tarball and/or generating "
                        "patches failed. GBS tried this as you have "
                        "upstream branch in you git tree. This is a new "
                        "mode introduced in GBS v0.10. "
                        "Consider fixing the problem by either:\n"
                        " 1. Update your upstream branch and/or fix the "
                        "spec file. Also, check the upstream tag format.\n"
                        " 2. Remove or rename the upstream branch")
            log.info("Falling back to the old method of generating one "
                     "monolithic source archive")
            gbp_args = create_gbp_export_args(repo, commit, export_dir,
                                              tmp.path, spec, args,
                                              force_native=True)
            ret = gbp_build(gbp_args)
        if ret:
            raise GbsError("Failed to export packaging files from git tree")
    except GitRepositoryError, excobj:
        raise GbsError("Repository error: %s" % excobj)
def main(args):
    """gbs pull entry point."""
    # Resolve the upstream branch from CLI arguments or gbs config.
    branch = configmgr.get_arg_conf(args, 'upstream_branch')

    # git-buildpackage argument list; the first entry is ignored by gbp.
    cmdline = ['dummy argv[0]',
               '--color-scheme=magenta:green:yellow:red',
               '--pristine-tar',
               '--upstream-branch=%s' % branch,
               '--packaging-branch=master']
    if args.depth:
        cmdline += ['--depth=%s' % args.depth]
    if args.force:
        cmdline += ['--force=clean']
    if args.all:
        cmdline += ['--all']
    if args.debug:
        cmdline += ["--verbose"]

    # Update local branches from the remote.
    log.info('updating from remote')
    status = do_pull(cmdline)
    if status == 2:
        raise GbsError('Failed to update some of the branches!')
    if status:
        raise GbsError('Update failed!')
    log.info('finished')
def main(args):
    """gbs import entry point.

    Imports either packaging files (from a .src.rpm / .spec) or an
    upstream source tarball into the git tree via git-buildpackage.
    """
    # Propagate author identity to git through the environment.
    if args.author_name:
        os.environ["GIT_AUTHOR_NAME"] = args.author_name
    if args.author_email:
        os.environ["GIT_AUTHOR_EMAIL"] = args.author_email
    path = args.path
    tmp = Temp(prefix='gbp_',
               dirn=configmgr.get('tmpdir', 'general'),
               directory=True)
    # Upstream branch: CLI option wins over the config default.
    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')
    params = ["argv[0] placeholder",
              "--color-scheme=magenta:green:yellow:red",
              "--packaging-dir=%s" % get_packaging_dir(args),
              "--upstream-branch=%s" % upstream_branch,
              path,
              "--tmp-dir=%s" % tmp.path,
              ]
    if args.debug:
        params.append("--verbose")
    # pristine-tar is used only when installed and not disabled.
    if not args.no_pristine_tar and os.path.exists("/usr/bin/pristine-tar"):
        params.append("--pristine-tar")
    if args.filter:
        params += [('--filter=%s' % f) for f in args.filter]
    if path.endswith('.src.rpm') or path.endswith('.spec'):
        # Source rpm / spec import path.
        if args.allow_same_version:
            params.append("--allow-same-version")
        if args.native:
            params.append("--native")
        if args.no_patch_import:
            params.append("--no-patch-import")
        ret = gbp_import_srpm(params)
        # Exit code 2: sources imported, but patch import failed.
        if ret == 2:
            log.warning("Importing of patches into packaging branch failed! "
                        "Please import manually (apply and commit to git, "
                        "remove files from packaging dir and spec) in order "
                        "to enable automatic patch generation.")
        elif ret:
            raise GbsError("Failed to import %s" % path)
    else:
        # Upstream tarball import path.
        if args.upstream_vcs_tag:
            params.append('--upstream-vcs-tag=%s' % args.upstream_vcs_tag)
        if args.merge:
            params.append('--merge')
        else:
            params.append('--no-merge')
        if gbp_import_orig(params):
            raise GbsError('Failed to import %s' % path)
    log.info('done.')
def update_local_conf(repo, values):
    """Create/update local gbs.conf"""
    # Parse the existing project-local config (created if missing),
    # merge in the requested section/key values, then write it back.
    parser = BrainConfigParser()
    conf_path = os.path.join(repo.path, '.gbs.conf')
    log.info('Updating local .gbs.conf')
    with open(conf_path, 'a+') as conf_file:
        parser.readfp(conf_file)
        for section, options in values.iteritems():
            for option, val in options.iteritems():
                parser.set_into_file(section, option, val)
        parser.update()
    # Record the change in git so the config travels with the tree.
    log.info('Committing local .gbs.conf to git')
    repo.add_files(['.gbs.conf'])
    repo.commit_all(msg="Autoupdate local .gbs.conf\n\nGbp-Rpm: Ignore")
def main(args):
    '''main entrance for createimage'''
    # mic is an optional runtime dependency; probe for it up front so
    # the user gets a clear error instead of a traceback later.
    try:
        import mic
    except ImportError:
        raise GbsError('please install mic manually first')
    if not os.path.exists(args.ks_file):
        raise GbsError('specified ks file %s does not exist' % args.ks_file)
    log.info('creating image for ks file: %s' % args.ks_file)
    status = createimage(args, args.ks_file)
    if status == 0:
        log.info('Done')
    else:
        raise GbsError('failed to create image')
def track_export_branches(repo, args):
    '''checking export related branches: pristine-tar, upstream.
    give warning if pristine-tar/upstream branch exist in remote
    but have not been checkout to local
    '''
    # Map short branch name -> full remote ref (e.g. 'origin/upstream').
    remotes = {}
    for ref in repo.get_remote_branches():
        remotes[ref.split('/', 1)[-1]] = ref
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    tracked = []
    # Create local tracking branches for upstream and pristine-tar when
    # they exist only on the remote side.
    for name in (upstream_branch, 'pristine-tar'):
        if repo.has_branch(name) or name not in remotes:
            continue
        log.info('tracking branch: %s -> %s' % (remotes[name], name))
        repo.create_branch(name, remotes[name])
        tracked.append(name)
    return tracked
def export_sources(repo, commit, export_dir, spec, args, create_tarball=True):
    """
    Export packaging files using git-buildpackage.

    On tarball/patch generation failure for a non-native package (gbp
    exit code 2), optionally falls back to a forced-native export when
    the 'fallback_to_native' setting is enabled.
    """
    tmp = utils.Temp(prefix='gbp_',
                     dirn=configmgr.get('tmpdir', 'general'),
                     directory=True)
    gbp_args = create_gbp_export_args(repo, commit, export_dir, tmp.path,
                                      spec, args, force_native=False,
                                      create_tarball=create_tarball)
    try:
        ret = gbp_build(gbp_args)
        # Exit code 2 signals tarball/patch generation failure.
        if ret == 2 and not is_native_pkg(repo, args):
            errmsg = ("Generating upstream tarball and/or generating patches "
                      "failed. GBS tried this as you have upstream branch in "
                      "you git tree. Fix the problem by either:\n"
                      " 1. Update your upstream branch and/or fix the spec "
                      "file. Also, check the upstream tag format.\n"
                      " 2. Remove or rename the upstream branch (change the "
                      "package to native)\n"
                      "See https://source.tizen.org/documentation/reference/"
                      "git-build-system/upstream-package for more details.")
            fallback = configmgr.get_arg_conf(args, 'fallback_to_native')
            if config_is_true(fallback):
                # Try falling back to old logic of one monolithic tarball
                log.warn(errmsg)
                log.info("Falling back to native, i.e. creating source archive "
                         "directly from exported commit, without any patches.")
                gbp_args = create_gbp_export_args(repo, commit, export_dir,
                                                  tmp.path, spec, args,
                                                  force_native=True,
                                                  create_tarball=create_tarball)
                ret = gbp_build(gbp_args)
            else:
                # No fallback configured: report and let ret trigger the
                # GbsError below.
                log.error(errmsg)
        if ret:
            raise GbsError("Failed to export packaging files from git tree")
    except GitRepositoryError, excobj:
        raise GbsError("Repository error: %s" % excobj)
def prepare_repos_and_build_conf(args, arch, profile):
    '''generate repos and build conf options for depanneur'''
    # NOTE(review): this chunk is truncated; cmd_opts is filled in the
    # part of the function that is not visible here.
    cmd_opts = []
    # Per-invocation cache directory for downloaded repo data.
    cache = Temp(prefix=os.path.join(TMPDIR, 'gbscache'),
                 directory=True)
    cachedir = cache.path
    if not os.path.exists(cachedir):
        os.makedirs(cachedir)
    log.info('generate repositories ...')
    # Repos from the profile config unless explicitly skipped.
    if args.skip_conf_repos:
        repos = []
    else:
        repos = [i.url for i in profile.repos]
    # Extra repos from the command line; invalid URLs are warned about
    # and skipped rather than aborting.
    # NOTE(review): unlike the other variant of this function, local
    # paths are not resolved with expanduser/abspath here - confirm.
    if args.repositories:
        for i in args.repositories:
            try:
                opt_repo = SafeURL(i)
            except ValueError, err:
                log.warning('Invalid repo %s: %s' % (i, str(err)))
            else:
                repos.append(opt_repo)
# NOTE(review): partial chunk of an export entry point - the enclosing
# def and the start of the Temp(...) call are not visible here.
               directory=True)
export_dir = tempd.path
check_export_branches(repo, args)
# Export packaging files for the requested commit into a temp dir.
with utils.Workdir(workdir):
    export_sources(repo, commit, export_dir, relative_spec, args)
specfile = os.path.basename(relative_spec)
try:
    spec = rpm.parse_spec(os.path.join(export_dir, specfile))
except GbpError, err:
    raise GbsError('%s' % err)
if not spec.name or not spec.version:
    raise GbsError('can\'t get correct name or version from spec file.')
else:
    # Final output dir is <outdir>/<name>-<upstreamversion>-<release>.
    outdir = "%s/%s-%s-%s" % (outdir, spec.name, spec.upstreamversion,
                              spec.release)
    if os.path.exists(outdir):
        if not os.access(outdir, os.W_OK | os.X_OK):
            raise GbsError('no permission to update outdir: %s' % outdir)
        # Replace any stale previous export.
        shutil.rmtree(outdir, ignore_errors=True)
    shutil.move(export_dir, outdir)
if args.source_rpm:
    log.info('source rpm generated to:\n %s/%s.src.rpm' % \
             (outdir, os.path.basename(outdir)))
log.info('package files have been exported to:\n %s' % outdir)
# restore updated spec files for spec in glob.glob(os.path.join(specbakd.path, "*.spec")): shutil.copy(spec, export_dir) # Remove tracked export branches if tracked_branches: untrack_export_branches(repo, tracked_branches) specfile = os.path.basename(main_spec) try: spec = rpm.SpecFile(os.path.join(export_dir, specfile)) except GbpError, err: raise GbsError('%s' % err) if not spec.name or not spec.version: raise GbsError('can\'t get correct name or version from spec file.') else: outdir = "%s/%s-%s-%s" % (outdir, spec.name, spec.upstreamversion, spec.release) if os.path.exists(outdir): if not os.access(outdir, os.W_OK|os.X_OK): raise GbsError('no permission to update outdir: %s' % outdir) shutil.rmtree(outdir, ignore_errors=True) shutil.move(export_dir, outdir) if args.source_rpm: log.info('source rpm generated to:\n %s/%s.src.rpm' % \ (outdir, os.path.basename(outdir))) log.info('package files have been exported to:\n %s' % outdir)
def main(args):
    """gbs import entry point.

    Imports either packaging files (from a .src.rpm / .spec) or an
    upstream source tarball into the git tree via git-buildpackage.
    """
    # Propagate author identity to git through the environment.
    if args.author_name:
        os.environ["GIT_AUTHOR_NAME"] = args.author_name
    if args.author_email:
        os.environ["GIT_AUTHOR_EMAIL"] = args.author_email
    path = args.path
    tmp = Temp(prefix='gbp_',
               dirn=configmgr.get('tmpdir', 'general'),
               directory=True)
    upstream_branch = configmgr.get_arg_conf(args, 'upstream_branch')
    upstream_tag = configmgr.get_arg_conf(args, 'upstream_tag')
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    upstream_tag = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', upstream_tag)
    params = ["argv[0] placeholder",
              "--color-scheme=magenta:green:yellow:red",
              "--packaging-dir=%s" % get_packaging_dir(args),
              "--upstream-branch=%s" % upstream_branch,
              path,
              "--upstream-tag=%s" % upstream_tag,
              "--tmp-dir=%s" % tmp.path,
              ]
    if args.debug:
        params.append("--verbose")
    # pristine-tar is used only when installed and not disabled.
    if not args.no_pristine_tar and os.path.exists("/usr/bin/pristine-tar"):
        params.append("--pristine-tar")
    if args.filter:
        params += [('--filter=%s' % f) for f in args.filter]
    if args.upstream_vcs_tag:
        params.append('--upstream-vcs-tag=%s' % args.upstream_vcs_tag)
    if path.endswith('.src.rpm') or path.endswith('.spec'):
        # Source rpm / spec import path.
        params.append("--create-missing-branches")
        if args.allow_same_version:
            params.append("--allow-same-version")
        if args.native:
            params.append("--native")
        if args.orphan_packaging:
            params.append("--orphan-packaging")
        if args.no_patch_import:
            params.append("--no-patch-import")
        ret = gbp_import_srpm(params)
        # Exit code 2: sources imported, but patch import failed.
        if ret == 2:
            log.warning("Importing of patches into packaging branch failed! "
                        "Please import manually (apply and commit to git, "
                        "remove files from packaging dir and spec) in order "
                        "to enable automatic patch generation.")
        elif ret:
            raise GbsError("Failed to import %s" % path)
    else:
        # Upstream tarball import path.
        if args.merge:
            params.append('--merge')
        else:
            params.append('--no-merge')
        if gbp_import_orig(params):
            raise GbsError('Failed to import %s' % path)
    log.info('done.')
# NOTE(review): partial chunk - the for-loop over git status entries
# that encloses the first lines is not visible here.
        if stat == '??':
            # '??' entries are untracked files, collected elsewhere.
            continue
        uncommitted_files.extend(status[stat])
if not is_clean and not opts.include_all:
    # Warn about changes that will be left out of the export.
    if untracked_files:
        log.warning('the following untracked files would NOT be '
                    'included:\n %s' % '\n '.join(untracked_files))
    if uncommitted_files:
        log.warning('the following uncommitted changes would NOT be '
                    'included:\n %s' % '\n '.join(uncommitted_files))
    log.warning('you can specify \'--include-all\' option to '
                'include these uncommitted and untracked files.')
if not is_clean and opts.include_all:
    # --include-all: report what extra content will be picked up.
    if untracked_files:
        log.info('the following untracked files would be included'
                 ':\n %s' % '\n '.join(untracked_files))
    if uncommitted_files:
        log.info('the following uncommitted changes would be included'
                 ':\n %s' % '\n '.join(uncommitted_files))


def hexdigest(fhandle, block_size=4096):
    """Calculate hexdigest of file content.

    Reads *fhandle* in block_size chunks so arbitrarily large files
    can be hashed without loading them fully into memory; returns the
    MD5 hex digest string.
    """
    md5obj = hashlib.new('md5')
    while True:
        data = fhandle.read(block_size)
        if not data:
            break
        md5obj.update(data)
    return md5obj.hexdigest()
# NOTE(review): partial chunk of prepare_repos_and_build_conf - the
# enclosing def is not visible here.
cmd_opts += [('--repository=%s' % url.full) for url in repourls]
# Resolve the build configuration file: explicit -D option wins,
# otherwise use the one discovered in the repos.
if args.dist:
    distconf = args.dist
    if not os.path.exists(distconf):
        raise GbsError('specified build conf %s does not exists' % distconf)
else:
    if repoparser.buildconf is None:
        raise GbsError('failed to get build conf from repos, please '
                       'use snapshot repo or specify build config using '
                       '-D option')
    else:
        shutil.copy(repoparser.buildconf, TMPDIR)
        distconf = os.path.join(TMPDIR,
                                os.path.basename(repoparser.buildconf))
        log.info('build conf has been downloaded at:\n %s' % distconf)
if distconf is None:
    raise GbsError('No build config file specified, please specify in '
                   '~/.gbs.conf or command line using -D')
# must use abspath here, because build command will also use this path
distconf = os.path.abspath(distconf)
# depanneur derives the dist name from the file name, so enforce the
# naming restrictions it relies on.
if not distconf.endswith('.conf') or '-' in os.path.basename(distconf):
    raise GbsError("build config file must end with .conf, and can't "
                   "contain '-'")
dist = os.path.basename(distconf)[:-len('.conf')]
cmd_opts += ['--dist=%s' % dist]
cmd_opts += ['--configdir=%s' % os.path.dirname(distconf)]
# Get current branch try: current_branch = repo.get_branch() except GitRepositoryError: current_branch = None gbp_args = compose_gbp_args(repo, tmp.path, specfile, args) # Run gbp command if args.action == 'start': ret = gbp_pq_rpm(gbp_args + ['import']) if not ret: update_local_conf(repo, {'orphan-devel': {'packaging_branch': current_branch}}) elif args.action == 'export': log.info('Exporting patches to packaging branch') ret = gbp_pq_rpm(gbp_args + ['export']) elif args.action == 'switch': ret = gbp_pq_rpm(gbp_args + ['switch']) elif args.action == 'drop': ret = gbp_pq_rpm(gbp_args + ['drop']) elif args.action == 'convert': log.info('Converting package to orphan-packaging git layout') ret = gbp_pq_rpm(gbp_args + ['convert']) if not ret: log.info("You can now create the development branch with " "'gbs devel start'") if ret: raise GbsError('Action failed!')
# NOTE(review): partial chunk of the 'gbs changelog' entry point - the
# enclosing def and the if that owns the 'else:' below are not visible.
    if len(changes_file_list) > 1:
        # Multiple changes files found; the first one wins.
        log.warning("Found more than one changes files, %s is taken "
                    % (changes_file_list[0]))
else:
    # No changes file yet: gbp will create CHANGES.
    fn_changes = 'CHANGES'
gbp_args = ['dummy argv[0]',
            '--color-scheme=magenta:green:yellow:red',
            '--ignore-branch',
            '--changelog-revision=%(tagname)s',
            '--spawn-editor=always',
            '--git-author',
            '--packaging-dir=%s' % packaging_dir,
            '--spec-file=%s' % specfile,
            '--changelog-file=%s' % fn_changes,
            '--editor-cmd=%s' % get_editor_cmd(),
            ]
if args.since:
    gbp_args.append('--since=%s' % args.since)
if args.all:
    gbp_args.append('--all')
if args.message:
    gbp_args.append('--message=%s' % args.message)
ret = gbp_rpm_ch(gbp_args)
if ret:
    raise GbsError("Change log has not been updated")
else:
    log.info("Change log has been updated.")
def main(args): """gbs chroot entry point.""" build_root = args.buildroot running_lock = '%s/not-ready' % build_root if os.path.exists(running_lock): raise GbsError('build root %s is not ready' % build_root) log.info('chroot %s' % build_root) user = '******' if args.root: user = '******' cmd = ['sudo', 'chroot', build_root, 'su', user] try: subprocess.call(['sudo', 'cp', '/etc/resolv.conf', build_root + \ '/etc/resolv.conf']) except OSError: log.warning('failed to setup /etc/resolv.conf') try: build_env = os.environ build_env['PS1'] = "(tizen-build-env)@\h \W]\$ " subprocess.call(cmd, env=build_env) except OSError, err: raise GbsError('failed to chroot to %s: %s' % (build_root, err)) except KeyboardInterrupt: log.info('keyboard interrupt ...')
# NOTE(review): partial chunk of the 'gbs devel' entry point - the
# enclosing def is not visible here.
try:
    current_branch = repo.get_branch()
except GitRepositoryError:
    # Detached HEAD or similar: no current branch to record.
    current_branch = None
gbp_args = compose_gbp_args(repo, tmp.path, specfile, args)
# Run gbp command
if args.action == 'start':
    ret = gbp_pq_rpm(gbp_args + ['import'])
    if not ret:
        # Remember which branch development started from.
        update_local_conf(
            repo, {'orphan-devel': {
                'packaging_branch': current_branch
            }})
elif args.action == 'export':
    log.info('Exporting patches to packaging branch')
    ret = gbp_pq_rpm(gbp_args + ['export'])
elif args.action == 'switch':
    ret = gbp_pq_rpm(gbp_args + ['switch'])
elif args.action == 'drop':
    ret = gbp_pq_rpm(gbp_args + ['drop'])
elif args.action == 'convert':
    log.info('Converting package to orphan-packaging git layout')
    ret = gbp_pq_rpm(gbp_args + ['convert'])
    if not ret:
        log.info("You can now create the development branch with "
                 "'gbs devel start'")
if ret:
    raise GbsError('Action failed!')
def main(args):
    """gbs build entry point.

    Validates options, resolves the build root and profile, assembles
    the depanneur command line and runs it via the shell.
    """
    global TMPDIR
    TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'),
                          '%s-gbs' % USERID)
    # Mutually exclusive option checks.
    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '
                    '--clean or --clean-once')
    workdir = args.gitdir
    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        # Not inside a git tree is only fatal when --spec was given.
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")
    read_localconf(workdir)
    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)
    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' %
                       (buildarch, ','.join(SUPPORTEDARCHS)))
    profile = get_profile(args)
    # Build root precedence: CLI > environment > profile > config.
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    # Merge profile-level package exclusions into the CLI exclude list.
    if profile.exclude_packages:
        log.info('the following packages have been excluded build from gbs '
                 'config:\n %s' % '\n '.join(profile.exclude_packages))
        if args.exclude:
            args.exclude += ',' + ','.join(profile.exclude_packages)
        else:
            args.exclude = ','.join(profile.exclude_packages)
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)
    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']
    cmd += ['--arch=%s' % buildarch]
    if args.clean:
        cmd += ['--clean']
    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']
    cmd += ['--path=%s' % workdir]
    if args.ccache:
        cmd += ['--ccache']
    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]
    # Cross-build via setarch when host and target arch differ.
    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd
    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)
    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]
    if args.upstream_branch:
        cmd += ['--upstream-branch=%s' % args.upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']
    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]
    # Determine if we're on devel branch
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging:
        cmd += ['--spec-commit=%s' % orphan_packaging]
    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('some packages failed to be built')
    else:
        log.info('Done')
def main(args):
    """gbs build entry point.

    Validates options, resolves the build root and profile, assembles
    the depanneur command line (including fallback-to-native handling)
    and runs it via the shell.
    """
    global TMPDIR
    TMPDIR = os.path.join(configmgr.get('tmpdir', 'general'),
                          '%s-gbs' % USERID)
    # Mutually exclusive option checks.
    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '
                    '--clean or --clean-once')
    workdir = args.gitdir
    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        # Not inside a git tree is only fatal when --spec was given.
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")
    read_localconf(workdir)
    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)
    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' %
                       (buildarch, ','.join(SUPPORTEDARCHS)))
    profile = get_profile(args)
    # Build root precedence: CLI > environment > profile > config.
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    # Merge profile-level package exclusions into the CLI exclude list.
    if profile.exclude_packages:
        log.info('the following packages have been excluded build from gbs '
                 'config:\n %s' % '\n '.join(profile.exclude_packages))
        if args.exclude:
            args.exclude += ',' + ','.join(profile.exclude_packages)
        else:
            args.exclude = ','.join(profile.exclude_packages)
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)
    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']
    cmd += ['--arch=%s' % buildarch]
    if args.clean:
        cmd += ['--clean']
    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']
    cmd += ['--path=%s' % workdir]
    if args.ccache:
        cmd += ['--ccache']
    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]
    # Cross-build via setarch when host and target arch differ.
    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd
    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)
    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]
    if args.upstream_branch:
        cmd += ['--upstream-branch=%s' % args.upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    # fallback-to-native comes from the CLI flag or (for non-default
    # config files) from the config setting.
    # NOTE(review): configmgr.get is called with one argument here,
    # unlike the two-argument calls elsewhere - confirm this is valid.
    if args.conf and args.conf != '.gbs.conf':
        fallback = configmgr.get('fallback_to_native')
    else:
        fallback = ''
    if args.fallback_to_native or config_is_true(fallback):
        cmd += ['--fallback-to-native']
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']
    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]
    # Determine if we're on devel branch
    orphan_packaging = configmgr.get('packaging_branch', 'orphan-devel')
    if orphan_packaging:
        cmd += ['--spec-commit=%s' % orphan_packaging]
    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('some packages failed to be built')
    else:
        log.info('Done')
# NOTE(review): partial chunk of the 'gbs submit' entry point - the
# enclosing def and opening try are not visible; the chunk also ends
# on a dangling 'else:'.
    repo = RpmGitRepository(workdir)
    commit = repo.rev_parse(args.commit)
    current_branch = repo.get_branch()
except GitRepositoryError, err:
    raise GbsError(str(err))
try:
    upstream = repo.get_upstream_branch(current_branch)
except GitRepositoryError:
    upstream = None
# Default the remote to the current branch's upstream, else 'origin'.
if not args.remote:
    if upstream:
        args.remote = upstream.split('/')[0]
    else:
        log.info("no upstream set for the current branch, using "
                 "'origin' as the remote server")
        args.remote = 'origin'
if args.tag:
    tagname = args.tag
    # Submit tags must look like submit/<target>/<YYYYMMDD>.<HHMMSS>.
    tag_re = re.compile(r'^submit/\S+/\d{8}\.\d{6}$')
    if not tag_re.match(tagname):
        raise GbsError("invalid tag %s, valid tag format is "
                       "submit/$target/$date.$time. For example:\n "
                       "submit/trunk/20130128.022439 " % tagname)
else:
    target = args.target
    if not target:
        # Derive the target from the upstream branch, minus the remote.
        if upstream and upstream.startswith(args.remote):
            target = re.sub('^%s/' % args.remote, '', upstream)
        else:
def main(args):
    """gbs build entry point.

    Older variant: also validates that the host can build the target
    arch (natively or via qemu) before invoking depanneur.
    """
    # Mutually exclusive option checks.
    if args.commit and args.include_all:
        raise Usage('--commit can\'t be specified together with '
                    '--include-all')
    if args.noinit and (args.clean or args.clean_once):
        raise Usage('--noinit can\'t be specified together with '
                    '--clean or --clean-once')
    workdir = args.gitdir
    try:
        repo = RpmGitRepository(workdir)
        workdir = repo.path
    except GitRepositoryError:
        # Not inside a git tree is only fatal when --spec was given.
        if args.spec:
            raise GbsError("git project can't be found for --spec, "
                           "give it in argument or cd into it")
    hostarch = os.uname()[4]
    if args.arch:
        buildarch = args.arch
    else:
        buildarch = hostarch
        log.info('No arch specified, using system arch: %s' % hostarch)
    if not buildarch in SUPPORTEDARCHS:
        raise GbsError('arch %s not supported, supported archs are: %s ' %
                       (buildarch, ','.join(SUPPORTEDARCHS)))
    # The target arch must be natively buildable or qemu-emulatable.
    if buildarch not in CAN_ALSO_BUILD.get(hostarch, []):
        if buildarch not in QEMU_CAN_BUILD:
            raise GbsError("hostarch: %s can't build target arch %s" %
                           (hostarch, buildarch))
    profile = get_profile(args)
    # Build root precedence: CLI > environment > profile > config.
    if args.buildroot:
        build_root = args.buildroot
    elif 'TIZEN_BUILD_ROOT' in os.environ:
        build_root = os.environ['TIZEN_BUILD_ROOT']
    elif profile.buildroot:
        build_root = profile.buildroot
    else:
        build_root = configmgr.get('buildroot', 'general')
    build_root = os.path.expanduser(build_root)
    # transform variables from shell to python convention ${xxx} -> %(xxx)s
    build_root = re.sub(r'\$\{([^}]+)\}', r'%(\1)s', build_root)
    sanitized_profile_name = re.sub("[^a-zA-Z0-9:._-]", "_", profile.name)
    build_root = build_root % {'tmpdir': TMPDIR,
                               'profile': sanitized_profile_name}
    os.environ['TIZEN_BUILD_ROOT'] = os.path.abspath(build_root)
    # get virtual env from system env first
    if 'VIRTUAL_ENV' in os.environ:
        cmd = ['%s/usr/bin/depanneur' % os.environ['VIRTUAL_ENV']]
    else:
        cmd = ['depanneur']
    cmd += ['--arch=%s' % buildarch]
    if args.clean:
        cmd += ['--clean']
    # check & prepare repos and build conf
    if not args.noinit:
        cmd += prepare_repos_and_build_conf(args, buildarch, profile)
    else:
        cmd += ['--noinit']
    cmd += ['--path=%s' % workdir]
    if args.ccache:
        cmd += ['--ccache']
    if args.extra_packs:
        cmd += ['--extra-packs=%s' % args.extra_packs]
    # Cross-build via setarch when host and target arch differ.
    if hostarch != buildarch and buildarch in CHANGE_PERSONALITY:
        cmd = [CHANGE_PERSONALITY[buildarch]] + cmd
    # Extra depanneur special command options
    cmd += prepare_depanneur_opts(args)
    # Extra options for gbs export
    if args.include_all:
        cmd += ['--include-all']
    if args.commit:
        cmd += ['--commit=%s' % args.commit]
    if args.upstream_branch:
        upstream_branch = args.upstream_branch
    else:
        upstream_branch = configmgr.get('upstream_branch', 'general')
    cmd += ['--upstream-branch=%s' % upstream_branch]
    if args.upstream_tag:
        cmd += ['--upstream-tag=%s' % args.upstream_tag]
    if args.squash_patches_until:
        cmd += ['--squash-patches-until=%s' % args.squash_patches_until]
    if args.no_patch_export:
        cmd += ['--no-patch-export']
    if args.define:
        cmd += [('--define="%s"' % i) for i in args.define]
    if args.spec:
        cmd += ['--spec=%s' % args.spec]
    log.debug("running command: %s" % ' '.join(cmd))
    retcode = os.system(' '.join(cmd))
    if retcode != 0:
        raise GbsError('rpmbuild fails')
    else:
        log.info('Done')
# NOTE(review): partial chunk of the 'gbs submit' entry point - the
# enclosing def and opening try are not visible here.
    repo = RpmGitRepository(workdir)
    commit = repo.rev_parse(args.commit)
    current_branch = repo.get_branch()
except GitRepositoryError, err:
    raise GbsError(str(err))
try:
    upstream = repo.get_upstream_branch(current_branch)
except GitRepositoryError:
    upstream = None
# Default the remote to the current branch's upstream, else 'origin'.
if not args.remote:
    if upstream:
        args.remote = upstream.split('/')[0]
    else:
        log.info("no upstream set for the current branch, using "
                 "'origin' as the remote server")
        args.remote = 'origin'
if args.tag:
    tagname = args.tag
    # Submit tags must look like submit/<target>/<YYYYMMDD>.<HHMMSS>.
    tag_re = re.compile(r'^submit/\S+/\d{8}\.\d{6}$')
    if not tag_re.match(tagname):
        raise GbsError("invalid tag %s, valid tag format is "
                       "submit/$target/$date.$time. For example:\n "
                       "submit/trunk/20130128.022439 " % tagname)
else:
    target = args.target
    if not target:
        # Derive the target from the upstream branch's short name.
        if upstream and upstream.startswith(args.remote):
            target = os.path.basename(upstream)
# NOTE(review): partial chunk of prepare_repos_and_build_conf - the
# enclosing def is not visible here.
profile_name = formalize_build_conf(profile.name.replace(
    'profile.', '', 1))
distconf = os.path.join(TMPDIR, '%s.conf' % profile_name)
# Build conf precedence: -D option > profile setting > repo-provided.
if args.dist:
    buildconf = args.dist
elif profile.buildconf:
    buildconf = profile.buildconf
else:
    if repoparser.buildconf is None:
        raise GbsError('failed to get build conf from repos, please '
                       'use snapshot repo or specify build config using '
                       '-D option')
    else:
        buildconf = repoparser.buildconf
        log.info('build conf has been downloaded at:\n %s' % distconf)
try:
    # Stage the chosen build conf under TMPDIR with a canonical name.
    shutil.copy(buildconf, distconf)
except IOError, err:
    raise GbsError("Failed to copy build conf: %s" % (str(err)))
if not os.path.exists(distconf):
    raise GbsError('No build config file specified, please specify in '
                   '~/.gbs.conf or command line using -D')
# must use abspath here, because build command will also use this path
distconf = os.path.abspath(distconf)
# depanneur derives the dist name from the file name, so enforce the
# naming restrictions it relies on.
if not distconf.endswith('.conf') or '-' in os.path.basename(distconf):
    raise GbsError("build config file must end with .conf, and can't "
                   "contain '-'")
# NOTE(review): partial chunk of the 'gbs remotebuild' entry point -
# the enclosing def is not visible, and the chunk ends mid-statement.
    status = api.get_results(target_prj, package)
    # Collect all repo/arch combinations for the error message below.
    for build_repo in status.keys():
        for arch in status[build_repo]:
            archlist.append('%-15s%-15s' % (build_repo, arch))
    if not obs_repo or not obs_arch or obs_repo not in status.keys() \
            or obs_arch not in status[obs_repo].keys():
        raise GbsError('no valid repo / arch specified for buildlog, '
                       'valid arguments of repo and arch are:\n%s' %
                       '\n'.join(archlist))
    # A build log only exists once the build has actually started.
    if status[obs_repo][obs_arch] not in ['failed', 'succeeded',
                                          'building', 'finishing']:
        raise GbsError('build status of %s for %s/%s is %s, '
                       'no build log.' % (package, obs_repo, obs_arch,
                                          status[obs_repo][obs_arch]))
    log.info('build log for %s/%s/%s/%s' % (target_prj, package,
                                            obs_repo, obs_arch))
    print api.get_buildlog(target_prj, package, obs_repo, obs_arch)
    return 0
if args.status:
    # Summarize per-repo/arch build results from the build server.
    results = []
    status = api.get_results(target_prj, package)
    for build_repo in status.keys():
        for arch in status[build_repo]:
            stat = status[build_repo][arch]
            results.append('%-15s%-15s%-15s' % (build_repo, arch, stat))
    if results:
        log.info('build results from build server:\n%s' \
# NOTE(review): partial chunk of the 'gbs changelog' entry point - the
# enclosing def is not visible here.
fn_changes = changes_file_list[0]
if len(changes_file_list) > 1:
    # Multiple changes files found; the first one wins.
    log.warning("Found more than one changes files, %s is taken "
                % (changes_file_list[0]))
# get the commit start from the args.since
commitid_since = get_first_commit(repo, fn_changes, args.since)
commits = repo.get_commits(commitid_since, 'HEAD')
if not commits:
    raise GbsError("Nothing found between %s and HEAD" % commitid_since)
if args.message:
    # Build a single changelog entry from the user-supplied message.
    author = repo.get_author_info()
    lines = ["- %s" % line for line in args.message.split(os.linesep) \
             if line.strip()]
    new_entries = ["* %s %s <%s> %s" % \
                   (datetime.datetime.now().strftime("%a %b %d %Y"),
                    author.name, author.email,
                    get_version(repo, commits[0]))]
    new_entries.extend(lines)
else:
    # Otherwise derive entries from the git commit messages.
    new_entries = make_log_entries(commits, repo)
content = get_all_entries(fn_changes, new_entries)
# Open the editor; only report an update if the user saved changes.
if edit_file(fn_changes, content):
    log.info("Change log has been updated.")
else:
    log.info("Change log has not been updated")