def remote():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--repo-url',
                        help="Base repository URL for remotely generated "
                             "repo (required)", required=True)
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of "
                             "fetching the default one using rdopkg. Only "
                             "applies when pkginfo_driver is rdoinfo in "
                             "projects.ini")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    return import_commit(options.repo_url, options.config_file,
                         local_info_repo=options.info_repo)
def user_manager():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")
    subparsers = parser.add_subparsers(dest='command', title='subcommands',
                                       description='available subcommands')
    subparsers.required = True

    # Subcommand create
    parser_create = subparsers.add_parser('create', help='Create a user')
    parser_create.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_create.add_argument('--password', type=str, help='Password')

    # Subcommand delete
    parser_delete = subparsers.add_parser('delete', help='Delete a user')
    parser_delete.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_delete.add_argument('--force', dest='force', action='store_true',
                               help='Do not request a confirmation')

    # Subcommand update
    parser_update = subparsers.add_parser('update', help='Update a user')
    parser_update.add_argument('--username', type=str, required=True,
                               help='User name')
    parser_update.add_argument('--password', type=str, required=True,
                               help='New password')

    options = parser.parse_args(sys.argv[1:])
    setup_logging(options.debug)

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)
    config_options = ConfigOptions(cp)

    return command_funcs[options.command](
        options, config_options.database_connection)
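
# ``command_funcs`` above is assumed to be a module-level dict mapping each
# subcommand name to a handler invoked as handler(options, db_connection).
# A minimal, self-contained sketch of that dispatch pattern follows; the
# handler name and body are illustrative assumptions, not DLRN's real
# implementations.
def _example_create(options, db_connection):
    # A real handler would open a session against db_connection and insert
    # the user; this only shows the expected call shape.
    print("create %s on %s" % (options.username, db_connection))


example_command_funcs = {
    'create': _example_create,
    # 'delete' and 'update' would map to handlers with the same signature.
}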
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from flask import Flask

app = Flask(__name__)
app.config.from_object('dlrn.api.config')
try:
    app.config.from_pyfile(os.environ['CONFIG_FILE'], silent=True)
except KeyError:
    pass

from dlrn.api import dlrn_api  # noqa
from dlrn.api import graphql  # noqa
from dlrn.config import setup_logging  # noqa

setup_logging(os.environ.get("DLRN_DEBUG"),
              os.environ.get("DLRN_LOG_FILE"))
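
# Usage sketch: the module above loads defaults from dlrn.api.config and,
# when the CONFIG_FILE environment variable is set, overrides them from that
# file (a missing file is silently ignored). The path below is an
# illustrative assumption, not a DLRN default:
#
#     import os
#     os.environ['CONFIG_FILE'] = '/etc/dlrn/api.cfg'
#     from dlrn.api import app  # app now carries the overridden settings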
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg. Only"
                             " applies when pkginfo_driver is rdoinfo in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only committed"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                             "if its last build was successful. Requires "
                             "setting allow_force_rechecks=True in "
                             "projects.ini. Implies --package-name "
                             "and --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                             "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp)
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(
                    session, name, 'invalid status', type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
                    options.run or \
                    not session.query(Commit).filter(
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.extended_hash ==
                        commit_toprocess.extended_hash,
                        Commit.type == commit_toprocess.type,
                        Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()  # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(getinfo, local=options.local,
                                  dev_mode=options.dev,
                                  head_only=options.head_only,
                                  db_connection=config_options.
                                  database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _ = getinfo(
                    package, local=options.local,
                    dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                add_commits(project_toprocess)
    closeSession(session)  # Close session, will reopen during post_build

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that"
                        " any failed commit you want to rebuild has been"
                        " removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)
        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp is no longer available in python3 so replace it. See
            # Ordering Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)
        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages, session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)

                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
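
# Both build loops above rely on a positional convention in the value
# returned by build_worker: status[3] holds the exception raised during the
# build (None on success), and the full status sequence is what post_build
# and process_build_result consume. A small helper making that convention
# explicit (a sketch; only the exception slot is established by the code
# above, the helper name is illustrative):
def _status_failed(status):
    """Return True when a build_worker status carries an exception."""
    return status[3] is not None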
def purge():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specified"
                        " directories (comma-separated list).")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to
    # them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue

        if is_commit_in_dirs(commit, options.exclude_dirs):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue

        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and \
                commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" %
                            commit.project_name)
                # this is the newest commit for this project, keep it
                continue

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()
    closeSession(session)
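
# Commit.flags is treated as a bit field: FLAG_PURGED marks commits whose
# repo directories were already cleaned, so later purge runs skip them.
# Minimal sketch of the flag arithmetic used above (the numeric value below
# is an illustrative assumption, not DLRN's actual constant):
EXAMPLE_FLAG_PURGED = 0x1


def _mark_purged(flags):
    """Set the purged bit, mirroring commit.flags |= FLAG_PURGED above."""
    return flags | EXAMPLE_FLAG_PURGED


def _is_purged(flags):
    """Test the purged bit, mirroring commit.flags & FLAG_PURGED above."""
    return bool(flags & EXAMPLE_FLAG_PURGED)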
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--config-override', action='append',
                        help="Override a configuration option from the"
                             " config file. Specify it as: "
                             "section.option=value. Can be used multiple "
                             "times if more than one override is needed.")
    parser.add_argument('--info-repo',
                        help="use a local distroinfo repo instead of"
                             " fetching the default one. Only applies when"
                             " pkginfo_driver is rdoinfo or downstream in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only committed"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                             "if its last build was successful. Requires "
                             "setting allow_force_rechecks=True in "
                             "projects.ini. Implies --package-name "
                             "and --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                             "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp, overrides=options.config_override)
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    if options.project_name:
        pkg_names = [
            p['name'] for p in packages
            if p['project'] in options.project_name
        ]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(session, name,
                                                'invalid status',
                                                type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True

    # Build a list of commits we need to process
    toprocess = []
    skipped_list = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
                    options.run or \
                    not session.query(Commit).filter(
                        Commit.commit_hash == commit_toprocess.commit_hash,
                        Commit.distro_hash == commit_toprocess.distro_hash,
                        Commit.extended_hash ==
                        commit_toprocess.extended_hash,
                        Commit.type == commit_toprocess.type,
                        Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        pool = multiprocessing.Pool()  # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(
            getinfo, local=options.local,
            dev_mode=options.dev,
            head_only=options.head_only,
            db_connection=config_options.database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg, skipped = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                if skipped:
                    skipped_list.append(updated_pkg['name'])
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _, skipped = getinfo(
                    package, local=options.local,
                    dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                if skipped:
                    skipped_list.append(package['name'])
                add_commits(project_toprocess)
    closeSession(session)  # Close session, will reopen during post_build

    # Store skip list
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(os.path.join(datadir, 'repos')):
        os.makedirs(os.path.join(datadir, 'repos'))
    with open(os.path.join(datadir, 'repos', 'skiplist.txt'), 'w') as fp:
        for pkg in skipped_list:
            fp.write(pkg + '\n')

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that"
                        " any failed commit you want to rebuild has been"
                        " removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            filename = None
            for f in os.listdir(pkginfo.distgit_dir(project_name)):
                if f.endswith('.spec'):
                    filename = f

            if filename:
                specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                        filename)
                speclist.append(
                    sh.rpmspec('-D', 'repo_bootstrap 1', '-P', specpath))

                # Check if repo_bootstrap is defined in the package.
                # If so, we'll need to rebuild after the whole bootstrap
                rawspec = open(specpath).read(-1)
                if 'repo_bootstrap' in rawspec:
                    bootstraplist.append(project_name)
            else:
                logger.warning("Could not find a spec for package %s" %
                               project_name)
        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                   for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name) if a.project_name in \
                    orders else sys.maxsize
                _b = orders.index(b.project_name) if b.project_name in \
                    orders else sys.maxsize
            # cmp is no longer available in python3 so replace it. See
            # Ordering Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)
        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages, session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)

                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
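
# main() above persists the names of skipped packages to
# <datadir>/repos/skiplist.txt, one name per line. A reader sketch for
# consumers of that file, matching the writer's layout (the helper name is
# an assumption for illustration):
def _read_skiplist(datadir):
    """Return the package names skipped during the last run."""
    path = os.path.join(datadir, 'repos', 'skiplist.txt')
    if not os.path.exists(path):
        return []
    with open(path) as fp:
        return [line.strip() for line in fp if line.strip()]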
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from flask import Flask

app = Flask(__name__)
app.config.from_object('dlrn.api.config')
try:
    app.config.from_pyfile(os.environ['CONFIG_FILE'], silent=True)
except KeyError:
    pass

from dlrn.api import dlrn_api  # nopep8
from dlrn.config import setup_logging  # nopep8

setup_logging(os.environ.get("DLRN_DEBUG"),
              os.environ.get("DLRN_LOG_FILE"))
def purge():
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'
    parser.add_argument('--config-file',
                        help="Config file (required)", required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specified"
                        " directories (comma-separated list).")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))
    try:
        use_components = cp.getboolean('DEFAULT', 'use_components')
    except ValueError:
        use_components = False
    basedir = os.path.abspath(
        os.path.join(cp.get('DEFAULT', 'datadir'), 'repos'))
    if use_components:
        component_list = get_component_list(session)
    else:
        component_list = None
    logger.debug("Used components: %s" % component_list)

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to
    # them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))).all()
    logger.debug("Commits from %s days ago: %s" % (options.older_than,
                                                   topurge))

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            logger.debug("Commit %s was purged" % commit)
            continue

        if is_commit_in_dirs(commit, options.exclude_dirs, basedir,
                             component_list=component_list):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue

        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and \
                commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" %
                            commit.project_name)
                # this is the newest commit for this project, keep it
                continue

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        logger.debug("Skipping dir or file %s" % entry)
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()

    closeSession(session)
    if cp.getboolean('DEFAULT', 'use_components'):
        purge_promoted_hashes(cp, mktime(timeparsed.timetuple()),
                              dry_run=options.dry_run)
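
# The cutoff used by both purge variants converts "--older-than N" days into
# a Unix timestamp: datetime.now() - timedelta(days=N), then mktime() on the
# resulting timetuple. A standalone worked example of the same computation
# (the value of older_than is illustrative):
from datetime import datetime, timedelta
from time import mktime

older_than = 30
cutoff = int(mktime((datetime.now() -
                     timedelta(days=older_than)).timetuple()))
# Commits whose build date falls before `cutoff` are candidates for purging.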