def create_user(options, db_connection):
    """Create a new user in the database.

    :param options: parsed CLI options; uses .username and .password
                    (password is prompted for interactively when None).
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user already exists or on error.
    """
    try:
        session = getSession(db_connection)
        olduser = session.query(User).filter(
            User.username == options.username).first()
        if olduser is not None:
            print("User %s already exists" % options.username)
            # BUGFIX: the session used to leak on this path.
            closeSession(session)
            return -1
        if options.password is None:
            newpass = getpass("Enter password for %s: " % options.username)
        else:
            newpass = options.password
        # NOTE: sha512_crypt.encrypt is the legacy alias of .hash in newer
        # passlib releases; kept for compatibility with the rest of the file.
        password = passlib.hash.sha512_crypt.encrypt(newpass)
        newuser = User(username=options.username, password=password)
        session.add(newuser)
        session.commit()
        closeSession(session)
        print("User %s successfully created" % options.username)
    except Exception as e:
        print("Failed to create user %s, %s" % (options.username, e))
        return -1
    return 0
def create_user(options, db_connection):
    """Create a new user in the database.

    :param options: parsed CLI options; uses .username and .password
                    (password is prompted for interactively when None).
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user already exists or on error.
    """
    try:
        session = getSession(db_connection)
        olduser = session.query(User).filter(
            User.username == options.username).first()
        if olduser is not None:
            print("User %s already exists" % options.username)
            # BUGFIX: the session used to leak on this path.
            closeSession(session)
            return -1
        if options.password is None:
            newpass = getpass("Enter password for %s: " % options.username)
        else:
            newpass = options.password
        # NOTE: sha512_crypt.encrypt is the legacy alias of .hash in newer
        # passlib releases; kept for compatibility with the rest of the file.
        password = passlib.hash.sha512_crypt.encrypt(newpass)
        newuser = User(username=options.username, password=password)
        session.add(newuser)
        session.commit()
        closeSession(session)
        print("User %s successfully created" % options.username)
    except Exception as e:
        print("Failed to create user %s, %s" % (options.username, e))
        return -1
    return 0
def get_report():
    """Render the detailed build report page.

    Optional query arguments:
      package: filter by project name.
      success: boolean string; show only SUCCESS or FAILED builds.
      offset: pagination offset.
    """
    package_name = request.args.get('package', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    # Translate the optional success flag into a commit status filter.
    with_status = None
    if success is not None:
        with_status = "SUCCESS" if bool(strtobool(success)) else "FAILED"

    session = getSession(app.config['DB_PATH'])
    commits = getCommits(session, without_status="RETRY",
                         project=package_name, with_status=with_status,
                         limit=pagination_limit, offset=offset)
    count = commits.count()
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    closeSession(session)
    return render_template('report.j2',
                           reponame='Detailed build report',
                           target=config_options.target,
                           src=config_options.source,
                           project_name=config_options.project_name,
                           commits=commits,
                           count=count,
                           limit=pagination_limit)
def get_report():
    """Render the detailed build report page.

    Reads optional query args: package (project filter), success
    (boolean string mapped to SUCCESS/FAILED) and offset (pagination).
    """
    package_name = request.args.get('package', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    if success is None:
        status_filter = None
    elif bool(strtobool(success)):
        status_filter = "SUCCESS"
    else:
        status_filter = "FAILED"

    session = getSession(app.config['DB_PATH'])
    commits = getCommits(session,
                         without_status="RETRY",
                         project=package_name,
                         with_status=status_filter,
                         limit=pagination_limit,
                         offset=offset)
    count = commits.count()
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    closeSession(session)
    return render_template('report.j2',
                           reponame='Detailed build report',
                           target=config_options.target,
                           src=config_options.source,
                           project_name=config_options.project_name,
                           commits=commits,
                           count=count,
                           limit=pagination_limit)
def last_tested_repo_GET():
    """Return the last tested repo, given search criteria.

    JSON parameters:
      max_age: maximum age in hours, used as base for the search.
      success (optional): find repos with a successful/unsuccessful vote.
      job_id (optional): name of the CI that sent the vote.
      sequential_mode (optional): if true, only search votes sent by
          previous_job_id (CI pipeline case). Defaults to false.
      previous_job_id (optional): CI name to search for when
          sequential_mode is true.
    """
    max_age = request.json.get('max_age', None)
    job_id = request.json.get('job_id', None)
    success = request.json.get('success', None)
    sequential_mode = request.json.get('sequential_mode', None)
    previous_job_id = request.json.get('previous_job_id', None)
    if success is not None:
        success = bool(strtobool(success))
    if sequential_mode is not None:
        sequential_mode = bool(strtobool(sequential_mode))
    if sequential_mode and previous_job_id is None:
        raise InvalidUsage('Missing parameter previous_job_id',
                           status_code=400)
    if max_age is None:
        raise InvalidUsage('Missing parameters', status_code=400)
    # Calculate timestamp as now - max_age; a max_age of 0 disables the
    # age filter entirely.
    if int(max_age) == 0:
        timestamp = 0
    else:
        oldest_time = datetime.now() - timedelta(hours=int(max_age))
        timestamp = time.mktime(oldest_time.timetuple())
    session = getSession(app.config['DB_PATH'])
    # CLEANUP: the previous "try: ... except Exception as e: raise e"
    # wrapper was a no-op and has been removed; getVote exceptions
    # propagate unchanged.
    if sequential_mode:
        # CI pipeline case
        vote = getVote(session, timestamp, success, previous_job_id,
                       fallback=False)
    else:
        # Normal case
        vote = getVote(session, timestamp, success, job_id)
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.id == vote.commit_id).first()
    result = {'commit_hash': commit.commit_hash,
              'distro_hash': commit.distro_hash,
              'timestamp': vote.timestamp,
              'job_id': vote.ci_name,
              'success': vote.ci_vote,
              'in_progress': vote.ci_in_progress,
              'user': vote.user}
    closeSession(session)
    return jsonify(result), 200
def report_result():
    """Record a CI vote for a commit_hash/distro_hash repo.

    JSON parameters: job_id (CI name), commit_hash, distro_hash,
    url (details link), timestamp (CI execution time), success
    (boolean string) and optional notes.
    Returns the recorded vote as JSON with HTTP 201.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON',
                           status_code=415)
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        timestamp = request.json['timestamp']
        job_id = request.json['job_id']
        success = request.json['success']
        url = request.json['url']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    notes = request.json.get('notes', '')
    session = getSession(app.config['DB_PATH'])
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.commit_hash == commit_hash,
        Commit.distro_hash == distro_hash).first()
    if commit is None:
        # BUGFIX: close the session before bailing out; it used to leak.
        closeSession(session)
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    # Parse the vote once instead of calling strtobool twice below.
    ci_vote = bool(strtobool(success))
    vote = CIVote(commit_id=commit.id, ci_name=job_id, ci_url=url,
                  ci_vote=ci_vote, ci_in_progress=False,
                  timestamp=int(timestamp), notes=notes,
                  user=auth.username())
    session.add(vote)
    session.commit()
    result = {'commit_hash': commit_hash,
              'distro_hash': distro_hash,
              'timestamp': timestamp,
              'job_id': job_id,
              'success': ci_vote,
              'in_progress': False,
              'url': url,
              'notes': notes,
              'user': auth.username()}
    closeSession(session)
    return jsonify(result), 201
def get_metrics():
    """Return build metrics (succeeded/failed/total counts) for a period.

    JSON parameters:
      start_date: start of period, YYYY-mm-dd (UTC).
      end_date: end of period, YYYY-mm-dd (UTC), exclusive.
      package_name (optional): restrict metrics to one package.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON',
                           status_code=415)
    try:
        start_date = request.json['start_date']
        end_date = request.json['end_date']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    package_name = request.json.get('package_name', None)

    # Convert dates to timestamp
    fmt = '%Y-%m-%d'
    try:
        start_timestamp = int(calendar.timegm(time.strptime(start_date,
                                                            fmt)))
        end_timestamp = int(calendar.timegm(time.strptime(end_date, fmt)))
    except ValueError:
        raise InvalidUsage('Invalid date format, it must be YYYY-mm-dd',
                           status_code=400)

    # Find the commits count for each metric
    session = getSession(app.config['DB_PATH'])
    commits = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build < end_timestamp)
    if package_name:
        commits = commits.filter(Commit.project_name == package_name)
    successful_commits = commits.count()

    # BUGFIX: the FAILED query used "<= end_timestamp" while the SUCCESS
    # query used "<", so builds landing exactly at end_timestamp were
    # counted inconsistently. Both now use the same end-exclusive range.
    commits = session.query(Commit).filter(
        Commit.status == 'FAILED',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build < end_timestamp)
    if package_name:
        commits = commits.filter(Commit.project_name == package_name)
    failed_commits = commits.count()
    total_commits = successful_commits + failed_commits

    result = {'succeeded': successful_commits,
              'failed': failed_commits,
              'total': total_commits}
    closeSession(session)
    return jsonify(result), 200
def get_metrics():
    """Return build metrics (succeeded/failed/total counts) for a period.

    JSON parameters:
      start_date: start of period, YYYY-mm-dd (UTC).
      end_date: end of period, YYYY-mm-dd (UTC), exclusive.
      package_name (optional): restrict metrics to one package.
    """
    try:
        start_date = request.json['start_date']
        end_date = request.json['end_date']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    package_name = request.json.get('package_name', None)

    # Convert dates to timestamp
    fmt = '%Y-%m-%d'
    try:
        start_timestamp = int(calendar.timegm(time.strptime(start_date,
                                                            fmt)))
        end_timestamp = int(calendar.timegm(time.strptime(end_date, fmt)))
    except ValueError:
        raise InvalidUsage('Invalid date format, it must be YYYY-mm-dd',
                           status_code=400)

    # Find the commits count for each metric
    session = getSession(app.config['DB_PATH'])
    commits = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build < end_timestamp)
    if package_name:
        commits = commits.filter(
            Commit.project_name == package_name)
    successful_commits = commits.count()

    # BUGFIX: the FAILED query used "<= end_timestamp" while the SUCCESS
    # query used "<"; both now use the same end-exclusive boundary.
    commits = session.query(Commit).filter(
        Commit.status == 'FAILED',
        Commit.dt_build >= start_timestamp,
        Commit.dt_build < end_timestamp)
    if package_name:
        commits = commits.filter(
            Commit.project_name == package_name)
    failed_commits = commits.count()
    total_commits = successful_commits + failed_commits

    result = {'succeeded': successful_commits,
              'failed': failed_commits,
              'total': total_commits}
    closeSession(session)
    return jsonify(result), 200
def get_civotes_detail():
    """Render the detailed CI votes page.

    Query arguments: either commit_hash+distro_hash or ci_name must be
    given; success (optional boolean string) filters vote results and
    offset paginates.
    """
    commit_hash = request.args.get('commit_hash', None)
    distro_hash = request.args.get('distro_hash', None)
    ci_name = request.args.get('ci_name', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    session = getSession(app.config['DB_PATH'])
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')

    if commit_hash and distro_hash:
        commit = session.query(Commit).filter(
            Commit.status == 'SUCCESS',
            Commit.commit_hash == commit_hash,
            Commit.distro_hash == distro_hash).first()
        if commit is None:
            # BUGFIX: commit.id used to be dereferenced unconditionally,
            # raising AttributeError (HTTP 500) for an unknown combination.
            closeSession(session)
            raise InvalidUsage('commit_hash+distro_hash combination not '
                               'found', status_code=404)
        votes = votes.from_self().filter(CIVote.commit_id == commit.id)
    elif ci_name:
        votes = votes.filter(CIVote.ci_name == ci_name)
    else:
        closeSession(session)
        raise InvalidUsage(
            "Please specify either commit_hash+distro_hash or "
            "ci_name as parameters.", status_code=400)

    votes = votes.offset(offset).limit(pagination_limit)
    if success is not None:
        votes = votes.from_self().filter(
            CIVote.ci_vote == bool(strtobool(success)))

    votelist = votes.all()
    count = votes.count()
    # Decorate each vote with its commit's hashes for the template.
    for i in range(len(votelist)):
        commit = getCommits(
            session, limit=0).filter(Commit.id ==
                                     votelist[i].commit_id).first()
        votelist[i].commit_hash = commit.commit_hash
        votelist[i].distro_hash = commit.distro_hash
        votelist[i].distro_hash_short = commit.distro_hash[:8]
    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    return render_template('votes.j2',
                           target=config_options.target,
                           votes=votelist,
                           count=count,
                           limit=pagination_limit)
def update_user(options, db_connection):
    """Update an existing user's password.

    :param options: parsed CLI options; uses .username and .password.
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user does not exist.
    """
    session = getSession(db_connection)
    password = passlib.hash.sha512_crypt.encrypt(options.password)
    user = session.query(User).filter(
        User.username == options.username).first()
    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        # BUGFIX: the session used to leak on this path.
        closeSession(session)
        return -1
    user.password = password
    session.add(user)
    session.commit()
    closeSession(session)
    return 0
def update_user(options, db_connection):
    """Update an existing user's password.

    :param options: parsed CLI options; uses .username and .password.
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user does not exist.
    """
    session = getSession(db_connection)
    password = passlib.hash.sha512_crypt.encrypt(options.password)
    user = session.query(User).filter(
        User.username == options.username).first()
    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        # BUGFIX: the session used to leak on this path.
        closeSession(session)
        return -1
    user.password = password
    session.add(user)
    session.commit()
    closeSession(session)
    return 0
def repo_status():
    """Return every CI vote for a commit_hash/distro_hash repo as JSON.

    JSON parameters: commit_hash, distro_hash and optional success
    (boolean string) to report only matching votes.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON',
                           status_code=415)
    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    success = request.json.get('success', None)
    if (commit_hash is None or distro_hash is None):
        raise InvalidUsage('Missing parameters', status_code=400)
    if success is not None:
        success = bool(strtobool(success))

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.commit_hash == commit_hash,
        Commit.distro_hash == distro_hash).first()
    if commit is None:
        # BUGFIX: close the session before raising; it used to leak.
        closeSession(session)
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    commit_id = commit.id

    # Now find every vote for this commit_hash/distro_hash combination
    votes = session.query(CIVote).filter(CIVote.commit_id == commit_id)
    if success is not None:
        votes = votes.filter(CIVote.ci_vote == int(success))

    # And format the output
    data = []
    for vote in votes:
        d = {'timestamp': vote.timestamp,
             'commit_hash': commit_hash,
             'distro_hash': distro_hash,
             'job_id': vote.ci_name,
             'success': bool(vote.ci_vote),
             'in_progress': vote.ci_in_progress,
             'url': vote.ci_url,
             'notes': vote.notes,
             'user': vote.user}
        data.append(d)
    closeSession(session)
    return jsonify(data)
def getinfo(package, local=False, dev_mode=False, head_only=False,
            db_connection=None, type="rpm"):
    """Gather the commits to process for a package.

    Returns a tuple (project_toprocess, package, skipped) where
    project_toprocess is the list of commits to build (trimmed to the
    most recent one when no history is available or head_only is set).
    """
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    commit = getLastProcessedCommit(session, project, type=type)
    if commit:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        source_branch = getsourcebranch(package)
        if commit.commit_branch == source_branch:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (commit.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            commit = getLastBuiltCommit(session, project, source_branch,
                                        type=type)
            if commit:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (commit.dt_commit)
            # In any case, we just want to build the last commit, if any
            head_only = True
    project_toprocess, skipped = pkginfo.getinfo(project=project,
                                                 package=package,
                                                 since=since,
                                                 local=local,
                                                 dev_mode=dev_mode,
                                                 type=type)
    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]
    return project_toprocess, package, skipped
def report_result():
    """Record a CI vote for a commit_hash/distro_hash repo.

    JSON parameters: job_id (CI name), commit_hash, distro_hash,
    url (details link), timestamp (CI execution time), success
    (boolean string) and optional notes.
    Returns the recorded vote as JSON with HTTP 201.
    """
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        timestamp = request.json['timestamp']
        job_id = request.json['job_id']
        success = request.json['success']
        url = request.json['url']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)
    notes = request.json.get('notes', '')

    session = getSession(app.config['DB_PATH'])
    commit = _get_commit(session, commit_hash, distro_hash)
    if commit is None:
        # BUGFIX: close the session before bailing out; it used to leak.
        closeSession(session)
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    # Parse the vote once instead of calling strtobool twice below.
    ci_vote = bool(strtobool(success))
    vote = CIVote(commit_id=commit.id, ci_name=job_id, ci_url=url,
                  ci_vote=ci_vote, ci_in_progress=False,
                  timestamp=int(timestamp), notes=notes,
                  user=auth.username())
    session.add(vote)
    session.commit()

    result = {'commit_hash': commit_hash,
              'distro_hash': distro_hash,
              'timestamp': timestamp,
              'job_id': job_id,
              'success': ci_vote,
              'in_progress': False,
              'url': url,
              'notes': notes,
              'user': auth.username()}
    closeSession(session)
    return jsonify(result), 201
def get_civotes():
    """Render the general CI votes page.

    Shows one row per commit_hash+distro_hash combination found in the
    current page of votes, with success/failure counts and the latest
    vote timestamp. The only query argument is offset (pagination).
    """
    session = getSession(app.config['DB_PATH'])
    offset = request.args.get('offset', 0)
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')
    votes = votes.order_by(desc(CIVote.timestamp))
    votes = votes.offset(offset).limit(pagination_limit)
    # NOTE: count() here counts the already-limited query, so it is at
    # most pagination_limit, not the total number of votes.
    count = votes.count()
    # Let's find all individual commit_hash + distro_hash combinations
    commit_id_list = []
    for vote in votes:
        if vote.commit_id not in commit_id_list:
            commit_id_list.append(vote.commit_id)
    # Populate list for commits
    repolist = []
    for commit_id in commit_id_list:
        commit = getCommits(session, limit=0).filter(
            Commit.id == commit_id).first()
        repodetail = RepoDetail()
        repodetail.commit_hash = commit.commit_hash
        repodetail.distro_hash = commit.distro_hash
        repodetail.distro_hash_short = commit.distro_hash[:8]
        # from_self() wraps the paginated query as a subquery, so these
        # counts only consider votes within the current page.
        repodetail.success = votes.from_self().filter(
            CIVote.commit_id == commit_id,
            CIVote.ci_vote == 1).count()
        repodetail.failure = votes.from_self().filter(
            CIVote.commit_id == commit_id,
            CIVote.ci_vote == 0).count()
        repodetail.timestamp = votes.from_self().filter(
            CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\
            first().timestamp
        repolist.append(repodetail)
    # Newest activity first
    repolist = sorted(repolist,
                      key=lambda repo: repo.timestamp, reverse=True)
    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    return render_template('votes_general.j2',
                           target=config_options.target,
                           repodetail=repolist,
                           count=count,
                           limit=pagination_limit)
def get_civotes():
    """Render the general CI votes page.

    Builds a per-repo summary (success/failure counts, last vote time)
    for every commit_hash+distro_hash combination appearing in the
    current page of votes. Query argument: offset (pagination).
    """
    session = getSession(app.config['DB_PATH'])
    offset = request.args.get('offset', 0)
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')
    votes = votes.order_by(desc(CIVote.timestamp))
    votes = votes.offset(offset).limit(pagination_limit)
    # NOTE: counts the paginated query, so count <= pagination_limit.
    count = votes.count()
    # Let's find all individual commit_hash + distro_hash combinations
    commit_id_list = []
    for vote in votes:
        if vote.commit_id not in commit_id_list:
            commit_id_list.append(vote.commit_id)
    # Populate list for commits
    repolist = []
    for commit_id in commit_id_list:
        commit = getCommits(session,
                            limit=0).filter(Commit.id == commit_id).first()
        repodetail = RepoDetail()
        repodetail.commit_hash = commit.commit_hash
        repodetail.distro_hash = commit.distro_hash
        repodetail.distro_hash_short = commit.distro_hash[:8]
        # from_self() turns the paginated query into a subquery, so the
        # counts below only consider votes within this page.
        repodetail.success = votes.from_self().filter(
            CIVote.commit_id == commit_id,
            CIVote.ci_vote == 1).count()
        repodetail.failure = votes.from_self().filter(
            CIVote.commit_id == commit_id,
            CIVote.ci_vote == 0).count()
        repodetail.timestamp = votes.from_self().filter(
            CIVote.commit_id == commit_id).order_by(desc(CIVote.timestamp)).\
            first().timestamp
        repolist.append(repodetail)
    # Newest activity first
    repolist = sorted(repolist,
                      key=lambda repo: repo.timestamp, reverse=True)
    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    return render_template('votes_general.j2',
                           target=config_options.target,
                           repodetail=repolist,
                           count=count,
                           limit=pagination_limit)
def get_civotes_detail():
    """Render the detailed CI votes page.

    Query arguments: either commit_hash+distro_hash or ci_name must be
    given; success (optional boolean string) filters vote results and
    offset paginates.
    """
    commit_hash = request.args.get('commit_hash', None)
    distro_hash = request.args.get('distro_hash', None)
    ci_name = request.args.get('ci_name', None)
    success = request.args.get('success', None)
    offset = request.args.get('offset', 0)

    session = getSession(app.config['DB_PATH'])
    votes = session.query(CIVote)
    votes = votes.filter(CIVote.ci_name != 'consistent')

    if commit_hash and distro_hash:
        commit = _get_commit(session, commit_hash, distro_hash)
        if commit is None:
            # BUGFIX: commit.id used to be dereferenced unconditionally,
            # raising AttributeError (HTTP 500) for an unknown combination.
            closeSession(session)
            raise InvalidUsage('commit_hash+distro_hash combination not '
                               'found', status_code=404)
        votes = votes.from_self().filter(CIVote.commit_id == commit.id)
    elif ci_name:
        votes = votes.filter(CIVote.ci_name == ci_name)
    else:
        closeSession(session)
        raise InvalidUsage("Please specify either commit_hash+distro_hash or "
                           "ci_name as parameters.",
                           status_code=400)

    votes = votes.offset(offset).limit(pagination_limit)
    if success is not None:
        votes = votes.from_self().filter(
            CIVote.ci_vote == bool(strtobool(success)))

    votelist = votes.all()
    count = votes.count()
    # Decorate each vote with its commit's hashes for the template.
    for i in range(len(votelist)):
        commit = getCommits(session, limit=0).filter(
            Commit.id == votelist[i].commit_id).first()
        votelist[i].commit_hash = commit.commit_hash
        votelist[i].distro_hash = commit.distro_hash
        votelist[i].distro_hash_short = commit.distro_hash[:8]
    closeSession(session)
    config_options = _get_config_options(app.config['CONFIG_FILE'])
    return render_template('votes.j2',
                           target=config_options.target,
                           votes=votelist,
                           count=count,
                           limit=pagination_limit)
def repo_status():
    """Return every CI vote for a commit_hash/distro_hash repo as JSON.

    JSON parameters: commit_hash, distro_hash and optional success
    (boolean string) to report only matching votes.
    """
    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    success = request.json.get('success', None)

    if (commit_hash is None or distro_hash is None):
        raise InvalidUsage('Missing parameters', status_code=400)
    if success is not None:
        success = bool(strtobool(success))

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    commit = _get_commit(session, commit_hash, distro_hash)
    if commit is None:
        # BUGFIX: close the session before raising; it used to leak.
        closeSession(session)
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)
    commit_id = commit.id

    # Now find every vote for this commit_hash/distro_hash combination
    votes = session.query(CIVote).filter(CIVote.commit_id == commit_id)
    if success is not None:
        votes = votes.filter(CIVote.ci_vote == int(success))

    # And format the output
    data = []
    for vote in votes:
        d = {'timestamp': vote.timestamp,
             'commit_hash': commit_hash,
             'distro_hash': distro_hash,
             'job_id': vote.ci_name,
             'success': bool(vote.ci_vote),
             'in_progress': vote.ci_in_progress,
             'url': vote.ci_url,
             'notes': vote.notes,
             'user': vote.user}
        data.append(d)
    closeSession(session)
    return jsonify(data)
def purge_promoted_hashes(config, timestamp, dry_run=True):
    """Remove old hash directories under every promotion symlink tree.

    For each known promotion name (plus 'current' and 'consistent'),
    walks the xx/yy/hash directory structure under datadir/repos/<name>
    and deletes any hash directory older than *timestamp*, except the
    one the promotion's .repo symlink currently points at.

    :param config: parsed config; reads DEFAULT database_connection,
                   datadir and reponame.
    :param timestamp: epoch seconds; directories with mtime older than
                      this are candidates for removal.
    :param dry_run: when True (default), only log what would be removed.
    """
    session = getSession(config.get('DEFAULT', 'database_connection'))
    basedir = os.path.join(config.get('DEFAULT', 'datadir'), 'repos')
    reponame = config.get('DEFAULT', 'reponame')
    # Get list of all promote names
    all_promotions = session.query(Promotion).\
        distinct(Promotion.promotion_name).\
        group_by(Promotion.promotion_name).all()
    closeSession(session)
    promotion_list = ['current', 'consistent']
    for prom in all_promotions:
        promotion_list.append(prom.promotion_name)
    logger.debug("Promotion list: %s" % promotion_list)

    # Now go through all directories
    for prom in promotion_list:
        directory = os.path.join(basedir, prom)
        logger.info("Looking into directory: %s" % directory)
        if os.path.islink(os.path.join(directory, reponame + '.repo')):
            # The symlink target's directory is the currently promoted
            # repo, which must never be deleted.
            protected_path = os.path.dirname(
                os.path.realpath(os.path.join(directory,
                                              reponame + '.repo')))
        else:
            logger.warning('No symlinks at %s' % directory)
            protected_path = ''
        logger.debug("Setting protected path: %s" % protected_path)
        # We have to traverse a 3-level hash structure
        # Not deleting the first two levels (xx/yy), just the final level,
        # where the files are located
        for path in glob.glob('%s/??/??/*' % directory):
            if os.path.isdir(path):
                dirstats = os.stat(path)
                if timestamp > dirstats.st_mtime:
                    if os.path.realpath(path) == protected_path:
                        logger.info('Not deleting %s, it is protected'
                                    % path)
                        continue
                    logger.info("Remove %s" % path)
                    if not dry_run:
                        shutil.rmtree(path, ignore_errors=True)
def delete_user(options, db_connection):
    """Delete a user, asking for confirmation unless --force is set.

    :param options: parsed CLI options; uses .username and .force.
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user does not exist or the action
             is not confirmed.
    """
    session = getSession(db_connection)
    user = session.query(User).filter(
        User.username == options.username).first()
    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        # BUGFIX: the session used to leak on this path.
        closeSession(session)
        return -1
    if not options.force:
        print("Are you sure you want to delete user %s? "
              "If so, type YES to continue." % options.username)
        confirm = input()
        if confirm != "YES":
            print("Action not confirmed, exiting")
            # BUGFIX: the session used to leak on this path too.
            closeSession(session)
            return -1
    session.delete(user)
    session.commit()
    print("User %s deleted" % options.username)
    closeSession(session)
    return 0
def delete_user(options, db_connection):
    """Delete a user, asking for confirmation unless --force is set.

    :param options: parsed CLI options; uses .username and .force.
    :param db_connection: database connection string.
    :return: 0 on success, -1 if the user does not exist or the action
             is not confirmed.
    """
    session = getSession(db_connection)
    user = session.query(User).filter(
        User.username == options.username).first()
    if user is None:
        print("ERROR: User %s does not exist" % options.username)
        # BUGFIX: the session used to leak on this path.
        closeSession(session)
        return -1
    if not options.force:
        print("Are you sure you want to delete user %s? "
              "If so, type YES to continue." % options.username)
        confirm = input()
        if confirm != "YES":
            print("Action not confirmed, exiting")
            # BUGFIX: the session used to leak on this path too.
            closeSession(session)
            return -1
    session.delete(user)
    session.commit()
    print("User %s deleted" % options.username)
    closeSession(session)
    return 0
def getinfo(package, local=False, dev_mode=False, head_only=False,
            db_connection=None, type="rpm"):
    """Gather the commits to process for a package.

    Returns a tuple (project_toprocess, package) where project_toprocess
    is the list of commits to build, trimmed to the most recent one when
    no usable history exists or head_only is set.
    """
    project = package["name"]
    since = "-1"
    session = getSession(db_connection)
    last_processed = getLastProcessedCommit(session, project, type=type)
    if last_processed:
        # If we have switched source branches, we want to behave
        # as if no previous commits had been built, and only build
        # the last one
        branch = getsourcebranch(package)
        if last_processed.commit_branch == branch:
            # This will return all commits since the last handled commit
            # including the last handled commit, remove it later if needed.
            since = "--after=%d" % (last_processed.dt_commit)
        else:
            # The last processed commit belongs to a different branch. Just
            # in case, let's check if we built a previous commit from the
            # current branch
            last_built = getLastBuiltCommit(session, project, branch,
                                            type=type)
            if last_built:
                logger.info("Last commit belongs to another branch, but"
                            " we're ok with that")
                since = "--after=%d" % (last_built.dt_commit)
            # In any case, we just want to build the last commit, if any
            head_only = True
    project_toprocess = pkginfo.getinfo(project=project,
                                        package=package,
                                        since=since,
                                        local=local,
                                        dev_mode=dev_mode,
                                        type=type)
    closeSession(session)
    # If since == -1, then we only want to trigger a build for the
    # most recent change
    if since == "-1" or head_only:
        del project_toprocess[:-1]
    return project_toprocess, package
def purge():
    """Purge old builds from the datadir and mark them purged in the DB.

    Command-line driven (see --help). Removes build artifacts older than
    --older-than days, always keeping the newest successful build of
    each project (other symlinks may point at its RPMs) and skipping
    anything under --exclude-dirs.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required)",
                        required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    # BUGFIX: typo "specifided" corrected in the help text below.
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specified"
                        " directories (comma-separated list).")
    options = parser.parse_args(sys.argv[1:])

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    if options.y is False:
        # BUGFIX: raw_input() does not exist in Python 3 (the rest of the
        # file already uses input(), see delete_user).
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session, limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue
        if is_commit_in_dirs(commit, options.exclude_dirs):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue
        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                # This is the newest commit for the project: keep its RPMs,
                # only clean repodata dirs and symlinks.
                logger.info("Keeping old commit for %s" % commit.project_name)
                continue

            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()
    closeSession(session)
def last_tested_repo_POST():
    """Find the last tested repo and mark it as being tested by a CI.

    Creates an in-progress CIVote for the reporting CI on the repo found
    by the search, then returns that repo's details with HTTP 201.

    JSON parameters:
      max_age: maximum age in hours, used as base for the search.
      reporting_job_id: name of the CI that will test this repo.
      success (optional): find repos with a successful/unsuccessful vote.
      job_id (optional): name of the CI that sent the vote.
      sequential_mode (optional): if true, only search votes sent by
          previous_job_id (CI pipeline case). Defaults to false.
      previous_job_id (optional): CI name to search for when
          sequential_mode is true.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON',
                           status_code=415)
    max_age = request.json.get('max_age', None)
    my_job_id = request.json.get('reporting_job_id', None)
    job_id = request.json.get('job_id', None)
    success = request.json.get('success', None)
    sequential_mode = request.json.get('sequential_mode', None)
    previous_job_id = request.json.get('previous_job_id', None)
    if success is not None:
        success = bool(strtobool(success))
    if sequential_mode is not None:
        sequential_mode = bool(strtobool(sequential_mode))
    if sequential_mode and previous_job_id is None:
        raise InvalidUsage('Missing parameter previous_job_id',
                           status_code=400)
    if (max_age is None or my_job_id is None):
        raise InvalidUsage('Missing parameters', status_code=400)
    # Calculate timestamp as now - max_age; a max_age of 0 disables the
    # age filter entirely.
    if int(max_age) == 0:
        timestamp = 0
    else:
        oldest_time = datetime.now() - timedelta(hours=int(max_age))
        timestamp = time.mktime(oldest_time.timetuple())
    session = getSession(app.config['DB_PATH'])
    # CLEANUP: the previous "try: ... except Exception as e: raise e"
    # wrapper was a no-op and has been removed; getVote exceptions
    # propagate unchanged.
    if sequential_mode:
        # CI pipeline case
        vote = getVote(session, timestamp, success, previous_job_id,
                       fallback=False)
    else:
        # Normal case
        vote = getVote(session, timestamp, success, job_id)

    newvote = CIVote(commit_id=vote.commit_id, ci_name=my_job_id,
                     ci_url='', ci_vote=False, ci_in_progress=True,
                     timestamp=int(time.time()), notes='',
                     user=auth.username())
    session.add(newvote)
    session.commit()

    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.id == vote.commit_id).first()
    result = {'commit_hash': commit.commit_hash,
              'distro_hash': commit.distro_hash,
              'timestamp': newvote.timestamp,
              'job_id': newvote.ci_name,
              'success': newvote.ci_vote,
              'in_progress': newvote.ci_in_progress,
              'user': newvote.user}
    closeSession(session)
    return jsonify(result), 201
def promote():
    """Promote a built repository by creating/updating a named symlink.

    Expected JSON body fields:
        commit_hash: commit hash
        distro_hash: distro hash
        promote_name: symlink name

    Validates the promote name (reserved names and path traversal are
    rejected), points REPO_PATH/promote_name at the commit's sharded repo
    directory, records a Promotion row, and returns the promotion details
    with HTTP 201.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON', status_code=415)
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        promote_name = request.json['promote_name']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)

    # Check for invalid promote names: 'consistent' and 'current' are
    # reserved symlink names managed by DLRN itself
    if (promote_name == 'consistent' or promote_name == 'current'):
        raise InvalidUsage('Invalid promote_name %s' % promote_name,
                           status_code=403)

    config_options = _get_config_options(app.config['CONFIG_FILE'])
    session = getSession(app.config['DB_PATH'])
    # Only successfully built commits may be promoted
    commit = session.query(Commit).filter(
        Commit.status == 'SUCCESS',
        Commit.commit_hash == commit_hash,
        Commit.distro_hash == distro_hash).first()
    if commit is None:
        raise InvalidUsage('commit_hash+distro_hash combination not found',
                           status_code=404)

    target_link = os.path.join(app.config['REPO_PATH'], promote_name)
    # Check for invalid target links, like ../promotename — the resolved
    # parent directory must be REPO_PATH itself
    target_dir = os.path.dirname(os.path.abspath(target_link))
    if not os.path.samefile(target_dir, app.config['REPO_PATH']):
        raise InvalidUsage('Invalid promote_name %s' % promote_name,
                           status_code=403)

    # We should create a relative symlink
    yumrepodir = commit.getshardedcommitdir()

    # Remove symlink if it exists, so we can create it again
    # NOTE(review): remove-then-create is not atomic; a concurrent reader
    # could briefly see no symlink — confirm whether this matters here
    if os.path.lexists(os.path.abspath(target_link)):
        os.remove(target_link)
    try:
        os.symlink(yumrepodir, target_link)
    except Exception as e:
        raise InvalidUsage("Symlink creation failed with error: %s" % e,
                           status_code=500)

    timestamp = time.mktime(datetime.now().timetuple())
    promotion = Promotion(commit_id=commit.id, promotion_name=promote_name,
                          timestamp=timestamp, user=auth.username())
    session.add(promotion)
    session.commit()

    repo_hash = _repo_hash(commit)
    repo_url = "%s/%s" % (config_options.baseurl, yumrepodir)
    result = {'commit_hash': commit_hash,
              'distro_hash': distro_hash,
              'repo_hash': repo_hash,
              'repo_url': repo_url,
              'promote_name': promote_name,
              'timestamp': timestamp,
              'user': auth.username()}
    closeSession(session)
    return jsonify(result), 201
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    """Import commits built by another DLRN instance from a remote repo URL.

    Downloads commit.yaml from repo_url, then for each commit found there:
    skips it if a newer build already exists locally, otherwise downloads the
    build logs and RPM artifacts into the local data directory and records
    the result in the database (post_build for successful commits,
    process_build_result otherwise).

    :param repo_url: base URL of the remote repository to import from
    :param config_file: path to the projects.ini-style config file
    :param db_connection: optional DB connection string overriding the one
                          from the config file
    :param local_info_repo: optional local package-info repo passed through
                            to the pkginfo driver
    :return: 0 on completion
    """
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    # Fetch the remote commit metadata and stage it in a temporary file so
    # it can be parsed by the regular YAML loader
    remote_yaml = repo_url + '/' + 'commit.yaml'
    r = urlopen(remote_yaml)
    contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    fp = os.fdopen(osfd, 'w')
    fp.writelines(contents)
    fp.close()

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        # Reset the primary key so a fresh row is inserted locally, and
        # normalize field types coming from YAML (strings -> numbers)
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue  # Skip

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        # Mirror the standard set of build log files for this commit
        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                r = urlopen(logfile_url)
                contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors, if the remote build failed there may be
                # some missing files
                pass

        # Download the RPM artifacts (comma-separated paths in the commit)
        if commit.artifacts:
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    r = urlopen(rpm_url)
                    contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Here we fire a refresh of the repositories
                # (upstream and distgit) to be sure to have them in the
                # data directory. We need that in the case the worker
                # is running on another host mainly for the
                # submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker might differ so we overwrite them
                # to reflect data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
                process_build_result(status, packages, session, [])
            closeSession(session)  # Keep one session per commit
        logger.debug("Released lock")
    return 0
def teardown_db(exception=None): session = flask_g.pop('db', None) if session is not None: closeSession(session)
def main():
    """Entry point for the DLRN builder CLI.

    Parses command-line options, loads the configuration and package list,
    then depending on the flags either reports package status, rechecks a
    failed package, or builds the pending commits (sequentially or through
    a multiprocessing pool), updating the database and generated reports.

    :return: 0 on success (or nothing to do), non-zero on build failures
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--info-repo',
                        help="use a local rdoinfo repo instead of"
                             " fetching the default one using rdopkg. Only"
                             " applies when pkginfo_driver is rdoinfo in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only commited"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                             "if its last build was successful. Requires setting "
                             "allow_force_rechecks=True in projects.ini. "
                             "Implies --package-name and --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                             "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    # --verbose-mock is a deprecated alias for --verbose-build
    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp)
    # Dev mode works against a throwaway sqlite DB instead of the real one
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    # Resolve --project-name/--package-name into a list of package names
    if options.project_name:
        pkg_names = [p['name'] for p in packages
                     if p['project'] in options.project_name]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    # --status: print the last build status per package/build type and exit
    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(
                    session, name, 'invalid status',
                    type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        # Remove the last commit for a package so it gets rebuilt, unless
        # it was successful (requires force) or already in RETRY state
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        # Full run: gather per-package commit info in parallel
        pool = multiprocessing.Pool()  # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(getinfo, local=options.local,
                                  dev_mode=options.dev,
                                  head_only=options.head_only,
                                  db_connection=config_options.
                                  database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _ = getinfo(package, local=options.local,
                                               dev_mode=options.dev,
                                               head_only=options.head_only,
                                               db_connection=config_options.
                                               database_connection)
                add_commits(project_toprocess)
    closeSession(session)  # Close session, will reopen during post_build

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                    project_name + '.spec')
            speclist.append(sh.rpmspec('-D', 'repo_bootstrap 1',
                                       '-P', specpath))

            # Check if repo_bootstrap is defined in the package.
            # If so, we'll need to rebuild after the whole bootstrap exercise
            rawspec = open(specpath).read(-1)
            if 'repo_bootstrap' in rawspec:
                bootstraplist.append(project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                _a = orders.index(a.project_name)
                _b = orders.index(b.project_name)
            # cmp is no longer available in python3 so replace it. See Ordering
            # Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        # Sequential build: one commit at a time in this process
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    # NOTE(review): 'failures' is only assigned here when not
                    # in --run mode; presumably --run never reaches the
                    # failures= keyword below with it unset — verify
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages,
                        session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)

                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    # Clean up the temporary dev-mode database
    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
def purge():
    """Entry point for the dlrn-purge command.

    Removes build data older than --older-than days from the data directory
    and flags the corresponding commits as purged in the database. The most
    recent successful build of each project keeps its RPM files (other,
    non-purged symlinks may point at them); commits located in --exclude-dirs
    are skipped entirely. With --dry-run, the planned removals are only
    logged and the database is left untouched.
    """
    parser = argparse.ArgumentParser()
    # Some of the non-positional arguments are required, so change the text
    # saying "optional arguments" to just "arguments":
    parser._optionals.title = 'arguments'

    parser.add_argument('--config-file',
                        help="Config file (required)",
                        required=True)
    parser.add_argument('--older-than',
                        help="Purge builds older than provided value"
                             " (in days).", required=True)
    parser.add_argument('-y', help="Answer \"yes\" to any questions",
                        action="store_true")
    parser.add_argument('--dry-run', help="Do not change anything, show"
                        " what changes would be made",
                        action="store_true")
    # Typo fix: help text previously read "specifided"
    parser.add_argument('--exclude-dirs', help="Do not remove commits whose"
                        " packages are included in one of the specified"
                        " directories (comma-separated list).")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])
    setup_logging(options.debug)

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    timeparsed = datetime.now() - timedelta(days=int(options.older_than))

    # Interactive confirmation unless -y was given
    if options.y is False:
        ans = input(("Remove all data before %s, correct? [N/y] " %
                     timeparsed.ctime()))
        if ans.lower() != "y":
            return

    session = getSession(cp.get('DEFAULT', 'database_connection'))

    # To remove builds we have to start at a point in time and move backwards
    # builds with no build date are also purged as these are legacy
    # All repositories can have the repodata directory and symlinks purged
    # But we must keep the rpm files of the most recent successful build of
    # each project as other symlinks not being purged will be pointing to them.
    topurge = getCommits(session,
                         limit=0,
                         before=int(mktime(timeparsed.timetuple()))
                         ).all()

    fullpurge = []
    for commit in topurge:
        if commit.flags & FLAG_PURGED:
            continue

        if is_commit_in_dirs(commit, options.exclude_dirs):
            # The commit RPMs are in one of the directories
            # that should not be touched.
            logger.info("Ignoring commit %s for %s, it is in one of the"
                        " excluded directories" % (commit.id,
                                                   commit.project_name))
            continue

        datadir = os.path.join(cp.get('DEFAULT', 'datadir'), "repos",
                               commit.getshardedcommitdir())
        if commit.project_name not in fullpurge and commit.status == "SUCCESS":
            # So we have not removed any commit from this project yet, and it
            # is successful. Is it the newest one?
            previouscommits = getCommits(session,
                                         project=commit.project_name,
                                         since=commit.dt_build,
                                         with_status='SUCCESS').count()

            if previouscommits == 0:
                logger.info("Keeping old commit for %s" % commit.project_name)
                continue  # this is the newest commit for this project, keep it

            # Not the newest: remove repodata/symlinks but keep real RPM files
            try:
                for entry in os.listdir(datadir):
                    entry = os.path.join(datadir, entry)
                    if entry.endswith(".rpm") and not os.path.islink(entry):
                        continue
                    if os.path.isdir(entry):
                        logger.info("Remove %s" % entry)
                        if options.dry_run is False:
                            shutil.rmtree(entry)
                    else:
                        logger.info("Delete %s" % entry)
                        if options.dry_run is False:
                            os.unlink(entry)
            except OSError:
                logger.warning("Cannot access directory %s for purge,"
                               " ignoring." % datadir)
            fullpurge.append(commit.project_name)
            commit.flags |= FLAG_PURGED
            logger.info("Remove %s" % datadir)
            if options.dry_run is False:
                shutil.rmtree(datadir, ignore_errors=True)
        else:
            # If the commit was not successful, we need to be careful not to
            # remove the directory if there was a successful build
            if commit.status != "SUCCESS":
                othercommits = session.query(Commit).filter(
                    Commit.project_name == commit.project_name,
                    Commit.commit_hash == commit.commit_hash,
                    Commit.status == 'SUCCESS').count()

                if othercommits == 0:
                    logger.info("Remove %s" % datadir)
                    if options.dry_run is False:
                        shutil.rmtree(datadir, ignore_errors=True)
            else:
                logger.info("Remove %s" % datadir)
                if options.dry_run is False:
                    shutil.rmtree(datadir, ignore_errors=True)
            commit.flags |= FLAG_PURGED
    if options.dry_run is False:
        session.commit()
    closeSession(session)
def main():
    """Entry point for the DLRN builder CLI (config-override variant).

    Parses command-line options (including --config-override), loads the
    configuration and package list, then depending on the flags either
    reports package status, rechecks a failed package, or builds the pending
    commits (sequentially or through a multiprocessing pool). Packages
    skipped by the pkginfo driver are recorded in repos/skiplist.txt.

    :return: 0 on success (or nothing to do), non-zero on build failures
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--config-file', default='projects.ini',
                        help="Config file. Default: projects.ini")
    parser.add_argument('--config-override', action='append',
                        help="Override a configuration option from the"
                             " config file. Specify it as: "
                             "section.option=value. Can be used multiple "
                             "times if more than one override is needed.")
    parser.add_argument('--info-repo',
                        help="use a local distroinfo repo instead of"
                             " fetching the default one. Only applies when"
                             " pkginfo_driver is rdoinfo or downstream in"
                             " projects.ini")
    parser.add_argument('--build-env', action='append',
                        help="Variables for the build environment.")
    parser.add_argument('--local', action="store_true",
                        help="Use local git repos if possible. Only commited"
                             " changes in the local repo will be used in the"
                             " build.")
    parser.add_argument('--head-only', action="store_true",
                        help="Build from the most recent Git commit only.")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--project-name', action='append',
                       help="Build a specific project name only."
                            " Use multiple times to build more than one "
                            "project in a run.")
    group.add_argument('--package-name', action='append',
                       help="Build a specific package name only."
                            " Use multiple times to build more than one "
                            "package in a run.")
    parser.add_argument('--dev', action="store_true",
                        help="Don't reset packaging git repo, force build "
                             "and add public master repo for dependencies "
                             "(dev mode).")
    parser.add_argument('--log-commands', action="store_true",
                        help="Log the commands run by dlrn.")
    parser.add_argument('--use-public', action="store_true",
                        help="Use the public master repo for dependencies "
                             "when doing install verification.")
    parser.add_argument('--order', action="store_true",
                        help="Compute the build order according to the spec "
                             "files instead of the dates of the commits. "
                             "Implies --sequential.")
    parser.add_argument('--sequential', action="store_true",
                        help="Run all actions sequentially, regardless of the"
                             " number of workers specified in projects.ini.")
    parser.add_argument('--status', action="store_true",
                        help="Get the status of packages.")
    parser.add_argument('--recheck', action="store_true",
                        help="Force a rebuild for a particular package. "
                             "Implies --package-name")
    parser.add_argument('--force-recheck', action="store_true",
                        help="Force a rebuild for a particular package, even "
                             "if its last build was successful. Requires setting "
                             "allow_force_rechecks=True in projects.ini. "
                             "Implies --package-name and --recheck")
    parser.add_argument('--version', action='version',
                        version=version.version_info.version_string())
    parser.add_argument('--run',
                        help="Run a program instead of trying to build. "
                             "Implies --head-only")
    parser.add_argument('--stop', action="store_true",
                        help="Stop on error.")
    parser.add_argument('--verbose-build', action="store_true",
                        help="Show verbose output during the package build.")
    parser.add_argument('--verbose-mock', action="store_true",
                        help=argparse.SUPPRESS)
    parser.add_argument('--no-repo', action="store_true",
                        help="Do not generate a repo with all the built "
                             "packages.")
    parser.add_argument('--debug', action='store_true',
                        help="Print debug logs")

    options = parser.parse_args(sys.argv[1:])

    setup_logging(options.debug)

    # --verbose-mock is a deprecated alias for --verbose-build
    if options.verbose_mock:
        logger.warning('The --verbose-mock command-line option is deprecated.'
                       ' Please use --verbose-build instead.')
        options.verbose_build = options.verbose_mock
    global verbose_build
    verbose_build = options.verbose_build

    cp = configparser.RawConfigParser()
    cp.read(options.config_file)

    if options.log_commands is True:
        logging.getLogger("sh.command").setLevel(logging.INFO)
    if options.order is True:
        options.sequential = True

    config_options = ConfigOptions(cp, overrides=options.config_override)
    # Dev mode works against a throwaway sqlite DB instead of the real one
    if options.dev:
        _, tmpdb_path = tempfile.mkstemp()
        logger.info("Using file %s for temporary db" % tmpdb_path)
        config_options.database_connection = "sqlite:///%s" % tmpdb_path

    session = getSession(config_options.database_connection)
    pkginfo_driver = config_options.pkginfo_driver
    global pkginfo
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=options.info_repo,
                                   tags=config_options.tags,
                                   dev_mode=options.dev)

    # Resolve --project-name/--package-name into a list of package names
    if options.project_name:
        pkg_names = [
            p['name'] for p in packages
            if p['project'] in options.project_name
        ]
    elif options.package_name:
        pkg_names = options.package_name
    else:
        pkg_names = None

    # --status: print the last build status per package/build type and exit
    if options.status is True:
        if not pkg_names:
            pkg_names = [p['name'] for p in packages]
        for name in pkg_names:
            package = [p for p in packages if p['name'] == name][0]
            for build_type in package.get('types', ['rpm']):
                commit = getLastProcessedCommit(session, name,
                                                'invalid status',
                                                type=build_type)
                if commit:
                    print("{:>9}".format(build_type), name, commit.status)
                else:
                    print("{:>9}".format(build_type), name, 'NO_BUILD')
        sys.exit(0)

    if pkg_names:
        pkg_name = pkg_names[0]
    else:
        pkg_name = None

    def recheck_commit(commit, force):
        # Remove the last commit for a package so it gets rebuilt, unless
        # it was successful (requires force) or already in RETRY state
        if commit.status == 'SUCCESS':
            if not force:
                logger.error(
                    "Trying to recheck an already successful commit,"
                    " ignoring. If you want to force it, use --force-recheck"
                    " and set allow_force_rechecks=True in projects.ini")
                sys.exit(1)
            else:
                logger.info("Forcefully rechecking a successfully built "
                            "commit for %s" % commit.project_name)
        elif commit.status == 'RETRY':
            # In this case, we are going to retry anyway, so
            # do nothing and exit
            logger.warning("Trying to recheck a commit in RETRY state,"
                           " ignoring.")
            sys.exit(0)
        # We could set the status to RETRY here, but if we have gone
        # beyond max_retries it wouldn't work as expected. Thus, our
        # only chance is to remove the commit
        session.delete(commit)
        session.commit()
        sys.exit(0)

    if options.recheck is True:
        if not pkg_name:
            logger.error('Please use --package-name or --project-name '
                         'with --recheck.')
            sys.exit(1)
        if options.force_recheck and config_options.allow_force_rechecks:
            force_recheck = True
        else:
            force_recheck = False
        package = [p for p in packages if p['name'] == pkg_name][0]
        for build_type in package.get('types', ['rpm']):
            commit = getLastProcessedCommit(session, pkg_name,
                                            type=build_type)
            if commit:
                recheck_commit(commit, force_recheck)
            else:
                logger.error("There are no existing commits for package %s",
                             pkg_name)
                sys.exit(1)

    # when we run a program instead of building we don't care about
    # the commits, we just want to run once per package
    if options.run:
        options.head_only = True
    # Build a list of commits we need to process
    toprocess = []
    skipped_list = []

    def add_commits(project_toprocess):
        # The first entry in the list of commits is a commit we have
        # already processed, we want to process it again only if in dev
        # mode or distro hash has changed, we can't simply check
        # against the last commit in the db, as multiple commits can
        # have the same commit date
        for commit_toprocess in project_toprocess:
            if options.dev is True or \
               options.run or \
               not session.query(Commit).filter(
                   Commit.commit_hash == commit_toprocess.commit_hash,
                   Commit.distro_hash == commit_toprocess.distro_hash,
                   Commit.extended_hash == commit_toprocess.extended_hash,
                   Commit.type == commit_toprocess.type,
                   Commit.status != "RETRY").all():
                toprocess.append(commit_toprocess)

    if not pkg_name and not pkg_names:
        # Full run: gather per-package commit info in parallel
        pool = multiprocessing.Pool()  # This will use all the system cpus
        # Use functools.partial to iterate on the packages to process,
        # while keeping a few options fixed
        getinfo_wrapper = partial(
            getinfo, local=options.local,
            dev_mode=options.dev,
            head_only=options.head_only,
            db_connection=config_options.database_connection)
        iterator = pool.imap(getinfo_wrapper, packages)
        while True:
            try:
                project_toprocess, updated_pkg, skipped = iterator.next()
                for package in packages:
                    if package['name'] == updated_pkg['name']:
                        if package['upstream'] == 'Unknown':
                            package['upstream'] = updated_pkg['upstream']
                            logger.debug(
                                "Updated upstream for package %s to %s",
                                package['name'], package['upstream'])
                        break
                if skipped:
                    skipped_list.append(updated_pkg['name'])
                add_commits(project_toprocess)
            except StopIteration:
                break
        pool.close()
        pool.join()
    else:
        for package in packages:
            if package['name'] in pkg_names:
                project_toprocess, _, skipped = getinfo(
                    package, local=options.local,
                    dev_mode=options.dev,
                    head_only=options.head_only,
                    db_connection=config_options.database_connection)
                if skipped:
                    skipped_list.append(package['name'])
                add_commits(project_toprocess)
    closeSession(session)  # Close session, will reopen during post_build

    # Store skip list
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(os.path.join(datadir, 'repos')):
        os.makedirs(os.path.join(datadir, 'repos'))
    with open(os.path.join(datadir, 'repos', 'skiplist.txt'), 'w') as fp:
        for pkg in skipped_list:
            fp.write(pkg + '\n')

    # Check if there is any commit at all to process
    if len(toprocess) == 0:
        if not pkg_name:
            # Use a shorter message if this was a full run
            logger.info("No commits to build.")
        else:
            logger.info("No commits to build. If this is not expected, please"
                        " make sure the package name(s) are correct, and that "
                        "any failed commit you want to rebuild has been "
                        "removed from the database.")
        return 0

    # if requested do a sort according to build and install
    # dependencies
    if options.order is True:
        # collect info from all spec files
        logger.info("Reading rpm spec files")
        projects = sorted([c.project_name for c in toprocess])

        speclist = []
        bootstraplist = []
        for project_name in projects:
            # Preprocess spec if needed
            pkginfo.preprocess(package_name=project_name)

            # The spec file name may not match the project name, so look
            # for any .spec file in the distgit directory
            filename = None
            for f in os.listdir(pkginfo.distgit_dir(project_name)):
                if f.endswith('.spec'):
                    filename = f
            if filename:
                specpath = os.path.join(pkginfo.distgit_dir(project_name),
                                        filename)
                speclist.append(
                    sh.rpmspec('-D', 'repo_bootstrap 1', '-P', specpath))

                # Check if repo_bootstrap is defined in the package.
                # If so, we'll need to rebuild after the whole bootstrap
                rawspec = open(specpath).read(-1)
                if 'repo_bootstrap' in rawspec:
                    bootstraplist.append(project_name)
            else:
                logger.warning("Could not find a spec for package %s" %
                               project_name)

        logger.debug("Packages to rebuild: %s" % bootstraplist)

        specs = RpmSpecCollection([RpmSpecFile(spec)
                                  for spec in speclist])
        # compute order according to BuildRequires
        logger.info("Computing build order")
        orders = specs.compute_order()
        # hack because the package name is not consistent with the directory
        # name and the spec file name
        if 'python-networking_arista' in orders:
            orders.insert(orders.index('python-networking_arista'),
                          'python-networking-arista')

        # sort the commits according to the score of their project and
        # then use the timestamp of the commits as a secondary key
        def my_cmp(a, b):
            if a.project_name == b.project_name:
                _a = a.dt_commit
                _b = b.dt_commit
            else:
                # Projects missing from the computed order sort last
                _a = orders.index(a.project_name) if a.project_name in \
                    orders else sys.maxsize
                _b = orders.index(b.project_name) if b.project_name in \
                    orders else sys.maxsize
            # cmp is no longer available in python3 so replace it. See Ordering
            # Comparisons on:
            # https://docs.python.org/3.0/whatsnew/3.0.html
            return (_a > _b) - (_a < _b)

        toprocess.sort(key=cmp_to_key(my_cmp))
    else:
        # sort according to the timestamp of the commits
        toprocess.sort()

    exit_code = 0
    if options.sequential is True:
        # Sequential build: one commit at a time in this process
        toprocess_copy = deepcopy(toprocess)
        for commit in toprocess:
            status = build_worker(packages, commit, run_cmd=options.run,
                                  build_env=options.build_env,
                                  dev_mode=options.dev,
                                  use_public=options.use_public,
                                  order=options.order, sequential=True)
            exception = status[3]
            consistent = False
            datadir = os.path.realpath(config_options.datadir)
            with lock_file(os.path.join(datadir, 'remote.lck')):
                session = getSession(config_options.database_connection)
                if exception is not None:
                    logger.error("Received exception %s" % exception)
                    failures = 1
                else:
                    # NOTE(review): 'failures' is only assigned here when not
                    # in --run mode; presumably --run never reaches the
                    # failures= keyword below with it unset — verify
                    if not options.run:
                        failures = post_build(status, packages, session,
                                              build_repo=not options.no_repo)
                        consistent = (failures == 0)
                exit_value = process_build_result(status, packages, session,
                                                  toprocess_copy,
                                                  dev_mode=options.dev,
                                                  run_cmd=options.run,
                                                  stop=options.stop,
                                                  build_env=options.build_env,
                                                  head_only=options.head_only,
                                                  consistent=consistent,
                                                  failures=failures)
                closeSession(session)

            if exit_value != 0:
                exit_code = exit_value
            if options.stop and exit_code != 0:
                return exit_code
    else:
        # Setup multiprocessing pool
        pool = multiprocessing.Pool(config_options.workers)
        # Use functools.partial to iterate on the commits to process,
        # while keeping a few options fixed
        build_worker_wrapper = partial(build_worker, packages,
                                       run_cmd=options.run,
                                       build_env=options.build_env,
                                       dev_mode=options.dev,
                                       use_public=options.use_public,
                                       order=options.order, sequential=False)
        iterator = pool.imap(build_worker_wrapper, toprocess)

        while True:
            try:
                status = iterator.next()
                exception = status[3]
                consistent = False
                datadir = os.path.realpath(config_options.datadir)
                with lock_file(os.path.join(datadir, 'remote.lck')):
                    session = getSession(config_options.database_connection)
                    if exception is not None:
                        logger.info("Received exception %s" % exception)
                        failures = 1
                    else:
                        # Create repo, build versions.csv file.
                        # This needs to be sequential
                        if not options.run:
                            failures = post_build(
                                status, packages, session,
                                build_repo=not options.no_repo)
                            consistent = (failures == 0)
                    exit_value = process_build_result(
                        status, packages,
                        session, toprocess,
                        dev_mode=options.dev,
                        run_cmd=options.run,
                        stop=options.stop,
                        build_env=options.build_env,
                        head_only=options.head_only,
                        consistent=consistent,
                        failures=failures)
                    closeSession(session)

                if exit_value != 0:
                    exit_code = exit_value
                if options.stop and exit_code != 0:
                    return exit_code
            except StopIteration:
                break
        pool.close()
        pool.join()

    # If we were bootstrapping, set the packages that required it to RETRY
    session = getSession(config_options.database_connection)
    if options.order is True and not pkg_name:
        for bpackage in bootstraplist:
            commit = getLastProcessedCommit(session, bpackage)
            commit.status = 'RETRY'
            session.add(commit)
            session.commit()
    genreports(packages, options.head_only, session, [])
    closeSession(session)

    # Clean up the temporary dev-mode database
    if options.dev:
        os.remove(tmpdb_path)

    return exit_code
def import_commit(repo_url, config_file, db_connection=None,
                  local_info_repo=None):
    """Import remotely-built commits into the local DLRN instance.

    Fetches ``commit.yaml`` from *repo_url*, and for each commit listed
    there downloads its build logs and RPM artifacts into the local data
    directory, then replays the post-build bookkeeping (repo creation and
    database update) as if the build had happened locally.

    :param repo_url: base URL of the remote repo dir (must serve
        ``commit.yaml``, the log files and the RPMs).
    :param config_file: path to the projects.ini-style config file.
    :param db_connection: optional DB connection string; falls back to
        the one from the config file when not given.
    :param local_info_repo: optional local path of the packaging info
        repo, forwarded to the pkginfo driver.
    :return: 0 on completion (errors for individual files are logged and
        tolerated, see below).
    """
    cp = configparser.RawConfigParser()
    cp.read(config_file)
    config_options = ConfigOptions(cp)
    # The pkginfo driver is configurable; it provides the package list and
    # per-project metadata used later for post_build/process_build_result.
    pkginfo_driver = config_options.pkginfo_driver
    pkginfo = import_object(pkginfo_driver, cfg_options=config_options)
    packages = pkginfo.getpackages(local_info_repo=local_info_repo,
                                   tags=config_options.tags,
                                   dev_mode=False)

    # Download the remote commit.yaml and write it to a temp file, since
    # loadYAML_list() reads from a filename.
    remote_yaml = repo_url + '/' + 'commit.yaml'
    with closing(urlopen(remote_yaml)) as r:
        contents = map(lambda x: x.decode('utf8'), r.readlines())

    osfd, tmpfilename = mkstemp()
    with os.fdopen(osfd, 'w') as fp:
        fp.writelines(contents)

    commits = loadYAML_list(tmpfilename)
    os.remove(tmpfilename)
    datadir = os.path.realpath(config_options.datadir)
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    for commit in commits:
        # Reset the primary key so the commit is inserted as a new row in
        # the local database, and normalize field types coming from YAML
        # (they arrive as strings).
        commit.id = None
        if commit.artifacts == 'None':
            commit.artifacts = None
        commit.dt_build = int(commit.dt_build)
        commit.dt_commit = float(commit.dt_commit)
        commit.dt_distro = int(commit.dt_distro)
        # Check if the latest built commit for this project is newer
        # than this one. In that case, we should ignore it
        if db_connection:
            session = getSession(db_connection)
        else:
            session = getSession(config_options.database_connection)
        package = commit.project_name
        old_commit = getLastProcessedCommit(session, package)
        if old_commit:
            if old_commit.dt_commit >= commit.dt_commit:
                if old_commit.dt_distro >= commit.dt_distro:
                    logger.info('Skipping commit %s, a newer commit is '
                                'already built\n'
                                'Old: %s %s, new: %s %s' %
                                (commit.commit_hash, old_commit.dt_commit,
                                 old_commit.dt_distro, commit.dt_commit,
                                 commit.dt_distro))
                    continue    # Skip
                    # NOTE(review): the session opened above is not closed
                    # on this skip path — presumably harmless, but verify.

        yumrepodir = os.path.join(datadir, "repos",
                                  commit.getshardedcommitdir())
        if not os.path.exists(yumrepodir):
            os.makedirs(yumrepodir)

        # Mirror the remote build logs into the local repo dir.
        for logfile in ['build.log', 'installed', 'mock.log', 'root.log',
                        'rpmbuild.log', 'state.log']:
            logfile_url = repo_url + '/' + logfile
            try:
                with closing(urlopen(logfile_url)) as r:
                    contents = map(lambda x: x.decode('utf8'), r.readlines())
                with open(os.path.join(yumrepodir, logfile), "w") as fp:
                    fp.writelines(contents)
            except urllib.error.HTTPError:
                # Ignore errors, if the remote build failed there may be
                # some missing files
                pass

        if commit.artifacts:
            # Artifacts is a comma-separated list of RPM paths; the files
            # are fetched by basename from the remote repo dir.
            for rpm in commit.artifacts.split(","):
                rpm_url = repo_url + '/' + rpm.split('/')[-1]
                try:
                    with closing(urlopen(rpm_url)) as r:
                        contents = r.read()
                    with open(os.path.join(datadir, rpm), "wb") as fp:
                        fp.write(contents)
                except urllib.error.HTTPError:
                    if rpm != 'None':
                        logger.warning("Failed to download rpm file %s"
                                       % rpm_url)
        # Get remote update lock, to prevent any other remote operation
        # while we are creating the repo and updating the database
        logger.debug("Acquiring remote update lock")
        with lock_file(os.path.join(datadir, 'remote.lck')):
            logger.debug("Acquired lock")
            if commit.status == 'SUCCESS':
                # Successful build: run the normal post-build step (repo
                # creation, versions.csv, DB update).
                built_rpms = []
                for rpm in commit.artifacts.split(","):
                    built_rpms.append(rpm)
                status = [commit, built_rpms, commit.notes, None]
                post_build(status, packages, session)
            else:
                pkg = [p for p in packages if p['name'] == package][0]
                # Here we fire a refresh of the repositories
                # (upstream and distgit) to be sure to have them in the
                # data directory. We need that in the case the worker
                # is running on another host mainly for the
                # submit_review.sh script.
                pkginfo.getinfo(project=pkg["name"], package=pkg,
                                since='-1', local=False, dev_mode=False)
                # Paths on the worker might differ so we overwrite them
                # to reflect data path on the local API host.
                commit.distgit_dir = pkginfo.distgit_dir(pkg['name'])
                commit.repo_dir = os.path.join(
                    config_options.datadir, pkg['name'])
                status = [commit, '', '', commit.notes]
                process_build_result(status, packages, session, [])
            closeSession(session)  # Keep one session per commit
        logger.debug("Released lock")
    return 0
def promotions_GET():
    """Return promotion records as a JSON list.

    Expects a JSON request body with the following optional keys:
        commit_hash:  commit hash to filter on (requires distro_hash)
        distro_hash:  distro hash to filter on (requires commit_hash)
        promote_name: only report promotions for this promotion name
        offset:       skip the first X promotions
        limit:        max number of promotions returned (capped at max_limit)

    Raises InvalidUsage with status 415 for non-JSON requests, 400 when
    only one of commit_hash/distro_hash is given, and 404 when the
    commit_hash+distro_hash combination is unknown.
    """
    if request.headers['Content-Type'] != 'application/json':
        raise InvalidUsage('Unsupported Media Type, use JSON',
                           status_code=415)

    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    promote_name = request.json.get('promote_name', None)
    offset = request.json.get('offset', 0)
    limit = request.json.get('limit', 100)
    config_options = _get_config_options(app.config['CONFIG_FILE'])

    # Make sure we do not exceed the maximum allowed page size
    if limit > max_limit:
        limit = max_limit

    if ((commit_hash and not distro_hash) or
            (distro_hash and not commit_hash)):
        raise InvalidUsage('Both commit_hash and distro_hash must be '
                           'specified if any of them is.',
                           status_code=400)

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    if commit_hash and distro_hash:
        commit = session.query(Commit).filter(
            Commit.status == 'SUCCESS',
            Commit.commit_hash == commit_hash,
            Commit.distro_hash == distro_hash).first()
        if commit is None:
            # Fix: close the session before bailing out; previously it
            # leaked on this error path.
            closeSession(session)
            raise InvalidUsage('commit_hash+distro_hash combination not found',
                               status_code=404)
        commit_id = commit.id
    else:
        commit_id = None

    # Now find the promotions, and filter if necessary
    promotions = session.query(Promotion)
    if commit_id is not None:
        promotions = promotions.filter(Promotion.commit_id == commit_id)
    if promote_name is not None:
        promotions = promotions.filter(
            Promotion.promotion_name == promote_name)
    promotions = promotions.order_by(desc(Promotion.timestamp)).limit(limit).\
        offset(offset)

    # And format the output
    data = []
    for promotion in promotions:
        commit = getCommits(
            session, limit=0).filter(Commit.id == promotion.commit_id).first()
        repo_hash = _repo_hash(commit)
        repo_url = "%s/%s" % (config_options.baseurl,
                              commit.getshardedcommitdir())
        d = {'timestamp': promotion.timestamp,
             'commit_hash': commit.commit_hash,
             'distro_hash': commit.distro_hash,
             'repo_hash': repo_hash,
             'repo_url': repo_url,
             'promote_name': promotion.promotion_name,
             'user': promotion.user}
        data.append(d)

    closeSession(session)
    return jsonify(data)
def promote():
    """Promote a built commit by creating a named symlink to its repo.

    Expects a JSON request body with mandatory keys:
        commit_hash:  commit hash
        distro_hash:  distro hash
        promote_name: name of the symlink to create

    Returns the promotion details as JSON with status 201 on success.
    Raises InvalidUsage with 400 (missing parameters), 403 (reserved or
    path-escaping promote_name), 404 (unknown commit), 410 (commit
    purged) or 500 (symlink creation failure).
    """
    try:
        commit_hash = request.json['commit_hash']
        distro_hash = request.json['distro_hash']
        promote_name = request.json['promote_name']
    except KeyError:
        raise InvalidUsage('Missing parameters', status_code=400)

    # Check for invalid promote names: 'consistent' and 'current' are
    # reserved link names
    if (promote_name == 'consistent' or promote_name == 'current'):
        raise InvalidUsage('Invalid promote_name %s' % promote_name,
                           status_code=403)

    config_options = _get_config_options(app.config['CONFIG_FILE'])
    session = getSession(app.config['DB_PATH'])
    # Fix: run the session-scoped work under try/finally so the session
    # is also closed on the error paths (404/410/403/500), where it
    # previously leaked.
    try:
        commit = _get_commit(session, commit_hash, distro_hash)
        if commit is None:
            raise InvalidUsage('commit_hash+distro_hash combination not found',
                               status_code=404)

        # If the commit has been purged, do not move on
        if commit.flags & FLAG_PURGED:
            raise InvalidUsage('commit_hash+distro_hash has been purged, '
                               'cannot promote it', status_code=410)

        target_link = os.path.join(app.config['REPO_PATH'], promote_name)
        # Check for invalid target links, like ../promotename
        target_dir = os.path.dirname(os.path.abspath(target_link))
        if not os.path.samefile(target_dir, app.config['REPO_PATH']):
            raise InvalidUsage('Invalid promote_name %s' % promote_name,
                               status_code=403)

        # We should create a relative symlink
        yumrepodir = commit.getshardedcommitdir()

        # Remove symlink if it exists, so we can create it again
        if os.path.lexists(os.path.abspath(target_link)):
            os.remove(target_link)
        try:
            os.symlink(yumrepodir, target_link)
        except Exception as e:
            raise InvalidUsage("Symlink creation failed with error: %s" % e,
                               status_code=500)

        timestamp = time.mktime(datetime.now().timetuple())
        promotion = Promotion(commit_id=commit.id,
                              promotion_name=promote_name,
                              timestamp=timestamp,
                              user=auth.username())
        session.add(promotion)
        session.commit()

        repo_hash = _repo_hash(commit)
        repo_url = "%s/%s" % (config_options.baseurl, yumrepodir)
        result = {'commit_hash': commit_hash,
                  'distro_hash': distro_hash,
                  'repo_hash': repo_hash,
                  'repo_url': repo_url,
                  'promote_name': promote_name,
                  'timestamp': timestamp,
                  'user': auth.username()}
        return jsonify(result), 201
    finally:
        closeSession(session)
def promotions_GET():
    """Return promotion records as a JSON list.

    Expects a JSON request body with the following optional keys:
        commit_hash:  commit hash to filter on (requires distro_hash)
        distro_hash:  distro hash to filter on (requires commit_hash)
        promote_name: only report promotions for this promotion name
        offset:       skip the first X promotions
        limit:        max number of promotions returned (capped at max_limit)

    Raises InvalidUsage with status 400 when only one of
    commit_hash/distro_hash is given, and 404 when the
    commit_hash+distro_hash combination is unknown.
    """
    commit_hash = request.json.get('commit_hash', None)
    distro_hash = request.json.get('distro_hash', None)
    promote_name = request.json.get('promote_name', None)
    offset = request.json.get('offset', 0)
    limit = request.json.get('limit', 100)
    config_options = _get_config_options(app.config['CONFIG_FILE'])

    # Make sure we do not exceed the maximum allowed page size
    if limit > max_limit:
        limit = max_limit

    if ((commit_hash and not distro_hash) or
            (distro_hash and not commit_hash)):
        raise InvalidUsage('Both commit_hash and distro_hash must be '
                           'specified if any of them is.',
                           status_code=400)

    # Find the commit id for commit_hash/distro_hash
    session = getSession(app.config['DB_PATH'])
    if commit_hash and distro_hash:
        commit = _get_commit(session, commit_hash, distro_hash)
        if commit is None:
            # Fix: close the session before bailing out; previously it
            # leaked on this error path.
            closeSession(session)
            raise InvalidUsage('commit_hash+distro_hash combination not found',
                               status_code=404)
        commit_id = commit.id
    else:
        commit_id = None

    # Now find the promotions, and filter if necessary
    promotions = session.query(Promotion)
    if commit_id is not None:
        promotions = promotions.filter(Promotion.commit_id == commit_id)
    if promote_name is not None:
        promotions = promotions.filter(
            Promotion.promotion_name == promote_name)
    promotions = promotions.order_by(desc(Promotion.timestamp)).limit(limit).\
        offset(offset)

    # And format the output
    data = []
    for promotion in promotions:
        commit = getCommits(session, limit=0).filter(
            Commit.id == promotion.commit_id).first()
        repo_hash = _repo_hash(commit)
        repo_url = "%s/%s" % (config_options.baseurl,
                              commit.getshardedcommitdir())
        d = {'timestamp': promotion.timestamp,
             'commit_hash': commit.commit_hash,
             'distro_hash': commit.distro_hash,
             'repo_hash': repo_hash,
             'repo_url': repo_url,
             'promote_name': promotion.promotion_name,
             'user': promotion.user}
        data.append(d)

    closeSession(session)
    return jsonify(data)