# Example #1
def regenerate_repositories(db_info, repo_info):
    """
    Run through all project documents found in the build database, extract
    remote info from each and checkout/update bare repositories for each
    project.

    :param db_info: connection settings passed to cbdatabase_db.CouchbaseDB
    :param repo_info: dict with a 'repo_basedir' entry naming the directory
        under which each project's bare repository is created
    """

    db = cbdatabase_db.CouchbaseDB(db_info)
    repo_base_dir = pathlib.Path(repo_info['repo_basedir'])
    repo_cache = cbutil_git.RepoCache()

    # Create base directory, if needed
    os.makedirs(repo_base_dir, exist_ok=True)

    for proj in db.query_documents('project', simple=True):
        proj_name = proj['name']
        logger.info(f'Creating {proj_name} repository...')
        project_data = db.get_document(f'project:{proj_name}')

        # Iterate the remotes mapping directly rather than re-indexing
        # project_data['remotes'][remote] on every inner pass
        for remote, urls in project_data['remotes'].items():
            for url in urls:
                print(f'    Adding remote {url} for {proj_name}...')
                try:
                    repo_cache.get_repo(proj_name, repo_base_dir / proj_name,
                                        remote, url)
                except (dulwich.errors.GitProtocolError,
                        urllib.error.HTTPError):
                    # Best effort: a dead remote should not abort the run
                    print(f'        Remote {url} no longer valid, skipping')
    def __init__(self, db_info, repo_info, email_info):
        """
        Set up database, repository and email state for this instance.

        :param db_info: connection settings for cbdatabase_db.CouchbaseDB
        :param repo_info: dict with a 'repo_basedir' entry for local repos
        :param email_info: dict with 'smtp_server' and 'receivers' entries
        """

        # Database connection plus the product/version index derived from it
        self.db = cbdatabase_db.CouchbaseDB(db_info)
        self.prod_ver_index = self.db.get_product_version_index()

        # Local repository handling
        self.repo_base_path = pathlib.Path(repo_info['repo_basedir'])
        self.repo_cache = cbutil_git.RepoCache()

        # Email notification settings
        self.smtp_server = email_info['smtp_server']
        self.receivers = email_info['receivers']

        # Per-run state, populated while processing
        self.initial_data = None
        self.first_prod_ver_build = False
        self.project = None
 def __init__(self, db_info, dryrun):
     """
     Connect to the build database and Jira, and record run options.

     :param db_info: connection settings for cbdatabase_db.CouchbaseDB
     :param dryrun: when True, the run should make no changes
     """
     self.dryrun = dryrun
     self.db = cbdatabase_db.CouchbaseDB(db_info)
     # Public Couchbase Jira instance used to look up referenced tickets
     self.jira = JIRA({'server': 'https://issues.couchbase.com/'})
     # Matches Jira ticket keys such as "MB-12345" anywhere in a string
     self.ticket_re = re.compile(r'(\b[A-Z]+-\d+\b)')
# Example #4
def main():
    """
    Parse the command line arguments, handle configuration setup,
    load in the product data and step through the build documents
    in the build database to determine which have completed builds
    or not.
    """

    parser = argparse.ArgumentParser(
        description='Update documents in build database')
    parser.add_argument('-c',
                        '--config',
                        dest='check_build_config',
                        help='Configuration file for build database loader',
                        default='check_builds.ini')
    parser.add_argument('metadata_dir',
                        type=Path,
                        help='Path to product-metadata directory')
    parser.add_argument('-n',
                        '--dryrun',
                        action='store_true',
                        help="Only check, don't update database or send email")
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help="Enable additional debug output")
    args = parser.parse_args()

    if args.verbose:
        logger.setLevel(logging.DEBUG)
    dryrun = args.dryrun
    metadata_dir = args.metadata_dir
    if not metadata_dir.exists():
        logger.error(f'product-metadata path {metadata_dir} does not exist')
        sys.exit(1)

    # Check configuration file information
    check_build_config = configparser.ConfigParser()
    check_build_config.read(args.check_build_config)

    if any(key not in check_build_config
           for key in ['build_db', 'missing_builds']):
        logger.error(
            f'Invalid or unable to read config file {args.check_build_config}')
        sys.exit(1)

    db_info = check_build_config['build_db']
    db_required_keys = ['db_uri', 'username', 'password']

    if any(key not in db_info for key in db_required_keys):
        logger.error(
            f'One of the following DB keys is missing in the config file:\n'
            f'    {", ".join(db_required_keys)}')
        sys.exit(1)

    miss_info = check_build_config['missing_builds']
    miss_required_keys = [
        'receivers', 'lb_base_dir', 'lb_base_url', 'smtp_server'
    ]

    if any(key not in miss_info for key in miss_required_keys):
        # Fixed copy-paste: this message previously said "DB keys" even
        # though it reports the [missing_builds] section's required keys
        logger.error(
            f'One of the following missing_builds keys is missing in the '
            f'config file:\n'
            f'    {", ".join(miss_required_keys)}')
        sys.exit(1)

    # Find builds whose completion status has not yet been determined
    db = cbutil_db.CouchbaseDB(db_info)
    builds = db.query_documents(
        'build',
        where_clause="ifmissingornull(metadata.builds_complete, 'n/a')='n/a'")

    # Go through builds and based on age and whether certain metadata
    # values (builds_complete and email_notification) are set, determine
    # proper course of action.  The basic process is as follows:
    #   - Get age of build
    #   - If build age is over 28 days old, simply mark as unknown
    #     (files already gone from latestbuilds)
    #   - If the product isn't in the product config data, skip
    #   - Generate necessary file list, then get current file list from
    #     latestbuilds (mounted via NFS)
    #   - Check to see if any files in needed list aren't in current list:
    #      - If not, mark build complete and continue
    #      - Else if there are and build age is over 2 hours, check to
    #        see if email's been sent previously and send notification
    #        if not, marking email as sent
    #      - And if there are and build age is also over 12 hours, mark
    #        as incomplete and continue
    for build in builds:
        build_age = int(time.time()) - build.timestamp

        if build_age > 28 * 24 * 60 * 60:  # 28 days
            # Files are already gone from latestbuilds; status unknowable
            if not dryrun:
                build.set_metadata('builds_complete', 'unknown')
            continue

        template_dir = metadata_dir / build.product / "check_builds"
        if not template_dir.exists():
            logger.debug(f"Skipping build for unknown product {build.product}")
            continue

        prodver_path = f'{build.product}/{build.release}/{build.build_num}'
        lb_dir = f'{miss_info["lb_base_dir"]}/{prodver_path}/'
        lb_url = f'{miss_info["lb_base_url"]}/{prodver_path}/'

        # Exactly one pkg_data template (YAML-Jinja2 or JSON) must exist
        templates = list(
            filter(lambda x: x.name.endswith(('.yaml.j2', '.json')),
                   template_dir.glob("pkg_data.*")))
        if len(templates) < 1:
            logger.error(f"Product {build.product} has no pkg_data templates")
            sys.exit(1)
        if len(templates) > 1:
            logger.error(
                f"Found multiple possible pkg_data files for {build.product}!")
            sys.exit(1)
        logger.debug(f"Using template {templates[0]} for {build.product}")

        logger.info(
            f"***** Checking {build.product} {build.release} build {build.version}-{build.build_num} ({build_age} seconds old)"
        )

        needed_files = generate_filelist(build.product, build.release,
                                         build.version, build.build_num,
                                         templates[0])
        try:
            existing_files = set(os.listdir(lb_dir))
        except FileNotFoundError:
            # Build directory not created yet: treat as nothing uploaded
            existing_files = set()
        missing_files = list(needed_files.difference(existing_files))

        if not missing_files:
            logger.info("All expected files found - build complete!")
            if not dryrun:
                build.set_metadata('builds_complete', 'complete')
            continue

        if build_age > 2 * 60 * 60:  # 2 hours
            logger.info("Still incomplete after 2 hours; missing files:")
            for missing in missing_files:
                logger.info(f"    - {missing}")
            # Plain read; the old setdefault() needlessly mutated metadata
            if not build.metadata.get('email_notification', False):
                curr_bld = \
                    f'{build.product}-{build.version}-{build.build_num}'
                message = {
                    'subject': f'Build {curr_bld} not complete after 2 hours',
                    'body': generate_mail_body(lb_url, missing_files)
                }
                receivers = miss_info['receivers'].split(',')
                send_email(miss_info['smtp_server'], receivers, message,
                           dryrun)
                if not dryrun:
                    build.set_metadata('email_notification', True)
            else:
                logger.info("Email previously sent")
        else:
            logger.info("Incomplete but less than 2 hours old")

        if build_age > 12 * 60 * 60:  # 12 hours
            logger.info("Build incomplete after 12 hours - marking incomplete")
            if not dryrun:
                build.set_metadata('builds_complete', 'incomplete')
    def __init__(self, db_info, repo_info):
        """
        Basic initialization.

        :param db_info: connection settings for cbdatabase_db.CouchbaseDB
        :param repo_info: dict with a 'repo_basedir' entry naming the
            local repository base directory
        """

        # Build-database connection used for document lookups
        self.db = cbdatabase_db.CouchbaseDB(db_info)
        # Base directory under which project repositories live
        self.repo_base_path = pathlib.Path(repo_info['repo_basedir'])