def main():
    """
    Build OpenAFS binaries from a source tree.

    Runs the regen.sh/configure/make pipeline in ``projectdir`` (or an
    out-of-tree ``builddir``), optionally cleaning first.  Collects the
    configured sysname, installation directories, and built kernel modules,
    stages transarc-style/destdir file trees when requested, writes results
    to ``<logdir>/results.json``, and finishes via ``module.exit_json()``.
    """
    global log
    results = dict(
        changed=False,
        msg='',
        projectdir=None,
        logfiles=[],
        sysname='',
        kmods=[],
        install_dirs={},
    )
    global module
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str'),  # Not used.
            projectdir=dict(type='path', required=True),
            builddir=dict(type='path'),
            logdir=dict(type='path'),
            destdir=dict(type='path'),

            # Processing options
            make=dict(type='path'),
            clean=dict(type='bool', default=False),
            jobs=dict(type='int', fallback=(cpu_count, [])),

            # Build options.
            with_version=dict(type='str', default=None, aliases=['version']),
            with_transarc_paths=dict(type='bool',
                                     default=False,
                                     aliases=['transarc_paths']),
            with_debug_symbols=dict(type='bool', default=True),
            # BUGFIX: was `defaults=False` (typo); the argument-spec key
            # AnsibleModule recognizes is `default`, so the old spelling
            # was silently ignored and the option had no default value.
            with_rxgk=dict(type='bool', default=False),

            # What to build.
            build_manpages=dict(type='bool',
                                default=True,
                                aliases=['manpages']),
            build_userspace=dict(type='bool', default=True),
            build_module=dict(type='bool', default=True),
            build_terminal_programs=dict(type='bool', default=True),
            build_fuse_client=dict(type='bool', default=True),
            build_bindings=dict(type='bool', default=True),

            # Explicit configure and target options.
            configure_options=dict(type='raw', default=None),
            configure_environment=dict(type='dict', default=None),
            target=dict(type='str', default=None),
        ),
        supports_check_mode=False,
    )
    log = Logger(module_name)
    log.info('Starting %s', module_name)

    projectdir = module.params['projectdir']
    builddir = module.params['builddir']
    logdir = module.params['logdir']
    clean = module.params['clean']
    make = module.params['make']
    build_manpages = module.params['build_manpages']
    with_version = module.params['with_version']
    configure_environment = module.params['configure_environment']

    if not (os.path.exists(projectdir) and os.path.isdir(projectdir)):
        module.fail_json(msg='projectdir directory not found: %s' % projectdir)
    results['projectdir'] = os.path.abspath(projectdir)

    # Find `make` if not specified.
    if not make:
        make = module.get_bin_path('make', required=True)

    #
    # Setup build logging.
    #
    if not logdir:
        logdir = os.path.join(projectdir, '.ansible')
    if not os.path.isdir(logdir):
        os.makedirs(logdir)
        results['changed'] = True
    results['logdir'] = logdir

    #
    # Setup paths.
    #
    if builddir:
        builddir = abspath(projectdir, builddir)
    else:
        builddir = projectdir
    results['builddir'] = builddir
    log.debug("builddir='%s'", builddir)

    gitdir = os.path.abspath(os.path.join(projectdir, '.git'))
    if not (os.path.exists(gitdir) and os.path.isdir(gitdir)):
        gitdir = None
    log.debug("gitdir='%s'.", gitdir)

    #
    # Setup environment.
    #
    solariscc = lookup_fact('solariscc')
    if solariscc:
        os.environ['SOLARISCC'] = solariscc
        os.environ['UT_NO_USAGE_TRACKING'] = '1'
        os.environ['SUNW_NO_UPDATE_NOTIFY'] = '1'

    #
    # Clean previous build.
    #
    if clean and gitdir:
        clean_command = [
            module.get_bin_path('git', required=True),
            'clean',
            '-f',
            '-d',
            '-x',
            '--exclude=.ansible',
        ]
        log.info('Running git clean.')
        run_command('clean', clean_command, projectdir, logdir, results)

    #
    # Clean out of tree build files.
    #
    if clean and builddir != projectdir and os.path.exists(builddir):
        if builddir == '/':
            module.fail_json(msg='Refusing to remove "/" builddir!')
        log.info('Removing old build directory %s' % builddir)
        shutil.rmtree(builddir)

    #
    # Setup build directory. (This must be done after the clean step.)
    #
    if not os.path.isdir(builddir):
        log.info('Creating build directory %s' % builddir)
        os.makedirs(builddir)

    #
    # Set the version string, if supplied.
    #
    if with_version:
        version_file = os.path.join(projectdir, '.version')
        log.info('Writing version %s to file %s' %
                 (with_version, version_file))
        with open(version_file, 'w') as f:
            f.write(with_version)

    #
    # Report the version string. This is read from the .version file if
    # present, otherwise it is generated from `git describe`.
    #
    cwd = os.path.join(builddir, 'build-tools')
    rc, out, err = module.run_command(['./git-version', builddir], cwd=cwd)
    if rc != 0:
        log.info('Unable to determine version string.')
    else:
        results['version'] = out

    #
    # Run autoconf.
    #
    if os.path.exists(os.path.join(projectdir, 'configure')):
        log.info('Skipping regen.sh: configure found.')
    else:
        regen_command = [os.path.join(projectdir, 'regen.sh')]
        if not build_manpages:
            regen_command.append('-q')
        run_command('regen', regen_command, projectdir, logdir, results)

    #
    # Run configure.
    #
    run_command('configure',
                configure_command(module, results),
                builddir,
                logdir,
                results,
                extra_env=configure_environment)
    results['sysname'] = configured_sysname(builddir)
    log.info("configured sysname is '%s'.", results['sysname'])

    #
    # Get installation directories.
    #
    with open(os.path.join(builddir, '.Makefile.dirs'), 'w') as f:
        f.write(MAKEFILE_PATHS)
    rc, out, err = module.run_command([make, '-f', '.Makefile.dirs'],
                                      cwd=builddir)
    if rc != 0:
        module.fail_json(msg='Failed to find installation directories: %s' %
                         err)
    for line in out.splitlines():
        line = line.rstrip()
        if '=' in line:
            name, value = line.split('=', 1)
            if value.startswith('//'):
                # Cleanup leading double slashes.
                value = value.replace('//', '/', 1)
            results['install_dirs'][name] = value

    #
    # Run make clean if we did not run git clean.
    #
    if clean and not gitdir:
        run_command('make', [make, 'clean'], builddir, logdir, results)

    #
    # Run make.
    #
    run_command('make', make_command(module, results), builddir, logdir,
                results)

    #
    # `make` may silently fail to build a kernel module for the running kernel
    # version (or any version). Let's fail early instead of finding out later
    # when we try to start the cache manager.
    #
    if is_linux():
        results['kmods'] = find_kmods_linux(builddir)
    elif is_solaris():
        results['kmods'] = find_kmods_solaris(builddir)
    else:
        log.warning('Unable to find kernel modules; unknown platform %s' %
                    platform.system())
    if is_kmod_expected(results):
        if is_linux():
            kmod_check_linux(module, results)
        elif is_solaris():
            kmod_check_solaris(module, results)

    #
    # Copy the transarc-style distribution tree into a DESTDIR file tree
    # for installation.
    #
    # NOTE(review): 'destdir' and 'target' are not copied into `results`
    # here; presumably configure_command()/make_command() add them —
    # confirm, otherwise these staging blocks never run.
    #
    if 'destdir' in results and 'target' in results and \
            results['target'] in ('dest', 'dest_nolibafs', 'dest_only_libafs'):
        log.info('Copying transarc-style distribution files to %s' %
                 results['destdir'])
        sysname = configured_sysname(builddir)
        if not sysname:
            module.fail_json(msg='Unable to get destdir; sysname not found.')
        dest = os.path.join(builddir, sysname, 'dest')
        if not os.path.isdir(dest):
            module.fail_json(msg='Missing dest directory: %s' % dest)
        copy_tree(dest, results['destdir'])
        results['changed'] = True

    #
    # Copy security key utilities to a standard location.
    #
    if 'destdir' in results and 'target' in results and \
            results['target'] in ('install', 'install_nolibafs',
                                  'dest', 'dest_nolibafs'):
        log.info('Copying security key utilities to %s' % results['destdir'])
        for p in ('asetkey', 'akeyconvert'):
            src = os.path.join(builddir, 'src', 'aklog', p)
            dst = os.path.join(results['destdir'], 'usr', 'sbin')
            if os.path.isfile(src):
                if not os.path.isdir(dst):
                    os.makedirs(dst)
                log.debug('shutil.copy2("%s", "%s")' % (src, dst))
                shutil.copy2(src, dst)
                results['changed'] = True

    #
    # Save configured build paths in a meta-data file for installation.
    #
    if 'destdir' in results and 'target' in results and \
            results['target'] in ('install', 'install_nolibafs'):
        filename = os.path.join(results['destdir'], '.build-info.json')
        build_info = {'dirs': results['install_dirs']}
        with open(filename, 'w') as f:
            f.write(json.dumps(build_info, indent=4))

    log.debug('Results: %s' % pprint.pformat(results))
    results['msg'] = 'Build completed'
    log.info(results['msg'])

    #
    # Save results.
    #
    with open(os.path.join(logdir, 'results.json'), 'w') as f:
        f.write(json.dumps(results, indent=4))

    module.exit_json(**results)
def main():
    """
    Wait for the OpenAFS protection (PRDB) and volume location (VLDB)
    databases to reach quorum.

    Polls both ubik databases with `udebug` until each reports quorum or
    the timeout expires.  On success the per-database status dicts are
    returned in the results; on timeout the module fails or merely warns
    depending on `fail_on_timeout`.
    """
    results = dict(
        changed=False,
    )
    module = AnsibleModule(
            argument_spec=dict(
                timeout=dict(type='int', default=600),
                delay=dict(type='int', default=0),
                sleep=dict(type='int', default=20),
                fail_on_timeout=dict(type='bool', default=False)
            ),
            supports_check_mode=False,
    )
    log = Logger(module_name)
    log.info('Starting %s', module_name)

    timeout = module.params['timeout']
    delay = module.params['delay']
    sleep = module.params['sleep']
    fail_on_timeout = module.params['fail_on_timeout']

    # Sanitize timing parameters rather than failing; a sleep of at least
    # one second prevents a busy polling loop.
    if delay < 0:
        log.warning('Ignoring negative delay parameter.')
        delay = 0
    if sleep < 1:
        log.warning('Ignoring out of range sleep parameter.')
        sleep = 1

    def lookup_command(name):
        """
        Lookup an OpenAFS command from local facts file. Try the PATH
        if not found in the local facts.
        """
        try:
            with open('/etc/ansible/facts.d/openafs.fact') as f:
                facts = json.load(f)
            cmd = facts['bins'][name]
        except Exception:
            # Missing/unreadable facts file or missing key; fall back to
            # searching the PATH.
            cmd = module.get_bin_path(name)
        if not cmd:
            module.fail_json(msg='Unable to locate %s command.' % name)
        return cmd

    def check_quorum(port):
        """
        Run udebug to check for quorum.

        Parses the udebug output for the given ubik port on localhost and
        returns a status dict with at least 'port' and 'quorum' keys.
        Quorum is reported when this host is the sync site with recovery
        flags '1f' or 'f', or when a remote sync site address is known.
        """
        status = {'port': port, 'quorum': False}
        udebug = lookup_command('udebug')
        args = [udebug, '-server', 'localhost', '-port', str(port)]
        log.info('Running: %s', ' '.join(args))
        rc, out, err = module.run_command(args)
        log.debug("Ran udebug: rc=%d, out=%s, err=%s", rc, out, err)
        if rc != 0:
            # Treat a udebug failure as "no quorum yet"; the caller retries.
            log.warning("Failed udebug: rc=%d, out=%s, err=%s", rc, out, err)
            return status
        status['udebug'] = out
        # Scan line-by-line; each regex anchors at the start of a line.
        for line in out.splitlines():
            m = re.match(r'I am sync site', line)
            if m:
                status['sync'] = True
                log.info('Local host is sync site.')
                continue
            m = re.match(r'Recovery state ([0-9a-f]+)', line)
            if m:
                status['flags'] = m.group(1)
                continue
            m = re.match(r'Sync host (\S+) was set \d+ secs ago', line)
            if m:
                # 0.0.0.0 means no sync host has been elected yet.
                if m.group(1) != '0.0.0.0':
                    status['sync_host'] = m.group(1)
                    log.info('Remote host is sync site: %s',
                             status['sync_host'])
                continue
            m = re.match(r"Sync site's db version is (\d+)\.(\d+)", line)
            if m:
                status['db_version'] = (int(m.group(1)), int(m.group(2)))
                continue
        # Check recovery flags if this is the sync site, otherwise check for a
        # remote sync site address.
        if 'sync' in status and status['sync']:
            if 'flags' in status and status['flags'] in ('1f', 'f'):
                status['quorum'] = True
        elif 'sync_host' in status:
            status['quorum'] = True
        return status

    #
    # Wait for PRDB and VLDB quorum.
    #
    if delay:
        time.sleep(delay)
    now = int(time.time())
    expires = now + timeout
    retries = 0
    while True:
        # 7002 is the ptserver (PRDB), 7003 the vlserver (VLDB).
        pr = check_quorum(7002)
        vl = check_quorum(7003)
        if pr['quorum'] and vl['quorum']:
            log.info('Databases have quorum.')
            results['pr'] = pr
            results['vl'] = vl
            break
        now = int(time.time())
        if now > expires:
            if fail_on_timeout:
                log.error('Timeout expired.')
                module.fail_json(msg='Timeout expired.')
            else:
                # Best-effort mode: report what we have and move on.
                log.warning('Timeout expired.')
                break
        log.info('Will retry in %d seconds.' % sleep)
        time.sleep(sleep)
        retries += 1

    results['retries'] = retries
    log.info('Results: %s', pprint.pformat(results))
    module.exit_json(**results)
def main():
    """
    Wait for this fileserver's UUID to be registered in the VLDB.

    Reads the local sysid file to learn the fileserver UUID, then polls
    `vos listaddrs` until that UUID appears in the VLDB.  If `signal` is
    enabled, a SIGXCPU is sent to the fileserver process after the first
    unsuccessful check to expedite re-registration (the fileserver also
    retries on its own every 5 minutes).  Fails if the timeout expires.
    """
    results = dict(changed=False, )
    module = AnsibleModule(
        argument_spec=dict(timeout=dict(type='int', default=600),
                           delay=dict(type='int', default=0),
                           sleep=dict(type='int', default=20),
                           signal=dict(type='bool', default=True)),
        supports_check_mode=False,
    )
    log = Logger(module_name)
    log.info('Starting %s', module_name)

    timeout = module.params['timeout']
    delay = module.params['delay']
    sleep = module.params['sleep']
    signal = module.params['signal']

    # Sanitize timing parameters rather than failing; a sleep of at least
    # one second prevents a busy polling loop.
    if delay < 0:
        log.warning('Ignoring negative delay parameter.')
        delay = 0
    if sleep < 1:
        log.warning('Ignoring out of range sleep parameter.')
        sleep = 1

    def lookup_command(name):
        """
        Lookup an OpenAFS command from local facts file. Try the PATH
        if not found in the local facts.
        """
        try:
            with open('/etc/ansible/facts.d/openafs.fact') as f:
                facts = json.load(f)
            cmd = facts['bins'][name]
        except Exception as e:
            log.warning("Unable to load facts: %s", e)
            cmd = module.get_bin_path(name)
        if not cmd:
            module.fail_json(msg='Unable to locate %s command.' % name)
        return cmd

    def lookup_directory(name):
        """
        Lookup an OpenAFS directory from the local facts file.
        """
        try:
            with open('/etc/ansible/facts.d/openafs.fact') as f:
                facts = json.load(f)
            dir = facts['dirs'][name]
        except Exception as e:
            log.warning("Unable to load facts: %s", e)
            module.fail_json(msg='Unable to locate %s directory.' % name)
        return dir

    def run_command(args, done=None, retry=None):
        """
        Run an afs command with retries.

        `done(rc, out, err)` decides success; `retry(rc, out, err)` decides
        whether a failure is transient.  Appends '-localauth' to the command
        and fails the module after 120 unsuccessful retries.
        """
        def _done(rc, out, err):
            return rc == 0

        def _retry(rc, out, err):
            # BUGFIX: was "reponding" (typo), which never matched the real
            # OpenAFS error text, so this transient error was never retried.
            if "server or network not responding" in err:
                return True
            if "no quorum elected" in err:
                return True
            if "invalid RPC (RX) operation" in err:
                return True  # May occur during server startup.
            if "Couldn't read/write the database" in err:
                return True  # May occur during server startup.
            if "no such entry" in err:
                return True  # Retry not found!
            return False

        if done is None:
            done = _done
        if retry is None:
            retry = _retry

        args.append('-localauth')
        cmdline = ' '.join(args)
        retries = 120
        while True:
            log.debug('Running: %s', cmdline)
            rc, out, err = module.run_command(args)
            log.debug('Ran: %s, rc=%d, out=%s, err=%s', cmdline, rc, out, err)
            if done(rc, out, err):
                return out
            if retries == 0 or not retry(rc, out, err):
                log.error("Failed: %s, rc=%d, err=%s", cmdline, rc, err)
                # BUGFIX: fail_json() takes keyword arguments; the old code
                # passed a positional dict, producing a malformed failure
                # result instead of the intended fields.
                module.fail_json(msg='Command failed.',
                                 cmdline=cmdline,
                                 rc=rc,
                                 out=out,
                                 err=err)
            log.warning("Failed: %s, rc=%d, err=%s; %d retr%s left.", cmdline,
                        rc, err, retries, ('ies' if retries > 1 else 'y'))
            retries -= 1
            time.sleep(5)

    def vos_listaddrs():
        """
        Retrieve the server uuid and addresses from the VLDB.

        Returns a list of dicts, each with 'uuid' and 'addrs' keys, parsed
        from `vos listaddrs -noresolve -printuuid` output.
        """
        def done(rc, out, err):
            return rc == 0

        def retry(rc, out, err):
            # BUGFIX: was "reponding" (typo); see run_command._retry above
            # for the same fix.
            if "server or network not responding" in err:
                return True
            if "no quorum elected" in err:
                return True
            if "invalid RPC (RX) operation" in err:
                return True  # May occur during server startup.
            if "Couldn't read/write the database" in err:
                return True  # May occur during server startup.
            return False

        vos = lookup_command('vos')
        out = run_command([vos, 'listaddrs', '-noresolve', '-printuuid'],
                          done=done,
                          retry=retry)
        servers = []
        uuid = None
        addrs = []
        for line in out.splitlines():
            m = re.match(r'UUID: (\S+)', line)
            if m:
                uuid = UUID.parse(m.group(1))
                addrs = []
                continue
            m = re.match(r'(\S+)', line)
            if m:
                addrs.append(m.group(1))
                continue
            m = re.match(r'$', line)
            if m:
                # Records are terminated with a blank line.
                servers.append(dict(uuid=uuid, addrs=addrs))
                uuid = None
                addrs = []
        log.debug("servers=%s", servers)
        return servers

    def lookup_uuid():
        """
        Retrieve the fileserver UUID value from the sysid file created
        by the fileserver process.
        """
        path = os.path.join(lookup_directory('afslocaldir'), 'sysid')
        if not os.path.exists(path):
            # The sysid file is created by the fileserver process.
            log.info("Waiting for sysid file '%s'.", path)
            return None
        log.debug("Reading sysid file '%s'.", path)
        sysid = Sysid(path)
        log.debug('sysid=%s', sysid)
        return sysid.uuid

    def lookup_bnode():
        """
        Lookup the active fileserver bnode name; 'fs', 'dafs', or None.
        """
        path = os.path.join(lookup_directory('afsbosconfigdir'), 'BosConfig')
        log.debug("Reading BosConfig file '%s'.", path)
        with open(path) as f:
            bosconfig = f.read()
        bnodes = re.findall(r'bnode (fs|dafs) \S+ 1', bosconfig)
        if len(bnodes) == 0:
            log.warning('No active fileserver bnodes found in BosConfig.')
            return None
        if len(bnodes) > 1:
            log.warning('Too many fileserver bnodes found in BosConfig.')
            return None
        bnode = bnodes[0]
        log.debug('fileserver bnode is %s', bnode)
        return bnode

    def lookup_pid():
        """
        Lookup the fileserver process pid or return None if not found.
        """
        bnode = lookup_bnode()
        if not bnode:
            return None
        path = os.path.join(lookup_directory('afslocaldir'),
                            '%s.file.pid' % bnode)
        try:
            log.debug("Reading pid file '%s'.", path)
            with open(path) as f:
                pid = int(f.read())
        except IOError as e:
            log.warning("Unable to read pid file '%s'; %s", path, e)
            return None
        except ValueError as e:
            log.warning("Unable to convert pid file '%s' contents to int; %s",
                        path, e)
            return None
        log.debug('fileserver pid is %d', pid)
        return pid

    #
    # Wait for VLDB registration. We check for our uuid in the VLDB, and if not
    # present, send a signal to the fileserver to expedite the registration.
    # The fileserver will retry to register every 5 minutes as well.
    #
    if delay:
        time.sleep(delay)
    now = int(time.time())
    expires = now + timeout
    retries = 0
    while True:
        uuid = lookup_uuid()
        if uuid:
            servers = vos_listaddrs()
            registered_uuids = [s['uuid'] for s in servers]
            if uuid in registered_uuids:
                results['uuid'] = str(uuid)
                log.info('Fileserver uuid %s is registered.', uuid)
                break
        if signal and retries > 0:
            pid = lookup_pid()
            if pid:
                log.info('Running: kill -XCPU %d', pid)
                module.run_command(['kill', '-XCPU', '%d' % pid])
        now = int(time.time())
        if now > expires:
            log.error('Timeout expired.')
            module.fail_json(msg='Timeout expired')
        log.info('Will retry in %d seconds.' % sleep)
        time.sleep(sleep)
        retries += 1

    results['retries'] = retries
    log.info('Results: %s', pprint.pformat(results))
    module.exit_json(**results)