Example #1
def verify_connectivity():
    """
    Verify connectivity to satellite server
    """
    logger.debug("Verifying Connectivity")
    for item, value in InsightsClient.config.items(APP_NAME):
        if item != 'password' and item != 'proxy' and item != 'systemid':
            logger.debug("%s:%s", item, value)
    ic = InsightsConnection()
    try:
        branch_info = ic.branch_info()
    except requests.ConnectionError as e:
        logger.debug(e)
        logger.debug("Failed to connect to satellite")
        return False
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to parse response from satellite")
        return False

    try:
        remote_leaf = branch_info['remote_leaf']
        return remote_leaf
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to find accurate branch_info")
        return False
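
A successful call returns the branch_info['remote_leaf'] value rather than True, so callers should test identity against False rather than truthiness. A minimal caller sketch (not from the source; assumes the same module-level logger):

remote_leaf = verify_connectivity()
if remote_leaf is False:
    logger.error('Connectivity verification failed; see the debug log for details')
else:
    logger.debug('Connected to satellite; remote_leaf=%s', remote_leaf)
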
Example #2
    def registration_check(self):
        # check local registration record
        if os.path.isfile(constants.registered_file):
            local_record = 'System is registered.'
            with open(constants.registered_file) as reg_file:
                local_record += ' Registered at ' + reg_file.readline()
        else:
            local_record = 'System is NOT registered.'
        if os.path.isfile(constants.unregistered_file):
            with open(constants.unregistered_file) as reg_file:
                local_record += ' Unregistered at ' + reg_file.readline()

        pconn = InsightsConnection(self.config)
        api_reg_status = pconn.api_registration_check()
        if isinstance(api_reg_status, bool):
            if api_reg_status:
                api_record = 'Insights API confirms registration.'
            else:
                api_record = 'Insights API could not be reached to confirm registration status.'
        elif api_reg_status is None:
            api_record = 'Insights API says this machine is NOT registered.'
        else:
            api_record = 'Insights API says this machine was unregistered at ' + api_reg_status

        return [local_record, api_record]
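
The method returns the two records rather than printing them. A hypothetical caller sketch:

import sys

def print_registration_status(client):
    # 'client' is any object exposing registration_check() (hypothetical caller)
    local_record, api_record = client.registration_check()
    sys.stdout.write(local_record + '\n' + api_record + '\n')
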
Example #3
def register():
    """
    Do registration using basic auth
    """
    username = InsightsClient.config.get(APP_NAME, 'username')
    password = InsightsClient.config.get(APP_NAME, 'password')
    authmethod = InsightsClient.config.get(APP_NAME, 'authmethod')
    # TODO validate this is boolean somewhere in config load
    auto_config = InsightsClient.config.getboolean(APP_NAME, 'auto_config')
    if not username and not password and not auto_config and authmethod == 'BASIC':
        print 'Please enter your Red Hat Customer Portal Credentials'
        sys.stdout.write('Username: ')
        username = raw_input().strip()
        password = getpass.getpass()
        sys.stdout.write('Would you like to save these credentials? (y/n) ')
        save = raw_input().strip()
        InsightsClient.config.set(APP_NAME, 'username', username)
        InsightsClient.config.set(APP_NAME, 'password', password)
        logger.debug('savestr: %s', save)
        if save.lower() == 'y' or save.lower() == 'yes':
            logger.debug('Writing user/pass to config')
            cmd = ('/bin/sed -e \'s/^username.*=.*$/username=' + username + '/\' ' +
                   '-e \'s/^password.*=.*$/password=' + password + '/\' ' +
                   constants.default_conf_file)
            status = DataCollector().run_command_get_output(cmd, nolog=True)
            with open(constants.default_conf_file, 'w') as config_file:
                config_file.write(status['output'])
                config_file.flush()
    pconn = InsightsConnection()
    return pconn.register()
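
Shelling out to sed to persist credentials is fragile with respect to quoting and special characters in passwords. A minimal alternative sketch, assuming InsightsClient.config is a stdlib ConfigParser instance; note it rewrites the whole file, comments included, rather than editing two lines in place:

def save_credentials(config, path):
    # Persist the in-memory parser state (username/password were already
    # set above with config.set); replaces the sed-based rewrite.
    with open(path, 'w') as config_file:
        config.write(config_file)
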
Example #4
def register(config, group_id=None):
    """
    Do registration using basic auth
    """
    username = config.get(APP_NAME, 'username')
    password = config.get(APP_NAME, 'password')
    if (username == "" and
            password == "" and
            config.get(APP_NAME, 'authmethod') == 'BASIC' and
            not config.get(APP_NAME, 'auto_config')):
        # Get input from user
        print "Please enter your Red Hat Customer Portal Credentials"
        sys.stdout.write('User Name: ')
        username = raw_input().strip()
        password = getpass.getpass()
        sys.stdout.write("Would you like to save these credentials? (y/n) ")
        save = raw_input().strip()
        config.set(APP_NAME, 'username', username)
        config.set(APP_NAME, 'password', password)
        logger.debug("savestr: %s", save)
        if save.lower() == "y" or save.lower() == "yes":
            logger.debug("writing user/pass to config file")
            cmd = ("/bin/sed -e 's/^username.*=.*$/username="******"/' " +
                   "-e 's/^password.*=.*$/password="******"/' " +
                   constants.default_conf_file)
            status = DataCollector().run_command_get_output(cmd, nolog=True)
            with open(constants.default_conf_file, 'w') as config_file:
                config_file.write(status['output'])
                config_file.flush()

    pconn = InsightsConnection(config)
    return pconn.register(group_id)
Example #5
def register(config, group_id=None):
    """
    Do registration using basic auth
    """
    username = config.get(APP_NAME, 'username')
    password = config.get(APP_NAME, 'password')
    if (username == "" and
            password == "" and
            config.get(APP_NAME, 'authmethod') == 'BASIC' and
            not config.get(APP_NAME, 'auto_config')):
        # Get input from user
        print "Please enter your Red Hat Customer Portal Credentials"
        sys.stdout.write('User Name: ')
        username = raw_input().strip()
        password = getpass.getpass()
        sys.stdout.write("Would you like to save these credentials? (y/n) ")
        save = raw_input().strip()
        config.set(APP_NAME, 'username', username)
        config.set(APP_NAME, 'password', password)
        logger.debug("savestr: %s", save)
        if save.lower() == "y" or save.lower() == "yes":
            logger.debug("writing user/pass to config file")
            cmd = ("/bin/sed -e 's/^username.*=.*$/username="******"/' " +
                   "-e 's/^password.*=.*$/password="******"/' " +
                   constants.default_conf_file)
            status = DataCollector().run_command_get_output(cmd, nolog=True)
            with open(constants.default_conf_file, 'w') as config_file:
                config_file.write(status['output'])
                config_file.flush()

    pconn = InsightsConnection(config)
    return pconn.register(group_id)
Example #6
def verify_connectivity(config):
    """
    Verify connectivity to satellite server
    """
    logger.debug("Verifying Connectivity")
    for item, value in config.items(APP_NAME):
        if item != 'password' and item != 'proxy' and item != 'systemid':
            logger.debug("%s:%s", item, value)
    ic = InsightsConnection(config)
    try:
        branch_info = ic.branch_info()
    except requests.ConnectionError as e:
        logger.debug(e)
        logger.debug("Failed to connect to satellite")
        return False
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to parse response from satellite")
        return False

    try:
        remote_leaf = branch_info['remote_leaf']
        return remote_leaf
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to find accurate branch_info")
        return False
Example #7
def verify_connectivity():
    """
    Verify connectivity to satellite server
    """
    logger.debug("Verifying Connectivity")
    ic = InsightsConnection()
    try:
        branch_info = ic.branch_info()
    except requests.ConnectionError as e:
        logger.debug(e)
        logger.debug("Failed to connect to satellite")
        return False
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to parse response from satellite")
        return False

    try:
        remote_leaf = branch_info['remote_leaf']
        return remote_leaf
    except LookupError as e:
        logger.debug(e)
        logger.debug("Failed to find accurate branch_info")
        return False
Example #8
def register():
    """
    Do registration using basic auth
    """
    username = InsightsClient.config.get(APP_NAME, 'username')
    password = InsightsClient.config.get(APP_NAME, 'password')
    authmethod = InsightsClient.config.get(APP_NAME, 'authmethod')
    # TODO validate this is boolean somewhere in config load
    auto_config = InsightsClient.config.getboolean(APP_NAME, 'auto_config')
    if not username and not password and not auto_config and authmethod == 'BASIC':
        print 'Please enter your Red Hat Customer Portal Credentials'
        sys.stdout.write('Username: ')
        username = raw_input().strip()
        password = getpass.getpass()
        sys.stdout.write('Would you like to save these credentials? (y/n) ')
        save = raw_input().strip()
        InsightsClient.config.set(APP_NAME, 'username', username)
        InsightsClient.config.set(APP_NAME, 'password', password)
        logger.debug('savestr: %s', save)
        if save.lower() == 'y' or save.lower() == 'yes':
            logger.debug('Writing user/pass to config')
            modify_config_file({'username': username, 'password': password})
    pconn = InsightsConnection()
    return pconn.register()
Example #9
def register():
    """
    Do registration using basic auth
    """
    username = InsightsClient.config.get(APP_NAME, 'username')
    password = InsightsClient.config.get(APP_NAME, 'password')
    authmethod = InsightsClient.config.get(APP_NAME, 'authmethod')
    # TODO validate this is boolean somewhere in config load
    auto_config = InsightsClient.config.getboolean(APP_NAME, 'auto_config')
    if not username and not password and not auto_config and authmethod == 'BASIC':
        print 'Please enter your Red Hat Customer Portal Credentials'
        sys.stdout.write('Username: ')
        username = raw_input().strip()
        password = getpass.getpass()
        sys.stdout.write('Would you like to save these credentials? (y/n) ')
        save = raw_input().strip()
        InsightsClient.config.set(APP_NAME, 'username', username)
        InsightsClient.config.set(APP_NAME, 'password', password)
        logger.debug('savestr: %s', save)
        if save.lower() == 'y' or save.lower() == 'yes':
            logger.debug('Writing user/pass to config')
            modify_config_file({'username': username, 'password': password})
    pconn = InsightsConnection()
    return pconn.register()
Example #10
def collect_data_and_upload(config, options):
    """
    All the heavy lifting done here
    """
    pconn = InsightsConnection(config)
    pconn.check_registration()
    branch_info = pconn.branch_info()
    pc = InsightsConfig(config, pconn)
    dc = DataCollector()
    logger.info('Collecting Insights data')
    start = time.clock()
    dynamic_config = pc.get_conf(options.update)
    elapsed = (time.clock() - start)
    logger.debug("Dynamic Config Elapsed Time: %s", elapsed)
    start = time.clock()
    dc.run_commands(dynamic_config)
    elapsed = (time.clock() - start)
    logger.debug("Command Collection Elapsed Time: %s", elapsed)
    start = time.clock()
    dc.copy_files(dynamic_config)
    elapsed = (time.clock() - start)
    logger.debug("File Collection Elapsed Time: %s", elapsed)
    dc.write_branch_info(branch_info)
    obfuscate = config.getboolean(APP_NAME, "obfuscate")

    if not options.no_tar_file:
        tar_file = dc.done(config)
        if not options.no_upload:
            logger.info('Uploading Insights data,'
                        ' this may take a few minutes')
            pconn.upload_archive(tar_file)
            logger.info(
                'Check https://access.redhat.com/labs/insights in an hour')
            if not obfuscate and not options.keep_archive:
                dc.archive.delete_tmp_dir()
            else:
                if obfuscate:
                    logger.info('Obfuscated Insights data retained in %s',
                                os.path.dirname(tar_file))
                else:
                    logger.info('Insights data retained in %s', tar_file)
        else:
            logger.info('See Insights data in %s', tar_file)
    else:
        logger.info('See Insights data in %s', dc.archive.archive_dir)
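
The repeated start/elapsed bookkeeping around each collection phase could be factored into a context manager. A sketch, assuming wall-clock time is what these debug logs intend (time.clock() measures CPU time on most Unix platforms and was removed in Python 3.8):

import time
import logging
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed(label):
    # Log how long the wrapped block took, mirroring the debug lines above.
    start = time.time()
    try:
        yield
    finally:
        logger.debug('%s Elapsed Time: %s', label, time.time() - start)

# usage: with timed('Command Collection'): dc.run_commands(dynamic_config)
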
Example #11
def handle_startup():
    """
    Handle startup options
    """
    # ----do X and exit options----
    # show version and exit
    if InsightsClient.options.version:
        print constants.version
        sys.exit()

    if (InsightsClient.options.container_mode
            and not InsightsClient.options.run_here
            and insights_client_container_is_available()):
        sys.exit(run_in_container())

    if (InsightsClient.options.container_mode
            and not InsightsClient.options.only):
        logger.error(
            "Client running in container mode but no image/container specified via --only."
        )
        sys.exit(1)

    if InsightsClient.options.only is not None and len(
            InsightsClient.options.only) < 12:
        logger.error(
            "Image/Container ID must be at least twelve characters long.")
        sys.exit(1)

    if InsightsClient.options.validate:
        validate_remove_file()
        sys.exit()

    if InsightsClient.options.enable_schedule and InsightsClient.options.disable_schedule:
        logger.error(
            'Conflicting options: --enable-schedule and --disable-schedule')
        sys.exit(1)

    if InsightsClient.options.enable_schedule:
        # enable automatic scheduling
        InsightsSchedule()
        InsightsClient.config.set(APP_NAME, 'no_schedule', False)
        logger.info('Automatic scheduling for Insights has been enabled.')
        logger.debug('Updating config...')
        modify_config_file({'no_schedule': 'False'})
        sys.exit()

    if InsightsClient.options.disable_schedule:
        # disable automatic scheduling
        InsightsSchedule(set_cron=False).remove_scheduling()
        InsightsClient.config.set(APP_NAME, 'no_schedule', True)
        logger.info('Automatic scheduling for Insights has been disabled.')
        logger.debug('Updating config...')
        modify_config_file({'no_schedule': 'True'})
        sys.exit()

    # do auto_config here, for connection-related 'do X and exit' options
    if InsightsClient.config.getboolean(
            APP_NAME, 'auto_config') and not InsightsClient.options.offline:
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration()

    if InsightsClient.options.test_connection:
        pconn = InsightsConnection()
        rc = pconn.test_connection()
        sys.exit(rc)

    if InsightsClient.options.status:
        reg_check = registration_check()
        logger.info('\n'.join(reg_check['messages']))
        # exit with !status, 0 for True, 1 for False
        sys.exit(not reg_check['status'])

    if InsightsClient.options.support:
        support = InsightsSupport()
        support.collect_support_info()
        sys.exit()

    # ----config options----
    # log the config
    # ignore password and proxy -- proxy might have pw
    for item, value in InsightsClient.config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s", item, value)

    if InsightsClient.config.getboolean(
            APP_NAME, 'auto_update') and not InsightsClient.options.offline:
        # TODO: config updates option, but in GPG option, the option updates
        # the config.  make this consistent
        InsightsClient.options.update = True

    # disable automatic scheduling if it was set in the config, and if the job exists
    if InsightsClient.config.getboolean(APP_NAME, 'no_schedule'):
        cron = InsightsSchedule(set_cron=False)
        if cron.already_linked():
            cron.remove_scheduling()
            logger.debug(
                'Automatic scheduling for Insights has been disabled.')

    # ----modifier options----
    if InsightsClient.options.no_gpg:
        logger.warn("WARNING: GPG VERIFICATION DISABLED")
        InsightsClient.config.set(APP_NAME, 'gpg', 'False')

    if InsightsClient.options.just_upload:
        if InsightsClient.options.offline or InsightsClient.options.no_upload:
            logger.error(
                'Cannot use --just-upload in combination with --offline or --no-upload.'
            )
            sys.exit(1)
        # override these for great justice
        InsightsClient.options.no_tar_file = False
        InsightsClient.options.keep_archive = True

    # if InsightsClient.options.container_mode and InsightsClient.options.no_tar_file:
    #    logger.error('Invalid combination: --container and --no-tar-file')
    #    sys.exit(1)

    # can't use bofa
    if InsightsClient.options.from_stdin and InsightsClient.options.from_file:
        logger.error('Can\'t use both --from-stdin and --from-file.')
        sys.exit(1)

    # handle some docker/atomic flags
    if InsightsClient.options.use_docker and InsightsClient.options.use_atomic:
        logger.error('Can\'t use both --use-docker and --use-atomic.')
        sys.exit(1)

    if InsightsClient.options.to_stdout:
        InsightsClient.options.no_upload = True

    # ----register options----
    # put this first to avoid conflicts with register
    if InsightsClient.options.unregister:
        pconn = InsightsConnection()
        pconn.unregister()
        sys.exit()

    # force-reregister -- remove machine-id files and registration files
    # before trying to register again
    new = False
    if InsightsClient.options.reregister:
        new = True
        InsightsClient.options.register = True
        delete_registered_file()
        delete_unregistered_file()
        delete_machine_id()
    logger.debug('Machine-id: %s', generate_machine_id(new))

    if InsightsClient.options.register:
        try_register()
        if not InsightsClient.config.getboolean(APP_NAME, 'no_schedule'):
            InsightsSchedule()

    # check registration before doing any uploads
    # Ignore if in offline mode
    if not InsightsClient.options.register and not InsightsClient.options.offline:
        msg, is_registered = _is_client_registered()
        if not is_registered:
            logger.error(msg)
            sys.exit(1)
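
The pairwise conflict checks scattered through this function follow one pattern and could be table-driven. A hypothetical sketch; the attribute names are assumed to match the options used above:

import sys
import logging

logger = logging.getLogger(__name__)

CONFLICTING_OPTIONS = [
    ('enable_schedule', 'disable_schedule'),
    ('from_stdin', 'from_file'),
    ('use_docker', 'use_atomic'),
]

def check_option_conflicts(options):
    # Exit early if any mutually exclusive pair of flags is set.
    for a, b in CONFLICTING_OPTIONS:
        if getattr(options, a) and getattr(options, b):
            logger.error('Conflicting options: --%s and --%s',
                         a.replace('_', '-'), b.replace('_', '-'))
            sys.exit(1)
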
Example #12
def _main():
    """
    Main entry point
    Parse cmdline options
    Parse config file
    Call data collector
    """
    global logger
    sys.excepthook = handle_exception

    parser = optparse.OptionParser()
    set_up_options(parser)
    options, args = parser.parse_args()
    if len(args) > 0:
        parser.error("Unknown arguments: %s" % args)
        sys.exit(1)

    if options.version:
        print constants.version
        sys.exit()

    if options.validate:
        validate_remove_file()
        sys.exit()

    config = parse_config_file()
    logger, handler = set_up_logging(config, options)

    # Defer logging till it's ready
    logger.debug('invoked with args: %s', options)
    logger.debug("Version: " + constants.version)
    # Generate /etc/machine-id if it does not exist
    new = False
    if options.reregister:
        new = True
        options.register = True
    logger.debug("Machine-ID: " + generate_machine_id(new))

    # Disable GPG verification
    if options.no_gpg:
        logger.warn("WARNING: GPG VERIFICATION DISABLED")
        config.set(APP_NAME, 'gpg', 'False')

    # Log config except the password
    # and proxy as it might have a pw as well
    for item, value in config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s", item, value)

    if config.getboolean(APP_NAME, 'auto_update'):
        options.update = True

    if config.getboolean(APP_NAME, 'auto_config'):
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration(config)

    # Set the schedule
    InsightsSchedule(options)

    # Test connection, useful for proxy debug
    if options.test_connection:
        pconn = InsightsConnection(config)
        pconn.test_connection()

    if options.unregister:
        pconn = InsightsConnection(config)
        pconn.unregister()
        sys.exit()

    # Handle registration, grouping, and display name
    if options.register:
        opt_group = options.group
        hostname, opt_group, display_name = register(config, options)
        if options.display_name is None and options.group is None:
            logger.info('Successfully registered %s', hostname)
        elif options.display_name is None:
            logger.info('Successfully registered %s in group %s', hostname, opt_group)
        else:
            logger.info('Successfully registered %s as %s in group %s', hostname, display_name, opt_group)

    # Check for .unregistered file
    if os.path.isfile(constants.unregistered_file):
        logger.error("This machine has been unregistered")
        logger.error("Use --register if you would like to re-register this machine")
        logger.error("Exiting")
        sys.exit(1)

    # do work
    collect_data_and_upload(config, options)
    handler.doRollover()
Example #13
def collect_data_and_upload(config, options):
    """
    All the heavy lifting done here
    """
    pconn = InsightsConnection(config)
    try:
        branch_info = pconn.branch_info()
    except requests.ConnectionError:
        logger.error("ERROR: Could not connect to determine branch information")
        sys.exit()
    except LookupError:
        logger.error("ERROR: Could not determine branch information")
        sys.exit()
    pc = InsightsConfig(config, pconn)
    dc = DataCollector()
    start = time.clock()
    collection_rules, rm_conf = pc.get_conf(options.update)
    elapsed = (time.clock() - start)
    logger.debug("Collection Rules Elapsed Time: %s", elapsed)
    start = time.clock()
    logger.info('Starting to collect Insights data')
    dc.run_commands(collection_rules, rm_conf)
    elapsed = (time.clock() - start)
    logger.debug("Command Collection Elapsed Time: %s", elapsed)
    start = time.clock()
    dc.copy_files(collection_rules, rm_conf)
    elapsed = (time.clock() - start)
    logger.debug("File Collection Elapsed Time: %s", elapsed)
    dc.write_branch_info(branch_info)
    obfuscate = config.getboolean(APP_NAME, "obfuscate")

    if not options.no_tar_file:
        tar_file = dc.done(config, rm_conf)
        if not options.no_upload:
            logger.info('Uploading Insights data,'
                        ' this may take a few minutes')
            for tries in range(options.retries):
                status = pconn.upload_archive(tar_file)
                if status == 201:
                    logger.info("Upload completed successfully!")
                    break
                else:
                    logger.error("Upload attempt %d of %d failed! Status Code: %s",
                                tries+1, options.retries, status)
                    if tries +1 != options.retries:
                        logger.info("Waiting %d seconds then retrying", constants.sleep_time)
                        time.sleep(constants.sleep_time)
                    else:
                        logger.error("All attempts to upload have failed!")
                        logger.error("Please see %s for additional information", constants.default_log_file)

            if not obfuscate and not options.keep_archive:
                dc.archive.delete_tmp_dir()
            else:
                if obfuscate:
                    logger.info('Obfuscated Insights data retained in %s',
                                os.path.dirname(tar_file))
                else:
                    logger.info('Insights data retained in %s', tar_file)
        else:
            logger.info('See Insights data in %s', tar_file)
    else:
        logger.info('See Insights data in %s', dc.archive.archive_dir)
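
The upload retry loop is self-contained enough to extract into a helper. A sketch that keeps this example's contract of status code 201 meaning success:

import time

def upload_with_retries(pconn, tar_file, retries, sleep_time):
    # Returns True on the first 201 response, False after all attempts fail.
    for attempt in range(1, retries + 1):
        if pconn.upload_archive(tar_file) == 201:
            return True
        if attempt < retries:
            time.sleep(sleep_time)
    return False
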
Example #14
    def _support_diag_dump(self):
        '''
        Collect log info for debug
        '''
        # check insights config
        cfg_block = []

        pconn = InsightsConnection()
        logger.info('Insights version: %s', get_nvr())

        reg_check = registration_check(pconn)
        cfg_block.append('Registration check:')
        for key in reg_check:
            cfg_block.append(key + ': ' + str(reg_check[key]))

        lastupload = 'never'
        if os.path.isfile(constants.lastupload_file):
            with open(constants.lastupload_file) as upl_file:
                lastupload = upl_file.readline().strip()
        cfg_block.append('\nLast successful upload was ' + lastupload)

        cfg_block.append('auto_config: ' + str(config['auto_config']))
        if config['proxy']:
            obfuscated_proxy = re.sub(r'(.*)(:)(.*)(@.*)', r'\1\2********\4',
                                      config['proxy'])
        else:
            obfuscated_proxy = 'None'
        cfg_block.append('proxy: ' + obfuscated_proxy)

        logger.info('\n'.join(cfg_block))
        logger.info('python-requests: %s', requests.__version__)

        succ = pconn.test_connection()
        if succ == 0:
            logger.info('Connection test: PASS\n')
        else:
            logger.info('Connection test: FAIL\n')

        # run commands
        commands = [
            'uname -a', 'cat /etc/redhat-release', 'env', 'sestatus',
            'subscription-manager identity',
            'systemctl cat insights-client.timer',
            'systemctl cat insights-client.service',
            'systemctl status insights-client.timer',
            'systemctl status insights-client.service'
        ]
        for cmd in commands:
            logger.info("Running command: %s", cmd)
            try:
                proc = Popen(shlex.split(cmd),
                             shell=False,
                             stdout=PIPE,
                             stderr=STDOUT,
                             close_fds=True)
                stdout, stderr = proc.communicate()
            except OSError as o:
                if 'systemctl' not in cmd:
                    # suppress output for systemctl cmd failures
                    logger.info('Error running command "%s": %s', cmd, o)
            except Exception as e:
                # unknown error
                logger.info("Process failed: %s", e)
            else:
                # only log output when communicate() succeeded;
                # otherwise 'stdout' would be unbound here
                logger.info("Process output: \n%s", stdout)
        # check available disk space for /var/tmp
        tmp_dir = '/var/tmp'
        dest_dir_stat = os.statvfs(tmp_dir)
        dest_dir_size = (dest_dir_stat.f_bavail * dest_dir_stat.f_frsize)
        logger.info(
            'Available space in %s:\t%s bytes\t%.1f 1K-blocks\t%.1f MB',
            tmp_dir, dest_dir_size, dest_dir_size / 1024.0,
            (dest_dir_size / 1024.0) / 1024.0)
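
The trailing disk-space arithmetic generalizes to a small helper: f_bavail is the number of blocks available to unprivileged processes and f_frsize is the fragment size in bytes, so their product is the usable free space:

import os

def free_bytes(path):
    # Bytes available to non-root callers on the filesystem holding 'path'.
    st = os.statvfs(path)
    return st.f_bavail * st.f_frsize
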
Example #15
def collect_data_and_upload(rc=0):
    """
    All the heavy lifting done here
    Run through "targets" - could be just one (host, default) or many (containers+host)
    """
    # initialize collection targets
    # for now we do either containers OR host -- not both at same time
    if InsightsClient.options.container_mode:
        targets = get_targets()
        targets = targets + constants.default_target
    else:
        targets = constants.default_target

    if InsightsClient.options.offline:
        logger.warning("Assuming remote branch and leaf value of -1")
        pconn = None
        branch_info = constants.default_branch_info
    else:
        pconn = InsightsConnection()

    # TODO: change these err msgs to be more meaningful, i.e.
    # "could not determine login information"
    if pconn:
        try:
            branch_info = pconn.branch_info()
        except requests.ConnectionError:
            branch_info = handle_branch_info_error(
                "Could not connect to determine branch information")
        except LookupError:
            branch_info = handle_branch_info_error(
                "Could not determine branch information")
    pc = InsightsConfig(pconn)

    if InsightsClient.options.just_upload:
        if not os.path.exists(InsightsClient.options.just_upload):
            logger.error('No file %s', InsightsClient.options.just_upload)
            return 1
        tar_file = InsightsClient.options.just_upload
        rc = _do_upload(pconn, tar_file, 'dummy', 0)
        return rc

    # load config from stdin/file if specified
    try:
        stdin_config = {}
        if InsightsClient.options.from_file:
            with open(InsightsClient.options.from_file, 'r') as f:
                stdin_config = json.load(f)
        elif InsightsClient.options.from_stdin:
            stdin_config = json.load(sys.stdin)
        if ((InsightsClient.options.from_file or InsightsClient.options.from_stdin) and
            ('uploader.json' not in stdin_config or
             'sig' not in stdin_config)):
            raise ValueError
    except:
        logger.error('ERROR: Invalid config for %s! Exiting...',
                     ('--from-file' if InsightsClient.options.from_file else '--from-stdin'))
        sys.exit(1)

    start = time.clock()
    collection_rules, rm_conf = pc.get_conf(InsightsClient.options.update, stdin_config)
    collection_elapsed = (time.clock() - start)
    logger.debug("Rules configuration loaded. Elapsed time: %s", collection_elapsed)

    individual_archives = []

    for t in targets:
        # defaults
        archive = None
        container_connection = None
        mp = None
        obfuscate = None
        # archive metadata
        archive_meta = {}

        try:
            if t['type'] == 'docker_image':
                container_connection = open_image(t['name'])
                logging_name = 'Docker image ' + t['name']
                archive_meta['display_name'] = get_repotag(t['name'])
                archive_meta['docker_id'] = t['name']
                if container_connection:
                    mp = container_connection.get_fs()
                else:
                    logger.error('Could not open %s for analysis', logging_name)
                    continue
            elif t['type'] == 'docker_container':
                container_connection = open_container(t['name'])
                logging_name = 'Docker container ' + t['name']
                archive_meta['display_name'] = t['name']
                if container_connection:
                    mp = container_connection.get_fs()
                else:
                    logger.error('Could not open %s for analysis', logging_name)
                    continue
            elif t['type'] == 'host':
                logging_name = determine_hostname()
                archive_meta['display_name'] = determine_hostname(InsightsClient.options.display_name)
            else:
                logger.error('Unexpected analysis target: %s', t['type'])
                continue

            archive_meta['type'] = t['type'].replace('docker_', '')
            archive_meta['product'] = 'Docker'
            archive_meta['system_id'] = generate_analysis_target_id(t['type'], t['name'])

            collection_start = time.clock()
            archive = InsightsArchive(compressor=InsightsClient.options.compressor if not InsightsClient.options.container_mode else "none",
                                      target_name=t['name'])
            atexit.register(_delete_archive, archive)
            dc = DataCollector(archive,
                               mountpoint=mp,
                               target_name=t['name'],
                               target_type=t['type'])

            logger.info('Starting to collect Insights data for %s', logging_name)
            dc.run_collection(collection_rules, rm_conf, branch_info)
            elapsed = (time.clock() - start)
            logger.debug("Data collection complete. Elapsed time: %s", elapsed)

            obfuscate = InsightsClient.config.getboolean(APP_NAME, "obfuscate")

            # include rule refresh time in the duration
            collection_duration = (time.clock() - collection_start) + collection_elapsed

            if InsightsClient.options.no_tar_file:
                logger.info('See Insights data in %s', dc.archive.archive_dir)
                return rc

            tar_file = dc.done(collection_rules, rm_conf)

            # add archives to list of individual uploads
            archive_meta['tar_file'] = tar_file
            individual_archives.append(archive_meta)

        finally:
            # called on loop iter end or unexpected exit
            if container_connection:
                container_connection.close()

    # if multiple targets (container mode), add all archives to single archive
    if InsightsClient.options.container_mode:
        full_archive = InsightsArchive(compressor=InsightsClient.options.compressor)
        for a in individual_archives:
            shutil.copy(a['tar_file'], full_archive.archive_dir)
        # don't want insights_commands in meta archive
        shutil.rmtree(full_archive.cmd_dir)
        metadata = _create_metadata_json(individual_archives)
        full_archive.add_metadata_to_archive(json.dumps(metadata), 'metadata.json')
        full_tar_file = full_archive.create_tar_file(full_archive=True)
    # if only one target (regular mode), just upload one
    else:
        full_archive = archive
        full_tar_file = tar_file

    if InsightsClient.options.offline or InsightsClient.options.no_upload:
        handle_file_output(full_tar_file, full_archive)
        return rc

    # do the upload
    rc = _do_upload(pconn, full_tar_file, logging_name, collection_duration)

    if InsightsClient.options.keep_archive:
        logger.info('Insights data retained in %s', full_tar_file)
        return rc
    if obfuscate:
        logger.info('Obfuscated Insights data retained in %s',
                    os.path.dirname(full_tar_file))
    full_archive.delete_archive_dir()
    return rc
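
The loop above assumes each target is a dict with 'type' and 'name' keys. A hypothetical illustration of that shape; the real values come from get_targets() and constants.default_target:

targets = [
    {'type': 'host', 'name': determine_hostname()},
    {'type': 'docker_image', 'name': 'd0e1f2a3b4c5'},      # image ID (made up)
    {'type': 'docker_container', 'name': 'a9b8c7d6e5f4'},  # container ID (made up)
]
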
Example #16
def collect_data_and_upload(rc=0):
    """
    All the heavy lifting done here
    Run through "targets" - could be just ONE (host, default) or ONE (container/image)
    """
    # initialize collection targets
    # for now we do either containers OR host -- not both at same time
    if InsightsClient.options.container_mode:
        logger.debug("Client running in container/image mode.")
        logger.debug("Scanning for matching container/image.")
        targets = get_targets()
    else:
        logger.debug("Host selected as scanning target.")
        targets = constants.default_target

    # if there are no targets to scan then bail
    if not len(targets):
        logger.debug("No targets were found. Exiting.")
        sys.exit(1)

    if InsightsClient.options.offline:
        logger.warning("Assuming remote branch and leaf value of -1")
        pconn = None
        branch_info = constants.default_branch_info
    else:
        pconn = InsightsConnection()

    # TODO: change these err msgs to be more meaningful, i.e.
    # "could not determine login information"
    if pconn:
        try:
            branch_info = pconn.branch_info()
        except requests.ConnectionError:
            branch_info = handle_branch_info_error(
                "Could not connect to determine branch information")
        except LookupError:
            branch_info = handle_branch_info_error(
                "Could not determine branch information")
    pc = InsightsConfig(pconn)
    tar_file = None

    if InsightsClient.options.just_upload:
        if not os.path.exists(InsightsClient.options.just_upload):
            logger.error('No file %s', InsightsClient.options.just_upload)
            return 1
        tar_file = InsightsClient.options.just_upload
        rc = _do_upload(pconn, tar_file, 'dummy', 0)
        return rc

    # load config from stdin/file if specified
    try:
        stdin_config = {}
        if InsightsClient.options.from_file:
            with open(InsightsClient.options.from_file, 'r') as f:
                stdin_config = json.load(f)
        elif InsightsClient.options.from_stdin:
            stdin_config = json.load(sys.stdin)
        if ((InsightsClient.options.from_file
             or InsightsClient.options.from_stdin)
                and ('uploader.json' not in stdin_config
                     or 'sig' not in stdin_config)):
            raise ValueError
        if ((InsightsClient.options.from_file
             or InsightsClient.options.from_stdin)
                and 'branch_info' in stdin_config
                and stdin_config['branch_info'] is not None):
            branch_info = stdin_config['branch_info']
    except:
        logger.error('ERROR: Invalid config for %s! Exiting...',
                     ('--from-file'
                      if InsightsClient.options.from_file else '--from-stdin'))
        sys.exit(1)

    start = time.clock()
    collection_rules, rm_conf = pc.get_conf(InsightsClient.options.update,
                                            stdin_config)
    collection_elapsed = (time.clock() - start)
    logger.debug("Rules configuration loaded. Elapsed time: %s",
                 collection_elapsed)

    individual_archives = []

    for t in targets:
        # defaults
        archive = None
        container_connection = None
        mp = None
        obfuscate = None
        # archive metadata
        archive_meta = {}

        try:
            if t['type'] == 'docker_image':
                container_connection = open_image(t['name'])
                logging_name = 'Docker image ' + t['name']
                archive_meta['docker_id'] = t['name']
                archive_meta['display_name'] = docker_display_name(
                    t['name'], t['type'].replace('docker_', ''))
                logger.debug('Docker display_name: %s',
                             archive_meta['display_name'])
                logger.debug('Docker docker_id: %s', archive_meta['docker_id'])
                if container_connection:
                    mp = container_connection.get_fs()
                else:
                    logger.error('Could not open %s for analysis',
                                 logging_name)
                    sys.exit(1)
            elif t['type'] == 'docker_container':
                container_connection = open_container(t['name'])
                logging_name = 'Docker container ' + t['name']
                archive_meta['docker_id'] = t['name']
                archive_meta['display_name'] = docker_display_name(
                    t['name'], t['type'].replace('docker_', ''))
                logger.debug('Docker display_name: %s',
                             archive_meta['display_name'])
                logger.debug('Docker docker_id: %s', archive_meta['docker_id'])
                if container_connection:
                    mp = container_connection.get_fs()
                else:
                    logger.error('Could not open %s for analysis',
                                 logging_name)
                    sys.exit(1)
            elif t['type'] == 'host':
                logging_name = determine_hostname()
                archive_meta['display_name'] = determine_hostname(
                    InsightsClient.options.display_name)
            else:
                logger.error('Unexpected analysis target: %s', t['type'])
                sys.exit(1)

            archive_meta['type'] = t['type'].replace('docker_', '')
            archive_meta['product'] = 'Docker'
            archive_meta['system_id'] = generate_analysis_target_id(
                t['type'], t['name'])

            collection_start = time.clock()
            archive = InsightsArchive(
                compressor=InsightsClient.options.compressor
                if not InsightsClient.options.container_mode else "none",
                target_name=t['name'])
            atexit.register(_delete_archive, archive)
            dc = DataCollector(archive,
                               InsightsClient.config,
                               mountpoint=mp,
                               target_name=t['name'],
                               target_type=t['type'])

            logger.info('Starting to collect Insights data for %s',
                        logging_name)
            dc.run_collection(collection_rules, rm_conf, branch_info)
            elapsed = (time.clock() - start)
            logger.debug("Data collection complete. Elapsed time: %s", elapsed)

            obfuscate = InsightsClient.config.getboolean(APP_NAME, "obfuscate")

            # include rule refresh time in the duration
            collection_duration = (time.clock() -
                                   collection_start) + collection_elapsed

            # add custom metadata about a host if provided by from_file
            # use in the OSE case
            if InsightsClient.options.from_file:
                with open(InsightsClient.options.from_file, 'r') as f:
                    stdin_config = json.load(f)
                    if 'metadata' in stdin_config:
                        archive.add_metadata_to_archive(
                            json.dumps(stdin_config['metadata']),
                            'metadata.json')

            if InsightsClient.options.no_tar_file:
                logger.info('See Insights data in %s', dc.archive.archive_dir)
                return rc

            tar_file = dc.done(collection_rules, rm_conf)

            # add archives to list of individual uploads
            archive_meta['tar_file'] = tar_file
            individual_archives.append(archive_meta)

        finally:
            # called on loop iter end or unexpected exit
            if container_connection:
                container_connection.close()

    # if multiple targets (container mode), add all archives to single archive
    # if InsightsClient.options.container_mode:
    if False:  # we only run single collections now (not the uber archives), bypass this
        full_archive = InsightsArchive(
            compressor=InsightsClient.options.compressor)
        for a in individual_archives:
            shutil.copy(a['tar_file'], full_archive.archive_dir)
        # don't want insights_commands in meta archive
        shutil.rmtree(full_archive.cmd_dir)
        metadata = _create_metadata_json(individual_archives)
        full_archive.add_metadata_to_archive(json.dumps(metadata),
                                             'metadata.json')
        full_tar_file = full_archive.create_tar_file(full_archive=True)
    # if only one target (regular mode), just upload one
    else:
        full_archive = archive
        full_tar_file = tar_file

    if InsightsClient.options.offline or InsightsClient.options.no_upload:
        handle_file_output(full_tar_file, full_archive)
        return rc

    # do the upload
    rc = _do_upload(pconn, full_tar_file, logging_name, collection_duration)

    if InsightsClient.options.keep_archive:
        logger.info('Insights data retained in %s', full_tar_file)
        return rc
    if obfuscate:
        logger.info('Obfuscated Insights data retained in %s',
                    os.path.dirname(full_tar_file))
    full_archive.delete_archive_dir()
    return rc
Example #17
def handle_startup(options, config):
    """
    Handle startup options
    """

    if options.version:
        print constants.version
        sys.exit()

    if options.validate:
        validate_remove_file()
        sys.exit()

    # Generate /etc/machine-id if it does not exist
    new = False
    if options.reregister:
        new = True
        options.register = True
    logger.debug("Machine-ID: " + generate_machine_id(new))

    # Disable GPG verification
    if options.no_gpg:
        logger.warn("WARNING: GPG VERIFICATION DISABLED")
        config.set(APP_NAME, 'gpg', 'False')

    # Log config except the password
    # and proxy as it might have a pw as well
    for item, value in config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s", item, value)

    if config.getboolean(APP_NAME, 'auto_update'):
        options.update = True

    if config.getboolean(APP_NAME, 'auto_config'):
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration(config)

    # Set the schedule
    InsightsSchedule()

    # Test connection, useful for proxy debug
    if options.test_connection:
        pconn = InsightsConnection(config)
        pconn.test_connection()

    if options.unregister:
        pconn = InsightsConnection(config)
        pconn.unregister()
        sys.exit()

    # Handle registration, grouping, and display name
    if options.register:
        opt_group = options.group
        message, hostname, opt_group, display_name = register(config, options)
        if options.display_name is None and options.group is None:
            logger.info('Successfully registered %s', hostname)
        elif options.display_name is None:
            logger.info('Successfully registered %s in group %s', hostname, opt_group)
        else:
            logger.info('Successfully registered %s as %s in group %s', hostname, display_name, opt_group)

        logger.info(message)

    # Check for .unregistered file
    if os.path.isfile(constants.unregistered_file):
        logger.error("This machine has been unregistered")
        logger.error("Use --register if you would like to re-register this machine")
        logger.error("Exiting")
        sys.exit(1)
Example #18
def _main():
    """
    Main entry point
    Parse cmdline options
    Parse config file
    Call data collector
    """
    global logger
    sys.excepthook = handle_exception

    parser = optparse.OptionParser()
    set_up_options(parser)
    options, args = parser.parse_args()
    if len(args) > 0:
        parser.error("Unknown arguments: %s" % args)
        sys.exit(1)

    if options.satellite_group and not options.register:
        parser.error("--satellite-group must be used with --register")

    if options.version:
        print constants.version
        sys.exit()

    if options.daily and options.weekly:
        parser.error("options --daily and --weekly are mutually exclusive")

    config = parse_config_file()
    logger, handler = set_up_logging(config, options.verbose)

    # Defer logging till it's ready
    logger.debug('invoked with args: %s', options)
    logger.debug("Version: " + constants.version)
    # Generate /etc/machine-id if it does not exist
    new = False
    if options.regenerate:
        new = True
    logger.debug("Machine-ID: " + generate_machine_id(new))

    # Disable GPG verification
    if options.no_gpg:
        logger.warn("GPG VERIFICATION DISABLED")
        config.set(APP_NAME, 'gpg', 'False')

    # Log config except the password
    # and proxy as it might have a pw as well
    for item, value in config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s",  item, value)

    if config.getboolean(APP_NAME, 'auto_update'):
        options.update = True

    if config.getboolean(APP_NAME, 'auto_config'):
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration(config)

    # Set the schedule
    InsightsSchedule(options)

    # Test connection, useful for proxy debug
    if options.test_connection:
        pconn = InsightsConnection(config)
        pconn._test_connection()

    # Handle registration and grouping, this is mostly a no-op
    if options.register:
        opt_group = options.group
        if options.satellite_group:
            opt_group = get_satellite_group()
        hostname, opt_group = register(config, opt_group)
        logger.info('Successfully registered %s in group %s', hostname, opt_group)

    # Check for .unregistered file
    if os.path.isfile(constants.unregistered_file):
        logger.error("This machine has been unregistered")
        logger.error("Use --register if you would like to re-register this machine")
        logger.error("Exiting")
        sys.exit(1)

    # If we are not just setting the schedule, do work son
    if not options.schedule:
        collect_data_and_upload(config, options)
        handler.doRollover()
Example #19
def collect_data_and_upload(config, options, rc=0):
    """
    All the heavy lifting done here
    """
    collection_start = time.clock()

    pconn = InsightsConnection(config)
    try:
        branch_info = pconn.branch_info()
    except requests.ConnectionError:
        branch_info = handle_branch_info_error(
            "Could not connect to determine branch information", options)
    except LookupError:
        branch_info = handle_branch_info_error(
            "Could not determine branch information", options)
    pc = InsightsConfig(config, pconn)
    archive = InsightsArchive(compressor=options.compressor)
    dc = DataCollector(archive)

    # register the exit handler here to delete the archive
    atexit.register(handle_exit, archive, options.keep_archive or options.no_upload)

    stdin_config = json.load(sys.stdin) if options.from_stdin else {}

    start = time.clock()
    collection_rules, rm_conf = pc.get_conf(options.update, stdin_config)
    elapsed = (time.clock() - start)
    logger.debug("Collection Rules Elapsed Time: %s", elapsed)

    start = time.clock()
    logger.info('Starting to collect Insights data')
    dc.run_commands(collection_rules, rm_conf)
    elapsed = (time.clock() - start)
    logger.debug("Command Collection Elapsed Time: %s", elapsed)

    start = time.clock()
    dc.copy_files(collection_rules, rm_conf)
    elapsed = (time.clock() - start)
    logger.debug("File Collection Elapsed Time: %s", elapsed)

    dc.write_branch_info(branch_info)
    obfuscate = config.getboolean(APP_NAME, "obfuscate")

    collection_duration = (time.clock() - collection_start)

    if not options.no_tar_file:
        tar_file = dc.done(config, rm_conf)
        if not options.no_upload:
            logger.info('Uploading Insights data,'
                        ' this may take a few minutes')
            for tries in range(options.retries):
                upload = pconn.upload_archive(tar_file, collection_duration)
                if upload.status_code == 201:
                    write_lastupload_file()
                    logger.info("Upload completed successfully!")
                    break
                elif upload.status_code == 412:
                    pconn.handle_fail_rcs(upload)
                else:
                    logger.error("Upload attempt %d of %d failed! Status Code: %s",
                                 tries + 1, options.retries, upload.status_code)
                    if tries + 1 != options.retries:
                        logger.info("Waiting %d seconds then retrying",
                                    constants.sleep_time)
                        time.sleep(constants.sleep_time)
                    else:
                        logger.error("All attempts to upload have failed!")
                        logger.error("Please see %s for additional information",
                                     constants.default_log_file)
                        rc = 1

            if (not obfuscate and not options.keep_archive) or options.no_upload:
                dc.archive.delete_tmp_dir()
            else:
                if obfuscate:
                    logger.info('Obfuscated Insights data retained in %s',
                                os.path.dirname(tar_file))
                else:
                    logger.info('Insights data retained in %s', tar_file)
        else:
            handle_file_output(options, tar_file)
    else:
        logger.info('See Insights data in %s', dc.archive.archive_dir)
    return rc
Example #20
def handle_startup():
    """
    Handle startup options
    """
    # ----do X and exit options----
    # show version and exit
    if InsightsClient.options.version:
        print constants.version
        sys.exit()

    if (InsightsClient.options.container_mode and
       not InsightsClient.options.run_here and
       insights_client_container_is_available()):
        sys.exit(run_in_container())

    if InsightsClient.options.validate:
        validate_remove_file()
        sys.exit()

    if InsightsClient.options.enable_schedule and InsightsClient.options.disable_schedule:
        logger.error('Conflicting options: --enable-schedule and --disable-schedule')
        sys.exit(1)

    if InsightsClient.options.enable_schedule:
        # enable automatic scheduling
        InsightsSchedule(container_mode=InsightsClient.options.container_mode)
        InsightsClient.config.set(APP_NAME, 'no_schedule', False)
        logger.info('Automatic scheduling for Insights has been enabled.')
        sys.exit()

    if InsightsClient.options.disable_schedule:
        # disable automatic scheduling
        InsightsSchedule(set_cron=False).remove_scheduling()
        InsightsClient.config.set(APP_NAME, 'no_schedule', True)
        logger.info('Automatic scheduling for Insights has been disabled.')
        sys.exit()

    # do auto_config here, for connection-related 'do X and exit' options
    if InsightsClient.config.getboolean(APP_NAME, 'auto_config') and not InsightsClient.options.offline:
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration()

    if InsightsClient.options.test_connection:
        pconn = InsightsConnection()
        rc = pconn.test_connection()
        sys.exit(rc)

    if InsightsClient.options.status:
        reg_check = registration_check()
        logger.info('\n'.join(reg_check['messages']))
        # exit with !status, 0 for True, 1 for False
        sys.exit(not reg_check['status'])

    if InsightsClient.options.support:
        support = InsightsSupport()
        support.collect_support_info()
        sys.exit()

    # ----config options----
    # log the config
    # ignore password and proxy -- proxy might have pw
    for item, value in InsightsClient.config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s", item, value)

    if InsightsClient.config.getboolean(APP_NAME, 'auto_update') and not InsightsClient.options.offline:
        # TODO: config updates option, but in GPG option, the option updates
        # the config.  make this consistent
        InsightsClient.options.update = True

    # disable automatic scheduling if it was set in the config, and if the job exists
    if InsightsClient.config.getboolean(APP_NAME, 'no_schedule'):
        cron = InsightsSchedule(set_cron=False)
        if cron.already_linked():
            cron.remove_scheduling()
            logger.debug('Automatic scheduling for Insights has been disabled.')

    # ----modifier options----
    if InsightsClient.options.no_gpg:
        logger.warn("WARNING: GPG VERIFICATION DISABLED")
        InsightsClient.config.set(APP_NAME, 'gpg', 'False')

    if InsightsClient.options.just_upload:
        # override these for great justice
        InsightsClient.options.no_tar_file = False
        InsightsClient.options.keep_archive = True

    if InsightsClient.options.container_mode and InsightsClient.options.no_tar_file:
        logger.error('Invalid combination: --container and --no-tar-file')
        sys.exit(1)

    # can't use bofa
    if InsightsClient.options.from_stdin and InsightsClient.options.from_file:
        logger.error('Can\'t use both --from-stdin and --from-file.')
        sys.exit(1)

    # ----register options----
    # put this first to avoid conflicts with register
    if InsightsClient.options.unregister:
        pconn = InsightsConnection()
        pconn.unregister()
        sys.exit()

    # force-reregister -- remove machine-id files and registration files before trying to register again
    new = False
    if InsightsClient.options.reregister:
        new = True
        InsightsClient.options.register = True
        delete_registered_file()
        delete_unregistered_file()
        delete_machine_id()
    logger.debug('Machine-id: %s', generate_machine_id(new))

    if InsightsClient.options.register:
        try_register()
        if not InsightsClient.config.getboolean(APP_NAME, 'no_schedule'):
            InsightsSchedule(container_mode=InsightsClient.options.container_mode)

    # check registration before doing any uploads
    # Ignore if in offline mode
    if not InsightsClient.options.register and not InsightsClient.options.offline:
        msg, is_registered = _is_client_registered()
        if not is_registered:
            logger.error(msg)
            sys.exit(1)
Example #21
def handle_startup(options, config):
    """
    Handle startup options
    """

    if options.version:
        print constants.version
        sys.exit()

    if options.validate:
        validate_remove_file()
        sys.exit()

    # Generate /etc/machine-id if it does not exist
    new = False
    if options.reregister:
        new = True
        options.register = True
    logger.debug("Machine-ID: " + generate_machine_id(new))

    # Disable GPG verification
    if options.no_gpg:
        logger.warn("WARNING: GPG VERIFICATION DISABLED")
        config.set(APP_NAME, 'gpg', 'False')

    # Log config except the password
    # and proxy as it might have a pw as well
    for item, value in config.items(APP_NAME):
        if item != 'password' and item != 'proxy':
            logger.debug("%s:%s", item, value)

    if config.getboolean(APP_NAME, 'auto_update'):
        options.update = True

    if config.getboolean(APP_NAME, 'auto_config'):
        # Try to discover if we are connected to a satellite or not
        try_auto_configuration(config)

    # Set the schedule
    if not options.no_schedule and not config.getboolean(
            APP_NAME, 'no_schedule'):
        InsightsSchedule()

    # Test connection, useful for proxy debug
    if options.test_connection:
        pconn = InsightsConnection(config)
        pconn.test_connection()

    if options.unregister:
        pconn = InsightsConnection(config)
        pconn.unregister()
        sys.exit()

    # Handle registration, grouping, and display name
    if options.register:
        opt_group = options.group
        message, hostname, opt_group, display_name = register(config, options)
        if options.display_name is None and options.group is None:
            logger.info('Successfully registered %s', hostname)
        elif options.display_name is None:
            logger.info('Successfully registered %s in group %s', hostname, opt_group)
        else:
            logger.info('Successfully registered %s as %s in group %s', hostname, display_name,
                        opt_group)

        logger.info(message)

    # Collect debug/log information
    if options.support:
        support = InsightsSupport(config)
        support.collect_support_info()
        sys.exit(0)

    # Just check registration status
    if options.status:
        support = InsightsSupport(config)
        reg_check = support.registration_check()
        logger.info('\n'.join(reg_check))
        sys.exit(0)

    # Set offline mode for OSP use
    offline_mode = False
    if (options.offline and options.from_stdin) or options.no_upload:
        offline_mode = True

    # First startup, no .registered or .unregistered
    # Ignore if in offline mode
    if (not os.path.isfile(constants.registered_file) and
       not os.path.isfile(constants.unregistered_file) and
       not options.register and not offline_mode):
        logger.error('This machine has not yet been registered.')
        logger.error('Use --register to register this machine.')
        logger.error("Exiting")
        sys.exit(1)

    # Check for .unregistered file
    if (os.path.isfile(constants.unregistered_file) and
       not options.register and not offline_mode):
        logger.error("This machine has been unregistered.")
        logger.error("Use --register if you would like to re-register this machine.")
        logger.error("Exiting")
        sys.exit(1)
Example #22
def get_connection():
    global INSIGHTS_CONNECTION
    if INSIGHTS_CONNECTION is None:
        INSIGHTS_CONNECTION = InsightsConnection()
    return INSIGHTS_CONNECTION
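
get_connection() is a lazy module-level singleton: the first call constructs the InsightsConnection and later calls reuse it. A usage sketch, assuming the module sets INSIGHTS_CONNECTION = None at import time:

conn = get_connection()
assert get_connection() is conn  # later calls return the same instance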