def get_config(self, conf, local=False):
    """
    Retrieve a local or merged dictionary-of-dicts for this app's conf file.

    Mirrors the file layout used by writeConfFile in splunk.clilib: values
    in local/<conf>.conf override those in default/<conf>.conf, stanza by
    stanza.  cli.getMergedConf() is not used because it does not support
    custom conf files.

    :param conf: Splunk conf file name (without the ".conf" extension)
    :param local: if True, return only the local configuration
    :return: dict mapping stanza name -> {setting: value}
    """
    # NOTE: the original code called cli.getMergedConf() with no arguments
    # and discarded the result; that call was dead code and has been removed.
    filename = "%s.conf" % conf
    localconfpath = os.path.join(self.dir, "local", filename)

    if local:
        # Local-only view: ignore default/ entirely.
        return cli.readConfFile(localconfpath) if os.path.exists(localconfpath) else {}

    defaultconfpath = os.path.join(self.dir, "default", filename)
    stanzaDict = cli.readConfFile(defaultconfpath) if os.path.exists(defaultconfpath) else {}
    if os.path.exists(localconfpath):
        # Merge: local settings override defaults within each stanza.
        for setting, stanza in cli.readConfFile(localconfpath).items():
            if setting in stanzaDict:
                stanzaDict[setting].update(stanza)
            else:
                stanzaDict[setting] = stanza
    return stanzaDict
Exemplo n.º 2
0
 def get_config(self, conf, local=False):
     """
     Retrieve a local or merged dictionary-of-dicts for this app's conf file.

     Mirrors the file layout used by writeConfFile in splunk.clilib: values
     in local/<conf>.conf override those in default/<conf>.conf, stanza by
     stanza.  cli.getMergedConf() is not used because it does not support
     custom conf files.

     :param conf: Splunk conf file name (without the ".conf" extension)
     :param local: if True, return only the local configuration
     :return: dict mapping stanza name -> {setting: value}
     """
     # NOTE: the original code called cli.getMergedConf() with no arguments
     # and discarded the result; that call was dead code and has been removed.
     filename = "%s.conf" % conf
     localconfpath = os.path.join(self.dir, "local", filename)

     if local:
         # Local-only view: ignore default/ entirely.
         return cli.readConfFile(localconfpath) if os.path.exists(localconfpath) else {}

     defaultconfpath = os.path.join(self.dir, "default", filename)
     stanzaDict = cli.readConfFile(defaultconfpath) if os.path.exists(defaultconfpath) else {}
     if os.path.exists(localconfpath):
         # Merge: local settings override defaults within each stanza.
         for setting, stanza in cli.readConfFile(localconfpath).items():
             if setting in stanzaDict:
                 stanzaDict[setting].update(stanza)
             else:
                 stanzaDict[setting] = stanza
     return stanzaDict
Exemplo n.º 3
0
def stanzaStatusInternal(args, fromCLI):
    """
    Returns boolean based on whether or not a given stanza in a given file has the "disabled" parameter set.

    A stanza is considered enabled when it exists in the merged conf and
    does not carry disabled=true.

    :param args: dict containing ARG_CONFIG, ARG_MODNAME and ARG_STANZA;
                 ARG_AUTHSTR is accepted but not used here.
    :param fromCLI: accepted for interface parity with other handlers; unused.
    :return: dict with "enabled" (bool) and "stanzas" (the merged conf).
    """
    paramsReq = (ARG_CONFIG, ARG_MODNAME, ARG_STANZA)
    paramsOpt = (ARG_AUTHSTR, )
    comm.validateArgs(paramsReq, paramsOpt, args)

    returnDict = {"enabled": False}
    stanza = args[ARG_STANZA]

    # NOTE: the unused local `authStr` (extracted from ARG_AUTHSTR but never
    # read) has been removed; ARG_AUTHSTR remains an accepted optional arg.

    currStatus = comm.getMergedConf(args[ARG_CONFIG])

    if stanza in currStatus:
        # Found the stanza: enabled unless disabled=true overrides it below.
        returnDict["enabled"] = True
        if KEY_DISABLED in currStatus[stanza]:
            if "true" == currStatus[stanza][KEY_DISABLED].lower():
                returnDict["enabled"] = False

    returnDict["stanzas"] = currStatus
    return returnDict
Exemplo n.º 4
0
    def _get_spark_settings(self):
        """
        Return the merged "spark_settings" stanza from spark.conf.

        :return: dict of setting name -> value.
        """
        # NOTE: removed the unused local `app` (derived from __file__ but
        # never read).
        spark_conf = scc.getMergedConf("spark")
        spark_settings = {}
        # Single stanza today; the tuple keeps it trivial to add more later.
        for stanza in ("spark_settings",):
            spark_settings.update(spark_conf[stanza])

        return spark_settings
Exemplo n.º 5
0
 def __init__(self, appname, conf_filename):
     """Load *conf_filename* for *appname* into self.config.

     Unit tests read the app's conf directly with btool disabled;
     otherwise the merged configuration is fetched normally.
     """
     if not get_test_state():
         self.config = cli.getMergedConf(conf_filename)
     else:
         # In unit testing scenario we don't want to use btool
         path_to_app = os.path.join(get_base_path(), appname)
         self.config = cli.getAppConf(conf_filename,
                                      appname,
                                      use_btool=False,
                                      app_path=path_to_app)
Exemplo n.º 6
0
def editClient(args, fromCLI):
    """
    Edits the various options of a deployment client.
    THIS WORKS ON THE LOCAL FILESYSTEM.  We usually don't want this,
    so this will only be used for certain options.

    :param args: dict of option-name -> value; keys must appear in optionMapClient.
    :param fromCLI: True when invoked from the CLI (enables user feedback).
    :return: dict; contains "restartRequired": True when anything changed.
    :raises cex.ArgError: when no changes are specified.
    :raises cex.ServerState: when the deployment client is disabled.
    :raises cex.ParsingError: when the expected stanza is missing.
    """
    paramsReq = ()
    paramsOpt = tuple(optionMapClient.keys())
    comm.validateArgs(paramsReq, paramsOpt, args)

    returnDict = {}

    if 0 == len(args):
        # raise X(msg) is valid on both Python 2 and 3, unlike the old
        # "raise X, msg" statement form used previously.
        raise cex.ArgError("No changes have been specified.")

    if not module.deplClientStatus({}, fromCLI)["enabled"]:
        raise cex.ServerState(ERR_CLIENT_DIS)

    currConf = comm.getMergedConf(module.DEPL_CLI_CONFIG)
    if not module.DEPL_CLI_STANZA in currConf:
        raise cex.ParsingError(ERR_CLI_STANZA)

    # assuming that we only support one of each of these tags - replace every tag found, and add if non-existent.
    for arg, val in args.items():
        paramName = optionMapClient[arg]

        # validate the few types of args we recognize, ignore anything else.
        try:
            if arg in (ARG_MCASTURI, ARG_DEPSERVURI):
                validate.checkIPPortOrHostPort(arg, val)
            elif arg == ARG_MCASTIP:
                validate.checkIP(arg, val)
            elif arg == ARG_POLLFREQ:
                validate.checkPosInt(arg, val)
        except cex.ValidationError:
            if "0" != val:  # we use 0 to disable these things, i guess.
                raise

        # remove if 0.
        if "0" == val and paramName in currConf[module.DEPL_CLI_STANZA]:
            currConf[module.DEPL_CLI_STANZA].pop(paramName)
        # or add/set.
        else:
            currConf[module.DEPL_CLI_STANZA][paramName] = val

        # if we're at this point, *something* has changed.
        returnDict["restartRequired"] = True

    comm.writeConfFile(
        bundle_paths.make_path(module.DEPL_CLI_CONFIG + ".conf"), currConf)
    if fromCLI:
        logger.info("Configuration updated.")

    return returnDict
Exemplo n.º 7
0
def editClient(args, fromCLI):
  """
  Edits the various options of a deployment client.
  THIS WORKS ON THE LOCAL FILESYSTEM.  We usually don't want this,
  so this will only be used for certain options.

  :param args: dict of option-name -> value; keys must appear in optionMapClient.
  :param fromCLI: True when invoked from the CLI (enables user feedback).
  :return: dict; contains "restartRequired": True when anything changed.
  :raises cex.ArgError: when no changes are specified.
  :raises cex.ServerState: when the deployment client is disabled.
  :raises cex.ParsingError: when the expected stanza is missing.
  """
  paramsReq = ()
  paramsOpt = tuple(optionMapClient.keys())
  comm.validateArgs(paramsReq, paramsOpt, args)

  returnDict = {}

  if 0 == len(args):
    # raise X(msg) is valid on both Python 2 and 3, unlike the old
    # "raise X, msg" statement form used previously.
    raise cex.ArgError("No changes have been specified.")

  if not module.deplClientStatus({}, fromCLI)["enabled"]:
    raise cex.ServerState(ERR_CLIENT_DIS)

  currConf = comm.getMergedConf(module.DEPL_CLI_CONFIG)
  if not module.DEPL_CLI_STANZA in currConf:
    raise cex.ParsingError(ERR_CLI_STANZA)

  # assuming that we only support one of each of these tags - replace every tag found, and add if non-existent.
  for arg, val in args.items():
    paramName = optionMapClient[arg]

    # validate the few types of args we recognize, ignore anything else.
    try:
      if arg in (ARG_MCASTURI, ARG_DEPSERVURI):
        validate.checkIPPortOrHostPort(arg, val)
      elif arg == ARG_MCASTIP:
        validate.checkIP(arg, val)
      elif arg == ARG_POLLFREQ:
        validate.checkPosInt(arg, val)
    except cex.ValidationError:
      if "0" != val: # we use 0 to disable these things, i guess.
        raise

    # remove if 0.
    if "0" == val and paramName in currConf[module.DEPL_CLI_STANZA]:
      currConf[module.DEPL_CLI_STANZA].pop(paramName)
    # or add/set.
    else:
      currConf[module.DEPL_CLI_STANZA][paramName] = val

    # if we're at this point, *something* has changed.
    returnDict["restartRequired"] = True

  comm.writeConfFile(bundle_paths.make_path(module.DEPL_CLI_CONFIG + ".conf"), currConf)
  if fromCLI:
    logger.info("Configuration updated.")

  return returnDict
    def read_config(self):
        """
        Load elastic node lists from arkime.conf.

        Populates self.elastic_nodes as a dict mapping site name -> list of
        server values, built from stanzas named "elastic:<site>" and the
        "serverN" keys inside each.
        """
        cfg = cli.getMergedConf('arkime')

        self.elastic_nodes = dict()
        for stanza in cfg.keys():
            # Stanza names look like "elastic:<site>" (exactly one colon).
            # Raw strings: "\d" is an invalid escape in a plain string
            # literal (DeprecationWarning since Python 3.6).
            if re.match(r"^elastic:[^:]+$", stanza):
                elastic, site = stanza.split(':', 1)
                self.elastic_nodes[site] = list()
                for server in cfg[stanza]:
                    if re.match(r"^server\d+$", server):
                        self.elastic_nodes[site].append(cfg[stanza][server])
Exemplo n.º 9
0
    def apply_tag(self, session_key, result):
        '''
        This function will make the actual API call to launch the WAS scan.

        Applies the configured tag id(s) (self.tag_ids, comma-separated) to
        the webapp identified by result['webapp_id'] via the Qualys API.
        Returns True on a SUCCESS response code, False otherwise; exits the
        process with code 3 when API credentials are not configured.
        '''
        # print >> sys.stderr, "INFO Launching WAS scan with settings: %s" % settings

        # get API server, user and password
        api_user, api_password = qualysModule.splunkpopulator.utils.getCredentials(session_key)
        qualys_conf = scc.getMergedConf('qualys')
        api_server = qualys_conf['setupentity']['api_server']
        use_proxy = qualys_conf['setupentity']['use_proxy']
        proxy = qualys_conf['setupentity']['proxy_server']

        # Without full credentials there is nothing useful to do; record the
        # problem as an event and bail out of the process.
        if api_user is None or api_user == '' or \
                api_password is None or api_password == '' or \
                api_server is None or api_server == '':
            logger.info("API server/username/password not configured. Exiting.")
            # self.add_event(["API server/username/password not configured. Exiting."])
            self.addevent("API server/username/password not configured. Exiting.", self.action_name)
            exit(3)

        api_config = qapi.Client.APIConfig()
        api_config.username = api_user
        api_config.password = api_password
        api_config.serverRoot = api_server

        # use_proxy is stored as a string flag ('1' == enabled) in qualys.conf
        if use_proxy == '1':
            api_config.useProxy = True
            api_config.proxyHost = proxy

        qapi.setupClient(api_config)
        api_client = qapi.client

        webapp_id_in_result = result.get('webapp_id', None)

        tag_ids = self.tag_ids.split(",")
        logger.info("Tag ID(s) to apply: %s" % tag_ids)
        api_params = self.get_api_params(tag_ids)
        logger.debug("Parameters to apply tag: %s" % api_params)
        self.addevent("Making %s request with params %s" % (API_ENDPOINT + webapp_id_in_result, api_params), self.action_name)
        response = api_client.get(API_ENDPOINT + webapp_id_in_result, api_params, qapi.Client.SimpleAPIResponse())
        api_response = response.get_response()
        logger.info("API response is %s" % api_response)
        # Parse the XML response and pull out the <responseCode> element to
        # decide success or failure.
        response_root = ET.fromstring(api_response)
        response_code = response_root.find('responseCode').text
        self.addevent("API response is %s" % response_code, self.action_name)
        if response_code == "SUCCESS":
            self.addevent("Successfully applied tag id(s) %s to webapp id %s" % (self.tag_ids, webapp_id_in_result), self.action_name)
            return True
        else:
            self.addevent("Could not apply tag id(s) %s to webapp id %s" % (self.tag_ids, webapp_id_in_result), self.action_name)
            return False
Exemplo n.º 10
0
def getPoll(args, fromCLI):
  """Report whether a deployment server URI is configured.

  :param args: must be empty; validated against empty required/optional sets.
  :param fromCLI: when True, log the result for the user.
  :return: {"enabled": bool, "uri": str (present only when enabled)}.
  """
  comm.validateArgs((), (), args)

  retDict = {"enabled": False}
  currConf = comm.getMergedConf(module.DEPL_CLI_CONFIG)
  try:
    # EAFP: the stanza or the key may be missing entirely.
    retDict["uri"] = currConf[module.DEPL_CLI_STANZA][XML_DEPSERVURI]
  except KeyError:
    pass
  else:
    retDict["enabled"] = True

  if fromCLI:
    if retDict["enabled"]:
      logger.info("Deployment Server URI is set to \"%s\"." % retDict["uri"])
    else:
      logger.info("Deployment Server URI is not set.")
  return retDict
Exemplo n.º 11
0
def getMulticast(args, fromCLI):
    """Report whether a multicast URI is configured.

    :param args: must be empty; validated against empty required/optional sets.
    :param fromCLI: when True, log the result for the user.
    :return: {"enabled": bool, "uri": str (present only when enabled)}.
    """
    comm.validateArgs((), (), args)

    retDict = {"enabled": False}
    currConf = comm.getMergedConf(module.DEPL_CLI_CONFIG)
    try:
        # EAFP: the stanza or the key may be missing entirely.
        retDict["uri"] = currConf[module.DEPL_CLI_STANZA][XML_MCASTURI]
    except KeyError:
        pass
    else:
        retDict["enabled"] = True

    if fromCLI:
        if retDict["enabled"]:
            logger.info("Multicast URI is set to \"%s\"." % retDict["uri"])
        else:
            logger.info("Multicast URI is not set.")
    return retDict
Exemplo n.º 12
0
    def _get_service_now_account(self):
        """
        Assemble ServiceNow account settings from service_now.conf.

        Merges the snow_default, snow_account and snow_proxy stanzas,
        resolves credentials stored as encrypted placeholders through the
        credential manager, normalizes the proxy port/URL, and ensures the
        ServiceNow URL starts with https:// and ends with a slash.

        :return: dict of account settings with clear-text credentials.
        :raises Exception: when credentials cannot be retrieved or no
            ServiceNow URL has been configured.
        """
        app = snow_consts.app_name
        snow_conf = scc.getMergedConf("service_now")
        snow_account = {}
        for stanza in ("snow_default", "snow_account", "snow_proxy"):
            snow_account.update(snow_conf[stanza])

        mgr = cred.CredentialManager(self.getSessionKey(), scc.getMgmtUri())
        accs = (("url", "username", "password"),
                ("proxy_url", "proxy_username", "proxy_password"))
        for (url_k, user_k, pass_k) in accs:
            url = snow_account[url_k]
            username = snow_account[user_k]
            password = snow_account[pass_k]
            # Placeholder values mean the real secret lives in the
            # credential store, keyed by the URL.
            if url and username == snow_consts.encrypted \
                    and password == snow_consts.encrypted:
                userpass = mgr.get_clear_password(url, snow_consts.dummy, app)
                if not userpass:
                    msg = "Failed to get clear credentials for %s" % url
                    _LOGGER.error(msg)
                    raise Exception(msg)
                username, password = userpass.split(snow_consts.userpass_sep)
            snow_account[user_k] = username
            snow_account[pass_k] = password
        if snow_account["proxy_port"]:
            snow_account["proxy_port"] = int(snow_account["proxy_port"])

        if utils.is_false(snow_account["proxy_enabled"]):
            # Proxy disabled: blank out proxy settings so callers skip it.
            snow_account["proxy_url"] = ""
            snow_account["proxy_port"] = ""

        snow_url = snow_account["url"]
        if not snow_url:
            raise Exception("ServiceNow account has not been setup.")

        if not snow_url.startswith("https://"):
            snow_url = "https://%s" % snow_url

        if not snow_url.endswith("/"):
            snow_url = "%s/" % snow_url

        snow_account["url"] = snow_url
        return snow_account
Exemplo n.º 13
0
    def _get_service_now_account(self):
        """
        Assemble ServiceNow account settings from service_now.conf.

        Merges the snow_default, snow_account and snow_proxy stanzas,
        resolves credentials stored as "<encrypted>" placeholders through
        the credential manager, normalizes the proxy port/URL, and ensures
        the ServiceNow URL starts with https:// and ends with a slash.

        :return: dict of account settings with clear-text credentials.
        :raises Exception: when credentials cannot be retrieved or no
            ServiceNow URL has been configured.
        """
        app = snow_consts.app_name
        snow_conf = scc.getMergedConf("service_now")
        snow_account = {}
        for stanza in ("snow_default", "snow_account", "snow_proxy"):
            snow_account.update(snow_conf[stanza])

        mgr = cred.CredentialManager(self.session_key, scc.getMgmtUri())
        accs = (("url", "username", "password"),
                ("proxy_url", "proxy_username", "proxy_password"))
        for (url_k, user_k, pass_k) in accs:
            url = snow_account[url_k]
            username = snow_account[user_k]
            password = snow_account[pass_k]
            # "<encrypted>" placeholders mean the real secret lives in the
            # credential store, keyed by the URL.
            if url and username == "<encrypted>" and password == "<encrypted>":
                userpass = mgr.get_clear_password(url, "dummy", app)
                if not userpass:
                    self.logger.error("Failed to get clear credentials for %s",
                                      url)
                    raise Exception("Failed to get clear credentials"
                                    " for {}".format(url))
                # Stored value is "user``password" — split on the separator.
                username, password = userpass.split("``")
            snow_account[user_k] = username
            snow_account[pass_k] = password
        if snow_account["proxy_port"]:
            snow_account["proxy_port"] = int(snow_account["proxy_port"])

        if utils.is_false(snow_account["proxy_enabled"]):
            # Proxy disabled: blank out proxy settings so callers skip it.
            snow_account["proxy_url"] = ""
            snow_account["proxy_port"] = ""

        snow_url = snow_account["url"]
        if not snow_url:
            raise Exception("ServiceNow account has not been setup.")

        if not snow_url.startswith("https://"):
            snow_url = "https://{}".format(snow_url)

        if not snow_url.endswith("/"):
            snow_url = "{}/".format(snow_url)

        snow_account["url"] = snow_url
        return snow_account
Exemplo n.º 14
0
 def get_api_parameters(self):
     """Build the WAS search-API filter XML from configured extra params.

     Reads setupentity/extra_was_params from qualys.conf; only webApp.id
     entries are translated into Criteria elements.  Returns '' when no
     extra params are configured or none apply.
     """
     qualysConf = scc.getMergedConf("qualys")
     setup = qualysConf['setupentity']
     if 'extra_was_params' not in setup or setup['extra_was_params'] == '':
         return ''

     api_params = ''
     extra_params_root = ET.fromstring(setup['extra_was_params'])
     for child in extra_params_root:
         child_attribs = child.attrib
         if child_attribs['field'] == 'webApp.id':
             api_params += "<Criteria field=\"id\" operator=\"%s\">%s</Criteria>" % (
                 child_attribs['operator'], child.text)

     if api_params == '':
         return ''
     return "<ServiceRequest><filters>" + api_params + "</filters></ServiceRequest>"
Exemplo n.º 15
0
def stanzaStatusInternal(args, fromCLI):
  """
  Returns boolean based on whether or not a given stanza in a given file has the "disabled" parameter set.

  A stanza is considered enabled when it exists in the merged conf and does
  not carry disabled=true.

  :param args: dict containing ARG_CONFIG, ARG_MODNAME and ARG_STANZA;
               ARG_AUTHSTR is accepted but not used here.
  :param fromCLI: accepted for interface parity with other handlers; unused.
  :return: dict with "enabled" (bool) and "stanzas" (the merged conf).
  """
  paramsReq = (ARG_CONFIG, ARG_MODNAME, ARG_STANZA)
  paramsOpt = (ARG_AUTHSTR,)
  comm.validateArgs(paramsReq, paramsOpt, args)

  returnDict = {"enabled" : False}
  stanza = args[ARG_STANZA]

  # NOTE: the unused local `authStr` (extracted from ARG_AUTHSTR but never
  # read) has been removed; ARG_AUTHSTR remains an accepted optional arg.

  currStatus = comm.getMergedConf(args[ARG_CONFIG])

  if stanza in currStatus:
    returnDict["enabled"] = True # found the stanza, let's say it's enabled unless we learn otherwise.
    if KEY_DISABLED in currStatus[stanza]:
      if "true" == currStatus[stanza][KEY_DISABLED].lower():
        returnDict["enabled"] = False # if disabled=true, override what we thought above.

  returnDict["stanzas"] = currStatus
  return returnDict
Exemplo n.º 16
0
# Command-line options take precedence over configuration-file values.
if options.api_server:
    api_server = options.api_server

if options.username:
    api_user = options.username

if options.password:
    api_password = options.password

if options.start_date:
    start_date = options.start_date

# Scratch directory under the app root for temporary files.
temp_directory = APP_ROOT + '/tmp'

qualysConf = scc.getMergedConf("qualys")

appConfig = ApplicationConfiguration()
appConfig.load()

# Fall back to qualys.conf for anything not supplied on the command line.
if proxy is None:
    proxy = qualysConf['setupentity']['proxy_server']

if api_server is None:
    #if not passed via CLI argument then load from config file
    api_server = qualysConf['setupentity']['api_server']

# Last resort: prompt interactively (raw_input => this is a Python 2 script).
if api_server is None or api_server == '':
    api_server = raw_input("QG API Server:")

if api_user is None or api_user == '':
Exemplo n.º 17
0
    def handle(self, in_string):
        textchars = bytearray({7, 8, 9, 10, 12, 13, 27}
                              | set(range(0x20, 0x100)) - {0x7f})
        is_binary_string = lambda bytes: bool(bytes.translate(None, textchars))
        debug = ""
        user = ""
        result = ""
        reason = ""
        form = {"action": "", "path": "", "param1": ""}
        try:
            conf = getMergedConf(app_name)
            in_payload = json.loads(in_string)

            if in_payload['method'] != "POST":
                return {
                    'payload': {
                        "message":
                        "Webservice is working but it must be called via POST"
                    },
                    'status': 200
                }

            def runCommand(cmds, this_env, status_codes=[]):
                p = subprocess.Popen(cmds,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     shell=False,
                                     env=this_env)
                o = p.communicate()
                status_codes.append(p.returncode)
                return str(o[0]) + "\n"

            def runCommandGit(git_output, git_status_codes, env_git, cmds):
                git_output.append({
                    "type": "cmd",
                    "content": '$ ' + " ".join(cmds)
                })
                git_output.append({
                    "type":
                    "out",
                    "content":
                    runCommand(cmds, env_git, git_status_codes)
                })
                git_output.append({
                    "type":
                    "cmd",
                    "content":
                    'Ended with code: ' + str(git_status_codes[-1])
                })

            def runCommandCustom(cmds, env_copy):
                # TODO timeout after: int(conf["global"]["run_timeout"])
                p = subprocess.Popen(cmds,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     shell=True,
                                     env=env_copy)
                o = p.communicate()
                return str(o[0]) + "\n"

            def git(message, git_status_codes, git_output, file1, file2=None):
                if confIsTrue("git_autocommit", False):
                    try:
                        files = [file1]
                        if file2 != None:
                            files.append(file2)

                        cmds = [
                            'git', 'diff', '--no-ext-diff', '--quiet',
                            '--exit-code'
                        ]
                        cmds.extend(files)
                        runCommandGit(git_output, git_status_codes, env_git,
                                      cmds)

                        if git_status_codes.pop() == 1:
                            git_output[-1]['content'] += ' (There are changes)'
                            cmds = ['git', 'add']
                            cmds.extend(files)
                            runCommandGit(git_output, git_status_codes,
                                          env_git, cmds)
                            cmds = ['git', 'commit', '-uno', '-m', message]
                            runCommandGit(git_output, git_status_codes,
                                          env_git, cmds)
                        else:
                            git_output[-1]['content'] += ' (No changes)'

                    except Exception as ex:
                        template = "{0}: {1!r}"
                        git_output.append({
                            "type":
                            "desc",
                            "content":
                            "Git failed. Is git installed and configured correctly?"
                        })
                        git_output.append({
                            "type":
                            "out",
                            "content":
                            template.format(type(ex).__name__, ex.args)
                        })
                        git_status_codes.append(1)

            def confIsTrue(param, defaultValue):
                if param not in conf["global"]:
                    return defaultValue
                if conf["global"][param].lower().strip() in ("1", "true",
                                                             "yes", "t", "y"):
                    return True
                return False

            git_output = []
            git_status_codes = [-1]
            for formParam in in_payload['form']:
                form[formParam[0]] = formParam[1]

            user = in_payload['session']['user']

            # dont allow write or run access unless the user makes the effort to change the setting
            if form['action'] == 'run' and not confIsTrue(
                    "run_commands", False):
                reason = "missing_perm_run"

            elif ((form['action'] in [
                    'delete', 'rename', 'newfolder', 'newfile', 'fileupload',
                    'fileuploade'
            ]) or (form['action'] == "save" and form['path'] != "")
                  ) and not confIsTrue("write_access", False):
                reason = "missing_perm_write"

            elif form['action'] == "save" and form[
                    'path'] == "" and confIsTrue("hide_settings", False):
                reason = "config_locked"

            else:
                env_copy = os.environ.copy()
                env_git = env_copy.copy()
                if confIsTrue("git_autocommit", False):
                    git_output.append({
                        "type": "out",
                        "content": "cwd = " + os.getcwd() + "\n"
                    })
                    try:
                        git_autocommit_dir = conf["global"][
                            "git_autocommit_dir"].strip("\"")
                        if git_autocommit_dir != "":
                            env_git["GIT_DIR"] = os.path.join(
                                SPLUNK_HOME, git_autocommit_dir)
                            git_output.append({
                                "type":
                                "out",
                                "content":
                                "GIT_DIR=" +
                                os.path.join(SPLUNK_HOME, git_autocommit_dir)
                            })
                    except KeyError:
                        pass
                    try:
                        git_autocommit_work_tree = conf["global"][
                            "git_autocommit_work_tree"].strip("\"")
                        if git_autocommit_work_tree != "":
                            env_git["GIT_WORK_TREE"] = os.path.join(
                                SPLUNK_HOME, git_autocommit_work_tree)
                            git_output.append({
                                "type":
                                "out",
                                "content":
                                "GIT_WORK_TREE=" + os.path.join(
                                    SPLUNK_HOME, git_autocommit_work_tree)
                            })
                    except KeyError:
                        pass

                # when calling read or write with an empty argument it means we are trying to change the config
                if (form['action'] == 'read'
                        or form['action'] == 'save') and form['path'] == "":
                    localfolder = os.path.join(os.path.dirname(__file__), '..',
                                               'local')
                    form['path'] = os.path.join(os.path.dirname(__file__),
                                                '..', 'local',
                                                app_name + '.conf')
                    if not os.path.exists(localfolder):
                        os.makedirs(localfolder)
                    if not os.path.exists(form['path']):
                        shutil.copyfile(
                            os.path.join(os.path.dirname(__file__), '..',
                                         'default',
                                         app_name + '.conf.example'),
                            form['path'])

                if form['action'][:5] == 'btool' or form[
                        'action'] == 'run' or form['action'] == 'init' or form[
                            'action'][:3] == 'git':
                    system = platform.system()
                    os.chdir(SPLUNK_HOME)
                    if system != "Windows" and system != "Linux" and system != "Darwin":
                        reason = "Unable to run commands on this operating system: " + system
                    else:
                        if system == "Windows":
                            cmd = "bin\\splunk"
                        else:
                            cmd = "./bin/splunk"

                        if form['action'] == 'init':
                            result = {}
                            result['files'] = runCommand(
                                [cmd, 'btool', 'check', '--debug'], env_copy)
                            result['conf'] = conf

                        elif form['action'] == 'btool-check':
                            result = runCommand(
                                [cmd, 'btool', 'check', '--debug'], env_copy)
                            result = result + runCommand(
                                [cmd, 'btool', 'find-dangling'], env_copy)
                            result = result + runCommand(
                                [cmd, 'btool', 'validate-strptime'], env_copy)
                            result = result + runCommand(
                                [cmd, 'btool', 'validate-regex'], env_copy)

                        elif form['action'] == 'btool-list':
                            result = runCommand([
                                cmd, 'btool', form['path'], 'list', '--debug'
                            ], env_copy)

                        elif form['action'] == 'git-log':
                            os.chdir(form['path'])
                            result = runCommand(
                                ['git', 'log', '--stat', '--max-count=100'],
                                env_git)

                        elif form['action'] == 'git-history':
                            os.chdir(os.path.join(SPLUNK_HOME, form['param1']))
                            result += runCommand([
                                'git', 'log', '--follow', '-p', '--',
                                os.path.join(SPLUNK_HOME, form['path'])
                            ], env_git)

                        elif form['action'] == 'run':
                            # dont need to check if we are inside Splunk dir. User can do anything with run command anyway.
                            file_path = os.path.join(SPLUNK_HOME,
                                                     form['param1'])
                            os.chdir(file_path)
                            result = runCommandCustom(form['path'], env_copy)

                else:
                    if form['action'][:4] == 'spec':
                        spec_path = os.path.join(SPLUNK_HOME, 'etc', 'system',
                                                 'README',
                                                 form['path'] + '.conf.spec')
                        if os.path.exists(spec_path):
                            with open(spec_path, 'r') as fh:
                                result = fh.read()

                        apps_path = os.path.join(SPLUNK_HOME, 'etc', 'apps')
                        for d in os.listdir(apps_path):
                            spec_path = os.path.join(
                                apps_path, d, 'README',
                                form['path'] + '.conf.spec')
                            if os.path.exists(spec_path):
                                with open(spec_path, 'r') as fh:
                                    result = result + fh.read()

                    else:
                        base_path_abs = str(
                            os.path.abspath(os.path.join(SPLUNK_HOME)))
                        file_path = os.path.join(SPLUNK_HOME, form['path'])
                        file_path_abs = str(os.path.abspath(file_path))
                        if file_path_abs.find(base_path_abs) != 0:
                            reason = "Unable to access path [" + file_path_abs + "] out of splunk directory [" + base_path_abs + "]"

                        else:
                            if form['action'] == 'save':
                                if os.path.isdir(file_path):
                                    reason = "Cannot save file as a folder"

                                elif not os.path.exists(file_path):
                                    reason = "Cannot save to a file that does not exist"

                                else:
                                    os.chdir(os.path.dirname(file_path))
                                    git_output.append({
                                        "type":
                                        "desc",
                                        "content":
                                        "Committing file before saving changes"
                                    })
                                    git("unknown", git_status_codes,
                                        git_output, file_path)
                                    with open(file_path, "wb") as fh:
                                        fh.write(form['file'])
                                    git_output.append({
                                        "type":
                                        "desc",
                                        "content":
                                        "Committing file after saving changes"
                                    })
                                    git(user + " save ", git_status_codes,
                                        git_output, file_path)

                            elif form['action'] == 'fs':

                                def pack(base, path, dirs, files):
                                    if len(path) == 0:
                                        for i in dirs:
                                            base[i] = {}
                                        base["."] = files
                                    else:
                                        pack(base[path[0]], path[1:], dirs,
                                             files)

                                result = {}
                                cut = len(SPLUNK_HOME.split(os.path.sep))
                                depth = int(conf["global"]["cache_file_depth"])
                                for root, dirs, files in os.walk(SPLUNK_HOME):
                                    paths = root.split(os.path.sep)[cut:]
                                    pack(result, paths, dirs, files)
                                    if len(paths) >= depth:
                                        del dirs[:]

                            elif form['action'] == 'read':
                                if os.path.isdir(file_path):

                                    result = []
                                    for f in os.listdir(file_path):
                                        if os.path.isdir(
                                                os.path.join(file_path, f)):
                                            # for sorting
                                            result.append("D" + f)
                                        else:
                                            result.append("F" + f)

                                else:
                                    fsize = os.path.getsize(
                                        file_path) / 1000000
                                    if fsize > int(
                                            conf["global"]["max_file_size"]):
                                        reason = "File too large to open. File size is " + str(
                                            fsize
                                        ) + " MB and the configured limit is " + conf[
                                            "global"]["max_file_size"] + " MB"
                                    else:
                                        with open(file_path, 'r') as fh:
                                            result = fh.read()
                                    if is_binary_string(result):
                                        reason = "unable to open binary file"

                            elif form['action'] == 'delete':
                                os.chdir(os.path.dirname(file_path))
                                git_output.append({
                                    "type":
                                    "desc",
                                    "content":
                                    "Committing file before it is deleted"
                                })
                                git("unknown", git_status_codes, git_output,
                                    file_path)
                                if os.path.isdir(file_path):
                                    shutil.rmtree(file_path)

                                else:
                                    os.remove(file_path)
                                git_output.append({
                                    "type": "desc",
                                    "content": "Deleting file"
                                })
                                git(user + " deleted ", git_status_codes,
                                    git_output, file_path)

                            elif form['action'] == 'fileupload':
                                os.chdir(file_path)
                                if os.path.exists(form['param1']):
                                    reason = "File already exists"
                                #elif re.search(r'[^A-Za-z0-9_\- \.\(\)]', form['param1']):
                                #	reason = "Uploaded filename contains invalid characters"
                                else:
                                    with open(form['param1'], "wb") as fh:
                                        idx = form['file'].index(',')
                                        fh.write(
                                            base64.b64decode(
                                                form['file'][idx:]))
                                    git_output.append({
                                        "type":
                                        "desc",
                                        "content":
                                        "Adding uploaded file"
                                    })
                                    git(user + " uploaded ", git_status_codes,
                                        git_output, form['param1'])

                            else:
                                if re.search(r'[^A-Za-z0-9_\- \.\(\)]',
                                             form['param1']):
                                    reason = "New name contains invalid characters"

                                elif form['action'] == 'rename':
                                    new_path = os.path.join(
                                        os.path.dirname(file_path),
                                        form['param1'])
                                    if os.path.exists(new_path):
                                        reason = "That already exists"

                                    else:
                                        os.chdir(os.path.dirname(file_path))
                                        git_output.append({
                                            "type":
                                            "desc",
                                            "content":
                                            "Committing file before renaming"
                                        })
                                        git("unknown", git_status_codes,
                                            git_output, file_path)
                                        os.rename(file_path, new_path)
                                        git_output.append({
                                            "type":
                                            "desc",
                                            "content":
                                            "Committing renamed file"
                                        })
                                        git(user + " renamed",
                                            git_status_codes, git_output,
                                            new_path, file_path)

                                else:
                                    new_path = os.path.join(
                                        file_path, form['param1'])
                                    if os.path.exists(new_path):
                                        reason = "That already exists"

                                    elif form['action'] == 'newfolder':
                                        os.makedirs(new_path)

                                    elif form['action'] == 'newfile':
                                        open(new_path, 'w').close()
                                        os.chdir(os.path.dirname(new_path))
                                        git(user + " new", git_status_codes,
                                            git_output, new_path)

            # result may contain binary if there is an attempted read on a binary file. This will break the json
            if reason != "":
                result = ""
            if not confIsTrue("git_autocommit", False):
                git_output = ""
            logger.info(
                'user={} action={} item="{}" param1="{}" reason="{}"'.format(
                    user, form['action'], form['path'], form['param1'],
                    reason))
            return {
                'payload': {
                    'result': result,
                    'reason': reason,
                    'debug': debug,
                    'git': git_output,
                    'git_status': max(git_status_codes)
                },
                'status': 200
            }

        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            logger.info('user={} action={} item="{}" param1="{}"'.format(
                user, form['action'], form['path'], form['param1']))
            logger.warn('caught error {} debug={}'.format(message, debug))
            return {
                'payload': {
                    'reason': message,
                    'debug': debug
                },
                'status': 200
            }
Exemplo n.º 18
0
# Import the required libraries
import xmlrpclib
import sys
import os
import nCircleAPI
from splunk.clilib.cli_common import getMergedConf

# nCircle Variables
# Name of the Splunk conf file (ncircle.conf) read via getMergedConf() below.
config_file = "ncircle"
# JSON state file holding per-host/user max-ID counters between runs
# (loaded via nCircleAPI._getConfigFile below).
# NOTE(review): absolute path is hard-coded; breaks if SPLUNK_HOME is not
# /opt/splunk — consider deriving from the environment.
counter_file = "/opt/splunk/etc/apps/SplunkForNCircle/bin/counter_file.txt"

# Default starting counter used when the state file has no saved
# "<host><user>_maxId" entry yet (see the loop below).
latest_counter = 211000

# NOTE(review): _TIMEOUT is defined but never used in the visible code —
# confirm whether it was meant to be passed to the XML-RPC connection.
_TIMEOUT = 5
for key in getMergedConf(config_file).keys():
    try:
        host = getMergedConf(config_file)[key]["host"]
        user = getMergedConf(config_file)[key]["username"]
        password = getMergedConf(config_file)[key]["password"]

        # print host
        # print user
        # print password

        jsonStruct = nCircleAPI._getConfigFile(counter_file)
        if not jsonStruct.get(host + user + "_maxId"):
            jsonStruct[host + user + "_maxId"] = latest_counter

        try:
            # Connect to the server and login
            (server, session) = nCircleAPI._login(host, user, password)
Exemplo n.º 19
0
# Import the required libraries
import xmlrpclib
import xmltodict
import json
import sys
import os
import nCircleAPI
import time
from splunk.clilib.cli_common import getMergedConf

# Device Profiler item IDs to fetch from each nCircle server
# (each is passed to nCircleAPI._getItem in the loop below).
deviceProfilers = ['DP.9','DP.10','DP.11','DP.13','DP.23','DP.25','DP.26','DP.27','DP.28','DP.29','DP.33','DP.35']
# Passed through to nCircleAPI._getItem as its list argument; starts empty.
dpList = {}
# Name of the Splunk conf file (ncircle.conf) read via getMergedConf() below.
config_file = "ncircle"

# NOTE(review): _TIMEOUT is defined but never used in the visible code.
_TIMEOUT = 5
for key in getMergedConf(config_file).keys():
        try:
                host = getMergedConf(config_file)[key]['host']
                user = getMergedConf(config_file)[key]['username']
                password = getMergedConf(config_file)[key]['password']

                try:
                        # Connect to the server and login
                        (server, session) = nCircleAPI._login(host, user, password)
                        for dp in deviceProfilers:
                                deviceProfilerDetail = nCircleAPI._getItem(server, session, dpList, dp)
                                deviceProfilerDetail['authenticationKey'] = 'anonymized'
                                print "{",
                                nCircleAPI._printJson(deviceProfilerDetail)
                                print "}"
                        nCircleAPI._logout(server, session)
Exemplo n.º 20
0
    def _run(self, configuration_dict=None):
        """
        Fetch Qualys WAS (Web Application Scanning) findings and log them
        for Splunk indexing.

        Reads the merged "qualys" conf, builds a WASDetectionPopulatorConfiguration,
        applies a checkpoint-based ``lastTestedDate GREATER`` filter plus any
        extra WAS API filters from setupentity/extra_was_params (an XML
        fragment), then runs the WAS detection populator in single- or
        multi-threaded mode and advances the checkpoint when at least one
        host was logged.

        :type configuration_dict: dict
        :param configuration_dict: unused in the visible body — TODO confirm
            whether callers rely on it.
        :rtype : object
        """

        qlogger.info("Start")

        # NOTE(review): this try has no matching except/finally in the
        # visible file — the snippet appears truncated here; confirm against
        # the full source.
        try:
            qualysConf = scc.getMergedConf("qualys")

            # NOTE(review): output_to_stdout is assigned but never read in
            # the visible code.
            output_to_stdout = True

            detection_logger = logging.getLogger('WAS_DETECTIONS')

            # Capture the pull start time now so the checkpoint saved at the
            # end reflects when the run began, not when it finished.
            start_time = datetime.utcnow()

            preserve_api_output = self.preserve_api_output
            kbPopulator = QualysKnowledgebasePopulator()
            kbPopulator.preserve_api_output = preserve_api_output

            # Per-setup toggles: log individual findings and/or a per-webapp
            # summary event.
            log_detections = bool_value(
                qualysConf['setupentity']['log_individual_findings'])
            log_host_summary = bool_value(
                qualysConf['setupentity']['log_webapp_summary'])

            if log_detections or log_host_summary:
                detection_configuration = WASDetectionPopulatorConfiguration(
                    kbPopulator, detection_logger)
                # Falls back to the configured start date on the first run.
                cp_last_run_datetime = self.checkpointData.get(
                    'last_run_datetime', self.STARTDATE)

                if cp_last_run_datetime:
                    try:
                        qlogger.info("WAS findings were last fetched on %s",
                                     cp_last_run_datetime)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "WAS findings were last fetched on %s" %
                            cp_last_run_datetime)
                        # Parsed only to validate the checkpoint format; the
                        # API filter below uses the raw string.
                        last_fetched_date_time = datetime.strptime(
                            cp_last_run_datetime, '%Y-%m-%dT%H:%M:%SZ')

                        # Only fetch findings tested after the last run.
                        detection_configuration.add_detection_api_filter(
                            'lastTestedDate', 'GREATER', cp_last_run_datetime)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Fetching WAS findings data for web apps which were scanned after %s"
                            % cp_last_run_datetime)
                        qlogger.info(
                            "Fetching WAS findings data for Hosts which were scanned after %s",
                            cp_last_run_datetime)
                    except ValueError:
                        # NOTE(review): if strptime raised, last_fetched_date_time
                        # was never bound — this line would itself raise
                        # NameError. It should log cp_last_run_datetime instead.
                        qlogger.error("Incorrect date format found: %s",
                                      last_fetched_date_time)

                qlogger.info("Fetching all WAS detection data")

                # Setup custom detection API parameters from
                # setupentity/extra_was_params (expected to be an XML fragment
                # of elements with 'field'/'operator' attributes).

                # NOTE(review): extra_params is assigned but never used in
                # the visible code (the XML path adds filters directly).
                extra_params = None
                if 'extra_was_params' in qualysConf[
                        'setupentity'] and qualysConf['setupentity'][
                            'extra_was_params'] != '':
                    qlogger.info("Parsing extra WAS parameter string:%s",
                                 qualysConf['setupentity']['extra_was_params'])
                    qualysModule.splunkpopulator.utils.printStreamEventXML(
                        "_internal", "Parsing extra WAS parameter string:%s" %
                        qualysConf['setupentity']['extra_was_params'])
                    try:
                        extra_params_root = ET.fromstring(
                            qualysConf['setupentity']['extra_was_params'])

                        # One API filter per child element: field, operator,
                        # and the element text as the value.
                        for child in extra_params_root:
                            child_attribs = child.attrib
                            qlogger.info("Adding WAS param: %s %s %s",
                                         child_attribs['field'],
                                         child_attribs['operator'], child.text)
                            qualysModule.splunkpopulator.utils.printStreamEventXML(
                                "_internal", "Adding WAS param: %s %s %s" %
                                (child_attribs['field'],
                                 child_attribs['operator'], child.text))
                            detection_configuration.add_detection_api_filter(
                                child_attribs['field'],
                                child_attribs['operator'], child.text)
                    # Py2 except syntax; NOTE(review): ET.fromstring raises
                    # ET.ParseError (a ValueError subclass in some versions) —
                    # confirm this catches the intended parse failures.
                    except ValueError, e:
                        qlogger.info(
                            "Error parsing extra WAS parameters: %s Error: %s",
                            qualysConf['setupentity']['extra_was_params'],
                            e.message)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Error parsing extra WAS parameters: %s Error: %s"
                            % (qualysConf['setupentity']['extra_was_params'],
                               e.message))

                # Wire run-level settings into the populator configuration.
                detection_configuration.host = self.HOST
                detection_configuration.index = self.INDEX
                detection_configuration.preserve_api_output = preserve_api_output
                detection_configuration.collect_advanced_host_summary = True
                detection_configuration.log_host_detections = log_detections
                detection_configuration.log_host_summary = log_host_summary
                detection_configuration.truncation_limit = 5000
                detection_configuration.log_host_details_in_detection = bool_value(
                    qualysConf['setupentity']
                    ['log_host_details_in_detections'])

                try:
                    # Configure which fields to log for HOSTSUMMARY events.

                    if self.settings.get('host_summary_fields'):
                        WASDetectionPopulator.host_fields_to_log = self.settings.get(
                            'host_summary_fields')

                    # Setup which fields to log for HOSTVULN events.
                    if self.settings.get('detection_fields'):
                        WASDetectionPopulator.detection_fields_to_log = self.settings.get(
                            'detection_fields')

                    # Multi-threading is opt-in and only honored if the key
                    # exists in the conf.
                    use_multi_threading = False
                    if 'use_multi_threading_for_was' in qualysConf[
                            'setupentity']:
                        use_multi_threading = bool_value(
                            qualysConf['setupentity']
                            ['use_multi_threading_for_was'])

                    total_logged = 0

                    if use_multi_threading:
                        num_threads = int(
                            qualysConf['setupentity']['num_threads_for_was'])
                        # Clamp out-of-range thread counts to a safe default.
                        if num_threads < 0 or num_threads > 10:
                            num_threads = 2
                        # NOTE(review): config is built but never used — the
                        # coordinator receives num_threads directly.
                        config = {"num_threads": num_threads}
                        qlogger.info(
                            "Running in multi-thread mode with num_threads=%s",
                            num_threads)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Running in multi-thread mode with num_threads=%s"
                            % num_threads)
                        wfc = WASFindingsFetchCoordinator(
                            num_threads, detection_configuration)
                        wfc.coordinate()
                        total_logged = wfc.getLoggedHostsCount()
                    else:
                        qlogger.info("Running in single thread mode")
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal", "Running in single thread mode")
                        detection_api_populator = WASDetectionPopulator(
                            detection_configuration)
                        detection_api_populator.run()
                        total_logged = detection_api_populator.get_host_logged_count

                    qlogger.info("Done loading detections for %d hosts.",
                                 total_logged)

                    # Store the date/time when the data pull was started, but
                    # only if at least one host was logged — otherwise keep
                    # the old checkpoint so nothing is skipped next run.
                    if total_logged > 0:
                        self.checkpointData[
                            'last_run_datetime'] = start_time.strftime(
                                '%Y-%m-%dT%H:%M:%SZ')
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "setting checkpointData last_run_datetime to %s" %
                            self.checkpointData['last_run_datetime'])

                        # Flip the first_run flag after the first successful pull.
                        first_run = self.checkpointData.get('first_run', True)
                        if first_run:
                            self.checkpointData['first_run'] = False

                        self.saveCheckpoint()

                # Py2 except syntax: populator failures are logged, not re-raised.
                except BasePopulatorException, e:
                    qlogger.error(e.message)
                    qualysModule.splunkpopulator.utils.printStreamEventXML(
                        "_internal", e.message)
Exemplo n.º 21
0
    def _run(self, configuration_dict=None):
        """
        Fetch Qualys VM host detections and log them for Splunk indexing.

        Optionally refreshes the knowledgebase first (required when the API
        qweb_version is below 8.3), filters detections by status and by a
        checkpoint-based ``vm_scan_since`` date, applies extra API parameters
        from setupentity/detection_params (JSON, with URL-query fallback),
        then runs the host detection populator in single- or multi-threaded
        mode and advances the checkpoint when at least one host was logged.

        :type configuration_dict: dict
        :param configuration_dict: unused in the visible body — TODO confirm
            whether callers rely on it.
        :rtype : object
        """

        qlogger.info("Start")

        # NOTE(review): this try has no matching except/finally in the
        # visible file — the snippet appears truncated here; confirm against
        # the full source.
        try:
            output_to_stdout = True

            # First read from the config file, then check if an option was
            # provided on the command line.
            log_output_directory = self.settings.get('log_output_directory',
                                                     None)

            # NOTE(review): output_to_stdout is set here but never read in
            # the visible code.
            if log_output_directory is not None and log_output_directory != '':
                output_to_stdout = False

            detection_logger = logging.getLogger('HOST_DETECTIONS')

            # Capture the pull start time now so the checkpoint saved at the
            # end reflects when the run began, not when it finished.
            start_time = datetime.utcnow()

            preserve_api_output = self.preserve_api_output
            kbPopulator = QualysKnowledgebasePopulator()
            kbPopulator.preserve_api_output = preserve_api_output

            # Both toggles default to True here (unlike the conf-driven WAS
            # variant of this method).
            log_detections = bool_value(
                self.settings.get('log_detections', True))
            log_host_summary = bool_value(
                self.settings.get('log_host_summary', True))

            if log_detections or log_host_summary:

                # Older API endpoints (< 8.3) do not embed QID details in the
                # detection output, so pull the full KB up front.
                if not qapi.client.qweb_version or qapi.client.qweb_version < 8.3:
                    qlogger.info(
                        'Fetching KB as part of detections because qweb_version=%s is less than 8.3.',
                        qapi.client.qweb_version)
                    qualysModule.splunkpopulator.utils.printStreamEventXML(
                        "_internal",
                        "Fetching KB as part of detections becuase qweb_version=%s is less than 8.3"
                        % qapi.client.qweb_version)
                    kbPopulator.run()
                detection_configuration = HostDetectionPopulatorConfiguration(
                    kbPopulator, detection_logger)

                # Restrict to the detection statuses of interest.
                detection_configuration.add_detection_api_filter(
                    'status', 'New,Active,Fixed,Re-Opened')

                # Falls back to the configured start date on the first run.
                cp_last_run_datetime = self.checkpointData.get(
                    'last_run_datetime', self.STARTDATE)
                if cp_last_run_datetime:
                    try:
                        qlogger.info("Last run date time in file is %s",
                                     cp_last_run_datetime)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal", "last run date time in file is %s" %
                            cp_last_run_datetime)
                        # Parsed only to validate the checkpoint format; the
                        # API filter below uses the raw string.
                        last_fetched_date_time = datetime.strptime(
                            cp_last_run_datetime, '%Y-%m-%dT%H:%M:%SZ')
                        # Only fetch hosts scanned since the last run.
                        detection_configuration.add_detection_api_filter(
                            'vm_scan_since', cp_last_run_datetime)
                        qlogger.info(
                            "Fetching detection data for Hosts which were scanned after %s",
                            cp_last_run_datetime)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Fetching detection data for Hosts which were scanned after %s"
                            % cp_last_run_datetime)
                    except ValueError:
                        # NOTE(review): if strptime raised, last_fetched_date_time
                        # was never bound — these lines would raise NameError.
                        # They should log cp_last_run_datetime instead.
                        qlogger.error("Incorrect date format found: %s",
                                      last_fetched_date_time)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal", "Incorrect date format %s" %
                            last_fetched_date_time)

                qlogger.info("Fetching all detection data")

                # Setup custom detection API parameters from
                # setupentity/detection_params: preferred format is JSON,
                # with a URL query-string fallback.
                qualysConf = scc.getMergedConf("qualys")
                extra_params = None
                if 'detection_params' in qualysConf[
                        'setupentity'] and qualysConf['setupentity'][
                            'detection_params'] != '':
                    qlogger.info("Parsing extra detection parameter string:%s",
                                 qualysConf['setupentity']['detection_params'])
                    qualysModule.splunkpopulator.utils.printStreamEventXML(
                        "_internal", "Parsing extra detection string %s" %
                        qualysConf['setupentity']['detection_params'])
                    try:
                        extra_params = json.loads(
                            qualysConf['setupentity']['detection_params'])
                    # Py2 except syntax: fall back to URL-param parsing when
                    # the string is not valid JSON.
                    except ValueError, e:
                        qlogger.info(
                            "Parameters are not in JSON format, parsing as regular URL params: %s. ERROR=%s",
                            qualysConf['setupentity']['detection_params'],
                            e.message)
                        extra_params = urlparse.parse_qs(
                            qualysConf['setupentity']['detection_params'])
                        # parse_qs yields value lists; join each back into a
                        # comma-separated string (Py2 tuple-unpacking lambda).
                        extra_params = dict(
                            map(lambda (k, v): (k, ','.join(v)),
                                extra_params.iteritems()))
                    if extra_params:
                        for name in extra_params:
                            qlogger.info(
                                "Adding detection param:%s with value%s", name,
                                extra_params[name])
                            qualysModule.splunkpopulator.utils.printStreamEventXML(
                                "_internal",
                                "Adding detection param:%s with value %s" %
                                (name, extra_params[name]))
                            detection_configuration.add_detection_api_filter(
                                name, extra_params[name])
                    else:
                        qlogger.error(
                            "Error setting extra detection API parameters via string:%s",
                            qualysConf['setupentity']['detection_params'])

                # Wire run-level settings into the populator configuration.
                detection_configuration.host = self.HOST
                detection_configuration.index = self.INDEX
                detection_configuration.preserve_api_output = preserve_api_output
                detection_configuration.collect_advanced_host_summary = True
                detection_configuration.log_host_detections = log_detections
                detection_configuration.log_host_summary = log_host_summary
                detection_configuration.truncation_limit = 5000
                detection_configuration.log_host_details_in_detection = bool_value(
                    qualysConf['setupentity']
                    ['log_host_details_in_detections'])

                try:
                    # Configure which fields to log for HOSTSUMMARY events.
                    if self.settings.get('host_summary_fields'):
                        HostDetectionPopulator.host_fields_to_log = self.settings.get(
                            'host_summary_fields')

                    # Setup which fields to log for HOSTVULN events.
                    if self.settings.get('detection_fields'):
                        HostDetectionPopulator.detection_fields_to_log = self.settings.get(
                            'detection_fields')

                    use_multi_threading = bool_value(
                        qualysConf['setupentity']['use_multi_threading'])

                    total_logged = 0
                    if use_multi_threading:
                        num_threads = int(
                            qualysConf['setupentity']['num_threads'])
                        # Clamp out-of-range thread counts to a safe default.
                        if num_threads < 0 or num_threads > 10:
                            num_threads = 2
                        config = {"num_threads": num_threads}
                        qlogger.info(
                            "Running in multi-thread mode with num_threads=%s",
                            num_threads)
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Running in multi-thread mode with num_threads=%s"
                            % num_threads)
                        dfc = DetectonFetchCoordinator(
                            config, detection_configuration)
                        dfc.coordinate()
                        total_logged = dfc.get_host_logged_count
                    else:
                        qlogger.info("Running in single thread mode")
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal", "Running in single thread mode")
                        detection_api_populator = HostDetectionPopulator(
                            detection_configuration)
                        detection_api_populator.run()
                        total_logged = detection_api_populator.get_host_logged_count

                    qlogger.info("Done loading detections for %d hosts.",
                                 total_logged)

                    # Store the date/time when the data pull was started, but
                    # only if at least one host was logged — otherwise keep
                    # the old checkpoint so nothing is skipped next run.
                    #TODO: update checkpoint at this point
                    if total_logged > 0:
                        self.checkpointData[
                            'last_run_datetime'] = start_time.strftime(
                                '%Y-%m-%dT%H:%M:%SZ')
                        qualysModule.splunkpopulator.utils.printStreamEventXML(
                            "_internal",
                            "Setting checkpointData last_run_datetime to %s" %
                            self.checkpointData['last_run_datetime'])

                        # Flip the first_run flag after the first successful pull.
                        first_run = self.checkpointData.get('first_run', True)
                        if first_run:
                            self.checkpointData['first_run'] = False

                        self.saveCheckpoint()

                # Py2 except syntax: populator failures are logged, not re-raised.
                except BasePopulatorException, e:
                    qlogger.error(e.message)
                    qualysModule.splunkpopulator.utils.printStreamEventXML(
                        "_internal", e.message)
Exemplo n.º 22
0
            elif a.startswith("debug"):
                debug = True

    error = False
    # check that everthing is ok...
    if index == "" and not testmode:
        results =  splunk.Intersplunk.generateErrorResults("Error: you need to supply a valid index name. It is suggested you not use your main index and create a special cache index")

    if debug:
        results =  splunk.Intersplunk.generateErrorResults("DEBUG: file=%s, path=%s, index=%s, format=%s, marker=%s" % (file, path, index, format, marker) )

    # if no error do the work 
    if results == "":
        results,dummyresults,settings = splunk.Intersplunk.getOrganizedResults()
        
        config = cli_common.getMergedConf('indexes')
        if not index in config:
            error   = True
            results = splunk.Intersplunk.generateErrorResults("index='" + index + "' does not exist.")
            
        
        
	if not error and len(results) > 0 :
            results = writeEventCache(getFilename(file, path, False), results)
            
except Exception, e:
    results = splunk.Intersplunk.generateErrorResults(str(e))
    
splunk.Intersplunk.outputResults(results)

Exemplo n.º 23
0
def run():
    """Entry point: run the configured Qualys populators.

    Loads the stanza configuration, validates API credentials and proxy
    settings from the merged ``qualys`` conf, then for each configured
    stanza dispatches the matching populator (``knowledge_base``,
    ``host_detection`` or ``was_findings``), sleeping between stanzas.

    Exits the process (``exit(1)``) when credentials/server are missing
    or when the proxy is enabled without a host.

    NOTE: Python 2 code (print statements, ``iteritems``).
    """
    config = getConfig()

    # Nothing configured/enabled -> do nothing and return normally.
    if ('stanzaDict' not in config or \
        0 == len(config['stanzaDict'])):
        print "No input(s) configured /enabled. Doing nothing."
    else:
        # Credentials are looked up via the Splunk session key.
        api_user, api_password = qualysModule.splunkpopulator.utils.getCredentials(
            config['session_key'])
        # Merged (default + local) view of qualys.conf.
        qualysConf = scc.getMergedConf("qualys")

        # Conf values are strings; '1' means the checkbox is enabled.
        if qualysConf['setupentity']['enable_debug'] == '1':
            qualysModule.enableDebug(True)

        appConfig = ApplicationConfiguration()
        appConfig.load()

        api_server = qualysConf['setupentity']['api_server']
        useProxy = qualysConf['setupentity']['use_proxy']
        proxy = qualysConf['setupentity']['proxy_server']

        # All three of user/password/server are required to talk to the API.
        if (api_user is None or api_user == '' or \
            api_password is None or api_password == '' or \
            api_server is None or api_server == ''):
            print "API Server/Username/Password not configured. Exiting."
            exit(1)

        apiConfig = qapi.Client.APIConfig()
        apiConfig.username = api_user
        apiConfig.password = api_password
        apiConfig.serverRoot = api_server

        if useProxy == '1':
            apiConfig.useProxy = True
            if proxy != '':
                apiConfig.proxyHost = proxy
            else:
                # Proxy enabled but no host given: fatal misconfiguration.
                qlogger.error(
                    'You have enabled Proxy but Host field is empty. Cannot proceed further.'
                )
                qualysModule.splunkpopulator.utils.printStreamEventXML(
                    "_internal",
                    "You have enabled Proxy but Host field is empty. Cannot proceed further."
                )
                exit(1)
        # if

        # Install the shared API client and verify credentials up front.
        qapi.setupClient(apiConfig)
        qapi.client.validate()

        for stanzaName, stanza in config['stanzaDict'].iteritems():
            # print "Running for %s" % stanzaName
            # Stanza names look like "qualys://<input_name>"; strip the scheme.
            pureName = stanzaName.replace("qualys://", "")
            # duration default '288' — presumably a time string converted
            # below by timeStringToSeconds; TODO confirm its unit/meaning.
            duration = stanza.get("duration", '288')
            # Per-input checkpoint file under the checkpoint directory.
            cp = os.path.join(config.get("checkpoint_dir", "./"), pureName)
            h = stanza.get("host", "localhost")
            i = stanza.get("index", "main")
            sd = stanza.get("start_date", "1999-01-01T00:00:00Z")

            # PID file marks this input as running (removed at loop end).
            createPIDFile(pureName)

            if pureName == "knowledge_base":
                kbPopulator = qualysModule.qualys_log_populator.QualysKBPopulator(
                    settings=appConfig,
                    checkpoint=cp,
                    host=h,
                    index=i,
                    start_date=sd)
                kbPopulator.populate_lookup_table = True
                kbPopulator.run()
            # end of if  knowledge_base

            if pureName == "host_detection":
                detectionPopulator = qualysModule.qualys_log_populator.QualysDetectionPopulator(
                    settings=appConfig,
                    checkpoint=cp,
                    host=h,
                    index=i,
                    start_date=sd)
                detectionPopulator.run()
            # end of if host_ditection

            if pureName == "was_findings":
                wasFindingsPopulator = qualysModule.qualys_log_populator.QualysWasDetectionPopulator(
                    settings=appConfig,
                    checkpoint=cp,
                    host=h,
                    index=i,
                    start_date=sd)
                wasFindingsPopulator.run()
            # end of if was_findings

            # Sleep inside the loop: each stanza's run is followed by its
            # own configured delay before the next stanza is processed.
            durationSeconds = qualysModule.splunkpopulator.utils.timeStringToSeconds(
                duration)
            time.sleep(int(durationSeconds))

            removePIDFile(pureName)