Example No. 1
 def setUp(self):
     self.mock_time = mock.patch('time.time', return_value=10).start()
     self.mock_request = mock.patch('requests.Session.post').start()
     self.splunk = SplunkHandler(
         host=SPLUNK_HOST,
         port=SPLUNK_PORT,
         token=SPLUNK_TOKEN,
         index=SPLUNK_INDEX,
         hostname=SPLUNK_HOSTNAME,
         source=SPLUNK_SOURCE,
         sourcetype=SPLUNK_SOURCETYPE,
         verify=SPLUNK_VERIFY,
         timeout=SPLUNK_TIMEOUT,
         flush_interval=SPLUNK_FLUSH_INTERVAL,
         queue_size=SPLUNK_QUEUE_SIZE,
         debug=SPLUNK_DEBUG,
         retry_count=SPLUNK_RETRY_COUNT,
         retry_backoff=SPLUNK_RETRY_BACKOFF,
     )
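The patches started in setUp above are never stopped, so they would leak into any later tests. A minimal companion tearDown, sketched here with the standard mock.patch.stopall() helper (not part of the original example), could undo them:

 def tearDown(self):
     # stop every patch started via mock.patch(...).start() in setUp
     mock.patch.stopall()
     self.splunk = None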
Example No. 2
    def validateCredentials(self):
        result = None
        try:
            result = [
                0 <= int(x) < 256 for x in re.split(
                    r'\.',
                    re.match(r'^\d+\.\d+\.\d+\.\d+$',
                             self.IPAddress.text()).group(0))
            ].count(True) == 4
        except AttributeError:
            result = False
        nonLead = (self.LeadCheckBox.isChecked() and socket.gethostbyname(
            socket.gethostname()) != self.IPAddress.text())
        emptyIP = self.IPAddress.text() == ''
        if nonLead:
            QMessageBox.critical(
                self, 'Connection Error',
                'Non Lead Analyst attempting to connect as a Lead Analyst\n' +
                'Check lead checkbox if lead IP is entered\n' +
                'Uncheck lead checkbox if non Lead Analyst IP is entered')
        elif emptyIP:
            QMessageBox.critical(
                self, 'Connection Error', 'No IP Address entered\n' +
                'Enter a value from 0.0.0.0 to 255.255.255.255')
        elif not result:
            QMessageBox.critical(
                self, 'Connection Error', 'IP address is not valid\n' +
                'Enter an IP address between 0.0.0.0 to 255.255.255.255')
        else:
            try:
                lead = self.IPAddress.text().strip()
                self.port = int(self.portLE.text().strip())
                self.index = self.indexLE.text().strip()
                self.username = self.usernameLE.text().strip()
                self.password = self.passwordLE.text().strip()
                splunk = SplunkHandler('localhost', self.port, self.index,
                                       self.username, self.password)

                QMessageBox.information(
                    self, 'Connection Successful',
                    f'Connection to server from IP {self.IPAddress.text()}'
                    f' established!')
                if not self.validateIP:
                    label = QLabel('Lead IP Validated.')
                    label.setStyleSheet("QLabel { color: green}")
                self.IPAddress.setEnabled(False)
            except ConnectionError:
                QMessageBox.critical(
                    self, 'Connection Error',
                    'Connection could not be established\n' +
                    'Confirm that the server is active and running\n' +
                    'and that login information is correct.')
            except ValueError:
                QMessageBox.critical(self, 'Port Number Error',
                                     'Port number must be a numerical value')
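The nested regex, re.split and count(True) check above can be expressed more directly with the standard library's ipaddress module. A rough equivalent, with a helper name chosen purely for illustration:

import ipaddress

def is_valid_ipv4(text):
    # IPv4Address raises ValueError for anything that is not a
    # well-formed dotted-quad address (e.g. '256.1.1.1', 'abc', '')
    try:
        ipaddress.IPv4Address(text.strip())
        return True
    except ValueError:
        return False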
Example No. 3
 def setUp(self):
     self.splunk = SplunkHandler(
         host=SPLUNK_HOST,
         port=SPLUNK_PORT,
         username=SPLUNK_USERNAME,
         password=SPLUNK_PASSWORD,
         index=SPLUNK_INDEX,
         hostname=SPLUNK_HOSTNAME,
         source=SPLUNK_SOURCE,
         sourcetype=SPLUNK_SOURCETYPE,
         verify=SPLUNK_VERIFY
     )
Example No. 4
def init():
    client = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    SPLUNK_HOST = client.get_secret(VAULT_URL, 'splunk-host', secret_version=KeyVaultId.version_none).value
    SPLUNK_TOKEN = client.get_secret(VAULT_URL, 'splunk-token', secret_version=KeyVaultId.version_none).value
    EH_CONNECTION_STR = client.get_secret(VAULT_URL, 'eh-conn-str', secret_version=KeyVaultId.version_none).value
    splunk = SplunkHandler(
        host=SPLUNK_HOST,
        port='8088',
        token=SPLUNK_TOKEN,
        index='main',
        verify=False
    )
    init.ehclient = EventHubClient.from_connection_string(EH_CONNECTION_STR, receive_timeout=RECEIVE_TIMEOUT, retry_total=RETRY_TOTAL)
    logging.getLogger('').addHandler(splunk)
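Once init() has attached the handler to the root logger, ordinary logging calls are forwarded to the Splunk HTTP Event Collector on port 8088. A minimal usage sketch (logger name and message are illustrative):

init()
logging.getLogger(__name__).info('event hub consumer started')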
Example No. 5
 def setUp(self):
     self.splunk = SplunkHandler(
         host=SPLUNK_HOST,
         port=SPLUNK_PORT,
         token=SPLUNK_TOKEN,
         index=SPLUNK_INDEX,
         hostname=SPLUNK_HOSTNAME,
         source=SPLUNK_SOURCE,
         sourcetype=SPLUNK_SOURCETYPE,
         verify=SPLUNK_VERIFY,
         timeout=SPLUNK_TIMEOUT,
         flush_interval=SPLUNK_FLUSH_INTERVAL,
         queue_size=SPLUNK_QUEUE_SIZE,
     )
     self.splunk.testing = True
Example No. 6
    def __init__(self):

        # ----> Splunk Log to HEC <----
        splunk = SplunkHandler(host=os.environ['SPLUNK_HOST'],
                               port=os.environ['SPLUNK_PORT'],
                               token=os.environ['SPLUNK_TOKEN'],
                               index=os.environ['SPLUNK_INDEX'],
                               verify=False)

        # -----> SETUP LOGGING <-----
        self.logger = logging.getLogger(__name__)
        handler = logging.FileHandler('/tmp/MCP_Master_Log.log')
        handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.addHandler(splunk)
        self.logger.setLevel(logging.INFO)

        # -----> S3 INFO <-----
        self.s3 = resource('s3')

        # -----> DUPLICATE PROCESS PROTECTION <-----
        self.pid_file = '/tmp/rip_reddit.pid'
        self.buddy_pid = '/tmp/tweet.pid'
        self.pid_check()
        self.process_to_check = 'python36'

        # -----> REDDIT INFO <-----
        self.reddit = praw.Reddit(client_id=os.environ['CLIENT_ID'],
                                  client_secret=os.environ['CLIENT_SECRET'],
                                  username=os.environ['USERNAME'],
                                  password=os.environ['PASSWORD'],
                                  user_agent=os.environ['USER_AGENT'])

        self.subreddit = self.reddit.subreddit(os.environ['SUBREDDIT'])

        self.table_name = os.environ['REDDIT_TABLE_NAME']

        # -----> PICTURE RIP INFO <-----
        self.rekog = boto3.client('rekognition', region_name='us-east-2')
        self.image_path = '/tmp/images'
        if not os.path.exists(self.image_path):
            os.makedirs(self.image_path)
            self.logger.info("Created path: " + self.image_path)
Example No. 7
 def setUp(self):
     self.splunk = SplunkHandler(
         host=SPLUNK_HOST,
         port=SPLUNK_PORT,
         token=SPLUNK_TOKEN,
         index=SPLUNK_INDEX,
         hostname=SPLUNK_HOSTNAME,
         source=SPLUNK_SOURCE,
         sourcetype=SPLUNK_SOURCETYPE,
         verify=SPLUNK_VERIFY,
         timeout=SPLUNK_TIMEOUT,
         flush_interval=SPLUNK_FLUSH_INTERVAL,
         queue_size=SPLUNK_QUEUE_SIZE,
         debug=SPLUNK_DEBUG,
         retry_count=SPLUNK_RETRY_COUNT,
         retry_backoff=SPLUNK_RETRY_BACKOFF,
     )
     self.splunk.testing = True
Example No. 8
    def __init__(self):

        self.table_name = os.environ['REDDIT_TABLE_NAME']

        # ----> Splunk Log to HEC <----
        splunk = SplunkHandler(host=os.environ['SPLUNK_HOST'],
                               port=os.environ['SPLUNK_PORT'],
                               token=os.environ['SPLUNK_TOKEN'],
                               index=os.environ['SPLUNK_INDEX'],
                               verify=False)

        # -----> SETUP LOGGING <-----
        self.logger = logging.getLogger(__name__)
        handler = logging.FileHandler('/tmp/MCP_Master_Log.log')
        handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.addHandler(splunk)
        self.logger.setLevel(logging.INFO)

        # -----> S3 INFO <-----
        self.s3 = resource('s3')

        # -----> DUPLICATE PROCESS PROTECTION <-----
        self.pid_file = '/tmp/tweet.pid'
        self.buddy_pid = '/tmp/rip_reddit.pid'
        self.pid_check()
        self.process_to_check = 'python36'

        # -----> TWITTER AUTH <----
        mcp_auth = tweepy.OAuthHandler(os.environ['CONSUMER_KEY'],
                                       os.environ['CONSUMER_SECRET'])
        mcp_auth.set_access_token(os.environ['ACCESS_TOKEN'],
                                  os.environ['ACCESS_TOKEN_SECRET'])
        mcp_auth.secure = True
        self.mcp_tweet = tweepy.API(mcp_auth)

        self.kb_message = "#cats #catsoftwitter #python #reddit #IndyPy"

        self.myBot = self.mcp_tweet.get_user(screen_name="@Xonk_dp")
        self.logger.info(
            "-------------> CONNECTED TO TWITTER!!! <-----------------")
Example No. 9
 def test_init_queue_class_type_when_multiple_process(self):
     splunk = SplunkHandler(
         host=SPLUNK_HOST,
         port=SPLUNK_PORT,
         token=SPLUNK_TOKEN,
         index=SPLUNK_INDEX,
         hostname=SPLUNK_HOSTNAME,
         source=SPLUNK_SOURCE,
         sourcetype=SPLUNK_SOURCETYPE,
         verify=SPLUNK_VERIFY,
         timeout=SPLUNK_TIMEOUT,
         flush_interval=SPLUNK_FLUSH_INTERVAL,
         queue_size=SPLUNK_QUEUE_SIZE,
         debug=SPLUNK_DEBUG,
         retry_count=SPLUNK_RETRY_COUNT,
         retry_backoff=SPLUNK_RETRY_BACKOFF,
         multiple_process=True,
     )
     self.assertTrue(isinstance(splunk.queue, MQueue))
Example No. 10
def get_logger(name, config=None) -> logging.Logger:
    """
    function to get logger with extra settings from config
    :param config:
    :return:
    """
    l_log_config = config['LOGGING'] if config else log_config
    project = config['GENERAL']['tm4jProjectKey'] if config else gen_config['tm4jProjectKey']
    reporter = config['EXECUTION']['reporter'] if config else exc_config['reporter']
    l_log_level = get_log_level(l_log_config['configLevel'])

    console_formatter = logging.Formatter('-->  %(asctime)s - %(name)s - %(levelname)s %(funcName)s: %(message)s')
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(console_formatter)

    error_log_handler = logging.FileHandler('logs/error.log')
    error_log_handler.setFormatter(console_formatter)
    error_log_handler.setLevel(logging.ERROR)

    splunk_formatter = SplunkFormatter(logging_app='tm4j_adapter',
                                       project=project,
                                       reporter=reporter)
    splunk_handler = SplunkHandler(host=log_config['splunkHost'],
                                   port=log_config['splunkPort'],
                                   token=log_config['splunkToken'],
                                   index=log_config['splunkIndex'],
                                   record_format=True,
                                   sourcetype='json',
                                   debug=False)
    splunk_handler.setFormatter(splunk_formatter)
    splunk_handler.setLevel(logging.INFO)

    logger = logging.getLogger(name)
    logger.addHandler(console_handler)
    logger.addHandler(error_log_handler)
    logger.addHandler(splunk_handler)
    logger.setLevel(l_log_level)
    return logger
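A typical call site for get_logger, relying on the module-level log_config/gen_config/exc_config fallbacks shown above when no config mapping is passed (logger name and message are illustrative):

log = get_logger('tm4j_adapter')
log.info('run started')  # routed to the console, error-file and Splunk handlers per their levels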
Example No. 11
class TestSplunkHandler(unittest.TestCase):
    def setUp(self):
        self.mock_time = mock.patch('time.time', return_value=10).start()
        self.mock_request = mock.patch('requests.Session.post').start()
        self.splunk = SplunkHandler(
            host=SPLUNK_HOST,
            port=SPLUNK_PORT,
            token=SPLUNK_TOKEN,
            index=SPLUNK_INDEX,
            hostname=SPLUNK_HOSTNAME,
            source=SPLUNK_SOURCE,
            sourcetype=SPLUNK_SOURCETYPE,
            verify=SPLUNK_VERIFY,
            timeout=SPLUNK_TIMEOUT,
            flush_interval=SPLUNK_FLUSH_INTERVAL,
            queue_size=SPLUNK_QUEUE_SIZE,
            debug=SPLUNK_DEBUG,
            retry_count=SPLUNK_RETRY_COUNT,
            retry_backoff=SPLUNK_RETRY_BACKOFF,
        )

    def tearDown(self):
        self.splunk = None

    def test_init(self):
        self.assertIsNotNone(self.splunk)
        self.assertIsInstance(self.splunk, SplunkHandler)
        self.assertIsInstance(self.splunk, logging.Handler)
        self.assertEqual(self.splunk.host, SPLUNK_HOST)
        self.assertEqual(self.splunk.port, SPLUNK_PORT)
        self.assertEqual(self.splunk.token, SPLUNK_TOKEN)
        self.assertEqual(self.splunk.index, SPLUNK_INDEX)
        self.assertEqual(self.splunk.hostname, SPLUNK_HOSTNAME)
        self.assertEqual(self.splunk.source, SPLUNK_SOURCE)
        self.assertEqual(self.splunk.sourcetype, SPLUNK_SOURCETYPE)
        self.assertEqual(self.splunk.verify, SPLUNK_VERIFY)
        self.assertEqual(self.splunk.timeout, SPLUNK_TIMEOUT)
        self.assertEqual(self.splunk.flush_interval, SPLUNK_FLUSH_INTERVAL)
        self.assertEqual(self.splunk.max_queue_size, SPLUNK_QUEUE_SIZE)
        self.assertEqual(self.splunk.debug, SPLUNK_DEBUG)
        self.assertEqual(self.splunk.retry_count, SPLUNK_RETRY_COUNT)
        self.assertEqual(self.splunk.retry_backoff, SPLUNK_RETRY_BACKOFF)

        self.assertFalse(logging.getLogger('requests').propagate)
        self.assertFalse(logging.getLogger('splunk_handler').propagate)

    def test_splunk_worker(self):
        # Silence root logger
        log = logging.getLogger('')
        for h in log.handlers:
            log.removeHandler(h)

        log = logging.getLogger('test')
        for h in log.handlers:
            log.removeHandler(h)

        log.addHandler(self.splunk)
        log.warning('hello!')

        self.splunk.timer.join()  # Have to wait for the timer to exec

        expected_output = '{"event": "hello!", "host": "%s", "index": "%s", "source": "%s", ' \
                          '"sourcetype": "%s", "time": 10}' % \
                          (SPLUNK_HOSTNAME, SPLUNK_INDEX, SPLUNK_SOURCE, SPLUNK_SOURCETYPE)

        self.mock_request.assert_called_once_with(
            RECEIVER_URL,
            verify=SPLUNK_VERIFY,
            data=expected_output,
            timeout=SPLUNK_TIMEOUT,
            headers={'Authorization': "Splunk %s" % SPLUNK_TOKEN},
        )

    def test_splunk_worker_override(self):
        self.splunk.allow_overrides = True

        # Silence root logger
        log = logging.getLogger('')
        for h in log.handlers:
            log.removeHandler(h)

        log = logging.getLogger('test')
        for h in log.handlers:
            log.removeHandler(h)

        log.addHandler(self.splunk)
        log.warning('hello!', extra={'_time': 5, '_host': 'host', '_index': 'index'})

        self.splunk.timer.join()  # Have to wait for the timer to exec

        expected_output = '{"event": "hello!", "host": "host", "index": "index", ' \
                          '"source": "%s", "sourcetype": "%s", "time": 5}' % \
                          (SPLUNK_SOURCE, SPLUNK_SOURCETYPE)

        self.mock_request.assert_called_once_with(
            RECEIVER_URL,
            data=expected_output,
            headers={'Authorization': "Splunk %s" % SPLUNK_TOKEN},
            verify=SPLUNK_VERIFY,
            timeout=SPLUNK_TIMEOUT
        )

    def test_full_queue_error(self):
        self.splunk.allow_overrides = True
        self.splunk.max_queue_size = 10
        mock_write_log = patch.object(self.splunk, 'write_log').start()

        # Silence root logger
        log = logging.getLogger('')
        for h in log.handlers:
            log.removeHandler(h)

        log = logging.getLogger('test')
        for h in log.handlers:
            log.removeHandler(h)

        log.addHandler(self.splunk)

        for _ in range(20):
            log.warning('hello!', extra={'_time': 5, '_host': 'host', '_index': 'index'})

        self.splunk.timer.join()

        mock_write_log.assert_any_call("Log queue full; log data will be dropped.")

    def test_wait_until_empty_and_keep_ahead(self):
        self.splunk.allow_overrides = True
        self.splunk.force_keep_ahead = True
        self.splunk.max_queue_size = 10
        mock_write_log = patch.object(self.splunk, 'write_log').start()

        # Silence root logger
        log = logging.getLogger('')
        for h in log.handlers:
            log.removeHandler(h)

        log = logging.getLogger('test')
        for h in log.handlers:
            log.removeHandler(h)

        log.addHandler(self.splunk)

        # without force keep ahead, this would drop logs
        for _ in range(20):
            log.warning('hello!', extra={'_time': 5, '_host': 'host', '_index': 'index'})

        # use wait until empty instead of joining the timer
        # if this doesn't wait correctly, we'd expect to be missing calls to mock_request
        self.splunk.wait_until_empty()

        expected_output = '{"event": "hello!", "host": "host", "index": "index", ' \
                          '"source": "%s", "sourcetype": "%s", "time": 5}' % \
                          (SPLUNK_SOURCE, SPLUNK_SOURCETYPE)

        # two batches of 10 messages sent
        self.mock_request.assert_has_calls([call(
            RECEIVER_URL,
            data=expected_output * 10,
            headers={'Authorization': 'Splunk %s' % SPLUNK_TOKEN},
            verify=SPLUNK_VERIFY,
            timeout=SPLUNK_TIMEOUT
        )] * 2, any_order=True)

        # verify no logs dropped
        mock_write_log.assert_not_called()
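To make the test module above directly runnable with the standard unittest runner, the usual entry point (not shown in the excerpt) can be appended:

if __name__ == '__main__':
    unittest.main()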
Example No. 12
    def __init__(self):

        # ----> BOTO3 SETUP <----
        self.s3 = boto3.resource('s3')

        self.ec2 = boto3.resource('ec2')

        self.instance_name = 'MCP_cloud'
        self.image_id = 'ami-0cd3dfa4e37921605'  # 'ami-9c0638f9' - centos7
        self.image_type = 't2.micro'
        self.image_role = 'ec2-admin'

        # -----> ARGPARSE <-----
        help_text = "MCP: MASTER CONTROL PROGRAM: automated system to rip picture from reddit and post them on twitter"
        arg_parser = argparse.ArgumentParser(description=help_text)
        arg_parser.add_argument("-V", "--version", help="Show program version", action="store_true")
        arg_parser.add_argument("--config", "-c", help="Configuration file name")
        arg_parser.add_argument("--get_info", "-i", help="Get info on currently running EC2 instances",
                                action="store_true")

        self.get_info = False
        args = arg_parser.parse_args()
        if args.get_info:
            self.get_info = True
        if args.version:
            print("MCP Version - IndyPy")
        if args.config:
            self.config = args.config
        else:
            print("\n    Please specify a configuration file using -c\n")
            exit()

        # -----> CONFIGPARSER <-----
        parser = configparser.ConfigParser()
        parser.read(self.config)
        self.client_id = parser['reddit']['client_id']
        self.client_secret = parser['reddit']['client_secret']
        self.username = parser['reddit']['username']
        self.password = parser['reddit']['password']
        self.user_agent = parser['reddit']['user_agent']
        self.subreddit = parser['reddit']['subreddit']
        self.reddit_table_name = parser['dynamodb']['table_name']
        self.access_token = parser['twitter']['access_token']
        self.access_token_secret = parser['twitter']['access_token_secret']
        self.consumer_key = parser['twitter']['consumer_key']
        self.consumer_secret = parser['twitter']['consumer_secret']
        self.splunk_host = parser['splunk']['host']
        self.splunk_remote_host = parser['splunk']['remote_host']
        self.splunk_port = parser['splunk']['port']
        self.splunk_token = parser['splunk']['token']
        self.splunk_index = parser['splunk']['index']
        self.splunk_verify = parser['splunk']['verify']

        # ----> Splunk Log to HEC <----
        splunk = SplunkHandler(
            host=self.splunk_host,
            port=self.splunk_port,
            token=self.splunk_token,
            index=self.splunk_index,
            verify=False
        )

        # -----> SETUP LOGGING <-----
        self.logger = logging.getLogger(__name__)
        handler = logging.FileHandler('/var/log/MCP_Master_Log.log')
        handler.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        handler.setFormatter(formatter)
        self.logger.addHandler(handler)
        self.logger.addHandler(splunk)
        self.logger.setLevel(logging.INFO)
        self.logger.info("------------- MASTER CONTROL PROGRAM - ONLINE ------------- ")

        # -----> EC2 STARTUP SCRIPT <-----
        self.user_data = '#!/bin/bash \nyum -y update && yum -y upgrade \nyum -y install python36 python36-devel ' \
                         'python36-pip python36-setuptools git gcc \n' \
                         'python36 -m pip install --upgrade pip \npython36 -m pip install boto3 tweepy praw psutil ' \
                         'splunk_handler \n'\
                         'echo "ACCESS_TOKEN=' + self.access_token + '" >> /etc/environment\n' \
                         'echo "ACCESS_TOKEN_SECRET=' + self.access_token_secret + '" >> /etc/environment\n' \
                         'echo "CONSUMER_KEY=' + self.consumer_key + '" >> /etc/environment\n' \
                         'echo "CONSUMER_SECRET=' + self.consumer_secret + '" >> /etc/environment\n' \
                         'echo "CLIENT_ID=' + self.client_id + '" >> /etc/environment\n' \
                         'echo "CLIENT_SECRET=' + self.client_secret + '" >> /etc/environment\n' \
                         'echo "USERNAME='******'" >> /etc/environment\n' \
                         'echo "PASSWORD='******'" >> /etc/environment\n' \
                         'echo "USER_AGENT=' + self.user_agent + '" >> /etc/environment\n' \
                         'echo "SUBREDDIT=' + self.subreddit + '" >> /etc/environment\n' \
                         'echo "REDDIT_TABLE_NAME=' + self.reddit_table_name + '" >> /etc/environment\n' \
                         'echo "SPLUNK_HOST=' + self.splunk_remote_host + '" >> /etc/environment\n' \
                         'echo "SPLUNK_PORT=' + self.splunk_port + '" >> /etc/environment\n' \
                         'echo "SPLUNK_TOKEN=' + self.splunk_token + '" >> /etc/environment\n' \
                         'echo "SPLUNK_INDEX=' + self.splunk_index + '" >> /etc/environment\n' \
                         'echo "SPLUNK_VERIFY=' + self.splunk_verify + '" >> /etc/environment\n' \
                         'aws s3 cp s3://master-control-program/rip_reddit.py /tmp \n' \
                         'aws s3 cp s3://master-control-program/tweet.py /tmp \n' \
                         'touch /tmp/MCP_Master_Log.log \n' \
                         'crontab -l | { cat; echo "*/5 * * * * python36 /tmp/rip_reddit.py"; } | crontab - \n' \
                         'crontab -l | { cat; echo "*/5 * * * * python36 /tmp/tweet.py"; } | crontab - \n'

        # -----> COPY FILES TO S3 <-----
        try:
            self.s3.meta.client.upload_file('rip_reddit.py', 'master-control-program', 'rip_reddit.py')
            self.s3.meta.client.upload_file('tweet.py', 'master-control-program', 'tweet.py')
        except Exception as e:
            self.logger.error('ERROR: ' + str(e))
        self.logger.info("Initialization complete...Waiting 60 seconds for files to move, etc.....")
        time.sleep(60)
Example No. 13
class TestSplunkHandler(unittest.TestCase):
    def setUp(self):
        self.splunk = SplunkHandler(
            host=SPLUNK_HOST,
            port=SPLUNK_PORT,
            username=SPLUNK_USERNAME,
            password=SPLUNK_PASSWORD,
            index=SPLUNK_INDEX,
            hostname=SPLUNK_HOSTNAME,
            source=SPLUNK_SOURCE,
            sourcetype=SPLUNK_SOURCETYPE,
            verify=SPLUNK_VERIFY
        )

    def tearDown(self):
        self.splunk = None

    def test_init(self):
        self.assertIsNotNone(self.splunk)
        self.assertIsInstance(self.splunk, SplunkHandler)
        self.assertIsInstance(self.splunk, logging.Handler)
        self.assertEqual(self.splunk.host, SPLUNK_HOST)
        self.assertEqual(self.splunk.port, SPLUNK_PORT)
        self.assertEqual(self.splunk.username, SPLUNK_USERNAME)
        self.assertEqual(self.splunk.password, SPLUNK_PASSWORD)
        self.assertEqual(self.splunk.index, SPLUNK_INDEX)
        self.assertEqual(self.splunk.hostname, SPLUNK_HOSTNAME)
        self.assertEqual(self.splunk.source, SPLUNK_SOURCE)
        self.assertEqual(self.splunk.sourcetype, SPLUNK_SOURCETYPE)
        self.assertEqual(self.splunk.verify, SPLUNK_VERIFY)

        self.assertFalse(logging.getLogger('requests').propagate)
        self.assertFalse(logging.getLogger('splunk_handler').propagate)


    @mock.patch('splunk_handler.Thread')
    def test_emit(self, thread):
        self.splunk.emit('hello')

        self.assertEqual(
            mock.call(target=self.splunk._async_emit, args=('hello',)),
            thread.mock_calls[0]
        )
        thread.return_value.start.assert_called_once_with()

    @mock.patch('splunk_handler.requests.post')
    def test_async_emit(self, post):
        log = logging.getLogger('test')
        log.addHandler(self.splunk)
        log.warning('hello!')

        post.assert_called_once_with(
            RECEIVER_URL,
            auth=(SPLUNK_USERNAME, SPLUNK_PASSWORD),
            data='hello!',
            params={
                'host': SPLUNK_HOSTNAME,
                'index': SPLUNK_INDEX,
                'source': SPLUNK_SOURCE,
                'sourcetype': SPLUNK_SOURCETYPE
            },
            verify=SPLUNK_VERIFY
        )
Example No. 14
                record.x_real_ip = 'N/A'
        else:
            record.tenant = 'system'
            record.x_real_ip = 'localhost'
            record.user_agent = 'n/a'

        return super().format(record)


__stream_handler = logging.StreamHandler(stream=sys.stderr)
__stream_handler.setFormatter(
    __RequestFormatter(
        '[%(asctime)s] distributey {%(pathname)s:%(lineno)d} %(levelname)s - '
        'tenant: %(tenant)s, origin: %(x_real_ip)s, '
        'ua: %(user_agent)s - %(message)s'))

logger = logging.getLogger()
logger.setLevel(__LOGLVL)
logger.addHandler(__stream_handler)

if splunk_enabled:
    __splunk = SplunkHandler(
        host=config.get_config_by_keypath('SPLUNK_HOST'),
        port=config.get_config_by_keypath('SPLUNK_PORT'),
        protocol=config.get_config_by_keypath('SPLUNK_PROTOCOL'),
        verify=config.get_config_by_keypath('SPLUNK_VERIFY'),
        token=config.get_config_by_keypath('SPLUNK_TOKEN'),
        index=config.get_config_by_keypath('SPLUNK_INDEX'))

    logger.addHandler(__splunk)
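The __RequestFormatter above interpolates tenant, x_real_ip and user_agent attributes from each record, falling back to the 'system'/'localhost'/'n/a' defaults shown in the excerpt. Callers can also supply the same fields through the standard logging extra mechanism; the values below are purely illustrative:

logger.info(
    'wrapped key delivered',
    extra={'tenant': 'tenant-a',
           'x_real_ip': '203.0.113.7',
           'user_agent': 'vault-agent/1.0'})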