import time
import traceback

import databricks_com as com
import databricks_const as const
import databricks_common_utils as utils
from log_manager import setup_logging

from splunklib.searchcommands import (
    dispatch,
    GeneratingCommand,
    Configuration,
    Option,
)

# Module-level logger for the databricksrun custom search command.
_LOGGER = setup_logging("ta_databricksrun_command")


@Configuration(type="events")
class DatabricksRunCommand(GeneratingCommand):
    """Custom Command of databricksrun.

    Reads notebook-run parameters from the Splunk search bar; presumably
    submits a one-time notebook run to Databricks — TODO confirm, the
    implementation is truncated in this scraped source.
    """

    # Take input from user using parameters
    notebook_path = Option(require=True)        # mandatory: path of the notebook to run
    run_name = Option(require=False)            # optional display name for the run
    cluster = Option(require=False)             # optional target cluster
    revision_timestamp = Option(require=False)  # optional notebook revision timestamp
    notebook_params = Option(require=False)     # optional parameters passed to the notebook
    identifier = Option(require=False)          # optional caller-supplied identifier

    def generate(self):
        """Generate events for the databricksrun command.

        NOTE(review): method body is missing in this scraped source;
        confirm against the original add-on file.
        """
import ta_databricks_declare  # noqa: F401
import requests
import traceback

import databricks_const as const
import databricks_common_utils as utils
from log_manager import setup_logging
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter

# Module-level logger for the Databricks REST client module.
_LOGGER = setup_logging("ta_databricks_com")


class DatabricksClient(object):
    """A class to establish connection with Databricks and get data using REST API."""
    def __init__(self, session_key):
        """Initialize DatabricksClient object to get data from Databricks platform.

        Args:
            session_key (object): Splunk session key
        """
        # Instance URL and auth type come from ta_databricks_settings.conf.
        databricks_configs = utils.get_databricks_configs()
        databricks_instance = databricks_configs.get("databricks_instance")
        self.auth_type = databricks_configs.get("auth_type")
        self.session_key = session_key
        # HTTP session with a retry policy; proxy and SSL-verification
        # settings are taken from the add-on configuration.
        self.session = self.get_requests_retry_session()
        self.session.proxies = utils.get_proxy_uri(session_key)
        self.session.verify = const.VERIFY_SSL
        # Clear-text access token resolved according to the configured auth type.
        self.databricks_token = utils.get_clear_token(self.session_key,
                                                      self.auth_type)
        # NOTE(review): the body of this `if` is missing in this scraped
        # source — presumably it raises/logs when the instance URL or token
        # is unset; confirm against the original file.
        if not all([databricks_instance, self.databricks_token]):
# --- Example #3 (vote score: 0) — scraped snippet separator ---
 def __init__(self, ip_address):
     """Record the device address and initialize default state flags."""
     # Defaults: price protection enabled, no price spike in progress.
     self.price_protected = True
     self.spike_active = False
     self.ip_address = ip_address
     self.logger = log_manager.setup_logging(__name__)
import ta_databricks_declare  # noqa: F401
import requests
import databricks_const as const
import databricks_common_utils as utils
from log_manager import setup_logging

from splunktaucclib.rest_handler.endpoint.validator import Validator
from splunk_aoblib.rest_migration import ConfigMigrationHandler

# Module-level logger for the Databricks configuration validators.
_LOGGER = setup_logging("databricks_validator")


class SessionKeyProvider(ConfigMigrationHandler):
    """Expose the Splunk session key to custom validators."""

    def __init__(self):
        """Fetch the session key once and keep it on the instance."""
        self.session_key = self.getSessionKey()


class ValidateDatabricksInstance(Validator):
    """
    Validator for Databricks instance and token.
    """
    # NOTE(review): validate() is cut off inside its docstring in this
    # scraped source; the real implementation presumably checks the
    # instance/token pair against the Databricks API — confirm.
    def validate(self, value, data):
        """
        Check if the given value is valid.
# --- Example #5 (vote score: 0) — scraped snippet separator ---
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# main.py
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

from multiprocessing import Process
from thermostat import Thermostat
import env_loader
import griddy
import log_manager
import time

# Polling interval — presumably seconds between price queries; TODO confirm.
POLL_FREQUENCY = 150
# Local environment file holding runtime configuration.
ENV_FILE = "./.env.local"

# Load configuration and set up the module logger at import time.
config = env_loader.load_config(ENV_FILE)
log = log_manager.setup_logging(__name__)


def main(t, g):
    """Poll griddy prices forever and toggle the thermostat's spike mode.

    Args:
        t: thermostat object exposing set_spike_active() and run().
        g: griddy client exposing query(), format_price() and price_is_high().
    """
    while True:
        griddy_data = g.query()
        current_price = griddy_data["now"]["price_ckwh"]
        price_display = g.format_price(current_price)

        if g.price_is_high(current_price):
            t.set_spike_active(True)
            log.info("Spike active. Current price: {}".format(price_display))
        else:
            t.set_spike_active(False)

        t.run()
        # Throttle the loop: without this the pricing API is hammered in a
        # busy-wait. POLL_FREQUENCY and `time` are already in module scope.
        time.sleep(POLL_FREQUENCY)
# --- Example #6 (vote score: 0) — scraped snippet separator ---
import time

import databricks_com as com
import databricks_const as const
import databricks_common_utils as utils
from log_manager import setup_logging

from splunklib.searchcommands import (
    dispatch,
    GeneratingCommand,
    Configuration,
    Option,
    validators,
)

# Module-level logger for the databricksquery custom search command.
_LOGGER = setup_logging("databricksquery_command")


@Configuration(type="events")
class DatabricksQueryCommand(GeneratingCommand):
    """Custom Command of databricksquery.

    Runs a user-supplied query on a Databricks cluster and emits the
    results as Splunk events — TODO confirm, body truncated below.
    """

    # Take input from user using parameters
    cluster = Option(require=False)  # optional target cluster
    query = Option(require=True)     # mandatory query to execute
    # Timeout must be a positive integer; constant name suggests seconds.
    command_timeout = Option(require=False, validate=validators.Integer(minimum=1))

    def generate(self):
        """Generating custom command."""
        _LOGGER.info("Initiating databricksquery command")
        # Fall back to the add-on default timeout when none is supplied.
        command_timeout_in_seconds = self.command_timeout or const.COMMAND_TIMEOUT_IN_SECONDS
        # NOTE(review): remainder of generate() is truncated in this scraped source.
# --- Example #7 (vote score: 0) — scraped snippet separator ---
 def __init__(self, meter_id, member_id, settlement_point):
     """Record the meter's identity; price is unknown until first query."""
     self.logger = log_manager.setup_logging(__name__)
     self.meter_id = meter_id
     self.member_id = member_id
     self.settlement_point = settlement_point
     # No price observed yet.
     self.price = None
# --- Example #8 (vote score: 0) — scraped snippet separator ---
import traceback

import databricks_com as com
import databricks_const as const
import databricks_common_utils as utils
from log_manager import setup_logging

from splunklib.searchcommands import (
    dispatch,
    GeneratingCommand,
    Configuration,
    Option,
    validators,
)

# Module-level logger for the databricksjob custom search command.
_LOGGER = setup_logging("databricksjob_command")


@Configuration(type="events")
class DatabricksJobCommand(GeneratingCommand):
    """Custom Command of databricksjob.

    Triggers an existing Databricks job by id — TODO confirm, the
    implementation is truncated in this scraped source.
    """

    # Take input from user using parameters
    job_id = Option(require=True, validate=validators.Integer(0))  # non-negative job id
    notebook_params = Option(require=False)  # optional parameters forwarded to the job

    def generate(self):
        """Generating custom command."""
        _LOGGER.info("Initiating databricksjob command")
        # Event payload, starting with the searching user's name.
        # NOTE(review): dict literal is cut off in this scraped source.
        kv_log_info = {
            "user": self._metadata.searchinfo.username,
import ta_databricks_declare  # noqa: F401
import json
import requests
import databricks_const as const
from log_manager import setup_logging

import splunk.rest as rest
from six.moves.urllib.parse import quote
from splunk.clilib import cli_common as cli
from solnlib.credentials import CredentialManager, CredentialNotExistException
from solnlib.utils import is_true

# Module-level logger and add-on name shared by the utility helpers below.
_LOGGER = setup_logging("databricks_utils")
APP_NAME = const.APP_NAME


def get_databricks_configs():
    """
    Get configuration details from ta_databricks_settings.conf.

    :return: dictionary with Databricks fields and values
    """
    _LOGGER.info("Reading configuration file.")
    # Read the databricks_credentials stanza straight from the conf file.
    return cli.getConfStanza(
        "ta_databricks_settings", "databricks_credentials"
    )


def get_databricks_clear_token(session_key):
    """
    Get unencrypted access token from passwords.conf.