Example #1
 def __init__(self, instance):
     self.log = get_check_logger()
     self.host = instance.get('host', instance.get('server', ''))
     self.port = int(instance.get('port', 0))
     self.mysql_sock = instance.get('sock', '')
     self.defaults_file = instance.get('defaults_file', '')
     self.user = instance.get('user', '')
     self.password = str(instance.get('pass', ''))
     self.tags = self._build_tags(instance.get('tags', []))
     self.options = instance.get('options', {}) or {}  # options could be None if empty in the YAML
     replication_channel = self.options.get('replication_channel')
     if replication_channel:
         self.tags.append("channel:{0}".format(replication_channel))
     self.queries = instance.get('queries', [])
     self.ssl = instance.get('ssl', {})
     self.connect_timeout = instance.get('connect_timeout', 10)
     self.max_custom_queries = instance.get('max_custom_queries',
                                            DEFAULT_MAX_CUSTOM_QUERIES)
     self.charset = instance.get('charset')
     self.deep_database_monitoring = is_affirmative(
         instance.get('deep_database_monitoring', False))
     self.statement_metrics_limits = instance.get(
         'statement_metrics_limits', None)
     self.full_statement_text_cache_max_size = instance.get(
         'full_statement_text_cache_max_size', 10000)
     self.full_statement_text_samples_per_hour_per_query = instance.get(
         'full_statement_text_samples_per_hour_per_query', 1)
     self.statement_samples_config = instance.get('statement_samples',
                                                  {}) or {}
     self.min_collection_interval = instance.get('min_collection_interval',
                                                 15)
     self.configuration_checks()
Example #2
def test_get_check_logger_fallback(caplog):
    log = get_check_logger()

    log.warning("This is a warning")

    assert log is DEFAULT_FALLBACK_LOGGER
    assert "This is a warning" in caplog.text
Example #3
 def __init__(self, check, config, connection_args):
     # (MySql, MySQLConfig) -> None
     collection_interval = float(
         config.statement_metrics_config.get('collection_interval', 10))
     if collection_interval <= 0:
         collection_interval = 10
     super(MySQLStatementMetrics, self).__init__(
         check,
         rate_limit=1 / float(collection_interval),
         run_sync=is_affirmative(
             config.statement_metrics_config.get('run_sync', False)),
         enabled=is_affirmative(
             config.statement_metrics_config.get('enabled', True)),
         expected_db_exceptions=(pymysql.err.DatabaseError, ),
         min_collection_interval=config.min_collection_interval,
         dbms="mysql",
         job_name="statement-metrics",
         shutdown_callback=self._close_db_conn,
     )
     self._metric_collection_interval = collection_interval
     self._connection_args = connection_args
     self._db = None
     self._config = config
     self.log = get_check_logger()
     self._state = StatementMetrics()
     self._obfuscate_options = to_native_string(
         json.dumps(self._config.obfuscator_options))
     # full_statement_text_cache: limit the ingestion rate of full statement text events per query_signature
     self._full_statement_text_cache = TTLCache(
         maxsize=self._config.full_statement_text_cache_max_size,
         ttl=60 * 60 / self._config.full_statement_text_samples_per_hour_per_query,
     )  # type: TTLCache
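The ttl arithmetic above is what enforces the ingestion limit described in the comment: with the default of 1 full-text sample per hour per query, ttl is 3600 seconds, so a given query_signature can only re-enter the cache, and therefore be emitted again, after an hour. A minimal sketch of the same pattern, assuming cachetools is available; the emit callable is a placeholder:

from cachetools import TTLCache

samples_per_hour_per_query = 1  # same default as the configs above
full_text_cache = TTLCache(maxsize=10000, ttl=60 * 60 / samples_per_hour_per_query)

def maybe_emit_full_text(query_signature, event, emit):
    # Emit at most `samples_per_hour_per_query` full-text events per signature per hour.
    if query_signature in full_text_cache:
        return False
    full_text_cache[query_signature] = True
    emit(event)
    return True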
Example #4
 def __init__(self, check, config):
     # (MySql, MySQLConfig) -> None
     self._check = check
     self._config = config
     self._db_hostname = None
     self.log = get_check_logger()
     self._state = StatementMetrics()
Example #5
 def __init__(
         self,
         check,
         config_host=None,
         min_collection_interval=15,
         dbms="TODO",
         rate_limit=1,
         run_sync=False,
         enabled=True,
         expected_db_exceptions=(),
         shutdown_callback=None,
         job_name=None,
 ):
     self._check = check
     self._config_host = config_host
     self._min_collection_interval = min_collection_interval
     self._log = get_check_logger()
     self._job_loop_future = None
     self._cancel_event = threading.Event()
     self._tags = None
     self._tags_no_db = None
     self._db_hostname = None
     self._last_check_run = 0
     self._shutdown_callback = shutdown_callback
     self._dbms = dbms
     self._rate_limiter = ConstantRateLimiter(rate_limit)
     self._run_sync = run_sync
     self._enabled = enabled
     self._expected_db_exceptions = expected_db_exceptions
     self._job_name = job_name
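Example #3 passes rate_limit=1 / collection_interval into this constructor, so a 10-second collection interval becomes 0.1 runs per second for the ConstantRateLimiter. The sketch below shows that kind of pacing loop in isolation; it is purely illustrative and not the ConstantRateLimiter implementation:

import time

def run_at_constant_rate(job, rate_limit, iterations):
    # rate_limit is in runs per second, so each iteration should take at least 1 / rate_limit seconds.
    period = 1.0 / rate_limit
    for _ in range(iterations):
        started = time.time()
        job()
        time.sleep(max(0.0, period - (time.time() - started)))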
Example #6
    def __init__(self, check, config):
        self._check = check
        self._db = None
        self._config = config
        self._log = get_check_logger()
        self._activity_last_query_start = None
        self._last_check_run = 0
        self._collection_loop_future = None
        self._cancel_event = threading.Event()
        self._tags = None
        self._tags_str = None
        self._service = "postgres"
        self._db_hostname = resolve_db_host(self._config.host)
        self._enabled = is_affirmative(self._config.statement_samples_config.get('enabled', False))
        self._run_sync = is_affirmative(self._config.statement_samples_config.get('run_sync', False))
        self._rate_limiter = ConstantRateLimiter(
            float(self._config.statement_samples_config.get('collections_per_second', 1))
        )
        self._explain_function = self._config.statement_samples_config.get(
            'explain_function', 'datadog.explain_statement'
        )

        # explained_statements_cache: limit how often we try to re-explain the same query
        self._explained_statements_cache = TTLCache(
            maxsize=int(self._config.statement_samples_config.get('explained_statements_cache_maxsize', 5000)),
            ttl=60 * 60 / int(self._config.statement_samples_config.get('explained_statements_per_hour_per_query', 60)),
        )

        # seen_samples_cache: limit the ingestion rate per (query_signature, plan_signature)
        self._seen_samples_cache = TTLCache(
            # assuming ~100 bytes per entry (query & plan signature, key hash, 4 pointers (ordered dict), expiry time)
            # total size: 10k * 100 = 1 Mb
            maxsize=int(self._config.statement_samples_config.get('seen_samples_cache_maxsize', 10000)),
            ttl=60 * 60 / int(self._config.statement_samples_config.get('samples_per_hour_per_query', 15)),
        )
Example #7
def test_get_check_logger_argument_fallback(caplog):
    logger = logging.getLogger()
    log = get_check_logger(default_logger=logger)

    log.warning("This is a warning")

    assert log is logger
    assert "This is a warning" in caplog.text
Example #8
 def __init__(self, check, config):
     # (MySql, MySQLConfig) -> None
     self._check = check
     self._config = config
     self._db_hostname = None
     self.log = get_check_logger()
     self._state = StatementMetrics()
     # full_statement_text_cache: limit the ingestion rate of full statement text events per query_signature
     self._full_statement_text_cache = TTLCache(
         maxsize=self._config.full_statement_text_cache_max_size,
         ttl=60 * 60 / self._config.full_statement_text_samples_per_hour_per_query,
     )  # type: TTLCache
Example #9
    def version_compatible(self, compat_version):
        # some patch version numbers contain letters (e.g. 5.0.51a)
        # so let's be careful when we compute the version number
        log = get_check_logger()
        try:
            mysql_version = self.version.split('.')
        except Exception as e:
            log.warning("Cannot compute mysql version, assuming it's older.: %s", e)
            return False
        log.debug("MySQL version %s", mysql_version)

        patchlevel = int(re.match(r"([0-9]+)", mysql_version[2]).group(1))
        version = (int(mysql_version[0]), int(mysql_version[1]), patchlevel)

        return version >= compat_version
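Because patch levels such as 5.0.51a contain letters, the regex keeps only the leading digits, so the comparison above is performed on the tuple (5, 0, 51). The same parsing as a standalone sketch outside the check class:

import re

def parse_mysql_version(raw_version):
    # "5.0.51a" -> (5, 0, 51); trailing letters in the patch level are ignored
    major, minor, patch = raw_version.split('.')
    patchlevel = int(re.match(r"([0-9]+)", patch).group(1))
    return int(major), int(minor), patchlevel

assert parse_mysql_version("5.0.51a") >= (5, 0, 50)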
Example #10
 def __init__(self, instance):
     self.log = get_check_logger()
     self.host = instance.get('host', instance.get('server', ''))
     self.port = int(instance.get('port', 0))
     self.tags = list(instance.get('tags', []))
     self.mysql_sock = instance.get('sock', '')
     self.defaults_file = instance.get('defaults_file', '')
     self.user = instance.get('user', '')
     self.password = str(instance.get('pass', ''))
     self.options = instance.get('options', {}) or {}  # options could be None if empty in the YAML
     self.queries = instance.get('queries', [])
     self.ssl = instance.get('ssl', {})
     self.connect_timeout = instance.get('connect_timeout', 10)
     self.max_custom_queries = instance.get('max_custom_queries', DEFAULT_MAX_CUSTOM_QUERIES)
     self.configuration_checks()
Example #11
    def __init__(self, agent_check):
        self.agent_check = agent_check
        self.log = get_check_logger()
        self.log.debug('Selecting remote connection for method of collection')
        self.agent_check._tags.append('server_hostname:{}'.format(
            self.agent_check._server_hostname))
        self.agent_check._tags.append('server:{}'.format(
            self.agent_check._server))
        self.agent_check._tags.append('port:{}'.format(self.agent_check._port))

        self._fetch_intermediate_certs = is_affirmative(
            self.agent_check.instance.get(
                'fetch_intermediate_certs',
                self.agent_check.init_config.get('fetch_intermediate_certs', False),
            )
        )
        # Convert minutes to seconds
        self._intermediate_cert_refresh_interval = (
            float(self.agent_check.instance.get('intermediate_cert_refresh_interval', 60)) * 60
        )
Example #12
    def __init__(self, init_config, instance_config, service_check_handler):
        self.instance = instance_config
        self.service_check_handler = service_check_handler
        self.log = get_check_logger()

        # mapping of raw connections based on conn_key to different databases
        self._conns = {}
        self.timeout = int(
            self.instance.get('command_timeout', self.DEFAULT_COMMAND_TIMEOUT))
        self.existing_databases = None
        self.server_version = int(
            self.instance.get('server_version',
                              self.DEFAULT_SQLSERVER_VERSION))

        self.adoprovider = self.default_adoprovider

        self.valid_connectors = []
        if adodbapi is not None:
            self.valid_connectors.append('adodbapi')
        if pyodbc is not None:
            self.valid_connectors.append('odbc')

        self.default_connector = init_config.get('connector', 'adodbapi')
        if self.default_connector.lower() not in self.valid_connectors:
            self.log.error(
                "Invalid database connector %s, defaulting to adodbapi",
                self.default_connector)
            self.default_connector = 'adodbapi'

        self.connector = self.get_connector()

        self.adoprovider = init_config.get('adoprovider',
                                           self.default_adoprovider)
        if self.adoprovider.upper() not in self.valid_adoproviders:
            self.log.error(
                "Invalid ADODB provider string %s, defaulting to %s",
                self.adoprovider, self.default_adoprovider)
            self.adoprovider = self.default_adoprovider

        self.log.debug('Connection initialized.')
Example #13
 def __init__(self, config):
     # type: (MySQLConfig) -> None
     self.config = config
     self.log = get_check_logger()
     self._state = StatementMetrics()
Example #14
 def __init__(self, agent_check):
     self.agent_check = agent_check
     self.log = get_check_logger()
     self.log.debug('Selecting local connection for method of collection')
     if self.agent_check._tls_validate_hostname and self.agent_check._server_hostname:
         self.agent_check._tags.append('server_hostname:{}'.format(self.agent_check._server_hostname))
Example #15
 def __init__(self, instance):
     self.log = get_check_logger()
     self.host = instance.get('host', instance.get('server', ''))
     self.port = int(instance.get('port', 0))
     self.reported_hostname = instance.get('reported_hostname', '')
     self.mysql_sock = instance.get('sock', '')
     self.defaults_file = instance.get('defaults_file', '')
     self.user = instance.get('username', instance.get('user', ''))
     self.password = str(instance.get('password', instance.get('pass', '')))
     self.tags = self._build_tags(instance.get('tags', []))
     self.options = instance.get('options', {}) or {}  # options could be None if empty in the YAML
     replication_channel = self.options.get('replication_channel')
     if replication_channel:
         self.tags.append("channel:{0}".format(replication_channel))
     self.queries = instance.get('queries', [])
     self.ssl = instance.get('ssl', {})
     self.additional_status = instance.get('additional_status', [])
     self.additional_variable = instance.get('additional_variable', [])
     self.connect_timeout = instance.get('connect_timeout', 10)
     self.max_custom_queries = instance.get('max_custom_queries',
                                            DEFAULT_MAX_CUSTOM_QUERIES)
     self.charset = instance.get('charset')
     self.dbm_enabled = is_affirmative(
         instance.get('dbm', instance.get('deep_database_monitoring', False)))
     self.statement_metrics_limits = instance.get(
         'statement_metrics_limits', None)
     self.full_statement_text_cache_max_size = instance.get(
         'full_statement_text_cache_max_size', 10000)
     self.full_statement_text_samples_per_hour_per_query = instance.get(
         'full_statement_text_samples_per_hour_per_query', 1)
     self.statement_samples_config = instance.get(
         'query_samples', instance.get('statement_samples', {})) or {}
     self.statement_metrics_config = instance.get('query_metrics', {}) or {}
     self.activity_config = instance.get('query_activity', {}) or {}
     self.cloud_metadata = {}
     aws = instance.get('aws', {})
     gcp = instance.get('gcp', {})
     azure = instance.get('azure', {})
     if aws:
         self.cloud_metadata.update({'aws': aws})
     if gcp:
         self.cloud_metadata.update({'gcp': gcp})
     if azure:
         self.cloud_metadata.update({'azure': azure})
     self.min_collection_interval = instance.get('min_collection_interval',
                                                 15)
     self.only_custom_queries = is_affirmative(
         instance.get('only_custom_queries', False))
     obfuscator_options_config = instance.get('obfuscator_options', {}) or {}
     self.obfuscator_options = {
         # Valid values for this can be found at
         # https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md#connection-level-attributes
         'dbms': 'mysql',
         'replace_digits': is_affirmative(
             obfuscator_options_config.get(
                 'replace_digits',
                 obfuscator_options_config.get('quantize_sql_tables', False))),
         'keep_sql_alias': is_affirmative(obfuscator_options_config.get('keep_sql_alias', True)),
         'return_json_metadata': is_affirmative(obfuscator_options_config.get('collect_metadata', True)),
         'table_names': is_affirmative(obfuscator_options_config.get('collect_tables', True)),
         'collect_commands': is_affirmative(obfuscator_options_config.get('collect_commands', True)),
         'collect_comments': is_affirmative(obfuscator_options_config.get('collect_comments', True)),
     }
     self.configuration_checks()
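For reference, with an empty obfuscator_options block the defaults above resolve to the following values (derived directly from the code; the variable name is illustrative):

default_obfuscator_options = {
    'dbms': 'mysql',
    'replace_digits': False,       # quantize_sql_tables also defaults to False
    'keep_sql_alias': True,
    'return_json_metadata': True,  # from collect_metadata
    'table_names': True,           # from collect_tables
    'collect_commands': True,
    'collect_comments': True,
}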
Example #16
 def __init__(self, check, config, connection_args):
     self._check = check
     self._version_processed = False
     self._connection_args = connection_args
     # checkpoint at zero so we pull the whole history table on the first run
     self._checkpoint = 0
     self._log = get_check_logger()
     self._last_check_run = 0
     self._db = None
     self._tags = None
     self._tags_str = None
     self._service = "mysql"
     self._collection_loop_future = None
     self._cancel_event = threading.Event()
     self._rate_limiter = ConstantRateLimiter(1)
     self._config = config
     self._db_hostname = resolve_db_host(self._config.host)
     self._enabled = is_affirmative(
         self._config.statement_samples_config.get('enabled', False))
     self._run_sync = is_affirmative(
         self._config.statement_samples_config.get('run_sync', False))
     self._collections_per_second = self._config.statement_samples_config.get(
         'collections_per_second', -1)
     self._events_statements_row_limit = self._config.statement_samples_config.get(
         'events_statements_row_limit', 5000)
     self._explain_procedure = self._config.statement_samples_config.get(
         'explain_procedure', 'explain_statement')
     self._fully_qualified_explain_procedure = self._config.statement_samples_config.get(
         'fully_qualified_explain_procedure', 'datadog.explain_statement')
     self._events_statements_temp_table = self._config.statement_samples_config.get(
         'events_statements_temp_table_name', 'datadog.temp_events')
     self._events_statements_enable_procedure = self._config.statement_samples_config.get(
         'events_statements_enable_procedure',
         'datadog.enable_events_statements_consumers')
     self._preferred_events_statements_tables = EVENTS_STATEMENTS_PREFERRED_TABLES
     self._has_window_functions = False
     events_statements_table = self._config.statement_samples_config.get(
         'events_statements_table', None)
     if events_statements_table:
         if events_statements_table in DEFAULT_EVENTS_STATEMENTS_COLLECTIONS_PER_SECOND:
             self._log.debug(
                 "Configured preferred events_statements_table: %s",
                 events_statements_table)
             self._preferred_events_statements_tables = [
                 events_statements_table
             ]
         else:
             self._log.warning(
                 "Invalid events_statements_table: %s. Must be one of %s. Falling back to trying all tables.",
                 events_statements_table,
                 ', '.join(DEFAULT_EVENTS_STATEMENTS_COLLECTIONS_PER_SECOND.keys()),
             )
     self._explain_strategies = {
         'PROCEDURE': self._run_explain_procedure,
         'FQ_PROCEDURE': self._run_fully_qualified_explain_procedure,
         'STATEMENT': self._run_explain,
     }
     self._preferred_explain_strategies = [
         'PROCEDURE', 'FQ_PROCEDURE', 'STATEMENT'
     ]
     self._init_caches()
     self._statement_samples_client = _new_statement_samples_client()
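The _explain_strategies mapping pairs each strategy name with the bound method that implements it, and _preferred_explain_strategies fixes the order in which they are tried. A simplified, illustrative consumer of such a mapping (not the check's actual collection loop):

def explain_with_strategies(strategies, preferred_order, statement, log):
    # Try each preferred strategy in order and return the first plan obtained.
    for name in preferred_order:
        try:
            plan = strategies[name](statement)
        except Exception as e:
            log.debug("explain strategy %s failed: %s", name, e)
            continue
        if plan:
            return name, plan
    return None, None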
Example #17
 def __init__(self, yamlconfig):
     # type: (List[Union[str, Dict]]) -> None
     self.log = get_check_logger()
     self.config = self._build_relations_config(yamlconfig)
     self.has_relations = len(self.config) > 0
Example #18
    def __init__(self, instance):
        self.log = get_check_logger()
        self.channel = instance.get('channel')  # type: str
        self.queue_manager_name = instance.get('queue_manager',
                                               'default')  # type: str

        if not self.channel or not self.queue_manager_name:
            msg = "channel, queue_manager are required configurations"
            raise ConfigurationError(msg)

        host = instance.get('host')  # type: str
        port = instance.get('port')  # type: str
        self.connection_name = instance.get('connection_name')  # type: str
        if (host or port) and self.connection_name:
            raise ConfigurationError(
                'Specify only one host/port or connection_name configuration, '
                '(host={}, port={}, connection_name={}).'.format(
                    host, port, self.connection_name))

        if not self.connection_name:
            host = host or 'localhost'
            port = port or '1414'
            self.connection_name = "{}({})".format(host, port)

        self.username = instance.get('username')  # type: str
        self.password = instance.get('password')  # type: str

        self.queues = instance.get('queues', [])  # type: List[str]
        self.queue_patterns = instance.get('queue_patterns',
                                           [])  # type: List[str]
        self.queue_regex = [
            re.compile(regex) for regex in instance.get('queue_regex', [])
        ]  # type: List[Pattern]

        self.auto_discover_queues = is_affirmative(
            instance.get('auto_discover_queues', False))  # type: bool

        self.collect_statistics_metrics = is_affirmative(
            instance.get('collect_statistics_metrics', False))  # type: bool

        if int(self.auto_discover_queues) + int(bool(self.queue_patterns)) + int(bool(self.queue_regex)) > 1:
            self.log.warning(
                "Configurations auto_discover_queues, queue_patterns and queue_regex are not intended to be used "
                "together.")

        self.channels = instance.get('channels', [])  # type: List[str]

        self.channel_status_mapping = self.get_channel_status_mapping(
            instance.get('channel_status_mapping'))  # type: Dict[str, str]

        self.convert_endianness = instance.get('convert_endianness', False)

        custom_tags = instance.get('tags', [])  # type: List[str]
        tags = [
            "queue_manager:{}".format(self.queue_manager_name),
            "connection_name:{}".format(self.connection_name),
        ]  # type: List[str]
        tags.extend(custom_tags)
        if host or port:
            # 'host' is reserved and 'mq_host' is used instead
            tags.extend({"mq_host:{}".format(host), "port:{}".format(port)})
        self.tags_no_channel = tags
        self.tags = tags + ["channel:{}".format(self.channel)
                            ]  # type: List[str]

        # SSL options
        self.ssl = is_affirmative(instance.get('ssl_auth', False))  # type: bool
        self.ssl_cipher_spec = instance.get(
            'ssl_cipher_spec', 'TLS_RSA_WITH_AES_256_CBC_SHA')  # type: str
        self.ssl_key_repository_location = instance.get(
            'ssl_key_repository_location',
            '/var/mqm/ssl-db/client/KeyringClient')  # type: str
        self.ssl_certificate_label = instance.get(
            'ssl_certificate_label')  # type: str
        if instance.get('ssl_auth') is None and (
                instance.get('ssl_cipher_spec')
                or instance.get('ssl_key_repository_location')
                or self.ssl_certificate_label):
            self.log.info(
                "ssl_auth has not been explictly enabled but other SSL options have been provided. "
                "SSL will be used for connecting")
            self.ssl = True

        self.mq_installation_dir = instance.get('mq_installation_dir',
                                                '/opt/mqm/')

        self._queue_tag_re = instance.get('queue_tag_re',
                                          {})  # type: Dict[str, str]
        self.queue_tag_re = self._compile_tag_re()

        raw_mqcd_version = instance.get('mqcd_version', 6)
        try:
            self.mqcd_version = getattr(
                pymqi.CMQC,
                'MQCD_VERSION_{}'.format(raw_mqcd_version))  # type: int
        except (ValueError, AttributeError):
            raise ConfigurationError(
                "mqcd_version must be a number between 1 and 9. {} found.".format(raw_mqcd_version))

        self.instance_creation_datetime = dt.datetime.now(UTC)
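When connection_name is not supplied, host and port default to localhost and 1414 and are folded into the host(port) connection-name form used by IBM MQ:

host, port = 'localhost', '1414'
connection_name = "{}({})".format(host, port)  # -> "localhost(1414)"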
Example #19
 def __init__(self):
     self.log = get_check_logger()
Example #20
 def __init__(self, config):
     self.config = config
     self.log = get_check_logger()
     self._state = StatementMetrics()