class ClusterMasterPeer(SplunkAppObjModel):
    '''
    Represents a master's cluster peer state

    Every field is declared with is_mutable=False: peer state is reported
    by the cluster master and is read-only through this model.
    '''

    # REST endpoint backing this model.
    resource = 'cluster/master/peers'

    # Bundle / generation state reported by the master.
    active_bundle_id = Field(is_mutable=False)
    apply_bundle_status = DictField(is_mutable=False)
    base_generation_id = IntField(is_mutable=False)
    # Bucket bookkeeping for this peer.
    bucket_count = IntField(is_mutable=False)
    bucket_count_by_index = DictField(is_mutable=False)
    delayed_buckets_to_discard = ListField(is_mutable=False)
    fixup_set = ListField(is_mutable=False)
    # Peer identity and liveness.
    host_port_pair = Field(is_mutable=False)
    is_searchable = BoolField(is_mutable=False)
    label = Field(is_mutable=False)
    last_heartbeat = EpochField(is_mutable=False)
    latest_bundle_id = Field(is_mutable=False)
    pending_job_count = IntField(is_mutable=False)
    # Replication counters and settings.
    primary_count = IntField(is_mutable=False)
    primary_count_remote = IntField(is_mutable=False)
    replication_count = IntField(is_mutable=False)
    replication_port = IntField(is_mutable=False)
    replication_use_ssl = BoolField(is_mutable=False)
    search_state_counter = DictField(is_mutable=False)
    # Multisite / status summary.
    site = Field(is_mutable=False)
    status = Field(is_mutable=False)
    status_counter = DictField(is_mutable=False)
class License(SplunkAppObjModel):
    '''
    Represents a single license object

    Attributes whose REST name differs from the Python attribute are
    mapped explicitly via api_name.
    '''

    # REST endpoint backing this model.
    resource = 'licenser/licenses'

    creation_time = EpochField()
    expiration_time = EpochField()
    features = ListField()
    # NOTE: shadows builtin hash() inside the class body; name kept for
    # API compatibility. Maps REST attribute 'license_hash'.
    hash = Field(api_name='license_hash')
    label = Field()
    max_violations = IntField()
    payload = Field()
    quota_bytes = FloatField(api_name='quota')  # REST attribute 'quota'
    sourcetypes = ListField()
    stack_name = Field(api_name='stack_id')  # REST attribute 'stack_id'
    status = Field()
    # NOTE: shadows builtin type() at class scope; kept as-is.
    type = Field()
    window_period = IntField()
class SelfConfig(SplunkAppObjModel):
    '''
    Represents a Splunk license tracker (master) server

    All fields except master_uri are declared is_mutable=False;
    master_uri is the only setting writable through this model.
    '''

    # Primary REST endpoint plus the default license sub-resource.
    resource = 'licenser/localslave'
    resource_default = 'licenser/localslave/license'

    connection_timeout = IntField(is_mutable=False)
    features = DictField(is_mutable=False)
    # Timestamps of the most recent master-contact attempts/successes.
    last_master_contact_attempt_time = EpochField(is_mutable=False)
    last_master_contact_success_time = EpochField(is_mutable=False)
    last_trackerdb_service_time = EpochField(is_mutable=False)
    license_keys = ListField(is_mutable=False)
    master_guid = Field(is_mutable=False)
    master_uri = Field()  # mutable: the license master this instance reports to
    receive_timeout = IntField(is_mutable=False)
    send_timeout = IntField(is_mutable=False)
    slave_name = Field(api_name='slave_id', is_mutable=False)  # REST attribute 'slave_id'
    slave_label = Field(is_mutable=False)
    squash_threshold = IntField(is_mutable=False)
class Message(SplunkAppObjModel):
    '''
    Represents a licenser message
    '''

    # REST endpoint backing this model.
    resource = 'licenser/messages'

    category = Field(is_mutable=False)  # read-only; assigned by the licenser
    create_time = EpochField()
    description = Field()
    pool_name = Field(api_name='pool_id')  # REST attribute 'pool_id'
    severity = Field(default_value='ERROR')  # defaults to 'ERROR' when absent
    slave_name = Field(api_name='slave_id')  # REST attribute 'slave_id'
    stack_name = Field(api_name='stack_id')  # REST attribute 'stack_id'
# Example #5
# 0
class Summarization(SplunkAppObjModel):
    '''
    Represents an auto-summarization for a saved search

    Most attributes are read-only (is_mutable=False) and take a dotted
    string as their first positional argument (e.g. 'summary.buckets') --
    presumably the REST attribute name they map to, matching the api_name
    keyword used elsewhere in this file; confirm against the Field API.
    '''

    # REST endpoint backing this model.
    resource = 'admin/summarization'

    # Saved-search bookkeeping and overall completion.
    saved_searches = DictField('saved_searches', is_mutable=False)
    saved_searches_count = Field('saved_searches.count')
    buckets = Field('summary.buckets', is_mutable=False)
    complete = Field('summary.complete', is_mutable=False)

    # Summary identity hashes (NOTE: 'hash' shadows the builtin at class scope).
    hash = Field('summary.hash', is_mutable=False)
    regularHash = Field('summary.regularHash', is_mutable=False)
    normHash = Field('summary.normHash', is_mutable=False)

    # Summary identifiers.
    unique_id = Field('summary.id', is_mutable=False)
    regular_id = Field('summary.regular_id', is_mutable=False)
    normalized_id = Field('summary.normalized_id', is_mutable=False)

    # Summary statistics and runtime status.
    chunks = Field('summary.chunks', is_mutable=False)
    earliest_time = Field('summary.earliest_time', is_mutable=False)
    latest_time = Field('summary.latest_time', is_mutable=False)
    time_range = Field('summary.time_range', is_mutable=False)
    load_factor = Field('summary.load_factor', is_mutable=False)
    total_time = Field('summary.total_time', is_mutable=False)
    run_stats = ListField('summary.run_stats', is_mutable=False)
    last_error = ListField('summary.last_error', is_mutable=False)
    mod_time = Field('summary.mod_time', is_mutable=False)
    access_time = Field('summary.access_time', is_mutable=False)
    access_count = Field('summary.access_count', is_mutable=False)
    size = Field('summary.size', is_mutable=False)
    timespan = Field('summary.timespan', is_mutable=False)
    is_inprogress = BoolField('summary.is_inprogress', is_mutable=False)
    is_suspended = BoolField('summary.is_suspended', is_mutable=False)
    suspend_expiration = EpochField('summary.suspend_expiration',
                                    is_mutable=False)
    # Verification results (note: these names are NOT prefixed 'summary.').
    verification_buckets_failed = Field('verification_buckets_failed',
                                        is_mutable=False)
    verification_buckets_skipped = Field('verification_buckets_skipped',
                                         is_mutable=False)
    verification_buckets_passed = Field('verification_buckets_passed',
                                        is_mutable=False)
    verification_state = Field('verification_state', is_mutable=False)
    verification_time = Field('verification_time', is_mutable=False)
    verification_error = Field('verification_error', is_mutable=False)
    verification_progress = Field('verification_progress', is_mutable=False)
class FiredAlert(SplunkAppObjModel):
    '''
    Models a single fired/triggered Splunk alert.
    '''

    resource = 'alerts/fired_alerts/-'

    actions = ListField()
    alert_type = Field()
    savedsearch_name = Field()
    sid = Field()
    severity = IntField()
    trigger_time = EpochField()
    # Rendered time strings, localized to the current user's timezone.
    trigger_time_rendered = Field()
    expiration_time_rendered = Field()
    digest_mode = BoolField()
    triggered_alerts = IntField()

    def get_savedsearch(self):
        '''Fetch the SavedSearch entity this alert was fired by.'''
        from splunk.models.saved_search import SavedSearch
        link = self.entity.getLink('savedsearch')
        return SavedSearch.get(link)

    def get_job(self):
        '''Return the search job behind this alert (not yet implemented).'''
        job_id = self.entity.getLink('job')
        #TODO: return a search job object
        return None

    @classmethod
    def get_alerts(cls, alerts_id):
        '''
        Build a SplunkQuerySet over the alerts fired for the given id.
        The returned SplunkQuerySet can be modified further to include a
        search, custom ordering etc..

        example alerts_id:
           absolute: https://localhost:8089/servicesNS/nobody/search/alerts/fired_alerts/AlertTest1
           relative: /servicesNS/nobody/search/alerts/fired_alerts/AlertTest1
        '''
        queryset = SplunkQuerySet(FiredAlert.manager(), 30)
        queryset._uri = alerts_id
        return queryset
# Example #7
# 0
class Index(SplunkAppObjModel):
    '''
    Represents a Splunk index

    All attributes mirror the 'data/indexes' REST endpoint. The camelCase
    names appear to follow indexes.conf setting names -- confirm against
    the endpoint documentation before relying on individual semantics.
    '''

    # REST endpoint backing this model.
    resource = 'data/indexes'

    assureUTF8 = BoolField()
    blockSignatureDatabase = Field()
    blockSignSize = IntField()
    bucketRebuildMemoryHint = Field()
    # On-disk paths (raw and variable-expanded variants).
    coldPath = Field()
    coldPath_expanded = Field()
    coldToFrozenDir = Field()
    coldToFrozenScript = Field()
    compressRawdata = BoolField()
    currentDBSizeMB = IntField()
    defaultDatabase = Field()
    disabled = BoolField()

    enableOnlineBucketRepair = BoolField()
    enableRealtimeSearch = BoolField()
    frozenTimePeriodInSecs = IntField()
    homePath = Field()
    homePath_expanded = Field()
    indexThreads = Field()
    isInternal = BoolField()
    isReady = BoolField()
    isVirtual = BoolField()
    lastInitSequenceNumber = IntField()
    lastInitTime = EpochField()
    # Bucket sizing / rotation limits.
    maxBloomBackfillBucketAge = Field()
    maxBucketSizeCacheEntries = IntField()
    maxConcurrentOptimizes = IntField()
    maxDataSize = Field()
    maxHotBuckets = IntField()
    maxHotIdleSecs = IntField()
    maxHotSpanSecs = IntField()
    maxMemMB = IntField()
    maxMetaEntries = IntField()
    maxRunningProcessGroups = IntField()
    maxRunningProcessGroupsLowPriority = IntField()
    maxTime = Field()
    maxTimeUnreplicatedNoAcks = IntField()
    maxTimeUnreplicatedWithAcks = IntField()
    maxTotalDataSizeMB = IntField()
    maxWarmDBCount = IntField()
    memPoolMB = Field()
    minRawFileSyncSecs = Field()
    minStreamGroupQueueSize = IntField()
    minTime = Field()
    partialServiceMetaPeriod = IntField()
    processTrackerServiceInterval = IntField()
    quarantineFutureSecs = IntField()
    quarantinePastSecs = IntField()
    rawChunkSizeBytes = IntByteField()
    repFactor = IntField()
    rotatePeriodInSecs = IntField()
    serviceMetaPeriod = IntField()
    serviceOnlyAsNeeded = BoolField()
    serviceSubtaskTimingPeriod = IntField()
    suppressBannerList = BoolField()
    # Filesystem sync behavior.
    sync = BoolField()
    syncMeta = BoolField()
    thawedPath = Field()
    thawedPath_expanded = Field()
    throttleCheckPeriod = IntField()
    totalEventCount = IntField()
class HDFSExport(SavedSearch):
    '''
    Represents a scheduled export of search results to HDFS.

    Extends SavedSearch with export configuration fields plus read-only
    status fields mapped from dotted REST attribute names
    (e.g. 'status.jobs.progress').
    '''

    resource = 'hdfs_export'

    # Export configuration.
    search = Field()
    uri = Field()
    base_path = Field()
    starttime = IntField()
    next_scheduled_time = Field()
    cron_schedule = Field()
    parallel_searches = Field()
    partition_fields = Field()
    status = Field()
    compress_level = Field()

    # Export status (mapped from dotted REST attribute names).
    firstevent = EpochField('status.earliest')
    lastevent = EpochField('status.latest')
    jobexporterrors = Field('status.jobs.errors')
    # NOTE: attribute name misspells "progress"; kept for compatibility.
    jobprogess = FloatField('status.jobs.progress')
    jobruntime = FloatField('status.jobs.runtime')
    jobstart = EpochField('status.jobs.starttime')
    jobend = EpochField('status.jobs.endtime')
    jobearliest = EpochField('status.jobs.earliest')
    load = Field('status.load')
    export_sids = Field('status.jobs.sids')  # comma delimited list
    scheduled_sid = Field('status.jobs.psid')
    maxspan = Field()
    minspan = Field()
    roll_size = Field()
    format = Field()
    fields = Field()

    def get_export_factor(self):
        '''
        Return the first value of the comma-separated load string as a
        float, or 0 when no load information is available.
        '''
        # `load` may be unset (None) or empty before the first export run.
        if not self.load:
            return 0
        loads = self.load.split(',')
        return float(loads[0])

    def get_percent_complete(self):
        '''
        Return the ratio of the indexed event time span (first to last
        event) to the span attempted so far (first event to now).

        Returns 0 when the event timestamps are missing/invalid or the
        elapsed span is zero (best-effort, never raises).
        '''
        try:
            first = time.mktime(self.firstevent.timetuple())
            last = time.mktime(self.lastevent.timetuple())
            indexTimeSpan = last - first
            exportedIndexTimeSpan = time.time() - first
            return indexTimeSpan / exportedIndexTimeSpan
        except (AttributeError, TypeError, ValueError,
                OverflowError, ZeroDivisionError):
            # Narrowed from a bare except: missing/None timestamps,
            # out-of-range dates, or a zero elapsed span report no progress.
            return 0

    def getErrors(self):
        '''Return export job errors as a list (empty when none reported).'''
        if not self.jobexporterrors:
            return []
        return self.jobexporterrors.split(',')

    def isPaused(self):
        '''An export is paused when its underlying search is unscheduled.'''
        return not self.schedule.is_scheduled

    def execute_action(self, action_name):
        '''
        POST to the named action link of this entity.

        Returns True on HTTP 200, False when no suitable link exists.
        Falls back to '<list-link>/<action_name>' when the endpoint does
        not advertise the action link directly.
        '''
        if not self.action_links:
            return False
        url = None
        url_base = None
        for item in self.action_links:
            if action_name == item[0]:
                url = item[1]
                break
            elif item[0] == 'list':
                url_base = item[1]

        # Fallback: build the action url when not provided by endpoint.
        if url is None and url_base is not None:
            url = url_base + '/' + action_name

        if url is None:
            return False

        response, content = rest.simpleRequest(url, method='POST')
        return response.status == 200

    def pause(self):
        '''Suspend the scheduled export.'''
        return self.execute_action('pause')

    def resume(self):
        '''Resume a paused export.'''
        return self.execute_action('resume')

    def force(self):
        '''Trigger an immediate export run.'''
        #TODO: return the actual search id of the spawned search job
        return self.execute_action('force')

    def hasPartitionField(self, field):
        '''Return True if `field` is one of the configured partition fields.'''
        # Guard: partition_fields may be unset (None) on a new export;
        # previously this raised AttributeError.
        if not self.partition_fields:
            return False
        fields = self.partition_fields.split(',')
        return field in fields

    @classmethod
    def parse_except_messages(cls, e):
        '''Delegate exception-message parsing to the HDFS app model helper.'''
        return HDFSAppObjModel.parse_except_messages(e)