class APIClient():
    PING_TIMEOUT_IN_SECONDS = 10

    def __init__(self, connection, configuration):
        self.endpoint_start = 'alertflex-ctrl/rest/stix-alerts'
        headers = dict()
        auth = configuration.get('auth')
        headers['Authorization'] = b"Basic " + base64.b64encode(
            (auth['username'] + ':' + auth['password']).encode('ascii'))
        url_modifier_function = None
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    url_modifier_function,
                                    cert_verify=connection.get(
                                        'selfSignedCert', False))

    def ping_data_source(self):
        endpoint = self.endpoint_start + '/status'
        return self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.PING_TIMEOUT_IN_SECONDS)

    def get_search_results(self, query_expression, offset=None, length=None):
        endpoint = self.endpoint_start + '/search'
        data = {'query': query_expression}
        result = self.client.call_api(endpoint, 'GET', urldata=data)
        return result

    def delete_search(self, search_id):
        # Optional since this may not be supported by the data source API
        # Delete the search
        return {"code": 200, "success": True}
Example #2
class APIClient():
    PING_ENDPOINT = 'sensor'
    PROCESS_ENDPOINT = 'process'

    def __init__(self, connection, configuration):
        self.endpoint_start = 'api/v1/'
        auth = configuration.get('auth')
        headers = dict()
        headers['X-Auth-Token'] = auth.get('token')
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))
        self.timeout = connection['options'].get('timeout')

    def ping_box(self):
        endpoint = self.endpoint_start + self.PING_ENDPOINT
        return self.client.call_api(endpoint, 'GET', timeout=self.timeout)

    def run_search(self, query_expression, start=0, rows=10):
        headers = dict()
        endpoint = self.endpoint_start + self.PROCESS_ENDPOINT
        data = [("q", query_expression), ("start", start), ("rows", rows)]
        sort_by = 'start asc'  # The purpose of this is to maintain order stability when doing paging

        data.append(("sort", sort_by))

        return self.client.call_api(endpoint,
                                    'GET',
                                    headers,
                                    urldata=data,
                                    timeout=self.timeout)
class APIClient:
    """API Client to handle all calls."""
    PING_TIMEOUT_IN_SECONDS = 10

    def __init__(self, connection, configuration):
        """Initialization.
        :param connection: dict, connection dict
        :param configuration: dict,config dict"""

        headers = dict()
        url_modifier_function = None
        auth = configuration.get('auth')
        self.endpoint = 'api/advancedqueries/run'
        self.host = connection.get('host')

        if auth:
            if 'access_token' in auth:
                headers['Authorization'] = "Bearer " + auth['access_token']

        self.client = RestApiClient(
            connection.get('host'),
            connection.get('port', None),
            connection.get('cert', None),
            headers,
            url_modifier_function=url_modifier_function,
            cert_verify=connection.get('selfSignedCert', True),
            mutual_auth=connection.get('use_securegateway', False),
            sni=connection.get('sni', None))

    def ping_box(self):
        """Ping the endpoint."""
        endpoint = '/api'
        return self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.PING_TIMEOUT_IN_SECONDS)

    def run_search(self,
                   query_expression,
                   offset=DEFAULT_OFFSET,
                   length=DEFAULT_LIMIT):
        """get the response from MSatp endpoints
        :param query_expression: str, search_id
        :param offset: int,offset value
        :param length: int,length value
        :return: response, json object"""
        serialize = '| serialize rn = row_number() | where rn >= {offset} | limit {length}'
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        endpoint = self.endpoint
        query_expression = query_expression + serialize.format(offset=offset,
                                                               length=length)
        query_expression = json.dumps({
            'Query': query_expression
        }).encode("utf-8")
        return self.client.call_api(endpoint,
                                    'POST',
                                    headers=headers,
                                    data=query_expression)
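For reference, a small sketch of how run_search composes the paged KQL body; the query text and paging values below are made up.

import json

# Illustration of how the serialize clause is appended and the body is wrapped for POSTing.
serialize = '| serialize rn = row_number() | where rn >= {offset} | limit {length}'
query_expression = 'DeviceProcessEvents | where FileName =~ "powershell.exe" '
paged = query_expression + serialize.format(offset=0, length=100)
body = json.dumps({'Query': paged}).encode('utf-8')   # POSTed to api/advancedqueries/run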
Example #4
class APIClient:
    """API Client to handle all calls."""
    
    def __init__(self, connection, configuration):
        """Initialization.
        :param connection: dict, connection dict
        :param configuration: dict,config dict"""

        headers = dict()
        url_modifier_function = None
        default_api_version = 'v1.0'
        auth = configuration.get('auth')
        self.endpoint = '{api_version}/security/alerts'.format(api_version=default_api_version)
        self.host = connection.get('host')
        self.timeout = connection['options'].get('timeout')

        if auth:
            if 'access_token' in auth:
                headers['Authorization'] = "Bearer " + auth['access_token']

        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port', None),
                                    headers,
                                    url_modifier_function=url_modifier_function,
                                    cert_verify=connection.get('selfSignedCert', True),
                                    sni=connection.get('sni', None)
                                    )

    def ping_box(self):
        """Ping the endpoint."""
        params = dict()
        params['$top'] = 1
        return self.client.call_api(self.endpoint, 'GET', urldata=params, timeout=self.timeout)

    def run_search(self, query_expression, length):
        """get the response from azure_sentinel endpoints
        :param query_expression: str, search_id
        :param length: int,length value
        :return: response, json object"""
        headers = dict()
        headers['Accept'] = 'application/json'
        params = dict()
        params['$filter'] = query_expression
        params['$top'] = length
        return self.client.call_api(self.endpoint, 'GET', headers, urldata=params, timeout=self.timeout)

    def next_page_run_search(self, next_page_url):
        """get the response from azure_sentinel endpoints
        :param next_page_url: str, search_id
        :return: response, json object"""
        headers = dict()
        headers['Accept'] = 'application/json'
        url = next_page_url.split('?', maxsplit=1)[1]
        endpoint = self.endpoint + '?' + url
        return self.client.call_api(endpoint, 'GET', headers, timeout=self.timeout)
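A short illustration of how next_page_run_search rebuilds its endpoint from a nextLink value; the URL below is a made-up example.

# Hypothetical @odata.nextLink value; only the query string is kept and re-attached
# to the alerts endpoint.
next_page_url = "https://graph.microsoft.com/v1.0/security/alerts?$top=50&$skiptoken=abc123"
endpoint = 'v1.0/security/alerts' + '?' + next_page_url.split('?', maxsplit=1)[1]
# endpoint == 'v1.0/security/alerts?$top=50&$skiptoken=abc123'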
class APIClient():
    PING_ENDPOINT = 'sensor'
    PROCESS_ENDPOINT = 'process'

    def __init__(self, connection, configuration):
        self.endpoint_start_v1 = 'api/v1/'  # Uses API v1 for `ping` and `processes search` endpoints.
        self.endpoint_start_v4 = 'api/v4/'  # Uses API v4 for `events search` endpoint.
        auth = configuration.get('auth')
        headers = dict()
        headers['X-Auth-Token'] = auth.get('token')
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))
        self.timeout = connection['options'].get('timeout')

    def ping_box(self):
        endpoint = self.endpoint_start_v1 + self.PING_ENDPOINT
        return self.client.call_api(endpoint, 'GET', timeout=self.timeout)

    def run_processes_search(self, query_expression, start=0, rows=10):
        """
            https://developer.carbonblack.com/reference/enterprise-response/6.3/rest-api/#process-search
            Processes search using `/api/v1/process`
        """
        headers = dict()
        process_endpoint = self.endpoint_start_v1 + self.PROCESS_ENDPOINT
        data = [("q", query_expression), ("start", start), ("rows", rows),
                ("sort", 'start asc')]
        return self.client.call_api(process_endpoint,
                                    'GET',
                                    headers,
                                    urldata=data,
                                    timeout=self.timeout)

    def run_events_search(self, process_id, segment_id):
        """
            https://developer.carbonblack.com/reference/enterprise-response/6.3/rest-api/#process-event-details
            Event details search for process X at segment Y using `/api/v4/process/(process_id)/(segment_id)/event`
        """
        headers = dict()
        event_endpoint = self.endpoint_start_v4 + self.PROCESS_ENDPOINT + '/{}/{}/event'.format(
            process_id, segment_id)
        data = []
        return self.client.call_api(event_endpoint,
                                    'GET',
                                    headers,
                                    urldata=data,
                                    timeout=self.timeout)
    def parse_query(self, data):
        proxy_host = self.options['proxy_host']
        proxy_port = self.options['proxy_port']

        connection, configuration = unwrap_connection_options(self.options)

        client = RestApiClient(
            proxy_host,
            proxy_port,
            url_modifier_function=lambda host_port, endpoint, headers:
            f'https://{host_port}{endpoint}',
            cert_verify=self.options.get('proxy_cert'))
        response = client.call_api('/parse_query',
                                   'POST',
                                   data=json.dumps({
                                       'module':
                                       connection['type'],
                                       'data_source': {},
                                       'data':
                                       data,
                                       'options':
                                       connection['options']
                                   }),
                                   timeout=self.options.get('timeout'))
        return json.loads(response.bytes)
    def translate_results(self, data_source, data):
        # A proxy translation call passes the entire data source connection object in as the options
        # Top-most connection host and port are for the proxy
        proxy_host = self.options['proxy_host']
        proxy_port = self.options['proxy_port']

        connection, configuration = unwrap_connection_options(self.options)

        client = RestApiClient(
            proxy_host,
            proxy_port,
            url_modifier_function=lambda host_port, endpoint, headers:
            f'http://{host_port}{endpoint}')
        response = client.call_api('/translate_results',
                                   'POST',
                                   data=json.dumps({
                                       'module':
                                       connection['type'],
                                       "data_source":
                                       data_source,
                                       "results":
                                       data,
                                       "options":
                                       connection['options']
                                   }),
                                   timeout=self.options.get('timeout'))
        return json.loads(response.bytes)
Example #8
    def get_Secret(self):
        eventDetail = self.get_events()
        secretIdList = []
        secretCollection = []
        for obj in eventDetail:
            item = (obj['ItemId'])
            secretIdList.append(item)
        unique = set(secretIdList)
        for id in unique:
            secret_server_user_url = self.secret_detail + "/%s" % id
            headers = {
                'Authorization': self.accessToken,
                'Content-Type': 'application/json'
            }
            payload = {}
            response = RestApiClient.call_api(self,
                                              secret_server_user_url,
                                              'GET',
                                              headers=headers,
                                              data=payload,
                                              urldata=None,
                                              timeout=None)

            secretCollection.append(response.response.text)
        json_data = json.dumps(secretCollection)
        collection = json.loads(json_data)
        return collection
Example #9
    def get_events(self):
        payload = "{\"name\": \"Secret Server Events Logs\", \"parameters\": [{\"name\": \"startDate\", \"value\": '%s'} , {\"name\":\"endDate\",\"value\": '%s'}]}" % (
            self.startDate, self.endDate)
        headers = {
            'Authorization': self.accessToken,
            'Content-Type': 'application/json'
        }
        endpoint = "SecretServer/api/v1/reports/execute"

        response = RestApiClient.call_api(self,
                                          endpoint,
                                          'POST',
                                          headers=headers,
                                          data=payload,
                                          urldata=None,
                                          timeout=None)
        return_obj = {}
        if response.code != 200:
            response_txt = response.response.text
            ErrorResponder.fill_error(return_obj, message=response_txt)
            raise Exception(return_obj)

        collection = []
        json_data = response.response.text
        eventData = json.loads(json_data)
        col = eventData['columns']
        for obj in eventData['rows']:
            obj = dict(zip(col, obj))
            collection.append(obj)
        return collection
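The column/row zipping above turns the report payload into a list of dicts; a self-contained illustration with made-up data.

import json

# Made-up report payload in the shape the code above expects from the reports endpoint.
json_data = json.dumps({
    'columns': ['ItemId', 'Action', 'DateRecorded'],
    'rows': [[101, 'VIEW', '2021-01-01'], [102, 'EDIT', '2021-01-02']],
})
eventData = json.loads(json_data)
collection = [dict(zip(eventData['columns'], row)) for row in eventData['rows']]
# collection == [{'ItemId': 101, 'Action': 'VIEW', 'DateRecorded': '2021-01-01'}, ...]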
Example #10
class APIClient():
    PING_ENDPOINT = 'sensor'
    PING_TIMEOUT_IN_SECONDS = 10

    @staticmethod
    def _dialect_to_endpoint(dialect):
        assert dialect in ["binary", "process"]
        return dialect

    def __init__(self, connection, configuration):
        self.endpoint_start = 'api/v1/'
        auth = configuration.get('auth')
        headers = dict()
        headers['X-Auth-Token'] = auth.get('token')
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))
        self.search_timeout = connection['options'].get('timeout')

    def ping_box(self):
        endpoint = self.endpoint_start + self.PING_ENDPOINT
        return self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.PING_TIMEOUT_IN_SECONDS)

    def run_search(self, query_expression, dialect, start=0, rows=10):
        headers = dict()
        endpoint = self.endpoint_start + self._dialect_to_endpoint(dialect)
        data = [("q", query_expression), ("start", start), ("rows", rows)]

        # The purpose of this is to maintain order stability when doing paging
        if dialect == 'binary':
            sort_by = 'server_added_timestamp asc'
        else:  # process
            sort_by = 'start asc'
        data.append(("sort", sort_by))

        return self.client.call_api(endpoint,
                                    'GET',
                                    headers,
                                    urldata=data,
                                    timeout=self.search_timeout)
class APIClient():
    def __init__(self, connection, configuration):
        # Uncomment when implementing data source API client.
        auth = configuration.get('auth')
        headers = dict()
        if auth:
            if 'principal' in auth and 'secret' in auth:
                headers['Authorization'] = b"Basic " + base64.b64encode(
                    (auth['principal'] + ':' + auth['secret']).encode('ascii'))
        self.client = RestApiClient(connection.get('host'),
                                    port=None,
                                    headers=headers,
                                    url_modifier_function=None,
                                    cert_verify=True,
                                    sni=None,
                                    auth=None)

    def ping_data_source(self):
        # Pings the data source
        endpoint = ENDPOINT_ALL + "?format=json&sinceSeconds=3600"
        pingresult = self.client.call_api(endpoint=endpoint, method='GET')
        return pingresult

    def create_search(self, query_expression):
        # Queries the data source
        return {"code": 200, "query_id": query_expression}

    def get_search_status(self, search_id):
        # Check the current status of the search
        return {"code": 200, "status": "COMPLETED"}

    def get_search_results(self, search_id):
        # Return the search results. Results must be in JSON format before being translated into STIX
        #resultdata = self.client.call_api(endpoint=ENDPOINT_ALL+search_id, method='GET')#working
        endpoint = ENDPOINT_ALL + "?format=json"
        resultdata = self.client.call_api(endpoint=endpoint,
                                          method='GET',
                                          urldata=search_id)
        # Check the current status of the search
        return resultdata

    def delete_search(self, search_id):
        # Optional since this may not be supported by the data source API
        # Delete the search
        return {"code": 200, "success": True}
Example #12
    def ping_data_source(self):
        response = RestApiClient.call_api(self,
                                          self.auth_token_url,
                                          'GET',
                                          headers=self.headers,
                                          data=self.payload,
                                          urldata=None,
                                          timeout=None)
        return response.code
Example #13
class Connector(BaseConnector):
    def __init__(self, connection, configuration):
        self.request_http_path = "http://{}:{}".format(connection['options']['proxy_host'], connection['options']['proxy_port'])
        self.timeout = connection['options']['timeout']
        self.connection, self.configuration = self._unwrap_connection_options(copy.deepcopy(connection), copy.deepcopy(configuration))
        self.client = RestApiClient(connection['options']['proxy_host'], connection['options']['proxy_port'], url_modifier_function=lambda host_port,endpoint,headers: f'http://{host_port}{endpoint}')
        
    def ping_connection(self):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration})
        response = self.client.call_api('/ping', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def create_query_connection(self, query):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration, "query": query})
        response = self.client.call_api('/create_query_connection', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def create_results_connection(self, search_id, offset, length):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration, "search_id": search_id, "offset": offset, "length": length})
        response = self.client.call_api('/create_results_connection', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def create_status_connection(self, search_id):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration, "search_id": search_id})
        response = self.client.call_api('/create_status_connection', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def delete_query_connection(self, search_id):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration, "search_id": search_id})
        response = self.client.call_api('/delete_query_connection', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def is_async(self):
        data = json.dumps({"connection": self.connection, "configuration": self.configuration})
        response = self.client.call_api('/is_async', 'POST', data=data, timeout=self.timeout)
        return json.loads(response.bytes)

    def _unwrap_connection_options(self, connection, configuration):
        if 'options' in connection and 'destination' in connection['options']:
            destination_params = connection['options']['destination']
            if type(destination_params) == str:
                if len(destination_params):
                    destination_params = json.loads(destination_params)
                else:
                    destination_params = {}
            if destination_params:
                return destination_params['connection'], destination_params['configuration']
        return connection, configuration
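An illustration of the destination unwrapping done by _unwrap_connection_options, using a made-up proxied connection object.

import json

# Made-up proxied connection: the real data source lives under options.destination,
# here stored as a JSON string (a plain dict is also accepted by the code above).
connection = {
    'options': {
        'proxy_host': 'proxy.example.com',
        'proxy_port': 8080,
        'timeout': 30,
        'destination': json.dumps({
            'connection': {'type': 'elastic_ecs', 'host': 'es.example.com', 'options': {'timeout': 30}},
            'configuration': {'auth': {'username': 'user', 'password': 'pass'}},
        }),
    }
}
destination = json.loads(connection['options']['destination'])
inner_connection, inner_configuration = destination['connection'], destination['configuration']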
Example #14
    def get_token(self):
        response = RestApiClient.call_api(self,
                                          self.auth_token_url,
                                          'GET',
                                          headers=self.headers,
                                          data=self.payload,
                                          urldata=None,
                                          timeout=None)

        return_obj = {}
        response_code = response.code
        response_txt = response.response.text

        if (response_code == 200):
            json_obj = json.loads(response_txt)
            token = json_obj.get('access_token')
            self.accessToken = 'Bearer ' + token
            return self.accessToken
        else:
            ErrorResponder.fill_error(return_obj, message=response_txt)
            raise Exception(return_obj)
Example #15
class Connector(BaseSyncConnector):
    def __init__(self, connection, configuration):
        self.connection = connection
        self.configuration = configuration
        self.timeout = connection['options'].get('timeout')
        self.bundle_url = self.connection.get('url')
        auth = None
        conf_auth = configuration.get('auth', {})
        if 'username' in conf_auth and 'password' in conf_auth:
            auth = (conf_auth['username'], conf_auth['password'])
        self.client = RestApiClient(None,
                                    auth=auth,
                                    url_modifier_function=lambda host_port,
                                    endpoint, headers: f'{endpoint}')

    # We re-implement this method so we can fetch all the "bindings", as their method only
    # returns the first for some reason
    def match(self, pattern, observed_data_sdos, verbose=False):
        compiled_pattern = Pattern(pattern)
        matcher = MatchListener(observed_data_sdos, verbose)
        compiled_pattern.walk(matcher)

        found_bindings = matcher.matched()

        if found_bindings:
            matching_sdos = []
            for binding in found_bindings:
                matches = [
                    match for match in matcher.get_sdos_from_binding(binding)
                    if match not in matching_sdos
                ]
                matching_sdos.extend(matches)
        else:
            matching_sdos = []

        return matching_sdos

    def ping_connection(self):
        return_obj = dict()

        response = self.client.call_api(self.bundle_url,
                                        'head',
                                        timeout=self.timeout)
        response_txt = response.raise_for_status()

        if response.code == 200:
            return_obj['success'] = True
        elif response.code == 301:
            self.bundle_url = response.headers.get('Location')
            return self.ping_connection()
        else:
            ErrorResponder.fill_error(return_obj, response_txt, ['message'])
        return return_obj

    def create_results_connection(self, search_id, offset, length):
        observations = []
        return_obj = dict()

        response = None
        if self.connection['options'].get('error_type') == ERROR_TYPE_TIMEOUT:
            # httpstat.us/200?sleep=60000 for slow connection that is valid
            response = self.client.call_api(
                'https://httpstat.us/200?sleep=60000',
                'get',
                timeout=self.timeout)
        elif self.connection['options'].get(
                'error_type') == ERROR_TYPE_BAD_CONNECTION:
            # www.google.com:81 for a bad connection that will timeout
            response = self.client.call_api('https://www.google.com:81',
                                            'get',
                                            timeout=self.timeout)
        else:
            response = self.client.call_api(self.bundle_url,
                                            'get',
                                            timeout=self.timeout)
        if response.code != 200:
            response_txt = response.raise_for_status()
            if ErrorResponder.is_plain_string(response_txt):
                ErrorResponder.fill_error(return_obj, message=response_txt)
            elif ErrorResponder.is_json_string(response_txt):
                response_json = json.loads(response_txt)
                ErrorResponder.fill_error(return_obj, response_json,
                                          ['reason'])
            else:
                raise UnexpectedResponseException
        else:
            try:
                response_txt = response.read().decode('utf-8')
                bundle = json.loads(response_txt)

                if "stix_validator" in self.connection[
                        'options'] and self.connection['options'].get(
                            "stix_validator") is True:
                    results = validate_instance(bundle)

                    if results.is_valid is not True:
                        ErrorResponder.fill_error(
                            return_obj,
                            message='Invalid Objects in STIX Bundle.')
                        return return_obj

                for obj in bundle["objects"]:
                    if obj["type"] == "observed-data":
                        observations.append(obj)

                # Pattern match
                try:
                    results = self.match(search_id, observations, False)

                    if len(results) != 0:
                        return_obj['success'] = True
                        return_obj['data'] = results[int(offset):int(offset +
                                                                     length)]
                    else:
                        return_obj['success'] = True
                        return_obj['data'] = []
                except Exception as ex:
                    ErrorResponder.fill_error(
                        return_obj,
                        message='Object matching error: ' + str(ex))
            except Exception as ex:
                ErrorResponder.fill_error(
                    return_obj,
                    message='Invalid STIX bundle. Malformed JSON: ' + str(ex))
        return return_obj

    def create_query_connection(self, query):
        return {"success": True, "search_id": query}

    def create_status_connection(self, search_id):
        return {"success": True, "status": "COMPLETED", "progress": 100}

    def delete_query_connection(self, search_id):
        return_obj = dict()
        return_obj['success'] = True
        return return_obj
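A small self-contained illustration of how create_results_connection filters observed-data objects out of a bundle and pages them; the bundle content is made up.

# Made-up STIX bundle; only observed-data SDOs are collected, then paged with a list slice.
bundle = {
    "type": "bundle",
    "objects": [
        {"type": "identity", "id": "identity--1"},
        {"type": "observed-data", "id": "observed-data--1"},
        {"type": "observed-data", "id": "observed-data--2"},
    ],
}
observations = [obj for obj in bundle["objects"] if obj["type"] == "observed-data"]
offset, length = 0, 10
page = observations[int(offset):int(offset + length)]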
Example #16
class APIClient():
    def __init__(self, connection, configuration):
        auth = configuration.get('auth')
        headers = dict()
        headers['X-Auth-Token'] = auth.get('token')
        headers['Accept'] = 'application/json'
        headers['Content-Type'] = 'application/json'
        self.org_key = auth.get('org_key')
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))
        self.timeout = connection['options'].get('timeout')
        self.result_limit = connection['options'].get('result_limit')

    def ping_data_source(self):
        """Verifies the data source API is working by sending a GET request to
        https://<server_ip>/api/investigate/v1/orgs/{org_key}/processes/limits

        Status codes:
        200: successfully fetched the upper and lower time limits (i.e., the API works)
        400: malformed JSON body or invalid value
        403: forbidden
        500: internal server error
        """
        endpoint = f'api/investigate/v1/orgs/{self.org_key}/processes/limits'
        return self.client.call_api(endpoint, 'GET', timeout=self.timeout)

    def create_search(self, query_expression):
        """Queries the data source by sending a POST request to
        https://<server_ip>/api/investigate/v2/orgs/{org_key}/processes/search_jobs

        200: successfully submitted search for processes
        400: malformed JSON body or invalid value
        403: forbidden
        500: internal server error
        """
        endpoint = f'api/investigate/v2/orgs/{self.org_key}/processes/search_jobs'
        data = {
            'query': query_expression,
            'fields': DEFAULT_FIELDS,
            'start': 0,
            'rows': self.result_limit,
            'sort': [{
                'field': 'device_timestamp',
                'order': 'asc'
            }]
        }
        return self.client.call_api(endpoint,
                                    'POST',
                                    data=json.dumps(data),
                                    timeout=self.timeout)

    def get_search_status(self, job_id):
        """ Check the status of the search by sending a GET request to
        https://<server_ip>/api/investigate/v1/orgs/{org_key}/processes/search_jobs/{job_id}

        200: successfully retrieved status of process search
        400: malformed JSON body or invalid value
        403: forbidden
        500: internal server error
        """
        endpoint = f'api/investigate/v1/orgs/{self.org_key}/processes/search_jobs/{job_id}'
        return self.client.call_api(endpoint, 'GET', timeout=self.timeout)

    def get_search_results(self, job_id, start=0, rows=100):
        """Return the JSON-formatted search results by sending a GET request to
        https://<server_ip>/api/investigate/v2/orgs/{org_key}/processes/search_jobs/{job_id}/results

        200: successfully fetched processes
        400: malformed JSON body or invalid value
        403: forbidden
        500: internal server error
        """
        urldata = [("start", start), ("rows", rows)]
        endpoint = f'api/investigate/v2/orgs/{self.org_key}/processes/search_jobs/{job_id}/results'
        return self.client.call_api(endpoint,
                                    'GET',
                                    urldata=urldata,
                                    timeout=self.timeout)

    def delete_search(self, job_id):
        """Delete the search by sending a DELETE request to
        https://<server_ip>/api/investigate/v1/orgs/{orgkey}/processes/search_jobs/{job_id}

        204: successfully deleted a process search
        400: malformed JSON body or invalid value
        403: forbidden
        500: internal server error
        """
        endpoint = f'api/investigate/v1/orgs/{self.org_key}/processes/search_jobs/{job_id}'
        return self.client.call_api(endpoint, 'DELETE', timeout=self.timeout)
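A hedged sketch of the full search-job lifecycle with the client above; the connection values, credentials, and the assumed 'job_id' response field are placeholders, and DEFAULT_FIELDS is expected to be defined at module level.

import json

# Placeholder connection/configuration in the shape the constructor expects.
connection = {'host': 'defense.example.net', 'port': 443,
              'options': {'timeout': 30, 'result_limit': 1000}}
configuration = {'auth': {'token': 'API_SECRET_KEY/API_ID', 'org_key': 'ABCD1234'}}

client = APIClient(connection, configuration)
job = client.create_search('process_name:powershell.exe')        # POST .../processes/search_jobs
job_id = json.loads(job.read())['job_id']                        # assumed response shape
client.get_search_status(job_id)                                 # poll until the job completes
results = client.get_search_results(job_id, start=0, rows=100)   # page through results
client.delete_search(job_id)                                     # optional cleanup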
Example #17
class APIClient():

    # API METHODS

    # These methods are used to retrieve a) an authorization token using
    #   a REST API (sending credentials as params)
    #   b) reports (sending token and report query) using a different REST API

    # This class will encode any data or query parameters which will then be
    # sent to the call_api() method of the RestApiClient
    def __init__(self, connection, configuration):
        #
        self.endpoint_start = 'restAPI/'
        self.connection = connection
        self.configuration = configuration
        self.headers = dict()
        self.search_id = None
        self.query = None
        self.authorization = None
        self.credential = None
        #
        # Check if connection object contains the following

        username = configuration.get('auth').get("username", None)
        password = configuration.get('auth').get("password", None)
        grant_type = connection.get('options', {}).get("grant_type",
                                                       'password')
        client_id = connection.get('options', {}).get("client_id", None)
        client_secret = connection.get('options',
                                       {}).get("client_secret", None)
        # It is decided the authorization will not be sent by UDS
        #
        if (username is None or password is None or grant_type is None
                or client_id is None or client_secret is None):
            self.credential = None
            raise IOError(
                3001,
                "Guardium Credential not provided in the connection / configuration object"
            )
        else:
            self.credential = {
                "username": username,
                "password": password,
                "grant_type": grant_type,
                "client_id": client_id,
                "client_secret": client_secret
            }
#
        host_port = connection.get('host') + ':' + \
            str(connection.get('port', ''))

        url_modifier_function = None
        proxy = connection.get('proxy')
        if proxy is not None:
            proxy_url = proxy.get('url')
            proxy_auth = proxy.get('auth')
            if (proxy_url is not None and proxy_auth is not None):
                self.headers['proxy'] = proxy_url
                self.headers['proxy-authorization'] = 'Basic ' + proxy_auth
            if proxy.get('x_forward_proxy', None) is not None:
                self.headers['x-forward-url'] = 'https://' + \
                    host_port + '/'  # + endpoint, is set by 'add_endpoint_to_url_header'
                host_port = proxy.get('x_forward_proxy')
                if proxy.get('x_forward_proxy_auth', None) is not None:
                    self.headers['x-forward-auth'] = proxy.get(
                        'x_forward_proxy_auth')
                self.headers['user-agent'] = 'UDS'
                url_modifier_function = self.add_endpoint_to_url_header

        self.client = RestApiClient(
            host_port,
            None,
            connection.get('cert', None),
            self.headers,
            url_modifier_function,
            cert_verify=connection.get('selfSignedCert', True),
            mutual_auth=connection.get('use_securegateway', False),
            sni=connection.get('sni', None))
#        self.client = RestApiClient(host_port,None,connection.get('cert', None),self.headers,
#                                    url_modifier_function,
#                                    connection.get('cert_verify', 'True'))
#

    def add_endpoint_to_url_header(self, url, endpoint, headers):
        # this function is called from 'call_api' with proxy forwarding,
        # it concatenates the endpoint to the header containing the url.
        headers['x-forward-url'] += endpoint
        # url is returned since it points to the proxy for initial call
        return url

    def add_header_elements(self, key, value):
        self.headers[key] = value
        return

    def ping_box(self):
        # Subroto -- Guardium does not have ping facility
        # We test if we can get the access token if we can then success = true
        #
        respObj = Response()
        if (self.fetch_accessToken()):
            respObj.code = "200"
            respObj.error_type = ""
            respObj.status_code = 200
            content = '{"status":"OK", "data": {"message": "Service is up."}}'
            respObj._content = bytes(content, 'utf-8')
        else:
            respObj.code = "503"
            respObj.error_type = "Service Unavailable"
            respObj.status_code = 503
            content = '{"status":"Failed", "data": {"message": "Service is down."}}'
            respObj._content = bytes(content, 'utf-8')
        # return
        return ResponseWrapper(respObj)
#

    def get_databases(self):
        # Sends a GET request
        endpoint = self.endpoint_start + 'databases'
        return self.client.call_api(endpoint, 'GET')

    def get_database(self, database_name):
        # Sends a GET request
        endpoint = self.endpoint_start + 'databases' + '/' + database_name
        return self.client.call_api(endpoint, 'GET')

    def isTimestampValid(self, tstamp):
        if tstamp is not None:
            if (tstamp > (datetime.datetime.now()).timestamp()):
                return True
        return False
#

    def get_credential(self):
        # Subroto -- Assumption: credential object will contain the full
        # guardium credential for the call in json format.
        if self.credential is None:
            raise IOError(
                3001,
                "Guardium Credential object is found to be None. Error Raised."
            )
#
        else:
            data = urllib.parse.urlencode(self.credential)
#
        return data
#

    def fetch_accessToken(self):
        # process new authorization token
        # Get access token if not present
        # the credential is sent as a url-encoded string (see get_credential)
        #
        successVal = False
        #
        data = self.get_credential()
        #print(data)
        endpoint = "oauth/token"
        tNow = datetime.datetime.now()
        response = self.client.call_api(endpoint,
                                        "POST",
                                        params=data,
                                        data=None)
        jResp = json.loads(str(response.read(), 'utf-8'))
        #
        #print(jResp)
        if (response.code != 200):
            #print(response.code)
            errMsg = str(jResp) + " -- " + "Authorization Token not received."
            raise ValueError(3002, errMsg)
        else:
            successVal = True
            tExp = (tNow + datetime.timedelta(seconds=jResp.get("expires_in"))
                    ).timestamp()
            self.authorization = {
                "access_token": jResp.get("access_token"),
                "expiresTimestamp": tExp
            }
#
#print(self.authorization)
        return successVal
#

    def get_accessToken(self):
        successVal = False
        if (self.authorization is not None):
            # Test for Authorization validity
            if self.isTimestampValid(
                (self.authorization).get("expiresTimestamp")):
                successVal = True
                self.setAuthorizationHeader()
                return successVal
#
# We could not find a valid token, so we request a new one
#
        if (self.fetch_accessToken()):
            successVal = True
            self.setAuthorizationHeader()
#
        return successVal
#

    def setAuthorizationHeader(self):
        auth = "Bearer " + \
                        str((self.authorization).get("access_token"))
        #
        # Test  -- uncomment to test token failure
        #auth = "Bearer 575d161c-9a91-4c05-bf0a-1c2d415a8c40"
        self.add_header_elements("authorization", auth)
        return
#
# NOTE: the connector architecture forces this sync connector to behave as an async connector;
#       therefore, the state of the connector has to be preserved in the search_id,
#       which is not generated by Guardium.  We generate it and store the necessary state,
#       such as self.credential and self.query (and possibly self.authorization).
#       IF the originally generated search_id is changed then the state is lost

    def set_searchId(self, search_id):
        self.search_id = search_id
        return
#

    def build_searchId(self):
        #       It should be called only ONCE when transmit query is called
        # Structure of the search id is
        # '{"query": ' + json.dumps(self.query) + ', "credential" : ' + json.dumps(self.credential) + '}'
        s_id = None
        #
        if (self.query is None or self.authorization is None
                or self.credential is None):
            raise IOError(
                3001,
                "Could not generate search id because 'query' or 'authorization token' or 'credential info' is not available."
            )
#
        else:
            id_str = '{"query": ' + json.dumps(
                self.query) + ', "credential" : ' + json.dumps(
                    self.credential) + '}'
            #print(id_str)
            id_byt = id_str.encode('utf-8')
            s_id = base64.b64encode(id_byt).decode()
            self.set_searchId(s_id)
#
#print(s_id)
        return s_id
#

    def decode_searchId(self):
        # These values (self.credential, self.query) must be present; self.authorization may not be.
        try:
            id_dec64 = base64.b64decode(self.search_id)
            jObj = json.loads(id_dec64.decode('utf-8'))
        except Exception:
            raise IOError(
                3001, "Could not decode search id content - " + self.search_id)
#
        self.query = jObj.get("query", None)
        self.credential = jObj.get("credential", None)
        self.authorization = jObj.get("authorization", None)
        return
#

    def get_searches(self):
        # CAN NOT be implemented for Guardium
        #
        endpoint = self.endpoint_start + "searches"
        return self.client.call_api(endpoint, 'GET')
#

    def create_search(self, query_expression):
        # validate credential and create search_id.  No query submission -- Sync call
        #
        respObj = Response()
        respObj.code = "401"
        respObj.error_type = ""
        respObj.status_code = 401
        if (self.get_accessToken()):
            self.query = query_expression
            response = self.build_searchId()
            if response is not None:
                respObj.code = "200"
                respObj.error_type = ""
                respObj.status_code = 200
                content = '{"search_id": "' + \
                    str(response) + \
                    '", "data": {"message":  "Search id generated."}}'
                respObj._content = bytes(content, 'utf-8')
            else:
                respObj.code = "404"
                respObj.error_type = "Not found"
                respObj.status_code = 404
                respObj.message = "Could not generate search id."
        else:
            respObj.error_type = "Unauthorized: Access token could not be generated."
            respObj.message = "Unauthorized: Access token could not be generated."
#
        return ResponseWrapper(respObj)
#

    def get_status(self, search_id):
        # Subroto we do not need to send anything to Guardium
        # We create response object and send "COMPLETED"
        # Note: we may have an issue with this simplistic approach
        respObj = Response()
        if (self.fetch_accessToken()):
            respObj.code = "200"
            respObj.error_type = ""
            respObj.status_code = 200
            content = '{"search_id": "' + search_id + \
                '", "progress":"Completed", "status":"COMPLETED", "data": {"message":"Completed for the search id provided."}}'
            respObj._content = bytes(content, 'utf-8')
        else:
            respObj.code = "503"
            respObj.error_type = "Service Unavailable"
            respObj.status_code = 503
            content = '{"status":"Failed", "data": {"message": "Could obtain status: Authentication issue / service unavailable."}}'
            respObj._content = bytes(content, 'utf-8')
        #
        return ResponseWrapper(respObj)
#

    def set_IndexAndFsize(self, indexFrom=None, fetchSize=None):
        data = json.loads(self.query)
        try:
            indx = int(indexFrom)
            fsize = int(fetchSize)
        except (TypeError, ValueError):
            raise ValueError(
                "Offset (indexFrom) or length (fetchSize) is not an integer")
#
#       replace the data string
        data["indexFrom"] = str(indx)
        data["fetchSize"] = str(fsize)
        return json.dumps(data)
#

    def get_search_results(self,
                           search_id,
                           response_type,
                           indexFrom=None,
                           fetchSize=None):
        # This function calls Guardium (a POST to the online_report endpoint) to get data
        self.set_searchId(search_id)
        self.decode_searchId()
        #  replacement indexFrom and fetchSize
        data = self.set_IndexAndFsize(indexFrom, fetchSize)
        #
        if (self.get_accessToken()):
            endpoint = self.endpoint_start + "online_report"
            #
            response = self.client.call_api(endpoint,
                                            'POST',
                                            params=None,
                                            data=data)
            status_code = response.response.status_code
            #
            #           Though the connector gets the authorization token just before fetching the actual result
            #           there is a possibility that the token returned is only valid for a second and response_code = 401
            #           is returned.  Catch that situation (though remote) and process again.
            if status_code != 200:
                error_msg = json.loads(str(response.read(), 'utf-8'))
                error_code = error_msg.get('error', None)
                if status_code == 401 and error_code == "invalid_token":
                    self.authorization = None
                    if (self.get_accessToken()):
                        response = self.client.call_api(endpoint,
                                                        'POST',
                                                        params=None,
                                                        data=data)
                        status_code = response.response.status_code
                    else:
                        raise ValueError(3002,
                                         "Authorization Token not received ")
#
# Now START and STOP are optional -- the data set can therefore be empty; handle that situation here
            if status_code == 200:
                #
                # Determine if the response is empty; if empty, Guardium sends {"ID": 0,
                # "Message": "ID=0 The Query did not retrieve any records"}
                # Raise an error -->  1010: ErrorCode.TRANSMISSION_RESPONSE_EMPTY_RESULT
                # response_content = self.raiseErrorIfEmptyResult(response)
                return response
            else:
                raise ValueError(1020, "Error -- Status Code is NOT 200!")
        else:
            raise ValueError(3002, "Authorization Token not received ")
#           End of this function

    def raiseErrorIfEmptyResult(self, response):
        # Determine if the response is empty; if empty, Guardium sends {"ID": 0,
        # "Message": "ID=0 The Query did not retrieve any records"} <-- check for that and raise an error
        #               1010: ErrorCode.TRANSMISSION_RESPONSE_EMPTY_RESULT
        r_content_str = (response.read()).decode('utf8').replace("'", '"')
        response_content = json.loads(r_content_str)
        #print(r_content_str)
        if "ID" in response_content:
            #print(response_content)
            errMsg = response_content.get(
                "Message",
                "Default Message - NO Records Fetched using this Query.")
            raise ValueError(1010, errMsg)
        else:
            return response_content
#
#

    def update_search(self, search_id, save_results=None, status=None):
        # Subroto -- not used in Guardium context
        # posts search result to site
        endpoint = self.endpoint_start + "searches/" + search_id
        data = {}
        if save_results:
            data['save_results'] = save_results
        if status:
            data['status'] = status
        data = urllib.parse.urlencode(data)
        data = data.encode('utf-8')
        return self.client.call_api(endpoint, 'POST', params=None, data=data)
#

    def delete_search(self, search_id):
        # Subroto -- not used.
        # deletes search created earlier.
        #endpoint = self.endpoint_start + "searches" + '/' + search_id
        #return self.client.call_api(endpoint, 'DELETE')
        return {"success": True, "search_id": search_id}
Example #18
class APIClient():
    PING_ENDPOINT = '_cluster/health?pretty'

    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)
        headers = dict()
        url_modifier_function = None
        auth = configuration.get('auth')
        self.indices = connection.get('indices', None)

        if self.indices and type(self.indices) == str:
            self.indices = self.indices.split(",")

        if isinstance(self.indices, list):  # Get list of all indices
            self.indices = [i.strip(' ') for i in self.indices]
            self.indices = ",".join(self.indices)

        if self.indices:
            self.endpoint = self.indices + '/' + '_search'
        else:
            self.endpoint = '_search'

        if auth:
            if 'username' in auth and 'password' in auth:
                headers['Authorization'] = b"Basic " + base64.b64encode(
                    (auth['username'] + ':' +
                     auth['password']).encode('ascii'))
            elif 'api_key' in auth and 'id' in auth:
                headers['Authorization'] = b"ApiKey " + base64.b64encode(
                    (auth['id'] + ':' + auth['api_key']).encode('ascii'))
            elif 'access_token' in auth:
                headers['Authorization'] = "Bearer " + auth['access_token']

        self.client = RestApiClient(
            connection.get('host'),
            connection.get('port'),
            headers,
            url_modifier_function=url_modifier_function,
            cert_verify=connection.get('selfSignedCert', True),
            sni=connection.get('sni', None))

        self.timeout = connection['options'].get('timeout')

    def ping_box(self):
        return self.client.call_api(self.PING_ENDPOINT,
                                    'GET',
                                    timeout=self.timeout)

    def run_search(self, query_expression, offset=None, length=DEFAULT_LIMIT):
        headers = dict()
        headers['Content-Type'] = 'application/json'

        endpoint = self.endpoint

        uri_search = False  # Toggle between URI search and request-body search (for testing/debugging)

        # URI Search
        if uri_search:
            if query_expression is not None:
                # update/add size value
                if length is not None:
                    if re.search(r"&size=\d+", query_expression):
                        query_expression = re.sub(r"(?<=&size=)\d+",
                                                  str(length),
                                                  query_expression)
                    else:
                        query_expression = '{}&size={}'.format(
                            query_expression, length)

                # add offset to query expression
                if offset is not None:
                    query_expression = '{}&from={}'.format(
                        query_expression, offset)

            # addition of QueryString to API END point
            endpoint = endpoint + '?q=' + query_expression

            return self.client.call_api(endpoint,
                                        'GET',
                                        headers,
                                        timeout=self.timeout)
        # Request body search
        else:
            # add size value
            if length is not None:
                endpoint = "{}?size={}".format(endpoint, length)

            # add offset value
            if offset is not None:
                endpoint = "{}&from={}".format(endpoint, offset)

            data = {
                "_source": {
                    "includes": [
                        "@timestamp", "source.*", "destination.*", "event.*",
                        "client.*", "server.*", "host.*", "network.*",
                        "process.*", "user.*", "file.*", "url.*", "registry.*",
                        "dns.*"
                    ]
                },
                "query": {
                    "query_string": {
                        "query": query_expression
                    }
                }
            }

            self.logger.debug("URL endpoint: " + endpoint)
            self.logger.debug("URL data: " + json.dumps(data))

            return self.client.call_api(endpoint,
                                        'GET',
                                        headers,
                                        data=json.dumps(data),
                                        timeout=self.timeout)
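For the request-body branch above, the effective call is a _search request with paging on the URL and a query_string body; a small illustration with placeholder indices and query.

import json

# Placeholder values mirroring the request-body branch of run_search.
indices, offset, length = 'winlogbeat-*,filebeat-*', 0, 100
endpoint = '{}/_search?size={}&from={}'.format(indices, length, offset)
body = json.dumps({
    '_source': {'includes': ['@timestamp', 'event.*', 'process.*']},
    'query': {'query_string': {'query': 'event.category:process AND process.name:powershell.exe'}},
})
# client.call_api(endpoint, 'GET', {'Content-Type': 'application/json'}, data=body, timeout=30)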
Example #19
class APIClient:
    QUERY_ENDPOINT = "advancedsearch/api/search/"
    PING_ENDPOINT = "status?includechildren=false&fast=false"

    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)
        headers = {}
        url_modifier_function = None
        self.auth = configuration.get('auth')
        self.client = RestApiClient(
            connection.get('host'),
            port=None,
            headers=headers,
            cert_verify=False,
            url_modifier_function=url_modifier_function)

    def ping_box(self):
        """
            Ping the Data Source
            :return: Response object
        """
        encoded_query = self._encode_query("")
        headers = self.get_header(self.PING_ENDPOINT, encoded_query)
        return self.client.call_api(self.PING_ENDPOINT,
                                    'GET',
                                    headers=headers,
                                    data=None)

    def get_search_results(self, query):
        """
        Get results from Data Source
        :param query: Data Source Query
        :return: Response Object
        """
        self.logger.debug("query: %s", query)
        encoded_query = self._encode_query(query)
        headers = self.get_header(self.QUERY_ENDPOINT, encoded_query)
        return self.client.call_api(self.QUERY_ENDPOINT + encoded_query,
                                    'GET',
                                    headers=headers,
                                    data=None)

    def get_header(self, endpoint, query):
        query_url = "/" + endpoint + query
        time = datetime.datetime.utcnow()
        sig = hmac.new(
            self.auth["private_token"].encode("ASCII"),
            msg=(query_url + "\n" + self.auth["public_token"] + "\n" +
                 time.isoformat(timespec="auto")).encode("ASCII"),
            digestmod=hashlib.sha1).hexdigest()
        header = {
            "DTAPI-Token": self.auth["public_token"],
            "DTAPI-Date": time.isoformat(timespec="auto"),
            "DTAPI-Signature": sig
        }

        return header

    @staticmethod
    def _encode_query(_query):
        """
        Encode Query:
            - Encode query to base64 and convert to string.

        """
        _query_encode_bytes = base64.b64encode(bytes(_query, 'utf-8'))
        _query_encoded = str(_query_encode_bytes, 'utf-8')
        return _query_encoded
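# A minimal, standalone sketch of the request-signing scheme used by get_header()
# above: an HMAC-SHA1 digest over "/<endpoint><base64 query>\n<public token>\n<UTC time>"
# keyed with the private token. The token values in the commented call are
# hypothetical placeholders.
import base64
import datetime
import hashlib
import hmac


def sign_request(endpoint, query, public_token, private_token):
    # Base64-encode the query exactly as _encode_query() does
    encoded_query = base64.b64encode(query.encode('utf-8')).decode('utf-8')
    now = datetime.datetime.utcnow().isoformat(timespec="auto")
    message = "/" + endpoint + encoded_query + "\n" + public_token + "\n" + now
    signature = hmac.new(private_token.encode("ASCII"),
                         msg=message.encode("ASCII"),
                         digestmod=hashlib.sha1).hexdigest()
    return {"DTAPI-Token": public_token,
            "DTAPI-Date": now,
            "DTAPI-Signature": signature}


# headers = sign_request("advancedsearch/api/search/", '{"search": "..."}',
#                        "public-token-placeholder", "private-token-placeholder")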
Exemple #20
class APIClient():

    QUERY_ENDPOINT = 'web/api/v2.1/dv/init-query'
    RESULT_ENDPOINT = 'web/api/v2.1/dv/events'
    QUERY_STATUS = 'web/api/v2.1/dv/query-status'
    PING_STATUS = 'web/api/v2.1/system/status'

    def __init__(self, connection, configuration):

        headers = dict()
        self.auth = configuration.get('auth')
        self.api_key = "ApiToken " + self.auth.get('apitoken')
        headers['Authorization'] = self.api_key
        headers['Content-type'] = 'application/json'
        self.timeout = connection['options']['timeout']
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port', None),
                                    headers,
                                    url_modifier_function=None)

    def ping_datasource(self):
        """
        ping or check the system status
        """
        endpoint = self.PING_STATUS
        return self.client.call_api(endpoint,
                                    'GET',
                                    headers=self.client.headers,
                                    timeout=self.timeout)

    def create_search(self, query_expression):
        """
        init query
        :param query_expression: data source query
        :return: queryId
        """

        endpoint = self.QUERY_ENDPOINT
        data = query_expression
        data = data.encode('utf-8')
        return self.client.call_api(endpoint,
                                    'POST',
                                    headers=self.client.headers,
                                    data=data,
                                    timeout=self.timeout)

    def get_search_status(self, search_id):
        """
        get query status
        :param search_id: queryId returned by create_search
        :return: query status response
        """

        endpoint = self.QUERY_STATUS + "?queryId=" + search_id
        params = {}
        params['output'] = 'json'
        return self.client.call_api(endpoint,
                                    'GET',
                                    headers=self.client.headers,
                                    urldata=params,
                                    timeout=self.timeout)

    def get_search_results(self, search_id, offset, length, nextcursor=None):
        """
        Get results from Data Source
        :param search_id: Data Source queryId
        :param nextcursor: cursor for paging beyond the per-call limit
        :return: Response Object
        """

        endpoint = self.RESULT_ENDPOINT + "?queryId=" + search_id
        # The API returns at most 1000 results per call
        limit = 1000
        if nextcursor is not None:
            endpoint = endpoint + "&cursor=" + nextcursor
        endpoint = endpoint + "&limit=" + str(limit)

        params = {}
        params['output'] = 'json'
        return self.client.call_api(endpoint,
                                    'GET',
                                    headers=self.client.headers,
                                    urldata=params,
                                    timeout=self.timeout)

    def delete_search(self, search_id):
        """
        The data source does not expose a delete API, so return a stubbed success response.
        :param search_id: queryId returned by create_search
        :return: dict
        """
        return {"code": 200, "success": True}
class APIClient():
    # API METHODS

    # These methods are used to call Splunk's API methods through http requests.
    # Each method makes use of the http methods below to perform the requests.

    # This class will encode any data or query parameters which will then be
    # sent to the call_api() method of the RestApiClient.
    PING_TIMEOUT_IN_SECONDS = 10

    def __init__(self, connection, configuration):

        # This version of the Splunk APIClient is designed to function with
        # Splunk Enterprise version >= 6.5.0 and <= 7.1.2
        # http://docs.splunk.com/Documentation/Splunk/7.1.2/RESTREF/RESTprolog

        self.output_mode = 'json'
        self.endpoint_start = 'services/'
        self.authenticated = False
        headers = dict()
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    connection.get('cert', None),
                                    headers,
                                    cert_verify=connection.get('selfSignedCert', True),
                                    mutual_auth=connection.get('use_securegateway', False),
                                    sni=connection.get('sni', None)
                                    )
        self.auth = configuration.get('auth')
        self.headers = headers

    def authenticate(self):
        if not self.authenticated:
            self.set_splunk_auth_token(self.auth, self.headers)
            self.authenticated = True
        
    def set_splunk_auth_token(self, auth, headers):
        data = {'username': auth['username'], 'password': auth['password'], 'output_mode': 'json'}
        endpoint = self.endpoint_start + 'auth/login'
        try:
            response_json = json.load(self.client.call_api(endpoint, 'POST', headers, data=data))
            headers['Authorization'] = "Splunk " + response_json['sessionKey']
        except KeyError as e:
            raise Exception('Authentication error occurred while getting auth token: ' + str(e))

    def ping_box(self):
        self.authenticate()
        endpoint = self.endpoint_start + 'server/status'
        data = {'output_mode': self.output_mode}
        return self.client.call_api(endpoint, 'GET', data=data, timeout=self.PING_TIMEOUT_IN_SECONDS)
        
    def create_search(self, query_expression):
        # sends a POST request to 
        # https://<server_ip>:<port>/services/search/jobs
        self.authenticate()
        endpoint = self.endpoint_start + "search/jobs"
        data = {'search': query_expression, 'output_mode': self.output_mode}
        return self.client.call_api(endpoint, 'POST', data=data)

    def get_search(self, search_id):
        # sends a GET request to
        # https://<server_ip>:<port>/services/search/jobs/<search_id>
        # returns information about the search job and its properties.
        self.authenticate()
        endpoint = self.endpoint_start + 'search/jobs/' + search_id        
        data = {'output_mode': self.output_mode}        
        return self.client.call_api(endpoint, 'GET', data=data)

    def get_search_results(self, search_id, offset, count):
        # sends a GET request to
        # https://<server_ip>:<port>/services/search/jobs/<search_id>/results
        # returns results associated with the search job.
        self.authenticate()
        endpoint = self.endpoint_start + "search/jobs/" + search_id + '/results'
        data = {'output_mode': self.output_mode}
        if ((offset is not None) and (count is not None)):
            data['offset'] = str(offset)
            data['count'] = str(count)
        # response object body should contain information pertaining to search.
        return self.client.call_api(endpoint, 'GET', urldata=data)
    
    def delete_search(self, search_id):
        # sends a DELETE request to
        # https://<server_ip>:<port>/services/search/jobs/<search_id>
        # cancels and deletes search created earlier.
        self.authenticate()
        endpoint = self.endpoint_start + 'search/jobs/' + search_id
        data = {'output_mode': self.output_mode}
        return self.client.call_api(endpoint, 'DELETE', data=data)
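# A hedged usage sketch of the Splunk search-job lifecycle above: create a job,
# poll it until it is done, read a page of results, then delete it. The
# connection/configuration dicts and the 'sid' / 'entry[0].content.isDone'
# response fields are assumptions based on the Splunk REST API.
import json
import time


def run_splunk_search(connection, configuration, query_expression):
    client = APIClient(connection, configuration)

    # 1. POST services/search/jobs and capture the search id (sid)
    sid = json.loads(client.create_search(query_expression).read())['sid']

    # 2. Poll services/search/jobs/<sid> until the job reports isDone
    while True:
        job = json.loads(client.get_search(sid).read())
        if job['entry'][0]['content']['isDone']:
            break
        time.sleep(2)

    # 3. Read the first 100 results, then cancel and delete the job
    results = json.loads(client.get_search_results(sid, 0, 100).read())
    client.delete_search(sid)
    return results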
Exemple #22
class APIClient:
    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)
        self.endpoint_start = ''
        headers = dict()
        host_port = connection.get('host') + ':' + str(
            connection.get('port', ''))
        headers['accept'] = 'application/json'
        auth = configuration.get('auth')
        if auth is not None and auth.get('token', None) is not None:
            headers['Authorization'] = 'token {}'.format(auth.get('token'))
        url_modifier_function = None
        headers['user-agent'] = _USER_AGENT

        self.timeout = connection['options'].get('timeout')
        self.result_limit = connection['options'].get('result_limit')
        if self.result_limit > _MAX_RESULT:
            self.logger.warning(
                "The length exceeds length limit. Use default length: %s",
                _MAX_RESULT)
            self.result_limit = _MAX_RESULT

        self.client = RestApiClient(host_port,
                                    None,
                                    headers,
                                    url_modifier_function,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))

    def ping_data_source(self):
        # Pings the data source
        endpoint = 'tide/api/data/threats/state'
        # now = datetime.datetime.utcnow().isoformat(timespec="milliseconds") + "Z"
        # https://csp.infoblox.com:443/tide/api/data/threats/state?type=host&rlimit=1

        return self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.timeout,
                                    urldata={
                                        "type": "host",
                                        "rlimit": "1"
                                    })

    def get_search_results(self, search_id, range_start=None, range_end=None):
        # Return the search results. Results must be in JSON format before being translated into STIX
        # endpoint = self.endpoint_start + '/api/dnsdata/v2/dns_event'

        payload = json.loads(search_id)
        if payload['source'] == 'dnsEventData':
            return self._get_dnseventdata_results(search_id, range_start,
                                                  range_end)
        elif payload['source'] == 'dossierData':
            return self._get_dossierdata_results(search_id, range_start,
                                                 range_end)
        elif payload['source'] == 'tideDbData':
            return self._get_tidedbdata_results(search_id, range_start,
                                                range_end)

        # default behavior
        raise RuntimeError("Unknown source provided source={}".format(
            payload['source']))

    def _get_dnseventdata_results(self,
                                  search_id,
                                  range_start=None,
                                  range_end=None):
        endpoint = 'api/dnsdata/v2/dns_event'
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        payload = json.loads(search_id)
        resp_dict = dict()
        resp_dict["data"] = []

        start = range_start if range_start else 0
        end = range_end if range_end else 0
        offset = start
        max_fetch_count = 10
        for fetch_iteration in range(0, max_fetch_count):
            params = {"_limit": self.result_limit, "_offset": offset}
            resp = self.client.call_api(endpoint + "?" + payload["query"],
                                        'GET',
                                        urldata=params,
                                        headers=headers,
                                        timeout=self.timeout)
            resp_dict["code"] = resp.code
            if resp.code != 200:
                if resp.code == 401:
                    resp_dict["message"] = resp.read().decode("utf-8")
                else:
                    response_payload = json.loads(resp.read())
                    resp_dict["message"] = "\n".join([
                        error["message"] for error in response_payload["error"]
                    ])

                del resp_dict["data"]
                return resp_dict

            # successful request, append data to collection and recalculate offset
            response_payload = json.loads(resp.read())
            if "result" not in response_payload or len(
                    response_payload["result"]) == 0:
                self.logger.debug("No additional results found")
                break

            offset += len(response_payload["result"])
            for event in response_payload["result"]:
                resp_dict["data"].append({"dnsEventData": event})

            if len(resp_dict["data"]) > end - start:
                resp_dict["data"] = resp_dict["data"][0:end - start]
                break

            if fetch_iteration == max_fetch_count - 1:
                self.logger.warning("Reach max fetch count %s, stop loop",
                                    max_fetch_count)
                break

        if resp_dict.get("code") == 200:
            self.logger.debug("The DNS Event count is %s",
                              len(resp_dict["data"]))

        return resp_dict

    def _get_dossierdata_results(self,
                                 search_id,
                                 range_start=0,
                                 range_end=None):
        endpoint = 'tide/api/services/intel/lookup/indicator'
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        payload = json.loads(search_id)
        resp_dict = dict()
        resp_dict["data"] = []
        start = range_start if range_start else 0
        end = range_end if range_end else 0

        params = {'wait': 'true', 'source': 'pdns'}

        # NOTE: Dossier does not support pagination via multiple requests. All results returned in the response.
        resp = self.client.call_api(endpoint + "/" + payload["threat_type"] +
                                    "?" + payload["query"],
                                    'GET',
                                    urldata=params,
                                    headers=headers,
                                    timeout=self.timeout)
        resp_dict["code"] = resp.code
        if resp.code != 200:
            if resp.code == 401:
                resp_dict["message"] = resp.read().decode("utf-8")
            else:
                response_payload = json.loads(resp.read())
                resp_dict["message"] = response_payload["error"]
            del resp_dict["data"]
            return resp_dict

        response_payload = json.loads(resp.read())
        for i in response_payload["results"]:
            for j in i["data"]["items"]:
                restructure_payload = {
                    'job': {
                        'create_time': response_payload['job']['create_time']
                    },
                    'results': [{
                        'data': {
                            'items': [j]
                        }
                    }]
                }
                resp_dict["data"].append({"dossierData": restructure_payload})

        # Trim result set based on min/max range values
        end = end if end < len(resp_dict["data"]) else len(resp_dict["data"])
        num_results = end - start

        if len(resp_dict["data"]) > end - start:
            resp_dict["data"] = resp_dict["data"][start:end]

        if resp_dict.get("code") == 200:
            self.logger.debug("The Dossier count is %s",
                              len(resp_dict["data"]))
        return resp_dict

    def _get_tidedbdata_results(self,
                                search_id,
                                range_start=0,
                                range_end=None):
        endpoint = 'tide/api/data/threats/state'
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        payload = json.loads(search_id)
        resp_dict = dict()
        resp_dict["data"] = []

        start = range_start if range_start else 0
        end = range_end if range_end else 0

        params = {
            "rlimit": self.result_limit,
        }

        if "type=" not in search_id:
            params["type"] = payload["threat_type"]

        if payload["threat_type"] == "ip":
            params["include_ipv6"] = "true"

        # NOTE: Tide does not support pagination via multiple requests. All results returned in the response.
        resp = self.client.call_api(endpoint + "?" + payload["query"],
                                    'GET',
                                    urldata=params,
                                    headers=headers,
                                    timeout=self.timeout)

        resp_dict["code"] = resp.code
        if resp.code != 200:
            if resp.code == 401:
                resp_dict["message"] = resp.read().decode("utf-8")
            else:
                response_payload = json.loads(resp.read())
                resp_dict["message"] = response_payload["error"]
            del resp_dict["data"]
            return resp_dict

        response_payload = json.loads(resp.read())
        for i in response_payload["threat"]:
            resp_dict["data"].append({"tideDbData": i})

        # Trim result set based on min/max range values
        end = end if end < len(resp_dict["data"]) else len(resp_dict["data"])
        num_results = end - start

        if len(resp_dict["data"]) > end - start:
            resp_dict["data"] = resp_dict["data"][start:end]

        if resp_dict.get("code") == 200:
            self.logger.debug("The TIDE count is %s", len(resp_dict["data"]))
        return resp_dict
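# The Infoblox client above treats search_id as a JSON document rather than an
# opaque id: get_search_results() dispatches on its "source" field, and the
# per-source helpers append "query" verbatim to the endpoint and (for Dossier
# and TIDE) read "threat_type". A hedged sketch of the three payload shapes the
# code accepts; the concrete query strings are illustrative only.
import json

dns_event_search = json.dumps({
    "source": "dnsEventData",
    "query": "t0=1609459200&t1=1609545600"       # appended after '?' on dns_event
})

dossier_search = json.dumps({
    "source": "dossierData",
    "threat_type": "host",                       # becomes part of the URL path
    "query": "value=example.com"
})

tide_search = json.dumps({
    "source": "tideDbData",
    "threat_type": "host",                       # sent as the 'type' query parameter
    "query": "host=example.com"
})

# client.get_search_results(dns_event_search, 0, 100)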
class APIClient():
    AUTH_ENDPOINT = "rqt-api/1/authenticate"
    EVENT_ENDPOINT = "rqt-api/1/events/hunt"

    def __init__(self, connection, configuration):
        self.headers = dict()
        self.headers['Accept'] = 'application/json'
        url_modifier_function = None
        auth = configuration.get('auth')
        self.host = connection.get('host')
        self.client = RestApiClient(
            connection.get('host'),
            None,
            self.headers,
            url_modifier_function=url_modifier_function,
            cert_verify=connection.get('selfSignedCert', True),
            sni=connection.get('sni', None))
        self.timeout = connection['options'].get('timeout')
        self.app_id = auth['app_id']
        self.secret_key = auth['secret_key']
        self.token = None
        self.token_expiresat = None

    def ping_data_source(self):
        # Pings the data source
        return self.get_token()

    def get_search_results(self, search_id, length):
        # Return the search results. Results must be in JSON format before being translated into STIX
        params = dict()
        params['count'] = length
        token_response = self.get_token()
        response_code = token_response['code']
        if response_code == 200:
            self.headers['Authorization'] = 'Bearer {}'.format(
                token_response['token'])
        else:
            raise Exception(token_response)

        body_data = {'query': search_id}

        return self.client.call_api(self.EVENT_ENDPOINT,
                                    'POST',
                                    urldata=params,
                                    headers=self.headers,
                                    data=body_data)

    def page_search(self, search_id, next_page_url, length):
        params = dict()
        params['count'] = length
        # Refresh the bearer token if it has expired
        if self.token_expired():
            token_response = self.get_token()
            response_code = token_response['code']
            if response_code == 200:
                self.headers['Authorization'] = 'Bearer {}'.format(
                    token_response['token'])

        body_data = {'query': search_id}
        page = next_page_url.split('?', maxsplit=1)[1]
        next_page_endpoint = self.EVENT_ENDPOINT + '?' + page

        return self.client.call_api(next_page_endpoint,
                                    'POST',
                                    headers=self.headers,
                                    data=body_data)

    def get_token(self):
        auth_data = dict()
        response_dict = dict()
        auth_data['id'] = self.app_id
        auth_data['secret'] = self.secret_key

        try:
            response = self.client.call_api(self.AUTH_ENDPOINT,
                                            'POST',
                                            headers=self.headers,
                                            data=auth_data)

            response_dict['code'] = response.code
            response_text = json.loads(response.read())
            if response.code == 200:
                response_dict['token'] = response_text['token']
                self.token_expiresat = response_text['expiresAt']
            else:
                response_dict['message'] = ('Authentication Error: Token Generation Failed. '
                                            + response_text['message'])
        except Exception as ex:
            if ex.__class__.__name__ == 'ConnectionError':
                raise ConnectionError('Token Generation Failed: ' + str(ex))
            else:
                raise ex

        return response_dict

    def token_expired(self) -> bool:
        """Check if the token is expired.
        :return: True if token is expired, False if not expired
        :rtype: bool
        """
        expires_at = datetime.fromtimestamp(self.token_expiresat)
        return datetime.now() >= expires_at
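# A small standalone check mirroring the intended token_expired() semantics
# above: the token counts as expired once the stored 'expiresAt' epoch
# timestamp is no longer in the future. The sample timestamp is hypothetical.
from datetime import datetime, timedelta


def is_token_expired(expires_at_epoch: float) -> bool:
    return datetime.fromtimestamp(expires_at_epoch) <= datetime.now()


# A token that expired one hour ago:
# is_token_expired((datetime.now() - timedelta(hours=1)).timestamp())  # -> True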
class APIClient:
    INCIDENTS_IDS_ENDPOINT = 'detects/queries/detects/v1'
    INCIDENTS_INFO_ENDPOINT = 'detects/entities/summaries/GET/v1'
    TOKEN_ENDPOINT = 'https://api.crowdstrike.com/oauth2/token'
    """API Client to handle all calls."""

    def __init__(self, connection, configuration):
        """Initialization.
        :param connection: dict, connection dict
        :param configuration: dict,config dict"""

        headers = dict()
        url_modifier_function = None
        auth = configuration.get('auth')
        # self.endpoint_start = 'incidents/'
        self.host = connection.get('host')
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port', None),
                                    headers,
                                    url_modifier_function=url_modifier_function,
                                    cert_verify=connection.get('selfSignedCert', True),
                                    sni=connection.get('sni', None)
                                    )
        self.timeout = connection['options'].get('timeout')
        self._client_id = auth['client_id']
        self._client_secret = auth['client_secret']
        self._token = None
        self._token_time = None

    def get_detections_IDs(self, filter, limit, sort=None):
        """get the response from MSatp endpoints
        :param filter: filter incidents by certain value
        :param sort: sort incidents according to sort value
        :return: response, json object"""
        headers = dict()
        data = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        headers['Authorization'] = f'Bearer {self.get_token()}'
        endpoint = self.INCIDENTS_IDS_ENDPOINT
        data['filter'] = filter
        data['limit'] = limit
        if sort:
            data['sort'] = sort
        return self.client.call_api(endpoint, 'GET', headers=headers, urldata=data, timeout=self.timeout)

    def ping_box(self):
        # Sends a GET request
        headers = dict()
        headers['Authorization'] = f'Bearer {self.get_token()}'
        endpoint = 'detects/queries/detects/v1'  # Test if system alive
        return self.client.call_api(endpoint, 'GET', headers=headers, timeout=self.timeout)

    def get_detections_info(self, ids):
        """get the response from crowdstrike endpoints
        :param ids: Provide one or more incident IDs
        :return: response, json object"""
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        headers['Authorization'] = f'Bearer {self.get_token()}'
        endpoint = self.INCIDENTS_INFO_ENDPOINT
        ids_expression = json.dumps({'ids': ids}).encode("utf-8")
        return self.client.call_api(endpoint, 'POST', headers=headers, data=ids_expression, timeout=self.timeout)

    def get_token(self) -> str:
        """Request a new OAuth2 token.
        :return: OAuth2 access token
        :rtype: str
        """
        if self.token_expired():
            resp = requests.request(
                'POST',
                self.TOKEN_ENDPOINT,
                headers={
                    'accept': 'application/json',
                    'Content-Type': 'application/x-www-form-urlencoded'
                },
                data=(
                    f'client_id={self._client_id}'
                    f'&client_secret={self._client_secret}'
                )
            )
            token = resp.json().get('access_token')
            self._token = token
            self._token_time = datetime.now()
        return self._token

    def token_expired(self) -> bool:
        """Check if the OAuth2 token is expired.
        :return: True if token is expired, False if not expired
        :rtype: bool
        """
        expired = True
        if self._token:
            expired = (datetime.now() - self._token_time) >= timedelta(minutes=30)
        return expired
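# A hedged usage sketch of the two-step detections flow above: fetch detection
# IDs with an FQL filter, then resolve them to full summaries. The
# connection/configuration dicts, the FQL filter and the 'resources' field of
# the responses are assumptions based on the CrowdStrike Falcon API.
import json


def fetch_detections(connection, configuration):
    client = APIClient(connection, configuration)

    # 1. detects/queries/detects/v1 returns a list of detection IDs
    ids_resp = json.loads(
        client.get_detections_IDs("status:'new'", limit=100).read())
    detection_ids = ids_resp.get('resources', [])
    if not detection_ids:
        return []

    # 2. detects/entities/summaries/GET/v1 resolves the IDs to detail records
    details = json.loads(client.get_detections_info(detection_ids).read())
    return details.get('resources', [])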
Exemple #25
class APIClient:
    LOGIN_ENDPOINT = 'login.html'
    QUERY_ENDPOINT = "rest/visualsearch/query/simple"
    LOGOFF_ENDPOINT = 'logout'

    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)
        headers = {'Content-Type': 'application/json'}
        url_modifier_function = None
        self.auth = configuration.get('auth')
        self.client = RestApiClient(
            connection.get('host'),
            connection.get('port', None),
            headers,
            url_modifier_function=url_modifier_function)
        self.timeout = connection['options'].get('timeout')

    def ping_box(self):
        """
        Ping the Data Source
        :return: Response object
        """
        headers = {}

        query = '{"queryPath": [{"requestedType": "Connection", "filters": [], "isResult": true}],' \
                '"totalResultLimit": 1, "perGroupLimit": 1,"templateContext": "SPECIFIC"}'
        headers['Cookie'] = self.session_log_in()
        return self.client.call_api(self.QUERY_ENDPOINT,
                                    'POST',
                                    headers=headers,
                                    data=query)

    def get_search_results(self, query):
        """
        Get results from Data Source
        :param query: Data Source Query
        :return: Response Object
        """
        headers = {'Cookie': self.session_log_in()}
        self.logger.debug("query: %s", query)
        return self.client.call_api(self.QUERY_ENDPOINT,
                                    'POST',
                                    headers=headers,
                                    data=query)

    def session_log_in(self):
        """
        Create a login session and return the cookie id
        :return: str, cookie id
        """
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        response_wrapper = self.client.call_api(self.LOGIN_ENDPOINT,
                                                'POST',
                                                headers=headers,
                                                data=self.auth)
        cookie_id = response_wrapper.response.request.headers
        return cookie_id['Cookie']

    def session_log_out(self, response_wrapper):
        """
        Logging out of Session
        :return: response object
        """
        headers = {}
        cookie_dict = response_wrapper.response.request.headers
        cookie_id = cookie_dict["Cookie"]
        headers["Cookie"] = cookie_id
        return self.client.call_api(self.LOGOFF_ENDPOINT,
                                    'GET',
                                    headers=headers)
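# A hedged usage sketch of the cookie-based session flow above: the request
# body is the same "visual search" JSON that ping_box() sends, the session
# cookie is obtained inside get_search_results() via session_log_in(), and the
# session is closed afterwards. Connection/configuration values and the query
# limits are hypothetical placeholders.
import json


def fetch_connections(connection, configuration):
    client = APIClient(connection, configuration)

    query = json.dumps({
        "queryPath": [{"requestedType": "Connection",
                       "filters": [],
                       "isResult": True}],
        "totalResultLimit": 1000,
        "perGroupLimit": 100,
        "templateContext": "SPECIFIC"
    })

    response = client.get_search_results(query)
    results = json.loads(response.read())
    client.session_log_out(response)   # invalidate the session cookie
    return results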
class APIClient():
    # API METHODS

    # These methods are used to call Ariel's API methods through http requests.
    # Each method makes use of the http methods below to perform the requests.

    # This class will encode any data or query parameters which will then be
    # sent to the call_api() method of the RestApiClient
    PING_TIMEOUT_IN_SECONDS = 10

    def __init__(self, connection, configuration):
        # This version of the ariel APIClient is designed to function with
        # version 6.0 of the ariel API.

        self.endpoint_start = 'api/ariel/'
        self.urldata = {}
        headers = dict()
        host_port = connection.get('host') + ':' + \
            str(connection.get('port', ''))
        headers['version'] = '8.0'
        headers['accept'] = 'application/json'
        auth = configuration.get('auth')
        if auth is not None and auth.get('SEC', None) is not None:
            headers['sec'] = auth.get('SEC')
        url_modifier_function = None
        proxy = connection.get('proxy')
        if proxy is not None:
            proxy_url = proxy.get('url')
            proxy_auth = proxy.get('auth')
            if (proxy_url is not None and proxy_auth is not None):
                headers['proxy'] = proxy_url
                headers['proxy-authorization'] = 'Basic ' + proxy_auth
            if proxy.get('x_forward_proxy', None) is not None:
                headers['x-forward-url'] = 'https://' + \
                    host_port + '/'  # + endpoint, is set by 'add_endpoint_to_url_header'
                host_port = proxy.get('x_forward_proxy')
                if proxy.get('x_forward_proxy_auth', None) is not None:
                    headers['x-forward-auth'] = proxy.get(
                        'x_forward_proxy_auth')
                headers['user-agent'] = 'UDS'
                url_modifier_function = self.add_endpoint_to_url_header

        self.data_lake = connection.get('data_lake')
        if self.data_lake:
            print('QRadar Cloud Data Lake enabled')

        self.client = RestApiClient(
            host_port,
            None,
            connection.get('cert', None),
            headers,
            url_modifier_function,
            cert_verify=connection.get('selfSignedCert', True),
            mutual_auth=connection.get('use_securegateway', False),
            sni=connection.get('sni', None))

    def add_endpoint_to_url_header(self, url, endpoint, headers):
        # this function is called from 'call_api' with proxy forwarding,
        # it concatenates the endpoint to the header containing the url.
        headers['x-forward-url'] += endpoint
        # url is returned since it points to the proxy for initial call
        return url

    def ping_box(self):
        # Sends a GET request
        # to https://<server_ip>/api/help/resources
        endpoint = 'api/help/resources'  # no 'ariel' in the path
        return self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.PING_TIMEOUT_IN_SECONDS)

    def get_databases(self):
        # Sends a GET request
        # to  https://<server_ip>/api/ariel/databases
        endpoint = self.endpoint_start + 'databases'
        return self.client.call_api(endpoint, 'GET')

    def get_database(self, database_name):
        # Sends a GET request
        # to https://<server_ip>/api/ariel/databases/<database_name>
        endpoint = self.endpoint_start + 'databases' + '/' + database_name
        return self.client.call_api(endpoint, 'GET')

    def get_searches(self):
        # Sends a GET request
        # to https://<server_ip>/api/ariel/searches
        endpoint = self.endpoint_start + "searches"

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint, 'GET', urldata=self.urldata)

    def create_search(self, query_expression):
        # Sends a POST request
        # to https://<server_ip>/api/ariel/searches
        endpoint = self.endpoint_start + "searches"
        data = {'query_expression': query_expression}

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint,
                                    'POST',
                                    data=data,
                                    urldata=self.urldata)

    def get_search(self, search_id):
        # Sends a GET request to
        # https://<server_ip>/api/ariel/searches/<search_id>
        endpoint = self.endpoint_start + "searches/" + search_id

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint, 'GET', urldata=self.urldata)

    def get_search_results(self,
                           search_id,
                           response_type,
                           range_start=None,
                           range_end=None):
        # Sends a GET request to
        # https://<server_ip>/api/ariel/searches/<search_id>
        # response object body should contain information pertaining to search.
        headers = dict()
        headers['Accept'] = response_type
        if ((range_start is not None) and (range_end is not None)):
            headers['Range'] = ('items=' + str(range_start) + '-' +
                                str(range_end))
        endpoint = self.endpoint_start + "searches/" + search_id + '/results'

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint,
                                    'GET',
                                    headers,
                                    urldata=self.urldata)

    def update_search(self, search_id, save_results=None, status=None):
        # Sends a POST request to
        # https://<server_ip>/api/ariel/searches/<search_id>
        # posts search result to site
        endpoint = self.endpoint_start + "searches/" + search_id
        data = {}
        if save_results:
            data['save_results'] = save_results
        if status:
            data['status'] = status

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint,
                                    'POST',
                                    data=data,
                                    urldata=self.urldata)

    def delete_search(self, search_id):
        # Sends a DELETE request to
        # https://<server_ip>/api/ariel/searches/<search_id>
        # deletes search created earlier.
        endpoint = self.endpoint_start + "searches" + '/' + search_id

        # Send requests to QRadar Cloud Data Lake
        if self.data_lake:
            self.urldata.update({'data_lake': '"qcdl"'})

        return self.client.call_api(endpoint, 'DELETE', urldata=self.urldata)
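# A hedged usage sketch of the Ariel search lifecycle above: POST an AQL query
# to api/ariel/searches, poll the search record until it completes, read a
# slice of results (the Range header is set inside get_search_results()), and
# delete the search. The connection/configuration dicts and the 'search_id' /
# 'status' fields are assumptions based on the QRadar Ariel API.
import json
import time


def run_aql(connection, configuration, aql):
    client = APIClient(connection, configuration)

    search_id = json.loads(client.create_search(aql).read())['search_id']

    while True:
        state = json.loads(client.get_search(search_id).read())
        if state['status'] == 'COMPLETED':
            break
        time.sleep(5)

    results = json.loads(
        client.get_search_results(search_id, 'application/json', 0, 99).read())
    client.delete_search(search_id)
    return results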
class GuardApiClient(RestApiClient):
    def __init__(self, params, host, port, headers, url_modifier_function,
                 cert_verify, sni, auth):
        self.client = RestApiClient(host, port, headers, url_modifier_function,
                                    cert_verify, sni, auth)
        self.logger = logger.set_logger(__name__)
        self.url = params["url"]
        self.secret = params["client_secret"]
        self.user = params["config_uname"]
        self.password = params["config_pass"]
        self.client_id = params["client_id"]
        self.token_target = 'oauth/token'
        self.report_target = 'restAPI/online_report'
        self.qs_target = 'restAPI/quick_search'
        self.fields_target = 'restAPI/fieldsTitles'
        self.fields = {}
        self.get_token()

        # -------------------------------------------------------------------------------
        # REPORT parameters
        # -------------------------------------------------------------------------------
        # TBD dates
        # self.set_call_dates()
        # self.QUERY_FROM_DATE = "Now -60 DAY"
        # self.QUERY_TO_DATE = "Now"
        # -------------------------------------------------------------------------------
        # QS parameters
        # -------------------------------------------------------------------------------
        # TBD dates
        # self.qs_startTime = "20200616 10:00:00"
        # self.qs_endTime = "20200616 21:00:00"

    def set_call_dates(self):
        # Look for the last_run file - if the file exists, read the last run date from it.
        # If the file does not exist, or the date is older than x days, set the from date to now -1 day;
        # otherwise set the from date to the last run date.
        self.now = datetime.datetime.now()
        self.qs_endTime = self.now.strftime("%Y-%m-%d %H:%M:%S")
        self.QUERY_TO_DATE = self.now.strftime("%Y-%m-%d %H:%M:%S")
        from_file = None
        try:
            file = open("./last_run", "r")
            try:
                text = file.read()
                from_file = json.loads(text)
            finally:
                file.close()
        except:
            pass

        if from_file and self.url in from_file:
            period_start = datetime.datetime.strptime(from_file[self.url],
                                                      "%Y/%m/%d %H:%M:%S")
            if self.now - period_start > datetime.timedelta(days=2):
                period_start = self.now - datetime.timedelta(days=1)
        else:
            period_start = self.now - datetime.timedelta(days=1)
        self.from_file = from_file
        self.QUERY_FROM_DATE = period_start.strftime("%Y-%m-%d %H:%M:%S")
        self.qs_startTime = period_start.strftime("%Y-%m-%d %H:%M:%S")

    def save_last_run_date(self):
        try:
            if self.from_file:
                output = self.from_file
                output[self.url] = self.now.strftime("%Y/%m/%d %H:%M:%S")
            else:
                output = {self.url: self.now.strftime("%Y/%m/%d %H:%M:%S")}
            file = open("./last_run", "w")
            file.write(json.dumps(output))
        finally:
            file.close()

    def get_token(self):
        # -------------------------------------------------------------------------------
        # Authentication
        # -------------------------------------------------------------------------------
        # comment in and out all prints
        # print("client_id="+self.client_id)
        # print("secret="+self.secret)
        # print("user="******"password="******"token ", True):
        self.access_token = json.loads(response.read())['access_token']
        # print("token="+ self.access_token)
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer {0}'.format(self.access_token)
        }

    def request_token(self):
        self.token_data = 'client_id={0}&grant_type=password&client_secret={1}&username={2}&password={3}'.format(
            self.client_id, self.secret, self.user, self.password)
        return self.client.call_api(self.token_target,
                                    'POST',
                                    urldata=self.token_data)

    def validate_response(self, p_response, prefix, abort=False):
        if p_response.code != 200:
            if abort:
                raise Exception(prefix + "request faild " +
                                str(p_response.code) + "-" + p_response.read())
            return False
        return True

    def handle_report(self, params, index_from, fetch_size):
        # -------------------------------------------------------------------------------
        # REPORT
        # -------------------------------------------------------------------------------
        results = ""
        # context().logger.debug('-------------------  ' + report_name + ' ----------------------')
        params["fetchSize"] = int(fetch_size)

        params["indexFrom"] = int(index_from)
        params["inputTZ"] = "UTC"

        rest_data = json.dumps(params)

        response = self.client.call_api(self.report_target,
                                        'POST',
                                        headers=self.headers,
                                        data=rest_data)

        results = response.read()
        if not isinstance(results, list):
            try:
                # errorCode = results["ErrorCode"]
                errorCode = response.code
                # For compatibility with Guardium -
                # inputTZ parameter was added after v11.3
                # so in case it does not exist execute the query without it
                if errorCode == "27":
                    params.pop("inputTZ")
                    rest_data = json.dumps(params)
                    self.logger.warn(
                        "InputTZ not suppoerted - running query without it")
                    response = self.client.call_api(self.report_target,
                                                    'POST',
                                                    headers=self.headers,
                                                    data=rest_data)
            except:
                pass
        return response

    def handle_qs(self, params, index_from, fetch_size):
        # -------------------------------------------------------------------------------
        # QS
        # -------------------------------------------------------------------------------
        if not self.fields:
            self.get_field_titles()

        results = ""
        params["fetchSize"] = int(fetch_size - 1)
        params["firstPosition"] = int(index_from - 1)
        params["inputTZ"] = "UTC"

        rest_data = json.dumps(params)
        response = self.client.call_api(self.qs_target,
                                        'POST',
                                        data=rest_data,
                                        headers=self.headers)
        results = response.read()

        if not isinstance(results, list):
            try:
                errorCode = response.code
                # For compatibility with Guardium -
                # inputTZ parameter was added after v11.3
                # so in case it does not exist execute the query without it
                if errorCode == "27":
                    params.pop("inputTZ")
                    rest_data = json.dumps(params)
                    self.logger.warn(
                        "InputTZ not suppoerted - running query without it")
                    response = self.client.call_api(self.qs_target,
                                                    'POST',
                                                    data=rest_data,
                                                    headers=self.headers)
            except:
                pass
        response.content = self.translate_response(json.loads(self.fields),
                                                   json.loads(response.read()))
        return response

    def get_field_titles(self):
        # get QS field titles from Guardium
        response = self.client.call_api(self.fields_target,
                                        'GET',
                                        headers=self.headers)
        # response = requests.get(self.url + self.fields_target, headers=self.headers, verify=False)
        try:
            msg = json.loads(response.read())["Message"]
        except Exception as e:
            self.fields = json.dumps(json.loads(response.read())[0])
            return
        self.fields = msg

    def translate_response(self, fields, results):
        # translate fields from numeric tags to field titles
        # set to lower case, replace white spaces with _
        res = []
        for result in results:
            num_rows = result["numRows"]
            count = result["count"]
            category = result["searchArgs"]["category"]
            # print("total num rows " + str(num_rows) + " count " + str(count))
            if num_rows > 0:
                res = []
                items = result["items"]
                # print(items)
                i = 0
                for item in items:
                    res_item = {}
                    for key, value in fields.items():
                        try:
                            val = key.split(";")
                            if item.get(val[0]) is None:
                                continue
                            if len(val) > 1:
                                item_value = ""
                                for val1 in val:
                                    item_value = item_value + str(
                                        item[val1]) + " "
                                item_value = item_value.rstrip()
                            else:
                                item_value = item[key]

                            value = value.lower().replace(" ", "_")
                            if value == "date_time":
                                value = "timestamp"
                            res_item[value] = item_value
                            # print(str(value)+ '->'+str(res_item[value]))
                        except Exception as e:
                            print("ERROR: Category: " + category + " key: " +
                                  key + " value: " + value)
                            print(e)
                    res.append(res_item)

            return json.dumps(res)
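# A hedged sketch of the request body that handle_report() above serializes and
# POSTs to restAPI/online_report: the caller supplies the report name and its
# runtime parameters, while fetchSize, indexFrom and inputTZ are added by
# handle_report() itself. The report and parameter names are illustrative only,
# and guard_client is a hypothetical GuardApiClient instance.
report_params = {
    "reportName": "Sessions",
    "reportParameter": {
        "QUERY_FROM_DATE": "NOW -1 DAY",
        "QUERY_TO_DATE": "NOW",
        "REMOTE_SOURCE": "%"
    }
}

# response = guard_client.handle_report(report_params, index_from=1, fetch_size=500)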
Exemple #28
class APIClient:
    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)
        self.endpoint_start = 'v2.0/xdr/'
        headers = dict()
        host_port = connection.get('host') + ':' + str(
            connection.get('port', ''))
        headers['accept'] = 'application/json'
        auth = configuration.get('auth')
        if auth is not None and auth.get('token', None) is not None:
            headers['Authorization'] = 'Bearer {}'.format(auth.get('token'))
        url_modifier_function = None
        headers['user-agent'] = _USER_AGENT

        self.timeout = connection['options'].get('timeout')

        self.client = RestApiClient(host_port,
                                    None,
                                    headers,
                                    url_modifier_function,
                                    cert_verify=connection.get(
                                        'selfSignedCert', True),
                                    sni=connection.get('sni', None))

    def ping_data_source(self):
        # Pings the data source
        endpoint = 'v2.0/siem/events'
        now = datetime.datetime.utcnow().isoformat(
            timespec="milliseconds") + "Z"
        resp = self.client.call_api(endpoint,
                                    'GET',
                                    timeout=self.timeout,
                                    urldata={
                                        "startDateTime": now,
                                        "endDateTime": now
                                    })
        return {"code": resp.code}

    def get_search_results(self, search_id, range_start=None, range_end=None):
        # Return the search results. Results must be in JSON format before being translated into STIX
        endpoint = self.endpoint_start + 'search/data'
        headers = dict()
        headers['Content-Type'] = 'application/json'
        headers['Accept'] = 'application/json'
        payload = json.loads(search_id)
        resp_dict = dict()
        all_data = list()
        resp_dict["data"] = {"logs": all_data}
        start = range_start if range_start else 0
        end = range_end if range_end else 0
        offset = start
        max_fetch_count = 10
        for i in range(0, max_fetch_count):
            code, response = self._fetch(endpoint, headers, payload, offset)
            resp_dict["code"] = code
            if code == 200:
                logs = response["data"]['logs']

                if "offset" not in response["data"]:
                    # this endpoint does not support offset-based paging
                    if len(logs) > start:
                        all_data += logs[start:range_end]
                    break
                if not logs:
                    break
                elif end and len(all_data) + len(logs) >= end - start:
                    offset += len(logs)
                    all_data += logs[0:end - start - len(all_data)]
                    break
                else:
                    offset += len(logs)
                    all_data += logs
            else:
                resp_dict["message"] = response["error"]["message"]
                del resp_dict["data"]
                break
            if i == max_fetch_count - 1:
                self.logger.warning("Reach max fetch count %s, stop loop",
                                    max_fetch_count)
        if resp_dict.get("code") == 200:
            self.logger.debug("The log count is %s",
                              len(resp_dict["data"]["logs"]))
        return resp_dict

    def _fetch(self, endpoint, headers, payload, offset):
        payload["offset"] = offset
        resp = self.client.call_api(endpoint,
                                    'POST',
                                    headers=headers,
                                    data=json.dumps(payload),
                                    timeout=self.timeout)
        payload_dict = json.loads(resp.read())
        return resp.code, payload_dict
class APIClient:
    TOKEN_ENDPOINT = 'core-service/rest/LoginService/login'
    STATUS_ENDPOINT = 'server/search/status'
    QUERY_ENDPOINT = 'server/search'
    RESULT_ENDPOINT = 'server/search/events'
    DELETE_ENDPOINT = 'server/search/close'

    def __init__(self, connection, configuration):
        self.connector = __name__.split('.')[1]
        self.auth = configuration.get('auth')
        headers = {'Accept': 'application/json'}
        self.client = RestApiClient(connection.get('host'),
                                    connection.get('port'),
                                    headers,
                                    cert_verify=connection.get('selfSignedCert', True)
                                    )

    def ping_data_source(self):
        data, headers = dict(), dict()
        data['search_session_id'] = int(round(time.time() * 1000))
        data['user_session_id'] = self.get_user_session_id()
        data['start_time'] = self.get_current_time()['start_time']
        data['end_time'] = self.get_current_time()['end_time']
        headers['Content-Type'] = 'application/json'
        headers['Accept-Charset'] = 'utf-8'
        return self.client.call_api(self.QUERY_ENDPOINT, 'POST', headers, data=json.dumps(data))

    def create_search(self, query_expression):
        return_obj = dict()
        auth = dict()
        auth['search_session_id'] = int(round(time.time() * 1000))
        auth['user_session_id'] = self.get_user_session_id()
        try:
            query = json.loads(query_expression)
            query.update(auth)
            headers = {'Content-Type': 'application/json', 'Accept-Charset': 'utf-8'}
            response = self.client.call_api(self.QUERY_ENDPOINT, 'POST', headers, data=json.dumps(query))
            raw_response = response.read()
            response_code = response.code

            if 199 < response_code < 300:
                response_dict = json.loads(raw_response)
                if response_dict.get('sessionId'):
                    return_obj['success'] = True
                    return_obj['search_id'] = str(auth['search_session_id']) + ':' + str(auth['user_session_id'])
            # arcsight logger error codes - currently unavailable state
            elif response_code in [500, 503]:
                response_string = raw_response.decode()
                ErrorResponder.fill_error(return_obj, response_string, ['message'], connector=self.connector)
            elif isinstance(json.loads(raw_response), dict):
                response_error = json.loads(raw_response)
                response_dict = response_error['errors'][0]
                ErrorResponder.fill_error(return_obj, response_dict, ['message'], connector=self.connector)
            else:
                raise Exception(raw_response)

            return return_obj
        except Exception as err:
            raise err

    def get_search_status(self, search_session_id, user_session_id):
        headers, params = dict(), dict()
        params['search_session_id'] = int(search_session_id)
        params['user_session_id'] = user_session_id
        headers['Content-Type'] = 'application/json'
        headers['Accept-Charset'] = 'utf-8'
        return self.client.call_api(self.STATUS_ENDPOINT, 'POST', headers, data=json.dumps(params))

    def get_search_results(self, search_session_id, user_session_id, range_start=None, range_end=None):
        headers, params = dict(), dict()
        params['search_session_id'] = int(search_session_id)
        params['user_session_id'] = user_session_id
        params['offset'] = int(range_start)
        params['length'] = int(range_end)
        headers['Content-Type'] = 'application/json'
        headers['Accept-Charset'] = 'utf-8'
        return self.client.call_api(self.RESULT_ENDPOINT, 'POST', headers, data=json.dumps(params))

    def delete_search(self, search_session_id, user_session_id):
        headers, params = dict(), dict()
        params['search_session_id'] = int(search_session_id)
        params['user_session_id'] = user_session_id
        headers['Content-Type'] = 'application/json'
        headers['Accept-Charset'] = 'utf-8'
        return self.client.call_api(self.DELETE_ENDPOINT, 'POST', headers, data=json.dumps(params))

    def get_user_session_id(self):
        try:
            response = self.client.call_api(self.TOKEN_ENDPOINT, 'POST', data=self.auth)
            if response.code == 200:
                response_text = json.loads(response.read())
                token = response_text['log.loginResponse']['log.return']
            elif response.read().decode("utf-8") == '':
                return_dict = 'Request error or authentication failure.'
                raise Exception(return_dict)
            else:
                raise Exception(response)

            return token
        except Exception as err:
            raise err

    @staticmethod
    def get_current_time():
        ping_time = dict()
        end_time = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        start_time = (datetime.utcnow() - timedelta(minutes=5)).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] + 'Z'
        ping_time['start_time'] = start_time
        ping_time['end_time'] = end_time
        return ping_time
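# A hedged usage sketch of the ArcSight Logger flow above: create_search()
# returns a composite "search_session_id:user_session_id" string that is split
# apart again to poll status, page through events, and close the search. The
# connection/configuration dicts and the query body fields are hypothetical.
import json


def run_logger_search(connection, configuration):
    client = APIClient(connection, configuration)

    query = json.dumps({"query": "deviceVendor = \"ArcSight\"",
                        "start_time": "2021-01-01T00:00:00.000Z",
                        "end_time": "2021-01-02T00:00:00.000Z"})
    created = client.create_search(query)
    search_session_id, user_session_id = created['search_id'].split(':')

    client.get_search_status(search_session_id, user_session_id)
    events = client.get_search_results(search_session_id, user_session_id, 0, 100)
    client.delete_search(search_session_id, user_session_id)
    return json.loads(events.read())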
class APIClient:

    endpoint_start = '/v1.0/events'
    token_endpoint = '/v1.0/endpoint/default/token'

    def __init__(self, connection, configuration):
        self.logger = logger.set_logger(__name__)

        headers = dict()
        url_modifier_function = None
        auth = configuration.get('auth')
        # self.endpoint_start = 'incidents/'
        self.host = connection.get('host')
        self.client = RestApiClient(
            connection.get('host'),
            connection.get('port', None),
            headers,
            url_modifier_function=url_modifier_function,
            cert_verify=connection.get('selfSignedCert', False),
            sni=connection.get('sni', None))
        self.timeout = connection['options'].get('timeout')
        self._client_id = auth['clientId']
        self._client_secret = auth['clientSecret']
        self._token = None
        self._token_time = None

    def get_token(self):
        """get the token and if expired re-generate and store in token variable"""
        tokenResponse = self.generate_token()
        return tokenResponse.json().get('access_token')

    def generate_token(self):
        """To generate the Token"""
        if self.token_expired():
            resp = requests.request(
                'POST',
                'https://' + self.host + self.token_endpoint,
                headers={
                    'accept': 'application/json',
                    'Content-Type': 'application/x-www-form-urlencoded'
                },
                data=(f'client_id={self._client_id}'
                      f'&client_secret={self._client_secret}'
                      f'&grant_type=client_credentials'
                      f'&scope=openid'),
                timeout=self.timeout)
            token = resp.json().get('access_token')
            self._token = token
            self._token_time = datetime.now()
            self.resp = resp
        return self.resp

    def token_expired(self) -> bool:
        """Check if the verify token is expired.
        :return: True if token is expired, False if not expired
        :return type: bool
        """
        expired = True
        if self._token:
            expired = (datetime.now() -
                       self._token_time) >= timedelta(minutes=30)
        return expired
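    # Note on caching (assumed 30-minute token lifetime; adjust if the identity
    # provider issues shorter-lived tokens): the first get_token() call POSTs to
    # token_endpoint and stores the response, and calls made within 30 minutes
    # reuse that cached response instead of requesting a new access token.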

    def run_search(self, query_expr, range_end=None):
        """Get the response from the verify events endpoint.
        :param query_expr: dict, filter parameters
        :param range_end: int, length value
        :return: response, json object"""
        events = self.get_events(query_expr)
        return self.response_handler(events, query_expr)

    def get_events(self, query_expr):
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer {0}'.format(self.get_token())
        }
        return self.client.call_api(self.endpoint_start,
                                    'GET',
                                    self.headers,
                                    urldata=query_expr)

    def response_handler(self, data=None, query_expr=None):
        if data is None:
            data = []
        response = dict()
        response['data'] = json.loads(data.read())
        response['error'] = data.code
        response['code'] = data.code
        response['error_msg'] = data.response.reason
        response['success'] = data.code
        if response['code'] == 200:

            response['search_after'] = response.get(
                "data")['response']['events']['search_after']

            try:
                response['event_data'] = self.parseJson(
                    response.get("data")['response']['events']['events'])
            except KeyError:
                self.logger.debug('events data not found in response object: %s',
                                  response)
                response['event_data'] = []

        elif response['error'] == 500 and "true" in response['error_msg']:
            response.update({"code": 200, "data": []})
        else:
            response["message"] = data.response.reason

        return response

    def parseJson(self, response):
        '''
        Iterate through the response and read the nested data like geoip and data object.
        '''
        jsonObj = response
        finalJson = dict()
        parsedJson = []
        for obj in jsonObj:
            dictC = obj
            if "geoip" in dictC:
                dictA = json.loads(json.dumps(obj["geoip"]))
                del dictC["geoip"]
                dictB = json.loads(json.dumps(obj["data"]))
                del dictC["data"]
                dict_geo_location = json.loads(
                    json.dumps(dictA.get('location')))
                del dictA['location']
                finalJson = {**dictA, **dictB, **dict_geo_location}
            else:
                dictB = json.loads(json.dumps(obj["data"]))
                del dictC["data"]
                finalJson = dictB
            remainingJson = json.loads(json.dumps(dictC))
            finalJson = {**finalJson, **remainingJson}
            parsedJson.append(finalJson)
        return parsedJson
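    # Illustration (hypothetical event, not taken from the API docs) of the
    # flattening performed above:
    #   {"geoip": {"country": "US", "location": {"lat": 1.0, "lon": 2.0}},
    #    "data": {"action": "login"}, "id": "abc"}
    # becomes
    #   {"country": "US", "lat": 1.0, "lon": 2.0, "action": "login", "id": "abc"}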

    def key_exist(self, data_element, *keysarr):
        '''
        Check if *keysarr (nested) exists in `data_element` (dict).
        '''
        if not isinstance(data_element, dict):
            raise AttributeError(
                'key_exist() expects dict as first argument.')
        if len(keysarr) == 0:
            raise AttributeError(
                'key_exist() expects at least two arguments, one given.')

        _element = data_element
        for key in list(keysarr):
            try:
                _element = _element[key]
            except KeyError:
                self.logger.debug('key not found: %s', key)
                return False
        return True

    def get_search_results(self,
                           search_id,
                           response_type,
                           range_start=None,
                           range_end=None):
        # Sends a GET request to
        # https://<server_ip>/<search_id>
        # The response body should contain information pertaining to the search.
        # e.g. https://isrras.ice.ibmcloud.com/v1.0/events?event_type="sso"&size=10&after="1640104162523","eeb40fd5-6b84-4dc9-9251-3f7a4cfd91c0"
        headers = dict()
        headers['Accept'] = response_type
        size = 1000
        if range_start is not None and range_end is not None:
            size = range_end - range_start

        request_param = search_id + "&size=" + str(size)
        endpoint = self.endpoint_start + request_param

        return self.run_search(search_id)

    def get_search(self, search_id):
        # Synchronous behaviour: re-run the search for the given search_id
        # rather than fetching a stored search resource.
        response = self.run_search(search_id)
        return response

    def delete_search(self, search_id):
        return self.run_search(search_id)
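

# A minimal usage sketch (assumptions: hypothetical host and OAuth client
# credentials, and illustrative filter parameters; the dict keys mirror the
# connection/configuration values consumed by __init__ above).
if __name__ == '__main__':
    connection = {
        'host': 'verify.example.com',
        'port': 443,
        'selfSignedCert': False,
        'options': {'timeout': 30}
    }
    configuration = {
        'auth': {'clientId': '<client-id>', 'clientSecret': '<client-secret>'}
    }
    client = APIClient(connection, configuration)
    # Query the /v1.0/events endpoint; query_expr is passed through as URL data.
    result = client.run_search({'event_type': 'sso', 'size': 10})
    print(result.get('code'), len(result.get('event_data', [])))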