def intelix_del_info(deletion_data, tenant_url_data):
    logging.info("Generate outcome of local site deletion")
    del_report = dict()
    for tenant_id in tenant_url_data:
        # Reset the counters per tenant so totals do not bleed across tenants
        deleted = 0
        failed = 0
        unknown = 0
        for site_id, site_value in deletion_data.items():
            if site_value['tenantId'] == tenant_id:
                del_status = site_value['delStatus']
                if del_status == "True":
                    deleted += 1
                elif del_status == "False":
                    failed += 1
                else:
                    unknown += 1
        del_report[tenant_id] = {"Deleted": deleted, "Failed": failed, "Unknown": unknown}

    date = datetime.now().strftime('%Y%m%d_%H%M%S')
    del_filename = "{0}_deletion_details.json".format(date)
    del_report_filename = "{0}_deletion_report.json".format(date)
    api_output.process_output_json(deletion_data, filename=del_filename, api="intelix_del")
    api_output.process_output_json(del_report, filename=del_report_filename, api="intelix_del")
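
# Example call (hypothetical data shapes, inferred from the key accesses above):
#   deletion_data   = {"site-1": {"tenantId": "ten-a", "delStatus": "True"}}
#   tenant_url_data = {"ten-a": {...}}
#   intelix_del_info(deletion_data, tenant_url_data)
# writes <timestamp>_deletion_details.json and <timestamp>_deletion_report.json
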
def site_comparison(intelix_dict, site_dict, tenant_id):
    # add details to the site dictionary from intelix output
    logging.info("Combine Intelix and Local-Sites information")
    combined_dict = dict()
    for site_key, site_val in site_dict.items():
        # Look the site's url up directly instead of scanning every Intelix entry
        intx_val = intelix_dict.get(site_val['url'])
        if intx_val is None:
            continue
        if intx_val['lookup_type'] == 'url':
            # Each entry is a (local-site, intelix) pair, consumed positionally
            # by intelix_report_info() below
            combined_dict[site_key] = (
                {"local-site": site_val},
                {"intelix": {"intelixCategory": intx_val.get('productivityCategory', 'null'),
                             "intelixRisk": intx_val.get('riskLevel', 'null'),
                             "intelixSecurity": intx_val.get('securityCategory', 'null')}}
            )
        elif intx_val['lookup_type'] == 'ip':
            combined_dict[site_key] = (
                {"local-site": site_val},
                {"intelix": {"intelixCategory": intx_val.get('ipCategory', 'null'),
                             "intelixRisk": intx_val.get('riskLevel', 'null')}}
            )

    date = datetime.now().strftime('%Y%m%d_%H%M%S')
    filename = "{0}_results_combined.json".format(date)
    logging.info("Saving JSON of Intelix and local-site results combined")
    api_output.process_output_json(combined_dict, filename=filename, api="intelix")
    return combined_dict
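
# Example shape of one combined_dict entry (illustrative values; the tuple
# layout is consumed positionally by intelix_report_info() below):
#   combined_dict["site-1"] == (
#       {"local-site": {"url": "example.com", "tenantId": "ten-a"}},
#       {"intelix": {"intelixCategory": "...", "intelixRisk": "LOW",
#                    "intelixSecurity": "..."}},
#   )
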
def local_site_check(intelix_client_id, intelix_client_secret, request_data,
                     tenant_id):
    logging.info("Checking local-sites against SophosLabs Intelix API")
    # dedup the urls from the data so we don't get site data twice
    output_data = dedup_url(request_data)

    # initiate authentication with intelix api
    logging.info("Authenticating with Intelix API")
    intx = intelix.client(intelix_client_id, intelix_client_secret)

    # iterate through the dictionary of urls and output
    intelix_dict = dict()

    logging.info("Pass URL data to Intelix for evaluation")
    for url_id, url_value in output_data.items():
        # check if the url_value is an ip address
        try:
            ip_address(url_value)
            ip_val = True
        except ValueError:
            ip_val = False

        if ip_val:
            # send to ip_lookup
            intx.ip_lookup(url_value)
            ip_cat = intx.category
            max_risk = get_ip_category_risk(ip_cat)
            intx_data = {
                "lookup_type": "ip",
                "requestId": intx.requestId,
                "ipCategory": intx.category,
                "riskLevel": max_risk
            }
        else:
            # send to url_lookup
            intx.url_lookup(url_value)
            intx_data = {
                "lookup_type": "url",
                "requestId": intx.requestId,
                "productivityCategory": intx.productivityCategory,
                "securityCategory": intx.securityCategory,
                "riskLevel": intx.riskLevel
            }
        intelix_dict[url_value] = intx_data

    # keep record of results from intelix lookups
    date = datetime.now().strftime('%Y%m%d_%H%M%S')
    intx_filename = "{0}_intelix_results.json".format(date)
    logging.info("Saving JSON of Intelix results")
    api_output.process_output_json(intelix_dict,
                                   filename=intx_filename,
                                   api="intelix")

    # compare against site list
    compared_dict = site_comparison(intelix_dict, request_data, tenant_id)
    return compared_dict
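
# The ip-vs-url branch above relies on the stdlib ipaddress module (assumed to
# be imported at module level as `from ipaddress import ip_address`):
#   >>> ip_address("192.0.2.1")    # parses, so the value goes to ip_lookup
#   >>> ip_address("example.com")  # raises ValueError, so it goes to url_lookup
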
def intelix_report_info(intelix_results, intelix, intx_clean_level, intx_dry_run):
    # set main values
    report_dict = dict()
    high_risk = 0
    medium_risk = 0
    low_risk = 0
    trusted_risk = 0
    unclass_risk = 0
    total_risk = 0
    null_risk = 0
    tenant_id = None

    for site_val in intelix_results.values():
        if intelix == "report":
            # site_comparison() entries are (local-site, intelix) pairs
            intx_data = site_val[1]['intelix']
            ls_data = site_val[0]['local-site']
            risk = intx_data.get('intelixRisk')
            tenant_id = ls_data.get('tenantId')
        elif intx_dry_run:
            risk = site_val.get('intelixRisk')
            tenant_id = site_val.get('tenantId')
        else:
            # neither mode selected; count the entry as unclassified/null
            risk = None

        total_risk += 1
        if risk == "HIGH":
            high_risk += 1
        elif risk == "MEDIUM":
            medium_risk += 1
        elif risk == "LOW":
            low_risk += 1
        elif risk == "TRUSTED":
            trusted_risk += 1
        elif risk == "UNCLASSIFIED":
            unclass_risk += 1
        else:
            # covers both a missing risk level and any unexpected value
            null_risk += 1

    report_data = {"Totals": {"High Risk": high_risk, "Medium Risk": medium_risk,
                              "Low Risk": low_risk, "Trusted": trusted_risk,
                              "Unclassified": unclass_risk, "NULL": null_risk,
                              "Total": total_risk}}

    if intelix == "report":
        logging.info("Generating report for Intelix results")
        intx_filename = "{0}_intelix_report.json".format(tenant_id)
        report_dict.update(report_data)
        api_output.process_output_json(report_dict, filename=intx_filename, api="intelix")
        return report_dict
    elif intx_dry_run:
        date = datetime.now().strftime('%Y%m%d_%H%M%S')
        intx_filename = "{0}_{1}_{2}_dry_run_report.json".format(tenant_id, date, intx_clean_level)
        dryrun_dict = {**intelix_results, **report_data}
        api_output.process_output_json(dryrun_dict, filename=intx_filename, api="intelix")
        return dryrun_dict
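
# Example of the "Totals" block this report produces (illustrative numbers):
#   {"Totals": {"High Risk": 2, "Medium Risk": 5, "Low Risk": 10, "Trusted": 40,
#               "Unclassified": 3, "NULL": 0, "Total": 60}}
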
def main(params):
    # set params to variables
    log_level = params.log_level
    tenant = params.tenant
    search_filter = params.filter
    search_variables = params.variables
    search_type = params.search_type
    search_val = params.search_input
    misp_attr = params.misp
    api = params.api
    output = params.output

    if log_level is None:
        # logging.disable() takes a level, not a bool: CRITICAL silences everything
        logging.disable(logging.CRITICAL)
    else:
        logging.disable(logging.NOTSET)
        level = getattr(logging, log_level)
        log_fmt = '%(asctime)s: [%(levelname)s]: %(message)s'
        logging.basicConfig(level=level, format=log_fmt, datefmt='%d/%m/%Y %I:%M:%S %p')

    logging.info("Start of Logging")

    if api == "ld":
        api = "live-discover"
    elif api == "xdr":
        api = "xdr-datalake"
    else:
        logging.error("Unrecognised api value '{0}' passed, exiting".format(api))
        exit(1)

    if misp_attr:
        misp_conf_path = api_conf.misp_conf_path
        misp_final_path = api_utils.get_file_location(misp_conf_path)

        misp_conf = cp.ConfigParser(allow_no_value=True)
        misp_conf.read(misp_final_path)
        secret_name = misp_conf.get('aws', 'secret_name')
        region_name = misp_conf.get('aws', 'region_name')
        misp_tok = misp_conf.get('aws', 'api_key')
        misp_url = misp_conf.get('url', 'misp_instance')

        # Get attributes for iocs to search
        misp_type = params.misp_type
        misp_val = params.misp_val

        if not misp_type or not misp_val:
            logging.error("You must specify a MISP type and value")
            exit(1)

        if misp_tok:
            try:
                # Pull the credentials from AWS Secrets Manager and pass to initialise misp
                misp_secret = awssecret.get_secret(secret_name, region_name)
                misp_tok = misp_secret[misp_tok]
                logging.info("MISP auth token applied")
            except Exception as aws_exception:
                # Re-raise the exception from the aws secrets script
                raise aws_exception
            else:
                # Only query MISP once the token lookup succeeded; a 'finally'
                # here would run even while the exception above propagated
                attributes = sld.get_misp_attributes(misp_url, misp_type, misp_val, misp_tok, wildcard=True)
        else:
            logging.info("No AWS creds set in config. Requesting MISP API token")
            misp_tok = getpass.getpass(prompt="Provide MISP token: ", stream=None)
            if misp_tok:
                attributes = sld.get_misp_attributes(misp_url, misp_type, misp_val, misp_tok, wildcard=True)
            else:
                logging.error("No MISP token provided. Exiting")
                exit(1)

        if not attributes:
            logging.critical("No attributes found, exiting")
            exit(1)
        else:
            logging.info("MISP attributes obtained")

    # format the filter variable to remove escape characters and pass as json
    if search_filter:
        try:
            # raw string so the pattern strips literal backslashes as well as '?'
            search_filter = json.loads(re.sub(r'[\\?]', '', search_filter))
        except ValueError as err:
            logging.error("JSON malformed: {0}".format(search_filter))
            raise err
    elif search_type == "list":
        pass
    elif api == "xdr-datalake":
        pass
    else:
        logging.critical("No filter passed; a filter must be provided")
        exit(1)

    if (search_variables and misp_attr) or (misp_attr and api == "xdr-datalake"):
        if api == "live-discover":
            # Format the search date
            date_frmt = "{0}.000Z".format(search_variables[2])
        else:
            pass

        # estimated size of variables
        ioc_size = getsizeof(attributes)
        if ioc_size < 1 or ioc_size > 5000:
            logging.critical(
                "Size of IOC JSON must be in the range of 1 - 5000. Current estimated size is: {0}".format(ioc_size))
            exit(1)
        else:
            pass

        # Build JSON
        if api == "live-discover":
            variables_json = [
                {
                    "name": "Number of Hours of activity to search",
                    "dataType": "text",
                    "value": "{0}".format(search_variables[0])
                },
                {
                    "name": "IOC JSON",
                    "dataType": "text",
                    "value": attributes
                },
                {
                    "name": "Start Search From",
                    "dataType": "dateTime",
                    "value": "{0}".format(date_frmt)
                }
            ]
        elif api == "xdr-datalake":
            variables_json = [
                {
                    "name": "IOC_JSON",
                    "dataType": "text",
                    "value": attributes
                }
            ]
    elif search_variables:
        if api == "live-discover":
            # Format the search date
            date_frmt = "{0}.000Z".format(search_variables[2])

        # estimated size of variables
        ioc_size = getsizeof(search_variables)
        if ioc_size < 1 or ioc_size > 5000:
            logging.critical(
                "Size of IOC JSON must be in the range of 1 - 5000. Current estimated size is: {0}".format(ioc_size))
            exit(1)

        # Build JSON
        if api == "live-discover":
            variables_json = [
                {
                    "name": "Number of Hours of activity to search",
                    "dataType": "text",
                    "value": "{0}".format(search_variables[0])
                },
                {
                    "name": "IOC JSON",
                    "dataType": "text",
                    "value": search_variables[1]
                },
                {
                    "name": "Start Search From",
                    "dataType": "dateTime",
                    "value": "{0}".format(date_frmt)
                }
            ]
        elif api == "xdr-datalake":
            variables_json = [
                {
                    "name": "IOC_JSON",
                    "dataType": "text",
                    "value": search_variables[0]
                }
            ]
    else:
        logging.info("No variables passed")
        variables_json = None

    # Auth and get tenant information
    tenant_info = api_auth.ten_auth()

    # Generate tenant url data (page_size is expected to be a module-level setting)
    tenant_url_data = api_utils.generate_tenant_urls(tenant_info, page_size, api, from_str=None, to_str=None)

    # If a tenant was passed in the CLI arguments, narrow the url data to just that tenant
    if tenant in tenant_url_data:
        logging.info("Tenant ID passed: '{0}'".format(tenant))
        tenant_url_data = {tenant: tenant_url_data[tenant]}

    # kick off live discover search
    if search_type == "list":
        query_data = sld.live_discover(tenant_url_data, search_type, search_val, search_filter, variables_json, api)
        if output == 'json':
            api_output.process_output_json(query_data, "{0}_query_list.json".format(api), api)
        else:
            for key, value in query_data.items():
                print(json.dumps(value, indent=2))
    else:
        ld_data, ep_data, res_data = sld.live_discover(tenant_url_data, search_type, search_val, search_filter,
                                                       variables_json, api)
        # output results to temp files; get a timestamp for the filenames
        filetime = datetime.utcnow()
        filetimestamp = filetime.timestamp()

        if ep_data:
            api_output.process_output_json(ep_data, "{0}_endpoint_data_{1}.json".format(api, filetimestamp), api)
        else:
            logging.info("No EP data to output")

        api_output.process_output_json(ld_data, "{0}_search_data_{1}.json".format(api, filetimestamp), api)
        api_output.process_output_json(res_data, "{0}_result_data_{1}.json".format(api, filetimestamp), api)

    logging.info("Script complete")
    exit(0)
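
# A hypothetical smoke-test invocation; the real entry point presumably builds
# `params` with argparse, so every value below is illustrative only:
#   from types import SimpleNamespace
#   params = SimpleNamespace(log_level="INFO", tenant=None, filter=None,
#                            variables=None, search_type="list",
#                            search_input="all", misp=False, api="xdr",
#                            output="json", misp_type=None, misp_val=None)
#   main(params)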