Example no. 1
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # XXX get rid of verify=False
        self.sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'],
                                       saq.CONFIG['sip']['api_key'],
                                       verify=False)
Example no. 2
def set_sip_indicator_status(indicator_id, status):
    """Sets the given indicator to the given status. Returns True if the operation succeeded."""
    assert isinstance(indicator_id, int)
    assert isinstance(status, str)

    import pysip

    sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'], saq.CONFIG['sip']['api_key'], verify=False)
    return sip_client.put(f'indicators/{indicator_id}', data={"status" : status})
Example no. 3
def query_sip_indicator(indicator_id):
    """Queries SIP for indicator details. Returns the dictionary containing the information 
       (see the SIP documenation for dictionary schema.)"""
    assert isinstance(indicator_id, int)

    import pysip

    sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'], saq.CONFIG['sip']['api_key'], verify=False)
    return sip_client.get(f'indicators/{indicator_id}')
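
# A minimal usage sketch of the two helpers above. The indicator id 12345 and the
# 'type'/'value'/'status' keys assumed on the returned dictionary are illustrative
# assumptions, not taken from the original code.
details = query_sip_indicator(12345)
print(details['type'], details['value'], details['status'])

if set_sip_indicator_status(12345, 'Analyzed'):
    print("indicator status updated")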
Example no. 4
    def setUp(self):
        ACEModuleTestCase.setUp(self)

        if not saq.CONFIG['sip'].getboolean('enabled'):
            return

        # XXX get rid of verify=False
        self.sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'],
                                       saq.CONFIG['sip']['api_key'],
                                       verify=False)

        # insert the indicator we'll test against
        self.test_indicator = self.sip_client.post('indicators', {
            'type': 'Email - Address',
            'value': '*****@*****.**'
        })
        self.test_indicator_id = self.test_indicator['id']
        self.assertTrue(isinstance(self.test_indicator_id, int))
Example no. 5
    def setUp(self):
        ACEBasicTestCase.setUp(self)

        # XXX get rid of verify=False
        if saq.CONFIG['sip'].getboolean('enabled'):
            self.sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'], saq.CONFIG['sip']['api_key'], verify=False)
            self.test_indicators = []

            # insert the indicator(s) we'll test against
            for indicator in [ 
                { 'type': 'URI - URL', 'value': 'http://whackadoodle.net/dunno.html', 'status': 'Analyzed' },
                #{ 'type': 'Address - ipv4-addr', 'value': '165.45.66.45', 'status': 'Analyzed' },
                { 'type': 'URI - Path', 'value': '/follow/the/white/rabbit.html', 'status': 'Analyzed' },
                { 'type': 'Windows - FileName', 'value': 'ultimate.txt', 'status': 'Analyzed' }, ]:

                self.test_indicators.append(self.sip_client.post('indicators', indicator))

        self.target_urls = [
            'http://whackadoodle.net/dunno.html',
            #'http://165.45.66.45/whatever.asp',
            'http://www.g00gle.com/follow/the/white/rabbit.html',
            'http://www.c00kie.com/ultimate.txt' ]
Example no. 6
def main():

    parser = argparse.ArgumentParser(
        description="SIP Indicator CbR Search and ACE Alert.")
    parser.add_argument('-d',
                        '--debug',
                        action="store_true",
                        help="set logging to DEBUG",
                        default=False)
    args = parser.parse_args()

    # load config
    config = ConfigParser()
    config.read('etc/config.ini')

    # load SIP indicator specs so we know how to get the indicators we want
    indicator_specs = {}
    with open(config['SIP']['indicator_specifications'], 'r') as stream:
        try:
            indicator_specs = yaml.safe_load(stream)
            logging.info(
                "Successfully loaded indicator specifications: {}".format(
                    indicator_specs))
        except yaml.YAMLError as e:
            logging.error("Couldn't load indicator specs : {}".format(e))
            return
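
    # The indicator_specifications YAML referenced above is expected to provide a
    # 'type' list and an optional 'status' value (defaulting to 'Analyzed' below),
    # for example (the concrete type names here are illustrative assumptions):
    #
    #   status: Analyzed
    #   type:
    #     - URI - Domain Name
    #     - Address - ipv4-addr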

    # Load ACE API
    ace_api.set_default_remote_host(config['ACE']['ace_address'])
    ace_api.set_default_ssl_ca_path(config['ACE']['ca_chain_path'])

    # Create SIP Client and load indicators
    sip_ssl = config['SIP'].getboolean('ssl_verify')
    sc = pysip.Client(config['SIP']['sip_address'],
                      config['SIP']['sip_api_key'],
                      verify=sip_ssl)
    status = indicator_specs.get('status', 'Analyzed')
    indicators = {}
    for i_type in indicator_specs['type']:
        handle_proxy(config['SIP'])
        indicators[i_type] = sc.get('/indicators?type={}&status={}'.format(
            i_type, status))

    # load field mappings
    field_map = ConfigParser()
    field_map.read(config['GLOBAL']['field_mappings'])
    sip_cbr_map = field_map['SIP-TO-CBR']
    sip_ace_map = field_map['SIP-TO-ACE']
    cbr_ace_map = field_map['CBR-TO-ACE']

    submitted_alerts = []

    # Query Carbon Black Response for our indicators
    #cbq = CBquery(profile=config['CbR']['profile'])
    handle_proxy(config['CbR'])
    cb = CbResponseAPI(profile=config['CbR']['profile'])
    for i_type in indicator_specs['type']:
        for i in indicators[i_type]:
            query = '{}:"{}"'.format(sip_cbr_map[i_type], i['value'])
            logging.debug('Querying CbR for indicator:{} query:{}'.format(
                i['id'], query))
            procs = cb.select(Process).where(query).group_by('id')
            if procs:
                # alert ACE
                Alert = ace_api.Analysis(description='CbR - SIP:{}'.format(
                    i['value']),
                                         analysis_mode='correlation',
                                         tool='SipCbrAce')
                print(Alert.description)
                Alert.add_indicator(i['id'])
                # get sip tags and tag Alert
                handle_proxy(config['SIP'])
                i_details = sc.get('/indicators/{}'.format(i['id']))
                handle_proxy(config['CbR'])
                for tag in i_details['tags']:
                    Alert.add_tag(tag)
                alert_details = {}
                alert_details['total_results'] = len(procs)
                max_results = config['GLOBAL'].getint('alert_max_results')
                alert_details['included_results'] = 0
                alert_details['process_details'] = []
                for proc in procs:
                    if alert_details['included_results'] > max_results:
                        break
                    alert_details['process_details'].append(str(proc))
                    alert_details['included_results'] += 1
                    Alert.add_hostname(proc.hostname)
                    Alert.add_md5(proc.process_md5)
                    Alert.add_ipv4(proc.comms_ip)
                    Alert.add_ipv4(proc.interface_ip)
                    Alert.add_process_guid(proc.id)
                    Alert.add_user(proc.username)
                    Alert.add_file_name(proc.process_name)
                    Alert.add_file_path(proc.path)
                    #Alert.add_file_location('{}@{}'.format(proc.hostname, proc.path))
                #Alert.submit_kwargs['details'] = alert_details
                handle_proxy(config['ACE'])
                print(Alert.description)
                submitted_alerts.append(Alert.submit())
                logging.info(
                    "Submitted alert to ACE: {UUID} - URL=https://{HOST}/ace/analysis?direct={UUID}"
                    .format(UUID=Alert.uuid, HOST=Alert.remote_host))

    print(submitted_alerts)
Example no. 7
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)



if __name__ == "__main__":
   # remove proxy if it's set
   if 'http_proxy' in os.environ:
      del os.environ['http_proxy']
   if 'https_proxy' in os.environ:
      del os.environ['https_proxy']
   # load configuration
   config = ConfigParser()
   config.read(os.path.expanduser("~")+'/.sipit.ini')

   sip_client = pysip.Client(config['sip']['end_point'], config['sip']['api_key'], verify=False)
   parser = argparse.ArgumentParser(description="Add Indicators and query SIP")
   subparsers = parser.add_subparsers(dest='command')
   commands = ['create', 'query', 'update']

   query_parser = subparsers.add_parser('query',help="query aspects of SIP. query -h for more")
   query_parser.add_argument('-t','--types',default=False,action='store_true',help='list indicator types')
   query_parser.add_argument('-s','--sources',default=False,action='store_true',help='list sources')
   query_parser.add_argument('-c','--campaigns',default=False,action='store_true',help='list campaigns')
   query_parser.add_argument('--tags',default=False,action='store_true',help='list tags')
   query_parser.add_argument('-v','--value',default=False,dest='value',help='search for an indicator value')
   query_parser.add_argument('-d','--details',default=False,action='store_true',help='all information about an indicator value')
   query_parser.add_argument('--status',default=False,action='store_true',help='list possible status values for indicators')
   query_parser.add_argument('-id','--indicator-id',dest='id',help='query the specific indicator information for a sip id')

Example no. 8
def update_local_cache():

    import pysip

    # XXX remove verify=False
    sip_client = pysip.Client(saq.CONFIG['sip']['remote_address'],
                              saq.CONFIG['sip']['api_key'],
                              verify=False)
    cache_path = os.path.join(saq.DATA_DIR, saq.CONFIG['sip']['cache_db_path'])

    # the actual file should be a symlink
    if os.path.exists(cache_path) and not os.path.islink(cache_path):
        logging.error(
            "{} should be a symlink but it's not!".format(cache_path))
        return False

    # get the file the symlink points to
    current_cache_path = None
    if os.path.exists(cache_path):
        current_cache_path = os.path.realpath(cache_path)
    else:
        current_cache_path = '{}.b'.format(cache_path)

    # there are two files that end with .a and .b
    if not current_cache_path.endswith(
            '.a') and not current_cache_path.endswith('.b'):
        logging.error(
            "expecting {} to end with .a or .b!".format(current_cache_path))
        return False

    # we edit the other one
    base_cache_path = current_cache_path[:-2]
    if current_cache_path.endswith('.a'):
        target_cache_path = '{}.b'.format(base_cache_path)
    else:
        target_cache_path = '{}.a'.format(base_cache_path)

    logging.info("updating {}".format(target_cache_path))

    if os.path.exists(target_cache_path):
        try:
            logging.info(
                "deleting existing crits cache {}".format(target_cache_path))
            os.remove(target_cache_path)
        except Exception as e:
            logging.error("unable to delete {}: {}".format(
                target_cache_path, e))
            return False

    cache_db = sqlite3.connect(target_cache_path)
    db_cursor = cache_db.cursor()
    db_cursor.execute("""CREATE TABLE indicators ( 
                           id TEXT PRIMARY KEY, 
                           type TEXT NOT NULL,
                           value TEXT NOT NULL )""")
    db_cursor.execute(
        "CREATE INDEX i_type_value_index ON indicators ( type, value )")

    logging.info("caching indicators...")
    c = 0
    for indicator in sip_client.get(
            '/api/indicators?status=Analyzed&bulk=True'):
        db_cursor.execute(
            "INSERT INTO indicators ( id, type, value ) VALUES ( ?, ?, LOWER(?) )",
            (str(indicator['id']), indicator['type'], indicator['value']))
        c += 1

    logging.info("comitting changes to database...")
    cache_db.commit()
    logging.info("updating symlink...")
    # now point current link to our new database
    # leaving the old one in place for current processes to keep using
    try:
        try:
            os.remove(cache_path)
        except FileNotFoundError:
            pass

        os.symlink(os.path.basename(target_cache_path), cache_path)

    except Exception as e:
        logging.error("failed to update symlink: {}".format(e))

    logging.info("done")
    logging.debug("loaded {} indicators".format(c))
    return True
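
# The cache written above is read through the symlink created at cache_path. A
# minimal lookup sketch against the schema built by update_local_cache(); the
# helper name and signature are illustrative, not part of the original code.
def lookup_cached_indicator(cache_path, indicator_type, value):
    import sqlite3

    db = sqlite3.connect(cache_path)
    try:
        db_cursor = db.cursor()
        # values are stored lower-cased by update_local_cache(), so match in lower case
        db_cursor.execute("SELECT id FROM indicators WHERE type = ? AND value = ?",
                          (indicator_type, value.lower()))
        row = db_cursor.fetchone()
        return row[0] if row else None
    finally:
        db.close()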
Example no. 9
def main():

    parser = argparse.ArgumentParser(
        description=
        "The tool maps Cofense IOCs from their JSON integration into SIP indicators."
    )
    parser.add_argument('-d',
                        '--debug',
                        action='store_true',
                        help="Turn on debug logging.")
    parser.add_argument(
        '--logging-config',
        required=False,
        default='etc/logging.ini',
        dest='logging_config',
        help="Path to logging configuration file.  Defaults to etc/logging.ini"
    )
    parser.add_argument(
        '-c',
        '--config',
        required=False,
        default='etc/config.ini',
        dest='config_path',
        help="Path to configuration file.  Defaults to etc/config.ini")
    parser.add_argument('-p',
                        '--print-indicator-summary',
                        action='store_true',
                        help="Print a summary of all indicators incoming.")
    parser.add_argument('-rd',
                        '--target-report-dir',
                        action='store',
                        default=None,
                        help="only evaluate this report directory")
    parser.add_argument('-grh',
                        '--get-report-html',
                        action='store_true',
                        help="get report html")

    args = parser.parse_args()

    # work out of home dir
    os.chdir(HOME_PATH)

    # initialize logging
    try:
        logging.config.fileConfig(args.logging_config)
    except Exception as e:
        sys.stderr.write(
            "ERROR: unable to load logging config from {0}: {1}".format(
                args.logging_config, str(e)))
        sys.exit(1)

    coloredlogs.install(level='INFO', logger=logging.getLogger())

    if args.debug:
        coloredlogs.install(level='DEBUG', logger=logging.getLogger())

    config = configparser.ConfigParser()
    config.read(args.config_path)

    sip_map = config['sip_mappings']
    # for turning indicator creation on/off by type
    indicator_filter = config['indicator_filter']

    # variables
    #  - keep a throttle on indicators created per day
    #  - track threats
    indicators_created_today = 0
    max_indicators_per_day = config['collect'].getint('max_indicators_per_day')
    indicator_creation_count_file = os.path.join(
        HOME_PATH, 'var',
        f"indicator_count_for_{datetime.now().strftime('%Y-%m-%d')}")
    if not os.path.exists(indicator_creation_count_file):
        logging.info(f"reseting indicator count for a new day..")
        for old_file in glob.glob(
                f"{os.path.join(HOME_PATH, 'var')}/indicator_count_for_*"):
            logging.info(f"deleting old variable file: {old_file}")
            os.remove(old_file)
        with open(indicator_creation_count_file, 'w') as f:
            f.write(str(0))
    else:
        with open(indicator_creation_count_file, 'r') as f:
            indicators_created_today = f.read()
        indicators_created_today = int(indicators_created_today)

    unique_threat_tracker = {}
    unique_threat_tracker_file = os.path.join(HOME_PATH, 'var',
                                              "unique_threat_tracker.json")
    if os.path.exists(unique_threat_tracker_file):
        with open(unique_threat_tracker_file, 'r') as fp:
            unique_threat_tracker = json.load(fp)

    # connect to sip
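    # verify_ssl may be either a path to a CA bundle (kept as a string) or a
    # true/false flag; the os.path.exists() check below distinguishes the two.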
    verify_ssl = config['sip'].get('verify_ssl')
    if not os.path.exists(verify_ssl):
        verify_ssl = config['sip'].getboolean('verify_ssl')
    sip = pysip.Client(
        f"{config['sip'].get('server')}:{config['sip'].get('port')}",
        config['sip']['api_key'],
        verify=verify_ssl)

    def _sip_indicator(type: str,
                       value: str,
                       reference: dict,
                       tags: list,
                       username=config['sip'].get('user'),
                       case_sensitive=False) -> dict:
        # A sip indicator with some defaults defined.
        return {
            'type': type,
            'status': 'New',
            'confidence': 'low',
            'impact': 'unknown',
            'value': value,
            'references': [{
                'source': "Cofense",
                'reference': json.dumps(reference)
            }],
            'username': username,
            'case_sensitive': case_sensitive,
            'tags': list(set(tags))
        }

    resume_report_id = None
    processed_reports = []
    total_incoming_indicators = []
    for report in report_iterator(target_report_dir=args.target_report_dir):

        if args.get_report_html:
            get_html_report(report['reportURL'])
            return True

        global_tags = []
        for threat in report['malwareFamilySet']:
            if threat['familyName'] == 'Credential Phishing':
                global_tags.append('creds_harvesting')
                continue
            if threat['familyName'] not in unique_threat_tracker:
                logging.info(
                    f"found previously un-seen threat name: {threat['familyName']} - {threat['description']}"
                )
                unique_threat_tracker[threat['familyName']] = threat['description']

            if threat['familyName'] not in global_tags:
                global_tags.append(threat['familyName'])

        reference = {
            'id': report['id'],
            'reportURL': report['reportURL'],  # TODO
            'executiveSummary': report['executiveSummary']
        }

        ref_length = len(json.dumps(reference))
        if ref_length > 512:
            # Max length of SIP reference field is 512.
            cut_length = len(reference['executiveSummary']) - (ref_length - 512)
            reference['executiveSummary'] = reference['executiveSummary'][:cut_length]

        # report indicators to post to SIP
        potential_indicators = []

        # process blocksSet
        for block in report['blockSet']:
            # copy so per-block tags don't leak back into global_tags
            _tags = list(global_tags)

            # deduplication happens later
            if block['role'] == "InfURL":
                _tags.append('phishing_url')
            else:
                _tags.append(block['role'])

            itype = sip_map[block['blockType']]
            idata = _sip_indicator(type=itype,
                                   value=block['data'],
                                   reference=reference,
                                   tags=_tags)

            if indicator_filter.getboolean(itype):
                potential_indicators.append(idata)

            # create more indicators
            if block['blockType'] == "URL":

                if block['role'] == "InfURL":
                    # this was a phishing url
                    itype = 'Email - Content - Domain Name'
                    value = block['data_1']['host']
                    idata = _sip_indicator(type=itype,
                                           value=value,
                                           reference=reference,
                                           tags=_tags)
                    idata['tags'].append('domain_in_url')
                    if indicator_filter.getboolean(itype):
                        potential_indicators.append(idata)

                # uri path?
                value = block['data_1']['path']
                if value and len(value) > 9:
                    idata = _sip_indicator(type='URI - Path',
                                           value=value,
                                           reference=reference,
                                           tags=_tags)
                    if indicator_filter.getboolean('URI - Path'):
                        potential_indicators.append(idata)

                # default assume domain name
                itype = 'URI - Domain Name'
                value = block['data_1']['host']
                if is_ipv4(block['data_1']['host']):
                    itype = "Address - ipv4-addr"

                idata = _sip_indicator(type=itype,
                                       value=value,
                                       reference=reference,
                                       tags=_tags)
                if indicator_filter.getboolean(itype):
                    potential_indicators.append(idata)

        # process executableSet
        for malfile in report['executableSet']:
            fileName = malfile.get('fileName')
            if fileName and indicator_filter.getboolean(sip_map['fileName']):
                potential_indicators.append(
                    _sip_indicator(type=sip_map['fileName'],
                                   value=fileName,
                                   reference=reference,
                                   tags=global_tags))
            sha256Hex = malfile.get('sha256Hex')
            if sha256Hex and indicator_filter.getboolean(sip_map['sha256Hex']):
                potential_indicators.append(
                    _sip_indicator(type=sip_map['sha256Hex'],
                                   value=sha256Hex,
                                   reference=reference,
                                   tags=global_tags))
            md5Hex = malfile.get('md5Hex')
            if md5Hex and indicator_filter.getboolean(sip_map['md5Hex']):
                potential_indicators.append(
                    _sip_indicator(type=sip_map['md5Hex'],
                                   value=md5Hex,
                                   reference=reference,
                                   tags=global_tags))

        # process subjectSet
        for subjectSet in report['subjectSet']:
            subject = subjectSet.get('subject')
            if subject and indicator_filter.getboolean('Email - Subject'):
                potential_indicators.append(
                    _sip_indicator(type='Email - Subject',
                                   value=subject,
                                   reference=reference,
                                   tags=global_tags))

        if args.print_indicator_summary:
            total_incoming_indicators.extend(potential_indicators)
            continue

        for indicator in potential_indicators:
            if indicators_created_today >= max_indicators_per_day:
                resume_report_id = report['id']
                logging.warning(
                    f"maximum indicators created for the day. Will resume report {resume_report_id} tomorrow."
                )
                break
            if create_sip_indicator(sip, indicator):
                indicators_created_today += 1

        if resume_report_id is not None:
            break

    if args.print_indicator_summary:
        summary_data = []
        for i in total_incoming_indicators:
            summary_data.append({
                'Report ID': json.loads(i['references'][0]['reference'])['id'],
                'type': i['type'],
                'value': i['value']
            })
            #print(f" + type:{i['type']}\t\t\tvalue:{i['value']}")
        print(tabulate(summary_data, headers="keys"))  #, tablefmt="github"))
        print(f"\nTotal Incoming Indicators: {len(total_incoming_indicators)}")
        return

    # update records
    try:
        with open(indicator_creation_count_file, 'w') as fp:
            fp.write(str(indicators_created_today))
    except Exception as e:
        logging.error(f"Problem writing indicator count file: {e}")

    try:
        with open(unique_threat_tracker_file, 'w') as fp:
            json.dump(unique_threat_tracker, fp)
    except Exception as e:
        logging.error(f"Problem writing unique threat tracker: {e}")

    # archive processed reports
    for report_id, report_path in PROCESSED_REPORTS.items():
        if resume_report_id == report_id:
            continue
        archive_dir = os.path.dirname(report_path).replace(
            INCOMING_DIR_NAME, ARCHIVE_DIR_NAME)
        if not os.path.exists(archive_dir):
            os.mkdir(archive_dir)
        archive_path = os.path.join(archive_dir, f"{report_id}.json")
        try:
            os.rename(report_path, archive_path)
        except Exception as e:
            logging.error(f"couldn't archive report: {e}")
            continue
        logging.info(f"archived {report_path} to {archive_path}")

    # delete empty dirs
    for report_dir in glob.glob(os.path.join(INCOMING_DIR, '*')):
        if os.path.isdir(report_dir):
            files = os.listdir(report_dir)
            if len(files) == 0:
                os.rmdir(report_dir)
                logging.info(f"deleted empty report dir: {report_dir}")
Example no. 10
    # get default config items
    server = config['default']['server']
    port = config['default']['port']
    api_key = config['default']['api_key']
    verify_ssl = config['default'].getboolean('verify_ssl')
    whitelist_tags = config['default']['whitelist_tags']
    ignore_proxy = config['default'].getboolean('ignore_proxy')

    if ignore_proxy:
        if 'https_proxy' in os.environ:
            del os.environ['https_proxy']
        if 'http_proxy' in os.environ:
            del os.environ['http_proxy']

    # create sip client and sipwhitelist instance
    sip_client = pysip.Client(server + ':' + port, api_key, verify=verify_ssl)
    swl = SIPWhitelist(whitelist_tags, sip_client)

    indicator_types = [
        i['value'] for i in sip_client.get('/api/indicators/type')
    ]
    istatus_types = [
        s['value'] for s in sip_client.get('/api/indicators/status')
    ]
    isources = [s['value'] for s in sip_client.get('/api/intel/source')]
    whitelisted_itypes = swl.whitelist.keys()

    parser = argparse.ArgumentParser(
        description="Add Indicators and query SIP")
    parser.add_argument('-d',
                        '--debug',