def run(
    config: DynaBox,
    logging: DynaBox,
    inq: JoinableQueue,
    subscribe_callback: Callable,
    unsubscribe_callback: Callable,
):
    """Entry point for the CIFv3 plugin.

    Connects to the configured CIF API, subscribes to the STIX-2 indicator
    topic, and launches the publisher worker(s). Returns early (disabling the
    plugin) if the CIF endpoint cannot be reached.
    """
    global logger, workers
    logger = threatbus.logger.setup(logging, __name__)
    assert plugin_name in config, f"Cannot find configuration for {plugin_name} plugin"
    config = config[plugin_name]

    # Verify connectivity before wiring anything up.
    cif = None
    try:
        cif = Client(
            remote=config.api.host,
            token=config.api.token,
            verify_ssl=config.api.ssl,
        )
        cif.ping()
    except Exception as err:
        logger.error(
            f"Cannot connect to CIFv3 at {config.api.host}, using SSL: {config.api.ssl}. Exiting plugin. {err}"
        )
        return

    # Receive indicators from the bus and hand them to the publisher worker.
    indicator_q = JoinableQueue()
    subscribe_callback("stix2/indicator", indicator_q)
    workers.append(CIFPublisher(indicator_q, cif, config))
    for worker in workers:
        worker.start()
    logger.info("CIF3 plugin started")
def cif(self):
    """Search a CIFv3 (verbose-robot) instance for this indicator.

    Requires the optional cifsdk>=4.0 dependency; prints install
    instructions and exits when it is missing. Returns at most 25 hits.
    """
    try:
        from cifsdk.client.http import HTTP as Client
    except ImportError:
        # Dependency is optional — explain how to install it, then bail out.
        for line in (
            '',
            'The cif function requires the cifsdk>=4.0',
            '$ pip install https://github.com/csirtgadgets/'
            'verbose-robot-sdk-py/archive/master.zip',
            '$ export CIF_TOKEN=1234...',
            '',
        ):
            print(line)
        raise SystemExit

    query = {'q': self.indicator, 'limit': 25}
    return Client().search(query)
def submit_to_cif(data, host, ssl, token, cache):
    """Submit one indicator dict to a CIFv3 host and cache it on success.

    Returns True when the submission (and cache update) succeeded,
    False on any failure — submission is best-effort.
    """
    logging.debug('Initializing Client instance to host={}, with ssl={}'.format(host, ssl))
    client = Client(token=token, remote=host, verify_ssl=ssl)

    logging.info('Submitting indicator: {0}'.format(data))
    try:
        created_id = client.indicators_create(json.dumps(data))
        cache.setcache(data['indicator'])
        logging.debug('Indicator submitted with id {}'.format(created_id))
    except Exception as exc:
        logging.error('Error submitting indicator: {0}'.format(repr(exc)))
        return False
    return True
def farsight(self):
    """Look up an ipv4 indicator in Farsight DNSDB.

    Raises TypeError for any other indicator type. Requires the optional
    csirtg_dnsdb client; prints install instructions and exits when missing.
    """
    if self.itype != 'ipv4':
        raise TypeError('%s is not supported' % self.itype)

    try:
        from csirtg_dnsdb.client import Client
    except ImportError:
        # Dependency is optional — explain how to install it, then bail out.
        for line in (
            '',
            'The csirtg function requires the csirtg_dnsdb client',
            'https://github.com/csirtgadgets/dnsdb-py',
            '$ pip install csirtg_dnsdb',
            '$ export FARSIGHT_TOKEN=1234...',
            '',
        ):
            print(line)
        raise SystemExit

    return Client().search(self.indicator)
def get_feed(self):
    """Fetch an aggregated feed from the remote CIF using ``self.filters``.

    Returns the feed payload from the client. On any failure, logs the
    error, backs off a random 30-120 seconds, and exits the process with
    status 1 (this is a terminal failure for the calling job).
    """
    cli = Client(token=self.token, remote=self.remote, verify_ssl=self.verify)
    # FIX: the same filter set was previously debug-logged twice back to
    # back with near-identical messages; log it once.
    logger.debug('Getting feed with filters: {}'.format(self.filters))
    try:
        return cli.feed(filters=self.filters)
    except Exception as e:
        logger.warning('Exception during get_feed: {}'.format(e))
        logger.debug('CLI: {}, Filters: {}'.format(cli, self.filters))
        # Randomized backoff so a fleet of clients doesn't hammer the
        # server in lockstep after an outage.
        backoff = randint(30, 120)
        logger.warning('Backing off {} seconds after failure'.format(backoff))
        time.sleep(backoff)
        sys.exit(1)
def test_zyre(iface):
    """Integration test: a Client and a raw Zyre peer join the same group and
    exchange one indicator message over the given network interface."""
    ioloop.install()
    loop = ioloop.IOLoop.instance()
    # NOTE(review): IOLoop.instance() conventionally returns a singleton, so
    # loop and loop2 may be the same object — confirm this is intended.
    loop2 = ioloop.IOLoop.instance()

    client = Client(
        group='TEST',
        loop=loop,
        verbose=1,
        interface=iface,
        task=task,
    )
    client.start_zyre()

    client2 = Zyre(
        group='TEST',
        loop=loop2,
        verbose=1,
        interface=iface,
        task=task,
    )
    client2.start()

    # give both peers a moment to discover each other
    sleep(0.01)

    def test_fcn(*args):
        # sender side: shout one indicator into the group
        i = Indicator('example.com')
        client.send_message(str(i))

    def test_fcn2(s, e):
        # receiver side: expect ENTER, JOIN, then the SHOUT with the payload
        m = s.recv_multipart()
        assert m[0] == 'ENTER'

        m = s.recv_multipart()
        assert m[0] == 'JOIN'

        m = s.recv_multipart()
        assert m[0] == 'SHOUT'

        # frame 4 carries the serialized indicator
        i = json.loads(m[4])
        i = Indicator(**i)
        assert i.indicator == 'example.com'

    loop2.add_handler(client2.actor, test_fcn2, zmq.POLLIN)
    loop.add_handler(client.actor, test_fcn, zmq.POLLIN)

    loop.run_sync(test_fcn)

    client.stop_zyre()
    client2.stop()
def search_cif(self, indicator):
    """Search every configured CIF host for an indicator.

    :param indicator: one of domain, fqdn, or hash
    :return: combined list of result dictionaries from all hosts
    """
    hits = []
    for host_cfg in self.cif_hosts:
        cli = Client(
            token=host_cfg['token'],
            remote=host_cfg['remote'],
            verify_ssl=self.verify,
        )
        query = {
            'indicator': indicator,
            'limit': self.limit,
            'nolog': '1',
        }
        hits += cli.indicators_search(filters=query)
    return hits
def submit_to_cif(data, host, ssl, token, cache):
    """Submit one indicator dict to a CIFv3 host and cache it on success.

    Returns True when the submission succeeded, False on any failure.
    Authorization failures (SubmissionFailed) get a dedicated, actionable
    error message.
    """
    logging.debug(
        'Initializing Client instance to host={}, with ssl={}'.format(
            host, ssl))
    cli = Client(token=token, remote=host, verify_ssl=ssl)
    logging.info('Submitting indicator: {0}'.format(data))
    try:
        r = cli.indicators_create(json.dumps(data))
        cache.setcache(data['indicator'])
        logging.debug('Indicator submitted with id {}'.format(r))
        return True
    # FIX: previously caught (SubmissionFailed, Exception) and re-dispatched
    # with isinstance — Exception already subsumes SubmissionFailed. Two
    # targeted clauses express the same dispatch directly.
    except SubmissionFailed:
        logging.error(
            'Submission failed due to authorization error; please correct your host/key, remove this container, and try again'
        )
        return False
    except Exception as e:
        logging.error('Error submitting indicator: {} {}'.format(
            type(e).__name__, e.args))
        return False
def run(
    config: Subview,
    logging: Subview,
    inq: JoinableQueue,
    subscribe_callback: Callable,
    unsubscribe_callback: Callable,
):
    """Entry point for the CIFv3 plugin (confuse/Subview configuration).

    Validates the plugin config, connects to the CIF API, subscribes to the
    STIX-2 indicator topic, and launches the publisher worker(s). Returns
    early (disabling the plugin) on invalid config or unreachable endpoint.
    """
    global logger, workers
    logger = threatbus.logger.setup(logging, __name__)
    config = config[plugin_name]
    try:
        validate_config(config)
    except Exception as e:
        logger.fatal("Invalid config for plugin {}: {}".format(
            plugin_name, str(e)))
        # FIX: previously fell through after logging the fatal error and went
        # on to read the invalid config; abort the plugin instead.
        return
    remote, token, ssl = (
        config["api"]["host"].get(),
        config["api"]["token"].get(),
        config["api"]["ssl"].get(),
    )
    # Verify connectivity before wiring anything up.
    cif = None
    try:
        cif = Client(remote=remote, token=token, verify_ssl=ssl)
        cif.ping()
    except Exception as err:
        logger.error(
            f"Cannot connect to CIFv3 at {remote}, using SSL: {ssl}. Exiting plugin. {err}"
        )
        return
    # Receive indicators from the bus and hand them to the publisher worker.
    indicator_q = JoinableQueue()
    topic = "stix2/indicator"
    subscribe_callback(topic, indicator_q)
    workers.append(CIFPublisher(indicator_q, cif, config))
    for w in workers:
        w.start()
    logger.info("CIF3 plugin started")
def submit_safelist(self, l):
    """Submit safelist ``l`` to the remote CIF in chunks of 500 indicators.

    On any failure, logs the error, backs off a random 30-120 seconds, and
    exits the process with status 1 (terminal failure for the calling job).
    """
    cli = Client(token=self.token, remote=self.remote,
                 verify_ssl=self.verify)
    filters = [f for f in self._list_to_filters(l)]
    try:
        i = 1
        for data in self._chunks(filters, 500):
            ret = cli.indicators_create(json.dumps(data))
            logger.debug('Submitted chunk {} with return status {}'.format(
                i, ret))
            i += 1
    except Exception as e:
        # FIX: message previously said 'get_feed' (copy-paste from the feed
        # fetcher), which misattributed failures in the logs.
        logger.warning('Exception during submit_safelist: {}'.format(e))
        logger.debug('CLI: {}, Filters: {}'.format(cli, data))
        # Randomized backoff so clients don't retry in lockstep.
        backoff = randint(30, 120)
        logger.warning(
            'Backing off {} seconds after failure'.format(backoff))
        time.sleep(backoff)
        sys.exit(1)
    logger.info('Complete submission of safelist with size {}'.format(
        str(len(l))))
def ping():
    """Round-trip a ping through a default Client and return the response."""
    cli = Client()
    return cli.ping()
def client():
    """Fixture: yield a Client pointed at a local test endpoint."""
    cli = Client(remote='http://localhost')
    yield cli
def test_client_http():
    """The HTTP client must retain the remote and token it was built with."""
    remote = 'https://localhost:3000'
    token = '12345'
    cli = Client(remote, token)
    assert cli.remote == remote
    assert cli.token == token
def main():  # pragma: no cover
    """CLI entry point for `cif`: search, create, or delete indicators.

    Uses an HTTP client for http(s) remotes, otherwise a ZeroMQ client.
    Piped stdin replaces the filter set with one indicator per line.
    """
    p = get_argument_parser()
    p = ArgumentParser(description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --profile zeek
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p])
    p.add_argument('--remote', help='specify API remote [default %(default)s]',
                   default=REMOTE)
    p.add_argument('--no-verify-ssl', action='store_true')
    p.add_argument("--create", action="store_true", help="create an indicator")
    p.add_argument('--delete', action='store_true')
    p.add_argument('-q', '--search', help="search")
    p.add_argument('--itype', help='filter by indicator type')
    p.add_argument('--reported_at', help='specify reported_at filter')
    p.add_argument('-n', '--nolog', help='do not log search',
                   action='store_true')
    p.add_argument('-f', '--format',
                   help='specify output format [default: %(default)s]"',
                   default=FORMAT, choices=FORMATS.keys())
    p.add_argument('--indicator', help='indicator (ip, url, etc..) '
                   'to search for')
    p.add_argument('--confidence', help="specify confidence level")
    p.add_argument('--tags', nargs='+', help='filter by tag(s)')
    p.add_argument('--provider', help='provider to filter by')
    p.add_argument('--asn', help='filter by asn')
    p.add_argument('--cc', help='filter by country code')
    p.add_argument('--asn-name', help='filter by asn name')
    p.add_argument('--rdata', help='filter by rdata')
    p.add_argument('--groups', help='filter by group(s)')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', action='store_true',
                   help='auto-sets reported_at to today, 00:00:00Z (UTC)')
    p.add_argument('--limit', help='limit results [default %(default)s]',
                   default=SEARCH_LIMIT)
    p.add_argument('--columns', default=','.join(COLUMNS),
                   help='specify output columns [default %(default)s]')
    p.add_argument('--no-feed', action='store_true',
                   help='return a non-filtered dataset (no whitelist applied)')
    p.add_argument('--profile', help='specify feed profile',
                   choices=PROFILES.keys())

    args = p.parse_args()
    setup_logging(args)

    # keep only the options the user actually set (truthy values)
    opts = vars(args)
    options = {}
    for k, v in opts.items():
        if v:
            options[k] = v

    if args.remote.startswith('http'):
        verify_ssl = True
        if args.no_verify_ssl:
            verify_ssl = False
        if args.remote == 'https://localhost':
            # local dev remote: self-signed certs are expected
            verify_ssl = False
        # NOTE(review): args.remote is not passed to Client here — presumably
        # Client falls back to an env/default remote; confirm against the SDK.
        cli = Client(verify_ssl=verify_ssl)
    else:
        from cifsdk.client.zeromq import ZMQ
        cli = ZMQ()

    filters = {e: options.get(e) for e in VALID_FILTERS}
    if args.search:
        filters['indicator'] = args.search

    # the API expects 1/0 rather than True/False for boolean filters
    for k, v in filters.items():
        if v is True:
            filters[k] = 1
        if v is False:
            filters[k] = 0

    if options.get("create"):
        _create(cli, args, filters)
        raise SystemExit

    if options.get("delete"):
        _delete(cli, args, filters)
        raise SystemExit

    if not sys.stdin.isatty():
        # piped input: one indicator per line replaces the filter set
        buffer = sys.stdin.read().rstrip("\n").split("\n")
        filters = [{'indicator': i} for i in buffer]

    _search(cli, args, options, filters)
def main():
    """CLI entry point for `cif-tail`: poll the CIF API on a fixed cycle and
    print newly reported indicators as CSV or a table."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CIF_TOKEN
            CIF_REMOTE

        example usage:
            $ cif-tail
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-tail',
        parents=[p],
    )
    p.add_argument('--no-verify-ssl', help='turn TLS/SSL verification OFF',
                   action='store_true')
    p.add_argument('--format', default='table')
    p.add_argument('--cycle', help='specify a cycle in which to run', default=5)
    p.add_argument('--filters', help='specify data filters to use',
                   default='itype=ipv4,confidence=7,limit=10')
    p.add_argument('--remote', default=REMOTE_ADDR)
    p.add_argument('--token', default=TOKEN)
    # default window starts 420s (7 minutes) in the past
    p.add_argument('--start',
                   default=arrow.get((arrow.utcnow().timestamp - 420)))

    args = p.parse_args()

    # setup logging
    setup_logging(args)

    verify_ssl = True
    if args.no_verify_ssl:
        verify_ssl = False

    # parse the "k=v,k=v" filter string into a dict
    filters = {}
    for k in args.filters.split(','):
        kk, v = k.split('=')
        filters[kk] = v

    remote = args.remote
    token = args.token

    client = Client(remote, token, verify_ssl=verify_ssl)

    start = args.start
    start = arrow.get(start)

    # --cycle is given in minutes; sleep in seconds
    cycle = (int(args.cycle) * 60)

    # we want a 120s buffer for things that are being generated "now"
    end = arrow.get((arrow.utcnow().timestamp - 120))

    while True:
        logger.debug('now: %s' % arrow.utcnow())
        # format the window bounds as ISO-8601 UTC for the reporttime filter
        start = start.strftime('%Y-%m-%dT%H:%M:%S') + 'Z'
        end = end.strftime('%Y-%m-%dT%H:%M:%S') + 'Z'
        filters['reporttime'] = '{},{}'.format(start, end)
        logger.debug('searching {} - {}'.format(start, end))
        resp = client.indicators_search(filters)

        if args.format == 'csv':
            for l in get_lines_csv(resp):
                print(l)
        else:
            for l in get_lines_table(resp):
                print(l)

        logger.debug('sleeping for {}m'.format(args.cycle))
        sleep(cycle)

        # advance the window: new start is one second past the old end
        # todo- this needs some work, maybe use last record if there was one?
        # what if there wasn't?
        start = arrow.get(arrow.get(end).timestamp + 1)
        end = arrow.get((arrow.utcnow().timestamp - 120))
def run(self):
    """Responder entry point: collect indicators from a TheHive case artifact
    or alert, normalize tags/TLP/confidence, and submit each to CIFv3."""
    Responder.run(self)
    confidence = None
    indicators = []

    # case details
    if self.get_param('data._type') == 'case_artifact':
        a = {}
        a['indicator'] = self.get_param('data.data', None, 'Missing indicator')
        a['tags'] = self.get_param('data.tags')
        a['tlp'] = self.get_param('data.tlp', None)
        a['desc'] = self.get_param('data.message', None)
        a['lasttime'] = self.get_param('data.createdAt', None)
        indicators.append(a)

    # alert details
    if self.get_param('data._type') == 'alert':
        for i in self.get_param('data.artifacts'):
            a = {}
            a['indicator'] = i['data']
            a['tags'] = i['tags']
            a['tlp'] = self.get_param('data.tlp', None)
            a['desc'] = self.get_param('data.description', None)
            a['lasttime'] = self.get_param('data.createdAt', None)
            # prefer the update timestamp when the alert has been updated
            if self.get_param('data.updatedAt'):
                a['lasttime'] = self.get_param('data.updatedAt')
            indicators.append(a)

    for i in indicators:
        # map TLP to word
        tlp = self.TLP_MAP[str(i['tlp'])]

        # process tags: a 'confidence:N' tag overrides the default
        # confidence; all other 'key:value' directive tags are stripped
        tags = i['tags']
        for t in list(tags):
            # confidence tag check
            if 'confidence:' in t:
                tags.remove(t)
                (k, v) = t.split(':')
                confidence = int(v)
            # remove other directive tags
            elif ':' in t:
                tags.remove(t)

        # set to default confidence if not defined
        # NOTE(review): confidence is never reset inside this loop, so a
        # confidence tag on one indicator carries over to later indicators
        # in the same alert — confirm whether that is intended.
        if not confidence:
            confidence = self.d_confidence

        # convert lasttime (epoch milliseconds) to ISO-8601 UTC
        lasttime = datetime.utcfromtimestamp(i['lasttime']/1000).strftime('%Y-%m-%dT%H:%M:%S.%fZ')

        # build indicator
        ii = {
            'indicator': i['indicator'],
            'confidence': confidence,
            'description': i['desc'],
            'tags': tags,
            'tlp': tlp,
            'group': self.group,
            'lasttime': lasttime
        }

        # create indicator object
        try:
            ii = Indicator(**ii)
        except InvalidIndicator as e:
            self.error("Invalid CIF indicator {}".format(e))
        except Exception as e:
            self.error("CIF indicator error: {}".format(e))

        # submit indicator
        cli = Client(token=self.token, remote=self.remote,
                     verify_ssl=self.verify_ssl)
        try:
            r = cli.indicators_create(ii)
        except Exception as e:
            self.error("CIF submission error: {}".format(e))

    self.report({'message': '{} indicator(s) submitted to CIFv3'.format(len(indicators))})
def test_basics():
    """Smoke test: a Client and an Actor can both be instantiated."""
    instance = Client()
    Actor()
def test_basics():
    """Smoke test: a default Client can be constructed."""
    Client()
def main():
    """Build /opt/hpfeeds-cif.cfg from environment variables, register an
    hpfeeds subscriber in MongoDB, and verify CIF credentials before
    writing the config. Exits 1 on CIF auth failure, 0 on success."""
    logging.info("Running build_config.py")

    # infrastructure endpoints and hpfeeds credentials from the environment
    MONGODB_HOST = os.environ.get("MONGODB_HOST", "mongodb")
    MONGODB_PORT = os.environ.get("MONGODB_PORT", "27017")
    HPFEEDS_HOST = os.environ.get("HPFEEDS_HOST", "hpfeeds3")
    HPFEEDS_PORT = os.environ.get("HPFEEDS_PORT", "10000")
    IDENT = os.environ.get("IDENT", "")
    SECRET = os.environ.get("SECRET", "")
    CHANNELS = os.environ.get(
        "CHANNELS",
        "amun.events,conpot.events,thug.events,beeswarm.hive,dionaea.capture,dionaea.connections,thug.files,beeswarm.feeder,cuckoo.analysis,kippo.sessions,cowrie.sessions,glastopf.events,glastopf.files,mwbinary.dionaea.sensorunique,snort.alerts,wordpot.events,p0f.events,suricata.events,shockpot.events,elastichoney.events,rdphoney.sessions,uhp.events,elasticpot.events,spylex.events,big-hp.events,ssh-auth-logger.events,honeydb-agent.events"
    )

    # CIFv3 settings
    CIF_HOST = os.environ.get("CIF_HOST", "")
    CIF_TOKEN = os.environ.get("CIF_TOKEN", "")
    CIF_PROVIDER = os.environ.get("CIF_PROVIDER", "")
    CIF_TLP = os.environ.get("CIF_TLP", "")
    CIF_CONFIDENCE = os.environ.get("CIF_CONFIDENCE", "")
    CIF_TAGS = os.environ.get("CIF_TAGS", "")
    CIF_GROUP = os.environ.get("CIF_GROUP", "")
    CIF_VERIFY_SSL = os.environ.get("CIF_VERIFY_SSL", "")
    INCLUDE_HP_TAGS = os.environ.get("INCLUDE_HP_TAGS", "false")
    IGNORE_CIDR = os.environ.get("IGNORE_CIDR", "false")
    CIF_CACHE_DB = os.environ.get("CIF_CACHE_DB", "2")
    CIF_CACHE_EXPIRE = os.environ.get("CIF_CACHE_EXPIRE", "300")

    # generate hpfeeds credentials when none are supplied
    if IDENT:
        ident = IDENT
    else:
        ident = "hpfeeds-cif-" + str(random.randint(0, 32767))

    if SECRET:
        secret = SECRET
    else:
        secret = str(uuid.uuid4()).replace("-", "")

    # fill the config template with the resolved values
    config = configparser.ConfigParser()
    config.read("/opt/hpfeeds-cif.cfg.template")
    config['hpfeeds']['ident'] = ident
    config['hpfeeds']['secret'] = secret
    config['hpfeeds']['hp_host'] = HPFEEDS_HOST
    config['hpfeeds']['hp_port'] = HPFEEDS_PORT
    config['hpfeeds']['channels'] = CHANNELS
    config['hpfeeds']['include_hp_tags'] = INCLUDE_HP_TAGS
    config['hpfeeds']['ignore_cidr'] = IGNORE_CIDR
    config['cifv3']['cif_host'] = CIF_HOST
    config['cifv3']['cif_token'] = CIF_TOKEN
    config['cifv3']['cif_provider'] = CIF_PROVIDER
    config['cifv3']['cif_tlp'] = CIF_TLP
    config['cifv3']['cif_confidence'] = CIF_CONFIDENCE
    config['cifv3']['cif_tags'] = CIF_TAGS
    config['cifv3']['cif_group'] = CIF_GROUP
    config['cifv3']['cif_verify_ssl'] = CIF_VERIFY_SSL
    config['cifv3']['cif_cache_db'] = CIF_CACHE_DB
    config['cifv3']['cif_cache_expire'] = CIF_CACHE_EXPIRE

    # register the hpfeeds subscriber account in MongoDB
    create_user(host=MONGODB_HOST, port=int(MONGODB_PORT), owner="chn",
                ident=ident, secret=secret, publish="", subscribe=CHANNELS)

    # verify the CIF token has write access before persisting the config
    # NOTE(review): verify_ssl is hard-coded to False here even though
    # CIF_VERIFY_SSL is read above — confirm this is intended.
    cli = Client(token=CIF_TOKEN, remote=CIF_HOST, verify_ssl=False)
    try:
        ret = cli.ping(write=True)
    except AuthError:
        logging.error("Authentication to %s failed." % CIF_HOST)
        sys.exit(1)

    print("Writing config...")
    with open("/opt/hpfeeds-cif.cfg", 'w') as config_file:
        config.write(config_file)

    sys.exit(0)
def main():
    """CLI entry point for `cif`: ping, search, submit, delete, or dump the
    graph. Piped stdin replaces the filter set with one indicator per line."""
    p = get_argument_parser()
    p = ArgumentParser(description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --ping
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p])
    p.add_argument('--token', help='specify api token', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]',
                   default=REMOTE_ADDR)
    p.add_argument('--no-verify-ssl', action='store_true')
    p.add_argument('-p', '--ping', action="store_true")
    p.add_argument('--ping-indef', action="store_true")
    p.add_argument("--submit", action="store_true", help="submit an indicator")
    p.add_argument('--delete', action='store_true')
    p.add_argument('-q', '--search', help="search")
    p.add_argument('--id')
    p.add_argument('--itype', help='filter by indicator type')
    p.add_argument('--reported_at', help='specify reported_at filter')
    p.add_argument('-n', '--nolog', help='do not log search',
                   action='store_true')
    p.add_argument('-f', '--format',
                   help='specify output format [default: %(default)s]"',
                   default=FORMAT, choices=FORMATS.keys())
    p.add_argument('--indicator')
    p.add_argument('--confidence', help="specify confidence level")
    p.add_argument('--probability')
    p.add_argument('--tags', nargs='+')
    p.add_argument('--provider')
    p.add_argument('--asn')
    p.add_argument('--cc')
    p.add_argument('--asn-desc')
    p.add_argument('--rdata')
    p.add_argument('--region')
    p.add_argument('--groups', help='specify groups filter (csv)')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today',
                   help='auto-sets reporttime to today, 00:00:00Z (UTC)',
                   action='store_true')
    p.add_argument('--limit', help='limit results [default %(default)s]',
                   default=SEARCH_LIMIT)
    p.add_argument('--columns',
                   help='specify output columns [default %(default)s]',
                   default=','.join(COLUMNS))
    p.add_argument('--no-feed', action='store_true')
    p.add_argument('--graph', help='dump the graph', action='store_true')
    p.add_argument('--profile', help='specify feed profile',
                   choices=PROFILES.keys())

    args = p.parse_args()
    setup_logging(args)

    # keep only the options the user actually set (truthy values)
    opts = vars(args)
    options = {}
    for k, v in opts.items():
        if v:
            options[k] = v

    verify_ssl = True
    if args.no_verify_ssl:
        verify_ssl = False
    if args.remote == 'https://localhost':
        # local dev remote: self-signed certs are expected
        verify_ssl = False

    cli = Client(args.remote, args.token, verify_ssl=verify_ssl)

    if args.ping or args.ping_indef:
        _ping(cli, args)

    filters = {
        'itype': options.get('itype'),
        'limit': options.get('limit'),
        'provider': options.get('provider'),
        'indicator': options.get('search') or options.get('indicator'),
        'nolog': options.get('nolog'),
        'tags': options.get('tags'),
        'confidence': options.get('confidence'),
        'asn': options.get('asn'),
        'asn_desc': options.get('asn_desc'),
        'cc': options.get('cc'),
        'region': options.get('region'),
        'rdata': options.get('rdata'),
        'reported_at': options.get('reported_at'),
        'groups': options.get('groups'),
        'hours': options.get('hours'),
        'days': options.get('days'),
        'today': options.get('today'),
        'no_feed': options.get('no_feed'),
        'probability': options.get('probability')
    }

    # the API expects 1/0 rather than True/False for boolean filters
    for k, v in filters.items():
        if v is True:
            filters[k] = 1
        if v is False:
            filters[k] = 0

    if options.get("submit"):
        _submit(cli, args, options)

    if options.get("delete"):
        _delete(cli, args, options, filters)

    if args.graph:
        _graph(cli, args, options, filters)

    if not sys.stdin.isatty():
        # piped input: one indicator per line replaces the filter set
        buffer = sys.stdin.read().rstrip("\n").split("\n")
        filters = [{'indicator': i} for i in buffer]

    _search(cli, args, options, filters)
def create(**kv):
    """Build an Indicator from keyword args and submit it via a default Client."""
    indicator = Indicator(**kv)
    cli = Client()
    return cli.indicators_create(indicator)
def search(filters=None):
    """Search CIF with the given filters.

    When *filters* is None or empty, defaults to ipv4 botnet indicators.

    FIX: the old signature used a mutable default argument (``filters={}``),
    which is shared across calls and risks cross-call state leakage; ``None``
    with a per-call default dict is the safe equivalent. Passing an empty
    dict still triggers the default, so callers are unaffected.
    """
    if not filters:
        filters = {'itype': 'ipv4', 'tags': 'botnet'}
    return Client().search(filters)