def main():
    """Entry point for the cif-hunter daemon.

    Parses CLI arguments, back-fills unset options from the config file,
    then runs a Hunter until the process is interrupted.
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif-hunter -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-hunter',
        parents=[p],
    )
    p.add_argument('--remote', help="cif-router hunter address [default %(default)s]", default=HUNTER_ADDR)
    p.add_argument('--router', help='cif-router front end address [default %(default)s]', default=ROUTER_ADDR)
    p.add_argument('--token', help='specify cif-hunter token [default %(default)s]', default=TOKEN)
    p.add_argument('--config', default=CONFIG_PATH)

    args = p.parse_args()
    setup_logging(args)

    # config-file values only fill in options the CLI left unset
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    logger = logging.getLogger(__name__)
    logger.info('loglevel is: {}'.format(logging.getLevelName(logger.getEffectiveLevel())))

    setup_signals(__name__)

    with Hunter(remote=options.get('remote'), router=args.router, token=options.get('token')) as h:
        try:
            logger.info('starting up...')
            h.start()
        except KeyboardInterrupt:
            # FIX: use the module logger (was the root `logging` module),
            # consistent with every other log call in this function
            logger.info("shutting down...")
            h.stop()
def main():
    """CLI entry point for the cif-hunter daemon.

    Builds the argument parser, merges in config-file values for any option
    not given on the command line, and runs a Hunter until interrupted.
    """
    parent = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif-hunter -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-hunter',
        parents=[parent],
    )
    p.add_argument('--remote', default=HUNTER_ADDR,
                   help="cif-router hunter address [default %(default)s]")
    p.add_argument('--router', default=ROUTER_ADDR,
                   help='cif-router front end address [default %(default)s]')
    p.add_argument('--token', default=TOKEN,
                   help='specify cif-hunter token [default %(default)s]')
    p.add_argument('--config', default=CONFIG_PATH)

    args = p.parse_args()
    setup_logging(args)

    file_config = read_config(args)
    options = vars(args)
    # anything the CLI left unset falls back to the config file
    for key, current in options.items():
        if current is None:
            options[key] = file_config.get(key)

    logger = logging.getLogger(__name__)
    level_name = logging.getLevelName(logger.getEffectiveLevel())
    logger.info('loglevel is: {}'.format(level_name))

    setup_signals(__name__)

    hunter = Hunter(remote=options.get('remote'), router=args.router,
                    token=options.get('token'))
    with hunter as h:
        try:
            logger.info('starting up...')
            h.start()
        except KeyboardInterrupt:
            logging.info("shutting down...")
            h.stop()
def main():
    """Entry point for `cif-tokens`: create, delete, update and search API
    tokens against a CIF remote.

    Raises:
        RuntimeError: when a required argument for the requested operation
            is missing (--token, --username, --groups, ...).
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif-tokens --name [email protected] --create --admin
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p])
    p.add_argument('--token', help='specify api token [default %(default)s]', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    # FIX: help text was missing its closing parenthesis
    p.add_argument('--create', help='create token (requires admin token)', action='store_true')
    p.add_argument('--delete', help='delete token (requires admin token)', action='store_true')
    p.add_argument('--delete-token', help='specify the token to delete')
    p.add_argument('--username', help='specify username')
    p.add_argument('--name', help='specify username')
    p.add_argument('--admin', action='store_true')
    p.add_argument('--expires', help='set a token expiration timestamp')
    p.add_argument('--read', help='set the token read flag', action='store_true')
    p.add_argument('--write', help='set the token write flag', action='store_true')
    p.add_argument('--revoked', help='set the token revoked flag', action='store_true')
    p.add_argument('--groups',
                   help='specify token groups (eg: everyone,group1,group2) [default %(default)s]',
                   default='everyone')
    p.add_argument('--no-everyone', help="do not create key in the 'everyone' group", action='store_true')
    p.add_argument('--acl', help='set the token itype acls (eg: ipv4,ipv6)', default='')
    p.add_argument('--columns',
                   help='specify columns to print when searching [default %(default)s]',
                   default=','.join(COLS))
    p.add_argument('--config-generate', help='generate configuration file')
    p.add_argument('--config', help='specify configuration file [default %(default)s]', default=CONFIG_PATH)
    p.add_argument('--no-verify-ssl', help='Turn OFF TLS verification', action='store_true')
    p.add_argument('--update', help='update a token')

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    o = {}
    if os.path.isfile(args.config):
        o = read_config(args)

    options = vars(args)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    # TLS verification can be disabled from either the config or the CLI
    verify_ssl = True
    if o.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    from cifsdk.client.http import HTTP as HTTPClient
    cli = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    # --name is an alias for --username
    if options.get('name'):
        options['username'] = options['name']

    rv = False
    if options.get('create'):
        if not options.get('username'):
            raise RuntimeError('missing --username')

        if not (options.get('read') or options.get('write')):
            logger.info('assuming --read token')
            options['read'] = True

        groups = set(options.get('groups').split(','))
        # every token belongs to 'everyone' unless explicitly opted out
        if not options.get('no_everyone'):
            if 'everyone' not in groups:
                groups.add('everyone')

        acl = options.get('acl').split(',')

        try:
            rv = cli.tokens_create({
                'username': options.get('username'),
                'admin': options.get('admin'),
                'expires': options.get('expires'),
                'read': options.get('read'),
                'revoked': options.get('revoked'),
                'write': options.get('write'),
                'groups': list(groups),
                'acl': acl
            })
        except AuthError as e:
            logger.error(e)
        except Exception as e:
            logger.error('token create failed: {}'.format(e))
        else:
            if options.get('config_generate'):
                data = {
                    'remote': options['remote'],
                    'token': str(rv['token']),
                }
                with open(options['config_generate'], 'w') as f:
                    f.write(yaml.dump(data, default_flow_style=False))

            t = PrettyTable(args.columns.split(','))
            row = []
            for c in args.columns.split(','):
                # FIX: arrow format tokens -- 'mm' is minutes; the original
                # 'YYYY-MM-DDTHH:MM:ss' rendered the MONTH in the minute slot
                # (the search branch below already used 'mm' correctly)
                if c == 'last_activity_at' and rv.get(c):
                    rv[c] = arrow.get(rv[c]).format('YYYY-MM-DDTHH:mm:ss')
                    rv[c] = '{}Z'.format(rv[c])
                if c == 'expires' and rv.get(c):
                    rv[c] = arrow.get(rv[c]).format('YYYY-MM-DDTHH:mm:ss')
                    rv[c] = '{}Z'.format(rv[c])
                if rv.get(c):
                    if isinstance(rv[c], list):
                        row.append(','.join(rv[c]))
                    else:
                        row.append(str(rv[c]))
                else:
                    row.append(None)
            t.add_row(row)
            print(t)

    elif options.get('delete_token'):
        try:
            rv = cli.tokens_delete({
                'token': options.get('delete_token'),
                'username': options.get('username')
            })
            if rv:
                logger.info('deleted: {} tokens successfully'.format(rv))
            else:
                logger.error('no tokens deleted')
        except Exception as e:
            logger.error('token delete failed: %s' % e)

    elif options.get('delete'):
        if not (options.get('delete_token') or options.get('username')):
            raise RuntimeError('--delete requires --delete-token or --username')
        try:
            rv = cli.tokens_delete({
                'token': options.get('delete_token'),
                'username': options.get('username')
            })
            if rv:
                logger.info('deleted: {} tokens successfully'.format(rv))
            else:
                logger.error('no tokens deleted')
        except Exception as e:
            logger.error('token delete failed: %s' % e)

    elif options.get('update'):
        if not options.get('groups'):
            raise RuntimeError('requires --groups')

        groups = options['groups'].split(',')
        rv = cli.tokens_edit({'token': options['update'], 'groups': groups})
        if rv:
            logger.info('token updated successfully')
            # re-fetch and display the updated token
            rv = cli.tokens_search({'token': options['update']})
            t = PrettyTable(args.columns.split(','))
            for r in rv:
                row = []
                for c in args.columns.split(','):
                    # FIX: same MM -> mm minute-token correction as above
                    if c == 'last_activity_at' and r[c] is not None:
                        r[c] = arrow.get(r[c]).format('YYYY-MM-DDTHH:mm:ss')
                        r[c] = '{}Z'.format(r[c])
                    if c == 'expires' and r[c] is not None:
                        r[c] = arrow.get(r[c]).format('YYYY-MM-DDTHH:mm:ss')
                        r[c] = '{}Z'.format(r[c])
                    row.append(r[c])
                t.add_row(row)
            print(t)
        else:
            logger.error(rv)

    else:
        # default operation: search tokens, optionally filtered by username
        filters = {}
        if options.get('username'):
            filters['username'] = options.get('username')
        try:
            rv = cli.tokens_search(filters)
        except AuthError:
            logger.error('unauthorized')
        except Exception as e:
            import traceback
            traceback.print_exc()
            print("\ntoken search failed: %s" % e)
        else:
            t = PrettyTable(args.columns.split(','))
            for r in rv:
                row = []
                for c in args.columns.split(','):
                    if c == 'last_activity_at' and r.get(c) is not None:
                        r[c] = parse_timestamp(r[c]).format('YYYY-MM-DDTHH:mm:ss.SS')
                        r[c] = '{}Z'.format(r[c])
                    if c == 'expires' and r.get(c) is not None:
                        r[c] = parse_timestamp(r[c]).format('YYYY-MM-DDTHH:mm:ss.SS')
                        r[c] = '{}Z'.format(r[c])
                    if isinstance(r.get(c), list):
                        r[c] = ','.join(r[c])
                    row.append(r.get(c))
                t.add_row(row)
            print(t)
def main():
    """CLI entry point for cif-router: parse arguments, merge config,
    then run a Router until shutdown."""
    base = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CIF_RUNTIME_PATH
            CIF_ROUTER_CONFIG_PATH
            CIF_ROUTER_ADDR
            CIF_HUNTER_ADDR
            CIF_HUNTER_TOKEN
            CIF_HUNTER_THREADS
            CIF_GATHERER_THREADS
            CIF_STORE_ADDR

        example usage:
            $ cif-router --listen 0.0.0.0 -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-router',
        parents=[base])

    p.add_argument('--config', default=CONFIG_PATH,
                   help='specify config path [default: %(default)s')
    p.add_argument('--listen', default=ROUTER_ADDR,
                   help='address to listen on [default: %(default)s]')
    p.add_argument('--gatherer-threads', default=GATHERER_THREADS,
                   help='specify number of gatherer threads to use [default: %(default)s]')
    p.add_argument('--hunter', default=HUNTER_ADDR,
                   help='address hunters listen on on [default: %(default)s]')
    p.add_argument('--hunter-token', default=HUNTER_TOKEN,
                   help='specify token for hunters to use [default: %(default)s]')
    p.add_argument('--hunter-threads', default=HUNTER_THREADS,
                   help='specify number of hunter threads to use [default: %(default)s]')
    p.add_argument("--store-address", default=STORE_ADDR,
                   help="specify the store address cif-router is listening on[default: %("
                        "default)s]")
    p.add_argument("--store", default=STORE_DEFAULT,
                   help="specify a store type {} [default: %(default)s]".format(', '.join(STORE_PLUGINS)))
    p.add_argument('--store-nodes', default=STORE_NODES,
                   help='specify storage nodes address [default: %(default)s]')
    p.add_argument('--p2p', action='store_true', help='enable experimental p2p support')
    p.add_argument('--logging-ignore', help='set logging to WARNING for specific modules')

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)
    logger.info('loglevel is: {}'.format(logging.getLevelName(logger.getEffectiveLevel())))

    # quiet down any modules named via --logging-ignore (csv of module names)
    if args.logging_ignore:
        for noisy in args.logging_ignore.split(','):
            logging.getLogger(noisy).setLevel(logging.WARNING)

    file_config = read_config(args)
    options = vars(args)
    # config-file values only fill options the CLI left unset
    for key, value in options.items():
        if value is None:
            options[key] = file_config.get(key)

    setup_signals(__name__)
    setup_runtime_path(args.runtime_path)

    router = Router(listen=args.listen, hunter=args.hunter, store_type=args.store,
                    store_address=args.store_address, store_nodes=args.store_nodes,
                    p2p=args.p2p, hunter_token=args.hunter_token,
                    hunter_threads=args.hunter_threads,
                    gatherer_threads=args.gatherer_threads)
    with router as r:
        try:
            logger.info('starting router..')
            r.start()
        except KeyboardInterrupt:
            # todo - signal to threads to shut down and wait for them to finish
            logger.info('shutting down...')

    logger.info('Shutting down')
def main():
    # Entry point for the `cif` v2 CLI. Exactly one operation runs per
    # invocation: search/feed (when any filter argument is given), --ping,
    # --submit (JSON read from STDIN), or help + exit.
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Example usage:
            $ cif -q 130.201.0.2
            $ cif -q 130.201.0.0/16
            $ cif -q 2001:4860:4860::8888
            $ cif -q example.com
            $ cif -q 'http://www.example.com'
            $ cif -q '*****@*****.**'
            $ cif -q bf9d457bcd702fe836201df1b48c0bec
            $ cif --tags botnet,zeus -c 85
            $ cif --application vnc,ssh --asns 1234 --cc RU,US
            $ cif -q example.com --tags botnet,zeus -c 85 --limit 50
            $ cif --otype ipv4 --aggregate observable --today
            $ cif --feed --otype ipv4 -c 85 -f csv
            $ cif --feed --otype fqdn -c 95 --tags botnet -f csv
            $ cif --feed --otype url -c 75 --today -f csv
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif'
    )

    # options
    p.add_argument("-v", "--verbose", dest="verbose", action="store_true", help="logging level: INFO")
    p.add_argument('-d', '--debug', dest='debug', action="store_true", help="logging level: DEBUG")
    p.add_argument('-V', '--version', action='version', version=VERSION)
    p.add_argument('--no-verify-ssl', action="store_true", default=False)
    p.add_argument('-R', '--remote', help="remote api location", default=REMOTE_ADDR)
    # NOTE(review): help text below is missing its closing ']' -- cosmetic only
    p.add_argument('-T', '--token', help="specify token [default %(default)s", default=TOKEN)
    p.add_argument('--timeout', help='connection timeout [default: %(default)s]', default="300")
    p.add_argument('-C', '--config', help="configuration file [default: %(default)s]",
                   default=os.path.expanduser("~/.cif.yml"))
    p.add_argument('--sortby', help='sort output [default: %(default)s]', default='lasttime')
    p.add_argument('--sortby-direction', help='sortby direction [default: %(default)s]', default='ASC')
    p.add_argument('-f', '--format', help="specify output format [default: %(default)s]", default="table",
                   choices=FORMATS.keys())

    # actions
    p.add_argument('-p', '--ping', action="store_true", help="ping")
    p.add_argument('-s', '--submit', action="store_true", help="submit a JSON object")

    # flags
    p.add_argument('-l', '--limit', help="result limit", default=LIMIT)
    p.add_argument('-n', '--nolog', help='do not log the search', default=None, action="store_true")

    # filters
    p.add_argument('-q', "--query", help="specify a search")
    p.add_argument('--firsttime',
                   help='specify filter based on firsttime timestmap (greater than, '
                        'format: YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--lasttime',
                   help='specify filter based on lasttime timestamp (less than, format: '
                        'YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--reporttime',
                   help='specify filter based on reporttime timestmap (greater than, format: '
                        'YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--reporttimeend',
                   help='specify filter based on reporttime timestmap (less than, format: '
                        'YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument("--tags", help="filter for tags")
    p.add_argument('--description', help='filter on description')
    p.add_argument('--otype', help='filter by otype')
    p.add_argument("--cc", help="filter for countrycode")
    p.add_argument('-c', '--confidence', help="specify confidence")
    p.add_argument('--rdata', help='filter by rdata')
    p.add_argument('--provider', help='filter by provider')
    p.add_argument('--asn', help='filter by asn')
    #p.add_argument('--tlp', help='filter by tlp')
    p.add_argument('--proxy', help="specify a proxy to use [default %(default)s]", default=PROXY)
    p.add_argument('--feed', action="store_true",
                   help="generate a feed of data, meaning deduplicated and whitelisted")
    p.add_argument('--whitelist-limit',
                   help="specify how many whitelist results to use when applying to --feeds "
                        "[default %(default)s]", default=WHITELIST_LIMIT)
    p.add_argument('--whitelist-confidence',
                   help='by confidence (greater-than or equal to) [default: %(default)s]',
                   default=WHITELIST_CONFIDENCE)
    p.add_argument('--last-day', action="store_true",
                   help='auto-sets reporttime to 23 hours and 59 seconds ago '
                        '(current time UTC) and reporttime-end to "now"')
    p.add_argument('--last-hour', action='store_true',
                   help='auto-sets reporttime to the beginning of the previous full'
                        ' hour and reporttime-end to end of previous full hour')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', help='auto-sets reporttime to today, 00:00:00Z (UTC)', action='store_true')
    p.add_argument('--aggregate', help="aggregate around a specific field (ie: observable)")
    p.add_argument('--fields', help="specify field list to display [default: %(default)s]",
                   default=','.join(FIELDS))
    p.add_argument('--filename', help='specify output filename [default: STDOUT]')
    p.add_argument('--ttl', help='specify number of pings to send [default: %(default)s]', default=PINGS)
    p.add_argument('--group', help='filter by group(s) (everyone,group1,group2,...)')
    p.add_argument('--application', help='filter based on application field')
    p.add_argument('--id', help='specify an id to retrieve')

    # Process arguments
    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    # read in the config
    config_opts = read_config(args)
    cmd_options = vars(args)

    # check the config against the arguments: config values fill in options
    # the CLI left unset (None); 'remote' additionally falls back to the
    # config whenever it still holds the built-in default
    for v in cmd_options:
        if cmd_options[v] is None:
            cmd_options[v] = config_opts.get(v)
        if v == 'remote':
            if cmd_options[v] != REMOTE_ADDR_DEFAULT:
                continue
            else:
                cmd_options[v] = config_opts.get('remote', REMOTE_ADDR_DEFAULT)

    options = cmd_options

    if not options.get('token'):
        raise RuntimeError('missing --token')

    # TLS verification can be disabled from either the config or the CLI
    verify_ssl = True
    if config_opts.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    cli = Client(options['token'], remote=options['remote'], proxy=options.get('proxy'),
                 verify_ssl=verify_ssl)

    # any filter-ish argument triggers the search/feed path
    if(options.get('query') or options.get('tags') or options.get('cc') or options.get('rdata')
            or options.get('otype') or options.get('provider') or options.get('asn')
            or options.get('description')):
        # build the filter set from whichever CLI filters were given
        filters = {}
        if options.get('query'):
            filters['observable'] = options['query']
        if options.get('cc'):
            filters['cc'] = options['cc']
        if options.get('tags'):
            filters['tags'] = options['tags']
        if options.get('description'):
            filters['description'] = options['description']
        if options.get('confidence'):
            filters['confidence'] = options['confidence']
        else:
            # feeds get a default minimum confidence when none was specified
            if options.get('feed'):
                filters['confidence'] = FEED_CONFIDENCE
        if options.get('firsttime'):
            filters['firsttime'] = options['firsttime']
        if options.get('lasttime'):
            filters['lasttime'] = options['lasttime']
        if options.get('reporttime'):
            filters['reporttime'] = options['reporttime']
        if options.get('reporttimeend'):
            filters['reporttimeend'] = options['reporttimeend']
        if options.get('otype'):
            filters['otype'] = options['otype']
        if options.get('rdata'):
            filters['rdata'] = options['rdata']
        if options.get('nolog'):
            # normalize the store_true flag to the integer the API expects
            options['nolog'] = 1
        if options.get('provider'):
            filters['provider'] = options['provider']
        if options.get('asn'):
            filters['asn'] = options['asn']
        #if options.get('tlp'):
        #    filters['tlp'] = options['tlp']
        if options.get('group'):
            filters['group'] = options['group']
        if options.get('application'):
            filters['application'] = options['application']
        if options.get('id'):
            filters['id'] = options['id']

        # needs to be MEG'd out.
        # relative time-window flags translate to absolute reporttime bounds
        if options.get('last_day'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.replace(days=-1)
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        elif options.get('last_hour'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.replace(hours=-1)
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        elif options.get('today'):
            now = arrow.utcnow()
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDT00:00:00'))

        if options.get('days'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.replace(days=-int(options['days']))
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))

        # default feed look-back window, in days
        DAYS = 30
        if options.get('feed'):
            if not options.get('otype'):
                logger.error('--otype [ipv4|ipv6|fqdn|url|..] flag required when using --feed')
                raise SystemExit
            # feeds use a larger result limit unless the user overrode -l
            if options['limit'] == LIMIT:
                options['limit'] = FEED_LIMIT
            if not options.get('days'):
                now = arrow.utcnow()
                filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
                now = now.replace(days=-DAYS)
                filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))

        ret = cli.search(limit=options['limit'], nolog=options['nolog'], filters=filters,
                         sort=options['sortby'], sort_direction=options['sortby_direction'])
        number_returned = len(ret)
        logger.info('returned: {0} records'.format(number_returned))

        if options.get('aggregate'):
            ret = cli.aggregate(ret, field=options['aggregate'])

        if options.get('feed'):
            # pull recent whitelist entries, then run the results through the
            # otype-specific feed processor (dedup + whitelist removal)
            wl_filters = copy.deepcopy(filters)
            wl_filters['tags'] = 'whitelist'
            wl_filters['confidence'] = args.whitelist_confidence
            now = arrow.utcnow()
            now = now.replace(days=-DAYS)
            wl_filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            wl = cli.search(limit=options['whitelist_limit'], nolog=True, filters=wl_filters)
            f = feed_factory(options['otype'])
            ret = cli.aggregate(ret)
            if len(ret) != number_returned:
                logger.info('aggregation removed: {0} records'.format(number_returned - len(ret)))
            ret = f().process(ret, wl)

        f = format_factory(options['format'])
        if f is None:
            raise SystemError('{0} format not supported, maybe missing a dependency.'.format(options['format']))
        try:
            if len(ret) >= 1:
                ret = f(ret, cols=options['fields'].split(','))
                if args.filename:
                    with open(args.filename, 'w') as F:
                        F.write(str(ret))
                else:
                    # restore default SIGPIPE handling so piping into e.g.
                    # `head` exits quietly instead of raising BrokenPipeError
                    signal(SIGPIPE, SIG_DFL)
                    print(ret)
            else:
                logger.info("no results found...")
        except AttributeError as e:
            logger.exception(e)

    elif options.get('ping'):
        # NOTE(review): assumes args.ttl (default PINGS) is an int -- confirm
        for num in range(0, args.ttl):
            ret = cli.ping()
            print("roundtrip: %s ms" % ret)
            # ~1s pause between pings (select-as-sleep; with empty fd sets
            # this is presumably Unix-only -- verify on other platforms)
            select.select([], [], [], 1)

    elif options.get('submit'):
        # the indicator JSON object is read from STDIN
        if not sys.stdin.isatty():
            stdin = sys.stdin.read()
        else:
            logger.error("No data passed via STDIN")
            raise SystemExit
        try:
            data = json.loads(stdin)
            try:
                ret = cli.submit(data)
                print('submitted: {0}'.format(ret))
            except Exception as e:
                logger.error(e)
                raise SystemExit
        except Exception as e:
            logger.error(e)
            raise SystemExit
    else:
        logger.warning('operation not supported')
        p.print_help()
        raise SystemExit
def main():
    """Entry point for `cif-router` (pidfile variant).

    Parses arguments, merges config, writes a pidfile as a single-instance
    guard, and runs the Router until shutdown; the pidfile is removed on the
    way out.
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CIF_RUNTIME_PATH
            CIF_ROUTER_CONFIG_PATH
            CIF_ROUTER_ADDR
            CIF_HUNTER_ADDR
            CIF_HUNTER_TOKEN
            CIF_HUNTER_THREADS
            CIF_GATHERER_THREADS
            CIF_STORE_ADDR

        example usage:
            $ cif-router --listen 0.0.0.0 -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-router',
        parents=[p])
    # FIX: closing bracket was missing from the help text
    p.add_argument('--config', help='specify config path [default: %(default)s]', default=CONFIG_PATH)
    p.add_argument('--listen', help='address to listen on [default: %(default)s]', default=ROUTER_ADDR)
    p.add_argument('--gatherer-threads',
                   help='specify number of gatherer threads to use [default: %(default)s]',
                   default=GATHERER_THREADS)
    p.add_argument('--hunter', help='address hunters listen on on [default: %(default)s]',
                   default=HUNTER_ADDR)
    p.add_argument('--hunter-token',
                   help='specify token for hunters to use [default: %(default)s]',
                   default=HUNTER_TOKEN)
    p.add_argument('--hunter-threads',
                   help='specify number of hunter threads to use [default: %(default)s]',
                   default=HUNTER_THREADS)
    p.add_argument("--store-address",
                   help="specify the store address cif-router is listening on[default: %("
                        "default)s]", default=STORE_ADDR)
    p.add_argument("--store",
                   help="specify a store type {} [default: %(default)s]".format(', '.join(STORE_PLUGINS)),
                   default=STORE_DEFAULT)
    p.add_argument('--store-nodes', help='specify storage nodes address [default: %(default)s]',
                   default=STORE_NODES)
    p.add_argument('--logging-ignore', help='set logging to WARNING for specific modules')
    p.add_argument('--pidfile', help='specify pidfile location [default: %(default)s]', default=PIDFILE)

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)
    logger.info('loglevel is: {}'.format(logging.getLevelName(logger.getEffectiveLevel())))

    # quiet down any modules named via --logging-ignore (csv of module names)
    if args.logging_ignore:
        for mod in args.logging_ignore.split(','):
            logging.getLogger(mod).setLevel(logging.WARNING)

    # config-file values only fill options the CLI left unset
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    setup_runtime_path(args.runtime_path)
    setup_signals(__name__)

    # single-instance guard via pidfile
    # http://stackoverflow.com/a/789383/7205341
    pid = str(os.getpid())
    logger.debug("pid: %s" % pid)

    if os.path.isfile(args.pidfile):
        logger.critical("%s already exists, exiting" % args.pidfile)
        raise SystemExit

    try:
        # FIX: write through a context manager so the handle is closed even
        # if the write raises (was open/write/close)
        with open(args.pidfile, 'w') as pidfile:
            pidfile.write(pid)
    except PermissionError as e:
        # best-effort: keep running without a pidfile, but say why
        logger.error('unable to create pid %s: %s' % (args.pidfile, e))

    with Router(listen=args.listen, hunter=args.hunter, store_type=args.store,
                store_address=args.store_address, store_nodes=args.store_nodes,
                hunter_token=args.hunter_token, hunter_threads=args.hunter_threads,
                gatherer_threads=args.gatherer_threads) as r:
        try:
            logger.info('starting router..')
            r.start()
        except KeyboardInterrupt:
            # todo - signal to threads to shut down and wait for them to finish
            logger.info('shutting down via SIGINT...')
        except SystemExit:
            logger.info('shutting down via SystemExit...')
        except Exception as e:
            logger.critical(e)
            traceback.print_exc()
        r.stop()

    logger.info('Shutting down')
    # remove the pidfile on the way out
    if os.path.isfile(args.pidfile):
        os.unlink(args.pidfile)
def main():
    """Entry point for the `cif` v3 client: ping, submit, delete, feed and
    search operations against a CIF remote.

    Raises:
        RuntimeError: when no API token is available, or when a ping fails.
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --ping
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p])
    p.add_argument('--token', help='specify api token', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    p.add_argument('-p', '--ping', action="store_true")  # meg?
    p.add_argument('--ping-indef', action="store_true")
    p.add_argument('-q', '--search', help="search")
    p.add_argument('--itype', help='filter by indicator type')
    # need to fix sqlite for non-ascii stuff first
    p.add_argument("--submit", action="store_true", help="submit an indicator")
    p.add_argument('--limit', help='limit results [default %(default)s]', default=SEARCH_LIMIT)
    p.add_argument('--reporttime', help='specify reporttime filter')
    p.add_argument('-n', '--nolog', help='do not log search', action='store_true')
    p.add_argument('-f', '--format', help='specify output format [default: %(default)s]"',
                   default=FORMAT, choices=FORMATS.keys())
    p.add_argument('--indicator')
    p.add_argument('--tags', nargs='+')
    p.add_argument('--provider')
    p.add_argument('--confidence', help="specify confidence level")
    p.add_argument('--tlp', help="specify traffic light protocol")
    p.add_argument("--zmq", help="use zmq as a transport instead of http", action="store_true")
    p.add_argument('--config', help='specify config file [default %(default)s]', default=CONFIG_PATH)
    p.add_argument('--feed', action='store_true')
    p.add_argument('--no-verify-ssl', action='store_true')
    p.add_argument('--last-day', action="store_true",
                   help='auto-sets reporttime to 23 hours and 59 seconds ago '
                        '(current time UTC) and reporttime-end to "now"')
    p.add_argument('--last-hour', action='store_true',
                   help='auto-sets reporttime to the beginning of the previous full'
                        ' hour and reporttime-end to end of previous full hour')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', help='auto-sets reporttime to today, 00:00:00Z (UTC)',
                   action='store_true')
    p.add_argument('--columns', help='specify output columns [default %(default)s]',
                   default=','.join(COLUMNS))
    p.add_argument('--asn')
    p.add_argument('--cc')
    p.add_argument('--asn-desc')
    p.add_argument('--rdata')
    p.add_argument('--no-feed', action='store_true')
    p.add_argument('--region')
    p.add_argument('--groups', help='specify groups filter (csv)')
    p.add_argument('--delete', action='store_true')
    p.add_argument('--id')

    args = p.parse_args()

    setup_logging(args)
    logger = logging.getLogger(__name__)

    # merge config into options: the config may override the built-in
    # defaults for remote/token; everything else only fills unset values
    o = read_config(args)
    options = vars(args)
    for v in options:
        if v == 'remote' and options[v] == REMOTE_ADDR and o.get('remote'):
            options[v] = o['remote']
        if v == 'token' and o.get('token'):
            options[v] = o['token']
        if options[v] is None or options[v] == '':
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = True
    if o.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    if options.get("zmq"):
        from cifsdk.client.zeromq import ZMQ as ZMQClient
        cli = ZMQClient(**options)
    else:
        from cifsdk.client.http import HTTP as HTTPClient
        if args.remote == 'https://localhost':
            verify_ssl = False
        cli = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    if options.get('ping') or options.get('ping_indef'):
        logger.info('running ping')
        # FIX: hoisted out of the loop; this import used to re-execute on
        # every iteration
        from time import sleep
        n = 4
        if args.ping_indef:
            n = 999
        try:
            for num in range(0, n):
                ret = cli.ping()
                if ret != 0:
                    print("roundtrip: {} ms".format(ret))
                    select.select([], [], [], 1)
                    sleep(1)
                else:
                    logger.error('ping failed')
                    raise RuntimeError
        except KeyboardInterrupt:
            pass
        raise SystemExit

    if options.get("submit"):
        print("submitting {0}".format(options.get("submit")))
        i = Indicator(indicator=args.indicator, tags=args.tags, confidence=args.confidence,
                      group=args.groups, tlp=args.tlp, provider=args.provider)
        rv = cli.indicators_create(i)
        print('success id: {}\n'.format(rv))
        raise SystemExit

    filters = {
        'itype': options['itype'],
        'limit': options['limit'],
        'provider': options.get('provider'),
        'indicator': options.get('search') or options.get('indicator'),
        'nolog': options['nolog'],
        'tags': options['tags'],
        'confidence': options.get('confidence'),
        'asn': options.get('asn'),
        'asn_desc': options.get('asn_desc'),
        'cc': options.get('cc'),
        'region': options.get('region'),
        'rdata': options.get('rdata'),
        'reporttime': options.get('reporttime'),
        'groups': options.get('groups'),
        'tlp': options.get('tlp')
    }

    # relative time-window flags replace any explicit reporttime filter
    if args.last_day:
        filters['days'] = '1'
        del filters['reporttime']

    if args.last_hour:
        filters['hours'] = '1'
        del filters['reporttime']

    if args.days:
        filters['days'] = args.days
        del filters['reporttime']

    if args.today:
        now = arrow.utcnow()
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDT00:00:00'))

    # FIX: the guard previously tested filters.get('search'), a key that is
    # never set (the search term lives under 'indicator'), so --feed was
    # auto-enabled even for direct indicator lookups
    if filters.get('itype') and not filters.get('indicator') and not args.no_feed:
        logger.info('setting feed flag by default, use --no-feed to override')
        options['feed'] = True

    if options.get("delete"):
        if args.id:
            filters = {'id': args.id}
        # drop empty filters before sending the delete
        filters = {f: filters[f] for f in filters if filters.get(f)}
        print("deleting {0}".format(filters))
        rv = cli.indicators_delete(filters)
        print('deleted: {}'.format(rv))
        raise SystemExit

    if options.get('feed'):
        if not filters.get('itype') and not ADVANCED:
            print('\nmissing --itype\n\n')
            raise SystemExit
        if not filters.get('tags') and not ADVANCED:
            print('\nmissing --tags [phishing|malware|botnet|scanner|pdns|whitelist|...]\n\n')
            raise SystemExit
        if not filters.get('confidence'):
            filters['confidence'] = 8
        if args.limit == SEARCH_LIMIT:
            filters['limit'] = FEED_LIMIT
        try:
            rv = cli.feed(filters=filters)
        except AuthError:
            logger.error('unauthorized')
        except KeyboardInterrupt:
            pass
        except Exception as e:
            logger.error(e)
        else:
            print(FORMATS[options.get('format')](data=rv, cols=args.columns.split(',')))
        raise SystemExit

    try:
        rv = cli.search(filters)
    except AuthError:
        logger.error('unauthorized')
    except KeyboardInterrupt:
        pass
    except Exception as e:
        import traceback
        traceback.print_exc()
        logger.error(e)
    else:
        print(FORMATS[options.get('format')](data=rv, cols=args.columns.split(',')))
def main():
    """cgmail entry point: parse an email (file or stdin), extract URLs from
    its parts, score each against cached FQDN whitelist/blacklist feeds pulled
    from CIF, and submit the URLs back to CIF as observables.
    """
    # initialize module
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cat test.eml | cgmail
            $ cgmail --file test.eml
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cgmail'
    )
    p.add_argument("-v", "--verbose", dest="verbose", action="count",
                   help="set verbosity level [default: %(default)s]")
    p.add_argument('-d', '--debug', dest='debug', action="store_true")
    p.add_argument("-f", "--file", dest="file", help="specify email file")

    # cif arguments
    p.add_argument("--confidence", help="specify confidence for submitting to CIF", default=CONFIDENCE)
    p.add_argument("--remote", help="specify CIF remote")
    p.add_argument("--token", help="specify CIF token")
    p.add_argument("--config", help="specify CIF config [default: %(default)s",
                   default=os.path.expanduser("~/.cif.yml"))
    p.add_argument("--tags", help="specify CIF tags [default: %(default)s", default=["phishing"])
    p.add_argument("--group", help="specify CIF group [default: %(default)s", default="everyone")
    p.add_argument("--tlp", help="specify CIF TLP [default: %(default)s", default=TLP)
    p.add_argument("--no-verify-ssl", action="store_true", default=False)
    p.add_argument("--raw", action="store_true", help="include raw message data")
    p.add_argument("--provider", help="specify feed provider [default: %(default)s]", default=PROVIDER)
    p.add_argument('--exclude', help='url patterns to exclude [default: %(default)s', default=EXCLUDE)
    p.add_argument('--confidence-lower', help='patterns to automatically lower confidence',
                   default=CONFIDENCE_LOWER)
    p.add_argument('-n', '--not-really', help='do not submit', action='store_true')
    p.add_argument('--cache', help='location to cache whitelist [default: %(default)s',
                   default=WHITELIST_CACHE)
    p.add_argument('--blacklist-cache', default=BLACKLIST_CACHE)

    # Process arguments
    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    exclude = None
    if args.exclude:
        exclude = re.compile(args.exclude)

    # NOTE(review): compiled but never used below -- confirm intent before removing
    confidence_lower = None
    if args.confidence_lower:
        confidence_lower = re.compile(args.confidence_lower)

    # overlay config-file values onto options not set on the command line
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    if options.get("file"):
        with open(options["file"]) as f:
            email = f.read()
    else:
        email = sys.stdin.read()

    # extract urls from message body and mail parts
    bits = cgmail.parse_email_from_string(email)
    urls = set()
    for n in bits:
        if n.get('urls'):
            for u in n['urls']:
                urls.add(u)

    verify_ssl = True
    if options.get('no_verify_ssl'):
        verify_ssl = False

    # initialize cif client
    cli = Client(remote=options["remote"], token=options["token"], verify_ssl=verify_ssl)

    # refresh the whitelist cache at most once per day
    update_cache = True
    if os.path.isfile(args.cache):
        modified = os.path.getmtime(args.cache)
        # BUGFIX: was 84600 -- one day is 86400 seconds
        if arrow.utcnow() < arrow.get(modified + 86400):
            update_cache = False

    if update_cache:
        # pull FQDN whitelist (last 7 days)
        filters = {
            'tags': 'whitelist',
            'otype': 'fqdn',
            'confidence': 25,
        }
        now = arrow.utcnow()
        filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        # BUGFIX: arrow no longer accepts plural offsets in replace(); use
        # shift() as the sibling cgmail entry point already does
        now = now.shift(days=-7)
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        ret = cli.search(limit=50000, filters=filters, sort='reporttime', sort_direction='desc')
        with open(args.cache, 'w') as f:
            for r in ret:
                f.write("{0}\n".format(r['observable']))

    # refresh the blacklist cache at most once per day
    update_cache = True
    if os.path.isfile(args.blacklist_cache):
        modified = os.path.getmtime(args.blacklist_cache)
        # BUGFIX: was 84600 -- one day is 86400 seconds
        if arrow.utcnow() < arrow.get(modified + 86400):
            update_cache = False

    if update_cache:
        # pull high-confidence malicious FQDNs (last 7 days)
        filters = {
            'tags': 'phishing,suspicious,malware',
            'otype': 'fqdn',
            'confidence': 75,
        }
        now = arrow.utcnow()
        filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        now = now.shift(days=-7)
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        ret = cli.search(limit=50000, filters=filters, sort='reporttime', sort_direction='desc')
        with open(args.blacklist_cache, 'w') as f:
            for r in ret:
                f.write("{0}\n".format(r['observable']))

    fqdns = set()
    with open(args.cache) as f:
        for l in f:
            fqdns.add(l.rstrip("\n"))

    fqdns_blacklist = set()
    with open(args.blacklist_cache) as f:
        for l in f:
            fqdns_blacklist.add(l.rstrip("\n"))

    for u in urls:
        u = u.rstrip('\\/')  # trim trailing slashes / stray backslashes
        u = urlparse(u)
        fqdn = url_to_fqdn(u.geturl())
        if exclude and exclude.search(fqdn):
            continue

        # score the url: whitelisted hosts are lowered, unless they are URL
        # shorteners or hosting providers; blacklisted hosts raised further
        confidence = options['confidence']
        if match_whitelist(fqdns, u.netloc):
            if (u.netloc not in URL_SHORTNERS) and (not match_whitelist(HOSTING_PROVIDERS, u.netloc)):
                confidence = options['confidence'] - 15
            else:
                confidence = options['confidence'] + 5
        elif match_whitelist(fqdns_blacklist, u.netloc):
            confidence = options['confidence'] + 10
        else:
            confidence = options['confidence'] + 5

        logger.info("submitting: {0}".format(u.geturl()))
        o = Observable(
            observable=u.geturl(),
            confidence=confidence,
            tlp=options["tlp"],
            group=options["group"],
            tags=options["tags"],
            provider=options.get('provider')
        )
        o = o.__dict__
        del o['logger']
        if options.get('raw'):
            # BUGFIX: o is a plain dict here; attribute assignment (o.raw)
            # raised AttributeError
            o['raw'] = email

        if not args.not_really:
            r = cli.submit(o)
            logger.info("submitted: {0}".format(r))
def _token_table(rows, columns):
    """Render token records as a PrettyTable, normalizing timestamp columns
    to 'YYYY-MM-DDTHH:mm:ss.SSZ' and flattening list values to CSV."""
    t = PrettyTable(columns)
    for r in rows:
        l = []
        for c in columns:
            if c == 'last_activity_at' and r.get(c) is not None:
                r[c] = parse_timestamp(r[c]).format('YYYY-MM-DDTHH:mm:ss.SS')
                r[c] = '{}Z'.format(r[c])
            if c == 'expires' and r.get(c) is not None:
                r[c] = parse_timestamp(r[c]).format('YYYY-MM-DDTHH:mm:ss.SS')
                r[c] = '{}Z'.format(r[c])
            if type(r.get(c)) == list:
                r[c] = ','.join(r[c])
            l.append(r.get(c))
        t.add_row(l)
    return t


def _delete_tokens(cli, options, logger):
    """Issue a tokens_delete call for --delete-token/--username and log the
    outcome (shared by the --delete and bare --delete-token paths)."""
    try:
        rv = cli.tokens_delete({
            'token': options.get('delete_token'),
            'username': options.get('username')
        })
        if rv:
            logger.info('deleted: {} tokens successfully'.format(rv))
        else:
            logger.error('no tokens deleted')
    except Exception as e:
        logger.error('token delete failed: %s' % e)


def main():
    """cif-tokens entry point: create, delete, update and search API tokens
    against a cif-router instance."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif-tokens --name [email protected] --create --admin
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p]
    )
    p.add_argument('--token', help='specify api token [default %(default)s]', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    p.add_argument('--create', help='create token (requires admin token', action='store_true')
    p.add_argument('--delete', help='delete token (requires admin token)', action='store_true')
    p.add_argument('--delete-token', help='specify the token to delete')
    p.add_argument('--username', help='specify username')
    p.add_argument('--name', help='specify username')
    p.add_argument('--admin', action='store_true')
    p.add_argument('--expires', help='set a token expiration timestamp')
    p.add_argument('--read', help='set the token read flag', action='store_true')
    p.add_argument('--write', help='set the token write flag', action='store_true')
    p.add_argument('--revoked', help='set the token revoked flag', action='store_true')
    p.add_argument('--groups', help='specify token groups (eg: everyone,group1,group2) [default %(default)s]',
                   default='everyone')
    p.add_argument('--no-everyone', help="do not create key in the 'everyone' group", action='store_true')
    p.add_argument('--acl', help='set the token itype acls (eg: ipv4,ipv6)', default='')
    p.add_argument('--columns', help='specify columns to print when searching [default %(default)s]',
                   default=','.join(COLS))
    p.add_argument('--config-generate', help='generate configuration file')
    p.add_argument('--config', help='specify configuration file [default %(default)s]', default=CONFIG_PATH)
    p.add_argument('--no-verify-ssl', help='Turn OFF TLS verification', action='store_true')
    p.add_argument('--update', help='update a token')

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    # config-file remote/token take precedence over built-in defaults;
    # any other unset option falls back to the config file
    o = read_config(args)
    options = vars(args)
    for v in options:
        if v == 'remote' and options[v] == REMOTE_ADDR and o.get('remote'):
            options[v] = o['remote']
        if v == 'token' and o.get('token'):
            options[v] = o['token']
        if options[v] is None:
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = True
    if o.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    # NOTE: a redundant second `options = vars(args)` was removed here --
    # vars() returns the same underlying dict, so it was a no-op
    from cifsdk.client.http import HTTP as HTTPClient
    cli = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    # --name is an alias for --username
    if options.get('name'):
        options['username'] = options['name']

    rv = False
    if options.get('create'):
        if not options.get('username'):
            raise RuntimeError('missing --username')

        if not (options.get('read') or options.get('write')):
            logger.info('assuming --read token')
            options['read'] = True

        groups = set(options.get('groups').split(','))
        if not options.get('no_everyone'):
            if 'everyone' not in groups:
                groups.add('everyone')

        acl = options.get('acl').split(',')

        try:
            rv = cli.tokens_create({
                'username': options.get('username'),
                'admin': options.get('admin'),
                'expires': options.get('expires'),
                'read': options.get('read'),
                'revoked': options.get('revoked'),
                'write': options.get('write'),
                'groups': list(groups),
                'acl': acl
            })
        except AuthError as e:
            logger.error(e)
        except Exception as e:
            logger.error('token create failed: {}'.format(e))
        else:
            if options.get('config_generate'):
                data = {
                    'remote': options['remote'],
                    'token': str(rv['token']),
                }
                with open(options['config_generate'], 'w') as f:
                    f.write(yaml.dump(data, default_flow_style=False))

            t = PrettyTable(args.columns.split(','))
            l = []
            for c in args.columns.split(','):
                # BUGFIX: format was 'HH:MM:ss' -- in arrow 'MM' is the
                # month; minutes are 'mm' (matches the other branches below)
                if c == 'last_activity_at' and rv.get(c):
                    rv[c] = arrow.get(rv[c]).format('YYYY-MM-DDTHH:mm:ss')
                    rv[c] = '{}Z'.format(rv[c])
                if c == 'expires' and rv.get(c):
                    rv[c] = arrow.get(rv[c]).format('YYYY-MM-DDTHH:mm:ss')
                    rv[c] = '{}Z'.format(rv[c])
                if rv.get(c):
                    if type(rv[c]) == list:
                        l.append(','.join(rv[c]))
                    else:
                        l.append(str(rv[c]))
                else:
                    l.append(None)
            t.add_row(l)
            print(t)

    elif options.get('delete_token'):
        # bare --delete-token works without --delete (kept for compatibility)
        _delete_tokens(cli, options, logger)

    elif options.get('delete'):
        if not (options.get('delete_token') or options.get('username')):
            raise RuntimeError('--delete requires --delete-token or --username')
        _delete_tokens(cli, options, logger)

    elif options.get('update'):
        if not options.get('groups'):
            raise RuntimeError('requires --groups')

        groups = options['groups'].split(',')
        rv = cli.tokens_edit({
            'token': options['update'],
            'groups': groups
        })
        if rv:
            print('token updated successfully')
            print('refreshing tokens...')
            sleep(2)
            rv = cli.tokens_search({'token': options['update']})
            print(_token_table(rv, args.columns.split(',')))
        else:
            logger.error(rv)

    else:
        # default action: search tokens, optionally filtered by username
        filters = {}
        if options.get('username'):
            filters['username'] = options.get('username')
        try:
            rv = cli.tokens_search(filters)
        except AuthError:
            logger.error('unauthorized')
        except Exception as e:
            logger.error('token search failed: {}'.format(e))
        else:
            print(_token_table(rv, args.columns.split(',')))
def main():
    """cif CLI entry point: ping a remote, submit an indicator, pull a feed,
    or search — each mode exits via SystemExit when it completes."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --ping
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p]
    )
    p.add_argument('--token', help='specify api token', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    p.add_argument('-p', '--ping', action="store_true")  # meg?
    p.add_argument('--ping-indef', action="store_true")
    p.add_argument('-q', '--search', help="search")
    p.add_argument('--itype', help='filter by indicator type')  ## need to fix sqlite for non-ascii stuff first
    p.add_argument("--submit", action="store_true", help="submit an indicator")
    p.add_argument('--limit', help='limit results [default %(default)s]', default=SEARCH_LIMIT)
    p.add_argument('--reporttime', help='specify reporttime filter')
    p.add_argument('-n', '--nolog', help='do not log search', action='store_true')
    p.add_argument('-f', '--format', help='specify output format [default: %(default)s]"', default=FORMAT,
                   choices=FORMATS.keys())
    p.add_argument('--indicator')
    p.add_argument('--tags', nargs='+')
    p.add_argument('--provider')
    p.add_argument('--confidence', help="specify confidence level")
    p.add_argument('--tlp', help="specify traffic light protocol")
    p.add_argument("--zmq", help="use zmq as a transport instead of http", action="store_true")
    p.add_argument('--config', help='specify config file [default %(default)s]', default=CONFIG_PATH)
    p.add_argument('--feed', action='store_true')
    p.add_argument('--no-verify-ssl', action='store_true')
    p.add_argument('--last-day', action="store_true",
                   help='auto-sets reporttime to 23 hours and 59 seconds ago '
                        '(current time UTC) and reporttime-end to "now"')
    p.add_argument('--last-hour', action='store_true',
                   help='auto-sets reporttime to the beginning of the previous full'
                        ' hour and reporttime-end to end of previous full hour')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', help='auto-sets reporttime to today, 00:00:00Z (UTC)', action='store_true')
    p.add_argument('--columns', help='specify output columns [default %(default)s]', default=','.join(COLUMNS))
    p.add_argument('--fields', help='same as --columns [default %(default)s]', default=','.join(COLUMNS))
    p.add_argument('--asn')
    p.add_argument('--cc')
    p.add_argument('--asn-desc')
    p.add_argument('--rdata')
    p.add_argument('--no-feed', action='store_true')
    p.add_argument('--region')
    p.add_argument('--groups', help='specify groups filter (csv)')
    p.add_argument('--delete', action='store_true')
    p.add_argument('--id')

    args = p.parse_args()

    # --fields is an alias for --columns; a non-default value wins
    if args.fields != ','.join(COLUMNS):
        args.columns = args.fields

    setup_logging(args)
    logger = logging.getLogger(__name__)

    o = read_config(args)
    options = vars(args)

    # support for separate read and write tokens
    if o.get('write_token') and options.get('submit'):
        o['token'] = o['write_token']
    elif o.get('read_token'):
        o['token'] = o['read_token']

    # merge config-file values: config remote/token override built-in
    # defaults; any unset/empty option falls back to the config file
    for v in options:
        if v == 'remote' and options[v] == REMOTE_ADDR and o.get('remote'):
            options[v] = o['remote']
        if v == 'token' and o.get('token'):
            options[v] = o['token']
        if options[v] is None or options[v] == '':
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = True
    if o.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    if options.get("zmq"):
        from cifsdk.client.zeromq import ZMQ as ZMQClient
        cli = ZMQClient(**options)
    else:
        from cifsdk.client.http import HTTP as HTTPClient
        # local self-signed default install: skip TLS verification
        if args.remote == 'https://localhost':
            verify_ssl = False
        cli = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    if options.get('ping') or options.get('ping_indef'):
        logger.info('running ping')
        # "indefinite" is capped at 999 rounds
        n = 4
        if args.ping_indef:
            n = 999
        try:
            for num in range(0, n):
                ret = cli.ping()
                if ret != 0:
                    print("roundtrip: {} ms".format(ret))
                    select.select([], [], [], 1)
                    from time import sleep
                    # NOTE(review): pauses ~2s total per round (select timeout
                    # + sleep) -- possibly only one was intended
                    sleep(1)
                else:
                    logger.error('ping failed')
                    raise RuntimeError
        except KeyboardInterrupt:
            pass
        raise SystemExit

    if options.get("submit"):
        print("submitting {0}".format(options.get("submit")))
        i = Indicator(indicator=args.indicator, tags=args.tags, confidence=args.confidence, group=args.groups,
                      tlp=args.tlp, provider=args.provider)

        rv = cli.indicators_create(i)
        print('success id: {}\n'.format(rv))
        raise SystemExit

    # base search/feed filter set built from cli/config options
    filters = {
        'itype': options['itype'],
        'limit': options['limit'],
        'provider': options.get('provider'),
        'indicator': options.get('search') or options.get('indicator'),
        'nolog': options['nolog'],
        'tags': options['tags'],
        'confidence': options.get('confidence'),
        'asn': options.get('asn'),
        'asn_desc': options.get('asn_desc'),
        'cc': options.get('cc'),
        'region': options.get('region'),
        'rdata': options.get('rdata'),
        'reporttime': options.get('reporttime'),
        'groups': options.get('groups'),
        'tlp': options.get('tlp')
    }

    # relative time windows replace any explicit reporttime
    # NOTE(review): combining --last-day with --last-hour (or --days) raises
    # KeyError on the second `del` -- confirm whether the flags should be
    # mutually exclusive
    if args.last_day:
        filters['days'] = '1'
        del filters['reporttime']

    if args.last_hour:
        filters['hours'] = '1'
        del filters['reporttime']

    if args.days:
        filters['days'] = args.days
        del filters['reporttime']

    if args.today:
        now = arrow.utcnow()
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDT00:00:00'))

    # NOTE(review): filters never contains a 'search' key (the search term is
    # stored under 'indicator'), so this middle condition is always true --
    # presumably it was meant to skip feed mode when a search term was given
    if filters.get('itype') and not filters.get('search') and not args.no_feed:
        logger.info('setting feed flag by default, use --no-feed to override')
        options['feed'] = True

    if options.get("delete"):
        # --id replaces the whole filter set; empty filter values are dropped
        if args.id:
            filters = {'id': args.id}
        filters = {f: filters[f] for f in filters if filters.get(f)}
        print("deleting {0}".format(filters))
        rv = cli.indicators_delete(filters)
        print('deleted: {}'.format(rv))
        raise SystemExit

    if options.get('feed'):
        # feed mode requires itype+tags unless ADVANCED mode is enabled
        if not filters.get('itype') and not ADVANCED:
            print('\nmissing --itype\n\n')
            raise SystemExit

        if not filters.get('tags') and not ADVANCED:
            print('\nmissing --tags [phishing|malware|botnet|scanner|pdns|whitelist|...]\n\n')
            raise SystemExit

        if not filters.get('confidence'):
            filters['confidence'] = 8

        if args.limit == SEARCH_LIMIT:
            filters['limit'] = FEED_LIMIT

        try:
            rv = cli.feed(filters=filters)
        except AuthError as e:
            logger.error('unauthorized')
        except KeyboardInterrupt:
            pass
        except Exception as e:
            logger.error(e)
        else:
            print(FORMATS[options.get('format')](data=rv, cols=args.columns.split(',')))

        raise SystemExit

    # default mode: plain search
    try:
        rv = cli.search(filters)
    except AuthError as e:
        logger.error('unauthorized')
    except KeyboardInterrupt:
        pass
    except Exception as e:
        import traceback
        traceback.print_exc()
        logger.error(e)
    else:
        print(FORMATS[options.get('format')](data=rv, cols=args.columns.split(',')))
def main():
    """cif CLI entry point (legacy variant): ping a remote, submit an
    indicator, pull a feed, or search."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --ping
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[p])
    p.add_argument('--token', help='specify api token', default=TOKEN)
    p.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    p.add_argument('-p', '--ping', action="store_true")  # meg?
    p.add_argument('-q', '--search', help="search")
    p.add_argument('--itype', help='filter by indicator type')  ## need to fix sqlite for non-ascii stuff first
    p.add_argument("--submit", action="store_true", help="submit an indicator")
    p.add_argument('--limit', help='limit results [default %(default)s]', default=SEARCH_LIMIT)
    p.add_argument('--reporttime', help='specify reporttime filter')
    p.add_argument('-n', '--nolog', help='do not log search', action='store_true')
    p.add_argument('-f', '--format', help='specify output format [default: %(default)s]"', default=FORMAT,
                   choices=FORMATS.keys())
    p.add_argument('--indicator')
    p.add_argument('--tags', nargs='+')
    p.add_argument('--provider')
    p.add_argument('--confidence', help="specify confidence level")
    p.add_argument("--zmq", help="use zmq as a transport instead of http", action="store_true")
    p.add_argument('--config', help='specify config file [default %(default)s]', default=CONFIG_PATH)
    p.add_argument('--feed', action='store_true')
    p.add_argument('--no-verify-ssl', action='store_true')
    p.add_argument('--last-day', action="store_true",
                   help='auto-sets reporttime to 23 hours and 59 seconds ago '
                        '(current time UTC) and reporttime-end to "now"')
    p.add_argument('--last-hour', action='store_true',
                   help='auto-sets reporttime to the beginning of the previous full'
                        ' hour and reporttime-end to end of previous full hour')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', help='auto-sets reporttime to today, 00:00:00Z (UTC)', action='store_true')

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    # merge config-file values: config remote overrides the built-in
    # default; any unset option falls back to the config file
    o = read_config(args)
    options = vars(args)
    for v in options:
        if v == 'remote' and options[v] == REMOTE_ADDR and o.get('remote'):
            options[v] = o['remote']
        if options[v] is None:
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = True
    if o.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    if options.get("zmq"):
        from cifsdk.client.zeromq import ZMQ as ZMQClient
        cli = ZMQClient(**options)
    else:
        from cifsdk.client.http import HTTP as HTTPClient
        # local self-signed default install: skip TLS verification
        if args.remote == 'https://localhost':
            verify_ssl = False
        cli = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    if options.get('ping'):
        logger.info('running ping')
        for num in range(0, 4):
            ret = cli.ping()
            if ret != 0:
                print("roundtrip: {} ms".format(ret))
                select.select([], [], [], 1)  # ~1s pause between rounds
            else:
                logger.error('ping failed')
                raise RuntimeError
        raise SystemExit

    if options.get("submit"):
        logger.info("submitting {0}".format(options.get("submit")))
        i = Indicator(indicator=args.indicator, tags=args.tags, confidence=args.confidence)
        rv = cli.indicators_create(i)
        logger.info('success id: {}'.format(rv))
        raise SystemExit

    # base search/feed filter set
    filters = {
        'itype': options['itype'],
        'limit': options['limit'],
        'provider': options.get('provider'),
        'indicator': options.get('search'),
        'nolog': options['nolog'],
        'tags': options['tags'],
        'confidence': options.get('confidence')
    }

    if args.last_day:
        filters['days'] = '1'
    if args.last_hour:
        filters['hours'] = '1'
    if args.days:
        filters['days'] = args.days
    if args.today:
        now = arrow.utcnow()
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDT00:00:00'))

    if options.get('feed'):
        if not filters.get('confidence'):
            filters['confidence'] = 8
        # BUGFIX: the keys set above are 'days'/'hours'; the old
        # 'day'/'hour' lookups never matched, so an explicit --days /
        # --last-* window was silently clobbered with FEED_DAYS_LIMIT
        if not filters.get('reporttime') and not filters.get('days') and not filters.get('hours'):
            filters['days'] = FEED_DAYS_LIMIT
        if args.limit == SEARCH_LIMIT:
            filters['limit'] = FEED_LIMIT

        try:
            rv = cli.feed(filters=filters)
        except AuthError:
            logger.error('unauthorized')
        else:
            print(FORMATS[options.get('format')](data=rv))
        raise SystemExit

    # default mode: plain search
    try:
        rv = cli.search(filters)
    except AuthError:
        logger.error('unauthorized')
    else:
        print(FORMATS[options.get('format')](data=rv))
def main():
    """cif-router entry point.

    Parses CLI options (defaults overridable via CIF_* env variables per the
    help text), wires up logging and signal handlers, then runs a Router
    (store + hunters + gatherers) until interrupted.
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CIF_RUNTIME_PATH
            CIF_ROUTER_CONFIG_PATH
            CIF_ROUTER_ADDR
            CIF_HUNTER_ADDR
            CIF_HUNTER_TOKEN
            CIF_HUNTER_THREADS
            CIF_GATHERER_THREADS
            CIF_STORE_ADDR

        example usage:
            $ cif-router --listen 0.0.0.0 -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-router',
        parents=[p]
    )
    p.add_argument('--config', help='specify config path [default: %(default)s', default=CONFIG_PATH)
    p.add_argument('--listen', help='address to listen on [default: %(default)s]', default=ROUTER_ADDR)
    p.add_argument('--gatherer-threads', help='specify number of gatherer threads to use [default: %(default)s]',
                   default=GATHERER_THREADS)
    p.add_argument('--hunter', help='address hunters listen on on [default: %(default)s]', default=HUNTER_ADDR)
    p.add_argument('--hunter-token', help='specify token for hunters to use [default: %(default)s]',
                   default=HUNTER_TOKEN)
    p.add_argument('--hunter-threads', help='specify number of hunter threads to use [default: %(default)s]',
                   default=HUNTER_THREADS)
    p.add_argument("--store-address", help="specify the store address cif-router is listening on[default: %("
                                           "default)s]", default=STORE_ADDR)
    p.add_argument("--store", help="specify a store type {} [default: %(default)s]".format(', '.join(STORE_PLUGINS)),
                   default=STORE_DEFAULT)
    p.add_argument('--store-nodes', help='specify storage nodes address [default: %(default)s]', default=STORE_NODES)
    p.add_argument('--p2p', action='store_true', help='enable experimental p2p support')

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)
    logger.info('loglevel is: {}'.format(logging.getLevelName(logger.getEffectiveLevel())))

    # fill any option not set on the command line from the config file
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    setup_signals(__name__)
    # NOTE(review): runtime_path is not added by this parser -- presumably
    # contributed by get_argument_parser(); confirm
    setup_runtime_path(args.runtime_path)

    # Router runs until interrupted; the context manager handles teardown
    with Router(listen=args.listen, hunter=args.hunter, store_type=args.store, store_address=args.store_address,
                store_nodes=args.store_nodes, p2p=args.p2p, hunter_token=args.hunter_token,
                hunter_threads=args.hunter_threads, gatherer_threads=args.gatherer_threads) as r:
        try:
            logger.info('starting router..')
            r.start()
        except KeyboardInterrupt:
            # todo - signal to threads to shut down and wait for them to finish
            logger.info('shutting down...')

    logger.info('Shutting down')
def main():
    """cif CLI entry point (early variant): ping the remote, search by
    itype or indicator, or submit an indicator."""
    parser = get_argument_parser()
    parser = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cif -q example.org -d
            $ cif --search 1.2.3.0/24
            $ cif --ping
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif',
        parents=[parser]
    )
    parser.add_argument('--token', help='specify api token', default=TOKEN)
    parser.add_argument('--remote', help='specify API remote [default %(default)s]', default=REMOTE_ADDR)
    parser.add_argument('-p', '--ping', action="store_true")  # meg?
    parser.add_argument('-q', '--search', help="search")
    parser.add_argument('--itype', help='filter by indicator type')  ## need to fix sqlite for non-ascii stuff first
    parser.add_argument("--submit", action="store_true", help="submit an indicator")
    parser.add_argument('--limit', help='limit results [default %(default)s]', default=SEARCH_LIMIT)
    parser.add_argument('-n', '--nolog', help='do not log search', action='store_true')
    parser.add_argument('-f', '--format', help='specify output format [default: %(default)s]"', default=FORMAT,
                        choices=FORMATS.keys())
    parser.add_argument('--indicator')
    parser.add_argument('--tags', nargs='+')
    parser.add_argument('--provider')
    parser.add_argument("--zmq", help="use zmq as a transport instead of http", action="store_true")
    parser.add_argument('--config', help='specify config file [default %(default)s]', default=CONFIG_PATH)

    args = parser.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    cfg = read_config(args)
    opts = vars(args)
    # options left unset on the command line fall back to the config file
    for key, val in opts.items():
        if val is None:
            opts[key] = cfg.get(key)

    if not opts.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = not (cfg.get('no_verify_ssl') or opts.get('no_verify_ssl'))

    opts = vars(args)  # re-read; same underlying dict as above

    # pick the transport: zeromq when requested, http otherwise
    if opts.get("zmq"):
        from cifsdk.client.zeromq import ZMQ as ZMQClient
        client = ZMQClient(**opts)
    else:
        from cifsdk.client.http import HTTP as HTTPClient
        client = HTTPClient(args.remote, args.token, verify_ssl=verify_ssl)

    if opts.get('ping'):
        logger.info('running ping')
        for _ in range(4):
            rtt = client.ping()
            if rtt == 0:
                logger.error('ping failed')
                raise RuntimeError
            print("roundtrip: {} ms".format(rtt))
            select.select([], [], [], 1)  # ~1s pause between rounds
    elif opts.get('itype'):
        logger.info('searching for {}'.format(opts['itype']))
        try:
            found = client.search({
                'itype': opts['itype'],
                'limit': opts['limit'],
                'provider': opts.get('provider')
            })
        except AuthError:
            logger.error('unauthorized')
        except RuntimeError as err:
            import traceback
            traceback.print_exc()
            logger.error(err)
        else:
            print(FORMATS[opts.get('format')](data=found))
    elif opts.get('search'):
        logger.info("searching for {0}".format(opts.get("search")))
        try:
            found = client.indicators_search({
                'indicator': opts['search'],
                'limit': opts['limit'],
                'nolog': opts['nolog']
            })
        except RuntimeError as err:
            import traceback
            traceback.print_exc()
            logger.error(err)
        except AuthError:
            logger.error('unauthorized')
        else:
            print(FORMATS[opts.get('format')](data=found))
    elif opts.get("submit"):
        logger.info("submitting {0}".format(opts.get("submit")))
        client.submit(indicator=args.indicator, tags=args.tags)
def main():
    """cgmail entry point (newer variant): parse an email (file or stdin),
    extract URLs from its parts, score each against cached FQDN
    whitelist/blacklist feeds pulled from CIF, and submit the URLs back to
    CIF as observables.
    """
    # initialize module
    p = ArgumentParser(
        description=textwrap.dedent('''\
        example usage:
            $ cat test.eml | cgmail
            $ cgmail --file test.eml
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cgmail')
    p.add_argument("-v", "--verbose", dest="verbose", action="count",
                   help="set verbosity level [default: %(default)s]")
    p.add_argument('-d', '--debug', dest='debug', action="store_true")
    p.add_argument("-f", "--file", dest="file", help="specify email file")

    # cif arguments
    p.add_argument("--confidence", help="specify confidence for submitting to CIF", default=CONFIDENCE)
    p.add_argument("--remote", help="specify CIF remote")
    p.add_argument("--token", help="specify CIF token")
    p.add_argument("--config", help="specify CIF config [default: %(default)s",
                   default=os.path.expanduser("~/.cif.yml"))
    p.add_argument("--tags", help="specify CIF tags [default: %(default)s", default=["phishing"])
    p.add_argument("--group", help="specify CIF group [default: %(default)s", default="everyone")
    p.add_argument("--tlp", help="specify CIF TLP [default: %(default)s", default=TLP)
    p.add_argument("--no-verify-ssl", action="store_true", default=False)
    p.add_argument("--raw", action="store_true", help="include raw message data")
    p.add_argument("--provider", help="specify feed provider [default: %(default)s]", default=PROVIDER)
    p.add_argument('--exclude', help='url patterns to exclude [default: %(default)s', default=EXCLUDE)
    p.add_argument('--confidence-lower', help='patterns to automatically lower confidence',
                   default=CONFIDENCE_LOWER)
    p.add_argument('-n', '--not-really', help='do not submit', action='store_true')
    p.add_argument('--cache', help='location to cache whitelist [default: %(default)s',
                   default=WHITELIST_CACHE)
    p.add_argument('--blacklist-cache', default=BLACKLIST_CACHE)

    # Process arguments
    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    exclude = None
    if args.exclude:
        exclude = re.compile(args.exclude)

    # NOTE(review): compiled but never used below -- confirm intent before removing
    confidence_lower = None
    if args.confidence_lower:
        confidence_lower = re.compile(args.confidence_lower)

    # overlay config-file values onto options not set on the command line
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    if not options.get('token'):
        raise RuntimeError('missing --token')

    if options.get("file"):
        with open(options["file"]) as f:
            email = f.read()
    else:
        email = sys.stdin.read()

    # extract urls from message body and mail parts
    bits = cgmail.parse_email_from_string(email)
    urls = set()
    for n in bits:
        if n.get('urls'):
            for u in n['urls']:
                urls.add(u)

    verify_ssl = True
    if options.get('no_verify_ssl'):
        verify_ssl = False

    # initialize cif client
    cli = Client(remote=options["remote"], token=options["token"], verify_ssl=verify_ssl)

    # refresh the whitelist cache at most once per day
    update_cache = True
    if os.path.isfile(args.cache):
        modified = os.path.getmtime(args.cache)
        # BUGFIX: was 84600 -- one day is 86400 seconds
        if arrow.utcnow() < arrow.get(modified + 86400):
            update_cache = False

    if update_cache:
        # pull FQDN whitelist (last 7 days)
        filters = {
            'tags': 'whitelist',
            'otype': 'fqdn',
            'confidence': 25,
        }
        now = arrow.utcnow()
        filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        now = now.shift(days=-7)
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        ret = cli.search(limit=50000, filters=filters, sort='reporttime', sort_direction='desc')
        with open(args.cache, 'w') as f:
            for r in ret:
                f.write("{0}\n".format(r['observable']))

    # refresh the blacklist cache at most once per day
    update_cache = True
    if os.path.isfile(args.blacklist_cache):
        modified = os.path.getmtime(args.blacklist_cache)
        # BUGFIX: was 84600 -- one day is 86400 seconds
        if arrow.utcnow() < arrow.get(modified + 86400):
            update_cache = False

    if update_cache:
        # pull high-confidence malicious FQDNs (last 7 days)
        filters = {
            'tags': 'phishing,suspicious,malware',
            'otype': 'fqdn',
            'confidence': 75,
        }
        now = arrow.utcnow()
        filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        now = now.shift(days=-7)
        filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        ret = cli.search(limit=50000, filters=filters, sort='reporttime', sort_direction='desc')
        with open(args.blacklist_cache, 'w') as f:
            for r in ret:
                f.write("{0}\n".format(r['observable']))

    fqdns = set()
    with open(args.cache) as f:
        for l in f:
            fqdns.add(l.rstrip("\n"))

    fqdns_blacklist = set()
    with open(args.blacklist_cache) as f:
        for l in f:
            fqdns_blacklist.add(l.rstrip("\n"))

    for u in urls:
        u = u.rstrip('\\/')  # trim trailing slashes / stray backslashes
        u = urlparse(u)
        fqdn = url_to_fqdn(u.geturl())
        if exclude and exclude.search(fqdn):
            continue

        # score the url: whitelisted hosts are lowered, unless they are URL
        # shorteners or hosting providers; blacklisted hosts raised further
        confidence = options['confidence']
        if match_whitelist(fqdns, u.netloc):
            if (u.netloc not in URL_SHORTNERS) and (not match_whitelist(HOSTING_PROVIDERS, u.netloc)):
                confidence = options['confidence'] - 15
            else:
                confidence = options['confidence'] + 5
        elif match_whitelist(fqdns_blacklist, u.netloc):
            confidence = options['confidence'] + 10
        else:
            confidence = options['confidence'] + 5

        logger.info("submitting: {0}".format(u.geturl()))
        o = Observable(observable=u.geturl(),
                       confidence=confidence,
                       tlp=options["tlp"],
                       group=options["group"],
                       tags=options["tags"],
                       provider=options.get('provider'))
        o = o.__dict__
        del o['logger']
        if options.get('raw'):
            # BUGFIX: o is a plain dict here; attribute assignment (o.raw)
            # raised AttributeError
            o['raw'] = email

        if not args.not_really:
            r = cli.submit(o)
            logger.info("submitted: {0}".format(r))
def main():
    """Entry point for ``cif-router``.

    Parses CLI arguments (layered on the shared parser from
    ``get_argument_parser()``), merges in config-file values for any option
    left unset, writes a pidfile, and runs a :class:`Router` until
    interrupted.  Raises ``SystemExit`` immediately if a pidfile already
    exists; the pidfile is removed again on shutdown.
    """
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CIF_RUNTIME_PATH
            CIF_ROUTER_CONFIG_PATH
            CIF_ROUTER_ADDR
            CIF_HUNTER_ADDR
            CIF_HUNTER_TOKEN
            CIF_HUNTER_THREADS
            CIF_GATHERER_THREADS
            CIF_STORE_ADDR

        example usage:
            $ cif-router --listen 0.0.0.0 -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='cif-router',
        parents=[p]
    )

    # FIX: help text was missing its closing ']'
    p.add_argument('--config', help='specify config path [default: %(default)s]', default=CONFIG_PATH)
    p.add_argument('--listen', help='address to listen on [default: %(default)s]', default=ROUTER_ADDR)
    p.add_argument('--gatherer-threads', help='specify number of gatherer threads to use [default: %(default)s]',
                   default=GATHERER_THREADS)
    p.add_argument('--hunter', help='address hunters listen on on [default: %(default)s]', default=HUNTER_ADDR)
    p.add_argument('--hunter-token', help='specify token for hunters to use [default: %(default)s]',
                   default=HUNTER_TOKEN)
    p.add_argument('--hunter-threads', help='specify number of hunter threads to use [default: %(default)s]',
                   default=HUNTER_THREADS)
    # FIX: help text was missing the space before '[default: ...]'
    p.add_argument("--store-address", help="specify the store address cif-router is listening on [default: "
                                           "%(default)s]", default=STORE_ADDR)
    p.add_argument("--store", help="specify a store type {} [default: %(default)s]".format(', '.join(STORE_PLUGINS)),
                   default=STORE_DEFAULT)
    p.add_argument('--store-nodes', help='specify storage nodes address [default: %(default)s]', default=STORE_NODES)
    p.add_argument('--logging-ignore', help='set logging to WARNING for specific modules')
    p.add_argument('--pidfile', help='specify pidfile location [default: %(default)s]', default=PIDFILE)

    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)
    logger.info('loglevel is: {}'.format(logging.getLevelName(logger.getEffectiveLevel())))

    # quiet down noisy modules the operator asked to ignore
    if args.logging_ignore:
        to_ignore = args.logging_ignore.split(',')
        for i in to_ignore:
            logging.getLogger(i).setLevel(logging.WARNING)

    # any option still None is backfilled from the config file
    o = read_config(args)
    options = vars(args)
    for v in options:
        if options[v] is None:
            options[v] = o.get(v)

    setup_runtime_path(args.runtime_path)
    setup_signals(__name__)

    # http://stackoverflow.com/a/789383/7205341
    pid = str(os.getpid())
    logger.debug("pid: %s" % pid)

    # refuse to start twice against the same pidfile
    if os.path.isfile(args.pidfile):
        logger.critical("%s already exists, exiting" % args.pidfile)
        raise SystemExit

    try:
        # FIX: use a context manager so the handle is closed even if write() fails
        with open(args.pidfile, 'w') as pidfile:
            pidfile.write(pid)
    except PermissionError:
        # best-effort: keep running without a pidfile, matching prior behavior
        logger.error('unable to create pid %s' % args.pidfile)

    with Router(listen=args.listen, hunter=args.hunter, store_type=args.store, store_address=args.store_address,
                store_nodes=args.store_nodes, hunter_token=args.hunter_token, hunter_threads=args.hunter_threads,
                gatherer_threads=args.gatherer_threads) as r:
        try:
            logger.info('starting router..')
            r.start()
        except KeyboardInterrupt:
            # todo - signal to threads to shut down and wait for them to finish
            logger.info('shutting down via SIGINT...')
        except SystemExit:
            logger.info('shutting down via SystemExit...')
        except Exception as e:
            logger.critical(e)
            traceback.print_exc()

        r.stop()

    logger.info('Shutting down')
    if os.path.isfile(args.pidfile):
        os.unlink(args.pidfile)
def main():
    """Entry point for the ``cif`` CLI.

    Builds the argument parser, merges command-line options with
    config-file values (options left ``None`` are backfilled from config),
    then dispatches on the requested operation:

    * search/feed — query the remote, optionally aggregate, whitelist and
      format the results;
    * ``--ping`` — round-trip the remote ``--ttl`` times;
    * ``--submit`` — read a JSON object from STDIN and submit it.

    Raises ``RuntimeError`` if no token is available, ``SystemExit`` on
    user/input errors.
    """
    p = ArgumentParser(description=textwrap.dedent('''\
        Example usage:
            $ cif -q 130.201.0.2
            $ cif -q 130.201.0.0/16
            $ cif -q 2001:4860:4860::8888
            $ cif -q example.com
            $ cif -q 'http://www.example.com'
            $ cif -q '*****@*****.**'
            $ cif -q bf9d457bcd702fe836201df1b48c0bec
            $ cif --tags botnet,zeus -c 85
            $ cif --application vnc,ssh --asns 1234 --cc RU,US
            $ cif -q example.com --tags botnet,zeus -c 85 --limit 50
            $ cif --otype ipv4 --aggregate observable --today
            $ cif --feed --otype ipv4 -c 85 -f csv
            $ cif --feed --otype fqdn -c 95 --tags botnet -f csv
            $ cif --feed --otype url -c 75 --today -f csv
        '''), formatter_class=RawDescriptionHelpFormatter, prog='cif')

    # options
    p.add_argument("-v", "--verbose", dest="verbose", action="store_true", help="logging level: INFO")
    p.add_argument('-d', '--debug', dest='debug', action="store_true", help="logging level: DEBUG")
    p.add_argument('-V', '--version', action='version', version=VERSION)
    p.add_argument('--no-verify-ssl', action="store_true", default=False)
    p.add_argument('-R', '--remote', help="remote api location", default=REMOTE_ADDR)
    # FIX: help text was missing its closing ']'
    p.add_argument('-T', '--token', help="specify token [default %(default)s]", default=TOKEN)
    p.add_argument('--timeout', help='connection timeout [default: %(default)s]', default="300")
    p.add_argument('-C', '--config', help="configuration file [default: %(default)s]",
                   default=os.path.expanduser("~/.cif.yml"))
    p.add_argument('--sortby', help='sort output [default: %(default)s]', default='lasttime')
    p.add_argument('--sortby-direction', help='sortby direction [default: %(default)s]', default='ASC')
    p.add_argument('-f', '--format', help="specify output format [default: %(default)s]", default="table",
                   choices=FORMATS.keys())

    # actions
    p.add_argument('-p', '--ping', action="store_true", help="ping")
    p.add_argument('-s', '--submit', action="store_true", help="submit a JSON object")

    # flags
    p.add_argument('-l', '--limit', help="result limit", default=LIMIT)
    p.add_argument('-n', '--nolog', help='do not log the search', default=None, action="store_true")

    # filters
    p.add_argument('-q', "--query", help="specify a search")
    p.add_argument('--firsttime',
                   help='specify filter based on firsttime timestmap (greater than, format: YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--lasttime',
                   help='specify filter based on lasttime timestamp (less than, format: YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--reporttime',
                   help='specify filter based on reporttime timestmap (greater than, format: YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument('--reporttimeend',
                   help='specify filter based on reporttime timestmap (less than, format: YYYY-MM-DDTHH:MM:SSZ)')
    p.add_argument("--tags", help="filter for tags")
    p.add_argument('--description', help='filter on description')
    p.add_argument('--otype', help='filter by otype')
    p.add_argument("--cc", help="filter for countrycode")
    p.add_argument('-c', '--confidence', help="specify confidence")
    p.add_argument('--rdata', help='filter by rdata')
    p.add_argument('--provider', help='filter by provider')
    p.add_argument('--asn', help='filter by asn')
    #p.add_argument('--tlp', help='filter by tlp')
    p.add_argument('--proxy', help="specify a proxy to use [default %(default)s]", default=PROXY)
    p.add_argument('--feed', action="store_true",
                   help="generate a feed of data, meaning deduplicated and whitelisted")
    p.add_argument('--whitelist-limit',
                   help="specify how many whitelist results to use when applying to --feeds [default %(default)s]",
                   default=WHITELIST_LIMIT)
    p.add_argument('--whitelist-confidence',
                   help='by confidence (greater-than or equal to) [default: %(default)s]',
                   default=WHITELIST_CONFIDENCE)
    p.add_argument('--last-day', action="store_true",
                   help='auto-sets reporttime to 23 hours and 59 seconds ago (current time UTC) and reporttime-end '
                        'to "now"')
    p.add_argument('--last-hour', action='store_true',
                   help='auto-sets reporttime to the beginning of the previous full hour and reporttime-end to end '
                        'of previous full hour')
    p.add_argument('--days', help='filter results within last X days')
    p.add_argument('--today', help='auto-sets reporttime to today, 00:00:00Z (UTC)', action='store_true')
    p.add_argument('--aggregate', help="aggregate around a specific field (ie: observable)")
    p.add_argument('--fields', help="specify field list to display [default: %(default)s]", default=','.join(FIELDS))
    p.add_argument('--filename', help='specify output filename [default: STDOUT]')
    # FIX: without type=int a command-line --ttl arrives as str and
    # range(0, args.ttl) below raises TypeError
    p.add_argument('--ttl', type=int, help='specify number of pings to send [default: %(default)s]', default=PINGS)
    p.add_argument('--group', help='filter by group(s) (everyone,group1,group2,...)')
    p.add_argument('--application', help='filter based on application field')
    p.add_argument('--id', help='specify an id to retrieve')

    # Process arguments
    args = p.parse_args()
    setup_logging(args)
    logger = logging.getLogger(__name__)

    # read in the config
    config_opts = read_config(args)
    cmd_options = vars(args)

    # check the config against the arguments; config fills options left unset,
    # and an explicit --remote on the command line always wins over config
    for v in cmd_options:
        if cmd_options[v] is None:
            cmd_options[v] = config_opts.get(v)
        if v == 'remote':
            if cmd_options[v] != REMOTE_ADDR_DEFAULT:
                continue
            else:
                cmd_options[v] = config_opts.get('remote', REMOTE_ADDR_DEFAULT)

    options = cmd_options

    if not options.get('token'):
        raise RuntimeError('missing --token')

    verify_ssl = True
    if config_opts.get('no_verify_ssl') or options.get('no_verify_ssl'):
        verify_ssl = False

    cli = Client(options['token'], remote=options['remote'], proxy=options.get('proxy'), verify_ssl=verify_ssl)

    if (options.get('query') or options.get('tags') or options.get('cc') or options.get('rdata')
            or options.get('otype') or options.get('provider') or options.get('asn')
            or options.get('description')):
        # build the search filter set from whichever options were supplied
        filters = {}
        if options.get('query'):
            filters['observable'] = options['query']
        if options.get('cc'):
            filters['cc'] = options['cc']
        if options.get('tags'):
            filters['tags'] = options['tags']
        if options.get('description'):
            filters['description'] = options['description']
        if options.get('confidence'):
            filters['confidence'] = options['confidence']
        else:
            # feeds get a sane confidence floor when none was given
            if options.get('feed'):
                filters['confidence'] = FEED_CONFIDENCE
        if options.get('firsttime'):
            filters['firsttime'] = options['firsttime']
        if options.get('lasttime'):
            filters['lasttime'] = options['lasttime']
        if options.get('reporttime'):
            filters['reporttime'] = options['reporttime']
        if options.get('reporttimeend'):
            filters['reporttimeend'] = options['reporttimeend']
        if options.get('otype'):
            filters['otype'] = options['otype']
        if options.get('rdata'):
            filters['rdata'] = options['rdata']
        if options.get('nolog'):
            options['nolog'] = 1
        if options.get('provider'):
            filters['provider'] = options['provider']
        if options.get('asn'):
            filters['asn'] = options['asn']
        #if options.get('tlp'):
        #    filters['tlp'] = options['tlp']
        if options.get('group'):
            filters['group'] = options['group']
        if options.get('application'):
            filters['application'] = options['application']
        if options.get('id'):
            filters['id'] = options['id']

        # needs to be MEG'd out.
        # time-window shortcuts; --days stacks on top of the shortcuts above
        if options.get('last_day'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.shift(days=-1)
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        elif options.get('last_hour'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.shift(hours=-1)
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
        elif options.get('today'):
            now = arrow.utcnow()
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDT00:00:00'))

        if options.get('days'):
            now = arrow.utcnow()
            filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
            now = now.shift(days=-int(options['days']))
            filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))

        DAYS = 30
        if options.get('feed'):
            if not options.get('otype'):
                logger.error('--otype [ipv4|ipv6|fqdn|url|..] flag required when using --feed')
                raise SystemExit

            # feeds default to a larger result limit and a DAYS-wide window
            if options['limit'] == LIMIT:
                options['limit'] = FEED_LIMIT

            if not options.get('days'):
                now = arrow.utcnow()
                filters['reporttimeend'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))
                now = now.shift(days=-DAYS)
                filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))

        ret = cli.search(limit=options['limit'], nolog=options['nolog'], filters=filters,
                         sort=options['sortby'], sort_direction=options['sortby_direction'])

        number_returned = len(ret)
        logger.info('returned: {0} records'.format(number_returned))

        if options.get('aggregate'):
            ret = cli.aggregate(ret, field=options['aggregate'])

        if options.get('feed'):
            # pull the whitelist for the same window and let the feed plugin
            # dedupe/whitelist the aggregated results
            wl_filters = copy.deepcopy(filters)
            wl_filters['tags'] = 'whitelist'
            wl_filters['confidence'] = args.whitelist_confidence

            now = arrow.utcnow()
            now = now.shift(days=-DAYS)
            wl_filters['reporttime'] = '{0}Z'.format(now.format('YYYY-MM-DDTHH:mm:ss'))

            wl = cli.search(limit=options['whitelist_limit'], nolog=True, filters=wl_filters)

            f = feed_factory(options['otype'])

            ret = cli.aggregate(ret)
            if len(ret) != number_returned:
                logger.info('aggregation removed: {0} records'.format(number_returned - len(ret)))

            ret = f().process(ret, wl)

        f = format_factory(options['format'])
        if f is None:
            raise SystemError('{0} format not supported, maybe missing a dependency.'.format(options['format']))

        try:
            if len(ret) >= 1:
                ret = f(ret, cols=options['fields'].split(','))
                if args.filename:
                    with open(args.filename, 'w') as F:
                        F.write(str(ret))
                else:
                    # restore default SIGPIPE so e.g. `cif ... | head` exits cleanly
                    signal(SIGPIPE, SIG_DFL)
                    print(ret)
            else:
                logger.info("no results found...")
        except AttributeError as e:
            logger.exception(e)

    elif options.get('ping'):
        for num in range(0, args.ttl):
            ret = cli.ping()
            print("roundtrip: %s ms" % ret)
            # ~1s pause between pings
            select.select([], [], [], 1)

    elif options.get('submit'):
        if not sys.stdin.isatty():
            stdin = sys.stdin.read()
        else:
            logger.error("No data passed via STDIN")
            raise SystemExit

        try:
            data = json.loads(stdin)
            try:
                ret = cli.submit(data)
                print('submitted: {0}'.format(ret))
            except Exception as e:
                logger.error(e)
                raise SystemExit
        except Exception as e:
            logger.error(e)
            raise SystemExit

    else:
        logger.warning('operation not supported')
        p.print_help()
        raise SystemExit