def add_parser(subs):
    """Register the 'list' subcommand for querying blacklisted resources.

    Adds the shared time-range and rendering options via the module helpers.
    """
    p = subs.add_parser('list', help='query blacklisted resources')
    p.add_argument(
        '--filter', '-f',
        default=None,
        help='filter to one of "blackhole", "sakura" or "exploitKit"',
    )
    add_timerange_args(p)
    add_render_args(p)
def add_parser(subs):
    """Register the 'incidentlist' subcommand.

    Queries blacklist incidents within a given timeframe; attaches the shared
    time-range options and verbose rendering options.
    """
    p = subs.add_parser(
        'incidentlist',
        help='query blacklist incidents within given timeframe',
    )
    p.add_argument(
        '--all-workspace-crawls', '-a',
        action='store_true',
        help='filter crawls to those on workspace',
    )
    p.add_argument(
        '--timeout', '-t',
        type=float,
        default=None,
        help='socket timeout in seconds',
    )
    add_timerange_args(p)
    add_render_args(p, verbose=True)
def add_parser(subs):
    """Register the 'incident' subcommand.

    Looks up blacklist incident data for one or more URLs/hosts/domains,
    with optional pagination controls.
    """
    p = subs.add_parser(
        'incident',
        help='query blacklist incident data by given URL/host/domain',
    )
    p.add_argument(
        'urls', nargs='+', metavar='URL',
        help='URL/host/domain for which to query',
    )
    p.add_argument(
        '--start-index', '--si', type=int,
        help='start index, for pagination (default retrieves all data)',
    )
    p.add_argument(
        '--max-results', '--mr', type=int,
        help='max results to return (default 10 if --start-index given)',
    )
    add_render_args(p, verbose=True)
def add_parser(subs): parser = subs.add_parser( "malware", help="query RiskIQ suspicious binary feed for all samples stored within a " "given period" ) parser.add_argument("--filter", "-f", default=None, help='filter to one of "blackhole", "sakura" or "exploitKit"') parser.add_argument( "--confidence", "-c", choices=["H", "M", "L"], default=None, help="restrict results to malicious probability of " "[H]igh, [M]edium, or [L]ow", ) add_timerange_args(parser) add_render_args(parser)
def add_parser(subs):
    """Register the 'incident' subcommand: incident data by URL/host/domain.

    Supports pagination via --start-index/--max-results and verbose
    rendering options.
    """
    p = subs.add_parser(
        'incident',
        help='query blacklist incident data '
             'by given URL/host/domain',
    )
    p.add_argument(
        'urls',
        nargs='+',
        metavar='URL',
        help='URL/host/domain for which to query',
    )
    p.add_argument(
        '--start-index', '--si',
        type=int,
        help='start index, for pagination (default retrieves all data)',
    )
    p.add_argument(
        '--max-results', '--mr',
        type=int,
        help='max results to return (default 10 if --start-index given)',
    )
    add_render_args(p, verbose=True)
def add_parser(subs):
    """Register the 'malware' subcommand for the RiskIQ suspicious binary feed.

    Adds filter/confidence options plus the shared time-range and rendering
    options.
    """
    p = subs.add_parser(
        'malware',
        help='query RiskIQ suspicious binary feed for all samples stored '
             'within a given period',
    )
    p.add_argument(
        '--filter', '-f',
        default=None,
        help='filter to one of "blackhole", "sakura" or "exploitKit"',
    )
    p.add_argument(
        '--confidence', '-c',
        choices=['H', 'M', 'L'],
        default=None,
        help='restrict results to malicious probability of '
             '[H]igh, [M]edium, or [L]ow',
    )
    add_timerange_args(p)
    add_render_args(p)
def add_parser(subs): parser = subs.add_parser("lookup", help="look up URL/host/domain on " "RiskIQ Global Blacklist (GBL)") parser.add_argument("urls", nargs="+", metavar="URL", help="URL/host/domain for which to query") add_render_args(parser)
def add_parser(subs):
    """Register the 'list' subcommand: query blacklisted resources.

    Attaches the shared time-range and rendering options.
    """
    p = subs.add_parser('list', help='query blacklisted resources')
    p.add_argument(
        '--filter', '-f',
        default=None,
        help='filter to one of "blackhole", "sakura" or "exploitKit"',
    )
    add_timerange_args(p)
    add_render_args(p)
def add_parser(subs):
    """Register the 'lookup' subcommand: GBL lookup for URLs/hosts/domains."""
    p = subs.add_parser(
        'lookup',
        help='look up URL/host/domain on '
             'RiskIQ Global Blacklist (GBL)',
    )
    p.add_argument(
        'urls',
        nargs='+',
        metavar='URL',
        help='URL/host/domain for which to query',
    )
    add_render_args(p)