Пример #1
0
def main():
    """CLI entry point: query zlist URLs for a date range and print them.

    Flags:
        --days/-d   number of days to query (default 1)
        --start/-s  start datetime, "yyyy-mm-dd HH:MM:SS" or "today HH:MM:SS"
        --end/-e    end datetime, same formats as --start
        -j/--json   emit raw JSON instead of the rendered template
    """
    import argparse
    parser = argparse.ArgumentParser()

    parser.add_argument('-j', '--json', action="store_true", dest='as_json',
        help="Output as JSON")
    parser.add_argument('--days', '-d', default=1, type=int,
        help='days to query')
    parser.add_argument('--start', '-s', default=None,
        help='start datetime in "yyyy-mm-dd HH:MM:SS" format '
            '(or "today HH:MM:SS")')
    parser.add_argument('--end', '-e', default=None,
        help='end datetime in "yyyy-mm-dd HH:MM:SS" format '
            '(or "today HH:MM:SS")')
    args = parser.parse_args()

    client = Client.from_config()

    # NOTE: a `kwargs` dict used to be assembled here but was never passed
    # anywhere; the query arguments are handed to the client directly.
    data = client.get_zlist_urls(days=args.days, start=args.start,
        end=args.end)

    if args.as_json:
        print(json.dumps(data, indent=4))
    elif data:
        print(renderer(data, 'zlist/urls'))
Пример #2
0
 def __init__(self):
     """Initialize the RiskIQ plugin, loading credentials from config.

     On any failure to construct the riskiq client, logs the error and
     sets ``self.enabled`` to False instead of raising, so the host
     application keeps running without this plugin.
     """
     # enabled flips to False below if the client can't be configured.
     self.enabled = True
     self.opts = dict()
     try:
         self.client = Client.from_config()
     except Exception:
         # Broad catch is deliberate: any failure (missing riskiq package,
         # missing/invalid config) should only disable the plugin.
         log.error(traceback.format_exc())
         log.error('Failed to load RiskIQ config - disabling RiskIQ plugin.')
         log.error('Please pip install riskiq and run `riq-config setup`')
         self.enabled = False
Пример #3
0
def main():
    """CLI entry point: passive-DNS lookups by hostname/IP.

    Subcommands:
        name -- look up DNS records by query name
        data -- look up DNS records by response data

    Each positional address (or stdin via util.stdin) is resolved and
    queried; output format is chosen by --json/--short/--text/--verbose.
    """
    import argparse
    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    name_p = subs.add_parser('name')
    name_p.add_argument('addrs', nargs='+', help='Hostname or IP addresses')
    name_p.add_argument('--rrtype', '-t', default=None)
    name_p.add_argument('--json', '-j', action="store_true",
        help="Output as JSON")
    name_p.add_argument('--short', '-s', action="store_true",
        help="Output newline-separated data only")
    name_p.add_argument('--text', '-T', action="store_true",
        help="Output full human readable text")
    name_p.add_argument('--verbose', '-v', action="store_true",
        help="Output verbose records with first/lastseen times and observed count")

    data_p = subs.add_parser('data')
    data_p.add_argument('addrs', nargs='+', help='Hostname or IP addresses')
    data_p.add_argument('--rrtype', '-t', default=None)
    data_p.add_argument('--json', '-j', action="store_true",
        help="Output as JSON")
    data_p.add_argument('--short', '-s', action="store_true",
        help="Output newline-separated name only")
    data_p.add_argument('--text', '-T', action="store_true",
        help="Output full human readable text")
    data_p.add_argument('--verbose', '-v', action="store_true",
        help="Output verbose records with first/lastseen times and observed count")

    args = parser.parse_args()
    if not args.cmd:
        # Python 3 argparse treats subcommands as optional; without this
        # guard a bare invocation crashes on `args.addrs` below.
        parser.error('a subcommand is required: name or data')

    # One client for the whole run; previously a new client was built on
    # every loop iteration for no benefit.
    client = Client.from_config()

    addrs = util.stdin(args.addrs)
    for addr in addrs:
        ip, hostname = ip_hostname(addr)

        try:
            data = get_data(client, args.cmd, rrtype=args.rrtype,
                hostname=hostname, ip=ip)
        except ValueError as e:
            parser.print_usage()
            sys.stderr.write('{}\n'.format(str(e)))
            sys.exit(1)

        if args.json:
            print(json.dumps(data, indent=4))
        elif args.short and args.cmd == 'data':
            print(renderer(data, 'dns/dns_data'))
        elif args.short and args.cmd == 'name':
            print(renderer(data, 'dns/dns_name'))
        elif args.text:
            print(renderer(data, 'dns/dns'))
        elif data:
            print(renderer(data, 'dns/dns_oneline', verbose=args.verbose))
Пример #4
0
def main():
    """CLI entry point: run a WHOIS search and print the results.

    Criteria come from --domain/--email/--name-server (any subset);
    --max-results caps the result count.  Output is rendered text by
    default, or raw JSON with -j/--json.
    """
    import argparse
    parser = argparse.ArgumentParser()
    for long_flag, short_flag in (('--domain', '-d'), ('--email', '-e'),
                                  ('--name-server', '-n')):
        parser.add_argument(long_flag, short_flag)
    parser.add_argument('--max-results', '-m', type=int, default=100)
    parser.add_argument('-j', '--json', action="store_true", dest='as_json',
                        help="Output as JSON")
    opts = parser.parse_args()

    client = Client.from_config()
    results = client.post_whois(
        domain=opts.domain,
        email=opts.email,
        name_server=opts.name_server,
        max_results=opts.max_results)

    output = (json.dumps(results, indent=4) if opts.as_json
              else renderer(results, 'whois/whois'))
    print(output)
Пример #5
0
def main():
    """CLI entry point for the binaries service.

    Subcommands:
        list     -- list binaries observed in a date range
        download -- download binaries by MD5 hash (or hashes via stdin)
    """
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    list_parser = subs.add_parser('list', help='list binaries in date range')
    list_parser.add_argument('--days', '-d', default=1, type=int,
        help='days to query')
    list_parser.add_argument('--start', '-s', default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('--end', '-e', default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")

    download_parser = subs.add_parser('download',
        help='download from MD5 hash, or hashes')
    download_parser.add_argument('md5hash',
        help='md5 hash to download')
    download_parser.add_argument('output',
        help='path to output file to, - for stdout')
    download_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")
    download_parser.add_argument('-d', '--output-dir',
        help='dir to dump $hash.bin to')

    args = parser.parse_args()
    if not args.cmd:
        # Python 3 argparse treats subcommands as optional; without this
        # guard a bare invocation crashes on `args.as_json` below.
        parser.error('a subcommand is required: list or download')
    kwargs = {'as_json': args.as_json}
    # Date-range options only exist on the `list` subparser.
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end

    client = Client.from_config()

    if args.cmd == 'list':
        bin_list(client, **kwargs)
    elif args.cmd == 'download':
        hashes = util.stdin([args.md5hash])
        for i, md5hash in enumerate(hashes):
            output = args.output
            # When several hashes arrive on stdin, suffix each output
            # file with its index so downloads don't overwrite each other.
            if output != '-' and len(hashes) > 1:
                output = '%s.%d' % (args.output, i)
            bin_download(client, md5hash, output, output_dir=args.output_dir,
                **kwargs)
Пример #6
0
def main():
    """CLI entry point: passive-DNS lookups via optparse.

    Positional args are query terms; -q/-d/-i force the query type
    (name / response data / response IP).  Without a flag the type is
    auto-detected per argument: IP-looking terms query by IP, anything
    else queries by name.  -j prints JSON, otherwise rendered text.
    """
    parser = OptionParser()
    parser.add_option('-q', '--query', dest='query', action="store_true", default=False, help="Query Domain Name")
    parser.add_option('-d', '--data', dest='rdata', action="store_true", default=False, help="Response Data")
    parser.add_option('-i', '--ip', dest='rdata_ip', action="store_true", default=False, help="Response Data(IP)")
    parser.add_option('-t', '--rrtype', dest='rrtype', default=None, help="Record Type")
    parser.add_option('-j', '--json', dest='json', action="store_true", default=False, help="Output as JSON")
    options, args = parser.parse_args()
    if not args:
        parser.print_help()
        sys.exit(-1)
    client = Client.from_config()

    # Explicit type from flags; the last flag checked wins if several are set.
    qtype = None
    if options.query:
        qtype = 'query'
    if options.rdata:
        qtype = 'data'
    if options.rdata_ip:
        qtype = 'ip'
    results = []
    for arg in args:
        # Auto-detect per argument.  (Previously the first detection was
        # written back into `qtype`, so a hostname following an IP arg was
        # incorrectly queried as an IP.)
        arg_qtype = qtype
        if not arg_qtype:
            arg_qtype = 'ip' if IP_REGEX.match(arg) else 'query'
        if arg_qtype == 'data':
            results.append(client.get_dns_data_by_data(arg, rrtype=options.rrtype))
        if arg_qtype == 'ip':
            results.append(client.get_dns_data_by_ip(arg, rrtype=options.rrtype))
        if arg_qtype == 'query':
            results.append(client.get_dns_data_by_name(arg, rrtype=options.rrtype))
    results = PassiveDNS(results)
    if options.json:
        print(results.json)
        sys.exit(0)
    print(results.text)
    sys.exit(0)
Пример #7
0
def main():
    """CLI entry point: run a WHOIS search and print the results.

    Search criteria are taken from --domain/--email/--name-server;
    --max-results limits the result count.  -j/--json emits raw JSON,
    otherwise the whois template is rendered.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--domain', '-d')
    parser.add_argument('--email', '-e')
    parser.add_argument('--name-server', '-n')
    parser.add_argument('--max-results', '-m', type=int, default=100)
    parser.add_argument('-j', '--json', dest='as_json', action="store_true",
                        help="Output as JSON")
    args = parser.parse_args()

    client = Client.from_config()
    results = client.post_whois(domain=args.domain, email=args.email,
                                name_server=args.name_server,
                                max_results=args.max_results)

    if not args.as_json:
        print(renderer(results, 'whois/whois'))
    else:
        print(json.dumps(results, indent=4))
Пример #8
0
def main():
    """Top-level blacklist CLI: build the parser, collect shared kwargs,
    and dispatch to the chosen subcommand's main function.
    """
    parser = ArgumentParser()
    parser.add_argument('--dump-requests', action='store_true')
    parser.add_argument('--stix',
        help='output results to STIX file (requires riskiq[stix] package)')
    subs = parser.add_subparsers(dest='cmd')
    # Each subcommand module registers its own subparser.
    for module in (lookup, incident, incidentlist, bl_list, malware):
        module.add_parser(subs)
    args = parser.parse_args()

    client = Client.from_config()
    if args.dump_requests:
        client._dump_requests()

    # Forward only the shared options that this subcommand actually defined.
    shared_options = ('as_json', 'oneline', 'stix', 'days', 'start', 'end',
                      'verbose', 'timeout', 'six_hours', 'filter',
                      'confidence', 'template')
    kwargs = {name: getattr(args, name)
              for name in shared_options if hasattr(args, name)}

    MAIN_FUNC[args.cmd](client, args, kwargs)
Пример #9
0
def main():
    """Top-level blacklist CLI entry point.

    Builds the argument parser, lets each subcommand module register its
    subparser, gathers the shared options present on the parsed namespace,
    then dispatches to the subcommand's main function.
    """
    parser = ArgumentParser()
    parser.add_argument('--dump-requests', action='store_true')
    parser.add_argument(
        '--stix',
        help='output results to STIX file (requires riskiq[stix] package)')
    subs = parser.add_subparsers(dest='cmd')
    for module in (lookup, incident, incidentlist, bl_list, malware):
        module.add_parser(subs)
    args = parser.parse_args()

    client = Client.from_config()
    if args.dump_requests:
        client._dump_requests()

    wanted = {'as_json', 'oneline', 'stix', 'days', 'start', 'end',
              'verbose', 'timeout', 'six_hours', 'filter', 'confidence',
              'template'}
    # Only options the chosen subparser defined appear on the namespace.
    kwargs = {key: value for key, value in vars(args).items()
              if key in wanted}

    sub_main = MAIN_FUNC[args.cmd]
    sub_main(client, args, kwargs)
Пример #10
0
def main():
    """CLI entry point for the binaries service.

    Subcommands:
        list     -- list binaries observed in a date range
        download -- download binaries by MD5 hash (or hashes via stdin)
    """
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    list_parser = subs.add_parser('list', help='list binaries in date range')
    list_parser.add_argument('--days',
                             '-d',
                             default=1,
                             type=int,
                             help='days to query')
    list_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('--end',
                             '-e',
                             default=None,
                             help='end datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('-j',
                             '--json',
                             action="store_true",
                             dest='as_json',
                             help="Output as JSON")

    download_parser = subs.add_parser('download',
                                      help='download from MD5 hash, or hashes')
    download_parser.add_argument('md5hash', help='md5 hash to download')
    download_parser.add_argument('output',
                                 help='path to output file to, - for stdout')
    download_parser.add_argument('-j',
                                 '--json',
                                 action="store_true",
                                 dest='as_json',
                                 help="Output as JSON")
    download_parser.add_argument('-d',
                                 '--output-dir',
                                 help='dir to dump $hash.bin to')

    args = parser.parse_args()
    if not args.cmd:
        # Python 3 argparse treats subcommands as optional; without this
        # guard a bare invocation crashes on `args.as_json` below.
        parser.error('a subcommand is required: list or download')
    kwargs = {'as_json': args.as_json}
    # Date-range options only exist on the `list` subparser.
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end

    client = Client.from_config()

    if args.cmd == 'list':
        bin_list(client, **kwargs)
    elif args.cmd == 'download':
        hashes = util.stdin([args.md5hash])
        for i, md5hash in enumerate(hashes):
            output = args.output
            # When several hashes arrive on stdin, suffix each output file
            # with its index so downloads don't overwrite each other.
            if output != '-' and len(hashes) > 1:
                output = '%s.%d' % (args.output, i)
            bin_download(client,
                         md5hash,
                         output,
                         output_dir=args.output_dir,
                         **kwargs)
Пример #11
0
        tf_team = tf.create_team(options.team)
        if tf_team.success:
            print('Team {0} was created with id: {1}'.format(
                options.team, tf_team.data['id']))
        else:
            print('Problem creating team: {0}'.format(tf_team.message))
            exit(1)
    print(
        'Going to create applications in ThreadFix server {0} under team {1}'.
        format(options.server, options.team))

# Query RiskIQ to request a list of the web sites they've discovered

print('Querying RiskIQ to request list of discovered websites')

client = Client.from_config()
# Empty free-text query: rely solely on the structured filter below.
queryString = ''
# NOTE(review): `filter` shadows the builtin of the same name; kept as-is
# because later parts of this script may reference it.
filter = {
    'field': FilterField.AssetType,
    'value': FilterValue.WebSite,
    'type': FilterOperation.Equals
}

results = client.post_inventory_search(queryString, filter)
# The search response keys the matched assets under 'inventoryAsset'.
inventory_assets = results['inventoryAsset']

for asset in inventory_assets:
    website = asset['webSite']
    print('Website identified: {0}'.format(website['initialUrl']))

    # Create the application in ThreadFix, if needed
Пример #12
0
def main():
    """CLI entry point for landing-page operations.

    Subcommands:
        get      -- retrieve landing pages by MD5 hash
        submit   -- submit one or more landing-page URLs for analysis
        crawled  -- list landing pages by crawl date (max 100)
        flagged  -- list landing pages by known-profile creation date
        binary   -- list landing pages with malicious binary incidents
        projects -- list projects pages may be submitted to

    Output is rendered text by default, or raw JSON with -j/--json.
    """
    import argparse
    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    get_parser = subs.add_parser(
        'get', help='Retrieve a single landingpage by MD5 hash')
    get_parser.add_argument('md5_hashes', nargs='+')
    get_parser.add_argument('--whois',
                            '-w',
                            action='store_true',
                            help='whether to include whois information')
    get_parser.add_argument('-j',
                            '--json',
                            action="store_true",
                            dest='as_json',
                            help="Output as JSON")

    submit_parser = subs.add_parser(
        'submit', help='Submit at least one or many landing pages.')
    submit_parser.add_argument('urls', nargs='+')
    submit_parser.add_argument('--project',
                               '-p',
                               help='Project name to submit to')
    submit_parser.add_argument('--keyword', '-k', help='Optional Keyword')
    submit_parser.add_argument(
        '--md5', '-m', help='Optional MD5 representing the canonical ID')
    submit_parser.add_argument(
        '--pingback-url',
        '-P',
        help='Optional URL to be GET requested upon completion of analysis')
    submit_parser.add_argument(
        '--fields',
        '-f',
        nargs='*',
        help='Optional list of custom fields eg -f foo=bar alpha=beta')
    submit_parser.add_argument('-j',
                               '--json',
                               action="store_true",
                               dest='as_json',
                               help="Output as JSON")

    crawled_parser = subs.add_parser(
        'crawled', help='List landing pages by crawl date - maximum of 100')
    crawled_parser.add_argument('--whois',
                                '-w',
                                action='store_true',
                                help='whether to include whois information')
    crawled_parser.add_argument('--days',
                                '-d',
                                default=None,
                                type=int,
                                help='days to query')
    crawled_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    crawled_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    crawled_parser.add_argument('-j',
                                '--json',
                                action="store_true",
                                dest='as_json',
                                help="Output as JSON")

    flagged_parser = subs.add_parser(
        'flagged',
        help='List landing pages by known profile creation date - '
        'maximum of 100')
    flagged_parser.add_argument('--whois',
                                '-w',
                                action='store_true',
                                help='whether to include whois information')
    flagged_parser.add_argument('--days',
                                '-d',
                                default=None,
                                type=int,
                                help='days to query')
    flagged_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    flagged_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    flagged_parser.add_argument('-j',
                                '--json',
                                action="store_true",
                                dest='as_json',
                                help="Output as JSON")

    binary_parser = subs.add_parser(
        'binary',
        help='List landing pages with malicious binary incidents. '
        'A malicious binary is any non-text file that is suspected of '
        'containing malware or exploit code. A landing page is linked to '
        'any such binary that is embedded or easily reachable from it.')
    binary_parser.add_argument('--whois',
                               '-w',
                               action='store_true',
                               help='whether to include whois information')
    binary_parser.add_argument('--days',
                               '-d',
                               default=1,
                               type=int,
                               help='days to query')
    binary_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    binary_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    binary_parser.add_argument('-j',
                               '--json',
                               action="store_true",
                               dest='as_json',
                               help="Output as JSON")

    pjs_parser = subs.add_parser(
        'projects',
        help='List all projects that landing pages may be submitted to.')
    pjs_parser.add_argument('-j',
                            '--json',
                            action="store_true",
                            dest='as_json',
                            help="Output as JSON")

    args = parser.parse_args()
    if not args.cmd:
        # Python 3 argparse treats subcommands as optional; without this
        # guard a bare invocation crashes on `args.as_json` below.
        parser.error('a subcommand is required')
    client = Client.from_config()

    kwargs = {'as_json': args.as_json}

    # Only forward options that the chosen subparser actually defined.
    if hasattr(args, 'whois'):
        kwargs['whois'] = args.whois
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end
    if args.cmd == 'get':
        md5_hashes = util.stdin(args.md5_hashes)
        for md5_hash in md5_hashes:
            lp_get(client, md5_hash, **kwargs)
    elif args.cmd == 'submit':
        urls = util.stdin(args.urls)
        kwargs.update({
            'keyword': args.keyword,
            'md5_hash': args.md5,
            'pingback_url': args.pingback_url,
            'project_name': args.project,
        })
        if args.fields:
            # -f foo=bar alpha=beta -> {'foo': 'bar', 'alpha': 'beta'}
            kwargs.update(
                {'fields': dict([f.split('=') for f in args.fields])})
        if len(urls) == 1:
            lp_submit(client, urls[0], **kwargs)
        else:
            lp_submit_bulk(client, urls, **kwargs)
    elif args.cmd == 'crawled':
        lp_crawled(client, **kwargs)
    elif args.cmd == 'flagged':
        lp_flagged(client, **kwargs)
    elif args.cmd == 'binary':
        lp_binary(client, **kwargs)
    elif args.cmd == 'projects':
        lp_projects(client, **kwargs)
		print ('Team {0} already exists. Will add applications to that team'.format(options.team))
	else:
		print ('Team {0} does not exist. Creating'.format(options.team))
		tf_team = tf.create_team(options.team)
		if tf_team.success:
			print ('Team {0} was created with id: {1}'.format(options.team, tf_team.data['id']))
		else:
			print ('Problem creating team: {0}'.format(tf_team.message))
			exit(1)
	print ('Going to create applications in ThreadFix server {0} under team {1}'.format(options.server, options.team))

# Query RiskIQ to request a list of the web sites they've discovered

print ('Querying RiskIQ to request list of discovered websites')

client = Client.from_config()
# Empty free-text query: rely solely on the structured filter below.
queryString = ''
# NOTE(review): `filter` shadows the builtin of the same name; kept as-is
# because later parts of this script may reference it.
filter = {'field':FilterField.AssetType, 'value':FilterValue.WebSite, 'type':FilterOperation.Equals}

results = client.post_inventory_search(queryString, filter)
# The search response keys the matched assets under 'inventoryAsset'.
inventory_assets = results['inventoryAsset']

for asset in inventory_assets:
	website = asset['webSite']
	print('Website identified: {0}'.format(website['initialUrl']))

	# Create the application in ThreadFix, if needed
	if options.create:
		application_name = website['initialUrl']
		# Use the application name also for the ThreadFix application URL because that is what RiskIQ gives us
		tf_application = tf.create_application(tf_team.data['id'], application_name, application_name)
Пример #14
0
def main():
    """CLI entry point: passive-DNS lookups via optparse.

    Positional args are query terms; -q/-d/-i force the query type
    (name / response data / response IP).  Without a flag the type is
    auto-detected per argument: IP-looking terms query by IP, anything
    else queries by name.  -j prints JSON, otherwise rendered text.
    """
    parser = OptionParser()
    parser.add_option('-q',
                      '--query',
                      dest='query',
                      action="store_true",
                      default=False,
                      help="Query Domain Name")
    parser.add_option('-d',
                      '--data',
                      dest='rdata',
                      action="store_true",
                      default=False,
                      help="Response Data")
    parser.add_option('-i',
                      '--ip',
                      dest='rdata_ip',
                      action="store_true",
                      default=False,
                      help="Response Data(IP)")
    parser.add_option('-t',
                      '--rrtype',
                      dest='rrtype',
                      default=None,
                      help="Record Type")
    parser.add_option('-j',
                      '--json',
                      dest='json',
                      action="store_true",
                      default=False,
                      help="Output as JSON")
    options, args = parser.parse_args()
    if not args:
        parser.print_help()
        sys.exit(-1)
    client = Client.from_config()

    # Explicit type from flags; the last flag checked wins if several are set.
    qtype = None
    if options.query:
        qtype = 'query'
    if options.rdata:
        qtype = 'data'
    if options.rdata_ip:
        qtype = 'ip'
    results = []
    for arg in args:
        # Auto-detect per argument.  (Previously the first detection was
        # written back into `qtype`, so a hostname following an IP arg was
        # incorrectly queried as an IP.)
        arg_qtype = qtype
        if not arg_qtype:
            arg_qtype = 'ip' if IP_REGEX.match(arg) else 'query'
        if arg_qtype == 'data':
            results.append(
                client.get_dns_data_by_data(arg, rrtype=options.rrtype))
        if arg_qtype == 'ip':
            results.append(
                client.get_dns_data_by_ip(arg, rrtype=options.rrtype))
        if arg_qtype == 'query':
            results.append(
                client.get_dns_data_by_name(arg, rrtype=options.rrtype))
    results = PassiveDNS(results)
    if options.json:
        print(results.json)
        sys.exit(0)
    print(results.text)
    sys.exit(0)
Пример #15
0
def main():
    """CLI entry point for landing-page operations.

    Subcommands:
        get      -- retrieve landing pages by MD5 hash
        submit   -- submit one or more landing-page URLs for analysis
        crawled  -- list landing pages by crawl date (max 100)
        flagged  -- list landing pages by known-profile creation date
        binary   -- list landing pages with malicious binary incidents
        projects -- list projects pages may be submitted to

    Output is rendered text by default, or raw JSON with -j/--json.
    """
    import argparse
    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    get_parser = subs.add_parser('get',
        help='Retrieve a single landingpage by MD5 hash')
    get_parser.add_argument('md5_hashes', nargs='+')
    get_parser.add_argument('--whois', '-w', action='store_true',
        help='whether to include whois information')
    get_parser.add_argument('-j', '--json', action="store_true", dest='as_json',
        help="Output as JSON")

    submit_parser = subs.add_parser('submit',
        help='Submit at least one or many landing pages.')
    submit_parser.add_argument('urls', nargs='+')
    submit_parser.add_argument('--project', '-p',
        help='Project name to submit to')
    submit_parser.add_argument('--keyword', '-k',
        help='Optional Keyword')
    submit_parser.add_argument('--md5', '-m',
        help='Optional MD5 representing the canonical ID')
    submit_parser.add_argument('--pingback-url', '-P',
        help='Optional URL to be GET requested upon completion of analysis')
    submit_parser.add_argument('--fields', '-f', nargs='*',
        help='Optional list of custom fields eg -f foo=bar alpha=beta')
    submit_parser.add_argument('-j', '--json', action="store_true", dest='as_json',
        help="Output as JSON")

    crawled_parser = subs.add_parser('crawled',
        help='List landing pages by crawl date - maximum of 100')
    crawled_parser.add_argument('--whois', '-w', action='store_true',
        help='whether to include whois information')
    crawled_parser.add_argument('--days', '-d', default=None, type=int,
        help='days to query')
    crawled_parser.add_argument('--start', '-s', default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    crawled_parser.add_argument('--end', '-e', default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    crawled_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")

    flagged_parser = subs.add_parser('flagged',
        help='List landing pages by known profile creation date - '
            'maximum of 100')
    flagged_parser.add_argument('--whois', '-w', action='store_true',
        help='whether to include whois information')
    flagged_parser.add_argument('--days', '-d', default=None, type=int,
        help='days to query')
    flagged_parser.add_argument('--start', '-s', default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    flagged_parser.add_argument('--end', '-e', default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    flagged_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")

    binary_parser = subs.add_parser('binary',
        help='List landing pages with malicious binary incidents. '
            'A malicious binary is any non-text file that is suspected of '
            'containing malware or exploit code. A landing page is linked to '
            'any such binary that is embedded or easily reachable from it.'
    )
    binary_parser.add_argument('--whois', '-w', action='store_true',
        help='whether to include whois information')
    binary_parser.add_argument('--days', '-d', default=1, type=int,
        help='days to query')
    binary_parser.add_argument('--start', '-s', default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    binary_parser.add_argument('--end', '-e', default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    binary_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")

    pjs_parser = subs.add_parser('projects',
        help='List all projects that landing pages may be submitted to.')
    pjs_parser.add_argument('-j', '--json', action="store_true", dest='as_json',
        help="Output as JSON")

    args = parser.parse_args()
    if not args.cmd:
        # Python 3 argparse treats subcommands as optional; without this
        # guard a bare invocation crashes on `args.as_json` below.
        parser.error('a subcommand is required')
    client = Client.from_config()

    kwargs = {'as_json': args.as_json}

    # Only forward options that the chosen subparser actually defined.
    if hasattr(args, 'whois'):
        kwargs['whois'] = args.whois
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end
    if args.cmd == 'get':
        md5_hashes = util.stdin(args.md5_hashes)
        for md5_hash in md5_hashes:
            lp_get(client, md5_hash, **kwargs)
    elif args.cmd == 'submit':
        urls = util.stdin(args.urls)
        kwargs.update({
            'keyword': args.keyword,
            'md5_hash': args.md5,
            'pingback_url': args.pingback_url,
            'project_name': args.project,
        })
        if args.fields:
            # -f foo=bar alpha=beta -> {'foo': 'bar', 'alpha': 'beta'}
            kwargs.update({'fields': dict([f.split('=') for f in args.fields])})
        if len(urls) == 1:
            lp_submit(client, urls[0], **kwargs)
        else:
            lp_submit_bulk(client, urls, **kwargs)
    elif args.cmd == 'crawled':
        lp_crawled(client, **kwargs)
    elif args.cmd == 'flagged':
        lp_flagged(client, **kwargs)
    elif args.cmd == 'binary':
        lp_binary(client, **kwargs)
    elif args.cmd == 'projects':
        lp_projects(client, **kwargs)