Example #1: look up each input URL on the blacklist and yield non-empty results
def run(client, args, kwargs):
    # stdin() is assumed to expand empty or '-' arguments by reading
    # newline-separated URLs from standard input (see the sketch below).
    urls = stdin(args.urls)
    for url in urls:
        data = client.get_blacklist_lookup(url)
        # Skip URLs with no blacklist record.
        if not data:
            continue
        yield {url: data}, kwargs
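The stdin() helper is not defined in these examples (the later ones call it as util.stdin). A minimal sketch of the convention its call sites suggest, reading newline-separated values from standard input when no concrete arguments were given; the real helper's behavior may differ:

import sys

def stdin(args):
    # Hypothetical reconstruction: fall back to standard input when the
    # arguments are empty or a lone '-'.
    if not args or list(args) == ['-']:
        return [line.strip() for line in sys.stdin if line.strip()]
    return list(args)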
Example #2: fetch blacklist incidents, paging CHUNK_SIZE at a time unless explicit bounds are given
def run(client, args, kwargs):
    urls = stdin(args.urls)
    if args.start_index is None and args.max_results is None:
        for url in urls:
            chunk_i, data_ct = 0, 0
            data = {}
            incidents = []
            # Page through the incidents CHUNK_SIZE at a time until the
            # reported totalResults have all been collected.
            while True:
                data_i = client.get_blacklist_incident(url,
                                                       start_index=chunk_i,
                                                       max_results=CHUNK_SIZE)
                data.update(data_i)
                incidents += data_i['incident']
                size_data = len(data_i['incident'])
                # Guard against an empty page; without this, a server that
                # never reaches totalResults would loop forever.
                if size_data == 0:
                    break
                data_ct += size_data
                chunk_i += size_data
                if data_ct >= data_i['totalResults']:
                    break
            if not incidents:
                continue
            data['incident'] = incidents
            yield {url: data}, kwargs
    else:
        # Explicit paging bounds were supplied; forward only those that
        # were actually set.
        blkwargs = {}
        for url_param in ('start_index', 'max_results'):
            if getattr(args, url_param) is not None:
                blkwargs[url_param] = getattr(args, url_param)
        for url in urls:
            data = client.get_blacklist_incident(url, **blkwargs)
            yield {url: data}, kwargs
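The paginated branch above can be exercised without a live service by stubbing the client. A small sketch, in which MockClient, its 25-incident fixture, and CHUNK_SIZE = 10 are all invented for illustration, showing that the loop reassembles every page into a single incident list:

import argparse

CHUNK_SIZE = 10  # run() above reads this name from module scope

class MockClient:
    """Serves 25 fake incidents, at most max_results per call."""
    TOTAL = 25

    def get_blacklist_incident(self, url, start_index=0, max_results=10):
        end = min(start_index + max_results, self.TOTAL)
        return {
            'totalResults': self.TOTAL,
            'incident': [{'url': url, 'id': i}
                         for i in range(start_index, end)],
        }

args = argparse.Namespace(urls=['http://example.com'],
                          start_index=None, max_results=None)
for result, _ in run(MockClient(), args, {}):
    ((url, data),) = result.items()
    assert len(data['incident']) == data['totalResults'] == 25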
Example #3: DNS lookup CLI with JSON, short, text, and one-line output modes
def main():
    import argparse
    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    name_p = subs.add_parser('name')
    name_p.add_argument('addrs', nargs='+', help='Hostname or IP addresses')
    name_p.add_argument('--rrtype', '-t', default=None)
    name_p.add_argument('--json', '-j', action="store_true",
        help="Output as JSON")
    name_p.add_argument('--short', '-s', action="store_true",
        help="Output newline-separated data only")
    name_p.add_argument('--text', '-T', action="store_true",
        help="Output full human readable text")
    name_p.add_argument('--verbose', '-v', action="store_true",
        help="Output verbose records with first/lastseen times and observed count")

    data_p = subs.add_parser('data')
    data_p.add_argument('addrs', nargs='+', help='Hostname or IP addresses')
    data_p.add_argument('--rrtype', '-t', default=None)
    data_p.add_argument('--json', '-j', action="store_true",
        help="Output as JSON")
    data_p.add_argument('--short', '-s', action="store_true",
        help="Output newline-separated name only")
    data_p.add_argument('--text', '-T', action="store_true",
        help="Output full human readable text")
    data_p.add_argument('--verbose', '-v', action="store_true",
        help="Output verbose records with first/lastseen times and observed count")

    args = parser.parse_args()

    addrs = util.stdin(args.addrs)
    # Build the client once rather than re-creating it for every address.
    client = Client.from_config()
    for addr in addrs:
        ip, hostname = ip_hostname(addr)

        try:
            data = get_data(client, args.cmd, rrtype=args.rrtype,
                hostname=hostname, ip=ip)
        except ValueError as e:
            parser.print_usage()
            sys.stderr.write('{}\n'.format(str(e)))
            sys.exit(1)

        if args.json:
            print(json.dumps(data, indent=4))
        elif args.short and args.cmd == 'data':
            print(renderer(data, 'dns/dns_data'))
        elif args.short and args.cmd == 'name':
            print(renderer(data, 'dns/dns_name'))
        elif args.text:
            print(renderer(data, 'dns/dns'))
        elif data:
            print(renderer(data, 'dns/dns_oneline', verbose=args.verbose))
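ip_hostname() is assumed to classify each input as either an IP address or a hostname so that get_data() receives exactly one of the two; a plausible standard-library sketch, with the (ip, hostname) tuple shape inferred from the call above:

import ipaddress

def ip_hostname(addr):
    # Return (ip, hostname) with exactly one side set.
    try:
        ipaddress.ip_address(addr)
        return addr, None
    except ValueError:
        return None, addr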
Example #4: CLI for listing binaries in a date range and downloading them by MD5 hash
def main():
    parser = ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    list_parser = subs.add_parser('list', help='list binaries in date range')
    list_parser.add_argument('--days', '-d', default=1, type=int,
        help='days to query')
    list_parser.add_argument('--start', '-s', default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('--end', '-e', default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format')
    list_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")

    download_parser = subs.add_parser('download',
        help='download one or more binaries by MD5 hash')
    download_parser.add_argument('md5hash',
        help='md5 hash to download')
    download_parser.add_argument('output',
        help='path to write the output to, - for stdout')
    download_parser.add_argument('-j', '--json', action="store_true",
        dest='as_json', help="Output as JSON")
    download_parser.add_argument('-d', '--output-dir',
        help='dir to dump $hash.bin to')

    args = parser.parse_args()
    kwargs = {'as_json': args.as_json}
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end

    client = Client.from_config()

    if args.cmd == 'list':
        bin_list(client, **kwargs)
    elif args.cmd == 'download':
        hashes = util.stdin([args.md5hash])
        for i, md5hash in enumerate(hashes):
            output = args.output
            if output != '-' and len(hashes) > 1:
                output = '%s.%d' % (args.output, i)
            bin_download(client, md5hash, output, output_dir=args.output_dir,
                **kwargs)
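This entry point and Example #5 both copy optional argparse attributes into kwargs behind hasattr checks. The pattern factors into a small helper; namespace_subset is a name invented here, not part of the original code:

def namespace_subset(args, names):
    # Keep only the attributes the chosen subcommand actually defined.
    return {name: getattr(args, name)
            for name in names if hasattr(args, name)}

kwargs = {'as_json': args.as_json}
kwargs.update(namespace_subset(args, ('days', 'start', 'end')))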
Example #5: landing-page CLI with get, submit, crawled, flagged, binary, and projects subcommands
def main():
    import argparse
    parser = argparse.ArgumentParser()
    subs = parser.add_subparsers(dest='cmd')

    get_parser = subs.add_parser(
        'get', help='Retrieve a single landing page by MD5 hash')
    get_parser.add_argument('md5_hashes', nargs='+')
    get_parser.add_argument('--whois',
                            '-w',
                            action='store_true',
                            help='whether to include whois information')
    get_parser.add_argument('-j',
                            '--json',
                            action="store_true",
                            dest='as_json',
                            help="Output as JSON")

    submit_parser = subs.add_parser(
        'submit', help='Submit one or more landing pages.')
    submit_parser.add_argument('urls', nargs='+')
    submit_parser.add_argument('--project',
                               '-p',
                               help='Project name to submit to')
    submit_parser.add_argument('--keyword', '-k', help='Optional Keyword')
    submit_parser.add_argument(
        '--md5', '-m', help='Optional MD5 representing the canonical ID')
    submit_parser.add_argument(
        '--pingback-url',
        '-P',
        help='Optional URL to be GET requested upon completion of analysis')
    submit_parser.add_argument(
        '--fields',
        '-f',
        nargs='*',
        help='Optional list of custom fields, e.g. -f foo=bar alpha=beta')
    submit_parser.add_argument('-j',
                               '--json',
                               action="store_true",
                               dest='as_json',
                               help="Output as JSON")

    crawled_parser = subs.add_parser(
        'crawled', help='List landing pages by crawl date - maximum of 100')
    crawled_parser.add_argument('--whois',
                                '-w',
                                action='store_true',
                                help='whether to include whois information')
    crawled_parser.add_argument('--days',
                                '-d',
                                default=None,
                                type=int,
                                help='days to query')
    crawled_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    crawled_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    crawled_parser.add_argument('-j',
                                '--json',
                                action="store_true",
                                dest='as_json',
                                help="Output as JSON")

    flagged_parser = subs.add_parser(
        'flagged',
        help='List landing pages by known profile creation date - '
        'maximum of 100')
    flagged_parser.add_argument('--whois',
                                '-w',
                                action='store_true',
                                help='whether to include whois information')
    flagged_parser.add_argument('--days',
                                '-d',
                                default=None,
                                type=int,
                                help='days to query')
    flagged_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    flagged_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    flagged_parser.add_argument('-j',
                                '--json',
                                action="store_true",
                                dest='as_json',
                                help="Output as JSON")

    binary_parser = subs.add_parser(
        'binary',
        help='List landing pages with malicious binary incidents. '
        'A malicious binary is any non-text file that is suspected of '
        'containing malware or exploit code. A landing page is linked to '
        'any such binary that is embedded or easily reachable from it.')
    binary_parser.add_argument('--whois',
                               '-w',
                               action='store_true',
                               help='whether to include whois information')
    binary_parser.add_argument('--days',
                               '-d',
                               default=1,
                               type=int,
                               help='days to query')
    binary_parser.add_argument(
        '--start',
        '-s',
        default=None,
        help='start datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"'
    )
    binary_parser.add_argument(
        '--end',
        '-e',
        default=None,
        help='end datetime in yyyy-mm-dd HH:MM:SS format, or "today HH:MM:SS"')
    binary_parser.add_argument('-j',
                               '--json',
                               action="store_true",
                               dest='as_json',
                               help="Output as JSON")

    pjs_parser = subs.add_parser(
        'projects',
        help='List all projects that landing pages may be submitted to.')
    pjs_parser.add_argument('-j',
                            '--json',
                            action="store_true",
                            dest='as_json',
                            help="Output as JSON")

    args = parser.parse_args()
    client = Client.from_config()

    kwargs = {'as_json': args.as_json}

    if hasattr(args, 'whois'):
        kwargs['whois'] = args.whois
    if hasattr(args, 'days'):
        kwargs['days'] = args.days
        kwargs['start'] = args.start
        kwargs['end'] = args.end
    if args.cmd == 'get':
        md5_hashes = util.stdin(args.md5_hashes)
        for md5_hash in md5_hashes:
            lp_get(client, md5_hash, **kwargs)
    elif args.cmd == 'submit':
        urls = util.stdin(args.urls)
        kwargs.update({
            'keyword': args.keyword,
            'md5_hash': args.md5,
            'pingback_url': args.pingback_url,
            'project_name': args.project,
        })
        if args.fields:
            # Split on the first '=' only so field values may contain '='.
            kwargs.update(
                {'fields': dict(f.split('=', 1) for f in args.fields)})
        if len(urls) == 1:
            lp_submit(client, urls[0], **kwargs)
        else:
            lp_submit_bulk(client, urls, **kwargs)
    elif args.cmd == 'crawled':
        lp_crawled(client, **kwargs)
    elif args.cmd == 'flagged':
        lp_flagged(client, **kwargs)
    elif args.cmd == 'binary':
        lp_binary(client, **kwargs)
    elif args.cmd == 'projects':
        lp_projects(client, **kwargs)
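Splitting each KEY=VALUE pair on the first '=' only, as the corrected expression above does, keeps any '=' characters inside the value intact. A standalone check:

def parse_fields(pairs):
    # Split each KEY=VALUE pair on the first '=' only.
    return dict(pair.split('=', 1) for pair in pairs)

assert parse_fields(['foo=bar', 'cb=https://x.test/?a=1']) == {
    'foo': 'bar',
    'cb': 'https://x.test/?a=1',
}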