def host(format, history, filename, save, ip):
    """View all available information for an IP address.

    Looks up the host via the Shodan API, prints it using the formatter
    selected by ``format`` (dispatched through HOST_PRINT) and optionally
    writes the raw banners to a compressed JSON file.

    :param format: Key into HOST_PRINT selecting the output formatter.
    :param history: Whether to include historical banners in the lookup.
    :param filename: Output filename; empty/None disables saving unless ``save`` is set.
    :param save: If truthy, save to ``<ip>.json.gz`` regardless of ``filename``.
    :param ip: The IP address to look up.
    :raises click.ClickException: If the Shodan API returns an error.
    """
    key = get_api_key()
    api = shodan.Shodan(key)

    try:
        host = api.host(ip, history=history)

        # Print the host information to the terminal using the user-specified format
        HOST_PRINT[format](host, history=history)

        # Store the results
        if filename or save:
            if save:
                filename = '{}.json.gz'.format(ip)

            # Add the appropriate extension if it's not there atm
            if not filename.endswith('.json.gz'):
                filename += '.json.gz'

            # Use a context manager so the (presumably gzip) file is flushed
            # and closed even if writing fails — the original leaked the handle.
            with helpers.open_file(filename) as fout:
                for banner in sorted(host['data'], key=lambda k: k['port']):
                    if 'placeholder' not in banner:
                        helpers.write_banner(fout, banner)
    except shodan.APIError as e:
        raise click.ClickException(e.value)
def scan_internet(quiet, port, protocol):
    """Scan the Internet for a specific port and protocol using the Shodan infrastructure.

    Submits an on-demand Internet-wide scan. If the port is already part of
    Shodan's regular crawling the command returns immediately; otherwise it
    subscribes to the real-time port stream and saves banners to
    ``<port>-<protocol>.json.gz`` until the scan is reported as DONE.

    :param quiet: Suppress the per-banner terminal output.
    :param port: The port number to scan.
    :param protocol: The protocol/module to use for the scan.
    :raises click.ClickException: On Shodan API errors or unexpected stream errors.
    """
    key = get_api_key()
    api = shodan.Shodan(key)

    try:
        # Submit the request to Shodan
        click.echo('Submitting Internet scan to Shodan...', nl=False)
        scan = api.scan_internet(port, protocol)
        click.echo('Done')

        # If the requested port is part of the regular Shodan crawling, then
        # we don't know when the scan is done so lets return immediately and
        # let the user decide when to stop waiting for further results.
        official_ports = api.ports()
        if port in official_ports:
            click.echo('The requested port is already indexed by Shodan. A new scan for the port has been launched, please subscribe to the real-time stream for results.')
        else:
            # Create the output file
            filename = '{0}-{1}.json.gz'.format(port, protocol)
            counter = 0
            with helpers.open_file(filename, 'w') as fout:
                click.echo('Saving results to file: {0}'.format(filename))

                # Start listening for results
                done = False

                # Keep listening for results until the scan is done
                click.echo('Waiting for data, please stand by...')
                while not done:
                    try:
                        # The 90 second timeout bounds how long we block when no
                        # new banners arrive, so the scan status gets re-checked.
                        for banner in api.stream.ports([port], timeout=90):
                            counter += 1
                            helpers.write_banner(fout, banner)

                            if not quiet:
                                click.echo('{0:<40} {1:<20} {2}'.format(
                                    click.style(helpers.get_ip(banner), fg=COLORIZE_FIELDS['ip_str']),
                                    click.style(str(banner['port']), fg=COLORIZE_FIELDS['port']),
                                    ';'.join(banner['hostnames'])))
                    except shodan.APIError:
                        # We stop waiting for results if the scan has been processed by the crawlers and
                        # there haven't been new results in a while
                        if done:
                            break

                        scan = api.scan_status(scan['id'])
                        if scan['status'] == 'DONE':
                            done = True
                    except socket.timeout:
                        # We stop waiting for results if the scan has been processed by the crawlers and
                        # there haven't been new results in a while
                        if done:
                            break

                        scan = api.scan_status(scan['id'])
                        if scan['status'] == 'DONE':
                            done = True
                    except Exception as e:
                        # Any other failure aborts the wait loop entirely.
                        raise click.ClickException(repr(e))

            click.echo('Scan finished: {0} devices found'.format(counter))
    except shodan.APIError as e:
        raise click.ClickException(e.value)
def scan_submit(wait, filename, force, verbose, netblocks):
    """Scan an IP/ netblock using Shodan.

    Submits the netblocks for scanning, then (unless ``wait <= 0``) creates a
    temporary network alert, listens on its real-time stream for results while
    showing a spinner, and finally prints a per-host summary (and optionally
    saves banners to ``filename``). The temporary alert is always deleted in
    the ``finally`` block.

    :param wait: Seconds to wait for streamed results; <= 0 returns immediately.
    :param filename: Optional output file for the collected banners.
    :param force: Passed through to the API to force a re-scan.
    :param verbose: Print scan ID and status updates.
    :param netblocks: Iterable of IPs/netblocks to scan.
    :raises click.ClickException: On API errors or unexpected stream failures.
    """
    key = get_api_key()
    api = shodan.Shodan(key)

    alert = None

    # Submit the IPs for scanning
    try:
        # Submit the scan
        scan = api.scan(netblocks, force=force)

        now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M')
        click.echo('')
        click.echo('Starting Shodan scan at {} - {} scan credits left'.format(now, scan['credits_left']))

        if verbose:
            click.echo('# Scan ID: {}'.format(scan['id']))

        # Return immediately
        if wait <= 0:
            click.echo('Exiting now, not waiting for results. Use the API or website to retrieve the results of the scan.')
        else:
            # Setup an alert to wait for responses
            alert = api.create_alert('Scan: {}'.format(', '.join(netblocks)), netblocks)

            # Create the output file if necessary
            filename = filename.strip()
            fout = None
            if filename != '':
                # Add the appropriate extension if it's not there atm
                if not filename.endswith('.json.gz'):
                    filename += '.json.gz'
                fout = helpers.open_file(filename, 'w')

            # Start a spinner
            finished_event = threading.Event()
            progress_bar_thread = threading.Thread(target=async_spinner, args=(finished_event, ))
            progress_bar_thread.start()

            # Now wait a few seconds for items to get returned
            hosts = collections.defaultdict(dict)
            done = False
            scan_start = time.time()
            cache = {}
            while not done:
                try:
                    for banner in api.stream.alert(aid=alert['id'], timeout=wait):
                        ip = banner.get('ip', banner.get('ipv6', None))
                        if not ip:
                            continue

                        # Don't show duplicate banners
                        cache_key = '{}:{}'.format(ip, banner['port'])
                        if cache_key not in cache:
                            hosts[helpers.get_ip(banner)][banner['port']] = banner
                            cache[cache_key] = True

                        # If we've grabbed data for more than 60 seconds it might just be a busy network and we should move on
                        if time.time() - scan_start >= 60:
                            scan = api.scan_status(scan['id'])
                            if verbose:
                                click.echo('# Scan status: {}'.format(scan['status']))
                            if scan['status'] == 'DONE':
                                done = True
                                break
                except shodan.APIError:
                    # If the connection timed out before the timeout, that means the streaming server
                    # that the user tried to reach is down. In that case, lets wait briefly and try
                    # to connect again!
                    if (time.time() - scan_start) < wait:
                        time.sleep(0.5)
                        continue

                    # Exit if the scan was flagged as done somehow
                    if done:
                        break

                    scan = api.scan_status(scan['id'])
                    if scan['status'] == 'DONE':
                        done = True

                    if verbose:
                        click.echo('# Scan status: {}'.format(scan['status']))
                except socket.timeout:
                    # If the connection timed out before the timeout, that means the streaming server
                    # that the user tried to reach is down. In that case, lets wait a second and try
                    # to connect again!
                    if (time.time() - scan_start) < wait:
                        continue
                    done = True
                except Exception as e:
                    # Make sure the spinner thread is stopped before aborting.
                    finished_event.set()
                    progress_bar_thread.join()
                    raise click.ClickException(repr(e))

            finished_event.set()
            progress_bar_thread.join()

            # Helper: print one aligned "name: value" summary line.
            def print_field(name, value):
                click.echo(' {:25s}{}'.format(name, value))

            # Helper: print one banner line (port/transport, product, SSL details).
            def print_banner(banner):
                click.echo(' {:20s}'.format(click.style(str(banner['port']), fg='green') + '/' + banner['transport']), nl=False)

                if 'product' in banner:
                    click.echo(banner['product'], nl=False)

                    if 'version' in banner:
                        click.echo(' ({})'.format(banner['version']), nl=False)

                click.echo('')

                # Show optional ssl info
                if 'ssl' in banner:
                    if 'versions' in banner['ssl']:
                        # Only print SSL versions if they were successfully tested
                        # (failed versions are prefixed with '-').
                        versions = [version for version in sorted(banner['ssl']['versions']) if not version.startswith('-')]
                        if len(versions) > 0:
                            click.echo(' |-- SSL Versions: {}'.format(', '.join(versions)))
                    if 'dhparams' in banner['ssl'] and banner['ssl']['dhparams']:
                        click.echo(' |-- Diffie-Hellman Parameters:')
                        click.echo(' {:15s}{}\n {:15s}{}'.format('Bits:', banner['ssl']['dhparams']['bits'], 'Generator:', banner['ssl']['dhparams']['generator']))
                        if 'fingerprint' in banner['ssl']['dhparams']:
                            click.echo(' {:15s}{}'.format('Fingerprint:', banner['ssl']['dhparams']['fingerprint']))

            if hosts:
                # Remove the remaining spinner character
                click.echo('\b ')

                for ip in sorted(hosts):
                    # Use any banner of this host for the host-level metadata
                    # (location, org, os, vulns).
                    host = next(iter(hosts[ip].items()))[1]

                    click.echo(click.style(ip, fg='cyan'), nl=False)
                    if 'hostnames' in host and host['hostnames']:
                        click.echo(' ({})'.format(', '.join(host['hostnames'])), nl=False)
                    click.echo('')

                    if 'location' in host and 'country_name' in host['location'] and host['location']['country_name']:
                        print_field('Country', host['location']['country_name'])

                        if 'city' in host['location'] and host['location']['city']:
                            print_field('City', host['location']['city'])

                    if 'org' in host and host['org']:
                        print_field('Organization', host['org'])

                    if 'os' in host and host['os']:
                        print_field('Operating System', host['os'])

                    click.echo('')

                    # Output the vulnerabilities the host has
                    if 'vulns' in host and len(host['vulns']) > 0:
                        vulns = []
                        for vuln in host['vulns']:
                            # '!'-prefixed entries mean the host is NOT vulnerable
                            # — presumably; TODO confirm against the Shodan schema.
                            if vuln.startswith('!'):
                                continue

                            if vuln.upper() == 'CVE-2014-0160':
                                vulns.append(click.style('Heartbleed', fg='red'))
                            else:
                                vulns.append(click.style(vuln, fg='red'))

                        if len(vulns) > 0:
                            click.echo(' {:25s}'.format('Vulnerabilities:'), nl=False)

                            for vuln in vulns:
                                click.echo(vuln + '\t', nl=False)

                            click.echo('')

                    # Print all the open ports:
                    click.echo(' Open Ports:')
                    for port in sorted(hosts[ip]):
                        print_banner(hosts[ip][port])

                        # Save the banner in a file if necessary
                        if fout:
                            helpers.write_banner(fout, hosts[ip][port])

                    click.echo('')
            else:
                # Prepend a \b to remove the spinner
                click.echo('\bNo open ports found or the host has been recently crawled and cant get scanned again so soon.')
    except shodan.APIError as e:
        raise click.ClickException(e.value)
    finally:
        # Remove any alert
        if alert:
            api.delete_alert(alert['id'])
def stream(color, fields, separator, limit, datadir, ports, quiet, timeout, streamer, countries, asn, alert, tags, compresslevel, vulns):
    """Stream data in real-time.

    Subscribes to one of the Shodan real-time streams (all banners, or a
    single filtered stream selected by --ports/--countries/--asn/--alert/
    --tags/--vulns), optionally writing banners to hourly/daily rotated
    files in ``datadir`` and printing the selected fields to stdout.

    :param color: Colorize known fields in the terminal output.
    :param fields: Comma-separated list of banner properties to print.
    :param separator: String placed between printed fields.
    :param limit: Stop after this many banners (<= 0 means unlimited).
    :param datadir: Directory for rotated output files; falsy disables saving.
    :param quiet: Suppress stdout output.
    :param timeout: Stream read timeout in seconds.
    :param streamer: Base URL of the streaming server.
    :param compresslevel: gzip compression level for the output files.
    :raises click.ClickException: On invalid arguments, timeouts or API errors.
    """
    # Setup the Shodan API
    key = get_api_key()
    api = shodan.Shodan(key)

    # Temporarily change the baseurl
    api.stream.base_url = streamer

    # Strip out any whitespace in the fields and turn them into an array
    fields = [item.strip() for item in fields.split(',')]

    if len(fields) == 0:
        raise click.ClickException('Please define at least one property to show')

    # The user must choose "ports", "countries", "asn" or nothing - can't select multiple
    # filtered streams at once.
    stream_type = []
    if ports:
        stream_type.append('ports')
    if countries:
        stream_type.append('countries')
    if asn:
        stream_type.append('asn')
    if alert:
        stream_type.append('alert')
    if tags:
        stream_type.append('tags')
    if vulns:
        stream_type.append('vulns')

    if len(stream_type) > 1:
        raise click.ClickException('Please use --ports, --countries, --tags, --vulns OR --asn. You cant subscribe to multiple filtered streams at once.')

    stream_args = None

    # Turn the list of ports into integers
    if ports:
        try:
            stream_args = [int(item.strip()) for item in ports.split(',')]
        except ValueError:
            raise click.ClickException('Invalid list of ports')

    if alert:
        alert = alert.strip()
        if alert.lower() != 'all':
            stream_args = alert

    if asn:
        stream_args = asn.split(',')

    if countries:
        stream_args = countries.split(',')

    if tags:
        stream_args = tags.split(',')

    if vulns:
        stream_args = vulns.split(',')

    # Flatten the list of stream types
    # Possible values are:
    # - all
    # - asn
    # - countries
    # - ports
    if len(stream_type) == 1:
        stream_type = stream_type[0]
    else:
        stream_type = 'all'

    # Decide which stream to subscribe to based on whether or not ports were selected
    def _create_stream(name, args, timeout):
        return {
            'all': api.stream.banners(timeout=timeout),
            'alert': api.stream.alert(args, timeout=timeout),
            'asn': api.stream.asn(args, timeout=timeout),
            'countries': api.stream.countries(args, timeout=timeout),
            'ports': api.stream.ports(args, timeout=timeout),
            'tags': api.stream.tags(args, timeout=timeout),
            'vulns': api.stream.vulns(args, timeout=timeout),
        }.get(name, 'all')

    stream = _create_stream(stream_type, stream_args, timeout=timeout)

    counter = 0
    quit = False
    last_time = timestr()
    fout = None

    if datadir:
        fout = open_streaming_file(datadir, last_time, compresslevel)

    while not quit:
        try:
            for banner in stream:
                # Limit the number of results to output
                if limit > 0:
                    counter += 1

                    if counter > limit:
                        quit = True
                        break

                # Write the data to the file
                if datadir:
                    cur_time = timestr()
                    if cur_time != last_time:
                        last_time = cur_time
                        fout.close()
                        # BUGFIX: pass compresslevel so rotated files keep the
                        # configured compression level (the original call
                        # dropped it, silently reverting to the default).
                        fout = open_streaming_file(datadir, last_time, compresslevel)
                    helpers.write_banner(fout, banner)

                # Print the banner information to stdout
                if not quiet:
                    row = u''

                    # Loop over all the fields and print the banner as a row
                    for field in fields:
                        tmp = u''
                        value = get_banner_field(banner, field)
                        if value:
                            field_type = type(value)

                            # If the field is an array then merge it together
                            if field_type == list:
                                tmp = u';'.join(value)
                            elif field_type in [int, float]:
                                tmp = u'{}'.format(value)
                            else:
                                tmp = escape_data(value)

                            # Colorize certain fields if the user wants it
                            if color:
                                tmp = click.style(tmp, fg=COLORIZE_FIELDS.get(field, 'white'))

                        # Add the field information to the row
                        row += tmp
                        row += separator

                    click.echo(row)
        except requests.exceptions.Timeout:
            raise click.ClickException('Connection timed out')
        except KeyboardInterrupt:
            quit = True
        except shodan.APIError as e:
            raise click.ClickException(e.value)
        except Exception:
            # For other errors lets just wait a bit and try to reconnect again
            time.sleep(1)

            # Create a new stream object to subscribe to
            stream = _create_stream(stream_type, stream_args, timeout=timeout)
def parse(color, fields, filters, filename, separator, filenames):
    """Extract information out of compressed JSON files.

    Iterates the banners in ``filenames``, optionally filters them, prints the
    requested fields as separator-joined rows, and optionally writes matching
    banners to a compressed output file.

    :param color: Colorize known fields in the terminal output.
    :param fields: Comma-separated list of banner properties to print.
    :param filters: Filter expressions a banner must match to be shown/saved.
    :param filename: Optional output file (requires at least one filter).
    :param separator: String placed between printed fields.
    :param filenames: Input files to iterate over.
    :raises click.ClickException: On invalid arguments.
    """
    # Strip out any whitespace in the fields and turn them into an array
    fields = [item.strip() for item in fields.split(',')]

    if len(fields) == 0:
        raise click.ClickException('Please define at least one property to show')

    has_filters = len(filters) > 0

    # Setup the output file handle
    fout = None
    if filename:
        # If no filters were provided raise an error since it doesn't make much sense w/out them
        if not has_filters:
            raise click.ClickException('Output file specified without any filters. Need to use filters with this option.')

        # Add the appropriate extension if it's not there atm
        if not filename.endswith('.json.gz'):
            filename += '.json.gz'
        fout = helpers.open_file(filename)

    # Close the output file even if processing fails — the original leaked
    # the handle, which can lose buffered gzip data.
    try:
        for banner in helpers.iterate_files(filenames):
            row = u''

            # Validate the banner against any provided filters
            if has_filters and not match_filters(banner, filters):
                continue

            # Append the data
            if fout:
                helpers.write_banner(fout, banner)

            # Loop over all the fields and print the banner as a row
            for i, field in enumerate(fields):
                tmp = u''
                value = get_banner_field(banner, field)
                if value:
                    field_type = type(value)

                    # If the field is an array then merge it together
                    if field_type == list:
                        tmp = u';'.join(value)
                    elif field_type in [int, float]:
                        tmp = u'{}'.format(value)
                    else:
                        tmp = escape_data(value)

                    # Colorize certain fields if the user wants it
                    if color:
                        tmp = click.style(tmp, fg=COLORIZE_FIELDS.get(field, 'white'))

                # Add the field information to the row
                if i > 0:
                    row += separator
                row += tmp

            click.echo(row)
    finally:
        if fout:
            fout.close()
def download(limit, skip, filename, query):
    """Download search results and save them in a compressed JSON file.

    Runs the search via the Shodan API, shows a summary and a progress bar,
    and streams the results into ``<filename>.json.gz``.

    :param limit: Maximum number of results to download (<= 0 means all).
    :param skip: Number of results to skip before downloading.
    :param filename: Output filename (extension added automatically).
    :param query: Tuple of search terms joined into a single query string.
    :raises click.ClickException: On empty inputs or an unresponsive API.
    """
    key = get_api_key()

    # Join the query tuple into a single search string
    query = ' '.join(query).strip()
    if not query:
        raise click.ClickException('Empty search query')

    filename = filename.strip()
    if not filename:
        raise click.ClickException('Empty filename')

    # Ensure the output file carries the expected extension
    if not filename.endswith('.json.gz'):
        filename += '.json.gz'

    # Perform the search
    api = shodan.Shodan(key)

    # Look up the result count and remaining credits up-front
    try:
        total = api.count(query)['total']
        api_info = api.info()
    except Exception:
        raise click.ClickException('The Shodan API is unresponsive at the moment, please try again later.')

    # Summarize the download request for the user
    click.echo('Search query:\t\t\t%s' % query)
    click.echo('Total number of results:\t%s' % total)
    click.echo('Query credits left:\t\t%s' % api_info['unlocked_left'])
    click.echo('Output file:\t\t\t%s' % filename)

    # Clamp the limit: anything above the result count, or a non-positive
    # value (-1 means "download everything"), becomes the full result count.
    if limit > total or limit <= 0:
        limit = total

    # Skipping results shrinks the number we expect to download
    if skip > 0:
        limit -= skip

    with helpers.open_file(filename, 'w') as fout:
        saved = 0
        try:
            cursor = api.search_cursor(query, minify=False, skip=skip)
            with click.progressbar(cursor, length=limit) as progress:
                for banner in progress:
                    helpers.write_banner(fout, banner)
                    saved += 1

                    if saved >= limit:
                        break
        except Exception:
            # Best-effort download: keep whatever was written so far
            pass

        # Let the user know we're done
        if saved < limit:
            click.echo(click.style('Notice: fewer results were saved than requested', 'yellow'))
        click.echo(click.style(u'Saved {} results into file {}'.format(saved, filename), 'green'))
def domain_info(domain, details, save, history, type):
    """View all available information for a domain.

    Looks up the DNS records for ``domain``; optionally resolves host details
    for A/AAAA records (``details``) and saves the records/banners to
    compressed JSON files (``save``).

    :param domain: The domain name to look up.
    :param details: Also look up host information for A/AAAA record IPs.
    :param save: Save DNS records (and host banners, if any) to .json.gz files.
    :param history: Include historical DNS data in the lookup.
    :param type: Restrict the lookup to a single DNS record type.
    :raises click.ClickException: If the domain lookup fails.
    """
    key = get_api_key()
    api = shodan.Shodan(key)

    try:
        info = api.dns.domain_info(domain, history=history, type=type)
    except shodan.APIError as e:
        raise click.ClickException(e.value)

    # Grab the host information for any IP records that were returned
    hosts = {}
    if details:
        ips = [record['value'] for record in info['data'] if record['type'] in ['A', 'AAAA']]
        ips = set(ips)

        fout = None
        if save:
            filename = u'{}-hosts.json.gz'.format(domain)
            fout = helpers.open_file(filename)

        # Close the hosts output file even on failure — the original leaked it.
        try:
            for ip in ips:
                try:
                    hosts[ip] = api.host(ip)

                    # Store the banners if requested
                    if fout:
                        for banner in hosts[ip]['data']:
                            if 'placeholder' not in banner:
                                helpers.write_banner(fout, banner)
                except shodan.APIError:
                    pass  # Ignore any API lookup errors as this isn't critical information
        finally:
            if fout:
                fout.close()

    # Save the DNS data (context manager closes the file; original leaked it)
    if save:
        filename = u'{}.json.gz'.format(domain)
        with helpers.open_file(filename) as fout:
            for record in info['data']:
                helpers.write_banner(fout, record)

    click.secho(info['domain'].upper(), fg='green')
    click.echo('')

    for record in info['data']:
        click.echo(
            u'{:32}  {:14}  {}'.format(
                click.style(record['subdomain'], fg='cyan'),
                click.style(record['type'], fg='yellow'),
                record['value']
            ),
            nl=False,
        )

        # Show the open ports if we resolved host details for this IP
        if record['value'] in hosts:
            host = hosts[record['value']]
            click.secho(u' Ports: {}'.format(', '.join([str(port) for port in sorted(host['ports'])])), fg='blue', nl=False)

        click.echo('')
def alert_download(filename, alert_id):
    """Download all information for monitored networks/ IPs.

    Collects the IPs and netblocks covered by the user's alerts (or a single
    alert when ``alert_id`` is given), then downloads the host banners into
    ``filename``: individual IPs via (possibly batched) host lookups, and
    netblocks via ``net:`` search cursors. Lookups are throttled with sleeps
    to stay under the API rate limit.

    :param filename: Output file the banners are written to.
    :param alert_id: Optional single alert ID; falsy downloads all alerts.
    :raises click.ClickException: If the Shodan API returns an error.
    """
    key = get_api_key()
    api = shodan.Shodan(key)

    ips = set()
    networks = set()

    # Helper method to process batches of IPs
    def batch(iterable, size=1):
        iter_length = len(iterable)
        for ndx in range(0, iter_length, size):
            yield iterable[ndx:min(ndx + size, iter_length)]

    try:
        # Get the list of alerts for the user
        click.echo('Looking up alert information...')
        if alert_id:
            alerts = [api.alerts(aid=alert_id.strip())]
        else:
            alerts = api.alerts()

        click.echo('Compiling list of networks/ IPs to download...')
        for alert in alerts:
            for net in alert['filters']['ip']:
                # Entries containing '/' are CIDR netblocks, the rest are single IPs
                if '/' in net:
                    networks.add(net)
                else:
                    ips.add(net)

        click.echo('Downloading...')
        with open_file(filename) as fout:
            # Check if the user is able to use batch IP lookups
            batch_size = 1
            if len(ips) > 0:
                api_info = api.info()
                # Presumably only these plans support 100-IP batch host lookups
                # — TODO confirm against the Shodan plan documentation.
                if api_info['plan'] in ['corp', 'stream-100']:
                    batch_size = 100

            # Convert it to a list so we can index into it
            ips = list(ips)

            # Grab all the IP information
            # NOTE: each "ip" here is a batch (list) of 1..batch_size addresses;
            # api.host() accepts either a single IP or a list.
            for ip in batch(ips, size=batch_size):
                try:
                    click.echo(ip)
                    results = api.host(ip)
                    if not isinstance(results, list):
                        results = [results]

                    for host in results:
                        for banner in host['data']:
                            write_banner(fout, banner)
                except APIError:
                    pass

                sleep(1)  # Slow down a bit to make sure we don't hit the rate limit

            # Grab all the network ranges
            for net in networks:
                try:
                    counter = 0
                    click.echo(net)

                    for banner in api.search_cursor('net:{}'.format(net)):
                        write_banner(fout, banner)

                        # Slow down a bit to make sure we don't hit the rate limit
                        if counter % 100 == 0:
                            sleep(1)
                        counter += 1
                except APIError:
                    pass
    except shodan.APIError as e:
        raise click.ClickException(e.value)

    click.secho('Successfully downloaded results into: {}'.format(filename), fg='green')