def scan_subnets(subnets):
    """Ping-scan the given subnets with nmap and reverse-resolve every IP.

    Returns a dict mapping each subnet string to a list of
    ``[ip, 'ACTIVE'|'INACTIVE', reverse_dns_name]`` entries, one per
    address in the subnet.
    """
    # -n: skip nmap's own DNS resolution, -sP: ping scan (host discovery).
    nmap_args = ["-nsP"] + list(subnets)
    vlans = nmap(nmap_args)
    # Dotted-quad matcher; raw string avoids invalid-escape warnings on py3.
    ip_patt = r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
    p = re.compile(ip_patt)
    active_ips = p.findall(vlans.stdout)
    subnet_ips = {subnet: iptools.IpRangeList(subnet) for subnet in subnets}
    ip_usage = {}
    # items() instead of py2-only iteritems() so this runs on py2 and py3.
    for subnet, ip_range in subnet_ips.items():
        ip_list = []
        for ip in ip_range:
            try:
                host = reversename.from_address(ip)
                # There may be multiple reverse (PTR) entries for one IP.
                names = [nm.to_text() for nm in resolver.query(host, 'PTR')]
                name = ','.join(names)
            except resolver.NXDOMAIN:
                name = ''
            status = 'ACTIVE' if ip in active_ips else 'INACTIVE'
            ip_list.append([ip, status, name])
        ip_usage[subnet] = ip_list
    return ip_usage
def testMixedRange(self):
    """IpRangeList must accept single IPs, CIDR blocks, inclusive tuples
    and IPv6 forms all in one constructor call."""
    pool = iptools.IpRangeList(
        '127.0.0.1',                # single ip
        '192.168/16',               # CIDR network block
        ('10.0.0.1', '10.0.0.19'),  # arbitrary inclusive range
        '::1',                      # single IPv6 address
        'fe80::/10',                # IPv6 CIDR block
        '::ffff:0:0/96',            # IPv4-mapped IPv6
    )
    # Every one of these lies inside one of the declared ranges.
    for member in (
        '127.0.0.1',
        '192.168.0.1',
        '192.168.255.254',
        '10.0.0.1',
        '10.0.0.19',
        '::1',
        'fe80::1',
        'fe80::ffff',
        'fe80:ffff:ffff:ffff:ffff:ffff:ffff:ffff',
        '::ffff:172.16.11.12',
    ):
        self.assertTrue(member in pool)
    # A public IPv4 address outside every range must be rejected.
    self.assertFalse('209.19.170.129' in pool)
class Ip(Observable):
    """Observable subclass representing an IPv4 or IPv6 address."""

    # GeoIP enrichment data (e.g. country/city keys used in DISPLAY_FIELDS
    # below); populated outside this class.
    geoip = DictField()
    # Extraction regex for (possibly defanged) IPs: the IPv4 branch accepts
    # ',' in place of '.' and bracketed dots like '[.]'; the rest covers the
    # many abbreviated IPv6 forms.
    # NOTE(review): several IPv6 alternatives contain bare 'b', '1d{2}',
    # '2[0-4]d' and 'd{1,2}' where '\b' and '\d' escapes were probably
    # intended — confirm against the upstream pattern before relying on the
    # IPv6-with-embedded-IPv4 branches.
    regex = r'(?P<search>((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)[.,](25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\[?[.,]\]?(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\[?[.,]\]?(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))|((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b)\[?\.\]?){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b)\[?\.\]){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b)\[?\.\]){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:)))'
    # 4 or 6; set by normalize() below.
    version = IntField()
    exclude_fields = Observable.exclude_fields + ['version']
    DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("version", "IP version"),
                                                  ("geoip__country", "Country"),
                                                  ("geoip__city", "City")]
    # Reserved/special-purpose IPv4 ranges that should never be treated as
    # interesting observables.
    ignore = iptools.IpRangeList(
        iptools.ipv4.BENCHMARK_TESTS, iptools.ipv4.BROADCAST,
        iptools.ipv4.DUAL_STACK_LITE, iptools.ipv4.IETF_PROTOCOL_RESERVED,
        iptools.ipv4.LINK_LOCAL, iptools.ipv4.LOOPBACK,
        iptools.ipv4.LOCALHOST, iptools.ipv4.MULTICAST,
        iptools.ipv4.MULTICAST_INTERNETWORK, iptools.ipv4.MULTICAST_LOCAL,
        iptools.ipv4.PRIVATE_NETWORK_10, iptools.ipv4.PRIVATE_NETWORK_172_16,
        iptools.ipv4.PRIVATE_NETWORK_192_168)

    @classmethod
    def is_valid(cls, match):
        # Refang (undo '[.]'/',' obfuscation) before validating as v4 or v6.
        value = refang(match.group('search'))
        return iptools.ipv4.validate_ip(value) or iptools.ipv6.validate_ip(
            value)

    def normalize(self):
        """Refang and canonicalize the stored value, recording the version.

        The hex/long round-trip rewrites the address in its canonical
        textual form (e.g. stripping leading zeros).
        """
        self.value = refang(self.value)
        if iptools.ipv4.validate_ip(self.value):
            self.value = iptools.ipv4.hex2ip(iptools.ipv4.ip2hex(self.value))
            self.version = 4
        elif iptools.ipv6.validate_ip(self.value):
            self.value = iptools.ipv6.long2ip(iptools.ipv6.ip2long(self.value))
            self.version = 6
def get_endpoint_host(b64txt):
    """Decide whether *b64txt* parses to a fetchable endpoint URL.

    Returns ``(url, None)`` when the host is a resolvable hostname or a
    routable literal IP, otherwise ``(None, b64txt)`` so the caller can
    treat the input as a raw value instead of a URL.
    """
    parsed = urlparse(b64txt)
    host = parsed.hostname
    # Guard clauses: no host at all, or an explicit localhost.
    if not host or host == 'localhost':
        return None, b64txt
    # Ranges that are never routable from outside the machine/site.
    non_routable = iptools.IpRangeList(
        iptools.ipv4.LOCALHOST, iptools.ipv4.PRIVATE_NETWORK_10,
        iptools.ipv4.PRIVATE_NETWORK_172_16,
        iptools.ipv4.PRIVATE_NETWORK_192_168, iptools.ipv4.LINK_LOCAL,
        iptools.ipv4.MULTICAST, iptools.ipv6.LOCALHOST,
        iptools.ipv6.PRIVATE_NETWORK, iptools.ipv6.LINK_LOCAL,
        iptools.ipv6.MULTICAST)
    is_literal_ip = (iptools.ipv4.validate_ip(host)
                     or iptools.ipv6.validate_ip(host))
    if not is_literal_ip:
        # Hostname: usable only if DNS can resolve it.
        try:
            socket.gethostbyname(host)
        except socket.gaierror:
            return None, b64txt
        return parsed.geturl(), None
    if host in non_routable:
        # Literal IP inside a private/link-local/multicast range.
        return None, b64txt
    return parsed.geturl(), None
def generateNmap(ipSpace, output_filename):
    """Run nmap against every address in every range of *ipSpace* and
    append each scan's output to *output_filename*.

    ipSpace maps a key to a value accepted by iptools.IpRangeList.
    """
    for key in ipSpace:
        # ips is a list of all valid IPs in IP-Space
        ips = iptools.IpRangeList(ipSpace[key])
        for ip in ips:
            # EXAMPLE OUTPUT --- begin ---
            # Starting Nmap 6.40 ( http://nmap.org ) at 2016-01-11 10:45 PST
            # Nmap scan report for example-host.com (192.168.1.1)
            # Host is up (0.00025s latency).
            # Not shown: 999 closed ports
            # PORT     STATE SERVICE
            # 22/tcp   open  ssh
            # Nmap done: 1 IP address (1 host up) scanned in 0.08 seconds
            # --- end ---
            output = None
            try:
                # SECURITY FIX: the original built a shell string
                # ("nmap <ip> >> <file>", shell=True), which allowed command
                # injection via output_filename. Use an argument list with
                # no shell, and do the append in Python instead of '>>'.
                output = subprocess.check_output(["nmap", ip])
            except subprocess.CalledProcessError as e:
                # Keep whatever nmap printed before failing, as before.
                output = e.output
            if output:
                with open(output_filename, "ab") as out_file:
                    out_file.write(output)
def Parse_hosts(hosts, Log=logging):
    """Parse a comma-separated host string into a sorted list of addresses.

    Accepts plain IPs, CIDR-ish numeric specs, dashed ranges like
    '10.0.0.1-10.0.0.9' or '10.0.0.1-9', and non-numeric hostnames
    (returned as-is). Returns None on any failure.
    NOTE(review): uses the py2-only cmp form of sorted() — Python 2 only.
    """
    try:
        Log.debug(hosts)
        assert (hosts is not None)
        all_host_set = set(hosts.replace(' ', '').split(','))
        # Entries containing anything beyond digits, '.' and '/' — i.e. not
        # a bare IP/CIDR spec.
        pre_host_set = set(host for host in all_host_set
                           if re.search('[^\d./]', host))
        # Of those, entries that are not dashed numeric ranges either:
        # treated as literal hostnames and passed through untouched.
        sig_host_set = set(host for host in pre_host_set
                           if re.search('[^\d.-]', host))
        # Bare IP/CIDR specs go straight to IpRangeList.
        solve_list = list(all_host_set - pre_host_set)
        # Dashed numeric ranges: expand 'a.b.c.d-e' style shorthand.
        for host in pre_host_set - sig_host_set:
            if host.count('-') == 1:
                # Longest dotted part first (py2 cmp: sorts by descending
                # number of octets), so ip_group[0] is the full address.
                ip_group = sorted([i.split('.') for i in host.split('-')],
                                  lambda x, y: len(y) - len(x))
                if len(ip_group[0]) == 4:
                    # Rebuild the short end by borrowing the leading octets
                    # of the full end, producing an inclusive (start, end)
                    # tuple for IpRangeList.
                    solve_list.append(
                        tuple('.'.join(ip) for ip in [
                            ip_group[0],
                            [
                                ip_group[0][i] for i in range(
                                    len(ip_group[0]) - len(ip_group[1]))
                            ] + ip_group[1]
                        ]))
                    continue
            Log.warn('[FAIL] Unable to recognize host:' + host)
        import iptools
        # Flatten every range into individual IPs and merge the hostnames.
        return sorted(
            set(ip for ranges in iptools.IpRangeList(*solve_list).ips
                for ip in ranges) | sig_host_set)
    except Exception as e:
        Log.error('[FAIL] Resolve host list "%s" failed\n%s' %
                  (hosts, traceback.format_exc()))
        return None
def test_CIDR_speed(self):
    """Benchmark IpRangeList membership tests against plain-set lookups on
    identical data, asserting equal hit counts and that sets are faster."""
    # Synthetic log line; the client IP field is filled per-iteration.
    line = 'Sep 16 20:00:02 srv testsrv 192.%s.119.%s - ' \
        '16/Sep/2012/20/00/02 GET /v1/a/c/o HTTP/1.0 ' \
        '200 - StaticWeb - - 17005 - txn - 0.0095 -'
    ips1 = iptools.IpRangeList(*[
        x.strip() for x in '127.0.0.1,192.168/16,10/24'.split(',')
        if x.strip()
    ])
    ips2 = iptools.IpRangeList(
        *[x.strip() for x in '172.168/16,10/30'.split(',') if x.strip()])
    ips3 = iptools.IpRangeList(
        *[x.strip() for x in '127.0.0.1,11/24'.split(',') if x.strip()])
    orig_start = datetime.datetime.utcnow()
    hit = 0
    for n in range(255):
        for a in range(255):
            stream = line % (n, a)
            data = stream.split(" ")
            # data[5] is the client IP (192.n.119.a); only n == 168 falls
            # inside 192.168/16, giving one hit per value of a => 255.
            if data[5] in ips1 or data[5] in ips2 or data[5] in ips3:
                hit += 1
    orig_end = datetime.datetime.utcnow()
    orig_secs = float("%d.%d" % ((orig_end - orig_start).seconds,
                                 (orig_end - orig_start).microseconds))
    self.assertEqual(hit, 255)
    # now, let's check the speed with sets: materialize every member IP.
    set1 = set(k for k in ips1)
    set2 = set(k for k in ips2)
    set3 = set(k for k in ips3)
    new_start = datetime.datetime.utcnow()
    hit = 0
    for n in range(255):
        for a in range(255):
            stream = line % (n, a)
            data = stream.split(" ")
            if data[5] in set1 or data[5] in set2 or data[5] in set3:
                hit += 1
    new_end = datetime.datetime.utcnow()
    new_secs = float("%d.%d" % ((new_end - new_start).seconds,
                                (new_end - new_start).microseconds))
    self.assertEqual(hit, 255)
    # assert that using pure types directly is faster
    # NOTE(review): wall-clock comparison — can flake on a loaded machine.
    self.assertTrue(new_secs < orig_secs)
def _get_excluded_ips(self): if self._enrich_config.get('excluded_ips'): excluded_ips = [ _ip.strip() for _ip in self._enrich_config['excluded_ips'].split(',') ] return iptools.IpRangeList(*excluded_ips) return None
def checkIPcorrect(IP, IPpattern=COINBASE_CORRECT_IP, dbg=DEBUG_MESSAGES):
    "callbacks should come from Coinbase"
    # Membership test against the configured Coinbase range; a fresh
    # IpRangeList is built on every call. (Python 2: print statements.)
    if (IP in iptools.IpRangeList(IPpattern)):
        if dbg: print "'%s' is a coinbase IP, all cool." % IP
        return True
    else:
        if dbg: print "'%s' is NOT a coinbase IP!" % IP
        return False
def generate_ips(ip_range):
    """Expand *ip_range* into an iterable of IP addresses.

    Accepts anything iptools.IpRangeList understands (single IP, CIDR
    block), falling back to a dashed 'start-end' pair parsed into an
    iptools.IpRange.
    """
    try:
        # BUG FIX: the original referenced the global ``args.ip_range``
        # instead of the ``ip_range`` parameter, ignoring its own argument.
        return iptools.IpRangeList(ip_range)
    except Exception:
        # Not a plain spec — expect 'a.b.c.d-e.f.g.h'.
        ip_start, ip_end = ip_range.split("-", 1)
        return iptools.IpRange(ip_start, ip_end)
def search_results(request):
    """Pyramid view: run a faceted Solr search and return results plus
    facet data (dict for the JSON route, template context otherwise)."""
    conn = Solr(request.registry.settings['solr_base_url'], decoder=decoder)
    params = request.GET.copy()
    q = params.pop('q', None)
    if q is None:
        # No query supplied — redirect to the homepage.
        return HTTPFound('http://2012.haip.cc/')
    params.update({
        'facet': 'true',
        'facet.limit': 20,
        'facet.mincount': 1,
        'facet.sort': 'count',
        'facet.field': ['language', 'author_exact', 'year'],
        'fl': '*',
    })
    # TODO: get cover data, description from https://developers.google.com/books/docs/v1/reference/volumes
    # TODO: refactor logic from template to view
    # TODO: tests
    # first do request without fq so we get all facet values
    params_temp = params.copy()
    if 'fq' in params_temp:
        del params_temp['fq']
    facet_fields = conn.search(q, **params_temp).facets['facet_fields']
    # workaround due to limitation that kwargs can't handle multidict:
    # collapse repeated fq params into a single AND-ed filter query.
    if 'fq' in params:
        params['fq'] = ' AND '.join(params.getall('fq'))
    log.debug(params)
    results = conn.search(q, **params)
    log.debug(results)
    # Client address inside any configured network => trusted client.
    allowed_networks = request.registry.settings['allowed_networks'].split(',')
    if request.client_addr in iptools.IpRangeList(*allowed_networks):
        is_trusted_ip = True
    else:
        is_trusted_ip = False
    out = {
        'results': list(results),
        'q': q,
        'facet_fields': facet_fields,
        'facets': params.get('fq', []),
    }
    if request.matched_route.name.endswith('json'):
        return out
    else:
        # HTML route: expose template helpers and the trust flag too.
        out.update({
            'with_facet': with_facet,
            'without_facet': without_facet,
            'format_byte_size': format_byte_size,
            'format_facet': format_facet,
            'is_trusted_ip': is_trusted_ip,
        })
        return out
def test__get_excluded_ips__with_excluded_ips_in_config(self):
    """A comma-separated (whitespace-tolerant) 'excluded_ips' config entry
    is parsed into the matching IpRangeList."""
    # config file with excluded_ips
    self.enricher._enrich_config = {
        'dnshost': '8.8.8.8',
        'dnsport': '53',
        'geoippath': '/usr/share/GeoIP',
        'excluded_ips': '1.1.1.1, 2.2.2.2,3.3.3.3',
    }
    parsed = self.enricher._get_excluded_ips()
    self.assertItemsEqual(
        iptools.IpRangeList('1.1.1.1', '2.2.2.2', '3.3.3.3'), parsed)
def testSPF(headers):
    """Best-effort SPF check of an email's Received chain.

    Pulls the Return-Path domain's SPF ranges and tests each Received hop
    (IP directly, or its A records for hostnames) against them.
    Returns True if any hop passed, False if only failures were seen, or a
    string when no SPF records were found.
    """
    #spf is txt records
    #validate return-path
    #lets check from too just as a warning situation
    #not meant for testing for authenticity
    # domain.com txt
    # broken from forwarding can we check this? can we just avoid these checks by doing somethings that shows forwarding?
    recievedheaders = []
    successes = set()
    failures = set()
    returnpath = None
    for header in headers.items():
        if header[0] == 'Return-Path':
            returnpath = header[1]
        elif header[0] == 'Received':
            recievedheaders.append(header)
    if returnpath is not None:
        # Strip the user part and the trailing '>' from '<user@domain>'.
        domain = returnpath.split('@')[1][:-1]
        ips = pullSpfRecords(domain)
        if ips:
            ipranges = iptools.IpRangeList(*ips)
            for header in recievedheaders:
                valuelist = header[1].split()
                # Second token of the Received header: either a literal IP
                # (ends in a digit) or a hostname needing an A lookup.
                if valuelist[1][-1].isdigit():
                    if valuelist[1] in ipranges:
                        successes.add(valuelist[1])
                    else:
                        failures.add(valuelist[1])
                else:
                    try:
                        ans = dns.resolver.query(valuelist[1], 'A')
                        for rdata in ans:
                            rdata = rdata.address
                            # Record both the resolved address and the
                            # hostname it came from.
                            if rdata in ipranges:
                                successes.add(rdata)
                                successes.add(valuelist[1])
                            else:
                                failures.add(rdata)
                                failures.add(valuelist[1])
                    except dns.resolver.NXDOMAIN:
                        failures.add(valuelist[1])
            print('Failed SPF received ips {}'.format(failures))
            print('Succeeded SPF received ips {}'.format(successes))
            if successes:
                return True
            elif failures:
                return False
    return 'No ips in SPF records'
    #TODO needs to not be this considering true or false elsewhere
def ipv4_detail(request, pk):
    """Django view: render a network's netmask, gateway and usable range."""
    ipv4network = get_object_or_404(Network, pk=pk)
    mask = ipaddress.ip_network(ipv4network.network).netmask
    # Enumerate every address in the block; index 1 is treated as the
    # gateway and the usable range skips the network/broadcast addresses.
    addresses = list(iptools.IpRangeList(ipv4network.network))
    context = {
        'ipv4network': ipv4network,
        'mask': mask,
        'gw': addresses[1],
        'ipfrom': addresses[2],
        'ipto': addresses[-2],
    }
    return render(request, 'inventory/ipv4_detail.html', context)
def test__filter_out_excluded_ips__with_no_ip_in_excluded_ips(self):
    """When no address matches the excluded ranges, the record is left
    untouched and the ip-to-enrich mapping is never called."""
    self.enricher.excluded_ips = iptools.IpRangeList(
        '1.1.1.1', '2.2.2.2', '3.3.3.3')
    data = RecordDict({
        "url": "http://www.nask.pl/asd",
        "address": [{'ip': '1.1.1.5'}, {'ip': '2.1.1.1'}],
    })
    expected = RecordDict({
        "url": "http://www.nask.pl/asd",
        "address": [{'ip': '1.1.1.5'}, {'ip': '2.1.1.1'}],
    })
    enrich_mapping = mock.MagicMock()
    self.enricher._filter_out_excluded_ips(data, enrich_mapping)
    self.assertEqualIncludingTypes(expected, data)
    # Nothing should have been popped from the mapping.
    self.assertEqual(enrich_mapping.mock_calls, [])
def get_dg():
    """Prompt for the default gateway and validate it against the external
    network (both globals are set elsewhere). Python 2 only."""
    global extIPandMask
    global defaultgateway
    defaultgateway = raw_input('Default Gateway IP: ')
    # True when the entered gateway lies inside the external CIDR block.
    r = iptools.IpRangeList(extIPandMask).__contains__(defaultgateway)
    x = extIPandMask.split('/')
    address = x[0]
    if r is False:
        print 'Make sure it is a valid IP within the external IP network'
        # NOTE(review): no return after this recursive retry — control falls
        # through to the comparison below with the stale value. Confirm
        # whether 'return get_dg()' was intended.
        get_dg()
    if defaultgateway == address:
        print 'Your gateway cannot be the same as your local IP'
        get_dg()
    else:
        pass
def main(event, context):
    """Cloud-function handler: expand a CIDR signature from *event* into
    its first/last address and total address count."""
    logging.info('Python HTTP trigger function processed a request.')
    cidr = event['cidrSignature']
    ip_range = iptools.IpRangeList(cidr)
    # The range list's str() contains the boundary addresses as quoted
    # strings; pull the quoted value out of the first two comma-separated
    # pieces. (Presumably stable across iptools versions — verify.)
    pieces = str(ip_range).split(',')
    first = pieces[0].split("'")[-2]
    last = pieces[1].split("'")[-2]
    return {
        'cidrSignature': cidr,
        'firstAddress': first,
        'lastAddress': last,
        'addressCount': len(ip_range),
    }
class Ip(Observable):
    """Observable subclass for IP addresses (IPv4 extraction, v4/v6
    normalization)."""

    # NOTE(review): '[\d+]{1,3}' is a character class containing '\d' and a
    # literal '+' — a plain '\d{1,3}' was probably intended; as written it
    # also matches octets containing '+'. Confirm before tightening.
    regex = r'([\d+]{1,3}\.[\d+]{1,3}\.[\d+]{1,3}\.[\d+]{1,3})'
    # GeoIP enrichment (country/city keys used in DISPLAY_FIELDS below).
    geoip = DictField()
    # 4 or 6; set by clean() below.
    version = IntField()
    exclude_fields = Observable.exclude_fields + ['version']
    DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [
        ("version", "IP version"), ("geoip__country", "Country"),
        ("geoip__city", "City")
    ]
    # Reserved/special-purpose IPv4 ranges to be ignored by consumers.
    IPV4_IGNORE_RANGE = iptools.IpRangeList(
        iptools.ipv4.BENCHMARK_TESTS, iptools.ipv4.BROADCAST,
        iptools.ipv4.DUAL_STACK_LITE, iptools.ipv4.IETF_PROTOCOL_RESERVED,
        iptools.ipv4.LINK_LOCAL, iptools.ipv4.LOOPBACK,
        iptools.ipv4.LOCALHOST, iptools.ipv4.MULTICAST,
        iptools.ipv4.MULTICAST_INTERNETWORK, iptools.ipv4.MULTICAST_LOCAL,
        iptools.ipv4.PRIVATE_NETWORK_10, iptools.ipv4.PRIVATE_NETWORK_172_16,
        iptools.ipv4.PRIVATE_NETWORK_192_168)

    def clean(self):
        """Performs some normalization on IP addresses entered"""
        ip = self.value
        # The hex/long round-trips canonicalize the textual form.
        if iptools.ipv4.validate_ip(ip):  # is IPv4
            self.value = iptools.ipv4.hex2ip(
                iptools.ipv4.ip2hex(ip))  # normalize ip
            self.version = 4
        elif iptools.ipv6.validate_ip(ip):  # is IPv6
            self.value = iptools.ipv6.long2ip(
                iptools.ipv6.ip2long(ip))  # normalize ip
            self.version = 6
        else:
            raise ObservableValidationError(
                "{} is not a valid IP address".format(ip))

    @staticmethod
    def check_type(txt):
        # Full-string match against the class regex; returns the matched
        # address or False.
        if txt:
            match = re.match("^" + Ip.regex + "$", txt)
            if match:
                return match.group(1)
            else:
                return False
def main(): print '' arguments = getArgvs() if arguments.targets == "": sys.exit("Must provide at least one host to scan.") try: targets = iptools.IpRangeList(arguments.targets) for ip in targets: try: a = getCertificate(ip) else: print ip +' - problem with connecting to 443 to get cert.' f = open(ip + '.pem', 'wb') f.write(a) f.close() except: print "The IP range: " + arguments.targets + " is not a valid one."
def main():
    """Queue every target host and probe them with a pool of FTP worker
    threads."""
    if len(sys.argv) != 2:
        usage()
        sys.exit()
    work_queue = Queue.Queue()
    # The single argument is either a file of hosts (one per line) or an
    # iptools range specification.
    if os.path.exists(sys.argv[1]):
        targets = [line.strip() for line in open(sys.argv[1])]
    else:
        targets = iptools.IpRangeList(sys.argv[1])
    for target in targets:
        work_queue.put(target)
    # Daemon workers: the process exits once the queue drains.
    for _ in range(MAX_THREADS):
        worker = ThreadFTP(work_queue)
        worker.setDaemon(True)
        worker.start()
    work_queue.join()
def ips(ipListStr):
    '''
    Takes a traditional nmap string of ips and converts it to a list of
    ips. Only takes individuals or CIDR notation.
    '''
    import iptools
    expanded = []
    for element in ipListStr.split(','):
        if '/' in element:
            # CIDR block: let iptools enumerate every address.
            # (Replaces the original manual __iter__()/next()/bare-except
            # drain loop with the equivalent idiomatic extend().)
            expanded.extend(iptools.IpRangeList(element))
        else:
            expanded.append(element)
    return expanded
def test__filter_out_excluded_ips__with_range_of_ips(self):
    """Every address inside 3.0.0.0/8 is removed from the record (and
    popped from the ip-to-enrich mapping); others survive."""
    self.enricher.excluded_ips = iptools.IpRangeList('3.0.0.0/8')
    data = RecordDict({
        "url": "http://www.nask.pl/asd",
        "address": [
            {'ip': '3.3.3.3', 'asn': 1234},
            {'ip': '3.255.255.255', 'asn': 5632},
            {'ip': '3.0.0.0', 'asn': 5631},
            {'ip': '2.255.255.255', 'asn': 5632},
        ],
    })
    expected = RecordDict({
        "url": "http://www.nask.pl/asd",
        "address": [{'ip': '2.255.255.255', 'asn': 5632}],
    })
    enrich_mapping = mock.MagicMock()
    self.enricher._filter_out_excluded_ips(data, enrich_mapping)
    self.assertEqualIncludingTypes(expected, data)
    # One pop per filtered address, in any order.
    self.assertItemsEqual(enrich_mapping.mock_calls, [
        mock.call.pop('3.3.3.3', None),
        mock.call.pop('3.255.255.255', None),
        mock.call.pop('3.0.0.0', None),
    ])
def render(cirt):
    """Render the plain-HTML incident Jinja template with data pulled from
    the cirt object's 'sin' dict, storing the result in sin['template']."""
    # Locate ../jinja/incident_plain_html.jinja relative to this module
    # (the '\\base' replace suggests Windows path handling — confirm).
    file = __file__
    abspath = path.dirname(path.abspath(file))
    abspath = abspath.replace('\\base', '')
    abspath = path.join(abspath, 'jinja')
    template = path.join(abspath, 'incident_plain_html.jinja')
    with open(template, 'rb') as html:
        content = ''.join(html.readlines())
    t = J.Template(content)
    # NOTE(review): the tags[...] indices below encode a fixed tag layout
    # (1=category, 2=severity, 3=zone, 4=tier, 5=agency, 6/7=type parts) —
    # inferred from the keyword names; verify against the producer.
    t = t.render(
        sin=cirt.sin['restapi']['caseId'],
        cat=cirt.sin['tags'][1],
        type=cirt.sin['tags'][6] + " - " + cirt.sin['tags'][7],
        severity=cirt.sin['tags'][2],
        zone=cirt.sin['tags'][3],
        tier=cirt.sin['tags'][4],
        agency=cirt.sin['tags'][5],
        time=datetime.now(),
        overview=cirt.sin['description'],
        URL_VALUES=cirt.sin['observables']['urls'],
        DOMAIN_VALUES=cirt.sin['observables']['fqdns'],
        USER_VALUES=cirt.sin['observables']['ldap_user'],
        #HOSTNAME_VALUES = cirt.sin['observables']['ldap_host'] ,
        SRC_IP_VALUES=cirt.sin['observables']['src_ip'],
        DST_IP_VALUES=cirt.sin['observables']['dst_ip'],
        HASH_VALUES=cirt.sin['observables']['hashes'],
        EMAIL_SENDER_VALUES=cirt.sin['observables']['src_email'],
        EMAIL_RECIPIENT_VALUES=cirt.sin['observables']['dst_email'],
        # RFC 1918 private space plus loopback, for template-side checks.
        PRIVATE_IPS_RFC1918=iptools.IpRangeList('127.0.0.1', '10.0.0.1/8',
                                                '172.16.0.1/20',
                                                '192.168.0.1/16'),
        PRIVATE_ORG_DOMAINS=('acme.org', 'kitty.org', 'xmen.org',
                             'venom.com', 'spiderman.org', 'superman.org',
                             'rip.org', 'aspect.com', 'sensus.org',
                             'styleit.org'))
    cirt.sin['template'] = t
    return cirt
#!/usr/bin/python #alex mclean, 2013 import iptools, httplib for ip in iptools.IpRangeList('0.0.0.0/0'): try: print "Greeting " + ip cx = httplib.HTTPConnection("%s:80" % ip) cx.request("POST", '/', "message=Hello+world!") except: pass
# Async tasks with django-celery, for testing we use a memory test backend #----------------------------------------------------------------------------------- BROKER_BACKEND = 'memory' #----------------------------------------------------------------------------------- # Django-Nose config #----------------------------------------------------------------------------------- TEST_RUNNER = 'django_nose.NoseTestSuiteRunner' #----------------------------------------------------------------------------------- # Debug Toolbar #----------------------------------------------------------------------------------- import iptools INTERNAL_IPS = iptools.IpRangeList( '127.0.0.1', '192.168.0.10', '192.168.0.0/24', # network block '0.0.0.0') DEBUG_TOOLBAR_CONFIG = { 'INTERCEPT_REDIRECTS': False, # disable redirect traps } #----------------------------------------------------------------------------------- # Crontab Settings .. #----------------------------------------------------------------------------------- from datetime import timedelta from celery.schedules import crontab CELERYBEAT_SCHEDULE = {
# Two connections to the same database: "direct" keeps server-side cursors,
# "default" disables them (copy is taken BEFORE the flag is set).
_direct_database_config = _default_database_config.copy()
_default_database_config["DISABLE_SERVER_SIDE_CURSORS"] = True

DATABASES = {
    "default": _default_database_config,
    "direct": _direct_database_config
}

# If we are testing, set both our connections as the same, Django seems to get
# confused on Python 3.6 with transactional tests otherwise
if TESTING:
    DATABASES["default"] = _direct_database_config

# Client addresses treated as internal (e.g. for the debug toolbar).
INTERNAL_IPS = iptools.IpRangeList("127.0.0.1", "192.168.0.10",
                                   "192.168.0.0/24", "0.0.0.0")  # network block

# -----------------------------------------------------------------------------------
# Crontab Settings ..
# -----------------------------------------------------------------------------------
CELERYBEAT_SCHEDULE = {
    "check-channels": {
        "task": "check_channels_task",
        "schedule": timedelta(seconds=300)
    },
    "check-credits": {
        "task": "check_credits_task",
        "schedule": timedelta(seconds=900)
    },
    "check-elasticsearch-lag": {
# Local-development settings: enable DEBUG and the Django Debug Toolbar for
# private/loopback client addresses.
import iptools

from .base import *  # noqa: F401

DEBUG = True

# RFC 1918 private ranges plus loopback, in iptools' short CIDR notation.
INTERNAL_IPS = iptools.IpRangeList('10/8', '127/8', '172.16/12', '192.168/16')

INSTALLED_APPS.append('debug_toolbar')
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
def _get_excluded_ips(self): if self._enrich_config['excluded_ips']: return iptools.IpRangeList( *self._enrich_config['excluded_ips']) #3 --replace iptools return None
def get_ip_range_list(self):
    """Return an IpRangeList covering this library's configured IP ranges
    (an empty string is parsed when no ranges are set)."""
    parsed_ranges = list(self.parse_ip_ranges(self.ip_ranges or ""))
    return iptools.IpRangeList(*parsed_ranges)
import flask
import iptools

app = flask.Flask(__name__)

# Shared iterator over the address pool. Renamed from ``iter`` to stop
# shadowing the builtin of the same name.
ip_pool_iter = iptools.IpRangeList('192.93.0.0/16').__iter__()


@app.route('/next', methods=['GET'])
def next_ip_pool():
    """Hand out the next address from the pool, or 'end' once exhausted."""
    try:
        global ip_pool_iter
        # BUG FIX: the original called ``iptools.next(iter)``; the iptools
        # package exposes no ``next`` — the builtin next() on the iterator
        # is what advances the pool.
        ip = next(ip_pool_iter)
        return str(ip)
    except StopIteration:
        return "end"


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=80)