def test_edit_prefix(self):
    """ We should NOT be able to execute edit_prefix as read-only user
    """
    p = Prefix()
    p.id = 123
    with self.assertRaises(NipapAuthorizationError):
        p.save()
def test_add_prefix(self):
    """ We should NOT be able to execute add_prefix as read-only user
    """
    p = Prefix()
    p.prefix = '1.3.3.7'
    with self.assertRaises(NipapAuthorizationError):
        p.save()
def test_stats5(self):
    """ Add prefixes within other prefix and verify parent prefix has
        correct statistics
    """
    th = TestHelper()

    # add a top level prefix
    p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test')

    # check stats for p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(256, res['result'][0].total_addresses)
    self.assertEqual(0, res['result'][0].used_addresses)
    self.assertEqual(256, res['result'][0].free_addresses)

    # add a host in our top prefix
    p2 = th.add_prefix('1.0.0.1/32', 'host', 'bar')

    # check stats for p1, our top level prefix
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(256, res['result'][0].total_addresses)
    self.assertEqual(1, res['result'][0].used_addresses)
    self.assertEqual(255, res['result'][0].free_addresses)

    # check stats for p2, our new host prefix
    res = Prefix.smart_search('1.0.0.1/32', {})
    self.assertEqual(1, res['result'][0].total_addresses)
    self.assertEqual(1, res['result'][0].used_addresses)
    self.assertEqual(0, res['result'][0].free_addresses)
def test_stats3(self):
    """ Check stats are correct when shrinking prefix
    """
    th = TestHelper()

    # add two top level prefixes
    p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test')
    p2 = th.add_prefix('1.0.7.0/24', 'assignment', 'test')

    # add a covering supernet around p1 and p2
    p3 = th.add_prefix('1.0.0.0/21', 'reservation', 'bar')

    # check that p3 looks good
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2048, res['result'][0].total_addresses)
    self.assertEqual(512, res['result'][0].used_addresses)
    self.assertEqual(1536, res['result'][0].free_addresses)

    # now shrink our supernet, to verify that statistics are updated
    p3.prefix = '1.0.0.0/22'
    p3.save()

    # check that p3 now only covers p1
    res = Prefix.smart_search('1.0.0.0/22', {})
    self.assertEqual(1024, res['result'][0].total_addresses)
    self.assertEqual(256, res['result'][0].used_addresses)
    self.assertEqual(768, res['result'][0].free_addresses)
def test_tags1(self):
    """ Verify tags are correctly inherited
    """
    th = TestHelper()

    # add three "top level" prefixes, where the first one carries tag 'a'
    p1 = th.add_prefix('1.0.0.0/8', 'reservation', 'test', tags=['a'])
    p2 = th.add_prefix('1.0.0.0/9', 'reservation', 'test')
    p3 = th.add_prefix('1.0.0.0/10', 'reservation', 'test')

    # p3 should have inherited_tags = ['a'] from p1
    res = Prefix.smart_search('1.0.0.0/10', {})
    self.assertEqual(['a'], res['result'][0].inherited_tags.keys())

    p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
    p5 = th.add_prefix('1.0.0.0/23', 'reservation', 'test')
    p6 = th.add_prefix('1.0.0.0/22', 'reservation', 'test')

    # p4 should have inherited_tags = ['a'] from p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(['a'], res['result'][0].inherited_tags.keys())

    # change tags on top level prefix
    p1.tags = ['b']
    p1.save()

    # all covered prefixes should now inherit tag 'b' from p1
    res = Prefix.smart_search('1.0.0.0/8', {})
    self.assertEqual([], res['result'][0].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][1].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][2].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][3].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][4].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][5].inherited_tags.keys())
def add_prefix_to_vrf(self, vrfrt, prefix, type, description, status, tags=[]):
    """ Add a prefix to a given VRF.

    Note: if the prefix is already used or invalid, None is returned.

    :param vrfrt: String like "209:123"
    :param prefix: String like "1.0.0.0/29"
    :param type: String, must be one of the following: 'reservation', 'assignment', 'host'
    :param description: String
    :param status: String, must be "assigned" or "reserved"
    :param tags: Array of Strings
    :return: Prefix object or None
    """
    # get the vrf
    myvrf = self.find_vrf('rt', vrfrt)

    p = Prefix()
    p.prefix = prefix
    p.type = type
    p.status = status
    p.description = description
    p.vrf = myvrf
    p.tags = tags
    try:
        p.save()
    except:
        e = sys.exc_info()[0]
        logging.error("Error: could not add prefix: %s" % e)
        return None
    return p
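# A minimal usage sketch of add_prefix_to_vrf() above; the 'ipam' instance,
# the route target '209:123' and the remaining argument values are made-up
# examples, not part of the original helper.
p = ipam.add_prefix_to_vrf('209:123', '10.0.0.0/29', 'assignment',
                           'customer X WAN link', 'assigned', tags=['customer-x'])
if p is None:
    logging.error("prefix 10.0.0.0/29 could not be added")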
def test_children8(self):
    """ Remove prefix and check old parent is correctly updated
    """
    th = TestHelper()

    # p1 children are p2 (which covers p3 and p4) and p5
    p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test')
    p2 = th.add_prefix('1.0.0.0/22', 'reservation', 'test')
    p3 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
    p4 = th.add_prefix('1.0.1.0/24', 'reservation', 'test')
    p5 = th.add_prefix('1.0.7.0/24', 'reservation', 'test')

    # moving p2 away means that p1 gets p3, p4 and p5 as children
    p2.prefix = '2.0.0.0/22'
    p2.save()

    # check children of p1
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(3, res['result'][0].children)

    # moving p2 back means that p1 gets p2 and p5 as children
    p2.prefix = '1.0.0.0/22'
    p2.save()

    # check children of p1
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(2, res['result'][0].children)
def test_children9(self):
    """ Move prefix several indent steps and check children are correct
    """
    th = TestHelper()

    # build a tree of prefixes
    p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test')
    p2 = th.add_prefix('1.0.0.0/21', 'reservation', 'test')
    p3 = th.add_prefix('1.0.0.0/22', 'reservation', 'test')
    p4 = th.add_prefix('1.0.0.0/23', 'reservation', 'test')
    p5 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
    p6 = th.add_prefix('1.0.2.0/24', 'reservation', 'test')
    p7 = th.add_prefix('1.0.4.0/22', 'reservation', 'test')

    # check children of p2
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2, res['result'][0].children)

    # move p3 outside of the tree
    p3.prefix = '2.0.0.0/22'
    p3.save()

    # check children of p2
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(3, res['result'][0].children)

    # move p3 into the tree again
    p3.prefix = '1.0.0.0/22'
    p3.save()

    # check children of p2
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2, res['result'][0].children)
def add_prefix(self):
    """ Add prefix according to the specification.

        The following keys can be used:

        schema          Schema to which the prefix is to be added (mandatory)
        prefix          the prefix to add if already known
        family          address family (4 or 6)
        description     A short description
        comment         Longer comment
        node            Hostname of node
        type            Type of prefix; reservation, assignment, host
        pool            ID of pool
        country         Country where the prefix is used
        order_id        Order identifier
        vrf             VRF
        alarm_priority  Alarm priority of prefix
        monitor         If the prefix should be monitored or not
        from-prefix     A prefix the prefix is to be allocated from
        from-pool       A pool (ID) the prefix is to be allocated from
        prefix_length   Prefix length of allocated prefix
    """
    p = Prefix()

    # parameters which are "special cases"
    try:
        p.schema = Schema.get(int(request.params['schema']))
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def test_stats7(self):
    """ Enlarge / shrink prefix over several indent levels
    """
    th = TestHelper()

    # build a tree of prefixes under p1
    p1 = th.add_prefix('1.0.0.0/16', 'reservation', 'test')
    p2 = th.add_prefix('1.0.0.0/22', 'reservation', 'test')
    p3 = th.add_prefix('1.0.0.0/23', 'reservation', 'FOO')
    p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
    p5 = th.add_prefix('1.0.1.0/24', 'reservation', 'test')
    p6 = th.add_prefix('1.0.2.0/24', 'reservation', 'test')
    p7 = th.add_prefix('1.0.3.0/24', 'reservation', 'test')

    # enlarge p3 so that it covers p2, ie moved up several indent levels
    p3.prefix = '1.0.0.0/21'
    p3.save()

    # check stats for p3
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2048, res['result'][0].total_addresses)
    self.assertEqual(1024, res['result'][0].used_addresses)
    self.assertEqual(1024, res['result'][0].free_addresses)

    # move back p3
    p3.prefix = '1.0.0.0/23'
    p3.save()

    # check stats for p3
    res = Prefix.smart_search('1.0.0.0/23', {})
    self.assertEqual(512, res['result'][0].total_addresses)
    self.assertEqual(512, res['result'][0].used_addresses)
    self.assertEqual(0, res['result'][0].free_addresses)
def test_remove_prefix(self):
    """ We should NOT be able to execute remove_prefix as read-only user
    """
    p = Prefix()
    p.id = 0
    with self.assertRaises(NipapAuthorizationError):
        p.remove()
def add_prefix_from_pool(self, pool, family, description):
    p = Prefix()
    args = {}
    args['from-pool'] = pool
    args['family'] = family
    p.type = pool.default_type
    p.description = description
    p.save(args)
    return p
def add_prefix(self):
    """ Add prefix according to the specification.

        The following keys can be used:

        vrf             ID of VRF to place the prefix in
        prefix          the prefix to add if already known
        family          address family (4 or 6)
        description     A short description
        expires         Expiry time of assignment
        comment         Longer comment
        node            Hostname of node
        type            Type of prefix; reservation, assignment, host
        status          Status of prefix; assigned, reserved, quarantine
        pool            ID of pool
        country         Country where the prefix is used
        added           Timestamp of added prefix
        last_modified   Timestamp of last modify
        order_id        Order identifier
        customer_id     Customer identifier
        vlan            VLAN ID
        alarm_priority  Alarm priority of prefix
        monitor         If the prefix should be monitored or not
        from-prefix     A prefix the prefix is to be allocated from
        from-pool       A pool (ID) the prefix is to be allocated from
        prefix_length   Prefix length of allocated prefix
    """
    p = Prefix()

    # Sanitize input parameters
    if 'vrf' in request.json:
        try:
            if request.json['vrf'] is None or len(unicode(request.json['vrf'])) == 0:
                p.vrf = None
            else:
                p.vrf = VRF.get(int(request.json['vrf']))
        except ValueError:
            return json.dumps({'error': 1, 'message': "Invalid VRF ID '%s'" % request.json['vrf']})
        except NipapError, e:
            return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
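# Sketch of a JSON request body using the keys documented in the controller
# above; all values are made-up examples and only a subset of the supported
# keys is shown.
example_body = {
    'vrf': 1,
    'prefix': '10.0.0.0/24',
    'type': 'assignment',
    'status': 'assigned',
    'description': 'example assignment',
    'monitor': 'true'
}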
def new_prefix():
    p = Prefix()
    p.monitor = True
    p.alarm_priority = 'high'
    p.vrf = DEFAULT_VRF
    p.node = None
    p.tags['infoblox-import'] = 1
    p.customer_id = DEFAULT_CUSTOMER
    p.authoritative_source = 'import'
    # https://github.com/SpriteLink/NIPAP/issues/721
    p.expires = '2100-01-30 00:00:00'
    return p
def add_prefix(self, prefix, type, description, tags=[]):
    p = Prefix()
    p.prefix = prefix
    p.type = type
    p.description = description
    p.tags = tags
    p.save()
    return p
def search_prefix(self):
    """ Search prefixes. Does not yet incorporate all the functions of
        the search_prefix API function due to difficulties with
        transferring a complete 'dict-to-sql' encoded data structure.

        Instead, a list of prefix attributes can be given which will be
        matched with the 'equals' operator if nothing else is specified.
        If multiple attributes are given, they will be combined with the
        'and' operator. Currently, it is not possible to specify
        different operators for different attributes.
    """
    # extract operator
    if 'operator' in request.json:
        operator = request.json['operator']
    else:
        operator = 'equals'

    # fetch attributes from request.json
    attr = XhrController.extract_prefix_attr(request.json)

    # build query dict
    n = 0
    q = {}
    for key, val in attr.items():
        if n == 0:
            q = {
                'operator': operator,
                'val1': key,
                'val2': val
            }
        else:
            q = {
                'operator': 'and',
                'val1': {
                    'operator': operator,
                    'val1': key,
                    'val2': val
                },
                'val2': q
            }
        n += 1

    # extract search options
    search_opts = {}
    if 'children_depth' in request.json:
        search_opts['children_depth'] = request.json['children_depth']
    if 'parents_depth' in request.json:
        search_opts['parents_depth'] = request.json['parents_depth']
    if 'include_neighbors' in request.json:
        search_opts['include_neighbors'] = request.json['include_neighbors']
    if 'max_result' in request.json:
        search_opts['max_result'] = request.json['max_result']
    if 'offset' in request.json:
        search_opts['offset'] = request.json['offset']

    try:
        result = Prefix.search(q, search_opts)
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
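# Sketch of the nested query the loop above builds when the request carries
# two attributes, e.g. type=host and node=foo.example.com (made-up values),
# both matched with the default 'equals' operator, and how such a query is
# handed to Prefix.search():
example_query = {
    'operator': 'and',
    'val1': {'operator': 'equals', 'val1': 'node', 'val2': 'foo.example.com'},
    'val2': {'operator': 'equals', 'val1': 'type', 'val2': 'host'}
}
example_result = Prefix.search(example_query, {'max_result': 100})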
def prefix_get(self, prefix, vrf_id):
    """ Get prefix object

    :param prefix: prefix string, e.g. '1.0.0.0/29'
    :param vrf_id: ID of the VRF to search in
    :return: Prefix object, or None if not found
    """
    # Search for prefixes matching prefix & vrf_id
    query = {
        'operator': 'and',
        'val1': {
            'operator': 'equals',
            'val1': 'prefix',
            'val2': prefix,
        },
        'val2': {
            'operator': 'equals',
            'val1': 'vrf_id',
            'val2': vrf_id
        }
    }

    self.lock.acquire()
    try:
        prefixes = Prefix.search(query)['result']
    except Exception as e:
        self.lock.release()
        raise e
    else:
        self.lock.release()

    return prefixes[0] if len(prefixes) == 1 else None
def test_prefix_edit(self):
    """ Verify indent is correct after prefix edit
    """
    th = TestHelper()

    # add a few prefixes
    p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'test')
    p2 = th.add_prefix('192.168.0.0/24', 'reservation', 'test')
    p3 = th.add_prefix('192.168.1.0/24', 'reservation', 'test')

    # now edit p3 so that it covers p2 (192.168.0.0/24)
    p3.prefix = '192.168.0.0/20'
    p3.save()

    # expected result is a list of lists, one row per prefix; first value is
    # the prefix, second is the indent level
    # notice how p2 and p3 switch places after the edit
    expected = []
    expected.append([p1.prefix, 0])
    expected.append([p3.prefix, 1])
    expected.append([p2.prefix, 2])

    res = Prefix.smart_search('0.0.0.0/0', {})
    result = []
    for prefix in res['result']:
        result.append([prefix.prefix, prefix.indent])

    self.assertEqual(expected, result)
def edit(self, id):
    """ Edit a pool.
    """
    c.pool = Pool.get(int(id))
    c.prefix_list = Prefix.list({'pool_id': c.pool.id})
    c.prefix = ''

    # save changes to NIPAP
    if request.method == 'POST':
        c.pool.name = request.params['name']
        c.pool.description = request.params['description']
        c.pool.default_type = request.params['default_type']
        if request.params['ipv4_default_prefix_length'].strip() == '':
            c.pool.ipv4_default_prefix_length = None
        else:
            c.pool.ipv4_default_prefix_length = request.params['ipv4_default_prefix_length']
        if request.params['ipv6_default_prefix_length'].strip() == '':
            c.pool.ipv6_default_prefix_length = None
        else:
            c.pool.ipv6_default_prefix_length = request.params['ipv6_default_prefix_length']

        c.pool.save()
        redirect(url(controller='pool', action='list'))

    c.search_opt_parent = 'all'
    c.search_opt_child = 'none'

    return render("/pool_edit.html")
def view_prefix(arg, opts):
    """ View a single prefix.
    """
    s = get_schema()

    res = Prefix.search(s, {'operator': 'equals', 'val1': 'prefix', 'val2': arg}, {})

    if len(res['result']) == 0:
        print "Address %s not found." % arg
        return

    p = res['result'][0]
    print "-- Address "
    print " %-15s : %s" % ("Prefix", p.prefix)
    print " %-15s : %s" % ("Display prefix", p.display_prefix)
    print " %-15s : %s" % ("Type", p.type)
    print " %-15s : IPv%s" % ("Family", p.family)
    print " %-15s : %s" % ("Description", p.description)
    print " %-15s : %s" % ("Node", p.node)
    print " %-15s : %s" % ("Order", p.order_id)
    print " %-15s : %s" % ("VRF", p.vrf)
    print " %-15s : %s" % ("Alarm priority", p.alarm_priority)
    print " %-15s : %s" % ("Monitor", p.monitor)
    print "-- Comment"
    print p.comment
def test_children3(self):
    """ Check children are correct when adding prefix
    """
    th = TestHelper()

    # add a top level prefix
    p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test')

    # check children of p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(0, res['result'][0].children)

    # add a covering supernet around p1
    p2 = th.add_prefix('1.0.0.0/20', 'reservation', 'bar')

    # check children of p2, our new top level prefix
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(1, res['result'][0].children)
def test_find_free_prefix(self):
    """ We should be able to execute find_free_prefix as read-only user
    """
    v = VRF.get(0)
    p = Prefix.find_free(v, {'from-prefix': ['1.3.3.0/24'], 'prefix_length': 27})
def add_prefix(self, prefix, type, description, tags=None):
    if tags is None:
        tags = []
    p = Prefix()
    p.prefix = prefix
    p.type = type
    p.description = description
    p.tags = tags
    p.save()
    return p
def edit_prefix(self, id):
    """ Edit a prefix.
    """
    try:
        p = Prefix.get(int(id))

        # extract attributes
        if 'prefix' in request.json:
            p.prefix = validate_string(request.json, 'prefix')
        if 'type' in request.json:
            p.type = validate_string(request.json, 'type')
        if 'description' in request.json:
            p.description = validate_string(request.json, 'description')
        if 'expires' in request.json:
            p.expires = validate_string(request.json, 'expires')
        if 'comment' in request.json:
            p.comment = validate_string(request.json, 'comment')
        if 'node' in request.json:
            p.node = validate_string(request.json, 'node')
        if 'status' in request.json:
            p.status = validate_string(request.json, 'status')

        if 'pool' in request.json:
            if request.json['pool'] is None:
                p.pool = None
            else:
                try:
                    p.pool = Pool.get(int(request.json['pool']))
                except NipapError, e:
                    return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})

        if 'alarm_priority' in request.json:
            p.alarm_priority = validate_string(request.json, 'alarm_priority')

        if 'monitor' in request.json:
            if request.json['monitor'] == 'true':
                p.monitor = True
            else:
                p.monitor = False

        if 'country' in request.json:
            p.country = validate_string(request.json, 'country')
        if 'order_id' in request.json:
            p.order_id = validate_string(request.json, 'order_id')
        if 'customer_id' in request.json:
            p.customer_id = validate_string(request.json, 'customer_id')

        if 'vrf' in request.json:
            try:
                if request.json['vrf'] is None or len(unicode(request.json['vrf'])) == 0:
                    p.vrf = None
                else:
                    p.vrf = VRF.get(int(request.json['vrf']))
            except ValueError:
                return json.dumps({'error': 1, 'message': "Invalid VRF ID '%s'" % request.json['vrf']})
            except NipapError, e:
                return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def save_hosts():
    for host in hosts:
        try:
            host.save()
            continue
        except:
            pass

        r = Prefix().search({"operator": "contains", "val1": "prefix", "val2": host.prefix})
        for p in r["result"]:
            try:
                p.type = "assignment"
                p.tags["guessed"] = 1
                p.save()
            except:
                pass

        try:
            host.save()
            continue
        except:
            pass

        # this is a last and probably wrong attempt
        # to fix the bad data in infoblox.
        p = Prefix()
        p.type = "assignment"
        p.description = "AUTO: host container (import)"
        p.tags["auto"] = 1
        ip = ipaddr.IPNetwork(host.prefix)
        p.prefix = str(ip.supernet(prefixlen_diff=1).network) + "/127"
        p.save()
        host.save()
def remove_prefix(self, id):
    """ Remove a prefix.
    """
    try:
        p = Prefix.get(int(id))
        p.remove()
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def add_prefix(self, prefix, type, description, tags=[], pool_id=None):
    p = Prefix()
    p.prefix = prefix
    p.type = type
    p.description = description
    p.tags = tags
    if pool_id:
        pool = Pool.get(pool_id)
        p.pool = pool
    p.save()
    return p
def remove_prefix(self, id):
    """ Remove a prefix from pool 'id'.
    """
    if 'prefix' not in request.params:
        abort(400, 'Missing prefix.')

    prefix = Prefix.get(int(request.params['prefix']))
    prefix.pool = None
    prefix.save()

    redirect(url(controller='pool', action='edit', id=id))
def search(self, search_string='', vrf_id=None, filters=None):
    """ Fetch prefixes matching search string

    :param search_string: Search string provided by GUI
    :param vrf_id: VRF ID provided by GUI
    :param filters: Filters (Prefix type) provided by GUI
    :return: None
    """
    self.lock.acquire()

    # Clear current dictionary
    self._init_db()

    # Compile search string
    self.search_string = search_string
    self.search_pattern = re.compile(self.search_string, re.IGNORECASE)

    # Build VRF query based on `vrf_id` to be used as `extra_query` param
    vrf_q = None if not vrf_id else {
        'operator': 'equals',
        'val1': 'vrf_id',
        'val2': vrf_id
    }

    # Build status filters
    filter_q = self.status_filter_build(filters)

    # Combine vrf_q and filter_q
    if vrf_q:
        extra_q = vrf_q if not filter_q else {
            'operator': 'and',
            'val1': vrf_q,
            'val2': filter_q
        }
    else:
        extra_q = filter_q

    try:
        search_result = Prefix.smart_search(search_string,
                                            search_options={
                                                'parents_depth': -1,
                                                'children_depth': -1,
                                                'max_result': 0
                                            },
                                            extra_query=extra_q)['result']
        # For each prefix in search results find a parent prefix.
        # This is (unfortunately) based on the fact that the prefix
        # list from search IS ordered (parent exists before children)
        for prefix in search_result:
            self.find_parent(prefix, self.db)
    except Exception as e:
        self.lock.release()
        raise e

    self.lock.release()
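# Sketch of the combined extra_query handed to Prefix.smart_search() above
# when both a VRF ID and a status filter are present; the vrf_id value 123
# and the shape of the status filter are assumptions for illustration (the
# real filter comes from status_filter_build()).
example_extra_q = {
    'operator': 'and',
    'val1': {'operator': 'equals', 'val1': 'vrf_id', 'val2': 123},
    'val2': {'operator': 'equals', 'val1': 'status', 'val2': 'assigned'}
}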
def edit(self, id):
    """ Edit a prefix.
    """
    # find prefix
    c.prefix = Prefix.get(int(id))

    # we got a HTTP POST - edit object
    if request.method == 'POST':

        c.prefix.prefix = request.params['prefix_prefix']
        c.prefix.description = request.params['prefix_description']

        if request.params['prefix_node'].strip() == '':
            c.prefix.node = None
        else:
            c.prefix.node = request.params['prefix_node']

        if request.params['prefix_country'].strip() == '':
            c.prefix.country = None
        else:
            c.prefix.country = request.params['prefix_country']

        if request.params['prefix_comment'].strip() == '':
            c.prefix.comment = None
        else:
            c.prefix.comment = request.params['prefix_comment']

        if request.params['prefix_order_id'].strip() == '':
            c.prefix.order_id = None
        else:
            c.prefix.order_id = request.params['prefix_order_id']

        if request.params['prefix_customer_id'].strip() == '':
            c.prefix.customer_id = None
        else:
            c.prefix.customer_id = request.params['prefix_customer_id']

        if request.params['prefix_vrf'].strip() == '':
            c.prefix.vrf = None
        else:
            # TODO: handle non-existent VRF...
            c.prefix.vrf = VRF.list({'rt': request.params['prefix_vrf']})[0]

        if request.params.get('prefix_monitor') != None:
            c.prefix.monitor = True
        else:
            c.prefix.monitor = False

        c.prefix.alarm_priority = request.params['prefix_alarm_priority']
        c.prefix.save()
        redirect(url(controller='prefix', action='list'))

    return render('/prefix_edit.html')
def remove_prefix(self):
    """ Remove a prefix.
    """
    try:
        schema = Schema.get(int(request.params['schema']))
        p = Prefix.get(schema, int(request.params['id']))
        p.remove()
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def list_prefix(self):
    """ List prefixes and return JSON encoded result.
    """
    # fetch attributes from request.json
    attr = XhrController.extract_prefix_attr(request.json)

    try:
        prefixes = Prefix.list(attr)
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def get_prefixs(self, name=''):
    """ Return prefixes, optionally filtered by the passed in name

    :param name: prefix name such as '1.1.1.0/32'
    :return: Prefix object list
    """
    if len(name) > 0:
        # filter on the given prefix
        p = Prefix.list({'prefix': name})
    else:
        p = Prefix.list()
    return p
def edit(self, id):
    """ Edit a prefix.
    """
    # find prefix
    c.prefix = Prefix.get(int(id))

    # we got a HTTP POST - edit object
    if request.method == 'POST':

        c.prefix.prefix = request.params['prefix_prefix']
        c.prefix.description = request.params['prefix_description']

        if request.params['prefix_node'].strip() == '':
            c.prefix.node = None
        else:
            c.prefix.node = request.params['prefix_node']

        if request.params['prefix_country'].strip() == '':
            c.prefix.country = None
        else:
            c.prefix.country = request.params['prefix_country']

        if request.params['prefix_comment'].strip() == '':
            c.prefix.comment = None
        else:
            c.prefix.comment = request.params['prefix_comment']

        if request.params['prefix_order_id'].strip() == '':
            c.prefix.order_id = None
        else:
            c.prefix.order_id = request.params['prefix_order_id']

        if request.params['prefix_customer_id'].strip() == '':
            c.prefix.customer_id = None
        else:
            c.prefix.customer_id = request.params['prefix_customer_id']

        if request.params['prefix_vrf'].strip() == '':
            c.prefix.vrf = None
        else:
            # TODO: handle non-existent VRF...
            c.prefix.vrf = VRF.list({'rt': request.params['prefix_vrf']})[0]

        if request.params.get('prefix_monitor') is not None:
            c.prefix.monitor = True
        else:
            c.prefix.monitor = False

        c.prefix.alarm_priority = request.params['prefix_alarm_priority']
        c.prefix.save()
        redirect(url(controller='prefix', action='list'))

    return render('/prefix_edit.html')
def test_children7(self):
    """ Add prefixes within other prefix and verify parent prefix has
        correct children
    """
    th = TestHelper()

    # add a top level prefix
    p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test')

    # check children of p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(0, res['result'][0].children)

    # add a host in our top prefix
    p2 = th.add_prefix('1.0.0.1/32', 'host', 'bar')

    # check children of p1, our top level prefix
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(1, res['result'][0].children)

    # check children of p2, our new host prefix
    res = Prefix.smart_search('1.0.0.1/32', {})
    self.assertEqual(0, res['result'][0].children)
def test_stats1(self):
    """ Check stats are correct when adding prefix
    """
    th = TestHelper()

    # add a top level prefix
    p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test')

    # check stats for p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(256, res['result'][0].total_addresses)
    self.assertEqual(0, res['result'][0].used_addresses)
    self.assertEqual(256, res['result'][0].free_addresses)

    # add a covering supernet around p1
    p2 = th.add_prefix('1.0.0.0/20', 'reservation', 'bar')

    # check stats for p2, our new top level prefix
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(4096, res['result'][0].total_addresses)
    self.assertEqual(256, res['result'][0].used_addresses)
    self.assertEqual(3840, res['result'][0].free_addresses)
def remove(self, id):
    """ Remove a prefix.
    """
    # find prefix
    c.prefix = Prefix.get(int(id))

    if 'confirmed' not in request.params:
        return render('/prefix_remove_confirm.html')

    c.prefix.remove()
    redirect(url(controller='prefix', action='list'))
def freeprefix(nipap_deamon_ip, account_cb_alias, account_iam_alias, vpc_network, vpc_prefix):

    # Lookup nipap daemon password cipher
    nipapCfn = dynamodb.Table(os.environ['TAILOR_TABLENAME_NIPAPCFN'])
    getNipapCfn = nipapCfn.get_item(
        Key={
            'nipapAlias': account_cb_alias
        }
    )

    # Decrypt nipap daemon password
    nipapDaemonPasswordCipherBlob = getNipapCfn['Item']['nipapDaemonPasswordCipherBlob']
    nipapDeamonPassword = bytes(kms.decrypt(CiphertextBlob=b64decode(nipapDaemonPasswordCipherBlob))['Plaintext'])

    # Look up free CIDR block
    pynipap.xmlrpc_uri = "http://*****:*****@" + nipap_deamon_ip + ":1337"
    a = pynipap.AuthOptions({
        'authoritative_source': 'tailor_nipap_client'
    })

    # Allocate first available
    new_prefix = Prefix()
    new_prefix.description = account_iam_alias
    new_prefix.type = "assignment"

    # Save will communicate with the backend and ask for the next available
    # prefix of the desired size
    new_prefix.save({'from-prefix': [vpc_network], 'prefix_length': vpc_prefix})

    # Read the assigned prefix from the new_prefix object
    print("VPC Cidr is: ", new_prefix.prefix)

    return new_prefix.prefix
def add_prefix(self, id):
    """ Add a prefix to pool 'id'
    """
    if 'prefix' not in request.params:
        abort(400, 'Missing prefix.')

    pool = Pool.get(int(id))
    prefix = Prefix.get(int(request.params['prefix']))
    prefix.pool = pool
    prefix.save()

    redirect(url(controller='pool', action='edit', id=id))
def get_prefixes(self, query):
    """ Get prefix data from NIPAP
    """
    try:
        res = Prefix.smart_search(query, {})
    except socket.error:
        print >> sys.stderr, "Connection refused, please check hostname & port"
        sys.exit(1)
    except xmlrpclib.ProtocolError:
        print >> sys.stderr, "Authentication failed, please check your username / password"
        sys.exit(1)

    for p in res['result']:
        p.prefix_ipy = IPy.IP(p.prefix)
        self.prefixes.append(p)
def remove_prefix(self, id):
    """ Remove a prefix.
    """
    try:
        p = Prefix.get(int(id))
        prefix = p.display_prefix
        p.remove()
        log.info('remove_prefix (%s) %s' % (session['user'], prefix))
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def add_prefix_from_pool(self, pool, family, description):
    p = Prefix()
    args = {}
    args['from-pool'] = pool
    args['family'] = family
    p.type = pool.default_type
    p.status = 'assigned'
    p.description = description
    try:
        p.save(args)
        return p
    except NipapError as exc:
        print("Error: could not add prefix: %s" % str(exc))
        return None
def find_prefix(self, rt, prefix):
    """ Find a prefix for a given route target (VRF)

    :param rt: route target string such as '209:123'
    :param prefix: string such as '1.1.1.0/24'
    :return: a Prefix object or None
    """
    retVal = None
    try:
        # retVal = VRF.search({'val1': 'id', 'operator': 'equals', 'val2': '10'})['result'][0]
        retVal = Prefix.search({'val1': 'prefix', 'operator': 'equals', 'val2': prefix})
        if not retVal['result']:
            retVal = None
            return retVal
        for myPrefix in retVal['result']:
            if myPrefix.vrf.rt == rt:
                return myPrefix
        # no matching prefix in the given VRF
        retVal = None
    except:
        e = sys.exc_info()[0]
        logging.error("Error: could not find prefix: %s" % e)
        retVal = None
    return retVal
def _thread_ipam_add_prefix(self):
    try:
        vrf_id = self.master.vrf_list.get(self.master.current_vrf.get())

        self.new_prefix = Prefix()
        self.new_prefix.prefix = self.val_prefix.get()
        self.new_prefix.type = self.val_type.get()
        self.new_prefix.status = self.val_status.get()
        # TODO: set vrf
        # self.new_prefix.vrf = self.master.ipam.get_vrf(vrf_id)
        self.new_prefix.description = self.val_description.get()

        self.master.ipam.save_prefix(self.new_prefix)

        tmp_message = "Prefix %s added." % self.new_prefix.prefix
        self.queue.put(QueMsg(QueMsg.TYPE_STATUS, tmp_message, QueMsg.STATUS_OK))
        self.event_generate('<<nipap_prefix_added>>', when='tail')
    except NipapError as e:
        self.queue.put(QueMsg(QueMsg.TYPE_STATUS, e, QueMsg.STATUS_NIPAP_ERROR))
        self.event_generate('<<nipap_error>>', when='tail')
    except Exception as e:
        self.queue.put(QueMsg(QueMsg.TYPE_STATUS, e, QueMsg.STATUS_ERROR))
        self.event_generate('<<nipap_error>>', when='tail')
def find_free_prefix(self, rt, fromprefix, prefixlength):
    """ Find the next free prefix.

    Note: this method only finds the next free prefix, it does not
    reserve it.

    :param rt: String like '209:123'
    :param fromprefix: String like '1.1.1.0/29'
    :param prefixlength: String like '32'
    :return: Prefix object or None
    """
    try:
        myVrf = self.find_vrf('rt', rt)
    except:
        e = sys.exc_info()[0]
        logging.error("Error: could not find VRF: %s" % e)
        return None

    if myVrf:
        retVal = Prefix.find_free(myVrf, {'from-prefix': [fromprefix], 'prefix_length': prefixlength})
    else:
        retVal = None
    return retVal
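# Minimal usage sketch of find_free_prefix() above; the 'ipam' instance and
# all argument values are made-up examples. Note that the helper only locates
# a candidate prefix, it does not reserve it.
candidate = ipam.find_free_prefix('209:123', '10.0.0.0/24', '29')
if candidate is None:
    logging.error("no free /29 available below 10.0.0.0/24")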
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries to extract a query from a text
        string. This query is then passed to the search_prefix function,
        which performs the search.
    """
    search_options = {}
    extra_query = None
    vrf_filter = None

    if 'query_id' in request.json:
        search_options['query_id'] = request.json['query_id']

    if 'include_all_parents' in request.json:
        if request.json['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False

    if 'include_all_children' in request.json:
        if request.json['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False

    if 'parents_depth' in request.json:
        search_options['parents_depth'] = request.json['parents_depth']
    if 'children_depth' in request.json:
        search_options['children_depth'] = request.json['children_depth']

    if 'include_neighbors' in request.json:
        if request.json['include_neighbors'] == 'true':
            search_options['include_neighbors'] = True
        else:
            search_options['include_neighbors'] = False

    if 'max_result' in request.json:
        if request.json['max_result'] == 'false':
            search_options['max_result'] = False
        else:
            search_options['max_result'] = request.json['max_result']
    if 'offset' in request.json:
        search_options['offset'] = request.json['offset']
    if 'parent_prefix' in request.json:
        search_options['parent_prefix'] = request.json['parent_prefix']

    if 'vrf_filter' in request.json:
        vrf_filter_parts = []

        # Fetch VRF IDs from search query and build extra query dict for
        # smart_search_prefix.
        vrfs = request.json['vrf_filter']

        if len(vrfs) > 0:
            vrf = vrfs[0]
            vrf_filter = {
                'operator': 'equals',
                'val1': 'vrf_id',
                'val2': vrf if vrf != 'null' else None
            }

            for vrf in vrfs[1:]:
                vrf_filter = {
                    'operator': 'or',
                    'val1': vrf_filter,
                    'val2': {
                        'operator': 'equals',
                        'val1': 'vrf_id',
                        'val2': vrf if vrf != 'null' else None
                    }
                }

    if vrf_filter:
        extra_query = vrf_filter

    if 'indent' in request.json:
        if extra_query:
            extra_query = {
                'operator': 'and',
                'val1': extra_query,
                'val2': {
                    'operator': 'equals',
                    'val1': 'indent',
                    'val2': request.json['indent']
                }
            }
        else:
            extra_query = {
                'operator': 'equals',
                'val1': 'indent',
                'val2': request.json['indent']
            }

    try:
        result = Prefix.smart_search(request.json['query_string'],
                                     search_options, extra_query)
        # Remove error key in result from backend as it interferes with the
        # error handling of the web interface.
        # TODO: Reevaluate how to deal with different types of errors; soft
        # errors like query string parser errors and hard errors like lost
        # database.
        del result['error']
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
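# Sketch of the vrf_filter the loop above produces for a two-entry
# vrf_filter list such as ['null', '3'] (example values); 'null' is mapped
# to None.
example_vrf_filter = {
    'operator': 'or',
    'val1': {'operator': 'equals', 'val1': 'vrf_id', 'val2': None},
    'val2': {'operator': 'equals', 'val1': 'vrf_id', 'val2': '3'}
}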
import pynipap
from pynipap import Prefix

a = pynipap.AuthOptions({
    'authoritative_source': 'NIPAP-Sync/1.0'
})

pynipap.xmlrpc_uri = "{nipapurl}"

query = {
    'operator': 'equals',
    'val1': 'type',
    'val2': 'host'
}
search_options = {
    'max_result': 1000
}

search_result = Prefix.search(query, search_options)

file = open("/tmp/nipap.hosts", "w")
for p in search_result['result']:
    ip = p.prefix.split('/')[0]
    host = p.description.replace(" ", "_")
    file.write(ip + " " + host + "\n")
file.close()