def test_tags1(self):
    """ Verify tags are correctly inherited

        Descendants of a tagged prefix should see the tag via
        inherited_tags, and a tag change on the top-level prefix should
        propagate to the whole subtree.
    """
    th = TestHelper()
    # add to "top level" prefix, each with a unique tag
    p1 = th.add_prefix('1.0.0.0/8', 'reservation', 'test', tags=['a'])
    p2 = th.add_prefix('1.0.0.0/9', 'reservation', 'test')
    p3 = th.add_prefix('1.0.0.0/10', 'reservation', 'test')
    # p3 should have inherited_tags = ['a'] from p1
    res = Prefix.smart_search('1.0.0.0/10', {})
    self.assertEqual(['a'], res['result'][0].inherited_tags.keys())
    p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
    p5 = th.add_prefix('1.0.0.0/23', 'reservation', 'test')
    p6 = th.add_prefix('1.0.0.0/22', 'reservation', 'test')
    # p4 should have inherited_tags = ['a'] from p1
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(['a'], res['result'][0].inherited_tags.keys())
    # change tags on top level prefix
    p1.tags = ['b']
    p1.save()
    # p1 itself inherits nothing, while every descendant should now have
    # inherited_tags = ['b'] from p1
    res = Prefix.smart_search('1.0.0.0/8', {})
    self.assertEqual([], res['result'][0].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][1].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][2].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][3].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][4].inherited_tags.keys())
    self.assertEqual(['b'], res['result'][5].inherited_tags.keys())
def test_stats3(self):
    """ Statistics must be updated when a covering prefix is shrunk. """
    helper = TestHelper()
    # two assignments, placed at opposite ends of the /21
    low = helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    high = helper.add_prefix('1.0.7.0/24', 'assignment', 'test')
    # a supernet covering both of them
    supernet = helper.add_prefix('1.0.0.0/21', 'reservation', 'bar')
    # supernet should account for both /24s
    result = Prefix.smart_search('1.0.0.0/21', {})['result'][0]
    self.assertEqual(2048, result.total_addresses)
    self.assertEqual(512, result.used_addresses)
    self.assertEqual(768 + 768, result.free_addresses)
    # shrink the supernet so only the low assignment remains inside,
    # exercising the statistics update path
    supernet.prefix = '1.0.0.0/22'
    supernet.save()
    # only the low /24 should now count as used
    result = Prefix.smart_search('1.0.0.0/22', {})['result'][0]
    self.assertEqual(1024, result.total_addresses)
    self.assertEqual(256, result.used_addresses)
    self.assertEqual(768, result.free_addresses)
def test_children9(self):
    """ Move prefix several indent steps and verify children counters. """
    helper = TestHelper()
    # build a nested tree of reservations
    helper.add_prefix('1.0.0.0/20', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/21', 'reservation', 'test')
    moved = helper.add_prefix('1.0.0.0/22', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/23', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.2.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.4.0/22', 'reservation', 'test')

    def children_of(query):
        # children counter of the first prefix matching 'query'
        return Prefix.smart_search(query, {})['result'][0].children

    # the /21 initially has the two /22s as direct children
    self.assertEqual(2, children_of('1.0.0.0/21'))
    # moving the /22 away promotes its former children to the /21
    moved.prefix = '2.0.0.0/22'
    moved.save()
    self.assertEqual(3, children_of('1.0.0.0/21'))
    # moving it back restores the original topology
    moved.prefix = '1.0.0.0/22'
    moved.save()
    self.assertEqual(2, children_of('1.0.0.0/21'))
def test_children8(self):
    """ Moving a prefix away must update the old parent's children. """
    helper = TestHelper()
    # parent's children are mid (which covers two /24s) and a stray /24
    parent = helper.add_prefix('1.0.0.0/20', 'reservation', 'test')
    mid = helper.add_prefix('1.0.0.0/22', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.1.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.7.0/24', 'reservation', 'test')
    # with mid moved away, its two /24s plus the stray /24 hang directly
    # off the parent
    mid.prefix = '2.0.0.0/22'
    mid.save()
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(3, res['result'][0].children)
    # moving mid back leaves parent with mid and the stray /24 again
    mid.prefix = '1.0.0.0/22'
    mid.save()
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(2, res['result'][0].children)
def test_stats5(self):
    """ Parent statistics must reflect prefixes added beneath it. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')

    def stats(query):
        # (total, used, free) of the first prefix matching 'query'
        match = Prefix.smart_search(query, {})['result'][0]
        return (match.total_addresses,
                match.used_addresses,
                match.free_addresses)

    # brand new /24: nothing used yet
    self.assertEqual((256, 0, 256), stats('1.0.0.0/24'))
    # add a single host under it
    helper.add_prefix('1.0.0.1/32', 'host', 'bar')
    # the /24 now has exactly one used address
    self.assertEqual((256, 1, 255), stats('1.0.0.0/24'))
    # the host prefix itself is fully consumed
    self.assertEqual((1, 1, 0), stats('1.0.0.1/32'))
def test_stats7(self):
    """ Enlarge / shrink a prefix over several indent levels. """
    helper = TestHelper()
    # nested tree: /16 > /22 > /23 (FOO) > /24s
    helper.add_prefix('1.0.0.0/16', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/22', 'reservation', 'test')
    moved = helper.add_prefix('1.0.0.0/23', 'reservation', 'FOO')
    helper.add_prefix('1.0.0.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.1.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.2.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.3.0/24', 'reservation', 'test')
    # grow the /23 into a /21, hopping up past its former parent /22
    moved.prefix = '1.0.0.0/21'
    moved.save()
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2048, res['result'][0].total_addresses)
    self.assertEqual(1024, res['result'][0].used_addresses)
    self.assertEqual(1024, res['result'][0].free_addresses)
    # shrink it back down to the original /23
    moved.prefix = '1.0.0.0/23'
    moved.save()
    res = Prefix.smart_search('1.0.0.0/23', {})
    self.assertEqual(512, res['result'][0].total_addresses)
    self.assertEqual(512, res['result'][0].used_addresses)
    self.assertEqual(0, res['result'][0].free_addresses)
def test_prefix_edit(self):
    """ Indent levels must be recalculated after a prefix edit. """
    helper = TestHelper()
    # three prefixes; 'edited' starts out as a sibling of 'small'
    top = helper.add_prefix('192.168.0.0/16', 'reservation', 'test')
    small = helper.add_prefix('192.168.0.0/24', 'reservation', 'test')
    edited = helper.add_prefix('192.168.1.0/24', 'reservation', 'test')
    # widen 'edited' so it now also covers 192.168.1.0/24 and slots in
    # between 'top' and 'small'
    edited.prefix = '192.168.0.0/20'
    edited.save()
    # rows of [prefix, indent]; note how 'edited' and 'small' have
    # swapped places after the edit
    expected = [
        [top.prefix, 0],
        [edited.prefix, 1],
        [small.prefix, 2],
    ]
    res = Prefix.smart_search('0.0.0.0/0', {})
    result = [[p.prefix, p.indent] for p in res['result']]
    self.assertEqual(expected, result)
def test_children3(self):
    """ Children counters must be correct when a prefix is added. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    # a lone /24 has no children
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(0, res['result'][0].children)
    # wrap it in a /20; the new top-level prefix has exactly one child
    helper.add_prefix('1.0.0.0/20', 'reservation', 'bar')
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(1, res['result'][0].children)
def search(self, search_string='', vrf_id=None, filters=None):
    """ Fetch prefixes matching search string

        :param search_string: Search string provided by GUI
        :param vrf_id: VRF ID provided by GUI
        :param filters: Filters (Prefix type) provided by GUI
        :return: None
    """
    self.lock.acquire()
    # BUGFIX: the entire body now runs inside try/finally. Previously
    # only the smart_search call was guarded; an exception raised before
    # the try (e.g. re.compile() on an invalid pattern, or
    # status_filter_build()) returned with the lock still held, dead-
    # locking every subsequent search.
    try:
        # Clear current dictionary
        self._init_db()

        # Compile search string
        self.search_string = search_string
        self.search_pattern = re.compile(self.search_string, re.IGNORECASE)

        # Build VRF query based on `vrf_id` to be used as `extra_query` param
        vrf_q = None if not vrf_id else {
            'operator': 'equals',
            'val1': 'vrf_id',
            'val2': vrf_id
        }

        # Build status filters
        filter_q = self.status_filter_build(filters)

        # Combine vrf_q and filter_q
        if vrf_q:
            extra_q = vrf_q if not filter_q else {
                'operator': 'and',
                'val1': vrf_q,
                'val2': filter_q
            }
        else:
            extra_q = filter_q

        search_result = Prefix.smart_search(search_string, search_options={
            'parents_depth': -1,
            'children_depth': -1,
            'max_result': 0
        }, extra_query=extra_q)['result']
        # For each prefix in search results find a parent prefix
        # This is (unfortunately) based on the fact that prefix
        # list from search IS ordered (parent exists before children)
        for prefix in search_result:
            self.find_parent(prefix, self.db)
    finally:
        # always release, whether we succeeded or an exception is
        # propagating (the old code re-raised manually after releasing;
        # finally gives the same observable behavior)
        self.lock.release()
def test_children4(self):
    """ Children counters must be correct when a prefix is enlarged. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    helper.add_prefix('1.0.7.0/24', 'assignment', 'test')
    supernet = helper.add_prefix('1.0.0.0/22', 'reservation', 'bar')
    # the /22 covers only the first /24
    res = Prefix.smart_search('1.0.0.0/22', {})
    self.assertEqual(1, res['result'][0].children)
    # widen it to a /21 so both /24s fall underneath
    supernet.prefix = '1.0.0.0/21'
    supernet.save()
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2, res['result'][0].children)
def test_stats1(self):
    """ Statistics must be correct when a prefix is added. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    # a fresh /24 has all 256 addresses free
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(256, res['result'][0].total_addresses)
    self.assertEqual(0, res['result'][0].used_addresses)
    self.assertEqual(256, res['result'][0].free_addresses)
    # add a covering /20; the /24 counts as used space within it
    helper.add_prefix('1.0.0.0/20', 'reservation', 'bar')
    res = Prefix.smart_search('1.0.0.0/20', {})
    self.assertEqual(4096, res['result'][0].total_addresses)
    self.assertEqual(256, res['result'][0].used_addresses)
    self.assertEqual(3840, res['result'][0].free_addresses)
def test_children7(self):
    """ Adding a host must bump the parent's children counter. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    # the assignment starts out childless
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(0, res['result'][0].children)
    # add a host inside it
    helper.add_prefix('1.0.0.1/32', 'host', 'bar')
    # the assignment now has the host as its only child
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(1, res['result'][0].children)
    # the host itself has no children
    res = Prefix.smart_search('1.0.0.1/32', {})
    self.assertEqual(0, res['result'][0].children)
def test_children5(self):
    """ Children counters must be correct when a prefix is shrunk. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    helper.add_prefix('1.0.7.0/24', 'assignment', 'test')
    supernet = helper.add_prefix('1.0.0.0/21', 'reservation', 'bar')
    # the /21 covers both assignments
    res = Prefix.smart_search('1.0.0.0/21', {})
    self.assertEqual(2, res['result'][0].children)
    # shrink it so only the first assignment remains inside
    supernet.prefix = '1.0.0.0/22'
    supernet.save()
    res = Prefix.smart_search('1.0.0.0/22', {})
    self.assertEqual(1, res['result'][0].children)
def test_stats7(self):
    """ Statistics must follow a prefix moved across indent levels. """
    helper = TestHelper()
    # nested tree of reservations
    helper.add_prefix('1.0.0.0/20', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/21', 'reservation', 'test')
    moved = helper.add_prefix('1.0.0.0/22', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/23', 'reservation', 'test')
    helper.add_prefix('1.0.0.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.2.0/24', 'reservation', 'test')
    helper.add_prefix('1.0.4.0/22', 'reservation', 'test')

    def stats(query):
        # (total, used, free) of the first prefix matching 'query'
        m = Prefix.smart_search(query, {})['result'][0]
        return (m.total_addresses, m.used_addresses, m.free_addresses)

    # the two /22s fill the /21 completely
    self.assertEqual((2048, 2048, 0), stats('1.0.0.0/21'))
    # move one /22 out; its former /23 child now counts directly under
    # the /21, leaving a /24-sized hole free
    moved.prefix = '2.0.0.0/22'
    moved.save()
    self.assertEqual((2048, 1792, 256), stats('1.0.0.0/21'))
    # move it back into the tree again
    moved.prefix = '1.0.0.0/22'
    moved.save()
    self.assertEqual((2048, 2048, 0), stats('1.0.0.0/21'))
def test_children6(self):
    """ Children counters must be correct when a prefix is moved. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    roaming = helper.add_prefix('2.0.0.0/25', 'reservation', 'bar')
    # enlarge in place; children must not linger from the old /25 spot
    roaming.prefix = '2.0.0.0/22'
    roaming.save()
    res = Prefix.smart_search('2.0.0.0/22', {})
    self.assertEqual(0, res['result'][0].children)
    # relocate it on top of the assignment, which becomes its child
    roaming.prefix = '1.0.0.0/22'
    roaming.save()
    res = Prefix.smart_search('1.0.0.0/22', {})
    self.assertEqual(1, res['result'][0].children)
def get_prefixes(self, query):
    """ Get prefix data from NIPAP

        :param query: smart search query string handed to
                      Prefix.smart_search
        :return: None; matching prefixes are appended to self.prefixes

        Exits the process with status 1 on connection or authentication
        failure.
    """
    try:
        res = Prefix.smart_search(query, {})
    except socket.error:
        print >> sys.stderr, "Connection refused, please check hostname & port"
        sys.exit(1)
    except xmlrpclib.ProtocolError:
        print >> sys.stderr, "Authentication failed, please check your username / password"
        sys.exit(1)
    # annotate each result with an IPy.IP object for later address math
    for p in res['result']:
        p.prefix_ipy = IPy.IP(p.prefix)
        self.prefixes.append(p)
def get_prefixes(self, query):
    """ Get prefix data from NIPAP

        :param query: smart search query string handed to
                      Prefix.smart_search
        :return: None; matching prefixes are appended to self.prefixes

        Exits the process with status 1 on connection or authentication
        failure.
    """
    try:
        res = Prefix.smart_search(query, {})
    except socket.error:
        print >> sys.stderr, "Connection refused, please check hostname & port"
        sys.exit(1)
    except xmlrpclib.ProtocolError:
        print >> sys.stderr, "Authentication failed, please check your username / password"
        sys.exit(1)
    # annotate each result with an IPy.IP object for later address math
    for p in res['result']:
        p.prefix_ipy = IPy.IP(p.prefix)
        self.prefixes.append(p)
def test_prefix_edit(self):
    """ Inherited tags must be recomputed after a prefix edit (#507). """
    helper = TestHelper()
    # two top-level prefixes, each carrying its own tag
    helper.add_prefix('1.0.0.0/8', 'reservation', 'test', tags=['a'])
    helper.add_prefix('2.0.0.0/8', 'reservation', 'test', tags=['b'])
    # a child below the 'a'-tagged prefix
    child = helper.add_prefix('1.0.0.0/24', 'reservation', 'test')
    res = Prefix.smart_search('1.0.0.0/24', {})
    self.assertEqual(['a'], res['result'][0].inherited_tags.keys())
    # re-home the child under the 'b'-tagged prefix
    child.prefix = '2.0.0.0/24'
    child.save()
    res = Prefix.smart_search('2.0.0.0/24', {})
    self.assertEqual(['b'], res['result'][0].inherited_tags.keys())
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries extract a query from
        a text string. This query is then passed to the search_prefix
        function, which performs the search.

        Search options are marshalled straight out of the request
        parameters; booleans arrive as the strings 'true'/'false'.
    """
    search_options = {}
    # query_id lets the GUI correlate responses with outstanding requests
    if 'query_id' in request.params:
        search_options['query_id'] = request.params['query_id']
    if 'include_all_parents' in request.params:
        if request.params['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False
    if 'include_all_children' in request.params:
        if request.params['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False
    if 'parents_depth' in request.params:
        search_options['parents_depth'] = request.params['parents_depth']
    if 'children_depth' in request.params:
        search_options['children_depth'] = request.params['children_depth']
    if 'max_result' in request.params:
        search_options['max_result'] = request.params['max_result']
    if 'offset' in request.params:
        search_options['offset'] = request.params['offset']
    log.debug("Smart search query: schema=%s q=%s search_options=%s" % (
        str(request.params.get('schema')),
        request.params.get('query_string'),
        str(search_options)
    ))
    try:
        # resolve the schema id and run the search within it
        schema = Schema.get(int(request.params['schema']))
        result = Prefix.smart_search(schema,
            request.params['query_string'],
            search_options
        )
    except NipapError, e:
        # report backend errors as JSON so the GUI can display them
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
def test_stats4(self):
    """ Statistics must be correct when a prefix is moved. """
    helper = TestHelper()
    helper.add_prefix('1.0.0.0/24', 'assignment', 'test')
    roaming = helper.add_prefix('2.0.0.0/25', 'reservation', 'bar')
    # enlarge in place; stats from the old /25 position must not linger
    roaming.prefix = '2.0.0.0/22'
    roaming.save()
    res = Prefix.smart_search('2.0.0.0/22', {})
    self.assertEqual(1024, res['result'][0].total_addresses)
    self.assertEqual(0, res['result'][0].used_addresses)
    self.assertEqual(1024, res['result'][0].free_addresses)
def test_prefix_edit(self):
    """ display_prefix of hosts must track edits to the parent (#515). """
    helper = TestHelper()
    # an assignment with one host inside it
    parent = helper.add_prefix('192.168.0.0/24', 'assignment', 'test')
    helper.add_prefix('192.168.0.1/32', 'host', 'test')
    # widen the parent to a /23
    parent.prefix = '192.168.0.0/23'
    parent.save()
    # the host's display_prefix must now carry the /23 mask
    res = Prefix.smart_search('192.168.0.1/32', {})
    self.assertEqual('192.168.0.1/23', res['result'][0].display_prefix)
def test_children2(self):
    """ Children counters must survive edits of the assignment (#515). """
    helper = TestHelper()
    # an assignment with one host inside it
    parent = helper.add_prefix('192.168.0.0/24', 'assignment', 'test')
    helper.add_prefix('192.168.0.1/32', 'host', 'test')
    # widen the assignment; the host must still be counted as its child
    parent.prefix = '192.168.0.0/23'
    parent.save()
    res = Prefix.smart_search('192.168.0.0/23', {})
    self.assertEqual(1, res['result'][0].children)
def test_parent_prefix(self):
    """ Listing with parent_prefix must flag the 'foo' match. """
    parent = self.add_prefix('1.3.0.0/16', 'reservation', 'test')
    # rows of [prefix, match]; only the 'foo' prefix is a real match
    expected = [[parent.prefix, False]]
    expected.append([self.add_prefix('1.3.1.0/24', 'assignment', 'foo').prefix, True])
    for subnet in ('1.3.2.0/24', '1.3.3.0/24', '1.3.4.0/24'):
        expected.append([self.add_prefix(subnet, 'assignment', 'test').prefix, False])
    # outside the parent; must not appear in the result at all
    self.add_prefix('1.2.4.0/24', 'assignment', 'test')
    res = Prefix.smart_search('foo', { 'parent_prefix': parent.id })
    result = [[p.prefix, p.match] for p in res['result']]
    self.assertEqual(expected, result)
def test_parent_prefix2(self):
    """ parent_prefix listing with zero matches still returns all rows.

        Nothing matches 'foo' but we should still get the full listing
        under the parent, with every row unflagged.
    """
    parent = self.add_prefix('1.3.0.0/16', 'reservation', 'test')
    # rows of [prefix, match]; no row is a match this time
    expected = [[parent.prefix, False]]
    for subnet in ('1.3.1.0/24', '1.3.2.0/24', '1.3.3.0/24', '1.3.4.0/24'):
        expected.append([self.add_prefix(subnet, 'assignment', 'test').prefix, False])
    # outside the parent; must not appear in the result at all
    self.add_prefix('1.2.4.0/24', 'assignment', 'test')
    res = Prefix.smart_search('foo', { 'parent_prefix': parent.id })
    result = [[p.prefix, p.match] for p in res['result']]
    self.assertEqual(expected, result)
def list_prefix(arg, opts):
    """ List prefixes matching 'arg'

        :param arg: smart search string typed on the CLI
        :param opts: parsed CLI options (unused here)
    """
    s = get_schema()
    res = Prefix.smart_search(s, arg, { 'parents_depth': -1, 'max_result': 1200 })
    if len(res['result']) == 0:
        print "No addresses matching '%s' found." % arg
        return
    for p in res['result']:
        # rows with display == False are context only, not real matches
        if p.display == False:
            continue
        try:
            # indent one space per tree level before the prefix itself
            print "%-29s %-2s %-19s %-14s %-40s" % (
                "".join(" " for i in range(p.indent)) + p.display_prefix,
                p.type[0].upper(), p.node, p.order_id, p.description )
        except UnicodeEncodeError, e:
            # some descriptions contain characters the terminal encoding
            # cannot represent; skip the row with a warning
            print >> sys.stderr, "\nCrazy encoding for prefix %s\n" % p.prefix
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries extract a query from
        a text string. This query is then passed to the search_prefix
        function, which performs the search.

        Search options are marshalled from the request parameters;
        booleans arrive as the strings 'true'/'false'. VRF and indent
        constraints are expressed as an extra_query dict.
    """
    search_options = {}
    extra_query = None
    vrf_filter = None
    # query_id lets the GUI correlate responses with outstanding requests
    if 'query_id' in request.params:
        search_options['query_id'] = request.params['query_id']
    if 'include_all_parents' in request.params:
        if request.params['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False
    if 'include_all_children' in request.params:
        if request.params['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False
    if 'parents_depth' in request.params:
        search_options['parents_depth'] = request.params['parents_depth']
    if 'children_depth' in request.params:
        search_options['children_depth'] = request.params['children_depth']
    if 'include_neighbors' in request.params:
        if request.params['include_neighbors'] == 'true':
            search_options['include_neighbors'] = True
        else:
            search_options['include_neighbors'] = False
    if 'max_result' in request.params:
        search_options['max_result'] = request.params['max_result']
    if 'offset' in request.params:
        search_options['offset'] = request.params['offset']
    if 'parent_prefix' in request.params:
        search_options['parent_prefix'] = request.params['parent_prefix']
    if 'vrf_filter[]' in request.params:
        vrf_filter_parts = []  # NOTE(review): unused — looks like leftover code
        # Fetch VRF IDs from search query and build extra query dict for
        # smart_search_prefix.
        vrfs = request.params.getall('vrf_filter[]')
        if len(vrfs) > 0:
            # seed the filter with the first VRF ('null' means VRF id None)
            vrf = vrfs[0]
            vrf_filter = {
                'operator': 'equals',
                'val1': 'vrf_id',
                'val2': vrf if vrf != 'null' else None
            }
            # ... then OR in each remaining VRF
            for vrf in vrfs[1:]:
                vrf_filter = {
                    'operator': 'or',
                    'val1': vrf_filter,
                    'val2': {
                        'operator': 'equals',
                        'val1': 'vrf_id',
                        'val2': vrf if vrf != 'null' else None
                    }
                }
    if vrf_filter:
        extra_query = vrf_filter
    if 'indent' in request.params:
        # constrain on indent level, ANDed with any VRF filter
        if extra_query:
            extra_query = {
                'operator': 'and',
                'val1': extra_query,
                'val2': {
                    'operator': 'equals',
                    'val1': 'indent',
                    'val2': request.params['indent']
                }
            }
        else:
            extra_query = {
                'operator': 'equals',
                'val1': 'indent',
                'val2': request.params['indent']
            }
    try:
        result = Prefix.smart_search(request.params['query_string'], search_options, extra_query)
    except NipapError, e:
        # report backend errors as JSON so the GUI can display them
        return json.dumps({ 'error': 1, 'message': e.args, 'type': type(e).__name__ })
def replace(pattern, replacement): # Fetch prefixes matching the string to replace print "Fetching prefixes from NIPAP... ", sys.stdout.flush() n = 1 prefix_list = [] t0 = time.time() query = { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'description', 'val2': pattern }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': pattern } } full_result = Prefix.search(query, { 'parents_depth': -1, 'max_result': BATCH_SIZE }) prefix_result = full_result['result'] prefix_list += prefix_result print len(prefix_list), sys.stdout.flush() while len(prefix_result) == 100: full_result = Prefix.smart_search(pattern, { 'parents_depth': -1, 'max_result': BATCH_SIZE, 'offset': n * BATCH_SIZE }) prefix_result = full_result['result'] prefix_list += prefix_result print len(prefix_list), sys.stdout.flush() n += 1 t1 = time.time() print " done in %.1f seconds" % (t1 - t0) # Display list print_pattern = "%-2s%-14s%-2s%-30s%-20s%s" print "\n\nPrefixes to change:" print print_pattern % ("", "VRF", "", "Prefix", "Node", "Description") i_match = 0 for i, prefix in enumerate(prefix_list): if prefix.match: print COLOR_RESET, print " -- %d --" % i color = COLOR_RED else: color = COLOR_RESET print (color + print_pattern) % ( "-" if prefix.match else "", prefix.vrf.rt, prefix.type[0].upper(), ((" " * prefix.indent) + prefix.display_prefix)[:min([ len(prefix.display_prefix) + 2*prefix.indent, 30 ])], (prefix.node or '')[:min([ len(prefix.node or ''), 20 ])], (prefix.description or '')[:min([ len(prefix.description or ''), 900 ])] ) if prefix.match: new_prefix_node = re.sub(pattern, replacement, (prefix.node or ''), flags=re.IGNORECASE) new_prefix_desc = re.sub(pattern, replacement, (prefix.description or ''), flags=re.IGNORECASE) print (COLOR_GREEN + print_pattern) % ( "+", prefix.vrf.rt, prefix.type[0].upper(), (" " * prefix.indent + prefix.display_prefix)[:min([ len(prefix.display_prefix) + 2*prefix.indent, 30 ])], new_prefix_node[:min([ len(new_prefix_node), 20 ])], 
new_prefix_desc[:min([ len(new_prefix_desc), 90 ])] ) # reset colors print COLOR_RESET, # Perform action? print "Select replacements to perform" print "Enter comma-separated selection (eg. 5,7,10) or \"all\" for all prefixes." print "Prefix list with ! to invert selection (eg !5,7,10 to perform operation on all except the entered prefixes)" inp = raw_input("Selection: ").strip() if len(inp) == 0: print "Empty selection, quitting." sys.exit(0) invert = False if inp[0] == "!": inp = inp[1:] invert = True rename_all = False if inp == 'all': rename_all = True selection = [] else: selection = inp.split(",") try: selection = map(lambda x: int(x.strip()), selection) except ValueError as e: print >> sys.stderr, "Could not parse selection: %s" % str(e) sys.exit(1) for i, prefix in enumerate(prefix_list): if prefix.match and ((invert and i not in selection) or (not invert and i in selection) or rename_all): if prefix.node is not None: prefix.node = re.sub(pattern, replacement, prefix.node, flags=re.IGNORECASE) if prefix.description is not None: prefix.description = re.sub(pattern, replacement, prefix.description, flags=re.IGNORECASE) print "Saving prefix %s..." % prefix.display_prefix prefix.save()
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries extract a query from
        a text string. This query is then passed to the search_prefix
        function, which performs the search.

        Search options are marshalled from the JSON request body;
        booleans arrive as the strings 'true'/'false'. VRF and indent
        constraints are expressed as an extra_query dict.
    """
    search_options = {}
    extra_query = None
    vrf_filter = None
    # query_id lets the GUI correlate responses with outstanding requests
    if 'query_id' in request.json:
        search_options['query_id'] = request.json['query_id']
    if 'include_all_parents' in request.json:
        if request.json['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False
    if 'include_all_children' in request.json:
        if request.json['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False
    if 'parents_depth' in request.json:
        search_options['parents_depth'] = request.json['parents_depth']
    if 'children_depth' in request.json:
        search_options['children_depth'] = request.json['children_depth']
    if 'include_neighbors' in request.json:
        if request.json['include_neighbors'] == 'true':
            search_options['include_neighbors'] = True
        else:
            search_options['include_neighbors'] = False
    if 'max_result' in request.json:
        # the string 'false' means "no result limit"
        if request.json['max_result'] == 'false':
            search_options['max_result'] = False
        else:
            search_options['max_result'] = request.json['max_result']
    if 'offset' in request.json:
        search_options['offset'] = request.json['offset']
    if 'parent_prefix' in request.json:
        search_options['parent_prefix'] = request.json['parent_prefix']
    if 'vrf_filter' in request.json:
        vrf_filter_parts = []  # NOTE(review): unused — looks like leftover code
        # Fetch VRF IDs from search query and build extra query dict for
        # smart_search_prefix.
        vrfs = request.json['vrf_filter']
        if len(vrfs) > 0:
            # seed the filter with the first VRF ('null' means VRF id None)
            vrf = vrfs[0]
            vrf_filter = {
                'operator': 'equals',
                'val1': 'vrf_id',
                'val2': vrf if vrf != 'null' else None
            }
            # ... then OR in each remaining VRF
            for vrf in vrfs[1:]:
                vrf_filter = {
                    'operator': 'or',
                    'val1': vrf_filter,
                    'val2': {
                        'operator': 'equals',
                        'val1': 'vrf_id',
                        'val2': vrf if vrf != 'null' else None
                    }
                }
    if vrf_filter:
        extra_query = vrf_filter
    if 'indent' in request.json:
        # constrain on indent level, ANDed with any VRF filter
        if extra_query:
            extra_query = {
                'operator': 'and',
                'val1': extra_query,
                'val2': {
                    'operator': 'equals',
                    'val1': 'indent',
                    'val2': request.json['indent']
                }
            }
        else:
            extra_query = {
                'operator': 'equals',
                'val1': 'indent',
                'val2': request.json['indent']
            }
    try:
        result = Prefix.smart_search(request.json['query_string'], search_options, extra_query)
        # Remove error key in result from backend as it interferes with the
        # error handling of the web interface.
        # TODO: Reevaluate how to deal with different types of errors; soft
        # errors like query string parser errors and hard errors like lost
        # database.
        del result['error']
    except NipapError, e:
        return json.dumps({ 'error': 1, 'message': e.args, 'type': type(e).__name__ })
def test_smart_search_prefix(self):
    """ A read-only user must be allowed to run smart_search_prefix. """
    # the call completing without raising is the whole assertion here
    result = Prefix.smart_search('default')
def testPrefixInclusion(self):
    """ Test prefix inclusion like include_neighbors, include_parents
        and include_children
    """
    th = TestHelper()
    # add a few prefixes
    p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'root')
    p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test')
    p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'foo')
    p4 = th.add_prefix('192.168.1.0/24', 'reservation', 'test')
    p5 = th.add_prefix('192.168.2.0/24', 'reservation', 'test')
    p6 = th.add_prefix('192.168.32.0/20', 'reservation', 'bar')
    p7 = th.add_prefix('192.168.32.0/24', 'assignment', 'test')
    p8 = th.add_prefix('192.168.32.1/32', 'host', 'test')
    p9 = th.add_prefix('192.168.32.2/32', 'host', 'xyz')
    p10 = th.add_prefix('192.168.32.3/32', 'host', 'test')

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p1.prefix)
    expected.append(p2.prefix)
    expected.append(p3.prefix)
    expected.append(p4.prefix)
    expected.append(p5.prefix)
    expected.append(p6.prefix)
    expected.append(p7.prefix)
    expected.append(p8.prefix)
    expected.append(p9.prefix)
    expected.append(p10.prefix)
    # searching for 0.0.0.0/0 returns every prefix, in tree order
    res = Prefix.smart_search('0.0.0.0/0', {})
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p1.prefix)
    # the free-text query 'root' matches only p1
    res = Prefix.smart_search('root', {})
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p3.prefix)
    # the free-text query 'foo' matches only p3
    res = Prefix.smart_search('foo', {})
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p1.prefix)
    expected.append(p2.prefix)
    expected.append(p3.prefix)
    expected.append(p4.prefix)
    expected.append(p5.prefix)
    expected.append(p6.prefix)
    expected.append(p7.prefix)
    expected.append(p8.prefix)
    expected.append(p9.prefix)
    expected.append(p10.prefix)
    # children_depth -1 expands the match (p1) with its entire subtree
    res = Prefix.smart_search('root', { 'children_depth': -1 })
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p1.prefix)
    expected.append(p2.prefix)
    expected.append(p3.prefix)
    # parents_depth -1 expands the match (p3) with all covering supernets
    res = Prefix.smart_search('foo', { 'parents_depth': -1 })
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)

    expected = []
    # expected result is a list where each row is a prefix
    expected.append(p8.prefix)
    expected.append(p9.prefix)
    expected.append(p10.prefix)
    # include_neighbors adds the match's (p9) sibling prefixes
    res = Prefix.smart_search('xyz', { 'include_neighbors': True })
    result = []
    for prefix in res['result']:
        result.append(prefix.prefix)
    self.assertEqual(expected, result)
def test_children1(self):
    """ Add some prefixes and make sure number of children is correct """
    th = TestHelper()

    # add a few prefixes
    p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'test')
    p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test')
    p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'test')
    p4 = th.add_prefix('192.168.1.0/24', 'reservation', 'test')
    p5 = th.add_prefix('192.168.2.0/24', 'reservation', 'test')
    p6 = th.add_prefix('192.168.32.0/20', 'reservation', 'test')
    p7 = th.add_prefix('192.168.32.0/24', 'reservation', 'test')

    def fetch_children():
        # collect [prefix, number-of-children] rows for the whole tree
        res = Prefix.smart_search('0.0.0.0/0', {})
        return [[p.prefix, p.children] for p in res['result']]

    # initial layout: p1 holds p2+p6, p2 holds p3+p4+p5, p6 holds p7
    expected = [
        [p1.prefix, 2],
        [p2.prefix, 3],
        [p3.prefix, 0],
        [p4.prefix, 0],
        [p5.prefix, 0],
        [p6.prefix, 1],
        [p7.prefix, 0],
    ]
    self.assertEqual(expected, fetch_children())

    # move p5 out from under p2; p2 drops to 2 children
    p5.prefix = '192.0.2.0/24'
    p5.save()
    expected = [
        [p5.prefix, 0],
        [p1.prefix, 2],
        [p2.prefix, 2],
        [p3.prefix, 0],
        [p4.prefix, 0],
        [p6.prefix, 1],
        [p7.prefix, 0],
    ]
    self.assertEqual(expected, fetch_children())

    # p4 192.168.1.0/24 => 192.168.0.0/21
    p4.prefix = '192.168.0.0/21'
    p4.save()
    expected = [
        [p5.prefix, 0],
        [p1.prefix, 2],
        [p2.prefix, 1],
        [p4.prefix, 1],
        [p3.prefix, 0],
        [p6.prefix, 1],
        [p7.prefix, 0],
    ]
    self.assertEqual(expected, fetch_children())

    # removing the top-level prefix leaves the counts below untouched
    p1.remove()
    expected = [
        [p5.prefix, 0],
        [p2.prefix, 1],
        [p4.prefix, 1],
        [p3.prefix, 0],
        [p6.prefix, 1],
        [p7.prefix, 0],
    ]
    self.assertEqual(expected, fetch_children())
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries to extract a query from a text
        string. This query is then passed to the search_prefix function,
        which performs the search.

        Search options are read from the request parameters; boolean
        options arrive from the web client as the strings
        'true'/'false' and are converted to real booleans here.
    """
    search_options = {}
    extra_query = None
    vrf_filter = None

    # identifier used by the backend to tie paginated requests together
    if 'query_id' in request.params:
        search_options['query_id'] = request.params['query_id']

    if 'include_all_parents' in request.params:
        if request.params['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False

    if 'include_all_children' in request.params:
        if request.params['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False

    # depth options are passed through as-is (backend parses them)
    if 'parents_depth' in request.params:
        search_options['parents_depth'] = request.params['parents_depth']

    if 'children_depth' in request.params:
        search_options['children_depth'] = request.params['children_depth']

    if 'include_neighbors' in request.params:
        if request.params['include_neighbors'] == 'true':
            search_options['include_neighbors'] = True
        else:
            search_options['include_neighbors'] = False

    if 'max_result' in request.params:
        search_options['max_result'] = request.params['max_result']

    if 'offset' in request.params:
        search_options['offset'] = request.params['offset']

    if 'parent_prefix' in request.params:
        search_options['parent_prefix'] = request.params['parent_prefix']

    if 'vrf_filter[]' in request.params:
        # NOTE(review): vrf_filter_parts is never used below — looks like
        # a leftover from an earlier implementation; confirm and remove.
        vrf_filter_parts = []

        # Fetch VRF IDs from search query and build extra query dict for
        # smart_search_prefix.
        vrfs = request.params.getall('vrf_filter[]')

        if len(vrfs) > 0:
            # seed the filter with the first VRF ...
            vrf = vrfs[0]
            vrf_filter = {
                'operator': 'equals',
                'val1': 'vrf_id',
                # the string 'null' selects the default VRF (NULL id)
                'val2': vrf if vrf != 'null' else None
            }

            # ... then OR in each remaining VRF
            for vrf in vrfs[1:]:
                vrf_filter = {
                    'operator': 'or',
                    'val1': vrf_filter,
                    'val2': {
                        'operator': 'equals',
                        'val1': 'vrf_id',
                        'val2': vrf if vrf != 'null' else None
                    }
                }

    if vrf_filter:
        extra_query = vrf_filter

    # optional filter on indent level, ANDed onto any VRF filter
    if 'indent' in request.params:
        if extra_query:
            extra_query = {
                'operator': 'and',
                'val1': extra_query,
                'val2': {
                    'operator': 'equals',
                    'val1': 'indent',
                    'val2': request.params['indent']
                }
            }
        else:
            extra_query = {
                'operator': 'equals',
                'val1': 'indent',
                'val2': request.params['indent']
            }

    try:
        result = Prefix.smart_search(request.params['query_string'],
            search_options, extra_query)
    # Python 2 except syntax — this module predates Python 3
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args,
            'type': type(e).__name__})
outFilenameToday = 'nipap_prefixes_' + ymdToday outFilenameRemovedPrefixes = 'nipap_removed_prefixes_' + ymdToday yesterday = date.today() - timedelta(days=1) outFilenameYesterday = 'nipap_prefixes_' + yesterday.strftime('%Y-%m-%d') ######################## # Nipap authentication # ######################## pynipap.xmlrpc_uri = "http://*****:*****@nipap.local:1337/XMLRPC" a = pynipap.AuthOptions({'authoritative_source': 'nlogin_nipap_client'}) customerQuery = { 'val1': 'customer_id', 'operator': 'regex_match', 'val2': '^CUST-\d{5,}$' } prefixSearch = Prefix.smart_search('', {}, customerQuery) nipapPrefixObjectsList = prefixSearch['result'] ######################################## # Functions: # # 1. Group Nipap prefix data by 'custId' # # 2. Print variable contents to a file # # 3. Write file contents to a variable # ######################################## def addNipapPrefixToCustomerPrefixList(customerPrefixList, customerId, nipapPrefix): ipList = list() ipList.append(nipapPrefix.split("/")[0]) customerPrefixes = {'custId': customerId, 'ipList': ipList} customerPrefixList.append(customerPrefixes)
def smart_search_prefix(self):
    """ Perform a smart search.

        The smart search function tries to extract a query from a text
        string. This query is then passed to the search_prefix function,
        which performs the search.

        Search options are read from the JSON request body; boolean
        options arrive from the web client as the strings
        'true'/'false' and are converted to real booleans here.
    """
    search_options = {}
    extra_query = None
    vrf_filter = None

    # identifier used by the backend to tie paginated requests together
    if 'query_id' in request.json:
        search_options['query_id'] = request.json['query_id']

    if 'include_all_parents' in request.json:
        if request.json['include_all_parents'] == 'true':
            search_options['include_all_parents'] = True
        else:
            search_options['include_all_parents'] = False

    if 'include_all_children' in request.json:
        if request.json['include_all_children'] == 'true':
            search_options['include_all_children'] = True
        else:
            search_options['include_all_children'] = False

    # depth options are passed through as-is (backend parses them)
    if 'parents_depth' in request.json:
        search_options['parents_depth'] = request.json['parents_depth']

    if 'children_depth' in request.json:
        search_options['children_depth'] = request.json['children_depth']

    if 'include_neighbors' in request.json:
        if request.json['include_neighbors'] == 'true':
            search_options['include_neighbors'] = True
        else:
            search_options['include_neighbors'] = False

    if 'max_result' in request.json:
        # the string 'false' disables the result limit entirely
        if request.json['max_result'] == 'false':
            search_options['max_result'] = False
        else:
            search_options['max_result'] = request.json['max_result']

    if 'offset' in request.json:
        search_options['offset'] = request.json['offset']

    if 'parent_prefix' in request.json:
        search_options['parent_prefix'] = request.json['parent_prefix']

    if 'vrf_filter' in request.json:
        # NOTE(review): vrf_filter_parts is never used below — looks like
        # a leftover from an earlier implementation; confirm and remove.
        vrf_filter_parts = []

        # Fetch VRF IDs from search query and build extra query dict for
        # smart_search_prefix.
        vrfs = request.json['vrf_filter']

        if len(vrfs) > 0:
            # seed the filter with the first VRF ...
            vrf = vrfs[0]
            vrf_filter = {
                'operator': 'equals',
                'val1': 'vrf_id',
                # the string 'null' selects the default VRF (NULL id)
                'val2': vrf if vrf != 'null' else None
            }

            # ... then OR in each remaining VRF
            for vrf in vrfs[1:]:
                vrf_filter = {
                    'operator': 'or',
                    'val1': vrf_filter,
                    'val2': {
                        'operator': 'equals',
                        'val1': 'vrf_id',
                        'val2': vrf if vrf != 'null' else None
                    }
                }

    if vrf_filter:
        extra_query = vrf_filter

    # optional filter on indent level, ANDed onto any VRF filter
    if 'indent' in request.json:
        if extra_query:
            extra_query = {
                'operator': 'and',
                'val1': extra_query,
                'val2': {
                    'operator': 'equals',
                    'val1': 'indent',
                    'val2': request.json['indent']
                }
            }
        else:
            extra_query = {
                'operator': 'equals',
                'val1': 'indent',
                'val2': request.json['indent']
            }

    try:
        result = Prefix.smart_search(request.json['query_string'],
            search_options, extra_query)
        # Remove error key in result from backend as it interferes with the
        # error handling of the web interface.
        # TODO: Reevaluate how to deal with different types of errors; soft
        # errors like query string parser errors and hard errors like lost
        # database.
        del result['error']
    # Python 2 except syntax — this module predates Python 3
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args,
            'type': type(e).__name__})
def replace(pattern, replacement): # Fetch prefixes matching the string to replace print "Fetching prefixes from NIPAP... ", sys.stdout.flush() n = 1 prefix_list = [] t0 = time.time() query = { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'description', 'val2': pattern }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': pattern } } full_result = Prefix.search(query, { 'parents_depth': -1, 'max_result': BATCH_SIZE }) prefix_result = full_result['result'] prefix_list += prefix_result print len(prefix_list), sys.stdout.flush() while len(prefix_result) == 100: full_result = Prefix.smart_search( pattern, { 'parents_depth': -1, 'max_result': BATCH_SIZE, 'offset': n * BATCH_SIZE }) prefix_result = full_result['result'] prefix_list += prefix_result print len(prefix_list), sys.stdout.flush() n += 1 t1 = time.time() print " done in %.1f seconds" % (t1 - t0) # Display list print_pattern = "%-2s%-14s%-2s%-30s%-20s%s" print "\n\nPrefixes to change:" print print_pattern % ("", "VRF", "", "Prefix", "Node", "Description") i_match = 0 for i, prefix in enumerate(prefix_list): if prefix.match: print COLOR_RESET, print " -- %d --" % i color = COLOR_RED else: color = COLOR_RESET print(color + print_pattern) % ( "-" if prefix.match else "", prefix.vrf.rt, prefix.type[0].upper(), ((" " * prefix.indent) + prefix.display_prefix )[:min([len(prefix.display_prefix) + 2 * prefix.indent, 30])], (prefix.node or '')[:min([len(prefix.node or ''), 20])], (prefix.description or '')[:min([len(prefix.description or ''), 900])]) if prefix.match: new_prefix_node = re.sub(pattern, replacement, (prefix.node or ''), flags=re.IGNORECASE) new_prefix_desc = re.sub(pattern, replacement, (prefix.description or ''), flags=re.IGNORECASE) print(COLOR_GREEN + print_pattern) % ( "+", prefix.vrf.rt, prefix.type[0].upper(), (" " * prefix.indent + prefix.display_prefix )[:min([len(prefix.display_prefix) + 2 * prefix.indent, 30])], new_prefix_node[:min([len(new_prefix_node), 20])], 
new_prefix_desc[:min([len(new_prefix_desc), 90])]) # reset colors print COLOR_RESET, # Perform action? print "Select replacements to perform" print "Enter comma-separated selection (eg. 5,7,10) or \"all\" for all prefixes." print "Prefix list with ! to invert selection (eg !5,7,10 to perform operation on all except the entered prefixes)" inp = raw_input("Selection: ").strip() if len(inp) == 0: print "Empty selection, quitting." sys.exit(0) invert = False if inp[0] == "!": inp = inp[1:] invert = True rename_all = False if inp == 'all': rename_all = True selection = [] else: selection = inp.split(",") try: selection = map(lambda x: int(x.strip()), selection) except ValueError as e: print >> sys.stderr, "Could not parse selection: %s" % str(e) sys.exit(1) for i, prefix in enumerate(prefix_list): if prefix.match and ((invert and i not in selection) or (not invert and i in selection) or rename_all): if prefix.node is not None: prefix.node = re.sub(pattern, replacement, prefix.node, flags=re.IGNORECASE) if prefix.description is not None: prefix.description = re.sub(pattern, replacement, prefix.description, flags=re.IGNORECASE) print "Saving prefix %s..." % prefix.display_prefix prefix.save()