Ejemplo n.º 1
0
def verify_routing_static_routes(device,
                                 destination_address,
                                 to=None,
                                 not_to=None,
                                 known_via=None,
                                 tag=None,
                                 preference=None,
                                 protocol_name=None,
                                 extensive=False,
                                 max_time=60,
                                 check_interval=10):
    """ Verify a static route matching the given attributes exists

        Args:
            device ('obj'): Device object
            destination_address ('str'): destination ip address (prefix match)
            to ('str'): next-hop "to" value that must match (prefix match)
            not_to ('str'): next-hop "to" value that must NOT match
            known_via ('str'): "via" interface value (prefix match)
            tag ('str'): Tag value (exact match)
            preference ('str'): Preference value (exact match)
            protocol_name ('str'): Protocol name (case-insensitive)
            extensive ('bool'): if command with extensive at the end
            max_time (`int`): Max time, default: 60
            check_interval (`int`): Check interval, default: 10
        Returns:
            True / False
        Raises:
            None

    """
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        try:
            if extensive:
                out = device.parse('show route protocol static extensive')
            else:
                out = device.parse('show route protocol static')
        except SchemaEmptyParserError:
            timeout.sleep()
            continue

        # Example dictionary structure:
        #         {
        #             "rt": [
        #                 {
        #                     "rt-destination": "10.169.14.240/32",
        #                     "rt-entry": {
        #                         "nh": [
        #                             {
        #                                 "to": "10.169.14.121",
        #                                 "via": "ge-0/0/1.0"
        #                             }
        #                         ],
        #                         "rt-tag": "100",
        #                         "preference": "5",
        #                         "protocol-name": "Static"
        #                     }
        #                 }
        #             ],
        #             "table-name": "inet.0",
        #             "total-route-count": "240"
        #         },
        rt_list = Dq(out).get_values("rt")
        # Entries without their own "rt-destination" inherit the destination
        # of the previous entry, so the last seen value is carried forward.
        rt_destination_ = None
        for rt_dict in rt_list:
            current_rt_destination = Dq(rt_dict).get_values(
                "rt-destination", 0)
            if current_rt_destination:
                rt_destination_ = current_rt_destination
            if not rt_destination_:
                continue
            if not rt_destination_.startswith(destination_address):
                continue

            # Dq.get_values(key, 0) returns [] when the key is absent;
            # default to '' so .startswith() cannot raise AttributeError.
            if not_to:
                not_to_ = Dq(rt_dict).get_values("to", 0) or ''
                if not_to_.startswith(not_to):
                    continue
            if to:
                to_ = Dq(rt_dict).get_values("to", 0) or ''
                if not to_.startswith(to):
                    continue
            if known_via:
                via_ = Dq(rt_dict).get_values("via", 0) or ''
                if not via_.startswith(known_via):
                    continue
            if tag:
                tag_ = Dq(rt_dict).get_values("rt-tag", 0)
                if str(tag_) != str(tag):
                    continue
            if preference:
                preference_ = Dq(rt_dict).get_values("preference", 0)
                if str(preference_) != str(preference):
                    continue
            if protocol_name:
                protocol_ = Dq(rt_dict).get_values("protocol-name", 0)
                if protocol_.upper() != protocol_name.upper():
                    continue
            return True
        timeout.sleep()
    return False
Ejemplo n.º 2
0
def get_acl_hit_counts(device,
                       name,
                       source_network='',
                       destination_network='',
                       output='',
                       parsed_output=''):
    """ Get ACL(access-list) hit counts

        Args:
            device(`obj`): Device object
            name(`str`): Access-list name
            source_network(`str`): source network
            destination_network(`str`): destination network
            output(`str`): output of show access-lists
            parsed_output(`str`): parsed_output from show access-lists
        Returns:
            List:
            [[str, int]]

            Example:
            [['acl1', 100]]
        Raises:
            None
    """

    ret_list = []

    # At least one filter network is required; otherwise nothing to match.
    if not source_network and not destination_network:
        return ret_list

    if parsed_output:
        out = parsed_output
    else:
        # Single parse path: pass raw output through when supplied,
        # otherwise let the device run the command itself.
        cmd = "show access-lists {name}".format(name=name)
        try:
            if output:
                out = device.parse(cmd, output=output)
            else:
                out = device.parse(cmd)
        except SchemaEmptyParserError:
            return ret_list
        if not out:
            return ret_list

    src_aces = Dq(out).contains_key_value('source_network',
                                          source_network).get_values('aces')
    dest_aces = Dq(out).contains_key_value(
        'destination_network', destination_network).get_values('aces')

    # Intersect when both filters are given, otherwise use whichever applies.
    if source_network and destination_network:
        aces = set(src_aces) & set(dest_aces)
    elif source_network and not destination_network:
        aces = src_aces
    else:
        aces = dest_aces

    for ace in aces:
        hit_count = Dq(out).contains_key_value(
            'aces', ace).get_values('matched_packets')
        if hit_count:
            ret_list.append([ace, hit_count[0]])

    return ret_list
Ejemplo n.º 3
0
 def ospf_status(self):
     """Compare live OSPF neighbor state on every testbed device against
     a saved "golden" parsed snapshot, failing the section on any mismatch.

     For each device (iosxr or junos), parses the current OSPF neighbor
     detail, loads the matching golden file from a hard-coded path, and
     asserts per-interface that (a) the interface is still active, (b) the
     neighbor IDs match, and (c) the neighbor states match.
     """
     for device in self.parameters["testbed"].devices.values():
         os = device.os
         if os == "iosxr":
             ospf_status = device.parse(
                 'show ospf vrf all-inclusive neighbor detail')
             ospf_vrfs = ospf_status.q.get_values('vrf')
             # NOTE(review): golden snapshot path is hard-coded; the file is
             # opened in binary mode but parsed as JSON — presumably written
             # by a prior "learn" run. Confirm path/format with the producer.
             with open(
                     f"/root/NETDEVOPS/NORNIR/PROJECTS/DEMOS/Test/golden_ops/{device.name}_show-ospf-vrf-all-inclusive-neighbor-detail_parsed.txt",
                     "rb") as file:
                 golden_template = json.load(file)
                 for vrf in ospf_vrfs:
                     current_vrf = ospf_status["vrf"][vrf]
                     ospf_intfs = Dq(current_vrf).contains(
                         "interfaces").get_values("interfaces")
                     golden_vrfs = golden_vrfs = golden_template["vrf"][vrf] if False else golden_template["vrf"][vrf]
                     golden_intfs = Dq(golden_vrfs).contains(
                         "interfaces").get_values("interfaces")
                     # NOTE(review): loop variable `int` shadows the builtin;
                     # consider renaming to `intf` in a follow-up change.
                     for int in golden_intfs:
                         golden_neighbors = Dq(
                             golden_vrfs).contains_key_value(
                                 "interfaces", int).get_values("neighbors")
                         golden_states = Dq(golden_vrfs).contains_key_value(
                             "interfaces", int).get_values("state")
                         current_neighbors = Dq(
                             current_vrf).contains_key_value(
                                 "interfaces", int).get_values("neighbors")
                         current_states = Dq(
                             current_vrf).contains_key_value(
                                 "interfaces", int).get_values("state")
                         # Interface present in golden must still be active.
                         try:
                             assert int in ospf_intfs
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {int} is not active int vrf {vrf}"
                             )
                         # Neighbor set must be unchanged.
                         try:
                             assert golden_neighbors == current_neighbors
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {golden_neighbors} over Interface {int} Is not active"
                             )
                         # Neighbor state (e.g. FULL) must be unchanged.
                         try:
                             assert golden_states == current_states
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {golden_neighbors} over Interface {int} Is {current_states}"
                             )
         elif os == "junos":
             ospf_status = device.parse('show ospf neighbor instance all')
             ospf_sessions = ospf_status.q.get_values('ospf-neighbor')
             with open(
                     f"/root/NETDEVOPS/NORNIR/PROJECTS/DEMOS/Test/golden_ops/{device.name}_show-ospf-neighbor-instance-all_parsed.txt",
                     "rb") as file:
                 golden_template = json.load(file)
                 golden_sessions = Dq(golden_template).get_values(
                     "ospf-neighbor")
                 for session in golden_sessions:
                     # NOTE(review): these lookups are loop-invariant and
                     # could be hoisted out of the `session` loop.
                     ospf_vrfs = ospf_status.q.get_values(
                         "ospf-instance-name")
                     ospf_intfs = Dq(
                         ospf_sessions[0]).get_values('interface-name')
                     golden_vrfs = Dq(golden_template).get_values(
                         'ospf-instance-name')
                     golden_intfs = Dq(golden_template).get_values(
                         'interface-name')
                     for int in golden_intfs:
                         golden_neighbors = Dq(session).get_values(
                             "neighbor-id")
                         golden_states = Dq(session).get_values(
                             'ospf-neighbor-state')
                         current_neighbors = Dq(
                             ospf_sessions[0]).get_values("neighbor-id")
                         current_states = Dq(ospf_sessions[0]).get_values(
                             "ospf-neighbor-state")
                         try:
                             assert int in ospf_intfs
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {int} is not active")
                         try:
                             assert golden_neighbors == current_neighbors
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {golden_neighbors} over Interface {int} Is not active"
                             )
                         try:
                             assert golden_states == current_states
                         except Exception as e:
                             self.failed(
                                 f"{device.name} {golden_neighbors} over Interface {int} Is {current_states}"
                             )
Ejemplo n.º 4
0
def get_interface_ip_address(device,
                             interface,
                             address_family,
                             return_all=False):
    """ Get interface ip address from device

        Args:
            interface('str'): Interface to get address
            device ('obj'): Device object
            address_family ('str'): Address family
                                    (ipv4/ipv6/inet/inet6)
            return_all ('bool'): return List of values
        Returns:
            None if not found
            ip_address ('str'): first address when multiple
            addresses ('list'): all addresses when return_all

        Raises:
            None
    """
    if address_family not in ("ipv4", "ipv6", "inet", "inet6"):
        log.info('Must provide one of the following address families: '
                 '"ipv4", "ipv6", "inet", "inet6"')
        return

    # Normalize the generic family names to Junos naming.
    family_aliases = {"ipv4": "inet", "ipv6": "inet6"}
    address_family = family_aliases.get(address_family, address_family)

    try:
        parsed = device.parse(
        'show interfaces terse {interface}'.format(interface=interface))
    except SchemaEmptyParserError:
        return

    # Example dictionary structure:
    #         {
    #             "ge-0/0/0.0": {
    #                 "protocol": {
    #                     "inet": {
    #                         "10.189.5.93/30": {
    #                             "local": "10.189.5.93/30"
    #                         }
    #                     },
    #                     "inet6": {
    #                         "2001:db8:223c:2c16::1/64": {
    #                             "local": "2001:db8:223c:2c16::1/64"
    #                         }
    #                     },
    #                 }
    #             }
    #         }

    addresses = Dq(parsed).contains(interface).contains(
        address_family).get_values("local")
    if not addresses:
        return None
    return addresses if return_all else addresses[0]
Ejemplo n.º 5
0
    def _pre_post_processors(self,
                             testbed,
                             processor,
                             section,
                             data,
                             name,
                             devices_connected,
                             processor_targets,
                             processor_type,
                             pre_processor_result=Passed):
        """
        Execute pre/post processors and return whether the pre-processor ran
        and the processor result.

        Arguments:
            testbed (`obj`): testbed object
            processor (`obj`): Aetest Processor object
            section (`obj`): Aetest Section object
            data (`list`) : data of section
            name (`str`) : name of section in health yaml
            devices_connected (`list`) : list of connected devices
            processor_targets (`list`) : list of `processor` flag values which
                                         will be run as pre/post processors
            processor_type (`str`) : processor type `pre` or `post`
            pre_processor_result (`obj`) : result object. Default to `Passed`

        Returns:
            pre_processor_run (`bool`) : if pre processor runs or not
            pre_processor_result (`obj`) : return processor result (Result obj)
        """
        # selections collected per health arg name, keyed by selection counter
        new_data_dict = {}
        selected_options = 0
        list_of_args = []
        # flag if health args are given to pyats command
        args_flag = False
        # flag if health args are defined under action in health yaml
        args_in_yaml_flag = False
        log.debug(
            'data:\n{d}'.format(d=json.dumps(data, indent=2, sort_keys=True)))
        # keep a pristine copy; `data` is overwritten during selection below
        orig_data = copy.deepcopy(data)

        # check if health arguments are given to pyats command
        for arg_name in ['health_sections', 'health_uids', 'health_groups']:
            if getattr(runtime.args, arg_name):
                args_flag = True
            for item in self._get_actions(data):
                if Dq(item).contains(
                        'health_sections|health_uids|health_groups',
                        regex=True):
                    args_in_yaml_flag = True

        for arg_name in ['health_sections', 'health_uids', 'health_groups']:
            log.debug('Checking {an}'.format(an=arg_name))
            selected = None
            selected_options = 0
            for item in self._get_actions(data):
                # from argument

                arg_search_keyword = getattr(runtime.args, arg_name)
                if arg_search_keyword:
                    args_flag = True
                    selected = self._select_health(
                        section, item, arg_search_keyword.split(' '), arg_name)
                    selected_options += 1
                    list_of_args.append(arg_name)
                if selected:
                    new_data_dict.setdefault(arg_name, {}).setdefault(
                        selected_options, selected)

                if not args_flag:
                    # from datafile
                    search_keywords = []
                    search_keywords = getattr(
                        runtime.args,
                        arg_name) or Dq(item).get_values(arg_name)
                    if not isinstance(search_keywords, list):
                        search_keywords = [search_keywords]
                    if search_keywords == []:
                        # if args are given to one of actions, other actions
                        # will run to all sections by default. To do so,
                        # adding `.*` as search_keywords
                        # ex.)
                        # - api:               # only section1
                        #     function: func1
                        #     health_sections: section1
                        # - api:               # all sections
                        #     function: func2
                        if (args_in_yaml_flag and arg_name == 'health_sections'
                            ) and (not Dq(item).get_values('health_sections')
                                   and not Dq(item).get_values('health_uids')):
                            search_keywords = ['.*']
                        else:
                            search_keywords = None

                    log.debug(
                        "arg_name, search_keywords: {sel_name}, {sel}".format(
                            sel_name=arg_name, sel=search_keywords))
                    if search_keywords:
                        selected_options += 1
                        list_of_args.append(arg_name)
                        selected = self._select_health(section, item,
                                                       search_keywords,
                                                       arg_name)
                    if selected:
                        new_data_dict.setdefault(arg_name, {}).setdefault(
                            selected_options, selected)

        if args_flag:
            # check for the case which multiple `arg_name`s given and check the same
            # among the `arg_name`s. if same between `arg_name`s, data will be overwritten
            # by one of new_data_dict value to execute selected ones
            new_data_flag = False
            if new_data_dict:
                value = ''
                log.debug(
                    'num of health args: {n}'.format(n=len(set(list_of_args))))
                log.debug(
                    'num of new_data_dict: {n}'.format(n=len(new_data_dict)))
                if len(set(list_of_args)) == len(new_data_dict):
                    for key, value_ in new_data_dict.items():
                        if value == value_:
                            new_data_flag = True
                        else:
                            new_data_flag = False
                            if not value:
                                value = value_
                                if len(new_data_dict) == 1:
                                    new_data_flag = True
        else:
            new_data_flag = bool(new_data_dict)

        log.debug('new_data_flag: {f}'.format(f=new_data_flag))

        log.debug('new_data_dict: {ndd}'.format(
            ndd=json.dumps(new_data_dict, indent=2, sort_keys=True)))

        if new_data_flag:
            temp_data = []
            # override data because meeting criteria by `arg_name`s
            for key in new_data_dict:
                for idx in new_data_dict[key]:
                    temp_data.append(new_data_dict[key][idx].pop())
                data = temp_data
        else:
            # no (consistent) selection: clear data when health args were
            # involved but did not all agree
            if (not new_data_dict or len(set(list_of_args)) !=
                    len(new_data_dict)) and len(set(list_of_args)) != 0:
                data = []
        # processor start message
        log.debug('{type}-processor {name} started'.format(
            name=name, type=processor_type.capitalize()))
        pre_processor_run = True

        # check if `processor` tag matches processor_targets and
        # if device for action is connected
        # create temp_data with matched actions and override data by temp_data
        temp_data = []
        # list of checked devices. flag to ignore checked device
        device_checked = []
        # None if no device is defined in any actions
        all_devices_connected = None

        for each_data in self._get_actions(data):
            for key in each_data:
                log.debug(
                    'processor_targets: {pt}'.format(pt=processor_targets))
                log.debug('processor: {p}'.format(
                    p=each_data[key].get('processor', 'both')))
                if each_data[key].get('processor',
                                      'both') in processor_targets:
                    # check if device for action is connected
                    all_devices_connected = None
                    for uut in self._get_device_names(orig_data, each_data):
                        if uut not in device_checked:
                            device_checked.append(uut)
                            if isinstance(uut, str):
                                # device given by name or alias string
                                if (testbed.devices[uut].name
                                        in devices_connected) or (
                                            testbed.devices[uut].alias
                                            in devices_connected):
                                    all_devices_connected = True
                                else:
                                    all_devices_connected = False
                                    log.info(
                                        'Device {d} is not connected.'.format(
                                            d=testbed.devices[uut].name))
                                    break
                            else:
                                # device given as a device object
                                if (uut.name in devices_connected) or (
                                        uut.alias in devices_connected):
                                    all_devices_connected = True
                                else:
                                    all_devices_connected = False
                                    log.info(
                                        'Device {d} is not connected.'.format(
                                            d=testbed.devices[uut].name))
                                    break
                    if (all_devices_connected == True
                            or all_devices_connected is None):
                        temp_data.append(each_data)

        # until here, data contains only actions
        # for cases like `parallel`, `loop`, need to put the headers
        # from original data `orig_data`
        if 'actions' in orig_data:
            data = copy.deepcopy(orig_data)
            if temp_data:
                data['actions'] = temp_data
                data = [{'loop': data}]
            else:
                data = []
        elif isinstance(orig_data, list):
            if len(orig_data) > 0 and 'parallel' in orig_data[0]:
                data = copy.deepcopy(orig_data)[0]
                if temp_data:
                    data['parallel'] = temp_data
                    data = [data]
                else:
                    data = []
            else:
                data = temp_data
        else:
            data = temp_data
        # remove section if no data
        if not data:
            processor.reporter.remove_section(id_list=processor.uid.list)

        # if any device is not connected, processor will be skipped
        if devices_connected:
            # instantiate Steps() to reset step number
            steps = Steps()
            # execute dispatcher in Blitz
            result = self.dispatcher(steps, testbed, section, data, name)
            try:
                log.debug('Blitz section return:\n{result}'.format(
                    result=json.dumps(result, indent=2, sort_keys=True)))
            except TypeError:
                # result is not JSON-serializable; fall back to format_output
                log.debug('Blitz section return:\n{result}'.format(
                    result=format_output(result)))
            # check section result
            log.debug('section result: {section_result}'.format(
                section_result=section.result.name))
            log.debug('steps result: {steps_result}'.format(
                steps_result=steps.result.name))
            if processor_type == 'pre' and steps.result != Passed and steps.result != Passx:
                log.info(
                    "Pre-processor pyATS Health {name} was failed, but continue section and Post-processor"
                    .format(name=name))
                # save pre-processor result
                pre_processor_result = steps.result
                return pre_processor_run, pre_processor_result
            elif processor_type == 'post':
                # reflect processor results to section
                processor.result += steps.result
                section.result = section.result + processor.result + self.pre_processor_result

                return pre_processor_run, pre_processor_result
        else:
            if processor_type == 'pre':
                pre_processor_run = False
                # processor is skipped. but call passed to move forward for this case
                log.info(
                    "Pre-processor pyATS Health '{name}' is skipped because devices are not connected."
                    .format(name=name))
                return pre_processor_run, pre_processor_result
            elif processor_type == 'post':
                # for the case only pre-processors runs
                # NOTE(review): this compares for EQUALITY but the log text
                # says the results are "different" — confirm intended check.
                if section.result == pre_processor_result:
                    log.info('Only Pre-processor runs. Section result and '
                             'Pre-processor result are different.Reflecting '
                             'Post-processor result to Section.')
                    # reflect processor results to section
                    section.result = section.result + processor.result + self.pre_processor_result
                log.info(
                    "Post-processor pyATS Health '{name}' was skipped because devices are not connected."
                    .format(name=name))
                return pre_processor_run, pre_processor_result

        return pre_processor_run, pre_processor_result
Ejemplo n.º 6
0
def verify_ping(device,
                address,
                loss_rate=0,
                count=None,
                max_time=30,
                check_interval=10):
    """ Verify ping loss rate on ip address provided

        Args:
            device ('obj'): Device object
            address ('str'): Address value
            loss_rate ('int'): Expected loss rate value
            count ('int'): Count value for ping command
            max_time (`int`): Max time, default: 30
            check_interval (`int`): Check interval, default: 10
        Returns:
            Boolean
        Raises:
            None
    """
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        if count:
            cmd = 'ping {address} count {count}'.format(address=address,
                                                        count=count)
        else:
            cmd = 'ping {address}'.format(address=address)
        try:
            out = device.parse(cmd)
        except SchemaEmptyParserError:
            timeout.sleep()
            continue
        # Example dictionary structure:
        #     {
        #         "ping": {
        #             "address": "10.189.5.94",
        #             "data-bytes": 56,
        #             "result": [
        #                 {
        #                     "bytes": 64,
        #                     "from": "10.189.5.94",
        #                     "icmp-seq": 0,
        #                     "time": "2.261",
        #                     "ttl": 62
        #                 },
        #             ],
        #             "source": "10.189.5.94",
        #             "statistics": {
        #                 "loss-rate": 0,
        #                 "received": 1,
        #                 "round-trip": {
        #                     "avg": "2.175",
        #                     "max": "2.399",
        #                     "min": "1.823",
        #                     "stddev": "0.191"
        #                 },
        #                 "send": 1
        #             }
        #         }
        #     }
        loss_rate_found = Dq(out).get_values("loss-rate", 0)

        if loss_rate_found == loss_rate:
            return True
        # Wait before retrying; the original retried immediately, which
        # busy-looped pings against the device until max_time expired.
        timeout.sleep()
    return False
Ejemplo n.º 7
0
def get_interface_speed(device, interface, bit_size='gbps'):
    """Get speed of an interface

    Args:
        device (obj): device object
        interface (str): interface name
        bit_size (str): desired return size (gbps/mbps/kbps)

    Returns:
        Device speed or None

    Raises:
        None
    """

    try:
        # strip the logical unit (e.g. 'ge-0/0/0.0' -> 'ge-0/0/0'):
        # speed is a property of the physical interface
        out = device.parse('show interfaces extensive {interface}'.format(
            interface=interface.split('.')[0]))
    except SchemaEmptyParserError:
        return None

    # Example Dictionary
    # "physical-interface": [
    #             {
    #                 "name": "ge-0/0/0",
    #                 "speed": "1000mbps",
    #               }

    # speed_matrix[<parsed unit>][<desired unit>] is the DIVISOR that
    # converts a value in the parsed unit into the desired unit
    speed_matrix = {
        'kbps': {
            'kbps': 1,
            'mbps': 1000,
            'gbps': 1000000,
        },
        'mbps': {
            'kbps': 0.001,
            'mbps': 1,
            'gbps': 1000,
        },
        'gbps': {
            # 1 gbps == 1,000,000 kbps, so the divisor is 1e-6.
            # (was .0000001 == 1e-7, which inflated the result 10x)
            'kbps': 0.000001,
            'mbps': 0.001,
            'gbps': 1,
        },
    }

    interfaces_list = Dq(out).get_values('physical-interface')
    for interfaces_dict in interfaces_list:
        speed_ = Dq(interfaces_dict).get_values('speed', 0)
        if not speed_:
            continue

        # detect the unit of the parsed speed string, strip letters/commas
        # and convert the numeric part to the requested bit_size
        if 'kbps' in speed_:
            speed_ = int(re.sub(r'[a-z,]', '',
                                speed_)) / speed_matrix['kbps'][bit_size]
        elif 'mbps' in speed_:
            speed_ = int(re.sub(r'[a-z,]', '',
                                speed_)) / speed_matrix['mbps'][bit_size]
        else:
            speed_ = int(re.sub(r'[a-z,]', '',
                                speed_)) / speed_matrix['gbps'][bit_size]
        return speed_

    # no physical interface carried a speed value
    return None
Ejemplo n.º 8
0
    def _pre_post_processors(self,
                             testbed,
                             processor,
                             section,
                             data,
                             name,
                             reconnect,
                             processor_targets,
                             processor_type,
                             pre_processor_result=Passed,
                             health_settings=None):
        """
        execute pre/post processors and return if pre-processor runs and processor result

        Arguments:
            testbed (`obj`): testbed object
            processor (`obj`): Aetest Processor object
            section (`obj`): Aetest Section object
            data (`list`) : data of section
            name (`str`) : name of section in health yaml
            reconnect (`dict` or None) : parameters for reconnect
            processor_targets (`list`) : list of `processor_flag which ones
                                         will be run as pre/post processors
            processor_type (`str`) : processor type `pre` or `post`
            pre_processor_result (`ob`) : result object. Default to `Passed`
            health_settings (`dict` or None) : pyATS Health Check settings
                                               (e.g. `force_all_connected`).
                                               Defaults to None (treated as {})

        Returns:
            pre_processor_run (`bool`) : if pre processor runs or not
            pre_processor_result (`obj`) : return processor result (Result obj)
        """
        # guard: `.get()` is called on health_settings below, so a None
        # default must be normalized to an empty dict
        health_settings = health_settings or {}
        devices_connected = []
        new_data_dict = {}
        selected_options = 0
        list_of_args = []
        # store reasons why processor is skipped
        reasons = []
        # flag if health args are given to pyats command
        args_flag = False
        # flag if health args are defined under action in health yaml
        args_in_yaml_flag = False
        log.debug(
            'data:\n{d}'.format(d=json.dumps(data, indent=2, sort_keys=True)))
        orig_data = copy.deepcopy(data)

        # check if health arguments are given to pyats command
        for arg_name in [
                'health_tc_sections', 'health_tc_uids', 'health_tc_groups',
                'health_sections', 'health_uids', 'health_groups'
        ]:
            if getattr(runtime.args, arg_name):
                args_flag = True
            for item in self._get_actions(data, processor_targets):
                if Dq(item).contains(
                        'health_tc_sections|health_tc_uids|health_tc_groups|health_sections|health_uids|health_groups',
                        regex=True):
                    args_in_yaml_flag = True

        for arg_name in [
                'health_tc_sections', 'health_tc_uids', 'health_tc_groups',
                'health_sections', 'health_uids', 'health_groups'
        ]:
            log.debug('Checking {an}'.format(an=arg_name))
            selected = None
            selected_options = 0
            for item in self._get_actions(data, processor_targets):
                # from argument

                arg_search_keyword = getattr(runtime.args, arg_name)
                if arg_search_keyword:
                    args_flag = True
                    selected = self._select_health(
                        section, item, arg_search_keyword.split(' '), arg_name)
                    selected_options += 1
                    list_of_args.append(arg_name)
                if selected:
                    new_data_dict.setdefault(arg_name, {}).setdefault(
                        selected_options, selected)

                if not args_flag:
                    # from datafile
                    search_keywords = []
                    search_keywords = getattr(
                        runtime.args,
                        arg_name) or Dq(item).get_values(arg_name)
                    if not isinstance(search_keywords, list):
                        search_keywords = [search_keywords]
                    if search_keywords == []:
                        # if args are given to one of actions, other actions
                        # will run to all sections by default. To do so,
                        # adding `.*` as search_keywords
                        # ex.)
                        # - api:               # only section1
                        #     function: func1
                        #     health_tc_sections: section1
                        # - api:               # all sections
                        #     function: func2
                        if (args_in_yaml_flag and arg_name
                                in ['health_tc_sections', 'health_sections']
                                and
                            ((not Dq(item).get_values('health_tc_sections')
                              or not Dq(item).get_values('health_sections'))
                             and (not Dq(item).get_values('health_tc_uids')
                                  or not Dq(item).get_values('health_uids')))):
                            search_keywords = ['.*']
                        else:
                            search_keywords = None

                    log.debug(
                        "arg_name, search_keywords: {sel_name}, {sel}".format(
                            sel_name=arg_name, sel=search_keywords))
                    if search_keywords:
                        selected_options += 1
                        list_of_args.append(arg_name)
                        selected = self._select_health(section, item,
                                                       search_keywords,
                                                       arg_name)
                    if selected:
                        new_data_dict.setdefault(arg_name, {}).setdefault(
                            selected_options, selected)

        if args_flag:
            # check for the case which multiple `arg_name`s given and check the same
            # among the `arg_name`s. if same between `arg_name`s, data will be overwittern
            # by one of new_data_dict value to execute selected ones
            new_data_flag = False
            if new_data_dict:
                value = ''
                log.debug(
                    'num of health args: {n}'.format(n=len(set(list_of_args))))
                log.debug(
                    'num of new_data_dict: {n}'.format(n=len(new_data_dict)))
                if len(set(list_of_args)) == len(new_data_dict):
                    for key, value_ in new_data_dict.items():
                        if value == value_:
                            new_data_flag = True
                        else:
                            new_data_flag = False
                            if not value:
                                value = value_
                                if len(new_data_dict) == 1:
                                    new_data_flag = True
        else:
            new_data_flag = len(set(list_of_args)) == len(new_data_dict)

        log.debug('new_data_flag: {f}'.format(f=new_data_flag))

        log.debug('new_data_dict: {ndd}'.format(
            ndd=json.dumps(new_data_dict, indent=2, sort_keys=True)))

        if new_data_flag:
            temp_data = []
            # override data because meeting criteria by `arg_name`s
            for key, value__ in new_data_dict.items():
                for idx in value__:
                    # data from each health arg should be same
                    # so remove redundant data by overwriting
                    temp_data = [new_data_dict[key][idx].pop()]
                data = temp_data
        elif (not new_data_dict or len(set(list_of_args)) != len(new_data_dict)
              ) and len(set(list_of_args)) != 0:
            reasons.append(
                f"health arg {set(list_of_args)-set(new_data_dict.keys())} does not meet criteria"
            )
            data = []
        # processor start message
        log.debug('{type}-processor {name} started'.format(
            name=name, type=processor_type.capitalize()))
        pre_processor_run = True

        # check if `processor` tag matches processor_targets and
        # if device for action is connected
        # create temp_data with matched actions and override data by temp_data
        temp_data = []
        # list of checked devices. flag to ignore checked device
        device_checked = []
        # None if no device is defined in any actions
        all_devices_connected = None

        common_api = False

        if new_data_dict and new_data_flag:
            # get connected devices list
            devices_connected = self._check_all_devices_connected(
                testbed, data, reconnect)
            devices_connected = [dev for dev in devices_connected if dev != '']

        actions = self._get_actions(data, processor_targets)
        if not actions:
            # check processor in action and put in proc_in_action
            proc_in_action = []
            if isinstance(data, list):
                for each_data in data:
                    for each_proc in Dq(each_data).get_values('processor'):
                        proc_in_action.append(each_proc)
            else:
                for each_proc in Dq(data).get_values('processor'):
                    proc_in_action.append(each_proc)
            proc_in_action = set(proc_in_action)
            if proc_in_action:
                reasons.append(
                    f"processor {proc_in_action} does not meet criteria {processor_targets}"
                )
        for each_data in actions:
            for key in each_data:
                # get processor key from action. by default, `both`
                each_data_dq = Dq(each_data)
                processor_from_yaml = each_data_dq.contains(key).get_values(
                    'processor', 0)
                if not processor_from_yaml:
                    processor_from_yaml = 'both'

                log.debug(
                    'processor_targets: {pt}'.format(pt=processor_targets))
                log.debug('processor: {p}'.format(p=processor_from_yaml))

                # find `common_api` key and return True/False
                common_api = any(each_data_dq.get_values('common_api'))

                if processor_from_yaml in processor_targets:
                    # check if device for action is connected
                    all_devices_connected = None
                    devices_not_connected = []
                    for uut in self._get_device_names(orig_data, each_data):
                        if uut not in device_checked:
                            device_checked.append(uut)
                            if isinstance(uut, str):
                                if (testbed.devices[uut].name
                                        in devices_connected) or (
                                            testbed.devices[uut].alias
                                            in devices_connected):
                                    all_devices_connected = True
                                else:
                                    all_devices_connected = False
                                    devices_not_connected.append(uut)
                            elif (uut.name in devices_connected) or (
                                    uut.alias in devices_connected):
                                all_devices_connected = True
                            else:
                                all_devices_connected = False
                                devices_not_connected.append(uut)

                    if devices_not_connected:
                        log.warning("devices are not connected: {}".format(
                            devices_not_connected))

                    force_all_connected = health_settings.get(
                        'force_all_connected', True)
                    if device_checked and not force_all_connected and devices_connected:
                        log.warning(
                            "force_all_connected is False. Executing even though some of devices might not be connected."
                        )
                    # data will be created if all devices are connected or
                    # if force_all_connected == False and one of devices is connected
                    if (all_devices_connected == True or all_devices_connected
                            is None) or (force_all_connected == False
                                         and devices_connected):
                        temp_data.append(each_data)
                    else:
                        log.warning(
                            'health check is blocked due to force_all_connected is True.'
                        )

        # until here, data contains only actions
        # for cases like `parallel`, `loop`, need to put the headers
        # from original data `orig_data`
        if 'actions' in orig_data and data and temp_data:
            data = copy.deepcopy(orig_data)
            if temp_data:
                data['actions'] = temp_data
                data = [{'loop': data}]
            else:
                data = []
        elif isinstance(orig_data, list):
            if len(orig_data
                   ) > 0 and 'parallel' in orig_data[0] and data and temp_data:
                data = copy.deepcopy(orig_data)[0]
                if temp_data:
                    data['parallel'] = temp_data
                    data = [data]
                else:
                    data = []
            elif len(orig_data) > 0 and 'run_condition' in orig_data[
                    0] and data and temp_data:
                data = copy.deepcopy(orig_data)[0]
                data = [data]
            else:
                data = temp_data
        else:
            data = temp_data
        # remove section if no data
        removed_section = False
        # set reason in case device is not connected
        if (not devices_connected and not common_api) and not reasons:
            reasons.append('Device is not connected')
        if not data or reasons:
            processor.result = Skipped
            processor.reporter.remove_section(id_list=processor.uid.list)
            removed_section = True

        # if any device is not connected, processor will be skipped
        # if common_api is True, will execute
        if devices_connected or common_api:
            # instantiate Steps() to reset step number
            steps = Steps()
            # execute dispatcher in Blitz
            result = self.dispatcher(steps, testbed, section, data, name)

            if isinstance(data, list):
                # fix: inspect each element, not data[0] repeatedly, so a
                # `hide_processor` flag anywhere in the list is honored
                hide_processor = any(
                    Dq(each_data).get_values('hide_processor', 0) == True
                    for each_data in data)

            else:
                # fix: data is not a list here, so query data itself
                # (data[0] raised KeyError on dict data)
                hide_processor = Dq(data).get_values('hide_processor', 0)

            if hide_processor and not removed_section:
                removed_section = self._remove_section(processor)
            try:
                log.debug('Blitz section return:\n{result}'.format(
                    result=json.dumps(result, indent=2, sort_keys=True)))
            except TypeError:
                log.debug('Blitz section return:\n{result}'.format(
                    result=format_output(result)))
            # check section result
            log.debug('section result: {section_result}'.format(
                section_result=section.result.name))
            log.debug('steps result: {steps_result}'.format(
                steps_result=steps.result.name))

            # if section is skipped by run_condition, remove section
            if (isinstance(result, dict) and 'run_condition_skipped' in result
                    and not removed_section
                    and result['run_condition_skipped'] == True):
                processor.result = Skipped
                removed_section = self._remove_section(processor)
            if processor_type == 'pre' and steps.result != Passed and steps.result != Passx:
                log.info(
                    "Pre-processor pyATS Health {name} was failed, but continue section and Post-processor"
                    .format(name=name))
                # save pre-processor result
                pre_processor_result = steps.result
                return pre_processor_run, pre_processor_result
            elif processor_type == 'post':
                # refrect processor results to section
                processor.result += steps.result
                section.result = section.result + processor.result + self.pre_processor_result

                # return processor.result to raise the result
                # at end of context post processor
                return pre_processor_run, processor.result

        elif processor_type == 'pre':
            pre_processor_run = False
            # processor is skipped
            log.info(
                f"Pre-processor pyATS Health '{name}' is skipped due to: {reasons}"
            )
            if pre_processor_result == Passed:
                # processor.skipped()
                pre_processor_result = Skipped
            return pre_processor_run, pre_processor_result
        elif processor_type == 'post':
            # for the case only pre-processors runs
            if section.result == pre_processor_result:
                log.info('Only Pre-processor runs. Section result and '
                         'Pre-processor result are different.Reflecting '
                         'Post-processor result to Section.')
                # reflect processor results to section
                section.result = section.result + processor.result + self.pre_processor_result
            # processor is skipped
            log.info(
                f"Post-processor pyATS Health '{name}' was skipped due to: {reasons}"
            )
            if pre_processor_result == Passed:
                # processor.skipped()
                pre_processor_result = Skipped

            # return processor.result to raise the result
            # at end of context post processor
            return pre_processor_run, processor.result

        return pre_processor_run, pre_processor_result
Ejemplo n.º 9
0
    def _select_health(self, section, data, search_keywords, arg_name):
        """
        check if pyATS Health Check processor meets criteria
        via `health_tc_sections`, `health_tc_uids` and `health_tc_groups`

        Arguments:
            section (`obj`) : Aetest Subsection object.
            data (`dict`) : data of section
            search_keywords (`list`) :  list of search keywords
            arg_name (`str`) : `health_tc_sections`, `health_tc_uids` or
                               `health_tc_groups`

        Returns:
            new_data (`list`) : Updated data which meets args criteria
                                Otherwise, return empty list
        """
        # initialize
        new_data = []
        search_target = ''

        # replicate search_keywords for further loop
        search_keywords = copy.deepcopy(search_keywords)

        for search_keyword in search_keywords:
            # save original `search_keyword` in `pre_search_keyword`
            # which has `%VARIABLES{}`
            pre_search_keyword = search_keyword
            # load `%VARIABLES{} and replace in `search_keyword`
            _, search_keyword = _load_saved_variable(self,
                                                     section=section,
                                                     val=search_keyword)

            if 'type:' in search_keyword:
                search_class = search_keyword.replace('type:', '')
                # fix: `isinstance` raises TypeError when its second argument
                # is None, so guard against class names missing from the
                # mapping instead of crashing on an unknown `type:` keyword
                section_class = SECTION_CLASS_MAPPING.get(search_class)
                if section_class is not None and isinstance(
                        section, section_class):
                    new_data.append(data)

            else:
                # get search_target such as section.uid, section.groups from section
                search_target = self._find_search_target(
                    section, arg_name, search_keyword, search_keywords)
                log.debug('search_target: {st}'.format(st=search_target))
                if re.search(search_keyword, search_target):
                    # when args exist, don't need to do `contains` because
                    # args will affect to all items in pyATS Health
                    if getattr(runtime.args, arg_name):
                        dq_item = self._find_item_by_search_keyword(
                            section, data, arg_name, search_target)
                    else:
                        # in `data`, %VARIABLES doesn't need to be converted
                        # so, need to use `pre_search_keyword`
                        data_dq = Dq(data)
                        dq_item = data_dq.contains(pre_search_keyword,
                                                   regex=True,
                                                   level=1).reconstruct()
                        # for the case regex is used. need to do exact match
                        # without `regex=True`
                        if not dq_item:
                            dq_item = data_dq.contains(pre_search_keyword,
                                                       level=1).reconstruct()
                    if dq_item and dq_item not in new_data:
                        new_data.append(dq_item)

        log.debug("new_data: {}".format(new_data))
        return new_data
Ejemplo n.º 10
0
    def _get_actions(self, data, processor_targets=None):
        """
        Extract only the action items from a Blitz section, unwrapping
        container constructs such as `parallel`, `run_condition` and loop.

        Arguments:
            data   (`dict`) : data of section
            processor_targets (`list`) : list of `processor` flags; actions
                                         whose flag matches will be returned
                                         as pre/post processors. Defaults to
                                         ['pre', 'post', 'both', 'post_if_pre_execute']

        Returns:
            (`dict`): only actions from data
        """
        if processor_targets is None:
            processor_targets = ['pre', 'post', 'both', 'post_if_pre_execute']

        def _filter_by_processor(candidates):
            """
            Keep only the actions whose `processor` flag matches one of
            `processor_targets`.

            Example action item in the health yaml:

              test_sections:
              - get_testcase_name:
                - api:
                    common_api: true
                    function: get_testcase_name
                    health_sections:
                    - ^(?!common_).*
                    health_uids:
                    - ^(?!common_).*
                    hide_processor: true
                    processor: pre # <- this flag is checked
                    save:
                    - variable_name: testcase_name

            Arguments:
                candidates (`list`): action data items

            Returns:
                (`list`): action items that should be executed
            """
            kept = []
            for action in candidates:
                # an action without an explicit `processor` key runs as `both`
                flag = Dq(action).get_values('processor', 0) or 'both'
                if flag in processor_targets:
                    kept.append(action)
            return kept

        if not data:
            # nothing to unwrap; hand back unchanged
            return data

        # Dq can't be applied to a list at the first level, so the list
        # case inspects the first element for container keywords directly
        if isinstance(data, list):
            head = data[0]
            # parallel container
            if 'parallel' in head:
                return _filter_by_processor(head['parallel'])
            # run_condition container
            if 'run_condition' in head:
                return _filter_by_processor(head['run_condition']['actions'])
            # plain list of actions
            return _filter_by_processor(data)

        # loop container (dict with nested `actions`)
        if isinstance(data, dict) and Dq(data).contains('actions'):
            return _filter_by_processor(Dq(data).get_values('actions'))

        # section without loop/parallel wrappers; return data as-is
        return data
Ejemplo n.º 11
0
    def _get_device_names(self, data, each_data):
        """
        Check if %VARIABLES in device field and then resolve the device name

        Arguments:
            data        (`list`) : data of section
            each_data   (`dict`) : each of data element

        Returns:
            device_list (`list`): list of devices
        """
        device_list = []
        # before running health check, analyze actions and check/resolve
        # device name from %VARIABLES and loop_variable_name for device connectivity check
        for device in Dq(each_data).get_values('device'):
            m = re.search('%VARIABLES{(?P<var_name>.*)}',
                          device.name if hasattr(device, 'name') else device)
            if m:
                var_name = m.groupdict()['var_name']
                if isinstance(
                        data, dict
                ) and 'loop_variable_name' in data and 'value' in data:
                    # loop with list
                    if var_name == data['loop_variable_name']:
                        for dev in data['value']:
                            if dev not in device_list:
                                device_list.append(dev)
                    elif var_name in [
                            data['loop_variable_name'] + '._keys',
                            data['loop_variable_name'] + '._values',
                    ]:
                        if data['value']:
                            for item in data['value']:
                                for dev in item.keys():
                                    if dev not in device_list:
                                        device_list.append(dev)
                elif Dq(each_data).contains('loop_variable_name') and Dq(
                        each_data).contains('value'):
                    # loop with list
                    if var_name == Dq(each_data).get_values(
                            'loop_variable_name', 0):
                        loop_value = Dq(each_data).get_values('value', 0)
                        m = re.search('%VARIABLES{(?P<dev_var_name>.*)}',
                                      loop_value)
                        if m:
                            dev_var_name = m.groupdict()['dev_var_name']
                            # for testscript variables
                            if dev_var_name.startswith('testscript.'):
                                dev_var_name = dev_var_name[len('testscript.'
                                                                ):]
                                try:
                                    loop_value = self.parent.parameters[
                                        'save_variable_name'].setdefault(
                                            'testscript',
                                            {}).get(dev_var_name, [])
                                # if no key yet, just put empty list as iterable
                                except (KeyError, AttributeError):
                                    loop_value = []
                            # testcase variables
                            else:
                                try:
                                    loop_value = self.parameters[
                                        'save_variable_name'].get(
                                            dev_var_name, [])
                                # if no key yet, just put empty list as iterable
                                except KeyError:
                                    loop_value = []
                        for dev in loop_value:
                            if dev not in device_list:
                                device_list.append(dev)
                    elif var_name in [
                            Dq(each_data).get_values('loop_variable_name', 0) +
                            '._keys',
                            Dq(each_data).get_values('loop_variable_name', 0) +
                            '._values',
                    ]:
                        loop_value = Dq(each_data).get_values('value', 0)
                        m = re.search('%VARIABLES{(?P<dev_var_name>.*)}',
                                      loop_value)
                        if m:
                            dev_var_name = m.groupdict()['dev_var_name']
                            markup_variable_value = self.parent.parameters[
                                'save_variable_name'].setdefault(
                                    'testscript', {})
                            # for testscript variables
                            if dev_var_name.startswith('testscript.'):
                                dev_var_name = dev_var_name[len('testscript.'
                                                                ):]
                                if isinstance(
                                        markup_variable_value.setdefault(
                                            dev_var_name, None), dict):
                                    try:
                                        if '_keys' in var_name:
                                            loop_value = markup_variable_value.get(
                                                dev_var_name, []).keys()
                                        else:
                                            loop_value = markup_variable_value.get(
                                                dev_var_name, []).values()
                                    except (KeyError, AttributeError):
                                        loop_value = []
                                elif isinstance(
                                        markup_variable_value.setdefault(
                                            dev_var_name, None), list):
                                    loop_value = markup_variable_value[
                                        dev_var_name]
                                else:
                                    # return empty in case none of above matched
                                    loop_value = []
                            elif isinstance(
                                    markup_variable_value[dev_var_name], dict):
                                try:
                                    if '_keys' in var_name:
                                        loop_value = markup_variable_value.get(
                                            dev_var_name, []).keys()
                                    elif '_values' in var_name:
                                        loop_value = markup_variable_value.get(
                                            dev_var_name, []).values()
                                # if no key yet, just put empty list as iterable
                                except KeyError:
                                    loop_value = []
                            elif isinstance(
                                    markup_variable_value[dev_var_name], list):
                                loop_value = markup_variable_value[
                                    dev_var_name]
                            else:
                                # return empty in case none of above matched
                                loop_value = []
                        for dev in loop_value:
                            if dev not in device_list:
                                device_list.append(dev)
            elif device not in device_list:
                device_list.append(device)

        log.debug('device_list: {}'.format(device_list))
        return device_list
Ejemplo n.º 12
0
 def setup(self):
     """Connect to the testbed and load router LSA IDs from golden OSPF data.

     Reads the golden 'show ospf database' JSON snapshot and stores the
     LSA IDs of all type-1 (router) LSAs in ``self.routers``.
     """
     # suppress device console output during connection
     self.parameters["testbed"].connect(log_stdout=False)
     # plain string literal (the original used a pointless f-string);
     # binary mode is fine here: json.load accepts bytes input
     with open("./golden_ops/golden-ospf-database.txt", "rb") as f:
         self.routers = Dq(json.load(f)).contains_key_value(
             "lsa_types", "1").get_values("lsa_id")
Ejemplo n.º 13
0
    def health_dispatcher(self,
                          steps,
                          section,
                          data,
                          testbed,
                          processor,
                          name='',
                          **kwargs):
        """
        Execute health yaml based on Blitz logic. This will call Blitz's
        `dispatcher` to execute all the actions in the health yaml.

        This is a generator used as a context processor: the code before
        `yield` runs as the pre-processor, the wrapped section body runs at
        `yield`, and the code after runs as the post-processor.

        `data` contains all the items under a section in health yaml

        example of `data`:
        [
          {
            'parallel': [
              {
                'api': {
                  'device': 'uut',
                  'function': 'get_platform_cpu_load',
                  'arguments': {
                    'command': 'show processes cpu',
                    'processes': ['BGP I/O']
                  },
                  'save': [
                    {
                      'variable_name': 'cpu'
                    }
                  ]
                }
              },
              {
                'api': {
                  'device': 'uut',
                  (snip)

        `data` is a List, so it is stored as a dict (`data_dict`) for Dq

        Arguments:
            steps (`obj`) : Aetest Steps object
            section (`obj`) : Aetest Section object
            data (`list`) : data of section
            testbed (`obj`) : testbed object
            processor (`obj`) : Aetest processor object
            name (`str`) : name of section in health yaml
                           Default to ``
        Returns:
            None
        """

        # ---------------------
        # pre-context processor
        # ---------------------

        # split `data` into a Dq-queryable dict and the `processor:` tag value
        data_dict, processor_flag = self._check_processor_tag(data=data)
        log.debug('processor_flag: {flag}'.format(flag=processor_flag))

        # check if all devices are connected
        data_dict_dq = Dq(data_dict)
        devices_connected = self._check_all_devices_connected(
            testbed, data_dict_dq)

        # execute pre-processor and receive result in self.pre_processor_result
        self.pre_processor_run, self.pre_processor_result = self._pre_post_processors(
            testbed,
            processor,
            section,
            data,
            name,
            devices_connected,
            processor_flag,
            processor_targets=['pre', 'both'],
            processor_type='pre')

        try:
            # hand control to the wrapped section body
            yield
        except Exception as e:
            # for case section gets Exception
            section.errored(e)

        # ----------------------
        # post-context processor
        # ----------------------

        # check `post_if_pre_execute` and if pre-processor is executed
        if (data_dict_dq.get_values('processor', 0) == 'post_if_pre_execute'
                and not self.pre_processor_run):
            log.info(
                "Post-processor pyATS Health '{name}' was skipped because required Pre-processor was not executed."
                .format(name=name))
        else:
            # re-evaluate device connectivity; it may have changed while the
            # section body ran
            data_dict_dq = Dq(data_dict)
            devices_connected = self._check_all_devices_connected(
                testbed, data_dict_dq)

            # execute post-processor
            self._pre_post_processors(
                testbed,
                processor,
                section,
                data,
                name,
                devices_connected,
                processor_flag,
                processor_targets=['post', 'post_if_pre_execute', 'both'],
                processor_type='post',
                pre_processor_result=self.pre_processor_result)
Ejemplo n.º 14
0
    def _pre_post_processors(self,
                             testbed,
                             processor,
                             section,
                             data,
                             name,
                             devices_connected,
                             processor_flag,
                             processor_targets,
                             processor_type,
                             pre_processor_result=Passed):
        """
        Execute pre/post processors and return whether the pre-processor ran
        and the processor result.

        Arguments:
            testbed (`obj`): testbed object
            processor (`obj`): Aetest Processor object
            section (`obj`): Aetest Section object
            data (`list`) : data of section
            name (`str`) : name of section in health yaml
            devices_connected (`bool`) : if devices are connected, or not
            processor_flag (`str`) : `pre`, `post`, `both` and etc
            processor_targets (`list`) : list of `processor_flag` values which
                                         will be run as pre/post processors
            processor_type (`str`) : processor type `pre` or `post`
            pre_processor_result (`obj`) : result object. Default to `Passed`

        Returns:
            pre_processor_run (`bool`) : if pre processor runs or not
            pre_processor_result (`obj`) : return processor result (Result obj)
        """
        # selected health items keyed by argument name, then selection index
        new_data_dict = {}
        selected_options = 0
        list_of_args = []
        args_flag = False

        # check if health arguments are given to pyats command
        for arg_name in ['health_sections', 'health_uids', 'health_groups']:
            if getattr(runtime.args, arg_name):
                args_flag = True

        for arg_name in ['health_sections', 'health_uids', 'health_groups']:

            log.debug('Checking {an}'.format(an=arg_name))
            selected = None
            selected_options = 0
            for item in data:
                # selection keywords given via pyats command-line argument

                arg_search_keyword = getattr(runtime.args, arg_name)
                if arg_search_keyword:
                    args_flag = True
                    selected = self._select_health(
                        section, item, arg_search_keyword.split(' '), arg_name)
                    selected_options += 1
                    list_of_args.append(arg_name)
                if selected:
                    new_data_dict.setdefault(arg_name, {}).setdefault(
                        selected_options, selected)

                if not args_flag:
                    # selection keywords from datafile (health yaml) instead
                    search_keywords = []
                    search_keywords = getattr(
                        runtime.args,
                        arg_name) or Dq(item).get_values(arg_name)
                    if not isinstance(search_keywords, list):
                        search_keywords = [search_keywords]
                    if search_keywords == []:
                        search_keywords = None

                    log.debug(
                        "arg_name, search_keywords: {sel_name}, {sel}".format(
                            sel_name=arg_name, sel=search_keywords))
                    if search_keywords:
                        selected_options += 1
                        list_of_args.append(arg_name)
                        selected = self._select_health(section, item,
                                                       search_keywords,
                                                       arg_name)
                    if selected:
                        new_data_dict.setdefault(arg_name, {}).setdefault(
                            selected_options, selected)

        # check for the case which multiple `arg_name`s given and check the same
        # among the `arg_name`s. if same between `arg_name`s, data will be overwritten
        # by one of new_data_dict value to execute selected ones
        new_data_flag = False
        if new_data_dict:
            value = ''
            log.debug(
                'num of health args: {n}'.format(n=len(set(list_of_args))))
            log.debug('num of new_data_dict: {n}'.format(n=len(new_data_dict)))
            if len(set(list_of_args)) == len(new_data_dict):
                for key, value_ in new_data_dict.items():
                    if value == value_:
                        new_data_flag = True
                    else:
                        new_data_flag = False
                        if not value:
                            value = new_data_dict[key]
                            if len(new_data_dict) == 1:
                                new_data_flag = True

        log.debug('new_data_flag: {f}'.format(f=new_data_flag))
        log.debug('new_data_dict: {ndd}'.format(ndd=new_data_dict))
        if new_data_flag:
            data2 = []
            # override data because meeting criteria by `arg_name`s
            for key in new_data_dict:
                for idx in new_data_dict[key]:
                    data2.append(new_data_dict[key][idx].pop())
                data = data2
        else:
            # remove report based on conditions
            # - no found data based on search
            # - devices are not connected
            # - number of given arguments and found data are not equal
            # - number of given arguments is not 0
            if (not new_data_dict or not devices_connected
                    or len(set(list_of_args)) != len(new_data_dict)) and len(
                        set(list_of_args)) != 0:
                processor.reporter.remove_section(id_list=processor.uid.list)
            if (not new_data_dict or len(set(list_of_args)) !=
                    len(new_data_dict)) and len(set(list_of_args)) != 0:
                data = []
            # if devices are not connected, delete processor from reporter
            if not devices_connected and data:
                processor.reporter.remove_section(id_list=processor.uid.list)

        # processor start message
        log.debug('{type}-processor {name} started'.format(
            name=name, type=processor_type.capitalize()))
        pre_processor_run = True

        # check `processor` to control
        if processor_flag in processor_targets:
            # if any device is not connected, processor will be skipped
            if devices_connected:
                # instantiate Steps() to reset step number
                steps = Steps()
                result = self.dispatcher(steps, testbed, section, data, name)

                log.debug('Blitz section return:\n{result}'.format(
                    result=format_output(result)))
                # check section result
                log.debug('section result: {section_result}'.format(
                    section_result=section.result.name))
                log.debug('steps result: {steps_result}'.format(
                    steps_result=steps.result.name))
                if processor_type == 'pre' and steps.result != Passed and steps.result != Passx:
                    log.info(
                        "Pre-processor pyATS Health {name} was failed, but continue section and Post-processor"
                        .format(name=name))
                    # save pre-processor result
                    pre_processor_result = steps.result
                    return pre_processor_run, pre_processor_result
                elif processor_type == 'post':
                    # reflect result to section by rolling up the steps result
                    # with the saved pre-processor result
                    # NOTE(review): `steps.result` appears twice in this
                    # roll-up; looks redundant but kept as-is -- confirm intent
                    getattr(
                        section,
                        str(steps.result + steps.result +
                            self.pre_processor_result))()
                    return pre_processor_run, pre_processor_result

            else:
                if processor_type == 'pre':
                    pre_processor_run = False
                    # processor is skipped, but report Passed to move forward
                    # for this case
                    log.info(
                        "Pre-processor pyATS Health '{name}' is skipped because devices are not connected."
                        .format(name=name))
                    return pre_processor_run, pre_processor_result
                elif processor_type == 'post':
                    # for the case only pre-processors runs
                    # NOTE(review): condition checks equality, yet the log
                    # message says results are different -- confirm intent
                    if section.result == pre_processor_result:
                        log.info(
                            'Only Pre-processor runs. Section result and Pre-processor result are different. Reflecting Post-processor result to Section.'
                        )
                        getattr(section,
                                str(section.result + pre_processor_result))()
                    log.info(
                        "Post-processor pyATS Health '{name}' was skipped because devices are not connected."
                        .format(name=name))
                    return pre_processor_run, pre_processor_result
        else:
            log.info('Skipped because {name} is not {type}-processor'.format(
                name=name, type=processor_type.capitalize()))
            return pre_processor_run, pre_processor_result

        return pre_processor_run, pre_processor_result
Ejemplo n.º 15
0
def get_route_destination_address(device,
                                  extensive=None,
                                  prefix='inet.0',
                                  protocol='Direct',
                                  interface='ge-0/0/0.0'):
    """Return the first route destination address matching the given criteria.

    Args:
        device (obj): device object
        extensive (bool, optional): use 'show route extensive'. Defaults to None.
        prefix (str, optional): route table name prefix to match.
                                Defaults to 'inet.0'.
        protocol (str, optional): route protocol prefix to match.
                                  Defaults to 'Direct'.
        interface (str, optional): route interface prefix to match.
                                   Defaults to 'ge-0/0/0.0'.

    Returns:
        str: the destination address, or None when nothing matches
    """

    cmd = 'show route extensive' if extensive else 'show route'
    try:
        parsed = device.parse(cmd)
    except SchemaEmptyParserError:
        return None

    # Parsed output shape (abbreviated):
    # {'route-table': [{'table-name': 'inet.0',
    #                   'rt': [{'rt-destination': '0.0.0.0/0',
    #                           'rt-entry': {'nh': [{'to': '172.16.1.254',
    #                                                'via': 'ge-0/0/0.0'}],
    #                                        'protocol-name': 'Direct'}}]}]}
    for route_table in Dq(parsed).get_values("route-table"):
        # skip tables whose name does not begin with the requested prefix
        if prefix:
            table_name = Dq(route_table).get_values('table-name', 0)
            if not table_name.lower().startswith(prefix.lower()):
                continue

        for route_entry in Dq(route_table).get_values('rt'):
            # filter on protocol name prefix (case-insensitive)
            if protocol:
                proto_name = Dq(route_entry).get_values('protocol-name', 0)
                if not proto_name.lower().startswith(protocol.lower()):
                    continue

            # filter on next-hop interface: 'via' first, falling back to
            # 'nh-local-interface' for locally terminated routes
            if interface:
                nh_interface = (
                    Dq(route_entry).get_values('via', 0)
                    or Dq(route_entry).get_values('nh-local-interface', 0))
                if not nh_interface.lower().startswith(interface.lower()):
                    continue

            # first entry surviving all filters wins
            return Dq(route_entry).get_values('rt-destination', 0)

    return None
Ejemplo n.º 16
0
    def health_dispatcher(self,
                          steps,
                          section,
                          data,
                          testbed,
                          processor,
                          reconnect,
                          name='',
                          **kwargs):
        """
        Execute health yaml based on Blitz logic. This will call Blitz's
        `dispatcher` to execute all the actions in the health yaml.

        This is a generator used as a context processor: the code before
        `yield` runs as the pre-processor, the wrapped section body runs at
        `yield`, and the code after runs as the post-processor.

        `data` contains all the items under a section in health yaml

        example of `data`:
        [
          {
            'parallel': [
              {
                'api': {
                  'device': 'uut',
                  'function': 'get_platform_cpu_load',
                  'arguments': {
                    'command': 'show processes cpu',
                    'processes': ['BGP I/O']
                  },
                  'save': [
                    {
                      'variable_name': 'cpu'
                    }
                  ]
                }
              },
              {
                'api': {
                  'device': 'uut',
                  (snip)

        `data` is a List, so it is stored as a dict (`data_dict`) for Dq

        Arguments:
            steps (`obj`) : Aetest Steps object
            section (`obj`) : Aetest Section object
            data (`list`) : data of section
            testbed (`obj`) : testbed object
            processor (`obj`) : Aetest processor object
            name (`str`) : name of section in health yaml
                           Default to ``
            reconnect (`dict` or None) : parameters for reconnect
                                         ex.)
                                         {
                                             'max_time': 900, # maximum time to reconnect
                                             'interval': 60,  # sleep before retry
                                         }
        Returns:
            None
        """

        # convert testbed from pyATS to Genie if needed
        # (this block was accidentally duplicated twice in a row; converting
        # once is sufficient)
        if 'genie' not in testbed.__module__:
            testbed = Converter.convert_tb(runtime.testbed)

        if 'health_settings' in kwargs:
            health_settings = kwargs['health_settings']
        else:
            health_settings = {}

        # save `health_settings` as testscript variable
        save_variable(self, section, 'testscript.health_settings',
                      health_settings)
        # handling for `health_settings.devices`. TODO; AttrDict support
        if 'devices' in health_settings:
            save_variable(self, section, 'testscript.health_settings.devices',
                          health_settings['devices'])

        # ---------------------
        # pre-context processor
        # ---------------------

        # set result Passed at beginning of section in pre processor
        # because section sometimes doesn't have any item like commonCleanup
        # if the section doesn't have any, section.result is None and rolled-up
        # only with pyATS Health Check result. But section should have Passed
        # at first place
        if section.__result__ is None:
            section.result = Passed

        # execute pre-processor and receive result in self.pre_processor_result
        self.pre_processor_run, self.pre_processor_result = self._pre_post_processors(
            testbed,
            processor,
            section,
            data,
            name,
            reconnect,
            processor_targets=['pre', 'both'],
            processor_type='pre',
            health_settings=health_settings)

        try:
            # hand control to the wrapped section body
            yield
        except Exception as e:
            # make section Errored when exception happens
            section.result = Errored.clone('Caught exception in %s' %
                                           str(section),
                                           data={'traceback': e})

        # ----------------------
        # post-context processor
        # ----------------------

        # skip the post-processor when any action is marked
        # `post_if_pre_execute` but its required pre-processor did not run
        post_if_pre_execute_flag = not any(
            Dq(each_data).get_values('processor', 0) == 'post_if_pre_execute'
            and not self.pre_processor_run
            for each_data in self._get_actions(data))

        if not post_if_pre_execute_flag:
            log.info(
                "Post-processor pyATS Health '{name}' was skipped because required Pre-processor was not executed."
                .format(name=name))

        else:
            if 'genie' not in testbed.__module__:
                # convert testbed from pyATS to Genie
                # need to convert to bring latest status from runtime again
                # for the case devices are connected after pre-processor
                testbed = Converter.convert_tb(runtime.testbed)

            # execute post-processor
            _, post_processor_result = self._pre_post_processors(
                testbed,
                processor,
                section,
                data,
                name,
                reconnect,
                processor_targets=['post', 'post_if_pre_execute', 'both'],
                processor_type='post',
                pre_processor_result=self.pre_processor_result,
                health_settings=health_settings)

            # raise result
            getattr(processor, post_processor_result.name)()
Ejemplo n.º 17
0
def get_route_uptime(device, route, protocol, push=None, output=None):
    """
    Get uptime of active route in routing table

    Args:
        device (`obj`): Device object
        route (`str`): Route information such as `192.168.1.0/24`
        protocol (`str`): Protocol name such as `Direct`, `Static` and etc
        push (`bool`): flag to check route only with `Push` in route entry
        output (`str`): output of show route command

    Returns:
        uptime (`int`): uptime in seconds; 0 when no matching active route
                        is found (or `push` is set and the first matching
                        route has no Push label); None on empty parser output
    """
    cmd = 'show route {route}'.format(route=route)
    try:
        if output:
            out = device.parse(cmd, output=output)
        else:
            out = device.parse(cmd)
    except SchemaEmptyParserError:
        return None

    # example of out (abbreviated)
    # {
    #   "route-information": {
    #     "route-table": [
    #       {
    #         "rt": [
    #           {
    #             "rt-destination": "30.0.0.0/24",
    #             "rt-entry": {
    #               "active-tag": "*",
    #               "age": {
    #                 "#text": "00:17:42"
    #               },
    #               "nh": [
    #                 {
    #                   "mpls-label": "Push 300768",
    #                   "to": "10.0.1.2",
    #                   "via": "xe-0/1/0.11"
    #                 }
    #               ],
    #               "protocol-name": "BGP"
    #             }
    #           }
    #         ],

    for rt in out.q.get_values('rt'):
        rt_entry = rt.get('rt-entry', {})
        # use .get(): inactive routes may omit 'active-tag' entirely, which
        # previously raised KeyError instead of being skipped
        if (rt_entry.get('protocol-name') == protocol
                and rt_entry.get('active-tag') == '*'):
            if push:
                # only count the route if its next hop carries a Push label
                if 'Push' not in Dq(rt).get_values('mpls-label', 0):
                    return 0
            return device.api.time_to_int(rt_entry['age']['#text'])

    return 0
Ejemplo n.º 18
0
def delete_unprotected_files(device,
                             directory,
                             protected,
                             files_to_delete=None,
                             dir_output=None,
                             allow_failure=False):
    """delete all files not matching regex in the protected list
        Args:
            device ('obj'): Device object
            directory ('str'): working directory to perform the operation
            protected ('list'): list of file patterns that won't be deleted. If it begins
                                and ends with (), it will be considered as a regex
            files_to_delete ('list'): list of files that should be deleted unless
                                      they are protected
            dir_output ('str'): output of dir command, if not provided execute the cmd on device to get the output
            allow_failure (bool, optional): Allow the deletion of a file to silently fail. Defaults to False.
        Returns:
            None
        Raises:
            TypeError: if `protected` is not a str, list or set
            Exception: if any deletion fails and allow_failure is False
            """

    protected_set = set()
    fu_device = FileUtils.from_device(device)
    file_set = set(
        Dq(device.parse('dir {}'.format(directory),
                        output=dir_output)).get_values('files'))

    if isinstance(protected, str):
        protected = [protected]
    elif not isinstance(protected, (list, set)):
        # bug fix: the '{p}' placeholder was never substituted before
        raise TypeError("'{p}' must be a list".format(p=protected))

    for pattern in protected:
        # it's a regex!
        if pattern.startswith('(') and pattern.endswith(')'):
            regexp = re.compile(pattern)
            protected_set.update(set(filter(regexp.match, file_set)))

        # just file names, exact match only
        elif pattern in file_set:
            protected_set.add(pattern)

    # if files_to_delete is given, update protected files with the diff of
    # file_set - files_to_delete so that we only delete files that are in
    # files_to_delete and NOT protected; in other words we remove the
    # protected files from files_to_delete
    if files_to_delete:
        protected_set.update(file_set - set(files_to_delete))

    not_protected = file_set - protected_set
    error_messages = []

    if not_protected:
        log.info("The following files will be deleted:\n{}".format(
            '\n'.join(not_protected)))
        # bug fix: guard against files_to_delete=None --
        # set.intersection(None) raises TypeError
        dont_delete_list = protected_set.intersection(files_to_delete or [])
        if dont_delete_list:
            log.info(
                "The following files will not be deleted because they are protected:\n{}"
                .format('\n'.join(dont_delete_list)))
        for file in not_protected:
            # it's a directory, dont delete
            if file.endswith('/'):
                continue
            log.info('Deleting the unprotected file "{}"'.format(file))
            try:
                fu_device.deletefile(file, device=device)
            except Exception as e:
                if allow_failure:
                    log.info(
                        'Failed to delete file "{}" but ignoring and moving '
                        'on due to "allow_failure=True".'.format(file))
                    continue

                error_messages.append('Failed to delete file "{}" due '
                                      'to :{}'.format(file, str(e)))
        if error_messages:
            raise Exception('\n'.join(error_messages))
    else:
        log.info(
            "No files will be deleted, the following files are protected:\n{}".
            format('\n'.join(protected_set)))
Ejemplo n.º 19
0
def verify_interfaces_input_output_policer_found(device,
                                                 interface,
                                                 logical_interface,
                                                 max_time=90,
                                                 check_interval=10):
    """ Verify input and output policer value for interface

        Args:
            device ('obj'): Device object
            interface('str'): Interface name
            logical_interface ('str'): Logical interface name
            max_time ('int'): Maximum time to keep checking
            check_interval ('int'): How often to check
        Returns:
            Boolean: True if both input and output policers are found,
                     False if not found within max_time

        Raises:
            None
    """
    timeout = Timeout(max_time, check_interval)
    # Dictionary:
    # "interface-policer-information": {
    #     "physical-interface": [
    #         {
    #             "admin-status": "up",
    #             "logical-interface": [
    #                 {
    #                     "admin-status": "up",
    #                     "name": "ge-0/0/2.0",
    #                     "oper-status": "up",
    #                     "policer-information": [
    #                         {
    #                             "policer-family": "inet",
    #                             "policer-input": "GE_1M-ge-0/0/2.0-log_int-i",
    #                             "policer-output": "GE_1M-ge-0/0/2.0-log_int-o"
    while timeout.iterate():
        try:
            out = device.parse('show interfaces policers {interface}'.format(
                interface=interface.split('.')[0]))
        except SchemaEmptyParserError:
            # bug fix: previously returned None here, which aborted the
            # max_time/check_interval retry loop (and broke the documented
            # Boolean return); the policer may simply not be programmed yet,
            # so retry until timeout instead
            timeout.sleep()
            continue

        logical_interface_list = out.q.contains(
            'policer-information|{logical_interface}'.format(
                logical_interface=logical_interface),
            regex=True).get_values('logical-interface')

        for logical_intf_dict in logical_interface_list:
            name = logical_intf_dict.get('name', None)
            if name != logical_interface:
                continue
            policer_information_list = logical_intf_dict.get(
                'policer-information', [])
            if not policer_information_list:
                continue

            policer_input = Dq(
                policer_information_list[0]).get_values('policer-input')
            policer_output = Dq(
                policer_information_list[0]).get_values('policer-output')

            # both directions must be configured
            if not policer_input or not policer_output:
                continue

            return True

        timeout.sleep()
    return False
Ejemplo n.º 20
0
def free_up_disk_space(device,
                       destination,
                       required_size,
                       skip_deletion,
                       protected_files,
                       compact=False,
                       min_free_space_percent=None,
                       dir_output=None):
    '''Delete files to create space on device except protected files

    Workflow: (1) estimate required size, (2) check current free space,
    (3) if insufficient and deletion is allowed, delete unprotected files
    largest-first (running images last) until enough space is freed.

    Args:
        device ('Obj') : Device object
        destination ('str') : Destination directory, i.e bootflash:/
        required_size ('int') : Check if enough space to fit given size in bytes.
                                If this number is negative it will be assumed
                                the required size is not available.
        skip_deletion ('bool') : Only performs checks, no deletion
        protected_files ('list') : List of file patterns that wont be deleted
        compact ('bool'): Compact option for n9k, used for size estimation,
                          default False
        min_free_space_percent ('int'): Minimum acceptable free disk space %.
                                        Optional,
        dir_output ('str'): Output of 'dir' command
                            if not provided, executes the cmd on device
    Returns:
         True if there is enough space after the operation, False otherwise
    '''
    # For n9k compact copy:
    # observationally, depending on release, the compacted image is 36-48% the
    # size of the original image. For now we'll use 60% as a conservative estimate.
    if compact:
        required_size *= .6

    # Parse directory output to check
    # Reuse the caller-supplied output if given to avoid an extra device exec
    dir_out = dir_output or device.execute('dir {}'.format(destination))

    # Get available free space on device
    available_space = device.api.get_available_space(directory=destination,
                                                     output=dir_out)

    log.debug('available_space: {avs}'.format(avs=available_space))

    # Check if available space is sufficient
    if min_free_space_percent:

        # Get total space
        total_space = device.api.get_total_space(directory=destination,
                                                 output=dir_out)

        # Get current available space in %
        avail_percent = available_space / total_space * 100

        log.info("There is {avail} % of free space on the disk, which is "
                 "{compare} than the target of {target} %.".\
                 format(avail=round(avail_percent, 2), compare='less' if \
                        avail_percent < min_free_space_percent else 'greater',
                        target=min_free_space_percent))

        # get bigger of required_space or min_free_space_percent
        # (whichever constraint is stricter wins)
        required_size = round(
            max(required_size, min_free_space_percent * .01 * total_space))

    # If there's not enough space, delete non-protected files
    if device.api.verify_enough_disk_space(required_size=required_size,
                                           directory=destination,
                                           dir_output=dir_out):
        if required_size < 0:
            log.info("Required disk space is unknown, will not delete files")
        else:
            log.info("Verified there is enough space on the device. "
                     "No files are deleted")
        return True

    if skip_deletion:
        log.error(
            "'skip_deletion' is set to True and there isn't enough space "
            "on the device, files cannot be deleted.")
        return False
    else:
        log.info("Deleting unprotected files to free up some space")

        running_images = []
        log.info("Sending 'show version' to learn the current running images")
        # get_running_image may return a single path or a list of paths
        running_image = device.api.get_running_image()
        if isinstance(running_image, list):
            for image in running_image:
                running_images.append(os.path.basename(image))
        else:
            running_images.append(os.path.basename(running_image))

        # convert to set for O(1) lookup
        protected_files = set(protected_files)
        parsed_dir_out = device.parse('dir {}'.format(destination),
                                      output=dir_out)
        dq = Dq(parsed_dir_out)

        # turn parsed dir output to a list of (file, size) tuples for sorting
        # Large files are given priority when deleting
        file_list = []
        running_image_list = []
        for file in dq.get_values('files'):
            # separate running image from other files
            # (substring match: a dir entry matching any running image name)
            if any(file in image for image in running_images):
                running_image_list.append(
                    (file, int(dq.contains(file).get_values('size')[0])))
            else:
                file_list.append(
                    (file, int(dq.contains(file).get_values('size')[0])))

        # sort non-image files by size, largest first
        file_list.sort(key=lambda x: x[1], reverse=True)

        # add running images to the end so they are deleted as a last resort
        file_list.extend(running_image_list)
        log.debug('file_list: {fl}'.format(fl=file_list))

        # delete one file at a time, re-checking free space after each pass
        # so we stop as soon as enough space has been reclaimed
        for file, size in file_list:
            device.api.delete_unprotected_files(directory=destination,
                                                protected=protected_files,
                                                files_to_delete=[file],
                                                dir_output=dir_out)

            if device.api.verify_enough_disk_space(required_size, destination):
                log.info("Verified there is enough space on the device after "
                         "deleting unprotected files.")
                return True

        # Exhausted list of files - still not enough space
        log.error('There is still not enough space on the device after '
                  'deleting unprotected files.')
        return False
Ejemplo n.º 21
0
def get_interface_logical_output_bps(device,
                                     logical_interface,
                                     interface=None,
                                     extensive=False,
                                     output_dict=None):
    """Get logical output bps of a logical interface

    Args:
        device ('obj'): device object
        logical_interface ('str'): Logical interface to check output bps
        interface ('str'): interface name to pass in show command
        extensive ('bool'): Use extensive in show command
        output_dict ('dict'): Pre-parsed output; when provided no command
                              is executed on the device

    Returns:
        output-bps value or None

    Raises:
        None
    """
    # Fixes: removed a redundant outer try/except that duplicated the inner
    # SchemaEmptyParserError handling, and an unused `result` variable.
    if output_dict:
        out = output_dict
    else:
        try:
            if interface:
                cmd = 'show interfaces {interface}'.format(
                    interface=interface)
            else:
                cmd = 'show interfaces'
            if extensive:
                cmd = '{cmd} extensive'.format(cmd=cmd)
            out = device.parse(cmd)
        except SchemaEmptyParserError:
            # Device returned no parsable output
            return None

    # Narrow the parsed output down to entries mentioning either the target
    # logical interface or any output-bps counter
    physical_intf_check = out.q.contains(
        '{interface}|.*output-bps.*'.format(interface=logical_interface),
        regex=True)

    # Rebuild a Dq over the reconstructed dict to handle list within list
    logical_interface_check = Dq(physical_intf_check.reconstruct())

    logical_intf_list = logical_interface_check.contains(
        'name|output-bps', regex=True).get_values('logical-interface')

    for l_i_dict in logical_intf_list:
        name = l_i_dict.get('name', None)
        if not name or name != logical_interface:
            continue

        transit_traffic_statistic = l_i_dict.get('transit-traffic-statistics',
                                                 0)

        if not transit_traffic_statistic:
            return None

        output_bps = transit_traffic_statistic.get('output-bps', 0)
        if not output_bps:
            return None

        return output_bps

    return None
Ejemplo n.º 22
0
    def pre_task(self, task):
        """Register pyATS Health Check sections as context processors on a task.

        Loads the health yaml given via ``--health-file``, archives a copy to
        the runtime directory, instantiates the processor class named in the
        yaml's ``pyats_health_processors.source`` section, and appends one
        context processor per ``test_sections`` entry to the task.

        Raises:
            Exception: when the health yaml lacks 'test_sections' or
                       'pyats_health_processors' sections.
        """
        # after loading health file, add all the sections/actions in health yaml
        # will be added as global context processors to pyATS job.
        # In `health.py`, `health_dispatcher` is the code of context processor.
        # It's same concept with generator-style context-processors in pyATS.
        # the code before `yield` is pre-processor, after `yield`, it's post-processor
        #
        # reference in pyATS doc : https://pubhub.devnetcloud.com/media/pyats/docs/aetest/processors.html#context-processors

        # Skip if no testbed or no health_file
        # NOTE(review): module-level `runtime` is checked here but
        # `self.runtime` is used below — looks intentional but confirm they
        # refer to the same runtime object
        if not runtime.testbed:
            if self.runtime.args.health_file:
                # show message when testbed yaml only is missed
                logger.info('testbed yaml was not given, so pyATS health will not run')
            return

        # skip if no health_file
        if not self.runtime.args.health_file:
            return

        logger.info('Pre-Task %s: pyATS Health' % task.taskid)

        # convert from pyATS testbed to Genie testbed
        tb = testbed.load(runtime.testbed)

        # build a trigger-datafile loader against the Genie testbed so the
        # health yaml's markup can be resolved while loading
        loader = TriggerdatafileLoader(testbed=tb)
        with open(self.runtime.args.health_file) as f:
            health_loaded = loader.load(f.read())

        # save `pyats_health.yaml` to runtime.directory for archive
        with open(
                "{rundir}/pyats_health.yaml".format(
                    rundir=self.runtime.directory), 'w') as f:
            yaml.dump(health_loaded, f)

        # get `source` for pyATS Health processors and instantiate class
        source = health_loaded.get('pyats_health_processors',
                                   {}).get('source', {})
        if source:
            # get class name of testcase in health yaml
            pkg_name = source.get('pkg', '')
            class_name = source.get('class', '')
            # dynamically import the module and resolve the class from
            # the dotted 'pkg.class' path declared in the yaml
            class_path_list = '.'.join([pkg_name, class_name]).split('.')
            module = importlib.import_module('.'.join(class_path_list[:-1]))
            class_ = getattr(module, class_path_list[-1])
            # instantiate Health class which inherited from Blitz class
            # `health_dispacher` function from Health class will be used as processor
            health = class_()

            # get section names for pyATS Health processors
            section_names = Dq(health_loaded).get_values('test_sections')
            if section_names:
                processors = task.kwargs.setdefault('processors', {})

                # loop by health items (sections)
                for section in section_names:
                    for section_name, section_data in section.items():

                        # add processors to pyATS
                        processor_decorator = ProcessorDecorator()
                        processor_method = processor_decorator.context(
                            func=health.health_dispatcher,
                            name='pyATS Health Check {section_name}'.format(
                                section_name=section_name))
                        # bind report flag and the section's name/data so the
                        # dispatcher receives them when pyATS invokes it
                        processor = functools.partial(
                            processor_method,
                            # enable processor report
                            report = True,
                            # params for health dispatcher
                            parameters={
                                'name': section_name,
                                'data': section_data
                            }
                        )
                        processors.setdefault('context',
                                              []).append(processor)

            else:
                # Block testcase when error is found
                raise Exception("Couldn't find any 'test_sections'.")
        else:
            # Block testcase when error is found
            raise Exception(
                "Couldn't find 'pyats_health_processors' section in health.yaml."
            )
Ejemplo n.º 23
0
def verify_ospfv3_interface_in_database(device,
                                       expected_interface,
                                       expected_interface_type=None,
                                       expected_metric=None,
                                       adv_router=False,
                                       expect_output=True,
                                       max_time=60,
                                       check_interval=10):
    """ Verifies ospf interface exists with criteria

        Args:
            device ('obj'): device to use
            expected_interface ('str'): Interface to use
            expected_interface_type ('str'): Interface type (optional)
            expected_metric ('str'): Metric of Interface (optional; only
                                     checked when provided)
            adv_router ('bool'): Whether to look for address in adversiting router
            expect_output ('str'): Flag, either expecting output or no output
            max_time ('int'): Maximum time to keep checking
            check_interval ('int'): How often to check

        Returns:
            Boolean

        Raises:
            N/A
    """
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        try:
            out = device.parse('show ospf3 database extensive')
        except SchemaEmptyParserError:
            timeout.sleep()
            continue

        #'ospf3-database': [{
        #    'lsa-type':
        #        'Router',
        #        'lsa-id':
        #            '0.0.0.0',
        #        'advertising-router':
        #            '1.1.1.1'
        #            }
        #        ]

        for ospf3_database in Dq(out).get_values('ospf3-database'):

            if not adv_router:
                if expected_interface_type is not None:
                    #'lsa-type': 'Router'
                    lsa_type = ospf3_database.get('lsa-type', None)
                    lsa_type = lsa_type.lower() if lsa_type else lsa_type
                    if expected_interface_type.lower() != lsa_type:
                        continue

                #'type-value': '2'
                # Only check the metric when the caller supplied one;
                # previously str(None) could never match and every entry
                # was skipped when expected_metric was left at its default
                if expected_metric is not None and str(expected_metric) != \
                        ospf3_database.get('ospf3-external-lsa', {}).get('type-value', None):
                    continue

                #'ospf3-prefix': ['2001::1/128']
                # 'ospf3-prefix' may be a list of prefixes (per the example
                # above) or a single value; handle both
                current_prefix = ospf3_database.get('ospf3-external-lsa',
                                                    {}).get('ospf3-prefix', None)
                if isinstance(current_prefix, list):
                    if expected_interface not in current_prefix:
                        continue
                elif expected_interface != current_prefix:
                    continue
            else:
                prefix_ips = ospf3_database.get('ospf3-intra-area-prefix-lsa',
                                                {}).get('ospf3-prefix', {})
                if expected_interface not in prefix_ips:
                    continue

            return True
        timeout.sleep()
    return False
Ejemplo n.º 24
0
def verify_ping(device,
                address=None,
                ttl=None,
                tos=None,
                size=None,
                wait=None,
                mpls_rsvp=None,
                loss_rate=0,
                ping_size=None,
                count=None,
                interface=None,
                source=None,
                rapid=False,
                do_not_fragment=False,
                max_time=30,
                check_interval=10):
    """ Verify ping loss rate on ip address provided

        Args:
            device ('obj'): Device object
            address ('str'): Address value
            size {'str'}: Size value for ping command; when given, the
                          reported data-bytes must also match
            tos {'str'}: tos value for ping command
            ping_size {'str'}: data bytes expected
            ttl ('int'): ttl value passed in command
            wait ('int'): wait value passed in command
            mpls_rsvp ('str'): MPLS RSVP value
            loss_rate ('int'): Expected loss rate value
            count ('int'): Count value for ping command
            interface ('str'): source interface
            source ('str'): Source IP address, default: None
            rapid ('bool'): Appears in command or not, default: False
            do_not_fragment ('bool'): Appears in command or not, default: False
            max_time (`int`): Max time, default: 30
            check_interval (`int`): Check interval, default: 10
        Returns:
            Boolean
        Raises:
            None
    """
    timeout = Timeout(max_time, check_interval)

    if address or mpls_rsvp:
        cmd = ['ping {address}'.format(address=address)]
        if source:
            cmd.append('source {source}'.format(source=source))
        if size:
            cmd.append('size {size}'.format(size=size))
        if count:
            cmd.append('count {count}'.format(count=count))
        if interface:
            cmd.append('interface {interface}'.format(interface=interface))
        if tos:
            cmd.append('tos {tos}'.format(tos=tos))
        if ttl:
            cmd.append('ttl {ttl}'.format(ttl=ttl))
        if wait:
            cmd.append('wait {wait}'.format(wait=wait))
        if rapid:
            cmd.append('rapid')
        if do_not_fragment:
            cmd.append('do-not-fragment')
        # mpls_rsvp only applies when no address was given; it replaces the
        # whole command (the options above do not apply to mpls rsvp ping)
        if not address:
            cmd = ['ping mpls rsvp {rsvp}'.format(rsvp=mpls_rsvp)]
    else:
        log.info('Need to pass address or mpls_rsvp as argument')
        return False

    cmd = ' '.join(cmd)

    while timeout.iterate():
        try:
            # junos ping command can accept various paramegers order like below
            # ping 192.168.1.1 count 1 size 1514
            # ping 192.168.1.1 size 1514 count 1
            # so, store ping output as string and call parser with just `ping {addrss}`
            # with passing the ping output
            output = device.execute(cmd)
            out = device.parse('ping {address}'.format(address=address),
                               output=output)
        except SchemaEmptyParserError:
            timeout.sleep()
            continue
        # Example dictionary structure:
        #     {
        #         "ping": {
        #             "address": "10.189.5.94",
        #             "data-bytes": 56,
        #             "result": [
        #                 {
        #                     "bytes": 64,
        #                     "from": "10.189.5.94",
        #                     "icmp-seq": 0,
        #                     "time": "2.261",
        #                     "ttl": 62
        #                 },
        #             ],
        #             "source": "10.189.5.94",
        #             "statistics": {
        #                 "loss-rate": 0,
        #                 "received": 1,
        #                 "round-trip": {
        #                     "avg": "2.175",
        #                     "max": "2.399",
        #                     "min": "1.823",
        #                     "stddev": "0.191"
        #                 },
        #                 "send": 1
        #             }
        #         }
        #     }
        loss_rate_found = Dq(out).get_values("loss-rate", 0)

        if loss_rate_found == loss_rate:
            # Bug fix: previously a matching loss-rate returned True even
            # when `size` was given but data-bytes did not match. Now the
            # data-bytes check is enforced whenever `size` is provided.
            if size and Dq(out).get_values("data-bytes", 0) != int(size):
                timeout.sleep()
                continue
            return True
        timeout.sleep()
    return False
Ejemplo n.º 25
0
    def health_dispatcher(self,
                          steps,
                          section,
                          data,
                          testbed,
                          processor,
                          reconnect,
                          name='',
                          **kwargs):
        """
        execute health yaml based on Blitz logic. This will be calling Blitz's
        `dispatcher` to execute all the actions in health yaml

        `data` contains all the items under a section in health yaml

        example of `data`:
        [
          {
            'parallel': [
              {
                'api': {
                  'device': 'uut',
                  'function': 'get_platform_cpu_load',
                  'arguments': {
                    'command': 'show processes cpu',
                    'processes': ['BGP I/O']
                  },
                  'save': [
                    {
                      'variable_name': 'cpu'
                    }
                  ]
                }
              },
              {
                'api': {
                  'device': 'uut',
                  (snip)

        `data` is List, so store the `data` as dict to `data_dict` for Dq

        Arguments:
            steps (`obj`) : Aetest Steps object
            section (`obj`) : Aetest Section object
            data (`list`) : data of section
            testbed (`obj`) : testbed object
            processor (`obj`) : Aetest processor object
            name (`str`) : name of section in health yaml
                           Default to ``
            reconnect (`dict` or None) : parameters for reconnect
                                         ex.)
                                         {
                                             'max_time': 900, # maximum time to reconnect
                                             'interval': 60,  # sleep before retry
                                         }
        Returns:
            None
        """
        # Fix: an exact duplicate of this conversion block was removed;
        # after the first conversion the second check was always a no-op.
        if 'genie' not in testbed.__module__:
            # convert testbed from pyATS to Genie
            testbed = Converter.convert_tb(runtime.testbed)

        # ---------------------
        # pre-context processor
        # ---------------------

        # get connected devices list
        devices_connected = self._check_all_devices_connected(
            testbed, data, reconnect)

        # execute pre-processor and received result in self.pre_processor_result
        self.pre_processor_run, self.pre_processor_result = self._pre_post_processors(
            testbed,
            processor,
            section,
            data,
            name,
            devices_connected,
            processor_targets=['pre', 'both'],
            processor_type='pre')

        try:
            yield
        except Exception as e:
            # make section Errored when exception happens
            section.result = Errored.clone('Caught exception in %s' %
                                           str(section),
                                           data={'traceback': e})

        # ----------------------
        # post-context processor
        # ----------------------

        post_if_pre_execute_flag = True
        # check `post_if_pre_execute` and if pre-processor is executed
        for each_data in self._get_actions(data):
            if Dq(each_data).get_values(
                    'processor',
                    0) == 'post_if_pre_execute' and not self.pre_processor_run:
                post_if_pre_execute_flag = False

        if not post_if_pre_execute_flag:
            log.info(
                "Post-processor pyATS Health '{name}' was skipped because required Pre-processor was not executed."
                .format(name=name))

        else:
            if 'genie' not in testbed.__module__:
                # convert testbed from pyATS to Genie
                # need to convert to bring latest status from runtime again
                # for the case devices are connected after pre-processor
                testbed = Converter.convert_tb(runtime.testbed)

            # get connected devices list
            devices_connected = self._check_all_devices_connected(
                testbed, data, reconnect)

            # execute post-processor
            self._pre_post_processors(
                testbed,
                processor,
                section,
                data,
                name,
                devices_connected,
                processor_targets=['post', 'post_if_pre_execute', 'both'],
                processor_type='post',
                pre_processor_result=self.pre_processor_result)
Ejemplo n.º 26
0
def verify_ping_one_iterations(device,
                               address=None,
                               ttl=None,
                               tos=None,
                               size=None,
                               wait=None,
                               mpls_rsvp=None,
                               loss_rate=0,
                               ping_size=None,
                               count=None,
                               source=None):
    """ Verify ping loss rate on ip address provided (single check, no retry)

        Args:
            device ('obj'): Device object
            address ('str'): Address value
            size {'str'}: Size value for ping command; when given, the
                          reported data-bytes must also match
            tos {'str'}: tos value for ping command
            ping_size {'str'}: data bytes expected
            ttl ('int'): ttl value passed in command
            wait ('int'): wait value passed in command
            mpls_rsvp ('str'): MPLS RSVP value
            loss_rate ('int'): Expected loss rate value
            count ('int'): Count value for ping command
            source ('str'): Source IP address, default: None
        Returns:
            Boolean
        Raises:
            None
    """
    # Build the ping command from the supplied argument combination
    if tos:
        cmd = 'ping {address} source {source} size {size} count {count} tos {tos} rapid'.format(
            address=address, source=source, size=size, count=count, tos=tos)
    elif address and count and source:
        cmd = 'ping {address} source {source} count {count}'.format(
            address=address, source=source, count=count)
    elif address and count and not ttl and not wait:
        cmd = 'ping {address} count {count}'.format(address=address,
                                                    count=count)
    elif address and count and ttl and wait:
        cmd = 'ping {address} ttl {ttl} count {count} wait {wait}'.format(
            address=address, ttl=ttl, count=count, wait=wait)
    elif not address and mpls_rsvp:
        cmd = 'ping mpls rsvp {rsvp}'.format(rsvp=mpls_rsvp)
    elif address:
        cmd = 'ping {address}'.format(address=address)
    else:
        log.info('Need to pass address as argument')
        return False
    try:
        out = device.parse(cmd)
    except SchemaEmptyParserError:
        return False
    # Example dictionary structure:
    #     {
    #         "ping": {
    #             "address": "10.189.5.94",
    #             "data-bytes": 56,
    #             "result": [
    #                 {
    #                     "bytes": 64,
    #                     "from": "10.189.5.94",
    #                     "icmp-seq": 0,
    #                     "time": "2.261",
    #                     "ttl": 62
    #                 },
    #             ],
    #             "source": "10.189.5.94",
    #             "statistics": {
    #                 "loss-rate": 0,
    #                 "received": 1,
    #                 "round-trip": {
    #                     "avg": "2.175",
    #                     "max": "2.399",
    #                     "min": "1.823",
    #                     "stddev": "0.191"
    #                 },
    #                 "send": 1
    #             }
    #         }
    #     }
    loss_rate_found = Dq(out).get_values("loss-rate", 0)

    if loss_rate_found != loss_rate:
        return False
    # Bug fix (consistent with verify_ping): previously a matching loss-rate
    # returned True even when `size` was given but data-bytes did not match.
    if size and Dq(out).get_values("data-bytes", 0) != int(size):
        return False
    return True
Ejemplo n.º 27
0
def maple(self,
          steps,
          device,
          maple_plugin_input,
          maple_action=None,
          output=None,
          include=None,
          exclude=None,
          continue_=True,
          **kwargs):
    '''Execute a maple plugin method as a blitz action.

        3 types of maple plugins exist: 
            1) confirm
            2) matcher
            3) command
        
        Example of converting maple plugins into equivalent blitz action
        apply:
            devices:
                N93_3:
                    type: dmerest
                    commands: |
                        #@# command:{       <-- plugin_type  (command|matcher|confirm)
                            "method":"processdme",
                            "options":[
                                {"method":"GET"},
                                {"url":"http://ott-ads-019:8025/api/mo/sys/fm/mplssgmntrtg.json"}
                            ]} 
                        #@#
        ==================================================================================
        Blitz equivalent of the abovementioned command maple plugin
        - maple:
            # maple_plugin_input keyword below is section dict containing all the maple_action information and is input to blitz code  
            
            maple_plugin_input: '{"type": "dmerest", "commands":   < -- string representation of the dictionary representation of the maple_plugin with maple_action_type and rule_id for use in blitz
                        "command:{\n
                            \"method\":\"processdme\",\n
                            \"options\":[\n
                                        {\"method\":\"GET\"},\n
                                        {\"url\":\"http://ott-ads-019:8025/api/mo/sys/fm/mplssgmntrtg.json\"}\n
                                        ]}"}'
            
            device: N93_3
            maple_action: apply <-- necessary for blitz
            save:
            - variable_name: my_variable
              append: true
            continue: false
    '''
    # cast the maple_plugin_input input into json_data
    # maple_plugin_input keyword of the yaml file is dictionary casted as string
    # OrderedDict preserves the yaml's key order for downstream consumers
    maple_plugin_input = json.loads(maple_plugin_input,
                                    object_pairs_hook=OrderedDict)
    plugin_extract = re.compile(
        r'(?P<plugin_type>\w+):{(?P<plugin_data>[\S\s]+)}')

    # extracting 'commands' keyword in maple_plugin_input and match it with above regex
    # Plugin_type == command|matcher|confirm
    # plugin_data == info that'd be used int maple code
    m = plugin_extract.match(
        Dq(maple_plugin_input).get_values('commands', index=0))
    # Fix: fail with a clear message instead of AttributeError on groupdict()
    if not m:
        raise Exception(
            "Could not extract plugin type and data from 'commands' in "
            "maple_plugin_input")
    matched_group = m.groupdict()
    json_data_str = "{{{}}}".format(matched_group['plugin_data'])
    plugin_data = json.loads(json_data_str)

    # package extracted form plugin_data, containing the package that contains maple plugin
    package = plugin_data.pop('package', None)
    # maple plugin function name
    method = plugin_data.pop('method', None)
    # if there the class that contains maple plugin method
    _class = plugin_data.pop('class', None)

    # if no method to call raise the exception
    if not method:
        raise Exception('No method was provided to call')

    # objects == kwargs to the maple plugin method which mostly coming from maple_plugin_input
    # plugin_source contains the package that plugin is in (e.g CommandPlugins, MatcherPlugins etc)
    # It is possible that the plugin_source is a class as well
    objects, plugin_source = _maple_plugins_input(self,
                                                  steps,
                                                  device,
                                                  plugin_data,
                                                  maple_action,
                                                  matched_group,
                                                  maple_plugin_input,
                                                  package,
                                                  method,
                                                  _class=_class,
                                                  output=output)

    # if plugin_source is class create an object of that class
    # store its name in plugin_source_print_str
    if inspect.isclass(plugin_source):
        plugin_source_print_str = plugin_source.__name__
        plugin_source = plugin_source()

    # if plugin_source only a module
    # Only store its name in plugin_source_print_str
    # replace package name in module name and strip all the "." the name of the module would be stored
    else:
        plugin_source_print_str = plugin_source.__name__.replace(
            plugin_source.__package__, '').strip('.')

    # Calling the maple method
    with steps.start("Calling method '{m}' from maple plugin '{p}' on '{d}'".\
                      format(m=method, p=plugin_source_print_str, d=device.name), continue_=continue_) as step:

        # calling the function in the plugin
        # receveing the output of the plugin method
        ret_value = getattr(plugin_source, method)(objects)

        # matchObjs in maple == save_variable_name in blitz
        # ixiaObjs in maple is saving ixia values in maple
        # still storing as same as save_variable_name
        # They need to be extracte from ret_value and store
        # in self.parameters['save_variable_name']
        if 'matchObjs' in ret_value:
            for key, val in ret_value['matchObjs'].items():
                save_variable(self, key, val)
        if 'ixiaObjs' in ret_value:
            for key, val in ret_value['ixiaObjs'].items():
                if val != {}:
                    save_variable(self, key, val)

        # checking if there is a results that needs to be used
        # to pass or fail the action
        # Fix: `result` was referenced unbound (NameError) for 'matcher'
        # plugins that return a 'result' key; only confirm/command plugins
        # drive the step's pass/fail status
        if 'result' in ret_value:
            result = None
            if matched_group['plugin_type'] == 'confirm':
                result = ret_value['result']
            if matched_group['plugin_type'] == 'command':
                result = ret_value['result'][0]
            if result is not None:
                if result == True:
                    step.passed()
                else:
                    step.failed()

    return ret_value.get('output')
Ejemplo n.º 28
0
def get_route_nexthop(device,
                      route,
                      extensive=False,
                      all_nexthops=True,
                      only_best=False,
                      only_non_best=False):
    """ Get nexthop(s) of a route from the routing table

        Args:
            device (`obj`): Device object
            route (`str`): route in routing table
            extensive (`bool`): flag to add `extensive` to show command
                                Default to False
            all_nexthops (`bool`):  flag to return all nexthops as list or only first one as string
            only_best (`bool`): only best nexthop
            only_non_best (`bool`): only non-best nexthop
        Returns:
            nexthop address (list or string), or None when the route is
            absent or the flags conflict
    """
    # Passing both selector flags is ambiguous; reject it up front before
    # logging anything misleading or running any show command.
    if only_best and only_non_best:
        log.warning(
            'only_best and only_non_best, only one of them can be passed as True to api.'
        )
        return None

    if only_best or only_non_best:
        log.info(
            "only_best or only_non_best is passed as True, so set all_next_hops as False."
        )
        all_nexthops = False

    try:
        if extensive:
            out = device.parse(
                'show route {route} extensive'.format(route=route))
        else:
            out = device.parse('show route {route}'.format(route=route))
    except SchemaEmptyParserError:
        # Parser returned nothing -> route not present in the routing table
        return None

    # Example dictionary structure:
    #         {
    #             "rt": [
    #                 {
    #                     "rt-destination": "10.169.14.240/32",
    #                     "rt-entry": {
    #                         "nh": [
    #                             {
    #                                 "to": "10.169.14.121",
    #                                 "via": "ge-0/0/1.0"
    #                             }
    #                         ],
    #                         "rt-tag": "100",
    #                         "preference": "5",
    #                         "protocol-name": "Static"
    #                     }
    #                 }
    #             ],
    #             "table-name": "inet.0",
    #             "total-route-count": "240"
    #         },

    if all_nexthops:
        return out.q.get_values('to')
    if only_best:
        # The active (best) route entry is marked with active-tag '*'
        for rt_item in out.q.get_values('rt'):
            if rt_item['rt-entry'].get('active-tag', '') == '*':
                return Dq(rt_item).get_values('to', 0)
        return None
    if only_non_best:
        # Non-best entries carry an empty active-tag
        for rt_item in out.q.get_values('rt'):
            if rt_item['rt-entry'].get('active-tag', '') == '':
                return Dq(rt_item).get_values('to')
        return []
    # all_nexthops explicitly False with no selector flag: first nexthop only
    return out.q.get_values('to', 0)
Ejemplo n.º 29
0
def verify_ospf_interface_in_database(device,
                                      expected_interface,
                                      expected_interface_type=None,
                                      subnet_mask=None,
                                      expected_metric=None,
                                      adv_router=False,
                                      max_time=60,
                                      check_interval=10):
    """ Verifies ospf interface exists with criteria

        Args:
            device ('obj'): device to use
            expected_interface ('str'): Interface to use
            expected_interface_type ('str'): Interface type
            subnet_mask ('str'): Subnet mask
            expected_metric ('str'): Metric of Interface
            adv_router ('bool'): Whether to look for address in advertising router
            max_time ('int'): Maximum time to keep checking
            check_interval ('int'): How often to check

        Returns:
            Boolean
        Raises:
            N/A
    """
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        try:
            out = device.parse('show ospf database extensive')
        except SchemaEmptyParserError:
            timeout.sleep()
            continue

        #'ospf-database':[
        #    {'lsa-type':
        #        'Router',
        #        'lsa-id': '1.1.1.1'
        #    }
        #]

        for ospf_database in Dq(out).get_values('ospf-database'):

            #'ospf-external-lsa':
            #   {'address-mask': '255.255.255.255',
            #   'ospf-external-lsa-topology': {
            #        'ospf-topology-name':
            #            'default'}}
            ospf_external_lsa = ospf_database.get('ospf-external-lsa', {})
            if not adv_router:
                if 'address-mask' not in ospf_external_lsa:
                    continue
                else:
                    #{'address-mask': '255.255.255.255'}
                    # Convert dotted mask to prefix length for comparison
                    current_mask = IPAddress(
                        ospf_external_lsa.get('address-mask')).netmask_bits()
                    if str(current_mask) != subnet_mask:
                        continue

                #'type-value': '2'
                if not ospf_external_lsa:
                    continue
                if not ospf_external_lsa.get('ospf-external-lsa-topology', {}):
                    continue
                if str(expected_metric) != ospf_external_lsa.get(
                        'ospf-external-lsa-topology', {}).get(
                            'type-value', {}):
                    continue

                #'lsa-type': 'Extern'
                lsa_type = ospf_database.get('lsa-type', None)
                lsa_type = lsa_type.lower() if lsa_type else lsa_type

                # expected_interface_type is optional (defaults to None);
                # only compare when the caller actually provided one to
                # avoid AttributeError on None.lower()
                if expected_interface_type and \
                        expected_interface_type.lower() != lsa_type:
                    continue

                #'lsa-id': '11.11.11.11'
                lsa_id = ospf_database.get('lsa-id', None)
                if expected_interface != lsa_id:
                    continue
            else:
                advertising_router = ospf_database.get('advertising-router',
                                                       None)
                if expected_interface != advertising_router:
                    continue

            return True

        timeout.sleep()

    return False
Ejemplo n.º 30
0
def verify_metric_in_route(device,
                           address,
                           expected_metric,
                           table_name,
                           max_time=60,
                           check_interval=10):
    """Verify metric in 'show route {address}' when given table_name

        Args:
            device ('obj'): Device to use
            address ('str'): IP address in show command
            expected_metric ('int'): Expected metric number
            table_name ('str'): Table name. E.g. "inet.3".
            max_time ('int', optional): Maximum time to keep checking. Default to 60.
            check_interval ('int', optional): How often to check. Default to 10.

        Returns:
            True/False

        Raises:
            N/A
    """

    timeout = Timeout(max_time, check_interval)

    while timeout.iterate():
        try:

            out = device.parse("show route {address}".format(address=address))
        except SchemaEmptyParserError:
            timeout.sleep()
            continue

        # Sample output
        #  {'route-information': {'route-table': [{'active-route-count': '8',
        #                                         'destination-count': '8',
        #                                         'hidden-route-count': '0',
        #                                         'holddown-route-count': '0',
        #                                         'rt': [{'rt-destination': '106.187.14.240/32',
        #                                                 'rt-entry': {'active-tag': '*',
        #                                                             'age': {'#text': '00:07:19'},
        #                                                             'metric': '1',  <--------------------------
        #                                                             'nh': [{'to': '106.187.14.157',
        #                                                                     'via': 'ge-0/0/0.0'}],
        #                                                             'preference': '10',
        #                                                             'protocol-name': 'OSPF'}}],
        #                                         'table-name': 'inet.0', <--------------------------
        #                                         'total-route-count': '8'},
        #                                     ...]}}

        # Filter the outputs:
        # Input:        out.q.contains('metric|inet.3', regex=True).reconstruct()
        # Output:       {'route-information': {'route-table': [{'rt': [{'rt-entry': {'metric': '1'}}]},
        #                                                      {'rt': [{'rt-entry': {'metric': '1'}}],
        #                                                                    'table-name': 'inet.3'}
        #                                                     ]}}
        filtered_output = out.q.contains(
            'metric|{table_name}'.format(table_name=table_name),
            regex=True).reconstruct()

        # The filter may match nothing (e.g. table not present yet):
        # use safe lookups so we retry instead of raising KeyError
        rt_list = filtered_output.get('route-information',
                                      {}).get('route-table', [])

        for rt in rt_list:
            if 'table-name' in rt and rt['table-name'] == table_name:
                # Guard against entries where no 'metric' key survived
                # the filter before indexing the first value
                metrics = Dq(rt).get_values('metric')
                if metrics and expected_metric == int(metrics[0]):
                    return True

        timeout.sleep()
    return False