Example #1
def thread_websocket_events():
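    """Blocking loop that reads new entries from the Redis "events" stream
    and forwards each successfully parsed event via emit_redis_event().
    Intended to run in a dedicated thread."""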
    redis: StrictRedis
    with redis_session() as redis:
        while True:
            result = redis.xread({"events": b"$"}, count=10, block=200)
            for item in result:
                event = parse_redis_event(item)
                if not event:
                    continue
                emit_redis_event(event)
Example #2
def update_device_progress(job_id: int):
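    """Drain the Redis list 'finished_devices_<job_id>' and append the popped
    entries to the finished_devices field of the corresponding Job row."""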
    new_finished_devices = []
    with redis_session() as db:
        while db.llen('finished_devices_' + str(job_id)) != 0:
            last_finished = db.lpop('finished_devices_' + str(job_id))
            new_finished_devices.append(last_finished)

    if new_finished_devices:
        with sqla_session() as session:
            job = session.query(Job).filter(Job.id == job_id).one_or_none()
            if not job:
                raise ValueError(
                    "Could not find Job with ID {}".format(job_id))
            finished_devices = job.finished_devices + new_finished_devices
            job.finished_devices = finished_devices
Example #3
def add_event(message: Optional[str] = None,
              event_type: str = "log",
              level: str = "INFO",
              update_type: Optional[str] = None,
              json_data: Optional[str] = None):
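    """Build an event payload (log message or update notification) and append
    it to the Redis stream "events", which is capped at 100 entries."""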
    with redis_session() as redis:
        try:
            send_data = {"type": event_type, "level": level}
            if event_type == "log":
                send_data['message'] = message
            elif event_type == "update":
                send_data['update_type'] = update_type
                send_data['json'] = json_data
            redis.xadd("events", send_data, maxlen=100)
        except Exception as e:
            print("Error in add_event: {}".format(e))
Example #4
def push_sync_device(task, dry_run: bool = True, generate_only: bool = False,
                     job_id: Optional[str] = None,
                     scheduled_by: Optional[str] = None):
    """
    Nornir task to generate config and push to device

    Args:
        task: nornir task, sent by nornir when doing .run()
        dry_run: Don't commit config to device, just do compare/diff
        generate_only: Only generate text config, don't try to commit or
                       even do dry_run compare to running config

    Returns:

    """
    set_thread_data(job_id)
    logger = get_logger()
    hostname = task.host.name
    with sqla_session() as session:
        dev: Device = session.query(Device).filter(Device.hostname == hostname).one()
        template_vars = populate_device_vars(session, dev)
        platform = dev.platform
        devtype = dev.device_type

    with open('/etc/cnaas-nms/repository.yml', 'r') as db_file:
        repo_config = yaml.safe_load(db_file)
        local_repo_path = repo_config['templates_local']

    mapfile = os.path.join(local_repo_path, platform, 'mapping.yml')
    if not os.path.isfile(mapfile):
        raise RepoStructureException("File {} not found in template repo".format(mapfile))
    with open(mapfile, 'r') as f:
        mapping = yaml.safe_load(f)
        template = mapping[devtype.name]['entrypoint']

    logger.debug("Generate config for host: {}".format(task.host.name))
    r = task.run(task=template_file,
                 name="Generate device config",
                 template=template,
                 jinja_env=cnaas_jinja_env,
                 path=f"{local_repo_path}/{task.host.platform}",
                 **template_vars)

    # TODO: Handle template not found, variables not defined
    # jinja2.exceptions.UndefinedError

    task.host["config"] = r.result
    task.host["template_vars"] = template_vars

    if generate_only:
        task.host["change_score"] = 0
    else:
        logger.debug("Synchronize device config for host: {} ({}:{})".format(
            task.host.name, task.host.hostname, task.host.port))

        task.host.open_connection("napalm", configuration=task.nornir.config)
        task.run(task=napalm_configure,
                 name="Sync device config",
                 replace=True,
                 configuration=task.host["config"],
                 dry_run=dry_run
                 )
        task.host.close_connection("napalm")

        if task.results[1].diff:
            config = task.results[1].host["config"]
            diff = task.results[1].diff
            task.host["change_score"] = calculate_score(config, diff)
        else:
            task.host["change_score"] = 0
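    # Record this host on the per-job Redis list of finished devices so that
    # job progress can be tracked (consumed by update_device_progress above).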
    if job_id:
        with redis_session() as db:
            db.lpush('finished_devices_' + str(job_id), task.host.name)
Example #5
def device_upgrade_task(task,
                        job_id: str,
                        reboot: bool,
                        filename: str,
                        url: str,
                        download: Optional[bool] = False,
                        pre_flight: Optional[bool] = False,
                        activate: Optional[bool] = False) -> NornirJobResult:
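    """Nornir task that upgrades firmware on a device: optionally runs a
    pre-flight check, downloads the firmware image, activates it in the
    boot-config and reboots the device."""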

    # If pre-flight is selected, execute the pre-flight task which
    # will verify the amount of disk space and so on.
    if pre_flight:
        logger.info('Running pre-flight check on {}'.format(task.host.name))
        try:
            res = task.run(task=arista_pre_flight_check)
            print_result(res)
        except Exception as e:
            logger.exception(
                "Exception while doing pre-flight check: {}".format(str(e)))
            raise Exception('Pre-flight check failed')
        else:
            if res.failed:
                logger.exception('Pre-flight check failed for: {}'.format(
                    ' '.join(res.failed_hosts.keys())))
                raise Exception('Pre-flight check failed')

    # If download is true, go ahead and download the firmware
    if download:
        # Download the firmware from the HTTP container.
        logger.info('Downloading firmware {} on {}'.format(
            filename, task.host.name))
        try:
            res = task.run(task=arista_firmware_download,
                           filename=filename,
                           httpd_url=url)
            print_result(res)
        except Exception as e:
            logger.exception('Exception while downloading firmware: {}'.format(
                str(e)))
            raise e

    # If activate is true, activate the newly downloaded firmware and
    # verify that it is present in the boot-config.
    if activate:
        logger.info('Activating firmware {} on {}'.format(
            filename, task.host.name))
        try:
            res = task.run(task=arista_firmware_activate, filename=filename)
            print_result(res)
        except Exception as e:
            logger.exception('Exception while activating firmware: {}'.format(
                str(e)))
            raise e

    # Reboot the device if needed, we will then lose the connection.
    if reboot:
        logger.info('Rebooting {}'.format(task.host.name))
        try:
            res = task.run(task=arista_device_reboot)
        except Exception:
            pass

    if job_id:
        with redis_session() as db:
            db.lpush('finished_devices_' + str(job_id), task.host.name)
Example #6
def _refresh_repo_task(repo_type: RepoType = RepoType.TEMPLATES) -> str:
    """Should only be called by refresh_repo function."""
    with open('/etc/cnaas-nms/repository.yml', 'r') as db_file:
        repo_config = yaml.safe_load(db_file)

    if repo_type == RepoType.TEMPLATES:
        local_repo_path = repo_config['templates_local']
        remote_repo_path = repo_config['templates_remote']
    elif repo_type == RepoType.SETTINGS:
        local_repo_path = repo_config['settings_local']
        remote_repo_path = repo_config['settings_remote']
    else:
        raise ValueError("Invalid repository")

    ret = ''
    changed_files: Set[str] = set()
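    # Pull the existing local repository; if it does not exist yet, fall back
    # to cloning it from the remote (handled in the except block below).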
    try:
        local_repo = Repo(local_repo_path)
        prev_commit = local_repo.commit().hexsha
        diff = local_repo.remotes.origin.pull()
        for item in diff:
            ret += 'Commit {} by {} at {}\n'.format(
                item.commit.name_rev, item.commit.committer,
                item.commit.committed_datetime)
            diff_files = local_repo.git.diff(
                '{}..{}'.format(prev_commit, item.commit.hexsha),
                name_only=True).split()
            changed_files.update(diff_files)
            prev_commit = item.commit.hexsha
    except (InvalidGitRepositoryError, NoSuchPathError) as e:
        logger.info("Local repository {} not found, cloning from remote".\
                    format(local_repo_path))
        try:
            local_repo = Repo.clone_from(remote_repo_path, local_repo_path)
        except NoSuchPathError as e:
            raise ConfigException("Invalid remote repository {}: {}".format(
                remote_repo_path, str(e)))
        except GitCommandError as e:
            raise ConfigException(
                "Error cloning remote repository {}: {}".format(
                    remote_repo_path, str(e)))

        ret = 'Cloned new from remote. Last commit {} by {} at {}'.format(
            local_repo.head.commit.name_rev, local_repo.head.commit.committer,
            local_repo.head.commit.committed_datetime)

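    # For the settings repository: clear the settings cache, re-validate the
    # settings for all device types and devices, and mark devices affected by
    # the changed files as unsynchronized.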
    if repo_type == RepoType.SETTINGS:
        try:
            logger.debug("Clearing redis-lru cache for settings")
            with redis_session() as redis_db:
                cache = RedisLRU(redis_db)
                cache.clear_all_cache()
            get_settings()
            test_devtypes = [
                DeviceType.ACCESS, DeviceType.DIST, DeviceType.CORE
            ]
            for devtype in test_devtypes:
                get_settings(device_type=devtype)
            for hostname in os.listdir(os.path.join(local_repo_path,
                                                    'devices')):
                hostname_path = os.path.join(local_repo_path, 'devices',
                                             hostname)
                if not os.path.isdir(hostname_path) or hostname.startswith(
                        '.'):
                    continue
                if not Device.valid_hostname(hostname):
                    continue
                get_settings(hostname)
            check_settings_collisions()
        except SettingsSyntaxError as e:
            logger.exception("Error in settings repo configuration: {}".format(
                str(e)))
            raise e
        except VlanConflictError as e:
            logger.exception("VLAN conflict in repo configuration: {}".format(
                str(e)))
            raise e
        logger.debug(
            "Files changed in settings repository: {}".format(changed_files))
        updated_devtypes, updated_hostnames = settings_syncstatus(
            updated_settings=changed_files)
        logger.debug(
            "Device types to be marked unsynced after repo refresh: {}".format(
                ', '.join([dt.name for dt in updated_devtypes])))
        logger.debug(
            "Devices to be marked unsynced after repo refresh: {}".format(
                ', '.join(updated_hostnames)))
        with sqla_session() as session:
            devtype: DeviceType
            for devtype in updated_devtypes:
                Device.set_devtype_syncstatus(session,
                                              devtype,
                                              syncstatus=False)
            for hostname in updated_hostnames:
                dev: Device = session.query(Device).\
                    filter(Device.hostname == hostname).one_or_none()
                if dev:
                    dev.synchronized = False
                else:
                    logger.warning(
                        "Settings updated for unknown device: {}".format(
                            hostname))

    if repo_type == RepoType.TEMPLATES:
        logger.debug(
            "Files changed in template repository: {}".format(changed_files))
        updated_devtypes = template_syncstatus(updated_templates=changed_files)
        updated_list = [
            '{}:{}'.format(platform, dt.name)
            for dt, platform in updated_devtypes
        ]
        logger.debug(
            "Device types to be marked unsynced after repo refresh: {}".format(
                ', '.join(updated_list)))
        with sqla_session() as session:
            devtype: DeviceType
            for devtype, platform in updated_devtypes:
                Device.set_devtype_syncstatus(session,
                                              devtype,
                                              platform,
                                              syncstatus=False)

    return ret
Example #7
def device_upgrade_task(task,
                        job_id: str,
                        filename: str,
                        url: str,
                        reboot: Optional[bool] = False,
                        download: Optional[bool] = False,
                        pre_flight: Optional[bool] = False,
                        post_flight: Optional[bool] = False,
                        post_waittime: Optional[int] = 0,
                        activate: Optional[bool] = False) -> NornirJobResult:
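    """Nornir task that upgrades firmware on a device. Depending on the given
    flags it runs a pre-flight check, downloads and activates the firmware,
    reboots the device and runs a post-flight check after post_waittime
    seconds."""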

    # If pre-flight is selected, execute the pre-flight task which
    # will verify the amount of disk space and so on.
    set_thread_data(job_id)
    logger = get_logger()
    if pre_flight:
        logger.info('Running pre-flight check on {}'.format(task.host.name))
        try:
            res = task.run(task=arista_pre_flight_check, job_id=job_id)
        except Exception as e:
            logger.exception(
                "Exception while doing pre-flight check: {}".format(str(e)))
            raise Exception('Pre-flight check failed')
        else:
            if res.failed:
                logger.exception('Pre-flight check failed for: {}'.format(
                    ' '.join(res.failed_hosts.keys())))
                raise Exception('Pre-flight check failed')

    # If download is true, go ahead and download the firmware
    if download:
        # Download the firmware from the HTTP container.
        logger.info('Downloading firmware {} on {}'.format(
            filename, task.host.name))
        try:
            res = task.run(task=arista_firmware_download,
                           filename=filename,
                           httpd_url=url,
                           job_id=job_id)
        except Exception as e:
            logger.exception('Exception while downloading firmware: {}'.format(
                str(e)))
            raise e

    # If activate is true, activate the newly downloaded firmware and
    # verify that it is present in the boot-config.
    already_active = False
    if activate:
        logger.info('Activating firmware {} on {}'.format(
            filename, task.host.name))
        try:
            res = task.run(task=arista_firmware_activate,
                           filename=filename,
                           job_id=job_id)
        except NornirSubTaskError as e:
            subtask_result = e.result[0]
            logger.debug(
                'Exception while activating firmware for {}: {}'.format(
                    task.host.name, subtask_result))
            if subtask_result.exception:
                if isinstance(subtask_result.exception,
                              FirmwareAlreadyActiveException):
                    already_active = True
                    logger.info(
                        "Firmware already active, skipping reboot and post_flight: {}"
                        .format(subtask_result.exception))
                else:
                    logger.exception(
                        'Firmware activate subtask exception for {}: {}'.
                        format(task.host.name, str(subtask_result.exception)))
                    raise e
            else:
                logger.error('Activate subtask result for {}: {}'.format(
                    task.host.name, subtask_result.result))
                raise e
        except Exception as e:
            logger.exception(
                'Exception while activating firmware for {}: {}'.format(
                    task.host.name, str(e)))
            raise e

    # Reboot the device if needed, we will then lose the connection.
    if reboot and not already_active:
        logger.info('Rebooting {}'.format(task.host.name))
        try:
            res = task.run(task=arista_device_reboot, job_id=job_id)
        except Exception:
            pass

    # If post-flight is selected, execute the post-flight task which
    # will update device facts for the selected devices
    if post_flight and not already_active:
        logger.info(
            'Running post-flight check on {}, delay start by {}s'.format(
                task.host.name, post_waittime))
        try:
            res = task.run(task=arista_post_flight_check,
                           post_waittime=post_waittime,
                           job_id=job_id)
        except Exception as e:
            logger.exception('Failed to run post-flight check: {}'.format(
                str(e)))
        else:
            if res.failed:
                logger.error('Post-flight check failed for: {}'.format(
                    ' '.join(res.failed_hosts.keys())))

    if job_id:
        with redis_session() as db:
            db.lpush('finished_devices_' + str(job_id), task.host.name)
Example #8
def push_sync_device(task,
                     dry_run: bool = True,
                     generate_only: bool = False,
                     job_id: Optional[str] = None,
                     scheduled_by: Optional[str] = None):
    """
    Nornir task to generate config and push to device

    Args:
        task: nornir task, sent by nornir when doing .run()
        dry_run: Don't commit config to device, just do compare/diff
        generate_only: Only generate text config, don't try to commit or
                       even do dry_run compare to running config

    Returns:

    """
    set_thread_data(job_id)
    logger = get_logger()
    hostname = task.host.name
    with sqla_session() as session:
        dev: Device = session.query(Device).filter(
            Device.hostname == hostname).one()
        mgmt_ip = dev.management_ip
        infra_ip = dev.infra_ip
        if not mgmt_ip:
            raise Exception(
                "Could not find management IP for device {}".format(hostname))
        devtype: DeviceType = dev.device_type
        if isinstance(dev.platform, str):
            platform: str = dev.platform
        else:
            raise ValueError("Unknown platform: {}".format(dev.platform))
        settings, settings_origin = get_settings(hostname, devtype)
        device_variables = {
            'mgmt_ip': str(mgmt_ip),
            'device_model': dev.model,
            'device_os_version': dev.os_version
        }

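        # Access devices: look up the management domain for VLAN/gateway
        # information and build an interface list with resolved VLAN IDs.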
        if devtype == DeviceType.ACCESS:
            mgmtdomain = cnaas_nms.db.helper.find_mgmtdomain_by_ip(
                session, dev.management_ip)
            if not mgmtdomain:
                raise Exception(
                    "Could not find appropriate management domain for management_ip: {}"
                    .format(dev.management_ip))

            mgmt_gw_ipif = IPv4Interface(mgmtdomain.ipv4_gw)
            access_device_variables = {
                'mgmt_vlan_id': mgmtdomain.vlan,
                'mgmt_gw': str(mgmt_gw_ipif.ip),
                'mgmt_ipif': str(IPv4Interface('{}/{}'.format(
                    mgmt_ip, mgmt_gw_ipif.network.prefixlen))),
                'mgmt_prefixlen': int(mgmt_gw_ipif.network.prefixlen),
                'interfaces': []
            }
            intfs = session.query(Interface).filter(
                Interface.device == dev).all()
            intf: Interface
            for intf in intfs:
                untagged_vlan = None
                tagged_vlan_list = []
                intfdata = None
                if intf.data:
                    if 'untagged_vlan' in intf.data:
                        untagged_vlan = resolve_vlanid(
                            intf.data['untagged_vlan'], settings['vxlans'])
                    if 'tagged_vlan_list' in intf.data:
                        tagged_vlan_list = resolve_vlanid_list(
                            intf.data['tagged_vlan_list'], settings['vxlans'])
                    intfdata = dict(intf.data)
                access_device_variables['interfaces'].append({
                    'name': intf.name,
                    'ifclass': intf.configtype.name,
                    'untagged_vlan': untagged_vlan,
                    'tagged_vlan_list': tagged_vlan_list,
                    'data': intfdata
                })
            mlag_vars = get_mlag_vars(session, dev)
            device_variables = {
                **access_device_variables,
                **device_variables,
                **mlag_vars
            }
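        # Distribution/core devices: build fabric variables (loopback and
        # infra addresses, BGP ASN, downlink/custom interfaces, management
        # domains and fabric/EVPN peerings derived from neighbors in the
        # database).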
        elif devtype == DeviceType.DIST or devtype == DeviceType.CORE:
            asn = generate_asn(infra_ip)
            fabric_device_variables = {
                'mgmt_ipif': str(IPv4Interface('{}/32'.format(mgmt_ip))),
                'mgmt_prefixlen': 32,
                'infra_ipif': str(IPv4Interface('{}/32'.format(infra_ip))),
                'infra_ip': str(infra_ip),
                'interfaces': [],
                'bgp_ipv4_peers': [],
                'bgp_evpn_peers': [],
                'mgmtdomains': [],
                'asn': asn
            }
            ifname_peer_map = dev.get_linknet_localif_mapping(session)
            if 'interfaces' in settings and settings['interfaces']:
                for intf in settings['interfaces']:
                    try:
                        ifindexnum: int = Interface.interface_index_num(
                            intf['name'])
                    except ValueError as e:
                        ifindexnum: int = 0
                    if 'ifclass' in intf and intf['ifclass'] == 'downlink':
                        data = {}
                        if intf['name'] in ifname_peer_map:
                            data['description'] = ifname_peer_map[intf['name']]
                        fabric_device_variables['interfaces'].append({
                            'name': intf['name'],
                            'ifclass': intf['ifclass'],
                            'indexnum': ifindexnum,
                            'data': data
                        })
                    elif 'ifclass' in intf and intf['ifclass'] == 'custom':
                        fabric_device_variables['interfaces'].append({
                            'name': intf['name'],
                            'ifclass': intf['ifclass'],
                            'config': intf['config'],
                            'indexnum': ifindexnum
                        })
            for mgmtdom in cnaas_nms.db.helper.get_all_mgmtdomains(
                    session, hostname):
                fabric_device_variables['mgmtdomains'].append({
                    'id': mgmtdom.id,
                    'ipv4_gw': mgmtdom.ipv4_gw,
                    'vlan': mgmtdom.vlan,
                    'description': mgmtdom.description,
                    'esi_mac': mgmtdom.esi_mac
                })
            # find fabric neighbors
            fabric_links = []
            for neighbor_d in dev.get_neighbors(session):
                if neighbor_d.device_type == DeviceType.DIST or neighbor_d.device_type == DeviceType.CORE:
                    # TODO: support multiple links to the same neighbor?
                    local_if = dev.get_neighbor_local_ifname(
                        session, neighbor_d)
                    local_ipif = dev.get_neighbor_local_ipif(
                        session, neighbor_d)
                    neighbor_ip = dev.get_neighbor_ip(session, neighbor_d)
                    if local_if:
                        fabric_device_variables['interfaces'].append({
                            'name': local_if,
                            'ifclass': 'fabric',
                            'ipv4if': local_ipif,
                            'peer_hostname': neighbor_d.hostname,
                            'peer_infra_lo': str(neighbor_d.infra_ip),
                            'peer_ip': str(neighbor_ip),
                            'peer_asn': generate_asn(neighbor_d.infra_ip)
                        })
                        fabric_device_variables['bgp_ipv4_peers'].append({
                            'peer_hostname': neighbor_d.hostname,
                            'peer_infra_lo': str(neighbor_d.infra_ip),
                            'peer_ip': str(neighbor_ip),
                            'peer_asn': generate_asn(neighbor_d.infra_ip)
                        })
            # populate evpn peers data
            for neighbor_d in get_evpn_spines(session, settings):
                if neighbor_d.hostname == dev.hostname:
                    continue
                fabric_device_variables['bgp_evpn_peers'].append({
                    'peer_hostname': neighbor_d.hostname,
                    'peer_infra_lo': str(neighbor_d.infra_ip),
                    'peer_asn': generate_asn(neighbor_d.infra_ip)
                })
            device_variables = {**fabric_device_variables, **device_variables}

    # Add all environment variables starting with TEMPLATE_SECRET_ to
    # the list of configuration variables. The idea is to store secret
    # configuration outside of the templates repository.
    template_secrets = {}
    for env in os.environ:
        if env.startswith('TEMPLATE_SECRET_'):
            template_secrets[env] = os.environ[env]

    # Merge device variables with settings before sending to template rendering
    # Device variables override any names from settings, for example the
    # interfaces list from settings are replaced with an interface list from
    # device variables that contains more information
    template_vars = {**settings, **device_variables, **template_secrets}

    with open('/etc/cnaas-nms/repository.yml', 'r') as db_file:
        repo_config = yaml.safe_load(db_file)
        local_repo_path = repo_config['templates_local']

    mapfile = os.path.join(local_repo_path, platform, 'mapping.yml')
    if not os.path.isfile(mapfile):
        raise RepoStructureException(
            "File {} not found in template repo".format(mapfile))
    with open(mapfile, 'r') as f:
        mapping = yaml.safe_load(f)
        template = mapping[devtype.name]['entrypoint']

    logger.debug("Generate config for host: {}".format(task.host.name))
    r = task.run(task=text.template_file,
                 name="Generate device config",
                 template=template,
                 path=f"{local_repo_path}/{task.host.platform}",
                 **template_vars)

    # TODO: Handle template not found, variables not defined
    # jinja2.exceptions.UndefinedError

    task.host["config"] = r.result
    task.host["template_vars"] = template_vars

    if generate_only:
        task.host["change_score"] = 0
    else:
        logger.debug("Synchronize device config for host: {} ({}:{})".format(
            task.host.name, task.host.hostname, task.host.port))

        task.host.open_connection("napalm", configuration=task.nornir.config)
        task.run(task=networking.napalm_configure,
                 name="Sync device config",
                 replace=True,
                 configuration=task.host["config"],
                 dry_run=dry_run)
        task.host.close_connection("napalm")

        if task.results[1].diff:
            config = task.results[1].host["config"]
            diff = task.results[1].diff
            task.host["change_score"] = calculate_score(config, diff)
        else:
            task.host["change_score"] = 0
    if job_id:
        with redis_session() as db:
            db.lpush('finished_devices_' + str(job_id), task.host.name)