def __init__(self, orch_config, structural_config_file, behavioral_config_file,
             orchestration_manager=None, sequester_segment=None):
    self._static_devices = DevicesState()
    self._dynamic_devices = DevicesState()
    self._device_behaviors = {}
    self._testing_device_vlans = {}
    self._acl_configs = {}
    self._vlan_states = {}
    self._segments_to_vlans = {}
    self._structural_faucet_config = None
    self._behavioral_faucet_config = None
    self._behavioral_include = None
    self._next_cookie = None
    self._config = orch_config
    self._structural_config_file = structural_config_file
    self._behavioral_config_file = behavioral_config_file
    self._forch_config_dir = os.path.dirname(self._structural_config_file)
    self._faucet_config_dir = os.path.dirname(self._behavioral_config_file)
    self._all_testing_vlans = None
    self._available_testing_vlans = None
    self._watched_include_files = []
    self._orchestration_manager = orchestration_manager
    self._sequester_segment = sequester_segment
    self._lock = threading.RLock()
    self._logger = get_logger('ftizer')
    self._validate_and_initialize_config()
def __init__(self, config, cleanup_handler, active_state_handler, metrics):
    self._state = {'processes': {}, 'vrrp': {}}
    self._process_state = self._state['processes']
    self._process_state['connections'] = {}
    self._vrrp_state = self._state['vrrp']
    self._last_error = {}
    self._current_time = None
    self._conn_state = None
    self._conn_state_count = 0
    self._metrics = metrics
    self._lock = threading.Lock()
    self._target_procs = config.processes
    self._check_vrrp = config.check_vrrp
    self._keepalived_pid_file = os.getenv('KEEPALIVED_PID_FILE', _KEEPALIVED_PID_FILE_DEFAULT)
    self._connections = config.connections
    self._process_interval = config.scan_interval_sec or 60
    self._cleanup_handler = cleanup_handler
    self._active_state_handler = active_state_handler
    self._logger = get_logger('lstate')
    self._logger.info('Scanning %s processes every %ds',
                      len(self._target_procs), self._process_interval)
def __init__(self, proxy_config, content_type=None):
    self._proxy_config = proxy_config
    self._proxy_port = self._proxy_config.proxy_port or DEFAULT_PROXY_PORT
    self._pages = {}
    self._proxy_server = None
    self._content_type = content_type
    self._logger = get_logger('proxy')
def __init__(self, on_receiving_result):
    super().__init__()
    self._on_receiving_result = on_receiving_result
    self._logger = get_logger('drserver')
    self._port_device_mapping = {}
    self._port_events_listeners = {}
    self._mac_assignments = {}
    self._lock = threading.Lock()
def __init__(self, source_ip, source_port, server_ip,  # pylint: disable=too-many-arguments
             server_port):
    self.socket = None
    self.source_ip = source_ip
    self.source_port = source_port
    self.server_ip = server_ip
    self.server_port = server_port
    self.lock = RLock()
    self._logger = get_logger('rsocket')
def __init__(self, port, config=None, content_type=None):
    self._config = config
    self._paths = {}
    self._server = None
    self._root_path = config.http_root if config and config.http_root else 'public'
    self._port = port
    self._host = '0.0.0.0'
    self._thread = None
    self.content_type = content_type
    self._logger = get_logger('httpserv')
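# Usage sketch for the HTTP server above, mirroring how run_forchestrator (further below)
# wires it up: handlers are callables mapped to URL path prefixes via map_request(), and
# start_server()/stop_server() control the serving thread. The port, handler signature,
# and return value here are illustrative assumptions, not taken from the forch sources.
server = HttpServer(9019)
server.map_request('system_state', lambda *args: 'healthy')
server.start_server()
# ... serve requests, then shut down:
server.stop_server()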
def __init__(self, server_address=DEFAULT_BIND_ADDRESS, server_port=DEFAULT_SERVER_PORT):
    self._logger = get_logger('endserver')
    self._server = grpc.server(futures.ThreadPoolExecutor())
    self._servicer = EndpointServicer()
    server_grpc.add_EndpointServerServicer_to_server(self._servicer, self._server)
    self._address = f'{server_address}:{server_port}'
    self._logger.info('Listening on %s', self._address)
    self._server.add_insecure_port(self._address)
    self._server.start()
def __init__(self, mac, initial_state, state_callbacks: StateCallbacks, state_overwrites=None):
    self._mac = mac
    self._current_state = initial_state
    self._state_callbacks = state_callbacks
    self._logger = get_logger('portsm')
    if state_overwrites:
        self._transitions = self._resolve_transitions(state_overwrites)
    self._handle_current_state()
def __init__(self, config):
    self.config = config
    self.sock = None
    self.buffer = None
    self._buffer_lock = threading.Lock()
    self._handlers = {}
    self.previous_state = None
    self._port_debounce_sec = config.port_debounce_sec or self._PORT_DEBOUNCE_SEC
    self._port_timers = {}
    self.event_socket_connected = False
    self._last_event_id = None
    self._logger = get_logger('fevent')
def __init__(self, config):
    self._cpn_state = {}
    self._node_states = {}
    self._hosts_ip = {}
    self._min_consecutive_healthy = (config.min_consecutive_ping_healthy or
                                     DEFAULT_MIN_CONSECUTIVE_PING_HEALTHY)
    self._min_consecutive_down = (config.min_consecutive_ping_down or
                                  DEFAULT_MIN_CONSECUTIVE_PING_DOWN)
    self.ping_interval = config.ping_interval or DEFAULT_PING_INTERVAL
    self._lock = threading.Lock()
    self._ping_manager = None
    self._logger = get_logger('cstate')
def __init__(self, mac, initial_state, unauthenticated_state_callback, sequester_state_callback,
             operational_state_callback, infracted_state_callback):
    self._mac = mac
    self._current_state = initial_state
    self._unauthenticated_state_callback = unauthenticated_state_callback
    self._sequester_state_callback = sequester_state_callback
    self._operational_state_callback = operational_state_callback
    self._infracted_state_callback = infracted_state_callback
    self._logger = get_logger('portsm')
    self._handle_current_state()
def __init__(self, socket_info, radius_secret, auth_callback):
    self.next_radius_id = 0
    self._packet_id_to_mac = {}
    self.auth_callback = auth_callback
    self.packet_id_to_req_authenticator = {}
    self.running = True
    # TODO: Find better way to handle secret
    self.radius_secret = radius_secret
    self.radius_socket = RadiusSocket(socket_info.source_ip, socket_info.source_port,
                                      socket_info.server_ip, socket_info.server_port)
    self._logger = get_logger('rquery')
    self.radius_socket.setup()
def __init__(self, target_ip, structural_config_file):
    self._logger = get_logger('endpproxy')
    self._lock = threading.RLock()
    self._mac_tap_port = {}
    self._freed_tap_ports = set()
    self._next_tap_port = BASE_T1SW_PORT
    self._structural_config_file = structural_config_file
    server_port = DEFAULT_SERVER_PORT
    address = f'{target_ip}:{server_port}'
    self._logger.info('Proxy requests to %s', address)
    channel = grpc.insecure_channel(address)
    self._stub = server_grpc.EndpointServerStub(channel)
    grpc.channel_ready_future(channel).result(timeout=CONNECT_TIMEOUT_SEC)
def load_config():
    """Load configuration from the configuration file"""
    logger = get_logger(_LOGGER_NAME)
    config_root = os.getenv('FORCH_CONFIG_DIR', '.')
    config_file = os.getenv('FORCH_CONFIG_FILE', _DEFAULT_FORCH_CONFIG)
    config_path = os.path.join(config_root, config_file)
    logger.info('Reading config file %s', os.path.abspath(config_path))
    try:
        return yaml_proto(config_path, ForchConfig)
    except Exception as e:
        logger.error('Cannot load config: %s', e)
        return None
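# Usage sketch for load_config(): the config location is resolved from the
# FORCH_CONFIG_DIR and FORCH_CONFIG_FILE environment variables and parsed into a
# ForchConfig proto. The directory and file name below are placeholder assumptions.
import os

os.environ['FORCH_CONFIG_DIR'] = '/etc/forch'
os.environ['FORCH_CONFIG_FILE'] = 'forch.yaml'
config = load_config()  # returns a ForchConfig proto, or None if parsing fails
if config:
    print(config)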
def __init__(self, config):
    self._config = config
    self._structural_config_file = None
    self._behavioral_config_file = None
    self._forch_config_dir = None
    self._faucet_config_dir = None
    self._gauge_config_file = None
    self._segments_vlans_file = None
    self._faucet_events = None
    self._start_time = datetime.fromtimestamp(time.time()).isoformat()
    self._faucet_prom_endpoint = None
    self._gauge_prom_endpoint = None
    self._behavioral_config = None
    self._faucet_collector = None
    self._local_collector = None
    self._cpn_collector = None
    self._varz_collector = None
    self._faucetizer = None
    self._authenticator = None
    self._faucetize_scheduler = None
    self._config_file_watcher = None
    self._faucet_state_scheduler = None
    self._gauge_metrics_scheduler = None
    self._device_report_handler = None
    self._port_state_manager = None
    self._initialized = False
    self._active_state = State.initializing
    self._active_state_lock = threading.Lock()
    self._should_enable_faucetizer = False
    self._should_ignore_static_behavior = False
    self._should_ignore_auth_result = False
    self._forch_config_errors = {}
    self._system_errors = {}
    self._faucet_config_summary = SystemState.FaucetConfigSummary()
    self._metrics = None
    self._varz_proxy = None
    self._last_faucet_config_writing_time = None
    self._last_received_faucet_config_hash = None
    self._config_hash_verification_timeout_sec = (
        self._config.event_client.config_hash_verification_timeout_sec or
        _DEFAULT_CONFIG_HASH_VERIFICATION_TIMEOUT_SEC)
    self._states_lock = threading.Lock()
    self._timer_lock = threading.Lock()
    self._logger = get_logger('forch')
def __init__(self, device_state_manager=None, varz_updater=None, testing_segment=None):
    self._state_machines = {}
    self._static_port_behaviors = {}
    self._static_device_behaviors = {}
    self._dynamic_device_behaviors = {}
    self._device_state_manager = device_state_manager
    self._varz_updater = varz_updater
    self._placement_to_mac = {}
    self._testing_segment = testing_segment
    self._lock = threading.RLock()
    self._logger = get_logger('portmgr')
def __init__(self, result_handler, server_address, server_port, unauth_vlan):
    self._logger = get_logger('devreport')
    self._logger.info('Initializing with unauthenticated vlan %s', unauth_vlan)
    address = server_address or DEFAULT_SERVER_ADDRESS
    port = server_port or DEFAULT_SERVER_PORT
    target = f'{address}:{port}'
    self._logger.info('Using target server %s', target)
    channel = grpc.insecure_channel(target)
    self._stub = SessionServerStub(channel)
    self._dp_mac_map = {}
    self._mac_sessions = {}
    self._mac_device_vlan_map = {}
    self._mac_assigned_vlan_map = {}
    self._unauth_vlan = unauth_vlan
    self._lock = threading.Lock()
    self._result_handler = result_handler
def run_forchestrator():
    """main function to start forch"""
    logger = get_logger(_LOGGER_NAME)
    logger.info('Starting Forchestrator')
    config = load_config()
    if not config:
        logger.error('Invalid config, exiting.')
        sys.exit(1)
    forchestrator = Forchestrator(config)
    http_server = forch.http_server.HttpServer(forchestrator.get_local_port(), config.http)
    try:
        forchestrator.initialize()
        http_server.map_request('system_state', forchestrator.get_system_state)
        http_server.map_request('dataplane_state', forchestrator.get_dataplane_state)
        http_server.map_request('switch_state', forchestrator.get_switch_state)
        http_server.map_request('cpn_state', forchestrator.get_cpn_state)
        http_server.map_request('process_state', forchestrator.get_process_state)
        http_server.map_request('host_path', forchestrator.get_host_path)
        http_server.map_request('list_hosts', forchestrator.get_list_hosts)
        http_server.map_request('vrrp_state', forchestrator.get_vrrp_state)
        http_server.map_request('sys_config', forchestrator.get_sys_config)
        http_server.map_request('', http_server.static_file(''))
    except Exception as e:
        logger.error("Cannot initialize forch: %s", e, exc_info=True)
        http_server.map_request('', functools.partial(show_error, e))
    finally:
        http_server.start_server()
        forchestrator.update_initialization_varz()
    if not forchestrator.main_loop():
        try:
            http_server.join_thread()
        except KeyboardInterrupt:
            logger.info('Keyboard interrupt. Exiting.')
    logger.warning('Exiting program')
    http_server.stop_server()
    forchestrator.stop()
def __init__(self, auth_config, auth_callback=None, radius_query_object=None, metrics=None):
    self.radius_query = None
    self.sessions = {}
    self._sessions_lock = threading.Lock()
    self.auth_callback = auth_callback
    self._metrics = metrics
    self._logger = get_logger('auth')
    radius_info = auth_config.radius_info
    radius_ip = radius_info.server_ip
    radius_port = radius_info.server_port
    source_port = radius_info.source_port
    if radius_info.radius_secret_helper:
        secret = os.popen(radius_info.radius_secret_helper).read().strip()
    else:
        secret = None
    if not (radius_ip and radius_port and secret):
        self._logger.warning(
            'Invalid radius_info in config. Radius IP: %s; Radius port: %s Secret present: %s',
            radius_ip, radius_port, bool(secret))
        raise ConfigError
    Socket = collections.namedtuple(
        'Socket', 'source_ip, source_port, server_ip, server_port')
    socket_info = Socket('0.0.0.0', source_port, radius_ip, radius_port)
    if radius_query_object:
        self.radius_query = radius_query_object
    else:
        self.radius_query = radius_query.RadiusQuery(
            socket_info, secret, self.process_radius_result)
    threading.Thread(target=self.radius_query.receive_radius_messages, daemon=True).start()
    interval = auth_config.heartbeat_sec or HEARTBEAT_INTERVAL_SEC
    self.auth_config = auth_config
    self.timer = HeartbeatScheduler(interval)
    self.timer.add_callback(self.handle_sm_timeout)
    self.timer.start()
    self._logger.info(
        'Created Authenticator module with radius IP %s and port %s.', radius_ip, radius_port)
def __init__(self, device_state_manager=None, varz_updater=None, device_state_reporter=None,
             orch_config=None):
    self._state_machines = {}
    self._auto_sequester = {}
    self._static_device_behaviors = {}
    self._dynamic_device_behaviors = {}
    self._device_state_manager = device_state_manager
    self._varz_updater = varz_updater
    self._device_state_reporter = device_state_reporter
    self._placement_to_mac = {}
    self._sequester_timer = {}
    self._scheduled_sequester_timer = {}
    self._lock = threading.RLock()
    self._logger = get_logger('portmgr')
    self._state_callbacks = self._build_state_callbacks()
    self._state_overwrites = {}
    self._orch_config = orch_config
    if orch_config and orch_config.HasField('sequester_config'):
        sequester_config = orch_config.sequester_config
        self._sequester_segment = sequester_config.sequester_segment
        self._sequester_timeout = sequester_config.sequester_timeout_sec
        self._logger.info(
            'Configuring sequestering with segment %s, timeout %ss',
            self._sequester_segment, self._sequester_timeout)
        if sequester_config.test_result_device_states:
            dict_maps = [
                (entry.test_result, entry.device_state)
                for entry in sequester_config.test_result_device_states
            ]
            test_result_device_states_map = dict(dict_maps)  # pylint: disable=no-member
            self._state_overwrites = {
                DVAState.State.sequestered: {
                    TestResult.ResultCode: test_result_device_states_map
                }
            }
        if sequester_config.auto_sequestering:
            self._default_auto_sequestering = sequester_config.auto_sequestering
def __init__(self, result_handler, target, unauth_vlan, tunnel_ip, endpoint_handler=None):
    self._logger = get_logger('devreport')
    self._logger.info('Initializing with unauthenticated vlan %s', unauth_vlan)
    self._logger.info('Using target %s, proto %s', target, bool(PORT_BEHAVIOR_SESSION_RESULT))
    self._channel = grpc.insecure_channel(target)
    self._stub = None
    self._dp_mac_map = {}
    self._mac_sessions = {}
    self._mac_device_vlan_map = {}
    self._mac_assigned_vlan_map = {}
    self._unauth_vlan = unauth_vlan
    self._lock = threading.Lock()
    self._result_handler = result_handler
    self._tunnel_ip = tunnel_ip
    self._endpoint_handler = endpoint_handler
def __init__(self, src_mac, port_id, auth_config, radius_query_callback, auth_callback,
             metrics=None):
    self.src_mac = src_mac
    self.port_id = port_id
    self._auth_callback = auth_callback
    self._radius_query_callback = radius_query_callback
    self._current_state = None
    self._radius_retries = 0
    self._current_timeout = 0
    self._max_radius_retries = auth_config.max_radius_retries or self.MAX_RADIUS_RETRIES
    self._query_timeout_sec = auth_config.query_timeout_sec or self.QUERY_TIMEOUT_SEC
    self._rej_timeout_sec = auth_config.reject_timeout_sec or self.REJECT_TIMEOUT_SEC
    self._auth_timeout_sec = auth_config.auth_timeout_sec or self.AUTH_TIMEOUT_SEC
    self._metrics = metrics
    self._transition_lock = Lock()
    self._logger = get_logger('mabsm')
    self._reset_state_machine()
"""Manages ping task""" import asyncio from asyncio.subprocess import PIPE from collections import namedtuple import threading from forch.utils import get_logger PingResult = namedtuple('PingResult', ['host_name', 'proc_code', 'stdout', 'stderr']) LOGGER = get_logger('ping') class PingManager: """Manages a thread that periodically pings the hosts""" def __init__(self, hosts: dict, interval: int = 60, count: int = 1): self._hosts = hosts self._count = count self._timeout = self._count self._interval = interval self._loop = asyncio.new_event_loop() asyncio.get_child_watcher().attach_loop(self._loop) async def _ping_host(self, host_name, host_ip): """Ping a single host""" cmd = f"ping -c {self._count} -w {self._timeout} {host_ip}" proc = await asyncio.create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
def __init__(self):
    self._logger = get_logger('vstate')
                        help='structural faucet config input')
    parser.add_argument('-f', '--forch-config', type=str, default='forch.yaml',
                        help='forch config file')
    parser.add_argument('-o', '--output', type=str, default='faucet.yaml',
                        help='behavioral faucet config output')
    return parser.parse_args(raw_args)


if __name__ == '__main__':
    LOGGER = get_logger('faucetizer', stdout=True)
    FORCH_BASE_DIR = os.getenv('FORCH_CONFIG_DIR')
    FAUCET_BASE_DIR = os.getenv('FAUCET_CONFIG_DIR')
    ARGS = parse_args(sys.argv[1:])
    FORCH_CONFIG_FILE = os.path.join(FORCH_BASE_DIR, ARGS.forch_config)
    ORCH_CONFIG = load_orch_config(FORCH_CONFIG_FILE)
    STRUCTURAL_CONFIG_FILE = os.path.join(FORCH_BASE_DIR, ARGS.config_input)
    BEHAVIORAL_CONFIG_FILE = os.path.join(FAUCET_BASE_DIR, ARGS.output)
    SEGMENTS_VLANS_FILE = os.path.join(FORCH_BASE_DIR, ARGS.segments_vlans)
    FAUCETIZER = Faucetizer(ORCH_CONFIG, STRUCTURAL_CONFIG_FILE, BEHAVIORAL_CONFIG_FILE)
    FAUCETIZER.reload_structural_config()
    FAUCETIZER.reload_segments_to_vlans(SEGMENTS_VLANS_FILE)
import argparse
import copy
import os
import sys
import threading

import yaml

from forch.utils import get_logger, yaml_proto
from forch.proto.devices_state_pb2 import DevicesState, SegmentsToVlans
from forch.proto.devices_state_pb2 import DevicePlacement, DeviceBehavior
from forch.proto.forch_configuration_pb2 import ForchConfig
from forch.proto.shared_constants_pb2 import DVAState, PortType

LOGGER = get_logger('faucetizer')

INCLUDE_FILE_SUFFIX = '_augmented'
SEQUESTER_PORT_DESCRIPTION_DEFAULT = 'TESTING'
DEVICE_BEHAVIOR = 'device_behavior'
DEVICE_TYPE = 'device_type'
STATIC_DEVICE = 'static'
DYNAMIC_DEVICE = 'dynamic'


class Faucetizer:
    """Collect Faucet information and generate ACLs"""

    # pylint: disable=too-many-arguments
    def __init__(self, orch_config,
"""Module for managing orchestrator device topologies""" import os import sys from forch.utils import get_logger, yaml_proto from forch.proto.building_schema_pb2 import BuildingSchema def load_devices(): """Load a device specification file""" base_dir_name = os.getenv('FORCH_CONFIG_DIR') building_schema_file_name = os.path.join(base_dir_name, 'building_schema.yaml') LOGGER.info('Loading device spec file %s', building_schema_file_name) building_schema = yaml_proto(building_schema_file_name, BuildingSchema) loaded_macs = list(building_schema.mac_addrs.keys()) loaded_macs.sort() LOGGER.info('Loaded device spec for devices: %s', loaded_macs) sys.stdout.write(str(loaded_macs) + '\n') if __name__ == '__main__': LOGGER = get_logger('topology', stdout=True) load_devices()
"""Module to expose varz interface""" import functools import threading from prometheus_client import Counter, Gauge, Info, generate_latest, REGISTRY from forch.http_server import HttpServer from forch.utils import get_logger LOGGER = get_logger('metrics') DEFAULT_VARZ_PORT = 8302 class ForchMetrics(): """Class that implements the module that exposes varz for metrics""" _reg = REGISTRY def __init__(self, varz_config): self._local_port = varz_config.varz_port or DEFAULT_VARZ_PORT LOGGER.info('forch_metrics port is %s', self._local_port) self._http_server = None self._metrics = {} def start(self): """Start serving varz""" self._add_vars() self._http_server = HttpServer(self._local_port) try: self._http_server.map_request('', self.get_metrics) except Exception as e: self._http_server.map_request(
import os
import collections
import argparse
import threading

import yaml

from forch.heartbeat_scheduler import HeartbeatScheduler
import forch.radius_query as radius_query
from forch.simple_auth_state_machine import AuthStateMachine
from forch.utils import get_logger, proto_dict, dict_proto, ConfigError

from forch.proto.devices_state_pb2 import DevicePlacement
from forch.proto.authentication_pb2 import AuthResult
from forch.proto.forch_configuration_pb2 import OrchestrationConfig

LOGGER = get_logger('auth')

HEARTBEAT_INTERVAL_SEC = 3


class Authenticator:
    """Authenticate devices using MAB/dot1x"""

    def __init__(self, auth_config, auth_callback=None, radius_query_object=None, metrics=None):
        self.radius_query = None
        self.sessions = {}
        self._sessions_lock = threading.Lock()
        self.auth_callback = auth_callback
"""Schedule heart beat with functions that have to be called in order""" import threading from forch.utils import get_logger LOGGER = get_logger('heartbeat') class HeartbeatScheduler: """Heart beat scheduler""" def __init__(self, interval_sec): self._interval_sec = interval_sec self._callbacks = [] self._run = False def add_callback(self, callback): """Add callback""" self._callbacks.append(callback) def _periodic_task(self): if not self._run: return for callback in self._callbacks: try: callback() except Exception as error: LOGGER.error("Error in running %s: %s", callback, error) threading.Timer(self._interval_sec, self._periodic_task).start()