def __init__(self, mib_list, src_dir_list, http_sources=None):
    """Compile the given MIB modules to JSON with pysmi.

    Sets up a MibCompiler that reads source MIBs from ``src_dir_list``
    (and optionally from ``http_sources``) and streams each compiled
    JSON document through ``self.callback_func``.

    :param mib_list: names of the MIB modules to compile
    :param src_dir_list: local directories to scan for source MIBs
    :param http_sources: optional (host, port, path)-style tuples for
        fetching source MIBs over HTTP
    """
    # Debug level comes from the environment; an absent variable means "".
    self.dbg = os.environ.get('BFT_DEBUG', "")

    if "yy" in self.dbg:
        # "yy" turns on pysmi's own logging: VERY verbose, but essential
        # for spotting possible ASN.1 errors.
        from pysmi import debug
        debug.setLogger(debug.Debug('reader', 'compiler'))

    # Compiler pipeline: SMI parser -> JSON code generator -> callback writer.
    compiler = MibCompiler(SmiStarParser(), JsonCodeGen(),
                           CallbackWriter(self.callback_func))

    # Local directories searched for source MIBs.
    compiler.addSources(*(FileReader(directory) for directory in src_dir_list))

    if http_sources:
        # Also search for source MIBs at web sites.
        compiler.addSources(*(HttpReader(*site) for site in http_sources))

    # Never recompile MIBs with MACROs (pysmi's stub base MIBs).
    compiler.addSearchers(StubSearcher(*JsonCodeGen.baseMibs))

    # Recursive MIB compilation: pulls in dependencies automatically.
    compiler.compile(*mib_list)
def __init__(self, mib_list, src_dir_list, http_sources=None):
    """Compile the given MIB modules to JSON and validate the result.

    Same pipeline as the plain compiler setup (file/HTTP sources, JSON
    code generation through ``self.callback_func``), but additionally
    inspects the compiler's result dictionary and fails loudly when
    compilation did not fully succeed.

    :param mib_list: names of the MIB modules to compile
    :param src_dir_list: local directories to scan for source MIBs
    :param http_sources: optional (host, port, path)-style tuples for
        fetching source MIBs over HTTP
    :raises Exception: if compilation returned nothing or left any
        module 'unprocessed'
    """
    if "BFT_DEBUG" in os.environ:
        self.dbg = os.environ.get('BFT_DEBUG')
    else:
        self.dbg = ""

    if "yy" in self.dbg:
        # VERY verbose, but essential for spotting
        # possible ASN.1 errors
        from pysmi import debug
        debug.setLogger(debug.Debug('reader', 'compiler'))

    # Initialize compiler infrastructure
    mibCompiler = MibCompiler(SmiStarParser(), JsonCodeGen(),
                              CallbackWriter(self.callback_func))

    # search for source MIBs here
    mibCompiler.addSources(*[FileReader(x) for x in src_dir_list])

    if http_sources:
        # search for source MIBs at Web sites
        mibCompiler.addSources(*[HttpReader(*x) for x in http_sources])

    # never recompile MIBs with MACROs
    mibCompiler.addSearchers(StubSearcher(*JsonCodeGen.baseMibs))

    # run recursive MIB compilation
    mib_dict = mibCompiler.compile(*mib_list)

    err = False
    if not mib_dict:
        # Covers both None and {}; MibCompiler.compile returns a mapping of
        # module name -> status on success.
        print(
            "ERROR: failed on mib compilation (mibCompiler.compile returned an empty dictionary)"
        )
        err = True
    else:
        # BUG FIX: the original iterated mib_dict unconditionally, so a None
        # result crashed with an AttributeError before the intended Exception
        # below could be raised.  It also used Python 2's iteritems().
        for key, value in mib_dict.items():
            if value == 'unprocessed':
                print("ERROR: failed on mib compilation: " + key + ": " + value)
                err = True

    if err:
        raise Exception("SnmpMibs failed to initialize.")
    elif 'BFT_DEBUG' in os.environ:
        print('# %d MIB modules compiled' % len(mib_dict))
# NOTE(review): this is a fragment of a getopt-style option-processing loop —
# the enclosing `for opt in opts:` header (and the branch guarding the version
# banner below) lie outside this view, so the flat layout here is a
# reconstruction; confirm nesting against the full file.

# Version/help banner: report library and interpreter versions, then exit OK.
sys.stderr.write("""\
SNMP SMI/MIB library version %s, written by Ilya Etingof <*****@*****.**>
Python interpreter: %s
Software documentation and support at http://snmplabs.com/pysmi
%s
""" % (__version__, sys.version, helpMessage))
sys.exit(EX_OK)

# Simple boolean / value-carrying flags, each matched against the option name.
if opt[0] == '--quiet':
    quietFlag = True

if opt[0] == '--verbose':
    verboseFlag = True

if opt[0] == '--debug':
    # Comma-separated list of pysmi debug categories, e.g. "reader,compiler".
    debug.setLogger(debug.Debug(*opt[1].split(',')))

if opt[0] == '--mib-source':
    # May be given multiple times; each adds another MIB search location.
    mibSources.append(opt[1])

if opt[0] == '--cache-directory':
    cacheDirectory = opt[1]

if opt[0] == '--ignore-errors':
    ignoreErrorsFlag = True

# Fall back to the conventional local MIB directory plus the public
# snmplabs mirror when no --mib-source was supplied.
if not mibSources:
    mibSources = [
        'file:///usr/share/snmp/mibs',
        'http://mibs.snmplabs.com/asn1/@mib@'
    ]
def generate_profile_from_mibs(ctx, mib_files, filters, aliases, debug,
                               interactive, source, compiled_mibs_path):
    """ Generate an SNMP profile from MIBs.

    Accepts a directory path containing mib files
    to be used as source to generate the profile, along with a filter if a device or
    family of devices support only a subset of oids from a mib.

    filters is the path to a yaml file containing a collection of MIBs, with their list of
    MIB node names to be included. For example:
    ```yaml
    RFC1213-MIB:
    - system
    - interfaces
    - ip
    CISCO-SYSLOG-MIB: []
    SNMP-FRAMEWORK-MIB:
    - snmpEngine
    ```
    Note that each `MIB:node_name` correspond to exactly one and only one OID. However, some MIBs report legacy
    nodes that are overwritten.

    To resolve, edit the MIB by removing legacy values manually before loading them with this profile generator. If a
    MIB is fully supported, it can be omitted from the filter as MIBs not found in a filter will be fully loaded.
    If a MIB is *not* fully supported, it can be listed with an empty node list, as `CISCO-SYSLOG-MIB` in the example.

    `-a, --aliases` is an option to provide the path to a YAML file containing a list of aliases to be
    used as metric tags for tables, in the following format:
    ```yaml
    aliases:
    - from:
        MIB: ENTITY-MIB
        name: entPhysicalIndex
      to:
        MIB: ENTITY-MIB
        name: entPhysicalName
    ```
    MIBs tables most of the time define a column OID within the table, or from a different table and even different
    MIB, which value can be used to index entries. This is the `INDEX` field in row nodes.
    As an example, entPhysicalContainsTable in ENTITY-MIB
    ```txt
    entPhysicalContainsEntry OBJECT-TYPE
    SYNTAX      EntPhysicalContainsEntry
    MAX-ACCESS  not-accessible
    STATUS      current
    DESCRIPTION
            "A single container/'containee' relationship."
    INDEX       { entPhysicalIndex, entPhysicalChildIndex }
    ::= { entPhysicalContainsTable 1 }
    ```
    or its json dump, where `INDEX` is replaced by indices
    ```json
    "entPhysicalContainsEntry": {
        "name": "entPhysicalContainsEntry",
        "oid": "1.3.6.1.2.1.47.1.3.3.1",
        "nodetype": "row",
        "class": "objecttype",
        "maxaccess": "not-accessible",
        "indices": [
          {
            "module": "ENTITY-MIB",
            "object": "entPhysicalIndex",
            "implied": 0
          },
          {
            "module": "ENTITY-MIB",
            "object": "entPhysicalChildIndex",
            "implied": 0
          }
        ],
        "status": "current",
        "description": "A single container/'containee' relationship."
    },
    ```
    Sometimes indexes are columns from another table, and we might want to use another column as it could have more
    human readable information - we might prefer to see the interface name vs its numerical table index. This can be
    achieved using metric_tag_aliases

    Return a list of SNMP metrics and copy its yaml dump to the clipboard
    Metric tags need to be added manually
    """
    if debug:
        set_debug()
        # Switch on pysmi's full internal logging as well.
        from pysmi import debug
        debug.setLogger(debug.Debug('all'))

    # ensure at least one mib file is provided
    if len(mib_files) == 0:
        abort(
            '🙄 no mib file provided, need at least one mib file to generate a profile'
        )

    # create a list of all mib files directories and mib names
    # (sets deduplicate repeated directories / module names)
    source_directories = set()
    mibs = set()
    for file in mib_files:
        source_directories.add(os.path.dirname(file))
        mibs.add(os.path.splitext(os.path.basename(file))[0])

    # create a tmp dir for compiled json mibs
    json_destination_directory = os.path.join(gettempdir(), 'mibs')

    if not os.path.exists(json_destination_directory):
        os.mkdir(json_destination_directory)

    profile_oid_collection = {}
    # build profile: dispatch each extracted OID node to the handler for
    # its node type (table / row / column / scalar)
    for oid_node in _extract_oids_from_mibs(list(mibs),
                                            list(source_directories),
                                            json_destination_directory,
                                            source, compiled_mibs_path,
                                            filters):
        if oid_node.node_type == 'table':
            _add_profile_table_node(profile_oid_collection, oid_node)
        elif oid_node.node_type == 'row':
            # requires
            # NOTE(review): comment truncated in the original — presumably
            # "requires extra context to resolve index aliases"; confirm.
            _add_profile_row_node(
                profile_oid_collection,
                oid_node,
                os.path.dirname(mib_files[0]),
                metric_tag_aliases_path=aliases,
                json_mib_directory=json_destination_directory,
                source=source,
                compiled_mibs_path=compiled_mibs_path,
            )
        elif oid_node.node_type == 'column':
            _add_profile_column_node(profile_oid_collection, oid_node)
        elif oid_node.node_type == 'scalar':
            _add_profile_scalar_node(profile_oid_collection, oid_node)

    echo_info('{} metrics found'.format(len(profile_oid_collection.values())))
    yaml_data = yaml.dump({'metrics': list(profile_oid_collection.values())},
                          sort_keys=False)

    # In interactive mode ask before writing; otherwise always save.
    if not interactive or click.confirm('Save to file?'):
        output_filename = 'metrics.yaml'
        with open(output_filename, 'w') as f:
            f.write(yaml_data)
        echo_info('Metrics saved to {}'.format(output_filename))

    echo_debug(
        yaml.dump({'metrics': list(profile_oid_collection.values())},
                  sort_keys=False))
from pysnmp.entity import engine, config
from pysnmp import debug
from pysnmp.entity.rfc3413 import cmdrsp, context, ntforg
from pysnmp.carrier.asynsock.dgram import udp
from pysnmp.smi import builder
from pysmi import debug as pysmi_debug
import asyncio
import time
from pysnmp.hlapi.asyncio import *
import threading
import collections

# can be useful (enables full pysnmp debug logging)
# debug.setLogger(debug.Debug('all'))

# Log only pysmi's MIB-compilation phase.
pysmi_debug.setLogger(pysmi_debug.Debug('compiler'))

# (mibName, objectType, valueFunc): describes one MIB object this agent
# serves; valueFunc is called to produce the current value.
MibObject = collections.namedtuple('MibObject',
                                   ['mibName', 'objectType', 'valueFunc'])


class Mib(object):
    """Stores the data we want to serve.

    NOTE(review): additional methods of this class may exist beyond this
    view of the file.
    """

    def __init__(self):
        # Reentrant lock guarding the mutable counters below.
        self._lock = threading.RLock()
        # Simple counter exposed by the test agent.
        self._test_count = 0

    def getTestDescription(self):
        # Fixed description string served for the test object.
        return "My Description"
def generate_traps_db(mib_sources, output_dir, output_file, output_format,
                      no_descr, debug, mib_files):
    """Generate yaml or json formatted documents containing various information about traps.

    These files can be used by the Datadog Agent to enrich trap data.
    This command is intended for "Network Devices Monitoring" users who need to enrich
    traps that are not automatically supported by Datadog.

    The expected workflow is as such:\n
    1- Identify a type of device that is sending traps that Datadog does not already recognize.\n
    2- Fetch all the MIBs that Datadog does not support.\n
    3- Run `ddev meta snmp generate-traps-db -o ./output_dir/ /path/to/my/mib1 /path/to/my/mib2`\n

    You'll need to install pysmi manually beforehand.
    """
    # pysmi is an optional dependency: import lazily so the rest of the CLI
    # works without it.
    from pysmi.codegen import JsonCodeGen
    from pysmi.compiler import MibCompiler
    from pysmi.parser import SmiV1CompatParser
    from pysmi.reader import getReadersFromUrls
    from pysmi.searcher import AnyFileSearcher
    from pysmi.writer import FileWriter

    if debug:
        set_debug()
        from pysmi import debug
        debug.setLogger(debug.Debug('all'))

    # Defaulting to github.com/DataDog/mibs.snmplabs.com/
    mib_sources = [mib_sources] if mib_sources else [MIB_SOURCE_URL]

    # Validate that the requested output file extension matches the format.
    if output_file:
        allowed_extensions = ALLOWED_EXTENSIONS_BY_FORMAT[output_format]
        if not any(output_file.endswith(x) for x in allowed_extensions):
            abort("Output file {} does not end with an allowed extension '{}'".
                  format(output_file, ", ".join(allowed_extensions)))

    # Exactly one of --output-dir / --output-file must be set.
    if output_dir and output_file:
        abort(
            "Do not set both --output-dir and --output-file at the same time.")
    elif not output_file and not output_dir:
        abort("Need to set one of --output-dir or --output-file")

    # Compile MIBs to JSON in a temp dir, then build the trap DB from them.
    with TempDir('ddev_mibs') as compiled_mibs_sources:
        compiled_mibs_sources = os.path.abspath(compiled_mibs_sources)
        echo_info("Writing intermediate compiled MIBs to {}".format(
            compiled_mibs_sources))

        mibs_sources_dir = os.path.join(compiled_mibs_sources, 'mibs_sources')
        if not os.path.isdir(mibs_sources_dir):
            os.mkdir(mibs_sources_dir)

        # Directories of any mib_files given as paths become extra sources;
        # mib_files themselves are reduced to bare module names.
        mib_sources = (sorted(
            set([
                os.path.abspath(os.path.dirname(x)) for x in mib_files
                if os.path.sep in x
            ])) + mib_sources)

        mib_files = [os.path.basename(x) for x in mib_files]

        searchers = [
            AnyFileSearcher(compiled_mibs_sources).setOptions(exts=['.json'])
        ]
        code_generator = JsonCodeGen()
        file_writer = FileWriter(compiled_mibs_sources).setOptions(
            suffix='.json')
        mib_compiler = MibCompiler(SmiV1CompatParser(tempdir=''),
                                   code_generator, file_writer)
        mib_compiler.addSources(
            *getReadersFromUrls(*mib_sources, **dict(fuzzyMatching=True)))
        mib_compiler.addSearchers(*searchers)

        compiled_mibs, compiled_dependencies_mibs = compile_and_report_status(
            mib_files, mib_compiler)

        # Move all the parent MIBs that had to be compiled but were not requested in the command to a subfolder.
        for mib_file_name in compiled_dependencies_mibs:
            os.replace(
                os.path.join(compiled_mibs_sources, mib_file_name + '.json'),
                os.path.join(mibs_sources_dir, mib_file_name + '.json'),
            )

        # Only generate trap_db with `mib_files` unless explicitly asked. Used to ignore other files that may be
        # present "compiled_mibs_sources"
        compiled_mibs = [
            os.path.join(compiled_mibs_sources, x + '.json')
            for x in compiled_mibs
        ]

        # Generate the trap database based on the compiled MIBs.
        trap_db_per_mib = generate_trap_db(compiled_mibs, mibs_sources_dir,
                                           no_descr)

        use_json = output_format == "json"
        if output_file:
            # Compact representation, only one file
            write_compact_trap_db(trap_db_per_mib, output_file,
                                  use_json=use_json)
            echo_success("Wrote trap data to {}".format(
                os.path.abspath(output_file)))
        else:
            # Expanded representation, one file per MIB.
            write_trap_db_per_mib(trap_db_per_mib, output_dir,
                                  use_json=use_json)
            echo_success("Wrote trap data to {}".format(
                os.path.abspath(output_dir)))