class TestDeviceInterface(unittest.TestCase):
    def setUp(self):
        logging.basicConfig(
            filename='test/unittest.log',
            format="%(asctime)s - %(levelname)s - %(name)s:%(funcName)s - %(message)s",
            level=logging.DEBUG,
            filemode='w')
        self.device = Device(
            'localhost',
            Config(filename='test/configuration_examples/simple.json'))
        self.device.get_interfaces()
        for if_index, di in self.device.interfaces.iteritems():
            self.di = di
            break

    def test_add_ip_address(self):
        self.di.add_ip_address('192.0.2.1')
        self.di.add_ip_address('192.0.2.1')

    def test_get_ptr_for_ip(self):
        self.di.device.config.terse = True
        self.di.get_ptr_for_ip('192.0.2.1')
        self.di.device.config.terse = False
        self.di.get_ptr_for_ip('192.0.2.1')

    def test_get_ptrs(self):
        self.di.add_ip_address('192.0.2.x')
        self.di.get_ptrs()

    def test_ignored_interface_ptrs(self):
        self.di.ignored = True
        self.assertTrue(self.di.add_ip_address('192.0.2.1'))
        self.assertTrue(
            self.di.update_ptr_status('192.0.2.1', 'ptr-test.domain.example',
                                      Ptr.STATUS_UNKNOWN))
        self.di.check_ptr()
        self.assertEquals(Ptr.STATUS_IGNORED,
                          self.di.get_ptrs()['192.0.2.1'].status)

    def test_ignored_ip_address_ptr(self):
        self.device.config = Config(
            filename='test/configuration_examples/configuration.json')
        self.di.add_ip_address('192.0.2.22')
        self.assertTrue(self.device.config.is_ip_ignored('192.0.2.22'))
        self.di.check_ptr()

    def test_update_ptr_status(self):
        self.di.update_ptr_status('192.0.2.1', 'ptr-test.domain.example',
                                  Ptr.STATUS_NOT_CREATED)
        self.assertTrue(self.di.add_ip_address('192.0.2.1'))
        self.di.update_ptr_status('192.0.2.1', 'ptr-test.domain.example',
                                  Ptr.STATUS_NOT_CREATED)

    def test_long_if_name(self):
        self.di.if_name = "Ethernet0/0/0"
        self.di._make_ptr()
        self.assertEquals('localhost-et0-0-0', self.di.ptr)
        self.di.if_name = "GigabitEthernet"
        self.di._make_ptr()
def load(self):
    """
    Load the list of devices from each connector.

    Since the devices dict is keyed by hostname, there are no duplicates.
    :return:
    """
    # Temporary list
    device_list = []
    self.logger.info("Dispatch load command to all (%d) connectors" % len(self.__connectors))
    # Concatenate the device list from each connector onto the temporary list
    for connector in self.__connectors:
        device_list += filter(lambda x: len(x) > 0, connector.load_devices())
    # Populate the devices dict from the temporary list
    for device in device_list:
        hostname = self.dns.get_fqdn(device)
        if hostname:
            if hostname not in self.devices:
                self.devices[hostname] = Device(hostname, self.config, self.dns)
        else:
            self.logger.warning("Hostname '%s' couldn't be resolved. Skipping..." % device)
    self.logger.info("Loaded %d device(s) from %d connectors" % (len(device_list), len(self.__connectors)))
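# The load() method above only assumes that each connector exposes a
# load_devices() method returning an iterable of hostname strings (empty
# entries are filtered out). A minimal sketch of such a connector follows;
# StaticConnector is a hypothetical name, and how connectors end up on
# self.__connectors is not shown in this snippet.
class StaticConnector(object):
    """Hypothetical connector serving a fixed list of device names."""

    def __init__(self, hostnames):
        self._hostnames = hostnames

    def load_devices(self):
        # Must return an iterable of (possibly empty) hostname strings
        return list(self._hostnames)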
def post(self):
    global pis
    data = request.json
    devices = []
    for device in data:
        mac = device[0]
        dbm = 0 if device[1] == '' else int(device[1])
        devices.append(Device(mac, dbm))
    # Match the reporting Pi by its source IP address
    ip = request.remote_addr
    for pi in pis:
        if pi.id == ip:
            pi.devices = devices
            break
    return 'Grax x tu apoyo prro'  # Spanish slang: "Thanks for your support"
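# The handler above expects a JSON list of [mac, dbm] pairs, where dbm may be
# an empty string (coerced to 0). A sketch of a client call follows; the host,
# port, and endpoint path are assumptions, not taken from the snippet.
import requests

payload = [
    ["aa:bb:cc:dd:ee:ff", "-70"],
    ["11:22:33:44:55:66", ""],   # empty signal strength becomes 0 dBm
]
requests.post("http://server.example:5000/devices", json=payload)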
check_only = args.check
diff_only = args.diff
terse = args.terse

config = Config(check_only=check_only, diff_only=diff_only, terse=terse)

dispatcher = Dispatcher(config)
print "Loaded connectors: %s" % ', '.join(dispatcher.get_connector_list())

dns = DnsCheck(config=config)
output = TabularUtf8Output()

fqdn = dns.get_fqdn(hostname)
if fqdn:
    d = Device(hostname=fqdn, config=config)
    if d.get_interfaces():
        d.check_ptrs()
    print output.display_device_detailed(d)

    # Keep only PTRs whose status is STATUS_NOT_UPDATED or STATUS_NOT_CREATED
    ptrs_for_update = {
        k: v
        for k, v in d.get_ptrs().iteritems()
        if v.status in (DnsCheck.STATUS_NOT_UPDATED, DnsCheck.STATUS_NOT_CREATED)
    }
    #print ptrs_for_update
    dispatcher.save_ptrs(ptrs_for_update)

    email = EmailReport(config=config,
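# The fragment above reads args.check, args.diff, and args.terse from an
# argparse namespace and uses a bare `hostname`. A minimal parser sketch
# follows; only the attribute names come from the fragment, while the option
# spellings, help text, and the positional `hostname` argument are assumptions.
import argparse

parser = argparse.ArgumentParser(description="Check and update PTR records")
parser.add_argument('--check', action='store_true', help="check only, do not save changes")
parser.add_argument('--diff', action='store_true', help="show differences only")
parser.add_argument('--terse', action='store_true', help="terse output")
parser.add_argument('hostname', help="device hostname to check")
args = parser.parse_args()
hostname = args.hostname  # the bare `hostname` used in the fragment above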
from classes import Arduino, Raspberry, Device
from example_tuple import uim

raspberry = Raspberry()
arduino = Arduino()  # instantiate the class rather than binding the class object
device = Device(625000, uim)

device.start_press()
device.increase_pressure(5)
device.decrease_pressure(5)
device.stop_press()
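# The snippet above exercises a press-control interface on Device. As a point
# of reference only, a hypothetical stand-in with the same surface is sketched
# below; the attribute names and internal state are assumptions and do not
# reflect the real classes.Device implementation.
class PressDevice(object):
    """Hypothetical stand-in mirroring the interface used above."""

    def __init__(self, target_pressure, uim):
        self.target_pressure = target_pressure
        self.uim = uim
        self.pressure = 0
        self.pressing = False

    def start_press(self):
        self.pressing = True

    def increase_pressure(self, step):
        self.pressure += step

    def decrease_pressure(self, step):
        self.pressure = max(0, self.pressure - step)

    def stop_press(self):
        self.pressing = False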
def find_duplicates_descriptions():
    important_column = "String"
    output_file = output_folder + duplicates_output_file
    with open(output_file, 'w') as file:
        file.write("<h1>Descriptions</h1>")

    # XB3
    device_xb3 = Device("XB3")
    xb3 = device_xb3.get_conf_error_df()
    xb3["normalized"] = normalize_column(xb3[important_column])
    xb3_dups = xb3[xb3["normalized"].duplicated(keep=False)]
    xb3_dups.sort_values(by=["normalized"], inplace=True)
    xb3_dups.reset_index(drop=True, inplace=True)
    xb3_dups_str = xb3_dups.to_html(index=False)
    string_to_write = f"<h2>Duplicated in {device_xb3.get_device_type()}</h2>{xb3_dups_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info(
        f"Wrote {device_xb3.get_device_type()} duplicates - Description")

    # XB6
    device_xb6 = Device("XB6")
    xb6 = device_xb6.get_conf_error_df()
    xb6["normalized"] = normalize_column(xb6[important_column])
    xb6_dups = xb6[xb6["normalized"].duplicated(keep=False)]
    xb6_dups.sort_values(by=["normalized"], inplace=True)
    xb6_dups.reset_index(drop=True, inplace=True)
    xb6_dups_str = xb6_dups.to_html(index=False)
    string_to_write = f"<h2>Duplicated in {device_xb6.get_device_type()}</h2>{xb6_dups_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info(
        f"Wrote {device_xb6.get_device_type()} duplicates - Description")

    # Duplicates across both device types
    xb3["Table"] = device_xb3.get_device_type()
    xb6["Table"] = device_xb6.get_device_type()
    df_combine = xb3.append(xb6)
    dups_combine = df_combine[df_combine["normalized"].duplicated(keep=False)]
    dups_combine.drop_duplicates(subset=["Splunk search Parameter"],
                                 keep=False,
                                 inplace=True)
    dups_combine.sort_values(by=["normalized", "Table"], inplace=True)
    dups_combine.reset_index(drop=True, inplace=True)
    dups_combine_str = dups_combine.to_html(index=False)
    string_to_write = f"<h2>Duplicates between XB3 & XB6</h2>{dups_combine_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info("Wrote XB3 & XB6 duplicates - Description")

    important_column = "String"
    output_file = output_folder + duplicates_output_file
    with open(output_file, 'w') as file:
        file.write("<h1>Descriptions</h1>")

    # XB3
    xb3 = pd.read_csv("static/ErrorMarkers/xb3.csv",
                      sep=",").drop(columns=["File"])
    xb3["normalized"] = normalize_column(xb3[important_column])
    xb3_dups = xb3[xb3["normalized"].duplicated(keep=False)]
    xb3_dups.sort_values(by=["normalized"], inplace=True)
    xb3_dups.reset_index(drop=True, inplace=True)
    xb3_dups_str = xb3_dups.to_html(index=False)
    string_to_write = f"<h2>Duplicate Descriptions in XB3</h2>{xb3_dups_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)

    # XB6
    xb6 = pd.read_csv("static/ErrorMarkers/xb6.csv",
                      sep=",").drop(columns=["File"])
    xb6["normalized"] = normalize_column(xb6[important_column])
    xb6_dups = xb6[xb6["normalized"].duplicated(keep=False)]
    xb6_dups.sort_values(by=["normalized"], inplace=True)
    xb6_dups.reset_index(drop=True, inplace=True)
    xb6_dups_str = xb6_dups.to_html(index=False)
    string_to_write = f"<h2>Duplicate Descriptions in XB6</h2>{xb6_dups_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)

    xb3["Table"] = "XB3"
    xb6["Table"] = "XB6"
    df_combine = xb3.append(xb6)
    dups_combine = df_combine[df_combine["normalized"].duplicated(keep=False)]
    dups_combine.drop_duplicates(subset=["Splunk search Parameter"],
                                 keep=False,
                                 inplace=True)
    dups_combine.sort_values(by=["normalized", "Table"], inplace=True)
    dups_combine.reset_index(drop=True, inplace=True)
    dups_combine_str = dups_combine.to_html(index=False)
    string_to_write = f"<h2>Duplicate Descriptions between XB3 & XB6</h2>{dups_combine_str}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
def find_duplicates_markers():
    important_column = "Splunk search Parameter"
    output_file = f"{output_folder}/{markers_output_file}"
    with open(output_file, 'w') as file:
        file.write("<h1>Markers</h1>")

    # XB3
    device_xb3 = Device("XB3")
    xb3 = device_xb3.get_conf_error_df()
    xb3["Table"] = device_xb3.get_device_type()
    xb3["lowercase"] = xb3[important_column].str.lower()
    dups_xb3 = xb3[xb3["lowercase"].duplicated(keep=False)].sort_values(
        by=["lowercase"], axis=0).drop(columns=["lowercase", "File"])
    dups_xb3_string = dups_xb3[[important_column,
                                "String"]].to_html(index=False)
    string_to_write = f"<h2>{device_xb3.get_device_type()}</h2>{dups_xb3_string}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info(
        f"Wrote {device_xb3.get_device_type()} duplicates - Markers")

    # XB6
    device_xb6 = Device("XB6")
    xb6 = device_xb6.get_conf_error_df()
    xb6["Table"] = device_xb6.get_device_type()
    xb6["lowercase"] = xb6[important_column].str.lower()
    dups_xb6 = xb6[xb6["lowercase"].duplicated(keep=False)].sort_values(
        by=["lowercase"], axis=0).drop(columns=["lowercase", "File"])
    dups_xb6_string = dups_xb6[[important_column,
                                "String"]].to_html(index=False)
    string_to_write = f"<h2>{device_xb6.get_device_type()}</h2>{dups_xb6_string}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info(
        f"Wrote {device_xb6.get_device_type()} duplicates - Markers")

    # Overlap between the two device types
    xb3_unique = xb3.drop_duplicates(subset=[important_column], keep='first')
    xb6_unique = xb6.drop_duplicates(subset=[important_column], keep='first')
    df_combine = xb3_unique.append(xb6_unique)[[
        important_column, "String", "Table"
    ]]
    df_combine["lowercase"] = df_combine[important_column].str.lower()
    dups_combine = df_combine[df_combine["lowercase"].duplicated(
        keep=False)].sort_values(by=["lowercase", "Table"])
    dups_combine.drop_duplicates(subset=["lowercase", "Table"], inplace=True)
    dups_combine.reset_index(drop=True, inplace=True)
    dups_combine.drop(columns=["lowercase"], inplace=True)
    s_xb3 = dups_combine[dups_combine["Table"] == "XB3"].reset_index(drop=True)
    s_xb6 = dups_combine[dups_combine["Table"] == "XB6"].reset_index(drop=True)
    # Label each column after the table its markers came from
    df = pd.DataFrame({
        "XB3": s_xb3[important_column],
        "XB6": s_xb6[important_column]
    })
    dups_combine_string = df.to_html(index=False)
    string_to_write = f"<h2>Duplicates between XB3 & XB6</h2>{dups_combine_string}<br>"
    with open(output_file, "a") as file:
        file.write(string_to_write)
    logging.info("Wrote XB3 & XB6 duplicates - Markers")