Example #1
    def __init__(self):
        self.data_filename_cls = DataFileNames()
        self.dir_cls = DirectoryValues()
        self.process_dir = self.dir_cls.processed_dir()
        self.reports_dir = self.dir_cls.reports_dir()
        self.ipam_filename = self.data_filename_cls.processed_filename()
        self.ipam_to_ipr_xlsx = os.path.join(self.process_dir,
                                             self.ipam_filename)
        self.date = self._get_file_date(self.ipam_to_ipr_xlsx)
Example #2
    def __init__(self):
        self.dir_cls = DirectoryValues()
        self.ipam_filenames_cls = DataFileNames()
        self.reader_cls = Reader()

        # Load Networks Pickled Data
        self.networks = self.reader_cls.read_from_pkl(
            self.dir_cls.raw_dir(),
            self.ipam_filenames_cls.networks_filename())
        self.networkcontainers = self.reader_cls.read_from_pkl(
            self.dir_cls.raw_dir(),
            self.ipam_filenames_cls.networkcontainers_filename())
        self.all_nets = self.networks + self.networkcontainers
Example #3
    def __init__(self):
        self._log_cls = LoggingValues()
        logging.basicConfig(filename=self._log_cls.log_filename(),
                            level=logging.DEBUG,
                            filemode='a',
                            format=self._log_cls.log_format())
        self._logger = logging.getLogger(__name__)
        self.dir_cls = DirectoryValues()
        self.write_cls = Writer()
        self.reader_cls = Reader()
        self.call_types_cls = IpamCallTypes()
        self.filenames_cls = DataFileNames()
        self.ext_call_setup_cls = IpamApiRequest()
        self._network_data = []
        self.dl_lock = threading.Lock()
        max_concurrent_dl = 8
        self.dl_sem = threading.Semaphore(max_concurrent_dl)
Example #4
    def __init__(self):
        self.env_cls = EnvironmentValues()
        self.dir_cls = DirectoryValues()
        self.filenames_cls = DataFileNames()
        self.write_cls = Writer()
        self.reader_cls = Reader()
        self.client = paramiko.SSHClient()
        # AutoAddPolicy automatically trusts and records unknown host keys.
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
Example #5
class IpamDataProcessed:
    def __init__(self):
        self.dir_cls = DirectoryValues()
        self.filename_cls = DataFileNames()
        self.reader_cls = Reader()

    def run_ipam_processed(self, interim_data):
        """Run's through all the methods with the ipam_dump_interim_xlsx file.

        """
        output_data = self.ea_datacenter_processing(
            self.ea_ipr_designation_processing(interim_data))
        output_data.to_excel(os.path.join(
            self.dir_cls.processed_dir(),
            self.filename_cls.ipam_dump_processed_xlsx()))
        output_data.to_pickle(os.path.join(
            self.dir_cls.processed_dir(),
            self.filename_cls.ipam_dump_processed_pickle()))

    def load_pickle_file(self, directory, filename):
        return self.reader_cls.read_from_pkl(directory, filename)

    @staticmethod
    def get_listed_values(data):
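        """Maps row index -> '; '-joined string for list-valued cells
        in data[0]."""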
        ea_updater = {}
        for k, v in data[0].items():
            if isinstance(v, list):
                ea_updater[k] = '; '.join(str(e) for e in v)
        return ea_updater

    @staticmethod
    def put_listed_values(updater_data, source_data, ea):
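        """Writes the joined strings back into column `ea` of source_data."""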
        for k, v in updater_data.items():
            source_data.at[k, ea] = v
        return source_data

    def ea_datacenter_processing(self, data):
        ea_value = 'extattrs_Datacenter_value'
        dc_data = [data[ea_value].to_dict()]
        updater_dcdata = self.get_listed_values(dc_data)
        return self.put_listed_values(updater_dcdata, data, ea_value)

    def ea_ipr_designation_processing(self, data):
        ea_value = 'extattrs_IPR Designation_value'
        dc_data = [data[ea_value].to_dict()]
        updater_dcdata = self.get_listed_values(dc_data)
        return self.put_listed_values(updater_dcdata, data, ea_value)
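
A minimal standalone sketch of what get_listed_values and put_listed_values
accomplish together, using a throwaway DataFrame (the column name mirrors
ea_datacenter_processing above; the row values are invented):

import pandas as pd

ea = 'extattrs_Datacenter_value'
frame = pd.DataFrame({ea: [['DC1', 'DC2'], 'DC3']})

# get_listed_values: collect list-valued cells, joined with '; '.
updater = {k: '; '.join(str(e) for e in v)
           for k, v in frame[ea].to_dict().items()
           if isinstance(v, list)}

# put_listed_values: write the joined strings back into the column.
for k, v in updater.items():
    frame.at[k, ea] = v

print(frame[ea].tolist())  # ['DC1; DC2', 'DC3']
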
Example #6
class TestProjDataFileNames(unittest.TestCase):
    def setUp(self):
        """Testing IpamCallFilenames()"""
        self.ipam_data_filenames_cls = DataFileNames()

    def test_create_DataFileNames_instance(self):
        self.ipam_data_filenames_cls = DataFileNames()

    def test_type_extensible_attributes_filename(self):
        self.assertIsInstance(
            self.ipam_data_filenames_cls.extensible_attributes_filename(), str)

    def test_eq_extensible_attributes_filename(self):
        self.assertEqual(
            'extensible_attributes.pkl',
            self.ipam_data_filenames_cls.extensible_attributes_filename())

    def test_type_extensible_attributes_list_values_filename(self):
        self.assertIsInstance(
            self.ipam_data_filenames_cls.
            extensible_attributes_list_values_filename(), str)

    def test_eq_extensible_attributes_list_values_filename(self):
        self.assertEqual(
            'extensible_attributes_list_values.pkl',
            self.ipam_data_filenames_cls.
            extensible_attributes_list_values_filename())

    def test_type_network_views_filename(self):
        self.assertIsInstance(
            self.ipam_data_filenames_cls.network_views_filename(), str)

    def test_eq_network_views_filename(self):
        self.assertEqual('network_views.pkl',
                         self.ipam_data_filenames_cls.network_views_filename())

    def test_type_networks_filename(self):
        self.assertIsInstance(self.ipam_data_filenames_cls.networks_filename(),
                              str)

    def test_eq_networks_filename(self):
        self.assertEqual('networks.pkl',
                         self.ipam_data_filenames_cls.networks_filename())

    def test_type_networkcontainers_filename(self):
        self.assertIsInstance(
            self.ipam_data_filenames_cls.networkcontainers_filename(), str)

    def test_eq_networkcontainers_filename(self):
        self.assertEqual(
            'networkcontainers.pkl',
            self.ipam_data_filenames_cls.networkcontainers_filename())
Example #7
    def __init__(self):
        self._log_cls = LoggingValues()
        logging.basicConfig(filename=self._log_cls.log_filename(),
                            level=logging.DEBUG,
                            filemode='a',
                            format=self._log_cls.log_format())
        self._logger = logging.getLogger(__name__)
        self.dir_cls = DirectoryValues()
        self.filename_cls = DataFileNames()
        self.env_cls = EnvironmentValues()
        self.reader_cls = Reader()
        self.writer_cls = Writer()
Example #8
class IpamReports:
    """Takes the processed files and build the reports for IPR."""
    def __init__(self):
        self.data_filename_cls = DataFileNames()
        self.dir_cls = DirectoryValues()
        self.process_dir = self.dir_cls.processed_dir()
        self.reports_dir = self.dir_cls.reports_dir()
        self.ipam_filename = self.data_filename_cls.processed_filename()
        self.ipam_to_ipr_xlsx = os.path.join(self.process_dir,
                                             self.ipam_filename)
        self.date = self._get_file_date(self.ipam_to_ipr_xlsx)

    @staticmethod
    def _get_file_date(file):
        """Returns creation date of file."""
        date_str = time.ctime(os.path.getmtime(file))
        datetime_object = datetime.strptime(date_str, '%a %b %d %H:%M:%S %Y')
        return datetime_object.strftime('%Y%m%d')

    @staticmethod
    def _copy_data_over(source, template, final, sheet_name):
        """
        Reference to the site that was used for the below for loop:

        URL:
        https://stackoverflow.com/questions/44593705/how-to-copy-over-an-excel-
        sheet-to-another-workbook-in-python
        """
        source_wb = openpyxl.load_workbook(filename=source)
        source_ws = source_wb[sheet_name]
        try:
            template_wb = openpyxl.load_workbook(filename=template)
        except FileNotFoundError:
            print('Template not found: {}'.format(template))
            return
        template_ws = template_wb.worksheets[1]
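        # Copy every cell value from the source sheet onto the same
        # coordinates of the template's second worksheet.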
        for row in source_ws:
            for cell in row:
                template_ws[cell.coordinate].value = cell.value
        max_row = template_ws.max_row
        for row in template_ws.iter_rows(min_row=2,
                                         max_row=max_row,
                                         min_col=24,
                                         max_col=25):
            for cell in row:
                cell.alignment = Alignment(horizontal='left')
        template_wb.save(final)

    @staticmethod
    def _create_new_ipam_file_name_with_date_added(date, file_name):
        """Will work on xlsx extension files only."""
        date += '.xlsx'
        return file_name[:-5] + '-' + date

    @staticmethod
    def _create_new_percent_file_name_with_date_added(date, file_name):
        """Will work on xlsx extension files only."""
        date += '.xlsx'
        return file_name[:-10] + date

    @staticmethod
    def _create_ipam_report(processed_file, report_file):
        shutil.copy(processed_file, report_file)

    def generate_ipam_to_ipr_report(self):
        """Generates IPAM-to-IPR-(date).xlsx report for IPR."""
        ipam_report_with_date_filename = \
            self._create_new_ipam_file_name_with_date_added(
                self.date, self.ipam_filename)
        reports_ipam_to_ipr_xlsx = os.path.join(
            self.reports_dir, ipam_report_with_date_filename)
        self._create_ipam_report(self.ipam_to_ipr_xlsx,
                                 reports_ipam_to_ipr_xlsx)

    def generate_percent_report(self):
        """Generates IPR_Percent report xlsx file for IPR."""
        percent_blank_xlsx = os.path.join(
            self.process_dir, self.data_filename_cls.percent_blank_filename())
        percent_report_with_date_filename = \
            self._create_new_percent_file_name_with_date_added(
                self.date,
                self.data_filename_cls.percent_blank_filename()
            )
        percent_report_with_date_filename_and_path = os.path.join(
            self.reports_dir, percent_report_with_date_filename)
        self._copy_data_over(self.ipam_to_ipr_xlsx, percent_blank_xlsx,
                             percent_report_with_date_filename_and_path,
                             'Summary')

    def generate_forecast_percent_report(self):
        """Generates IPR_Forecast_Percent report xlsx file for IPR."""
        percent_blank_forecast_filename = \
            self.data_filename_cls.percent_blank_forecast_filename()
        percent_blank_xlsx = os.path.join(self.process_dir,
                                          percent_blank_forecast_filename)
        percent_forecast_report_with_date_filename = \
            self._create_new_percent_file_name_with_date_added(
                self.date, percent_blank_forecast_filename)
        percent_forecast_report_with_date_filename_and_path = os.path.join(
            self.reports_dir, percent_forecast_report_with_date_filename)
        self._copy_data_over(
            self.ipam_to_ipr_xlsx, percent_blank_xlsx,
            percent_forecast_report_with_date_filename_and_path,
            'Summary_Forecast')
Example #9
    def test_create_DataFileNames_instance(self):
        self.ipam_data_filenames_cls = DataFileNames()
Example #10
    def setUp(self):
        """Testing DataFileNames()."""
        self.ipam_data_filenames_cls = DataFileNames()
Example #11
    def __init__(self):
        self.dir_cls = DirectoryValues()
        self.filename_cls = DataFileNames()
        self.reader_cls = Reader()
Example #12
class IpamGetsToWrite:
    """Class containing methods for making DDI calls."""
    # pylint: disable = R0902
    # 11/7 (too-many-instance-attributes) Known and accepted as-is.
    def __init__(self):
        self._log_cls = LoggingValues()
        logging.basicConfig(filename=self._log_cls.log_filename(),
                            level=logging.DEBUG,
                            filemode='a',
                            format=self._log_cls.log_format())
        self._logger = logging.getLogger(__name__)
        self.dir_cls = DirectoryValues()
        self.write_cls = Writer()
        self.reader_cls = Reader()
        self.call_types_cls = IpamCallTypes()
        self.filenames_cls = DataFileNames()
        self.ext_call_setup_cls = IpamApiRequest()
        self._network_data = []
        self.dl_lock = threading.Lock()
        max_concurrent_dl = 8
        self.dl_sem = threading.Semaphore(max_concurrent_dl)

    def get_extensible_attributes(self):
        """Requests the extensible attributes defined within DDI."""
        self._logger.info('Pulling current Extensible Attribute data.')

        _ext_attr_data = json.loads(
            self.ext_call_setup_cls.ipam_api_request(
                self.call_types_cls.extensible_attributes()).text)

        self.write_cls.write_to_pkl(self.dir_cls.raw_dir(),
                                    self.filenames_cls.
                                    extensible_attributes_filename(),
                                    _ext_attr_data)
        self._logger.info('Ext Attr data written to .pkl file in Raw Dir.')

    def get_extensible_attributes_list_values(self):
        """
        Requests the extensible attributes listed values defined within DDI.
        """
        self._logger.info('Pulling current Extensible Attribute list values.')
        _ext_attr_list_data = json.loads(
            self.ext_call_setup_cls.ipam_api_request(
                self.call_types_cls.extensible_attributes_list_values()).
            text)
        self.write_cls.write_to_pkl(
            self.dir_cls.raw_dir(),
            self.filenames_cls.extensible_attributes_list_values_filename(),
            _ext_attr_list_data)
        self._logger.info('Ext Attr list data written to .pkl in Raw Dir.')

    def get_network_views(self):
        """Requests a the network_view data from DDI."""
        self._logger.info('Pulling current Network View Data.')
        _network_view_data = json.loads(
            self.ext_call_setup_cls.ipam_api_request(
                self.call_types_cls.network_views()).text)

        self.write_cls.write_to_pkl(self.dir_cls.raw_dir(),
                                    self.filenames_cls.
                                    network_views_filename(),
                                    _network_view_data)
        self._logger.info('Network View data written to .pkl file in Raw Dir.')

    def _get_ipam_networks(self, call):
        """Multi Threading portion of the requests."""
        with self.dl_sem:
            networks = json.loads(
                self.ext_call_setup_cls.ipam_api_request(call).text)
            with self.dl_lock:
                self._network_data += networks

    def get_networks(self):
        """Requests the networks defined within DDI by view."""
        self._logger.info('Pulling IPAM Networks.')
        self._network_data = []
        network_views = self.return_network_views()
        start = time.perf_counter()
        threads = []
        for _ref in network_views:
            network_call = self.call_types_cls.networks(_ref['name'])
            _t = threading.Thread(target=self._get_ipam_networks,
                                  args=(network_call,))
            _t.start()
            threads.append(_t)

        for _t in threads:
            _t.join()
        end = time.perf_counter()

        self._logger.info("Downloaded %s Networks in %2f seconds",
                          len(self._network_data), end - start)

        self.write_cls.write_to_pkl(self.dir_cls.raw_dir(),
                                    self.filenames_cls.
                                    networks_filename(),
                                    self._network_data)
        self._logger.info('Networks written to .pkl file in Raw Dir.')

    def get_networkcontainers(self):
        """Requests the networkcontainers defined within DDI by view."""
        self._logger.info('Pulling IPAM Networkcontainers.')
        self._network_data = []
        network_views = self.return_network_views()
        start = time.perf_counter()
        threads = []
        for _ref in network_views:
            network_call = self.call_types_cls.networkcontainers(_ref['name'])
            _t = threading.Thread(target=self._get_ipam_networks,
                                  args=(network_call,))
            _t.start()
            threads.append(_t)

        for _t in threads:
            _t.join()
        end = time.perf_counter()

        self._logger.info("Downloaded %s Networks in %2f seconds",
                          len(self._network_data), end - start)

        self.write_cls.write_to_pkl(self.dir_cls.raw_dir(),
                                    self.filenames_cls.
                                    networkcontainers_filename(),
                                    self._network_data)
        self._logger.info('Networkcontainers written to .pkl in Raw Dir.')

    def return_network_views(self):
        """Reads in the network views."""
        return self.reader_cls.read_from_pkl(self.dir_cls.raw_dir(),
                                             self.filenames_cls.
                                             network_views_filename())
Example #13
class IpamDataInterim:
    """Class to read in networks and networkcontainers to then be smashed,
    mangled, and spit out into a .pickle and .xlsx file

    """
    def __init__(self):
        self.dir_cls = DirectoryValues()
        self.ipam_filenames_cls = DataFileNames()
        self.reader_cls = Reader()

        # Load Networks Pickled Data
        self.networks = self.reader_cls.read_from_pkl(
            self.dir_cls.raw_dir(),
            self.ipam_filenames_cls.networks_filename())
        self.networkcontainers = self.reader_cls.read_from_pkl(
            self.dir_cls.raw_dir(),
            self.ipam_filenames_cls.networkcontainers_filename())
        self.all_nets = self.networks + self.networkcontainers

    def run_ipam_interim(self, xlsx, pickle):
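        """Flattens the loaded network data and writes it to the given
        interim .xlsx and .pickle files."""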
        flattened_dict_data = self.flatten_data(self.all_nets)
        output_data = self.panda_processing_of_flattened_data(
            flattened_dict_data)
        output_data.to_excel(os.path.join(self.dir_cls.interim_dir(), xlsx))
        output_data.to_pickle(os.path.join(self.dir_cls.interim_dir(),
                                           pickle))

    def _convert_flatten(self, data, parent_key='', sep='_'):
        """Method to convert input of nested dict's to a flattened dict

        default seperater '_'

        """
        items = []
        for k, v in data.items():
            new_key = parent_key + sep + k if parent_key else k

            if isinstance(v, MutableMapping):
                items.extend(
                    self._convert_flatten(v, new_key, sep=sep).items())
            else:
                items.append((new_key, v))
        return dict(items)

    def flatten_data(self, data_to_be_flattened):
        """Method to flatten the requested data."""
        return [self._convert_flatten(record)
                for record in data_to_be_flattened]

    def panda_processing_of_flattened_data(self, all_nets):
        """Turns the returned flat dict into a panda dataframe.

        Further processing:

        1. Add 'net_type' column.
        2. Add oct1, oct2, oct3, oct4, and /Cidr columns.
        3. Add oct1, oct2, oct3, oct4, and /Cidr columns.
        4. Sorting Data via line 2 values.
        5. Return Indexed data starting at 10000
        """

        net_flat_df = pd.DataFrame.from_dict(self.flatten_data(all_nets))

        # Further processing Line 1.
        net_flat_df['net_type'] = net_flat_df['_ref'].str.split('/',
                                                                expand=True)[0]

        # Further processing Line 2.
        oct_list = ['oct1', 'oct2', 'oct3', 'oct4', '/Cidr']
        network_col = net_flat_df['network'].str.split(".", expand=True)
        net_flat_df[oct_list[0]] = network_col[0].astype(str).astype(int)
        net_flat_df[oct_list[1]] = network_col[1].astype(str).astype(int)
        net_flat_df[oct_list[2]] = network_col[2].astype(str).astype(int)
        split_third_value = network_col[3].str.split("/", expand=True)
        net_flat_df[oct_list[3]] = split_third_value[0].astype(str).astype(int)
        net_flat_df[oct_list[4]] = split_third_value[1].astype(str).astype(int)

        # Further processing Line 3.
        network_col = net_flat_df['network'].str.split("/", expand=True)
        net_flat_df['IP Subnet'] = network_col[0].astype(str)
        net_flat_df['IP Cidr'] = network_col[1].astype(str).astype(int)

        # Further processing Line 4.
        net_flat_df = net_flat_df.sort_values(oct_list)

        # Further processing Line 5.
        net_flat_df.index = range(len(net_flat_df.index))
        net_flat_df.index += 10000
        return net_flat_df
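
A short self-contained sketch of the octet split (Line 2) and numeric sort
(Line 4) above, on invented networks:

import pandas as pd

df = pd.DataFrame({'network': ['10.2.0.0/16', '10.1.4.0/24']})

# Split 'a.b.c.d/cidr' into four integer octets plus the CIDR length.
octets = df['network'].str.split('.', expand=True)
last = octets[3].str.split('/', expand=True)
df['oct1'] = octets[0].astype(int)
df['oct2'] = octets[1].astype(int)
df['oct3'] = octets[2].astype(int)
df['oct4'] = last[0].astype(int)
df['/Cidr'] = last[1].astype(int)

# Numeric sort avoids the lexicographic trap ('10.10.x' sorting
# before '10.2.x' as strings).
df = df.sort_values(['oct1', 'oct2', 'oct3', 'oct4', '/Cidr'])
print(df['network'].tolist())  # ['10.1.4.0/24', '10.2.0.0/16']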