def __init__(self):
        # Logging: DEBUG level, mirrored to the console (stream handler)
        # and to the "CfgGen.log" file.
        self.logMng = LoggingManager.LogHandler("CfgGen")
        self.logMng.logLevel("DEBUG")
        self.logMng.attachedStreamHandler()
        self.logMng.attachedFileHandler("CfgGen.log")
        self.logMng.infoMesg("Initialize AFDXConfigGenerator")

        # Load the AFDX switch configuration file.
        self.configMng = ConfigManager.ConfigHandler()
        self.configMng.loadConfig("afdx_switch.conf")

        # XML parser helper (presumably for the switch XML configs — confirm).
        self.xmlParserMng = XmlParser.XmlParser()

        # Accumulators filled by loadAFDXSwitchInfo() below:
        # switch/port records, smfd config file names, and a path lookup dict.
        self.switchPortInfoList = []
        self.smfdCfgFileList = []
        self.smfdAfdxCfgPathDict = {}

        self.loadAFDXSwitchInfo()
    def synthesize_user_repo_association_test(self, user_orig_data_test,
                                              repo_orig_data_test,
                                              commits_orig_data_test):
        """Synthesize one rating per repository for the test data set.

        For every row of ``self.repo_data_test`` the rating is a linear
        combination of 16 factors: five commit-log sentiment scores
        (length, structural_integrity_score, topic_relevance_score,
        positivity_score, spelling_integrity_score) and eleven repository
        attributes ("is_forked", "cont_count", "days_from_push", "size",
        "watcher_count", "stargazer_count", "has_wiki", "fork_count",
        "open_issues", "sub_count") plus the number of commits.  The
        per-factor weights come from the configuration module ``cfg`` and
        are divided by 100 for normalisation.

        Results are written in place into ``self.user_repo_association_test``
        (columns: 'user_id', 'repo_id', 'rating').

        NOTE(review): the loop iterates ``self.repo_data_test``; the
        ``user_orig_data_test`` and ``repo_orig_data_test`` arguments are
        currently unused — confirm this is intentional.
        """
        print("Synthesizing User Repo Association.")
        for index, row in self.repo_data_test.iterrows():
            try:
                print("Synthesizing info for repo = " + str(
                    row['repo_id']) + " and owner = " + str(row['owner_id']))
                curr_user_id = row['owner_id']
                curr_repo_id = row['repo_id']
                self.user_repo_association_test.set_value(
                    index, 'user_id', curr_user_id)
                self.user_repo_association_test.set_value(
                    index, 'repo_id', curr_repo_id)

                # Collect every commit message for this repository.
                # TODO: assumes the commits are clean for only one user.
                curr_commits = []
                commit_data_for_this_repo = commits_orig_data_test[
                    commits_orig_data_test['repo_id'] == curr_repo_id]
                for index_inner, row_inner in commit_data_for_this_repo.iterrows():
                    curr_commits.append(row_inner['message_text'])

                # Prefer the README, then the description, falling back to
                # the configured default description text.
                if row['readme'] != "":
                    best_description = row['readme']
                elif row['description'] != "":
                    best_description = row['description']
                else:
                    best_description = cfg.default_description

                [length, structural_integrity_score, topic_relevance_score, positivity_score, spelling_integrity_score] \
                    = self.commit_log_analyzer.process_batch_logs(curr_commits, best_description)

                no_of_commits = len(curr_commits)

                # Directly proportional factors: value * weight / 100.
                a1 = length * float(cfg.average_commit_length_weight) / 100
                a2 = structural_integrity_score * float(
                    cfg.structural_integrity_score_weight) / 100
                a3 = topic_relevance_score * float(
                    cfg.topic_relevance_score_weight) / 100
                # BUG FIX: a4 previously reused cfg.topic_relevance_score_weight
                # (copy/paste error); the positivity score must be weighted by
                # its own configuration value.
                a4 = positivity_score * float(
                    cfg.positivity_score_weight) / 100
                a5 = spelling_integrity_score * float(
                    cfg.spelling_integrity_score_weight) / 100
                a6 = no_of_commits * float(cfg.no_of_commits_weight) / 100
                # Inversely proportional factors: weight / (100 * (1 + value)).
                a7 = float(cfg.is_forked_weight) / (
                    100 * (1 + self.__map_bool_to_int(row['is_forked'])))
                a8 = row['cont_count'] * float(cfg.cont_count_weight) / 100
                a9 = float(
                    cfg.days_from_push_weight) / (100 *
                                                  (1 + row['days_from_push']))
                a10 = row['size'] * float(cfg.repo_size_weight) / 100
                a11 = row['watcher_count'] * float(
                    cfg.watcher_count_weight) / 100
                a12 = row['stargazer_count'] * float(
                    cfg.stargazer_count_weight) / 100
                a13 = self.__map_bool_to_int(row['has_wiki']) * float(
                    cfg.has_wiki_weight) / 100
                a14 = row['fork_count'] * float(cfg.fork_count_weight) / 100
                a15 = row['open_issues'] * float(cfg.open_issues_weight) / 100
                a16 = row['sub_count'] * float(cfg.sub_count_weight) / 100

                cumulative_score = a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10 + a11 + a12 + a13 + a14 + a15 + a16
                # Store the synthesized rating in the third column.
                self.user_repo_association_test.set_value(
                    index, 'rating', cumulative_score)
            except Exception as e:
                # Best-effort: log the failing row and continue with the rest.
                error = "Error in synthesizing association data. The error is = " + str(
                    e) + "Other info :: Row Data = " + str(row)
                print(error)
                log_mgr.add_log_to_file(error)
# Beispiel #3  ("Example #3" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
# Output paths, CSV/report locations and KPI tuning values read from the
# shared configuration (``config_vars`` is defined earlier in this file).
weekview_output_file_path = config_vars.get('weekview_output_path')
csv_default_file_path = config_vars.get('csv_default_file_path')
csv_monthview_file_path = config_vars.get('csv_monthview_file_path')
csv_weekview_file_path = config_vars.get('csv_weekview_file_path')
csv_report_path_default = config_vars.get('csv_report_path_default')
csv_report_path_monthly = config_vars.get('csv_report_path_monthly')
csv_report_path_weekly = config_vars.get('csv_report_path_weekly')
noschedules_data_file_path = config_vars.get('noschedules_data_file')
nopolicy_data_file_path = config_vars.get('nopolicy_data_file')
inactive_policies_data_file_path = config_vars.get(
    'inactive_policies_data_file')
kpi_threshold = config_vars.get('kpi_threshold')
month_view_days_count = config_vars.get('month_view_days_count')
week_view_days_count = config_vars.get('week_view_days_count')
timetorun_file = config_vars.get('timetorun_file')
# Script-wide logger and the timestamp format used by this report.
app_logger = LoggingManager('10.HistoryView2HTML.py')
dformat = '%Y-%m-%d %H:%M:%S'


class HtmlGeneratorHistoryAll:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def unix_time(self, dt):
        """Convert a datetime to a Unix timestamp (whole seconds, local time)."""
        time_tuple = dt.timetuple()
        seconds = time.mktime(time_tuple)
        return int(seconds)

    def get_static_html_part_upper(self, page_title, csv_report_file_name,
                                   html_file_name):

        timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
# Beispiel #4  ("Example #4" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
from GlobalConfig import *

# Configuration for the backup-size report script.
config_vars = GlobalConfig.read_vars('13.BackupSize2HTML')

default_page_title = config_vars.get('default_page_title')
report_size_count = config_vars.get('report_size_count')
report_size_count_csv = config_vars.get('report_size_count_csv')
variance_value = config_vars.get('variance_value')
history_file_path = config_vars.get('history_file_path')
output_html_file_default = config_vars.get('output_html_path_default')
output_csv_file_default = config_vars.get('output_csv_path_default')
output_csv_file_default2 = config_vars.get('output_csv_path_default2')
csv_report_path_default = config_vars.get('csv_report_path_default')
timetorun_file = config_vars.get('timetorun_file')

# Script-wide logger and timestamp format.
app_logger = LoggingManager('13.BackupSize2HTML.py')
dformat = '%Y-%m-%d %H:%M:%S'


class BackupSize2HTMLGenerator:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def get_static_html_upper(self, title, csv_path_download):

        timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
        _timetorun = open(timetorun_file, 'r').readlines()[-1]

        html = """<!DOCTYPE html>"""
        html += """\n<html>\n   <head>\n<meta charset="utf-8" />\n      <title>""" + title + """</title>"""
        html += """\n<link rel="stylesheet" type="text/css" href="css/view.css">\n  </head>"""
__author__ = 'Codengine'

import os.path
from datetime import datetime
import time
from LoggingManager import *
from GlobalConfig import *

# Configuration for the live-view history report script.
config_vars = GlobalConfig.read_vars('08.LiveViewHistory2HTML')

all_seq_file = config_vars.get('all_seq_file')
input_file_name = config_vars.get('input_file_name')
output_directory = config_vars.get('output_directory')
timetorun_file = config_vars.get('timetorun_file')

# Script-wide logger.
app_logger = LoggingManager('08.LiveViewHistory2HTML.py')


class HtmlGeneratorHistory:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def unix_time(self, dt):
        """Convert a datetime to a Unix timestamp (whole seconds, local time)."""
        as_struct = dt.timetuple()
        epoch_seconds = time.mktime(as_struct)
        return int(epoch_seconds)

    def get_static_html_part_upper(self, page_title):

        timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
        _timetorun = open(timetorun_file, 'r').readlines()[-1]

        html = """<!DOCTYPE html>"""
from datetime import datetime
import time
import os

from LoggingManager import *
from GlobalConfig import *

# Configuration for the catalog-history script.
config_vars = GlobalConfig.read_vars('15.CatalogHistory')

dashboard_catalog_live_file_path = config_vars.get(
    'dashboard_catalog_live_file_path')
dashboard_catalog_all_file_path = config_vars.get(
    'dashboard_catalog_all_file_path')
# NOTE(review): variable name says "catalog_tmp" but the config key is
# 'dashboard_tmp_file_path' — confirm the key name is correct.
dashboard_catalog_tmp_file_path = config_vars.get('dashboard_tmp_file_path')

# Script-wide logger and timestamp format.
app_logger = LoggingManager('15.CatalogHistory.py')
dformat = '%Y-%m-%d %H:%M:%S'


class CatalogHistory:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def read_in_chunks(self, file_object, last_line_read=0):
        """Lazy function (generator) to read a file piece by piece.
        Default chunk size: 1k."""
        while True:
            file_object.seek(last_line_read)
            data = file_object.readline()

            last_line_read = last_line_read + len(data)
# Beispiel #7  ("Example #7" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
__author__ = 'Codengine'

import time
from datetime import datetime
import os
from LoggingManager import *
from GlobalConfig import *

# Configuration for the backup-write-time report script.
config_vars = GlobalConfig.read_vars('17.BackupWriteTime2HTML')

default_page_title = config_vars.get('default_page_title')
#dashboard_files_dir = config_vars.get('dashboard_files_dir')
timetorun_file = config_vars.get('timetorun_file')
default_html_output_file_path = config_vars.get('default_html_output_file_path')

# Script-wide logger.
app_logger = LoggingManager('17.BackupWriteTime2HTML.py')

backupwrite_file = config_vars.get('backupwritetime_file')
# Timestamp format used by this report.
dformat = '%Y-%m-%d %H:%M:%S'

class MainIndex:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def get_static_html_page_upper(self,page_title):

        timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
        _timetorun = open(timetorun_file, 'r').readlines()[-1]

        html = """<!DOCTYPE html>\n<html>"""
        html += """\n   <head>"""
# Beispiel #8  ("Example #8" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
import time
from LoggingManager import *
from GlobalConfig import *

# Configuration for the policy-calendar report script.
config_vars = GlobalConfig.read_vars('12.PolicyCalendar')

default_page_title = config_vars.get('default_page_title')
csv_report_file_path = config_vars.get('csv_report_file_path')
policy_calendar_file = config_vars.get('policy_calendar_file')
default_output_directory = config_vars.get('default_html_output_directory')
default_csv_directory = config_vars.get('default_csv_output_directory')
previous_month = config_vars.get('previous_month')
next_month = config_vars.get('next_month')
timetorun_file = config_vars.get('timetorun_file')

# Script-wide logger and timestamp format.
app_logger = LoggingManager('12.PolicyCalendar2HTML.py')
dformat = '%Y-%m-%d %H:%M:%S'


class PolicyCalendarHTMLGenerator:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def get_static_html_part_upper(self, page_title, csv_report_file_name):

        timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
        _timetorun = open(timetorun_file, 'r').readlines()[-1]

        html = """<!DOCTYPE html>"""
        html += """\n<html>\n    <head>\n    <meta charset="utf-8" />\n    <title>""" + page_title + """</title>"""
        html += """\n    <link rel="stylesheet" type="text/css" href="css/view.css">\n    </head>\n    <body>"""
# Beispiel #9  ("Example #9" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
# Billing defaults and input/output file locations (``config_vars`` is
# defined earlier in this file).
additional_backup_price_per_gb_default = config_vars.get('additional_backup_price_per_gb_default')
backup_usage_default = config_vars.get('backup_usage_default')
total_backup_capacity_default = config_vars.get('total_backup_capacity_default')
total_backup_usage_default = config_vars.get('total_backup_usage_default')
total_backup_quota_default = config_vars.get('total_backup_quota_default')
bill_eoros_default = config_vars.get('bill_eoros_default')

backup_contract_file_path = config_vars.get('backup_contract_file_path')
backup_customer_file_path = config_vars.get('backup_customer_file_path')
backup_usage_file_path = config_vars.get('backup_usage_file_path')
all_seq_file_path = config_vars.get('all_seq_file_path')
csv_report_path_default = config_vars.get("csv_report_path_default")
billing_file_path = config_vars.get('billing_file_path')
output_csv_path_default = config_vars.get("output_csv_path_default")
timetorun_file = config_vars.get('timetorun_file')
# Script-wide logger and timestamp format.
app_logger = LoggingManager('18.Billing.py')
dformat = '%Y-%m-%d %H:%M:%S'

class FileProcessor(object):
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def process_backup_usage_file(self):
        file_contents = []
        with open(backup_usage_file_path,'r') as bu:
            file_contents = bu.readlines()

        file_contents = [line.replace('\n','').strip() for line in file_contents]

        if file_contents:
            file_contents = file_contents[1:]
# Beispiel #10  ("Example #10" — scraped-dump separator; commented out because
# bare text is not valid Python)
# 0
from LoggingManager import *
from GlobalConfig import *

# Two config sections: the live-view input settings and this script's own.
config_vars_one = GlobalConfig.read_vars('06.LiveViewInput')
config_vars = GlobalConfig.read_vars('07.LiveView2HTML')

page_title = config_vars.get('page_title')
sorted_page_title = config_vars.get('sorted_page_title')
input_file_name = config_vars.get('input_file_name')
output_file_name = config_vars.get('output_file_name')
sorted_output_file_name = config_vars.get('sorted_output_file_name')
column_five_value = config_vars_one.get('column_five_percentage')
# NOTE(review): this timestamp is captured once at import time, not per
# report run — confirm that is intended.
timenow = datetime.now().strftime('%a %Y-%m-%d %H:%M:%S')
timetorun_file = config_vars.get('timetorun_file')

# Script-wide logger.
app_logger = LoggingManager('07.LiveView2HTML.py')


class HtmlGenerator:
    def __init__(self):
        # Stateless: all inputs come from module-level config variables.
        pass

    def read_next_lines(self, lines, start=0, line_count=2):
        """Yield successive chunks of ``line_count`` items from ``lines``.

        Iteration begins at index ``start`` and stops once the list is
        exhausted; the final chunk may be shorter than ``line_count``.

        BUG FIX: the original never advanced ``start`` after yielding, so
        the generator yielded the same first chunk forever (infinite loop
        for any non-empty ``lines``).
        """
        while True:
            chunk = lines[start:start + line_count]
            if not chunk:
                break
            yield chunk
            # Advance past the chunk just yielded.
            start += line_count

    def generate_html_page(self,