Example #1
def create_server_summary(file_paths, config):
    """
    Creates the summary bar charts: server-related event
    distribution, per-type URL/Domain/IP statistics and
    URL/IP ratios for Defacement/Phishing/Malware.
    
    Arguments:
    file_paths -- paths to the three monthly CSV files
    config     -- configuration dictionary
    """
    month = config['month']
    year = config['year']
    months = [month_string_format(year, month-2),
              month_string_format(year, month-1),
              month_string_format(year, month)]
    output_dir = config['output_dir']
    
    data = []
    for file in file_paths:
        _, csv_data = rutil.read_csv(file, [1,2,3])
        data.append(csv_data)
    # Build one series per server-related event type across the three months
    server_dis_headers = ['Defacement', 'Phishing', 'Malware']
    server_dis = [[], [], []]
    for i in range(3):
        for j in range(3):
            server_dis[i].append(data[j][i][1])
    plot_url = rutil.plotly_bar_chart(months, 
                           zip(server_dis, server_dis_headers), 
                           'Server Related security events distribution', 
                           bar_mode='stack')
    rutil.plotly_download_png(plot_url, output_dir + 'ServerRelated.png')

    # For each event type, build URL/Domain/IP series across the three months
    gen = [(1, 'Defacement', config['defce_color']),
           (2, 'Phishing', config['phish_color']),
           (3, 'Malware', config['malwr_color'])]
    gen_headers = ['URL', 'Domain', 'IP']
    gen_data = [[], [], []]
    for index, type, colors in gen:
        for i in range(3):
            gen_data[i] = []
            for j in range(3):
                gen_data[i].append(data[j][index-1][i+1])
        plot_url = rutil.plotly_bar_chart(months,
                                          zip(gen_data, gen_headers),
                                          type + ' General Statistics', color=colors)
        rutil.plotly_download_png(plot_url, output_dir + type + 'Gen.png')
    
    # URL/IP ratio per event type across the three months
    url_ip_headers = ['URL/IP Ratio']

    for index, type, colors in gen:
        url_data = []
        for j in range(3):
            url_data.append(round(float(data[j][index-1][1]) / float(data[j][index-1][3]), 2))
        plot_url = rutil.plotly_bar_chart(months, [(url_data, url_ip_headers[0])], type + ' URL/IP Ratio', color=colors)
        rutil.plotly_download_png(plot_url, output_dir + type + 'URLIP.png')
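
A minimal usage sketch for create_server_summary. The folder layout, colour values and output directory below are placeholders chosen for illustration; the only keys known to be read from config are 'month', 'year', 'output_dir' and the three *_color entries used above.

# Hypothetical driver; paths and colours are illustrative, not repository values.
config = {
    'month': 6,
    'year': 2016,
    'output_dir': 'output/',
    'defce_color': '#1f77b4',   # assumed colours for Defacement/Phishing/Malware
    'phish_color': '#ff7f0e',
    'malwr_color': '#2ca02c',
}
file_paths = ['data/2016-04/serverSummary.csv',   # month - 2
              'data/2016-05/serverSummary.csv',   # month - 1
              'data/2016-06/serverSummary.csv']   # current month
create_server_summary(file_paths, config)
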
Example #2
def monthly_create_bar_charts(config):
    """
    Create bar charts that use data from current month
    [N.B. Bar charts that use data from multiple months are generated
    in monthly_create_multi_bar]
    
    Arguments:
    config     -- configuration dictionary
    """
    print('Creating bar charts...')
    print('  Downloading bar charts...')
    bar_chart_dir = os.path.join(os.getcwd(), config["file_paths"][2])
    bar_charts = [('ISPServerAll', 'Top 10 ISPs by server related event types'),
                  ('ISPBotnets', 'Top 10 ISPs by non-server event type'),
                  ('ISPAll', 'Top 10 ISPs for all events')]
    for file, title in bar_charts:
        # Save a 'Pie'-suffixed copy of each CSV before plotting the bar chart
        shutil.copyfile(bar_chart_dir + file + '.csv', bar_chart_dir + file + 'Pie.csv')
        header, data = rutil.read_csv(bar_chart_dir + file + '.csv', max_row=10)
        plot_url = rutil.plotly_bar_chart(data[0][:10], zip(data[1:], header[1:]), title, 'stack')
        rutil.plotly_download_png(plot_url, config['output_dir'] + file + '.png')

    # Create bar charts that use data from multiple months
    monthly_create_multi_bar(config)
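
A hedged sketch of how monthly_create_bar_charts might be called. The only keys touched directly above are 'file_paths' (with the bar-chart CSV folder in slot 2) and 'output_dir'; the folder names here and anything required by monthly_create_multi_bar are assumptions.

# Illustrative only: file_paths[2] is assumed to be the folder holding the
# ISP bar-chart CSVs (ISPServerAll.csv, ISPBotnets.csv, ISPAll.csv).
config = {
    'file_paths': ['csv/summary/', 'csv/pie/', 'csv/bar/'],
    'output_dir': 'output/',
    # ...plus whatever keys monthly_create_multi_bar expects
}
monthly_create_bar_charts(config)
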
Example #3
def quarterly_latex(config):
    """
    Compiles the LaTeX reports for the Quarterly Security
    Watch Report. Generates the LaTeX tables by reading
    CSV files, then fills the placeholders in the
    report_quarterly_temp and report_qrtr_temp_chi
    templates with the data.
    """
    
    yyqq, year, qrtr, qrtr_label, data_paths = config['params']
    output = config['output']
    bar_chart_param, pie_chart_param = config['trim_config']
    
    # Top 5 Botnets Table
    top_bn_data, top_bn_name, _ = config['top_bn']
    table_hdr = ['Name'] + qrtr_label
    table_top_bot = ''
    table_top_bot += '\\begin{table}[!htbp]\n\\centering\n'
    table_top_bot += '\n\\begin{tabular}{llllll} \\hline\n'
    table_top_bot += '&'.join(map(lambda x: '\\bf ' + x, table_hdr)) + '\\\\\\hline\n'
    rows = map(lambda x,y:x+'&'+'&'.join(y)+'\\\\\n', top_bn_name, top_bn_data)
    for row in rows:  
        table_top_bot += row     
    table_top_bot += '\\hline\n\\end{tabular}\n\\end{table}\n'                        
                   
    # Generate latex table for Major Botnet Families
    headers, data = rutil.read_csv(data_paths[4] + 'botnetDailyMax.csv', [0,1])
    _, prev_data = rutil.read_csv(data_paths[3] + 'botnetDailyMax.csv', [0,1])
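    # Rank movement vs. the previous quarter: same position -> right arrow,
    # found earlier in the previous list -> dropped (down arrow), found
    # later -> climbed (up arrow), not present at all -> 'NEW'.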
    rank_change = []
    pct_change = ['NA'] * 10
    for i in range(10):
        if data[0][i] == prev_data[0][i]:
            rank_change.append('$\\rightarrow$')        
        elif data[0][i] in prev_data[0][:i]:
            rank_change.append('$\\Downarrow$')
        elif data[0][i] in prev_data[0][i+1:]:
            rank_change.append('$\\Uparrow$')
        else:
            rank_change.append('NEW')
    for i in range(len(prev_data[0])):
        for j in range(10):
            if prev_data[0][i] == data[0][j]:
                new = float(data[1][j])
                old = float(prev_data[1][i])
                pct_change[j] = str(round((new - old) * 100 / old, 1)) + '\%'   

    # Major Botnet Families table headers            
    headers = ['Rank', '$\\Uparrow\\Downarrow$', 
               'Concerned Bots', 'Number of Unique', 
               'Changes with']
    table_ltx = ''
    table_ltx += '\\begin{table}[!htbp]\n\\centering\n'
    table_ltx += '\\caption{__CAPTION__}'
    table_ltx += '\n\\begin{tabular}{lllll} \\hline\n__HEADERS__\\\\\\hline\n'

    # Major Botnet Families table data
    for i in range(min(len(data[0]), 10)):
        table_ltx += '&'.join([str(i+1), rank_change[i], data[0][i], data[1][i], pct_change[i]]) + '\\\\\n'      
    table_ltx += '\\hline\n\\end{tabular}\n\\end{table}\n'            
    ltx_temp = ''
    
    # Create Chinese and English version of Major Botnet Families
    table_ltx_cap_eng = 'Major Botnet Families in Hong Kong Networks'
    table_ltx_cap_chi = u'香港網絡內的主要殭屍網絡'
    table_ltx_hdr_eng = '&'.join(map(lambda x:'\\bf ' + x,headers)) + '\\\\\n&&& \\bf IP addresses & \\bf previous period\n'
    table_ltx_hdr_chi = u'\\bf 排名 & \\bf $\\Uparrow\\Downarrow$ & \\bf 殭屍網絡名稱 & \\bf 唯一IP地址 & \\bf 變化 \n'
    table_eng = table_ltx.replace('__HEADERS__', table_ltx_hdr_eng)
    table_eng = table_eng.replace('__CAPTION__', table_ltx_cap_eng)
    table_chi = table_ltx.replace('__HEADERS__', table_ltx_hdr_chi)
    table_chi = table_chi.replace('__CAPTION__', table_ltx_cap_chi)
    
    # Compile Latex report
    serv_events = config['serv_events']
    with open(output + 'report_quarterly_temp.tex') as f:
        ltx_temp = f.read()
    fontcfg = ConfigParser.ConfigParser(allow_no_value=True)
    fontcfg.read('config.cfg')
    f = lambda x: fontcfg.get('font','font_' + x)
    ltx_temp = ltx_temp.replace('__FONT_SIZE__',f('size'))
    ltx_temp = ltx_temp.replace('__FONT__',f('family'))
    ltx_temp = ltx_temp.replace('botnet\\_table', table_eng)
    ltx_temp = ltx_temp.replace('QUARTER', qrtr_label[4])
    ltx_temp = ltx_temp.replace('UNIQUEEVENTS', serv_events[4])
    ltx_temp = ltx_temp.replace('table\\_top\\_bot', table_top_bot)
    ltx_temp = ltx_temp.replace('__PIE_CHART__', pie_chart_param)
    ltx_temp = ltx_temp.replace('__BAR_CHART__', bar_chart_param)
    with open(output + 'SecurityWatchReport.tex', 'w+') as f:
        f.write(ltx_temp)
        
    with open(output + 'report_qrtr_temp_chi.tex') as f:
        ltx_temp = f.read()
    ltx_temp = ltx_temp.replace('UNIQUEEVENTS', serv_events[4])
    ltx_temp = ltx_temp.replace('table\\_top\\_bot', table_top_bot)
    ltx_temp = ltx_temp.replace('__PIE_CHART__', pie_chart_param)
    ltx_temp = ltx_temp.replace('__BAR_CHART__', bar_chart_param)
    with open(output + 'SecurityWatchReportChi.tex', 'w+') as f:
        f.write(ltx_temp)
    with codecs.open(output + 'chiqrtr.tex', mode='w+', encoding='utf-8-sig') as f:
        f.write(u'20' + unicode(year) + u'第' + [u'一',u'二',u'三',u'四'][qrtr-1] + u'季度')
    with codecs.open(output + 'botnetchitable.tex', mode='w+', encoding='utf-8-sig') as f:
        f.write(table_chi)
        
    print('Rendering PDF')
    os.chdir(output)
    os.system('pdflatex SecurityWatchReport.tex')
    os.system('pdflatex SecurityWatchReport.tex')   # Second pass to resolve references and the ToC
    os.rename('SecurityWatchReport.pdf', 
              'SecurityWatchReport' + qrtr_label[4] + '.pdf')  

    os.system('xelatex SecurityWatchReportChi.tex')
    os.system('xelatex SecurityWatchReportChi.tex') # Second pass to resolve references and the ToC
    os.rename('SecurityWatchReportChi.pdf',
              'SecurityWatchReportChi' + qrtr_label[4] + '.pdf')
    print('Reports successfully compiled. Exiting now...')
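
The template handling above is a chain of str.replace calls over placeholder tokens. A small helper along these lines (purely illustrative, not part of the repository) captures the same pattern:

# Hypothetical helper showing the placeholder-substitution pattern used above;
# the function name and the substitutions dict are illustrative only.
def render_template(template_path, out_path, substitutions):
    with open(template_path) as f:
        text = f.read()
    for token, value in substitutions.items():
        text = text.replace(token, value)
    with open(out_path, 'w+') as f:
        f.write(text)

# e.g. the English report could then be produced roughly as:
# render_template(output + 'report_quarterly_temp.tex',
#                 output + 'SecurityWatchReport.tex',
#                 {'__FONT__': f('family'), '__FONT_SIZE__': f('size'),
#                  'botnet\\_table': table_eng, 'QUARTER': qrtr_label[4]})
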
Example #4
def quarterly_compile_data(config):
    """
    Reads the quarterly CSV files and compiles the URL/IP,
    botnet C&C, unique-bot and top-botnet data used by the report.
    """  
    yyqq, year, qrtr, qrtr_label, data_paths = config['params']
    # URL/IP data
    url_data = [[],[],[]]
    url_ip_col = [('Defacement', 1), 
                  ('Phishing', 2), 
                  ('Malware', 3)]
    for type, index in url_ip_col:
        url_ip_unique_data = [[],[]]
        url_ip_ratio_data = [[]]
        for d in data_paths:
            _, data = rutil.read_csv(d + 'serverSummary.csv', columns=[index])
            url_count = data[0][1]
            ip_count = data[0][3]
            url_ip_ratio = round(float(url_count) / float(ip_count),2)
            url_ip_unique_data[0].append(url_count)
            url_ip_unique_data[1].append(ip_count)
            url_ip_ratio_data[0].append(str(url_ip_ratio))
        url_data[index-1] = url_ip_unique_data[0]
        config[type + '_url_data'] = (url_ip_unique_data, url_ip_ratio_data)    
    config['url_data'] = list(url_data)
    
    # Botnet (C&C) Distribution and Trend data
    cc_data = [[],[],[]]
    for d in data_paths:
        _, data = rutil.read_csv(d + 'C&CServers.csv', columns=[0,3]) 
        ip_list = []
        irc_count = 0
        http_count = 0
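        # Count each C&C server IP only once; column 3 appears to hold the
        # IRC channel, so a '-' there is treated as an HTTP-based C&C server.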
        for i in range(len(data[0])):
            ip = data[0][i]
            if ip not in ip_list:
                ip_list.append(ip)
                if data[1][i] == '-':
                    http_count += 1
                else:
                    irc_count += 1
        cc_data[0].append(str(irc_count))
        cc_data[1].append(str(http_count))
        cc_data[2].append(str(irc_count+http_count))
    config['cc_data'] = cc_data
    
    # Unique Botnet (Bots) Trend
    bn_data = []
    for d in data_paths:
        _, data = rutil.read_csv(d + 'botnetDailyMax.csv', columns=[1]) 
        total_count = 0
        for i in range(len(data[0])):
            if data[0][i] != '':
                total_count += int(data[0][i])
        bn_data.append(total_count)
    config['bn_data'] = bn_data
    
    # Top Botnet data
    top_bn_data = [[],[],[],[],[]]
    top_bn_name = []
    top_bn_curr = []
    _, data = rutil.read_csv(data_paths[-1] + 'botnetDailyMax.csv', [0,1])
    for i in range(5):
        top_bn_name.append(data[0][i])
        top_bn_curr.append(data[1][i])
    for j in range(4):
        _, data = rutil.read_csv(data_paths[j] + 'botnetDailyMax.csv', [0,1]) 
        for i in range(len(data[0])):
            try:
                index = top_bn_name.index(data[0][i])
            except ValueError:
                continue   # botnet not among the current top 5
            top_bn_data[index].append(data[1][i])
        for i in range(5):
            if len(top_bn_data[i]) <= j:
                top_bn_data[i].append('0')
    for i in range(5):
        top_bn_data[i].append(top_bn_curr[i])
    config['top_bn'] = (top_bn_data, top_bn_name, top_bn_curr)
    
    url_data.append(bn_data)
    url_data.append(cc_data[2])
    config['serv_events'] = reduce(rutil.sum_array, url_data)
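
The final reduce folds the per-category series into a single total-events series via rutil.sum_array. Its actual behaviour is not shown in this listing; assuming it performs an element-wise sum over two equal-length sequences of counts, a minimal stand-in could look like this:

# Minimal sketch of an element-wise sum reducer; this is an assumption about
# rutil.sum_array, not the library's actual implementation. Values are kept as
# strings because serv_events entries are later substituted into LaTeX text.
def sum_array(a, b):
    return [str(int(x) + int(y)) for x, y in zip(a, b)]

# reduce(sum_array, [['1', '2'], ['3', '4'], ['5', '6']]) -> ['9', '12']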