# --- tail of a correlation-plot helper; its `def` line (and the origin of
#     `axes` and `df`) sits before this view — TODO confirm upstream ---
# Enlarge every x-axis label on the grid of subplots.
[plt.setp(item.xaxis.get_label(), 'size', 40) for item in axes.ravel()]
# FIX: DataFrame.as_matrix() was removed in pandas 1.0; to_numpy() is the
# supported equivalent and returns the same ndarray of pairwise correlations.
corr = df.corr().to_numpy()
# Annotate each upper-triangle subplot with its correlation coefficient.
# NOTE(review): `plt.np` (numpy re-exported via pyplot) was removed in recent
# matplotlib releases — prefer importing numpy directly; kept as-is here
# because the file's import block is outside this view.
for i, j in zip(*plt.np.triu_indices_from(axes, k=1)):
    axes[i, j].annotate("%.3f" % corr[i, j], (0.8, 0.8),
                        xycoords='axes fraction',
                        ha='center', va='center', size=45)
plt.show()


if __name__ == '__main__':
    # --- data preparation: load logs/rules and derive indicator indexes ---
    file = '3222'
    data_preparation = DataPreparation(file)
    logs = data_preparation.logs_preparation()
    rules = data_preparation.rules_preparation()
    indicators = data_preparation.indexes_preparation(rules)
    rules_ids = data_preparation.rules_to_ids(rules)

    # --- frequent patterns: mine sequences of length 2-3, min support 0.5 ---
    spam = SPAM(file)
    spam.set_max_gap(1)
    spam.set_min_pattern_length(2)
    spam.set_max_pattern_length(3)
    spam.spam_algorithm(logs, 0.5)

    # --- unusual patterns: compare mined patterns against the indicators ---
    ind = str(indicators.loc[0][0])
    logs = logs.reset_index()
    unusual_patterns = UnusualPatterns(file, spam.result, indicators, logs, rules_ids)
prefix_s_step, s_temp_bitmap) # recursively try to extend that pattern if self.max_pattern_length > size_current_prefix: self.dfs_pruning(prefix_s_step, s_temp_bitmap, s_temp, s_temp, s_temp[pos], size_current_prefix + 1) # Save the results in a file def write_to_file(self, file): results_to_strings = self.result.apply( lambda sequence: " -1 ".join(str(x) for x in sequence['Pattern']) + " -1 #SUP: " + str(sequence['SUP']), axis=1) results_to_strings.to_csv('../results-SPAM/' + self.file + '-out.txt', index=False, header=None) if __name__ == '__main__': f = 'test1' l = DataPreparation(f) logs = l.logs_preparation() s = SPAM(f) #s.set_max_gap(1) s.set_min_pattern_length(2) s.set_max_pattern_length(3) s.spam_algorithm(logs, 0.4) s.write_to_file(f)