Example #1
def predict(path, num_predictions, predicted):
    # Skip paths whose predictions were already written in a previous run.
    if path not in predicted:
        history_dir = os.path.dirname(os.path.dirname(path))
        # Reuse a cached Data object if one was pickled earlier; otherwise
        # build it from the raw CSV files and cache it for the next run.
        if os.path.isfile(history_dir + '/data_obj.pickle'):
            with open(history_dir + '/data_obj.pickle', 'rb') as f:
                d = pickle.load(f)
        else:
            edges_path = os.path.dirname(history_dir) + '/edges'
            edges = pd.read_csv(edges_path, header=None)
            event_log = pd.read_csv(history_dir + '/event_log', header=None)
            d = Data()
            d.load_data_data_frame(event_log, edges)
            with open(history_dir + '/data_obj.pickle', 'wb') as f:
                pickle.dump(d, f)
        with open(history_dir + '/contagion.pickle', 'rb') as file:
            cc = pickle.load(file)
        with open(history_dir + '/adjacency.pickle', 'rb') as file:
            a = pickle.load(file)
        with open(path + '/threshold.pickle', 'rb') as f:
            t = pickle.load(f)
        m = MCDOI()
        m.assign_contagions_correlation_matrix(cc)
        m.assign_adjacency_matrix(a)
        m.assign_thresholds_matrix(t)
        m.fill_state_matrix(d)
        result = m.predict(num_predictions)
        # Redirect the output under the 'negative-random-activation' variant
        # by swapping the fifth path component.
        new_path = path.split('/')
        new_path[4] = 'negative-random-activation'
        save_results(result, '/' + os.path.join(*new_path), num_predictions)
        # `directory` is a module-level global pointing at the log location;
        # the log is append-only so interrupted runs can be resumed.
        with open(directory + 'predicted_7days', 'a+', encoding='utf-8') as handle:
            handle.write(path + '\n')
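For context, a minimal driver for the function above might look like the sketch below. The concrete dataset path and the seeding of `predicted` from the `predicted_7days` log are assumptions inferred from how they are used in the body; only the `/datasets/mcdoi/louvain/...` layout and the log name appear elsewhere in these examples.

# Sketch only: hypothetical driver; `directory` is assumed to be the
# module-level global the function writes its log through.
if __name__ == '__main__':
    directory = '/datasets/mcdoi/louvain/'  # assumed log location
    predicted = set()
    if os.path.isfile(directory + 'predicted_7days'):
        with open(directory + 'predicted_7days', encoding='utf-8') as handle:
            predicted = {line.strip() for line in handle}
    # Hypothetical dataset path; threshold.pickle must already exist there.
    predict('/datasets/mcdoi/louvain/louvain_46_720/history_7/time/size_86400',
            num_predictions=7, predicted=predicted)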
Example #2
def estimate_and_predict(d, dir, batch_type, batch_size, num_predictions):
    # Fit the model on `d`, predict, and persist results under `dir`
    # (which shadows the built-in and is expected to end with '/').
    try:
        m = MCDOI()
        m.fit(d, batch_type=batch_type, batch_size=batch_size)
        result = m.predict(num_predictions)
        out_dir = dir + batch_type + '/size_' + str(batch_size) + '/'
        save_results(result, out_dir, num_predictions)
        save_parameters(m, out_dir)
    except Exception as err:
        write_to_logger(err.args)
        print(err.args)
        exit(1)
    finally:
        send_email()
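A hypothetical call site for this function, assuming a `Data` object loaded as in Example #3; the dataset path is taken from that example, and the trailing '/' on the output directory matters because the function concatenates `dir` directly:

# Hypothetical usage; values other than the dataset path are assumptions.
d = Data()
d.load_data('/datasets/mcdoi/louvain/louvain_46_720/')
estimate_and_predict(d, '/datasets/mcdoi/louvain/louvain_46_720/',
                     batch_type='time', batch_size=86400, num_predictions=3)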
Example #3
def main():
    directory = '/datasets/mcdoi/louvain/louvain_46_720/'
    try:
        d = Data()
        d.load_data(directory)
        m = MCDOI()
        m.fit(d, batch_type='time', batch_size=86400)  # one-day batches (86,400 s)
        m.predict(3)  # predict(3) returns 63 activations
    except Exception as err:
        write_to_logger(err.args)
        print(err.args)
        exit(1)
    finally:
        send_email()
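The directory layout that `load_data` expects is not shown in this snippet, but it can be inferred from how Examples #4 through #6 read the same files by hand; the sketch below is an assumption, not confirmed by the code above:

# Layout inferred from Examples #4-#6 (an assumption, not part of this snippet):
#   /datasets/mcdoi/louvain/edges                      shared edge list, CSV, no header
#   /datasets/mcdoi/louvain/louvain_46_720/event_log   per-community event log, CSV, no header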
Example #4
def estimate_t_and_predict(path_dataset_history, batch_type, batch_size,
                           num_predictions):
    print('Start')
    edges = pd.read_csv(os.path.dirname(path_dataset_history) + '/edges',
                        header=None)
    event_log = pd.read_csv(path_dataset_history + '/event_log', header=None)
    print('Data frames loaded')
    with open(path_dataset_history + '/contagion.pickle', 'rb') as file:
        cc = pickle.load(file)
    with open(path_dataset_history + '/adjacency.pickle', 'rb') as file:
        a = pickle.load(file)
    print('Pickles loaded')
    d = Data()
    d.load_data_data_frame(event_log, edges)
    print('Data object initialised')
    m = MCDOI()
    m.assign_contagions_correlation_matrix(cc)
    m.assign_adjacency_matrix(a)
    print('Matrices assigned')
    m.fit_only_thresholds_states(d,
                                 batch_type=batch_type,
                                 batch_size=batch_size)
    # Threshold persistence (as done in Example #5) is disabled in this variant.
    result = m.predict(num_predictions)
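As written, the function computes `result` but never persists it. The sibling examples save predictions with `save_results`, so a completed ending might look like the following continuation; the output path layout is borrowed from Example #5, not stated here:

    # Hypothetical continuation of estimate_t_and_predict, mirroring
    # Example #5's output layout:
    save_results(result,
                 path_dataset_history + '/' + batch_type + '/size_' +
                 str(batch_size),
                 num_predictions)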
Example #5
def estimate_t_and_predict(path_dataset_history, batch_type, batch_sizes,
                           num_predictions, estimated):
    def out_dir(batch_size):
        return (path_dataset_history + '/' + batch_type + '/size_' +
                str(batch_size))

    # Load the expensive inputs only if at least one batch size is pending.
    if any(out_dir(bs) not in estimated for bs in batch_sizes):
        edges = pd.read_csv(os.path.dirname(path_dataset_history) + '/edges',
                            header=None)
        event_log = pd.read_csv(path_dataset_history + '/event_log',
                                header=None)
        with open(path_dataset_history + '/contagion.pickle', 'rb') as file:
            cc = pickle.load(file)
        with open(path_dataset_history + '/adjacency.pickle', 'rb') as file:
            a = pickle.load(file)
        for batch_size in batch_sizes:
            if out_dir(batch_size) in estimated:
                continue
            # Reuse a cached Data object when available; otherwise build one
            # from the data frames loaded above and cache it.
            if os.path.isfile(path_dataset_history + '/data_obj.pickle'):
                with open(path_dataset_history + '/data_obj.pickle', 'rb') as f:
                    d = pickle.load(f)
            else:
                d = Data()
                d.load_data_data_frame(event_log, edges)
                with open(path_dataset_history + '/data_obj.pickle', 'wb') as f:
                    pickle.dump(d, f)
            m = MCDOI()
            m.assign_contagions_correlation_matrix(cc)
            m.assign_adjacency_matrix(a)
            m.fit_only_thresholds_states(d,
                                         batch_type=batch_type,
                                         batch_size=batch_size)
            # Persist the fitted thresholds next to the prediction output.
            file_name = out_dir(batch_size) + '/threshold.pickle'
            os.makedirs(os.path.dirname(file_name), exist_ok=True)
            with open(file_name, 'wb') as threshold_file:
                pickle.dump(m.thresholds.matrix, threshold_file)
            result = m.predict(num_predictions)
            save_results(result, out_dir(batch_size), num_predictions)
            # `directory` is a module-level global; both logs are append-only
            # so interrupted runs can be resumed.
            with open(directory + 'estimated_t+predict', 'a+',
                      encoding='utf-8') as handle:
                handle.write(out_dir(batch_size) + '\n')
            with open(directory + 'predicted_7days', 'a+',
                      encoding='utf-8') as handle:
                handle.write(out_dir(batch_size) + '\n')
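The `estimated` argument is matched against the same strings the function appends to the `estimated_t+predict` log, so one way to build it is to read that log back. A minimal sketch, assuming the same module-level `directory` global; the helper name is hypothetical:

# Sketch only: rebuild `estimated` from the append-only log written above.
def load_estimated(directory):
    log_path = directory + 'estimated_t+predict'
    if not os.path.isfile(log_path):
        return set()
    with open(log_path, encoding='utf-8') as handle:
        return {line.strip() for line in handle}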
Example #6
def predict(path_dataset_history, batch_type, batch_sizes, num_predictions):
    # Reuse a cached Data object if present; otherwise build it from the raw
    # CSV files and cache it for subsequent runs.
    if os.path.isfile(path_dataset_history + '/data_obj.pickle'):
        with open(path_dataset_history + '/data_obj.pickle', 'rb') as f:
            d = pickle.load(f)
    else:
        edges = pd.read_csv(os.path.dirname(path_dataset_history) + '/edges',
                            header=None)
        event_log = pd.read_csv(path_dataset_history + '/event_log',
                                header=None)
        d = Data()
        d.load_data_data_frame(event_log, edges)
        with open(path_dataset_history + '/data_obj.pickle', 'wb') as f:
            pickle.dump(d, f)
    with open(path_dataset_history + '/contagion.pickle', 'rb') as file:
        cc = pickle.load(file)
    with open(path_dataset_history + '/adjacency.pickle', 'rb') as file:
        a = pickle.load(file)
    # Redirect output under 'paper/<model>' by swapping the fifth path
    # component; `model` is a module-level global.
    new_path_dataset_history = path_dataset_history.split('/')
    new_path_dataset_history[4] = 'paper/' + model
    new_path_dataset_history = '/' + os.path.join(*new_path_dataset_history)
    for batch_size in batch_sizes:
        out_dir = (new_path_dataset_history + '/' + batch_type + '/size_' +
                   str(batch_size))
        m = MCDOI()
        m.assign_contagions_correlation_matrix(cc)
        m.assign_adjacency_matrix(a)
        # Thresholds were fitted and pickled in an earlier run (see Example
        # #5), so they are only loaded here, not re-estimated.
        with open(out_dir + '/threshold.pickle', 'rb') as f:
            t = pickle.load(f)
        m.assign_thresholds_matrix(t)
        m.fill_state_matrix(d)
        result = m.predict(num_predictions)
        save_results(result, out_dir, num_predictions)
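A hypothetical invocation of this variant; `model` must be bound at module level before the call, thresholds must already be pickled under the 'paper/<model>' tree, and the path and batch sizes shown are assumptions (only 86400 appears elsewhere in these examples):

# Hypothetical usage; `model` names the output subdirectory under 'paper/'.
model = 'mcdoi'  # assumed value
predict('/datasets/mcdoi/louvain/louvain_46_720/history_7',
        batch_type='time', batch_sizes=[3600, 86400], num_predictions=7)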