Example #1
import json
import os

def _get_dict_for_user(user):
    """
    Get the file containing revision entries for the user "user".
    If the file is present, load its JSON, which is effectively a dict;
    otherwise start from an empty dict for that user.

    :param user: name of the user whose revision entries are requested
    :type user: str
    :return: the revision dict stored for the user
    :rtype: dict
    """
    # FILE_MODE, WIKINAME, Serializable and _get_file_name are
    # module-level globals defined elsewhere in the source project.
    if FILE_MODE:
        filename = _get_file_name(user)
        if os.path.isfile(filename):
            with open(filename, 'rb') as inp:
                user_dict = Serializable.loads(json.load(inp))
        else:
            user_dict = {user: {}}

    else:
        filename = os.path.join(os.getcwd(), 'results', WIKINAME,
                                'user_graph.json')
        with open(filename, 'rb') as inp:
            user_dict = Serializable.loads(json.load(inp))

        if user not in user_dict:
            user_dict[user] = {}

    return user_dict[user]
Example #2
import json
import os

def _update_dict_for_user(user, dict_for_user):
    """
    Write the dict into the user's specific file (or into the shared
    user_graph.json when FILE_MODE is off).

    :param user: name of the user whose entries are being written
    :type user: str
    :param dict_for_user: revision entries to store for the user
    :type dict_for_user: dict
    :return: None
    """
    if FILE_MODE:
        dict_to_dump = {user: dict_for_user}

        filename = _get_file_name(user)
        with open(filename, 'w') as outp:
            json.dump(Serializable.dumps(dict_to_dump), outp)

    else:
        filename = os.path.join(os.getcwd(), 'results', WIKINAME,
                                'user_graph.json')
        with open(filename, 'rb') as inp:
            user_dict = Serializable.loads(json.load(inp))

        user_dict[user] = dict_for_user

        with open(filename, 'w') as outp:
            json.dump(Serializable.dumps(user_dict), outp)
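
Together, the two helpers above implement a per-user read-modify-write cycle. A minimal usage sketch, assuming FILE_MODE is set and that Serializable, _get_file_name and WIKINAME are the project-local globals the excerpts reference; the user name and revision payload here are purely illustrative:

# Read the current revision dict for a user (empty on first access),
# record one revision, then write the dict back to disk.
revisions = _get_dict_for_user('example_user')
revisions['rev_12345'] = {'quality': 0.9}
_update_dict_for_user('example_user', revisions)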
Example #3
def loads(s, device):
    d = Serializable.loads(s)
    # Rebuild an empty StdNet, then restore and register each layer.
    m = StdNet(d['args'], empty=True)
    for i, ms in enumerate(d['layers']):
        layer = LinearWithSensitivity.loads(ms, device)
        m.layers.append(layer)
        m.add_module('layer_%d' % i, layer)
    return m
Example #4
import torch

def loads(s, device):
    d = Serializable.loads(s)
    # Reconstruct the layer shell, then copy the weights onto the device.
    m = LinearExtended(d['in_features'],
                       d['out_features'],
                       bias=d['bias'] is not None)
    m.weight.data = torch.from_numpy(d['weight']).to(device)
    if d['bias'] is not None:
        m.bias.data = torch.from_numpy(d['bias']).to(device)
    return m
Example #5
def loads(s, device):
    d = Serializable.loads(s)
    args = Storage(d['args'])
    m = RBFNet(args, empty=True)
    for i, ms in enumerate(d['layers']):
        layer = RBFI.loads(ms, device)
        m.layers.append(layer)
        m.add_module('layer_%d' % i, layer)
    return m
Example #6
def loads(s, device):
    d = Serializable.loads(s)
    args = dict(n_classes=10)  # default, overridden by any stored value
    args.update(d['args'])
    args = Storage(args)
    m = MWDNet(args, empty=True)
    for i, ms in enumerate(d['layers']):
        layer = MWD.loads(ms, device)
        m.layers.append(layer)
        m.add_module('layer_%d' % i, layer)
    return m
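
Examples #3, #5 and #6 all follow the same container pattern: decode the stored dict, build an empty network, then restore each layer and register it as a named module. A generic sketch of that pattern; load_container, net_cls and layer_cls are hypothetical names, not part of the project, and Serializable is assumed in scope as above:

def load_container(s, device, net_cls, layer_cls):
    # Decode the stored dict, rebuild an empty container network,
    # then deserialize each layer and register it under a stable name.
    d = Serializable.loads(s)
    m = net_cls(d['args'], empty=True)
    for i, ms in enumerate(d['layers']):
        layer = layer_cls.loads(ms, device)
        m.layers.append(layer)
        m.add_module('layer_%d' % i, layer)
    return m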
Example #7
import torch

def loads(s, device):
    """Reads itself from string s."""
    d = Serializable.loads(s)
    m = RBFI(d['in_features'],
             d['out_features'],
             andor=d['andor'],
             modinf=d['modinf'],
             regular_deriv=d['regular_deriv'],
             min_input=d['min_input'],
             max_input=d['max_input'],
             min_slope=d['min_slope'],
             max_slope=d['max_slope'])
    m.u.data = torch.from_numpy(d['u']).to(device)
    m.w.data = torch.from_numpy(d['w']).to(device)
    m.andor01.data = torch.from_numpy(d['andor01']).to(device)
    return m
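
Every loads above restores tensors with torch.from_numpy, which implies the matching dumps stored them as numpy arrays. A self-contained sketch of that round trip for a single tensor field (the project's actual dumps code is not shown here; w is a stand-in weight matrix):

import torch

# A tensor is stored as a numpy array (what dumps would put into d['w'])
# and restored onto the target device with torch.from_numpy.
device = torch.device('cpu')
w = torch.randn(4, 3)
stored = w.detach().cpu().numpy()
restored = torch.from_numpy(stored).to(device)
assert torch.equal(w, restored.cpu())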
            learning_rate_vector=LEARNING_RATE_VECTOR)
        t2 = time.perf_counter()
        precision_dict, recall_dict, recall_list, all_labels = lstm_stack.test_model_simple(
            test_set, max_depth=DEPTH - 1)

        t3 = time.perf_counter()

        results_file = os.path.join(
            os.getcwd(), 'results', WIKINAME,
            'results_breadth_%d_depth_%d_instances_%d.json' %
            (BREADTH, DEPTH, NUMBER_OF_INSTANCES))

        print "Training completed in %r" % (t2 - t1)
        if os.path.isfile(results_file):
            with open(results_file, 'rb') as inp:
                results = Serializable.loads(inp.read())
        else:
            results = {}

        for label in all_labels:
            label = str(label)
            # total_prec_list[label].append(precision_dict[label])
            # total_recall_list[label].append(recall_dict[label])
            # total_avg_recall_list.append(np.mean(recall_list))
            # total_f1_list[label].append(_f1(precision_dict[label], recall_dict[label]))
            for keyname in ['prec', 'rec', 'f1']:
                if keyname not in results:
                    results[keyname] = {}
                if label not in results[keyname]:
                    results[keyname][label] = []
            if 'avg_rec' not in results:
    p_value, z_left_tail = statistical_significance(a, b, level=level)


if __name__ == "__main__":

    WIKINAME = 'astwiki'
    NUMBER_OF_INSTANCES = 50000

    BREADTH = 15
    DEPTH = 1
    # results_file = os.path.join(os.getcwd(), 'results', WIKINAME, 'results_breadth_%d_depth_%d.json' % (BREADTH, DEPTH))
    results_file = os.path.join(os.getcwd(), 'results', WIKINAME,
                                'results_breadth_%d_depth_%d_instances_%d.json' % (BREADTH, DEPTH, NUMBER_OF_INSTANCES))

    with open(results_file, 'rb') as inp:
        r1 = Serializable.loads(inp.read())

    BREADTH = 3
    DEPTH = 1
    results_file = os.path.join(os.getcwd(), 'results', WIKINAME,
                                'results_breadth_%d_depth_%d_instances_%d.json' % (BREADTH, DEPTH, NUMBER_OF_INSTANCES))

    # results_file = os.path.join(os.getcwd(), 'results', WIKINAME, 'results_breadth_%d_depth_%d.json' % (BREADTH, DEPTH))
    with open(results_file, 'rb') as inp:
        r2 = Serializable.loads(inp.read())
    #
    # f1_label1_d1 = r1['f1']['0']
    # f1_label1_d2 = r2['f1']['0']

    f1_label1_d1 = r1['avg_rec']
    f1_label1_d2 = r2['avg_rec']
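
The script ends with the two average-recall samples loaded; the stray statistical_significance call above suggests they are then compared for significance. As an illustration only, one common way to run such a comparison with scipy, which is a stand-in here and not necessarily the test the project's statistical_significance implements:

from scipy import stats

# Hypothetical two-sample comparison of the recall lists; swap in the
# project's statistical_significance for the real analysis.
t_stat, p_value = stats.ttest_ind(f1_label1_d1, f1_label1_d2)
print("p-value: %.4f" % p_value)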