def run_predictions(self, path, dataset = "u1"):
		loader = MovieLensLoader(path, "\\" + dataset + ".base")
		users = loader.users
		items = loader.items
		predictor = Predictor(users, items)

		count, wrong, right, difference = 0.0, 0.0, 0.0, 0.0

		print "Starting test"

		with open(path + "\\" + dataset + ".test") as f, open(path + "\\" + dataset + ".res", "w") as y:
			for line in f:
				data = line.split("\t")
				userid = int(data[0])
				itemid = int(data[1])
				rating = float(data[2])
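				# Predict the user's rating for this item and round it to the nearest whole star.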
				new_rating = round(predictor.predict_rating(users[userid], items[itemid]))

				if rating == new_rating:
					right += 1  
				else: 
					wrong += 1
				difference += abs(rating-new_rating)
				count += 1

				y.write(str(userid) + "\t" + str(itemid) + "\t" + str(int(new_rating)) + "\n")

				if count % 100 == 0:
					print "Running test: " + str(count)

		print "Done. Result written to result.txt"
		
		self.__write_result(path, dataset, count, right, wrong, difference)
Example #2
def test_case_34():
    with open('training/' + os.listdir('training/')[34]) as f:
        raw_task = json.load(f)
    base_entity_finder = EntityFinder(
        lambda grid: find_components(grid, directions=ALL_DIRECTIONS))
    task = tuplefy_task(raw_task)
    inp = task['train'][0]['input']
    out = task['train'][0]['output']
    entities = base_entity_finder(inp)

    color_8 = Property(lambda x: frozenset({8}),
                       np.log(10) - 1,
                       name=f'color {8}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_0 = Property(lambda x: frozenset({0}),
                       np.log(10) - 1,
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1,
                          requires_entity=True)
    select_8 = Selector.make_property_selector(take_color, color_8, True)
    select_not_8 = Selector.make_property_selector(take_color, color_8, False)
    select_not_0 = Selector.make_property_selector(take_color, color_0, False)
    select_not_0.nll = np.log(2)
    select_not_0_nor_8 = Selector.intersect(select_not_0, select_not_8)
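    # Keep only entities whose colour is neither 0 (the background) nor 8 (the collision target).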

    selected_entities = select_not_0_nor_8.select(entities)

    collision = Relation(
        lambda entity1, entity2: next(
            iter(collision_directions(entity1, entity2, adjustment=1)))
        if len(collision_directions(entity1, entity2)) == 1 else None,
        nll=1 + np.log(2),
        name='the unique collision vector to',
        output_types=frozenset({'vector'}))
    collision_with_8 = Property.from_relation_selector(collision, select_8,
                                                       base_entity_finder)
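    # collision_with_8: for each entity, the unique vector that brings it into collision with an 8-coloured entity (None if not unique).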
    move_into_8 = Transformer(
        lambda entities, grid: move(entities,
                                    vector_property=collision_with_8,
                                    copy=True,
                                    extend_grid=False),
        nll=collision_with_8.nll + np.log(2),
        name=f"{'copy' if True else 'move'} them by ({collision_with_8})")
    new_entities, new_grid = move_into_8.transform(selected_entities, inp)
    assert new_grid == out
    my_entity_finder = base_entity_finder.compose(select_not_0_nor_8)
    my_predictor = Predictor(my_entity_finder, move_into_8)
    for case in task['train'] + task['test']:
        assert my_predictor.predict(case['input']) == case['output']

    my_predictor_2 = Predictor(base_entity_finder, move_into_8)
def reset_all():
    Relation.reset()
    Property.reset()
    Selector.reset()
    Predictor.reset()
    Entity.reset()
    # global move_entity_cache
    # move_entity_cache = {}
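    # Clear the module-level memoisation caches used by the atomic-object helpers.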
    atomic_objects.adjacent_direction_cache = {}
    atomic_objects.find_color_entities_cache = {}
    atomic_objects.collision_directions_cache = {}
Example #4
def test_transformers_predictors():
    with open('training/' + os.listdir('training/')[7]) as f:
        raw_case7 = json.load(f)
    case7 = tuplefy_task(raw_case7)
    inp = case7['train'][0]['input']
    out = case7['train'][0]['output']
    base_entity_finder = EntityFinder(find_components)
    entities = base_entity_finder(inp)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1)
    color_2 = Property(lambda x, i=2: frozenset({2}),
                       np.log(10) - 2,
                       name=f'color {2}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_8 = Property(lambda x, i=8: frozenset({8}),
                       np.log(10) - 2,
                       name=f'color {8}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    select_8 = Selector.make_property_selector(take_color, color_8)
    select_2 = Selector.make_property_selector(take_color, color_2)
    max_ord = OrdinalProperty(lambda x: nth_ordered(x, 0, use_max=True),
                              nll=0,
                              name=f'take the {1} largest')
    find_collision_vect_to_8 = Property.from_relation_selector(
        collision_relation,
        select_8,
        entity_finder=base_entity_finder,
        ordinal_property=max_ord)
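    # find_collision_vect_to_8: the collision vector from an entity to the 8-coloured entity, ties broken by max_ord (take the largest).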
    my_transformer = Transformer(
        lambda entities, grid: move(entities,
                                    vector_property=find_collision_vect_to_8),
        name=f'move them by ({find_collision_vect_to_8})',
        nll=1 + np.log(2))

    assert my_transformer.transform(select_2.select(entities))[1] == out

    select_2_finder = base_entity_finder.compose(select_2)
    my_predictor = Predictor(select_2_finder, my_transformer)
    assert my_predictor.predict(inp) == out
Example #5
def test_case_30():
    with open('training/' + os.listdir('training/')[30]) as f:
        raw_task = json.load(f)
    base_entity_finder = EntityFinder(
        lambda grid: find_components(grid, directions=ALL_DIRECTIONS))
    task = tuplefy_task(raw_task)
    inp = task['train'][0]['input']
    output = task['train'][0]['output']
    entities = base_entity_finder(inp)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1,
                          requires_entity=True)
    color_0 = Property(lambda x, i=2: frozenset({0}),
                       np.log(10) - 1,
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    select_not_0 = Selector.make_property_selector(take_color,
                                                   color_0,
                                                   the_same=False)
    crop_transform = Transformer(crop_entities,
                                 nll=np.log(2),
                                 name='crop them')
    _, trivial_transformed_grid = crop_transform.transform(entities)
    assert trivial_transformed_grid == inp
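    # Cropping around every entity (background included) covers the whole grid, so the result equals the input.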

    selected_entities = select_not_0.select(entities)
    _, transformed_grid = crop_transform.transform(selected_entities)
    assert transformed_grid == ((0, 2, 2, 2), (0, 0, 2, 0), (2, 2, 2, 0),
                                (2, 0, 2, 0))

    my_predictor = Predictor(base_entity_finder.compose(select_not_0),
                             crop_transform)

    for case in task['train']:
        assert my_predictor.predict(case['input']) == case['output']

    test_case = task['test'][0]
    print(my_predictor)
    assert my_predictor.predict(test_case['input']) == test_case['output']
Example #6
def test_sequential():
    with open('training/' + os.listdir('training/')[56]) as f:
        raw_task = json.load(f)
    task = tuplefy_task(raw_task)
    input_grid = task['train'][0]['input']
    output_grid = task['train'][0]['output']
    color_0 = Property(lambda x: frozenset({0}),
                       np.log(10) - 1,
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1)
    select_not_0 = Selector.make_property_selector(take_color, color_0, False)
    x_length = Property(
        lambda x: x.entity.max_coord(axis=1) - x.entity.min_coord(axis=1) + 1,
        np.log(2),
        name='the x length',
        output_types=frozenset({'x_length'}),
        entity_finder=base_entity_finder)
    zero = Property(lambda x: 0,
                    1,
                    name='0',
                    output_types=frozenset({'y_length'}),
                    entity_finder=base_entity_finder)
    x_length_vect = Property.xy_length_to_vector(zero, x_length)
    copy_move_x_length = Transformer(
        lambda entities, grid: move(entities, grid, x_length_vect, copy=True),
        name=f'copy them by ({x_length_vect})')
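    # copy_move_x_length copies each entity sideways by its own width (y offset 0, x offset = the entity's x length).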
    my_entity_finder = base_entity_finder.compose(select_not_0)
    cropper = Transformer(crop_entities, nll=np.log(2), name='crop them')
    single_predictor = Predictor(my_entity_finder,
                                 copy_move_x_length,
                                 parallel=False)
    predictor_1 = Predictor(my_entity_finder, copy_move_x_length)
    predictor_2 = Predictor(my_entity_finder, cropper)
    sequential_predictor = Predictor([my_entity_finder, my_entity_finder],
                                     [copy_move_x_length, cropper],
                                     parallel=False)
    composed_predictor = predictor_1.compose(predictor_2, parallel=False)
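    # With parallel=False the steps run sequentially (copy-move, then crop); composing the two single-step predictors should behave the same way.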
    train_input = task['train'][0]['input']
    train_output = task['train'][0]['output']
    print(composed_predictor)
    assert sequential_predictor.predict(train_input) == train_output
    assert composed_predictor.predict(train_input) == train_output
Example #7
def test_composite_selections():
    with open('training/' + os.listdir('training/')[205]) as f:
        raw_cases = json.load(f)
    cases = tuplefy_task(raw_cases)
    color_0 = Property(lambda x: frozenset({0}),
                       np.log(2),
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_5 = Property(lambda x: frozenset({5}),
                       np.log(10) - 1,
                       name=f'color {5}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1)
    select_not_0 = Selector.make_property_selector(take_color, color_0, False)
    select_not_5 = Selector.make_property_selector(take_color, color_5, False)
    select_not_0_nor_5 = select_not_0.intersect(select_not_5)
    entity_finder = base_entity_finder.compose(select_not_0_nor_5, True)
    select_5 = Selector.make_property_selector(take_color, color_5)
    center_y = Property(lambda x: x.entity.center(axis=0),
                        nll=np.log(2),
                        name='the center y coordinate',
                        output_types=frozenset({'y_coordinate'}),
                        entity_finder=base_entity_finder,
                        requires_entity=True)
    center_x = Property(lambda x: x.entity.center(axis=1),
                        nll=np.log(2),
                        name='the center x coordinate',
                        output_types=frozenset({'x_coordinate'}),
                        entity_finder=base_entity_finder,
                        requires_entity=True)
    center_5y = center_y.add_selector(select_5)
    length_5y = Property.create_distance_property(center_5y, center_y)
    center_5x = center_x.add_selector(select_5)
    length_5x = Property.create_distance_property(center_5x, center_x)
    vect_prop = Property.xy_length_to_vector(length_5y, length_5x)
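    # vect_prop: the (y, x) displacement from each selected entity's centre to the centre of the 5-coloured entity.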
    move_to_5 = Transformer(
        lambda entities, grid, copy=True: move(
            entities, vector_property=vect_prop, copy=copy, extend_grid=False),
        nll=vect_prop.nll + np.log(2),
        name=f"{'copy' if True else 'move'} them by ({vect_prop})")
    my_predictor = Predictor(entity_finder, move_to_5)

    for case in cases['train']:
        assert my_predictor.predict(case['input']) == case['output']
Example #8
def test_replace_color_entity_frame():
    with open('training/' + os.listdir('training/')[80]) as f:
        raw_case = json.load(f)
    case = tuplefy_task(raw_case)

    color_0 = Property(lambda x: frozenset({0}),
                       np.log(10) - 1,
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_1 = Property(lambda x: frozenset({1}),
                       np.log(10) - 1,
                       name=f'color {1}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_8 = Property(lambda x: frozenset({8}),
                       np.log(10) - 1,
                       name=f'color {8}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1)
    select_8 = Selector.make_property_selector(take_color, color_8, True)

    select_not_0 = Selector.make_property_selector(take_color, color_0, False)

    color_frame_blue = Transformer(
        lambda entities, grid, offsets=(0, 0, 0, 0):
        replace_colors_in_entities_frame(entities,
                                         grid=None,
                                         offsets=offsets,
                                         source_color_prop=color_0,
                                         target_color_prop=color_1),
        name=
        f'replace ({color_0}) with ({color_1}) in a box around them with offsets {(0, 0, 0, 0)}'
    )
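    # color_frame_blue: recolour 0-cells to 1 inside the (zero-offset) bounding box around each selected entity.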
    first_case = case['train'][0]['input']
    entity_finder = base_entity_finder.compose(select_8)
    my_predictor = Predictor(entity_finder, color_frame_blue)
    # print(my_predictor.predict(first_case))
    assert my_predictor.predict(first_case) == case['train'][0]['output']
    assert my_predictor.predict(
        case['test'][0]['input']) == case['test'][0]['output']

    entity_finder_2 = base_entity_finder.compose(select_not_0)
    my_predictor_2 = Predictor(entity_finder_2, color_frame_blue)
    assert my_predictor_2.predict(first_case) == case['train'][0]['output']
    assert my_predictor_2.predict(
        case['test'][0]['input']) == case['test'][0]['output']
    print(my_predictor_2)
Example #9
def main(cfg):
    global cash, predictor
    predictor = Predictor(cfg.DISCORD.tokenizer, cfg.DISCORD.model)
    cash = CashMannager(predictor.tokenizer)

    bot.run(cfg.DISCORD.token)
    print('Categorizer')

    categorizer = Categorizer(df=data.df, super_topics=data.super_topics)

    categorizer.sort_children(max_children=max_children,
                              min_occurrences=min_occurrences)

    start_time = time_keeper(start_time)

    print('Predictor')

    predictor = Predictor(
        df=categorizer.df,
        use_rules=use_rules,
        use_ML=use_ML,
        classifier=classifier,
        vectorizer=vectorizer,
        true_family_given=true_family_given,
        topic_lists=categorizer.topic_lists,
        super_topics=categorizer.super_topics + ['entity'],
    )

    predictor.calc_results()

    start_time = time_keeper(start_time)

    print('Evaluator')

    evaluator = Evaluator(
        expt_num=expt_num,
        data_sets=predictor.data_sets,
        topic_lists=categorizer.topic_lists,
Example #11
def test_reflect_about_line():
    with open('training/' + os.listdir('training/')[86]) as f:
        raw_task = json.load(f)
    task = tuplefy_task(raw_task)
    inp = task['train'][0]['input']
    out = task['train'][0]['output']
    vert_center_line = Property(
        lambda x: (float(np.array(x.grid).shape[1] - 1) / 2., 1.),
        np.log(4),
        name='the vertical center line',
        output_types=frozenset({'line'}),
        entity_finder=base_entity_finder)
    entities = base_entity_finder(inp)
    new_entities, new_grid = reflect_about_line(entities, inp,
                                                vert_center_line)
    # original = ((2, 2, 1),
    #             (2, 1, 2),
    #             (2, 8, 1))
    assert new_grid == ((1, 2, 2), (2, 1, 2), (1, 8, 2))
    horiz_center_line = Property(
        lambda x: (float(np.array(x.grid).shape[0] - 1) / 2., 0.),
        np.log(4),
        name='the horizontal center line',
        output_types=frozenset({'line'}),
        entity_finder=base_entity_finder)
    new_entities, new_grid = reflect_about_line(entities, inp,
                                                horiz_center_line)
    assert new_grid == ((2, 8, 1), (2, 1, 2), (2, 2, 1))
    back_diagonal_center_line = Property(lambda x: (0., -0.5),
                                         np.log(4),
                                         name='the back diagonal center line',
                                         output_types=frozenset({'line'}),
                                         entity_finder=base_entity_finder)
    new_entities, new_grid = reflect_about_line(entities, inp,
                                                back_diagonal_center_line)
    assert new_grid == ((2, 2, 2), (2, 1, 8), (1, 2, 1))
    forward_diagonal_center_line = Property(
        lambda x: (float(np.array(x.grid).shape[1] - 1.) / 2., 0.5),
        np.log(4),
        name='the forward diagonal center line',
        output_types=frozenset({'line'}),
        entity_finder=base_entity_finder)
    new_entities, new_grid = reflect_about_line(entities, inp,
                                                forward_diagonal_center_line)
    assert new_grid == ((1, 2, 1),
                        (8, 1, 2),
                        (2, 2, 2))
    new_entities, new_grid = reflect_about_line(entities, inp,
                                                vert_center_line)
    new_entities, new_grid = reflect_about_line(new_entities, new_grid,
                                                horiz_center_line)
    assert new_grid == out
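    # Reflecting across the vertical and then the horizontal centre line amounts to a 180-degree rotation.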

    new_entities, new_grid = rotate_via_reflects(entities, inp,
                                                 vert_center_line,
                                                 horiz_center_line)
    assert len(new_entities) == 3
    assert new_grid == out
    transformer = Transformer(
        lambda entities, grid: rotate_via_reflects(
            entities, grid, vert_center_line, horiz_center_line),
        nll=vert_center_line.nll + horiz_center_line.nll + np.log(2),
        name=f'reflect about ({vert_center_line}) then ({horiz_center_line})')
    entities = base_entity_finder(inp)
    # new_entities, new_grid = transformer.transform(entities, inp)
    my_predictor = Predictor(base_entity_finder, transformer, parallel=False)
    assert my_predictor.predict(inp) == out

    grid_center = Property(lambda x:
                           (float(np.array(x.grid).shape[0] - 1) / 2.,
                            float(np.array(x.grid).shape[1] - 1) / 2.),
                           0,
                           name='the center point of the grid',
                           output_types=frozenset({'point'}),
                           entity_finder=base_entity_finder)
    new_entities, new_grid = rotate_about_point(entities,
                                                inp,
                                                grid_center,
                                                quarter_steps=2)
    assert new_grid == out
Example #12
def test_place_shape():
    with open('training/' + os.listdir('training/')[94]) as f:
        raw_task = json.load(f)
    task = tuplefy_task(raw_task)
    input_grid = task['train'][0]['input']
    output_grid = task['train'][0]['output']
    entities = base_entity_finder(input_grid)
    appearing_shapes = Counter()

    for grid in task['train']:
        output_entities = base_entity_finder(grid['output'])
        appearing_shapes += Entity.shapes(output_entities)
    desired_shape = frozenset({((0.0, 1.0), 1), ((1.0, 0.0), 1),
                               ((-1.0, 1.0), 1), ((1.0, 1.0), 1),
                               ((1.0, -1.0), 1), ((0.0, -1.0), 1),
                               ((-1.0, -1.0), 1), ((-1.0, 0.0), 1)})
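    # desired_shape: the eight cells surrounding a centre point, all coloured 1 (a 3x3 ring), as offsets relative to the entity centre.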
    assert desired_shape in appearing_shapes
    color_5 = Property(lambda x: frozenset({5}),
                       np.log(10) - 1,
                       name=f'color {5}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1)
    center_0 = Property(lambda x: x.entity.center(axis=0),
                        nll=1 + np.log(2),
                        name='the center y coordinate',
                        output_types=frozenset({'y_coordinate'}),
                        entity_finder=base_entity_finder)
    center_1 = Property(lambda x: x.entity.center(axis=1),
                        nll=1 + np.log(2),
                        name='the center x coordinate',
                        output_types=frozenset({'x_coordinate'}),
                        entity_finder=base_entity_finder)
    center = Property.create_point_property(center_0, center_1)
    desired_shape_prop = Property(lambda x: desired_shape,
                                  np.log(10) - 1,
                                  name=f'shape {desired_shape}',
                                  output_types=frozenset({'shape'}),
                                  is_constant=True,
                                  entity_finder=base_entity_finder)
    # shape_entity_prop = Property(lambda x: x.entity.shape(), 1, name=f'the shape',
    #                              output_types=frozenset({'shape'}),
    #                              entity_finder=base_entity_finder)
    place_desired_shape = Transformer(
        lambda entities, grid: place_shape(
            entities, point_prop=center, shape_prop=desired_shape_prop),
        nll=center.nll + desired_shape_prop.nll + np.log(2),
        name=f'place ({desired_shape_prop}) at position ({center})')
    select_5 = Selector.make_property_selector(take_color, color_5)
    find_entities_5 = base_entity_finder.compose(select_5)
    my_predictor = Predictor(find_entities_5, place_desired_shape)
    assert my_predictor.predict(input_grid) == output_grid

    with open('training/' + os.listdir('training/')[14]) as f:
        raw_task14 = json.load(f)
    task14 = tuplefy_task(raw_task14)
    input_grid14 = task14['train'][0]['input']
    output_grid14 = task14['train'][0]['output']
    color_1 = Property(lambda x: frozenset({1}),
                       np.log(10) - 1,
                       name=f'color {1}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    select_1 = Selector.make_property_selector(take_color, color_1)
    # print(input_grid14)
    diamond = frozenset({((1.0, 0.0), 7), ((-1.0, 0.0), 7), ((0.0, 1.0), 7),
                         ((0.0, -1.0), 7)})
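    # diamond: the four orthogonally adjacent cells coloured 7, as offsets from the entity centre.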
    diamond_prop = Property(lambda x: diamond,
                            np.log(10) - 1,
                            name=f'shape {diamond}',
                            output_types=frozenset({'shape'}),
                            is_constant=True,
                            entity_finder=base_entity_finder)
    place_diamond = Transformer(
        lambda entities, grid: place_shape(entities, grid, center, diamond_prop
                                           ),
        name=f'place ({diamond_prop}) at position ({center})')
    diamond_predictor = Predictor(base_entity_finder.compose(select_1),
                                  place_diamond)
    print(diamond_predictor)
    for case in task14['train']:
        # print(case['input'])
        output_grid = diamond_predictor.predict(case['input'])
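        # The prediction should lie "between" the expected output and the input: its distances to each, summed, must not exceed the output-to-input distance.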
        assert (base_entity_finder.grid_distance(
            case['output'], diamond_predictor.predict(case['input'])) +
                base_entity_finder.grid_distance(
                    diamond_predictor.predict(case['input']), case['input']) <=
                base_entity_finder.grid_distance(case['output'],
                                                 case['input']))
Example #13
def test_case_29():
    with open('training/' + os.listdir('training/')[29]) as f:
        raw_task = json.load(f)
    base_entity_finder = EntityFinder(
        lambda grid: find_components(grid, directions=ALL_DIRECTIONS))
    trivial_selector = Selector(lambda entity, grid: True, name='')
    task = tuplefy_task(raw_task)
    inp = task['train'][0]['input']
    out = task['train'][0]['output']
    # print(task['train'][0]['input'])
    take_color = Property(lambda x: x.entity.colors(),
                          name='the colors',
                          output_types=frozenset({'color'}),
                          entity_finder=base_entity_finder,
                          nll=1,
                          requires_entity=True)
    # color_2 = Property(lambda x, i=2: frozenset({2}), np.log(10) - 2, name=f'color {2}',
    #                    output_types=frozenset({'color'}))
    color_1 = Property(lambda x, i=2: frozenset({1}),
                       np.log(10) - 1,
                       name=f'color {1}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    color_0 = Property(lambda x, i=2: frozenset({0}),
                       np.log(10) - 1,
                       name=f'color {0}',
                       output_types=frozenset({'color'}),
                       entity_finder=base_entity_finder)
    select_1 = Selector.make_property_selector(take_color, color_1)
    property_0 = Property(lambda x, i=0: i,
                          nll=1,
                          name=f'{0}',
                          output_types=frozenset({
                              'x_coordinate', 'y_coordinate', 'x_length',
                              'y_length', 'quantity'
                          }),
                          entity_finder=base_entity_finder)
    select_not_0 = Selector.make_property_selector(take_color,
                                                   color_0,
                                                   the_same=False)
    smallest_y = Property(lambda x: x.entity.max_coord(axis=0),
                          1 + np.log(4),
                          name='the largest y coordinate',
                          output_types=frozenset({'y_coordinate'}),
                          entity_finder=base_entity_finder,
                          requires_entity=True)
    min_y_of_blue = smallest_y.add_selector(select_1)
    distance_to_min_y_of_blue = Property.create_distance_property(
        min_y_of_blue, smallest_y)
    vector_to_min_y_of_blue = Property.xy_length_to_vector(
        distance_to_min_y_of_blue, property_0)
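    # vector_to_min_y_of_blue: a purely vertical displacement aligning each entity's largest y coordinate with that of the colour-1 entity (note: smallest_y is misleadingly named; it returns the largest y).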
    move_transform = Transformer(
        lambda entities, grid, vector_prop=vector_to_min_y_of_blue: move(
            entities, vector_property=vector_prop),
        nll=vector_to_min_y_of_blue.nll + np.log(2),
        name=f'move them by ({vector_to_min_y_of_blue})')
    my_predictor = Predictor(base_entity_finder.compose(trivial_selector),
                             move_transform)  # .compose(select_not_0)
    # display_case(my_predictor.predict(inp))
    # display_case(out)
    assert my_predictor.predict(inp) == out

    test_input = task['test'][0]['input']
    test_output = task['test'][0]['output']
    test_entities = base_entity_finder(test_input)
    assert len(test_entities) == 4

    selected_finder = base_entity_finder.compose(select_not_0)
    # selected_finder(test_input)
    assert len(selected_finder(test_input)) == 3

    assert my_predictor.predict(test_input) == test_output
Example #14
from loaders import EachMovieLoader, MovieLensLoader
from classes import Predictor
from evaluators import MovieLensEvaluator

# Loading data
path = r"\MovieLens\ml-100k\ml-100k"  # Path to the MovieLens directory.
dataset = "u.data"  # Name of the dataset file.
loader = MovieLensLoader(r"D:\Dropbox\Data Sets\MovieLens\ml-100k\ml-100k", dataset)
users, items = loader.users, loader.items

# Making predictions
predictor = Predictor(users, items)
print(predictor.predict_rating(users[1], items[100]))  # Predicted rating for user 1 on item 100.

# Running evaluation of a test set.
# Outputs predicted ratings to <dataset>.res and statistics to result.txt.
# evaluator = MovieLensEvaluator()
# evaluator.run_predictions(r"D:\Dropbox\Data Sets\MovieLens\ml-100k\ml-100k", "u1")
    # print(f"sys.getsizeof(transformers) = {sys.getsizeof(transformers)}", f"len(transformers) = {len(transformers)}")

    if not ALLOW_COMPOSITE_SELECTORS:
        transformers = itertools.chain(transformers, composite_transformers)
        transformers = list(transformers)
        transformers.sort()
        entity_finders = [
            base_entity_finder.compose(selector) for selector in selector_list
            if selector.nll + base_entity_finder.nll <= max_nll
        ]
        predictor_queue = [
            Predictor(entity_finder, transformer)
            for entity_finder, transformer in combine_sorted_queues((
                entity_finders, transformers), max_nll)
        ]
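        # predictor_queue: one candidate Predictor per (entity finder, transformer) pairing kept within the max_nll budget by combine_sorted_queues.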
    else:
        composite_transformers = list(composite_transformers)
        transformers = list(transformers)
        transformers.sort()
        composite_transformers.sort()
        entity_finders_noncomposite = [
            base_entity_finder.compose(selector, False)
            for selector in selector_list
            if selector.nll + base_entity_finder.nll <= max_nll
        ]

        entity_finders_composite = entity_finders_noncomposite + \