Example #1
def run(input_file):

    connections = [(lhs.split(), rhs.strip())
                   for lhs, rhs in get_tokenized_input(input_file, '->')]
    signal_a = part_one(connections)

    # Grab the input again, but this time remove the connection that feeds wire `b`, and
    # instead wire the part 1 result on wire `a` directly into `b` for the next run.
    connections = [(lhs.split(), rhs.strip())
                   for lhs, rhs in get_tokenized_input(input_file, '->')]
    connections = [(lhs, rhs) for (lhs, rhs) in connections if rhs != 'b']
    connections.append(([signal_a], 'b'))
    part_two(connections)
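
Note: every example on this page calls a get_tokenized_input helper that the snippets never define. The sketch below is a guess at its behavior (read the file, split each line on the given separator, optionally convert each token), consistent with how it is called throughout, including the three-argument form in Example #9; the real helper may differ.

def get_tokenized_input(input_file, separator, convert=None):
    """ Hypothetical reconstruction of the shared input helper.

    Reads input_file, splits each non-blank line on separator, and, if
    convert is given, applies it to every token. Returns one token list
    per line.
    """
    with open(input_file) as f:
        lines = [line.rstrip('\n') for line in f if line.strip()]
    tokenized = [line.split(separator) for line in lines]
    if convert is not None:
        tokenized = [[convert(token) for token in tokens]
                     for tokens in tokenized]
    return tokenized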
Example #2
def run(input_file):

    # Parse each line into a tuple of ((person1, person2), happiness_person1), and then into a dict
    # mapping each pair of people to the happiness change for person1.
    happiness_pairs = [
        __parse_happiness(line)
        for line in get_tokenized_input(input_file, ' ')
    ]
    happiness_map = {
        people: happiness
        for people, happiness in happiness_pairs
    }

    # Determine each distinct person
    people = list(set(people_pair[0] for people_pair in happiness_map.keys()))

    part_one(people, happiness_map)

    # Oops, you forgot yourself. Add yourself to the list with a 0 score for all pairings.
    for person in people:
        happiness_map[('me', person)] = 0
        happiness_map[(person, 'me')] = 0
    people.append('me')

    part_two(people, happiness_map)
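
__parse_happiness is not shown here. A plausible sketch, assuming input lines of the usual "Alice would gain 54 happiness units by sitting next to Bob." form, already split on spaces:

def __parse_happiness(tokens):
    """ Hypothetical parser: maps a token list such as
    ['Alice', 'would', 'gain', '54', ..., 'Bob.'] to
    ((person1, person2), signed happiness change for person1).
    """
    person1 = tokens[0]
    person2 = tokens[-1].rstrip('.')  # drop the trailing period
    happiness = int(tokens[3])
    if tokens[2] == 'lose':
        happiness = -happiness
    return (person1, person2), happiness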
Example #3
def run(input_file):

    # Parse reindeer from the input file
    reindeer = [
        Reindeer(tokens) for tokens in get_tokenized_input(input_file, ' ')
    ]

    part_one(reindeer, 2503)
    part_two(reindeer, 2503)
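
The Reindeer class is defined elsewhere. A sketch of what it plausibly looks like, assuming input lines of the form "Comet can fly 14 km/s for 10 seconds, but then must rest for 127 seconds." split on spaces, plus the kind of distance calculation that part_one(reindeer, 2503) presumably needs:

class Reindeer:
    """ Hypothetical reconstruction of the Reindeer class. """

    def __init__(self, tokens):
        self.name = tokens[0]
        self.speed = int(tokens[3])       # km/s while flying
        self.fly_time = int(tokens[6])    # seconds it can fly at a stretch
        self.rest_time = int(tokens[13])  # seconds it must then rest

    def distance_after(self, seconds):
        """ Distance covered after the given number of seconds. """
        cycle = self.fly_time + self.rest_time
        full_cycles, remainder = divmod(seconds, cycle)
        flying = full_cycles * self.fly_time + min(remainder, self.fly_time)
        return flying * self.speed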
Example #4
def run(input_file):

    # Parse Ingredients from the input file
    ingredients = [
        Ingredient(tokens) for tokens in get_tokenized_input(input_file, ' ')
    ]

    part_one(ingredients)
    part_two(ingredients)
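
Likewise, Ingredient is not shown. A sketch, assuming lines of the form "Butterscotch: capacity -1, durability -2, flavor 6, texture 3, calories 8" split on spaces:

class Ingredient:
    """ Hypothetical reconstruction of the Ingredient class. """

    def __init__(self, tokens):
        self.name = tokens[0].rstrip(':')
        self.capacity = int(tokens[2].rstrip(','))
        self.durability = int(tokens[4].rstrip(','))
        self.flavor = int(tokens[6].rstrip(','))
        self.texture = int(tokens[8].rstrip(','))
        self.calories = int(tokens[10])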
Example #5
def run(input_file):
    def __new_lights_grid():
        """ Returns a 1000x1000 grid of lights that all start off. """
        lights = dict()
        for coord in nested_iterable(range(1000), range(1000)):
            lights[coord] = 0
        return lights

    commands = [
        LightsCommand(tokens, LightsCommand.COMMAND_V1)
        for tokens in get_tokenized_input(input_file, ' ')
    ]
    part_one(__new_lights_grid(), commands)

    commands = [
        LightsCommand(tokens, LightsCommand.COMMAND_V2)
        for tokens in get_tokenized_input(input_file, ' ')
    ]
    part_two(__new_lights_grid(), commands)
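
nested_iterable is another undefined helper; judging by its use above, it iterates the cartesian product of its arguments. A minimal sketch:

from itertools import product

def nested_iterable(*iterables):
    """ Hypothetical helper: yields every combination of the given
    iterables, i.e. itertools.product. Used above to visit all
    1000x1000 light coordinates.
    """
    return product(*iterables)

With that in place, __new_lights_grid could equally return the dict comprehension {coord: 0 for coord in nested_iterable(range(1000), range(1000))}.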
Example #6
def run(input_file):

    # Parse the Aunt Sue list from the input file. Splitting each line on '\n'
    # yields a single token, so line[0] is the whole line.
    aunt_sue_list = [
        __parse_aunt_sue(line[0])
        for line in get_tokenized_input(input_file, '\n')
    ]

    properties = __parse_mfcsam_output("""children: 3
                      cats: 7
                      samoyeds: 2
                      pomeranians: 3
                      akitas: 0
                      vizslas: 0
                      goldfish: 5
                      trees: 3
                      cars: 2
                      perfumes: 1""")

    part_one(aunt_sue_list, properties)
    part_two(aunt_sue_list, properties)
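
__parse_mfcsam_output takes the readout above, one "name: value" pair per line. A sketch of a parser that would fit (the real helper is not shown):

def __parse_mfcsam_output(output):
    """ Hypothetical parser: maps each 'name: value' line of the
    MFCSAM readout to a dict entry with an int value. Leading
    whitespace on the indented lines is stripped along the way.
    """
    properties = {}
    for line in output.splitlines():
        name, value = line.split(':')
        properties[name.strip()] = int(value)
    return properties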
Example #7
def run(input_file):

    # Parse each line into a ((city1, city2), distance) tuple
    city_distances = [
        __parse_city_distance(line)
        for line in get_tokenized_input(input_file, ' ')
    ]

    # Hold the distinct cities we know about, as well as the distance between each pair
    cities = set()
    city_distance_map = dict()

    # Iterate the city distances and put the pairs of cities into a map with their distance.
    # Put the pairs in both orders so we can look them up either way.
    for (city1, city2), distance in city_distances:
        city_distance_map[(city1, city2)] = distance
        city_distance_map[(city2, city1)] = distance
        cities.add(city1)
        cities.add(city2)

    part_one(cities, city_distance_map)
    part_two(cities, city_distance_map)
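
__parse_city_distance is not shown either. Given the unpacking above, it must return a ((city1, city2), distance) pair; a sketch assuming lines of the form "London to Dublin = 464" split on spaces:

def __parse_city_distance(tokens):
    """ Hypothetical parser for token lists such as
    ['London', 'to', 'Dublin', '=', '464']. """
    return (tokens[0], tokens[2]), int(tokens[4])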
Example #8
def run(input_file):

    instructions = get_tokenized_input(input_file, ', ')[0]

    part_one(instructions)
    part_two(instructions)
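
get_tokenized_input returns one token list per line, so the trailing [0] selects the tokens of this puzzle's single input line. With a hypothetical sample line (not the real input):

sample = 'R2, L3'                  # hypothetical sample line
instructions = sample.split(', ')  # -> ['R2', 'L3']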
Example #9
def run(input_file):

    part_one(get_tokenized_input(input_file, ',', int)[0])
    part_two(get_tokenized_input(input_file, ',', int)[0])
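
Both parts here receive the same comma-separated integers, so the file could be parsed once, assuming part_one does not mutate the list it is given (a sketch, not the author's code):

def run(input_file):
    # First (and only) line, split on ',', each token converted to int.
    values = get_tokenized_input(input_file, ',', int)[0]
    part_one(values)
    part_two(values)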
Example #10
def run(input_file):

    boxes = get_tokenized_input(input_file, 'x', int)

    part_one(boxes)
    part_two(boxes)
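
Each line of this input holds box dimensions such as "2x3x4" (a hypothetical sample), so splitting on 'x' with int conversion yields [length, width, height] lists:

sample = '2x3x4'                                 # hypothetical sample line
box = [int(side) for side in sample.split('x')]  # -> [2, 3, 4]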