Example #1
    def __init__(self, cache=None, cache_file=None, read_cache=True, write_cache=True, use_network=True, 
            user_agent=None, timeout=30, delay=5, proxies=None, proxy_file=None, max_proxy_errors=5,
            opener=None, headers=None, data=None, num_retries=0, num_redirects=1,
            force_html=False, force_ascii=False, max_size=None, default='', pattern=None):
        """
        `cache' is a pdict object to use for the cache
        `cache_file' sets filename to store cached data
        `read_cache' sets whether to read from the cache
        `write_cache' sets whether to write to the cache
        `use_network' sets whether to download content not in the cache
        `user_agent' sets the User Agent to download content with
        `timeout' is the maximum amount of time to wait for an HTTP response
        `delay' is the minimum amount of time (in seconds) to wait after downloading content from a domain per proxy
        `proxy_file' is a filename to read proxies from
        `max_proxy_errors' is the maximum number of consecutive errors allowed per proxy before discarding
            an error is only counted if another proxy is able to successfully download the URL
            set to None to disable
        `proxies' is a list of proxies to cycle through when downloading content
        `opener' sets an optional opener to use instead of using urllib2 directly
        `headers' are the headers to include in the request
        `data' is what to post at the URL
        `num_retries' sets how many times to retry downloading a URL after an error
        `num_redirects' sets how many times the URL is allowed to be redirected, to avoid an infinite loop
        `force_html' sets whether to download non-text data
        `force_ascii' sets whether to only return ascii characters
        `max_size' is the maximum number of bytes that will be downloaded, or None to disable the limit
        `default' is what to return when no content can be downloaded
        `pattern' is a regular expression that the downloaded HTML has to match to be considered a valid download
        """
        socket.setdefaulttimeout(timeout)
        need_cache = read_cache or write_cache
        if pdict and need_cache:
            cache_file = cache_file or settings.cache_file
            self.cache = cache or pdict.PersistentDict(cache_file)
        else:
            self.cache = None
            if need_cache:
                common.logger.info('Cache disabled because could not import pdict')

        if proxy_file:
            # random.shuffle shuffles in place and returns None, so the
            # original inline call always fell back to the unshuffled
            # `proxies` argument; shuffle before building the deque instead
            proxy_list = common.read_list(proxy_file)
            random.shuffle(proxy_list)
        else:
            proxy_list = []
        self.settings = adt.Bag(
            read_cache = read_cache,
            write_cache = write_cache,
            use_network = use_network,
            delay = delay,
            proxies = collections.deque(proxy_list or proxies or []),
            proxy_file = proxy_file,
            max_proxy_errors = max_proxy_errors,
            user_agent = user_agent,
            opener = opener,
            headers = headers,
            data = data,
            num_retries = num_retries,
            num_redirects = num_redirects,
            force_html = force_html,
            force_ascii = force_ascii,
            max_size = max_size,
            default = default,
            pattern = pattern
        )
        self.last_load_time = self.last_mtime = time.time()
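
A minimal usage sketch of this constructor. The enclosing class and its fetch method are not shown in this excerpt, so the names `Download` and `get` below are assumptions:

# hypothetical usage; `Download` and `get` are assumed names
d = Download(
    cache_file='cache.db',      # where cached responses are stored
    user_agent='MyBot/1.0',
    delay=3,                    # seconds between requests per domain per proxy
    proxy_file='proxies.txt',   # one proxy per line, read via common.read_list
    num_retries=2,
)
html = d.get('http://example.com')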
Example #2
    def test_second_ex_part2(self):
        f = 'day10-test2.txt'
        input_list: List[int] = common.read_list(f)
        input_list = day10.prep_list(input_list)

        answer: int = day10.part2(input_list)
        self.assertEqual(19208, answer)
Example #3
    def __init__(self, cache=None, cache_file=None, read_cache=True, write_cache=True, use_network=True, 
            user_agent=None, timeout=30, delay=5, proxies=None, proxy_file=None, max_proxy_errors=5,
            opener=None, headers=None, data=None, num_retries=0, num_redirects=1,
            force_html=False, force_ascii=False, max_size=None, default='', pattern=None):
        """
        `cache' is a pdict object to use for the cache
        `cache_file' sets filename to store cached data
        `read_cache' sets whether to read from the cache
        `write_cache' sets whether to write to the cache
        `use_network' sets whether to download content not in the cache
        `user_agent' sets the User Agent to download content with
        `timeout' is the maximum amount of time to wait for an HTTP response
        `delay' is the minimum amount of time (in seconds) to wait after downloading content from a domain per proxy
        `proxy_file' is a filename to read proxies from
        `max_proxy_errors' is the maximum number of consecutive errors allowed per proxy before discarding
            an error is only counted if another proxy is able to successfully download the URL
            set to None to disable
        `proxies' is a list of proxies to cycle through when downloading content
        `opener' sets an optional opener to use instead of using urllib2 directly
        `headers' are the headers to include in the request
        `data' is what to post at the URL
        `num_retries' sets how many times to retry downloading a URL after an error
        `num_redirects' sets how many times the URL is allowed to be redirected, to avoid an infinite loop
        `force_html' sets whether to download non-text data
        `force_ascii' sets whether to only return ascii characters
        `max_size' is the maximum number of bytes that will be downloaded, or None to disable the limit
        `default' is what to return when no content can be downloaded
        `pattern' is a regular expression that the downloaded HTML has to match to be considered a valid download
        """
        socket.setdefaulttimeout(timeout)
        need_cache = read_cache or write_cache
        if pdict and need_cache:
            cache_file = cache_file or settings.cache_file
            self.cache = cache or pdict.PersistentDict(cache_file)
        else:
            self.cache = None
            if need_cache:
                common.logger.info('Cache disabled because could not import pdict')

        self.settings = adt.Bag(
            read_cache = read_cache,
            write_cache = write_cache,
            use_network = use_network,
            delay = delay,
            proxies = collections.deque((common.read_list(proxy_file) if proxy_file else []) or proxies or []),
            proxy_file = proxy_file,
            max_proxy_errors = max_proxy_errors,
            user_agent = user_agent,
            opener = opener,
            headers = headers,
            data = data,
            num_retries = num_retries,
            num_redirects = num_redirects,
            force_html = force_html,
            force_ascii = force_ascii,
            max_size = max_size,
            default = default,
            pattern = pattern
        )
        self.last_load_time = self.last_mtime = time.time()
Example #4
    def test_example(self):
        f = 'day07-test.txt'
        raw_list = common.read_list(f, str)

        sg = day07.SuitcaseCatalog(raw_list)
        answer1 = sg.count_contains('shiny gold')

        self.assertEqual(4, answer1)
Example #5
    def test_count_descendants(self):
        f = 'day07-test3.txt'
        raw_list = common.read_list(f, str)

        sg = day07.SuitcaseCatalog(raw_list)
        answer2 = sg.count_descendants('shiny gold')

        self.assertEqual(126, answer2)
Example #6
    def __init__(self,
                 cache=None,
                 cache_file=None,
                 read_cache=True,
                 write_cache=True,
                 use_network=True,
                 user_agent=None,
                 timeout=30,
                 delay=5,
                 proxies=None,
                 proxy_file=None,
                 max_proxy_errors=5,
                 opener=None,
                 headers=None,
                 data=None,
                 num_retries=0,
                 num_redirects=1,
                 force_html=False,
                 force_ascii=False,
                 max_size=None,
                 default='',
                 pattern=None,
                 acceptable_errors=None):
        socket.setdefaulttimeout(timeout)
        need_cache = read_cache or write_cache
        if pdict and need_cache:
            cache_file = cache_file or settings.cache_file
            self.cache = cache or pdict.PersistentDict(cache_file)
        else:
            self.cache = None
            if need_cache:
                common.logger.info(
                    'Cache disabled because could not import pdict')

        self.settings = adt.Bag(
            read_cache=read_cache,
            write_cache=write_cache,
            use_network=use_network,
            delay=delay,
            proxies=(common.read_list(proxy_file) if proxy_file else [])
            or proxies or [],
            proxy_file=proxy_file,
            max_proxy_errors=max_proxy_errors,
            user_agent=user_agent,
            opener=opener,
            headers=headers,
            data=data,
            num_retries=num_retries,
            num_redirects=num_redirects,
            force_html=force_html,
            force_ascii=force_ascii,
            max_size=max_size,
            default=default,
            pattern=pattern,
            acceptable_errors=acceptable_errors)
        self.last_load_time = self.last_mtime = time.time()
        self.num_downloads = self.num_errors = 0
Example #7
    def reload_proxies(self):
        """Check every 10 minutes for an updated proxy file."""
        if self.settings.proxy_file and time.time() - self.last_load_time > 10 * 60:
            self.last_load_time = time.time()
            if os.path.exists(self.settings.proxy_file):
                if os.stat(self.settings.proxy_file).st_mtime != self.last_mtime:
                    self.last_mtime = os.stat(self.settings.proxy_file).st_mtime
                    self.settings.proxies = collections.deque(common.read_list(self.settings.proxy_file))
                    common.logger.debug('Reloaded proxies from updated file.')
Example #8
    def reload_proxies(self, timeout=600):
        """Check periodically for updated proxy file

        timeout:
            the number of seconds to wait between checks for an updated proxy file
        """
        if self.settings.proxy_file and time.time() - self.last_load_time > timeout:
            self.last_load_time = time.time()
            if os.path.exists(self.settings.proxy_file):
                if os.stat(self.settings.proxy_file).st_mtime != self.last_mtime:
                    self.last_mtime = os.stat(self.settings.proxy_file).st_mtime
                    self.settings.proxies = common.read_list(self.settings.proxy_file)
                    common.logger.debug('Reloaded proxies from updated file.')
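
The pattern above (a time-based throttle plus an mtime comparison) is reusable outside this class. A minimal standalone sketch of the same idea, with assumed names:

import os

def reload_if_changed(path, last_mtime, load):
    """Call load(path) only when the file's mtime has changed.

    Returns the mtime to remember for the next check.
    """
    if os.path.exists(path):
        mtime = os.stat(path).st_mtime
        if mtime != last_mtime:
            load(path)
            return mtime
    return last_mtime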
Example #9
def read_mask(idx, size, imageset):
    """Read mask file."""
    mask_file = os.path.join(imageset, 'my_output', 'parsed_mask',
                             str(idx[0]) + '_' + str(idx[1]) + '.jpg.mask')
    if not os.path.exists(mask_file):
        print(Notify.WARNING, 'Not exist', mask_file, Notify.ENDC)
        return None, None
    mask_list = read_list(mask_file)
    # list() is needed on Python 3, where map() returns an iterator
    mask_p1 = np.array(list(map(int, mask_list[0].split(','))), np.int32)
    mask_p2 = np.array(list(map(int, mask_list[1].split(','))), np.int32)
    mask_p1 = np.reshape(mask_p1, size)
    mask_p2 = np.reshape(mask_p2, size)
    return mask_p1, mask_p2
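
A hedged call sketch: the directory layout and the two comma-separated rows per .mask file follow from the code above, but the concrete index, size, and path here are made up:

# hypothetical: reads /data/imageset/my_output/parsed_mask/3_7.jpg.mask,
# which must hold two comma-separated rows of 480 * 640 integers each
p1, p2 = read_mask((3, 7), (480, 640), '/data/imageset')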
Example #10
    def __init__(self, cache=None, cache_file=None, read_cache=True, write_cache=True, use_network=True, 
            user_agent=None, timeout=30, delay=5, proxies=None, proxy_file=None, max_proxy_errors=5,
            opener=None, headers=None, data=None, num_retries=0, num_redirects=0, num_caches=1,
            force_html=False, force_ascii=False, max_size=None, default='', pattern=None, acceptable_errors=None, 
            throttle_additional_key=None, keep_ip_ua=True, **kwargs):
        socket.setdefaulttimeout(timeout)
        need_cache = read_cache or write_cache
        if pdict and need_cache:
            cache_file = cache_file or settings.cache_file
            self.cache = cache or pdict.PersistentDict(cache_file, num_caches=num_caches)
        else:
            self.cache = None
            if need_cache:
                common.logger.warning('Cache disabled because could not import pdict')

        self.settings = adt.Bag(
            read_cache = read_cache,
            write_cache = write_cache,
            use_network = use_network,
            delay = delay,
            proxies = (common.read_list(proxy_file) if proxy_file else []) or proxies or [],
            proxy_file = proxy_file,
            max_proxy_errors = max_proxy_errors,
            user_agent = user_agent,
            opener = opener,
            headers = headers,
            data = data,
            num_retries = num_retries,
            num_redirects = num_redirects,
            num_caches=num_caches,
            force_html = force_html,
            force_ascii = force_ascii,
            max_size = max_size,
            default = default,
            pattern = pattern,
            keep_ip_ua = keep_ip_ua,
            acceptable_errors = acceptable_errors
        )
        self.last_load_time = self.last_mtime = time.time()
        self.num_downloads = self.num_errors = 0
        self.throttle_additional_key = throttle_additional_key
Example #11
# propagate each entry's arrangement count forward; the count for the
# highest adapter is the total number of ways to reach your device's adapter
def count(il: List[int]) -> int:
    counter = collections.Counter({0: 1})
    for entry in il:
        counter[entry + 1] += counter[entry]
        counter[entry + 2] += counter[entry]
        counter[entry + 3] += counter[entry]

    return counter[max(il)]


def part1(il) -> int:
    difference_count = get_differences(il)
    return difference_count[1] * difference_count[3]


def part2(il) -> int:
    return count_branches(tuple(il))


if __name__ == '__main__':
    f = 'day10.txt'
    input_list: List[int] = common.read_list(f)
    input_list = prep_list(input_list)

    answer1: int = part1(input_list)
    print(f'Answer 1: {answer1}')

    answer2: int = part2(input_list)
    print(f'Answer 2: {answer2}')
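
The dynamic programming in count() is easiest to verify on a tiny chain. Assuming prep_list sorts the adapters and prepends the 0-jolt outlet (that function is not shown in this excerpt), a quick check:

# arrangements from 0 up to 3 with steps of at most 3 jolts:
# 0-1-2-3, 0-1-3, 0-2-3, 0-3  ->  4 in total
assert count([0, 1, 2, 3]) == 4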
Example #12
        self.seen_positions = []

    def process_first_loop(self) -> bool:
        """Run until the program terminates (True) or an instruction repeats (False)."""
        while True:
            self.process()

            if self.cur_position in self.seen_positions:
                return False

            if self.check_for_exit():
                return True


if __name__ == '__main__':
    f = 'day08.txt'
    input_list = common.read_list(f, str)
    bc = BootCode(input_list)

    bc.process_first_loop()
    answer1 = bc.accumulator
    print(f'Answer 1: {answer1}')

    changeable_spaces = []
    for i, line in enumerate(input_list):
        if line[:3] in ['nop', 'jmp']:
            changeable_spaces.append(i)

    for space in changeable_spaces:
        instructions = input_list.copy()
        if instructions[space][:3] == 'nop':
            cmd = 'jmp'
Example #13
    def setUp(self) -> None:
        f = 'day09-test.txt'
        self.test_input = common.read_list(f)
Example #14
        # run the contains function for each color in the catalog
        for key in self.catalog:
            if self.contains(key, color):
                count += 1

        return count

    def count_descendants(self, color: str) -> int:
        count: int = 0
        for bag in self.catalog[color]:
            if self.catalog[bag.color] is None:
                count += bag.count
            else:
                count += bag.count + (bag.count *
                                      self.count_descendants(bag.color))

        return count


if __name__ == '__main__':
    f = 'day07.txt'
    raw_list = common.read_list(f, str)

    sg = SuitcaseCatalog(raw_list)
    answer1 = sg.count_contains('shiny gold')
    answer2 = sg.count_descendants('shiny gold')

    print(f'Answer 1: {answer1}')
    print(f'Answer 2: {answer2}')
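
A short worked example of the recursion in count_descendants, using a hypothetical catalog (the input format itself is not shown in this excerpt):

# if 'shiny gold' holds 2 'dark red' bags, each 'dark red' holds
# 3 'dark blue' bags, and 'dark blue' bags are empty (catalog entry None):
#   count_descendants('dark red')   -> 3            (empty bags add only their count)
#   count_descendants('shiny gold') -> 2 + 2 * 3 = 8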
Example #15
            if any(sum(combo) == self.code[i] for combo in possible_combos):
                continue

            return self.code[i]

    def find_encryption_weakness(self, invalid_entry) -> int:
        """ Uses a nested for loop to try and find the sublist that totals the submitted invalid entry """

        stop: int  = self.code.index(invalid_entry)

        for i in range(0, stop):
            total: int = self.code[i]

            for j in range(i+1, stop):
                total += self.code[j]

                if total == invalid_entry:
                    # include index j, the last entry added to the total
                    values = self.code[i:j + 1]
                    return min(values) + max(values)


if __name__ == '__main__':
    f    = 'day09.txt'
    xmas = XMAS(common.read_list(f, int), 25)

    answer1 = xmas.find_first_invalid_entry
    print(f'Answer 1: {answer1}')

    answer2 = xmas.find_encryption_weakness(answer1)
    print(f'Answer 2: {answer2}')
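
Because every entry in the code is positive, the nested loop above can be replaced by a single sliding-window pass. A sketch with the same inputs and return value:

def find_encryption_weakness_window(code, invalid_entry):
    """O(n) sliding window; assumes all entries are positive."""
    lo, total = 0, 0
    for hi, value in enumerate(code):
        total += value
        # shrink the window from the left while it overshoots the target
        while total > invalid_entry and lo < hi:
            total -= code[lo]
            lo += 1
        if total == invalid_entry and hi > lo:
            window = code[lo:hi + 1]
            return min(window) + max(window)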
Example #16
            if letter == 'L':
                max_col = math.floor((max_col + min_col) / 2)
            elif letter == 'R':
                min_col = math.ceil((max_col + min_col) / 2)

    def get_seat(self) -> tuple:
        return self.assigned_row, self.assigned_col

    def calc_seat_id(self):
        self.seat_id = self.assigned_row * 8 + self.assigned_col


if __name__ == "__main__":
    input_file = 'day05.txt'
    ticket_codes = common.read_list(input_file, str)

    seat_ids = []
    for ticket_code in ticket_codes:
        t = TicketParser(ticket_code)
        seat_ids.append(t.seat_id)

    max_seat_id = max(seat_ids)
    print(f'Answer 1: {max_seat_id}')

    seat_ids.sort()
    my_seat_id: int = 0
    # stop before the last entry so seat_ids[i + 1] stays in range
    for i, seat_id in enumerate(seat_ids[:-1]):
        difference = seat_ids[i + 1] - seat_id
        if difference > 1:
            my_seat_id = (seat_id + seat_ids[i + 1]) // 2
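
The row/column bisection in TicketParser is equivalent to reading the ticket code as a 10-bit binary number, since F/L select the lower half (bit 0) and B/R the upper half (bit 1). A compact alternative:

def seat_id_from_code(ticket_code: str) -> int:
    # F and L are 0-bits, B and R are 1-bits
    return int(ticket_code.translate(str.maketrans('FBLR', '0101')), 2)

assert seat_id_from_code('FBFBBFFRLR') == 357  # worked example from the puzzle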
Example #17
                cs.set_memory(int(match.group(1)), int(match.group(2)))

    return cs.get_memory_sum()


def part2(instructions: List[str]) -> int:
    cs: ComputerSystem = ComputerSystem()

    for instruction in instructions:
        if instruction[0:3] == 'mas':
            match = re.match(MASK_PATTERN, instruction)
            if match:
                cs.set_mask(match.group(1).strip())

        elif instruction[0:3] == 'mem':
            match = re.match(MEM_PATTERN, instruction)
            if match:
                cs.memory_decoder(int(match.group(1)), int(match.group(2)))

    return sum(cs.mem.values())


if __name__ == '__main__':
    instructions: List[str] = common.read_list('day14.txt', str)

    answer1 = part1(instructions)
    print(f'Answer 1: {answer1}')

    answer2 = part2(instructions)
    print(f'Answer 2: {answer2}')
Example #18
import common

if __name__ == '__main__':
    f = 'day12.txt'
    moves = common.read_list(f, str)
    ship = common.Boat()

    for move in moves:
        ship.move(move)

    answer1: int = ship.manhattan_distance()
    print(f'Answer 1: {answer1}')

    ship2 = common.WaywardBoat()

    for move in moves:
        ship2.move(move)

    answer2: int = ship2.manhattan_distance()
    print(f'Answer 2: {answer2}')

Example #19
from itertools import combinations
from common import IntList
from common import read_list
from common import product


def find_values(arr: IntList, expected_value: int, group_size: int = 2):
    for group in combinations(arr, group_size):
        if sum(group) == expected_value:
            return product(group)


if __name__ == "__main__":
    input_file = 'day01.txt'
    input_array = read_list(input_file)

    # Puzzle 1
    res1 = find_values(input_array, 2020)
    print(f'Puzzle1 Answer: {res1}')

    # Puzzle 2
    res2 = find_values(input_array, 2020, 3)
    print(f'Puzzle2 Answer: {res2}')
Example #20
def read_mask_to_bool(mask_file):
    """Read two comma-separated integer rows and return them as one boolean array."""
    mask_list = read_list(mask_file)
    # list() is needed on Python 3, and the np.bool alias has been removed
    # from NumPy, so use the builtin bool as the dtype
    mask_p1 = np.array(list(map(int, mask_list[0].split(','))), bool)
    mask_p2 = np.array(list(map(int, mask_list[1].split(','))), bool)
    return np.concatenate((mask_p1, mask_p2))