def main():
    # Read the blocked-IP ranges ("low-high", one per line) and parse them into tuples.
    with open("day20_1_input.txt", "r") as f:
        data = [line.strip() for line in f]
    for i in range(len(data)):
        low, high = data[i].split('-')
        data[i] = (int(low), int(high))
    data.sort(key=lambda x: x[0])

    # Walk the sorted ranges, skipping past every range that still covers ip_address;
    # the first gap is the lowest unblocked address.
    ip_address = 0
    index = 0
    for i, value in enumerate(data):
        if ip_address < value[0]:
            index = i
            break
        elif ip_address <= value[1]:
            ip_address = value[1] + 1

    if ip_address <= 4294967295:  # highest valid 32-bit address
        print("Done! Found ip address {0}".format(ip_address))

    # Show the ranges around the gap, highlighting where the found address fits.
    printed = False
    for i in range(max(LOWER_BOUND, index - 10), min(UPPER_BOUND, index + 10)):
        if ip_address < data[i][0] and not printed:
            color_print(Color.BLUE, "-- {0} --".format(ip_address))
            printed = True
        print("{0} - {1}".format(data[i][0], data[i][1]))
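# The snippet above depends on a few names defined elsewhere in this repo: a Color
# constant holder, a color_print(color, text) helper, and the LOWER_BOUND/UPPER_BOUND
# index clamps. The definitions below are only a minimal sketch of compatible
# stand-ins (ANSI escape codes, placeholder clamp values); the real ones may differ.
class Color:
    BLUE = "\033[94m"


def color_print(color, text):
    # Print the text wrapped in the given ANSI color, then reset the terminal color.
    print("{0}{1}\033[0m".format(color, text))


LOWER_BOUND = 0        # assumed: lowest index of the range window to print
UPPER_BOUND = 10 ** 9  # assumed placeholder: upper clamp on that window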
def database_builder():
    # Build (or update) the public torrent database: parse every .torrent file in
    # torrents\, enrich it with metadata from the anime-offline-database, and write
    # the JSON outputs plus their MD5 checksums to output\.
    # Module-level imports assumed here: os, json, base64, hashlib, urllib.request,
    # anitopy, bencode, bencoding, "from datetime import datetime, timedelta",
    # "from colorama import Fore, Style", and the project's own color helper.
    list_database = []
    list_origin_database = []
    list_torrents_dir = [
        f for f in os.listdir('torrents')
        if os.path.isfile(os.path.join('torrents', f))
    ]
    origin_database_loaded = False
    database_loaded = False
    full_loaded = False

    # Load the offline anime database, downloading it first if it is missing.
    if os.path.isfile('anime_database.json'):
        with open('anime_database.json', encoding='utf-8') as anime_database_file:
            anime_database_obj = json.load(anime_database_file)
    else:
        color.color_print(Fore.CYAN, '[INFO]', 'MISSING OFFLINE DATABASE. DOWNLOADING...')
        color.color_print(Fore.CYAN, '[INFO]',
                          'THANKS TO THE MANAMI PROJECT FOR PROVIDING THE OFFLINE DATABASE')
        urllib.request.urlretrieve(
            'https://raw.githubusercontent.com/manami-project/anime-offline-database/master/anime-offline-database.json',
            'anime_database.json')
        color.color_print(Fore.CYAN, '[INFO]', 'DOWNLOAD COMPLETED\n')
        with open('anime_database.json', encoding='utf-8') as anime_database_file:
            anime_database_obj = json.load(anime_database_file)

    # Try to load the previous outputs; if either file is missing or corrupt we fall
    # back to rebuilding everything from scratch.
    if os.path.isfile('output\\database_original.json'):
        try:
            with open('output\\database_original.json', encoding='utf-8') as origin_database_json_file:
                origin_database_obj = json.load(origin_database_json_file)
            origin_database_loaded = True
        except (OSError, ValueError):
            origin_database_loaded = False
        try:
            with open('output\\database.json', encoding='utf-8') as database_json_file:
                database_obj = json.load(database_json_file)
            database_loaded = True
        except (OSError, ValueError):
            database_loaded = False

    if origin_database_loaded and database_loaded:
        full_loaded = True
        list_database = database_obj
        list_origin_database = origin_database_obj

    added_new_state = False
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]', 'MANAMI PROJECT: ANIME OFFLINE DATABASE')
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]', 'IGORCMOURA: ANITOPY')
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]', 'JCUL: BENCODE\n')
    color.color_print(Fore.YELLOW, '[PROCESSING]', 'PARSE TORRENTS\n')

    for i in list_torrents_dir:
        torrent_filename = i
        torrent_full_path = 'torrents\\' + i
        with open(torrent_full_path, 'rb') as fh:
            torrent_data = fh.read()

        # Skip files already in the database unless we are doing a full rebuild.
        if not search_database(list_database, i) or not full_loaded:
            torrent = bencode.decode(torrent_data)
            torrent_announces = []
            torrent_files = []
            torrent_creation_date = ''
            torrent_hash = ''
            torrent_magnet = ''
            torrent_total_length = 0

            for im in torrent:
                # Creation date, shifted back 9 hours from UTC.
                torrent_creation_date = (
                    datetime.utcfromtimestamp(int(im[b'creation date'])) -
                    timedelta(hours=9)).strftime('%Y-%m-%d %H:%M:%S')
                torrent_temp_announce = []
                for imfw in im[b'announce-list']:
                    torrent_temp_announce.append(imfw[0].decode("utf-8"))
                # Info-hash of the bencoded info dict; this is what the magnet link points at.
                torrent_hash = hashlib.sha1(bencoding.bencode(im[b'info'])).hexdigest()
                torrent_magnet = 'magnet:?xt=urn:btih:{}'.format(torrent_hash)
                torrent_announces = torrent_temp_announce
                if b'files' in im[b'info']:
                    # Multi-file torrent: collect every file and sum the lengths.
                    for imf in im[b'info'][b'files']:
                        torrent_files.append({
                            'name': imf[b'path'][0].decode("utf-8"),
                            'size': format_size_units(imf[b'length'])
                        })
                        torrent_total_length += imf[b'length']
                else:
                    # Single-file torrent.
                    torrent_total_length = im[b'info'][b'length']
                    torrent_files.append({
                        'name': im[b'info'][b'name'].decode("utf-8"),
                        'size': format_size_units(im[b'info'][b'length'])
                    })

            torrent_size = format_size_units(torrent_total_length)
            info_id = random_string_digits(10)

            # Parse the file name with anitopy and look the title up in the offline database.
            result_anitopy = anitopy.parse(torrent_filename)
            anime_db_result = search_anime(anime_database_obj, result_anitopy['anime_title'])

            json_data_for_add = {
                'id': info_id,
                'file_name': torrent_filename,
                'title': result_anitopy['anime_title'],
                'episode': result_anitopy.get('episode_number'),
                'hash': torrent_hash,
                'size': torrent_size,
                'resolution': result_anitopy.get('video_resolution'),
                'video_codec': result_anitopy.get('video_term'),
                'audio_codec': result_anitopy.get('audio_term'),
                'release_group': result_anitopy.get('release_group'),
                'created_date': torrent_creation_date,
                'magnet_url': torrent_magnet,
                'torrent_url': 'https://anime.cryental.dev/download/' + info_id + '.torrent',
                'extra': {
                    'announces': torrent_announces,
                    'files': torrent_files
                }
            }
            if not anime_db_result:
                json_data_for_add['metadata'] = None
            else:
                json_data_for_add['metadata'] = {
                    'type': anime_db_result.get('type'),
                    'episodes': anime_db_result.get('episodes'),
                    'picture': anime_db_result.get('picture'),
                    'thumbnail': anime_db_result.get('thumbnail'),
                    'status': anime_db_result.get('status')
                }
            list_database.append(json_data_for_add)

            if not search_database(list_origin_database, i) or not full_loaded:
                # The "original" database also keeps the raw .torrent file, base64-encoded.
                with open(torrent_full_path, "rb") as f:
                    encodedZip = base64.b64encode(f.read())
                json_original_data_for_add = {
                    'id': info_id,
                    'file_name': torrent_filename,
                    'hash': torrent_hash,
                    'raw_data': encodedZip.decode(),
                    'created_date': torrent_creation_date
                }
                list_origin_database.append(json_original_data_for_add)

            added_new_state = True
            color.color_print(Fore.YELLOW, '[PROCESSED] ', i)
        else:
            print(Fore.LIGHTRED_EX + '[SKIPPED] ' + Style.RESET_ALL + i)

    # Persist everything only if something changed or a full rebuild was needed.
    if added_new_state or not full_loaded:
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'SORTING LIST')
        list_database.sort(key=sortSecond, reverse=True)
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'SORTING ORIGINAL LIST')
        list_origin_database.sort(key=sortSecond, reverse=True)

        color.color_print(Fore.YELLOW, '[PROCESSING]', 'DISK ACCESSING')
        with open('output\\database.json', 'w') as outfile:
            color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING LIST')
            json.dump(list_database, outfile)
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'DISK ACCESSING')
        with open('output\\database_original.json', 'w') as outfile:
            color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING LIST ORIGINAL')
            json.dump(list_origin_database, outfile)

        color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING UPDATED DATE')
        today = datetime.now()
        with open('output\\updated_on.txt', 'w') as new_days:
            new_days.write(today.strftime("%Y-%m-%d %H:%M:%S"))

        # Companion .md5 files let clients check whether their copy is stale.
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING HASH FILES')
        database_md5 = str(md5('output\\database.json'))
        origin_database_md5 = str(md5('output\\database_original.json'))
        updated_md5 = str(md5('output\\updated_on.txt'))
        with open('output\\database.json.md5', 'w') as outfile:
            json.dump(database_md5, outfile)
        with open('output\\database_original.json.md5', 'w') as outfile:
            json.dump(origin_database_md5, outfile)
        with open('output\\updated_on.txt.md5', 'w') as outfile:
            json.dump(updated_md5, outfile)

    color.color_print(Fore.YELLOW, '[DONE]', 'COMPLETED\n')
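# database_builder() calls several helpers (search_database, search_anime, sortSecond,
# format_size_units, random_string_digits, md5) that are defined elsewhere in the repo.
# The sketches below are only plausible stand-ins for three of them, inferred from how
# they are used above; the real implementations may differ.
import hashlib
import random
import string


def format_size_units(size_bytes):
    # Render a byte count as a human-readable string, e.g. 1536 -> '1.50KB'.
    if size_bytes < 1024:
        return '{}bytes'.format(size_bytes)
    size = float(size_bytes)
    for unit in ('KB', 'MB', 'GB', 'TB'):
        size /= 1024
        if size < 1024 or unit == 'TB':
            return '{:.2f}{}'.format(size, unit)


def random_string_digits(length=10):
    # Random alphanumeric identifier used as the torrent's public id.
    return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(length))


def md5(file_path):
    # MD5 hex digest of a file, read in chunks so large files stay memory-friendly.
    digest = hashlib.md5()
    with open(file_path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()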
async def main():
    # Async crawler: page through the Ohys-Raws JSON listing with aiohttp, download any
    # missing/empty .torrent files with aiofiles, then rebuild the JSON database.
    # Module-level imports assumed: os, sys, json, html, time, aiohttp, aiofiles,
    # "from colorama import Fore", and the project's color, modules, cls and init helpers.
    init()
    crawl_new_torrent_only = True
    cron_job = True
    cron_job_interval = 2
    cls()
    while True:
        print('Ohys-Raws Crawler Engine 4.17\nPython Version\n\nDeveloped By Cryental\n')
        if not os.path.exists('torrents'):
            os.makedirs('torrents')
        if not os.path.exists('output'):
            os.makedirs('output')
        if crawl_new_torrent_only and cron_job:
            color.color_print(
                Fore.YELLOW, '[!]',
                'CRONJOB SET: INTERVAL TO {} SEC'.format(cron_job_interval) + '\n')
            time.sleep(cron_job_interval)
        color.color_print(Fore.YELLOW, '[DONE]', 'READING HEADERS')
        color.color_print(Fore.YELLOW, '[DONE]', 'WEBCLIENT INIT')
        color.color_print(Fore.YELLOW, '[DONE]', 'SET NORMAL HEADERS\n')
        color.color_print(Fore.YELLOW, '[RUNNING]', 'TORRENT LIST LOADING\n')
        if crawl_new_torrent_only:
            color.color_print(Fore.YELLOW, '[DONE]', 'MODE SELECTED - NEW TORRENT ONLY MODE\n')
        else:
            color.color_print(Fore.YELLOW, '[DONE]', 'MODE SELECTED - FULL DUMP MODE\n')
        if not crawl_new_torrent_only:
            color.color_print(Fore.YELLOW, '[RUNNING]', 'ALL TORRENT LIST LOADING\n')
        else:
            color.color_print(Fore.YELLOW, '[RUNNING]', 'NEW TORRENT LIST LOADING\n')

        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/80.0.3987.132 Safari/537.36"
        }
        cancel_crawling_state = False
        new_torrent_state = False
        try:
            for i in range(0, 150):
                async with aiohttp.ClientSession() as session:
                    async with session.get(
                            'http://eu.ohys.net/t/json.php?dir=disk&p={}'.format(str(i)),
                            headers=headers) as req:
                        contents_array = json.loads(await req.text())
                        if len(contents_array) == 0:
                            break
                        for item in contents_array:
                            decoded_file_name = html.unescape(item['t'])
                            # Download if the file is missing or a previous download left it empty.
                            if not os.path.isfile('torrents/' + decoded_file_name) or \
                                    os.stat('torrents/' + decoded_file_name).st_size == 0:
                                async with session.get('http://eu.ohys.net/t/' + item['a']) as file_req:
                                    file = await aiofiles.open('torrents/' + decoded_file_name, mode='wb')
                                    await file.write(await file_req.read())
                                    await file.close()
                                color.color_print(Fore.LIGHTGREEN_EX, '[DOWNLOADED]', decoded_file_name)
                                new_torrent_state = True
                            elif crawl_new_torrent_only:
                                # Hitting a known torrent in "new only" mode means we can stop paging.
                                cancel_crawling_state = True
                            else:
                                color.color_print(Fore.LIGHTRED_EX, '[SKIPPED]', decoded_file_name)
                if cancel_crawling_state:
                    break
        except Exception:
            # Network or parse failures abort this pass; the next cron tick retries.
            pass

        print('')
        color.color_print(Fore.YELLOW, '[DONE]', 'TORRENT LOADED\n')
        if new_torrent_state:
            color.color_print(Fore.YELLOW, '[PROCESSING]', 'OUTPUT TO JSON TYPE\n')
            time.sleep(2)
            modules.database_builder()
            color.color_print(Fore.YELLOW, '[DONE]', 'OUTPUT TO JSON TYPE')
        if not cron_job:
            sys.exit(0)
        cls()
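# A coroutine needs an event loop to run; a minimal entry point for the async crawler
# above could look like this (an assumption -- the real project may start it differently).
if __name__ == '__main__':
    import asyncio

    asyncio.run(main())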
def show_all(my_mat, comp_mat, ball, balls_left):
    # Redraw both lotto cards after a new barrel has been drawn.
    if os.name == "nt":
        os.system("cls")  # Clear screen on Windows
    else:
        os.system('clear')
    # "Новый бочонок: ... (осталось ...)" -- "New barrel: {} ({} left)"
    color_print("Новый бочонок: {} (осталось {})".format(ball, balls_left), color=Color.White)
    color_print("--- Карточка компьютера ---", color=Color.BGreen)  # computer's card
    color_print(matrix_view(comp_mat), color=Color.Blue)
    color_print("---------------------------", color=Color.BGreen)
    color_print("------ Ваша карточка ------", color=Color.BGreen)  # your card
    color_print(matrix_view(my_mat), color=Color.Cyan)
    color_print("---------------------------", color=Color.BGreen)
    # "Зачеркнуть цифру? (y/n)" -- "Cross out the number? (y/n)"
    color_print("Зачеркнуть цифру? (y/n) ", color=Color.Yellow, end="")
def dead_heat():
    # "Ничья." -- it's a draw.
    color_print("Ничья.", color=Color.IYellow)
    sys.exit()


def you_win():
    # "Вы выиграли!" / "Компьютер проиграл!" -- you won / the computer lost.
    color_print("Вы выиграли!", color=Color.BGreen)
    color_print("Компьютер проиграл!", color=Color.BGreen)
    sys.exit()


def you_fail():
    # "Вы проиграли!" -- you lost.
    color_print("Вы проиграли!", color=Color.Red)
    sys.exit()
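# show_all() above relies on a matrix_view helper defined elsewhere in the game module.
# Below is only a hypothetical sketch inferred from its usage: it renders a lotto card
# (a list of rows) as an aligned text block, leaving empty cells blank. The real
# implementation, including how crossed-out numbers are marked, may differ.
def matrix_view(matrix):
    rows = []
    for row in matrix:
        # Pad every cell to a fixed width so the columns line up.
        rows.append(' '.join('{:>2}'.format('' if cell in (0, '') else cell) for cell in row))
    return '\n'.join(rows)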
def main():
    # Synchronous variant of the crawler: same flow as the async version, but pages the
    # Ohys-Raws listing and downloads .torrent files with urllib instead of aiohttp.
    init()
    crawl_new_torrent_only = False
    cron_job = True
    cron_job_interval = 2
    cls()
    while True:
        print('Ohys-Raws Crawler Engine 4.17\nPython Version\n\nDeveloped By Cryental\n')
        if not os.path.exists('torrents'):
            os.makedirs('torrents')
        if not os.path.exists('output'):
            os.makedirs('output')
        if crawl_new_torrent_only and cron_job:
            color.color_print(
                Fore.YELLOW, '[!]',
                'CRONJOB SET: INTERVAL TO {} SEC'.format(cron_job_interval) + '\n')
            time.sleep(cron_job_interval)
        color.color_print(Fore.YELLOW, '[DONE]', 'READING HEADERS')
        color.color_print(Fore.YELLOW, '[DONE]', 'WEBCLIENT INIT')
        color.color_print(Fore.YELLOW, '[DONE]', 'SET NORMAL HEADERS')
        color.color_print(Fore.YELLOW, '[RUNNING]', 'TORRENT LIST LOADING\n')
        if crawl_new_torrent_only:
            color.color_print(Fore.YELLOW, '[DONE]', 'MODE SELECTED - NEW TORRENT ONLY MODE')
        else:
            color.color_print(Fore.YELLOW, '[DONE]', 'MODE SELECTED - FULL DUMP MODE')
        if not crawl_new_torrent_only:
            color.color_print(Fore.YELLOW, '[RUNNING]', 'ALL TORRENT LIST LOADING\n')
        else:
            color.color_print(Fore.YELLOW, '[RUNNING]', 'NEW TORRENT LIST LOADING')

        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                          "Chrome/80.0.3987.132 Safari/537.36"
        }
        cancel_crawling_state = False
        new_torrent_state = False
        try:
            for i in range(0, 150):
                req = urllib.request.Request(
                    'http://torrents.ohys.net/t/json.php?dir=disk&p={}'.format(str(i)),
                    headers=headers)
                response = urllib.request.urlopen(req)
                contents = response.read()
                contents_array = json.loads(contents)
                if len(contents_array) == 0:
                    break
                for item in contents_array:
                    decoded_file_name = html.unescape(item['t'])
                    # Download if the file is missing or a previous download left it empty.
                    if not os.path.isfile('torrents\\' + decoded_file_name) or \
                            os.stat('torrents\\' + decoded_file_name).st_size == 0:
                        time.sleep(0.008)
                        urllib.request.urlretrieve(
                            'http://torrents.ohys.net/t/' + item['a'],
                            'torrents\\' + decoded_file_name)
                        color.color_print(Fore.YELLOW, '[DOWNLOADED]', decoded_file_name)
                        new_torrent_state = True
                    elif crawl_new_torrent_only:
                        # Hitting a known torrent in "new only" mode means we can stop paging.
                        cancel_crawling_state = True
                    else:
                        color.color_print(Fore.YELLOW, '[EXISTED]', decoded_file_name)
                if cancel_crawling_state:
                    break
        except Exception:
            # Network or parse failures abort this pass; the next cron tick retries.
            pass

        color.color_print(Fore.YELLOW, '[DONE]', 'TORRENT LOADED')
        if new_torrent_state:
            color.color_print(Fore.YELLOW, '[PROCESSING]', 'OUTPUT TO JSON TYPE')
            time.sleep(2)
            modules.database_builder()
            color.color_print(Fore.YELLOW, '[DONE]', 'OUTPUT TO JSON TYPE')
        if not cron_job:
            sys.exit(0)
        cls()
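# Minimal entry-point sketch for the synchronous crawler above (an assumption; the
# original module may invoke main() elsewhere).
if __name__ == '__main__':
    main()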