async def detector():
    """Poll t.me/chatwars3 for the next auction post and repost it to the channel.

    Infinite loop: fetches post number ``au_post`` from the public embed page,
    parses it with ``former()``; when a valid lot is found, posts it to
    ``idChannel``, registers it in the in-memory ``db`` for later updates,
    advances ``au_post`` (also persisted via ``google()``) and repeats.
    """
    global db, au_post
    while True:
        try:
            await asyncio.sleep(0.3)
            # NOTE(review): requests.get() is blocking inside an async loop —
            # it stalls the event loop for the duration of the HTTP call.
            array = former(
                requests.get('https://t.me/chatwars3/' + str(au_post) +
                             '?embed=1').text)
            if array[0] != 'False':
                try:
                    post = await bot.send_message(idChannel, array[1],
                                                  parse_mode='HTML')
                    db[au_post] = {
                        'update_id': update_id,
                        'action': 'Add',
                        '@cw3auction': form_mash(post['message_id'], post['text']),
                        '@chatwars3': array
                    }
                    au_post += 1
                    google('update_cell', au_post)
                    printer('запостил новый лот')
                except Exception as error:
                    # Fix: original `except IndexError and Exception` evaluates
                    # to plain `Exception` (the `and` returns its second
                    # operand) — behavior unchanged, intent now explicit.
                    printer(error)
                    await asyncio.sleep(1)
        except Exception:
            await executive()
def detector(self, message, au_post, auth, db_path=path['lots']):
    """Inspect a channel message; merge a new lot into the lots DB.

    Returns a ``(lot, log_text)`` pair: the parsed lot dict (or None when the
    message refers to a post older than ``au_post``) and a log template for
    the mailing report.

    NOTE: ``db_path=path['lots']`` is evaluated once at definition time.
    """
    lot, log_text = None, 'None'
    # message['text'] starts with "<post_id>/..." — the id decides freshness.
    lot_split = message['text'].split('/')
    print_text = f"{self.server['link: channel']}{lot_split[0]}"
    if int(lot_split[0]) >= au_post:
        db = SQLighter(db_path)
        lot = self.form(message['text'])
        lot_in_db = secure_sql(db.get_lot, lot['au_id'])
        html_link = objects.html_link(print_text, f"№{lot['lot_id']}")
        # '{}' placeholder is filled later with the number of recipients.
        log_text = f"#Рассылка лота #{lot['lot_id']} {html_link} с айди #{lot['base']} разошелся по "
        log_text += f"{bold('{}')} адресатам " + code(f"id:{lot['au_id']}")
        if lot_in_db is False:
            # Not stored yet — insert it.
            secure_sql(db.merge, lot)
            if lot['base'] != 'None':
                if lot['status'] != '#active':
                    print_text += ' Не активен, в базу добавлен'
            else:
                # Unknown item base: alert the developer instead of logging.
                auth.send_dev_message(
                    print_text + code('\nЭтого куска говна нет в константах'),
                    tag=None)
        else:
            print_text += ' Уже в базе'
    else:
        # Older post: the lot_updater() worker is responsible for it.
        print_text += ' Старый, lot_updater() разберется'
    objects.printer(print_text)
    return lot, log_text
async def supporter():
    """Periodically re-edit every tracked channel post from the in-memory db.

    Infinite loop: every cycle it snapshots ``db``, edits each lot's channel
    message with the latest ``@chatwars3`` text, and logs per-post results.
    Telegram's "message is not modified" error is treated as a no-op.
    """
    while True:
        try:
            await asyncio.sleep(10)
            stamp2 = datetime.now().timestamp()
            # Shallow copy so iteration is safe while other tasks mutate db.
            temp_db = copy.copy(db)
            for i in temp_db:
                lot = db.get(i)
                try:
                    await bot.edit_message_text(lot['@chatwars3'][1],
                                                idChannel,
                                                lot['@cw3auction'][0],
                                                parse_mode='HTML')
                    print_text = 'Пост обновлен'
                except Exception as e:
                    # Fix: original `except IndexError and Exception`
                    # evaluates to plain `Exception` — made explicit.
                    print_text = 'Пост не изменился'
                    search = re.search('exactly the same as a current content',
                                       str(e))
                    if search:
                        print_text += ', потому что точно такой же'
                    else:
                        print_text += ' ' + str(e)
                        # Extra back-off on unexpected edit errors.
                        await asyncio.sleep(10)
                printer(
                    str(i) + '-' + str(lot['@cw3auction'][0]) + ' ' + print_text)
                await asyncio.sleep(10)
            printer('конец ' + str(datetime.now().timestamp() - stamp2))
        except Exception:
            await executive()
def hourly():
    """Run the two-hour-window `goodies` digest roughly once per hour.

    Infinite worker loop: waits 5 minutes, posts goodies for the last two
    hours, then sleeps out the rest of the hour (55 minutes).
    """
    while True:
        try:
            sleep(300)
            objects.printer('работаю ' +
                            objects.log_time(time_now() - 2 * 60 * 60))
            goodies(time_now() - 2 * 60 * 60, 5)
            sleep(3300)
        except Exception:
            # Fix: original `except IndexError and Exception` evaluates to
            # plain `Exception` — behavior unchanged, intent explicit.
            executive()
def main_posting():
    """Publish the next scheduled post to the main channel when allowed.

    Infinite worker loop: reads the 'moscow-schedule' sheet; if there is a
    pending entry, the local hour is within 13..20 and at least an hour has
    passed since ``last_date``, posts it and deletes the schedule row.
    """
    while True:
        try:
            sleep(20)
            scheduled = google('moscow-schedule', 'col_values', 1)
            if scheduled and (13 <= hour() < 21):
                if scheduled[0] and (last_date + 60 * 60) < stamper(
                        log_time(gmt=0, form='channel')):
                    poster(idMain, [scheduled[0], None, None, None])
                    objects.printer('запостил')
                    google('moscow-schedule', 'delete', 1)
        except Exception:
            # Fix: original `except IndexError and Exception` evaluates to
            # plain `Exception` — behavior unchanged, intent explicit.
            executive()
def daily():
    """Post the daily 24-hour `goodies` digest at 21:00.

    Infinite worker loop: when the current hour is '21', posts goodies for
    the last 24 hours and sleeps an hour to avoid double-posting; otherwise
    polls every 5 minutes.
    """
    while True:
        try:
            now_dict = stamp_dict(time_now())
            if now_dict['hour'] == '21':
                objects.printer('работаю ' +
                                objects.log_time(time_now() - 24 * 60 * 60))
                goodies(time_now() - 24 * 60 * 60, 10)
                # Skip past the 21:xx window so the digest is sent once.
                sleep(3600)
            sleep(300)
        except Exception:
            # Fix: original `except IndexError and Exception` evaluates to
            # plain `Exception` — behavior unchanged, intent explicit.
            executive()
def starting_print(timestamp):
    """Log startup duration with the correct Russian plural form.

    Sub-second times are shown with two decimals as 'секунды'; whole-second
    times are truncated to int and get 'секунд'/'секунду'/'секунды' by the
    usual Russian numeral rules (11-20 always take 'секунд').
    """
    elapsed = round(datetime.now().timestamp() - timestamp, 2)
    text = 'Оболочка запущена за '
    if 0 < elapsed < 1:
        # Pad e.g. 0.5 -> '0.50' so two decimals are always shown.
        shown = f'{elapsed}0' if len(str(elapsed)) == 3 else elapsed
        text += f'{shown} секунды'
    else:
        seconds = int(elapsed)
        text += f'{seconds} секунд'
        if seconds < 10 or seconds > 20:
            last_digit = str(seconds)[-1]
            if last_digit == '1':
                text += 'у'
            elif last_digit in ('2', '3', '4'):
                text += 'ы'
    objects.printer(text)
def checker(address, main_sep, link_sep, quest):
    """Scrape ``address`` for post links and publish every unseen one.

    Collects hrefs from ``<a class=link_sep>`` anchors inside
    ``<div class=main_sep>`` containers; each link not yet in ``used_array``
    is recorded in the 'moscow-growing' sheet, remembered, turned into a post
    via ``quest`` and sent to the docs channel.
    """
    global used_array
    sleep(3)
    page = requests.get(address, headers=headers)
    markup = BeautifulSoup(page.text, 'html.parser')
    # One anchor per container at most; skip containers without one.
    links = [
        anchor.get('href')
        for container in markup.find_all('div', class_=main_sep)
        for anchor in [container.find('a', class_=link_sep)]
        if anchor is not None
    ]
    for href in links:
        if href in used_array:
            continue
        google('moscow-growing', 'insert', href)
        used_array.insert(0, href)
        quest_post = quest(href)
        poster(idDocs,
               former(quest_post[0], quest_post[1], str(random.randint(1, 2))))
        objects.printer(href + ' сделано')
        sleep(3)
def multiple_db_updates(self, lots, lot_updater=True, max_workers=10):
    """Bulk-insert parsed lots into the active/lots SQLite databases.

    Forms each lot concurrently, assembles multi-row VALUES strings and
    executes one custom SQL statement per chunk. When ``lot_updater`` is
    True everything goes into the 'active' DB; otherwise rows are routed by
    lot status. Returns the elapsed time in whole seconds.
    """
    start_sql_request = self.create_start_sql_request()
    print_text, stamp = f"{len(lots)}: ", datetime.now().timestamp()
    db = {
        'active': SQLighter(path['active']),
        # The lots DB is only opened when routing by status is needed.
        'lots': None if lot_updater else SQLighter(path['lots'])
    }
    # Process in chunks sized by objects.sql_divide().
    for array in objects.sql_divide(lots):
        sql = {'active': '', 'lots': ''}
        with concurrent.futures.ThreadPoolExecutor(
                max_workers=max_workers) as future_executor:
            futures = [
                future_executor.submit(self.form, future) for future in array
            ]
            for future in concurrent.futures.as_completed(futures):
                sql_request = ''
                lot = future.result()
                # Build one "('v1', 'v2', ...), (" fragment per lot.
                # NOTE(review): values are interpolated, not parameterized —
                # assumes self.form() output is trusted.
                for key in lot:
                    sql_request += f"'{lot.get(key)}', "
                sql_request = f'{sql_request[:-2]}), ('
                if lot['status'] == '#active':
                    sql['active'] += sql_request
                else:
                    sql['lots'] += sql_request
        if lot_updater:
            # Single statement: everything into the active DB.
            sql_request = ''
            for key in sql:
                sql_request += sql[key]
            if sql_request:
                # [:-3] strips the trailing "), (" opener back to ")".
                secure_sql(db['active'].custom_sql,
                           f'{start_sql_request}{sql_request[:-3]};')
        else:
            # Route each accumulated fragment to its own DB.
            for key in db:
                if db[key] and sql[key]:
                    secure_sql(db[key].custom_sql,
                               f'{start_sql_request}{sql[key][:-3]};')
    delay = datetime.now().timestamp() - stamp
    objects.printer(f"{print_text}{delay}")
    return int(delay)
def tut_checker():
    """Scrape jobs.tut.by vacancies and drip-post backlog to the main channel.

    Infinite worker loop: runs ``checker()`` over the vacancy search page,
    then — inside the 11..20 hour window, at most one post per ~2 hours and
    while ``block`` is off — publishes the oldest backlog entry from
    ``unused_box`` via the site-appropriate quest parser.
    """
    global unused_box
    while True:
        try:
            checker(
                'https://jobs.tut.by/search/vacancy?order_by=publication_time&clusters=true&area=16&'
                'currency_code=BYR&enable_snippets=true&only_with_salary=true',
                'vacancy-serp-item', 'bloko-link', tut_quest)
            if len(unused_box) > 0 and (11 <= hour() < 21):
                if (last_date + 122 * 60) < objects.time_now() and block != 'True':
                    # Pick the parser matching the link's site.
                    site_search = re.search(r'tut\.by|hh\.ru', unused_box[0])
                    if site_search:
                        post = tut_quest(unused_box[0])
                    else:
                        post = praca_quest(unused_box[0])
                    google(unused_box[0])
                    poster(idMain, former(post[1], 'MainChannel', post[0]))
                    objects.printer(unused_box[0] + ' сделано')
                    unused_box.pop(0)
            sleep(3)
        except Exception:
            # Fix: original `except IndexError and Exception` evaluates to
            # plain `Exception` — behavior unchanged, intent explicit.
            executive()
def vk_parser():
    """Mirror new VK wall posts to Telegram.

    Waits until ``used_links`` is populated, then polls the VK wall.get API
    every 20 s. Each unseen post is remembered and — unless it is a repost
    (``copy_history``) — forwarded via ``telegram_publish``. API errors are
    logged and followed by a 3-minute back-off.
    """
    global used_links
    while not used_links:
        sleep(1)
    while True:
        try:
            sleep(20)
            response = requests.get('https://api.vk.com/method/wall.get',
                                    params=params).json()
            if response.get('response'):
                # Oldest first, so posts arrive in chronological order.
                for item in reversed(response['response']['items']):
                    post = 'https://vk.com/' + params['domain'] + \
                        '?w=wall' + str(item['owner_id']) + '_' + str(item['id'])
                    if post not in used_links:
                        used_links.append(post)
                        objects.printer('получена ' + post)
                        if item.get('copy_history') is None:
                            telegram_publish(item)
                        sleep(10)
            elif response.get('error'):
                objects.printer(response)
                sleep(180)
        except Exception:
            # Fix: original `except IndexError and Exception` evaluates to
            # plain `Exception` — behavior unchanged, intent explicit.
            executive()
def checker(address, main_sep, link_sep, quest):
    """Scrape ``address`` and post fresh links to the main channel.

    Hrefs are taken from ``<a class=link_sep>`` anchors inside
    ``<div class=main_sep>`` containers. A link unseen in both
    ``used_array`` and ``unused_box`` is posted immediately when inside the
    11..20 hour window, rate-limited to one per 2 hours and not blocked;
    otherwise it is parked in ``unused_box`` for later.
    """
    global used_array
    global unused_box
    sleep(3)
    now_stamp = objects.time_now()
    page = requests.get(address, headers=headers)
    markup = BeautifulSoup(page.text, 'html.parser')
    hrefs = []
    for container in markup.find_all('div', class_=main_sep):
        anchor = container.find('a', class_=link_sep)
        if anchor is not None:
            hrefs.append(anchor.get('href'))
    for href in hrefs:
        unseen = href not in used_array and href not in unused_box
        if unseen and (11 <= hour() < 21):
            if (last_date + 120 * 60) < now_stamp and block != 'True':
                google(href)
                used_array.insert(0, href)
                quest_post = quest(href)
                poster(idMain,
                       former(quest_post[1], 'MainChannel', quest_post[0]))
                objects.printer(href + ' сделано')
                sleep(3)
            else:
                # Rate-limited or blocked: keep for the backlog worker.
                unused_box.append(href)
def multiple_requests(self, active_array, full_limit, max_workers=10):
    """Fetch active lot pages in rate-limited concurrent batches.

    Builds a minimal set of request ids whose embed pages cover all of
    ``active_array`` (each page shows ±10 neighbouring posts), then fetches
    them in batches of at most ``full_limit``, retrying ids that did not
    resolve. Returns ``(delay, responses)`` where delay is the remaining
    rate-limit wait in seconds and responses are the parsed lot values.
    """
    stuck = 0
    loop = True
    ranges = []
    response = {}
    used_array = []
    temp_ranges = []
    update_array = []
    request_array = []
    prev_update_array = []
    limit = copy(full_limit)
    # Each post page covers ids post_id-10 .. post_id+10.
    for post_id in active_array:
        ranges.extend(range(post_id - 10, post_id + 11))
    # Greedy cover: most-frequently-covered ids first, skipping ids whose
    # neighbourhood is already covered.
    for post_id in dict(
            sorted(Counter(ranges).items(),
                   key=lambda item: item[1],
                   reverse=True)):
        if post_id not in temp_ranges and post_id in active_array:
            temp_ranges.extend(range(post_id - 10, post_id + 11))
            request_array.append(post_id)
    # Estimated wait: 5 requests per second, rounded up.
    delay = len(request_array) / 5
    delay = int(delay + 1 if delay.is_integer() is False else delay)
    while loop is True:
        links = []
        if len(update_array) == 0:
            # Fill the next batch with not-yet-fetched ids.
            for lot_id in request_array:
                if len(update_array) < full_limit and lot_id not in used_array:
                    update_array.append(lot_id)
                    used_array.append(lot_id)
        else:
            # Retry batch: detect being stuck on the same ids.
            if update_array == prev_update_array:
                stuck += 1
            else:
                prev_update_array = deepcopy(update_array)
                stuck = 0
            if stuck in [50, 500, 5000]:
                # Escalate a diagnostic dump to the dev chat.
                auth = objects.AuthCentre(os.environ['ERROR-TOKEN'])
                message = f"active_array({len(active_array)}) = {active_array}\n" \
                          f"request_array({len(request_array)}) = {request_array}\n" \
                          f"update_array({len(update_array)}) = {update_array}\n" \
                          f"delay = {delay}"
                auth.send_json(message, 'variables',
                               'Бесконечный цикл обновляющихся лотов')
        for lot_id in update_array:
            links.append(f"{self.server['link: channel']}{lot_id}")
            limit -= 1
        temp_array = deepcopy(update_array)
        print_text, stamp = f"{len(links)}: ", datetime.now().timestamp()
        with concurrent.futures.ThreadPoolExecutor(
                max_workers=max_workers) as future_executor:
            futures = [
                future_executor.submit(requests.get, future)
                for future in links
            ]
            for future in concurrent.futures.as_completed(futures):
                result = self.former(future.result(), active_array)
                response.update(result)
                # Mark resolved ids so they are not retried.
                for lot_id in result:
                    if lot_id in temp_array:
                        temp_array[temp_array.index(lot_id)] = None
        update_array = []
        objects.printer(f"{print_text}{datetime.now().timestamp() - stamp}")
        # Unresolved ids become the next retry batch.
        for lot_id in temp_array:
            if lot_id is not None:
                update_array.append(lot_id)
        if len(update_array) == 0 and len(used_array) == len(request_array):
            loop = False
        if limit <= 0:
            # Batch budget exhausted: wait a minute and refill.
            limit = copy(full_limit)
            delay -= 60
            sleep(60)
    return 0 if delay < 0 else delay, list(response.values())
def checker():
    """Poll two Telegram channels for battle digests and forward summaries.

    Main channel posts ('Результаты сражений') are parsed into per-castle
    money/box reports plus trophy points and sent via ``sender(..., 'main')``.
    When no new main post exists, the mini channel is polled for 'Battle'
    digests and sent via ``sender(..., 'mini')``. Post ids advance as posts
    are consumed; ``last_date`` guards against re-sending.
    NOTE(review): nesting reconstructed from a whitespace-mangled source —
    verify branch attachment against the original file.
    """
    from timer import timer
    global main_post_id, mini_post_id, last_date
    while True:
        try:
            sleep(0.1)
            printer_text = main_address + str(main_post_id)
            soup = BeautifulSoup(
                requests.get(printer_text + '?embed=1').text, 'html.parser')
            is_post_not_exist = soup.find('div',
                                          class_='tgme_widget_message_error')
            if is_post_not_exist is None:
                # Main post exists — try to parse it as a battle digest.
                is_post_id_exist = soup.find('div',
                                             class_='tgme_widget_message_link')
                raw = str(
                    soup.find('div',
                              class_='tgme_widget_message_text js-message_text'
                              )).replace('<br/>', '\n')
                if is_post_id_exist:
                    soup = BeautifulSoup(raw, 'html.parser').get_text()
                    time_search = re.search(
                        r'(\d\d) (.*) 10(..)\nРезультаты сражений:', soup)
                    if time_search:
                        reports = {}
                        soup = re.sub('.*\nРезультаты сражений:\n|️', '', soup)
                        trophy_search = re.search(
                            'По итогам сражений замкам начислено:(.*)', soup,
                            flags=re.DOTALL)
                        # One paragraph per castle: collect emoji/money/boxes.
                        for row in soup.split('\n\n'):
                            row = re.sub(
                                'По итогам сражений замкам начислено:.+', '',
                                row, flags=re.DOTALL)
                            search_castle = re.search(castles, row)
                            if search_castle:
                                money_search = re.search(
                                    '(на|отобрали) (.*) золотых монет', row)
                                box_search = re.search(
                                    'потеряно (.*) складских ячеек', row)
                                castle = search_castle.group(1)
                                reports[castle] = ': '
                                if e_trident in row:
                                    reports[castle] += e_trident
                                # First matching battle outcome emoji wins.
                                for battle in battle_emoji:
                                    if battle in row:
                                        reports[castle] += battle_emoji.get(
                                            battle)
                                        break
                                if money_search:
                                    # 'на' = lost money, 'отобрали' = gained.
                                    if money_search.group(1) == 'на':
                                        reports[castle] += ' -'
                                    elif money_search.group(1) == 'отобрали':
                                        reports[castle] += ' +'
                                    reports[castle] += money_search.group(
                                        2) + '💰'
                                if box_search:
                                    if box_search.group(1) != '0':
                                        reports[
                                            castle] += ' -' + box_search.group(
                                                1) + '📦'
                        if trophy_search:
                            letter = title
                            trophy_list = trophy_search.group(1).split('\n')
                            date = objects.log_time(timer(time_search),
                                                    form='b_channel')
                            for row in trophy_list:
                                search = re.search(
                                    castles + r'.+ \+(\d+) 🏆 очков', row)
                                if search:
                                    castle, trophy = search.groups()
                                    letter += italic(castle +
                                                     str(reports.get(castle)) +
                                                     ' +' + trophy +
                                                     '🏆') + '\n'
                            # Send only digests newer than the last one sent.
                            if stamper(date, '%d/%m/%Y %H:%M') > stamper(
                                    last_date, '%d/%m/%Y %H:%M'):
                                letter += html_link(printer_text,
                                                    'Битва') + ' ' + italic(date)
                                sender(letter, date, 'main')
                            else:
                                printer('пост ' + printer_text +
                                        ' уже был, пропускаю')
                        main_post_id += 1
                    else:
                        printer('пост ' + printer_text +
                                ' не относится к дайджесту, пропускаю')
                        main_post_id += 1
            else:
                # No new main post — poll the mini channel instead.
                printer_text = mini_address + str(mini_post_id)
                soup = BeautifulSoup(
                    requests.get(printer_text + '?embed=1').text,
                    'html.parser')
                is_post_not_exist = soup.find(
                    'div', class_='tgme_widget_message_error')
                if is_post_not_exist is None:
                    is_post_id_exist = soup.find(
                        'div', class_='tgme_widget_message_link')
                    raw = soup.find(
                        'div',
                        class_='tgme_widget_message_text js-message_text')
                    raw = re.sub('️', '', re.sub('<br/>', '\n', str(raw)))
                    if is_post_id_exist:
                        is_post_battle = None
                        soup = BeautifulSoup(raw, 'html.parser').get_text()
                        # Find the 'Battle DD Mon 10YY' header line.
                        for row in soup.split('\n'):
                            is_post_battle = re.search(
                                r'Battle (\d{2}) (.*?) 10(\d{2})', row)
                            if is_post_battle:
                                break
                        if is_post_battle:
                            points = None
                            digest = None
                            date = objects.log_time(timer(is_post_battle),
                                                    form='b_channel')
                            for part in soup.split('\n\n'):
                                points_search = re.search('🏆Очки:\n(.*)',
                                                          part,
                                                          flags=re.DOTALL)
                                digest_search = re.search(
                                    '⛳Сводки с полей:\n(.*)', part,
                                    flags=re.DOTALL)
                                if digest_search:
                                    digest = digest_search.group(1).split('\n')
                                elif points_search:
                                    points = points_search.group(1).split('\n')
                            if stamper(date, '%d/%m/%Y %H:%M') > stamper(
                                    last_date, '%d/%m/%Y %H:%M'):
                                # Locate the single 'Battle' link in the post.
                                list_tags_a = BeautifulSoup(
                                    raw, 'html.parser').find_all('a')
                                battle_links = []
                                for tag_a in list_tags_a:
                                    if tag_a.get_text() == 'Battle':
                                        battle_links.append(tag_a.get('href'))
                                if len(battle_links) == 1:
                                    search_post = re.search(
                                        main_address, battle_links[0])
                                    if search_post:
                                        # Sync main_post_id to the linked post.
                                        raw_post = re.sub(
                                            r'\D', '',
                                            re.sub(main_address, '',
                                                   battle_links[0]))
                                        main_post_id = int(raw_post) + 1
                                    if points and digest:
                                        print(points)
                                        letter = title
                                        # Merge trophy points into each
                                        # castle's digest row.
                                        for row in digest:
                                            castle_row = re.search(
                                                '(.*?): .*', row)
                                            points_text = ''
                                            if castle_row:
                                                for point in points:
                                                    conform = re.search(
                                                        castle_row.group(1) +
                                                        r'.*: \+(\d+)', point)
                                                    if conform:
                                                        points_text += ' +' + conform.group(
                                                            1) + '🏆'
                                            letter += italic(
                                                row + points_text) + '\n'
                                        letter += html_link(
                                            battle_links[0],
                                            'Битва') + ' ' + italic(date)
                                        sender(letter, date, 'mini')
                            else:
                                printer('пост ' + printer_text +
                                        ' уже был, пропускаю')
                            mini_post_id += 1
                        else:
                            printer('пост ' + printer_text +
                                    ' не относится к дайджесту, пропускаю')
                            mini_post_id += 1
                else:
                    # Neither channel has a new post yet.
                    sleep(0.02)
        # NOTE(review): `IndexError and Exception` evaluates to plain
        # `Exception`; consider writing `except Exception` explicitly.
        except IndexError and Exception:
            executive()
async def lot_updater():
    """Refresh tracked auction lots from t.me/chatwars3 and edit channel posts.

    One cycle: register freshly added lots in Google Sheets, pick a batch of
    up to 300 lots one update-generation behind, fetch their embed pages
    concurrently, diff against the stored copy, then edit/delete the mirror
    posts in ``idChannel`` accordingly. Paces itself so a full pass takes at
    least 60 seconds.
    """
    global db, limiter, update_id
    while True:
        try:
            await asyncio.sleep(1)
            printer('начало')
            g_actives = google('col_values', 1)
            stamp2 = datetime.now().timestamp()
            session = FuturesSession()
            # Snapshot keys so concurrent mutation of db is safe to iterate.
            temp_db = copy.copy(db)
            update_array = []
            update_id += 1
            futures = []
            # Pass 1: persist newly added lots to the sheet.
            for i in temp_db:
                lot = db.get(i)
                if lot['action'] == 'Add':
                    google('insert', [i, lot['@cw3auction'][0]])
                    g_actives.insert(2, str(i))
                    db[i]['action'] = 'None'
            # Pass 2: select this generation's batch (max 300 via limiter).
            for i in temp_db:
                if db[i]['action'] != 'deleted':
                    lot = db.get(i)
                    if lot['update_id'] + 1 < update_id:
                        # A lot lagged behind: restart this generation.
                        update_array = []
                        update_id -= 1
                        limiter = 1
                    if lot['update_id'] + 1 == update_id and limiter <= 300:
                        db[i]['update_id'] = update_id
                        update_array.append(i)
                        limiter += 1
            for i in update_array:
                url = 'https://t.me/chatwars3/' + str(i) + '?embed=1'
                futures.append(session.get(url))
            # Pass 3: parse fetched pages and mark changed/finished lots.
            for future in concurrent.futures.as_completed(futures):
                result = former(future.result().content)
                last_time_request()
                if result[0] != 'False':
                    db[result[0]]['@chatwars3'] = result
                    lot_cw3 = db[result[0]]['@cw3auction']
                    if result[2] != lot_cw3[1]:
                        db[result[0]]['action'] = 'Update'
                    if result[3] != '#активен':
                        db[result[0]]['action'] = 'Delete'
            # Pass 4: apply edits/deletions to the mirror channel posts.
            for i in temp_db:
                lot = db.get(i)
                if lot['action'] in ['Update', 'Delete']:
                    try:
                        post = await bot.edit_message_text(
                            lot['@chatwars3'][1], idChannel,
                            lot['@cw3auction'][0], parse_mode='HTML')
                        print_text = 'Пост обновлен'
                        if lot['action'] == 'Update':
                            db[i]['action'] = 'None'
                            db[i]['@cw3auction'] = form_mash(
                                post['message_id'], post['text'])
                        else:
                            # Lot ended: remove its row from the sheet.
                            google('delete', g_actives.index(str(i)) + 1)
                            g_actives.pop(g_actives.index(str(i)))
                            db[i]['action'] = 'deleted'
                            print_text += ' (закончился) и удален из обновлений'
                    # NOTE(review): `IndexError and Exception` evaluates to
                    # plain `Exception`; consider `except Exception`.
                    except IndexError and Exception as e:
                        print_text = 'Пост не изменился'
                        search = re.search(
                            'exactly the same as a current content', str(e))
                        if search:
                            # Content identical — still sync the stored copy.
                            print_text += ', потому что точно такой же'
                            if lot['action'] == 'Update':
                                db[i]['@cw3auction'][1] = db[i]['@chatwars3'][2]
                                db[i]['@cw3auction'][2] = db[i]['@chatwars3'][3]
                                db[i]['action'] = 'None'
                            else:
                                print_text += ', а еще он закончился и удален из обновлений'
                                google('delete', g_actives.index(str(i)) + 1)
                                g_actives.pop(g_actives.index(str(i)))
                                db[i]['action'] = 'deleted'
                        else:
                            print_text += ' ' + str(e)
                    printer(
                        str(i) + '-' + str(lot['@cw3auction'][0]) + ' ' +
                        print_text)
            limiter = 1
            printer('конец ' + str(datetime.now().timestamp() - stamp2))
            # Keep at least 60 s between request bursts.
            delay = 60 - (time_now() - last_requested)
            if delay >= 0:
                await asyncio.sleep(delay)
        except IndexError and Exception:
            await executive()
async def oldest(server):
    """Archive old channel lots into Google Sheets, rotating full worksheets.

    Startup: scan the storage spreadsheets (main and temp-prefixed) to find
    the highest archived lot id and the current temp worksheet. Then loop
    forever: fetch lot ``old`` from the channel embed page and append it to
    the temp worksheet. When the sheet exceeds its grid limits, the temp
    data is rotated into a new numbered worksheet of the main spreadsheet
    and a fresh temp spreadsheet is created. Without Google connectivity the
    coroutine reports failure to the dev chat and exits.
    """
    old = 0
    old_values = []
    temp_worksheet = None
    spreadsheet_files = []
    client = gspread.service_account(server['json1'])
    # Scan both '<storage>' and '<temp_prefix><storage>' spreadsheets.
    for s in client.list_spreadsheet_files():
        if s['name'] in [i + server['storage'] for i in ['', temp_prefix]]:
            spreadsheet = client.open(s['name'])
            last_worksheet = None
            last_worksheet_id = 1
            # Latest worksheet = highest numeric title, or the 'old' sheet.
            for worksheet in spreadsheet.worksheets():
                title = worksheet.title
                if number_secure(title):
                    title = number_secure(title)
                    if int(title) > last_worksheet_id:
                        last_worksheet_id = int(title)
                        last_worksheet = worksheet
                elif title == 'old':
                    last_worksheet = worksheet
            if s['name'] not in spreadsheet_files:
                spreadsheet_files.append(s['name'])
            if last_worksheet:
                values = last_worksheet.col_values(1)
                if s['name'] == temp_prefix + server['storage']:
                    temp_worksheet = last_worksheet
                    old_values = values
                # Track the highest archived lot id seen so far.
                for value in values:
                    value = number_secure(value.split('/')[0])
                    if value:
                        if int(value) > old:
                            old = int(value)
    if old and spreadsheet_files:
        # Resume from the next unarchived lot.
        old += 1
        if temp_prefix + server['storage'] not in spreadsheet_files:
            create_temp_spreadsheet(client, server['storage'])
        while True:
            try:
                print_text = server['channel'] + str(old)
                text = requests.get(print_text + '?embed=1')
                response = former(text.text)
                if response.startswith('False'):
                    # Lot page not available yet — retry later.
                    await asyncio.sleep(8)
                else:
                    try:
                        temp_worksheet.update_cell(
                            len(old_values) + 1, 1, response)
                        old_values.append(response)
                    # NOTE(review): `IndexError and Exception` evaluates to
                    # plain `Exception`; consider `except Exception`.
                    except IndexError and Exception as error:
                        storage_name = server['storage']
                        search_exceed = re.search('exceeds grid limits',
                                                  str(error))
                        if search_exceed:
                            # Temp sheet is full: rotate it into a new
                            # numbered worksheet of the main spreadsheet.
                            worksheet_number = 0
                            await asyncio.sleep(100)
                            client = gspread.service_account(server['json1'])
                            temp_spreadsheet = client.open(temp_prefix +
                                                           storage_name)
                            temp_values = temp_spreadsheet.worksheet(
                                'old').col_values(1)
                            dev = Auth.send_dev_message('Устраняем таблицу',
                                                        tag=italic)
                            main_spreadsheet = client.open(storage_name)
                            for w in main_spreadsheet.worksheets():
                                if number_secure(w.title):
                                    title = number_secure(w.title)
                                    if int(title) > worksheet_number:
                                        worksheet_number = int(title)
                            main_worksheet = main_spreadsheet.add_worksheet(
                                str(worksheet_number + 1), limit, 1)
                            main_spreadsheet.batch_update(
                                objects.properties_json(
                                    main_worksheet.id, limit, temp_values))
                            dev_edited = Auth.edit_dev_message(
                                dev,
                                italic('\n— Новая: ' + storage_name + '/' +
                                       str(worksheet_number + 1)))
                            # Fresh temp spreadsheet seeded with this lot.
                            create_temp_spreadsheet(client, storage_name,
                                                    [response])
                            client.del_spreadsheet(temp_spreadsheet.id)
                            Auth.edit_dev_message(dev_edited,
                                                  italic('\n— Успешно'))
                            old_values = [response]
                            await asyncio.sleep(30)
                        else:
                            # Transient error: reopen the temp sheet, retry.
                            client = gspread.service_account(server['json1'])
                            temp_worksheet = client.open(
                                temp_prefix + storage_name).worksheet('old')
                            temp_worksheet.update_cell(
                                len(old_values) + 1, 1, response)
                            old_values.append(response)
                    old += 1
                    await asyncio.sleep(1.2)
                    objects.printer(print_text + ' Добавил в google старый лот')
            except IndexError and Exception:
                await Auth.async_exec()
    else:
        # Could not determine state from Google — report and stop.
        s_name = 'Undefined'
        for name in server_dict:
            if server_dict[name] == server:
                s_name = name
        Auth.send_dev_message(
            'Нет подключения к google.\nНе запущен CW-Notify-Storage-Oldest(' +
            s_name + ')')