class LikeMonitor:
    """Polls a Twitter user's likes and forwards newly-liked tweets to Telegram."""

    def __init__(self, username: str, telegram_chat_ids: str):
        self.sleeper = Sleeper(30)
        self.username = username
        # Snapshot of like ids seen so far; run() only reports ids not in here.
        self.existing_like_id_set = get_like_id_set(self.get_likes())
        logging.info('Init monitor succeed.\nUsername: {}\nLike ids: {}'.format(
            self.username, self.existing_like_id_set))
        self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                                  username=username,
                                                  module='Like')
        self.last_log_time = datetime.now()

    def get_likes(self, max_number: int = 200) -> list:
        """Fetch the user's most recent likes, retrying until the API answers."""
        url = 'https://api.twitter.com/1.1/favorites/list.json'
        params = {'screen_name': self.username, 'count': max_number}
        while True:
            response = send_get_request(url, params)
            if response:
                return response
            # Empty/failed response: back off before retrying.
            self.sleeper.sleep(normal=False)

    def run(self):
        """Poll forever, announcing likes that were not seen before."""
        while True:
            self.sleeper.sleep(normal=True)
            latest = self.get_likes()
            # Oldest-first over all but the trailing 10 entries, as before.
            for entry in reversed(latest[:-10]):
                if entry['id'] not in self.existing_like_id_set:
                    self.telegram_notifier.send_message('@{}: {}'.format(
                        entry['user']['screen_name'], entry['text']))
            self.existing_like_id_set |= get_like_id_set(latest)
            if datetime.now() - self.last_log_time > timedelta(hours=1):
                logging.info('Existing like id number: {}'.format(
                    len(self.existing_like_id_set)))
                self.last_log_time = datetime.now()
def __init__(self, username, client_id, token, channel):
    """Initialize the Twitch chat bot and connect to the IRC server.

    Args:
        username: bot account name (also used as IRC nick/real name).
        client_id: Twitch application Client-ID for API calls.
        token: OAuth token used for the IRC connection.
        channel: channel name without the leading '#'.
    """
    self.client_id = client_id
    self.token = token
    self.channel = '#' + channel
    self.accounts_controller = AccountsController()
    self.commands_controller = CommandsController()
    self.telegram_notifier = TelegramNotifier()
    self.twitch_client = TwitchClient()
    self.is_duel_on = False

    # Get the channel id, we will need this for v5 API calls.
    # NOTE(review): this span was corrupted in the source ('******');
    # reconstructed per the Twitch v5 'Get Users' API (login lookup).
    url = 'https://api.twitch.tv/kraken/users?login=' + username
    headers = {
        'Client-ID': client_id,
        'Accept': 'application/vnd.twitchtv.v5+json'
    }
    r = requests.get(url, headers=headers).json()
    self.channel_id = r['users'][0]['_id']
    logger.info(self.channel_id)

    # Create IRC bot connection
    server = 'irc.twitch.tv'
    port = 6667
    logger.info('Connecting to ' + server + ' on port ' + str(port) + '...')
    irc.bot.SingleServerIRCBot.__init__(self,
                                        [(server, port, 'oauth:' + token)],
                                        username, username)
def __init__(self, username: str, telegram_chat_ids: str):
    """Set up the like monitor: remember the ids of all current likes."""
    self.sleeper = Sleeper(30)
    self.username = username
    # Baseline snapshot: only likes outside this set will be reported later.
    baseline_ids = get_like_id_set(self.get_likes())
    self.existing_like_id_set = baseline_ids
    logging.info('Init monitor succeed.\nUsername: {}\nLike ids: {}'.format(
        self.username, self.existing_like_id_set))
    self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                              username=username,
                                              module='Like')
    self.last_log_time = datetime.now()
def __init__(self, username: str, telegram_chat_ids: str):
    """Set up the following monitor: resolve the user id and snapshot the list."""
    self.sleeper = Sleeper(120)
    self.username = username
    self.user_id = get_user_id(username)
    # Baseline snapshot of who the account follows; run() diffs against it.
    self.following_users = self.get_all_following_users(self.user_id)
    logging.info(
        'Init monitor succeed.\nUsername: {}\nUser id: {}\nFollowing users: {}'
        .format(username, self.user_id, self.following_users))
    self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                              username=username,
                                              module='Following')
    self.last_log_time = datetime.now()
def __init__(self, username: str, telegram_chat_ids: str):
    """Set up the tweet monitor: record the newest tweet id as the cursor."""
    self.sleeper = Sleeper(10)
    self.user_id = get_user_id(username)
    # First fetch establishes the cursor; only newer tweets get reported.
    initial_batch = self.get_tweets()
    self.last_tweet_id = initial_batch[0]['id']
    logging.info(
        'Init monitor succeed.\nUsername: {}\nUser id: {}\nLast tweet: {}'.
        format(username, self.user_id, initial_batch[0]))
    self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                              username=username,
                                              module='Tweet')
    self.last_log_time = datetime.now()
def find():
    """Run every configured crawler and push each found apartment to Telegram."""
    notifier = TelegramNotifier(settings.TELEGRAM_BOT_TOKEN,
                                settings.TELEGRAM_CHAT_ID)
    since = get_since()
    crawlers = [
        OnlinerCrawler(since=since, timezone=settings.TIMEZONE),
        neagent.Neagent(),
    ]
    for source in crawlers:
        LOGGER.info("searching using crawler: {}".format(
            source.__class__.__name__))
        for listing in source.find():
            LOGGER.debug(listing)
            LOGGER.debug("=======")
            notifier.notify(str(listing))
class TweetMonitor:
    """Polls a user's timeline and forwards every new tweet to Telegram."""

    def __init__(self, username: str, telegram_chat_ids: str):
        self.sleeper = Sleeper(10)
        self.user_id = get_user_id(username)
        # First fetch establishes the cursor; only newer tweets get reported.
        initial_batch = self.get_tweets()
        self.last_tweet_id = initial_batch[0]['id']
        logging.info(
            'Init monitor succeed.\nUsername: {}\nUser id: {}\nLast tweet: {}'.
            format(username, self.user_id, initial_batch[0]))
        self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                                  username=username,
                                                  module='Tweet')
        self.last_log_time = datetime.now()

    def get_tweets(self, since_id: str = None) -> list:
        """Fetch up to 100 tweets newer than *since_id*, retrying on failure."""
        url = 'https://api.twitter.com/2/users/{}/tweets'.format(self.user_id)
        params = {'max_results': 100}
        if since_id:
            params['since_id'] = since_id
        while True:
            payload = send_get_request(url, params)
            if payload:
                return payload.get('data', [])
            self.sleeper.sleep(normal=False)

    def run(self):
        """Poll forever, forwarding each new tweet and advancing the cursor."""
        while True:
            self.sleeper.sleep(normal=True)
            batch = self.get_tweets(since_id=self.last_tweet_id)
            if batch:
                for tweet in batch:
                    self.telegram_notifier.send_message(tweet['text'])
                self.last_tweet_id = batch[0]['id']
            if datetime.now() - self.last_log_time > timedelta(hours=1):
                logging.info('Last tweet id: {}'.format(self.last_tweet_id))
                self.last_log_time = datetime.now()
class FollowingMonitor:
    """Watches a Twitter account's following list and reports every change."""

    def __init__(self, username: str, telegram_chat_ids: str):
        self.sleeper = Sleeper(120)
        self.username = username
        self.user_id = get_user_id(username)
        # Baseline snapshot of who the account follows; run() diffs against it.
        self.following_users = self.get_all_following_users(self.user_id)
        logging.info(
            'Init monitor succeed.\nUsername: {}\nUser id: {}\nFollowing users: {}'
            .format(username, self.user_id, self.following_users))
        self.telegram_notifier = TelegramNotifier(chat_ids=telegram_chat_ids,
                                                  username=username,
                                                  module='Following')
        self.last_log_time = datetime.now()

    def get_all_following_users(self, user_id: str) -> set:
        """Collect every username the account follows, walking all result pages."""
        url = 'https://api.twitter.com/2/users/{}/following'.format(user_id)
        params = {'max_results': 1000}
        collected = []
        while True:
            payload = send_get_request(url, params)
            while not payload:
                self.sleeper.sleep(normal=False)
                payload = send_get_request(url, params)
            collected.extend(payload.get('data', []))
            token = payload.get('meta', {}).get('next_token', '')
            if not token:
                break
            params['pagination_token'] = token
        return {entry.get('username', '') for entry in collected}

    def get_user_details(self, username: str) -> str:
        """Build a human-readable profile summary for *username*."""
        url = 'https://api.twitter.com/2/users/by/username/{}'.format(username)
        params = {
            'user.fields': 'name,description,url,created_at,public_metrics'
        }
        payload = send_get_request(url, params)
        errors = payload.get('errors', None)
        if errors:
            return '\n'.join([error.get('detail', '') for error in errors])
        data = payload.get('data', {})
        lines = ['Name: {}'.format(data.get('name', ''))]
        lines.append('Bio: {}'.format(data.get('description', '')))
        website = data.get('url', '')
        if website:
            lines.append('Website: {}'.format(website))
        lines.append('Joined at: {}'.format(data.get('created_at', '')))
        public_metrics = data.get('public_metrics', {})
        lines.append('Following: {}'.format(
            public_metrics.get('following_count', -1)))
        lines.append('Followers: {}'.format(
            public_metrics.get('followers_count', -1)))
        lines.append('Tweets: {}'.format(
            public_metrics.get('tweet_count', -1)))
        # Only walk their whole following list when it is small enough.
        if public_metrics.get('following_count', 2000) < 2000:
            their_following = self.get_all_following_users(
                get_user_id(username))
            lines.append('Follow each other: {}'.format(
                'Yes' if self.username in their_following else 'No'))
        return '\n'.join(lines)

    def detect_changes(self, old_following_users: set,
                       new_following_users: set):
        """Send a Telegram message for each follow/unfollow between snapshots."""
        if old_following_users == new_following_users:
            return
        # A huge jump is probably an API glitch; skip the whole round.
        max_changes = max(len(old_following_users) / 2, 10)
        if abs(len(old_following_users) -
               len(new_following_users)) > max_changes:
            return
        unfollowed = old_following_users - new_following_users
        if unfollowed:
            logging.info('Unfollow: {}'.format(unfollowed))
            for user in unfollowed:
                self.telegram_notifier.send_message('Unfollow: @{}\n{}'.format(
                    user, self.get_user_details(user)),
                                                    disable_preview=True)
        followed = new_following_users - old_following_users
        if followed:
            logging.info('Follow: {}'.format(followed))
            for user in followed:
                self.telegram_notifier.send_message('Follow: @{}\n{}'.format(
                    user, self.get_user_details(user)),
                                                    disable_preview=True)

    def run(self):
        """Poll forever, diffing the following list against the last snapshot."""
        while True:
            self.sleeper.sleep(normal=True)
            current = self.get_all_following_users(self.user_id)
            self.detect_changes(self.following_users, current)
            self.following_users = current
            if datetime.now() - self.last_log_time > timedelta(hours=1):
                logging.info('Number of following users: {}'.format(
                    len(self.following_users)))
                self.last_log_time = datetime.now()
class TwitchBot(irc.bot.SingleServerIRCBot):
    """Twitch chat bot for one channel: duel mini-game, moderator-managed
    custom commands, stream game switching, and Telegram mirroring of
    trigger phrases."""

    def __init__(self, username, client_id, token, channel):
        """Connect to Twitch IRC and resolve the channel id via the v5 API."""
        self.client_id = client_id
        self.token = token
        self.channel = '#' + channel
        self.accounts_controller = AccountsController()
        self.commands_controller = CommandsController()
        self.telegram_notifier = TelegramNotifier()
        self.twitch_client = TwitchClient()
        self.is_duel_on = False

        # Get the channel id, we will need this for v5 API calls.
        # NOTE(review): this span was corrupted in the source ('******');
        # reconstructed per the Twitch v5 'Get Users' API (login lookup).
        url = 'https://api.twitch.tv/kraken/users?login=' + username
        headers = {
            'Client-ID': client_id,
            'Accept': 'application/vnd.twitchtv.v5+json'
        }
        r = requests.get(url, headers=headers).json()
        self.channel_id = r['users'][0]['_id']
        logger.info(self.channel_id)

        # Create IRC bot connection
        server = 'irc.twitch.tv'
        port = 6667
        logger.info('Connecting to ' + server + ' on port ' + str(port) +
                    '...')
        irc.bot.SingleServerIRCBot.__init__(self,
                                            [(server, port, 'oauth:' + token)],
                                            username, username)

    def on_welcome(self, c, e):
        """Request Twitch IRC capabilities and join the configured channel."""
        logger.info('Joining ' + self.channel)
        # You must request specific capabilities before you can use them
        c.cap('REQ', ':twitch.tv/membership')
        c.cap('REQ', ':twitch.tv/tags')
        c.cap('REQ', ':twitch.tv/commands')
        c.join(self.channel)

    def on_pubmsg(self, c, e):
        """Dispatch '!' commands and mirror trigger phrases to Telegram."""
        # If a chat message starts with an exclamation point, try to run it
        # as a command.
        if e.arguments[0][:1] == '!':
            receiver_name = e.tags[2]['value'].lower()
            cmd = e.arguments[0].split(' ')[0][1:]
            logger.info('Received command: {} from: {}'.format(
                cmd, receiver_name))
            self.do_command(e, cmd)
        message = str(e.arguments[0]).lower()
        # Forward the message to Telegram when it contains a trigger word.
        if notifier_triggers[0] in message or notifier_triggers[1] in message:
            self.telegram_notifier.send_message(message=message)
        return

    def do_command(self, e, cmd):
        """Execute chat command *cmd*; sender name comes from message tags.

        Moderator-only commands are gated by moders_white_list.
        """
        c = self.connection
        receiver_name = e.tags[2]['value'].lower()
        # To add a new command, add another `if cmd.startswith(...)` branch.
        # TODO: Need to move all commands to another place

        # register to duels
        if cmd.startswith('reg'):
            if not self.is_duel_on:
                return
            appended = self.accounts_controller.append_account(
                account_name=receiver_name)
            if not appended:
                return
            c.privmsg(self.channel,
                      text='{} зареган EZ !duel для инфы по дуэлям]'.format(
                          receiver_name))

        # get duel stats
        if cmd.startswith('stats_duel'):
            accounts = self.accounts_controller.get_accounts()
            if receiver_name in accounts:
                win_count = self.accounts_controller.get_win_count(
                    receiver_name)
                c.privmsg(self.channel,
                          text='{} Количество побед: {}'.format(
                              receiver_name, win_count))

        # get top 5 duelists
        if cmd.startswith('best_duelists'):
            if receiver_name not in moders_white_list:
                return
            report = self.accounts_controller.get_top5()
            c.privmsg(self.channel, text=str(report))

        # start duel
        if cmd.startswith('duel'):
            if not self.is_duel_on:
                return
            accounts = self.accounts_controller.get_accounts()
            enemy_name = e.arguments[0].replace('!duel ', '').lower()
            if receiver_name in moders_list:
                c.privmsg(self.channel,
                          text='{} умный самый? Дисреспект за абуз'.format(
                              receiver_name))
                return
            if enemy_name == '!duel':
                # Bare '!duel' without an opponent: show usage.
                c.privmsg(self.channel,
                          text='{} !duel [имя зереганного челика]'.format(
                              receiver_name))
                logger.info('started duel: {} vs {}'.format(
                    receiver_name, enemy_name))
                return
            elif enemy_name not in accounts:
                c.privmsg(self.channel,
                          text='{} твой чел не зареган'.format(receiver_name))
                return
            elif receiver_name not in accounts:
                c.privmsg(self.channel,
                          text='{} зарегайся !reg'.format(receiver_name))
                return
            loser = self.accounts_controller.get_account_duel_result(
                receiver_name, enemy_name)
            if not loser:
                logger.error('duel was failed')
                return
            # Time the loser out for a random 1-600 seconds.
            c.privmsg(self.channel,
                      text='/timeout {} {}'.format(loser,
                                                   random.randint(1, 600)))
            logger.info('loser is {}'.format(loser))

        # turn off duels (comment fixed: was mislabeled 'turn on duels')
        if cmd.startswith('turn_off_duel'):
            if receiver_name not in moders_white_list:
                return
            self.is_duel_on = False
            c.privmsg(self.channel, text='Дуэли вырублены, мечи в ножны!')

        # turn on duels (comment fixed: was mislabeled 'turn off duels')
        if cmd.startswith('turn_on_duel'):
            if receiver_name not in moders_white_list:
                return
            self.is_duel_on = True
            c.privmsg(
                self.channel,
                text=
                'SMOrc Дуэли активированы !reg для реги !duel для сражений SMOrc'
            )

        # change game
        if cmd.startswith('change_game'):
            if receiver_name not in moders_white_list:
                return
            game = e.arguments[0].replace('!{} '.format(cmd), '')
            is_game_changed = self.twitch_client.change_game(game=game)
            if is_game_changed:
                c.privmsg(self.channel, text='Игра сменена на {}'.format(game))
            else:
                c.privmsg(
                    self.channel,
                    text='чот не получилось игру установить сори FeelsBadMan')

        # create a custom command
        if cmd.startswith('ебануть_команду'):
            if receiver_name not in moders_white_list:
                return
            new_command_list = str(e.arguments[0]).split(' ')
            # Leftover debug print()s replaced with logger.debug for
            # consistency with the rest of the bot.
            logger.debug(new_command_list)
            logger.debug(len(new_command_list))
            if len(new_command_list) != 3:
                c.privmsg(
                    self.channel,
                    text=
                    'Дурачок, команда подана неправильно. Образец !ебануть_команду имя_команды ответ_ответ вместо пробела _'
                )
                return
            command_name = new_command_list[1]
            command_answer = str(new_command_list[2]).replace("_", " ")
            c.privmsg(self.channel,
                      text=self.commands_controller.create_command(
                          command_name=command_name,
                          command_answer=command_answer))

        # delete a custom command
        if cmd.startswith('удали_команду'):
            if receiver_name not in moders_white_list:
                return
            command = str(e.arguments[0]).split(' ')[1]
            c.privmsg(self.channel,
                      text=self.commands_controller.delete_command(
                          command_name=command))

        # update a custom command
        if cmd.startswith('обнови_команду'):
            if receiver_name not in moders_white_list:
                return
            new_command_list = str(e.arguments[0]).split(' ')
            if len(new_command_list) != 3:
                c.privmsg(
                    self.channel,
                    text=
                    'Дурачок, команда подана неправильно. Образец !обнови_команду имя_команды новое_значение вместо пробела _'
                )
                return
            command_name = new_command_list[1]
            command_answer = str(new_command_list[2]).replace("_", " ")
            c.privmsg(self.channel,
                      text=self.commands_controller.update_command(
                          command_name=command_name,
                          new_value=command_answer))

        # list all custom commands
        if cmd.startswith('получить_команды'):
            if receiver_name not in moders_white_list:
                return
            c.privmsg(self.channel,
                      text=self.commands_controller.get_all_cmds())

        # Fall through: look the command up among stored custom commands.
        if cmd:
            msg = self.commands_controller.execute_command(cmd)
            if msg:
                c.privmsg(self.channel, text=msg)
# configs
telegram_bot_key = "your-api-token"

# Hours (24h clock) during which matches are expected, per weekday.
match_days = [
    [20, 21, 22, 23, 0, 3],  # Monday (8pm-12am)
    [20, 21, 22, 23, 0],  # Tuesday (8pm-12am)
    [20, 21, 22, 23, 0],  # Wednesday (8pm-12am)
    [20, 21, 22, 23, 0],  # Thursday (8pm-12am)
    [20, 21, 22, 23, 0],  # Friday (8pm-12am)
    [16, 17, 18, 19, 20, 21, 22, 23, 0],  # Saturday (4pm-8pm, 8pm-12am)
    [16, 17, 18, 19, 20, 21, 22, 23, 0],  # Sunday (4pm-8pm, 8pm-12am)
]

sc = Scraper()
tn = TelegramNotifier(telegram_bot_key)


@catch_exceptions(cancel_on_failure=True)
@with_logging
def loop():
    """Scrape once and fire a Telegram notification when a new six is found."""
    logger_instance.info("Looking for a new ball...")
    data = sc.scrape()
    # Both the scrape itself and the payload must flag success (exactly True).
    if data['status'] is True and data['data']['status'] is True:
        logger_instance.info("Found a new six!")
        logger_instance.info(data['data'])
        tn.notify()


schedule.every(1).minutes.do(loop)
def __init__(self):
    """Wire up the helpers this scrapper relies on."""
    # Page fetcher, file persistence, and Telegram sender — constructed in
    # the original order in case any constructor has side effects.
    self.scrapper = WebScrapper()
    self.io = FileIO()
    self.notifier = TelegramNotifier()
class Hse28Scrapper(Hse28Setting):
    """Scrapes 28hse.com property listings newer than the last-seen id,
    filters them by size/age/price rules from Hse28Setting, and sends the
    remaining listings to Telegram."""

    def __init__(self):
        # Page fetcher, JSON file store, and Telegram sender.
        self.scrapper = WebScrapper()
        self.io = FileIO()
        self.notifier = TelegramNotifier()

    def _search_result_checker(self, html):
        """Return whether *html* contains the search-result container tag."""
        return check_html_element_exist(html, self.RESULT_TAG)

    def _property_page_checker(self, html):
        """Return whether *html* contains the property-details table tag."""
        return check_html_element_exist(html, self.PROPERTY_TAB)

    def get_last_max_id(self):
        """Load the highest property id seen so far from the temp-id file."""
        temp_id = self.io.load_file(DATA_PATH, self.TEMP_ID_FILE, 'json')
        max_id = int(temp_id['max_id'])
        return max_id

    async def sort_filter_page(self, session):
        """Apply the price filter and sort order on the search page.

        Fills the low/high price boxes, clicks the price button, then picks
        the sort option; each step waits for LOAD_TAG to appear.
        """
        session = await async_fill_box(session, self.LOW_PRICE_TAG,
                                       self.LOWEST_PRICE)
        session = await async_fill_box(session, self.HIGH_PRICE_TAG,
                                       self.HIGHEST_PRICE)
        session = await async_click_button(session, self.PRICE_BUTTON_TAG,
                                           self.LOAD_TAG)
        session = await async_click_option(session, self.SORT_TAG,
                                           self.LOAD_TAG)
        return session

    def get_search_url_list(self):
        """Build one search-result URL per configured district."""
        url_list = [
            self.RESULT_URL.format(district) for district in self.DISTRICT
        ]
        return url_list

    def get_html_result_pages(self, url_list):
        """Fetch all search-result pages asynchronously, applying the
        sort/filter actions and validating each page."""
        response_list = self.scrapper.browse_multiple_html(
            url_list,
            extra_action=self.sort_filter_page,
            html_checker=self._search_result_checker,
            asyn=True)
        return response_list

    @staticmethod
    def _get_property_id_from_url(url):
        """Extract the numeric listing id from a 28hse buy-property URL;
        returns -1 for URLs that are not buy-property links."""
        if 'https://www.28hse.com/buy-property-' not in url:
            property_id = -1
        else:
            property_id = int(
                url.replace('https://www.28hse.com/buy-property-',
                            '').replace('.html', ''))
        return property_id

    def get_property_url_from_html(self, response, max_id):
        """Pull listing URLs newer than *max_id* out of one result page.

        On fetch failure, reports the error via Telegram and returns [].
        """
        if response['ok']:
            soup = BeautifulSoup(response['message'], 'html.parser')
            result_list = soup.select(self.RESULT_TAG)[0]
            property_url_list = [
                property_tag.get('href')
                for property_tag in result_list.select('a')
            ]
            # Keep only listings with an id above the last processed one.
            property_url_list = [
                property_url for property_url in property_url_list
                if self._get_property_id_from_url(property_url) > max_id
            ]
            return property_url_list
        else:
            self.notifier.send_message(
                'Unable to load result for page {}. {}'.format(
                    response['url'], response['error']))
            return []

    def aggregate_property_url(self, response_list, max_id):
        """Flatten new-listing URLs from every result page into one list."""
        property_url_list = list(
            map(
                lambda response: self.get_property_url_from_html(
                    response, max_id), response_list))
        property_url_list = [
            url for property_url in property_url_list for url in property_url
        ]
        return property_url_list

    def get_property_pages(self, property_url_list):
        """Fetch each individual property page, validating its details table."""
        response_list = self.scrapper.load_multiple_html(
            property_url_list, html_checker=self._property_page_checker)
        return response_list

    def extract_property_pages(self, response):
        """Parse one property page into a single-row DataFrame.

        On fetch failure, reports the error via Telegram and returns an
        empty DataFrame.
        """
        if response['ok']:
            soup = BeautifulSoup(response['message'], 'html.parser')
            # First table inside PROPERTY_TAB: key/value pairs, transposed
            # so keys become columns.
            property_table = pd.read_html(
                str(soup.select(self.PROPERTY_TAB)[0]))[0]
            property_table = property_table.set_index(0).T
            property_table = property_table.assign(**{'URL': response['url']})
            return property_table
        else:
            self.notifier.send_message(
                'Unable to load property for page {}. {}'.format(
                    response['url'], response['error']))
            return pd.DataFrame([])

    def aggregate_property_table(self, response_list):
        """Concatenate all parsed pages, indexed by the 28HSE listing id."""
        property_data_list = list(
            map(self.extract_property_pages, response_list))
        if len(property_data_list) > 0:
            property_data = pd.concat(property_data_list, sort=False)
            property_data = property_data.set_index('28HSE 樓盤編號')
        else:
            property_data = pd.DataFrame([])
        return property_data

    def clean_filter_property_table(self, property_data):
        """Tidy the price/address columns and drop unwanted listings.

        Filters out listings smaller than SMALLEST_SIZE, older than
        OLDEST_PROPERTY (nulls kept), and subsidized ('居屋') listings;
        drops bookkeeping columns.
        """
        property_data = property_data.assign(
            **{
                '售價': property_data.loc[:, '售價'].str.split('每月供款').str[0],
                '物業地址': property_data.loc[:, '物業地址'].str.replace('屋苑位置', '')
            })
        property_data = property_data.loc[
            (property_data.loc[:, '實用面積(呎)'].str.replace('[^0-9]', '').
             astype(float) >= self.SMALLEST_SIZE) |
            (property_data.loc[:, '實用面積(呎)'].isnull())]
        property_data = property_data.loc[
            (property_data.loc[:, '樓齡(年)'].str.replace('[^0-9]', '').
             astype(float) <= self.OLDEST_PROPERTY) |
            (property_data.loc[:, '樓齡(年)'].isnull())]
        property_data = property_data.loc[~property_data.loc[:, '售價'].str.
                                          contains('居屋')]
        property_data = property_data.drop(
            ['物業編號', '樓盤狀態', '瀏覽人次', '收藏人次', '刊登或續期日', '記錄更新', '放盤到期日'],
            axis=1)
        property_data = property_data.fillna('')
        return property_data

    def save_max_id(self, property_data):
        """Persist the highest listing id seen, skipping when no valid ids."""
        max_id = str(property_data.index.astype(int).max())
        if max_id != 'nan':
            max_id = {'max_id': max_id}
            self.io.save_file(max_id, DATA_PATH, self.TEMP_ID_FILE, 'json')

    @staticmethod
    def convert_property_dict_to_str(property_dict):
        """Render one property record as 'key: value' lines."""
        message_str = ''
        for key, value in property_dict.items():
            message_str += '{}: {}\n'.format(key, value)
        return message_str

    def send_property_details(self, property_data):
        """Send every remaining listing to each configured Telegram chat."""
        property_records = property_data.to_dict('records')
        message_list = list(
            map(self.convert_property_dict_to_str, property_records))
        for chat_id in CHAT_ID:
            # Retarget the shared notifier per chat before sending.
            self.notifier.CHAT_ID = chat_id
            list(map(self.notifier.send_message, message_list))

    def hse28_scrapping(self):
        """Full pipeline: search, collect new listings, filter, notify."""
        max_id = self.get_last_max_id()
        url_list = self.get_search_url_list()
        response_list = self.get_html_result_pages(url_list)
        property_url_list = self.aggregate_property_url(response_list, max_id)
        response_list = self.get_property_pages(property_url_list)
        property_data = self.aggregate_property_table(response_list)
        if len(property_data) > 0:
            property_data = self.clean_filter_property_table(property_data)
            if len(property_data) > 0:
                self.save_max_id(property_data)
                self.send_property_details(property_data)
class Hse730(Hse730Setting):
    """Scrapes house730.com property listings not yet seen (tracked by a
    rolling list of checked ids), filters them by size/age/price rules from
    Hse730Setting, and sends the remaining listings to Telegram."""

    def __init__(self):
        # Page fetcher, JSON file store, and Telegram sender.
        self.scrapper = WebScrapper()
        self.io = FileIO()
        self.notifier = TelegramNotifier()

    def _search_result_checker(self, html):
        """Return whether *html* contains the search-result container tag."""
        return check_html_element_exist(html, self.RESULT_TAG)

    def _property_page_checker(self, html):
        """Return whether *html* contains the property-details table tag."""
        return check_html_element_exist(html, self.PROPERTY_TAB)

    def get_check_id(self):
        """Load the list of already-processed listing ids."""
        sent_id = self.io.load_file(DATA_PATH, self.CHECK_ID_FILE, 'json')
        return sent_id

    def get_search_url_list(self):
        """Build search URLs for the first CHECK_NO_PAGE pages, with the
        configured districts and price range baked into the query."""
        url_list = [
            self.RESULT_URL.format(''.join(self.DISTRICT), page_no + 1,
                                   self.LOWEST_PRICE, self.HIGHEST_PRICE)
            for page_no in range(self.CHECK_NO_PAGE)
        ]
        return url_list

    def get_html_result_pages(self, url_list):
        """Fetch all search-result pages asynchronously with validation."""
        response_list = self.scrapper.load_multiple_html(
            url_list, html_checker=self._search_result_checker, asyn=True)
        return response_list

    @staticmethod
    def _get_property_id_from_url(url):
        """Extract the numeric listing id from a house730 buy-property URL;
        returns -1 for URLs that are not buy-property links."""
        if '/buy-property-' not in url:
            property_id = -1
        else:
            property_id = int(
                url.replace('https://www.house730.com',
                            '').replace('/buy-property-',
                                        '').replace('.html', ''))
        return property_id

    def save_check_id(self, new_check_id_list):
        """Persist the rolling list of processed listing ids."""
        self.io.save_file(new_check_id_list, DATA_PATH, self.CHECK_ID_FILE,
                          'json')

    def get_property_url_from_html(self, response, check_id):
        """Pull unseen listing URLs out of one result page.

        On fetch failure, reports the error via Telegram and returns [].
        """
        if response['ok']:
            soup = BeautifulSoup(response['message'], 'html.parser')
            result_list = soup.select(self.RESULT_TAG)[0]
            property_url_list = [
                property_tag.get('href')
                for property_tag in result_list.select('a.name')
            ]
            # Prefix relative links and keep only ids not seen before.
            property_url_list = [
                'https://www.house730.com' + property_url
                for property_url in property_url_list
                if self._get_property_id_from_url(property_url) not in check_id
            ]
            return property_url_list
        else:
            self.notifier.send_message(
                'Unable to load result for page {}. {}'.format(
                    response['url'], response['error']))
            return []

    def aggregate_property_url(self, response_list, check_id):
        """Flatten unseen-listing URLs from every result page into one list."""
        property_url_list = list(
            map(
                lambda response: self.get_property_url_from_html(
                    response, check_id), response_list))
        property_url_list = [
            url for property_url in property_url_list for url in property_url
        ]
        return property_url_list

    def append_new_check_id_list(self, property_url_list, check_id):
        """Prepend the new ids to the checked list, keep the newest 50,
        and persist the result."""
        new_check_id_list = [
            self._get_property_id_from_url(property_url)
            for property_url in property_url_list
        ]
        new_check_id_list += check_id
        new_check_id_list = new_check_id_list[:50]
        self.save_check_id(new_check_id_list)

    def get_property_pages(self, property_url_list):
        """Fetch each individual property page, validating its details table."""
        response_list = self.scrapper.load_multiple_html(
            property_url_list, html_checker=self._property_page_checker)
        return response_list

    def extract_property_pages(self, response):
        """Parse one property page into a single-row DataFrame.

        On fetch failure, reports the error via Telegram and returns an
        empty DataFrame.
        """
        if response['ok']:
            soup = BeautifulSoup(response['message'], 'html.parser')
            # First table inside PROPERTY_TAB: key/value pairs, transposed
            # so keys become columns.
            property_table = pd.read_html(
                str(soup.select(self.PROPERTY_TAB)[0]))[0]
            property_table = property_table.set_index(0).T
            property_table = property_table.assign(**{'URL': response['url']})
            return property_table
        else:
            self.notifier.send_message(
                'Unable to load property for page {}. {}'.format(
                    response['url'], response['error']))
            return pd.DataFrame([])

    def aggregate_property_table(self, response_list):
        """Concatenate all parsed pages, indexed by the House730 listing id."""
        property_data_list = list(
            map(self.extract_property_pages, response_list))
        if len(property_data_list) > 0:
            property_data = pd.concat(property_data_list, sort=False)
            property_data = property_data.set_index('House730樓盤編號')
        else:
            property_data = pd.DataFrame([])
        return property_data

    def clean_filter_property_table(self, property_data):
        """Tidy the price/address columns and drop unwanted listings.

        Filters out listings smaller than SMALLEST_SIZE, older than
        OLDEST_PROPERTY (nulls kept), and subsidized ('居屋') listings;
        drops bookkeeping columns.
        """
        property_data = property_data.assign(
            **{
                '售價': property_data.loc[:, '售價'].str.split('按揭計算機').str[0],
                '樓盤地址': property_data.loc[:, '樓盤地址'].str.replace('屋苑位置', '')
            })
        property_data = property_data.loc[
            (property_data.loc[:, '實用面積(呎)'].str.replace('[^0-9]', '').
             astype(float) >= self.SMALLEST_SIZE) |
            (property_data.loc[:, '實用面積(呎)'].isnull())]
        property_data = property_data.loc[
            (property_data.loc[:, '樓齡(年)'].str.replace('[^0-9]', '').
             astype(float) <= self.OLDEST_PROPERTY) |
            (property_data.loc[:, '樓齡(年)'].isnull())]
        property_data = property_data.loc[~property_data.loc[:, '售價'].str.
                                          contains('居屋')]
        property_data = property_data.drop(
            ['物業編號', '樓盤狀態', '瀏覽人次', '刊登或續期日', '記錄更新', '放盤到期日'], axis=1)
        property_data = property_data.fillna('')
        return property_data

    @staticmethod
    def convert_property_dict_to_str(property_dict):
        """Render one property record as 'key: value' lines."""
        message_str = ''
        for key, value in property_dict.items():
            message_str += '{}: {}\n'.format(key, value)
        return message_str

    def send_property_details(self, property_data):
        """Send every remaining listing to each configured Telegram chat."""
        property_records = property_data.to_dict('records')
        message_list = list(
            map(self.convert_property_dict_to_str, property_records))
        for chat_id in CHAT_ID:
            # Retarget the shared notifier per chat before sending.
            self.notifier.CHAT_ID = chat_id
            list(map(self.notifier.send_message, message_list))

    def hse730_scrapping(self):
        """Full pipeline: search, collect unseen listings, filter, notify."""
        check_id = self.get_check_id()
        url_list = self.get_search_url_list()
        response_list = self.get_html_result_pages(url_list)
        property_url_list = self.aggregate_property_url(
            response_list, check_id)
        self.append_new_check_id_list(property_url_list, check_id)
        response_list = self.get_property_pages(property_url_list)
        property_data = self.aggregate_property_table(response_list)
        if len(property_data) > 0:
            property_data = self.clean_filter_property_table(property_data)
            if len(property_data) > 0:
                self.send_property_details(property_data)