def execute_from_command_line(argv=None):
    """Run a ManagementUtility for the given command-line arguments.

    Loads the project's ``.env`` file, executes the requested sub-command,
    reports any error (project base exceptions get their own display; all
    others get a printed traceback), and finally prints log location and
    elapsed time for real sub-commands.

    :param argv: command-line arguments; defaults to ``sys.argv`` when None.
    """
    import sys  # local import: used only to default argv

    # BUG FIX: the original left argv as None (the default) and later called
    # len(argv), which raised TypeError. Normalize to sys.argv up front.
    if argv is None:
        argv = sys.argv

    # Load environment variables from the .env file at the project root
    # (two directories above this module).
    app_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    load_dotenv(dotenv_path=os.path.join(app_root, '.env'))

    start_time = time.time()
    utility = ManagementUtility(argv)
    try:
        utility.execute()
    except core_exceptions.BaseException as err:
        # Project-defined base exception: it knows how to display itself.
        logger.exception(str(err))
        err.display_error()
    except Exception as err:
        # Unexpected error: log it and show the full traceback to the user.
        logger.exception(str(err))
        print("\nError Message:")
        print("\n {0}".format(str(err)))
        print("\nTraceback:")
        print("\n {0}".format(str(traceback.format_exc())))

    # Show below message only when sub command is given in the arguments.
    # Do not show below messages when sub command is not present or the
    # sub command is a help command.
    if len(argv) > 1 and argv[1] not in help_subcommands:
        print("\nYou can check logs at location : {0}".format(
            os.getenv('LOG_FILE_PATH')))
        total_time = round((time.time() - start_time), 2)
        print("Total time taken by command : {0} seconds\n".format(
            total_time))
def request(cls, match, replace=False):
    """ Request the season file to the site """
    for odds_type in OddsTypeNotation.list():
        # Skip odds types whose file already exists, unless a re-download
        # was explicitly requested.
        if MatchOddsManager.is_file_exists(match, odds_type) and not replace:
            continue

        query = {
            'p': '0',
            'e': match[MatchNotation.ID_SITE],
            'b': odds_type,
        }
        request_headers = {'Referer': 'notnull'}

        # Retry until the download succeeds; failures are logged and the
        # request is attempted again.
        while True:
            try:
                response = requests.get(cls.get_url(), params=query,
                                        headers=request_headers, timeout=50)
                MatchOddsManager.create_file(match, odds_type, response.content)
                break
            except Exception as err:
                logger.exception(err)
def get_html(self) -> BeautifulSoup:
    """Fetch ``self.url`` and return its parsed HTML.

    Follows redirects by updating ``self.url`` and recursing; raises
    ``exceptions.BrokenLink`` on 404. Other status codes fall through
    and return None (matching the original behavior).

    :raises requests.exceptions.RequestException: re-raised after logging.
    :raises exceptions.BrokenLink: when the server responds 404.
    """
    try:
        response = requests.get(self.url)
    except requests.exceptions.RequestException:
        logger.exception("Could not handle request")
        raise
    if response.status_code == 200:
        return BeautifulSoup(response.text, features="html.parser")
    # BUG FIX: the original tested `status_code // 3 == 3`, which is true
    # only for status codes 9-11 and never for real 3xx responses.
    # Integer-dividing by 100 correctly detects the redirect class.
    if response.status_code // 100 == 3:
        # NOTE(review): assumes a Location header is present on 3xx —
        # a malformed redirect without one would raise KeyError. Also,
        # requests follows redirects by default, so this path is only
        # reached if redirects were disabled elsewhere — confirm.
        self.url = response.headers['Location']
        return self.get_html()
    if response.status_code == 404:
        raise exceptions.BrokenLink()
def request(cls, season, replace=False):
    """ Request the season file to the site """
    logger.debug('request(): ' + str(season))

    # Nothing to do when the file is already present and no refresh asked.
    if SeasonManager.is_file_exists(season) and not replace:
        return

    # Keep requesting until a response is obtained; errors are logged
    # and the download is retried.
    response = None
    while response is None:
        try:
            logger.debug('Requests: ' + cls.get_url(season))
            response = requests.get(cls.get_url(season))
        except Exception as err:
            logger.exception(err)

    SeasonManager.create_file(season, response.content)
async def send_safe_message(self, chat_id: int, text: str, disable_notification: bool = False) -> bool:
    """
    Safe messages sender.

    Sends *text* to *chat_id*, handling the Telegram API error cases:
    unreachable chats are removed from the mailing list, flood limits
    trigger a sleep-and-retry, and other API errors are logged.

    :param chat_id: target chat identifier
    :param text: message body
    :param disable_notification: send silently when True
    :return: True when the message was delivered, False otherwise
    """
    try:
        await super(Mailer, self).send_message(
            chat_id, text, disable_notification=disable_notification)
    except exceptions.BotBlocked:
        logger.error(f"Target [ID:{chat_id}]: blocked by user")
        self._delete_from_chats(chat_id)
    except exceptions.ChatNotFound:
        logger.error(f"Target [ID:{chat_id}]: invalid user/chat ID")
        self._delete_from_chats(chat_id)
    except exceptions.RetryAfter as e:
        logger.error(
            f"Target [ID:{chat_id}]: Flood limit is exceeded. Sleep {e.timeout} seconds."
        )
        await asyncio.sleep(e.timeout)
        # BUG FIX: the retry previously dropped disable_notification,
        # so a silent broadcast became a loud one after a flood wait.
        return await self.send_safe_message(chat_id, text, disable_notification)
    except exceptions.UserDeactivated:
        logger.error(f"Target [ID:{chat_id}]: user/chat is deactivated")
        self._delete_from_chats(chat_id)
    except exceptions.BotKicked:
        logger.error(f"Bot was kicked from the group chat: {chat_id}")
        self._delete_from_chats(chat_id)
    except exceptions.TelegramAPIError:
        logger.exception(f"Target [ID:{chat_id}]: failed")
    else:
        logger.debug(f"Target [ID:{chat_id}]: success")
        return True
    return False
def get_links(self, max_workers=8):
    """Breadth-first crawl from ``self.url``, collecting every reachable link.

    Pages at each depth are fetched concurrently; broken links are marked,
    request errors are logged, and duplicate sightings of a known link
    increment that link's ``occurrences`` counter.

    :param max_workers: thread-pool size for concurrent page fetches.
    :return: the set of all visited ``Link`` objects.
    """
    # Map each discovered link to its canonical Link instance so that a
    # duplicate sighting can increment the *stored* object's counter.
    # BUG FIX: the original did `link.occurrences += 1`, which always
    # incremented the root link, not the link that was re-encountered.
    visited = {}
    links = set()
    # NOTE(review): `depth = Counter()` followed by `depth += 1` looks
    # suspicious — collections.Counter does not support `+= 1`. Presumably
    # Counter here is a project class with int-like semantics; confirm.
    depth = Counter()
    link = Link(self.url.lower(), depth)
    logger.info(f"Root url is {link.url}")
    links.add(link)
    visited[link] = link
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=max_workers) as executor:
        while links:
            depth += 1
            logger.info(f"Collecting depth {depth} links")
            # Fetch every page at the current depth concurrently.
            future_links_html = {
                executor.submit(link.get_html): link
                for link in links
            }
            links.clear()
            for future_link_html in concurrent.futures.as_completed(
                    future_links_html):
                try:
                    link_html = future_link_html.result()
                    logger.info(
                        f"Trying to collect links from {future_links_html[future_link_html].url}"
                    )
                    a_elems = link_html.find_all('a')
                    for a_elem in a_elems:
                        link_url = self._make_link_url(a_elem, 'href')
                        if link_url:
                            new_link = Link(link_url, depth)
                            if new_link not in visited:
                                links.add(new_link)
                                visited[new_link] = new_link
                            else:
                                # Count the repeat sighting on the stored
                                # canonical instance of this link.
                                visited[new_link].occurrences += 1
                    logger.info(f"Successfully collected links")
                except exceptions.BrokenLink:
                    # logger.warn is deprecated; use warning().
                    logger.warning(
                        f"{future_links_html[future_link_html].url} is broken"
                    )
                    future_links_html[future_link_html].is_broken = True
                except requests.RequestException:
                    logger.exception(
                        f"Something went wrong while requesting {future_links_html[future_link_html].url}"
                    )
                except Exception:
                    logger.exception("Unknown error occurred")
            logger.info(
                f"Visited {len(visited)} and has {len(links)} more links to go"
            )
    # Dict keys are the canonical Link objects; return them as a set,
    # preserving the original return type.
    return set(visited)
def __init__(self, msg, *args, **kwargs):
    """Initialize the exception and record it in the log file.

    :param msg: human-readable error message, forwarded to the base class.
    """
    super().__init__(msg, *args, **kwargs)
    # BUG FIX: the original used logger.exception(), which the logging
    # docs say should only be called from inside an `except` handler.
    # __init__ usually runs while the exception is being *constructed*
    # (no active exception), so it logged a bogus "NoneType: None"
    # traceback. Log at ERROR level instead; the message is unchanged.
    logger.error(f"{type(self)}: {msg}")