def get_exchangerate(url='http://rate.bot.com.tw/xrt?Lang=zh-TW'):
    """Get Taiwan bank exchange rates."""
    log_path = os.path.join(os.getcwd(), 'logs',
                            log_time() + '-' + str(uuid.uuid1()) + '.log')
    lazy_logger.log_to_console(logger)
    lazy_logger.log_to_rotated_file(logger=logger, file_name=log_path)
    logger.info('logger file: {0}'.format(log_path))
    try:
        logger.info('start connect {}'.format(url))
        resp = requests.get(url)  # use the url argument instead of a hard-coded address
        logger.info('connect success, get exchange')
        soup = BeautifulSoup(resp.text, 'html.parser')
        rows = soup.find('table', 'table').tbody.find_all('tr')
        rateset = []
        for row in rows:
            logger.info("got: {}".format(list(row.stripped_strings)[0]))
            rateset.append(" ,".join([s for s in row.stripped_strings]))
        currency = '\n'.join(rateset)
        logger.info('get exchange success')
    except Exception as e:
        ret = OrderedDict((('ret', -1), ('status', e), ('version', '')))
        logs = get_log(file=log_path, title='get_exchangerate_job')
        ret.update(logs)
        return ret
    ret = OrderedDict((('ret', 0), ('status', 'success.'),
                       ('values', currency), ('version', '1.00')))
    logs = get_log(file=log_path, title='get_exchangerate_job')
    ret.update(logs)
    return ret
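# A minimal consumption sketch (the helper name 'print_exchangerate' is
# hypothetical, not part of the original code): 'ret' is 0 on success and -1
# on failure, and 'values' carries the comma-joined rate rows.
def print_exchangerate():
    result = get_exchangerate()
    if result['ret'] == 0:
        print(result['values'])
    else:
        print('fetch failed: {}'.format(result['status']))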
def lazylog(f):
    """Decorator: set up a per-call log file and pass its path to the wrapped job.

    Reconstructed as a decorator, since the wrapper forwards to `f` and injects
    `log_path` into the keyword arguments.
    """
    def wrapper(*args, **kwargs):
        log_path = os.path.join(os.getcwd(), 'logs',
                                log_time() + '-' + str(uuid.uuid1()) + '.log')
        lazy_logger.log_to_console(logger)
        lazy_logger.log_to_rotated_file(logger=logger, file_name=log_path)
        logger.info('logger file: {0}'.format(log_path))
        kwargs['log_path'] = log_path
        return f(*args, **kwargs)
    return wrapper
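# A minimal usage sketch of the decorator (the job name 'decorated_job' is
# hypothetical): lazylog generates the log file, configures the logger, and
# hands the file path to the wrapped function via the 'log_path' keyword.
@lazylog
def decorated_job(url, log_path=None):
    logger.info('running against {}'.format(url))
    return OrderedDict((('ret', 0), ('status', 'Success'), ('log_path', log_path)))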
def test_log_to_console(capsys):
    logger = lazy_logger.get_logger('noname')
    lazy_logger.log_to_console(logger)
    logger.debug('yo')
    out, err = capsys.readouterr()
    assert err[0:10] == time.strftime("%Y-%m-%d")
    assert err[26:] == 'noname - DEBUG - yo\n'
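# A hedged companion sketch, not part of the original suite: exercise
# log_to_rotated_file with an explicit file_name, as the jobs above do, and
# check that the record reaches that file. This assumes the handler writes to
# exactly the given path and flushes per record; 'tmpdir' is pytest's fixture.
def test_log_to_rotated_file(tmpdir):
    log_path = str(tmpdir.join('test.log'))
    logger = lazy_logger.get_logger('noname')
    lazy_logger.log_to_rotated_file(logger=logger, file_name=log_path)
    logger.info('yo')
    with open(log_path) as f:
        assert 'yo' in f.read()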
def job(url):
    log_path = os.path.join(os.getcwd(), 'logs',
                            log_time() + '-' + str(uuid.uuid1()) + '.log')
    lazy_logger.log_to_console(logger)
    lazy_logger.log_to_rotated_file(logger=logger, file_name=log_path)
    logger.info('logger file: {0}'.format(log_path))
    crawler = Crawler()
    driver = crawler.driver()
    driver.get(url)
    page_source = driver.page_source
    soup = bs(page_source, "html.parser")
    print('{}'.format(soup.title))
    driver.close()
    ret = OrderedDict((('ret', 0), ('status', 'Success'), ('version', '0.1')))
    logs = get_log(file=log_path, title='crawler_job')
    ret.update(logs)
    return ret
import sys

import lazy_logger

logger = lazy_logger.get_logger(__name__)
lazy_logger.log_to_console(logger)
lazy_logger.log_to_rotated_file(logger)  # creates log file log.out


@logger.patch
def main():
    print('Hello World!')  # expected to be routed through the logger
    print('Hello stdout!', file=sys.stdout)  # expected to behave like a normal print


if __name__ == '__main__':
    main()