def fetch_apod():
    """Fetch today's Astronomy Picture of the Day from the Chinese mirror.

    Scrapes the APOD page for the current date and extracts the title,
    author line, article body, translator credit and picture URL.

    Returns:
        dict with keys 'title', 'article', 'author', 'translate',
        'picurl', 'cn_url' and 'en_url', or None when the HTTP request
        fails or the page cannot be parsed.
    """
    # APOD page URLs embed the date as YYMMDD (two-digit year).
    date = datetime.datetime.now().strftime('%y%m%d')
    cn_url = consts.apod_cn_base_url % date
    en_url = consts.apod_en_base_url % date
    try:
        resp = httputils.get_dict_sync(url=cn_url)
        data = BeautifulSoup(resp.body, 'lxml')
        # The second <center> holds the title (inside <b>) followed by
        # the author text; removing the title leaves the author.
        title = replace_blanks(data.find_all('center')[1].find('b').text)
        author = replace_blanks(data.find_all('center')[1].text).replace(
            title, '').strip()
        # Article body: the top-level <p> children of <body>, with the
        # leading "\u8aaa\u660e:" ("Explanation:") label stripped.
        article = replace_blanks('\n'.join([
            p.text
            for p in data.find('body').findChildren(name='p', recursive=False)
        ]).replace('\u8aaa\u660e:', ''))
        # Translator credit starts at "\u7ffb\u8b6f" in the last <center>.
        t = data.find_all('center')[-1].text
        translate = replace_blanks(t[t.find('\u7ffb\u8b6f'):])
        picurl = consts.apod_pic_base_url + data.find('center').find(
            'img').get('src')
        res = {
            'title': title,
            'article': article,
            'author': author,
            'translate': translate,
            'picurl': picurl,
            'cn_url': cn_url,
            'en_url': en_url
        }
        logging.info('finish to fetch apod: %s', res)
        return res
    except (AttributeError, IndexError, TypeError):
        # IndexError: fewer <center> elements than the layout expects.
        # TypeError: <img> without a 'src' attribute (get() returns None,
        # which breaks the string concatenation above).
        logging.error('fail to fetch apod: parse error')
        return None
    except tornado.web.HTTPError:
        logging.error('fail to fetch apod: http error')
        return None
def fetch_apod():
    """Scrape the daily Astronomy Picture of the Day (Chinese mirror).

    Builds today's page URLs, downloads the Chinese page and pulls out
    the title, author, article text, translator credit and picture URL.

    Returns:
        dict describing today's picture, or None on an HTTP failure or
        when the expected page structure is missing.
    """
    today = datetime.datetime.now().strftime('%y%m%d')
    cn_url = consts.apod_cn_base_url % today
    en_url = consts.apod_en_base_url % today
    try:
        response = httputils.get_dict_sync(url=cn_url)
        soup = BeautifulSoup(response.body, 'lxml')
        centers = soup.find_all('center')
        # Second <center>: title in <b>, the remainder is the author.
        header = centers[1]
        title = replace_blanks(header.find('b').text)
        author = replace_blanks(header.text).replace(title, '').strip()
        # Top-level paragraphs form the article; drop the label prefix.
        paragraphs = soup.find('body').findChildren(name='p',
                                                    recursive=False)
        joined = '\n'.join(p.text for p in paragraphs)
        article = replace_blanks(joined.replace('\u8aaa\u660e:', ''))
        # Translator credit lives at the tail of the last <center>.
        footer_text = centers[-1].text
        translate = replace_blanks(
            footer_text[footer_text.find('\u7ffb\u8b6f'):])
        picurl = (consts.apod_pic_base_url +
                  soup.find('center').find('img').get('src'))
        result = {
            'title': title,
            'article': article,
            'author': author,
            'translate': translate,
            'picurl': picurl,
            'cn_url': cn_url,
            'en_url': en_url
        }
        logging.info('finish to fetch apod: %s', result)
        return result
    except AttributeError:
        logging.error('fail to fetch apod: parse error')
        return None
    except tornado.web.HTTPError:
        logging.error('fail to fetch apod: http error')
        return None
def get_lastest_news():
    """Fetch the latest news list from the WeChat news endpoint.

    Builds a signed request (appid + signature added in place by
    security.add_sign) and decodes the JSON response.

    Returns:
        The response's 'data' payload on success; None when the HTTP
        status is not 200 or the API reports a non-zero err_code.
    """
    data = {'appid': consts.appid}
    # add_sign mutates `data`, attaching the request signature.
    security.add_sign(data, consts.sitekey)
    resp = httputils.get_dict_sync(url=consts.wechat_news_url, data=data)
    # Guard clauses keep the success path flat instead of the original
    # else-after-return nesting.
    if resp.code != 200:
        logging.error('fail to get lastest news')
        return None
    resp_data = json.loads(resp.body.decode('utf8'))
    if resp_data['err_code'] != 0:
        logging.error('fail to get lastest news: %s: %s',
                      resp_data['err_code'], resp_data['err_msg'])
        return None
    logging.info('finish to get lastest news: %s', resp_data['data'])
    return resp_data['data']
def get_lastest_news():
    """Retrieve the most recent news entries from the WeChat service.

    Sends a signed lookup request; on success returns the 'data' field
    of the JSON body, otherwise logs the failure and returns None.
    """
    payload = {'appid': consts.appid}
    # The signature is attached to the payload in place.
    security.add_sign(payload, consts.sitekey)
    response = httputils.get_dict_sync(url=consts.wechat_news_url,
                                       data=payload)
    if response.code != 200:
        logging.error('fail to get lastest news')
        return None
    body = json.loads(response.body.decode('utf8'))
    err_code = body['err_code']
    if err_code != 0:
        logging.error('fail to get lastest news: %s: %s',
                      err_code, body['err_msg'])
        return None
    logging.info('finish to get lastest news: %s', body['data'])
    return body['data']