def get_desc(cityname, cityshort):
    """Fetch the real-time air-quality (AQI) summary for a city from aqicn.org.

    Args:
        cityname: Display name of the city, used in the rendered text.
        cityshort: Short slug used in the aqicn.org URL and the cache key.

    Returns:
        The cached text string on a cache hit, otherwise a
        ``(text, attaches)`` tuple where ``attaches`` wraps the PM2.5
        trend image for the chat client.

    NOTE(review): relies on module-level ``cache``, ``headers``, ``requests``,
    ``BeautifulSoup`` and ``gen_attachment`` — presumably defined/imported
    elsewhere in this file.  Also note the cache stores only ``text``, so a
    cache hit returns a plain string while a miss returns a tuple — callers
    apparently tolerate both.
    """
    if cache is not None:
        r = cache.get('airpollution.%s' % (cityshort))
        if r:
            return r
    title_link = 'http://aqicn.org/city/{}/cn/'.format(cityshort.lower())
    r = requests.get(title_link, headers=headers)
    r.encoding = 'utf-8'
    p = r.text
    soup = BeautifulSoup(p)
    # BUG FIX: the element id is "aqiwgtinfo"; the original searched for
    # "aqiwgtinfo'" (stray trailing quote), so find() returned None and
    # the .text access raised AttributeError on every call.
    aqiwgtinfo = soup.find(id="aqiwgtinfo").text
    aqivalue = soup.find("div", {'class': 'aqivalue'}).text
    min_pm25 = soup.find(id='min_pm25').text
    max_pm25 = soup.find(id='max_pm25').text
    text = '{0}实时空气质量指数(AQI): {1} {2} [最大:{3}, 最小:{4}]'.format(
        cityname.encode('utf-8'), aqiwgtinfo.encode('utf-8'), aqivalue,
        max_pm25, min_pm25)
    if cache is not None:
        # Cache the rendered text for 30 minutes.
        cache.set('airpollution.%s' % (cityshort), text, 1800)
    image_url = soup.find(id='tr_pm25').find(
        id='td_pm25').find('img').attrs.get('src')
    title = soup.find('title').text
    attaches = [
        gen_attachment(text, image_url, title=title, title_link=title_link)
    ]
    return text, attaches
def handle(data):
    """Render a 3-day weather forecast for the city mentioned in *data*.

    Returns:
        ``(text, attaches)`` — a newline-joined forecast string plus one
        attachment (with weather icon) per day, or a scolding string when
        no city could be extracted from the message.

    NOTE(review): depends on module-level ``current_app``, ``get_city``,
    ``weather``, ``TEMPERATURE_REGEX``, ``DAY``, ``check_time`` and
    ``gen_attachment``.
    """
    app = current_app
    if app is None:
        # Fallback Baidu API key for use outside an app context.
        ak = '18691b8e4206238f331ad2e1ca88357e'
    else:
        ak = app.config.get('BAIDU_AK')
    city = get_city(data)
    if not city:
        return '不会自己去看天气预报啊'
    res = weather(ak, city)[:3]
    ret = []
    attaches = []
    for idx, day in enumerate(res):
        if idx == 0:
            # Today's entry embeds the current temperature in its date field.
            current = TEMPERATURE_REGEX.search(day['date']).groups()[0]
            text = u'{0}: {1} {2} {3} 温度: {4}'.format(
                DAY[idx], current, day['weather'], day['wind'],
                day['temperature'])
        else:
            text = u'{0}: {1} {2} 温度: {3}'.format(
                DAY[idx], day['weather'], day['wind'], day['temperature'])
        ret.append(text)
        # BUG FIX: both ternary branches selected 'dayPictureUrl'; at night
        # the Baidu API's 'nightPictureUrl' icon should be used instead.
        # (Also renamed the local from `type`, which shadowed the builtin.)
        pic_key = ('dayPictureUrl' if check_time() == 'day'
                   else 'nightPictureUrl')
        attaches.append(gen_attachment(text, day[pic_key], image_type='thumb',
                                       title=u'{}天气预报'.format(city),
                                       title_link=''))
    return '\n'.join(ret), attaches
def get_desc(cityname, cityshort, cache=None, app=None):
    """Fetch the real-time air-quality (AQI) summary for a city from aqicn.org.

    Args:
        cityname: Display name of the city, used in the rendered text.
        cityshort: Short slug used in the aqicn.org URL and the cache key.
        cache: Optional cache object with ``get``/``set``; results are
            cached for 30 minutes.
        app: Passed through to ``gen_attachment`` for image handling.

    Returns:
        The cached text string on a cache hit, otherwise a
        ``(text, attaches)`` tuple.  NOTE(review): a hit returns a plain
        string while a miss returns a tuple — callers apparently tolerate
        both.
    """
    if cache is not None:
        r = cache.get('airpollution.%s' % (cityshort))
        if r:
            return r
    title_link = 'http://aqicn.org/city/{}/cn/'.format(cityshort.lower())
    r = requests.get(title_link, headers=headers)
    r.encoding = 'utf-8'
    p = r.text
    soup = BeautifulSoup(p)
    # BUG FIX: the element id is "aqiwgtinfo"; the original searched for
    # "aqiwgtinfo'" (stray trailing quote), so find() returned None and
    # the .text access raised AttributeError on every call.
    aqiwgtinfo = soup.find(id="aqiwgtinfo").text
    aqivalue = soup.find("div", {'class': 'aqivalue'}).text
    min_pm25 = soup.find(id='min_pm25').text
    max_pm25 = soup.find(id='max_pm25').text
    text = '{0}实时空气质量指数(AQI): {1} {2} [最大:{3}, 最小:{4}]'.format(
        cityname.encode('utf-8'), aqiwgtinfo.encode('utf-8'), aqivalue,
        max_pm25, min_pm25)
    if cache is not None:
        # Cache the rendered text for 30 minutes.
        cache.set('airpollution.%s' % (cityshort), text, 1800)
    image_url = soup.find(id='tr_pm25').find(id='td_pm25').find(
        'img').attrs.get('src')
    title = soup.find('title').text
    attaches = [gen_attachment(text, image_url, app=app, title=title,
                               title_link=title_link)]
    return text, attaches
def handle(data, cache=None, app=None):
    """Render the current weather for the city mentioned in *data*.

    Args:
        data: Incoming chat message payload; the city name is extracted
            from it via ``get_city``.
        cache: Accepted for plugin-interface uniformity; not used here.
        app: Flask-style app whose config supplies the Baidu API key.

    Returns:
        ``(text, attaches)`` — a one-line summary plus a thumbnail
        attachment, or a scolding string when no city was found.
    """
    if app is None:
        # Fallback Baidu API key for use outside an app context.
        ak = "18691b8e4206238f331ad2e1ca88357e"
    else:
        ak = app.config.get("BAIDU_AK")
    city = get_city(data)
    if not city:
        return "不会自己去看天气预报啊"
    res = weather(ak, city)[0]
    # Today's entry embeds the current temperature in its date field.
    current = TEMPERATURE_REGEX.search(res["date"]).groups()[0]
    text = u"当前: {0} {1} {2} 温度: {3}".format(current, res["weather"],
                                              res["wind"],
                                              res["temperature"])
    # BUG FIX: both ternary branches selected "dayPictureUrl"; at night the
    # Baidu API's "nightPictureUrl" icon should be used instead.  (Also
    # renamed the local from `type`, which shadowed the builtin.)
    pic_key = "dayPictureUrl" if check_time() == "day" else "nightPictureUrl"
    attaches = [gen_attachment(text, res[pic_key], image_type="thumb",
                               title=u"{}天气预报".format(city),
                               title_link="")]
    return text, attaches
def get_content(channel):
    """Yield ``(text, attachment)`` pairs for news items in *channel*.

    The channel name is mapped to an API category via ``CHANNEL_MAPS``
    and the feed is fetched live; one pair is yielded per article.

    NOTE(review): the format string renders 赞 from ``bury_count`` and 踩
    from ``digg_count`` — possibly swapped; verify against the API docs.
    """
    category = CHANNEL_MAPS.get(channel)
    resp = requests.get(API.format(category, datetime2timestamp()))
    for item in resp.json()['data']:
        text = (u'<{seo_url}|{title}> 赞{bury_count} 踩{digg_count} - '
                '{source} {datetime}').format(**item)
        # middle_image may be either a bare URL string or a dict with 'url'.
        image_url = item.get('middle_image', '')
        if isinstance(image_url, dict):
            image_url = image_url['url']
        attach = gen_attachment(trunc_utf8(item['abstract']), image_url,
                                image_type='thumb', title=item['title'],
                                title_link=item['seo_url'], fallback=False)
        yield text, attach
def get_later_movie_info(city):
    """Yield ``(text, attachment)`` pairs for movies opening soon in *city*."""
    page = requests.get(LATER_URL.format(city))
    soup = BeautifulSoup(page.text)
    for item in soup.find(id="showing-soon").findAll("div", {"item"}):
        anchor = item.find("h3").find("a")
        url = anchor.attrs["href"]
        title = anchor.text
        content = "|".join(li.text for li in item.findAll("li")[:4])
        raw_src = item.find("a").find("img").attrs.get("src", "")
        # Re-upload the poster: the source site hotlink-protects images
        # (same workaround used in the other movie helpers below).
        image_url = upload_image(raw_src, "thumb")
        yield u"<{url}|{title}> {content}".format(**locals()), gen_attachment(
            content, image_url, image_type="thumb", title=title, title_link=url
        )
def get_later_movie_info(city, app):
    """Yield ``(text, attachment)`` pairs for movies opening soon in *city*.

    *app* is forwarded to ``upload_image`` for image storage access.
    """
    page = requests.get(LATER_URL.format(city))
    soup = BeautifulSoup(page.text)
    upcoming = soup.find(id='showing-soon').findAll('div', {'item'})
    for entry in upcoming:
        link = entry.find('h3').find('a')
        url = link.attrs['href']
        title = link.text
        content = '|'.join(li.text for li in entry.findAll('li')[:4])
        # Re-upload the poster: the source site hotlink-protects images
        # (same workaround used in the other movie helpers).
        image_url = upload_image(
            entry.find('a').find('img').attrs.get('src', ''), 'thumb', app)
        yield u'<{url}|{title}> {content}'.format(**locals()), gen_attachment(
            content, image_url, image_type='thumb', title=title,
            title_link=url)
def get_later_movie_info(city):
    """Yield ``(text, attachment)`` pairs for movies opening soon in *city*."""
    response = requests.get(LATER_URL.format(city))
    soup = BeautifulSoup(response.text)
    section = soup.find(id='showing-soon')
    for card in section.findAll('div', {'item'}):
        heading = card.find('h3').find('a')
        url = heading.attrs['href']
        title = heading.text
        details = card.findAll('li')[:4]
        content = '|'.join(li.text for li in details)
        # Re-upload the poster: the source site hotlink-protects images.
        image_url = upload_image(
            card.find('a').find('img').attrs.get('src', ''), 'thumb')
        yield u'<{url}|{title}> {content}'.format(**locals()), gen_attachment(
            content, image_url, image_type='thumb', title=title,
            title_link=url)
def handle(data, app=None, **kwargs):
    """Render the current weather for the city mentioned in *data*.

    Args:
        data: Incoming chat message payload; the city name is extracted
            from it via ``get_city``.
        app: Flask-style app whose config supplies the Baidu API key.
        **kwargs: Ignored; accepted for plugin-interface uniformity.

    Returns:
        ``(text, attaches)`` — a one-line summary plus a thumbnail
        attachment, or a scolding string when no city was found.
    """
    if app is None:
        # Fallback Baidu API key for use outside an app context.
        ak = '18691b8e4206238f331ad2e1ca88357e'
    else:
        ak = app.config.get('BAIDU_AK')
    city = get_city(data)
    if not city:
        return '不会自己去看天气预报啊'
    res = weather(ak, city)[0]
    # Today's entry embeds the current temperature in its date field.
    current = TEMPERATURE_REGEX.search(res['date']).groups()[0]
    text = u'当前: {0} {1} {2} 温度: {3}'.format(
        current, res['weather'], res['wind'], res['temperature'])
    # BUG FIX: both ternary branches selected 'dayPictureUrl'; at night the
    # Baidu API's 'nightPictureUrl' icon should be used instead.  (Also
    # renamed the local from `type`, which shadowed the builtin.)
    pic_key = 'dayPictureUrl' if check_time() == 'day' else 'nightPictureUrl'
    attaches = [gen_attachment(text, res[pic_key], image_type='thumb',
                               title=u'{}天气预报'.format(city),
                               title_link='')]
    return text, attaches
def get_current_movie_info(city):
    """Yield ``(text, attachment)`` pairs for movies now showing in *city*.

    At most 10 movies are yielded.
    """
    r = requests.get(CURRENT_URL.format(city))
    soup = BeautifulSoup(r.text)
    items = soup.find(id="nowplaying").find(
        "ul", {"class": "lists"}).findAll("li", {"class": "poster"})
    # FIX: the original used a counter with `continue` once 10 items were
    # collected, pointlessly walking the remainder of the list; slicing
    # yields the identical output and stops the work at the cap.
    for i in items[:10]:
        img = i.find("img")
        title = img.attrs.get("alt", "")
        content = "|".join([li.text for li in i.findAll("li")[:4]])
        url = i.find("a").attrs.get("href", "")
        # Re-upload the poster: the source site hotlink-protects images.
        image_url = upload_image(img.attrs.get("src", ""), "thumb")
        yield u"<{url}|{title}>".format(**locals()), gen_attachment(
            content, image_url, image_type="thumb", title=title,
            title_link=url
        )
def get_current_movie_info(city, app):
    """Yield ``(text, attachment)`` pairs for movies now showing in *city*.

    At most 10 movies are yielded; *app* is forwarded to ``upload_image``.
    """
    r = requests.get(CURRENT_URL.format(city))
    soup = BeautifulSoup(r.text)
    items = soup.find(id='nowplaying').find('ul', {'class': 'lists'}).findAll(
        'li', {'class': 'poster'})
    # FIX: the original used a counter with `continue` once 10 items were
    # collected, pointlessly walking the remainder of the list; slicing
    # yields the identical output and stops the work at the cap.
    for i in items[:10]:
        img = i.find('img')
        title = img.attrs.get('alt', '')
        content = '|'.join([li.text for li in i.findAll('li')[:4]])
        url = i.find('a').attrs.get('href', '')
        # Re-upload the poster: the source site hotlink-protects images.
        image_url = upload_image(img.attrs.get('src', ''), 'thumb', app)
        yield u'<{url}|{title}>'.format(**locals()), gen_attachment(
            content, image_url, image_type='thumb', title=title,
            title_link=url)
def get_current_movie_info(city):
    """Yield ``(text, attachment)`` pairs for movies now showing in *city*.

    At most 10 movies are yielded.
    """
    r = requests.get(CURRENT_URL.format(city))
    soup = BeautifulSoup(r.text)
    items = soup.find(id='nowplaying').find('ul', {'class': 'lists'}).findAll(
        'li', {'class': 'poster'})
    # FIX: the original used a counter with `continue` once 10 items were
    # collected, pointlessly walking the remainder of the list; slicing
    # yields the identical output and stops the work at the cap.
    for i in items[:10]:
        img = i.find('img')
        title = img.attrs.get('alt', '')
        content = '|'.join([li.text for li in i.findAll('li')[:4]])
        url = i.find('a').attrs.get('href', '')
        # Re-upload the poster: the source site hotlink-protects images.
        image_url = upload_image(img.attrs.get('src', ''), 'thumb')
        yield u'<{url}|{title}>'.format(**locals()), gen_attachment(
            content, image_url, image_type='thumb', title=title,
            title_link=url)
def get_business_info(self, business, details=False):
    """Format a Dianping business record into ``(text, attachment)``.

    Args:
        business: dict from the Dianping API; only the URL, distance,
            name, address, telephone and photo fields are rendered.
        details: kept for interface compatibility — the extra grade/price
            fields it used to read were never rendered.

    Returns:
        ``(text, attach)`` — a chat-formatted summary line plus an
        attachment carrying the business photo.
    """
    url = business['business_url']        # 商户页面URL链接
    distance = business['distance']       # 商户与参数坐标的距离,单位为米
    name = real_name(business['name'])    # 商户名
    address = business['address']         # 地址
    telephone = business['telephone']     # 电话
    photo_url = business['photo_url']
    # FIX: removed unused lookups (business_id — which also shadowed the
    # builtin `id` — coupon/deals descriptions, branch name, avg_rating,
    # and the `details`-only grade/price fields): none of them appeared
    # in the output, matching the cleaned-up variant of this method.
    text = u'<{0}|{1}> {2} {3} 距离: {4} '.format(
        url, name, address, telephone, distance)
    attach = gen_attachment(
        u'{0} {1} 距离: {2}'.format(address, telephone, distance),
        photo_url, image_type='thumb', title=name, title_link=url)
    return text, attach
def get_business_info(self, business, details=False):
    """Render a Dianping business record as ``(text, attachment)``.

    Args:
        business: dict from the Dianping API; the URL, distance, name,
            address, telephone and photo fields are rendered.
        details: when true, additionally reads the product grade field
            (fetched but not rendered).

    Returns:
        ``(text, attach)`` — a chat-formatted summary line plus an
        attachment carrying the business photo.
    """
    url = business['business_url']        # business page link
    distance = business['distance']       # meters from the query coordinates
    name = real_name(business['name'])
    address = business['address']
    telephone = business['telephone']
    photo_url = business['photo_url']
    if details:
        # Taste grade, 1 (poor) .. 5 (excellent); looked up but unused.
        product_grade = business['product_grade']  # noqa
    summary = u'{0} {1} 距离: {2}'.format(address, telephone, distance)
    text = u'<{0}|{1}> {2} {3} 距离: {4} '.format(
        url, name, address, telephone, distance)
    attach = gen_attachment(summary, photo_url, image_type='thumb',
                            title=name, title_link=url)
    return text, attach