def parse_page(self, response):
    """Parse a QQ video-list page and emit one parse_episode Request per
    video, carrying extracted metadata merged with the incoming order dict.

    Bug fixed: `order` was referenced below but never defined, raising
    NameError on every call; the title XPath selected a nonexistent
    `@text` attribute instead of the node text.
    """
    try:
        logging.log(logging.INFO, 'page:%s' % response.request.url)
        meta = response.request.meta
        # BUG FIX: 'order' was used by d.update() but never read from meta.
        # Prefer the 'order' key (as in the sibling list parser) and fall
        # back to 'cat', which this callback was already reading.
        # TODO(review): confirm which key the scheduling side actually sets.
        order = meta.get('order', meta.get('cat'))
        items = []
        qq_v = response.xpath('//div[@class="mod_cont"]/ul/li')
        for v in qq_v:
            urls = v.xpath('./h6/a/@href').extract()
            # BUG FIX: '@text' selects a (nonexistent) attribute named
            # "text"; text() selects the anchor text as intended.
            titles = v.xpath('./h6/a/text()').extract()
            thumb_urls = v.xpath('./a/img/@src').extract()
            durations = v.xpath(
                './a/div/span[@class="mod_version"]/text()').extract()
            playeds = v.xpath('./p/span/text()').extract()
            title = titles[0] if titles else None
            thumb_url = thumb_urls[0] if thumb_urls else None
            duration = Util.get_qq_duration(
                durations[0]) if durations else None
            played = Util.normalize_played(Util.normalize_vp(
                playeds[0])) if playeds else None
            if urls:
                r = Request(url=urls[0], callback=self.parse_episode)
                d = {
                    'title': title,
                    'thumb_url': thumb_url,
                    'duration': duration,
                    'played': played
                }
                d.update(order)
                r.meta.update({'order': d})
                items.append(r)
        return items
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def video_about_parse(self, response):
    """Parse a YouTube channel "about" page into a UserItem.

    Bug fixed: the collected items were built but never returned, so
    every UserItem was silently discarded; also migrated the Py2-only
    `except Exception, e` to the portable `as` form.
    """
    items = []
    try:
        show_id = response.xpath(
            '//meta[@itemprop="channelId"]/@content').extract()
        user_name = response.xpath(
            '//span[@class="qualified-channel-title-text"]/a/text()'
        ).extract()
        fans = response.xpath('//ul[@class="about-stats"]/li').re(
            re.compile(r'<li.*>.*<b>([\d|,]*)</b>.*subscribers.*</li>',
                       re.S))
        played = response.xpath('//ul[@class="about-stats"]/li').re(
            re.compile(r'<li.*>.*<b>([\d|,]*)</b>.*views.*</li>', re.S))
        intro = response.xpath(
            '//div[@class="about-description branded-page-box-padding"]/descendant-or-self::*/text()'
        ).extract()
        if show_id:
            user_item = UserItem()
            user_item['show_id'] = show_id[0]
            if user_name:
                user_item['user_name'] = user_name[0]
            if fans:
                user_item['fans'] = Util.normalize_played(fans[0])
            if played:
                user_item['played'] = Util.normalize_played(played[0])
            if intro:
                user_item['intro'] = "".join(intro).strip()
            user_item['spider_id'] = self.spider_id
            user_item['site_id'] = self.site_id
            # strip the trailing '/about' to recover the channel URL
            user_item['url'] = response.request.url[:-len('/about')]
            items.append(user_item)
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
    return items
def parse_owner(self, response):
    """Extract a Youku user's profile page into a one-element UserItem list.

    Bails out (returns None) when the encoded owner id cannot be found,
    since the item would be unusable downstream.
    """
    try:
        logging.log(logging.INFO, "owner:%s" % response.request.url)
        result = []
        user = UserItem()
        # ids live in inline scripts inside <head>
        head_scripts = response.xpath('/html/head/script')
        matched_owner = head_scripts.re('ownerId = \"(\d+)\"')
        matched_show = head_scripts.re('ownerEncodeid = \'(.+)\'')
        if matched_owner:
            user['owner_id'] = matched_owner[0]
        if not matched_show:
            return
        user['show_id'] = matched_show[0]
        # user profile block
        profile = response.xpath('//div[@class="profile"]')
        if profile:
            names = profile.xpath(
                './div[@class="info"]/div[@class="username"]/a[1]/@title'
            ).extract()
            plays = profile.xpath(
                './div[@class="state"]/ul/li[@class="vnum"]/em/text()'
            ).extract()
            followers = profile.xpath(
                './div[@class="state"]/ul/li[@class="snum"]/em/text()'
            ).extract()
            if names:
                user['user_name'] = names[0]
            if plays:
                user['played'] = Util.normalize_played(
                    Util.normalize_vp(plays[0]))
            if followers:
                user['fans'] = Util.normalize_vp(followers[0])
        # self-introduction block
        intro_box = response.xpath('//div[@class="YK-profile"]')
        if intro_box:
            desc = intro_box.xpath(
                './div[@class="userintro"]/div[@class="desc"]/p[2]/text()'
            ).extract()
            if desc:
                user['intro'] = ''.join(desc)
        # video count, e.g. "(123)" in the section title
        video_total = None
        home = response.xpath('//div[@class="YK-home"]')
        if home:
            counts = home.xpath(
                'div[1]/div/div/div/div[@class="title"]/span/a/text()'
            ).re(u'\((\d+)\)')
            if counts:
                video_total = counts[0]
        user['vcount'] = video_total
        user['spider_id'] = self.spider_id
        user['site_id'] = self.site_id
        user['url'] = response.request.url
        result.append(user)
        return result
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def parse_vpaction(self, response):
    """Read the total-play count off the vpaction page, then chain a
    request that looks up the play length for the same show."""
    try:
        logging.log(logging.INFO, 'vpaction:%s' % response.request.url)
        ep = response.request.meta['item']
        counts = response.xpath(
            '//ul[@class="player_info"]/li[@class="sum"]/text()').extract()
        if counts:
            raw = counts[0].replace('总播放:', '')
            ep['played'] = Util.normalize_played(Util.normalize_vp(raw))
        sid = ep['show_id']
        return Request(url=self.playlength_url + sid,
                       callback=self.parse_playlength,
                       meta={'item': ep})
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse(self, response):
    """Walk a Youku search-result list and queue one parse_episode
    request per video, forwarding the list-level metadata via meta."""
    try:
        logging.log(logging.INFO, "parse:%s" % response.request.url)
        meta = response.request.meta
        audit = meta['audit']
        cat_name = meta['cat_name']
        kw_id = meta['kw_id']
        priority = meta['priority']
        requests = []
        video_divs = response.xpath(
            '//div[@class="sk-vlist clearfix"]/div[@class="v"]')
        for div in video_divs:
            url = div.xpath(
                './div[@class="v-meta va"]/div[@class="v-meta-title"]/a/@href'
            ).extract()
            thumbs = div.xpath('./div[@class="v-link"]/a/@href').extract()
            thumb_url = thumbs[0] if thumbs else None
            # a bare ykimg host means "no real thumbnail"
            if thumb_url == 'http://g1.ykimg.com/':
                thumb_url = None
            plays = div.xpath(
                './div[@class="v-meta va"]/div[@class="v-meta-entry"]/div/label[text()="%s"]/../span/text()'
                % u'播放: ').extract()
            played = int(Util.normalize_played(plays[0])) if plays else None
            if url:
                requests.append(
                    Request(url=url[0],
                            callback=self.parse_episode,
                            meta={'audit': audit,
                                  'thumb_url': thumb_url,
                                  'played': played,
                                  'cat_name': cat_name,
                                  'kw_id': kw_id,
                                  'priority': priority}))
        return requests
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def parse_page(self, response):
    """Parse a ku6 search-result page: queue per-video detail requests
    and the next result page (bounded by max_search_page).

    Bug fixed: the next-page meta used `page + 1`, which raises
    TypeError when the page number arrives as a string (the guard above
    already has to coerce it with int()); it now increments int(page).
    """
    try:
        log.msg('parse_page: %s' % response.request.url)
        page = response.request.meta['page']
        kw_id = response.request.meta['kw_id']
        if int(page) > int(self.max_search_page):
            return
        items = []
        # video items
        titems = response.xpath(
            '//div[@id="search_list"]/div[2]/div[2]/ul[1]/li')
        for item in titems:
            turl = item.xpath('./h3[1]/a/@href').extract()
            if turl:
                show_id = Util.get_ku6_showid(turl[0])
                items.append(Request(url=turl[0].strip(),
                                     callback=self.parse,
                                     meta={'kw_id': kw_id,
                                           'show_id': show_id}))
        # pagination
        next_page = response.xpath(
            "//div[@id='search_list']/div[2]/div[2]/div/a[text()='%s']/@href"
            % u'下一页').extract()
        if next_page:
            items.append(Request(url=next_page[0],
                                 callback=self.parse_page,
                                 meta={'page': int(page) + 1,
                                       'kw_id': kw_id}))
        return items
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Build an EpisodeItem for a QQ video page from the metadata dict
    carried in request.meta['order']."""
    try:
        logging.log(logging.INFO, 'episode:%s' % response.request.url)
        order = response.request.meta['order']
        episode = EpisodeItem()
        episode['show_id'] = Util.get_qq_showid(response.request.url)
        # 'user' and 'show_id' in the order dict map onto different item
        # fields; every other key is copied through unchanged.
        for key, value in order.items():
            if key == 'user':
                episode['category'] = value
            elif key == 'show_id':
                episode['owner_show_id'] = value
            else:
                episode[key] = value
        episode['spider_id'] = self.spider_id
        episode['site_id'] = self.site_id
        episode['url'] = response.request.url
        episode['format_id'] = self.format_id
        return [episode]
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def process_item(self, item, spider):
    """Drop EpisodeItems that are too old or too rarely played.

    Returning None (implicit) drops the item from the pipeline; any
    item this pipeline does not apply to is passed through untouched.
    """
    pipeline_on = item and 'NewestItemPipeline' in getattr(
        spider, 'pipelines', [])
    if not pipeline_on:
        return item
    if not isinstance(item, EpisodeItem):
        return item
    upload = item['upload_time'] if 'upload_time' in item else None
    if not upload:
        log.msg("Drop null upload item: %s" % item['show_id'])
        return
    delta = Util.get_delta_minutes(datetime.now(), upload)
    if delta >= self._up_thres:
        log.msg("Drop late upload item: %s" % item['show_id'])
        return
    played = item['played'] if 'played' in item else None
    if not played:
        log.msg("Drop null played item: %s" % item['show_id'])
        return
    if int(played) < int(self._pl_thres):
        log.msg("Drop less played item: %s" % item['show_id'])
        return
    return item
def parse_episode(self, response):
    """Parse a LeTV episode page into an EpisodeItem.

    Bug fixed: the keywords meta was sliced with len(title[0]) even when
    no title had been extracted, raising IndexError and losing the whole
    item; tags are now only derived when a title is present. Also
    removed the dead `albumid` local (computed, never used).
    """
    try:
        log.msg('parse_episode %s' % response.request.url)
        cat_name = response.request.meta['cat_name']
        thumb_url = response.request.meta['thumb']
        audit = response.request.meta['audit']
        lens = response.request.meta['lens']
        priority = response.request.meta['priority']
        items = []
        show_id = Util.get_letv_showid(response.request.url)
        # video info
        title = response.xpath(
            '//meta[@name="irTitle"]/@content').extract()
        upload_time = response.xpath(
            '//ul[@class="info_list"]//em[@id="video_time"]/text()'
        ).extract()
        tag_sel = response.xpath(
            '//meta[@name="keywords"]/@content').extract()
        ep_item = EpisodeItem()
        if title:
            ep_item['title'] = title[0]
        if show_id:
            ep_item['show_id'] = show_id
        if tag_sel and title:
            # keywords begin with "<title> " — drop that prefix, then
            # split on spaces and commas into a |-joined tag string
            tag_str = tag_sel[0][len(title[0]) + 1:]
            if tag_str:
                tag_list = []
                for chunk in tag_str.split(' '):
                    tag_list.extend(chunk.split(','))
                ep_item['tag'] = "|".join([t.strip() for t in tag_list])
        if upload_time:
            ep_item['upload_time'] = upload_time[0].strip()
        if thumb_url:
            ep_item['thumb_url'] = thumb_url[0].strip()
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        ep_item['category'] = cat_name
        ep_item['format_id'] = '2'
        ep_item['audit'] = audit
        ep_item['priority'] = priority
        ep_item['duration'] = lens
        items.append(ep_item)
        return items
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_page(self, response):
    """Parse a QQ user video-list page; emit one parse_episode request
    per entry with the list metadata merged into the 'order' dict."""
    try:
        logging.log(logging.INFO, 'page:%s' % response.request.url)
        order = response.request.meta['order']
        requests = []
        rows = response.xpath(
            '//ul[@id="videolst_cont"]/li[@class="list_item"]')
        for row in rows:
            links = row.xpath('./strong/a/@href').extract()
            names = row.xpath('./strong/a/text()').extract()
            thumbs = row.xpath('./a/img/@src').extract()
            lengths = row.xpath('./a/span/em/text()').extract()
            plays = row.xpath(
                './div/span[@class="figure_info_play"]/span/text()'
            ).extract()
            stamps = row.xpath(
                './div/span[@class="figure_info_time"]/text()').extract()
            info = {
                'title': names[0] if names else None,
                'thumb_url': thumbs[0] if thumbs else None,
                'duration':
                    Util.get_qq_duration(lengths[0]) if lengths else None,
                'played':
                    Util.normalize_played(Util.normalize_vp(plays[0]))
                    if plays else None,
                'upload_time':
                    Util.get_qq_upload_time(stamps[0]) if stamps else None
            }
            if links:
                info.update(order)
                req = Request(url=links[0], callback=self.parse_episode)
                req.meta.update({'order': info})
                requests.append(req)
        return requests
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def start_requests(self):
    """Seed one list-page request per configured category."""
    try:
        requests = []
        for cat in self._cat_urls:
            list_url = self.request_url % Util.get_acfun_showid(cat['url'])
            requests.append(
                Request(url=list_url,
                        callback=self.parse_page,
                        meta={'cat_name': cat['cat_name'],
                              'audit': cat['audit'],
                              'priority': cat['priority']}))
        return requests
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Turn a Sohu video page into an EpisodeItem, using the metadata
    forwarded from the listing request."""
    try:
        log.msg('parse_episode %s' % response.request.url)
        meta = response.request.meta
        cat_name = meta['cat_name']
        thumb_url = meta['thumb']
        audit = meta['audit']
        lens = meta['lens']
        priority = meta['priority']
        episode = EpisodeItem()
        sid = Util.get_sohu_showid(response.request.url)
        if sid:
            episode['show_id'] = sid
        keywords = response.xpath(
            '//meta[@name="keywords"]/@content').extract()
        if keywords:
            episode['tag'] = keywords[0].strip()
        heading = response.xpath(
            '//div[@id="crumbsBar"]/div/div[@class="left"]/h2/text()'
        ).extract()
        if heading:
            episode['title'] = heading[0].strip()
        if thumb_url:
            episode['thumb_url'] = thumb_url[0].strip()
        episode['spider_id'] = self.spider_id
        episode['site_id'] = self.site_id
        episode['url'] = response.request.url
        episode['category'] = cat_name
        episode['format_id'] = '2'
        episode['audit'] = audit
        episode['priority'] = priority
        episode['duration'] = lens
        return [episode]
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_second(self, response):
    """Parse ku6's JSON video-detail endpoint into an EpisodeItem and
    chain a request to the HTML episode page (parse_episode).

    Cleanups: removed unused `sel` and `description` locals, replaced
    `len(x) != 0` checks with truthiness, and dropped the redundant
    second read of show_id from meta.
    """
    try:
        kw_id = response.request.meta['kw_id']
        show_id = response.request.meta['show_id']
        data = json.loads(response.body)['data']
        title = data['t']
        tag = data['tag'].replace(' ', '|').replace(',', '|').strip('|')
        upload_time = Util.timestamp2datetime(data['uploadtime'])
        thumb_url = data['picpath']
        # vtime looks like "<seconds>,…" — keep only the leading field
        duration = str(data['vtime']).split(',')[0]
        turl = "http://v.ku6.com/show/" + show_id + ".html"
        ep_item = EpisodeItem()
        ep_item['show_id'] = show_id
        if title:
            ep_item["title"] = title
        if tag:
            ep_item["tag"] = tag
        if upload_time:
            ep_item["upload_time"] = upload_time
        if turl:
            ep_item["url"] = turl
        if thumb_url:
            ep_item['thumb_url'] = thumb_url
        if duration:
            ep_item["duration"] = duration
        ep_item['kw_id'] = kw_id
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        return [Request(url=turl,
                        callback=self.parse_episode,
                        meta={'item': ep_item})]
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_vpaction(self, response):
    """Grab the total-play figure from the player-info block, then chain
    a play-length request for the same show."""
    try:
        ep = response.request.meta['item']
        hits = Selector(response).xpath(
            '//ul[@class="player_info"]/li[@class="sum"]/text()').extract()
        if hits:
            stripped = hits[0].replace('总播放:', '')
            ep['played'] = Util.normalize_played(
                Util.normalize_vp(stripped))
        return Request(url=self.playlength_url + ep['show_id'],
                       callback=self.parse_playlength,
                       meta={'item': ep})
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def vpaction_parse(self, response):
    """Pick up the total-play count and chain the play-length request.

    Bug fixed: the collected requests were never returned, so the crawl
    chain silently died here; also migrated the Py2-only
    `except Exception, e` to the portable `as` form.
    """
    items = []
    try:
        episode_item = response.request.meta['episode_item']
        vp = response.xpath('//div[@id="videodetailInfo"]/ul/li').re(
            u'<label>总播放数:</label><span.*>(.+)</span>')
        if vp:
            episode_item['played'] = Util.normalize_vp(vp[0])
        show_id = episode_item['show_id']
        if show_id:
            items.append(
                Request(url=self.playlength_url + show_id,
                        callback=self.playlength_parse,
                        meta={'episode_item': episode_item}))
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
    return items
def parse_media(self, response, **kwargs):
    """Parse a Sohu video page into an EpisodeItem.

    Bug fixed: removed a leftover Python-2 debug `print played, upload`
    statement, and guarded the success log against a missing title
    (indexing ep_item['title'] raised KeyError when no title was found).
    """
    items = []
    try:
        title = response.xpath(
            '//div[@id="crumbsBar"]/div/div[@class="left"]/h2/text()'
        ).extract()
        tag = response.xpath('//meta[@name="keywords"]/@content').extract()
        thumb = response.xpath('//script').re(',sCover: \'(.*)\'')
        upload = response.xpath('//script').re(',uploadTime: \'(.*)\'')
        description = response.xpath(
            '//p[@class="rel cfix"]/@title').extract()
        played = response.xpath(
            '//span[@class="vbtn vbtn-play"]/em/i/text()').extract()
        video_id = response.xpath('//script').re('vid = \'(\d+)\'')
        ep_item = EpisodeItem()
        if video_id:
            ep_item['video_id'] = video_id[0]
            ep_item['show_id'] = video_id[0]
        if title:
            ep_item['title'] = title[0]
        if tag:
            ep_item['tag'] = tag[0].strip().replace(',', '|')
        if upload:
            # page carries "YYYY-MM-DD HH:MM"; pad to full seconds
            ep_item['upload_time'] = upload[0] + ":00"
        if description:
            ep_item['description'] = description[0].strip()
        if thumb:
            ep_item['thumb_url'] = thumb[0]
        if played:
            ep_item['played'] = Util.normalize_played(played[0])
        ep_item['category'] = u"搞笑"
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        items.append(ep_item)
        log.msg("spider success, title:%s" % ep_item.get('title'),
                level=log.INFO)
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
    finally:
        return items
def parse_episode(self, response):
    """Build an EpisodeItem from a v1 video page.

    Bug fixed: the thumbnail forwarded in meta was indexed
    unconditionally (`thumb[0]`), so an empty list raised IndexError
    and the whole item was lost; it is now guarded like every other
    optional field. Dead commented-out code removed.
    """
    try:
        log.msg('parse_episode %s' % response.request.url)
        cat_name = response.request.meta['cat_name']
        audit = response.request.meta['audit']
        priority = response.request.meta['priority']
        thumb = response.request.meta['thumb']
        items = []
        show_id = Util.get_v1_showid(response.request.url)
        title = response.xpath('//meta[@name="title"]/@content').extract()
        tags = response.xpath(
            '//meta[@name="keywords"]/@content').extract()
        ep_item = EpisodeItem()
        if title:
            ep_item['title'] = title[0].strip()
        if show_id:
            ep_item['show_id'] = show_id
        if tags:
            ep_item['tag'] = tags[0].strip()
        if thumb:
            ep_item['thumb_url'] = thumb[0].strip()
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        ep_item['category'] = cat_name
        ep_item['format_id'] = '2'
        ep_item['audit'] = audit
        ep_item['priority'] = priority
        items.append(ep_item)
        return items
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Build an EpisodeItem from an iFeng video page, using the
    metadata forwarded by the listing request."""
    try:
        log.msg('parse_episode %s' % response.request.url)
        meta = response.request.meta
        cat_name = meta['cat_name']
        thumb = meta['thumb']
        audit = meta['audit']
        priority = meta['priority']
        lens = meta['lens']
        episode = EpisodeItem()
        sid = Util.get_ifeng_showid(response.request.url)
        if sid:
            episode['show_id'] = sid
        headline = response.xpath(
            '//head/meta[@property="og:title"]/@content').extract()
        if headline:
            episode['title'] = headline[0].strip()
        tag_links = response.xpath(
            '//div[@class="protag"]/a/text()').extract()
        if tag_links:
            episode['tag'] = '|'.join(tag_links)
        if thumb:
            episode['thumb_url'] = thumb[0].strip()
        published = response.xpath(
            '//div[@class="vTit_wrap"]/div/p/span[@class="data"]/text()'
        ).extract()
        if published:
            episode['upload_time'] = published[0]
        episode['spider_id'] = self.spider_id
        episode['site_id'] = self.site_id
        episode['url'] = response.request.url
        episode['category'] = cat_name
        episode['format_id'] = '2'
        episode['audit'] = audit
        episode['priority'] = priority
        episode['duration'] = lens
        return [episode]
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_page(self, response):
    """Walk a Youku "hottest" listing page: queue episode requests for
    videos above the popularity threshold and follow pagination up to
    max_search_page.

    Bug fixed: the next-page meta used `page + 1`, which raises
    TypeError when the page number arrives as a string (the guard above
    already coerces with int()); it now increments int(page).
    """
    try:
        log.msg('%s: %s' % (response.request.url,
                            response.request.meta['page']))
        cat_id = response.request.meta['cat_id']
        page = response.request.meta['page']
        if int(page) > int(self.max_search_page):
            return
        items = []
        sel = Selector(response)
        # video items
        yk_v = sel.xpath('//div[@class="yk-col4"]')
        for v in yk_v:
            url = v.xpath('./div/div[@class="v-link"]/a/@href').extract()
            pl = v.xpath(
                './div/div[@class="v-meta va"]/div[@class="v-meta-entry"]/span/text()'
            ).extract()
            if url and pl:
                pld = Util.normalize_played(pl[0])
                # keep only videos above the configured popularity bar
                if int(pld) >= int(self.hottest_played_threshold):
                    items.append(
                        Request(url=url[0],
                                callback=self.parse_episode,
                                meta={'cat_id': cat_id}))
        # pagination
        next_page = sel.xpath(
            '//div[@class="yk-pager"]/ul/li[@class="next"]/a/@href'
        ).extract()
        if next_page:
            items.append(
                Request(url=self.url_prefix + next_page[0],
                        callback=self.parse_page,
                        meta={'page': int(page) + 1,
                              'cat_id': cat_id}))
        return items
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_vpaction(self, response):
    """Read the total-play count from the video-detail block, then move
    on to the play-length lookup for the same show."""
    try:
        ep = response.request.meta['item']
        counts = response.xpath('//div[@id="videodetailInfo"]/ul/li').re(
            u'<label>总播放数:</label><span.*>(.+)</span>')
        if counts:
            ep['played'] = Util.normalize_vp(counts[0])
        return Request(url=self.playlength_url + ep['show_id'],
                       callback=self.parse_playlength,
                       meta={'item': ep})
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Build an EpisodeItem from a Tucao video page, using the metadata
    forwarded by the listing request."""
    try:
        log.msg('parse_episode %s' % response.request.url)
        meta = response.request.meta
        episode = EpisodeItem()
        sid = Util.get_tucao_showid(response.request.url)
        if sid:
            episode['show_id'] = sid
        heading = response.xpath(
            '//h1[@class="show_title"]/text()').extract()
        if heading:
            episode['title'] = heading[0].strip()
        keywords = response.xpath(
            '//meta[@name="keywords"]/@content').extract()
        if keywords:
            episode['tag'] = keywords[0].strip()
        thumb = meta['thumb']
        if thumb:
            episode['thumb_url'] = thumb[0].strip()
        episode['spider_id'] = self.spider_id
        episode['site_id'] = self.site_id
        episode['url'] = response.request.url
        episode['category'] = meta['cat_name']
        episode['format_id'] = '2'
        episode['audit'] = meta['audit']
        episode['priority'] = meta['priority']
        return [episode]
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def video_parse(self, response):
    """Parse a YouTube watch page into an EpisodeItem plus follow-ups.

    Queues the owner's /about page and a get_video_info request that
    fills in the remaining metadata.

    Bugs fixed: the items list was built but never returned; the second
    upload-date format was unreachable (time.strptime raises ValueError
    on mismatch instead of returning None, so the old `if not
    struct_time` fallback never ran); the /about request indexed
    owner_url[0] without checking it; Py2-only except syntax migrated.
    """
    items = []
    try:
        meta = response.request.meta
        kw_id = meta.get('kw_id')
        pg_id = meta.get('pg_id')
        cat_id = meta.get('cat_id')
        subject_id = meta.get('subject_id')
        show_id = Util.get_youtube_showid(response.request.url)
        if not show_id:
            return items
        # owner
        owner = response.xpath(
            '//div[@class="yt-user-info"]/a/@data-ytid').extract()
        owner_url = response.xpath(
            '//div[@class="yt-user-info"]/a/@href').extract()
        owner_show_id = None
        if owner:
            owner_show_id = owner[0]
            if owner_url:
                items.append(
                    Request(url=self.youtube_url_prefix + owner_url[0] +
                            "/about",
                            callback=self.video_about_parse))
        # video info
        title = response.xpath('//span[@id="eow-title"]/text()').extract()
        tag = response.xpath(
            './head/meta[@name="keywords"]/@content').extract()
        description = response.xpath(
            '//p[@id="eow-description"]/descendant-or-self::*/text()'
        ).extract()
        played = response.xpath(
            '//div[@class="watch-view-count"]/text()').extract()
        category = response.xpath(
            '//div[@id="watch-description"]//ul[@class="content watch-info-tag-list"]/li/a/text()'
        ).extract()
        upload = response.xpath(
            '//meta[@itemprop="datePublished"]/@content').extract()
        thumb_url = response.xpath(
            '//link[@itemprop="thumbnailUrl"]/@href').extract()
        # player token required by the get_video_info endpoint
        sts = re.search(r'\"sts\": ?(\d+)', response.body)
        ep_item = EpisodeItem()
        ep_item['show_id'] = show_id
        if owner_show_id:
            ep_item['owner_show_id'] = owner_show_id
        if title:
            ep_item['title'] = title[0].strip()
        if tag:
            ep_item['tag'] = tag[0].replace(', ', '|')
        if description:
            ep_item['description'] = "\n".join(description)
        if played:
            pld = Util.normalize_played(played[0])
            ep_item['played'] = pld if pld else '0'
        if kw_id:
            ep_item['kw_id'] = kw_id
        if pg_id:
            ep_item['pg_id'] = pg_id
        if cat_id:
            ep_item['cat_id'] = cat_id
        if subject_id:
            ep_item['subject_id'] = subject_id
        if thumb_url:
            ep_item['thumb_url'] = thumb_url[0]
        if category:
            # e.g. https://www.youtube.com/watch?v=lwy4qwaByVQ joins
            # multiple categories with '&'
            ep_item['category'] = category[0].strip().replace('&', '|')
        if upload:
            upload_str = upload[0].strip()
            struct_time = None
            # English page format first, then the Chinese one
            for fmt in ('%b %d, %Y', '%Y年%m月%d日'):
                try:
                    struct_time = time.strptime(upload_str, fmt)
                    break
                except ValueError:
                    continue
            if struct_time:
                ep_item['upload_time'] = time.strftime(
                    '%Y-%m-%d %H:%M:%S', struct_time)
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = Util.normalize_youtube_url(response.request.url)
        query = Util.encode({
            'video_id': ep_item['show_id'],
            'eurl': 'https://youtube.googleapis.com/v/' +
                    ep_item['show_id'],
            'sts': sts.groups()[0] if sts else ''})
        items.append(
            Request(url='http://www.youtube.com/get_video_info?' + query,
                    callback=self.video_other_info_parse,
                    meta={'item': ep_item}))
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
    return items
def parse_episode_youku(self, response):
    """Parse a Youku episode page into an EpisodeItem.

    Pulls owner/title/tag/upload/description out of the page, merges in
    the metadata forwarded by the listing request, and either chains a
    play-count (vpaction) request or emits the finished item directly.
    """
    try:
        logging.log(logging.INFO,
                    "episode_youku:%s" % response.request.url)
        pg_id = response.request.meta['pg_id']
        cat_name = response.request.meta['cat_name']
        site_id = response.request.meta['site_id']
        audit = response.request.meta['audit']
        priority = response.request.meta['priority']
        items = []
        # owner
        owner = response.xpath(
            '//div[@class="yk-userinfo"]/div[@class="user-name"]/a/@href'
        ).extract()
        owner_show_id = None
        if owner:
            owner_show_id = Util.get_owner(owner[0])
        # video info
        title = response.xpath(
            '//div[@class="base_info"]/h1/descendant-or-self::text()'
        ).extract()
        #category = response.xpath('//div[@class="base_info"]/div[@class="guide"]/div/a/text()').extract()
        scripts = response.xpath('//script[@type="text/javascript"]')
        video_id = scripts.re('videoId = \'(\d+)\'')
        tag = scripts.re('tags="(.+)"')
        upload = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@class="time"]/text()'
        ).extract()
        description = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@id="text_long"]/text()'
        ).extract()
        # href wrapping the total-play-count counter; used below for the
        # follow-up vpaction request
        vp_url = response.xpath(
            '//span[@id="videoTotalPV"]/../../@href').extract()
        ep_item = EpisodeItem()
        ep_item['show_id'] = Util.get_showid(response.request.url)
        if video_id:
            ep_item['video_id'] = video_id[0]
        if owner_show_id:
            ep_item['owner_show_id'] = owner_show_id
        if title:
            # titles arrive as several text nodes; join then trim
            t = "".join(title)
            t = t.strip("\n").strip()
            #ep_item['title'] = Util.strip_title("".join(title))
            ep_item['title'] = Util.strip_title(t)
        if tag:
            ep_item['tag'] = Util.unquote(tag[0]).rstrip('|')
        #if category:
        #    ep_item['category'] = category[0].replace(u'频道', '')
        # category comes from the listing metadata, not from the page
        ep_item['category'] = cat_name
        if upload:
            t = Util.get_upload_time(upload[0])
            if t:
                # turn the page's relative time into an absolute datetime
                ep_item['upload_time'] = Util.get_datetime_delta(
                    datetime.now(), t)
        if description:
            ep_item['description'] = description[0]
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = site_id
        ep_item['url'] = response.request.url
        ep_item['pg_id'] = pg_id
        ep_item['audit'] = audit
        ep_item['format_id'] = self.format_id
        ep_item['priority'] = priority
        if vp_url:
            # fetch the play count before emitting the item
            items.append(
                Request(url=vp_url[0],
                        callback=self.parse_vpaction,
                        meta={'item': ep_item}))
        else:
            items.append(ep_item)
        return items
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def parse_episode_iqiyi(self, response):
    """Parse an iQiyi episode page into an EpisodeItem.

    Tries several page layouts (hence the chained XPath fallbacks for
    title/upload-time/tag), then either chains a play-length request
    (when an album id is found in the page scripts) or emits the item.
    """
    try:
        # NOTE(review): the log label says "parse_youku_playlength" but
        # this parses an iQiyi episode page — looks like a copy/paste
        # label; confirm before relying on these log lines.
        logging.log(logging.INFO,
                    "parse_youku_playlength:%s" % response.request.url)
        pg_id = response.request.meta['pg_id']
        cat_name = response.request.meta['cat_name']
        site_id = response.request.meta['site_id']
        audit = response.request.meta['audit']
        priority = response.request.meta['priority']
        items = []
        # show_id
        show_id = Util.get_iqiyi_showid(response.request.url)
        # album id is embedded in inline script, e.g. "albumId: 123"
        albumid = response.selector.re(re.compile(r'albumId: ?(\d+)'))
        # video info — several historical page layouts, first match wins
        title = response.xpath(
            '//div[@class="play-tit-l"]/h2/descendant-or-self::*/text()'
        ).extract()
        if not title:
            title = response.xpath(
                '//div[@class="play-tit-l"]/h1/descendant-or-self::*/text()'
            ).extract()
        if not title:
            title = response.xpath(
                '//div[@class="mod-play-t**s"]/h1/descendant-or-self::*/text()'
            ).extract()
        if not title:
            title = response.xpath(
                '//div[@class="play-tit play-tit-oneRow play-tit-long"]/h1/descendant-or-self::*/text()'
            ).extract()
        #category = response.xpath('//div[@class="crumb_bar"]/span[1]/span/a[2]/text()').extract()
        #if not category:
        #    category = response.xpath('//div[@class="play-album-crumbs textOverflow"]/span[1]/a[2]/text()').extract()
        #if not category:
        #    category = response.xpath('//div[@class="crumb_bar"]/span[1]/a[2]/text()').extract()
        #if not category:
        #    category = response.xpath('//div[@class="mod-crumb_bar"]/span[1]/a[2]/text()').extract()
        upload_time = response.xpath(
            '//div[@class="crumb_bar"]/span[3]/span/text()').extract()
        if not upload_time:
            upload_time = response.xpath(
                '//div[@class="crumb_bar"]/span[2]/span/text()').extract()
        tag = response.xpath(
            '//span[@id="widget-videotag"]/descendant::*/text()').extract(
            )
        if not tag:
            tag = response.xpath(
                '//span[@class="mod-tags_item vl-block"]/descendant::*/text()'
            ).extract()
        if not tag:
            tag = response.xpath(
                '//div[@class="crumb_bar"]/span[2]/a/text()').extract()
        ep_item = EpisodeItem()
        if title:
            ep_item['title'] = "".join([t.strip() for t in title])
        if show_id:
            ep_item['show_id'] = show_id
        if tag:
            ep_item['tag'] = "|".join([t.strip() for t in tag])
        if upload_time:
            ep_item['upload_time'] = upload_time[0].strip()
        #if category:
        #    ep_item['category'] = category[0].strip()
        # category comes from the listing metadata, not from the page
        ep_item['category'] = cat_name
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = site_id
        ep_item['pg_id'] = pg_id
        ep_item['audit'] = audit
        ep_item['url'] = response.request.url
        ep_item['format_id'] = self.format_id
        ep_item['priority'] = priority
        if albumid:
            # fetch the play length first; the item rides along in meta
            items.append(
                Request(url=self.playlength_url + albumid[0],
                        callback=self.parse_playlength,
                        meta={
                            'item': ep_item,
                            'albumid': albumid[0]
                        }))
        else:
            items.append(ep_item)
        return items
    except Exception as e:
        logging.log(logging.ERROR, traceback.format_exc())
def parse(self, response):
    """Parse a Youku user home page into a UserItem and queue the
    user's video-list page, forwarding cust_para downstream."""
    try:
        log.msg(response.request.url, level=log.INFO)
        cust_para = response.request.meta['cust_para']
        results = []
        user = UserItem()
        # ids live in inline scripts inside <head>
        head_scripts = response.xpath('/html/head/script')
        owner_ids = head_scripts.re('ownerId = \"(\d+)\"')
        encoded_ids = head_scripts.re('ownerEncodeid = \'(.+)\'')
        if owner_ids:
            user['owner_id'] = owner_ids[0]
        if not encoded_ids:
            # without a show id the profile is unusable downstream
            return
        user['show_id'] = encoded_ids[0]
        # profile block
        profile = response.xpath('//div[@class="profile"]')
        if profile:
            names = profile.xpath(
                './div[@class="info"]/div[@class="username"]/a[1]/@title'
            ).extract()
            if names:
                user['user_name'] = names[0]
            plays = profile.xpath(
                './div[@class="state"]/ul/li[@class="vnum"]/em/text()'
            ).extract()
            if plays:
                user['played'] = Util.normalize_vp(plays[0])
            subs = profile.xpath(
                './div[@class="state"]/ul/li[@class="snum"]/em/text()'
            ).extract()
            if subs:
                user['fans'] = Util.normalize_vp(subs[0])
        # self-introduction block
        about = response.xpath('//div[@class="YK-profile"]')
        if about:
            desc = about.xpath(
                './div[@class="userintro"]/div[@class="desc"]/p[2]/text()'
            ).extract()
            if desc:
                user['intro'] = ''.join(desc)
        # video count, e.g. "(123)" in the section title
        user['vcount'] = '0'
        home = response.xpath('//div[@class="YK-home"]')
        if home:
            counted = home.xpath(
                'div[1]/div/div/div/div[@class="title"]/span/a/text()'
            ).re(u'\((\d+)\)')
            if counted:
                user['vcount'] = counted[0]
        user['spider_id'] = self.spider_id
        user['site_id'] = self.site_id
        results.append(user)
        # follow with the user's video listing
        results.append(
            Request(url=response.request.url + "videos",
                    callback=self.parse_video_page,
                    meta={'page': 1, 'cust_para': cust_para}))
        return results
    except Exception as e:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Parse a Youku video page into an EpisodeItem.

    Reads 'cust_para' from the request meta; it may carry:
      - 'need_check': run the title against the blacklist and drop the video
      - 'category':   override the category scraped from the page
      - 'priority':   set the item's priority
    Returns a list with either a Request for the play-count endpoint
    (when a videoId was found) or the bare EpisodeItem; returns None on error.
    """
    try:
        cust_para = response.request.meta['cust_para']
        log.msg('%s: %s' % (response.request.url, cust_para))
        items = []
        # owner: channel link, reduced to an owner show id
        owner = response.xpath(
            '//div[@class="yk-userinfo"]/div[@class="user-name"]/a/@href'
        ).extract()
        owner_show_id = None
        if owner:
            owner_show_id = Util.get_owner(owner[0])
        # video info
        title = response.xpath(
            '//div[@class="base_info"]/h1/descendant-or-self::*/text()'
        ).extract()
        category = response.xpath(
            '//div[@class="base_info"]/div[@class="guide"]/div/a/text()'
        ).extract()
        scripts = response.xpath('//script[@type="text/javascript"]')
        video_id = scripts.re('videoId = \'(\d+)\'')
        tag = scripts.re('tags="(.+)"')
        upload = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@class="time"]/text()'
        ).extract()
        description = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@id="text_long"]/text()'
        ).extract()
        ep_item = EpisodeItem()
        ep_item['show_id'] = Util.get_showid(response.request.url)
        if video_id:
            ep_item['video_id'] = video_id[0]
        if owner_show_id:
            ep_item['owner_show_id'] = owner_show_id
        if title:
            ep_item['title'] = Util.strip_title("".join(title))
        # optional blacklist check requested by the caller
        # (dead `else: pass` branches of the original removed)
        if 'need_check' in cust_para:
            if self.content_is_forbidden(ep_item['title']):
                log.msg('video [ %s ] is in blacklist!'
                        % ep_item['show_id'])
                return items
        if tag:
            ep_item['tag'] = Util.unquote(tag[0]).rstrip('|')
        # explicit category from the caller wins over the scraped one
        if 'category' in cust_para:
            ep_item['category'] = cust_para['category']
        elif category:
            ep_item['category'] = category[0].replace(u'频道', '')
        if upload:
            t = Util.get_upload_time(upload[0])
            if t:
                ep_item['upload_time'] = Util.get_datetime_delta(
                    datetime.now(), t)
        if description:
            ep_item['description'] = description[0]
        if 'priority' in cust_para:
            ep_item['priority'] = cust_para['priority']
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        if video_id:
            # follow up with the vpaction endpoint to enrich the item
            items.append(
                Request(url=self.vpaction_url + video_id[0],
                        callback=self.parse_vpaction,
                        meta={'item': ep_item}))
        else:
            items.append(ep_item)
        return items
    except Exception:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Parse a YouTube watch page into an EpisodeItem.

    Request meta must carry 'thumb_url', 'upload_time' and 'category'
    (list-like values; the first element is used); 'kw_id' is optional
    and defaults to 1. Emits a Request for the channel's /about page
    (when present) and a Request for get_video_info that carries the
    EpisodeItem onward. Returns None on error.
    """
    try:
        log.msg('%s' % response.request.url)
        meta = response.request.meta
        thumb_url = meta['thumb_url']
        upload_time = meta['upload_time']
        category = meta['category']
        kw_id = meta.get('kw_id', 1)
        items = []
        # owner
        owner = response.xpath(
            '//div[@class="yt-user-info"]/a/@data-ytid').extract()
        owner_url = response.xpath(
            '//div[@class="yt-user-info"]/a/@href').extract()
        owner_show_id = None
        if owner:
            owner_show_id = owner[0]
            # BUG FIX: original indexed owner_url[0] whenever `owner` was
            # non-empty; a missing @href raised IndexError and dropped the
            # whole item. Guard on owner_url as well.
            if owner_url:
                items.append(
                    Request(url=self.url_prefix + owner_url[0] + "/about",
                            callback=self.parse_about))
        # video info
        title = response.xpath('//span[@id="eow-title"]/text()').extract()
        tag = response.xpath(
            './head/meta[@name="keywords"]/@content').extract()
        description = response.xpath(
            '//p[@id="eow-description"]/descendant-or-self::*/text()'
        ).extract()
        played = response.xpath(
            '//div[@class="watch-view-count"]/text()').extract()
        # signature timestamp needed by get_video_info
        sts = re.search(r'\"sts\": ?(\d+)', response.body)
        ep_item = EpisodeItem()
        ep_item['show_id'] = Util.get_youtube_showid(response.request.url)
        if owner_show_id:
            ep_item['owner_show_id'] = owner_show_id
        if title:
            ep_item['title'] = title[0].strip()
        if tag:
            ep_item['tag'] = tag[0].replace(', ', '|')
        if category:
            ep_item['category'] = category
        if upload_time:
            t = Util.get_youtube_upload_time(upload_time[0].strip())
            if t:
                ep_item['upload_time'] = Util.get_datetime_delta(
                    datetime.now(), t)
        if description:
            ep_item['description'] = "\n".join(description)
        if thumb_url:
            ep_item['thumb_url'] = thumb_url[0]
        if played:
            # normalize once and reuse (original called normalize_played twice)
            pld = Util.normalize_played(played[0])
            ep_item['played'] = pld if pld else '0'
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = Util.normalize_youtube_url(response.request.url)
        ep_item['kw_id'] = kw_id
        query = Util.encode({
            'video_id': ep_item['show_id'],
            'eurl': 'https://youtube.googleapis.com/v/' + ep_item['show_id'],
            'sts': sts.groups()[0] if sts else ''
        })
        items.append(
            Request(url='http://www.youtube.com/get_video_info?' + query,
                    callback=self.parse_other_info,
                    meta={'item': ep_item}))
        return items
    except Exception:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Parse an iQiyi play page into an EpisodeItem.

    Request meta must carry 'cat_id' and 'thumb' (list-like; first element
    used). The page markup has several historical layouts, so each field is
    probed with a list of candidate XPaths and the first match wins.
    Returns a Request for the play-length endpoint when an albumId is found,
    otherwise the bare EpisodeItem; None on error.
    """
    def first_nonempty(*xpaths):
        # Return extract() of the first XPath that matches anything, else [].
        for xp in xpaths:
            found = response.xpath(xp).extract()
            if found:
                return found
        return []

    try:
        log.msg('parse_episode %s' % response.request.url)
        cat_id = response.request.meta['cat_id']
        thumb_url = response.request.meta['thumb']
        items = []
        # show_id
        show_id = Util.get_iqiyi_showid(response.request.url)
        # space maybe exist: "albumId:326754200" or "albumId: 326754200"
        albumid = response.selector.re(re.compile(r'albumId: ?(\d+)'))
        # video info -- candidate XPaths for the various page layouts
        title = first_nonempty(
            '//div[@class="play-tit-l"]/h2/descendant-or-self::*/text()',
            '//div[@class="play-tit-l"]/h1/descendant-or-self::*/text()',
            '//div[@class="mod-play-t**s"]/h1/descendant-or-self::*/text()',
            '//div[@class="play-tit play-tit-oneRow play-tit-long"]/h1/descendant-or-self::*/text()')
        category = first_nonempty(
            '//div[@class="crumb_bar"]/span[1]/span/a[2]/text()',
            '//div[@class="play-album-crumbs textOverflow"]/span[1]/a[2]/text()',
            '//div[@class="crumb_bar"]/span[1]/a[2]/text()',
            '//div[@class="mod-crumb_bar"]/span[1]/a[2]/text()')
        upload_time = first_nonempty(
            '//div[@class="crumb_bar"]/span[3]/span/text()',
            '//div[@class="crumb_bar"]/span[2]/span/text()')
        tag = first_nonempty(
            '//span[@id="widget-videotag"]/descendant::*/text()',
            '//span[@class="mod-tags_item vl-block"]/descendant::*/text()',
            '//div[@class="crumb_bar"]/span[2]/a/text()')
        ep_item = EpisodeItem()
        if title:
            ep_item['title'] = "".join([t.strip() for t in title])
        if show_id:
            ep_item['show_id'] = show_id
        if tag:
            ep_item['tag'] = "|".join([t.strip() for t in tag])
        if upload_time:
            ep_item['upload_time'] = upload_time[0].strip()
        if category:
            ep_item['category'] = category[0].strip()
        if thumb_url:
            ep_item['thumb_url'] = thumb_url[0].strip()
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        ep_item['cat_id'] = cat_id
        if albumid:
            items.append(
                Request(url=self.playlength_url + albumid[0],
                        callback=self.parse_playlength,
                        meta={
                            'item': ep_item,
                            'albumid': albumid[0]
                        }))
        else:
            items.append(ep_item)
        return items
    except Exception:
        log.msg(traceback.format_exc(), level=log.ERROR)
def parse_episode(self, response):
    """Parse a Youku video page into an EpisodeItem.

    Reads 'cat_id' from the request meta. Videos whose owner show id is in
    self.channel_exclude are skipped entirely. Emits a Request to crawl the
    owner page, then either a Request for the total-PV page (carrying the
    item) or the bare EpisodeItem. Returns None on error or exclusion.
    """
    try:
        log.msg('%s' % response.request.url)
        cat_id = response.request.meta['cat_id']
        items = []
        page = Selector(response)

        # Channel link: drives both the exclusion check and an owner crawl.
        owner_href = page.xpath(
            '//div[@class="yk-userinfo"]/div[@class="user-name"]/a/@href'
        ).extract()
        owner_show_id = None
        if owner_href:
            owner_show_id = Util.get_owner(owner_href[0])
            if owner_show_id in self.channel_exclude:
                log.msg("video owner excluded: %s" % owner_show_id)
                return
            items.append(Request(url=owner_href[0],
                                 callback=self.parse_owner))

        # Video metadata pulled from the page body and inline scripts.
        title_parts = page.xpath(
            '//div[@class="base_info"]/h1/descendant-or-self::text()'
        ).extract()
        crumbs = page.xpath(
            '//div[@class="base_info"]/div[@class="guide"]/div/a/text()'
        ).extract()
        js_blocks = page.xpath('//script[@type="text/javascript"]')
        video_id = js_blocks.re('videoId = \'(\d+)\'')
        tags = js_blocks.re('tags="(.+)"')
        time_text = page.xpath(
            '//div[@class="yk-videoinfo"]/div[@class="time"]/text()'
        ).extract()
        desc_text = page.xpath(
            '//div[@class="yk-videoinfo"]/div[@id="text_long"]/text()'
        ).extract()
        vp_url = page.xpath(
            '//span[@id="videoTotalPV"]/../../@href').extract()

        ep_item = EpisodeItem()
        ep_item['show_id'] = Util.get_showid(response.request.url)
        if video_id:
            ep_item['video_id'] = video_id[0]
        if owner_show_id:
            ep_item['owner_show_id'] = owner_show_id
        if title_parts:
            cleaned = "".join(title_parts).strip("\n").strip()
            ep_item['title'] = Util.strip_title(cleaned)
        if tags:
            ep_item['tag'] = Util.unquote(tags[0]).rstrip('|')
        if crumbs:
            ep_item['category'] = crumbs[0].replace(u'频道', '')
        if time_text:
            parsed = Util.get_upload_time(time_text[0])
            if parsed:
                ep_item['upload_time'] = Util.get_datetime_delta(
                    datetime.now(), parsed)
        if desc_text:
            ep_item['description'] = desc_text[0]
        ep_item['spider_id'] = self.spider_id
        ep_item['site_id'] = self.site_id
        ep_item['url'] = response.request.url
        ep_item['cat_id'] = cat_id

        # Enrich via the total-PV page when available; otherwise emit as-is.
        if vp_url:
            items.append(
                Request(url=vp_url[0],
                        callback=self.parse_vpaction,
                        meta={'item': ep_item}))
        else:
            items.append(ep_item)
        return items
    except Exception:
        log.msg(traceback.format_exc(), level=log.ERROR)
def video_parse(self, response):
    """Parse a Youku video page into an EpisodeItem, honoring exclusions.

    Optional request meta: 'kw_id', 'pg_id', 'cat_id', 'subject_id'
    (all default to None and are copied onto the item). Videos whose
    category is in self.category_exclude or whose owner is in
    self.channel_exclude are skipped. Returns the collected items
    (BUG FIX: the original built `items` but never returned it, so this
    callback yielded nothing to Scrapy); None on exclusion/missing show
    id/error, matching the sibling parse methods.
    """
    items = []
    try:
        meta = response.request.meta
        kw_id = meta.get('kw_id')
        pg_id = meta.get('pg_id')
        cat_id = meta.get('cat_id')
        subject_id = meta.get('subject_id')

        # check video's category against the exclusion list
        category_str = response.xpath(
            '//div[@class="base_info"]/div[@class="guide"]/div/a/text()'
        ).extract()
        category = None
        if category_str:
            category = category_str[0].replace(u'频道', '')
        if category:
            if category in self.category_exclude:
                log.msg("video category excluded: %s" % category)
                return

        # check video's owner against the exclusion list
        owner = response.xpath(
            '//div[@class="yk-userinfo"]/div[@class="user-name"]/a/@href'
        ).extract()
        owner_show_id = None
        if owner:
            owner_show_id = Util.get_owner(owner[0])
            if owner_show_id in self.channel_exclude:
                log.msg("video owner excluded: %s" % owner_show_id)
                return

        # episode info
        show_id = Util.get_showid(response.request.url)
        title = response.xpath(
            '//div[@class="base_info"]/h1/descendant-or-self::*/text()'
        ).extract()
        upload = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@class="time"]/text()'
        ).extract()
        description = response.xpath(
            '//div[@class="yk-videoinfo"]/div[@id="text_long"]/text()'
        ).extract()
        scripts = response.xpath('//script[@type="text/javascript"]')
        video_id = scripts.re('videoId = \'(\d+)\'')
        tag = scripts.re('tags="(.+)"')

        episode_item = EpisodeItem()
        if show_id:
            episode_item['show_id'] = show_id
        else:
            # a show id is mandatory; nothing useful can be emitted
            return
        if video_id:
            episode_item['video_id'] = video_id[0]
        if owner_show_id:
            episode_item['owner_show_id'] = owner_show_id
        if title:
            episode_item['title'] = Util.strip_title("".join(title))
        if tag:
            episode_item['tag'] = Util.unquote(tag[0]).rstrip('|')
        if category:
            episode_item['category'] = category
        if upload:
            t = Util.get_upload_time(upload[0])
            if t:
                episode_item['upload_time'] = Util.get_datetime_delta(
                    datetime.now(), t)
        if description:
            episode_item['description'] = description[0]
        episode_item['spider_id'] = self.spider_id
        episode_item['site_id'] = self.site_id
        episode_item['url'] = response.request.url
        episode_item['kw_id'] = kw_id
        episode_item['pg_id'] = pg_id
        episode_item['cat_id'] = cat_id
        episode_item['subject_id'] = subject_id
        if video_id:
            items.append(
                Request(url=self.vpaction_url + video_id[0],
                        callback=self.vpaction_parse,
                        meta={'episode_item': episode_item}))
        else:
            items.append(episode_item)
        return items
    except Exception:
        log.msg(traceback.format_exc(), level=log.ERROR)