# Toutiao video spider -- file fragment (source was collapsed onto one line and is
# truncated mid-definition; tokens reproduced verbatim, only re-flowed).
import base64
import ConfigParser
import json
from service.logger import Logger
from service.database import get_database
import service.utils
import time
from scrapy.exceptions import CloseSpider
import re
import service.mail
import sys

# Python 2 only: re-expose and force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('toutiao')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class VideoToutiaoSpider(scrapy.Spider):
    name = "toutiao"
    allowed_domains = ["toutiao.com"]
    callbacked = False  # presumably flips once the completion callback fires -- confirm
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# app-echo.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
import requests
from scrapy.exceptions import CloseSpider
import sys
from scrapy.selector import Selector
from scrapy.http import Request, FormRequest
import sys  # NOTE(review): duplicate of the `import sys` above (harmless)

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('echo')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class EchoSpider(scrapy.Spider):
    name = 'echo'
    allowed_domains = ["app-echo.com"]
    callbacked = None
    video_id = None
    clip_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None,
# ted.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.video_add_subtitle
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('ted')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class TedSpider(scrapy.Spider):
    name = "ted"
    allowed_domains = ["ted.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# iQiyi spider -- file fragment (collapsed to one line, truncated inside
# `__init__`; tokens reproduced verbatim, only re-flowed).
import subprocess
import os
import time
from service.database import get_database
from service.logger import Logger
import service.utils
from scrapy.exceptions import CloseSpider
import execjs
import requests
import re
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('iqiyi')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class IqiyiSpider(scrapy.Spider):
    name = "iqiyi"
    allowed_domains = ["iqiyi.com", "pps.tv"]
    callbacked = False
    video_id = None
    # start_urls = (
    #     'http://www.www.iqiyi.com/',
    # )

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(IqiyiSpider, self).__init__(*args, **kwargs)
        # NOTE(review): `import ConfigParser` is not visible in this fragment --
        # confirm it exists in the full file. Fragment is cut off after this line.
        self.config = ConfigParser.ConfigParser()
# share.cctv.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import youtube_dl
import string
import os
import subprocess
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('share.cctv')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class ShareCctvSpider(scrapy.Spider):
    name = "share.cctv"
    allowed_domains = ["cctv.com"]
    callbacked = False
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# vimeo.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('vimeo')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class VimeoSpider(scrapy.Spider):
    name = "vimeo"
    allowed_domains = ["vimeo.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# miaopai.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('miaopai')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class MiaopaiSpider(scrapy.Spider):
    name = "miaopai"
    allowed_domains = ["miaopai.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# ku6.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

# NOTE(review): logger channel 'dl.youtube' does not match this spider ("ku6")
# -- looks like a copy/paste leftover; confirm intended channel name.
logger = Logger.get_logger('dl.youtube')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class Ku6Spider(scrapy.Spider):
    name = "ku6"
    allowed_domains = ["ku6.com"]
    callbacked = False
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# kankanews.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import re
import requests
import xmltodict
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import os
import subprocess
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('kankanews')


# Sample endpoints kept from the original author:
# http://v.kankanews.com/index.php?app=api&mod=public&act=getvideo&id=2442110
# http://www.kankanews.com/a/2016-11-30/0017786457.shtml
# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class KankanewsSpider(scrapy.Spider):
    name = "kankanews"
    allowed_domains = ["kankanews.com"]
    callbacked = False
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback,
# bilibili.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; the top of the file may also be missing --
# tokens reproduced verbatim, only re-flowed).
from service.database import get_database
import service.utils
import subprocess
import hashlib
import BeautifulSoup
import youtube_dl
import string
import os
import service.mail
import sys
import execjs

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

# NOTE(review): `Logger` is used here but no import of it is visible in this
# fragment -- confirm it is imported earlier in the full file.
logger = Logger.get_logger('bilibili')


# NOTE(review): `import scrapy` is not visible in this fragment either.
class BilibiliSpider(scrapy.Spider):
    name = "bilibili"
    allowed_domains = ["bilibili.com"]
    callbacked = False
    video_id = None

    # http://www.bilibili.com/m/html5?aid=6032244&page=1
    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None,
# meipai.com spider -- file fragment (collapsed to one line, truncated inside
# `__init__`; tokens reproduced verbatim, only re-flowed).
import json
import re
from service.database import get_database
import service.utils
from service.logger import Logger
from scrapy.exceptions import CloseSpider
import service.mail
import sys
import execjs
from bs4 import BeautifulSoup
import base64

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('meipai')


# NOTE(review): class is named TudouSpider but `name = "meipai"` -- likely a
# copy/paste leftover from a tudou spider; confirm intended class name.
# NOTE(review): `import scrapy` is not visible in this fragment.
class TudouSpider(scrapy.Spider):
    name = "meipai"
    allowed_domains = ["meipai.com"]
    logger = None  # NOTE(review): shadows the module-level `logger` on instances
    callbacked = False

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(TudouSpider, self).__init__(*args, **kwargs)
        self.start_urls.append(url)
        # NOTE(review): `import ConfigParser` is not visible in this fragment --
        # confirm it exists in the full file.
        self.config = ConfigParser.ConfigParser()
        self.config.read("config/config.ini")
        self.uuid = uuid
        # Fragment is cut off after this line.
        self.upload_url = upload_url
# YouTube subtitle spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import os
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
from scrapy import Selector
import subprocess
import string
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('subtitle.youtube')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class SubtitleYoutubeSpider(scrapy.Spider):
    name = "subtitle.youtube"
    allowed_domains = ["youtube.com"]
    callbacked = None
    auto = 0      # presumably "use auto-generated subtitles" flag -- confirm
    insub = None
    # start_urls = (
    #     'https://www.youtube.com/watch?v=wK222wGwrZg&list=PL2HEDIx6Li8j_jT8JI90WmXeQ657UUKSH&index=35',
    # )

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url,
# weibo.com video spider -- file fragment (collapsed to one line, truncated
# inside `__init__`; the top of the file may also be missing -- tokens
# reproduced verbatim, only re-flowed).
import time
import subprocess
from scrapy import Selector
from urllib import unquote
import service.mail
import sys
import urllib
import base64
import rsa
import binascii
from scrapy.http import Request

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

# NOTE(review): `Logger` is used here but no import of it is visible in this
# fragment -- confirm it is imported earlier in the full file.
logger = Logger.get_logger('weibo')


# http://video.weibo.com/show?fid=1034:d4a82d4c8557db0697da6f6bd29d7ab2
# NOTE(review): `import scrapy` is not visible in this fragment either.
class WeiboSpider(scrapy.Spider):
    name = "weibo"
    allowed_domains = ["weibo.com"]
    callbacked = None
    video_id = None

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None,
                 live_callback=None, *args, **kwargs):
        super(WeiboSpider, self).__init__(*args, **kwargs)
        # NOTE(review): `import ConfigParser` is not visible in this fragment --
        # confirm it exists in the full file.
        self.config = ConfigParser.ConfigParser()
        self.config.read("config/config.ini")
        # Fragment is cut off after this line.
        self.uuid = uuid
# AcFun spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; the top of the file may also be missing --
# tokens reproduced verbatim, only re-flowed).
import re
from scrapy.exceptions import CloseSpider
import service.acfun
import youtube_dl
from scrapy.selector import Selector
import requests
import urllib
import execjs
import service.mail
from scrapy.http import Request, FormRequest
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

# NOTE(review): `Logger` is used here but no import of it is visible in this
# fragment -- confirm it is imported earlier in the full file.
logger = Logger.get_logger('acfun')


# NOTE(review): `import scrapy` is not visible in this fragment either.
class AcfunSpider(scrapy.Spider):
    name = "acfun"
    allowed_domains = ["acfun.tv", "acfun.cn"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# yizhibo.com spider -- file fragment (collapsed to one line, truncated inside
# `__init__`; tokens reproduced verbatim, only re-flowed).
import json
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import os
import time
import subprocess
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('yizhibo')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class YizhiboSpider(scrapy.Spider):
    name = "yizhibo"
    allowed_domains = ["yizhibo.com"]
    callbacked = None
    video_id = None
    # start_urls = (
    #     'http://www.yizhibo.com/',
    # )
    #

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None,
                 live_callback=None, *args, **kwargs):
        # Fragment is cut off after this line.
        super(YizhiboSpider, self).__init__(*args, **kwargs)
# chinanews.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('chinanews')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class ChinanewsSpider(scrapy.Spider):
    name = "chinanews"
    allowed_domains = ["chinanews.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# sina.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import os
import time
from service.database import get_database
from service.logger import Logger
import service.utils
from scrapy.exceptions import CloseSpider
import requests
import re
import urllib
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('sina')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class SinaSpider(scrapy.Spider):
    name = "sina"
    allowed_domains = ["sina.com"]
    callbacked = False
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# ifeng.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import ConfigParser
import time
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import subprocess
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('ifeng')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class IfengSpider(scrapy.Spider):
    name = "ifeng"
    allowed_domains = ["ifeng.com"]
    callbacked = False
    video_id = None
    # Sample endpoints kept from the original author ("&param=" was mis-encoded
    # as a pilcrow in the source and has been repaired):
    # http://v.ifeng.com/news/society/201611/01a0e556-2a35-4088-a322-6a8a60983fe7.shtml
    # http://dyn.v.ifeng.com/cmpp/video_msg_ipad.js?callback=jQuery1910959290532730704_1478078228964&msg=01a0e556-2a35-4088-a322-6a8a60983fe7&param=playermsg&_=1478078228965
    # http://ips.ifeng.com/video19.ifeng.com/video09/2016/11/01/4333056-102-009-1802.mp4
    # http://news.ifeng.com/a/20160616/49083698_0.shtml

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid,
# facebook.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('facebook')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class FacebookSpider(scrapy.Spider):
    name = "facebook"
    allowed_domains = ["facebook.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# mgtv.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
import requests
from scrapy.exceptions import CloseSpider
import sys
import service.mail
import sys  # NOTE(review): duplicate of the `import sys` above (harmless)

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('mgtv')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class MgtvSpider(scrapy.Spider):
    name = "mgtv"
    allowed_domains = ["mgtv.com"]
    callbacked = None
    video_id = None
    clip_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None,
# 56.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('56')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class A56Spider(scrapy.Spider):
    name = "56"
    allowed_domains = ["56.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# v.qq.com (Tencent Video) spider -- file fragment (collapsed to one line,
# truncated inside the class's leading comment block; tokens reproduced
# verbatim, only re-flowed).
import requests
import xmltodict
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import os
import subprocess
import service.mail
import execjs
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('v.qq')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class VQqSpider(scrapy.Spider):
    name = "v.qq"
    allowed_domains = ["v.qq.com"]
    callbacked = False
    video_id = None
    times = 0  # presumably a retry counter -- confirm against the full file
    # start_urls = (
    #     'http://www.v.qq.com/',
    # )
    # Sample endpoints kept from the original author:
    # http://s.video.qq.com/loadplaylist?type=5&id=u3z9dt8bfxcgrr6&plname=qq
    # ad http://livew.l.qq.com/livemsg?pf=H5&ad_type=WL&pf_ex=mac&url=http%3A%2F%2Fv.qq.com%2Fcover%2Fu%2Fu3z9dt8bfxcgrr6.html%3Fvid%3Dd0019xtp6tj&ty=web&plugin=1.0.0&v=%24V2.0Build8588%24&coverid=u3z9dt8bfxcgrr6&vid=d0019xtp6tj&vptag=&pu=0&adaptor=1&dtype=1&live=0&_time_random=1471573031523&chid=0&low_login=1&_=1471573028020
    # playinfo http://h5vv.video.qq.com/getinfo?callback=jQuery191026505530742794825_1471573028021&vid=d0019xtp6tj&platform=10901&otype=json&ehost=v.qq.com&defn=auto&low_login=1&_=1471573028022
    # Fragment is cut off here; the class body continues in the full file.
# 1905.com spider -- file fragment (collapsed to one line, truncated inside
# `__init__`; tokens reproduced verbatim, only re-flowed).
import ConfigParser
import json
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import base64
import time
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('1905')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class A1905Spider(scrapy.Spider):
    name = "1905"
    allowed_domains = ["1905.com"]
    callbacked = None
    video_id = None

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(A1905Spider, self).__init__(*args, **kwargs)
        self.config = ConfigParser.ConfigParser()
        self.config.read("config/config.ini")
        self.uuid = uuid
        # Fragment is cut off after this line.
        self.upload_url = upload_url
# sohu.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import urllib
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('sohu')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class SohuSpider(scrapy.Spider):
    name = "sohu"
    allowed_domains = ["sohu.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# zoomin.tv spider -- file fragment (collapsed to one line, truncated inside a
# commented-out `start_urls`; tokens reproduced verbatim, only re-flowed).
import scrapy
import ConfigParser
import time
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('zoomin.tv')


class ZoominTvSpider(scrapy.Spider):
    name = "zoomin.tv"
    allowed_domains = ["zoomin.tv"]
    callbacked = False
    # Per-locale "corporate" feed ids -- presumably zoomin.tv playlist/channel
    # identifiers; confirm against the consuming code.
    pids = [
        'corporateusahddp', 'corporateuk', 'corporateke', 'corporatees',
        'corporatelatamdp', 'corporatecataldp', 'corporatenl', 'corporatevla',
        'corporatede', 'corporateit', 'corporatefr', 'corporatewal',
        'corporatebradp', 'corporatetr', 'corporateswedp', 'corporateru',
        'corporatejp', 'corporatechinacndp', 'corporatearabdp'
    ]
    # Fragment is cut off here; the class body continues in the full file.
    # start_urls = (
# miomio.tv spider -- file fragment (collapsed to one line, truncated right
# after the `__init__` header; tokens reproduced verbatim, only re-flowed).
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('miomio')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class MiomioSpider(scrapy.Spider):
    name = "miomio"
    allowed_domains = ["miomio.tv"]
    callbacked = None

    # Fragment is cut off here; the body of __init__ is not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
# youku.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import string
import json
import subprocess
import os
import time
import re
from service.database import get_database
import service.utils
from service.logger import Logger
from scrapy.exceptions import CloseSpider
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('youku')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class VShowYoukuSpider(scrapy.Spider):
    name = "youku"
    allowed_domains = ["youku.com"]
    # start_urls = (
    #     'http://www.i.youku.com/',
    # )
    logger = None  # NOTE(review): shadows the module-level `logger` on instances
    callbacked = False

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url,
# open.163.com (NetEase open courses) spider -- file fragment (collapsed to one
# line, truncated mid-`__init__` signature; tokens reproduced verbatim, only
# re-flowed).
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import service.video_add_subtitle
import subprocess
import os
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('open.163')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class Open163Spider(scrapy.Spider):
    name = "open.163"
    allowed_domains = ["open.163.com", "v.163.com"]
    callbacked = None
    video_id = None
    # start_urls = (
    #     'http://www.open.163.com/',
    # )
    #

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url,
# pptv.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.pptv
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('pptv')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file.
class PptvSpider(scrapy.Spider):
    name = "pptv"
    allowed_domains = ["pptv.com"]
    callbacked = None
    video_id = None

    # Fragment is cut off here; rest of the signature/body not visible.
    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args,
# cztv.com spider -- file fragment (collapsed to one line, truncated
# mid-`__init__` signature; tokens reproduced verbatim, only re-flowed).
import time
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import os
import service.mail
import sys

# Python 2 only: force utf-8 as the implicit str/unicode codec.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('cztv')


# NOTE(review): `import scrapy` is not visible in this fragment -- confirm it
# exists in the full file. Unlike sibling spiders, no `allowed_domains` is
# declared here -- confirm that is intentional.
class CztvSpider(scrapy.Spider):
    name = "cztv"
    callbacked = False
    video_id = None
    times = 0  # presumably a retry counter -- confirm against the full file

    # Fragment is cut off here; rest of the signature/body not visible.
    # NOTE(review): extra `platform=11001` parameter (meaning of the magic
    # default not derivable from this fragment).
    def __init__(self, url, uuid, upload_url, callback, platform=11001, check_video_url=None,