示例#1
0
	def draw(self, stave_image):
		"""Draw a triplet bracket and the small '3' figure for each complete triplet."""
		for trip in self._triplets:
			# a triplet needs both end notes before it can be drawn
			if trip.start is None or trip.final is None:
				Logger.log('triplet not complete')
				continue
			
			left = trip.start.get_line_offset()
			right = trip.final.get_line_offset() + 5
			top = Config.draw_center - 18
			bottom = Config.draw_center - 14
			
			with Drawing() as draw, Color('black') as color:
				draw.stroke_width = 1
				draw.stroke_color = color
				# horizontal bracket with a short downward tick at each end
				draw.line((left, top), (right, top))
				draw.line((left, top), (left, bottom))
				draw.line((right, top), (right, bottom))
				draw.draw(stave_image)
				
				# stamp the '3' figure just above the bracket
				trip_mark = Image(filename=Config.static_unit_dir + 'figure_3.png')
				stave_image.composite(trip_mark, 
					left + 6, Config.draw_center - 26)
示例#2
0
文件: server.py 项目: silverozzo/vogo
def index():
	"""
	Render the main page with the tablature code entry form,
	refilling the form fields from any values saved in the session.
	"""
	Logger.clear()
	session_id = request.get_cookie('beaker.session.id')
	# the beaker session is dict-like: .get() replaces the repeated
	# "'key' in session" checks and yields '' for absent keys
	name  = request.session.get('name', '')
	typer = request.session.get('typer', '')
	code  = request.session.get('code', '')
	share = request.session.get('share', '')
	return template('index', output=session_id, log_records=Logger.get(), 
		name=name, typer=typer, code=code, share=share)
示例#3
0
	def draw(self, stave_image):
		"""Draw a cubic-bezier slur (liga) between its start and finish notes."""
		for liga in self._ligas:
			if liga.finish is None:
				Logger.log("liga is not finished: " + 
					str(self._ligas.index(liga)))
				continue
			
			# choose the bow direction from the note positions relative
			# to the stave centre when it was not set explicitly
			if liga.direction is None:
				start_off = liga.start.get_stave_offset()
				if start_off > Config.draw_center:
					liga.direction = 'down'
				elif start_off < Config.draw_center:
					liga.direction = 'up'
				elif liga.finish.get_stave_offset() > Config.draw_center:
					liga.direction = 'down'
				else:
					liga.direction = 'up'
			
			# horizontal control points: ends pulled in, inner points at thirds
			x0 = liga.start.get_line_offset() + 4
			x3 = liga.finish.get_line_offset() - 1
			third = (x3 - x0) / 3
			x1 = x0 + third
			x2 = x3 - third
			
			y0 = y1 = y2 = y3 = 0
			if liga.direction == 'down':
				y0 = liga.start.get_stave_offset() + 5
				y3 = liga.finish.get_stave_offset() + 5
				y1 = y0 + (y3 - y0) / 3 + third
				y2 = y3 - (y3 - y0) / 3 + third
			if liga.direction == 'up':
				y0 = liga.start.get_stave_offset() - 4
				y3 = liga.finish.get_stave_offset() - 4
				y1 = y0 + (y3 - y0) / 3 - third
				y2 = y3 - (y3 - y0) / 3 - third
			
			with Drawing() as draw:
				draw.fill_color = Color('transparent')
				draw.stroke_color = Color('black')
				draw.bezier(((x0, y0), (x1, y1), (x2, y2), (x3, y3)))
				draw.draw(stave_image)
示例#4
0
 def test_put_good_log_num(self):
     """Valid log numbers passed to put() end up in logger.save_iteration."""
     logger = Logger()
     resource = LoggerResource(logger)
     for num in (10, 100, 50):
         resource.put(num)
         self.assertEqual(num, logger.save_iteration)
示例#5
0
文件: server.py 项目: silverozzo/vogo
def tablprocess():
	"""
	Handle the form submission: generate a tablature image from the
	submitted code and optionally publish it to the shared library.
	"""
	Logger.clear()
	session_id = request.get_cookie('beaker.session.id')
	if session_id is None:
		Logger.log('no session from your side')
		# pass share='' as well, so the 'index' template always receives
		# the same set of variables as the success path below
		return template('index', output=session_id, log_records=Logger.get(), 
			name='', typer='', code='', share='')
	
	name  = request.forms.get('name')  or ''
	typer = request.forms.get('typer') or ''
	code  = request.forms.get('code')  or ''
	share = request.forms.get('share') or ''
	
	# remember the submitted values so index() can refill the form
	request.session['name']  = name
	request.session['typer'] = typer
	request.session['code']  = code
	request.session['share'] = share
	# list comprehension instead of map(): produces a real list on
	# Python 3 too, so the lines can be iterated more than once
	lines = [line.strip() for line in code.split("\n")]
	TablMaker.process(lines, name, 'output/' + session_id + '.png')
	if share == 'on':
		library.add_tabl(typer, name, code, session_id)
	
	return template('index', output=session_id, log_records=Logger.get(), 
		name=name, typer=typer, code=code, share=share)
示例#6
0
 def test_get_wrong_model_name(self):
     """get() raises the wrong_model_name error for invalid model names."""
     model_res = ModelResource(complex_m, simple_m, Logger(100))
     for bad_name in ("abc", -12, "test"):
         with self.assertRaises(Error) as ctx:
             model_res.get(bad_name)
         raised = ctx.exception
         self.assertEqual(errors["wrong_model_name"]["message"], raised.message)
         self.assertEqual(errors["wrong_model_name"]["code"], raised.status_code)
示例#7
0
 def test_get_wrong_user_id(self):
     """get() raises the wrong_user_id error for invalid user ids."""
     model_res = TestModelResource(complex_m, simple_m, Logger(100))
     for bad_id in ("abc", -12, {}):
         with self.assertRaises(Error) as ctx:
             model_res.get(bad_id)
         raised = ctx.exception
         self.assertEqual(errors["wrong_user_id"]["message"], raised.message)
         self.assertEqual(errors["wrong_user_id"]["code"], raised.status_code)
示例#8
0
 def test_put_wrong_log_num(self):
     """put() raises the wrong_log_num error for invalid log numbers."""
     resource = LoggerResource(Logger())
     for bad_num in ("abc", -12, {}):
         with self.assertRaises(Error) as ctx:
             resource.put(bad_num)
         raised = ctx.exception
         self.assertEqual(errors["wrong_log_num"]["message"], raised.message)
         self.assertEqual(errors["wrong_log_num"]["code"], raised.status_code)
示例#9
0
import os
import time
from service.database import get_database
from service.logger import Logger
import service.utils
from scrapy.exceptions import CloseSpider
import requests
import re
import urllib
import service.mail
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('sina')


class SinaSpider(scrapy.Spider):
    name = "sina"
    allowed_domains = ["sina.com"]
    callbacked = False
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#10
0
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('vimeo')


class VimeoSpider(scrapy.Spider):
    name = "vimeo"
    allowed_domains = ["vimeo.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#11
0
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.video_add_subtitle
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('ted')


class TedSpider(scrapy.Spider):
    name = "ted"
    allowed_domains = ["ted.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#12
0
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('dl.youtube')


class Ku6Spider(scrapy.Spider):
    name = "ku6"
    allowed_domains = ["ku6.com"]
    callbacked = False
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#13
0
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import youtube_dl
import string
import os
import subprocess
import service.mail
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('share.cctv')


class ShareCctvSpider(scrapy.Spider):
    name = "share.cctv"
    allowed_domains = ["cctv.com"]
    callbacked = False
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#14
0
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
import requests
from scrapy.exceptions import CloseSpider
import sys
from scrapy.selector import Selector
from scrapy.http import Request, FormRequest
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('echo')


class EchoSpider(scrapy.Spider):
    name = 'echo'
    allowed_domains = ["app-echo.com"]
    callbacked = None
    video_id = None
    clip_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
示例#15
0
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('miaopai')


class MiaopaiSpider(scrapy.Spider):
    name = "miaopai"
    allowed_domains = ["miaopai.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#16
0
import subprocess
import os
import time
from service.database import get_database
from service.logger import Logger
import service.utils
from scrapy.exceptions import CloseSpider
import execjs
import requests
import re
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('iqiyi')


class IqiyiSpider(scrapy.Spider):
    name = "iqiyi"
    allowed_domains = ["iqiyi.com", "pps.tv"]
    callbacked = False
    video_id = None

    # start_urls = (
    #     'http://www.www.iqiyi.com/',
    # )

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(IqiyiSpider, self).__init__(*args, **kwargs)
        self.config = ConfigParser.ConfigParser()
示例#17
0
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import urllib
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('sohu')


class SohuSpider(scrapy.Spider):
    name = "sohu"
    allowed_domains = ["sohu.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#18
0
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('facebook')


class FacebookSpider(scrapy.Spider):
    name = "facebook"
    allowed_domains = ["facebook.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#19
0
import requests
import xmltodict
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import os
import subprocess
import service.mail
import execjs
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('v.qq')


class VQqSpider(scrapy.Spider):
    name = "v.qq"
    allowed_domains = ["v.qq.com"]
    callbacked = False
    video_id = None
    times = 0

    # start_urls = (
    #     'http://www.v.qq.com/',
    # )
    # http://s.video.qq.com/loadplaylist?type=5&id=u3z9dt8bfxcgrr6&plname=qq
    # ad http://livew.l.qq.com/livemsg?pf=H5&ad_type=WL&pf_ex=mac&url=http%3A%2F%2Fv.qq.com%2Fcover%2Fu%2Fu3z9dt8bfxcgrr6.html%3Fvid%3Dd0019xtp6tj&ty=web&plugin=1.0.0&v=%24V2.0Build8588%24&coverid=u3z9dt8bfxcgrr6&vid=d0019xtp6tj&vptag=&pu=0&adaptor=1&dtype=1&live=0&_time_random=1471573031523&chid=0&low_login=1&_=1471573028020
    # playinfo http://h5vv.video.qq.com/getinfo?callback=jQuery191026505530742794825_1471573028021&vid=d0019xtp6tj&platform=10901&otype=json&ehost=v.qq.com&defn=auto&low_login=1&_=1471573028022
示例#20
0
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
import requests
from scrapy.exceptions import CloseSpider
import sys
import service.mail
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('mgtv')


class MgtvSpider(scrapy.Spider):
    name = "mgtv"
    allowed_domains = ["mgtv.com"]
    callbacked = None
    video_id = None
    clip_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
示例#21
0
import ConfigParser
import time
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import subprocess
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('ifeng')


class IfengSpider(scrapy.Spider):
    name = "ifeng"
    allowed_domains = ["ifeng.com"]
    callbacked = False
    video_id = None

    # http://v.ifeng.com/news/society/201611/01a0e556-2a35-4088-a322-6a8a60983fe7.shtml
    # http://dyn.v.ifeng.com/cmpp/video_msg_ipad.js?callback=jQuery1910959290532730704_1478078228964&msg=01a0e556-2a35-4088-a322-6a8a60983fe7&param=playermsg&_=1478078228965
    # http://ips.ifeng.com/video19.ifeng.com/video09/2016/11/01/4333056-102-009-1802.mp4
    # http://news.ifeng.com/a/20160616/49083698_0.shtml
    def __init__(self,
                 url,
                 uuid,
示例#22
0
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('chinanews')


class ChinanewsSpider(scrapy.Spider):
    name = "chinanews"
    allowed_domains = ["chinanews.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#23
0
import re
import requests
import xmltodict
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import os
import subprocess
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('kankanews')


# http://v.kankanews.com/index.php?app=api&mod=public&act=getvideo&id=2442110
# http://www.kankanews.com/a/2016-11-30/0017786457.shtml
class KankanewsSpider(scrapy.Spider):
    name = "kankanews"
    allowed_domains = ["kankanews.com"]
    callbacked = False
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
示例#24
0
from flask_restful import Resource

from service import typings
from service.logger import Logger
from service.middleware import CHAIN_MAP
from service.utils import PostResource

log = Logger("Api", True, True, False)
log.info("Loading API....")


class BasePostResource(PostResource):
    """Shared POST-resource base that resolves the target chain from request args."""

    def chain(self):
        """Return the chain implementation for the requested currency (default "KSM")."""
        parsed = self.reqparse.parse_args()
        currency = parsed.get("currency", "KSM")
        return CHAIN_MAP[currency]


class Balance(BasePostResource):
    """
    Get the free balance from an address
    """
    def __init__(self):
        super(Balance, self).__init__(typings.balance)

    def post(self):
        args = self.reqparse.parse_args()
        response = self.chain().get_balance(args["address"])
        if response["success"] is False:
            log.error("Error: Substrate API balance call failed", response)
示例#25
0
文件: test.py 项目: silverozzo/vogo
from wand.image import Image


# Smoke-test script: feed sample tablature codes through LineMaker /
# TablMaker and write the rendered images to disk for manual inspection.

# basic line: time signature 3/4 plus three quarter notes
check = 'tr t34 c4 d4 e4'
result = LineMaker.process(check)
result.save(filename='test.png')

# direction marks around a bracketed group
# NOTE(review): this overwrites test.png from the previous case — confirm intended
check = 'tr [!down (!up b8 a8 ) ]'
result = LineMaker.process(check)
result.save(filename='test.png')


# variation markers (varc … novar)
check = 'tr varc c4 d4 e4 novar'
result = LineMaker.process(check)
result.save(filename='test_mark.png')

# multi-line input goes through TablMaker with a title and output path
check = ['tr t34 c4 d4 e4', 'tr f4 g4 a4']
result = TablMaker.process(check, 'test', 'test2.png')

# degenerate single-token input
check = 'tr c0#'
result = LineMaker.process(check)
result.save(filename='test_empty.png')

# triplet grouping (333)
check = 'tr 333 c1 d1 e1'
result = LineMaker.process(check)
result.save(filename='test_trip.png')

# exercise the logger round-trip
from service.logger import Logger
Logger.log('foobar')
print(Logger.get())
示例#26
0
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('56')


class A56Spider(scrapy.Spider):
    name = "56"
    allowed_domains = ["56.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#27
0
from service.database import get_database
import service.utils
import subprocess
import hashlib
import BeautifulSoup
import youtube_dl
import string
import os
import service.mail
import sys
import execjs

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('bilibili')


class BilibiliSpider(scrapy.Spider):
    name = "bilibili"
    allowed_domains = ["bilibili.com"]
    callbacked = False
    video_id = None

    # http://www.bilibili.com/m/html5?aid=6032244&page=1
    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
示例#28
0
import ConfigParser
import json
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import base64
import time
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('1905')


class A1905Spider(scrapy.Spider):
    name = "1905"
    allowed_domains = ["1905.com"]
    callbacked = None
    video_id = None

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(A1905Spider, self).__init__(*args, **kwargs)

        self.config = ConfigParser.ConfigParser()
        self.config.read("config/config.ini")
        self.uuid = uuid
        self.upload_url = upload_url
示例#29
0
import json
import string
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('miomio')


class MiomioSpider(scrapy.Spider):
    name = "miomio"
    allowed_domains = ["miomio.tv"]
    callbacked = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
                 **kwargs):
示例#30
0
import scrapy
import ConfigParser
import time
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('zoomin.tv')


class ZoominTvSpider(scrapy.Spider):
    name = "zoomin.tv"
    allowed_domains = ["zoomin.tv"]
    callbacked = False
    pids = [
        'corporateusahddp', 'corporateuk', 'corporateke', 'corporatees',
        'corporatelatamdp', 'corporatecataldp', 'corporatenl', 'corporatevla',
        'corporatede', 'corporateit', 'corporatefr', 'corporatewal',
        'corporatebradp', 'corporatetr', 'corporateswedp', 'corporateru',
        'corporatejp', 'corporatechinacndp', 'corporatearabdp'
    ]

    # start_urls = (
示例#31
0
import re
import requests
import json
from service.logger import Logger
from scrapy.exceptions import CloseSpider
from service.database import get_database
import service.utils
import service.video_add_subtitle
import subprocess
import os
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('open.163')


class Open163Spider(scrapy.Spider):
    name = "open.163"
    allowed_domains = ["open.163.com", "v.163.com"]
    callbacked = None
    video_id = None

    # start_urls = (
    #     'http://www.open.163.com/',
    # )
    #

    def __init__(self,
                 url,
示例#32
0
import string
import json
import subprocess
import os
import time
import re
from service.database import get_database
import service.utils
from service.logger import Logger
from scrapy.exceptions import CloseSpider
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('youku')


class VShowYoukuSpider(scrapy.Spider):
    name = "youku"
    allowed_domains = ["youku.com"]
    # start_urls = (
    #     'http://www.i.youku.com/',
    # )
    logger = None
    callbacked = False

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
示例#33
0
import json
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import requests
import os
import time
import subprocess
import service.mail
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('yizhibo')


class YizhiboSpider(scrapy.Spider):
    name = "yizhibo"
    allowed_domains = ["yizhibo.com"]
    callbacked = None
    video_id = None
    # start_urls = (
    #     'http://www.yizhibo.com/',
    # )
    #

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, live_callback=None, *args, **kwargs):
        super(YizhiboSpider, self).__init__(*args, **kwargs)
示例#34
0
import os
import subprocess
import time
from service.logger import Logger
from service.database import get_database
import service.utils
import re
from scrapy.exceptions import CloseSpider
import service.pptv
import service.mail
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('pptv')


class PptvSpider(scrapy.Spider):
    name = "pptv"
    allowed_domains = ["pptv.com"]
    callbacked = None
    video_id = None

    def __init__(self,
                 url,
                 uuid,
                 upload_url,
                 callback,
                 check_video_url=None,
                 *args,
示例#35
0
import json
import re
from service.database import get_database
import service.utils
from service.logger import Logger
from scrapy.exceptions import CloseSpider
import service.mail
import sys
import execjs
from bs4 import BeautifulSoup
import base64

reload(sys)
sys.setdefaultencoding('utf-8')

logger = Logger.get_logger('meipai')


class TudouSpider(scrapy.Spider):
    name = "meipai"
    allowed_domains = ["meipai.com"]
    logger = None
    callbacked = False

    def __init__(self, url, uuid, upload_url, callback, check_video_url=None, *args, **kwargs):
        super(TudouSpider, self).__init__(*args, **kwargs)
        self.start_urls.append(url)
        self.config = ConfigParser.ConfigParser()
        self.config.read("config/config.ini")
        self.uuid = uuid
        self.upload_url = upload_url