Esempio n. 1
0
def vimeo_resolver(url,prettyname,cachePath):
	"""Resolve a Vimeo embed URL into a (jsontext, jsonloaded) tuple.

	jsontext is the JSON string (written to a per-video cache file when the
	"cachesites" setting is enabled) and jsonloaded is its parsed form.
	Returns None implicitly when the URL does not match or resolution fails
	(the error is printed, not raised).
	"""
	# The id-extraction pattern depends on whether a query string follows.
	if url.find('?') > -1: match = re.compile('vimeo.com/video/(.+?)\?').findall(url)
	else: match = re.compile('vimeo.com/video/(.*)').findall(url)
	if not match: return
	videocache = os.path.join(cachePath,str(match[0]))
	if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
		# Cache hit: return the stored JSON line verbatim plus its parsed form.
		jsonline = basic.readfiletoJSON(videocache)
		return jsonline, json.loads(jsonline)
	try:
		data = basic.open_url('http://player.vimeo.com/video/'+str(match[0])+'/config?type=moogaloop&referrer=&player_url=player.vimeo.com&v=1.0.0&cdn_url=http://a.vimeocdn.com')
		parameters = json.loads(data)
		title = parameters['video']['title']
		# Normalise the title to plain ASCII (non-ASCII chars become XML
		# character references); keep the original title if that fails.
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: pass
		duration = parameters['video']['duration']
		thumbnail = parameters['video']['thumbs']['640']
		# Prefer the HD stream, falling back to SD when HD is unavailable.
		try: url = parameters['request']['files']['h264']['hd']['url']
		except: url = parameters['request']['files']['h264']['sd']['url']
		# BUG FIX: build the JSON with json.dumps instead of string
		# concatenation -- titles containing '"' or '\' previously produced
		# invalid JSON that the json.loads below then rejected.
		jsontext = json.dumps({"prettyname": prettyname, "url": url,
				"title": title, "duration": str(duration),
				"thumbnail": thumbnail})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		# Best-effort resolver: log and return None.
		print('##ERROR-funvideos:vimeo_resolver: '+str(match[0])+' '+str(e))
Esempio n. 2
0
def grab(url,prettyname,cachePath,cacheE):
	"""Scrape a page of YouTube-backed video entries into a list of dicts.

	url -- page to scrape; prettyname -- source label stored in each entry;
	cachePath -- directory for per-video cache files; cacheE -- 'true'
	enables writing cache files.  Each entry carries prettyname, url, title,
	duration (seconds) and thumbnail.  Returns the list when non-empty,
	otherwise None (original behaviour preserved); on error prints and
	returns None.
	"""
	items = []  # local renamed from `list` so the builtin is not shadowed
	try:
		content = basic.open_url(url)
		# Each video sits in its own <div class="videoListItem"> chunk.
		for entry in content.split('<div class="videoListItem">')[1:]:
			vid = re.compile('data-youtubeid="(.+?)"', re.DOTALL).findall(entry)[0]
			raw = re.compile('<div class="duration">(.+?)</div>', re.DOTALL).findall(entry)[0].strip()
			parts = raw.split(":")
			duration = str(int(parts[0])*60+int(parts[1]))  # "mm:ss" -> seconds
			thumb = "http://img.youtube.com/vi/"+vid+"/0.jpg"
			title = basic.cleanTitle(re.compile('alt="(.+?)"', re.DOTALL).findall(entry)[0])
			videocache = os.path.join(cachePath,str(vid))
			# Normalise the title to ASCII (XML char refs for non-ASCII);
			# keep it unchanged when the conversion fails.
			try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
			except: pass
			# BUG FIX: json.dumps instead of string concatenation, so titles
			# containing '"' or '\' no longer yield invalid JSON.
			jsontext = json.dumps({"prettyname": prettyname,
					"url": "plugin://plugin.video.youtube/?action=play_video&videoid="+str(vid),
					"title": title, "duration": duration, "thumbnail": thumb})
			jsonloaded = json.loads(jsontext)
			if cacheE == 'true' and not os.path.isfile(videocache): basic.writefile(videocache,'w',jsontext)
			items.append(jsonloaded)
		if items: return items
	except BaseException as e:
		print('##ERROR-funvideos:VitaminL_resolver: '+url+' '+str(e))
Esempio n. 3
0
def youtube_resolver(url,prettyname,cachePath):
	"""Resolve a YouTube embed URL via the Data API v3.

	Returns (jsontext, jsonloaded) on success and None when the URL does
	not match.  Unlike the sibling resolvers this variant has no try/except,
	so network or parse errors propagate to the caller (unchanged).
	NOTE(review): this block reads/writes setting "cachesite" while every
	other resolver in the file uses "cachesites" -- confirm which is right.
	"""
	match = re.compile('.*?youtube.com/embed/(.+?)\?').findall(url)
	if not match: match = re.compile('.*?youtube.com/embed/(.*)').findall(url)
	if not match: return
	videocache = os.path.join(cachePath,str(match[0]))
	if getSetting("cachesite") == 'true' and os.path.isfile(videocache):
		# Cache hit: return the stored JSON line verbatim plus its parsed form.
		jsonline = basic.readfiletoJSON(videocache)
		return jsonline, json.loads(jsonline)
	data = basic.open_url('https://www.googleapis.com/youtube/v3/videos?id=' + str(match[0]) +'&key=AIzaSyCeh7CwOCb-wJQoPDgDX1faEiXntqYfIIA&part=snippet,contentDetails')
	title = re.compile('"title": "(.+?)",').findall(data)[0]
	# Normalise the title to ASCII (XML character refs for non-ASCII chars).
	try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
	except: title = title.encode('ascii','xmlcharrefreplace')
	# Parse the ISO-8601 duration ("PT#M#S", "PT#S" or "PT#M") into seconds;
	# stays '' when none of the patterns match (original behaviour).
	duration = ''
	dur = re.compile('"duration": "PT(.+?)M(.+?)S"').findall(data)
	if dur: duration = float(dur[0][0])*60+float(dur[0][1])
	else:
		dur = re.compile('"duration": "PT(.+?)S"').findall(data)
		if dur: duration = dur[0][0]
		else:
			dur = re.compile('"duration": "PT(.+?)M"').findall(data)
			if dur: duration = float(dur[0][0])*60
	thumbnail = re.compile('"high": {\s+"url": "(.+?)",').findall(data)[0]
	# BUG FIX: the original built the identical JSON string twice (once for
	# jsontext, once inline for json.loads with a conflicting latin-1
	# encoding) and broke on titles containing '"'; build it once with
	# json.dumps and parse that.
	jsontext = json.dumps({"prettyname": prettyname,
			"url": "plugin://plugin.video.youtube/play/?video_id="+str(match[0]),
			"title": title, "duration": str(duration),
			"thumbnail": thumbnail})
	jsonloaded = json.loads(jsontext)
	if getSetting("cachesite") == 'true': basic.writefile(videocache,'w',jsontext)
	return jsontext, jsonloaded
Esempio n. 4
0
def videolog_resolver(url,prettyname,cachePath):
	"""Resolve a videolog.tv embed into a (jsontext, jsonloaded) tuple.

	Assumes `url` is a sequence whose first element contains the
	'id_video=...' query fragment -- TODO confirm against callers.
	Returns None on any failure (the error is printed, not raised).
	"""
	videoID = ''  # pre-set so the error handler can always reference it
	try:
		videoID = re.compile('id_video=(.+?)&amp').findall(url[0])[0]
		videocache = os.path.join(cachePath,str(videoID))
		if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			return jsonline, json.loads(jsonline)
		content = abrir_url("http://videolog.tv/"+videoID)
		# BUG FIX: the original concatenated the *builtin* `id` function into
		# the pattern and the final URL (TypeError at runtime, silently
		# swallowed by the outer except) -- the video identifier is videoID.
		match = re.compile('<meta property="og:image" content="http://videos.videolog.tv/(.+?)/(.+?)/g_'+videoID+'_\d+').findall(content)
		image = re.compile('<meta property="og:image" content="(.+?)">').findall(content)
		title = basic.cleanTitle(re.compile('<meta property="og:title" content="(.+?)">').findall(content)[0])
		# Normalise the title to ASCII (XML char refs for non-ASCII chars).
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: pass
		# BUG FIX: findall with two groups returns a list of tuples, so the
		# two path parts are match[0][0] and match[0][1] (the original
		# wrongly indexed match[0] and match[1]).
		url = 'http://videos.videolog.tv/'+match[0][0]+'/'+match[0][1]+'/'+videoID+'.mp4'
		# Build the JSON safely; concatenation broke on '"' in titles.
		jsontext = json.dumps({"prettyname": prettyname, "url": url,
				"title": title, "duration": "60", "thumbnail": image[0]})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		print('##ERROR-funvideos:videolog_resolver: '+str(videoID)+' '+str(e))
Esempio n. 5
0
def grab(url, prettyname, cachePath, cacheE):
    list = []
    try:
        page = basic.open_url(url)
        j = json.loads(page)
        for vid in j['videos']['video']:
            ids = vid['id']
            videocache = os.path.join(cachePath, str(ids))
            if cacheE == 'true' and os.path.isfile(videocache):
                jsonline = basic.readfiletoJSON(videocache)
                jsonloaded = json.loads(jsonline, encoding="utf-8")
            else:
                title = basic.cleanTitle(vid['title'])
                title2 = ''
                try:
                    title2 = title.decode('utf8').encode(
                        'ascii', 'xmlcharrefreplace')
                except:
                    pass
                if title2 <> '': title = title2
                decomp = re.compile('(\d+):(\d+)',
                                    re.DOTALL).findall(vid['length'])
                duration = int(decomp[0][0]) * 60 + int(decomp[0][1])
                thumb = 'http://videos.snotr.com/' + str(ids) + '-large.jpg'
                finalUrl = 'http://videos.snotr.com/' + str(ids) + '.mp4'
                jsontext = '{"prettyname":"' + prettyname + '","url":"' + finalUrl + '","title":"' + title + '","duration":"' + str(
                    duration) + '","thumbnail":"' + thumb + '"}'
                jsonloaded = json.loads(jsontext, encoding="utf-8")
                if cacheE == 'true' and not os.path.isfile(videocache):
                    basic.writefile(videocache, 'w', jsontext.encode('utf8'))
            list.append(jsonloaded)
        return list
    except BaseException as e:
        print '##ERROR-funvideos:Snotr_resolver: ' + url + ' ' + str(e)
def vimeo_resolver(url,prettyname,cachePath):
	"""Resolve a Vimeo embed URL into a (jsontext, jsonloaded) tuple.

	Reads a per-video cache file first (when the "cachesites" setting is
	enabled); otherwise queries the Vimeo player config endpoint.  Returns
	None implicitly when the URL does not match or resolution fails.
	"""
	# The id-extraction pattern depends on whether a query string follows.
	if url.find('?') > -1: match = re.compile('vimeo.com/video/(.+?)\?').findall(url)
	else: match = re.compile('vimeo.com/video/(.*)').findall(url)
	if match:
		videocache = os.path.join(cachePath,str(match[0]))
		if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			jsonloaded = json.loads(jsonline, encoding="utf-8")
			return jsonline,jsonloaded
		else:	
			try:
				data=basic.open_url('http://player.vimeo.com/video/'+str(match[0])+'/config?type=moogaloop&referrer=&player_url=player.vimeo.com&v=1.0.0&cdn_url=http://a.vimeocdn.com')
				parameters = json.loads(data)
				title = ''
				duration = ''
				thumbnail = ''
				title = parameters['video']['title']
				title2 = ''	
				# Re-encode the title to plain ASCII (non-ASCII chars become XML
				# character references); keep the original title on failure.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2				
				duration = parameters['video']['duration']
				thumbnail = parameters['video']['thumbs']['640']
				# Prefer the HD stream, falling back to SD when unavailable.
				try: url = parameters['request']['files']['h264']['hd']['url']
				except: url = parameters['request']['files']['h264']['sd']['url']
				# NOTE(review): JSON assembled by string concatenation -- a title
				# containing '"' would make the json.loads below fail.
				jsontext = '{"prettyname":"'+prettyname+'","url":"' + url +'","title":"'+title.encode('ascii','xmlcharrefreplace')+'","duration":"'+str(duration)+'","thumbnail":"'+thumbnail+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext.encode('utf8'))
				return jsontext,jsonloaded
			except BaseException as e:
				# Best-effort: log the failure and return None.
				print '##ERROR-funvideos:vimeo_resolver: '+str(match[0])+' '+str(e)
				pass
def videolog_resolver(url,prettyname,cachePath):
	"""Resolve a videolog.tv embed into a (jsontext, jsonloaded) tuple.

	Assumes `url` is a sequence whose first element contains the
	'id_video=...' query fragment -- TODO confirm against callers.
	Returns None on any failure (the error is printed, not raised).
	"""
	videoID = ''  # pre-set so the error handler can always reference it
	try:
		videoID = re.compile('id_video=(.+?)&amp').findall(url[0])[0]
		videocache = os.path.join(cachePath,str(videoID))
		if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			return jsonline, json.loads(jsonline)
		content = abrir_url("http://videolog.tv/"+videoID)
		# BUG FIX: the original concatenated the *builtin* `id` function into
		# the pattern and the final URL (TypeError at runtime, silently
		# swallowed by the outer except) -- the video identifier is videoID.
		match = re.compile('<meta property="og:image" content="http://videos.videolog.tv/(.+?)/(.+?)/g_'+videoID+'_\d+').findall(content)
		image = re.compile('<meta property="og:image" content="(.+?)">').findall(content)
		title = basic.cleanTitle(re.compile('<meta property="og:title" content="(.+?)">').findall(content)[0])
		# Normalise the title to ASCII (XML char refs for non-ASCII chars).
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: pass
		# BUG FIX: findall with two groups returns a list of tuples, so the
		# two path parts are match[0][0] and match[0][1] (the original
		# wrongly indexed match[0] and match[1]).
		url = 'http://videos.videolog.tv/'+match[0][0]+'/'+match[0][1]+'/'+videoID+'.mp4'
		# Build the JSON safely; concatenation broke on '"' in titles.
		jsontext = json.dumps({"prettyname": prettyname, "url": url,
				"title": title, "duration": "60", "thumbnail": image[0]})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		print('##ERROR-funvideos:videolog_resolver: '+str(videoID)+' '+str(e))
def youtube_resolver(url,prettyname,cachePath):
	"""Resolve a YouTube embed URL via the legacy GData v2 API.

	NOTE(review): gdata.youtube.com has been retired by Google, so the
	network call will fail and only print an error -- consider switching to
	the Data API v3 variant defined elsewhere in this file.
	Returns (jsontext, jsonloaded) on success, None otherwise.
	"""
	match = re.compile('.*?youtube.com/embed/(.+?)\?').findall(url)
	if not match: match = re.compile('.*?youtube.com/embed/(.*)').findall(url)
	if not match: return
	videocache = os.path.join(cachePath,str(match[0]))
	if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
		# Cache hit: return the stored JSON line verbatim plus its parsed form.
		jsonline = basic.readfiletoJSON(videocache)
		return jsonline, json.loads(jsonline)
	try:
		data = basic.open_url('https://gdata.youtube.com/feeds/api/videos/' + str(match[0]) +'?v2&alt=json')
		parameters = json.loads(data)
		title = basic.cleanTitle(parameters['entry']['title']['$t'])
		# Normalise the title to ASCII (XML char refs for non-ASCII chars).
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: title = title.encode('ascii','xmlcharrefreplace')
		print(title)  # debug trace kept from the original
		duration = parameters['entry']['media$group']['yt$duration']['seconds']
		thumbnail = parameters['entry']['media$group']['media$thumbnail'][0]['url']
		# BUG FIX: the original built the same JSON string twice (the second
		# copy parsed with a conflicting latin-1 encoding) and broke on
		# titles containing '"'; build once with json.dumps and parse that.
		jsontext = json.dumps({"prettyname": prettyname,
				"url": "plugin://plugin.video.youtube/?action=play_video&videoid="+str(match[0]),
				"title": title, "duration": str(duration),
				"thumbnail": thumbnail})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		print('##ERROR-funvideos:youtube_resolver: '+str(match[0])+' '+str(e))
def daily_resolver(url,prettyname,cachePath):
	"""Resolve a Dailymotion embed URL into a (jsontext, jsonloaded) tuple.

	Uses the public Dailymotion REST API for title/duration/thumbnail and
	hands playback to the plugin.video.dailymotion_com add-on.  Returns
	None when the URL does not match or resolution fails (error printed).
	"""
	# The id-extraction pattern depends on whether a query string follows.
	if url.find('?') > -1: match = re.compile('/embed/video/(.+?)\?').findall(url)
	else: match = re.compile('/embed/video/(.*)').findall(url)
	if not match: return
	videocache = os.path.join(cachePath,str(match[0]))
	if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
		# Cache hit: return the stored JSON line verbatim plus its parsed form.
		jsonline = basic.readfiletoJSON(videocache)
		return jsonline, json.loads(jsonline)
	try:
		data = basic.open_url('https://api.dailymotion.com/video/' + str(match[0]) +'?fields=title,duration,thumbnail_url,description')
		parameters = json.loads(data)
		title = basic.cleanTitle(parameters['title'])
		# Normalise the title to ASCII (XML char refs for non-ASCII chars);
		# keep it unchanged when the conversion fails.
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: pass
		duration = parameters['duration']
		thumbnail = parameters['thumbnail_url']
		# BUG FIX: json.dumps instead of string concatenation -- titles
		# containing '"' or '\' previously produced invalid JSON.
		jsontext = json.dumps({"prettyname": prettyname,
				"url": "plugin://plugin.video.dailymotion_com/?mode=playVideo&url="+str(match[0]),
				"title": title, "duration": str(duration),
				"thumbnail": thumbnail})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		print('##ERROR-funvideos:daily_resolver: '+str(match[0])+' '+str(e))
Esempio n. 10
0
def daily_resolver(url,prettyname,cachePath):
	"""Resolve a Dailymotion embed URL into a (jsontext, jsonloaded) tuple.

	Reads a per-video cache file first (when the "cachesites" setting is
	enabled); otherwise queries the Dailymotion REST API and hands playback
	to the plugin.video.dailymotion_com add-on.  Returns None when the URL
	does not match or when resolution fails (the error is printed).
	"""
	# The id-extraction pattern depends on whether a query string follows.
	if url.find('?') > -1: match = re.compile('/embed/video/(.+?)\?').findall(url)
	else: match = re.compile('/embed/video/(.*)').findall(url)
	if match:
		videocache = os.path.join(cachePath,str(match[0]))
		if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			jsonloaded = json.loads(jsonline, encoding="utf-8")
			return jsonline,jsonloaded
		else:
			try:
				data=basic.open_url('https://api.dailymotion.com/video/' + str(match[0]) +'?fields=title,duration,thumbnail_url,description')
				parameters = json.loads(data)
				title = ''
				duration = ''
				thumbnail = ''
				title = basic.cleanTitle(parameters['title'])
				title2 = ''	
				# Re-encode the title to plain ASCII (non-ASCII chars become XML
				# character references); keep the original title on failure.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2				
				duration = parameters['duration']
				thumbnail = parameters['thumbnail_url']
				# NOTE(review): JSON assembled by string concatenation -- a title
				# containing '"' would make the json.loads below fail.
				jsontext = '{"prettyname":"'+prettyname+'","url":"plugin://plugin.video.dailymotion_com/?mode=playVideo&url=' + str(match[0])+'","title":"'+title.encode('ascii','xmlcharrefreplace')+'","duration":"'+str(duration)+'","thumbnail":"'+thumbnail+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext.encode('utf8'))				
				return jsontext,jsonloaded
			except BaseException as e:
				# Best-effort: log the failure and return None.
				print '##ERROR-funvideos:daily_resolver: '+str(match[0])+' '+str(e)
				pass
def sapo_resolver(url,prettyname,cachePath):
	"""Resolve a SAPO Videos player URL into a (jsontext, jsonloaded) tuple.

	NOTE(review): unlike the other resolvers, the cache is *read* regardless
	of the "cachesites" setting (only the write is gated) -- confirm whether
	that asymmetry is intended.
	Returns None when the URL does not match or resolution fails.
	"""
	match = re.compile('file=http://.+?/(.+?)/mov/').findall(url)
	if not match: return
	videocache = os.path.join(cachePath,str(match[0]))
	if os.path.isfile(videocache):
		# Cache hit: return the stored JSON line verbatim plus its parsed form.
		jsonline = basic.readfiletoJSON(videocache)
		return jsonline, json.loads(jsonline)
	try:
		sapoAPI = basic.open_url('http://rd3.videos.sapo.pt/'+match[0]+'/rss2')
		# <sapo:time> is hh:mm:ss; convert to total seconds.  BUG FIX: when
		# the tag was missing the original left `duration` as an empty list,
		# embedding the string "[]" in the JSON; default to '' instead.
		duration = ''
		for horas,minutos,segundos in re.compile('<sapo:time>(\d+):(\d+):(\d+)</sapo:time').findall(sapoAPI):
			duration = (int(segundos))+(int(minutos)*60)+(int(horas)*3600)
		thumbnail = re.compile('img src="(.+?)"').findall(sapoAPI)
		# The feed's first <title> is the channel; the video title is second.
		title = re.compile('<title>(.+?)</title>').findall(sapoAPI)[1]
		# Normalise the title to ASCII (XML char refs for non-ASCII chars).
		try: title = title.decode('utf8').encode('ascii','xmlcharrefreplace')
		except: pass
		urlfinal = re.compile('<sapo:videoFile>(.+?)</sapo:videoFile>').findall(sapoAPI)
		# BUG FIX: json.dumps instead of string concatenation -- titles with
		# '"' previously produced invalid JSON.
		jsontext = json.dumps({"prettyname": prettyname, "url": urlfinal[0],
				"title": title, "duration": str(duration),
				"thumbnail": thumbnail[0]})
		jsonloaded = json.loads(jsontext)
		if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
		return jsontext, jsonloaded
	except BaseException as e:
		print('##ERROR-funvideos:sapo_resolver: '+url+' '+str(e))
Esempio n. 12
0
def grab(url,prettyname,cachePath,cacheE):
	list = []
	try:
		page = basic.open_url(url)
		j = json.loads(page)
		for vid in j['videos']['video']:
			ids = vid['id']
			videocache = os.path.join(cachePath,str(ids))
			if cacheE == 'true' and os.path.isfile(videocache):
				jsonline = basic.readfiletoJSON(videocache)
				jsonloaded = json.loads(jsonline, encoding="utf-8")			
			else:
				title=basic.cleanTitle(vid['title'])
				title2 = ''				
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2	
				decomp = re.compile('(\d+):(\d+)', re.DOTALL).findall(vid['length'])
				duration = int(decomp[0][0])*60+int(decomp[0][1])
				thumb = 'http://videos.snotr.com/'+str(ids)+'-large.jpg'
				finalUrl='http://videos.snotr.com/'+str(ids)+'.mp4'
				jsontext = '{"prettyname":"'+prettyname+'","url":"'+finalUrl+'","title":"'+title+'","duration":"'+str(duration)+'","thumbnail":"'+thumb+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if cacheE == 'true' and not os.path.isfile(videocache): basic.writefile(videocache,'w',jsontext.encode('utf8'))
			list.append(jsonloaded)
		return list
	except BaseException as e: print '##ERROR-funvideos:Snotr_resolver: '+url+' '+str(e)
Esempio n. 13
0
def youtube_resolver(url,prettyname,cachePath):
	"""Resolve a YouTube embed URL via the legacy GData v2 API.

	NOTE(review): gdata.youtube.com has been retired by Google -- this
	network call is expected to fail, printing an error and returning None.
	Returns (jsontext, jsonloaded) on success, None otherwise.
	"""
	match = re.compile('.*?youtube.com/embed/(.+?)\?').findall(url)
	if not match: match = re.compile('.*?youtube.com/embed/(.*)').findall(url)
	if match:
		videocache = os.path.join(cachePath,str(match[0]))
		if getSetting("cachesites") == 'true' and os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			jsonloaded = json.loads(jsonline, encoding="utf-8")
			return jsonline,jsonloaded
		else:
			try:
				data=basic.open_url('https://gdata.youtube.com/feeds/api/videos/' + str(match[0]) +'?v2&alt=json')
				parameters = json.loads(data)
				title = ''
				duration = ''
				thumbnail = ''
				title = basic.cleanTitle(parameters['entry']['title']['$t'])
				title2 = ''	
				# Re-encode the title to plain ASCII (XML char refs for
				# non-ASCII); on failure encode the title directly instead.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: title2 = title.encode('ascii','xmlcharrefreplace')
				if title2 <> '': title = title2
				# Debug trace.
				print title
				duration = parameters['entry']['media$group']['yt$duration']['seconds']
				thumbnail = parameters['entry']['media$group']['media$thumbnail'][0]['url']
				# NOTE(review): the identical JSON string is built twice below,
				# the second time parsed with latin-1 (every other resolver uses
				# utf-8); also breaks on titles containing '"'.
				jsontext= '{"prettyname":"'+prettyname+'","url":"plugin://plugin.video.youtube/?action=play_video&videoid=' + str(match[0])+'","title":"'+title+'","duration":"'+str(duration)+'","thumbnail":"'+thumbnail+'"}'
				jsonloaded = json.loads('{"prettyname":"'+prettyname+'","url":"plugin://plugin.video.youtube/?action=play_video&videoid=' + str(match[0])+'","title":"'+title+'","duration":"'+str(duration)+'","thumbnail":"'+thumbnail+'"}', encoding="latin-1")
				if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext)
				return jsontext,jsonloaded
			except BaseException as e:
				# Best-effort: log the failure and return None.
				print '##ERROR-funvideos:youtube_resolver: '+str(match[0])+' '+str(e)
				pass
Esempio n. 14
0
def sapo_resolver(url,prettyname,cachePath):
	"""Resolve a SAPO Videos player URL into a (jsontext, jsonloaded) tuple.

	NOTE(review): the cache is *read* regardless of the "cachesites"
	setting (only the write is gated) -- confirm that asymmetry is intended.
	Returns None when the URL does not match or resolution fails.
	"""
	match = re.compile('file=http://.+?/(.+?)/mov/').findall(url)
	if match: 
		videocache = os.path.join(cachePath,str(match[0]))
		if os.path.isfile(videocache):
			# Cache hit: return the stored JSON line verbatim plus its parsed form.
			jsonline = basic.readfiletoJSON(videocache)
			jsonloaded = json.loads(jsonline, encoding="utf-8")
			return jsonline,jsonloaded
		else:
			try:
				sapoAPI = basic.open_url('http://rd3.videos.sapo.pt/'+match[0]+'/rss2')	
				title = ''
				duration = ''
				thumbnail = ''	
				urlfinal = 	''
				# <sapo:time> is hh:mm:ss; the loop converts it to seconds.
				# NOTE(review): if the tag is absent, `duration` stays the empty
				# findall list and "[]" ends up in the JSON string below.
				duration = re.compile('<sapo:time>(\d+):(\d+):(\d+)</sapo:time').findall(sapoAPI)
				for horas,minutos,segundos in duration: duration = (int(segundos))+(int(minutos)*60)+(int(horas)*3600)
				thumbnail = re.compile('img src="(.+?)"').findall(sapoAPI)
				title = re.compile('<title>(.+?)</title>').findall(sapoAPI)
				title2 = ''
				# The feed's first <title> is the channel; the video is second.
				title = title[1]
				# Re-encode the title to plain ASCII (XML char refs for
				# non-ASCII); keep the original title on failure.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2			
				urlfinal = re.compile('<sapo:videoFile>(.+?)</sapo:videoFile>').findall(sapoAPI)
				jsontext = '{"prettyname":"'+prettyname+'","url":"'+urlfinal[0]+'","title":"'+title+'","duration":"'+str(duration)+'","thumbnail":"'+thumbnail[0]+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if getSetting("cachesites") == 'true': basic.writefile(videocache,'w',jsontext.encode('utf8'))
				return jsontext,jsonloaded
			except BaseException as e:
				# Best-effort: log the failure and return None.
				print '##ERROR-funvideos:sapo_resolver: '+url+' '+str(e)
				pass
Esempio n. 15
0
def grab(url, prettyname, id, cachePath, site9gagfile, cacheE):
    """Scrape one page of 9gag.tv posts into a list of parsed JSON dicts.

    `id` is the page number as a string; `site9gagfile` stores '::N::::token::'
    pagination markers mapping a page number to the next-page token.  The
    marker for page N+1 is appended when the last post of this page is seen.
    NOTE(review): `id` shadows the builtin -- rename with care, it is part of
    the public signature.
    """
    jsondata = []
    list = []
    # Look up the pagination token recorded for this page number.
    line = basic.readoneline(site9gagfile)
    idpage = re.findall('::' + id + '::::(.+?)::', line, re.DOTALL)
    if not idpage: page = basic.open_url('http://9gag.tv')
    else: page = basic.open_url(url + idpage[0], '9gag')
    # The post list is embedded in the page as a JS array literal.
    jsondata = re.findall('   postGridPrefetchPosts = (.+?)];', page,
                          re.DOTALL)
    j = json.loads(jsondata[0] + ']')
    size = len(j)
    e = 0
    for data in j:
        e = e + 1
        if e == size:
            # Last post on the page: record the next-page token (once).
            line = basic.readoneline(site9gagfile)
            if not '<' + id + '>' in line:
                basic.writefile(
                    site9gagfile, "a", '::' + str(int(id) + 1) + '::::' +
                    data['prevPostId'] + '::')
        # Parse the ISO-8601 duration; default to 60s on any failure.
        try:
            duration = 0
            time = re.findall('PT(\d+)M(\d+)S', data['videoDuration'],
                              re.DOTALL)
            if time:
                for min, sec in time:
                    duration = int(min) * 60 + int(sec)
            else:
                time = re.findall('PT(\d+)M', data['videoDuration'], re.DOTALL)
                if time: duration = int(time[0]) * 60
                else:
                    time = re.findall('PT(\d+)S', data['videoDuration'],
                                      re.DOTALL)
                    if time: duration = time[0]
        except:
            duration = 60
            pass
        title = basic.cleanTitle(data['ogTitle'])
        videocache = os.path.join(cachePath, data['videoExternalId'])
        # NOTE(review): JSON assembled by string concatenation -- a title
        # containing '"' would make the json.loads below fail.
        jsontext = '{"prettyname":"' + prettyname + '","url":"plugin://plugin.video.youtube/?action=play_video&videoid=' + data[
            'videoExternalId'] + '","title":"' + title.encode(
                'ascii', 'xmlcharrefreplace'
            ) + '","duration":"' + str(
                duration) + '","thumbnail":"' + data['thumbnail_360w'] + '"}'
        jsonloaded = json.loads(jsontext, encoding="utf-8")
        if cacheE == 'true' and not os.path.isfile(videocache):
            basic.writefile(videocache, 'w', jsontext.encode('utf8'))
        list.append(jsonloaded)
    return list
Esempio n. 16
0
def grab(url,prettyname,cachePath,cacheE):
	"""Scrape Break.com listings into a list of parsed JSON dicts.

	For each data-content-id found on the page, resolves the embed page to
	either a YouTube plugin URL (when a youtubeId is present) or the highest
	resolution direct stream.  Per-video cache files are used when
	cacheE == 'true'.  Prints an error and returns None on failure.
	"""
	list = []
	try:
		page = basic.open_url(url)
		page = page.replace("\\","")
		ids = re.findall('data-content-id="(\d+)"', page, re.DOTALL)
		for videoid in ids:
			videocache = os.path.join(cachePath,str(videoid))
			if cacheE == 'true' and os.path.isfile(videocache):
				# Cache hit: reuse the stored JSON line.
				jsonline = basic.readfiletoJSON(videocache)
				jsonloaded = json.loads(jsonline, encoding="utf-8")			
			else:
				content = basic.open_url("http://www.break.com/embed/"+videoid)
				matchAuth=re.compile('"AuthToken": "(.+?)"', re.DOTALL).findall(content)
				matchURL=re.compile('"uri": "(.+?)".+?"height": (.+?),', re.DOTALL).findall(content)
				matchYT=re.compile('"youtubeId": "(.*?)"', re.DOTALL).findall(content)
				title=re.compile('"contentName": "(.+?)",', re.DOTALL).findall(content)
				title = basic.cleanTitle(title[0])
				title2 = ''				
				# Re-encode the title to plain ASCII (XML char refs for
				# non-ASCII); keep the original title on failure.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2		
				duration=re.compile('"videoLengthInSeconds": "(\d+)",', re.DOTALL).findall(content)	
				thumb = re.compile('"thumbUri": "(.+?)",', re.DOTALL).findall(content)				
				finalUrl=""
				if matchYT and matchYT[0]!="":
					# YouTube-hosted: also cache under the YouTube id
					# (videocache2) in addition to the Break id below.
					finalUrl = "plugin://plugin.video.youtube/?action=play_video&videoid=" + matchYT[0]
					videocache2 = os.path.join(cachePath,str(matchYT[0]))	
					if cacheE == 'true' and not os.path.isfile(videocache): 
						jsontext = '{"prettyname":"'+prettyname+'","url":"'+finalUrl+'","title":"'+title+'","duration":"'+str(duration[0])+'","thumbnail":"'+thumb[0]+'"}'
						jsonloaded = json.loads(jsontext, encoding="utf-8")			
						basic.writefile(videocache2,'w',jsontext.encode('utf8'))
				else:
					# Direct stream: pick the highest-resolution variant.
					# NOTE(review): the loop variable `url` shadows the
					# function parameter from here on.
					max=0
					for url, height in matchURL:
						height=int(height)
						if height>max: 
							finalUrl=url.replace(".wmv",".flv")+"?"+matchAuth[0]
							max=height
				jsontext = '{"prettyname":"'+prettyname+'","url":"'+finalUrl+'","title":"'+title+'","duration":"'+str(duration[0])+'","thumbnail":"'+thumb[0]+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if cacheE == 'true' and not os.path.isfile(videocache): basic.writefile(videocache,'w',jsontext.encode('utf8'))
			list.append(jsonloaded)
		return list
	except BaseException as e:
		# Best-effort: log the failure and return None.
		print '##ERROR-funvideos:Break_resolver: '+url+' '+str(e)
Esempio n. 17
0
def grab(url,prettyname,cachePath,cacheE):
	"""Scrape Break.com listings into a list of parsed JSON dicts.

	Variant of the Break resolver that uses the newer
	plugin.video.youtube 'play/?video_id=' URL form for YouTube-hosted
	videos.  Per-video cache files are used when cacheE == 'true'.
	Prints an error and returns None on failure.
	"""
	list = []
	try:
		page = basic.open_url(url)
		page = page.replace("\\","")
		ids = re.findall('data-content-id="(\d+)"', page, re.DOTALL)
		for videoid in ids:
			videocache = os.path.join(cachePath,str(videoid))
			if cacheE == 'true' and os.path.isfile(videocache):
				# Cache hit: reuse the stored JSON line.
				jsonline = basic.readfiletoJSON(videocache)
				jsonloaded = json.loads(jsonline, encoding="utf-8")			
			else:
				content = basic.open_url("http://www.break.com/embed/"+videoid)
				matchAuth=re.compile('"AuthToken": "(.+?)"', re.DOTALL).findall(content)
				matchURL=re.compile('"uri": "(.+?)".+?"height": (.+?),', re.DOTALL).findall(content)
				matchYT=re.compile('"youtubeId": "(.*?)"', re.DOTALL).findall(content)
				title=re.compile('"contentName": "(.+?)",', re.DOTALL).findall(content)
				title = basic.cleanTitle(title[0])
				title2 = ''				
				# Re-encode the title to plain ASCII (XML char refs for
				# non-ASCII); keep the original title on failure.
				try: title2 = title.decode('utf8').encode('ascii','xmlcharrefreplace')
				except: pass
				if title2 <> '': title = title2		
				duration=re.compile('"videoLengthInSeconds": "(\d+)",', re.DOTALL).findall(content)	
				thumb = re.compile('"thumbUri": "(.+?)",', re.DOTALL).findall(content)				
				finalUrl=""
				if matchYT and matchYT[0]!="":
					# YouTube-hosted: also cache under the YouTube id
					# (videocache2) in addition to the Break id below.
					finalUrl = "plugin://plugin.video.youtube/play/?video_id=" + matchYT[0]
					videocache2 = os.path.join(cachePath,str(matchYT[0]))	
					if cacheE == 'true' and not os.path.isfile(videocache): 
						jsontext = '{"prettyname":"'+prettyname+'","url":"'+finalUrl+'","title":"'+title+'","duration":"'+str(duration[0])+'","thumbnail":"'+thumb[0]+'"}'
						jsonloaded = json.loads(jsontext, encoding="utf-8")			
						basic.writefile(videocache2,'w',jsontext.encode('utf8'))
				else:
					# Direct stream: pick the highest-resolution variant.
					# NOTE(review): the loop variable `url` shadows the
					# function parameter from here on.
					max=0
					for url, height in matchURL:
						height=int(height)
						if height>max: 
							finalUrl=url.replace(".wmv",".flv")+"?"+matchAuth[0]
							max=height
				jsontext = '{"prettyname":"'+prettyname+'","url":"'+finalUrl+'","title":"'+title+'","duration":"'+str(duration[0])+'","thumbnail":"'+thumb[0]+'"}'
				jsonloaded = json.loads(jsontext, encoding="utf-8")
				if cacheE == 'true' and not os.path.isfile(videocache): basic.writefile(videocache,'w',jsontext.encode('utf8'))
			list.append(jsonloaded)
		return list
	except BaseException as e:
		# Best-effort: log the failure and return None.
		print '##ERROR-funvideos:Break_resolver: '+url+' '+str(e)
def grab(url,prettyname,id,cachePath,site9gagfile,cacheE):
	"""Scrape one page of 9gag.tv posts into a list of parsed JSON dicts.

	`id` is the page number as a string; `site9gagfile` stores '::N::::token::'
	pagination markers mapping a page number to the next-page token.  The
	marker for page N+1 is appended when the last post of this page is seen.
	NOTE(review): `id` shadows the builtin -- it is part of the public
	signature, so it is left as-is.
	"""
	jsondata = []
	list = []
	# Look up the pagination token recorded for this page number.
	line = basic.readoneline(site9gagfile)
	idpage = re.findall('::'+id+'::::(.+?)::', line, re.DOTALL)
	if not idpage: page = basic.open_url('http://9gag.tv')
	else: page = basic.open_url(url+idpage[0],'9gag')
	# The post list is embedded in the page as a JS array literal.
	jsondata = re.findall('   postGridPrefetchPosts = (.+?)];', page, re.DOTALL)
	j = json.loads(jsondata[0]+']')
	size = len(j)
	e=0
	for data in j:
		e = e + 1
		if e == size:
			# Last post on the page: record the next-page token (once).
			line = basic.readoneline(site9gagfile)
			if not '<'+id+'>' in line: basic.writefile(site9gagfile,"a",'::'+str(int(id)+1)+'::::'+data['prevPostId']+'::') 
		# Parse the ISO-8601 duration; default to 60s on any failure.
		try:
			duration = 0
			time = re.findall('PT(\d+)M(\d+)S', data['videoDuration'], re.DOTALL)
			if time:
				for min,sec in time: duration = int(min)*60+int(sec)
			else:
				time = re.findall('PT(\d+)M', data['videoDuration'], re.DOTALL)
				if time: duration = int(time[0])*60
				else:
					time = re.findall('PT(\d+)S', data['videoDuration'], re.DOTALL)
					if time: duration = time[0]
		except: 
			duration = 60
			pass
		title = basic.cleanTitle(data['ogTitle'])	
		videocache = os.path.join(cachePath,data['videoExternalId'])
		# NOTE(review): JSON assembled by string concatenation -- a title
		# containing '"' would make the json.loads below fail.
		jsontext = '{"prettyname":"'+prettyname+'","url":"plugin://plugin.video.youtube/?action=play_video&videoid=' +data['videoExternalId']+'","title":"'+title.encode('ascii','xmlcharrefreplace')+'","duration":"'+str(duration)+'","thumbnail":"'+data['thumbnail_360w']+'"}'
		jsonloaded = json.loads(jsontext, encoding="utf-8")
		if cacheE == 'true' and not os.path.isfile(videocache): basic.writefile(videocache,'w',jsontext.encode('utf8'))
		list.append(jsonloaded)
	return list 
def grabiframes(mainURL,prettyname,cachePath,results=None,index=None,pageURL=None):
	"""Collect playable video entries from every supported <iframe> on a page.

	mainURL    -- page to fetch and scan for embedded players
	prettyname -- display name stored in each resolved entry
	cachePath  -- directory holding per-page JSON cache files
	results    -- optional shared list used (together with index) by callers
	index      -- when set, append into results and return nothing
	pageURL    -- cache-file name for this page; enables read/write caching
	Returns a list of resolved entries when index is None, otherwise None.
	"""
	collected = []
	if pageURL: pagecache = os.path.join(cachePath,pageURL)
	if pageURL and getSetting("cachesites") == 'true' and os.path.isfile(pagecache):
		# Cache hit: reuse the previously resolved entry for this page.
		jsonline = basic.readfiletoJSON(pagecache)
		jsonloaded = json.loads(jsonline, encoding="utf-8")
		if index: results.append(jsonloaded)
		else: collected.append(jsonloaded)
	else:
		try:
			page = basic.open_url(mainURL)
		except Exception:
			# Unreachable page: scan an empty document instead of crashing.
			page = ' '
		blocker = re.findall('data-videoid="(.+?)"', page, re.DOTALL)
		if blocker:
			# Page exposes bare YouTube ids instead of iframes; synthesize frames.
			# BUGFIX: the original emitted a malformed 'http//' scheme here.
			html = ['<iframe src="http://www.youtube.com/embed/'+videoid+'"</iframe>' for videoid in blocker]
		else:
			html = re.findall('<iframe(.*?)</iframe>', page, re.DOTALL)
		# Ad/tracker domains we never try to resolve.
		blocked = ('ad120m.com','facebook','metaffiliation','banner600','engine.adbooth.com','www.lolx2.com','jetpack.wordpress.com')
		# Supported hosts, checked in priority order (first match wins).
		resolvers = [('youtube',youtube_resolver),('dailymotion',daily_resolver),('vimeo',vimeo_resolver),('sapo',sapo_resolver),('videolog',videolog_resolver)]
		for trunk in html:
			# Frame URL may live in src="..", src='..' or data-src="..".
			iframe = ''
			for pattern in ('src="(.+?)"', "src='(.+?)'", 'data-src="(.+?)"'):
				found = re.compile(pattern).findall(trunk)
				if found:
					iframe = found[0]
					break
			if not iframe:
				print('##ERROR-filmes:frame on server not supported: '+iframe)
				continue
			if any(domain in iframe for domain in blocked):
				continue
			print("##filmes-ondemand: "+iframe)
			try:
				for key, resolver in resolvers:
					if iframe.find(key) > -1:
						# youtube-nocookie embeds resolve through the plain host.
						target = iframe.replace('-nocookie','') if key == 'youtube' else iframe
						textR,resolver_iframe = resolver(target,prettyname,cachePath)
						if resolver_iframe:
							if index: results.append(resolver_iframe)
							else: collected.append(resolver_iframe)
							if pageURL and getSetting("cachesites") == 'true': basic.writefile(pagecache,'w',textR)
						break
			except Exception as e:
				# Narrowed from BaseException so Ctrl-C/SystemExit still propagate.
				print('##ERROR-##filmes-ondemand: '+iframe+' '+str(e))
	if not index: return collected
Esempio n. 20
0
def grabiframes(mainURL,prettyname,cachePath,results=None,index=None,pageURL=None):
	"""Collect playable video entries from every supported <iframe> on a page.

	mainURL    -- page to fetch and scan for embedded players
	prettyname -- display name stored in each resolved entry
	cachePath  -- directory holding per-page JSON cache files
	results    -- optional shared list used (together with index) by callers
	index      -- when set, append into results and return nothing
	pageURL    -- cache-file name for this page; enables read/write caching
	Returns a list of resolved entries when index is None, otherwise None.
	"""
	collected = []
	if pageURL: pagecache = os.path.join(cachePath,pageURL)
	if pageURL and getSetting("cachesites") == 'true' and os.path.isfile(pagecache):
		# Cache hit: reuse the previously resolved entry for this page.
		jsonline = basic.readfiletoJSON(pagecache)
		jsonloaded = json.loads(jsonline, encoding="utf-8")
		if index: results.append(jsonloaded)
		else: collected.append(jsonloaded)
	else:
		try:
			page = basic.open_url(mainURL)
		except Exception:
			# Unreachable page: scan an empty document instead of crashing.
			page = ' '
		blocker = re.findall('data-videoid="(.+?)"', page, re.DOTALL)
		if blocker:
			# Page exposes bare YouTube ids instead of iframes; synthesize frames.
			# BUGFIX: the original emitted a malformed 'http//' scheme here.
			html = ['<iframe src="http://www.youtube.com/embed/'+videoid+'"</iframe>' for videoid in blocker]
		else:
			html = re.findall('<iframe(.*?)</iframe>', page, re.DOTALL)
		# Ad/tracker domains we never try to resolve.
		blocked = ('ad120m.com','facebook','metaffiliation','banner600','engine.adbooth.com','www.lolx2.com','jetpack.wordpress.com')
		# Supported hosts, checked in priority order (first match wins).
		resolvers = [('youtube',youtube_resolver),('dailymotion',daily_resolver),('vimeo',vimeo_resolver),('sapo',sapo_resolver),('videolog',videolog_resolver)]
		for trunk in html:
			# Frame URL may live in src="..", src='..' or data-src="..".
			iframe = ''
			for pattern in ('src="(.+?)"', "src='(.+?)'", 'data-src="(.+?)"'):
				found = re.compile(pattern).findall(trunk)
				if found:
					iframe = found[0]
					break
			if not iframe:
				print('##ERROR-funvideos:frame on server not supported: '+iframe)
				continue
			if any(domain in iframe for domain in blocked):
				continue
			print("##funvideos-grabiframes: "+iframe)
			try:
				for key, resolver in resolvers:
					if iframe.find(key) > -1:
						# youtube-nocookie embeds resolve through the plain host.
						target = iframe.replace('-nocookie','') if key == 'youtube' else iframe
						textR,resolver_iframe = resolver(target,prettyname,cachePath)
						if resolver_iframe:
							if index: results.append(resolver_iframe)
							else: collected.append(resolver_iframe)
							if pageURL and getSetting("cachesites") == 'true': basic.writefile(pagecache,'w',textR)
						break
			except Exception as e:
				# Narrowed from BaseException so Ctrl-C/SystemExit still propagate.
				print('##ERROR-##funvideos-grabiframes: '+iframe+' '+str(e))
	if not index: return collected