def check_tv_player(name, media):
    """Poll the Kodi player once a second and scrobble a TV-show stop.

    While a video is playing, the current position is stashed on the
    monitor; when playback ends, trakt_auth.stop_tv_watch(name, media)
    is called and the loop exits.  Consecutive errors are counted and
    the service ends after MAX_ERRORS failures in a row.

    NOTE(review): this function is defined twice in this file -- the
    later definition wins at import time.
    """
    monitor = Player()
    errors = 0
    while not xbmc.abortRequested:
        try:
            isPlaying = monitor.isPlaying()
            if monitor.isPlayingVideo():
                # remember last playback position for the scrobble call
                monitor._lastPos = monitor.getTime()
                #print monitor._lastPos
            else:
                print "Velocity: Scrobble TV Show End"
                trakt_auth.stop_tv_watch(name, media)
                break
        except Exception as e:
            errors += 1
            if errors >= MAX_ERRORS:
                log_utils.log(
                    'Service: Error (%s) received..(%s/%s)...Ending Service...'
                    % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
                break
            else:
                log_utils.log(
                    'Service: Error (%s) received..(%s/%s)...Continuing Service...'
                    % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
        else:
            # a clean pass resets the consecutive-error counter
            errors = 0
        xbmc.sleep(1000)
def putlocker_tv(name, movie_title):
    """Scrape Putlocker hosters for one TV episode.

    name        -- episode label of the form 'SxxEyy Episode Title'
    movie_title -- show title ending in ' (YYYY)'

    Returns a urlresolver-filtered hoster list, or [] on error.
    NOTE(review): the 'return hosters' inside the loops means only the
    first search result / first parsed episode is ever used.
    """
    try:
        title = movie_title[:-7]        # strip trailing ' (YYYY)'
        movie_year = movie_title[-6:]   # '(YYYY)'
        year = movie_year.replace('(', '').replace(')', '')
        video_type = 'shows'
        show_url = search(video_type, title, year)
        for e in show_url:
            url = e['url']
            # parse season, episode number and episode title from the label
            newseas = re.compile(
                'S(.+?)E(.+?) (?P<name>[A-Za-z\t .]+)').findall(name)
            print newseas
            for sea, epi, epi_title in newseas:
                video = make_vid_params('Episode', title, year, sea, epi,
                                        epi_title, '')
                ep_url = _get_episode_url(url, video, sea, epi)
                hosters = get_sources(ep_url)
                hosters = main_scrape.apply_urlresolver(hosters)
                return hosters
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Putlocker TV',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def ice_films_tv(name, movie_title):
    """Scrape IceFilms hosters for one TV episode.

    name        -- episode label of the form 'SxxEyy Episode Title'
    movie_title -- show title ending in ' (YYYY)'

    Returns a urlresolver-filtered hoster list, or [] on error.
    NOTE(review): near-duplicate of putlocker_tv; the early 'return'
    means only the first search result is used.
    """
    try:
        title = movie_title[:-7]        # strip trailing ' (YYYY)'
        movie_year = movie_title[-6:]
        year = movie_year.replace('(', '').replace(')', '')
        video_type = 'shows'
        # print title
        # print year
        show_url = search(video_type, title, year)
        for e in show_url:
            url = e['url']  # TV MAIN URL RETURNED HERE
            newseas = re.compile(
                'S(.+?)E(.+?) (?P<name>[A-Za-z\t .]+)').findall(name)
            for sea, epi, epi_title in newseas:
                # print sea,epi
                # print url
                video = make_vid_params('Episode', title, year, sea, epi,
                                        epi_title, '')
                #print video
                ep_url = _get_episode_url(url, video, sea, epi)
                #print "HERE IS END" +ep_url
                hosters = get_sources(ep_url)
                hosters = main_scrape.apply_urlresolver(hosters)
                return hosters
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Ice Films',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def OPEN_URLTM(url):
    """Fetch *url* with TwoMovies-specific headers using the shared cookie jar.

    Returns the response body as a string; on failure the error is
    logged (and optionally surfaced as a notification) and None is
    returned implicitly.
    """
    try:
        req = urllib2.Request(url)
        req.add_header(
            'User-Agent',
            'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'
        )
        req.add_header('Content-Type', 'application/x-www-form-urlencoded')
        req.add_header('Host', host_url)
        req.add_header('Referer', '')
        req.add_header('Connection', 'keep-alive')
        req.add_header(
            'Accept',
            'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')
        response = urllib2.urlopen(req)
        link = response.read()
        # persist session cookies for subsequent TwoMovies requests
        cj.save(cookie_file, ignore_discard=True)
        response.close()
        return link
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
def tmlinkpage(url, movie_title, thumb, media):
    """Follow a TwoMovies 'full' link page, auto-clicking the
    'Before you start watching' confirmation form when present, then
    hand every extracted source link to main_scrape for playback.
    """
    try:
        if "full" in url:
            link = OPEN_URL(url)
            if 'Before you start watching' in link:
                #print 'Confirmation Button '
                url = url
                # replay the confirmation POST the site expects
                header_dict = {}
                header_dict['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
                header_dict['Connection'] = 'keep-alive'
                header_dict['Content-Type'] = 'application/x-www-form-urlencoded'
                header_dict['Origin'] = host_url
                header_dict['Referer'] = url
                header_dict['User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'
                form_data = {'confirm': 'I understand, Let me watch the movie now!'}
                net.set_cookies(cookiejar)
                conbutton = net.http_POST(url, form_data=form_data, headers=header_dict)
            # re-fetch after any confirmation and flatten whitespace so the
            # (space-free) source-link regex below can match
            link = OPEN_URL(url)
            link = link.replace('\r', '').replace('\n', '').replace('\t', '').replace(' ', '')
            matchurl = re.compile('Sourcelink:<arel="nofollow"onlicktarget="_blank">(.+?)</').findall(link)
            for urls in matchurl:
                urls = str(urls)
                urls = urls.replace('&rel=nofollow', '')
                if media == 'movies':
                    main_scrape.get_link(urls, movie_title, thumb, media)
                else:
                    main_scrape.get_tv_link(urls, movie_title, thumb, media)
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
def _default_get_episode_url(show_url, video, episode_pattern, title_pattern='', airdate_pattern='', data=None, headers=None):
    """Locate an episode link on a show page.

    show_url        -- show page URL (relative or absolute)
    video           -- video descriptor (unused here beyond logging)
    episode_pattern -- regex whose group(1) is the episode URL
    title_pattern, airdate_pattern, data, headers -- accepted for
        interface compatibility; not used by this default implementation.

    Returns the pathified episode URL, or None when the page is empty or
    the pattern does not match (a debug line is logged in that case).
    """
    # Bug fix: an absolute show_url used to be concatenated onto base_url,
    # producing an invalid address (and https URLs were misclassified).
    # Mirror the class-method variant: absolute URLs pass through as-is.
    if not show_url.startswith('http'):
        url = urlparse.urljoin(base_url, show_url)
    else:
        url = show_url
    html = get_url(url)
    if html:
        match = re.search(episode_pattern, html, re.DOTALL)
        if match:
            return _pathify_url(match.group(1))
        else:
            log_utils.log('Skipping as Episode not found: %s' % (url), log_utils.LOGDEBUG)
def tmovies_tv(name, movie_title):
    """Scrape TwoMovies hosters for a TV episode.

    Builds a /watch_episode/<show>/<season>/<episode>/ URL from the
    'SxxEyy' markers in *name* and collects every 'norm vlink' anchor.
    Returns a urlresolver-filtered source list, or [] on error.
    """
    try:
        sources = []
        searchUrl = base_url + 'watch_episode/'
        # if 'House' in movie_title:
        #     movie_title = movie_title.replace('House','DR House')
        movie_name = movie_title[:-6]        # title + trailing space
        movie_name_short = movie_title[:-7]  # bare title
        movie_year = movie_title[-6:]
        movie_year = movie_year.replace('(', '').replace(')', '')
        # site slugs use underscores and drop ':' and '-'
        movie_match = movie_name.replace(" ", "_").replace(":", "").replace("-", "")
        year_movie_match = movie_match + movie_year
        direct_movie_match = movie_match[:-1]
        seasons = re.compile('S(.+?)E(.+?) ').findall(name)
        for sea, epi in seasons:
            tmurl = searchUrl + direct_movie_match + '/' + sea + '/' + epi + '/'
            link = OPEN_URLTM(tmurl)
            names = dom_parser.parse_dom(link, 'a', {'class': "norm vlink"})
            urls = dom_parser.parse_dom(link, 'a', {'class': "norm vlink"}, ret='href')
            for host, url in zip(names, urls):
                host = host.replace('www.', '')
                #host = tools.get_hostname(host)
                source = {'url': url, 'host': host, 'direct': False}
                sources.append(source)
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def add_to_queue(name,url,thumb,ext,media): try: try: conn.execute('''CREATE TABLE downloads(name text unique, url text, thumb text, ext text, media text)''') except: print "Velocity says: Downloads DB Table Already exists" # Insert a row of data or return already there try: #Quote Replacement name = name.replace("'", "''") media = media.replace("'", "''") #END Quote Replacement conn.execute("INSERT INTO downloads VALUES ('"+name+"','"+url+"','"+thumb+"','"+ext+"','"+media+"')") addon.show_small_popup(title='[COLOR gold]Item Added To Your Queue [/COLOR]', msg=name + ' Was Added To Your Download Queue', delay=int(5000), image=thumb) except Exception as e: addon.show_small_popup(title='[COLOR red]Item Already In Your Queue[/COLOR]', msg=name + ' Is Already In Your Download Queue', delay=int(5000), image=thumb) print 'Error [%s] %s' % (str(e), '') # Save (commit) the changes conn.commit() # We can also close the connection if we are done with it. # Just be sure any changes have been committed or they will be lost. conn.close() except Exception as e: log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) if kodi.get_setting('error_notify') == "true": kodi.notify(header='Downloads',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None)
def search(self, video_type, title, year, season=''):
    """Search the site for *title*/*year* and return result dicts.

    video_type -- 'shows' selects search_section=2, anything else 1
    season     -- accepted for interface compatibility; unused here

    The site requires a hidden per-session 'key' scraped from the home
    page; without it an empty list is returned and a warning logged.
    Each result is {'url', 'title', 'year'} (url pathified, title
    cleansed).
    """
    search_url = urlparse.urljoin(self.base_url, '/index.php?search_keywords=')
    search_url += urllib.quote_plus(title)
    search_url += '&year=' + urllib.quote_plus(str(year))
    if video_type == 'shows':
        search_url += '&search_section=2'
    else:
        search_url += '&search_section=1'
    results = []
    # fetch the home page uncached to pick up a fresh search key
    html = self._http_get(self.base_url, cache_limit=0)
    #kodi.log("HTML is : " + html)
    match = re.search('input type="hidden" name="key" value="([0-9a-f]*)"', html)
    if match:
        key = match.group(1)
        search_url += '&key=' + key
        html = self._http_get(search_url, cache_limit=.25)
        # title and year groups are optional; groups('') defaults them to ''
        pattern = r'class="index_item.+?href="(.+?)" title="Watch (.+?)"?\(?([0-9]{4})?\)?"?>'
        for match in re.finditer(pattern, html):
            url, title, year = match.groups('')
            result = {
                'url': scraper_utils.pathify_url(url),
                'title': scraper_utils.cleanse_title(title),
                'year': year
            }
            results.append(result)
    else:
        log_utils.log('Unable to locate PW search key', log_utils.LOGWARNING)
    return results
def _default_get_episode_url(self, show_url, video, episode_pattern, title_pattern='', airdate_pattern='', data=None, headers=None, method=None):
    """Find an episode link on a show page.

    Fetches *show_url* (resolved against self.base_url when relative)
    and returns the pathified group(1) of *episode_pattern*.  Returns
    None when the page is empty; logs a debug line when the pattern
    does not match.  title_pattern/airdate_pattern are accepted for
    interface compatibility and unused here.
    """
    # absolute URLs pass through untouched
    page_url = show_url if show_url.startswith('http') else urlparse.urljoin(self.base_url, show_url)
    page = self._http_get(page_url, data=data, headers=headers, method=method, cache_limit=2)
    if not page:
        return
    found = re.search(episode_pattern, page, re.DOTALL)
    if found:
        return scraper_utils.pathify_url(found.group(1))
    log_utils.log('Skipping as Episode not found: %s' % (page_url), log_utils.LOGDEBUG)
def get_tv_link(url,movie_title,thumb,media): hmf = urlresolver.HostedMediaFile(url) ########################################## if hmf: url = urlresolver.resolve(url) if not hmf: url = url try: params = {'url':url, 'name':media, 'thumb':thumb} addon.add_video_item(params, {'title':media}, img=thumb) liz=xbmcgui.ListItem(media, iconImage="DefaultFolder.png", thumbnailImage=thumb) xbmc.sleep(1000) liz.setPath(str(url)) xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz) # #xbmc.Player ().play(url, liz, False) # movie_name = movie_title[:-6] # movie_name = '"'+movie_name+'"' # movie_year_full = movie_title[-6:] # movie_year = movie_year_full.replace('(','').replace(')','') # if kodi.get_setting('trakt_oauth_token'): # xbmc.sleep(30000) # print "Velocity: TV Show Scrobble Start" # try: # trakt_auth.start_tv_watch(movie_name,media) # except Exception as e: # log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) # if kodi.get_setting('error_notify') == "true": # kodi.notify(header='Scrobble not loggged', msg='%s %s' % (str(e), ''), duration=5000, sound=None) # xbmc.sleep(30000) # if kodi.get_setting('trakt_oauth_token'): # check_tv_player(movie_name,media) except Exception as e: log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) kodi.notify(header='Try Another Source', msg='Link Removed or Failed', duration=4000, sound=None)
def check_tv_player(name, media):
    """Poll the Kodi player once a second and scrobble a TV-show stop.

    Identical in behavior to the other check_tv_player definition in
    this file (duplicate -- the later definition wins at import time):
    tracks the playback position while a video plays, and on stop calls
    trakt_auth.stop_tv_watch(name, media).  Ends after MAX_ERRORS
    consecutive failures.
    """
    monitor = Player()
    errors = 0
    while not xbmc.abortRequested:
        try:
            isPlaying = monitor.isPlaying()
            if monitor.isPlayingVideo():
                # remember last playback position for the scrobble call
                monitor._lastPos = monitor.getTime()
                #print monitor._lastPos
            else:
                print "Velocity: Scrobble TV Show End"
                trakt_auth.stop_tv_watch(name, media)
                break
        except Exception as e:
            errors += 1
            if errors >= MAX_ERRORS:
                log_utils.log('Service: Error (%s) received..(%s/%s)...Ending Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
                break
            else:
                log_utils.log('Service: Error (%s) received..(%s/%s)...Continuing Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
        else:
            # a clean pass resets the consecutive-error counter
            errors = 0
        xbmc.sleep(1000)
def _set_cookies(base_url, cookies):
    """Merge *cookies* (a name -> value mapping) into the persistent LWP
    cookie jar for the hostname of *base_url*, save the jar, and return it.
    Logs the jar contents before and after when debug is enabled."""
    jar = cookielib.LWPCookieJar(cookie_file)
    try:
        jar.load(ignore_discard=True)
    except:
        # first run: no jar file yet
        pass
    if kodi.get_setting('debug') == 'true':
        log_utils.log('Before Cookies: %s' % (cookies_as_str(jar)), log_utils.LOGDEBUG)
    host = urlparse.urlsplit(base_url).hostname
    for name in cookies:
        jar.set_cookie(cookielib.Cookie(
            0, name, str(cookies[name]), port=None, port_specified=False,
            domain=host, domain_specified=True, domain_initial_dot=False,
            path='/', path_specified=True, secure=False, expires=None,
            discard=False, comment=None, comment_url=None, rest={}))
    jar.save(ignore_discard=True)
    if kodi.get_setting('debug') == 'true':
        log_utils.log('After Cookies: %s' % (cookies_as_str(jar)), log_utils.LOGDEBUG)
    return jar
def ninemovies(name):
    """Scrape 9Movies hosters for a movie.

    name -- movie title ending in ' (YYYY)'

    Returns a urlresolver-filtered hoster list, or [] on error.
    NOTE(review): the 'return' inside the loop means only the first
    search result is ever used.
    """
    try:
        title = name[:-7]        # strip trailing ' (YYYY)'
        movie_year = name[-6:]
        year = movie_year.replace('(', '').replace(')', '')
        video_type = 'movies'
        source = search(video_type, title, year)
        #print source
        for e in source:
            # print e
            url = e['url']
            year = e['year']
            name = e['title']
            # print "SUF URL IS = "+url
            srcurl = base_url + url
            hosters = get_sources(srcurl)
            hosters = main_scrape.apply_urlresolver(hosters)
            return hosters
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Nine Movies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def custom_list_view_tv(trakt_id, media):
    """Render the Trakt 'kids' custom list as a Kodi TV-show directory.

    trakt_id -- incoming id (shadowed per-item below)
    media    -- media type passed to the Trakt list fetch

    Each list entry becomes a directory item with an info/trailer
    context menu; errors are logged and optionally notified.
    """
    try:
        lists = trakt_api.get_special_list('kids', media)  # is actually SLUG ID
        for e in lists:
            infoLabels = trakt_api.process_movie(e)
            infoLabels.update(make_infoLabels(e))
            menu_items = []
            trakt_id = str(infoLabels['trakt_id'])
            trailer = infoLabels['trailer_url']
            year = str(infoLabels['year'])
            name = infoLabels['title'].encode('utf-8')
            thumb = infoLabels['cover_url']
            if thumb is None:
                thumb = ''
            menu_items.append(('[COLOR gold]Show Information[/COLOR]', 'XBMC.Action(Info)'))
            if trailer:
                utube = tools.make_trailer(trailer)
                menu_items.append(('[COLOR gold]Play Trailer[/COLOR]', 'PlayMedia(' + utube + ',xbmcgui.ListItem(title, iconImage=image, thumbnailImage=image))'))
            kodi.addDir(name + ' (' + year + ')', '', 'find_season', thumb, name, 5, trakt_id, 'shows', meta_data=infoLabels, menu_items=menu_items, replace_menu=False)
        kodi.auto_view('tvshows')
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Custom List Error',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
def parse_link(link, item, patterns):
    """Enrich *item* (a dict) with metadata parsed from a link's file name.

    patterns -- regexes with named groups, each containing a '{delim}'
                placeholder that is filled with the separator class.

    The first matching pattern's non-None named groups are merged into
    *item*; then 'format'/'dubbed' flags are derived from item['extra']
    and any 'airdate' is normalized to a datetime.date.  Mutates and
    returns *item*.
    """
    delim = '[._ -]'
    link = urllib.unquote(link)
    file_name = link.split('/')[-1]
    for pattern in patterns:
        pattern = pattern.format(delim=delim)
        match = re.search(pattern, file_name, re.I)
        if match:
            # keep only groups that actually matched
            match = dict((k, v) for k, v in match.groupdict().iteritems() if v is not None)
            item.update(match)
            break
    else:
        # for/else: no pattern matched at all
        log_utils.log('No Regex Match: |%s|%s|' % (item, link), log_utils.LOGDEBUG)
    extra = item['extra'].upper()
    if 'X265' in extra or 'HEVC' in extra:
        item['format'] = 'x265'
    item['dubbed'] = True if 'DUBBED' in extra else False
    if 'airdate' in item and item['airdate']:
        # collapse any run of separators to '-' so strptime can parse it
        pattern = '{delim}+'.format(delim=delim)
        item['airdate'] = re.sub(pattern, '-', item['airdate'])
        item['airdate'] = utils2.to_datetime(item['airdate'], "%Y-%m-%d").date()
    return item
def afdah(name):
    """Scrape Afdah hosters for a movie.

    name -- movie title ending in ' (YYYY)'

    Searches via the theme's ajax endpoint, filters matches by year,
    and returns the sources of the first result ([] on error).
    """
    try:
        title = name[:-7]        # strip trailing ' (YYYY)'
        movie_year = name[-6:]
        year = movie_year.replace('(', '').replace(')', '')
        video_type = 'movies'
        search_url = urlparse.urljoin(base_url, '/wp-content/themes/afdah/ajax-search.php')
        data = {'search': title, 'type': 'title'}
        html = OPEN_URL(search_url, data=data, cache_limit=1)
        pattern = '<li>.*?href="([^"]+)">([^<]+)\s+\((\d{4})\)'
        results = []
        for match in re.finditer(pattern, html, re.DOTALL | re.I):
            url, title, match_year = match.groups('')
            # keep the result when either side lacks a year or they agree
            if not year or not match_year or year == match_year:
                result = {'url': _pathify_url(url), 'title': title, 'year': year}
                results.append(result)
        for e in results:
            url = e['url']
            year = e['year']
            name = e['title']
            # print year
            # print name
            # print url
            # srcurl = base_url+url
            # link = OPEN_URL_REG(srcurl)
            hosters = get_sources(url)
            # print hosters
            return hosters
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Afdah',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def _get_ua():
    """Assemble a randomized browser User-Agent string.

    Picks one template from RAND_UAS and fills it with a random Windows
    version, feature token, and a browser version matched to that
    template.  Logs the result when debug is enabled.
    """
    idx = random.randrange(len(RAND_UAS))
    ua = RAND_UAS[idx].format(win_ver=random.choice(WIN_VERS),
                              feature=random.choice(FEATURES),
                              br_ver=random.choice(BR_VERS[idx]))
    if kodi.get_setting('debug') == "true":
        log_utils.log('Creating New User Agent: %s' % (ua))
    return ua
def to_datetime(dt_str, date_format):
    """Parse *dt_str* according to *date_format* into a datetime.

    Works around a sporadic TypeError from datetime.strptime by falling
    back to time.strptime (which drops fractional seconds, so the two
    paths are only nearly equivalent).  Any other failure is logged and
    the Unix epoch is returned.
    """
    try:
        parsed = datetime.datetime.strptime(dt_str, date_format)
    except (TypeError, ImportError):
        # hacky but effective: rebuild from the time-tuple prefix
        parsed = datetime.datetime(*(time.strptime(dt_str, date_format)[0:6]))
    except Exception as e:
        log_utils.log('Failed dt conversion: (%s) - |%s|%s|' % (e, dt_str, date_format))
        parsed = datetime.datetime.fromtimestamp(0)
    return parsed
def fix_bad_cookies(cookies):
    """Clamp cookie expirations that overflow a native int.

    cookies -- nested mapping domain -> path -> name -> cookie object

    Some sites send far-future expirations that break downstream code;
    anything beyond sys.maxint is clamped (logged at debug level).
    Mutates and returns *cookies*.
    """
    for domain in cookies:
        for path in cookies[domain]:
            for key in cookies[domain][path]:
                cookie = cookies[domain][path][key]
                if cookie.expires > sys.maxint:
                    log_utils.log('Fixing cookie expiration for %s: was: %s now: %s' % (key, cookie.expires, sys.maxint), log_utils.LOGDEBUG)
                    cookie.expires = sys.maxint
    return cookies
def __fix_bad_cookies():
    """Clamp far-future expirations in the module cookie jar (cj).

    Walks cj's internal domain -> path -> name mapping and caps any
    expiration beyond sys.maxint, logging each fix.  One of several
    near-identical copies of this helper in this file.
    """
    c = cj._cookies
    for domain in c:
        for path in c[domain]:
            for key in c[domain][path]:
                cookie = c[domain][path][key]
                if cookie.expires > sys.maxint:
                    log_utils.log('Fixing cookie expiration for %s: was: %s now: %s' % (key, cookie.expires, sys.maxint))
                    cookie.expires = sys.maxint
def _parse_json(html, url=''): if html: try: return json.loads(html) except ValueError: log_utils.log('Invalid JSON returned: %s: %s' % (html, url), xbmc.LOGERROR) return {} else: log_utils.log('Empty JSON object: %s: %s' % (html, url), xbmc.LOGERROR) return {}
def __fix_bad_cookies():
    """Clamp far-future expirations in the module cookie jar (cj).

    Variant that additionally pops a Kodi notification per fix when the
    debug setting is on.  One of several near-identical copies of this
    helper in this file.
    """
    c = cj._cookies
    for domain in c:
        for path in c[domain]:
            for key in c[domain][path]:
                cookie = c[domain][path][key]
                if cookie.expires > sys.maxint:
                    if kodi.get_setting('debug') == "true":
                        kodi.notify(header='Cookie Fix', msg='Fixing cookie expiration for %s: was: %s now: %s' % (key, cookie.expires, sys.maxint), duration=5000, sound=None)
                    log_utils.log('Fixing cookie expiration for %s: was: %s now: %s' % (key, cookie.expires, sys.maxint), xbmc.LOGERROR)
                    cookie.expires = sys.maxint
def __fix_bad_cookies():
    """Clamp far-future expirations in the module cookie jar (cj).

    Caps any cookie expiration beyond sys.maxint and logs the fix.
    One of several near-identical copies of this helper in this file.
    """
    c = cj._cookies
    for domain in c:
        for path in c[domain]:
            for key in c[domain][path]:
                cookie = c[domain][path][key]
                if cookie.expires > sys.maxint:
                    log_utils.log(
                        'Fixing cookie expiration for %s: was: %s now: %s' %
                        (key, cookie.expires, sys.maxint))
                    cookie.expires = sys.maxint
def __get_token(self):
    """Exchange the PIN typed into the dialog for a Trakt token.

    Returns True when authorization succeeds; False for an empty PIN or
    a failed authorization (failure is logged at debug level).
    """
    pin = self.pin_edit_control.getText().strip()
    if not pin:
        return False
    try:
        trakt_api = trakt.TraktAPI()
        trakt_api.authorize(pin=pin)
    except Exception as e:
        log_utils.log('Trakt Authorization Failed: %s' % (e), log_utils.LOGDEBUG)
        return False
    return True
def _default_get_episode_url(show_url, video, episode_pattern, title_pattern='', airdate_pattern='', data=None, headers=None):
    """Locate an episode link on a show page.

    Returns the pathified group(1) of *episode_pattern* from the show
    page, or None when the page is empty or the pattern does not match
    (logged at debug level).  title_pattern/airdate_pattern, data and
    headers are accepted for interface compatibility and unused here.
    """
    # Bug fix: an absolute show_url used to be concatenated onto base_url
    # (base_url + show_url), producing an invalid address; absolute URLs
    # now pass through unchanged, and https links are handled too.
    if not show_url.startswith('http'):
        url = urlparse.urljoin(base_url, show_url)
    else:
        url = show_url
    html = get_url(url)
    if html:
        match = re.search(episode_pattern, html, re.DOTALL)
        if match:
            return _pathify_url(match.group(1))
        else:
            log_utils.log('Skipping as Episode not found: %s' % (url), log_utils.LOGDEBUG)
def get_sources(suf_url): source_url = suf_url #source_url = get_url(video) sources = [] if source_url and source_url != FORCE_NO_MATCH: try: url = urlparse.urljoin(base_url, source_url) html = _http_get(url, cache_limit=.5) pattern = '<iframe id="videoframe" src="([^"]+)' match = re.search(pattern, html) frame_url = match.group(1) url = urlparse.urljoin(base_url, frame_url) html = _http_get(url, cache_limit=.1) match = re.search('lastChild\.value="([^"]+)"(?:\s*\+\s*"([^"]+))?', html) secret = ''.join(match.groups('')) match = re.search('"&t=([^"]+)', html) t = match.group(1) match = re.search('(?:\s+|,)s\s*=(\d+)', html) s_start = int(match.group(1)) match = re.search('(?:\s+|,)m\s*=(\d+)', html) m_start = int(match.group(1)) pattern = '<div class=ripdiv>(.*?)</div>' for container in re.finditer(pattern, html): fragment = container.group(0) match = re.match('<div class=ripdiv><b>(.*?)</b>', fragment) if match: quality = QUALITY_MAP.get(match.group(1).upper(), QUALITIES.HIGH) else: quality = None pattern = 'onclick=\'go\((\d+)\)\'>([^<]+)(<span.*?)</a>' for match in re.finditer(pattern, fragment): link_id, label, host_fragment = match.groups() source = {'hostname':'IceFilms','multi-part': False, 'quality': quality, 'label': label, 'rating': None, 'views': None, 'direct': False} source['host'] = re.sub('(<[^>]+>|</span>)', '', host_fragment) s = s_start + random.randint(3, 1000) m = m_start + random.randint(21, 1000) url = AJAX_URL % (link_id, s, m, secret, t) # bobs_dogs = url # source['url'] = resolve_link(bobs_dogs) urls = resolve_link(url) source['url'] = urls sources.append(source) except Exception as e: log_utils.log('Failure (%s) during icefilms get sources: |%s|' % (str(e), suf_url), log_utils.LOGWARNING) return sources
def gk_decrypt(name, key, cipher_link):
    """Decrypt a GK-style hex-encoded link with AES-ECB.

    name        -- scraper name, used only in the failure log message
    key         -- ASCII key, NUL-padded here to 24 bytes (AES-192)
    cipher_link -- hex string of the ciphertext

    Returns the plaintext truncated at the first NUL, or '' on failure.
    """
    try:
        key += (24 - len(key)) * '\0'
        decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationECB(key))
        plain_text = decrypter.feed(cipher_link.decode('hex'))
        plain_text += decrypter.feed()
        # padding/garbage after the first NUL is discarded
        plain_text = plain_text.split('\0', 1)[0]
    except Exception as e:
        log_utils.log('Exception (%s) during %s gk decrypt: cipher_link: %s' % (e, name, cipher_link), log_utils.LOGWARNING)
        plain_text = ''
    return plain_text
def start_tv_watch(name,media): seasons=re.compile('S(.+?)E(.+?) ').findall(media) for sea,epi in seasons: start_values = """{"show": {"title": """+name+"""},"episode": {"season": """+sea+""","number": """+epi+"""},"progress": 10,"app_version": "1.0","app_date": "2014-09-22"}""" #print start_values request = Request('https://api-v2launch.trakt.tv/scrobble/start', data=start_values, headers=auth_headers) response_body = urlopen(request).read() if kodi.get_setting('debug') == "true": print response_body log_utils.log(response_body)
def merdb(name):
    """Scrape MerDB hosters for a movie.

    name -- movie title ending in ' (YYYY)'

    Searches the site, matches results by URL slug or exact short
    title, and collects every host link from the movie page (skipping
    ad/sponsor rows).  Returns a urlresolver-filtered source list, or
    [] on error.
    """
    try:
        sources = []
        searchUrl = base_url + '?search='
        movie_name = name[:-6]        # title + trailing space
        movie_name_short = name[:-7]  # bare title
        movie_year = name[-6:]
        movie_year = movie_year.replace('(', '').replace(')', '')
        sname = movie_name.replace(" ", "+")
        mername = sname[:-1]          # drop the trailing '+'
        movie_match = movie_name.replace(" ", "_") + movie_year
        surl = searchUrl + mername
        link = OPEN_URL(surl)
        #dp.update(80)
        match = re.compile(
            '<div class="main_list_box"><a href="(.+?)" title="(.+?)"><img'
        ).findall(link)
        for url, name in match:
            if movie_match in url or movie_name_short == name:
                link = OPEN_URL(base_url + url)
                vidlinks = dom_parser.parse_dom(
                    link, 'span', {'class': "movie_version_link"})
                linknames = dom_parser.parse_dom(link, 'span',
                                                 {'class': "version_host"})
                for name, vidlink in zip(linknames, vidlinks):
                    #dp.update(80)
                    match = re.compile('<a href="(.+?)"').findall(vidlink)
                    for linkurl in match:
                        # drop ad redirects and sponsor/host header rows
                        if "ads.php" not in linkurl and "Sponsor" not in name and "Host" not in name:
                            url = base_url + linkurl
                            #print "URLS IS = " +url
                            host = name.replace("'", "")
                            #linkname = tools.get_hostname(name)
                            source = {
                                'hostname': 'MerDB',
                                'views': None,
                                'url': url,
                                'host': host,
                                'direct': False
                            }
                            sources.append(source)
        #dp.close()
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='MerDb',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def get_ua():
    """Return the cached random User-Agent, regenerating it weekly.

    A new agent is built (and persisted via addon settings) when none
    is cached or the cached one is older than seven days.
    """
    try:
        last_gen = int(kodi.get_setting('last_ua_create'))
    except:
        # unset or non-numeric setting -> force regeneration
        last_gen = 0
    one_week_ago = time.time() - (7 * 24 * 60 * 60)
    if not kodi.get_setting('current_ua') or last_gen < one_week_ago:
        idx = random.randrange(len(RAND_UAS))
        parts = {'win_ver': random.choice(WIN_VERS),
                 'feature': random.choice(FEATURES),
                 'br_ver': random.choice(BR_VERS[idx])}
        user_agent = RAND_UAS[idx].format(**parts)
        log_utils.log('Creating New User Agent: %s' % (user_agent), log_utils.LOGDEBUG)
        kodi.set_setting('current_ua', user_agent)
        kodi.set_setting('last_ua_create', str(int(time.time())))
    else:
        user_agent = kodi.get_setting('current_ua')
    return user_agent
def get_sources(suf_url): source_url = suf_url hosters = [] sources = {} if source_url and source_url != FORCE_NO_MATCH: url = urlparse.urljoin(base_url, source_url) #print "URL IS = "+url html = get_url(url) for server_list in dom_parser.parse_dom(html, 'ul', {'class': 'episodes'}): for hash_id in dom_parser.parse_dom(server_list, 'a', ret='data-id'): now = time.localtime() url = urlparse.urljoin(base_url, hash_url) #/ajax/film/episode?hash_id=%s&f=&p=%s url = url % (hash_id, now.tm_hour + now.tm_min) #print "CRAZY URL IS = "+url html =_http_get(url, headers=XHR, cache_limit=.5) #print "HTML IS = "+html if html: try: #print "I DID JSON" js_result = json.loads(html) #print js_result except ValueError: print 'Invalid JSON returned: %s: %s' % (html) log_utils.log('Invalid JSON returned: %s' % (html), log_utils.LOGWARNING) else: if 'videoUrlHash' in js_result and 'grabber' in js_result: # print "ITS IN THERE" query = {'flash': 1, 'json': 1, 's': now.tm_min, 'link': js_result['videoUrlHash'], '_': int(time.time())} query['link'] = query['link'].replace('\/', '/') grab_url = js_result['grabber'].replace('\/', '/') grab_url += '?' + urllib.urlencode(query) html =get_url(grab_url) #print "NEW HTML IS = "+html if html: try: js_result = json.loads(html) except ValueError: print 'Invalid JSON returned: %s: %s' % (html) else: for result in js_result: if 'label' in result: quality = _height_get_quality(result['label']) else: quality = _gv_get_quality(result['file']) sources[result['file']] = quality for source in sources: hoster = {'hostname':'9Movies','multi-part': False, 'host': _get_direct_hostname(source), 'quality': sources[source], 'view': None, 'rating': None, 'url': source, 'direct': True} hosters.append(hoster) hosters = main_scrape.apply_urlresolver(hosters) return hosters
def zmovies(name):
    """Scrape ZMovies hosters for a movie.

    name -- movie title ending in ' (YYYY)'

    Tries both URL shapes the site uses (slug with and without the
    year appended) and collects 'Watch Full' links from each.  Returns
    a urlresolver-filtered source list, or [] on error.
    """
    try:
        sources = []
        movie_name = name[:-6]        # title + trailing space
        movie_name_short = name[:-7]  # bare title
        movie_year = name[-6:]
        movie_year = movie_year.replace('(', '').replace(')', '')
        sname = movie_name.replace(" ", "+")
        movie_match = movie_name.replace(" ", "-").replace(":", "")
        year_movie_match = movie_match + movie_year
        direct_movie_match = movie_match[:-1]
        tmurl = base_url + 'movies/view/' + direct_movie_match
        ytmurl = base_url + 'movies/view/' + year_movie_match
        #dp.update(25)
        # For links that are direct
        link = OPEN_URL(tmurl)
        match = re.compile(
            'target="_blank" href="(.+?)"> <b> Watch Full </b></a> </td>'
        ).findall(link)
        for url in match:
            hmf = urlresolver.HostedMediaFile(url)
            if hmf:
                #linkname= hmf.get_host()
                linkname = tools.get_hostname(url)
                host = linkname
                source = {'url': url, 'host': host, 'direct': False}
                sources.append(source)
        # For links that need the year added
        link = OPEN_URL(ytmurl)
        #dp.update(80)
        match = re.compile(
            'target="_blank" href="(.+?)"> <b> Watch Full </b></a> </td>'
        ).findall(link)
        for url in match:
            linkname = tools.get_hostname(url)
            host = linkname
            source = {'url': url, 'host': host, 'direct': False}
            sources.append(source)
        #dp.close()
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Zee Moviess',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
        return hosters
def start_tv_watch(name, media): seasons = re.compile('S(.+?)E(.+?) ').findall(media) for sea, epi in seasons: start_values = """{"show": {"title": """ + name + """},"episode": {"season": """ + sea + ""","number": """ + epi + """},"progress": 10,"app_version": "1.0","app_date": "2014-09-22"}""" #print start_values request = Request('https://api-v2launch.trakt.tv/scrobble/start', data=start_values, headers=auth_headers) response_body = urlopen(request).read() if kodi.get_setting('debug') == "true": print response_body log_utils.log(response_body)
def get_sources(self, video, video_type):
    """Scrape IceFilms hoster sources for *video* (class-based variant).

    Mirrors the module-level IceFilms scraper: extracts the obfuscation
    tokens ('secret', 't', 's'/'m' counters) from the frame page's
    JavaScript, then builds and resolves one AJAX URL per listed link.
    Returns the collected source dicts (possibly empty; errors logged).
    """
    source_url = self.get_url(video)
    sources = []
    if source_url and source_url != FORCE_NO_MATCH:
        try:
            url = urlparse.urljoin(self.base_url, source_url)
            html = self._http_get(url, cache_limit=2)
            pattern = '<iframe id="videoframe" src="([^"]+)'
            match = re.search(pattern, html)
            url = urlparse.urljoin(self.base_url, match.group(1))
            html = self._http_get(url, cache_limit=.5)
            # token is built from one or two JS string literals
            match = re.search('lastChild\.value="([^"]+)"(?:\s*\+\s*"([^"]+))?', html)
            secret = ''.join(match.groups(''))
            match = re.search('"&t=([^"]+)', html)
            t = match.group(1)
            match = re.search('(?:\s+|,)s\s*=(\d+)', html)
            s_start = int(match.group(1))
            match = re.search('(?:\s+|,)m\s*=(\d+)', html)
            m_start = int(match.group(1))
            for fragment in dom_parser.parse_dom(html, 'div', {'class': 'ripdiv'}):
                # each ripdiv's <b> header names the quality tier
                match = re.match('<b>(.*?)</b>', fragment)
                if match:
                    q_str = match.group(1).replace(' ', '').upper()
                    quality = QUALITY_MAP.get(q_str, QUALITIES.HIGH)
                else:
                    quality = QUALITIES.HIGH
                pattern = '''onclick='go\((\d+)\)'>([^<]+)(<span.*?)</a>'''
                for match in re.finditer(pattern, fragment):
                    link_id, label, host_fragment = match.groups()
                    source = {'hostname': 'IceFilms', 'multi-part': False, 'quality': quality, 'class': '', 'version': label, 'rating': None, 'views': None, 'direct': False}
                    source['host'] = re.sub('(</?[^>]*>)', '', host_fragment)
                    # randomized offsets mimic the site's click counters
                    s = s_start + random.randint(3, 1000)
                    m = m_start + random.randint(21, 1000)
                    url = AJAX_URL % (link_id, s, m, secret, t)
                    urls = self.resolve_link(url)
                    source['url'] = urls
                    sources.append(source)
        except Exception as e:
            log_utils.log('Failure (%s) during icefilms get sources: |%s|' % (str(e), video), log_utils.LOGWARNING)
    # NOTE(review): return value of apply_urlresolver is discarded here,
    # unlike the module-level variant which reassigns it.
    main_scrape.apply_urlresolver(sources)
    return sources
def _set_cookies(base_url, cookies):
    """Merge *cookies* (name -> value mapping) into the persistent LWP
    cookie jar for the hostname of *base_url* and return the jar.

    Logs the jar before and after when debug is enabled.  Duplicate of
    the other _set_cookies definition in this file.
    """
    cj = cookielib.LWPCookieJar(cookie_file)
    try:
        cj.load(ignore_discard=True)
    except:
        # first run: jar file does not exist yet
        pass
    if kodi.get_setting('debug') == 'true':
        log_utils.log('Before Cookies: %s' % (cookies_as_str(cj)), log_utils.LOGDEBUG)
    domain = urlparse.urlsplit(base_url).hostname
    for key in cookies:
        c = cookielib.Cookie(0, key, str(cookies[key]), port=None, port_specified=False,
                             domain=domain, domain_specified=True, domain_initial_dot=False,
                             path='/', path_specified=True, secure=False, expires=None,
                             discard=False, comment=None, comment_url=None, rest={})
        cj.set_cookie(c)
    cj.save(ignore_discard=True)
    if kodi.get_setting('debug') == 'true':
        log_utils.log('After Cookies: %s' % (cookies_as_str(cj)), log_utils.LOGDEBUG)
    return cj
def OPEN_URL(url):
    """GET *url* presenting an Android tablet User-Agent.

    Persists session cookies to the shared jar and returns the page
    body; on failure the error is logged (and optionally notified) and
    None is returned implicitly.
    """
    try:
        request = urllib2.Request(url)
        request.add_header('User-Agent', 'Mozilla/5.0 (Linux; U; Android 4.2.2; en-us; AFTB Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
        response = urllib2.urlopen(request)
        body = response.read()
        cj.save(cookie_file, ignore_discard=True)
        response.close()
        return body
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies', msg='(error) %s %s' % (str(e), ''), duration=5000, sound=None)
def tmlinkpage(url, movie_title, thumb, media):
    """Follow a TwoMovies 'full' link page and dispatch its sources.

    Auto-submits the 'Before you start watching' confirmation form when
    the site shows it, then strips all whitespace from the page so the
    space-free source-link regex can match, and hands every extracted
    link to main_scrape.  Duplicate of the other tmlinkpage definition
    in this file.
    """
    try:
        if "full" in url:
            link = OPEN_URL(url)
            if 'Before you start watching' in link:
                #print 'Confirmation Button '
                url = url
                # replay the confirmation POST the site expects
                header_dict = {}
                header_dict[
                    'Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
                header_dict['Connection'] = 'keep-alive'
                header_dict[
                    'Content-Type'] = 'application/x-www-form-urlencoded'
                header_dict['Origin'] = host_url
                header_dict['Referer'] = url
                header_dict[
                    'User-Agent'] = 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'
                form_data = {
                    'confirm': 'I understand, Let me watch the movie now!'
                }
                net.set_cookies(cookiejar)
                conbutton = net.http_POST(url, form_data=form_data, headers=header_dict)
            link = OPEN_URL(url)
            link = link.replace('\r', '').replace('\n', '').replace('\t', '').replace(' ', '')
            matchurl = re.compile(
                'Sourcelink:<arel="nofollow"onlicktarget="_blank">(.+?)</'
            ).findall(link)
            for urls in matchurl:
                urls = str(urls)
                urls = urls.replace('&rel=nofollow', '')
                if media == 'movies':
                    main_scrape.get_link(urls, movie_title, thumb, media)
                else:
                    main_scrape.get_tv_link(urls, movie_title, thumb, media)
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000, sound=None)
def tmovies(name):
    """Collect TwoMovies host links for a movie named '<title> (YYYY)'.

    Tries both the plain-title page and the title+year page, merges the
    anchors found on each, and returns the urlresolver-filtered sources
    (empty list on error).
    """
    try:
        sources = []
        searchUrl = base_url + 'watch_movie/'
        movie_name = name[:-6]
        movie_name_short = name[:-7]
        movie_year = name[-6:].replace('(', '').replace(')', '')
        sname = movie_name.replace(" ", "+")
        movie_match = movie_name.replace(" ", "_").replace(":", "").replace("-", "")
        direct_url = base_url + 'watch_movie/' + movie_match[:-1]
        year_url = base_url + 'watch_movie/' + movie_match + movie_year

        def _harvest(page_url):
            # One page worth of "norm vlink" anchors -> source dicts.
            page = OPEN_URLTM(page_url)
            labels = dom_parser.parse_dom(page, 'a', {'class': "norm vlink"})
            hrefs = dom_parser.parse_dom(page, 'a', {'class': "norm vlink"}, ret='href')
            for label, href in zip(labels, hrefs):
                sources.append({'url': href,
                                'host': label.replace('www.', ''),
                                'direct': False})

        _harvest(direct_url)
        _harvest(year_url)
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies',
                        msg='(error) %s %s' % (str(e), ''),
                        duration=5000,
                        sound=None)
        return hosters
def update_scraper(file_name, scraper_url, scraper_key):
    """Refresh an encrypted scraper module on disk, at most every 8 hours.

    Downloads and decrypts the scraper from *scraper_url* and rewrites the
    local file only when the content actually changed.
    """
    py_path = os.path.join(kodi.get_path(), 'scrapers', file_name)
    exists = os.path.exists(py_path)
    # Skip the network entirely when the local copy is fresh enough.
    if exists and (time.time() - os.path.getmtime(py_path)) <= (8 * 60 * 60):
        return
    new_py = utils2.get_and_decrypt(scraper_url, scraper_key)
    if not new_py:
        return
    old_py = ''
    if exists:
        with open(py_path, 'r') as f:
            old_py = f.read()
    log_utils.log('%s path: %s, new_py: %s, match: %s' % (__file__, py_path, bool(new_py), new_py == old_py),
                  log_utils.LOGDEBUG)
    if old_py != new_py:
        with open(py_path, 'w') as f:
            f.write(new_py)
def primewire(name):
    """Scrape PrimeWire for movie sources.

    *name* is '<title> (YYYY)'.  Searches the site, opens the exact
    title+year match, and collects every non-sponsored host link found in
    the version tables.  Returns the urlresolver-filtered source list, or
    an empty list on error.
    """
    try:
        sources = []
        searchUrl = base_url+'index.php?search_keywords='
        movie_name = name[:-6]          # title plus trailing space
        movie_name_short = name[:-7]    # bare title (not used below)
        movie_year_full = name[-6:]     # '(YYYY)'
        movie_year = movie_year_full.replace('(','').replace(')','')
        sname = movie_name.replace(" ","+")
        primename = sname[:-1]          # '+'-joined title for the search query
        movie_match =movie_name.replace(" ","_")+movie_year  # not used below
        surl = searchUrl + primename
        link = OPEN_URL(surl)
        full_match = movie_name+movie_year_full  # e.g. 'Title (2015)'
        # Search-result anchors: relative href + 'Watch <title>' text.
        match=re.compile('<a href="/(.+?)" title="Watch (.+?)">').findall(link)
        for url, name in match:
            if full_match == name:
                link = OPEN_URL(base_url+url)
                container_pattern = r'<table[^>]+class="movie_version[ "][^>]*>(.*?)</table>'
                # Each row: quality class, base64-encoded url/host, view count.
                # ('version_veiws' typo is the site's own markup.)
                item_pattern = (
                    r'quality_(?!sponsored|unknown)([^>]*)></span>.*?'
                    r'url=([^&]+)&(?:amp;)?domain=([^&]+)&(?:amp;)?(.*?)'
                    r'"version_veiws"> ([\d]+) views</')
                max_index = 0   # NOTE(review): never used
                max_views = -1  # NOTE(review): never used
                for container in re.finditer(container_pattern, link, re.DOTALL | re.IGNORECASE):
                    for i, source in enumerate(re.finditer(item_pattern, container.group(1), re.DOTALL)):
                        qual, url, host, parts, views = source.groups()
                        if kodi.get_setting('debug') == "true":
                            print "PrimeWire Debug:"
                            print "Quality is " + qual
                            print "URL IS " + url.decode('base-64')
                            print "HOST IS "+host.decode('base-64')
                            print "VIEWS ARE " +views
                        if host == 'ZnJhbWVndGZv': continue # filter out promo hosts
                        source = {'hostname':'PrimeWire','url': url.decode('base-64'), 'host': host.decode('base-64'),'views':views,'quality':qual,'direct':False}
                        sources.append(source)
        #print "MOVIE SOURCES ARE = "+str(sources)
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        sources =[]
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='PrimeWire',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None)
        return sources
def _http_get(self, url, params=None, data=None, multipart_data=None, headers=None, cookies=None, allow_redirect=True, method=None, require_debrid=False, read_error=False, cache_limit=8):
    """Fetch *url* via the cached getter; if the page carries a sucuri
    anti-bot cookie, merge it into the request cookies and re-fetch once
    with caching disabled.  Returns the final HTML."""
    html = self._cached_http_get(url, self.base_url, self.timeout, params=params, data=data,
                                 multipart_data=multipart_data, headers=headers, cookies=cookies,
                                 allow_redirect=allow_redirect, method=method,
                                 require_debrid=require_debrid, read_error=read_error,
                                 cache_limit=cache_limit)
    sucuri_cookie = scraper_utils.get_sucuri_cookie(html)
    if sucuri_cookie:
        log_utils.log('Setting sucuri cookie: %s' % (sucuri_cookie), log_utils.LOGDEBUG)
        if cookies is not None:
            # BUG FIX: dict.update() returns None; the original code did
            # `cookies = cookies.update(sucuri_cookie)`, which set cookies
            # to None and dropped BOTH cookie sets on the retry request.
            cookies.update(sucuri_cookie)
        else:
            cookies = sucuri_cookie
        # Retry uncached so the sucuri challenge response isn't reused.
        html = self._cached_http_get(url, self.base_url, self.timeout, params=params, data=data,
                                     multipart_data=multipart_data, headers=headers, cookies=cookies,
                                     allow_redirect=allow_redirect, method=method,
                                     require_debrid=require_debrid, read_error=read_error,
                                     cache_limit=0)
    return html
def stop_tv_watch(name,media): # log_utils.log(name) print name seasons=re.compile('S(.+?)E(.+?) ').findall(media) for sea,epi in seasons: # log_utils.log(sea,epi) print sea + epi stop_values = """{"show": {"title": """+name+"""},"episode": {"season": """+sea+""","number": """+epi+"""},"progress": 99.9,"app_version": "1.0","app_date": "2014-09-22"}""" #print stop_values request = Request('https://api-v2launch.trakt.tv/scrobble/stop', data=stop_values, headers=auth_headers) response_body = urlopen(request).read() trakt_id=re.compile('"trakt":(.+?),').findall(response_body) print trakt_id[0] watched_cache.set_watch_cache(trakt_id[0],"shows") if kodi.get_setting('debug') == "true": print response_body log_utils.log(response_body)
def zmovies(name): try: sources = [] movie_name = name[:-6] movie_name_short = name[:-7] movie_year = name[-6:] movie_year = movie_year.replace('(','').replace(')','') sname = movie_name.replace(" ","+") movie_match =movie_name.replace(" ","-").replace(":","") year_movie_match = movie_match+movie_year direct_movie_match = movie_match[:-1] tmurl = base_url+'movies/view/'+direct_movie_match ytmurl = base_url+'movies/view/'+year_movie_match #dp.update(25) #For links that are direct link = OPEN_URL(tmurl) match=re.compile('target="_blank" href="(.+?)"> <b> Watch Full </b></a> </td>').findall(link) for url in match: hmf = urlresolver.HostedMediaFile(url) if hmf: #linkname= hmf.get_host() linkname = tools.get_hostname(url) host = linkname #source = {'hostname':'IceFilms','multi-part': False, 'quality': quality, 'label': label, 'rating': None, 'views': None, 'direct': False} source = {'hostname':'ZMovies','views':None, 'quality': None, 'rating': None,'url': url, 'host': host, 'direct':False} sources.append(source) #Fro Links that need year added link = OPEN_URL(ytmurl) #dp.update(80) match=re.compile('target="_blank" href="(.+?)"> <b> Watch Full </b></a> </td>').findall(link) for url in match: linkname = tools.get_hostname(url) host = linkname source = {'hostname':'ZMovies','views':None, 'quality': None, 'rating': None,'url': url, 'host': host, 'direct':False} sources.append(source) #dp.close() sources = main_scrape.apply_urlresolver(sources) print sources return sources except Exception as e: hosters =[] log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) if kodi.get_setting('error_notify') == "true": kodi.notify(header='Zee Moviess',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None) return hosters
def __get_token(self):
    """Authorize trakt.tv with the PIN typed into the dialog.

    Returns True on successful authorization, False otherwise.
    """
    pin = self.pin_edit_control.getText().strip()
    if not pin:
        return False
    try:
        trakt.TraktAPI().authorize(pin=pin)
        return True
    except Exception as e:
        log_utils.log('Trakt Authorization Failed: %s' % (e), log_utils.LOGDEBUG)
        return False
def find_episode(name, trakt_id, movie_title):
    """List every episode of one season as Kodi directory entries.

    *name* is 'Season <n>'; *trakt_id* identifies the show.  Episodes that
    have not aired yet (or have no premiere date) are rendered in maroon;
    watched episodes get playcount=1 from the local watched cache.
    """
    try:
        media ='episode'
        season = name.replace('Season ','')
        #print "SHOW TRAKT IS : "+trakt_id
        link = trakt_api.get_show_episodes(trakt_id,season)
        for e in link:
            ep_trakt_id= e['ids']['trakt']
            #print "Episode TRAKT ID IS : "+str(ep_trakt_id)
            infoLabels={}
            infoLabels.update(make_infoLabels(e))
            episode = infoLabels['episode']
            # Re-fetch full details for this specific episode.
            infoLabels = trakt_api.get_episode_details(trakt_id,season,episode)
            menu_items=[]
            trailer = infoLabels['trailer_url']
            year = str(infoLabels['year'])
            name = infoLabels['title'].encode('utf-8')
            thumb=infoLabels['cover_url']
            # ################
            # Local watched-cache overrides trakt's playcount.
            was_watched=watched_cache.get_watched_cache(ep_trakt_id)
            if was_watched is not None:
                infoLabels['playcount'] = 1
            # ################
            if thumb is None:
                thumb = ''
            #print infoLabels['premiered'][:10]
            #if (episode['first_aired'] != None and utils2.iso_2_utc(episode['first_aired']) <= time.time()) or (include_unknown and episode['first_aired'] == None):
            # Lexicographic compare of ISO 'YYYY-MM-DD' strings -- works as a
            # date comparison only because both are zero-padded ISO format.
            d1 = str(infoLabels['premiered'])
            d2 = str(datetime.date.today())
            #print today - was_aired
            #if infoLabels['premiered'] =='':
            if d1 >= d2 or infoLabels['premiered'] == '':
                # Not aired yet (or unknown air date): maroon "coming soon" entry.
                # NOTE(review): `name is not ''` is an identity test, not
                # equality -- it is effectively always True.
                if name is not '':
                    menu_items.append(('[COLOR gold]Show Information[/COLOR]', 'XBMC.Action(Info)'))
                    kodi.addDir('[COLOR maroon]S'+str(season)+'E'+str(episode)+' '+name+'[/COLOR]','','findsource',thumb,movie_title,5,'','shows',meta_data=infoLabels,menu_items=menu_items,replace_menu=True)
                #name = name+" [COLOR red]Coming Soon[/COLOR]"
            else:
                menu_items.append(('[COLOR gold]Show Information[/COLOR]', 'XBMC.Action(Info)'))
                kodi.addDir('S'+str(season)+'E'+str(episode)+' '+name,'','findsource',thumb,movie_title,5,'','shows',meta_data=infoLabels,menu_items=menu_items,replace_menu=True)
        kodi.auto_view('episode')
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='Trakt Episodes',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None)
def __get_token(self):
    """Exchange the user-entered trakt.tv PIN for authorization.

    Returns True when the PIN was accepted, False for an empty PIN or a
    failed authorization.
    """
    authorized = False
    pin = self.pin_edit_control.getText().strip()
    if pin:
        try:
            api = trakt.TraktAPI()
            api.authorize(pin=pin)
            authorized = True
        except Exception as e:
            log_utils.log('Trakt Authorization Failed: %s' % (e), log_utils.LOGDEBUG)
    return authorized
def OPEN_URLTM(url):
    """Fetch a TwoMovies page with browser-like headers, persist the
    module cookie jar, and return the response body (None on error)."""
    try:
        request = urllib2.Request(url)
        for header, value in (
                ('User-Agent', 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36'),
                ('Content-Type', 'application/x-www-form-urlencoded'),
                ('Host', host_url),
                ('Referer', ''),
                ('Connection', 'keep-alive'),
                ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8')):
            request.add_header(header, value)
        response = urllib2.urlopen(request)
        body = response.read()
        cj.save(cookie_file, ignore_discard=True)
        response.close()
        return body
    except Exception as e:
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies', msg='(error) %s %s' % (str(e), ''), duration=5000, sound=None)
def putlocker_movies(movie_title): try: title = movie_title[:-7] movie_year = movie_title[-6:] year = movie_year.replace('(','').replace(')','') video_type = 'movies' show_url = search(video_type,title,year) for e in show_url: url = e['url'] hosters=get_sources(url) print "HOSTERS ARE " + str(hosters) hosters = main_scrape.apply_urlresolver(hosters) return hosters except Exception as e: hosters =[] log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) if kodi.get_setting('error_notify') == "true": kodi.notify(header='Putlocker Movies',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None) return hosters
def stop_tv_watch(name, media): # log_utils.log(name) print name seasons = re.compile('S(.+?)E(.+?) ').findall(media) for sea, epi in seasons: # log_utils.log(sea,epi) print sea + epi stop_values = """{"show": {"title": """ + name + """},"episode": {"season": """ + sea + ""","number": """ + epi + """},"progress": 99.9,"app_version": "1.0","app_date": "2014-09-22"}""" #print stop_values request = Request('https://api-v2launch.trakt.tv/scrobble/stop', data=stop_values, headers=auth_headers) response_body = urlopen(request).read() trakt_id = re.compile('"trakt":(.+?),').findall(response_body) print trakt_id[0] watched_cache.set_watch_cache(trakt_id[0], "shows") if kodi.get_setting('debug') == "true": print response_body log_utils.log(response_body)
def _default_get_episode_url(show_url, video, episode_pattern, title_pattern='', airdate_pattern='', data=None, headers=None):
    """Find the episode URL on a show's page.

    Fetches *show_url* (resolved against base_url when relative) and
    returns the pathified first group of *episode_pattern*, or None when
    the episode is not present on the page.
    """
    # BUG FIX: the original branch was inverted -- a relative show_url was
    # urljoined (correct), but an absolute one got base_url prepended,
    # producing 'http://base.comhttp://...'.  Use absolute URLs as-is.
    if 'http://' not in show_url:
        url = urlparse.urljoin(base_url, show_url)
    else:
        url = show_url
    html = get_url(url)
    if html:
        match = re.search(episode_pattern, html, re.DOTALL)
        if match:
            return _pathify_url(match.group(1))
        else:
            log_utils.log('Skipping as Episode not found: %s' % (url), log_utils.LOGDEBUG)
def __fix_bad_cookies():
    """Clamp cookie expiry timestamps that exceed sys.maxint.

    Walks the module cookie jar's internal domain/path/name mapping and
    caps any oversized `expires` so the jar can be saved on 32-bit builds.
    """
    all_cookies = cj._cookies
    for domain in all_cookies:
        for path in all_cookies[domain]:
            for key, cookie in all_cookies[domain][path].items():
                if cookie.expires > sys.maxint:
                    message = 'Fixing cookie expiration for %s: was: %s now: %s' % (key, cookie.expires, sys.maxint)
                    if kodi.get_setting('debug') == "true":
                        kodi.notify(header='Cookie Fix', msg=message, duration=5000, sound=None)
                    log_utils.log(message, xbmc.LOGERROR)
                    cookie.expires = sys.maxint
def _default_get_episode_url(show_url, video, episode_pattern, title_pattern='', airdate_pattern='', data=None, headers=None):
    """Find the episode URL on a show's page (scraper-local variant).

    Fetches the show page (via the short-cached getter) and returns the
    pathified first group of *episode_pattern*, or None when the episode
    is not found.
    """
    log_utils.log('Default Episode Url: |%s|%s|%s|%s|' % (base_url, show_url, str(video).decode('utf-8', 'replace'), data), log_utils.LOGDEBUG)
    # BUG FIX: the original unconditionally did `url = base_url + show_url`
    # (the guard was commented out), which breaks any absolute show_url by
    # double-prefixing it.  Restore the absolute-URL check; behavior for
    # relative URLs is unchanged.
    if 'http://' in show_url:
        url = show_url
    else:
        url = base_url + show_url
    html = _http_get(url, data=data, headers=headers, cache_limit=2)
    if html:
        match = re.search(episode_pattern, html, re.DOTALL)
        if match:
            return _pathify_url(match.group(1))
        else:
            log_utils.log('Skipping as Episode not found: %s' % (url), log_utils.LOGDEBUG)
def ot3_movies(name):
    """Scrape 123Movies for movie sources.

    *name* is '<title> (YYYY)'.  Searches the site and returns the
    urlresolver-filtered sources from the first result (empty list on
    error).
    """
    try:
        title = name[:-7]
        year = name[-6:].replace('(', '').replace(')', '')
        video_type = 'movies'
        for entry in search(video_type, title, year):
            url = entry['url']
            year = entry['year']
            name = entry['title']
            srcurl = base_url + url
            hosters = get_sources(srcurl, url)
            hosters = main_scrape.apply_urlresolver(hosters)
            return hosters
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='123Movies', msg='(error) %s %s' % (str(e), ''), duration=5000, sound=None)
        return hosters
def merdb(name):
    """Scrape MerDB for movie host links.

    *name* is '<title> (YYYY)'.  Searches the site, opens matching result
    pages, and collects non-sponsor host links.  Returns the
    urlresolver-filtered list (empty list on error).
    """
    try:
        found = []
        movie_name = name[:-6]
        movie_name_short = name[:-7]
        movie_year = name[-6:].replace('(', '').replace(')', '')
        mername = movie_name.replace(" ", "+")[:-1]
        movie_match = movie_name.replace(" ", "_") + movie_year
        search_page = OPEN_URL(base_url + '?search=' + mername)
        results = re.findall('<div class="main_list_box"><a href="(.+?)" title="(.+?)"><img', search_page)
        for result_url, result_title in results:
            # Accept either a URL containing title_year or an exact title match.
            if movie_match in result_url or movie_name_short == result_title:
                detail_page = OPEN_URL(base_url + result_url)
                vidlinks = dom_parser.parse_dom(detail_page, 'span', {'class': "movie_version_link"})
                linknames = dom_parser.parse_dom(detail_page, 'span', {'class': "version_host"})
                for host_label, vidlink in zip(linknames, vidlinks):
                    for linkurl in re.findall('<a href="(.+?)"', vidlink):
                        # Skip ad redirects and sponsor/placeholder rows.
                        if "ads.php" in linkurl or "Sponsor" in host_label or "Host" in host_label:
                            continue
                        found.append({'hostname': 'MerDB', 'views': None,
                                      'url': base_url + linkurl,
                                      'host': host_label.replace("'", ""),
                                      'direct': False})
        found = main_scrape.apply_urlresolver(found)
        return found
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='MerDb', msg='(error) %s %s' % (str(e), ''), duration=5000, sound=None)
        return hosters
def tmovies(name):
    """TwoMovies movie scraper (second definition kept from the original
    file).  Fetches both candidate watch pages for '<title> (YYYY)' and
    returns the merged, urlresolver-filtered host links."""
    try:
        sources = []
        searchUrl = base_url + 'watch_movie/'
        movie_name = name[:-6]
        movie_name_short = name[:-7]
        movie_year = name[-6:].replace('(', '').replace(')', '')
        sname = movie_name.replace(" ", "+")
        base_match = movie_name.replace(" ", "_").replace(":", "").replace("-", "")
        # Try the plain-title page first, then the title+year page.
        candidates = (base_url + 'watch_movie/' + base_match[:-1],
                      base_url + 'watch_movie/' + base_match + movie_year)
        for candidate in candidates:
            html = OPEN_URLTM(candidate)
            hosts = dom_parser.parse_dom(html, 'a', {'class': "norm vlink"})
            links = dom_parser.parse_dom(html, 'a', {'class': "norm vlink"}, ret='href')
            for host, url in zip(hosts, links):
                sources.append({'url': url,
                                'host': host.replace('www.', ''),
                                'direct': False})
        sources = main_scrape.apply_urlresolver(sources)
        return sources
    except Exception as e:
        hosters = []
        log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR)
        if kodi.get_setting('error_notify') == "true":
            kodi.notify(header='TwoMovies', msg='(error) %s %s' % (str(e), ''), duration=5000, sound=None)
        return hosters
def find_season(name,trakt_id): try: media = 'shows' movie_title =name print "TRAKT ID IS : "+trakt_id link = trakt_api.get_show_seasons(trakt_id) for e in link: infoLabels = trakt_api.process_show(e) infoLabels.update(make_infoLabels(e)) #trakt_id = str(infoLabels['trakt_id']) if infoLabels['cover_url'] == None: infoLabels['cover_url'] = artwork+'place_poster.png' menu_items=[] menu_items.append(('[COLOR gold]Show Information[/COLOR]', 'XBMC.Action(Info)')) #if kodi.get_setting('trakt_authorized') == 'true': #menu_items.append(('[COLOR gold]Mark as Watched[/COLOR]', 'RunPlugin(%s)' % addon.build_plugin_url({'trakt_id':trakt_id, 'mode':'add_watched_history', 'name':name, 'media':media}))) kodi.addDir('Season '+str(infoLabels['number']),'','find_episode',infoLabels['cover_url'],movie_title,5,trakt_id,'shows',meta_data=infoLabels,menu_items=menu_items,replace_menu=True) kodi.auto_view('season') except Exception as e: log_utils.log('Error [%s] %s' % (str(e), ''), xbmc.LOGERROR) if kodi.get_setting('error_notify') == "true": kodi.notify(header='Trakt Seasons',msg='(error) %s %s' % (str(e), ''),duration=5000,sound=None)
def get_cooked_url(url, base_url, timeout, cookies=None, data=None, multipart_data=None, headers=None, allow_redirect=True, cache_limit=8):
    """Low-level HTTP GET/POST with cookie-jar handling.

    Sends *url* with the jar cookies for *base_url*, optional form or
    multipart body, and extra headers.  Returns the response body (gzip
    is transparently decoded, capped at MAX_RESPONSE bytes), a redirect
    target when allow_redirect is False, a cloudflare-solved page on a
    503 challenge, or '' on error.
    """
    if cookies is None: cookies = {}
    if timeout == 0: timeout = None  # urllib2 treats None as "no timeout"
    if headers is None: headers = {}
    referer = headers['Referer'] if 'Referer' in headers else url
    if kodi.get_setting('debug') == "true":
        log_utils.log('Getting Url: %s cookie=|%s| data=|%s| extra headers=|%s|' % (url, cookies, data, headers))
    if data is not None:
        if isinstance(data, basestring):
            data = data  # already encoded by the caller
        else:
            data = urllib.urlencode(data, True)
    if multipart_data is not None:
        headers['Content-Type'] = 'multipart/form-data; boundary=X-X-X'
        data = multipart_data
    try:
        cj = _set_cookies(base_url, cookies)
        request = urllib2.Request(url, data=data)
        request.add_header('User-Agent', _get_ua())
        # Host/Referer must survive redirects, hence unredirected headers.
        request.add_unredirected_header('Host', '9movies.to')
        request.add_unredirected_header('Referer', referer)
        for key in headers: request.add_header(key, headers[key])
        cj.add_cookie_header(request)
        if not allow_redirect:
            opener = urllib2.build_opener(NoRedirection)
            urllib2.install_opener(opener)
        else:
            opener = urllib2.build_opener(urllib2.HTTPRedirectHandler)
            urllib2.install_opener(opener)
            # NOTE(review): the second install replaces the first; only the
            # cookie-processor opener ends up active on this path.
            opener2 = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            urllib2.install_opener(opener2)
        response = urllib2.urlopen(request, timeout=timeout)
        cj.extract_cookies(response, request)
        if kodi.get_setting('cookie_debug') == 'true':
            print 'Response Cookies: %s - %s' % (url, cookies_as_str(cj))
        __fix_bad_cookies()
        cj.save(ignore_discard=True)
        # When redirects are suppressed, hand back the redirect target
        # (either a Refresh header's url= part or the Location header).
        if not allow_redirect and (response.getcode() in [301, 302, 303, 307] or response.info().getheader('Refresh')):
            if response.info().getheader('Refresh') is not None:
                refresh = response.info().getheader('Refresh')
                return refresh.split(';')[-1].split('url=')[-1]
            else:
                return response.info().getheader('Location')
        content_length = response.info().getheader('Content-Length', 0)
        if int(content_length) > MAX_RESPONSE:
            log_utils.log('Response exceeded allowed size. %s => %s / %s' % (url, content_length, MAX_RESPONSE), log_utils.LOGWARNING)
        # Body is read at most MAX_RESPONSE bytes either way.
        if response.info().get('Content-Encoding') == 'gzip':
            buf = StringIO(response.read(MAX_RESPONSE))
            f = gzip.GzipFile(fileobj=buf)
            html = f.read()
        else:
            html = response.read(MAX_RESPONSE)
    except urllib2.HTTPError as e:
        # Cloudflare's browser-check returns 503; try to solve it.
        if e.code == 503 and 'cf-browser-verification' in e.read():
            #print "WAS ERROR"
            html = cloudflare.solve(url, cj, _get_ua())
            if not html:
                return ''
        else:
            log_utils.log('Error (%s) during THE scraper http get: %s' % (str(e), url), log_utils.LOGWARNING)
            return ''
    except Exception as e:
        log_utils.log('Error (%s) during scraper http get: %s' % (str(e), url), log_utils.LOGWARNING)
        return ''
    return html