Example #1
def score_match_title(title1, title2):
    """Score how well two titles match: the fraction of words in the cleaned
    title1 that appear as a substring of some word in the cleaned title2."""
    try:
        t1 = cleantitle.simpletitle(title1).lower().split(' ')
        t2 = cleantitle.simpletitle(title2).lower().split(' ')

        c = 0
        for tt1 in t1:
            for tt2 in t2:
                if tt1 in tt2:
                    c += 1
                    break

        return float(c) / len(t1)
    except Exception as e:
        print e
        return 0
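
For reference, a minimal usage sketch. The cleantitle stub below is a hypothetical stand-in for the real cleantitle module (assumed here to do nothing more than collapse whitespace); only the scoring behaviour of score_match_title is being illustrated.

class cleantitle:
    @staticmethod
    def simpletitle(t):
        # hypothetical stand-in for the real helper
        return ' '.join(t.split())

print(score_match_title('The Blue Planet', 'Blue Planet II'))  # -> 0.666...
print(score_match_title('The Blue Planet', 'Planet Earth'))    # -> 0.333...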
Example #2
def lose_match_title(title1, title2):
    """Loose title match: True when every word of the shorter cleaned title
    has an exact, word-for-word counterpart in the other title."""
    try:
        t1 = cleantitle.simpletitle(title1).split(' ')
        t2 = cleantitle.simpletitle(title2).split(' ')
        c_min = min(len(t1), len(t2))
        c = 0
        for tt1 in t1:
            for tt2 in t2:
                if tt1.strip() == tt2.strip():
                    c += 1
                    break
        return c >= c_min
    except:
        return False
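
A quick contrast of the two matchers, using the same hypothetical simpletitle stub as above: lose_match_title requires an exact word match for every word of the shorter title, while score_match_title also counts substring matches and returns a ratio.

print(lose_match_title('Blue Planet', 'The Blue Planet'))    # True  (both words match exactly)
print(lose_match_title('Blue Planets', 'The Blue Planet'))   # False ('Planets' != 'Planet')
print(score_match_title('Blue Planets', 'The Blue Planet'))  # 0.5   ('blue' matches, 'planets' does not)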
Example #3
    def get_movie(self,
                  imdb,
                  title,
                  year,
                  proxy_options=None,
                  key=None,
                  testing=False):
        try:
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_movie', 'Provider Disabled by User')
                return None
            if self.siteonline == False:
                log('INFO', 'get_movie', 'Provider is Offline')
                return None

            headers = {
                'Referer': self.base_link,
                'User-Agent': self.user_agent
            }
            max = None
            title = title.replace('(3D)', '').strip().lower()
            title = title.replace('3D', '').strip().lower()
            title = re.sub(r'[0-9]+', '', title)
            poss_match = []
            xtitle = title

            try:
                for pg in range(100):
                    if len(poss_match) > 0:
                        break

                    xtitle = cleantitle.simpletitle(xtitle)
                    query_url = urlparse.urljoin(
                        self.base_link,
                        self.search_link) % (pg, urllib.quote_plus(xtitle))

                    if max != None and int(pg) >= int(max) and " " not in xtitle:
                        raise Exception('No results for: %s' % title)

                    log(type='INFO',
                        method='get_movie-1',
                        err='Searching - %s' % (query_url),
                        dolog=True,
                        logToControl=False,
                        doPrint=True)

                    result = proxies.request(query_url,
                                             proxy_options=proxy_options,
                                             use_web_proxy=self.proxyrequired,
                                             headers=headers,
                                             timeout=60)

                    for x in range(3):
                        if len(poss_match) > 0:
                            break
                        while 'Trick: less characters give more results' in result or " " in xtitle:
                            try:
                                xtitle = xtitle.split(" ")[:-1]
                                xtitle = ' '.join(xtitle)
                                query_url = urlparse.urljoin(
                                    self.base_link, self.search_link) % (
                                        pg, urllib.quote_plus(xtitle))
                                log(type='INFO',
                                    method='get_movie-2',
                                    err='Searching - %s' % (query_url),
                                    dolog=True,
                                    logToControl=False,
                                    doPrint=True)
                                result = proxies.request(
                                    query_url,
                                    proxy_options=proxy_options,
                                    use_web_proxy=self.proxyrequired,
                                    headers=headers,
                                    timeout=60)
                                if 'Trick: less characters give more results' not in result:
                                    break
                            except Exception as e:
                                break

                        if max == None:
                            try:
                                max1 = client.parseDOM(
                                    result,
                                    'a',
                                    attrs={'class': 'page gradient'})
                                if len(max1) > 0:
                                    max = int(max1[len(max1) - 1]) - 1
                            except Exception as e:
                                pass

                        try:
                            url_data = client.parseDOM(
                                result, 'div', attrs={'class': 'ajuste4'})
                        except Exception as e:
                            break

                        if len(url_data) > 0:
                            links_data = []

                            for data in url_data:
                                if len(poss_match) > 0:
                                    break
                                try:
                                    data = client.parseDOM(
                                        data, 'div', attrs={'class': 'view'})[0]
                                    url = client.parseDOM(
                                        data, 'a', ret='href')[0].strip()
                                    titlex = client.parseDOM(
                                        data, 'img', ret='alt')[0]
                                except Exception as e:
                                    log(type='INFO',
                                        method='get_movie-3-A',
                                        err='%s' % e,
                                        dolog=False,
                                        logToControl=False,
                                        doPrint=True)
                                    # parsing failed; skip this result so stale
                                    # url/titlex values are not reused below
                                    continue
                                if len(titlex) == 0 and url == 'player.php?title=':
                                    log(type='INFO',
                                        method='get_movie-3-B',
                                        err='No results for: %s' % xtitle,
                                        dolog=True,
                                        logToControl=False,
                                        doPrint=True)
                                else:
                                    url = urlparse.urljoin(self.base_link, url)
                                    try:
                                        poster = urlparse.urljoin(
                                            self.base_link_alts[0],
                                            client.parseDOM(data,
                                                            'img',
                                                            ret='src')[0])
                                    except:
                                        poster = None

                                    log(type='INFO',
                                        method='get_movie-3-C',
                                        err='Matching - %s' % url,
                                        dolog=False,
                                        logToControl=False,
                                        doPrint=True)
                                    if (title in titlex.lower() or titlex.lower() in title
                                            or lose_match_title(title, titlex.lower())):
                                        url = url.replace(' ', '%20')
                                        url = client.request(
                                            url,
                                            headers=headers,
                                            followredirect=True,
                                            output='geturl')
                                        url = client.request(
                                            url,
                                            headers=headers,
                                            followredirect=True,
                                            output='geturl')
                                        result = proxies.request(
                                            url,
                                            proxy_options=proxy_options,
                                            use_web_proxy=self.proxyrequired,
                                            headers=headers,
                                            timeout=60)

                                        try:
                                            url = client.parseDOM(result,
                                                                  'frame',
                                                                  ret='src')[0]
                                        except:
                                            try:
                                                url = client.parseDOM(
                                                    result,
                                                    'iframe',
                                                    ret='src')[0]
                                            except:
                                                pass

                                        if url != 'https://www.freedocufilms.com/player.php?title=':
                                            log(type='INFO',
                                                method='get_movie-3A',
                                                err='Verifying - %s' % url,
                                                dolog=True,
                                                logToControl=False,
                                                doPrint=True)
                                            result = proxies.request(
                                                url,
                                                proxy_options=proxy_options,
                                                use_web_proxy=self.proxyrequired,
                                                headers=headers,
                                                timeout=60)

                                            ex_title = client.parseDOM(
                                                result,
                                                'div',
                                                attrs={'class': 'rating'})[0]
                                            ex_title = client.parseDOM(
                                                ex_title, 'span')[0].strip()
                                            if year in ex_title:
                                                log(type='INFO',
                                                    method='get_movie-4',
                                                    err='Match found:%s' %
                                                    titlex,
                                                    dolog=True,
                                                    logToControl=False,
                                                    doPrint=True)

                                                all_files_t = re.findall(
                                                    r'({.*file.*:.*\.mp4.*})',
                                                    result)
                                                all_links_t = re.findall(
                                                    r'({.*file.*\.php.*:.*})',
                                                    result)

                                                all_files = remove_dup(
                                                    all_files_t)
                                                all_links = remove_dup(
                                                    all_links_t)

                                                try:
                                                    srt = re.findall(
                                                        r'\"(.*srt.*)\"',
                                                        result)[0]
                                                    srt = urlparse.urljoin(
                                                        self.base_link, srt)
                                                except:
                                                    srt = None

                                                if len(all_links) > 0:
                                                    for sn in range(len(all_links)):
                                                        try:
                                                            # rewrite the scraped JS object literal
                                                            # (unquoted keys) into parseable JSON
                                                            datax = (
                                                                all_links[sn]
                                                                .replace('fileTV', 'file')
                                                                .replace('fileHD', 'file')
                                                                .replace('file', "'file'")
                                                                .replace("'", '"')
                                                                .replace('label', '"label"')
                                                                .replace('type', '"type"'))
                                                            if len(all_files) > 0:
                                                                datay = (
                                                                    all_files[sn]
                                                                    .replace('fileTV', 'file')
                                                                    .replace('fileHD', 'file')
                                                                    .replace('file', "'file'")
                                                                    .replace("'", '"')
                                                                    .replace('label', '"label"')
                                                                    .replace('type', '"type"'))
                                                            else:
                                                                datay = None

                                                            data_j1 = json.loads(datax)
                                                            if datay != None:
                                                                data_j2 = json.loads(datay)

                                                            file = data_j1['file']
                                                            label = data_j1['label']
                                                            if datay != None:
                                                                src_file = data_j2['file']
                                                            else:
                                                                src_file = data_j1['file']

                                                            link_data = {
                                                                'file': file,
                                                                'title': titlex,
                                                                'label': label,
                                                                'page': url,
                                                                'srt': srt,
                                                                'src_file': src_file,
                                                                'poster': poster
                                                            }
                                                            links_data.append(link_data)
                                                        except Exception as e:
                                                            log(type='FAIL',
                                                                method='get_movie-5',
                                                                err='%s' % e,
                                                                dolog=False,
                                                                logToControl=False,
                                                                doPrint=True)
                                                    return links_data

                                            elif len(poss_match) == 0:
                                                if len(title.replace(' ', '')) >= len(titlex.replace(' ', '')):
                                                    score = score_match_title(titlex, title)
                                                else:
                                                    score = score_match_title(title, titlex)
                                                if score > 0.75:
                                                    log(type='INFO',
                                                        method='get_movie-3B',
                                                        err='Verifying - %s' % url,
                                                        dolog=True,
                                                        logToControl=False,
                                                        doPrint=True)
                                                    result = proxies.request(
                                                        url,
                                                        proxy_options=proxy_options,
                                                        use_web_proxy=self.proxyrequired,
                                                        headers=headers,
                                                        timeout=60)
                                                    ex_title = client.parseDOM(
                                                        result, 'div', attrs={'class': 'rating'})[0]
                                                    ex_title = client.parseDOM(ex_title, 'span')[0].strip()
                                                    if year in ex_title:
                                                        log(type='INFO',
                                                            method='get_movie-6',
                                                            err='Possible Match (Score:%s) (%s)' % (score, titlex),
                                                            dolog=True,
                                                            logToControl=False,
                                                            doPrint=True)
                                                        poss_match.append({'data': result, 'ref': url})
                                                    else:
                                                        log(type='FAIL',
                                                            method='get_movie-6',
                                                            err='Possible Match (Score:%s - Year MisMatch) (%s)' % (score, ex_title),
                                                            dolog=True,
                                                            logToControl=False,
                                                            doPrint=True)
                                                else:
                                                    log(type='FAIL',
                                                        method='get_movie-6',
                                                        err='Possible Match (Score:%s) (%s)' % (score, titlex),
                                                        dolog=True,
                                                        logToControl=False,
                                                        doPrint=True)
                    if ' ' not in xtitle:
                        break
            except Exception as e:
                log(type='FAIL',
                    method='get_movie-7',
                    err='%s' % e,
                    dolog=False,
                    logToControl=False,
                    doPrint=True)

            if len(poss_match) > 0:
                result = poss_match[0]['data']
                url = poss_match[0]['ref']
                log(type='INFO',
                    method='get_movie-8',
                    err='Possible Match found',
                    dolog=True,
                    logToControl=False,
                    doPrint=True)

                all_files_t = re.findall(r'({.*file.*:.*\.mp4.*})', result)
                all_links_t = re.findall(r'({.*file.*\.php.*:.*})', result)

                all_files = remove_dup(all_files_t)
                all_links = remove_dup(all_links_t)

                try:
                    srt = re.findall(r'\"(.*srt.*)\"', result)[0]
                    srt = urlparse.urljoin(self.base_link, srt)
                except:
                    srt = None

                if len(all_links) > 0:
                    for sn in range(len(all_links)):
                        try:
                            datax = (
                                all_links[sn]
                                .replace('fileTV', 'file')
                                .replace('fileHD', 'file')
                                .replace('file', "'file'")
                                .replace("'", '"')
                                .replace('label', '"label"')
                                .replace('type', '"type"'))
                            if len(all_files) > 0:
                                datay = (
                                    all_files[sn]
                                    .replace('fileTV', 'file')
                                    .replace('fileHD', 'file')
                                    .replace('file', "'file'")
                                    .replace("'", '"')
                                    .replace('label', '"label"')
                                    .replace('type', '"type"'))
                            else:
                                datay = None

                            data_j1 = json.loads(datax)
                            if datay != None:
                                data_j2 = json.loads(datay)

                            file = data_j1['file']
                            label = data_j1['label']
                            if datay != None:
                                src_file = data_j2['file']
                            else:
                                src_file = data_j1['file']

                            link_data = {
                                'file': file,
                                'title': titlex,
                                'label': label,
                                'page': url,
                                'srt': srt,
                                'src_file': src_file,
                                'poster': poster
                            }
                            links_data.append(link_data)
                        except Exception as e:
                            log(type='FAIL',
                                method='get_movie-9',
                                err='%s' % e,
                                dolog=False,
                                logToControl=False,
                                doPrint=True)

                    return links_data

            return

        except Exception as e:
            log('ERROR',
                'get_movie-10',
                '%s: %s' % (title, e),
                dolog=self.init)
        return
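
The core trick inside get_movie is converting the player page's JavaScript object literals (unquoted keys such as file/fileHD/fileTV, label, type) into strings json.loads will accept. Below is a standalone sketch of that repair using a made-up snippet rather than live page data; note it only works because the URLs involved do not themselves contain the bare substrings 'file', 'label' or 'type'.

import json

# Hypothetical scraped snippet; real pages embed similar JS object literals.
scraped = "{fileHD:'https://example.com/stream-hd.mp4',label:'720p',type:'video/mp4'}"

repaired = (scraped
            .replace('fileTV', 'file')
            .replace('fileHD', 'file')
            .replace('file', "'file'")   # quote the key...
            .replace("'", '"')           # ...then switch every quote to double quotes
            .replace('label', '"label"')
            .replace('type', '"type"'))

entry = json.loads(repaired)
print(entry['label'])  # 720p
print(entry['file'])   # https://example.com/stream-hd.mp4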
Example #4
    def get_sources(self,
                    url,
                    hosthdDict=None,
                    hostDict=None,
                    locDict=None,
                    proxy_options=None,
                    key=None,
                    testing=False):
        try:
            sources = []
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_sources', 'Provider Disabled by User')
                log('INFO', 'get_sources', 'Completed')
                return sources
            if url == None:
                log('FAIL',
                    'get_sources',
                    'url == None. Could not find a matching title: %s' %
                    cleantitle.title_from_key(key),
                    dolog=not testing)
                log('INFO', 'get_sources', 'Completed')
                return sources

            REMAP_TYPE = {
                'trailer': 'Trailer',
                'feature_trailer': 'Trailer',
                'theatrical_trailer': 'Trailer',
                'behind_the_scenes': 'Behind the scenes',
                'deleted_scene': 'Deleted Scenes',
                'featurette': 'Featurette',
                'featured_box': 'Featurette',
                'music-video': 'Music Video',
                'clip': 'Misc.'
            }

            year = None
            episode = None
            season = None

            log('INFO', 'get_sources-1', 'data-items: %s' % url, dolog=False)
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
            title = cleantitle.simpletitle(title)
            try:
                year = re.findall(
                    '(\d{4})', data['premiered']
                )[0] if 'tvshowtitle' in data else data['year']
            except:
                try:
                    year = data['year']
                except:
                    year = None

            title_s = title.split(' ')
            queries = []
            for ts in range(len(title_s)):
                titles = ('+'.join(
                    str(x) for x in title_s[:len(title_s) - ts]))
                queries.append('%s+%s' % (titles, year))
                queries.append(titles)
            rs = []

            for q in queries:
                page_count = 1
                search_url = self.base_link + '/movie/results/' + \
                    '?lang=hindi&page=%s&query=%s' % (page_count, q)
                log('INFO', 'get_sources-2', 'Searching: %s' % search_url)
                r, res = request_einthusan(search_url)

                try:
                    movies = client.parseDOM(res,
                                             'section',
                                             attrs={'id': 'UIMovieSummary'})[0]
                    movies = client.parseDOM(movies, 'li')

                    for block in movies:
                        try:
                            blocka = client.parseDOM(block,
                                                     'div',
                                                     attrs={'class':
                                                            'block1'})[0]
                            loc = self.base_link + client.parseDOM(
                                blocka, 'a', ret='href')[0]
                            poster = "http:" + client.parseDOM(
                                blocka, 'img', ret='src')[0]
                            titlex = client.parseDOM(block, 'h3')[0]
                            yearx = client.parseDOM(block,
                                                    'div',
                                                    attrs={'class': 'info'})[0]
                            yearx = client.parseDOM(yearx, 'p')[0]
                            if str(year) in str(yearx):
                                rs.append([titlex, yearx, loc, poster])
                                log('INFO', 'get_sources-3',
                                    'match-page-url: %s | %s' % (loc, titlex))
                                break
                        except:
                            pass
                    if len(rs) > 0:
                        break
                except:
                    pass

            if len(rs) > 0:
                links_m = []
                vidtype = 'Movie'
                riptype = 'BRRIP'
                quality = '720p'

                for r in rs:
                    video_urls = []
                    trailers = []
                    music_vids = []
                    poster = r[3]
                    page_url = r[2]
                    eindata1, htm = GetEinthusanData(page_url)
                    eindata1 = json.loads(json.dumps(eindata1))

                    log('INFO', 'get_sources-4-A',
                        'GetEinthusanData: %s' % eindata1)

                    video_urls.append(eindata1['MP4Link'])
                    video_urls.append(eindata1['HLSLink'])

                    if testing == False:
                        try:
                            matches = re.compile(
                                'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
                            ).findall(htm)
                            matches = list(set(matches))
                            for match in matches:
                                try:
                                    if 'youtube.com' in match:
                                        match = match.replace(
                                            'embed/', 'watch?v=')
                                        trailers.append(match)
                                        log('INFO', 'get_sources-4-B',
                                            'trailers: %s' % match)
                                except:
                                    pass
                        except Exception as e:
                            log('FAIL', 'get_sources-4', '%s' % e)

                    if testing == False:
                        try:
                            musicblock = client.parseDOM(
                                htm,
                                'section',
                                attrs={'id': 'UICompactMovieClipList'})[0]
                            musicblock = client.parseDOM(musicblock, 'li')
                            music_vids = []
                            locx = None
                            for block in musicblock:
                                try:
                                    music_vids_s = []
                                    locx = self.base_link + client.parseDOM(
                                        block,
                                        'a',
                                        attrs={'class': 'title'},
                                        ret='href')[0]
                                    thumbx = "http:" + client.parseDOM(
                                        block, 'img', ret='src')[0]
                                    titlex = client.parseDOM(
                                        block, 'a', attrs={'class':
                                                           'title'})[0]
                                    titlex = client.parseDOM(titlex, 'h5')[0]
                                    eindata1, htm1 = GetEinthusanData(locx)
                                    eindata1 = json.loads(json.dumps(eindata1))
                                    log('INFO', 'get_sources-4-C',
                                        'GetEinthusanData: %s' % eindata1)
                                    type = eindata1['type']
                                    if type in REMAP_TYPE.keys():
                                        type = REMAP_TYPE[type]
                                    else:
                                        type = REMAP_TYPE['clip']
                                    music_vids_s.append(
                                        [eindata1['MP4Link'], type])
                                    music_vids_s.append(
                                        [eindata1['HLSLink'], type])
                                    music_vids.append(
                                        [titlex, thumbx, music_vids_s, locx])
                                except Exception as e:
                                    log('FAIL', 'get_sources-5A',
                                        '%s : %s' % (e, locx))
                        except Exception as e:
                            log('FAIL', 'get_sources-5B', '%s' % e)

                    for vid in trailers:
                        try:
                            l = resolvers.createMeta(vid,
                                                     self.name,
                                                     self.logo,
                                                     '720p', [],
                                                     key,
                                                     poster=poster,
                                                     vidtype='Trailer',
                                                     testing=testing,
                                                     page_url=page_url)
                            for ll in l:
                                if ll != None and 'key' in ll.keys():
                                    links_m.append(ll)
                        except:
                            log('FAIL', 'get_sources-6',
                                'Could not add: %s' % vid)

                    for vid in music_vids:
                        try:
                            for v in vid[2]:
                                l = resolvers.createMeta(v[0],
                                                         self.name,
                                                         self.logo,
                                                         '720p', [],
                                                         key,
                                                         poster=vid[1],
                                                         vidtype=v[1],
                                                         testing=testing,
                                                         txt=vid[0],
                                                         page_url=vid[3])
                                for ll in l:
                                    if ll != None and 'key' in ll.keys():
                                        links_m.append(ll)
                        except:
                            log('FAIL', 'get_sources-7',
                                'Could not add: %s' % v[0])

                    for vid in video_urls:
                        try:
                            l = resolvers.createMeta(vid,
                                                     self.name,
                                                     self.logo,
                                                     quality, [],
                                                     key,
                                                     poster=poster,
                                                     riptype=riptype,
                                                     vidtype=vidtype,
                                                     testing=testing,
                                                     page_url=page_url)
                            for ll in l:
                                if ll != None and 'key' in ll.keys():
                                    links_m.append(ll)
                        except:
                            log('FAIL', 'get_sources-8',
                                'Could not add: %s' % vid)

                for l in links_m:
                    if l != None and 'key' in l.keys():
                        sources.append(l)

            if len(sources) == 0:
                log(
                    'FAIL', 'get_sources',
                    'Could not find a matching title: %s' %
                    cleantitle.title_from_key(key))
            else:
                log(
                    'SUCCESS', 'get_sources', '%s sources : %s' %
                    (cleantitle.title_from_key(key), len(sources)))

            log('INFO', 'get_sources', 'Completed')

            return sources
        except Exception as e:
            log('ERROR', 'get_sources', '%s' % e)
            log('INFO', 'get_sources', 'Completed')
            return sources
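
get_sources widens its Einthusan search by progressively dropping the last word of the title and trying each shortened query both with and without the year. The same idea in isolation (plain Python, no provider plumbing; the title and year are just sample values):

def build_queries(title, year):
    # e.g. ('Dil Chahta Hai', 2001) ->
    # ['Dil+Chahta+Hai+2001', 'Dil+Chahta+Hai',
    #  'Dil+Chahta+2001', 'Dil+Chahta',
    #  'Dil+2001', 'Dil']
    title_s = title.split(' ')
    queries = []
    for ts in range(len(title_s)):
        titles = '+'.join(str(x) for x in title_s[:len(title_s) - ts])
        queries.append('%s+%s' % (titles, year))
        queries.append(titles)
    return queries

print(build_queries('Dil Chahta Hai', 2001))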