Ejemplo n.º 1
0
    def resolve(self, url):
        """Follow one redirect hop for *url* and return the resolved link.

        Waits 5 s (site rate-limit), requests the page without following
        redirects, and prefers the Location header when one is present.
        Links that stay on this site's own domain resolve to ``None``;
        Google-hosted links are delegated to the private ``__google``
        resolver.  Any failure silently yields ``None``.
        """
        try:
            control.sleep(5000)

            target = urlparse.urljoin(self.base_link, url)
            # output='extended' returns a tuple whose index 2 holds the headers.
            response = client.request(target, redirect=False, output='extended')

            if response and response[2]['Location'].strip():
                response = response[2]['Location']

            # Links pointing back at the site itself are not real sources.
            if self.base_link in response:
                return None
            if 'google' in response:
                return self.__google(response)
            return response
        except:
            return
Ejemplo n.º 2
0
def check_for_addon_update():
	"""Check the OpenScrapers repo for a newer addon version and notify the user.

	Downloads the repository addons.xml, extracts the published version of
	script.module.openscrapers, and compares it against the installed one.
	If the repo is newer, waits for any running library scan to finish and
	then pops a Kodi notification.  All failures are logged and swallowed so
	this check can never break the caller.
	"""
	log_utils.log('OpenScrapers checking available updates', log_utils.LOGNOTICE)
	try:
		import re
		import requests
		repo_xml = requests.get('https://raw.githubusercontent.com/a4k-openproject/repository.openscrapers/master/zips/addons.xml')
		if repo_xml.status_code != 200:
			log_utils.log('Could not connect to repo XML, status: %s' % repo_xml.status_code, log_utils.LOGNOTICE)
			return
		# Dots are escaped so they only match literal separators, and each
		# component requires at least one digit — the old pattern
		# r'\d*.\d*.\d*.\d*' let "." match any character and allowed empty
		# components, so it could capture garbage from unrelated markup.
		matches = re.findall(r'<addon id="script\.module\.openscrapers".*version="(\d+\.\d+\.\d+\.\d+)"', repo_xml.text)
		if not matches:
			# Old code indexed [0] unconditionally and raised IndexError here
			# whenever the repo layout changed; fail loudly in the log instead.
			log_utils.log('Could not parse OpenScrapers version from repo XML', log_utils.LOGNOTICE)
			return
		repo_version = matches[0]
		local_version = control.addonVersion()
		if control.check_version_numbers(local_version, repo_version):
			# Don't pop a notification while Kodi is scanning the library.
			while control.condVisibility('Library.IsScanningVideo'):
				control.sleep(10000)
			log_utils.log('A newer version of OpenScrapers is available. Installed Version: v%s, Repo Version: v%s' % (local_version, repo_version), log_utils.LOGNOTICE)
			control.notification(title='default', message='A new version of OpenScrapers is available from the repository. Please consider updating to v%s' % repo_version, icon='default', time=5000, sound=False)
	except Exception:
		log_utils.error()
Ejemplo n.º 3
0
# Plugin entry-point dispatch: sys.argv[2] carries the invocation query string.
params = dict(parse_qsl(sys.argv[2].replace('?', '')))
action = params.get('action')
mode = params.get('mode')
query = params.get('query')
name = params.get('name')

# Some callers pass the settings request in 'action', others in 'mode';
# both open the same settings dialog (previously two duplicate branches).
if action == "OpenscrapersSettings" or mode == "OpenscrapersSettings":
    control.openSettings('0.0', 'script.module.openscrapers')

elif action == 'ShowChangelog':
    from openscrapers.modules import changelog
    changelog.get()
    control.sleep(200)
    control.openSettings(query, "script.module.openscrapers")

elif action == 'ShowHelp':
    from openscrapers.help import help
    help.get(name)
    control.openSettings(query, "script.module.openscrapers")

elif action == "Defaults":
    # Reset every provider toggle back to its shipped default value.
    # (Removed a dead `sourceList = []` that was immediately overwritten.)
    for provider in sources_openscrapers.all_providers:
        source_setting = 'provider.' + provider
        control.setSetting(source_setting, control.getSettingDefault(source_setting))
Ejemplo n.º 4
0
    def sources(self, url, hostDict, hostprDict):
        """Scrape stream sources from the title page at *url*.

        url -- site-relative path, joined onto self.base_link.
        hostDict -- known hoster domains used to validate scraped links.
        hostprDict -- premium hoster domains (unused by this scraper; kept
            for the common scraper interface).

        Returns a list of source dicts (source/quality/language/url/info/
        direct/debridonly/checkquality).  The bare except returns whatever
        partial list was gathered before the failure.
        """
        sources = []

        try:
            if not url:
                return sources

            r = client.request(urlparse.urljoin(self.base_link, url))

            # Keep only tables that contain a 'linkSearch'/'linkSearch-a'
            # span — those tables hold the per-stream link lists.
            links = dom_parser.parse_dom(r, 'table')
            links = [
                i.content for i in links if dom_parser.parse_dom(
                    i, 'span', attrs={'class': re.compile('linkSearch(-a)?')})
            ]
            # Pull out every anchor, then keep only those whose text looks
            # like "name (count)" — i.e. real stream-group entries.
            links = re.compile('(<a.+?/a>)', re.DOTALL).findall(''.join(links))
            links = [
                dom_parser.parse_dom(i, 'a', req='href') for i in links
                if re.findall('(.+?)\s*\(\d+\)\s*<', i)
            ]
            links = [i[0].attrs['href'] for i in links if i]

            # Strip the pagination suffix so the current page compares equal
            # to its own link below (avoids a redundant re-fetch).
            url = re.sub('/streams-\d+', '', url)

            for link in links:
                # Skip English-language mirrors; this scraper serves 'de'.
                if '/englisch/' in link: continue
                control.sleep(3000)  # rate-limit between page fetches
                if link != url:
                    r = client.request(urlparse.urljoin(self.base_link, link))

                quality = 'SD'
                info = []

                # A 'thlink' header row links to a detail page whose title
                # carries the release name (used for quality detection).
                detail = dom_parser.parse_dom(r,
                                              'th',
                                              attrs={'class': 'thlink'})
                detail = [
                    dom_parser.parse_dom(i, 'a', req='href') for i in detail
                ]
                detail = [(i[0].attrs['href'],
                           i[0].content.replace('&#9654;', '').strip())
                          for i in detail if i]

                if detail:
                    quality, info = source_utils.get_release_quality(
                        detail[0][1])
                    # Follow through to the detail page for the hoster table.
                    r = client.request(
                        urlparse.urljoin(self.base_link, detail[0][0]))

                # Innermost tables (those not containing another table) hold
                # the (href, hoster-title) anchors for the actual streams.
                r = dom_parser.parse_dom(r, 'table')
                r = [
                    dom_parser.parse_dom(i, 'a', req=['href', 'title'])
                    for i in r if not dom_parser.parse_dom(i, 'table')
                ]
                r = [(l.attrs['href'], l.attrs['title']) for i in r for l in i
                     if l.attrs['title']]

                info = ' | '.join(info)

                for stream_link, hoster in r:
                    # Normalize the hoster name and drop unknown hosts.
                    valid, hoster = source_utils.is_host_valid(
                        hoster, hostDict)
                    if not valid: continue

                    direct = False

                    # Google Video links play directly without a resolver.
                    if hoster.lower() == 'gvideo':
                        direct = True

                    sources.append({
                        'source': hoster,
                        'quality': quality,
                        'language': 'de',
                        'url': stream_link,
                        'info': info,
                        'direct': direct,
                        'debridonly': False,
                        'checkquality': True
                    })

            return sources
        except:
            return sources