Example #1
0
    def _links(self, url):
        """Scrape stream links from the site's main page.

        Fetches ``self.base``, locates the ``<span>`` whose CSS class equals
        *url* (the parameter is a class name here, not an address — TODO
        confirm against callers), and returns ``(link, title)`` pairs for
        every link that ``linkSearch.getLinks`` resolves.

        :param url: CSS class of the span holding the streams table.
        :return: list of ``(resolved_link, title)`` tuples; ``[]`` when the
                 table yields no usable rows.
        """
        result = requests.get(self.base,
                              headers={
                                  'user-agent': constants.USER_AGENT
                              }).text
        soup = webutils.bs(result)
        table = soup.find('span', {'class': url})
        links = table.findAll('tr')
        links.pop(0)  # first <tr> is the header row
        links = self.__prepare_links(links)

        if not links:
            return []

        # Each prepared entry is (link, title); remember titles for lookup
        # after linkSearch filters the link list.
        titles = {entry[0]: entry[1] for entry in links}

        ret = linkSearch.getLinks([entry[0] for entry in links])

        # BUG FIX: the original had an unreachable second `return links`
        # after this return; it has been removed.
        return [(u, titles[u]) for u in ret]
Example #2
0
    def _links(self, id):
        """Fetch stream links for an event from the sportsurge API.

        :param id: event identifier interpolated into the API URL. (The name
            shadows the ``id`` builtin but is kept for caller compatibility.)
        :return: list of ``(url, title)`` tuples for every stream that
            ``linkSearch.getLinks`` resolves; titles are formatted as
            ``"<coverage> <resolution>@<framerate> (<language>)"``.
        """
        uri = 'https://api.sportsurge.net/streams/list?event={}'.format(id)
        self.session.headers.update({'referer': self.base})
        html = requests.get(uri).text

        streams = json.loads(html)['streams']

        titles = {}
        stream_urls = []  # preserves order and duplicates for getLinks
        for r in streams:
            title = u'{} {}@{} ({})'.format(r['coverage'], r['resolution'],
                                            r['framerate'], r['language'])
            url = r['url']
            titles[url] = title
            stream_urls.append(url)

        ret = linkSearch.getLinks(stream_urls)

        return [(u, titles[u]) for u in ret]
    def _links(self, url):
        """Scrape the 101placeonline soccer streams table for an event.

        :param url: event path segment interpolated into the streams-table
            URL (presumably an event id — TODO confirm against callers).
        :return: list of ``(stream_link, title)`` tuples for every row whose
            link ``linkSearch.getLinks`` resolves; ``[]`` when the streams
            table is missing from the page.
        """
        uri = 'https://streams.101placeonline.com/streams-table/{}/soccer?new-ui=1&origin={}'.format(
            url, self.ref)
        html = requests.get(uri).text
        soup = webutils.bs(html).find('table',
                                      {'class': 'table streams-table-new'})

        # BUG FIX: the original used a bare `except:` to guard against the
        # table being absent (find() returning None); check explicitly.
        if soup is None:
            return []
        rows = soup.findAll('tr')
        rows.pop(0)  # first <tr> is the header row

        titles = {}
        stream_links = []
        # Loop variable renamed from `url` so it no longer shadows the
        # parameter (behavior was unaffected, but it was confusing).
        for row in rows:
            cells = row.findAll('td')
            title = u'{} {} ({})'.format(cells[7].getText().strip(),
                                         cells[4].getText().strip(),
                                         cells[5].getText().strip())
            link = row['data-stream-link']
            titles[link] = title
            stream_links.append(link)

        ret = linkSearch.getLinks(stream_links)

        return [(u, titles[u]) for u in ret]
Example #4
0
	def _links(self,url):
		"""Extract stream links and titles from an event page via regex.

		:param url: page address fetched with the shared session ``self.s``.
		:return: list of ``(absolute_link, title)`` tuples for every match
		    that ``linkSearch.getLinks`` resolves.
		"""
		self.s.headers.update({'referer': self.base})
		page = self.s.get(url).text
		matches = re.findall('text\-warning.+?ur(?:i|l)\s*=\s*[\"\']([^\"\']+).+?>([^<]+)', page)

		titles = {}
		candidates = []
		for path, label in matches:
			absolute = self.base + path
			candidates.append(absolute)
			titles[absolute] = label.strip()

		resolved = linkSearch.getLinks(candidates)

		return [(u, titles[u]) for u in resolved]
Example #5
0
	def _links(self,url):
		"""Extract stream links from an event page via regex scraping.

		:param url: page address to fetch.
		:return: list of ``(link, title)`` tuples for every prepared link
		    that ``linkSearch.getLinks`` resolves; ``[]`` when no links
		    survive ``self.__prepare_links``.
		"""
		import requests
		html = requests.get(url).text
		links = re.findall('title\s*=\s*[\"\']([^\"\']*)[^$]+linkflag.+?Kbps.+?>([^<]*).+?\&nbsp;(\d*).+?href\s*=\s*[\"\']([^\"\']+).+?(?:&nbsp.+?>([^<]+))?', html, re.DOTALL)
		links = self.__prepare_links(links)
		if not links:
			return []

		# Each prepared entry is (link, title); keep titles for lookup
		# after linkSearch filters the link list.
		titles = {entry[0]: entry[1] for entry in links}

		ret = linkSearch.getLinks([entry[0] for entry in links])

		return [(u, titles[u]) for u in ret]
Example #6
0
    def _links(self, url):
        """Scrape the nbabite/sportscentral basketball streams table.

        Fetches the event page at *url*, extracts the ``streamsmatchid``
        from its JavaScript, then scrapes the sportscentral streams table
        for that match.

        :param url: event page address (also sent as the referer for the
            streams-table request).
        :return: list of ``(stream_link, title)`` tuples for every row whose
            link ``linkSearch.getLinks`` resolves; ``[]`` when the streams
            table is missing.
        """
        html = requests.get(url, headers={'Referer': self.base}).text
        # Renamed from `id` (shadowed the builtin); interface unaffected.
        match_id = re.findall('streamsmatchid\s*=\s*(\d+)\;', html, flags=re.I)[0]

        uri = 'https://sportscentral.io/streams-table/{}/basketball?new-ui=1&origin=nbabite.com'.format(
            match_id)
        html = requests.get(uri,
                            headers={
                                'user-agent': constants.USER_AGENT,
                                'referer': url
                            }).text
        soup = webutils.bs(html).find('table',
                                      {'class': 'table streams-table-new'})

        # BUG FIX: bare `except:` replaced with an explicit check for the
        # table being absent (find() returning None).
        if soup is None:
            return []
        rows = soup.findAll('tr')
        rows.pop(0)  # first <tr> is the header row

        titles = {}
        stream_links = []
        for row in rows:
            cells = row.findAll('td')
            title = '{} {} ({})'.format(cells[7].getText().strip(),
                                        cells[4].getText().strip(),
                                        cells[5].getText().strip())
            link = row['data-stream-link']
            titles[link] = title
            stream_links.append(link)

        ret = linkSearch.getLinks(stream_links)

        return [(u, titles[u]) for u in ret]