def error_check_make_request():
    """Every malformed url must make requester.make_request raise InvalidUrlError."""
    bad_urls = (None, 23, '//www.reddit.com', 'www.google.com')
    for bad in bad_urls:
        with pytest.raises(InvalidUrlError):
            requester.make_request(bad)
def user(uuid):
    """Single-user endpoint: GET lists, PATCH updates, anything else deletes.

    The HTTP verb selects the backend API command; the user id is always
    taken from the route's *uuid*.
    """
    if request.method == 'GET':
        response, _error = requester.make_request(
            'listUsers', {'id': uuid}, None,
            host, port, apikey, secretkey, protocol, path)
    elif request.method == 'PATCH':
        # Merge the route uuid into the JSON body before forwarding it.
        payload = request.json
        payload['id'] = uuid
        response, _error = requester.make_request(
            'updateUser', payload, None,
            host, port, apikey, secretkey, protocol, path)
    else:
        response, _error = requester.make_request(
            'deleteUser', {'id': uuid}, None,
            host, port, apikey, secretkey, protocol, path)
    return response
def users():
    """Users collection endpoint: GET lists all users, POST creates one."""
    if request.method == 'GET':
        return listusers()
    elif request.method == 'POST':
        # The request body must be a JSON dictionary of createUser
        # parameters; it is forwarded to the backend unchanged.
        response, _error = requester.make_request(
            'createUser', request.json, None,
            host, port, apikey, secretkey, protocol, path)
        return response
def test_get_format():
    """requester.get_format must classify fetched responses as m3u, zip or html.

    The original test duplicated the request/format/assert pattern seven
    times across index-matched variables, which is error-prone to extend;
    each url is now paired directly with its expected classification.
    """
    cases = [
        ('http://freedailyiptv.com/links/29-05-2018/SE_freedailyiptv.com.m3u',
         'm3u'),
        ('http://freedailyiptv.com/links/11-06-2018/World_freedailyiptv.com.m3u',
         'm3u'),
        # not an m3u
        ('http://xsat.me:31000/get.php?username=mahmood&password=mahmood&type=m3u',
         'html'),
        ('https://dailyiptvlist.com/dl/us-m3uplaylist-2018-06-12-1.m3u',
         'm3u'),
        ('https://cafe-tv.net/wp-content/uploads/2018/06/france0606.m3u',
         'm3u'),
        ('http://ipv4.download.thinkbroadband.com/5MB.zip',
         'zip'),
        ('http://www.mediafire.com/file/opkx06qikkxetya/IPTV-Espa%C3%B1a-M3u-Playlist-'
         'Update-17-12-2017.zip',
         'html'),
    ]
    for url, expected in cases:
        response = requester.make_request(url)
        assert requester.get_format(response) == expected, \
            'Error: incorrect format'
def expand(url):
    """Resolve a url-shortener link to its final destination.

    Raises InvalidUrlError when *url* fails validation. When the url
    matches the shortener pattern and the request succeeds, the
    redirected final url is returned; otherwise *url* comes back as-is.
    """
    if not requester.validate_url(url):
        raise InvalidUrlError('Cannot fix shortness of invalid url: ' + str(url))
    if re.search(regex['short'], url):
        response = requester.make_request(url)
        if response is not None:
            # The response object carries the post-redirect url.
            url = response.url
    return url
def crawler_helper(urls, method='check_for_files'):
    """Run one Crawler pass per url and collect every discovered stream netloc.

    *method* selects the crawler entry point:
      'check_for_files' - look for downloadable playlist files,
      'text'            - scan the page text for embedded urls,
      'ref'             - scan the page's href/reference urls.

    Returns the set of network locations recorded in the streamer database.
    Each crawler's streamer is deleted after its results are harvested.
    """
    streams = set()
    for url in urls:
        crawler = Crawler(test_url=url)
        # BUG FIX: the original compared strings with 'is', which tests
        # object identity and only happens to work because CPython interns
        # short literals; '==' is the correct, portable comparison.
        if method == 'check_for_files':
            crawler.check_for_files(url)
        elif method == 'text':
            r = requester.make_request(url)
            if r:
                soup = BeautifulSoup(r.text, 'html.parser')
                crawler.check_text_urls(soup, r.url)
        elif method == 'ref':
            r = requester.make_request(url)
            soup = BeautifulSoup(r.text, 'html.parser')
            crawler.check_ref_urls(soup)
        for post in crawler.get_streamer().database().streams.find():
            for netloc in post['network_locations']:
                streams.add(netloc['network_location'])
        crawler.streamer.delete()
    return streams
def test_make_request():
    """Only reachable urls should yield a non-None response from make_request."""
    google = 'https://www.google.com'
    playlist = 'http://freedailyiptv.com/links/29-05-2018/SE_freedailyiptv.com.m3u'
    # this link should not work
    dead = 'http://bestiptvsrv.tk:25461/get.php?username=anis&password=anis&type=m3u'
    reachable = [u for u in (google, playlist, dead)
                 if requester.make_request(u) is not None]
    assert reachable == [google, playlist]
def apicall(verb, subject, data):
    """Build the API command from *verb* + *subject*, issue it, and return
    the response — or an (error, http-status) pair when the backend
    reports an error."""
    command = get_command(verb, subject)
    response, error = make_request(command, data, None, host, port,
                                   apikey, secretkey, protocol, path)
    if error is None:
        return response
    return error, get_error_code(error)
def templates():
    """Render every template (templatefilter=all) into res.html."""
    response, _error = requester.make_request(
        'listTemplates', {'templatefilter': 'all'}, None,
        host, port, apikey, secretkey, protocol, path)
    results = json.loads(str(response))['listtemplatesresponse']
    return render_template('res.html', results=results)
def vms():
    """Render the full virtual-machine listing into res.html."""
    response, _error = requester.make_request(
        'listVirtualMachines', {}, None,
        host, port, apikey, secretkey, protocol, path)
    results = json.loads(str(response))['listvirtualmachinesresponse']
    return render_template('res.html', results=results)
def listusers():
    """Render the full user listing into res.html."""
    response, _error = requester.make_request(
        'listUsers', {}, None,
        host, port, apikey, secretkey, protocol, path)
    results = json.loads(str(response))['listusersresponse']
    return render_template('res.html', results=results)
def listtemplates():
    """Fetch the featured templates and return the raw API sub-dictionary."""
    response, error = requester.make_request(
        'listTemplates',
        {"templatefilter": 'featured', "listall": 'true'},
        None, config.host, config.port, config.apikey, config.secretkey,
        config.protocol, config.path)
    # Debug output kept: the original printed both values on every call.
    print(error)
    print(response)
    parsed = json.loads(response.decode())
    return parsed['listtemplatesresponse']
def add_to_streams(self, url, host, ext_title=None):
    """Validate *url*, resolve its network location's IP addresses, probe the
    stream on a Fibonacci attempt schedule, and record the outcome in the
    streamer database.

    NOTE(review): the source arrived with all indentation collapsed; the
    nesting below (in particular where the attempt counter is incremented)
    is reconstructed and should be confirmed against the original file.

    Args:
        url: candidate stream url; must pass requester.validate_url.
        host: url the stream was found at; must also pass validation.
        ext_title: optional title to associate with the stream's IPs.

    Raises:
        InvalidUrlError: when *url* or *host* fails validation.
    """
    if not requester.validate_url(url):
        raise InvalidUrlError('Cannot add an invalid url to streams: %s' % url)
    if not requester.validate_url(host):
        raise InvalidUrlError(
            'Cannot add a url to streams with an invalid host: %s' % host)
    netloc = um.prepare_netloc(url)
    if netloc not in self.broken_stream_links and netloc not in self.working_stream_links:
        # Note that network locations are only added to broken_stream_links or
        # working_stream_links if their working link status is known. Also note
        # that visitor classes are unique to crawler classes, therefore each
        # visitor class will only deal with one host. Therefore if a stream is
        # added to the database from a given visitor class with a known working
        # link status, then it is no longer necessary to evaluate that stream;
        # doing so would lead to redundant requests to the database.
        if netloc not in self.ip_addresses:  # if there isn't an IP address assigned to the network location
            try:
                ip_addresses = socket.gethostbyname_ex(
                    um.remove_schema(netloc)
                )[2]  # fetches all IP addresses from the network location
                self.ip_addresses[netloc] = ip_addresses
            except socket.gaierror:  # if this error is raised then the network location is down
                ip_addresses = None
        else:
            # IPs already resolved on an earlier visit; reuse the cache.
            ip_addresses = self.ip_addresses[netloc]
        if ip_addresses:
            # Per-url probe cache so all IPs of this netloc share one probe.
            stream_statuses = {}
            for ip_address in ip_addresses:
                playable_url = False
                if (ip_address, netloc) not in self.connection_attempts:
                    self.connection_attempts[(ip_address, netloc)] = 1
                # Only probe on attempts whose count is a Fibonacci number —
                # an increasing back-off schedule for flaky hosts.
                if self.connection_attempts[(ip_address, netloc)] in self.fibs:
                    if url not in stream_statuses:
                        try:
                            stream_status = requester.evaluate_stream(url)
                        except StreamTimedOutError:
                            # Timed out: treat as a broken link.
                            stream_statuses[url] = working_link = False
                        else:
                            if stream_status:
                                stream_statuses[url] = working_link = True
                                # Stream evaluates as live; check whether the
                                # url itself answers a plain request too.
                                r = requester.make_request(url)
                                if r and r.ok:
                                    playable_url = True
                            elif self.connection_attempts[(
                                    ip_address, netloc)] == self.fibs[-1]:
                                # Final scheduled attempt exhausted: mark broken.
                                stream_statuses[url] = working_link = False
                            else:
                                # Status still unknown; leave None so a later
                                # attempt can re-evaluate.
                                stream_statuses[url] = working_link = None
                    else:
                        working_link = stream_statuses[url]
                    if not playable_url:
                        self.add_to_database_by_ip_address(
                            ip_address, netloc, host, working_link, ext_title)
                    else:
                        # Pass the url through so the playable link is stored.
                        self.add_to_database_by_ip_address(
                            ip_address, netloc, host, working_link, ext_title, url)
                    if working_link:
                        self.working_stream_links.add(netloc)
                    elif working_link is False:
                        self.broken_stream_links.add(netloc)
                self.connection_attempts[(ip_address, netloc)] += 1
    elif netloc in self.working_stream_links:
        # Known-working netloc: just attach the title to each of its IPs.
        ip_addresses = self.ip_addresses[netloc]
        for ip_address in ip_addresses:
            self.add_to_titles(ip_address, ext_title)
def apicall(command, data):
    """Issue *command* with *data* against the backend; return the response,
    or an (error, http-status) pair on failure."""
    response, error = make_request(command, data, None, host, port,
                                   apikey, secretkey, protocol, path)
    if error is None:
        return response
    return error, get_error_code(error)