def request_freess_cx(url='https://freess.cx/', headers=None):
    """Scrape the freess.cx front page for server QR-code images and decode them.

    Parameters:
        url: page to scrape (defaults to the freess.cx front page).
        headers: HTTP headers used for every request; when omitted, falls
            back to the module-level ``fake_ua`` user-agent dict.

    Returns:
        (servers, info): the list of parsed server entries and a metadata
        dict with the page title and banner message. On a top-level failure
        returns ``([], {'message': <error>, 'url': '', 'name': ''})``.
    """
    # None-sentinel instead of sharing the module-level fake_ua dict as a
    # default argument value (avoids the mutable-default pitfall).
    if headers is None:
        headers = fake_ua
    print('req fscx...')
    servers = []
    try:
        response = requests.get(url, headers=headers).text
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text
        msg = soup.find('section', attrs={'id': 'banner'}).text.strip()
        info = {'message': msg, 'url': url, 'name': str(title)}
        # Each server card is a div with this exact class string; its first
        # <a> links to the QR-code image for that server.
        qr = [url.strip('/') + '/' + div.find('a').get('href')
              for div in soup.find_all('div', attrs={'class': '4u 12u(mobile)'})]
        for i, img_url in enumerate(qr):
            print('req img', img_url)
            try:
                # scanNetQR presumably fetches and decodes the QR image to a
                # share URI; parse turns that into a server entry -- confirm
                # against their definitions.
                servers.append(
                    parse(scanNetQR(img_url, headers=headers),
                          ' '.join([title, str(i)])))
            except Exception as e:
                # Best-effort: one undecodable image must not abort the crawl.
                print(img_url)
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR freess.cx:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
def crawl_yitianjian(url='https://free.yitianjianss.com', headers=fake_ua):
    """Crawl the yitianjian free-SS page, decoding every <img> QR code found.

    Returns a (servers, info) pair; on a top-level failure the server list
    is empty and the info dict carries the error message.
    """
    print('req yitianjian...')
    results = []
    try:
        html = requests.get(url, headers=headers).text
        page = BeautifulSoup(html, 'html.parser')
        title = 'yitianjianss'
        info = {'message': '为确保安全,服务器地址会不定期更新。', 'url': url, 'name': str(title)}
        # Every <img> on the page is assumed to be a server QR code.
        image_links = (url + img.attrs['src'] for img in page.find_all('img'))
        for index, img_url in enumerate(image_links):
            try:
                decoded = scanNetQR(img_url, headers=headers)
                results.append(parse(decoded, ' '.join([title, str(index)])))
            except Exception as e:
                # Keep going: one broken image should not sink the crawl.
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR yitianjianss:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return results, info
def request_freess_cx(url='https://freess.cx/', headers=None):
    """Scrape freess.cx for server QR-code images and decode them.

    NOTE(review): this re-defines request_freess_cx from earlier in the
    file and therefore shadows it -- the two variants should be merged.

    Parameters:
        url: page to scrape (defaults to the freess.cx front page).
        headers: HTTP headers used for every request; when omitted, falls
            back to the module-level ``fake_ua`` user-agent dict, matching
            the sibling crawlers.

    Returns:
        (servers, info): the list of parsed server entries and a metadata
        dict. On a top-level failure returns
        ``([], {'message': <error>, 'url': '', 'name': ''})``.
    """
    # Consistency fix: the sibling crawlers spoof a user agent for BOTH the
    # page fetch and the QR-image fetch; this variant previously sent None
    # headers to requests.get and none at all to scanNetQR.
    if headers is None:
        headers = fake_ua
    print('req fscx...')
    servers = []
    try:
        response = requests.get(url, headers=headers).text
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text
        msg = soup.find('section', attrs={'id': 'banner'}).text.strip()
        info = {'message': msg, 'url': url, 'name': str(title)}
        # Each server card is a div with this exact class string; its first
        # <a> links to that server's QR-code image.
        qr = [url.strip('/') + '/' + div.find('a').get('href')
              for div in soup.find_all('div', attrs={'class': '4u 12u(mobile)'})]
        for i, img_url in enumerate(qr):
            print('req img', img_url)
            try:
                servers.append(
                    parse(scanNetQR(img_url, headers=headers),
                          ' '.join([title, str(i)])))
            except Exception as e:
                # Best-effort: one undecodable image must not abort the crawl.
                print(img_url)
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR freess.cx:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info