Code example #1
0
def request_freess_cx(url='https://freess.cx/', headers=fake_ua):
    """Scrape freess.cx for QR-code image links and decode each into a server.

    Each matching <div> tile on the page links to a QR image; the image is
    fetched/decoded by scanNetQR and the resulting URI parsed by parse().
    Returns a (servers, info) tuple; on a top-level failure returns
    ([], {'message': <error>, 'url': '', 'name': ''}).
    """
    print('req fscx...')
    qr = list()
    servers = list()
    try:
        html = requests.get(url, headers=headers).text
        page = BeautifulSoup(html, 'html.parser')
        title = page.find('title').text
        msg = page.find('section', attrs={'id': 'banner'}).text.strip()

        info = {'message': msg, 'url': url, 'name': str(title)}
        # Build absolute QR-image URLs from each tile's anchor href.
        base = url.strip('/')
        qr = [base + '/' + tile.find('a').get('href')
              for tile in page.find_all('div', attrs={'class': '4u 12u(mobile)'})]
        for idx, img_url in enumerate(qr):
            print('req img', img_url)
            try:
                label = ' '.join([title, str(idx)])
                servers.append(parse(scanNetQR(img_url, headers=headers), label))
            except Exception as e:
                # Best-effort: one bad QR image must not abort the whole crawl.
                print(img_url)
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR freess.cx:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
Code example #2
0
def request_url(url, headers=None, name=''):
    """Fetch *url* and extract every ss:// or ssr:// link found in the page.

    Each extracted link is handed to parse() to build a server entry, tagged
    with the page title, the optional *name*, and its index.
    Returns (servers, info); on a top-level failure returns
    ([], {'message': <error>, 'url': '', 'name': ''}).
    """
    print('req', url)

    data = set()
    servers = list()
    try:
        # verify=False: many of these free-server pages ship broken TLS certs.
        response = requests.get(url, headers=headers, verify=False).text
        # FIX: regex patterns are now raw strings — '\s' in a plain literal
        # is an invalid escape sequence (SyntaxWarning on Python 3.12+).
        data.update(
            map(lambda x: re.sub(r'\s', '', x),
                re.findall(r'ssr?://[a-zA-Z0-9_]+=*', response)))
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text

        info = {'message': '', 'url': url, 'name': str(title)}
        for i, server in enumerate(data):
            try:
                servers.append(parse(server, ' '.join([title, name, str(i)])))
            except Exception as e:
                # Best-effort: skip individual malformed links.
                logging.exception(e, stack_info=False)
                print('URL:', url, 'SERVER', server)
    except Exception as e:
        print(url)
        logging.exception(e, stack_info=False)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
Code example #3
0
File: crawler.py  Project: 2574333900/ssr
def crawl_yitianjian(url='https://free.yitianjianss.com', headers=fake_ua):
    """Crawl yitianjianss: treat every <img> on the page as a QR code.

    Each image is decoded by scanNetQR and parsed into a server entry.
    Returns (servers, info); on a top-level failure returns
    ([], {'message': <error>, 'url': '', 'name': ''}).
    """
    print('req yitianjian...')
    servers = list()
    try:
        html = requests.get(url, headers=headers).text
        page = BeautifulSoup(html, 'html.parser')
        title = 'yitianjianss'
        info = {'message': '为确保安全,服务器地址会不定期更新。', 'url': url, 'name': str(title)}
        # Lazily build absolute image URLs from each <img src=...>.
        image_links = (url + tag.attrs['src'] for tag in page.find_all('img'))
        for idx, img_url in enumerate(image_links):
            try:
                label = ' '.join([title, str(idx)])
                servers.append(parse(scanNetQR(img_url, headers=headers), label))
            except Exception as e:
                # Best-effort: one undecodable image must not abort the crawl.
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR yitianjianss:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
Code example #4
0
def request_url(url, headers=None, name=''):
    """Fetch *url* and extract every ss:// or ssr:// link found in the page.

    Each extracted link is handed to parse() to build a server entry, tagged
    with the page title, the optional *name*, and its index.
    Returns (servers, info); on a top-level failure returns
    ([], {'message': <error>, 'url': '', 'name': ''}).
    """
    print('req', url)

    data = set()
    servers = list()
    try:
        # verify=False: many of these free-server pages ship broken TLS certs.
        response = requests.get(url, headers=headers, verify=False).text
        # FIX: regex patterns are now raw strings — '\s' in a plain literal
        # is an invalid escape sequence (SyntaxWarning on Python 3.12+).
        data.update(map(lambda x: re.sub(r'\s', '', x),
                        re.findall(r'ssr?://[a-zA-Z0-9=]+', response)))
        soup = BeautifulSoup(response, 'html.parser')
        title = soup.find('title').text

        info = {'message': '', 'url': url, 'name': str(title)}
        for i, server in enumerate(data):
            try:
                servers.append(parse(server, ' '.join([title, name, str(i)])))
            except Exception as e:
                # Best-effort: skip individual malformed links.
                logging.exception(e, stack_info=False)
                print('URL:', url, 'SERVER', server)
    except Exception as e:
        print(url)
        logging.exception(e, stack_info=False)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info
Code example #5
0
def request_freess_cx(url='https://freess.cx/', headers=None):
    """Scrape freess.cx for QR-code image links and decode each into a server.

    Each matching <div> tile on the page links to a QR image; the image is
    fetched/decoded by scanNetQR and the resulting URI parsed by parse().
    Returns a (servers, info) tuple; on a top-level failure returns
    ([], {'message': <error>, 'url': '', 'name': ''}).
    """
    print('req fscx...')
    qr = list()
    servers = list()
    try:
        html = requests.get(url, headers=headers).text
        page = BeautifulSoup(html, 'html.parser')
        title = page.find('title').text
        msg = page.find('section', attrs={'id': 'banner'}).text.strip()

        info = {'message': msg, 'url': url, 'name': str(title)}
        # Build absolute QR-image URLs from each tile's anchor href.
        root = url.strip('/')
        qr = [root + '/' + tile.find('a').get('href')
              for tile in page.find_all('div', attrs={'class': '4u 12u(mobile)'})]
        for idx, img_url in enumerate(qr):
            print('req img', img_url)
            try:
                label = ' '.join([title, str(idx)])
                servers.append(parse(scanNetQR(img_url), label))
            except Exception as e:
                # Best-effort: one bad QR image must not abort the whole crawl.
                print(img_url)
                logging.exception(e, stack_info=False)
                print('IMG_URL FOR freess.cx:', img_url)
    except Exception as e:
        logging.exception(e, stack_info=True)
        return [], {'message': str(e), 'url': '', 'name': ''}
    return servers, info