Example #1
def proxy_post_processing(url, proxy_data, response, opt):
    opt['out_break'] = False
    try:
        # The request went through a web proxy: restore the page data it rewrote and,
        # on a 302 redirect, fall back to a direct proxy and retry the saved URL.
        if ', Proxy Web' in proxy_data.get('stat', ''):
            import proxytools
            response["data"] = proxytools.restore_after_proxy_web(
                response["data"], proxy_data['web_name'], opt['url_save'])
            if response["data"] == 'ERROR':
                response['sucess'] = False
            if response["code"] == 302:
                proxy_data['stat'] = ', Proxy Direct'
                opt['forced_proxy'] = 'ProxyDirect'
                url = opt['url_save']
                opt['post'] = opt['post_save']
                response['sucess'] = False

        # On a failed response while a proxy is active, refresh the matching proxy
        # list (Direct / CF / Web) and retry, as long as the retry budget allows it.
        if proxy_data.get('stat', '') and response['sucess'] == False and \
                opt.get('proxy_retries_counter', 0) <= opt.get('proxy_retries', 1) and opt.get('count_retries_tot', 5) > 1:
            import proxytools
            if ', Proxy Direct' in proxy_data.get('stat', ''):
                proxytools.get_proxy_list_method(proxy_init='ProxyDirect',
                                                 error_skip=proxy_data['addr'],
                                                 url_test=url)
            elif ', Proxy CF' in proxy_data.get('stat', ''):
                proxytools.get_proxy_list_method(
                    proxy_init='ProxyCF', error_skip=proxy_data['CF_addr'])
                url = opt['url_save']
            elif ', Proxy Web' in proxy_data.get('stat', ''):
                if channel_proxy_list(opt['url_save'],
                                      forced_proxy=proxy_data['web_name']):
                    opt['forced_proxy'] = 'ProxyCF'
                    url = opt['url_save']
                    opt['post'] = opt['post_save']
                else:
                    proxytools.get_proxy_list_method(
                        proxy_init='ProxyWeb',
                        error_skip=proxy_data['web_name'])
                    url = opt['url_save']
                    opt['post'] = opt['post_save']

        else:
            # No active proxy, successful response, or no retries left:
            # signal the caller to leave its retry loop.
            opt['out_break'] = True
    except:
        import traceback
        logger.error(traceback.format_exc())
        opt['out_break'] = True

    return response["data"], response['sucess'], url, opt
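A minimal sketch, not part of the original, of how this opt-dict variant might be driven from a caller's retry loop. make_request is a hypothetical stand-in for the real request call, proxy_data is assumed to carry the keys read above ('stat', 'addr', 'CF_addr', 'web_name'), and the opt keys mirror the ones the function consumes; note that the project itself spells the success flag 'sucess'.

# Hypothetical caller sketch: make_request() and proxy_data are placeholders;
# the opt keys mirror those read by proxy_post_processing() above.
def fetch_with_proxy_retries(url, post, proxy_data, make_request):
    opt = {
        'url_save': url,               # original URL kept for retries
        'post': post,
        'post_save': post,             # original POST payload kept for retries
        'proxy_retries_counter': 0,    # proxy retries attempted so far
        'proxy_retries': 1,            # maximum proxy retries allowed
        'count_retries_tot': 5,        # overall retry budget
        'forced_proxy': None,
        'out_break': False,
    }
    data, success = '', False
    while True:
        response = make_request(url, post=opt.get('post'))
        opt['proxy_retries_counter'] += 1
        data, success, url, opt = proxy_post_processing(url, proxy_data, response, opt)
        if opt['out_break']:
            break
    return data, success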
Example #2
File: httptools.py  Project: Reunion90/raiz
def proxy_post_processing(url, url_save, post, post_save, proxy_stat, response,
                          proxy, proxy_web, proxy_addr, proxy_CF_addr,
                          proxy_web_name, proxy_retries_counter, proxy_retries,
                          count_retries_tot, forced_proxy):

    out_break = False
    try:
        if ', Proxy Web' in proxy_stat:
            import proxytools
            response["data"] = proxytools.restore_after_proxy_web(
                response["data"], proxy_web_name, url_save)
            if response["data"] == 'ERROR':
                response['sucess'] = False
            if response["code"] == 302:
                proxy_stat = ', Proxy Direct'
                forced_proxy = 'ProxyDirect'
                url = url_save
                post = post_save
                response['sucess'] = False

        if proxy_stat and response[
                'sucess'] == False and proxy_retries_counter <= proxy_retries and count_retries_tot > 1:
            import proxytools
            if ', Proxy Direct' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyDirect',
                                                 error_skip=proxy_addr)
            elif ', Proxy CF' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyCF',
                                                 error_skip=proxy_CF_addr)
                url = url_save
            elif ', Proxy Web' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyWeb',
                                                 error_skip=proxy_web_name)
                url = url_save
                post = post_save

        else:
            out_break = True
    except:
        import traceback
        logger.error(traceback.format_exc())
        out_break = True

    return (response["data"], response['sucess'], url, post, out_break,
            forced_proxy)
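For comparison with Example #1, here is a hypothetical adapter (not in the original project) that feeds this flat-parameter variant from opt and proxy_data dicts shaped like the ones above; the 'proxy' and 'web' keys are assumed names for the two parameters the body never uses.

# Hypothetical adapter: drives the flat-parameter variant above with the
# opt/proxy_data shapes used in Example #1.
def proxy_post_processing_from_opt(url, proxy_data, response, opt):
    data, success, url, post, out_break, forced_proxy = proxy_post_processing(
        url, opt['url_save'], opt.get('post'), opt['post_save'],
        proxy_data.get('stat', ''), response,
        proxy_data.get('proxy'), proxy_data.get('web'),   # assumed key names
        proxy_data.get('addr'), proxy_data.get('CF_addr'),
        proxy_data.get('web_name'),
        opt.get('proxy_retries_counter', 0), opt.get('proxy_retries', 1),
        opt.get('count_retries_tot', 5), opt.get('forced_proxy'))
    opt.update({'post': post, 'out_break': out_break, 'forced_proxy': forced_proxy})
    return data, success, url, opt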
Example #3
            else:
                if not alfa_s:
                    logger.info("Unable to authorize")
                    logger.info("try to use CloudScrape")
                    try:
                        from lib import cloudscraper
                        scraper = cloudscraper.CloudScraper()
                        data = scraper.get(url).content
                        response["data"] = data
                    except:
                        logger.info("Unable to Scrape")

    # If there are errors while using a proxy, refresh the proxy and retry
    # the number of times indicated in proxy_retries
    try:
        if ', Proxy Web' in proxy_stat:
            response["data"] = proxytools.restore_after_proxy_web(response["data"], proxy_web_name, url_save)
            if response["data"] == 'ERROR':
                response['sucess'] = False

        if proxy_stat and response['sucess'] == False and proxy_retries_counter <= proxy_retries and count_retries_tot > 1:
            if ', Proxy Direct' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyDirect')
            elif ', Proxy CF' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyCF')
                url = url_save
            elif ', Proxy Web' in proxy_stat:
                proxytools.get_proxy_list_method(proxy_init='ProxyWeb')
                url = url_save
                post = post_save
        else:
            break
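This last listing is only a fragment: the trailing break implies it runs inside an enclosing retry loop that the excerpt does not show. A rough, purely hypothetical skeleton of such a loop, just to make the control flow readable, might look like the following.

# Hypothetical skeleton of the enclosing retry loop implied by the `break`
# above; none of this is part of the original fragment.
proxy_retries = 1
proxy_retries_counter = 0
count_retries_tot = 5
while True:
    proxy_retries_counter += 1
    response = {'data': '', 'sucess': False, 'code': None}   # placeholder for the real request
    # ... the fragment above would run here: on a proxy failure it refreshes the
    # proxy list and loops again, otherwise it reaches `break` and leaves the loop ...
    if proxy_retries_counter > proxy_retries:
        break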