Example #1
import json
import random
import sys
import threading
import time
import requests
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException, NoSuchFrameException, NoSuchElementException, \
    WebDriverException
from mytools.tools import get_oxylabs_proxy, get, create_proxyauth_extension, logger

logger = logger('refund')


class AliexpressRefundSpider():
    def __init__(self, task_infos, task_id):
        self.proxy = get_oxylabs_proxy('us',
                                       _city=None,
                                       _session=random.random())['https']
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = create_proxyauth_extension(
            proxy_host=proxyid.split(":")[0],
            proxy_port=int(proxyid.split(":")[1]),
            proxy_username=auth.split(":")[0],
            proxy_password=auth.split(":")[1])
        self.options = webdriver.ChromeOptions()
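
The snippet above stops right after ChromeOptions() is created. A minimal sketch of one plausible continuation is shown below, assuming create_proxyauth_extension (from the private mytools.tools module) returns the path of a zipped Chrome extension; add_extension and webdriver.Chrome(options=...) are standard Selenium APIs, and the user-agent override is purely illustrative.

# Minimal sketch (assumption): load the generated proxy-auth extension and start Chrome.
from selenium import webdriver

def build_driver(proxyauth_plugin_path, user_agent=None):
    options = webdriver.ChromeOptions()
    options.add_extension(proxyauth_plugin_path)  # assumed to be the path of a zipped extension
    if user_agent:
        options.add_argument(f'--user-agent={user_agent}')  # optional UA override
    return webdriver.Chrome(options=options)
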
Example #2
# import threading
from multiprocessing import Process
import random
import time
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException, NoSuchFrameException, NoSuchElementException, \
    WebDriverException
from mytools.tools import get_oxylabs_proxy, get, post, create_proxyauth_extension, logger
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError
from selenium.webdriver.common.keys import Keys

logger = logger('review_US')


class AliexpressReviewSpider():
    def __init__(self, task_id, task_info):
        self.task_id = task_id
        self.task_info = task_info
        self.target_id = self.task_info["asin"]["asin"]
        # OXY proxy
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        # Geosurf proxy
        self.proxy = 'http://10502+US+10502-%s:[email protected]:8000' % random.randint(
            200000, 300000)
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = create_proxyauth_extension(
Example #3
File: register_de.py  Project: kknet/YHZX
from selenium import webdriver
from user_agent import generate_user_agent
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from mytools import tools
from mytools.tools import logger
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from config import *
from selenium.webdriver.support.select import Select
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError

logger = logger('register_de')


class AliexpressRegisterSpider():
    def __init__(self, user_info, proxy, index, register_city):
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        self.proxy = proxy
        self.index = index
        self.register_city = register_city
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = tools.create_proxyauth_extension(
            proxy_host=proxyid.split(":")[0],
            proxy_port=int(proxyid.split(":")[1]),
            proxy_username=auth.split(":")[0],
            proxy_password=auth.split(":")[1])
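
The auth/host parsing in these __init__ methods slices the proxy URL by hand: split("@")[0][7:] drops the leading 'http://' (7 characters) to leave 'user:pass', and split("@")[1] keeps 'host:port'. A sketch of equivalent parsing with the standard library, shown for comparison only:

# Sketch: parse an 'http://user:pass@host:port' proxy URL with urllib.parse
# instead of manual slicing.
from urllib.parse import urlsplit

def parse_proxy(proxy_url):
    parts = urlsplit(proxy_url)
    return parts.username, parts.password, parts.hostname, parts.port
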
Example #4
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from baiduOcr import BaiduOrc
from selenium import webdriver
from user_agent import generate_user_agent
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from mytools import tools
from mytools.tools import logger
from selenium.webdriver.chrome.options import Options
from config import *
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError

logger = logger('register')
User_Agent = generate_user_agent(device_type="desktop")


class AliexpressRegisterSpider():

    # def __init__(self,user_info,proxy):
    # # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
    # self.proxy = proxy
    # auth = self.proxy.split("@")[0][7:]
    # proxyid = self.proxy.split("@")[1]
    # proxyauth_plugin_path = self.create_proxyauth_extension(
    #     proxy_host=proxyid.split(":")[0],
    #     proxy_port=int(proxyid.split(":")[1]),
    #     proxy_username=auth.split(":")[0],
    #     proxy_password=auth.split(":")[1]
Example #5
# import threading
from multiprocessing import Pool
import random
import time
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException, NoSuchFrameException, NoSuchElementException, \
    WebDriverException
from mytools.tools import get_oxylabs_proxy, get, post, create_proxyauth_extension, logger
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError
from selenium.webdriver.common.keys import Keys

logger = logger('review_de')


class AliexpressReviewSpider():
    def __init__(self, task_id, task_info):
        self.task_id = task_id
        self.task_info = task_info
        self.target_id = self.task_info["asin"]["asin"]
        # OXY proxy
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        # Geosurf proxy
        self.proxy = 'http://10502+DE+10502-%s:[email protected]:8000' % random.randint(
            600000, 700000)
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = create_proxyauth_extension(
Example #6
import random
import requests
from lxml import etree
from selenium import webdriver
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException, TimeoutException, NoSuchFrameException, WebDriverException
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError
from config import City_List
from mytools.tools import get_oxylabs_proxy, get, post, create_proxyauth_extension, logger, load_json, save_json

logger = logger('place_order_new')


class AliexpressOrderSpider():

    def __init__(self, task_infos, task_id):
        self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = create_proxyauth_extension(
            proxy_host=proxyid.split(":")[0],
            proxy_port=int(proxyid.split(":")[1]),
            proxy_username=auth.split(":")[0],
            proxy_password=auth.split(":")[1]
        )
        self.options = webdriver.ChromeOptions()
        self.task_infos = task_infos
        self.task_id = task_id
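
The WebDriverWait / expected_conditions / By imports at the top of this example imply the usual explicit-wait pattern. A short sketch of that pattern follows; the CSS selector and timeout are illustrative.

# Sketch: explicit wait for an element, as implied by the imports above.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_for(driver, css_selector, timeout=20):
    return WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)))
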
Example #7
import requests
from lxml import etree
from selenium import webdriver
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException, TimeoutException, NoSuchFrameException, WebDriverException
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError
from mytools.tools import get_oxylabs_proxy, get, post, create_proxyauth_extension, logger, load_json, save_json
from config import City_List

logger = logger('place_order_new_US')


class AliexpressOrderSpider():
    def __init__(self, task_infos, task_id):
        self.task_infos = task_infos
        self.task_id = task_id
        # register_city = self.task_infos[str(self.task_id)]["account"].get("register_city")
        # register_city_list = register_city.split()
        # if len(register_city_list) > 1:
        #     register_city = '_'.join(register_city_list)
        # proxies = get_oxylabs_proxy('us', _city=register_city, _session=random.random())
        # getIpInfo = get(requests.session(), 'https://ipinfo.io', proxies=proxies)
        # if getIpInfo:
        #     self.proxy = proxies['https']
        #     print('get ip!')
Example #8
# You must be logged in for continued access.

import random
import re
from multiprocessing import Process
import time
from user_agent import generate_user_agent
from lxml import etree
import requests
import sys
import json
from mytools.tools import logger, get, post, get_oxylabs_proxy

logger = logger('search_ad')


class AliexpressAdSearcher:
    def __init__(self, id, key_words):
        self.id = id
        self.key_words = key_words
        self.headers = {
            # "Host": "www.aliexpress.com",
            "Connection": "keep-alive",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": generate_user_agent(device_type='desktop'),
            "Accept":
            "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Referer": "https://www.aliexpress.com/",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "zh-CN,zh;q=0.9",
        }
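
This example builds browser-like headers and, like the other snippets, relies on a proxies mapping from get_oxylabs_proxy (a private mytools.tools helper). A minimal sketch of how such a session request typically looks, assuming the standard requests proxies format implied by the ['https'] lookup elsewhere; the URL and credentials are placeholders.

# Sketch (assumption): plain requests usage with the headers above and a standard
# requests proxies mapping; all values are placeholders.
import requests

def fetch(url, headers, proxies, timeout=30):
    session = requests.Session()
    resp = session.get(url, headers=headers, proxies=proxies, timeout=timeout)
    resp.raise_for_status()
    return resp.text

# proxies = {'http': 'http://user:pass@host:8000', 'https': 'http://user:pass@host:8000'}
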
Example #9
import time
import requests
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException, NoSuchFrameException, NoSuchElementException, \
    WebDriverException
from mytools.tools import get_oxylabs_proxy, get, post, create_proxyauth_extension, logger
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError
from selenium.webdriver.common.keys import Keys

date_ym = time.strftime('%Y%m', time.localtime())

logger = logger(f'review_us_{date_ym}')  # year and month appended to the log name


class AliexpressReviewSpider():
    def __init__(self, task_id, task_info):
        self.task_id = task_id
        self.task_info = task_info
        self.target_id = self.task_info["asin"]["asin"]
        # OXY proxy
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        # Geosurf proxy
        # self.proxy = 'http://10502+US+10502-%s:[email protected]:8000' % random.randint(100000, 800000)
        # auth = self.proxy.split("@")[0][7:]
        # proxyid = self.proxy.split("@")[1]
        # proxyauth_plugin_path = create_proxyauth_extension(
        #     proxy_host=proxyid.split(":")[0],
Example #10
# You must be logged in for continued access.

import random
import time
from multiprocessing import Process
import requests
import json
from mytools.tools import logger, get, post, get_oxylabs_proxy

logger = logger('kill_ad')


class AliexpressKillAD:
    def __init__(self, task):
        self.task = task
        self.headers = {
            # "Host": "www.aliexpress.com",
            "Connection": "keep-alive",
            "Upgrade-Insecure-Requests": "1",
            # "User-Agent": generate_user_agent(device_type='desktop'),
            "User-Agent": json.loads(self.task.get('header')),
            "Accept":
            "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Referer": "https://www.aliexpress.com/",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "zh-CN,zh;q=0.9",
        }
        self.proxies = get_oxylabs_proxy('us',
                                         _city=None,
                                         _session=random.random())
        self.session = requests.session()
Example #11
File: register_us.py  Project: kknet/YHZX
from selenium import webdriver
from user_agent import generate_user_agent
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from mytools import tools
from mytools.tools import logger
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from config import *
from selenium.webdriver.support.select import Select
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError

logger = logger('register_us')


class AliexpressRegisterSpider():
    def __init__(self, user_info, proxy, index, register_city):
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        self.proxy = proxy
        self.index = index
        self.register_city = register_city
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = tools.create_proxyauth_extension(
            proxy_host=proxyid.split(":")[0],
            proxy_port=int(proxyid.split(":")[1]),
            proxy_username=auth.split(":")[0],
            proxy_password=auth.split(":")[1])
Example #12
File: register_ru.py  Project: kknet/YHZX
from selenium import webdriver
from user_agent import generate_user_agent
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from mytools import tools
from mytools.tools import logger
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from config import *
from selenium.webdriver.support.select import Select
from urllib3.exceptions import NewConnectionError, MaxRetryError
from requests.exceptions import ProxyError

logger = logger('register_ru')


class AliexpressRegisterSpider():
    def __init__(self, user_info, proxy, index, register_city):
        # self.proxy = get_oxylabs_proxy('us', _city=None, _session=random.random())['https']
        self.proxy = proxy
        self.index = index
        self.register_city = register_city
        auth = self.proxy.split("@")[0][7:]
        proxyid = self.proxy.split("@")[1]
        proxyauth_plugin_path = tools.create_proxyauth_extension(
            proxy_host=proxyid.split(":")[0],
            proxy_port=int(proxyid.split(":")[1]),
            proxy_username=auth.split(":")[0],
            proxy_password=auth.split(":")[1])