Example #1
# Expects module-level globals set up elsewhere in the script: proxy (a list of
# "ip:port" lines), url (the URL to test against), r (requests), c (Color codes)
# and users (the UserAgent helper).
def Check():
    try:
        for i in proxy:
            i = i.strip()
            parts = i.split(":")
            ip = parts[0]
            port = parts[1]
            addr = dict(http=f"http://{ip}:{port}",
                        https=f"https://{ip}:{port}")
            head = {"User-Agent": users.UserAgent()}
            try:
                req = r.get(url, proxies=addr, headers=head, timeout=6)
            except Exception as e:
                # Skip proxies whose tunnel connection fails; abort on anything else.
                if "Tunnel" in str(e):
                    continue
                else:
                    print(f"{c.R}Error{c.C}!!!{c.W} {e}\n")
                    exit()
            if req.status_code == 200:
                print(
                    f"{c.C}Hit{c.R}_{c.C}Proxy {c.R}=> {c.C}{ip}:{port}{c.W}\n"
                )
                # Record the working proxy as "ip:port", one per line.
                with open("Hit.txt", "a") as files:
                    files.write(f"{ip}:{port}\n")
            else:
                print(
                    f"{c.C}Not{c.R}_{c.C}Found {c.R}=> {c.C}{ip}:{port}{c.W}\n"
                )
    except Exception as e:
        print(f"{c.R}Error{c.C}!!!{c.W} {e}\n")
        exit()
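
For context, here is a minimal usage sketch showing how the globals Check() relies on might be wired up before calling it; the file name, target URL and helper-module names are assumptions rather than part of the original snippet:

# Hypothetical setup for Check(): a proxy list and a URL to probe through each proxy.
import requests as r
import Color as c
import UserAgent as users

url = "https://www.example.com/"
proxy = open("proxies.txt", "r").readlines()

Check()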
Example #2
# Assumed imports for this snippet: requests' Session and fake_useragent's
# UserAgent (which provides the .random property used below).
from fake_useragent import UserAgent
from requests import Session


def creatTopic(data):
    ua = UserAgent()
    user_agent = {
        'User-agent': ua.random,
        'Referer': 'https://dealbub.com/',
        'Content-type': 'content_type_value'
    }
    session = Session()
    # Warm up the session against the referer before posting.
    session.head('https://dealbub.com/')
    # Note: the data argument is overwritten by the project's topicContect() payload.
    data = topicContect()
    response = session.post(url='https://alisdeals.com/posts/',
                            headers=user_agent,
                            data=data)
    return response
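
A short hedged usage sketch follows; since creatTopic() builds its own payload via topicContect(), the argument passed in is effectively ignored, and the driver below is purely illustrative:

if __name__ == '__main__':
    # Hypothetical call site: post one topic and report the HTTP status.
    resp = creatTopic(data=None)
    print(resp.status_code, resp.reason)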
Example #3
File: crawler.py Project: amansachdev/fix
import telegram
import sys
import proxies
import UserAgent  # assumed project-local module; used below as UserAgent.UserAgent()
from scraper_api import ScraperAPIClient
from proxies import random_proxy
from copy import copy
from lxml import html
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from datetime import date, datetime, timedelta
client = ScraperAPIClient('1911aafa4321c1afdbff5112a4d6685e')
#iplist = ['162.208.48.84:8118','165.138.4.41:8080']
#proxies = {'https':random.choice(iplist) }
#print(proxies)

ua = UserAgent.UserAgent()
#imported_proxy = random_proxy.random_proxies()
#print(imported_proxy)
intervalTimeBetweenCheck = 0
dateIndex = datetime.now()
emailinfo = {}

IFTTT_Key = ""
IFTTT_EventName = ""



# msg_content format
# msg_content['Subject'] = 'Subject'
# msg_content['Content'] = 'This is a content'
def isbotalive():
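
The example breaks off at isbotalive(). For context, a minimal sketch of how the ScraperAPIClient created above is typically combined with the imported lxml.html to fetch and parse a page; the URL, XPath and function name are placeholders, and client.get() is assumed to return a requests-style response as in the scraper_api SDK:

def fetch_page(page_url):
    # Route the request through ScraperAPI, then parse the HTML it returns.
    response = client.get(url=page_url)
    tree = html.fromstring(response.text)
    # Placeholder XPath; a real crawler would target the nodes it cares about.
    return tree.xpath('//title/text()')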
Example #4
File: Crack.py Project: MOHQ-TM/InstaCrack
#Modules (requests plus the project-local Color, UserAgent and Logo helpers)
import requests as r, Color as c, UserAgent as usr, Logo
#Headers
headers = {
    'Host': 'www.instagram.com',
    'User-Agent': usr.UserAgent(),
    'Accept': '*/*',
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip, deflate, br',
    'Referer': 'https://www.instagram.com/',
    'X-CSRFToken': '',
    'X-Instagram-AJAX': '1',
    'Content-Type': 'application/x-www-form-urlencoded',
    'X-Requested-With': 'XMLHttpRequest',
    'Content-Length': '',
    'Cookie': '',
    'Connection': 'keep-alive'
}
#####....

#Globals Values
cambo = open(input(f"{c.C}Cambo{c.R}_{c.C}List {c.R}=> {c.W}"))
########
proxy = open(input(f"{c.C}Proxy{c.R}_{c.C}List {c.R}=> {c.W}"),
             "r").readlines()


#Proxies
def Proxies(num):
    pr = proxy[num].strip("\n")
    return pr
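
A hedged sketch of how Proxies() and the UserAgent helper above would typically be combined into a probe request; the target URL, timeout and function name are illustrative rather than part of the original file:

def CheckProxy(num):
    # Build a requests-style proxies dict from the num-th entry in the list.
    pr = Proxies(num)
    addr = {"http": f"http://{pr}", "https": f"https://{pr}"}
    head = {"User-Agent": usr.UserAgent()}
    # Hypothetical probe through the selected proxy.
    return r.get("https://www.instagram.com/", headers=head,
                 proxies=addr, timeout=6).status_code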