Example #1
def requestJob(item):
    # `words`, `print_lock`, `SITE`, `PROXY`, `PROXYLIST`, `replace`,
    # `log_result` and `ProxyHelper` are module-level names of the tool;
    # `threading`, `requests` and `random` are imported at module level.
    word = words[item]

    # Reject words that cannot be valid usernames on the selected site:
    # Twitter (site 3) allows 5-15 characters, GitHub (site 10) up to 39,
    # Pastebin (site 13) 3-20.
    if SITE() == 3 and not 4 < len(word) < 16:
        with print_lock:
            print(f"[{threading.current_thread().name}] {word} is UNAVAILABLE on twitter because it has illegal length.")
    elif SITE() == 10 and not len(word) < 40:
        with print_lock:
            print(f"[{threading.current_thread().name}] {word} is UNAVAILABLE on github because it has illegal length.")
    elif SITE() == 13 and not 2 < len(word) < 21:
        with print_lock:
            print(f"[{threading.current_thread().name}] {word} is UNAVAILABLE on pastebin because it has illegal length.")
    else:
        # Build the profile URL for this word and request it, optionally
        # through a randomly chosen proxy from the configured list.
        link = replace(word)
        s = requests.Session()
        if PROXY() == "True":
            plist = PROXYLIST()
            sess = ProxyHelper().setProxy(s, random.choice(plist))
            r = sess.get(link)
        else:
            r = s.get(link)
        with print_lock:
            log_result(r, word, link)
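
requestJob handles a single index, so the surrounding tool presumably fans these indices out to worker threads (the use of threading.current_thread().name and print_lock points that way). A minimal dispatcher sketch, assuming `words` and `requestJob` are in scope; the thread-pool wiring and worker count below are assumptions, not the tool's actual runner:

from concurrent.futures import ThreadPoolExecutor

def run_all(max_workers=8):
    # Hypothetical dispatcher: hand every index of the shared `words` list
    # to a pool worker. Exiting the `with` block waits for all jobs.
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        list(pool.map(requestJob, range(len(words))))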
Example #2
def postJob(item):
    # POST-based availability check; `words`, `link`, `header`, `cookie`,
    # `ready_payload`, `print_lock` and `log_result` are module-level names
    # of the tool.
    word = words[item]
    payload = ready_payload(word)
    s = requests.Session()
    if PROXY() == "True":
        # Route the request through a randomly chosen proxy.
        plist = PROXYLIST()
        sess = ProxyHelper().setProxy(s, random.choice(plist))
        r = sess.post(link, data=payload, headers=header, cookies=cookie)
    else:
        r = s.post(link, data=payload, headers=header, cookies=cookie)
    with print_lock:
        log_result(r, word, link)
Example #3
def requestJob(item):
    # GET-based availability check without per-site length validation.
    word = words[item]
    link = replace(word)
    s = requests.Session()
    if PROXY() == "True":
        # Route the request through a randomly chosen proxy.
        plist = PROXYLIST()
        sess = ProxyHelper().setProxy(s, random.choice(plist))
        r = sess.get(link)
    else:
        r = s.get(link)
    with print_lock:
        log_result(r, word, link)
Example #4
def main():
    # Check the configured proxies first when proxy filtering is enabled.
    if PROXY() == "True" and PFILTER() == "True":
        ProxyHelper().checkProxies()

    # Import the job module that matches the selected site: Steam (5, 6)
    # uses the parsing job, site 4 the POST job, everything else plain GET.
    if (SITE() == 5) or (SITE() == 6):  # Steam
        import lib.parse
    elif SITE() == 4:
        import lib.post
    else:
        import lib.get
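
The chain of conditional imports above selects the job strategy per site. The same selection could be sketched with importlib and a lookup table; the mapping below merely mirrors the branches of main(), and a shared interface across the job modules is an assumption:

import importlib

# Hypothetical site-id -> job-module table mirroring main()'s branches.
_JOB_MODULES = {5: "lib.parse", 6: "lib.parse", 4: "lib.post"}

def load_job_module(site_id):
    # Fall back to the plain GET job when no special handling is mapped.
    return importlib.import_module(_JOB_MODULES.get(site_id, "lib.get"))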
Example #5
import requests
from lib.replace import *
from lib.ConfigHelper import ConfigHelper
from lib.ProxyHelper import ProxyHelper

ch = ConfigHelper()
ph = ProxyHelper()

s = requests.Session()


def get_cookie():
    # Fetch the landing page of the configured site with the shared session
    # and return the cookies the server sets, so later requests can reuse them.
    r = s.get(URLS[ch.getSite()])
    return r.cookies
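
As a usage sketch, the cookies returned by get_cookie() could back a POST like the one in Example #2; `link`, `header` and `ready_payload` mirror the names used there but are placeholders here, not values taken from this module:

def check_word(word):
    # Hypothetical caller: reuse the module's session `s` and the freshly
    # fetched cookies for a site-specific form POST.
    cookie = get_cookie()
    payload = ready_payload(word)    # placeholder helper from Example #2
    return s.post(link, data=payload, headers=header, cookies=cookie)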