Example #1
0
def update_servers():
    """Refresh the global server caches and their base64 subscriptions.

    Crawls all source websites, rebuilds the module-level caches, then
    sleeps for two hours so a caller can invoke this in a simple loop.

    Globals written:
        servers:      raw crawl result (list of website dicts).
        jsons:        JSON configs of servers whose status check passed.
        full_jsons:   JSON configs of every crawled server.
        encoded:      urlsafe-base64 subscription of working servers.
        full_encoded: urlsafe-base64 subscription of all servers.

    Any exception is logged with stack info and swallowed, so one failed
    refresh does not kill the surrounding loop.
    """
    try:
        # servers
        global servers
        servers = crawler.main()
        # subscription
        global encoded
        global full_encoded
        global jsons
        global full_jsons
        # Reset ALL accumulators each refresh. Previously full_jsons was
        # never re-initialized here: first call raised NameError if the
        # global was unset, and later calls grew it without bound.
        jsons = list()
        full_jsons = list()
        decoded = list()
        full_decoded = list()
        for website in servers:
            for server in website['data']:
                full_decoded.append(server['ssr_uri'])
                full_jsons.append(server['json'])
                if server['status'] > 0:
                    # status > 0: the server passed its availability check
                    decoded.append(server['ssr_uri'])
                    jsons.append(server['json'])

        decoded = '\n'.join(decoded)
        encoded = base64.urlsafe_b64encode(bytes(decoded, 'utf-8'))
        full_decoded = '\n'.join(full_decoded)
        full_encoded = base64.urlsafe_b64encode(bytes(full_decoded, 'utf-8'))
        # Throttle: two hours between refresh cycles.
        time.sleep(7200)
    except Exception as e:
        logging.exception(e, stack_info=True)
Example #2
0
def test4():
    """Crawl with validation disabled, validate explicitly, dump statuses."""
    validated = ssr_check.validate(crawler.main(debug=['no_validate']))

    for site in validated:
        print(site['info'])
        for entry in site['data']:
            print(entry['status'])
Example #3
0
def test3():
    """Crawl, probe each server's SOCKS endpoint, and print usable configs."""
    for site in crawler.main():
        print(site['info'])
        for entry in site['data']:
            result = ssr_check.test_socks_server(str_json=entry['json'])
            print('>>>>>>>结果:', result)
            if result is True:
                print(entry['json'])
            elif result == -1:
                print(entry['json'])
Example #4
0
#!/usr/bin/env python3
# Minimal script entry point: run one crawl pass and exit successfully.
# NOTE(review): test_sodium is imported but never referenced below —
# presumably imported for its import-time side effects; confirm before
# removing.
from test import test_sodium
from ssshare.ss import crawler
crawler.main()
exit(0)