def parse_mul(args):
    """Scrape one wallhaven listing page and append new full-image URLs to obj.url_list.

    args: (obj, url) tuple — obj carries url_list and the progress-bar state.
    Relies on module-level retry flags: `retry`, `retry_max`, and the
    `retry_num` counter (mutated here, hence the global).
    """
    global retry_num
    obj, url = args
    req = Req()
    while True:
        try:
            response = req.get(url)
        except (requests.exceptions.ConnectionError,
                requests.exceptions.ChunkedEncodingError):
            # print('\n\033[0;37;41m远程主机强迫关闭了一个现有的连接。\033[0m')
            if retry and retry_num < retry_max:
                retry_num += 1
                # print('第%d次重试' % retry_num)
            else:
                print('\n爬取失败')
                # BUG FIX: the original fell through here and looped forever
                # without incrementing the counter; give up explicitly.
                break
        else:
            if response.status_code != 200:
                # BUG FIX: the original recursed with parse_mul(url) — the bare
                # URL instead of the (obj, url) tuple — which crashed on unpack.
                parse_mul(args)
                return
            soup = BeautifulSoup(response.text, features='lxml')
            for fig in soup.find_all('figure'):
                img_id = fig['data-wallpaper-id']
                full_url = ('https://w.wallhaven.cc/full/' + img_id[:2]
                            + '/wallhaven-' + img_id + '.jpg')
                if full_url not in obj.url_list:
                    obj.url_list.append(full_url)
            progress_bar(obj, '爬取进度')
            break
def start_parser():
    """Entry point: load requests from file, run the scrapers, persist JSON results."""
    read_file_name = 'list_requests'
    # read_file_name = input('filename: ')
    loader = Req.read_json if 'json' in read_file_name else Req.read_txt
    reqs = loader(read_file_name)
    started = datetime.now(tz=None)
    print("time start {}:{}:{}".format(started.hour, started.minute, started.second))
    # Query google and yandex for every Req object in the list.
    get_positions(reqs)
    Req.create_json(reqs)
    finished = datetime.now(tz=None)
    print("time finish {}:{}:{}".format(finished.hour, finished.minute, finished.second))
def debug_loop():
    """Interactive debug REPL: forward each typed query to answer() until 'exit'."""
    print("DebugMode! Keep sending request. 'exit' to stop.")
    while True:
        print(f"[Orange-->]: ", end='')
        query = input()
        if query == "exit":
            break
        req = Req("")
        req.request = query
        req.language = 'en-US'
        text, _html = answer(req)
        print(f"[<--Google]: {text}")
def select_data(self):
    """Fetch raw samples for self.channel over a ±3-hour window around now.

    Returns:
        list: the records yielded by Req.CursorCompress for that window.
    """
    req = Req("172.16.1.117", 80, "/api/v1")
    # Take a single timestamp so the window is exactly symmetric — the
    # original called datetime.now() twice, skewing t2 by the elapsed time.
    now = datetime.datetime.now()
    fmt = '%Y-%m-%d %H:%M:%S.%f'
    t1 = (now - datetime.timedelta(hours=3)).strftime(fmt)
    t2 = (now + datetime.timedelta(hours=3)).strftime(fmt)
    # list.append cannot raise KeyError, so the original try/except per
    # element was dead code; collect the cursor directly.
    return list(req.CursorCompress(t1, t2, self.channel, "raw"))
def end(self):
    """ Ask stack-ide to shut down. """
    # Clear any visible error panel before tearing the backend down.
    Win(self.window).hide_error_panel()
    shutdown_request = Req.get_shutdown()
    self.send_request(shutdown_request)
    self.die()
def create_buff(self):
    """Build self.buffer: per-name time-series data pulled from the raw-data API.

    For every (method, data) pair in self.input, flattens the channel-id
    lists into self.channel, fetches a ±self.config[met]-hour window of raw
    samples, and stores them per channel as {timestamp: float value} maps.

    Returns:
        self.buffer — {name: {'data': SortedDict, 'method': met}}.

    NOTE(review): reconstructed from a single flattened source line; the
    nesting of the trailing loops is the most plausible reading — confirm.
    """
    req = Req("172.16.1.117", 80, "/api/v1")
    array1 = []  # NOTE(review): appears unused after initialization
    for met, data in self.input.items():
        array = []
        # NOTE(review): `buffer` is never populated, so the channel_condition
        # loop below never runs — looks like leftover code; confirm intent.
        buffer = SortedDict()
        self.channel = []
        # Flatten all channel-id lists for this method into self.channel.
        for k, i in data.items():
            self.channel += i
        #print(self.channel)
        t1 = datetime.datetime.strftime((datetime.datetime.now() - datetime.timedelta(hours=self.config[met])), '%Y-%m-%d %H:%M:%S.%f')
        t2 = datetime.datetime.strftime((datetime.datetime.now() + datetime.timedelta(hours=self.config[met])), '%Y-%m-%d %H:%M:%S.%f')
        for i in req.CursorCompress(t1, t2, self.channel, "raw"):
            try:
                array.append(i)
            except (KeyError):
                pass
        # Drop pagination links from every fetched record.
        for i in array:
            try:
                del i["next"]
            except (KeyError):
                pass
        #print(array, len(array1),len(array))
        for name, sec in data.items():
            buff_data = SortedDict()
            # print(name, sec)
            for ch in sec:
                result = SortedDict()
                #print(array[0][ch])
                try:
                    # Parse each sample of the first record: timestamp -> value.
                    for i in array[0][ch]:
                        result[datetime.datetime.strptime(i["time"], '%Y-%m-%d %H:%M:%S.%f')] = float(i["value"])
                except (KeyError):
                    pass
                buff_data[ch] = result
            print(buff_data.keys())
            if len(buff_data) != 0:
                self.buffer[name] = {'data': buff_data, 'method': met}
        for k in buffer.keys():
            self.channel_condition[k] = {"active": True}
        #channel.basic_publish(exchange='', routing_key='alarm_queue', body=str(json.dumps(self.channel_condition)),
        #                      properties=pika.BasicProperties(delivery_mode=2, ))
        # print(self.channel_condition)
    return self.buffer
def search(self, city, params, low, high):
    """Search listings in `city` within price range [low, high].

    For each non-None entry in `params` (up to five destination addresses),
    annotates every house with the travel time to that destination.

    Returns:
        list[House]: one House per result record.
    """
    reqo = Req()
    mapo = Map()
    self.g = reqo.initialSearch(city, low, high)
    # Parse the response body ONCE — the original called json.loads on every
    # field access, i.e. three full JSON decodes per house.
    payload = json.loads(self.g.text)
    results = payload['results']
    nHouses = payload['total_records']
    paramlist = [x for x in params if x is not None]
    # Geocode each destination once instead of once per house.
    geocodes = [mapo.getGeoCode(p) for p in paramlist]
    # Positional setters matching the original set*Time call order.
    setters = ['setWTime', 'setETime', 'set1Time', 'set2Time', 'set3Time']
    temp = []
    for i in range(nHouses):
        lat = results[i]['lat']
        lng = results[i]['long']
        mlsid = results[i]['mls_id']
        house = House(mlsid, lat, lng)
        for setter, code in zip(setters, geocodes):
            getattr(house, setter)(mapo.getDistanceTime((lat, lng), code))
        temp.append(house)
    print("\n")
    return temp
def test_can_send_source_errors_request(self, loadtargets_mock):
    """A running StackIDE instance forwards a source-errors request to its backend."""
    fake_backend = FakeBackend()
    fake_backend.send_request = Mock()
    ide = stackide.StackIDE(
        mock_window([cur_dir + '/mocks/helloworld/']), test_settings, fake_backend)
    self.assertIsNotNone(ide)
    self.assertTrue(ide.is_active)
    self.assertTrue(ide.is_alive)
    errors_req = Req.get_source_errors()
    ide.send_request(errors_req)
    fake_backend.send_request.assert_called_with(errors_req)
def test_request_completions(self):
    """Typing a prefix triggers an autocompletion request to the backend."""
    handler = StackIDEAutocompleteHandler()
    (window, view) = default_mock_window()
    view.settings().get = Mock(return_value=False)
    backend = setup_mock_backend(window)
    handler.on_query_completions(view, 'm', [])  # locations not used.
    expected = Req.get_autocompletion(
        filepath=util.relative_view_file_name(view), prefix="m")
    expected['seq'] = ANY
    backend.send_request.assert_called_with(expected)
def test_can_shutdown(self, loadtargets_mock):
    """end() deactivates the instance and sends a shutdown request to the backend."""
    fake_backend = FakeBackend()
    fake_backend.send_request = Mock()
    ide = stackide.StackIDE(
        mock_window([cur_dir + '/projects/helloworld/']), test_settings, fake_backend)
    self.assertIsNotNone(ide)
    self.assertTrue(ide.is_active)
    self.assertTrue(ide.is_alive)
    ide.end()
    self.assertFalse(ide.is_active)
    self.assertFalse(ide.is_alive)
    fake_backend.send_request.assert_called_with(Req.get_shutdown())
def buildReqObject(self):
    """Collect the current UI state into a Req describing the request to send."""
    method = self.method.currentText()
    parsed = urlparse(self.url.text())
    protocol = parsed.scheme or "https"
    url = parsed.netloc + parsed.path
    headers = self.getInputHeaders()
    # The key/value editor supplies structured data; otherwise take the raw text.
    if type(self.currentBodyEditor) is KeyValueEditor:
        body = self.currentBodyEditor.getData()
    else:
        body = self.requestBody.toPlainText()
    return Req(method, protocol, url, headers, body,
               self.fileLine.getFile(), self.getContext())
def create_task():
    """Flask endpoint: run answer() for the posted request and return a JSON Resp.

    Optionally writes the rendered HTML to output/<file> and schedules deletion
    of generated HTML/audio files after cfg.delete_output_files_sec seconds.
    """
    r = Req(request.data.decode('utf-8'))
    t, h = answer(r)
    log_print(f"request: {r.request}")
    if r.output_html_file:
        log_print(f"Output to {r.output_html_file}")
        with open(f"output/{r.output_html_file}", 'wb+') as the_file:
            try:
                # Prepend a timestamp comment to the document.  BUG FIX: the
                # original used `h += ...{h.decode('utf-8')}`, which appended a
                # second full copy of the HTML to itself.
                h = f"<!-- {str(datetime.datetime.now())} --> {h.decode('utf-8')}".encode(
                    "utf-8")
                the_file.write(h)
                log_print(f"Written to {r.output_html_file}")
                if cfg.delete_output_files_sec and cfg.delete_output_files_sec > 0:
                    delete_timer = Timer(int(cfg.delete_output_files_sec),
                                         delete_file, (r.output_html_file, ))
                    delete_timer.start()
            except Exception as e:
                logging.error(e)
            # No explicit close(): the with-statement already closes the file.
    if r.output_audio_file:
        to_delete_audio = f"output/{r.output_audio_file}"
        if cfg.delete_output_files_sec and cfg.delete_output_files_sec > 0:
            delete_timer = Timer(2.0, delete_file, (to_delete_audio, ))
            delete_timer.start()
    if not r.is_return_html:
        h = ""
    r = Resp(
        status=None,
        request=r.request,
        text=t,
        html=h if type(h) is str else h.decode("utf-8"),
        uuid=r.uuid,
        output_html_file=f"/output/{r.output_html_file}"
        if r.output_html_file else None,
        output_audio_file=f"/output/{r.output_audio_file}"
        if r.output_audio_file else None,
    )
    resp = make_response(
        json.dumps(sanitize_json(r.__dict__), indent=4, sort_keys=True,
                   ensure_ascii=True), 200)
    return resp
def on_selection_modified(self, view):
    """When the cursor moves in a Haskell file, request and highlight the
    expression types at the selection."""
    if not is_haskell_view(view):
        return
    window = view.window()
    if not StackIDEManager.is_running(window):
        return
    # Only try to get types for views into files
    # (rather than e.g. the find field or the console pane)
    if not view.file_name():
        return
    # Uncomment to see the scope at the cursor:
    # Log.debug(view.scope_name(view.sel()[0].begin()))
    span = span_from_view_selection(view)
    send_request(window, Req.get_exp_types(span), Win(window).highlight_type)
def on_query_completions(self, view, prefix, locations):
    """Sublime autocomplete hook: serve completions fetched from stack-ide.

    Sends a request only for user-initiated queries — not for the query we
    trigger ourselves while refreshing after a backend response — and returns
    whatever completions have been received so far.
    """
    if not is_haskell_view(view):
        return
    window = view.window()
    if not StackIDEManager.is_running(window):
        return
    if not self.refreshing:
        self.view = view
        completion_req = Req.get_autocompletion(
            filepath=relative_view_file_name(view), prefix=prefix)
        send_request(window, completion_req, self._handle_response)
    # Clear the flag so future queries trigger a new request.
    self.refreshing = False
    return [self.format_completion(*entry)
            for entry in self.returned_completions]
def test_kills_live_orphans(self):
    """check_windows() shuts down backend instances whose window was closed."""
    window = sublime.create_window('.')
    StackIDEManager.check_windows()
    self.assertEqual(1, len(StackIDEManager.ide_backend_instances))
    # Swap in a 'live' instance for that window.
    fake_backend = MagicMock()
    stack_ide.stack_ide_loadtargets = Mock(
        return_value=['app/Main.hs', 'src/Lib.hs'])
    live = stack_ide.StackIDE(window, test_settings, fake_backend)
    StackIDEManager.ide_backend_instances[window.id()] = live
    # Close the window; the orphaned instance must be killed on the next sweep.
    sublime.destroy_windows()
    StackIDEManager.check_windows()
    self.assertEqual(0, len(StackIDEManager.ide_backend_instances))
    self.assertFalse(live.is_alive)
    fake_backend.send_request.assert_called_with(Req.get_shutdown())
def test_reset(self):
    """reset() shuts down running instances while keeping them registered."""
    window = mock_window(['.'])
    sublime.add_window(window)
    StackIDEManager.check_windows()
    self.assertEqual(1, len(StackIDEManager.ide_backend_instances))
    # Swap in a 'live' instance for that window.
    fake_backend = MagicMock()
    stack_ide.stack_ide_loadtargets = Mock(
        return_value=['app/Main.hs', 'src/Lib.hs'])
    live = stack_ide.StackIDE(window, test_settings, fake_backend)
    StackIDEManager.ide_backend_instances[window.id()] = live
    StackIDEManager.reset()
    # Still registered, but shut down.
    self.assertEqual(1, len(StackIDEManager.ide_backend_instances))
    self.assertFalse(live.is_alive)
    fake_backend.send_request.assert_called_with(Req.get_shutdown())
    sublime.destroy_windows()
def on_query_completions(self, view, prefix, locations):
    """Sublime autocomplete hook backed by stack-ide completions."""
    if not is_haskell_view(view):
        return
    window = view.window()
    if not StackIDEManager.is_running(window):
        return
    # Ignore the query we trigger ourselves when refreshing the completion
    # list after a backend response; only user queries hit the backend.
    if not self.refreshing:
        self.view = view
        req = Req.get_autocompletion(
            filepath=relative_view_file_name(view), prefix=prefix)
        send_request(window, req, self._handle_response)
    # Allow future completion queries again.
    self.refreshing = False
    return [self.format_completion(*item)
            for item in self.returned_completions]
def __init__(self, trace_path: str, trace_type: str,
             real_time_field: int = -1, obj_id_field: int = -1,
             obj_size_field: int = -1, cnt_field: int = -1,
             op_field: int = -1, **kwargs):
    """Open a request trace for reading.

    Field indices use -1 to mean "this field is not present in the trace".
    Recognized kwargs: ttl_field, key_size_field, value_size_field,
    req_selector.  Any unrecognized kwarg raises AssertionError.
    """
    self.trace_path = trace_path
    # Normalize the spellings of "binary"; other types are kept lowercased.
    self.trace_type = "binary" if trace_type.lower() in ("binary", "bin", "b") else trace_type.lower()
    self.trace_file = None  # binary trace only
    self.struct = ""
    self.struct_size = -1
    # Request counters, updated while reading.
    self.n_req = 0
    self.n_read_req = 0
    self.real_time_field = real_time_field
    self.obj_id_field = obj_id_field
    self.obj_size_field = obj_size_field
    self.cnt_field = cnt_field
    self.op_field = op_field
    self.ttl_field = kwargs.pop("ttl_field", -1)
    self.key_size_field = kwargs.pop("key_size_field", -1)
    self.value_size_field = kwargs.pop("value_size_field", -1)
    self.req_selector = kwargs.pop("req_selector", None)
    self.req = Req(0, None)  # reusable request object filled per record
    # The pops above consumed every known kwarg; anything left is a typo.
    assert len(kwargs) == 0, "kwargs not empty {}".format(kwargs)
    self._open_trace()
""" читает текстовый файл и создает файл json """ from req import Req reqs = Req.read_txt('list_requests_test') Req.create_json(reqs)
import json
from req import Req
from errors import error

# Simple interactive HTTP client: read an endpoint path and a method,
# send the request, and print the response until a stop word is typed.
with open("config.json") as config_file:
    conf = json.load(config_file)
stoplist = conf['stop_words']

while True:
    u = input(conf['endpoint_list']['basic_url'] + '/')
    m = input('Type method (GET, POST, PUT, PATCH, DELETE), or STOP for exit: ')
    method = m.lower()
    if method in ('get', 'delete'):
        print(Req(m, u).send())
    elif method in ('post', 'put', 'patch'):
        # Methods with a body prompt for JSON payload first.
        b = input('Body (JSON):' + '\n' + '''''')
        print(Req(m, u, b).send())
    elif method in stoplist:
        break
    else:
        print(error('unknown method ' + m.upper()))
def update_files(self, filenames):
    """Add `filenames` to the session include targets, then re-check for source errors."""
    includes = self.update_new_include_targets(filenames)
    self.send_request(Req.update_session_includes(includes))
    self.send_request(Req.get_source_errors(),
                      Win(self.window).handle_source_errors)
import Adafruit_DHT
from time import sleep
from req import Req

DHT_SENSOR = Adafruit_DHT.AM2302
DHT_PIN = 4

# Poll the AM2302 sensor and push each successful reading upstream,
# sleeping 15 minutes between cycles.
# NOTE(review): reconstructed from a flattened line — sleep(900) is placed at
# loop level (run every cycle), the most plausible reading; confirm.
while True:
    humidity, temperature = Adafruit_DHT.read_retry(DHT_SENSOR, DHT_PIN)
    if humidity is not None and temperature is not None:
        print("Temp={0:0.1f}*C Humidity={1:0.1f}%".format(temperature, humidity))
        Req(format(temperature, '.1f'), format(humidity, '.1f'))
    else:
        print("Failed to retrieve data from humidity sensor")
    sleep(900)
pool = [] for port in ports: stream = threading.Thread(target=run_scraper, args=(port, reqs, requests_google, requests_yandex)) pool.append(stream) for stream in pool: stream.start() for stream in pool: stream.join() if __name__ == '__main__': read_file_name = 'list_requests' # read_file_name = input('filename: ') if 'json' in (read_file_name): reqs = Req.read_json(read_file_name) else: reqs = Req.read_txt(read_file_name) requests_google = [req.id for req in reqs] # список id не сделаных запросов гугл requests_google.reverse( ) # переверням. теперь можно брать первые id с конца requests_yandex = requests_google.copy( ) # список id не сделаный запросов в яндекс time_now = datetime.now(tz=None) print("time start {}:{}:{}".format(time_now.hour, time_now.minute, time_now.second)) ports = get_ports() # получаем список портов print(ports) lock_w, lock_y, lock_g, lock_rest = threading.RLock(), threading.RLock( ), threading.RLock(), threading.RLock()
def run(self, edit):
    """Request expression info for the current selection and show the response."""
    span = span_from_view_selection(self.view)
    send_request(self.view.window(), Req.get_exp_info(span),
                 self._handle_response)
from os.path import exists


def getT():
    """Return the current unix time as a string — the signed-request timestamp."""
    from time import time
    return str(round(time()))


# Debug harness for the proxy API: point the app at a throwaway database,
# then exercise the list/add endpoints with signed GET and POST requests.
dl = DebugList('proxy.txt')
SEC = 'test1234'
DB = 'proxydebug.db'
set_settings('proxyAPISecrets', SEC)
set_settings('proxyDatabaseLocation', DB)
if exists(DB):
    # Start from a clean database on every run.
    remove(DB)
r = Req(SEC)
print('GET /proxy/list')
re = r.getWithSign('/proxy/list', {"a": ["list"], "t": [getT()]})
re = dealAPIResponse(re)
debug(f"Get {dumps(re)}", dl=dl)
print('GET /proxy/add')
re = r.getWithSign('/proxy/add', {"a": ["add"], "t": [getT()], "id": ["test1"], "headers": ["test=1;a=2"], "cookies": ["tes=1;a=3"]})
re = dealAPIResponse(re)
debug(f"Get {dumps(re)}", dl=dl)
print('POST /proxy/add')
re = r.postWithSign('/proxy/add', {"a": ["add"], "t": [getT()], "id": ["test2"], "headers": ['{"test":"2","a":"4"}'], "cookies": ['{"te":"3","a":"1"}']})
from os import remove
# BUG FIX: exists() is used below but was never imported in this chunk,
# which raised NameError at the exists(DB) call.
from os.path import exists

from utils import set_settings

# Debug harness for the TikTok RSS API: point the app at a throwaway
# database, then exercise the RSS and video-redirect endpoints.
SEC1 = 'test1234'
SEC2 = 'esdd1234'
DB = 'tiktokdebug.db'
USERNAME = '******'
VID = '6945812541574499585'
VID2 = '6992419824760671489'
set_settings('debug', True)
set_settings('tiktokRSSSecrets', SEC1)
set_settings('tiktokDatabaseLocation', DB)
set_settings('RSSProxySerects', SEC2)
if exists(DB):
    # Start from a clean database on every run.
    remove(DB)
# NOTE(review): Req is not imported in this chunk — presumably imported
# elsewhere in the file; confirm `from req import Req` exists at file top.
r = Req(SEC1)
print("Get User's RSS:")
re = r.getWithSign('/tiktokRSS', {"u": [USERNAME], 't': ['rss']})
if re.status_code >= 400:
    print(re.text)
    raise ValueError(f"{re.status_code} {re.reason}")
print("Get video's link with VID:")
re = r.getWithSign('/tiktokRSS', {
    "vid": [VID],
    't': ['url']
}, allow_redirects=False)
if re.status_code != 302:
    print(re.text)
    raise ValueError(f"{re.status_code} {re.reason}")
print(f"Get proxy link: {re.headers['location']}")
import json from database import DB from req import Req import config import log import filter import util from map_dict import map_dict db = DB(host=config.db['host'], port=config.db['port'], user=config.db['user'], password=config.db['password'], db_name=config.db['db_name']) req = Req(retry_http=config.retry_http, slience_http_multiplier=config.slience_db_multiplier, slience_http_multiplier_max=config.slience_http_multiplier_max, timeout_http=config.timeout_http) class Sync: def __init__(self): pass def sync_table(self): pass # synchronize once def sync_by_api(): total = 0 for table in config.tables:
def create_buff(self):
    """Build self.buffer: per-group time-series data pulled from the raw-data API.

    For every method in the rule config, resolves each group path to its
    channel list, fetches a ±insert_inteval-hour window of raw samples, and
    stores them per channel as {timestamp: float value} maps.

    Returns:
        self.buffer — {group: SortedDict of per-channel series}.
    """
    req = Req("172.16.1.117", 80, "/api/v1")

    def _lookup(group):
        # Resolve a group path like "a/b/c" in self.group_config: the parent
        # key is the path minus its last segment, falling back one more level
        # up when that key is absent.  (Replaces duplicated inline logic that
        # also shadowed the builtin `str`.)
        top = group[:group.find("/")]
        parent = group[:group.rfind("/")]
        try:
            return self.group_config[top][parent][group]
        except KeyError:
            parent = parent[:parent.rfind("/")]
            return self.group_config[top][parent][group]

    fmt = '%Y-%m-%d %H:%M:%S.%f'
    for met in self.rule_config.list_method:
        channel = []
        for group in self.rule_config.__getattribute__(met)["group_list"]:
            channel += _lookup(group)
        array = []
        # NOTE(review): `buffer` is never populated, so the channel_condition
        # loop below never runs — preserved for parity; confirm intent.
        buffer = SortedDict()
        interval = self.rule_config.__getattribute__(met)["insert_inteval"]
        # Take a single timestamp so the window is exactly symmetric — the
        # original called datetime.now() repeatedly, skewing the bounds.
        now = datetime.datetime.now()
        print(now - datetime.timedelta(hours=interval),
              now + datetime.timedelta(hours=interval), met)
        t1 = (now - datetime.timedelta(hours=interval)).strftime(fmt)
        t2 = (now + datetime.timedelta(hours=interval)).strftime(fmt)
        # list.append cannot raise KeyError, so no try/except is needed here.
        for record in req.CursorCompress(t1, t2, channel, "raw"):
            array.append(record)
        # Drop pagination links from every fetched record.
        for record in array:
            try:
                del record["next"]
            except KeyError:
                pass
        for group in self.rule_config.__getattribute__(met)["group_list"]:
            buff_data = SortedDict()
            for ch in _lookup(group):
                result = SortedDict()
                try:
                    # Parse each sample of the first record: timestamp -> value.
                    for sample in array[0][ch]:
                        result[datetime.datetime.strptime(sample["time"], fmt)] = float(sample["value"])
                except KeyError:
                    pass
                buff_data[ch] = result
            if len(buff_data) != 0:
                self.buffer[group] = buff_data
        for k in buffer.keys():
            self.channel_condition[k] = {"active": True}
        self.channel += channel
    return self.buffer
def end(self):
    """ Ask stack-ide to shut down. """
    shutdown_request = Req.get_shutdown()
    self.send_request(shutdown_request)
    self.die()