def execute(self, sql=None, args=None, nodb=False, post=None, extra=None):
    """
    Run a query against a randomly chosen ClickHouse node over HTTP.

    :param sql: Query text; `%s` placeholders are substituted with quoted *args*
    :param args: Values interpolated into *sql*
    :param nodb: When True, do not pass the configured database in the query string
    :param post: Pre-built request body; when set, *sql* goes into the URL instead
    :param extra: Iterable of (key, value) pairs appended to the query string
    :return: List of rows, each row a list of tab-separated fields
    :raises ClickhouseError: on any non-200 response
    """

    def quote(value):
        # @todo: quote dates
        if not isinstance(value, six.string_types):
            return str(value)
        return "'%s'" % value.replace("\\", "\\\\").replace("'", "\\'")

    params = []
    if not nodb:
        params.append("database=%s" % config.clickhouse.db)
    if extra:
        params.extend("%s=%s" % (k, v) for k, v in extra)
    if sql:
        if args:
            sql = sql % tuple(quote(a) for a in args)
        if post:
            # Body already occupied - ship the query in the URL
            params.append("query=%s" % urllib_quote(sql.encode("utf8")))
        else:
            post = sql.encode("utf8")
    endpoint = "http://%s/?%s" % (random.choice(self.addresses), "&".join(params))
    code, headers, body = fetch_sync(
        endpoint,
        method="POST",
        body=post,
        user=self.user,
        password=self.password,
        connect_timeout=config.clickhouse.connect_timeout,
        request_timeout=config.clickhouse.request_timeout,
    )
    if code != 200:
        raise ClickhouseError("%s: %s" % (code, body))
    return [line.split("\t") for line in body.splitlines()]
def download(cls, ref_book):
    """
    Fetch a reference book over HTTP(S) and parse it as CSV.

    The first non-empty CSV row supplies field names; every following
    row becomes a dict keyed by those names.

    :param ref_book: Object carrying a `download_url` attribute
    :return: List of row dicts
    :raises IOError: on any non-200 HTTP response
    """
    # Rewrite legacy update URLs to the CDN mirror
    url = ref_book.download_url.replace(
        "http://update.nocproject.org/db/", "https://cdn.nocproject.org/refbook/"
    )
    code, headers, body = fetch_sync(url, follow_redirects=True, allow_proxy=True)
    if code != 200:
        raise IOError("Invalid HTTP response: %s" % code)
    data = StringIO(body)
    # Transparently decompress gzipped refbooks
    if ref_book.download_url.endswith(".gz"):
        data = gzip.GzipFile(fileobj=data)
    header = {}
    records = []
    for row in csv.reader(data):
        if not row:
            continue
        if not header:
            # First non-empty row carries the field names
            header = {i: smart_text(h, errors="ignore") for i, h in enumerate(row)}
            continue
        records.append(
            {header[i]: smart_text(v, errors="ignore") for i, v in enumerate(row)}
        )
    return records
def send_tb(self, messages, address, subject, body):
    """
    Deliver a notification to a Telegram chat via the bot API
    `sendMessage` call.

    :param messages: Unused here; kept for interface compatibility
    :param address: Telegram chat id
    :param subject: Message subject, rendered bold
    :param body: Message body
    :return: True on HTTP 2xx, False otherwise
    """
    # proxy_addres = config.proxy.https_proxy # not used.
    payload = {
        "chat_id": address,
        "text": "*"
        + self.escape_markdown(subject.encode("utf8"))
        + "*\n"
        + self.escape_markdown(body.encode("utf8")),
        "parse_mode": "Markdown",
    }
    # Throttle before every send attempt
    time.sleep(config.tgsender.retry_timeout)
    if not self.url:
        self.logger.info("No token, no Url.")
        return False
    get = self.url + "/sendMessage?" + urlencode(payload)
    self.logger.info("HTTP GET %s", get)
    code, header, body = fetch_sync(
        get,
        allow_proxy=True,
        request_timeout=config.activator.http_request_timeout,
        follow_redirects=True,
        validate_cert=config.activator.http_validate_cert,
    )
    if not (200 <= code <= 299):
        self.logger.error("HTTP GET %s failed: %s %s", get, code, body)
        metrics["telegram_proxy_failed_httperror"] += 1
        return False
    check = json.loads(body)
    self.logger.info("Result: %s" % check)
    metrics["telegram_proxy_sended_ok"] += 1
    return True
def process_request(self, url, body, headers):
    """
    Digest-auth middleware hook: probe *url* anonymously and, when the
    server answers with a Digest challenge, inject a matching
    Authorization header into *headers*.

    :return: Possibly-updated (url, body, headers) triple
    """
    headers = headers or {}
    self.logger.debug("[%s] Process middleware on: %s", self.name, url)
    # First query - 401
    code, resp_headers, result = fetch_sync(
        url,
        headers=None,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
        eof_mark=self.eof_mark,
    )
    self.logger.debug(
        "[%s] Response code %s, headers %s on: %s, body: %s",
        self.name,
        code,
        resp_headers,
        url,
        body,
    )
    www_auth = (
        resp_headers["WWW-Authenticate"] if "WWW-Authenticate" in resp_headers else ""
    )
    if www_auth.startswith("Digest"):
        # Strip the leading "Digest " and parse the challenge parameters
        digest_response = parse_keqv_list(parse_http_list(www_auth[7:]))
        headers["Authorization"] = self.build_digest_header(
            url, self.method, digest_response
        )
    self.logger.debug("[%s] Set headers, %s", self.name, headers)
    return url, body, headers
def execute(self):
    """
    Discover the chassis MAC: try SNMP (ifPhysAddress of interface 1)
    first, then fall back to scraping the device's web page.

    :return: List with a single {first_chassis_mac, last_chassis_mac}
        dict, or None when nothing could be obtained
    """
    # Try SNMP first
    if self.has_snmp():
        try:
            base = self.snmp.get("1.3.6.1.2.1.2.2.1.6.1")
            if base:
                return [{"first_chassis_mac": base, "last_chassis_mac": base}]
        except (self.snmp.TimeOutError, self.snmp.SNMPError):
            # Best effort - fall through to the CLI/HTTP path
            pass
    # Fallback to CLI
    page_url = "http://" + self.credentials.get("address", "") + "/"
    code, header, body = fetch_sync(page_url, allow_proxy=False, eof_mark="</html>")
    if not (200 <= code <= 299):
        return
    found = self.rx_mac.search(body)
    if found:
        mac = found.group("mac").strip()
        return [{"first_chassis_mac": mac, "last_chassis_mac": mac}]
def get(self, path, headers=None, cached=False, json=False, eof_mark=None, use_basic=False):
    """
    Perform HTTP GET request

    :param path: URI
    :param headers: Dict of additional headers
    :param cached: Cache result
    :param json: Decode json if set to True
    :param eof_mark: Waiting eof_mark in stream for end session
        (perhaps device return length 0)
    :param use_basic: Use basic authentication
    :raises HTTPError: on non-2xx responses or undecodable JSON
    """
    self.ensure_session()
    self.request_id += 1
    self.logger.debug("GET %s", path)
    if cached:
        cache_key = "get_%s" % path
        r = self.script.root.http_cache.get(cache_key)
        if r is not None:
            self.logger.debug("Use cached result")
            return r
    user, password = None, None
    if use_basic:
        user = self.script.credentials.get("user")
        password = self.script.credentials.get("password")
    # Apply GET middleware
    url = self.get_url(path)
    hdr = self._get_effective_headers(headers)
    if self.request_middleware:
        for mw in self.request_middleware:
            url, _, hdr = mw.process_get(url, "", hdr)
    # resp_headers renamed from "headers" to stop shadowing the parameter
    code, resp_headers, result = fetch_sync(
        url,
        headers=hdr,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
        eof_mark=eof_mark,
        user=user,
        password=password,
    )
    if not 200 <= code <= 299:
        raise HTTPError(msg="HTTP Error (%s)" % result[:256], code=code)
    self._process_cookies(resp_headers)
    if json:
        try:
            result = ujson.loads(result)
        except ValueError as e:
            # Fix: the exception was previously passed as a second positional
            # argument, so "%s" was never interpolated into the message
            raise HTTPError(msg="Failed to decode JSON: %s" % e)
    self.logger.debug("Result: %r", result)
    if cached:
        self.script.root.http_cache[cache_key] = result
    return result
def pub(topic, data, raw=False):
    """
    Publish *data* to NSQ *topic* through the nsqd HTTP API.

    :param topic: NSQ topic name
    :param data: Payload; serialized to JSON unless *raw* is set
    :param raw: Send *data* as-is without JSON encoding
    :raises Exception: when nsqd answers with a non-200 status
    """
    logger.debug("Publish to topic %s", topic)
    endpoint = config.nsqd.http_addresses[0]
    url = "http://%s:%s/pub" % (endpoint.host, endpoint.port)
    payload = data if raw else ujson.dumps(data)
    code, headers, body = fetch_sync(
        "%s?topic=%s" % (url, topic),
        method="POST",
        body=payload,
        connect_timeout=config.nsqd.connect_timeout,
        request_timeout=config.nsqd.request_timeout,
    )
    if code != 200:
        metrics["error", ("type", "nsq_pub_error_code %s" % code)] += 1
        raise Exception("Cannot publish: %s %s" % (code, body))
def get(self, url):
    """
    Perform get request

    :param url: Target URL
    :return: (status code, response body) tuple
    :raises GeoCoderError: on non-2xx responses
    """
    code, headers, body = fetch_sync(
        url, follow_redirects=True, validate_cert=False, allow_proxy=True
    )
    if not (200 <= code <= 299):
        raise GeoCoderError("HTTP Error %s" % code)
    return code, body
def execute_cli(self, **kwargs):
    """
    HTTP fallback: scrape the chassis MAC from the device's web page.

    :return: List with a single {first_chassis_mac, last_chassis_mac}
        dict, or None when nothing matched
    """
    # Fallback to CLI
    page_url = "http://" + self.credentials.get("address", "") + "/"
    code, header, body = fetch_sync(page_url, allow_proxy=False, eof_mark=b"</html>")
    if not (200 <= code <= 299):
        return
    try:
        found = self.rx_mac.search(body)
        if found:
            mac = found.group("mac").strip()
            return [{"first_chassis_mac": mac, "last_chassis_mac": mac}]
    except ValueError:
        # Best effort - give up silently on parse trouble
        pass
def nsq_pub(topic, message):
    """
    Publish message to NSQ topic

    :param topic: NSQ topic name
    :param message: Raw message (Converted to JSON if is not a string)
    :return:
    :raises NSQPubError: when nsqd answers with a non-200 status
    """
    payload = (
        message if isinstance(message, six.string_types) else ujson.dumps(message)
    )
    # Resolve NSQd or wait
    si = config.nsqd.http_addresses[0]
    # Post message
    code, _, body = fetch_sync(
        "http://%s:%s/pub?topic=%s" % (si.host, si.port, topic),
        method="POST",
        body=payload,
    )
    if code != 200:
        raise NSQPubError("NSQ Pub error: code=%s message=%s" % (code, body))
def process_request(self, url, body, headers):
    """
    Probe *url* anonymously and, on a Digest challenge, attach a
    matching Authorization header to *headers*.

    :return: Possibly-updated (url, body, headers) triple
    """
    headers = headers or {}
    # First query - 401
    code, resp_headers, result = fetch_sync(
        url,
        headers=None,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
    )
    www_auth = (
        resp_headers["WWW-Authenticate"] if "WWW-Authenticate" in resp_headers else ""
    )
    if www_auth.startswith("Digest"):
        # Strip "Digest " prefix, then parse the challenge key=value list
        digest_response = parse_keqv_list(parse_http_list(www_auth[7:]))
        headers["Authorization"] = self.build_digest_header(
            url, self.method, digest_response
        )
    return url, body, headers
def post(self, path, data, headers=None, cached=False, json=False, eof_mark=None):
    """
    Perform HTTP POST request

    :param path: URI
    :param data: Request body to send
    :param headers: Dict of additional headers
    :param cached: Cache result
    :param json: Decode json if set to True
    :param eof_mark: Waiting eof_mark in stream for end session
        (perhaps device return length 0)
    :raises HTTPError: on non-2xx responses or undecodable JSON
    """
    self.logger.debug("POST %s %s", path, data)
    if cached:
        cache_key = "post_%s" % path
        r = self.script.root.http_cache.get(cache_key)
        if r is not None:
            self.logger.debug("Use cached result")
            return r
    code, headers, result = fetch_sync(
        self.get_url(path),
        method="POST",
        # Fix: *data* was logged but never transmitted - the POST body
        # went out empty (cf. the sibling get() which correctly has no body)
        body=data,
        headers=headers,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
        eof_mark=eof_mark,
    )
    # pylint: disable=superfluous-parens
    if not (200 <= code <= 299):  # noqa
        raise self.HTTPError(msg="HTTP Error (%s)" % result[:256], code=code)
    if json:
        try:
            return ujson.loads(result)
        except ValueError as e:
            raise self.HTTPError(msg="Failed to decode JSON: %s" % e)
    self.logger.debug("Result: %r", result)
    if cached:
        self.script.root.http_cache[cache_key] = result
    return result
def process_post(self, url, body, headers):
    """
    Dahua Web auth procedure

    Two-step RPC2 login: an anonymous request obtains a session id and
    challenge parameters, then a second request authenticates with the
    derived password. Afterwards the session id is attached to *body*.

    :param url: Request URL, returned unchanged
    :param body: Request payload dict; gets a "session" key added
    :param headers: Request headers dict (created if falsy)
    :return: (url, body, headers) triple
    """
    if self.http.session_id:
        # Already authenticated - just attach the existing session
        body["session"] = self.http.session_id
        return url, body, headers
    if not headers:
        headers = {}
    # First query - /RPC2_Login
    # Step 1: login with empty password to obtain session + challenge
    auth_url = self.http.get_url("/RPC2_Login")
    code, resp_headers, result = fetch_sync(
        auth_url,
        method="POST",
        body={
            "method": "global.login",
            "params": {
                "userName": self.user,
                "password": "",
                "clientType": "Web3.0",
                "loginType": "Direct",
            },
            "id": self.http.request_id,
        },
        headers=headers,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
    )
    r = orjson.loads(result)
    session = r["session"]
    self.http.set_session_id(session)
    # Derive the real password from the returned challenge parameters
    password = self.get_auth(r["params"])
    # Step 2: authenticate with the derived password and the session id
    code, resp_headers, result = fetch_sync(
        auth_url,
        method="POST",
        body={
            "method": "global.login",
            "params": {
                "userName": self.user,
                "password": password,
                "clientType": "Web3.0",
                "loginType": "Direct",
            },
            "id": self.http.request_id,
            "session": session,
        },
        headers=headers,
        request_timeout=60,
        follow_redirects=True,
        allow_proxy=False,
        validate_cert=False,
    )
    # Account for the two requests issued above
    self.http.request_id += 2
    body["session"] = self.http.session_id
    return url, body, headers