def sapisid_from_cookie(raw_cookie):
    """Extract the SAPISID value from a raw ``Cookie`` header string.

    Raises ``KeyError`` when no SAPISID morsel is present.
    """
    return SimpleCookie(raw_cookie)['SAPISID'].value
def cookies(self):
    """Parse the request's ``cookie`` header into a :class:`SimpleCookie` jar."""
    jar = SimpleCookie()
    header_value = self.headers.get('cookie', '')
    # Load each ';'-separated fragment individually, as the original did.
    for fragment in header_value.split(';'):
        jar.load(fragment)
    return jar
def cookies(self):
    """Return the request cookies as a plain ``{name: value}`` dict."""
    jar = SimpleCookie(self.headers.get("cookie", ""))
    return {name: morsel.value for name, morsel in jar.items()}
def _set_client_id(self, client_id):
    """Emit a ``Set-Cookie`` header assigning ``id=<client_id>``."""
    jar = SimpleCookie({'id': client_id})
    # header='' strips the leading "Set-Cookie:" prefix from the morsel output.
    self.send_header('Set-Cookie', jar.output(header=''))
def do_GET(self, post_data=None):
    """Route a GET (or POST, via ``post_data``) request to a registered web app.

    Restores login state from the ``sess`` cookie, dispatches ``self.path``
    against the module-level ``apps`` and ``nested_webapps`` registries, and
    streams the resulting page parts to the client.

    :param post_data: form payload forwarded to the matched app's ``do_POST``;
        ``None`` for a plain GET.
    """
    user_id = None
    self.id = None
    # Look for a session cookie and restore login state from it.
    for header in self.headers._headers:
        if(header[0] == 'Cookie'):
            cookie = SimpleCookie()
            cookie.load(header[1])
            cookies = {}
            for key, morsel in cookie.items():
                cookies[key] = morsel.value
            if('sess' in cookies):
                self.logged_in, self.id = check_session(cookies['sess'])
    try:
        url = urlparse(self.path)
        found = False
        PARSED_URL = os.path.split(url.path)
        for app in apps:
            if(url.path == '/logout'):
                # Expire the session cookie and redirect to the main page.
                found = True
                web_page = [VIEWS['redirect-to-main']]
                self.send_response(200)
                self.send_header(
                    'Set-Cookie',
                    'sess=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT')
            if(url.path == app):
                found = True
                web_app = apps[app]
                if(post_data is not None):
                    user_id = web_app.do_POST(post_data)
                web_page = web_app.do_GET(user_id = self.id)
                self.send_response(200)
                break
        if(not found):
            # Fall back to nested apps: the first path segment selects the
            # app, the remaining segments are forwarded to it.
            PARSED_URL = url.path.split('/')[1:]
            for app in nested_webapps:
                if(PARSED_URL[0] == app):
                    found = True
                    web_app = nested_webapps[app]
                    if(post_data is not None):
                        user_id = web_app.do_POST(post_data)
                    web_page = web_app.do_GET(PARSED_URL[1:], user_id=self.id)
                    self.send_response(200)
                    break
        if(not found):
            # NOTE(review): this path returns before end_headers() is called —
            # confirm the early return is intended for the 404 case.
            web_page = "App not found"
            self.send_response(404)
            return
    except:
        # NOTE(review): bare except hides routing/app errors; the 404 body is
        # still written by the loop below.
        web_page = "File not found"
        self.send_response(404)
    if(user_id is not None):
        # A POST produced a fresh session id; hand it to the client.
        print('Cookie Sent')
        self.send_header('Set-Cookie', 'sess=' + user_id)
    self.end_headers()
    # web_page is a sequence of template file paths; single-item sub-lists
    # are written literally instead of being read from disk.
    for part in web_page:
        if(isinstance(part, (list,))):
            self.wfile.write(bytes(str(part[0]), 'utf-8'))
        else:
            self.wfile.write(bytes(open(part).read(), 'utf-8'))
def filter_cookies(self, request_url: URL) -> 'BaseCookie[str]':
    """Return the cookies to send for *request_url*.

    This implementation keeps no cookies, so the result is always an
    empty jar.
    """
    empty_jar = SimpleCookie()
    return empty_jar
def make_cookie(name, load, seed, expire=0, domain="", path="", timestamp="",
                enc_key=None):
    """
    Create and return a cookie

    The cookie is secured against tampering.

    If you only provide a `seed`, a HMAC gets added to the cookies value
    and this is checked, when the cookie is parsed again.

    If you provide both `seed` and `enc_key`, the cookie gets protected
    by using AEAD encryption. This provides both a MAC over the whole cookie
    and encrypts the `load` in a single step.

    The `seed` and `enc_key` parameters should be byte strings of at least 16
    bytes length each. Those are used as cryptographic keys.

    :param name: Cookie name
    :type name: text
    :param load: Cookie load
    :type load: text
    :param seed: A seed key for the HMAC function
    :type seed: byte string
    :param expire: Number of minutes before this cookie goes stale
    :type expire: int
    :param domain: The domain of the cookie
    :param path: The path specification for the cookie
    :param timestamp: A time stamp
    :type timestamp: text
    :param enc_key: The key to use for cookie encryption.
    :type enc_key: byte string
    :return: A tuple to be added to headers
    """
    cookie = SimpleCookie()

    # Default the timestamp to "now" (whole seconds since the epoch).
    if not timestamp:
        timestamp = str(int(time.time()))

    bytes_load = load.encode("utf-8")
    bytes_timestamp = timestamp.encode("utf-8")

    if enc_key:
        # Make sure the key is 256-bit long, for AES-128-SIV
        #
        # This should go away once we push the keysize requirements up
        # to the top level APIs.
        key = _make_hashed_key((enc_key, seed))

        #key = AESGCM.generate_key(bit_length=128)
        aesgcm = AESGCM(key)
        iv = os.urandom(12)

        # timestamp does not need to be encrypted, just MAC'ed,
        # so we add it to 'Associated Data' only.
        ct = split_ctx_and_tag(aesgcm.encrypt(iv, bytes_load, bytes_timestamp))

        ciphertext, tag = ct
        # AEAD payload: timestamp | iv | ciphertext | tag, each b64-encoded.
        cookie_payload = [
            bytes_timestamp,
            base64.b64encode(iv),
            base64.b64encode(ciphertext),
            base64.b64encode(tag)
        ]
    else:
        # HMAC-only payload: load | timestamp | signature.
        cookie_payload = [
            bytes_load, bytes_timestamp,
            cookie_signature(seed, load, timestamp).encode('utf-8')
        ]

    cookie[name] = (b"|".join(cookie_payload)).decode('utf-8')
    if path:
        cookie[name]["path"] = path
    if domain:
        cookie[name]["domain"] = domain
    if expire:
        cookie[name]["expires"] = _expiration(expire,
                                              "%a, %d-%b-%Y %H:%M:%S GMT")

    # Split "Set-Cookie: <value>" into a (header-name, header-value) tuple.
    return tuple(cookie.output().split(": ", 1))
def get_cookie(self):
    """Parse the request's ``HTTP_COOKIE`` environ entry; empty jar if absent."""
    header = self.env.get('HTTP_COOKIE')
    return SimpleCookie(header) if header is not None else SimpleCookie()
# Q2 #print(os.environ["QUERY_STRING"]) # Q3 #print(os.environ["HTTP_USER_AGENT"]) # Q4 #print(login_page()) s = cgi.FieldStorage() username = s.getfirst("username") password = s.getfirst("password") # Q5 form_ok = username == secret.username and password == secret.password c = SimpleCookie(os.environ['HTTP_COOKIE']) c_username = None c_password = None if c.get("username"): c_username = c.get("username").value if c.get("password"): c_password = c.get("password").value cookie_ok = c_username == secret.username and c_password == secret.password if cookie_ok: username = c_username password = c_password if form_ok: print("Set-Cookie: username=", username)
def __iter__(self):
    """Dispatch the request through ``handle_route`` and iterate its result."""
    jar = SimpleCookie(self.env.get('HTTP_COOKIE'))
    return self.handle_route(self.path, self.params, jar, None)
def cookie(**args):
    """Build a :class:`SimpleCookie` from keyword args, pinning every morsel's
    path to ``config.URL_ROOT``."""
    jar = SimpleCookie(args)
    for name in jar:
        jar[name]['path'] = config.URL_ROOT
    return jar
def spider(add_link_dictionary, gid):
    """Probe a download link with a HEAD request and persist its metadata.

    Configures a ``requests.Session`` (proxy, HTTP auth, cookies, referer,
    user agent) from *add_link_dictionary*, issues a HEAD request, extracts
    the file name (Content-Disposition or URL tail, overridable by ``out``)
    and a human-readable file size, then writes both into the per-download
    info file named after *gid*.

    :param add_link_dictionary: user's download request ('***'/None mark
        unset values); must contain the keys read below.
    :param gid: download id, used as the info file name.
    """
    # getting user's download request from add_link_dictionary
    link = add_link_dictionary['link']
    ip = add_link_dictionary['ip']
    port = add_link_dictionary['port']
    proxy_user = add_link_dictionary['proxy_user']
    proxy_passwd = add_link_dictionary['proxy_passwd']
    download_user = add_link_dictionary['download_user']
    download_passwd = add_link_dictionary['download_passwd']
    header = add_link_dictionary['header']
    out = add_link_dictionary['out']
    user_agent = add_link_dictionary['user-agent']
    raw_cookies = add_link_dictionary['load-cookies']
    referer = add_link_dictionary['referer']

    if out == '***':
        out = None

    requests_session = requests.Session()  # defining a requests Session

    if ip:
        ip_port = 'http://' + str(ip) + ":" + str(port)
        if proxy_user:
            ip_port = 'http://' + proxy_user + ':' + proxy_passwd + '@' + ip_port
        # setting proxy to the session
        requests_session.proxies = {'http': ip_port}

    if download_user:
        # setting download user pass to the session.
        # BUG FIX: Session.auth is an attribute, not a method — the old
        # ``requests_session.auth(download_user, download_passwd)`` raised
        # ``TypeError: 'NoneType' object is not callable``.
        requests_session.auth = (download_user, download_passwd)

    if raw_cookies is not None:
        # setting cookies
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        cookies = {key: morsel.value for key, morsel in cookie.items()}
        requests_session.cookies = cookiejar_from_dict(cookies)

    if referer is not None:
        # setting referer to the session
        requests_session.headers.update({'referer': referer})

    if user_agent is not None:
        # setting user_agent to the session
        requests_session.headers.update({'user-agent': user_agent})

    # finding headers
    response = requests_session.head(link)
    header = response.headers

    filename = '***'
    filesize = '***'
    if 'Content-Disposition' in header:  # checking if filename is available
        content_disposition = header['Content-Disposition']
        if content_disposition.find('filename') != -1:
            filename_splited = content_disposition.split('filename=')
            filename_splited = filename_splited[-1]
            # getting file name in desired format (strip surrounding quotes)
            filename = filename_splited[1:-1]
    if filename == '***':
        # No usable Content-Disposition; fall back to the URL's last segment.
        filename = link.split('/')[-1]
    if out is not None:
        # An explicit output name always wins.
        filename = out

    if 'Content-Length' in header:  # checking if file_size is available
        file_size = int(header['Content-Length'])
        # converting file_size to KB or MB or GB
        if int(file_size / 1073741824) != 0:
            file_size = file_size / 1073741824
            size_str = str(round(file_size, 2)) + " GB"
        elif int(file_size / 1048576) != 0:
            size_str = str(int(file_size / 1048576)) + " MB"
        elif int(file_size / 1024) != 0:
            size_str = str(int(file_size / 1024)) + " KB"
        else:
            size_str = str(file_size)
        filesize = size_str

    # Merge the discovered name/size into the persisted download info list
    # (only non-None slots overwrite existing entries).
    download_info_file = os.path.join(download_info_folder, gid)
    download_info_file_list = readList(download_info_file)
    download_info = [
        filename, None, filesize, None, None, None, None, None, None, None,
        None, None, None
    ]
    for i in range(13):
        if download_info[i] is not None:
            download_info_file_list[i] = download_info[i]
    writeList(download_info_file, download_info_file_list)
def upstream_result_into_response(self, _class=Response) -> Optional[Response]:
    """Convert the stored upstream scrape result into a ``requests.Response``.

    Returns ``None`` when there is no result or the upstream API call itself
    did not return HTTP 200. Reconstructs status, reason, body, headers, URL,
    the originating request, and any ``set-cookie`` headers (as
    ``http.cookiejar`` cookies) from ``self.scrape_result`` / ``self.config``.

    :param _class: must be ``requests.Response``; anything else raises
        ``RuntimeError``.
    """
    if _class != Response:
        raise RuntimeError('only Response from requests package is supported at the moment')

    if self.result is None:
        return None

    # self.response is the raw API transport response; a non-200 there means
    # no usable scrape payload.
    if self.response.status_code != 200:
        return None

    response = Response()
    response.status_code = self.scrape_result['status_code']
    response.reason = self.scrape_result['reason']
    # requests keeps the body in the private _content slot.
    response._content = self.scrape_result['content'].encode('utf-8') if self.scrape_result['content'] else None
    response.headers.update(self.scrape_result['response_headers'])
    response.url = self.scrape_result['url']

    response.request = Request(
        method=self.config['method'],
        url=self.config['url'],
        headers=self.scrape_result['request_headers'],
        data=self.config['body'] if self.config['body'] else None
    )

    # NOTE(review): this iterates response.headers['set-cookie'] directly —
    # assumes the upstream stores it as a list of raw cookie strings; confirm.
    if 'set-cookie' in response.headers:
        for raw_cookie in response.headers['set-cookie']:
            for name, cookie in SimpleCookie(raw_cookie).items():
                # Normalize 'expires' to a numeric timestamp (or None).
                expires = cookie.get('expires')

                if expires == '':
                    expires = None

                if expires:
                    try:
                        expires = parse(expires).timestamp()
                    except ValueError:
                        expires = None

                if type(expires) == str:
                    if '.' in expires:
                        expires = float(expires)
                    else:
                        expires = int(expires)

                response.cookies.set_cookie(Cookie(
                    version=cookie.get('version') if cookie.get('version') else None,
                    name=name,
                    value=cookie.value,
                    path=cookie.get('path', ''),
                    expires=expires,
                    comment=cookie.get('comment'),
                    domain=cookie.get('domain', ''),
                    secure=cookie.get('secure'),
                    port=None,
                    port_specified=False,
                    domain_specified=cookie.get('domain') is not None and cookie.get('domain') != '',
                    domain_initial_dot=bool(cookie.get('domain').startswith('.')) if cookie.get('domain') is not None else False,
                    path_specified=cookie.get('path') != '' and cookie.get('path') is not None,
                    discard=False,
                    comment_url=None,
                    rest={
                        'httponly': cookie.get('httponly'),
                        'samesite': cookie.get('samesite'),
                        'max-age': cookie.get('max-age')
                    }
                ))

    return response
import os.path
import struct
import sys
import urllib.parse, urllib.error
from urllib.request import Request, urlopen
from http.cookies import SimpleCookie

from payload import Payload
from searchresult import SearchResult

# config — both values must be provided via environment variables.
notionSpaceId = os.environ['notionSpaceId']
cookie = os.environ['cookie']

# convert cookie string to dict for later use
bakedCookie = SimpleCookie()
bakedCookie.load(cookie)
# even though SimpleCookie is dictionary-like, it internally uses a Morsel object
# Manually construct a dictionary instead.
bakedCookies = {}
for key, morsel in bakedCookie.items():
    bakedCookies[key] = morsel.value

# get useDesktopClient env variable and convert to boolean for use later, default to false
useDesktopClient = os.environ['useDesktopClient']
# NOTE(review): only the exact spellings 'true'/'True'/'TRUE' count as true;
# the bitwise '|' works here because the operands are bools.
if (useDesktopClient == 'true') | (useDesktopClient == 'True') | (useDesktopClient == 'TRUE'):
    useDesktopClient = True
else:
    useDesktopClient = False
def __init__(self) -> None:
    """Initialize the base class and start with an empty cookie jar."""
    super().__init__()
    # Cookie storage for this instance; morsel values are str.
    self._cookies = SimpleCookie()  # type: SimpleCookie[str]
def cookies(self):
    """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
        decoded. Use :meth:`get_cookie` if you expect signed cookies. """
    raw_header = self._env_get('HTTP_COOKIE', '')
    morsels = SimpleCookie(raw_header).values()
    pairs = ((morsel.key, morsel.value) for morsel in morsels)
    return FormsDict(pairs)
def update_cookies(
        self,
        cookies: LooseCookies,
        response_url: URL = URL()) -> None:
    """Update cookies.

    Accepts either a mapping or an iterable of ``(name, cookie)`` pairs,
    normalizes each entry's path and domain against *response_url*, records
    expirations from ``max-age``/``expires``, and stores the morsel under
    its ``(domain, path)`` key.
    """
    hostname = response_url.raw_host

    if not self._unsafe and is_ip_address(hostname):
        # Don't accept cookies from IPs
        return

    if isinstance(cookies, Mapping):
        cookies = cookies.items()  # type: ignore

    for name, cookie in cookies:
        # Wrap bare values in a Morsel via a throwaway SimpleCookie.
        if not isinstance(cookie, Morsel):
            tmp = SimpleCookie()
            tmp[name] = cookie  # type: ignore
            cookie = tmp[name]

        path = cookie["path"]
        if not path or not path.startswith("/"):
            # Set the cookie's path to the response path
            path = response_url.path
            if not path.startswith("/"):
                path = "/"
            else:
                # Cut everything from the last slash to the end
                path = "/" + path[1:path.rfind("/")]
            cookie["path"] = path

        domain = cookie["domain"]

        # ignore domains with trailing dots
        if domain.endswith('.'):
            domain = ""
            del cookie["domain"]

        if not domain and hostname is not None:
            # Set the cookie's domain to the response hostname
            # and set its host-only-flag
            self._host_only_cookies.add((hostname, path, name))
            domain = cookie["domain"] = hostname

        if domain.startswith("."):
            # Remove leading dot
            domain = domain[1:]
            cookie["domain"] = domain

        if hostname and not self._is_domain_match(domain, hostname):
            # Setting cookies for different domains is not allowed
            continue

        # max-age takes precedence over expires.
        max_age = cookie["max-age"]
        if max_age:
            try:
                delta_seconds = int(max_age)
                self._expire_cookie(self._loop.time() + delta_seconds,
                                    domain, path, name)
            except ValueError:
                # Malformed max-age: drop the attribute, keep the cookie.
                cookie["max-age"] = ""

        else:
            expires = cookie["expires"]
            if expires:
                expire_time = self._parse_date(expires)
                if expire_time:
                    self._expire_cookie(expire_time.timestamp(),
                                        domain, path, name)
                else:
                    # Unparseable date: drop the attribute, keep the cookie.
                    cookie["expires"] = ""

        self._cookies[(domain, path)][name] = cookie

    self._do_expiration()
import requests, re
from http.cookies import SimpleCookie

# Paste the cookie string from your logged-in session here
# (instructions on how to obtain it are explained below).
cookies = '''id3=DACD2C06-965A-4445-9029-20DCFA32d953ea5d483442256-965A-4445-902bca2989f6ac; SESS73302e152; fingerprint_s=0510db5feb41aff108b6801637d882dd; bp_video_offset_277091524=480147102836298343; bp_t_offset_277091524=480147102836298343; PVID=1'''
# Parse the raw Cookie header into a plain dict for requests.
cookie = SimpleCookie(cookies)
cookies = {coo.key: coo.value for coo in cookie.values()}

headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
}

# First page of the member fan list (500 entries per page).
base_url = 'https://member.bilibili.com/x/h5/data/fan/list?ps=500'
result = requests.get(base_url, headers=headers, cookies=cookies).json()['data']
follower_count = result['my_follower']
fanNamesList = result['result']
page = 0
f = open("Names.txt", "w", encoding='UTF-8')


def getNextPageOfFans(lastFanId):
    """Fetch the next page of fans after *lastFanId*.

    NOTE(review): this function appears truncated in this chunk — only the
    page counter bump and progress print are visible.
    """
    global page
    fansNames = []
    page = page + 1
    print("Getting page " + str(page))
def setUp(self):
    """Attach a signed ``_ofs`` (email) cookie to the test client."""
    signer = TimestampSigner()
    signed_value = signer.sign('*****@*****.**')
    self.client.cookies = SimpleCookie({'_ofs': signed_value})
def main():
    """CGI endpoint: store a signup form in DynamoDB and notify via SES.

    Emits a JSON status document on stdout. Requires a ``userId`` cookie;
    without it, responds with a FAILURE status and returns early.
    """
    # BUG FIX: the except clause below used ``Cookie.CookieError`` — the
    # Python 2 module name — which raised NameError whenever the clause was
    # evaluated. Import the exception from http.cookies instead.
    from http.cookies import CookieError

    print('Content-Type: application/json')
    print()
    try:
        cookie = SimpleCookie(environ["HTTP_COOKIE"])
        userId = cookie["userId"].value
    except (CookieError, KeyError):
        print(dumps({'status':'FAILURE','failureCode':'NO_USERID_COOKIE'}))
        return

    # Parse the url-encoded payload posted in the 'data' form field.
    form = cgi.FieldStorage()
    data = form['data'].value
    parsedData = cgi.parse_qs(data)  # NOTE(review): cgi.parse_qs is deprecated; urllib.parse.parse_qs is the modern equivalent

    r = resource('dynamodb')
    # Field groups, keyed by how their values are interpreted.
    booleans = ['children']
    radios = ['ageRange','typicalDate','traditionalAdventurous','closeOrFar','reason','sillySerious','braveCautious','laidBackActive', 'sociablePrivate']
    strings = ['username','email','whoWith','howLongTogether','howOften','otherTypicalText','numberOfChildren','hobbies','religion','occasion','postcode','allergies','dietary','alcohol','fears','dislikes']
    usersTable = r.Table('Users')

    # Build the DynamoDB update expression and a plain-text summary email.
    updateStrings = []
    updateValues = {}
    emailText = 'We got a new signup from ' + parsedData['username'][0] + '\nHere\'s a dump of their information (I\'ll make this a bit prettier once I know it works!)'
    for booleanKey in booleans:
        updateStrings.append(booleanKey + '=:' + booleanKey)
        updateValues[':'+booleanKey] = (parsedData[booleanKey][0] == 'true')
        emailText += '\n' + booleanKey + ' = ' + parsedData[booleanKey][0]
    for radiosKey in radios:
        updateStrings.append(radiosKey + '=:' + radiosKey)
        updateValues[':'+radiosKey] = parsedData[radiosKey][0]
        emailText += '\n' + radiosKey + ' = ' + parsedData[radiosKey][0]
    for stringKey in strings:
        updateStrings.append(stringKey + '=:' + stringKey)
        try:
            updateValues[':'+stringKey] = parsedData[stringKey][0]
            emailText += '\n' + stringKey + ' = ' + parsedData[stringKey][0]
        except KeyError:
            # Optional free-text fields may be absent from the payload.
            updateValues[':'+stringKey] = '<blank>'
            emailText += '\n' + stringKey + ' = <blank>'
    updateStrings.append('interests=:interests')
    updateValues[':interests'] = ','.join(parsedData['interests'])
    emailText += '\ninterests = ' + ','.join(parsedData['interests'])

    usersTable.update_item(
        Key={
            'id':userId
        },
        UpdateExpression = 'SET ' + ','.join(updateStrings),
        ExpressionAttributeValues = updateValues
    )

    sesClient = client('ses')
    sesClient.send_email(
        Source = EMAIL_ADDRESS,
        Destination = {
            'ToAddresses': [EMAIL_ADDRESS]
        },
        Message = {
            'Subject': {'Data':'New signup! - ' + parsedData['username'][0]},
            'Body': {'Text':{'Data':emailText}}
        }
    )
    print(dumps({'status':'SUCCESS'}))
def do_GET(self):
    """Handle a chat-server GET: either store a posted message (query params
    ``p`` = pseudo, ``k`` = message) or serve a page / the message feed.

    Relies on the module-level ``users`` and ``messages`` stores.
    """
    host = self.client_address[0] + ':' + str(self.client_address[1])

    # Read cookies
    cookies = SimpleCookie(self.headers.get('Cookie'))
    if cookies:
        print(cookies)
    # NOTE(review): nesting reconstructed — last_id defaults to "" when the
    # client sent no last_id cookie; confirm against the original layout.
    if "last_id" in cookies:
        last_id = cookies["last_id"].value
    else:
        last_id = ""

    # Parse query string
    query_dict = {}
    if self.path.find('?') > 0:
        path, query = self.path.split("?")
        query_dict = parse_qs(query)
        print(query_dict)

    if "p" in query_dict and "k" in query_dict:
        # New message received
        pseudo = query_dict["p"][0][:32]  # 32 chars max
        pseudo_lower = pseudo.lower()
        message = query_dict["k"][0]
        if host not in users or users[host]["pseudo"] != pseudo:
            # User unknown, store new user.
            # Derive a stable display hue from the pseudo's hash.
            # NOTE(review): signed= expects a bool; the string 'signed' is
            # merely truthy (acts as signed=True, needed since hash() can be
            # negative) — confirm this is intentional.
            b = hash(pseudo_lower).to_bytes(8, byteorder='big',
                                            signed='signed')
            s = sum(b)
            hue = s % 360
            users[host] = {
                "pseudo": pseudo,
                "hue": hue,
                "last_message": datetime.min,
                #"num_messages": 0,
            }
        # Check the client activity interval,
        # refuse client if too short
        now = datetime.now()
        if (now - users[host]["last_message"]
                ).total_seconds() > MESSAGE_INTERVAL:
            # Accept and store message
            users[host]["last_message"] = now
            #users[host]["num_messages"] += 1
            print("Message:", message)
            # ID, "IP:port", date, type, pseudo, message
            content_type = "text"
            if ('<' in message) and ('>' in message):
                content_type = "html"
            messages.insert(0, (str(uuid1()), host, now.isoformat(),
                                content_type, pseudo, message))
    else:
        path = self.path
        self.send_response(200)
        self.end_headers()
        if path == "/clear":
            messages.clear()
            self.wfile.write(pajenn_degemer)
        elif path == '/' or path == "/index.html":
            self.wfile.write(pajenn_degemer)
        elif path == "/kemennadennou":
            # AJAX request from the client:
            # send the messages back to the client.
            xmldoc = ""
            for m in messages:
                id = m[0]
                if id == last_id:
                    # Don't send older messages than last_id
                    break
                hostname = m[1]
                isotime = m[2]
                content_type = m[3]
                pseudo = m[4]
                message = m[5]
                hue = users[hostname]["hue"]
                xmldoc += KEMENNADENN.format(id, content_type, isotime,
                                             hostname, hue, hue,
                                             (hue + 180) % 360,
                                             pseudo, message)
            if xmldoc:
                response = ""
                response += XML_HEADER
                response += XML_ROOT_START
                response += xmldoc
                response += XML_ROOT_END
                #print("response:", response)
                self.wfile.write(response.encode("utf-8"))
            ### Hours of debugging lost to an indentation problem here :(
        else:
            # Fall back to serving a static file relative to the CWD.
            filename = os.path.abspath(os.path.curdir) + path
            try:
                with open(filename, 'rb') as f:
                    self.wfile.write(f.read())
            except FileNotFoundError:
                self.send_error(404, "File not found")
def parse_cookies(value):
    """Parse a Cookie header string into a list of ``name=value`` strings."""
    result = []
    for morsel in SimpleCookie(value).values():
        result.append(morsel.OutputString())
    return result
def _set_common_domain_cookie(self, internal_response, http_args, context):
    """
    Append a SAML common domain cookie (``_saml_idp``) header to *http_args*.

    The cookie value is a URL-quoted, space-separated list of base64-encoded
    IdP entity ids, with the IdP used in this flow moved to the end.
    """
    # Find any existing common domain cookie and deconstruct it to
    # obtain the list of IdPs.
    cookie = SimpleCookie(context.cookie)
    if '_saml_idp' in cookie:
        common_domain_cookie = cookie['_saml_idp']
        msg = "Found existing common domain cookie {}".format(
            common_domain_cookie)
        logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                    message=msg)
        logger.debug(logline)

        # Decode the existing value back into a plain list of entity ids.
        space_separated_b64_idp_string = unquote(
            common_domain_cookie.value)
        b64_idp_list = space_separated_b64_idp_string.split()
        idp_list = [
            urlsafe_b64decode(b64_idp).decode('utf-8')
            for b64_idp in b64_idp_list
        ]
    else:
        msg = "No existing common domain cookie found"
        logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                    message=msg)
        logger.debug(logline)
        idp_list = []

    msg = "Common domain cookie list of IdPs is {}".format(idp_list)
    logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                message=msg)
    logger.debug(logline)

    # Identify the current IdP just used for authentication in this flow.
    this_flow_idp = internal_response.auth_info.issuer

    # Remove all occurrences of the current IdP from the list of IdPs.
    idp_list = [idp for idp in idp_list if idp != this_flow_idp]

    # Append the current IdP.
    idp_list.append(this_flow_idp)
    msg = "Added IdP {} to common domain cookie list of IdPs".format(
        this_flow_idp)
    logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                message=msg)
    logger.debug(logline)
    msg = "Common domain cookie list of IdPs is now {}".format(idp_list)
    logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                message=msg)
    logger.debug(logline)

    # Construct the cookie.
    b64_idp_list = [
        urlsafe_b64encode(idp.encode()).decode("utf-8") for idp in idp_list
    ]
    space_separated_b64_idp_string = " ".join(b64_idp_list)
    url_encoded_space_separated_b64_idp_string = quote(
        space_separated_b64_idp_string)
    cookie = SimpleCookie()
    cookie['_saml_idp'] = url_encoded_space_separated_b64_idp_string
    cookie['_saml_idp']['path'] = '/'

    # Use the domain from configuration if present else use the domain
    # from the base URL for the front end.
    domain = urlparse(self.base_url).netloc
    if isinstance(self.config['common_domain_cookie'], dict):
        if 'domain' in self.config['common_domain_cookie']:
            domain = self.config['common_domain_cookie']['domain']

    # Ensure that the domain begins with a '.'
    if domain[0] != '.':
        domain = '.' + domain

    cookie['_saml_idp']['domain'] = domain
    cookie['_saml_idp']['secure'] = True

    # Set the cookie.
    msg = "Setting common domain cookie with {}".format(cookie.output())
    logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state),
                                message=msg)
    logger.debug(logline)
    # Split "Set-Cookie: <value>" into a (header-name, header-value) tuple.
    http_args['headers'].append(tuple(cookie.output().split(": ", 1)))
def load_page(environ, start_response):
    """WSGI handler: enforce HTTPS (except on the dev server), authenticate
    via the ``sessionID`` cookie, apply login-based redirects, and serve a
    static file from the ``frontend`` directory.
    """
    if environ['HTTP_HOST'] == "localhost:3000":
        # This is a development server
        pass
    # Assume this is a production server
    else:
        if 'HTTPS' not in environ or environ['HTTPS'].lower() != 'on':
            # Redirect to https
            dest = "https://" + environ['HTTP_HOST'] + environ[PATH]
            start_response('307 Temporary Redirect', [('Location', dest)])
            return [b'1']

    # Map the URL path onto the frontend directory; directories get index.html.
    path = "frontend" + environ[PATH]
    if path[-1] == "/":
        path += "index.html"

    # Check cookies
    cookie = None
    netId = None
    if COOKIES in environ:
        raw_cookies = environ[COOKIES]
        cookie = SimpleCookie()
        cookie.load(raw_cookies)
        try:
            netId = session_auth(cookie['sessionID'].value)
        except:
            #print_exc()
            # Don't print out that they have an invalid cookie/no cookie
            pass
        finally:
            # NOTE(review): the redirect logic sits in this finally clause,
            # so it only runs when a Cookie header was present — confirm.
            # Redirect if not logged in
            if path != "frontend/index.html":
                if netId == None:
                    return redirect(environ, start_response)
            # Redirect if logged in
            else:
                if netId != None:
                    return redirect(environ, start_response, "/calendar.html")

    try:
        # Pick a Content-Type from the file extension and read the file.
        contentType = []
        message = ""
        if ".png" in path:
            message = open(path, "rb").read()
            contentType = [('Content-Type', 'image/png')]
        elif ".jpg" in path:
            message = open(path, "rb").read()
            contentType = [('Content-Type', 'image/jpg')]
        elif ".css" in path:
            message = open(path, "r").read().encode()
            contentType = [('Content-Type', 'text/css')]
        else:
            message = open(path, "r").read().encode()
            contentType = [('Content-Type', 'text/html')]
        start_response(
            '200 OK',
            contentType + [('Cache-Control', 'no-store, must-revalidate'),
                           ('Pragma', 'no-cache'), ('Expires', '0')])
        return [message]
    except Exception as e:
        print_exc()
        # Error occured
        # TODO specify 404.html file
        return not_found(environ, start_response)
#question 4
print("Content-Type: text/html\n")
#print()
#print(os.environ)
#print(login_page())

# Read the submitted login form fields.
s = cgi.FieldStorage()
username = s.getfirst("username")
password = s.getfirst("password")

#question 5
# Form credentials are valid when they match the ones in the secret module.
form_ok = username == secret.username and password == secret.password

# ROBUSTNESS FIX: a first-time visitor sends no Cookie header; use .get()
# so the script does not crash with KeyError before it can respond.
c = SimpleCookie(os.environ.get("HTTP_COOKIE", ""))
c_username = None
c_password = None
if c.get("username"):
    c_username = c.get("username").value
if c.get("password"):
    # BUG FIX: this branch used to overwrite c_username instead of
    # assigning c_password, so the cookie check below could never pass.
    c_password = c.get("password").value

# BUG FIX: compared against the non-existent ``secret.c_password``;
# the stored password is ``secret.password`` (same as the form check).
cookie_ok = c_username == secret.username and c_password == secret.password
if cookie_ok:
    # Returning visitor: restore credentials from the cookie.
    username = c_username
    password = c_password
if form_ok:
    # Fresh successful login: remember the username in a cookie.
    print("Set-Cookie: username= ", username)
def parse(input_file, pretty_print, **kwargs):
    """Convert a Burp "items" XML export into a JSON auth-matrix document.

    Collects all ``<role>`` elements into an indexed role table, then, for
    each ``<item>``, gathers the user's roles and the request/response
    cookies (base64 encoded) into ``arrayOfUsers``. The resulting document
    is printed as JSON.

    :param input_file: path (or file object) parsable by ``ET.parse``.
    :param pretty_print: truthy → indent the JSON output by 4 spaces.
    :return: 0 on completion.
    """
    if pretty_print:
        pretty_print = 4
    else:
        pretty_print = None

    # json template
    dct = {'version': '0.8', 'arrayOfUsers': []}
    tree = ET.parse(input_file)

    # ==============================
    # LOAD ALL ROLES FROM ITEMS FILE
    # ==============================

    roles = {}
    for role in tree.xpath('//role'):
        if role.text in roles:
            continue
        # Each new role gets the next sequential index/column.
        if not roles:
            indcol = 0
        else:
            indcol = roles.__len__()
        roles[role.text] = Role(name=role.text, index=indcol, column=indcol)
    dct['arrayOfRoles'] = [rv.__dict__() for rk, rv in roles.items()]

    # ====================
    # PARSE EACH BURP ITEM
    # ====================

    counter = 0
    for item in tree.xpath('//item'):
        try:
            # ===================
            # BUILD THE BURP ITEM
            # ===================
            username = item.find('username')
            user_roles = item.xpath('.//role')
            if username is None:
                raise Exception('username element not found')
            if user_roles is None:
                raise Exception('one or role elements not found')

            # unpack the roles: map each role index (as a string) to whether
            # this user holds that role.
            user_roles = [ele.text for ele in user_roles]
            user_roles = {
                rval.index.__str__(): (rkey in user_roles)
                for rkey, rval in roles.items()
            }

            item = Item.from_lxml(item)

            # ============================
            # BUILD COOKIES FROM BURP ITEM
            # ============================

            cookies = SimpleCookie()

            # Load request cookies
            req_cookies = item.request.headers.get('Cookie') or \
                item.request.headers.get('cookie')
            if req_cookies:
                cookies.load(req_cookies)

            # Load response cookies
            res_cookies = item.response.headers.get('Set-Cookie') or \
                item.response.headers.get('set-cookie')
            if res_cookies:
                cookies.load(res_cookies)

            # base64 encode the final output
            cookies = encode(cookies.output(attrs=[], header='', sep=';'))

            dct['arrayOfUsers'].append({
                'name': username.text,
                'index': counter,
                'tableRow': counter,
                'cookiesBase64': cookies,
                'headersBase64': [],
                'roles': user_roles
            })
        except Exception as e:
            # Report the failure but keep processing the remaining items.
            esprint(f'Failed to parse item #{counter}: {e}')
        counter += 1

    print(json.dumps(dct, indent=pretty_print))
    return 0
#!/usr/local/bin/python3 from cgitb import enable enable() from os import environ from shelve import open from http.cookies import SimpleCookie result = "" navusername = "" getusername = "" cookie = SimpleCookie() http_cookie_header = environ.get('HTTP_COOKIE') if http_cookie_header: cookie.load(http_cookie_header) if 'sid' in cookie: sid = cookie['sid'].value session_store = open('sessid/sess_' + sid, writeback=False) if session_store.get('authenticated'): getusername = session_store.get('username') if session_store.get('role') == 'admin': navusername = """<div class="drop"><button id="navbarbutton">%s</button><div class="droplinks"><a href="admin.py">Admin</a><a href="personalstats.py">Stats</a><a href="logout.py">Log out</a></div></div>""" % ( getusername) else: navusername = """<div class="drop"><button id="navbarbutton">%s</button><div class="droplinks"><a href="personalstats.py">Stats</a><a href="logout.py">Log out</a></div></div>""" % ( getusername) result = """<h1 class="snake">Pong Game</h1> <p class="snake">The right platform is controled using the Up and Down arrow keys.</p> <main> <canvas height="700px" width="1000px" id="easysinglepong" class="pong"></canvas>
def setUp(self):
    """Prepare each test: stub out message checking, relax filename
    validation for Windows device names, run the base setUp, and start
    with an empty cookie jar."""
    web.Application.check_messages = lambda *a: None
    validate.filename.windows = True  # escape device names
    super(MyHTTPTestCase, self).setUp()
    self.cookie = SimpleCookie()
from bs4 import BeautifulSoup # 自定义headers HEADERS = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36', 'Referer': 'https://consumeprod.alipay.com/record/advanced.htm', 'Host': 'consumeprod.alipay.com', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'Connection': 'keep-alive' } # 将复制到cookies 转换为字典,方便调用 raw_cookies = 'JSESSIONID=RZ13thOM1dM5K05460101"; 中间省略了 one=RZ250AATO/mr4CZ1cRnxgFmVR' cookie = SimpleCookie(raw_cookies) cookies = {i.key: i.value for i in cookie.values()} # 尝试使用面向对象的方式来造爬虫 class Alipay_Bill_Info(object): '''支付宝账单信息''' def __init__(self, headers, cookies): ''' 类的初始化 headers:请求头 cookies: 持久化访问 info_list: 存储账单信息的列表 ''' self.headers = headers
def ZhihuMain(self):
    """Crawl Zhihu's search API for every (tab, keyword) pair configured on
    the instance, accumulate the paged results into DataFrames, and write
    one CSV per tab under ``self.path``.

    Returns the last tab's concatenated DataFrame.
    """
    # Hard-coded session cookie string for the authenticated API.
    cookie_string = '''_xsrf=gvMmi39LtnwK60JM8CYVceN6IatEZdI7; _zap=939df42c-9eca-41db-b5d3-a1527b3a17ff; d_c0="ADCiGnxNzg6PTvbksJWvVmZFpFM3O3KmHSY=|1547142906"; q_c1=dbd4e3fbaeb14e629eb7432289ab8cca|1553082206000|1547142983000; tst=r; capsion_ticket="2|1:0|10:1553081233|14:capsion_ticket|44:MWJhOGQ4NGQyYmNmNGU1NTg4YWI0ZTA1ZDc4NjliYTk=|007d490dae3c26eea075890aec2e8f65b28925856f0fb923480829140dffb007"; tgw_l7_route=4860b599c6644634a0abcd4d10d37251'''
    sc = SimpleCookie(cookie_string)
    cookies = {v.key: v.value for k, v in sc.items()}
    # define headers
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
    }
    # initialize params
    # initialize url
    # initialize log and output lists
    spider_log = []
    keylist = self.keyword
    tablist = self.tab
    for tab in tablist:
        result_dflist = []
        for keyword in keylist:
            url = 'https://api.zhihu.com/search_v3?advert_count=0&correction=1&limit=20&offset=0&q=' + keyword + '&t=' + tab + '&show_all_topics=0'
            numScc = 0
            search_object = []
            # fetch all results for the keyword list
            for i in range(1000000):
                response = requests.get(url, headers=headers,
                                        cookies=cookies)
                response.encoding = 'gzip'
                html = response.text
                if len(json.loads(html)['data']) == 0:
                    # Empty page: this keyword is exhausted.
                    print(
                        f"For tab- {tab},keyword- {keyword}, page {i} finished, {numScc} records"
                    )
                    spider_log.append(
                        f"For tab- {tab},keyword- {keyword}, page {i} finished, {numScc} records"
                    )
                    break
                else:
                    r_dict = json.loads(html)['data']
                    search_object.append(r_dict)
                    numScc = numScc + len(r_dict)
                    if json.loads(html)['paging']['is_end'] == False:
                        # reset url to the next page
                        url = json.loads(html)['paging']['next']
                    else:
                        print(f"Page {i+1} finished, {numScc} records")
                        spider_log.append(
                            f"Page {i+1} finished, {numScc} records")
                    # NOTE(review): indentation reconstructed — the sleep is
                    # assumed to throttle every page fetch; confirm against
                    # the original layout.
                    time.sleep(self.sleeptime)
            # parse into DataFrames at keyword level
            dfs = []
            for one in search_object:
                dfs.append(json_normalize(one))
            result = pd.concat(dfs)
            result['keyword'] = keyword
            result['tab'] = tab
            result.reset_index(drop=True)
            result_dflist.append(result)
        result_df = pd.concat(result_dflist)
        result_df.reset_index(drop=True)
        # write output file to the target path
        lib = self.path + tab + ".csv"
        result_df.to_csv(lib, encoding='utf-8')
    return result_df