def send_request(self, method, req_params=None, auth_params=None,
                 file_params=None, retries=None, timeout=None):
    '''Make an HTTP request to a server method.

    The given method is called with any parameters set in ``req_params``.
    If auth is True, then the request is made with an authenticated session
    cookie.  Note that path parameters should be set by adding onto the
    method, not via ``req_params``.

    :arg method: Method to call on the server.  It's a url fragment that
        comes after the base_url set in __init__().  Note that any
        parameters set as extra path information should be listed here,
        not in ``req_params``.
    :kwarg req_params: dict containing extra parameters to send to the
        server
    :kwarg auth_params: dict containing one or more means of
        authenticating to the server.  Valid entries in this dict are:

        :cookie: **Deprecated** Use ``session_id`` instead.  If both
            ``cookie`` and ``session_id`` are set, only ``session_id``
            will be used.  A ``Cookie.SimpleCookie`` to send as a session
            cookie to the server
        :session_id: Session id to put in a cookie to construct an
            identity for the server
        :username: Username to send to the server
        :password: Password to use with username to send to the server
        :httpauth: If set to ``basic`` then use HTTP Basic Authentication
            to send the username and password to the server.  This may be
            extended in the future to support other httpauth types than
            ``basic``.

        Note that cookie can be sent alone but if one of username or
        password is set the other must as well.  Code can set all of
        these if it wants and all of them will be sent to the server.
        Be careful of sending cookies that do not match with the username
        in this case as the server can decide what to do in this case.
    :kwarg file_params: dict of files where the key is the name of the
        file field used in the remote method and the value is the local
        path of the file to be uploaded.  If you want to pass multiple
        files to a single file field, pass the paths as a list of paths.
    :kwarg retries: if we get an unknown or possibly transient error from
        the server, retry this many times.  Setting this to a negative
        number makes it try forever.  Default to use the :attr:`retries`
        value set on the instance or in :meth:`__init__`.
    :kwarg timeout: A float describing the timeout of the connection.  The
        timeout only affects the connection process itself, not the
        downloading of the response body.  Defaults to the :attr:`timeout`
        value set on the instance or in :meth:`__init__`.
    :returns: If ProxyClient is created with session_as_cookie=True (the
        default), a tuple of session cookie and data from the server.  If
        ProxyClient was created with session_as_cookie=False, a tuple of
        session_id and data instead.
    :rtype: tuple of session information and data from server
    :raises AuthError: if auth_params is malformed or the server rejects
        the credentials (HTTP 401/403)
    :raises ServerError: on timeouts (after retries), HTTP errors >= 400,
        or non-JSON responses
    :raises AppError: if the server reports an application exception via
        the ``exc`` key in its JSON response

    .. versionchanged:: 0.3.17
        No longer send tg_format=json parameter.  We rely solely on the
        Accept: application/json header now.
    .. versionchanged:: 0.3.21
        * Return data as a Bunch instead of a DictContainer
        * Add file_params to allow uploading files
    .. versionchanged:: 0.3.33
        Added the timeout kwarg
    '''
    self.log.debug('proxyclient.send_request: entered')

    # parameter mangling
    file_params = file_params or {}

    # Check whether we need to authenticate for this request
    session_id = None
    username = None
    password = None
    if auth_params:
        if 'session_id' in auth_params:
            session_id = auth_params['session_id']
        elif 'cookie' in auth_params:
            warnings.warn(
                'Giving a cookie to send_request() to'
                ' authenticate is deprecated and will be removed in 0.4.'
                ' Please port your code to use session_id instead.',
                DeprecationWarning, stacklevel=2)
            session_id = auth_params['cookie'].output(
                attrs=[], header='').strip()
        if 'username' in auth_params and 'password' in auth_params:
            username = auth_params['username']
            password = auth_params['password']
        elif 'username' in auth_params or 'password' in auth_params:
            raise AuthError('username and password must both be set in'
                            ' auth_params')
        if not (session_id or username):
            raise AuthError(
                'No known authentication methods'
                ' specified: set "cookie" in auth_params or set both'
                ' username and password in auth_params')

    # urljoin is slightly different than os.path.join().  Make sure method
    # will work with it.
    method = method.lstrip('/')
    # And join to make our url.
    url = urljoin(self.base_url, quote(method))

    data = None  # decoded JSON via json.load()

    # Set standard headers
    headers = {
        'User-agent': self.useragent,
        'Accept': 'application/json',
    }

    # Open the files to upload, replacing the path values with open file
    # objects in place.
    # BUGFIX: iterate over .items() -- iterating the bare dict yields only
    # the keys, so the (field_name, local_file_name) unpacking raised a
    # ValueError whenever file_params was non-empty.  Snapshot with list()
    # so mutating the dict during iteration is safe.
    # NOTE(review): the docstring allows a list of paths per field, but
    # only single path values are handled here -- confirm whether list
    # support is still required by callers.
    for field_name, local_file_name in list(file_params.items()):
        file_params[field_name] = open(local_file_name, 'rb')

    cookies = requests.cookies.RequestsCookieJar()
    # If we have a session_id, send it
    if session_id:
        # Anytime the session_id exists, send it so that visit tracking
        # works.  Will also authenticate us if there's a need.  Note that
        # there's no need to set other cookie attributes because this is a
        # cookie generated client-side.
        cookies.set(self.session_name, session_id)

    complete_params = req_params or {}
    if session_id:
        # Add the csrf protection token
        token = sha_constructor(to_bytes(session_id))
        complete_params.update({'_csrf_token': token.hexdigest()})

    auth = None
    if username and password:
        if auth_params.get('httpauth', '').lower() == 'basic':
            # HTTP Basic auth login
            auth = (username, password)
        else:
            # TG login
            # Adding this to the request data prevents it from being
            # logged by apache.
            # NOTE(review): the 'login' value of '******' looks like a
            # debug-masking artifact; TurboGears login forms usually send
            # a literal button value -- confirm against the server.
            complete_params.update({
                'user_name': to_bytes(username),
                'password': to_bytes(password),
                'login': '******',
            })

    # If debug, give people our debug info
    self.log.debug('Creating request %(url)s' % {'url': to_bytes(url)})
    self.log.debug('Headers: %(header)s' %
                   {'header': to_bytes(headers, nonstring='simplerepr')})
    if self.debug and complete_params:
        debug_data = copy.deepcopy(complete_params)
        if 'password' in debug_data:
            debug_data['password'] = '******'
        self.log.debug('Data: %r' % debug_data)

    if retries is None:
        retries = self.retries

    if timeout is None:
        timeout = self.timeout

    num_tries = 0
    try:
        while True:
            try:
                response = requests.post(
                    url,
                    data=complete_params,
                    # BUGFIX: the files opened above were previously never
                    # attached to the request; pass them so uploads
                    # actually happen (an empty dict is falsy, so requests
                    # skips multipart encoding when there are no files).
                    files=file_params,
                    cookies=cookies,
                    headers=headers,
                    auth=auth,
                    verify=not self.insecure,
                    timeout=timeout,
                )
            except (requests.Timeout, requests.exceptions.SSLError) as e:
                if isinstance(e, requests.exceptions.SSLError):
                    # requests can wrap a read timeout as:
                    # requests.exceptions.SSLError(
                    #     urllib3.exceptions.SSLError(
                    #         ssl.SSLError('The read operation timed out')))
                    # We match the inner exception by class name/module
                    # because requests may bundle urllib3 (pypi download)
                    # or use the distro's unbundled copy, and those are
                    # different classes.  Anything that isn't this shape
                    # is a genuine SSL error and must propagate.
                    if not (e.args and
                            e.args[0].__class__.__name__ == 'SSLError' and
                            e.args[0].__class__.__module__.endswith(
                                'urllib3.exceptions') and
                            e.args[0].args and
                            isinstance(e.args[0].args[0], ssl.SSLError) and
                            e.args[0].args[0].args and
                            'timed out' in e.args[0].args[0].args[0]):
                        # We're only interested in timeouts here
                        raise
                self.log.debug('Request timed out')
                if retries < 0 or num_tries < retries:
                    num_tries += 1
                    self.log.debug('Attempt #%(try)s failed' %
                                   {'try': num_tries})
                    time.sleep(0.5)
                    continue
                # Fail and raise an error
                # Raising our own exception protects the user from the
                # implementation detail of requests vs pycurl vs urllib
                raise ServerError(
                    url, -1,
                    'Request timed out after %s seconds' % timeout)

            # requests guesses the encoding with chardet, which can take
            # an extraordinarily long time on large bodies (upwards of 30
            # minutes for some FAS queries).  All of our servers return
            # utf-8, so short-circuit the guess.
            response.encoding = 'utf-8'

            # Check for auth failures.
            # Note: old TG apps returned 403 Forbidden on authentication
            # failures; updated apps return 401 Unauthorized.  We need to
            # accept both until all apps are updated to return 401.
            http_status = response.status_code
            if http_status in (401, 403):
                # Wrong username or password
                self.log.debug('Authentication failed logging in')
                raise AuthError(
                    'Unable to log into server. Invalid'
                    ' authentication tokens. Send new username and password')
            elif http_status >= 400:
                if retries < 0 or num_tries < retries:
                    # Retry the request
                    num_tries += 1
                    self.log.debug('Attempt #%(try)s failed' %
                                   {'try': num_tries})
                    time.sleep(0.5)
                    continue
                # Fail and raise an error
                try:
                    msg = httplib.responses[http_status]
                except (KeyError, AttributeError):
                    msg = 'Unknown HTTP Server Response'
                raise ServerError(url, http_status, msg)
            # Successfully returned data
            break
    finally:
        # BUGFIX: close the upload files we opened; they were previously
        # leaked.  Best-effort: a close failure must not mask the real
        # outcome of the request.
        for open_file in file_params.values():
            try:
                open_file.close()
            except Exception:
                pass

    # In case the server returned a new session cookie to us
    new_session = response.cookies.get(self.session_name, '')

    try:
        data = response.json
        # Compatibility with newer python-requests, where .json is a
        # method rather than a property.
        if callable(data):
            data = data()
    except ValueError as e:
        # The response wasn't JSON data
        raise ServerError(
            url, http_status, 'Error returned from'
            ' json module while processing %(url)s: %(err)s' %
            {'url': to_bytes(url), 'err': to_bytes(e)})

    if 'exc' in data:
        # The server reported an application-level exception.
        name = data.pop('exc')
        message = data.pop('tg_flash')
        raise AppError(name=name, message=message, extras=data)

    # If we need to return a cookie for deprecated code, convert it here
    if self.session_as_cookie:
        cookie = Cookie.SimpleCookie()
        cookie[self.session_name] = new_session
        new_session = cookie

    self.log.debug('proxyclient.send_request: exited')
    data = munchify(data)
    return new_session, data
def __init__(self, cookie_path=settings.session["DEFAULT_COOKIE_PATH"], cookie_name=settings.session["COOKIE_NAME"], session_expire_time=settings.session["SESSION_EXPIRE_TIME"], clean_check_percent=settings.session["CLEAN_CHECK_PERCENT"], integrate_flash=settings.session["INTEGRATE_FLASH"], check_ip=settings.session["CHECK_IP"], check_user_agent=settings.session["CHECK_USER_AGENT"], set_cookie_expires=settings.session["SET_COOKIE_EXPIRES"], session_token_ttl=settings.session["SESSION_TOKEN_TTL"], last_activity_update=settings.session["UPDATE_LAST_ACTIVITY"], writer=settings.session["WRITER"]): """ Initializer Args: cookie_name: The name for the session cookie stored in the browser. session_expire_time: The amount of time between requests before the session expires. clean_check_percent: The percentage of requests the will fire off a cleaning routine that deletes stale session data. integrate_flash: If appengine-utilities flash utility should be integrated into the session object. check_ip: If browser IP should be used for session validation check_user_agent: If the browser user agent should be used for sessoin validation. set_cookie_expires: True adds an expires field to the cookie so it saves even if the browser is closed. session_token_ttl: Number of sessions a session token is valid for before it should be regenerated. """ self.cookie_path = cookie_path self.cookie_name = cookie_name self.session_expire_time = session_expire_time self.integrate_flash = integrate_flash self.check_user_agent = check_user_agent self.check_ip = check_ip self.set_cookie_expires = set_cookie_expires self.session_token_ttl = session_token_ttl self.last_activity_update = last_activity_update self.writer = writer # make sure the page is not cached in the browser print self.no_cache_headers() # Check the cookie and, if necessary, create a new one. 
self.cache = {} string_cookie = str(os.environ.get(u"HTTP_COOKIE", u"")) self.cookie = Cookie.SimpleCookie() self.output_cookie = Cookie.SimpleCookie() self.cookie.load(string_cookie) try: self.cookie_vals = \ simplejson.loads(self.cookie["%s_data" % (self.cookie_name)].value) # sync self.cache and self.cookie_vals which will make those # values available for all gets immediately. for k in self.cookie_vals: self.cache[k] = self.cookie_vals[k] # sync the input cookie with the output cookie self.output_cookie["%s_data" % (self.cookie_name)] = \ self.cookie["%s_data" % (self.cookie_name)] except: self.cookie_vals = {} if writer == "cookie": pass else: self.sid = None new_session = True # do_put is used to determine if a datastore write should # happen on this request. do_put = False # check for existing cookie if self.cookie.get(cookie_name): self.sid = self.cookie[cookie_name].value # The following will return None if the sid has expired. self.session = _AppEngineUtilities_Session.get_session(self) if self.session: new_session = False if new_session: # start a new session self.session = _AppEngineUtilities_Session() self.session.put() self.sid = self.new_sid() if u"HTTP_USER_AGENT" in os.environ: self.session.ua = os.environ[u"HTTP_USER_AGENT"] else: self.session.ua = None if u"REMOTE_ADDR" in os.environ: self.session.ip = os.environ["REMOTE_ADDR"] else: self.session.ip = None self.session.sid = [self.sid] # do put() here to get the session key self.session.put() else: # check the age of the token to determine if a new one # is required duration = datetime.timedelta(seconds=self.session_token_ttl) session_age_limit = datetime.datetime.now() - duration if self.session.last_activity < session_age_limit: self.sid = self.new_sid() if len(self.session.sid) > 2: self.session.sid.remove(self.session.sid[0]) self.session.sid.append(self.sid) do_put = True else: self.sid = self.session.sid[-1] # check if last_activity needs updated ula = 
datetime.timedelta(seconds=self.last_activity_update) if datetime.datetime.now() > self.session.last_activity + \ ula: do_put = True self.output_cookie[cookie_name] = self.sid self.output_cookie[cookie_name]["path"] = cookie_path if self.set_cookie_expires: self.output_cookie[cookie_name]["expires"] = \ self.session_expire_time self.cache[u"sid"] = self.sid if do_put: if self.sid != None or self.sid != u"": self.session.put() if self.set_cookie_expires: if not self.output_cookie.has_key("%s_data" % (cookie_name)): self.output_cookie["%s_data" % (cookie_name)] = u"" self.output_cookie["%s_data" % (cookie_name)]["expires"] = \ self.session_expire_time print self.output_cookie.output() # fire up a Flash object if integration is enabled if self.integrate_flash: import flash self.flash = flash.Flash(cookie=self.cookie) # randomly delete old stale sessions in the datastore (see # CLEAN_CHECK_PERCENT variable) if random.randint(1, 100) < clean_check_percent: self._clean_old_sessions()
# NOTE(review): this fragment begins mid-script (the two calls below look
# like the tail of an earlier code path) and ends inside an unterminated
# HTML string -- the surrounding context is not visible here.
stdStuff.objListToFile(allUsers, stdStuff.directory, stdStuff.userFile)
stdStuff.setCounter(counter)


def authenticate(u,ID,IP):
    # Check the login log for a matching username/ID/IP triple.
    # NOTE(review): parameter 'u' is never used; the comparison below reads
    # the global 'username' instead.  Callers happen to pass the global, so
    # behavior is unchanged today, but this is almost certainly a bug --
    # 'a[0] == u' was probably intended.
    # NOTE(review): the log file handle is never closed, and only the first
    # matching username decides the result.
    loggedIn = open(stdStuff.directory + stdStuff.logFile,'r').read().split('\n')
    loggedIn = [each.split(',') for each in loggedIn]
    loggedIn.remove([''])
    for a in loggedIn:
        if a[0] == username:
            return a[1]==str(ID) and a[2]==IP
    return False

# Parse the browser cookie (if any) and, when the stored credentials
# authenticate, render the log-out form.
c = None
if 'HTTP_COOKIE' in os.environ:
    cookie_string=os.environ.get('HTTP_COOKIE')
    c = Cookie.SimpleCookie()
    c.load(cookie_string)
    ##print all the data in the cookie
    #body+= "<h1>cookie data</h1>"
    #for each in c:
    #    body += each+":"+str(c[each].value)+"<br>"
    # NOTE(review): the check below is assumed to be nested inside this
    # 'if' -- if it were at top level, 'c' could be None and
    # "'username' in c" would raise a TypeError.  Confirm against the
    # original indentation.
    if 'username' in c and 'ID' in c:
        username = c['username'].value
        ID = c['ID'].value
        IP = os.environ['REMOTE_ADDR']
        if authenticate(username,ID,IP):
            body += """<form method="GET" action="homepage.py"> <input name="logOut" type="submit" value="Log out">
def main():
    """CGI entry point: emit the user's recipe list (or suggestions) as an
    HTML table fragment.

    Reads the session id from the 'gh_sid' cookie or form field, validates
    the 'galaxy'/'profession' form inputs, and prints the resulting rows.
    """
    form = cgi.FieldStorage()
    # Get Cookies
    useCookies = 1
    cookies = Cookie.SimpleCookie()
    try:
        cookies.load(os.environ['HTTP_COOKIE'])
    except KeyError:
        useCookies = 0

    if useCookies:
        try:
            currentUser = cookies['userID'].value
        except KeyError:
            currentUser = ''
        try:
            sid = cookies['gh_sid'].value
        except KeyError:
            # Fall back to the form field when the cookie is absent.
            sid = form.getfirst('gh_sid', '')
    else:
        currentUser = ''
        sid = form.getfirst('gh_sid', '')

    # NOTE(review): listFormat is read but never used in the visible code.
    listFormat = form.getfirst('listFormat', '')
    listType = form.getfirst('listType', '')
    galaxy = form.getfirst('galaxy', '')
    profession = form.getfirst('profession', '')

    # Get a session; a non-empty result means the sid is valid and names
    # the logged-in user (it overrides the userID cookie).
    logged_state = 0
    sess = dbSession.getSession(sid)
    if (sess != ''):
        logged_state = 1
        currentUser = sess

    # Main program
    # Later checks overwrite earlier ones, so only the last failing check's
    # message survives.
    errstr = ''
    tmpStr = ''
    if not galaxy.isdigit():
        errstr = 'Error: You must provide a valid galaxy id.'
    if profession != '' and not profession.isdigit():
        errstr = 'Error: That is not a valid profession id.'
    if logged_state != 1:
        errstr = 'Error: You must be logged in to get your recipe list.'
    # NOTE(review): errstr is never printed in the visible code -- on any
    # validation failure this script emits an empty body.  Confirm whether
    # the full file reports errstr somewhere.

    conn = dbShared.ghConn()
    cursor = conn.cursor()
    if (cursor and errstr == ''):
        headStr = '<table width="100%">'
        if listType == 'suggest':
            rl = getSuggestedRecipes(conn, currentUser, galaxy, profession)
            if len(rl) > 0:
                for r in rl:
                    tmpStr += r.getRow(listType)
            else:
                tmpStr = '<tr><td>No suggestions at this time. Try adding more to your inventory.</td></tr>'
        else:
            # SECURITY(review): SQL is built by string concatenation.
            # 'galaxy' is isdigit()-validated above, but 'currentUser'
            # (session value) is interpolated inside quotes unescaped --
            # this should be a parameterized query (PEP 249 placeholders).
            sqlStr = 'SELECT recipeID, userID, schematicID, recipeName, (SELECT imageName FROM tSchematicImages img WHERE img.schematicID=tRecipe.schematicID AND img.imageType=1) AS schemImage FROM tRecipe WHERE userID="' + currentUser + '" AND (galaxy=' + str(
                galaxy) + ' OR galaxy IS NULL) ORDER BY recipeName;'
            cursor.execute(sqlStr)
            row = cursor.fetchone()
            while (row != None):
                # Column 4 is the schematic image; fall back to a stock
                # image when NULL.
                if (row[4] != None):
                    schemImageName = row[4]
                else:
                    schemImageName = 'none.jpg'
                r = ghObjectRecipe.schematicRecipe()
                r.recipeID = row[0]
                r.schematicID = row[2]
                r.recipeName = row[3]
                r.schematicImage = schemImageName
                # SECURITY(review): r.schematicID (from the database) is
                # concatenated into the next query unescaped as well.
                sqlStr = 'SELECT si.ingredientName, ingredientResource, ingredientObject, ingredientContribution, ingredientQuality FROM tSchematicIngredients si LEFT JOIN (SELECT ingredientName, ingredientResource, ingredientQuality FROM tRecipeIngredients WHERE recipeID=' + str(
                    r.recipeID ) + ') ri ON si.ingredientName = ri.ingredientName WHERE schematicID="' + r.schematicID + '" and ingredientType = 0 ORDER BY ingredientQuantity DESC, si.ingredientName;'
                ingCursor = conn.cursor()
                ingCursor.execute(sqlStr)
                ingRow = ingCursor.fetchone()
                while (ingRow != None):
                    ri = ghObjectRecipe.recipeIngredient()
                    ri.ingredientObject = ingRow[2]
                    ri.ingredientResource = ingRow[1]
                    ri.ingredientName = ingRow[0]
                    # ingRow[3] is the ingredientContribution column.
                    ri.ingredientAmount = ingRow[3]
                    ri.resourceQuality = ingRow[4]
                    r.recipeIngredients.append(ri)
                    ingRow = ingCursor.fetchone()
                ingCursor.close()
                tmpStr += r.getRow('normal', sid)
                row = cursor.fetchone()
        tmpStr = headStr + tmpStr + '</table>'
        cursor.close()
    conn.close()
    # CGI response: header, blank line (the '\n'), then the table body.
    print 'Content-type: text/html\n'
    print tmpStr
def print_class_name(x):
    # Emit the class's display name (second element of the row/tuple) as an
    # HTML heading.
    print "<h2>" + str(x[1]) + "</h2>"


# --- CGI script body ---------------------------------------------------
# NOTE(review): this fragment is truncated -- it ends inside an opened
# triple-quoted string, so the tail of the script is not visible here.
cgitb.enable()
class_form = cgi.FieldStorage()
conn = sqlite3.connect('classes.db')
c = conn.cursor()
# Ensure the backing table exists before querying it.
c.execute('CREATE TABLE IF NOT EXISTS classes(id varchar(100) primary key, name varchar(100), userid varchar(100), days varchar(100), time varchar(10), color varchar(10), instructor varchar(100), notes varchar(200))')
try:
    # Identify the user from the 'userid' cookie; missing cookie raises
    # KeyError (SimpleCookie lookup), handled below.
    cookie_string = os.environ.get('HTTP_COOKIE')
    cookie = Cookie.SimpleCookie(cookie_string)
    userid = str(cookie['userid'].value)
except KeyError:
    # No cookie: tell the user to enable cookies and stop the script.
    print
    print "<h1>Please Enable Cookies to continue!</h1>"
    print "<a href='index.html'>Click here to Log In</a>"
    quit()
# Blank line terminates the CGI headers before the body.
print
# Parameterized query -- good: userid is bound, not concatenated.
currentEntries = c.execute('SELECT * FROM classes WHERE userid=?', [userid])
# AND username = username
data = c.fetchall()
if len(data)==0:
    print '''
import sys import os import Cookie import sys sys.stderr = sys.stdout data = "No Cookie" cookie_string = "" error = "" if 'HTTP_COOKIE' in os.environ: cookie_string = os.environ.get('HTTP_COOKIE') cookieJar = Cookie.SimpleCookie() cookieJar.load(cookie_string) if 'color' in cookieJar: data = cookieJar['color'].value else: error = "No cookie named color" else: error = "No Cookies Sent" buf = "The raw cookie header is " + cookie_string buf = buf + "\nThe value of the color cookie is " + data buf = buf + "\n" + error print "Content-type: text/plain" print 'Set-cookie: color=' + data + '.1' print 'Content-length: ' + str(len(buf))
def __init__(self, *args):
    """Create an empty cookie jar, then run the standard
    AsyncHTTPTestCase initialisation with the given arguments."""
    # Fresh jar per test-case instance so no cookies carry over.
    self.cookies = Cookie.SimpleCookie()
    tornado.testing.AsyncHTTPTestCase.__init__(self, *args)
class Request(object): """An HTTP request. This object represents the metadata of an HTTP request message; that is, it contains attributes which describe the environment in which the request URL, headers, and body were sent (if you want tools to interpret the headers and body, those are elsewhere, mostly in Tools). This 'metadata' consists of socket data, transport characteristics, and the Request-Line. This object also contains data regarding the configuration in effect for the given URL, and the execution plan for generating a response. """ __metaclass__ = cherrypy._AttributeDocstrings prev = None prev__doc = """ The previous Request object (if any). This should be None unless we are processing an InternalRedirect.""" # Conversation/connection attributes local = http.Host("127.0.0.1", 80) local__doc = \ "An http.Host(ip, port, hostname) object for the server socket." remote = http.Host("127.0.0.1", 1111) remote__doc = \ "An http.Host(ip, port, hostname) object for the client socket." scheme = "http" scheme__doc = """ The protocol used between client and server. In most cases, this will be either 'http' or 'https'.""" server_protocol = "HTTP/1.1" server_protocol__doc = """ The HTTP version for which the HTTP server is at least conditionally compliant.""" base = "" base__doc = """The (scheme://host) portion of the requested URL.""" # Request-Line attributes request_line = "" request_line__doc = """ The complete Request-Line received from the client. This is a single string consisting of the request method, URI, and protocol version (joined by spaces). Any final CRLF is removed.""" method = "GET" method__doc = """ Indicates the HTTP method to be performed on the resource identified by the Request-URI. Common methods include GET, HEAD, POST, PUT, and DELETE. CherryPy allows any extension method; however, various HTTP servers and gateways may restrict the set of allowable methods. 
CherryPy applications SHOULD restrict the set (on a per-URI basis).""" query_string = "" query_string__doc = """ The query component of the Request-URI, a string of information to be interpreted by the resource. The query portion of a URI follows the path component, and is separated by a '?'. For example, the URI 'http://www.cherrypy.org/wiki?a=3&b=4' has the query component, 'a=3&b=4'.""" protocol = (1, 1) protocol__doc = """The HTTP protocol version corresponding to the set of features which should be allowed in the response. If BOTH the client's request message AND the server's level of HTTP compliance is HTTP/1.1, this attribute will be the tuple (1, 1). If either is 1.0, this attribute will be the tuple (1, 0). Lower HTTP protocol versions are not explicitly supported.""" params = {} params__doc = """ A dict which combines query string (GET) and request entity (POST) variables. This is populated in two stages: GET params are added before the 'on_start_resource' hook, and POST params are added between the 'before_request_body' and 'before_handler' hooks.""" # Message attributes header_list = [] header_list__doc = """ A list of the HTTP request headers as (name, value) tuples. In general, you should use request.headers (a dict) instead.""" headers = http.HeaderMap() headers__doc = """ A dict-like object containing the request headers. Keys are header names (in Title-Case format); however, you may get and set them in a case-insensitive manner. That is, headers['Content-Type'] and headers['content-type'] refer to the same value. Values are header values (decoded according to RFC 2047 if necessary). See also: http.HeaderMap, http.HeaderElement.""" cookie = Cookie.SimpleCookie() cookie__doc = """See help(Cookie).""" rfile = None rfile__doc = """ If the request included an entity (body), it will be available as a stream in this attribute. 
However, the rfile will normally be read for you between the 'before_request_body' hook and the 'before_handler' hook, and the resulting string is placed into either request.params or the request.body attribute. You may disable the automatic consumption of the rfile by setting request.process_request_body to False, either in config for the desired path, or in an 'on_start_resource' or 'before_request_body' hook. WARNING: In almost every case, you should not attempt to read from the rfile stream after CherryPy's automatic mechanism has read it. If you turn off the automatic parsing of rfile, you should read exactly the number of bytes specified in request.headers['Content-Length']. Ignoring either of these warnings may result in a hung request thread or in corruption of the next (pipelined) request. """ process_request_body = True process_request_body__doc = """ If True, the rfile (if any) is automatically read and parsed, and the result placed into request.params or request.body.""" methods_with_bodies = ("POST", "PUT") methods_with_bodies__doc = """ A sequence of HTTP methods for which CherryPy will automatically attempt to read a body from the rfile.""" body = None body__doc = """ If the request Content-Type is 'application/x-www-form-urlencoded' or multipart, this will be None. Otherwise, this will contain the request entity body as a string; this value is set between the 'before_request_body' and 'before_handler' hooks (assuming that process_request_body is True).""" body_params = None body_params__doc = """ If the request Content-Type is 'application/x-www-form-urlencoded' or multipart, this will be a dict of the params pulled from the entity body; that is, it will be the portion of request.params that come from the message body (sometimes called "POST params", although they can be sent with various HTTP method verbs). 
This value is set between the 'before_request_body' and 'before_handler' hooks (assuming that process_request_body is True).""" # Dispatch attributes dispatch = cherrypy.dispatch.Dispatcher() dispatch__doc = """ The object which looks up the 'page handler' callable and collects config for the current request based on the path_info, other request attributes, and the application architecture. The core calls the dispatcher as early as possible, passing it a 'path_info' argument. The default dispatcher discovers the page handler by matching path_info to a hierarchical arrangement of objects, starting at request.app.root. See help(cherrypy.dispatch) for more information.""" script_name = "" script_name__doc = """ The 'mount point' of the application which is handling this request. This attribute MUST NOT end in a slash. If the script_name refers to the root of the URI, it MUST be an empty string (not "/"). """ path_info = "/" path_info__doc = """ The 'relative path' portion of the Request-URI. This is relative to the script_name ('mount point') of the application which is handling this request.""" login = None login__doc = """ When authentication is used during the request processing this is set to 'False' if it failed and to the 'username' value if it succeeded. The default 'None' implies that no authentication happened.""" # Note that cherrypy.url uses "if request.app:" to determine whether # the call is during a real HTTP request or not. So leave this None. app = None app__doc = \ """The cherrypy.Application object which is handling this request.""" handler = None handler__doc = """ The function, method, or other callable which CherryPy will call to produce the response. The discovery of the handler and the arguments it will receive are determined by the request.dispatch object. 
By default, the handler is discovered by walking a tree of objects starting at request.app.root, and is then passed all HTTP params (from the query string and POST body) as keyword arguments.""" toolmaps = {} toolmaps__doc = """ A nested dict of all Toolboxes and Tools in effect for this request, of the form: {Toolbox.namespace: {Tool.name: config dict}}.""" config = None config__doc = """ A flat dict of all configuration entries which apply to the current request. These entries are collected from global config, application config (based on request.path_info), and from handler config (exactly how is governed by the request.dispatch object in effect for this request; by default, handler config can be attached anywhere in the tree between request.app.root and the final handler, and inherits downward).""" is_index = None is_index__doc = """ This will be True if the current request is mapped to an 'index' resource handler (also, a 'default' handler if path_info ends with a slash). The value may be used to automatically redirect the user-agent to a 'more canonical' URL which either adds or removes the trailing slash. See cherrypy.tools.trailing_slash.""" hooks = HookMap(hookpoints) hooks__doc = """ A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}. Each key is a str naming the hook point, and each value is a list of hooks which will be called at that hook point during this request. The list of hooks is generally populated as early as possible (mostly from Tools specified in config), but may be extended at any time. See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.""" error_response = cherrypy.HTTPError(500).set_response error_response__doc = """ The no-arg callable which will handle unexpected, untrapped errors during request processing. 
This is not used for expected exceptions (like NotFound, HTTPError, or HTTPRedirect) which are raised in response to expected conditions (those should be customized either via request.error_page or by overriding HTTPError.set_response). By default, error_response uses HTTPError(500) to return a generic error response to the user-agent."""

    # NOTE(review): this span is the interior of a (CherryPy-style) Request
    # class whose "class" statement lies above the visible region. The
    # attr / attr__doc pairs below follow the file's _AttributeDocstrings
    # convention: the *__doc strings are consumed at class-creation time,
    # so their contents are runtime values and are left untouched.
    error_page = {}
    error_page__doc = """ A dict of {error code: response filename or callable} pairs. The error code must be an int representing a given HTTP error code, or the string 'default', which will be used if no matching entry is found for a given numeric code. If a filename is provided, the file should contain a Python string- formatting template, and can expect by default to receive format values with the mapping keys %(status)s, %(message)s, %(traceback)s, and %(version)s. The set of format mappings can be extended by overriding HTTPError.set_response. If a callable is provided, it will be called by default with keyword arguments 'status', 'message', 'traceback', and 'version', as for a string-formatting template. The callable must return a string which will be set to response.body. It may also override headers or perform any other processing. If no entry is given for an error code, and no 'default' entry exists, a default template will be used. """

    show_tracebacks = True
    show_tracebacks__doc = """ If True, unexpected errors encountered during request processing will include a traceback in the response body."""

    throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect)
    throws__doc = \
        """The sequence of exceptions which Request.run does not trap."""

    throw_errors = False
    throw_errors__doc = """ If True, Request.run will not trap any errors (except HTTPRedirect and HTTPError, which are more properly called 'exceptions', not errors)."""

    closed = False
    closed__doc = """ True once the close method has been called, False otherwise."""

    stage = None
    stage__doc = """ A string containing the stage reached in the request-handling process. This is useful when debugging a live server with hung requests."""

    # Maps each config namespace to the handler that consumes its entries.
    namespaces = _cpconfig.NamespaceSet(
        **{
            "hooks": hooks_namespace,
            "request": request_namespace,
            "response": response_namespace,
            "error_page": error_page_namespace,
            "tools": cherrypy.tools,
        })

    def __init__(self, local_host, remote_host, scheme="http", server_protocol="HTTP/1.1"):
        """Populate a new Request object.

        local_host should be an http.Host object with the server info.
        remote_host should be an http.Host object with the client info.
        scheme should be a string, either "http" or "https".
        """
        self.local = local_host
        self.remote = remote_host
        self.scheme = scheme
        self.server_protocol = server_protocol
        self.closed = False
        # Put a *copy* of the class error_page into self.
        self.error_page = self.error_page.copy()
        # Put a *copy* of the class namespaces into self.
        self.namespaces = self.namespaces.copy()
        self.stage = None

    def close(self):
        """Run cleanup code. (Core)

        Idempotent: the 'on_end_request' hooks run only on the first call.
        """
        if not self.closed:
            self.closed = True
            self.stage = 'on_end_request'
            self.hooks.run('on_end_request')
            self.stage = 'close'

    def run(self, method, path, query_string, req_protocol, headers, rfile):
        """Process the Request. (Core)

        method, path, query_string, and req_protocol should be pulled
        directly from the Request-Line (e.g. "GET /path?key=val HTTP/1.0").
        path should be %XX-unquoted, but query_string should not be.
        headers should be a list of (name, value) tuples.
        rfile should be a file-like object containing the HTTP request entity.

        When run() is done, the returned object should have 3 attributes:
          status, e.g. "200 OK"
          header_list, a list of (name, value) tuples
          body, an iterable yielding strings

        Consumer code (HTTP servers) should then access these response
        attributes to build the outbound stream.
        """
        self.stage = 'run'
        try:
            self.error_response = cherrypy.HTTPError(500).set_response
            self.method = method
            path = path or "/"
            self.query_string = query_string or ''
            # Compare request and server HTTP protocol versions, in case our
            # server does not support the requested protocol. Limit our output
            # to min(req, server). We want the following output:
            #     request    server     actual written   supported response
            #     protocol   protocol  response protocol    feature set
            # a     1.0        1.0           1.0                1.0
            # b     1.0        1.1           1.1                1.0
            # c     1.1        1.0           1.0                1.0
            # d     1.1        1.1           1.1                1.1
            # Notice that, in (b), the response will be "HTTP/1.1" even though
            # the client only understands 1.0. RFC 2616 10.5.6 says we should
            # only return 505 if the _major_ version is different.
            rp = int(req_protocol[5]), int(req_protocol[7])
            sp = int(self.server_protocol[5]), int(self.server_protocol[7])
            self.protocol = min(rp, sp)
            # Rebuild first line of the request (e.g. "GET /path HTTP/1.0").
            url = path
            if query_string:
                url += '?' + query_string
            self.request_line = '%s %s %s' % (method, url, req_protocol)
            self.header_list = list(headers)
            self.rfile = rfile
            self.headers = http.HeaderMap()
            self.cookie = Cookie.SimpleCookie()
            self.handler = None
            # path_info should be the path from the
            # app root (script_name) to the handler.
            self.script_name = self.app.script_name
            self.path_info = pi = path[len(self.script_name):]
            self.stage = 'respond'
            self.respond(pi)
        except self.throws:
            raise
        except:
            if self.throw_errors:
                raise
            else:
                # Failure in setup, error handler or finalize. Bypass them.
                # Can't use handle_error because we may not have hooks yet.
                cherrypy.log(traceback=True, severity=40)
                if self.show_tracebacks:
                    body = format_exc()
                else:
                    body = ""
                r = bare_error(body)
                response = cherrypy.response
                response.status, response.header_list, response.body = r
        if self.method == "HEAD":
            # HEAD requests MUST NOT return a message-body in the response.
            cherrypy.response.body = []
        cherrypy.log.access()
        if cherrypy.response.timed_out:
            raise cherrypy.TimeoutError()
        return cherrypy.response

    def respond(self, path_info):
        """Generate a response for the resource at self.path_info. (Core)

        Walks the hook pipeline stage by stage (process_headers,
        get_resource, body processing, handler, finalize), recording each
        stage name in self.stage for live debugging.
        """
        try:
            try:
                try:
                    if self.app is None:
                        raise cherrypy.NotFound()
                    # Get the 'Host' header, so we can HTTPRedirect properly.
                    self.stage = 'process_headers'
                    self.process_headers()
                    # Make a copy of the class hooks
                    self.hooks = self.__class__.hooks.copy()
                    self.toolmaps = {}
                    self.stage = 'get_resource'
                    self.get_resource(path_info)
                    self.namespaces(self.config)
                    self.stage = 'on_start_resource'
                    self.hooks.run('on_start_resource')
                    if self.process_request_body:
                        if self.method not in self.methods_with_bodies:
                            self.process_request_body = False
                    self.stage = 'before_request_body'
                    self.hooks.run('before_request_body')
                    if self.process_request_body:
                        self.process_body()
                    self.stage = 'before_handler'
                    self.hooks.run('before_handler')
                    if self.handler:
                        self.stage = 'handler'
                        cherrypy.response.body = self.handler()
                    self.stage = 'before_finalize'
                    self.hooks.run('before_finalize')
                    cherrypy.response.finalize()
                except (cherrypy.HTTPRedirect, cherrypy.HTTPError), inst:
                    # Expected "exceptions": let them build the response,
                    # then finalize as usual.
                    inst.set_response()
                    self.stage = 'before_finalize (HTTPError)'
                    self.hooks.run('before_finalize')
                    cherrypy.response.finalize()
            finally:
                self.stage = 'on_end_resource'
                self.hooks.run('on_end_resource')
        except self.throws:
            raise
        except:
            if self.throw_errors:
                raise
            self.handle_error()

    def process_headers(self):
        """Parse HTTP header data into Python structures. (Core)"""
        self.params = http.parse_query_string(self.query_string)
        # Process the headers into self.headers
        headers = self.headers
        for name, value in self.header_list:
            # Call title() now (and use dict.__method__(headers))
            # so title doesn't have to be called twice.
            name = name.title()
            value = value.strip()
            # Warning: if there is more than one header entry for cookies (AFAIK,
            # only Konqueror does that), only the last one will remain in headers
            # (but they will be correctly stored in request.cookie).
            if "=?" in value:
                dict.__setitem__(headers, name, http.decode_TEXT(value))
            else:
                dict.__setitem__(headers, name, value)
            # Handle cookies differently because on Konqueror, multiple
            # cookies come on different lines with the same key
            if name == 'Cookie':
                try:
                    self.cookie.load(value)
                except Cookie.CookieError:
                    msg = "Illegal cookie name %s" % value.split('=')[0]
                    raise cherrypy.HTTPError(400, msg)
        if not dict.__contains__(headers, 'Host'):
            # All Internet-based HTTP/1.1 servers MUST respond with a 400
            # (Bad Request) status code to any HTTP/1.1 request message
            # which lacks a Host header field.
            if self.protocol >= (1, 1):
                msg = "HTTP/1.1 requires a 'Host' request header."
                raise cherrypy.HTTPError(400, msg)
        host = dict.get(headers, 'Host')
        if not host:
            host = self.local.name or self.local.ip
        self.base = "%s://%s" % (self.scheme, host)

    def get_resource(self, path):
        """Call a dispatcher (which sets self.handler and .config). (Core)"""
        dispatch = self.dispatch
        # First, see if there is a custom dispatch at this URI. Custom
        # dispatchers can only be specified in app.config, not in _cp_config
        # (since custom dispatchers may not even have an app.root).
        # Walk from the full path up toward "/" looking for the nearest one.
        trail = path or "/"
        while trail:
            nodeconf = self.app.config.get(trail, {})
            d = nodeconf.get("request.dispatch")
            if d:
                dispatch = d
                break
            lastslash = trail.rfind("/")
            if lastslash == -1:
                break
            elif lastslash == 0 and trail != "/":
                trail = "/"
            else:
                trail = trail[:lastslash]
        # dispatch() should set self.handler and self.config
        dispatch(path)

    def process_body(self):
        """Convert request.rfile into request.params (or request.body). (Core)"""
        if not self.headers.get("Content-Length", ""):
            # No Content-Length header supplied (or it's 0).
            # If we went ahead and called cgi.FieldStorage, it would hang,
            # since it cannot determine when to stop reading from the socket.
            # See http://www.cherrypy.org/ticket/493.
            # See also http://www.cherrypy.org/ticket/650.
            # Note also that we expect any HTTP server to have decoded
            # any message-body that had a transfer-coding, and we expect
            # the HTTP server to have supplied a Content-Length header
            # which is valid for the decoded entity-body.
            raise cherrypy.HTTPError(411)
        # If the headers are missing "Content-Type" then add one
        # with an empty value. This ensures that FieldStorage
        # won't parse the request body for params if the client
        # didn't provide a "Content-Type" header.
        if 'Content-Type' not in self.headers:
            h = http.HeaderMap(self.headers.items())
            h['Content-Type'] = ''
        else:
            h = self.headers
        try:
            forms = _cpcgifs.FieldStorage(
                fp=self.rfile,
                headers=h,
                # FieldStorage only recognizes POST.
                environ={'REQUEST_METHOD': "POST"},
                keep_blank_values=1)
        except Exception, e:
            if e.__class__.__name__ == 'MaxSizeExceeded':
                # Post data is too big
                raise cherrypy.HTTPError(413)
            else:
                raise
        # Note that, if headers['Content-Type'] is multipart/*,
        # then forms.file will not exist; instead, each form[key]
        # item will be its own file object, and will be handled
        # by params_from_CGI_form.
        if forms.file:
            # request body was a content-type other than form params.
            self.body = forms.file
        else:
            self.body_params = p = http.params_from_CGI_form(forms)
            self.params.update(p)
def run(self, method, path, query_string, req_protocol, headers, rfile):
    """Process the Request. (Core)

    NOTE(review): this appears to be a duplicate of the run() method in the
    Request chunk earlier in this file -- confirm whether both are needed.

    method, path, query_string, and req_protocol should be pulled
    directly from the Request-Line (e.g. "GET /path?key=val HTTP/1.0").
    path should be %XX-unquoted, but query_string should not be.
    headers should be a list of (name, value) tuples.
    rfile should be a file-like object containing the HTTP request entity.

    When run() is done, the returned object should have 3 attributes:
      status, e.g. "200 OK"
      header_list, a list of (name, value) tuples
      body, an iterable yielding strings

    Consumer code (HTTP servers) should then access these response
    attributes to build the outbound stream.
    """
    self.stage = 'run'
    try:
        self.error_response = cherrypy.HTTPError(500).set_response
        self.method = method
        path = path or "/"
        self.query_string = query_string or ''
        # Compare request and server HTTP protocol versions, in case our
        # server does not support the requested protocol. Limit our output
        # to min(req, server). We want the following output:
        #     request    server     actual written   supported response
        #     protocol   protocol  response protocol    feature set
        # a     1.0        1.0           1.0                1.0
        # b     1.0        1.1           1.1                1.0
        # c     1.1        1.0           1.0                1.0
        # d     1.1        1.1           1.1                1.1
        # Notice that, in (b), the response will be "HTTP/1.1" even though
        # the client only understands 1.0. RFC 2616 10.5.6 says we should
        # only return 505 if the _major_ version is different.
        rp = int(req_protocol[5]), int(req_protocol[7])
        sp = int(self.server_protocol[5]), int(self.server_protocol[7])
        self.protocol = min(rp, sp)
        # Rebuild first line of the request (e.g. "GET /path HTTP/1.0").
        url = path
        if query_string:
            url += '?' + query_string
        self.request_line = '%s %s %s' % (method, url, req_protocol)
        self.header_list = list(headers)
        self.rfile = rfile
        self.headers = http.HeaderMap()
        self.cookie = Cookie.SimpleCookie()
        self.handler = None
        # path_info should be the path from the
        # app root (script_name) to the handler.
        self.script_name = self.app.script_name
        self.path_info = pi = path[len(self.script_name):]
        self.stage = 'respond'
        self.respond(pi)
    except self.throws:
        raise
    except:
        if self.throw_errors:
            raise
        else:
            # Failure in setup, error handler or finalize. Bypass them.
            # Can't use handle_error because we may not have hooks yet.
            cherrypy.log(traceback=True, severity=40)
            if self.show_tracebacks:
                body = format_exc()
            else:
                body = ""
            r = bare_error(body)
            response = cherrypy.response
            response.status, response.header_list, response.body = r
    if self.method == "HEAD":
        # HEAD requests MUST NOT return a message-body in the response.
        cherrypy.response.body = []
    cherrypy.log.access()
    if cherrypy.response.timed_out:
        raise cherrypy.TimeoutError()
    return cherrypy.response
</form> </div> </div> </body> </html>""" teacherID = None try: post = cgi.FieldStorage() if "successData" in post: #Accessing homepage from login teacherID = int(post["successData"].value) newCookie = Cookie.SimpleCookie() newCookie["teacherID"] = str(teacherID) print newCookie elif "HTTP_COOKIE" in os.environ: #Accessing from another page cookieString = os.environ.get("HTTP_COOKIE") oldCookie = Cookie.SimpleCookie() oldCookie.load(cookieString) if "teacherID" in oldCookie: teacherID = int(oldCookie["teacherID"].value) else: raise Exception("You do not have permission to view this page") print page % (teacherMenuBar(), teacherID, teacherID)
def __init__(self):
    """Initialize the instance with an empty cookie jar.

    Callers populate self.cookie (or load request cookies into it)
    after construction.
    """
    self.cookie = Cookie.SimpleCookie()
from mod_python.util import parse_qsl except ImportError: try: # Python 2.6 and greater from urlparse import parse_qsl except ImportError: # Python 2.5, 2.4. Works on Python 2.6 but raises # PendingDeprecationWarning from cgi import parse_qsl import Cookie # httponly support exists in Python 2.6's Cookie library, # but not in Python 2.4 or 2.5. _morsel_supports_httponly = Cookie.Morsel._reserved.has_key('httponly') # Some versions of Python 2.7 and later won't need this encoding bug fix: _cookie_encodes_correctly = Cookie.SimpleCookie().value_encode(';') == (';', '"\\073"') # See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256 _tc = Cookie.SimpleCookie() _tc.load('f:oo') _cookie_allows_colon_in_names = 'Set-Cookie: f:oo=' in _tc.output() if _morsel_supports_httponly and _cookie_encodes_correctly and _cookie_allows_colon_in_names: SimpleCookie = Cookie.SimpleCookie else: if not _morsel_supports_httponly: class Morsel(Cookie.Morsel): def __setitem__(self, K, V): K = K.lower() if K == "httponly": if V: # The superclass rejects httponly as a key,
def decodeCookies(url, server, headers, cookies): """Decode cookies into the supplied cookies dictionary, according to RFC 6265. Relevant specs: http://www.ietf.org/rfc/rfc2109.txt (obsolete) http://www.ietf.org/rfc/rfc2965.txt (obsolete) http://www.ietf.org/rfc/rfc6265.txt (proposed standard) """ # see rfc 6265, section 5.1.4 # empty path => '/' # path must begin with '/', so we only weed out the rightmost '/' request_path = urlparse.urlparse(url)[2] if len(request_path) > 2 and request_path[-1] == '/': request_path = request_path[:-1] else: request_path = '/' # XXX - tried slurping all the set-cookie and joining them on # '\n', some cookies were not parsed. This below worked flawlessly. for ch in headers.getallmatchingheaders('set-cookie'): cookie = Cookie.SimpleCookie(ch.strip()).values()[0] # see rfc 6265, section 5.3, step 7 path = cookie['path'] or request_path # see rfc 6265, section 5.3, step 4 to 6 # XXX - we don't bother with cookie persistence # XXX - we don't check for public suffixes if cookie['domain']: domain = cookie['domain'] # see rfc6265, section 5.2.3 if domain[0] == '.': domain = domain[1:] if not server.endswith(domain): continue else: domain = server # all date handling is done is UTC # XXX - need reviewing by someone familiar with python datetime objects now = datetime.datetime.utcnow() expire = datetime.datetime.min maxage = cookie['max-age'] # see rfc 6265, section 5.3, step 3 if maxage != '': timedelta = int(maxage) if timedelta > 0: expire = now + datetime.timedelta(seconds=timedelta) else: if cookie['expires'] == '': expire = datetime.datetime.max else: expire = datetime.datetime.strptime( cookie['expires'], "%a, %d-%b-%Y %H:%M:%S %Z") cookie['expires'] = expire bydom = cookies.setdefault(domain, {}) bypath = bydom.setdefault(path, {}) if expire > now: bypath[cookie.key] = cookie elif cookie.key in bypath: del bypath[cookie.key]
def cookies(self):
    """Return the request cookies as a plain {name: value} dict.

    Parses the HTTP_COOKIE entry of self.env (empty string when absent).
    """
    jar = Cookie.SimpleCookie(self.env.get('HTTP_COOKIE', ''))
    return {name: morsel.value for name, morsel in jar.items()}
def main():
    """CGI entry point: mark a resource spawn unavailable for the logged-in user.

    Authenticates via the gh_sid session cookie (or the gh_sid form field),
    then calls removeSpawn() and prints the result as the CGI response.
    """
    # Get current url
    try:
        url = os.environ['SCRIPT_NAME']
    except KeyError:
        url = ''
    # NOTE(review): url and loginResult are read but never used below.
    form = cgi.FieldStorage()
    # Get Cookies
    useCookies = 1
    cookies = Cookie.SimpleCookie()
    try:
        cookies.load(os.environ['HTTP_COOKIE'])
    except KeyError:
        useCookies = 0
    if useCookies:
        try:
            currentUser = cookies['userID'].value
        except KeyError:
            currentUser = ''
        try:
            loginResult = cookies['loginAttempt'].value
        except KeyError:
            loginResult = 'success'
        try:
            sid = cookies['gh_sid'].value
        except KeyError:
            # Fall back to the form field when the cookie is absent.
            sid = form.getfirst('gh_sid', '')
    else:
        currentUser = ''
        sid = form.getfirst('gh_sid', '')
    spawnName = form.getfirst('spawn', '')
    galaxy = form.getfirst('galaxy', '')
    planets = form.getfirst('planets', '')
    # escape input to prevent sql injection
    sid = dbShared.dbInsertSafe(sid)
    spawnName = dbShared.dbInsertSafe(spawnName)
    galaxy = dbShared.dbInsertSafe(galaxy)
    planets = dbShared.dbInsertSafe(planets)
    # Get a session
    logged_state = 0
    sess = dbSession.getSession(sid)
    if (sess != ''):
        logged_state = 1
        # The session owner overrides any userID cookie value.
        currentUser = sess
    # Main program
    print 'Content-type: text/html\n'
    if (logged_state > 0):
        if (dbShared.galaxyState(galaxy) == 1):
            spawnID = dbShared.getSpawnID(spawnName, galaxy)
            result = removeSpawn(spawnID, planets, currentUser, galaxy)
        else:
            result = "Error: That Galaxy is Inactive."
    else:
        result = "Error: You must be logged in to mark a resource unavailable."
    print result
    # Exit code mirrors success/failure of the operation.
    if (result.find("Error:") > -1):
        sys.exit(500)
    else:
        sys.exit(200)
class Response(object):
    """An HTTP Response, including status, headers, and body.

    Application developers should use Response.headers (a dict) to set
    or modify HTTP response headers. When the response is finalized,
    Response.headers is transformed into Response.header_list as
    (key, value) tuples.
    """
    __metaclass__ = cherrypy._AttributeDocstrings

    # Class attributes for dev-time introspection.
    status = ""
    status__doc = """The HTTP Status-Code and Reason-Phrase."""
    header_list = []
    header_list__doc = """ A list of the HTTP response headers as (name, value) tuples. In general, you should use response.headers (a dict) instead."""
    headers = http.HeaderMap()
    headers__doc = """ A dict-like object containing the response headers. Keys are header names (in Title-Case format); however, you may get and set them in a case-insensitive manner. That is, headers['Content-Type'] and headers['content-type'] refer to the same value. Values are header values (decoded according to RFC 2047 if necessary). See also: http.HeaderMap, http.HeaderElement."""
    cookie = Cookie.SimpleCookie()
    cookie__doc = """See help(Cookie)."""
    body = Body()
    body__doc = """The body (entity) of the HTTP response."""
    time = None
    time__doc = """The value of time.time() when created. Use in HTTP dates."""
    timeout = 300
    timeout__doc = """Seconds after which the response will be aborted."""
    timed_out = False
    timed_out__doc = """ Flag to indicate the response should be aborted, because it has exceeded its timeout."""
    stream = False
    stream__doc = """If False, buffer the response body."""

    def __init__(self):
        """Create a fresh response with default headers and an empty body."""
        self.status = None
        self.header_list = None
        self._body = []
        self.time = time.time()
        self.headers = http.HeaderMap()
        # Since we know all our keys are titled strings, we can
        # bypass HeaderMap.update and get a big speed boost.
        dict.update(
            self.headers, {
                "Content-Type": 'text/html',
                "Server": "CherryPy/" + cherrypy.__version__,
                "Date": http.HTTPDate(self.time),
            })
        self.cookie = Cookie.SimpleCookie()

    def collapse_body(self):
        """Collapse self.body to a single string; replace it and return it."""
        newbody = ''.join([chunk for chunk in self.body])
        self.body = newbody
        return newbody

    def finalize(self):
        """Transform headers (and cookies) into self.header_list. (Core)

        Raises cherrypy.HTTPError(500) when self.status is not a valid
        HTTP status.
        """
        try:
            code, reason, _ = http.valid_status(self.status)
        except ValueError, x:
            raise cherrypy.HTTPError(500, x.args[0])
        self.status = "%s %s" % (code, reason)
        headers = self.headers
        if self.stream:
            # NOTE(review): this pops Content-Length only when it is already
            # absent/None -- looks like a no-op; confirm intent.
            if dict.get(headers, 'Content-Length') is None:
                dict.pop(headers, 'Content-Length', None)
        elif code < 200 or code in (204, 205, 304):
            # "All 1xx (informational), 204 (no content),
            # and 304 (not modified) responses MUST NOT
            # include a message-body."
            dict.pop(headers, 'Content-Length', None)
            self.body = ""
        else:
            # Responses which are not streamed should have a Content-Length,
            # but allow user code to set Content-Length if desired.
            if dict.get(headers, 'Content-Length') is None:
                content = self.collapse_body()
                dict.__setitem__(headers, 'Content-Length', len(content))
        # Transform our header dict into a list of tuples.
        self.header_list = h = headers.output(cherrypy.request.protocol)
        cookie = self.cookie.output()
        if cookie:
            for line in cookie.split("\n"):
                if line.endswith("\r"):
                    # Python 2.4 emits cookies joined by LF but 2.5+ by CRLF.
                    line = line[:-1]
                name, value = line.split(": ", 1)
                h.append((name, value))
import Cookie import os import time import json cgitb.enable() form = cgi.FieldStorage() loc = form['loc'].value count = 0 conn2 = sqlite3.connect('accounts.db') c2 = conn2.cursor() cookstring = os.environ.get('HTTP_COOKIE') my_cookie = Cookie.SimpleCookie(cookstring) saved_session_id = my_cookie['session_id'].value if (loc == "Starbucks"): c2.execute('select Starbucks from users where session_id=?;', (saved_session_id, )) count = c2.fetchone()[0] + 1 c2.execute('update users set starbucks = ? where session_id=?;', ( count, saved_session_id, )) if (loc == "Connections"): c2.execute('select connections from users where session_id=?;', (saved_session_id, )) count = c2.fetchone()[0] + 1 c2.execute('update users set connections = ? where session_id=?;', ( count,
def handleHttpRequest(self, scheme, netloc, path, params, query, frag, qs, posted):
    """Serve one HTTP request on the built-in (PageKite) web server.

    Routes /_pagekite/ console paths (login/logout/config download/kite
    management), an optional yamon stats path, PhotoBackup uploads, and
    static files; everything else gets 404/403 template responses.
    """
    # Template variables shared by all responses.
    data = {
        'prog': self.server.pkite.progname,
        'mimetype': self.getMimeType(path),
        'hostname': socket.gethostname() or 'Your Computer',
        'http_host': self.http_host,
        'query_string': query,
        'code': 200,
        'body': '',
        'msg': 'OK',
        'now': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),
        'ver': APPVER
    }
    # Expose every request header to templates as http_<name>.
    for key in self.headers.keys():
        data['http_'+key.lower()] = self.headers.get(key)
    if 'download' in qs:
        data['mimetype'] = 'application/octet-stream'
        # Would be nice to set Content-Disposition too.
    elif 'view' in qs:
        data['mimetype'] = 'text/plain'
    data['method'] = data.get('http_x-pagekite-proto', 'http').lower()
    if 'http_cookie' in data:
        cookies = Cookie.SimpleCookie(data['http_cookie'])
    else:
        cookies = {}
    # Do we expose the built-in console?
    console = self.host_config.get('console', False)
    # Are we implementing the PhotoBackup protocol?
    photobackup = self.host_config.get('photobackup', False)
    if path == self.host_config.get('yamon', False):
        if common.gYamon:
            data['body'] = common.gYamon.render_vars_text(qs.get('view', [None])[0])
        else:
            data['body'] = ''
    elif console and path.startswith('/_pagekite/logout/'):
        parts = path.split('/')
        location = parts[3] or ('%s://%s/' % (data['method'], data['http_host']))
        # Clear the auth cookie and bounce back to the requested location.
        self.sendResponse('\n', code=302, msg='Moved', header_list=[
            ('Set-Cookie', 'pkite_token=; path=/'),
            ('Location', location)
        ])
        return
    elif console and path.startswith('/_pagekite/login/'):
        parts = path.split('/', 4)
        token = parts[3]
        location = parts[4] or ('%s://%s/_pagekite/' % (data['method'], data['http_host']))
        if query:
            location += '?' + query
        if token == self.server.secret:
            # Correct token: set the auth cookie and redirect into the console.
            self.sendResponse('\n', code=302, msg='Moved', header_list=[
                ('Set-Cookie', 'pkite_token=%s; path=/' % token),
                ('Location', location)
            ])
            return
        else:
            logging.LogDebug("Invalid token, %s != %s" % (token, self.server.secret))
            data.update(self.E404)
    elif console and path.startswith('/_pagekite/'):
        # All other console paths require the auth cookie to match the secret.
        if not ('pkite_token' in cookies and cookies['pkite_token'].value == self.server.secret):
            self.sendResponse('<h1>Forbidden</h1>\n', code=403, msg='Forbidden')
            return
        if path == '/_pagekite/':
            if not self.sendStaticPath('%s/control.pk-shtml' % console, 'text/html', shtml_vars=data):
                self.sendResponse('<h1>Not found</h1>\n', code=404, msg='Missing')
            return
        elif path.startswith('/_pagekite/quitquitquit/'):
            self.sendResponse('<h1>Kaboom</h1>\n', code=500, msg='Asplode')
            self.wfile.flush()
            # Hard-exit the whole process on purpose.
            os._exit(2)
        elif path.startswith('/_pagekite/add_kite/'):
            data.update(self.add_kite(path, qs))
        elif path.endswith('/pagekite.rc'):
            data.update({'mimetype': 'application/octet-stream',
                         'body': '\n'.join(self.server.pkite.GenerateConfig())})
        elif path.endswith('/pagekite.rc.txt'):
            data.update({'mimetype': 'text/plain',
                         'body': '\n'.join(self.server.pkite.GenerateConfig())})
        elif path.endswith('/pagekite.cfg'):
            data.update({'mimetype': 'application/octet-stream',
                         'body': '\r\n'.join(self.server.pkite.GenerateConfig())})
        else:
            data.update(self.E403)
    else:
        # NOTE(review): "(path in '/', '/test')" builds a 2-tuple, which is
        # always truthy; it likely was meant to be "path in ('/', '/test')"
        # -- confirm against upstream before changing.
        if photobackup and (posted is not None) and (path in '/', '/test'):
            if self.handlePhotoBackup(path, posted, shtml_vars=data):
                return
        elif (posted is not None) and 'upload' in posted:
            if self.handleFileUpload(path, posted['upload'], shtml_vars=data):
                if self.sendStaticPath(path, data['mimetype'], shtml_vars=data):
                    return
            else:
                data.update(self.E403)
        else:
            if self.sendStaticPath(path, data['mimetype'], shtml_vars=data):
                return
        if path == '/robots.txt':
            data.update(self.ROBOTSTXT)
        else:
            data.update(self.E404)
    # Fall through: render the accumulated template vars.
    if data['mimetype'] in ('application/octet-stream', 'text/plain'):
        response = self.TEMPLATE_RAW % data
    elif path.endswith('.jsonp'):
        response = self.TEMPLATE_JSONP % (data, )
    else:
        response = self.TEMPLATE_HTML % data
    self.sendResponse(response, msg=data['msg'], code=data['code'], mimetype=data['mimetype'], chunked=False)
    self.sendEof()
# get the stored cookie stored_cookie_string = os.environ.get('HTTP_COOKIE') # dictionary to store the response name/value pairs before JSON conversion # JSON format data that need to send back to client data = {} # the client didn't send a cookie if not stored_cookie_string: # print "Hello, You don't have cookie." print # without printing a blank line, the "end of script output before headers" error will occur print json.dumps(data) # else the client DID send a cookie... else: # get the cookie cookie = Cookie.SimpleCookie(stored_cookie_string) if "user_name" in cookie: cookie["user_name"]["expires"] = 'Thu, 01-Jan-1970 00:00:00 GMT' if "password" in cookie: cookie["password"]["expires"] = 'Thu, 01-Jan-1970 00:00:00 GMT' # do some username & password verifications print cookie print print json.dumps(data)
def _run_test(self, host, path_info='/', cookie_dict=None, action=None, set_email=None, set_admin=None, continue_url=None, method='GET'):
    """Runs the login HTTP handler, returning information about the response.

    Args:
      host: The value of the HTTP Host header.
      path_info: The absolute path of the request.
      cookie_dict: A cookie dictionary with the existing cookies.
      action: Value of the 'action' query argument.
      set_email: Value of the 'email' query argument.
      set_admin: Value of the 'admin' query argument.
      continue_url: Value of the 'continue' query argument.
      method: The HTTP method (e.g., 'GET').

    Returns:
      Tuple (status, location, set_cookie, content_type) where each value is
      the value of the corresponding header from the response; if no header
      exists, the value will be None. In the case of status, it will just
      return the integer status code and not the rest of the status message.
    """
    environ = {}
    wsgiref.util.setup_testing_defaults(environ)
    # The SERVER_NAME should never be used by the login module -- always defer
    # to the HTTP Host (so the user is not redirected to a different domain).
    environ['SERVER_NAME'] = 'do_not_use'
    environ['SERVER_PORT'] = '666'
    environ['SERVER_PROTOCOL'] = 'HTTP/1.1'
    environ['HTTP_HOST'] = host
    environ['PATH_INFO'] = path_info
    environ['REQUEST_METHOD'] = method
    if cookie_dict:
        # Serialize the dict into a single HTTP_COOKIE header value.
        cookie = Cookie.SimpleCookie(cookie_dict)
        cookie_value = ';'.join(m.OutputString() for m in cookie.values())
        environ['HTTP_COOKIE'] = cookie_value
    query_dict = {}
    if action:
        query_dict['action'] = action
    if set_email:
        query_dict['email'] = set_email
    if set_admin:
        query_dict['admin'] = set_admin
    if continue_url:
        query_dict['continue'] = continue_url
    if query_dict:
        environ['QUERY_STRING'] = urllib.urlencode(query_dict)
    response_dict = {}
    def start_response(status, headers):
        # Capture status/headers for inspection after the app returns.
        response_dict['status'] = int(status.split(' ', 1)[0])
        response_dict['headers'] = dict((k.lower(), v) for (k, v) in headers)
    login.application(environ, start_response)
    return (response_dict['status'],
            response_dict['headers'].get('location'),
            response_dict['headers'].get('set-cookie'),
            response_dict['headers'].get('content-type'))
def app(environ, start_response): status = '200 OK' response_headers = [('Content-Type', 'text/html')] start_response(status, response_headers) try: if environ['REQUEST_METHOD'] == 'GET': path = environ['PATH_INFO'][1:] if path.endswith(".html"): response_headers = [('Content-Type', 'text/html')] start_response(status, response_headers) f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() elif path.endswith(".jpg"): response_headers = [('Content-Type', 'image/jpg')] start_response(status, response_headers) f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() elif path.endswith(".gif"): response_headers = [('Content-Type', 'image/gif')] start_response(status, response_headers) f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() elif path.endswith(".js"): response_headers = [('Content-Type', 'application/javascript')] start_response(status, response_headers) f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() elif path.endswith(".css"): response_headers = [('Content-Type', 'text/css')] start_response(status, response_headers) f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() else: f = open(curdir + sep + path, 'rb') # self.path has /test.html return f.read() f.close() if environ['REQUEST_METHOD'] == 'POST': path = environ['PATH_INFO'][1:] if path.endswith("new.html"): try: request_body_size = int( environ['REQUEST_HEADERS']['Content-Length']) except (ValueError): request_body_size = 0 input = environ['wsgi.input'] request_body = environ['wsgi.input'] handler = {} if 'Cookie' in environ['REQUEST_HEADERS']: if environ['REQUEST_HEADERS']['Cookie']: cookies = environ['REQUEST_HEADERS']['Cookie'] cookies = cookies.split('; ') handler = {} for cookie in cookies: cookie = cookie.split('=') handler[cookie[0]] = cookie[1] print handler d = parse_qs(request_body) food = d.get('name11', [''])[0] 
if 'username' in handler: if handler['username'] == 'nikhil': return handler[ 'username'] + 'take your yummy' + food else: return "please login dude yaar" else: return "please login dude" else: return "please login bro" if path.endswith("upload.html"): try: request_body_size = int( environ['REQUEST_HEADERS']['Content-Length']) except (ValueError): request_body_size = 0 input = environ['wsgi.input'] request_body = environ['wsgi.input'] handler = {} print 'wsgi.input text is here' print environ['wsgi.input'] print 'REQUEST_BODY input is here' print environ['REQUEST_BODY'] data = environ['wsgi.input'] data = data.split('\r\n\r\n') fi = data[1].split(';') fi = fi[2].split('\r\n') fi = fi[0] leng = len(fi) filename = fi[11:len(fi) - 1] f = open(filename, 'wb') finaldata = data[2].split('\r\n') # print finaldata f.write(finaldata[0]) f.close() return "success" if path.endswith("login.html"): try: request_body_size = int( environ['REQUEST_HEADERS']['Content-Length']) except (ValueError): request_body_size = 0 input = environ['wsgi.input'] request_body = environ['wsgi.input'] d = parse_qs(request_body) username = d.get('username', [''])[0] password = d.get('password', [''])[0] if username == 'nikhil' and password == 'nikhil': C = Cookie.SimpleCookie() C['username'] = '******' C['password'] = '******' print C response_headers = [('Content-Type', 'text/html'), ('Cookie', C)] start_response(status, response_headers) f = open('new.html', 'rb') # self.path has /test.html return f.read() f.close() else: f = open('login.html', 'rb') # self.path has /test.html return f.read() f.close() except IOError: status = '404 Not Found' start_response(status, response_headers) return "404 Not found"
def misfortune_cookie(self):
    """Return the value of the 'C107373883' cookie from the request.

    Reads the raw Cookie header via self.headers.getheader('Cookie') and
    parses it with Cookie.SimpleCookie. Returns None (implicitly) when the
    cookie is absent.
    """
    cookie = Cookie.SimpleCookie(self.headers.getheader('Cookie'))
    # 'in' replaces dict.has_key(), which is deprecated and removed in
    # Python 3; behavior is identical.
    if 'C107373883' in cookie:
        return cookie['C107373883'].value
def __patched_HTTPTransport_call(self, addr, data, namespace, soapaction=None, encoding=None, http_proxy=None, config=Config, timeout=None):
    """Monkey-patched SOAPpy HTTPTransport.call that adds a socket timeout.

    POSTs the SOAP envelope in ``data`` to ``addr``, replaying stored
    cookies and HTTP Basic credentials, then reads the reply and returns
    (response payload, new namespace). Raises HTTPError for empty responses
    and for status codes other than 200/500.
    """
    def __addcookies(self, r):
        '''Add cookies from self.cookies to request r '''
        for cname, morsel in self.cookies.items():
            attrs = []
            value = morsel.get('version', '')
            if value != '' and value != '0':
                attrs.append('$Version=%s' % value)
            attrs.append('%s=%s' % (cname, morsel.coded_value))
            value = morsel.get('path')
            if value:
                attrs.append('$Path=%s' % value)
            value = morsel.get('domain')
            if value:
                attrs.append('$Domain=%s' % value)
            r.putheader('Cookie', "; ".join(attrs))
    if not isinstance(addr, SOAPAddress):
        addr = SOAPAddress(addr, config)
    # Build a request
    if http_proxy:
        real_addr = http_proxy
        real_path = addr.proto + "://" + addr.host + addr.path
    else:
        real_addr = addr.host
        real_path = addr.path
    # Pick the connection class by scheme; only the plain-HTTP path gets
    # the timeout wrapper (that is the point of this patch).
    if addr.proto == 'httpg':
        from pyGlobus.io import GSIHTTP
        r = GSIHTTP(real_addr, tcpAttr=config.tcpAttr)
    elif addr.proto == 'https':
        r = httplib.HTTPS(real_addr, key_file=config.SSL.key_file, cert_file=config.SSL.cert_file)
    else:
        r = HTTPWithTimeout(real_addr, timeout=timeout)
    r.putrequest("POST", real_path)
    r.putheader("Host", addr.host)
    r.putheader("User-agent", SOAPUserAgent())
    t = 'text/xml'
    if encoding is not None:
        t += '; charset=%s' % encoding
    r.putheader("Content-type", t)
    r.putheader("Content-length", str(len(data)))
    __addcookies(self, r)
    # if user is not a user:passwd format
    # we'll receive a failure from the server. . .I guess (??)
    if addr.user is not None:
        val = base64.encodestring(urllib.unquote_plus(addr.user))
        r.putheader('Authorization', 'Basic ' + val.replace('\012', ''))
    # This fixes sending either "" or "None"
    if soapaction is None or len(soapaction) == 0:
        r.putheader("SOAPAction", "")
    else:
        r.putheader("SOAPAction", '"%s"' % soapaction)
    if config.dumpHeadersOut:
        s = 'Outgoing HTTP headers'
        debugHeader(s)
        print "POST %s %s" % (real_path, r._http_vsn_str)
        print "Host:", addr.host
        print "User-agent: SOAPpy " + __version__ + " (http://pywebsvcs.sf.net)"
        print "Content-type:", t
        print "Content-length:", len(data)
        print 'SOAPAction: "%s"' % soapaction
        debugFooter(s)
    # PATCH: Show stream before trying to send-it
    if config.dumpSOAPOut:
        s = 'Outgoing SOAP'
        debugHeader(s)
        print data,
        if data[-1] != '\n':
            print
        debugFooter(s)
    r.endheaders()
    # send the payload
    r.send(data)
    # read response line
    code, msg, headers = r.getreply()
    # Replace the cookie jar with whatever Set-Cookie headers came back.
    self.cookies = Cookie.SimpleCookie()
    if headers:
        content_type = headers.get("content-type", "text/xml")
        content_length = headers.get("Content-length")
        for cookie in headers.getallmatchingheaders("Set-Cookie"):
            self.cookies.load(cookie)
    else:
        content_type = None
        content_length = None
    # work around OC4J bug which does '<len>, <len>' for some reaason
    if content_length:
        comma = content_length.find(',')
        if comma > 0:
            content_length = content_length[:comma]
    # attempt to extract integer message size
    try:
        message_len = int(content_length)
    except:
        # Missing/garbage Content-Length: signal "read everything" below.
        message_len = -1
    f = r.getfile()
    if f is None:
        raise HTTPError(code, "Empty response from server\nCode: %s\nHeaders: %s" % (msg, headers))
    if message_len < 0:
        # Content-Length missing or invalid; just read the whole socket
        # This won't work with HTTP/1.1 chunked encoding
        data = f.read()
        message_len = len(data)
    else:
        data = f.read(message_len)
    if(config.debug):
        print "code=", code
        print "msg=", msg
        print "headers=", headers
        print "content-type=", content_type
        print "data=", data
    if config.dumpHeadersIn:
        s = 'Incoming HTTP headers'
        debugHeader(s)
        if headers.headers:
            print "HTTP/1.? %d %s" % (code, msg)
            print "\n".join(map(lambda x: x.strip(), headers.headers))
        else:
            print "HTTP/0.9 %d %s" % (code, msg)
        debugFooter(s)
    def startswith(string, val):
        # Local helper: prefix test that tolerates non-str content_type.
        return string[0:len(val)] == val
    # A 500 is acceptable only when it carries a SOAP fault body.
    if code == 500 and not \
            ((startswith(content_type, "text/xml") or startswith(content_type, "text/plain")) and message_len > 0):
        raise HTTPError(code, msg)
    if config.dumpSOAPIn:
        s = 'Incoming SOAP'
        debugHeader(s)
        print data,
        if (len(data) > 0) and (data[-1] != '\n'):
            print
        debugFooter(s)
    if code not in (200, 500):
        raise HTTPError(code, msg)
    # get the new namespace
    if namespace is None:
        new_ns = None
    else:
        new_ns = self.getNS(namespace, data)
    # return response payload
    return data, new_ns
def do_request(self, request, url, method, data): # HTTP call try: httplib.HTTPConnection.debuglevel = 1 # Request creation proto, host, cgi, param, query = urlparse.urlparse(url)[:5] # manage proxies with authentication (get it from environment) proxy = None for proxy_name in settings.NOT_PROXY_FOR: if host.startswith(proxy_name): proxy = urllib2.ProxyHandler({}) #no proxy break if not proxy: #Host is not included in the NOT_PROXY_FOR list => proxy is needed! proxy = urllib2.ProxyHandler() #proxies from environment opener = urllib2.build_opener(proxy) # Adds original request Headers to the request headers = {} for header in request.META.items(): header_name = header[0].lower() if header_name == 'content_type' and header[1]: headers["content-type"] = header[1] elif header_name == 'cookie' or header_name == 'http_cookie': cookie_parser = Cookie.SimpleCookie(header[1]) # Remove EzWeb cookies if hasattr(settings, 'SESSION_COOKIE_NAME'): del cookie_parser[settings.SESSION_COOKIE_NAME] if hasattr(settings, 'CSRF_COOKIE_NAME'): del cookie_parser[settings.CSRF_COOKIE_NAME] content = ', '.join([ cookie_parser[key].OutputString() for key in cookie_parser ]) if content != '': headers['Cookie'] = content elif self.http_headerRE.match( header_name ) and not header_name in self.blacklisted_http_headers: fixed_name = header_name.replace("http_", "", 1).replace('_', '-') headers[fixed_name] = header[1] protocolVersion = self.protocolRE.match( request.META['SERVER_PROTOCOL']) if protocolVersion != None: protocolVersion = protocolVersion.group(1) else: protocolVersion = '1.1' hostName = self.hostRE.match(request.META['HTTP_HOST']) if hostName != None: hostName = hostName.group(1) else: hostName = socket.gethostname() headers["Via"] = "%s %s (GVS-python-Proxy/1.1)" % (protocolVersion, hostName) if (method == 'POST' or method == 'PUT') and not 'content-type' in headers: # Add Content-Type (Servlets bug) headers['content-type'] = "application/x-www-form-urlencoded" # Remote user 
header if not request.user.is_anonymous(): headers['Remote-User'] = request.user.username # Open the request try: res = self._do_request(opener, method, url, data, headers) except urllib2.URLError, e: if e.reason[0] == errno.ECONNREFUSED: return HttpResponse(status=504) else: return HttpResponseNotFound(e.reason) # Add content-type header to the response if (res.info().has_key('Content-Type')): response = HttpResponse(res.read(), mimetype=res.info()['Content-Type']) else: response = HttpResponse(res.read()) # Set status code to the response response.status_code = res.code # Add all the headers received from the response headers = res.headers for header in headers: if string.lower(header) == 'set-cookie': cookie_parser = Cookie.SimpleCookie() cookies = res.headers.getheaders(header) for i in range(len(cookies)): cookie_parser.load(cookies[i]) for key in cookie_parser: response.set_cookie( key, cookie_parser[key].value, expires=cookie_parser[key]['expires'], path=cookie_parser[key]['path'], domain=cookie_parser[key]['domain']) elif is_valid_header(string.lower(header)): response[header] = headers[header] return response
".* class is insecure.*", DeprecationWarning) # Currently this only tests SimpleCookie cases = [ ('chips=ahoy; vienna=finger', {'chips':'ahoy', 'vienna':'finger'}), ('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;";', {'keebler' : 'E=mc2; L="Loves"; fudge=\012;'}), # Check illegal cookies that have an '=' char in an unquoted value ('keebler=E=mc2;', {'keebler' : 'E=mc2'}) ] for data, dict in cases: C = Cookie.SimpleCookie() ; C.load(data) print repr(C) print str(C) items = dict.items() items.sort() for k, v in items: print ' ', k, repr( C[k].value ), repr(v) verify(C[k].value == v) print C[k] C = Cookie.SimpleCookie() C.load('Customer="WILE_E_COYOTE"; Version=1; Path=/acme') verify(C['Customer'].value == 'WILE_E_COYOTE') verify(C['Customer']['version'] == '1') verify(C['Customer']['path'] == '/acme')
def __init__(self):
    """Initialize an empty cookie container.

    Creates ``self.c`` (a ``Cookie.SimpleCookie`` jar) and
    ``self.cookie_info`` (a plain dict for per-cookie metadata).
    """
    self.cookie_info = {}
    self.c = Cookie.SimpleCookie()
def __init__(self, key="", description="", unlocked=False): self.key = key self.description = description self.unlocked = unlocked self.minReputation = -99 # Get current url try: url = os.environ['REQUEST_URI'] except KeyError: url = '' uiTheme = '' form = cgi.FieldStorage() # Get Cookies useCookies = 1 cookies = Cookie.SimpleCookie() try: cookies.load(os.environ['HTTP_COOKIE']) except KeyError: useCookies = 0 if useCookies: try: currentUser = cookies['userID'].value except KeyError: currentUser = '' try: loginResult = cookies['loginAttempt'].value except KeyError: loginResult = 'success' try:
def _authorize_request(env, operation):
    """Grant or deny access based on data from the WSGI ``env``.

    Authentication is attempted in two ways, in order:

    1. Session cookie: if ``HTTP_COOKIE`` carries a known
       ``confluentsessionid``, the matching entry in the module-level
       ``httpsessions`` map is used (and its expiry refreshed).
    2. HTTP Basic auth: if no session authenticated the caller and an
       ``HTTP_AUTHORIZATION`` header is present, the username/passphrase
       are verified and a brand-new session (with cookie) is minted.

    :param env: WSGI environ dict for the incoming request.
    :param operation: Name of the operation being attempted; recorded in
        the audit log entry.
    :returns: One of:
        * ``('logout',)`` when the request targets the logout URL;
        * a dict with ``'code': 200`` plus session/config/user details on
          successful authentication;
        * ``{'code': 401}`` when authentication fails.
    """
    authdata = None
    name = ''
    sessionid = None
    # Fresh cookie jar; only populated when a new session is created below.
    cookie = Cookie.SimpleCookie()
    if 'HTTP_COOKIE' in env:
        # Attempt to use the cookie.  If it matches a live session, reuse it.
        # NOTE(review): RobustCookie is presumably a SimpleCookie variant
        # tolerant of malformed cookie strings — confirm in its definition.
        cc = RobustCookie()
        cc.load(env['HTTP_COOKIE'])
        if 'confluentsessionid' in cc:
            sessionid = cc['confluentsessionid'].value
            if sessionid in httpsessions:
                if env['PATH_INFO'] == '/sessions/current/logout':
                    # Logout: kill any request greenthreads still in flight
                    # for this session, then drop the session entirely.
                    targets = []
                    for mythread in httpsessions[sessionid]['inflight']:
                        targets.append(mythread)
                    for mythread in targets:
                        eventlet.greenthread.kill(mythread)
                    del httpsessions[sessionid]
                    return ('logout', )
                # Sliding expiration: each authenticated request extends the
                # session lifetime by 90 seconds.
                httpsessions[sessionid]['expiry'] = time.time() + 90
                name = httpsessions[sessionid]['name']
                authdata = auth.authorize(
                    name, element=None,
                    skipuserobj=httpsessions[sessionid]['skipuserobject'])
    if (not authdata) and 'HTTP_AUTHORIZATION' in env:
        # No valid session — fall back to HTTP Basic authentication.
        if env['PATH_INFO'] == '/sessions/current/logout':
            # Logging out without a session: nothing to tear down.
            return ('logout', )
        # Decode "Basic base64(user:pass)"; split on the first ':' only so
        # passphrases may themselves contain colons.
        name, passphrase = base64.b64decode(env['HTTP_AUTHORIZATION'].replace(
            'Basic ', '')).split(':', 1)
        authdata = auth.check_user_passphrase(name, passphrase, element=None)
        if not authdata:
            return {'code': 401}
        # Mint a new, unused session id and register the session.
        sessid = util.randomstring(32)
        while sessid in httpsessions:
            sessid = util.randomstring(32)
        httpsessions[sessid] = {
            'name': name,
            'expiry': time.time() + 90,
            'skipuserobject': authdata[4],
            'inflight': set([])
        }
        # Hand the new session id back to the client; secure + httponly keep
        # it off plain HTTP and away from client-side scripts.
        cookie['confluentsessionid'] = sessid
        cookie['confluentsessionid']['secure'] = 1
        cookie['confluentsessionid']['httponly'] = 1
        cookie['confluentsessionid']['path'] = '/'
    skiplog = _should_skip_authlog(env)
    if authdata:
        auditmsg = {
            'user': name,
            'operation': operation,
            'target': env['PATH_INFO'],
        }
        authinfo = {
            'code': 200,
            'cookie': cookie,
            'cfgmgr': authdata[1],
            'username': authdata[2],
            'userdata': authdata[0]
        }
        if authdata[3] is not None:
            auditmsg['tenant'] = authdata[3]
            authinfo['tenant'] = authdata[3]
        # Prefer the canonical username from the auth backend over the raw
        # name supplied by the client.
        auditmsg['user'] = authdata[2]
        if sessionid is not None:
            authinfo['sessionid'] = sessionid
        if not skiplog:
            auditlog.log(auditmsg)
        return authinfo
    else:
        return {'code': 401}
def main():
    """CGI entry point for creating/updating/deleting a crafting recipe.

    Reads identity from cookies (falling back to the ``gh_sid`` form field),
    validates the form input, performs the requested database operation, and
    emits an XML document with the resulting ``recipeID`` and a status
    message.  Exits with status 500 when the result text contains "Error:",
    200 otherwise.
    """
    # Get current url (empty when SCRIPT_NAME is not set by the server).
    try:
        url = os.environ['SCRIPT_NAME']
    except KeyError:
        url = ''
    form = cgi.FieldStorage()
    # Get Cookies; absence of HTTP_COOKIE means the client sent none.
    useCookies = 1
    cookies = Cookie.SimpleCookie()
    try:
        cookies.load(os.environ['HTTP_COOKIE'])
    except KeyError:
        useCookies = 0
    if useCookies:
        try:
            currentUser = cookies['userID'].value
        except KeyError:
            currentUser = ''
        try:
            loginResult = cookies['loginAttempt'].value
        except KeyError:
            loginResult = 'success'
        # Session id comes from the cookie when present, else the form.
        try:
            sid = cookies['gh_sid'].value
        except KeyError:
            sid = form.getfirst('gh_sid', '')
    else:
        currentUser = ''
        loginResult = 'success'
        sid = form.getfirst('gh_sid', '')
    # Get form info.
    schematic = form.getfirst("schematic", "")
    recipeName = form.getfirst("recipeName", "")
    recipeID = form.getfirst("recipeID", "")
    ingredients = form.getfirst("ingredients", "")
    operation = form.getfirst("op", "")
    spawnID = form.getfirst("spawnID", "")
    galaxy = form.getfirst("galaxy", "")
    # Escape input to prevent sql injection.
    # NOTE(review): values are still concatenated into SQL below; this relies
    # entirely on dbInsertSafe being a complete escaper — verify.
    sid = dbShared.dbInsertSafe(sid)
    schematic = dbShared.dbInsertSafe(schematic)
    recipeName = dbShared.dbInsertSafe(recipeName)
    recipeID = dbShared.dbInsertSafe(recipeID)
    ingredients = dbShared.dbInsertSafe(ingredients)
    spawnID = dbShared.dbInsertSafe(spawnID)
    galaxy = dbShared.dbInsertSafe(galaxy)
    result = ""
    # Get a session: a non-empty return means the sid maps to a logged-in
    # user, whose name overrides any cookie-supplied user.
    logged_state = 0
    sess = dbSession.getSession(sid)
    if (sess != ''):
        logged_state = 1
        currentUser = sess
    # Check for errors.  Later checks overwrite earlier ones, so the most
    # fundamental failure (not logged in / no galaxy) is the one reported.
    errstr = ""
    if recipeName == "" and operation == "":
        errstr = "Error: You must provide a name for the recipe."
    if schematic == "" and recipeID == "":
        errstr = "Error: You must select a schematic to base the recipe on."
    if logged_state != 1:
        errstr = "Error: You must be logged in to do that."
    if galaxy == "" and schematic != "":
        errstr = "Error: You must select a galaxy before creating a recipe."
    # Only process if no errors.
    if (errstr == ""):
        result = ""
        if (logged_state > 0):
            conn = dbShared.ghConn()
            if schematic == "":
                # No schematic given: operate on an existing recipe.
                # Make sure user owns recipe before touching it.
                chkcursor = conn.cursor()
                tempSQL = "".join(
                    ("SELECT userID, schematicID FROM tRecipe WHERE recipeID=",
                     recipeID, " AND userID='", currentUser, "';"))
                chkcursor.execute(tempSQL)
                row = chkcursor.fetchone()
                if row != None:
                    if operation == "delete":
                        result = deleteRecipe(conn, recipeID, currentUser)
                    elif operation == "addspawn":
                        result = addIngredient(conn, recipeID, spawnID, row[1],
                                               currentUser)
                    else:
                        result = updateRecipe(conn, recipeID, recipeName)
                        if ingredients != "":
                            result += updateIngredients(
                                conn, recipeID, ingredients, row[1],
                                currentUser)
                else:
                    result = "Error: That recipe does not exist or is not yours."
                chkcursor.close()
            else:
                # Schematic given: create a new recipe from it.
                result = addRecipe(conn, schematic, recipeName, currentUser,
                                   galaxy)
                tmpPos = result.find("ID")
                # Save and strip ID on successful add ("...ID<n>" suffix).
                if tmpPos > -1:
                    recipeID = result[tmpPos + 2:]
                    result = result[:tmpPos]
                    # Update ingredients if they were provided (saving
                    # suggestion).
                    if ingredients != '':
                        result += updateIngredients(conn, recipeID,
                                                    ingredients, schematic,
                                                    currentUser)
            conn.close()
        else:
            result = "Error: must be logged in to do that."
    else:
        result = errstr
    # Emit the XML response: <result><recipeID/><resultText/></result>.
    print 'Content-type: text/xml\n'
    doc = minidom.Document()
    eRoot = doc.createElement("result")
    doc.appendChild(eRoot)
    eName = doc.createElement("recipeID")
    tName = doc.createTextNode(str(recipeID))
    eName.appendChild(tName)
    eRoot.appendChild(eName)
    eText = doc.createElement("resultText")
    tText = doc.createTextNode(result)
    eText.appendChild(tText)
    eRoot.appendChild(eText)
    print doc.toxml()
    # Exit status mirrors success/failure of the operation.
    if (result.find("Error:") > -1):
        sys.exit(500)
    else:
        sys.exit(200)
def customTag(target, data): tagExist = False if '%' in data and '#' in data: tagExist = True while tagExist: startPoint = data.index('%') endPoint = data.index('#') while True: try: svSp = data.index('%', startPoint + 1) except: break if svSp > endPoint: break else: startPoint = svSp custom = data[data.index('%') + 1:data.index('#')] replaced_data = None custom = custom.split(':', 1) if custom[0] == "rndint": replaced_data = random.randrange(1, 100000) elif custom[0] == "cookie": cookieManager = Cookie.SimpleCookie() cookieManager.load(charEscaper(target['webLoginCookie'], '[/')) if "reg" in custom[1]: regex_ = re.compile(custom[1].split(':', 1)[1]) for i in cookieManager.keys(): reg_result = regex_.findall(charDescaper(i)) if len(reg_result) > 0: replaced_data = cookieManager[i].value break else: for i in cookieManager.keys(): if charDescaper(i) == custom[1]: replaced_data = cookieManager[i].value break elif custom[0] == "domtoken": arg = custom[1].rsplit('@', 1) token_req = urllib2.Request( customTag(target, target['webUploadPageURL'])) token_req.add_header('cookie', target['webLoginCookie']) token_req.add_header('Accept-encoding', 'gzip,deflate') addHeader(token_req, target['webHost']) if Debug: print "[http request] custom tag - domtoken urllib2" token_res_obj = urllib2.urlopen(token_req) if "content-encoding" in token_res_obj.headers.keys(): cnt = token_res_obj.headers['content-encoding'] else: cnt = None token_res = ungzip(token_res_obj.read(), cnt) regex_token = re.compile(arg[0]) reg_result = regex_token.findall(token_res) regex_extract = re.compile(arg[1]) for i in reg_result: tmp = regex_extract.findall(i) if len(tmp) > 0: replaced_data = tmp[0] break if "%dom:" in target['webUploadURL']: arg = target['webUploadURL'].split(':', 1)[1].split('@') regex_urlform = re.compile(arg[0]) regex_urlform_match = re.compile(arg[1]) token_res = token_res.replace('\\', '') reg_urlform = regex_urlform.search(token_res) return_url = "" if reg_urlform != None: condid_url 
= reg_urlform.string[reg_urlform.start( ):reg_urlform.end()] reg_urlform_match = regex_urlform_match.search(condid_url) if reg_urlform_match != None: return_url = reg_urlform_match.string[ reg_urlform_match.start():reg_urlform_match.end()] target["webDynamicUploadURL"] = None if return_url != "": target["webDynamicUploadURL"] = return_url else: print "[-] Not Found Dynamic upload URL" exit(0) if replaced_data == None: replaced_data = '' data = data.replace('%{}#'.format(':'.join(custom)), replaced_data) if '%' not in data or '#' not in data: tagExist = False return data