def POST(self, type):
    i = web.input()
    statement = i['sql']
    params = i['params']
    # Validate hash
    hash = hashlib.sha1(
        web.config.SECRET_KEY + statement + params).hexdigest()
    if hash != i['hash']:
        raise web.notacceptable()
    # Make sure it is a select statement
    if not statement.lower().strip().startswith('select'):
        raise web.notacceptable()
    params = json.loads(params)
    engine = web.config.engine
    if type == 'explain':
        if engine.driver == 'pysqlite':
            statement = 'EXPLAIN QUERY PLAN %s' % statement
        else:
            statement = 'EXPLAIN %s' % statement
    result = engine.execute(statement, params)
    debugtoolbar = web.config.debug_toolbar
    return debugtoolbar.render('panels/sqla_result.html', {
        'result': result.fetchall(),
        'headers': result.keys(),
        'sql': format_sql(statement, params),
        'duration': float(i['duration']),
        'type': type,
    })
def POST(self, hash):
    self.get_user()
    problem = self.check_problem(hash)
    tests = db.select("tests", where="problem_id=$id",
                      vars={"id": problem.id}).list()
    f = web.input()
    try:
        f.source.decode('utf8')
    except:
        raise web.notacceptable('please submit code encoded in utf8')
    successful_tests = 0
    total_runtime = 0
    error_counts = defaultdict(lambda: 0)
    for test in tests:
        stdout, stderr, exitstatus, runtime, error = sj_client.run(
            f.language, f.source, test.stdin, custom_timelimit=test.timelimit)
        total_runtime += runtime
        if error:
            error_counts[error] += 1
        elif stdout == str(test.stdout):
            successful_tests += 1
        else:
            error_counts["wrong_answer"] += 1
    if len(error_counts) == 1:
        error_counts = error_counts.keys()[0]
    else:
        error_counts = ", ".join("%d %s" % (x[1], x[0])
                                 for x in sorted(error_counts.iteritems()))
    db.insert("submissions", problem_id=problem.id, source=f.source,
              ip=web.ctx.ip, language=f.language,
              tests_passed=successful_tests, created_at=timestamp(),
              user_id=self.user_id, errors=error_counts,
              runtime=total_runtime)
    return self.render(hash, "%d/%d tests passed in %0.2f seconds.%s" % (
        successful_tests, len(tests), total_runtime,
        error_counts and " Errors: %s" % error_counts or ""), problem)
def GET(self, domain):
    if not qv_domains.is_admin(domain):
        return web.notacceptable()
    qs = qv_questions.find({'domain': domain}).sort([('inserted_at', -1)])
    data = {
        'existing_questions': [],
        'new_uuid': uuid4(),
        'domain': domain,
        'active_question': qv_domains.get_active_question(domain),
        'submit_url': urls['question_post']['url_pattern'] % (domain),
        'get_url': urls['question_get']['url_pattern'] % (domain, ''),
        'get_results_url': urls['results_get']['url_pattern'] % (domain, ''),
        'delete_url': urls['answers_post']['url_pattern'] % (domain, ''),
        'results_url': urls['view']['url_pattern'] % (domain),
        'history_url': urls['history']['url_pattern'] % (domain),
    }
    qsd = [q for q in qs]
    data['existing_questions'] = qsd
    return renderer.admin(data)
def GET(self, domain):
    # verify the cookie is not set to the current session.
    # in that case it would be a resubmission
    if not qv_domains.is_admin(domain):
        return web.notacceptable()
    uuid = qv_domains.get_active_question(domain)
    data = {
        'uuid': uuid,
        'domain': domain,
        'vote_url': config.base_url + domain + '/',
        'get_url': urls['results_get']['url_pattern'] % (domain, uuid),
        'existing_questions': [],
        'active_question': qv_domains.get_active_question(domain),
        'activate_question_url': urls['question_get']['url_pattern'] % (domain, ''),
        'delete_url': urls['answers_post']['url_pattern'] % (domain, ''),
        'get_results_url': urls['results_get']['url_pattern'] % (domain, ''),
        'history_url': urls['history']['url_pattern'] % (domain),
    }
    qs = qv_questions.find({'domain': domain}).sort([('inserted_at', -1)])
    qsd = [q for q in qs]
    data['existing_questions'] = qsd
    return renderer.view(data)
def POST(self):
    f = web.input()
    try:
        f.source.decode('utf8')
    except:
        raise web.notacceptable('please submit code encoded in utf8')
    stdout, stderr, exitstatus, runtime, error = sj_client.run(
        f.language, f.source, f.stdin)
    return self.GET(stdout, stderr, exitstatus, runtime, error)
def _raise_if_dir_or_not_servable(filepath):
    # Raise 406 if not servable
    p = _get_local_path(filepath)
    if (os.path.dirname(filepath) not in _config['directories']
            or os.path.isdir(p)):
        # request a file which this server isn't supposed to serve!
        raise web.notacceptable()
def PUT(self, md5_sum):
    try:
        relational_util.StatsStructToDatabaseLevelOne(md5_sum)
        response = cjson.encode({'message': 'Package registered to level 1'})
        web.header('Content-Length', str(len(response)))
        return response
    except errors.DataError as exc:
        raise web.notacceptable(exc)
def ReturnError(self, message):
    web.header('Content-type',
               'application/x-vnd.opencsw.pkg;type=error-message')
    response = json.dumps({
        "error_message": unicode(message),
    })
    web.header('Content-Length', len(response))
    raise web.notacceptable(data=response)
def ReturnError(self, message):
    web.header(
        'Content-type', 'application/x-vnd.opencsw.pkg;type=error-message')
    response = cjson.encode({
        "error_message": unicode(message),
    })
    web.header('Content-Length', len(response))
    raise web.notacceptable(data=response)
def raise_if_dir_or_not_servable(filepath):
    """Raise a 406 notacceptable if the filepath isn't supposed to be served,
    or if it's a directory.
    """
    p = get_local_path(filepath)
    if (os.path.dirname(filepath) not in _config['directories']
            or os.path.isdir(p)):
        raise web.notacceptable()
def _raise_if_dir_or_not_servable(filepath): """Raise a 406 notacceptable if the filepath isn't supposed to be served, or if it's a directory. """ p = _get_local_path(filepath) if (os.path.dirname(filepath) not in _config['directories'] or os.path.isdir(p)): # request a file which this server isn't supposed to serve! raise web.notacceptable()
def GET(self, page_num=None):
    mime_type = utils.get_preferred_mimetype(
        ('text/html', 'application/atom+xml'), 'text/html')
    if mime_type == 'text/html':
        if page_num is None:
            return web.seeother(web.url('/pastes/1'))
        return self.recent(int(page_num))
    if mime_type == 'application/atom+xml':
        return self.feed()
    # Should never be called.
    return web.notacceptable()
def PUT(self, md5_sum):
    url_data = web.input(use_in_catalogs='1')
    negative_values = (0, '0', 'False', 'false', 'No', 'no')
    use_in_catalogs = True
    if url_data['use_in_catalogs'] in negative_values:
        use_in_catalogs = False
    try:
        relational_util.StatsStructToDatabaseLevelTwo(
            md5_sum, use_in_catalogs=use_in_catalogs)
        response = cjson.encode({'message': 'Package registered to level 2'})
        web.header('Content-Length', str(len(response)))
        return response
    except errors.DataError as exc:
        raise web.notacceptable(exc)
def POST(self):
    global current_path
    i = web.input()
    p = loads(i['path'])
    zoom = float(i['zoom'])
    if 'latitude' in i:
        latitude = float(i['latitude'])
    else:
        latitude = 0.0
    if 'longitude' in i:
        longitude = float(i['longitude'])
    else:
        longitude = 0.0
    geo_location = (latitude, longitude)
    if not geo_fence.valid_position(geo_location):
        return web.notacceptable()
    # pixel_ratio = float(i['pixel_ratio'])
    for s in p[1]['segments']:
        s[0] /= zoom
        s[1] /= zoom
    new_path_cond.acquire()
    try:
        env = {}
        for (k, v) in web.ctx.env.items():
            if type(v) is str:
                env[k] = v
        d = {
            'path': p,
            'env': env,
            'dummy': range(1, 2048),  # dummy data to stop proxy buffering
        }
        current_path = dumps(d)
        new_path_cond.notifyAll()
        # store relevant logs:
        fname = abspath + '/logs/graphotti_%s.json' % str(datetime.now())
        with open(fname, 'w') as f:
            log_data = {
                'path': p,
                'env': env,
                'longitude': longitude,
                'latitude': latitude,
                'timestamp': str(datetime.now())
            }
            f.write(dumps(log_data))
    finally:
        new_path_cond.release()
    return web.ok()
def POST(self, hash): self.get_user() problem = self.check_problem(hash) tests = db.select("tests", where="problem_id=$id", vars={ "id": problem.id }).list() f = web.input() try: f.source.decode('utf8') except: raise web.notacceptable('please submit code encoded in utf8') successful_tests = 0 total_runtime = 0 error_counts = defaultdict(lambda: 0) for test in tests: stdout, stderr, exitstatus, runtime, error = sj_client.run( f.language, f.source, test.stdin, custom_timelimit=test.timelimit) total_runtime += runtime if error: error_counts[error] += 1 elif stdout == str(test.stdout): successful_tests += 1 else: error_counts["wrong_answer"] += 1 if len(error_counts) == 1: error_counts = error_counts.keys()[0] else: error_counts = ", ".join( "%d %s" % (x[1], x[0]) for x in sorted(error_counts.iteritems())) db.insert("submissions", problem_id=problem.id, source=f.source, ip=web.ctx.ip, language=f.language, tests_passed=successful_tests, created_at=timestamp(), user_id=self.user_id, errors=error_counts, runtime=total_runtime) return self.render( hash, "%d/%d tests passed in %0.2f seconds.%s" % (successful_tests, len(tests), total_runtime, error_counts and " Errors: %s" % error_counts or ""), problem)
def GET(self, domain):
    # verify the cookie is not set to the current session.
    # in that case it would be a resubmission
    if not qv_domains.is_admin(domain):
        return web.notacceptable()
    uuid = qv_domains.get_active_question(domain)
    data = {
        'uuid': uuid,
        'domain': domain,
        'vote_url': config.base_url + domain + '/',
        'get_url': urls['results_get']['url_pattern'] % (domain, uuid)
    }
    return renderer.history(data)
def parse_event(self, get=False, post=False, **kwargs):
    # get event data (POST or GET)
    event_data = self._get_event_data(get=get, post=post)
    print 'got event data: %s' % event_data
    # validate the event data
    is_valid = validate_event_data(event_data)
    print 'is valid: %s' % is_valid
    # if it's not valid, return an error
    if not is_valid:
        # the data isn't acceptable, 406
        # (web.py HTTPErrors only take effect when raised)
        raise web.notacceptable()
    # now we know we have valid event data
    # pass it off to our handler
    try:
        self.event_handler(event_data, **kwargs)
    except Exception, ex:
        # client error
        raise web.badrequest()
def POST(self, domain):
    if not qv_domains.is_admin(domain):
        return web.notacceptable()
    user_data = web.input()
    if hasattr(user_data, "delete_question") and \
            hasattr(user_data, 'uuid'):
        print("Deleting")
        qv_questions.remove({'uuid': user_data.uuid})
    elif hasattr(user_data, 'options') and \
            hasattr(user_data, 'correct') and \
            hasattr(user_data, 'question') and \
            hasattr(user_data, 'domain') and \
            hasattr(user_data, 'uuid'):
        if user_data.uuid == '':
            user_data.uuid = str(uuid4())
        if user_data.domain == '':
            user_data.domain = str(domain)
        options_str = user_data.options.split(',')
        correct_str = user_data.correct.split(',')
        doc = {
            'options': [o.strip() for o in options_str],
            'correct': [o.strip() for o in correct_str],
            'question': user_data.question,
            'uuid': user_data.uuid,
            'domain': user_data.domain,
            'inserted_at': datetime.now()
        }
        if hasattr(user_data, 'image'):
            doc['image'] = user_data.image
        # this is a delete request if the question is empty:
        if len(user_data.question) > 0:
            qv_questions.update({'uuid': user_data.uuid}, doc, upsert=True)
        else:
            qv_questions.remove({'uuid': user_data.uuid})
    else:
        web.internalerror("not all required data was provided: "
                          "user_data=%s" % user_data)
    return web.ok()  # web.seeother('/%s/%s/editor' % (domain, admin_url))
def GET(self):
    try:
        query = web.input().query
    except:
        raise web.HTTPError("400 REQUEST ERROR: No query supplied")
    if 'HTTP_ACCEPT' in web.ctx.env:
        accept = web.ctx.env['HTTP_ACCEPT']
    else:
        accept = 'application/sparql-results+xml'
    if accept not in ACCEPT_LIST:
        raise web.notacceptable(
            data='No suitable response format available. (Supported formats:'
                 + ', '.join(ACCEPT_LIST) + ')')
    web.header('Content-Type', accept)
    msg, response = conn.sparqlapi(query, accept)
    if msg:
        raise web.HTTPError("400 REQUEST ERROR: " + msg)
    else:
        return response
def __format(*vargs, **kv):
    fmt = 'json'
    m = re.search('application/(.+)', web.ctx.env.get('HTTP_ACCEPT'))
    if m:
        fmt = m.group(1)
    if supported and fmt not in supported:
        raise web.notacceptable()
    try:
        o = func(*vargs, **kv)
    except HTTPError as e:
        raise e
    except Exception as e:
        tb.print_exc()
        web.header('content-type', 'application/%s' % fmt)
        web.ctx.status = '400 bad request'
        return json.dumps(error.error(e))
    else:
        if isinstance(o, (str, unicode)):
            return o
        elif isinstance(o, dict) and 'status' in o and o['status'] == 'error':
            web.ctx.status = '400 bad request'
            web.header('content-type', 'application/%s' % fmt)
            return json.dumps(o)
def GET(self, param):
    return web.notacceptable()
def GET_AUTH(self, *args, **kwargs):  # pylint: disable=unused-argument
    raise web.notacceptable()
def dir_not_servable(path):
    p = local_path(path)
    if (os.path.dirname(path) not in _config['directories']
            or os.path.isdir(p)):
        raise web.notacceptable()
def POST(self, name):
    """
    >>> document = u'<entry><title>Hello World!</title><author><name>Stefan Freudenberg</name></author><updated>2011-04-10T15:10:00+0200</updated><category term=\"webpy\"/><content>Lorem ipsum dolor sit</content></entry>'
    >>> headers = {'Slug': 'hello-world', 'Content-Type': 'application/atom+xml;type=entry'}
    >>> req = app_atom.request('/collection/entries', method='POST', headers=headers, data=document)
    >>> req.status
    '201 Created'
    >>> req.headers['Location']
    'http://0.0.0.0:8080/document/hello-world'
    >>> req.headers['Location'] == req.headers['Content-Location']
    True
    >>> req.headers['Content-Type']
    'application/atom+xml;type=entry;charset=\"utf-8\"'
    >>> len(req.data) == int(req.headers['Content-Length'])
    True
    >>> len(req.data) > len(document)
    True
    """
    if name not in db.collection_names():
        raise web.notfound()
    collection = db[name]
    if web.ctx.env.get('CONTENT_TYPE') != 'application/atom+xml;type=entry':
        raise web.notacceptable()
    entry = objectify.fromstring(web.data())
    if 'HTTP_SLUG' in web.ctx.env:
        slug = defaultfilters.slugify(web.ctx.env.get('HTTP_SLUG'))
    else:
        slug = defaultfilters.slugify(entry.title.text)
    try:
        entry.updated
    except AttributeError:
        entry.updated = A.updated(datetime.datetime.now())
    entries = db.entries
    entries.insert({
        'slug': slug,
        'title': entry.title.text,
        'updated': entry.updated.text,
        'author': entry.author.name.text,
        'content': etree.tostring(entry.content),
        'categories': [cat.get('term') for cat in entry.category]
    })
    location = web.ctx.home + '/document/' + slug
    tree = xmlify(db.entries.find_one({'slug': slug}),
                  web.ctx.home + '/document/')
    body = etree.tostring(tree)
    web.header('Location', location)
    web.header('Content-Type',
               'application/atom+xml;type=entry;charset="utf-8"')
    web.header('Content-Length', len(body))
    web.header('Content-Location', location)
    raise web.created(body)
def PUT(self, catrel_name, arch_name, osrel_name, md5_sum): """Adds package to a catalog. When pycurl calls this function, it often hangs, waiting. A fix for that is to add the 'Content-Length' header. However, it sometimes still gets stuck and I don't know why. """ if catrel_name not in CAN_UPLOAD_TO_CATALOGS: # Updates via web are allowed only for the unstable catalog. # We should return an error message instead. raise web.forbidden('Not allowed to upload to %s' % catrel_name) try: if arch_name == 'all': raise checkpkg_lib.CatalogDatabaseError( "There is no 'all' catalog, cannot proceed.") srv4 = models.Srv4FileStats.selectBy(md5_sum=md5_sum).getOne() parsed_basename = opencsw.ParsePackageFileName(srv4.basename) if parsed_basename["vendortag"] != "CSW": raise checkpkg_lib.CatalogDatabaseError( "Package vendor tag is %s instead of CSW." % parsed_basename["vendortag"]) if not srv4.registered: # Package needs to be registered for releases stats = srv4.GetStatsStruct() # This can throw CatalogDatabaseError if the db user doesn't have # enough permissions. package_stats.PackageStats.ImportPkg(stats, True) srv4 = models.Srv4FileStats.selectBy(md5_sum=md5_sum).getOne() c = checkpkg_lib.Catalog() sqo_osrel, sqo_arch, sqo_catrel = models.GetSqoTriad( osrel_name, arch_name, catrel_name) # See if there already is a package with that catalogname. res = c.GetConflictingSrv4ByCatalognameResult( srv4, srv4.catalogname, sqo_osrel, sqo_arch, sqo_catrel) if res.count() == 1: # Removing old version of the package from the catalog for pkg_in_catalog in res: srv4_to_remove = pkg_in_catalog.srv4file c.RemoveSrv4(srv4_to_remove, osrel_name, arch_name, catrel_name) # See if there already is a package with that pkgname. res = c.GetConflictingSrv4ByPkgnameResult( srv4, srv4.pkginst.pkgname, sqo_osrel, sqo_arch, sqo_catrel) if res.count() == 1: # Removing old version of the package from the catalog for pkg_in_catalog in res: srv4_to_remove = pkg_in_catalog.srv4file c.RemoveSrv4(srv4_to_remove, osrel_name, arch_name, catrel_name) # This is set by basic HTTP auth. username = web.ctx.env.get('REMOTE_USER') c.AddSrv4ToCatalog(srv4, osrel_name, arch_name, catrel_name, who=username) web.header( 'Content-type', 'application/x-vnd.opencsw.pkg;type=catalog-update') response = cjson.encode([ u"Added to catalog %s %s %s" % (catrel_name, arch_name, osrel_name), u"%s" % srv4.basename, u"%s" % srv4.md5_sum, ]) web.header('Content-Length', len(response)) return response except ( checkpkg_lib.CatalogDatabaseError, sqlobject.dberrors.OperationalError), e: web.header( 'Content-type', 'application/x-vnd.opencsw.pkg;type=error-message') response = cjson.encode({ "error_message": unicode(e), }) web.header('Content-Length', len(response)) raise web.notacceptable(data=response)
def inner(*args, **kwargs):
    accept = web.ctx.environ.get('HTTP_ACCEPT', '').split(',')
    if 'application/json' not in accept:
        raise web.notacceptable()
    return func(*args, **kwargs)
def GET(self, domain, admin_url):
    if not qv_domains.is_valid_admin(domain, admin_url):
        return web.notacceptable()
    web.setcookie("quuid_" + domain, admin_url, expires=-1)
    return web.ok()
def LTI_GET(self, *args, **kwargs):
    raise web.notacceptable()
def LTI_POST(self, *args, **kwargs):  # pylint: disable=unused-argument
    raise web.notacceptable()
def POST(self, domain, question):
    if not qv_domains.is_admin(domain):
        return web.notacceptable()
    qv_domains.set_active_session(domain, question)
    # qv_collection.remove({'uuid': question, 'domain': domain})
    return web.ok()
def PUT(self, catrel_name, arch_name, osrel_name, md5_sum): """Adds package to a catalog. When pycurl calls this function, it often hangs, waiting. A fix for that is to add the 'Content-Length' header. However, it sometimes still gets stuck and I don't know why. """ configuration.SetUpSqlobjectConnection() if catrel_name != 'unstable': # Updates via web are allowed only for the unstable catalog. # We should return an error message instead. raise web.notfound() try: if arch_name == 'all': raise checkpkg_lib.CatalogDatabaseError( "There is no 'all' catalog, cannot proceed.") srv4 = models.Srv4FileStats.selectBy(md5_sum=md5_sum).getOne() parsed_basename = opencsw.ParsePackageFileName(srv4.basename) if parsed_basename["vendortag"] != "CSW": raise checkpkg_lib.CatalogDatabaseError( "Package vendor tag is %s instead of CSW." % parsed_basename["vendortag"]) if not srv4.registered: # Package needs to be registered for releases stats = srv4.GetStatsStruct() # This can throw CatalogDatabaseError if the db user doesn't have # enough permissions. package_stats.PackageStats.ImportPkg(stats, True) srv4 = models.Srv4FileStats.selectBy(md5_sum=md5_sum).getOne() c = checkpkg_lib.Catalog() sqo_osrel, sqo_arch, sqo_catrel = pkgdb.GetSqoTriad( osrel_name, arch_name, catrel_name) # See if there already is a package with that catalogname. res = c.GetConflictingSrv4ByCatalognameResult( srv4, srv4.catalogname, sqo_osrel, sqo_arch, sqo_catrel) if res.count() == 1: # Removing old version of the package from the catalog for pkg_in_catalog in res: srv4_to_remove = pkg_in_catalog.srv4file c.RemoveSrv4(srv4_to_remove, osrel_name, arch_name, catrel_name) # See if there already is a package with that pkgname. res = c.GetConflictingSrv4ByPkgnameResult(srv4, srv4.pkginst.pkgname, sqo_osrel, sqo_arch, sqo_catrel) if res.count() == 1: # Removing old version of the package from the catalog for pkg_in_catalog in res: srv4_to_remove = pkg_in_catalog.srv4file c.RemoveSrv4(srv4_to_remove, osrel_name, arch_name, catrel_name) c.AddSrv4ToCatalog(srv4, osrel_name, arch_name, catrel_name) web.header('Content-type', 'application/x-vnd.opencsw.pkg;type=catalog-update') response = json.dumps([ u"Added to catalog %s %s %s" % (catrel_name, arch_name, osrel_name), u"%s" % srv4.basename, ]) web.header('Content-Length', len(response)) return response except (checkpkg_lib.CatalogDatabaseError, sqlobject.dberrors.OperationalError), e: web.header('Content-type', 'application/x-vnd.opencsw.pkg;type=error-message') response = json.dumps({ "error_message": unicode(e), }) web.header('Content-Length', len(response)) raise web.notacceptable(data=response)