def post(self, id):
    """Handle a new-comment POST for blog entry `id`.

    Requires a valid login cookie; otherwise redirects to /blog/login.
    An unknown entry redirects to /blog; an empty comment re-renders the
    comment form with an error message.
    """
    cookie_val = self.cookie_check()
    if cookie_val:
        user = User.get_user(cookie_val)
        e = Blog.get_blog_entry(id)
        if e:
            text_comment = self.request.get("comments")
            if text_comment:
                # Preserve line breaks as HTML, then whitelist-sanitize
                # the result before storing it.
                text_comment = text_comment.replace('\n', '<br>')
                text_comment = db.Text(text_comment)
                text_comment = db.Text(
                    bleach.clean(text_comment, tags=tags_content))
                new_comment = Comment(comment=text_comment,
                                      comment_author=user.name)
                new_comment.put()
                # Link the comment to the entry by its numeric datastore id.
                i = new_comment.key().id()
                e.comments.append(i)
                e.put()
                self.redirect('/blog/%s' % str(id))
            else:
                error = "Enter comment please"
                comments_list = Comment.get_comments_list(e, 0)
                self.render("newcomment.html", e=e, cookie_val=cookie_val,
                            user=user.name, comments_list=comments_list,
                            error=error, link=id)
        else:
            # Entry id does not resolve to a blog entry.
            self.redirect('/blog')
    else:
        # Not logged in.
        self.redirect('/blog/login')
def __init__(self, credentials=None):
    """Initialize Client API object for Compute Engine manipulation.

    If authorized HTTP is not given by parameter, it uses user ID stored
    in Memcache and fetches credentials for that user.

    Args:
      credentials: OAuth2 credentials of current user.
    """
    if credentials:
        # Fresh credentials supplied: persist them (datastore + memcache)
        # so later parameterless constructions can recover them.
        user_id = users.get_current_user().user_id()
        credentials_in_json = credentials.to_json()
        authorized_user = AuthorizedUserId.get_or_insert(
            self.USER_ID_KEY, user_id=user_id,
            credentials=db.Text(credentials_in_json))
        memcache.set(self.USER_CREDENTIALS_KEY, credentials_in_json)
        # get_or_insert may have returned a pre-existing entity; refresh
        # it if the stored user or credentials are stale.
        if (authorized_user.user_id != user_id or
                str(authorized_user.credentials) != credentials_in_json):
            authorized_user.user_id = user_id
            authorized_user.credentials = db.Text(credentials_in_json)
            authorized_user.put()
    else:
        # No credentials given: try memcache first, fall back to the
        # datastore copy.
        credentials_in_json = memcache.get(self.USER_CREDENTIALS_KEY)
        if not credentials_in_json:
            authorized_user = AuthorizedUserId.get_by_key_name(
                self.USER_ID_KEY)
            credentials_in_json = str(authorized_user.credentials)
        credentials = OAuth2Credentials.from_json(credentials_in_json)
    self.compute_api = build('compute', self.COMPUTE_API_VERSION,
                             http=credentials.authorize(httplib2.Http()))
def isAuthor(self, article=None, comment=None, comments=None, update=None):
    """Decide whether this user authored an article or comment(s).

    Matching is by exact name or, as a fallback, by email address (full
    address or its local part). When `update` is set, matched authorship
    is rewritten to the canonical user name and persisted.

    Args:
      article: article entity (checked when `comments` is not given, and
        used as the update target for comment matches).
      comment: a single pickled comment string.
      comments: iterable of pickled comment strings.
      update: when truthy, rewrite matched author fields and put().

    Returns:
      True/False authorship verdict.

    Bug fixes vs. the original:
      * `commentid` was only assigned in the `comments` branch, so the
        single-`comment` branch raised NameError when `update` and
        `article` were set; it is now initialized to -1 (no known index,
        so no in-place comment update is attempted there).
      * The `comments` branch computed `isAuthor` but fell through to
        `return False` after persisting updates; it now returns the
        computed verdict.
    """
    # No comment index known yet; >= 0 only inside the comments loop.
    commentid = -1
    # filter out bad source
    if article and not comments:
        if article.provider != self.provider:
            return False
        # exact match scenario
        elif article.author == self.name:
            return True
        # old match scenario (email address, full or local part)
        elif self.emailaddress:
            if article.author == self.emailaddress or article.author == self.emailaddress.split(
                    '@')[0]:
                # should we update the article author?
                if update and self.name:
                    article.author = self.name
                    article.put()
                return True
    elif comments:
        commentid = 0
        isAuthor = False
        for comment in comments:
            # Comments are pickled [text, author, time] triples.
            commentObj = loads(str(comment))
            commentAuthor = str(commentObj[1]).split('@', 2)[0]
            if commentAuthor == self.name:
                isAuthor = True
                if not (update and article):
                    return isAuthor
            elif self.emailaddress:
                if commentAuthor == self.emailaddress or commentAuthor == self.emailaddress.split(
                        '@')[0]:
                    isAuthor = True
                    if update and article:
                        # Re-pickle with the canonical name substituted.
                        pickled = db.Text(
                            dumps([commentObj[0], self.name, commentObj[2]]))
                        article.comments[commentid] = pickled
                    else:
                        return isAuthor
            commentid += 1
        if update and article:
            article.put()
        return isAuthor
    elif comment:
        commentObj = loads(str(comment))
        commentAuthor = str(commentObj[1]).split('@', 2)[0]
        if commentAuthor == self.name:
            return True
        elif self.emailaddress:
            if commentAuthor == self.emailaddress or commentAuthor == self.emailaddress.split(
                    '@')[0]:
                # With no comment index available (commentid == -1) the
                # in-place rewrite is skipped; previously this raised
                # NameError.
                if update and article and commentid >= 0:
                    pickled = db.Text(
                        dumps([commentObj[0], self.name, commentObj[2]]))
                    article.comments[commentid] = pickled
                    article.put()
                return True
    return False
def post(self):
    """Handle the lua-tasks form: either delete an existing task or add one."""
    # Delete request?
    key_to_delete = self.request.get('delete')
    if key_to_delete:
        LuaTasks.delete_lua_task(key_to_delete)
        self.redirect('lua_script')
        return

    # Add request: collect the form fields.
    now = datetime.datetime.now()
    script_text = db.Text(self.request.get('lua_script'))
    aggregator_text = db.Text(self.request.get('lua_aggregator'))
    # The combined selector encodes "pagesets-chromiumrev-skiarev".
    pagesets_type, chromium_rev, skia_rev = self.request.get(
        'pagesets_type_and_chromium_build').split('-')
    task_description = self.request.get('description') or 'None'

    LuaTasks(username=self.user.email(),
             lua_script=script_text,
             lua_aggregator=aggregator_text,
             pagesets_type=pagesets_type,
             chromium_rev=chromium_rev,
             skia_rev=skia_rev,
             requested_time=now,
             description=task_description).put()
    self.redirect('lua_script')
class KeyboardDB(db.Model):
    """Datastore record for one user-created keyboard layout.

    Bug fixes vs. the original:
      * db.Text(u'...') is a *value* type (a unicode subclass), not a
        property; assigned as a class attribute it was never persisted.
        Those fields are now db.TextProperty with the old value as default.
      * db.StringProperty(u'') passed u'' positionally as verbose_name,
        not as a default; the evident intent was default=u''.
    """
    # Ordering/index number for display.
    index = db.IntegerProperty()
    kbName = db.StringProperty(default=u'')
    langCode = db.StringProperty(default=u'')
    # auto_now=True already implies the timestamp is set on creation;
    # auto_now_add kept for compatibility with the original declaration.
    lastUpdate = db.DateTimeProperty(auto_now=True, auto_now_add=True)
    jsonKbData = db.TextProperty(default=u'DEFAULT')
    # NOTE(review): 'intialized' typo kept — existing data/code may depend
    # on the exact sentinel string.
    kbdata = db.TextProperty(default=u'intialized')
    jsonRules = db.TextProperty(default=u'')
    creatorId = db.StringProperty(default=u'')
def get_value_for_datastore(self, model_instance):
    """Serialize the station-cell collection to a JSON db.Text for storage.

    Returns db.Text("[]") when the underlying value is None.

    Bug fix: the original called list(cells) *before* the None check, so a
    None value raised TypeError and the "[]" fallback was unreachable.
    """
    cells = super(StationCellProperty,
                  self).get_value_for_datastore(model_instance)
    if cells is None:
        return db.Text("[]")
    return db.Text(json.dumps(list(cells), cls=ModelEncoder))
def harvest_facebook_posts(self, social_account):
    """Pull new Facebook posts for `social_account` into SocialPost rows.

    Resumes from the most recent stored post date for the account, skips
    posts already stored, and persists only 'status' and 'video' types.
    """
    recent_post_date = None
    start_post_date = None
    # Connect to facebook
    fb_connector = connectors.facebook.FacebookConnector(
        app_id=config.FACEBOOK_API_APP_ID,
        app_secret=config.FACEBOOK_API_APP_SECRET,
        app_url=config.APP_URL,
        graph_url=config.FACEBOOK_API_GRAPH_URL,
        oauth_url=config.FACEBOOK_API_OAUTH_URL)
    # Get most recent post date to see what date we should start pulling from
    query = datamodel.SocialPost.gql(
        'WHERE social_account = :1 ORDER BY original_post_date DESC',
        social_account)
    last_social_post = query.get()
    # If a post exists, then lets load from that last date... if not, then
    # lets try to load everything
    if last_social_post:
        recent_post_date = last_social_post.original_post_date
    else:
        recent_post_date = None
        #start_post_date = datetime.datetime(2000,1,1)
    json_posts = fb_connector.get_posts(social_account.user_id,
                                        social_account.access_token,
                                        recent_post_date, start_post_date)
    # Parse JSON returns
    for post in json_posts['data']:
        # If item exists, dont restore it.
        # TODO: Make this query more efficient!!! Do similar to the
        # harvest_friends...
        query_check = datamodel.SocialPost.gql(
            'WHERE social_account_item_id = :1', post['id'])
        if (query_check.count() < 1):
            post_date = datetime.datetime.strptime(
                post['created_time'], "%Y-%m-%dT%H:%M:%S+0000")
            if (post['type'] == 'status'):
                # NOTE(review): assumes 'message' is present on status
                # posts — KeyError otherwise; confirm against the API.
                social_post = datamodel.SocialPost(
                    user=social_account.user,
                    social_account=social_account,
                    social_account_item_id=post['id'],
                    post_type='status',
                    raw_text=db.Text(post['message']),
                    original_post_date=post_date)
                social_post.put()
            elif (post['type'] == 'video'):
                # NOTE(review): likewise assumes 'message' and 'link'
                # exist on video posts.
                social_post = datamodel.SocialPost(
                    user=social_account.user,
                    social_account=social_account,
                    social_account_item_id=post['id'],
                    post_type='video',
                    raw_text=db.Text(post['message']),
                    url_list=[db.Link(post['link'])],
                    original_post_date=post_date)
                social_post.put()
            else:
                # Other post types are intentionally ignored.
                pass
def insert_gtest_results(build_step_key):
    """Inserts GTest results into the datastore, replacing any existing ones.

    Also records used parser version.

    Reads the step log from cloud storage or blobstore, parses it, stores
    one GTestResult per *failed* test, bumps the step's parser version in
    a transaction, and finally deletes results left over from older
    parser versions.
    """
    step = BuildStep.get(build_step_key)
    log_contents = ''
    if step.log_gs:
        # Preferred source: the cloud-storage copy of the log.
        with cloudstorage.open(step.log_gs) as gs_file:
            log_contents = html2text(gs_file.read().decode('utf-8', 'replace'))
    else:
        try:
            blob_reader = blobstore.BlobReader(step.log_stdio)
            log_contents = html2text(blob_reader.read().decode(
                'utf-8', 'replace'))
        except (ValueError, blobstore.BlobNotFoundError) as e:
            # A missing/invalid blob cannot be retried successfully.
            raise deferred.PermanentTaskFailure(e)
    gtest_results = gtest_parser.parse(log_contents)
    to_put = []
    for fullname, result in gtest_results.iteritems():
        # Only store failure results.
        if result['is_successful']:
            continue
        if isinstance(result['log'], unicode):
            log = db.Text(result['log'])
        else:
            log = db.Text(result['log'], encoding='utf-8')
        result_entity = GTestResult(
            parent=db.Key.from_path('GTestResult', str(step.key())),
            build_step=step,
            time_finished=step.time_finished,
            gtest_parser_version=gtest_parser.VERSION,
            is_crash_or_hang=result['is_crash_or_hang'],
            fullname=fullname,
            run_time_ms=result['run_time_ms'],
            log=log)
        to_put.append(result_entity)
    # Batched writes to stay within datastore limits.
    for chunk in chunks(to_put, BATCH_SIZE):
        db.put(chunk)

    def tx_parser_version():
        # Transactionally bump the step's recorded parser version; returns
        # (old, new) so stale results can be purged below.
        step = BuildStep.get(build_step_key)
        orig_parser_version = step.gtest_parser_version
        if step.gtest_parser_version < gtest_parser.VERSION:
            step.gtest_parser_version = gtest_parser.VERSION
            step.put()
        return (orig_parser_version, step.gtest_parser_version)

    _, parser_version = \
        db.run_in_transaction_custom_retries(10, tx_parser_version)
    # Drop results produced by older parser versions.
    query = GTestResult.all(keys_only=True)
    query.filter('build_step =', build_step_key)
    query.filter('gtest_parser_version <', parser_version)
    db.delete(query)
def ensure_sanitized(self):
    """Sanitize raw_html via cajole, caching the result on the entity.

    Skips re-sanitizing when the cached output is newer than the last
    modification and was produced by the current sanitizer version.

    Raises:
      Exception: when cajole reports an error (message in its 'html').

    Bug fix: the freshness guard reads self.sanitizer_used, but the
    original wrote self.sanitizer, so the guard never became true and
    every call re-sanitized and re-put() the entity.
    """
    if (self.last_sanitized >= self.last_modified
            and self.sanitizer_used == Exercise._CURRENT_SANITIZER):
        return
    cajoled = cajole.cajole(self.raw_html)
    if 'error' in cajoled:
        raise Exception(cajoled['html'])
    self.safe_html = db.Text(cajoled['html'])
    self.safe_js = db.Text(cajoled['js'])
    self.last_sanitized = datetime.datetime.now()
    # was: self.sanitizer = Exercise._CURRENT_SANITIZER
    self.sanitizer_used = Exercise._CURRENT_SANITIZER
    self.put()
class Iniciativa(db.Expando):
    """Expando model for a legislative initiative (iniciativa).

    Bug fix: `sinopsis` and `titulo` were declared as db.Text() — a value
    type (unicode subclass), not a property — so as class attributes they
    were never persisted as declared fields. They are now TextProperty.
    """
    uuid = db.StringProperty()
    link_gaceta = db.StringProperty()
    rol_iniciativa = db.StringProperty()
    # Long-form fields: TextProperty (unindexed, >500 byte capable).
    sinopsis = db.TextProperty()
    titulo = db.TextProperty()
    tramite = db.StringProperty()
    fecha = db.DateProperty()
    fecha_aprobacion = db.DateProperty()
    fecha_publicacion = db.DateProperty()
    diputado = db.StringProperty()
def on_pre_assignment_submission(handler, user, form):
    """Persist the pre-assignment form and publish the user's introduction.

    Stores every form field on a FormSubmission record, then writes the
    sanitized introduction onto the user's wiki profile page and redirects
    to the confirmation page.
    """
    record = FormSubmission(form_name='pre', user=user)
    for field_name, field_value in form.data.items():
        setattr(record, field_name, db.Text(field_value))
    record.put()

    # Whitelist of HTML tags allowed in the published introduction.
    safe_tags = ['p', 'i', 'b', 'a', 'br']
    page = WikiPage.get_page(user, unit=None, create=True)
    page.text = db.Text(
        bleach.clean(form.introduction.data, tags=safe_tags))
    page.put()

    handler.redirect('confirm?page=conf')
def post(self, id):
    """Apply a state transition ('state' form field) to event `id`.

    Each transition is gated by the caller's access rights; successful
    transitions (except rsvp) append an HDLog audit entry. Finally the
    event page is re-rendered with newline-to-<br/> formatting applied.
    """
    event = Event.get_by_id(int(id))
    user = users.get_current_user()
    access_rights = UserRights(user, event)
    state = self.request.get('state')
    if state:
        desc = ''
        if state.lower() == 'approve' and access_rights.can_approve:
            event.approve()
            desc = 'Approved event'
        if state.lower(
        ) == 'notapproved' and access_rights.can_not_approve:
            event.not_approved()
            desc = 'Event marked not approved'
        # rsvp intentionally leaves desc empty: no audit log entry.
        if state.lower() == 'rsvp' and user:
            event.rsvp()
            notify_owner_rsvp(event, user)
        if state.lower() == 'staff' and access_rights.can_staff:
            event.add_staff(user)
            desc = 'Added self as staff'
        if state.lower() == 'unstaff' and access_rights.can_unstaff:
            event.remove_staff(user)
            desc = 'Removed self as staff'
        if state.lower() == 'onhold' and access_rights.can_cancel:
            event.on_hold()
            desc = 'Put event on hold'
        if state.lower() == 'cancel' and access_rights.can_cancel:
            event.cancel()
            desc = 'Cancelled event'
        if state.lower() == 'delete' and access_rights.can_delete:
            event.delete()
            desc = 'Deleted event'
            notify_deletion(event, user)
        if state.lower() == 'undelete' and access_rights.can_undelete:
            event.undelete()
            desc = 'Undeleted event'
        if state.lower() == 'expire' and access_rights.is_admin:
            event.expire()
            desc = 'Expired event'
        if event.status == 'approved' and state.lower() == 'approve':
            notify_owner_approved(event)
        if desc != '':
            log = HDLog(event=event, description=desc)
            log.put()
    # Render-side formatting only; note this mutates the entity fields
    # without put(), so the formatting is not persisted here.
    event.details = db.Text(event.details.replace('\n', '<br/>'))
    # show_all_nav is consumed by the template via locals().
    show_all_nav = user
    event.notes = db.Text(event.notes.replace('\n', '<br/>'))
    self.response.out.write(
        template.render('templates/event.html', locals()))
def add_unpicklable(self, statement, names):
    """Record a statement whose results cannot be pickled.

    The statement is appended to the session's unpicklables, and every
    global it created is removed and remembered by name (without
    duplicates) so it can be re-created later by re-running the statement.

    Args:
      statement: string, the statement that created new unpicklable
        global(s).
      names: list of strings; the names of the globals created by the
        statement.
    """
    self.unpicklables.append(db.Text(statement))
    for global_name in names:
        self.remove_global(global_name)
        if global_name not in self.unpicklable_names:
            self.unpicklable_names.append(db.Text(global_name))
def write_categories(data, category_key='global'):
    """Create or update the Category row for `category_key`.

    Stores `data` in the row's val field and returns the put() status.
    """
    record = Category.gql("WHERE for_id = :1", category_key).get()
    if record is None:
        # First write for this key: create the row.
        record = Category(for_id=category_key)
    record.val = db.Text(data)
    return record.put()
def post(self):
    """Evaluate a JSON-posted statement in a (possibly new) shell session.

    Optionally records the query in the Searches table with a privacy
    flag, then returns the session key and captured output as JSON.
    """
    self._cross_site_headers()
    try:
        message = simplejson.loads(self.request.body)
    except ValueError:
        self.error(400)
        return

    # Code modified to store each query in a database
    print_statement = '\n'.join(message.get('print_statement'))
    statement = message.get('statement')
    privacy = message.get('privacy')
    if statement != '':
        searches = Searches()
        searches.user_id = users.get_current_user()
        searches.query = print_statement
        if privacy == 'off':
            searches.private = False
        if privacy == 'on':
            searches.private = True
        searches.put()

    session_key = message.get('session')
    printer_key = message.get('printer')
    live = Live()

    if session_key is not None:
        try:
            session = Session.get(session_key)
        except db.Error:
            self.error(400)
            return
    else:
        # New session: seed it with the standard unpicklables and run the
        # pre-exec bootstrap statements.
        session = Session()
        session.unpicklables = [
            db.Text(line) for line in INITIAL_UNPICKLABLES
        ]
        session_key = session.put()
        live.evaluate(PREEXEC, session)
        live.evaluate(PREEXEC_INTERNAL, session)

    try:
        printer = PRINTERS[printer_key]
    except KeyError:
        printer = None

    # Capture evaluation output into an in-memory stream.
    stream = StringIO()
    live.evaluate(statement, session, printer, stream)

    result = {
        'session': str(session_key),
        'output': stream.getvalue(),
    }
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(simplejson.dumps(result))
def get_value_for_datastore(self, model_instance):
    """Serialize the property's value (an object) into a JSON db.Text
    suitable for storage in the datastore.
    """
    raw_value = super(ParsedJSONObjectProperty,
                      self).get_value_for_datastore(model_instance)
    return db.Text(json.dumps(raw_value))
def write_features(data, feature_key='global'):
    """Create or update the Feature row for `feature_key`.

    Stores `data` in the row's val field and returns the put() status.
    """
    record = Feature.gql("WHERE for_id = :1", feature_key).get()
    if record is None:
        # First write for this key: create the row.
        record = Feature(for_id=feature_key)
    record.val = db.Text(data)
    return record.put()
def get_value_for_datastore(self, model_instance):
    """Return the wrapped fake model's datastore value.

    Unindexed properties are wrapped in db.Text; a falsy/missing fake
    model yields None.
    """
    wrapped = getattr(model_instance, self.name)
    if not wrapped:
        return None
    raw_value = wrapped.get_value_for_datastore()
    return raw_value if self.indexed else db.Text(raw_value)
def FetchBase(base, patch):
    """Fetch the content of the file to which the file is relative.

    Args:
      base: the base property of the Issue to which the Patch belongs.
      patch: a models.Patch instance.

    Returns:
      A models.Content instance.

    Raises:
      FetchError: For any kind of problem fetching the content.
    """
    filename, lines = patch.filename, patch.lines
    rev = patching.ParseRevision(lines)
    if rev is not None:
        if rev == 0:
            # rev=0 means it's a new file.
            return models.Content(text=db.Text(u''), parent=patch)
    try:
        # Validate the base URL by round-tripping it through db.Link.
        base = db.Link(base)
    except db.BadValueError:
        msg = 'Invalid base URL: %s' % base
        logging.warn(msg)
        raise FetchError(msg)
    url = _MakeUrl(base, filename, rev)
    logging.info('Fetching %s', url)
    try:
        result = urlfetch.fetch(url)
    except Exception, err:
        msg = 'Error fetching %s: %s: %s' % (url, err.__class__.__name__,
                                             err)
        logging.warn('FetchBase: %s', msg)
        raise FetchError(msg)
    # NOTE(review): `result` is never used and no Content is returned on
    # the success path — the remainder of this function appears to be
    # truncated in this view; confirm against the full source.
def post(self, nick, panel_name='main', n=25):
    """Post a chat event to `nick`'s panel `panel_name`.

    n (clamped to 1..100, default 25) is the number of events echoed
    back. The panel owner can post '!reset' to wipe the panel's events.
    """
    if n <= 0 or n > 100:
        n = 25
    user = globals.user
    owner = datamodel.User.get_user(nick=nick)
    panel = datamodel.PanelList.get_panel(user=owner, name=panel_name)
    #TODO: check access privileges and that the panel is active
    """ Check reset command """
    # Only the panel owner may reset; deletes up to 1000 events.
    if re.match('!reset',
                self.request.get('text')) and owner.nick == user.nick:
        q = db.GqlQuery("SELECT * FROM Event WHERE panel = :1", panel)
        results = q.fetch(1000)
        for r in results:
            r.delete()
        return
    event = datamodel.Event(panel=panel, user=user)
    #TODO: Check what's going on with accented and ñ's, it gives
    # UnicodeDecodeError
    if self.request.charset:
        logging.info("Charset: " + self.request.charset)
    else:
        # Force a charset so request.get() can decode the body.
        self.request.charset = 'utf8'
    try:
        event.text = db.Text(cgi.escape(self.request.get('text')))
    except UnicodeDecodeError:
        # NOTE(review): on decode failure event.text is left unset and
        # the event is still put() below — confirm this is intended.
        logging.info(cgi.escape(self.request.get('text')))
    event.type = 'chat'
    event.author = globals.user.nick
    event.put()
    self.write(user, panel, n)
def post(self, user, account, **kwargs):
    """Create or update an App from a JSON request body.

    kwargs may carry 'app_id'; 'new' (the default) creates an App owned
    by `account`, otherwise the existing App is updated after ownership
    checks. Responds with JSON containing the app id or an error code.

    Bug fixes vs. the original:
      * the missing-'name' branch called logging.exception(exception)
        where `exception` was undefined (NameError), and
      * it did not return, so execution fell through to body['name']
        (KeyError) after already writing the 400 response.
    """
    app_id = (kwargs['app_id'] if 'app_id' in kwargs else 'new')
    body_json = self.request.body
    body = json.loads(body_json)
    if 'name' not in body:
        logging.error('Invalid JSON data: missing "name" in request body')
        self.response.set_status(400)
        self.response.write('Invalid JSON data')
        return
    if app_id == 'new':
        app = App(name=body['name'],
                  created_by=account.key(),
                  editors=[account.key()])
    else:
        app = App.get_by_id(int(app_id))
        if app is None:
            return render_json_response(self, {'error': 'app-not-found'})
        if account.key() not in app.editors:
            return render_json_response(self, {'error': 'access-denied'})
    app.name = body['name']
    # Store the full request payload alongside the app.
    app.body = db.Text(body_json.decode('utf-8'))
    app.put()
    return render_json_response(self, {'id': app.key().id()})
def find_video_links(self, html_message):
    """Extract every <embed> tag from an HTML message.

    Returns a list of db.Text values, each holding the raw markup of one
    embed element.
    """
    embed_nodes = BeautifulSoup(html_message)('embed')
    return [db.Text(str(node)) for node in embed_nodes]
def write_csv_row_objects(csv_file_obj_key): csv_file_obj = CSVFile.get(csv_file_obj_key) event = csv_file_obj.event blob_fd = blobstore.BlobReader(csv_file_obj.blob) blob_fd.seek(0) encoding = csv_file_obj.encoding try: for row_num, row in read_csv(event, blob_fd, encoding): csv_row_obj = CSVRow( ### parent=csv_file_obj.key(), csv_file=csv_file_obj.key(), num=row_num, row=[db.Text(cell) for cell in row]) csv_row_obj.save() # analyse row in background deferred.defer( analyse_row, csv_file_obj.key(), csv_row_obj.key(), _countdown=int(0.2 * int(row_num)) # delay start ) except HeaderException, e: logging.exception(HeaderException) csv_file_obj.header_present = False csv_file_obj.save()
def post(self):
    """Store a pre-processed cap value on the validated instance.

    Reads the bcap request payload, runs it through the cap server's
    pre-processing hook, persists it as the instance data, and replies
    with a bcap success response.
    """
    capValue = self.bcapRequest()
    instance = self.validate_instance()
    # NOTE(review): the second positional arg is db.Text's `encoding`;
    # db.Text raises if the value is already unicode — confirm
    # data_pre_process returns a byte string.
    instance.data = db.Text(self.cap_server.data_pre_process(capValue),
                            'UTF-8')
    instance.put()
    self.bcapResponse(True)
def get_patched_content(self):
    """Get self.patched_content, computing it if necessary.

    This is the content of the file after applying this patch.

    Returns:
      a Content instance.

    Raises:
      engine.FetchError: If there was a problem fetching the old content.
    """
    try:
        # Return the cached entity when it is still resolvable.
        if self.patched_content is not None:
            return self.patched_content
    except db.Error:
        # This may happen when a Content entity was deleted behind our
        # back.
        self.patched_content = None

    old_lines = self.get_content().text.splitlines(True)
    logging.info('Creating patched_content for %s', self.filename)
    chunks = patching.ParsePatchToChunks(self.lines, self.filename)
    new_lines = []
    # Apply each chunk; only the post-patch lines are kept.
    for _, _, new in patching.PatchChunks(old_lines, chunks):
        new_lines.extend(new)
    text = db.Text(''.join(new_lines))
    patched_content = Content(text=text, parent=self)
    patched_content.put()
    # Cache the computed content on this entity for next time.
    self.patched_content = patched_content
    self.put()
    return patched_content
def testExpando(self):
    """Test the Expando superclass.

    Verifies that dynamic (undeclared) attributes on an Expando are
    persisted and queryable, both for list-valued and datetime-valued
    dynamic properties.
    """

    class Song(db.Expando):
        # Only `title` is declared; all other attributes are dynamic.
        title = db.StringProperty()

    crazy = Song(title='Crazy like a diamond',
                 author='Lucy Sky',
                 publish_date='yesterday',
                 rating=5.0)
    oboken = Song(title='The man from Hoboken',
                  author=['Anthony', 'Lou'],
                  publish_date=datetime.datetime(1977, 5, 3))
    # Dynamic attribute set after construction should also persist.
    crazy.last_minute_note = db.Text('Get a train to the station.')
    crazy.put()
    oboken.put()
    # List-valued dynamic property matches on any element.
    self.assertEqual('The man from Hoboken',
                     Song.all().filter('author =', 'Anthony').get().title)
    # Inequality filter on a dynamic datetime property.
    self.assertEqual(
        'The man from Hoboken',
        Song.all().filter('publish_date >',
                          datetime.datetime(1970, 1, 1)).get().title)
def get(self):
    """Render the mobile shell page, creating a Session when needed."""
    # set up the session. TODO: garbage collect old shell sessions
    session_key = self.request.get('session')
    if session_key:
        session = Session.get(session_key)
    else:
        # create a new session
        session = Session()
        session.unpicklables = [
            db.Text(line) for line in INITIAL_UNPICKLABLES
        ]
        session_key = session.put()

    template_file = os.path.join(os.path.dirname(__file__), 'templates',
                                 'shellmobile.html')
    # Login/logout round-trip back to this session's URL.
    session_url = '/shellmobile?session=%s' % session_key
    vars = {
        'server_software': os.environ['SERVER_SOFTWARE'],
        'python_version': sys.version,
        'session': str(session_key),
        'user': users.get_current_user(),
        'login_url': users.create_login_url(session_url),
        'logout_url': users.create_logout_url(session_url),
        # 'undefined' is interpreted client-side as "no tab width set".
        'tabWidth': self.request.get('tabWidth').lower() or 'undefined'
    }
    rendered = webapp.template.render(template_file, vars, debug=_DEBUG)
    self.response.out.write(rendered)
def post(self, room_id, client_id):
    """Route a client's message: buffer it or forward it to the collider.

    Validates room and client registration under LOCK; if the sender is
    alone in the room the message is saved on the client entity,
    otherwise it is forwarded to the collider outside the lock.
    """
    message_json = self.request.body
    with LOCK:
        client_map = get_room_client_map(room_id)
        occupancy = len(client_map)
        # Check that room exists.
        if occupancy == 0:
            logging.warning('Unknown room: ' + room_id)
            self.write_response('UNKNOWN_ROOM')
            return
        # Check that client is registered.
        if not client_id in client_map:
            logging.warning('Unknown client: ' + client_id)
            self.write_response('UNKNOWN_CLIENT')
            return
        # Check if other client is registered.
        if occupancy == 1:
            # No other client registered, save message.
            logging.info('Saving message from client ' + client_id +
                         ' for room ' + room_id)
            client = client_map[client_id]
            text = db.Text(message_json, encoding='utf-8')
            client.messages.append(text)
            client.put()
            self.write_response('SUCCESS')
            return
    # Other client registered, forward to collider. Do this outside the lock.
    # Note: this may fail in local dev server due to not having the right
    # certificate file locally for SSL validation.
    # Note: loopback scenario follows this code path.
    # TODO(tkchin): consider async fetch here.
    self.send_message_to_collider(room_id, client_id, message_json)
def get(self):
    """Render the mobile shell page with recent and saved searches.

    Creates a new Session when no session key is supplied.

    Bug fix: the fetched 10-row `search_results` list was dead code — the
    template was handed the unbounded `searches_query` Query object
    instead, which would re-run without a limit. The fetched list is now
    passed.
    """
    # set up the session. TODO: garbage collect old shell sessions
    session_key = self.request.get('session')
    if session_key:
        session = Session.get(session_key)
    else:
        # create a new session
        session = Session()
        session.unpicklables = [db.Text(line) for line in INITIAL_UNPICKLABLES]
        session_key = session.put()

    # The 10 most recent public queries.
    searches_query = Searches.all().filter('private',
                                           False).order('-timestamp')
    search_results = searches_query.fetch(10)

    # The current user's own saved searches.
    saved_searches = Searches.all().filter(
        'user_id', users.get_current_user()).order('-timestamp')

    template_file = os.path.join(os.path.dirname(__file__), 'templates',
                                 'shellmobile.html')
    session_url = '/shellmobile?session=%s' % session_key
    vars = {
        'server_software': os.environ['SERVER_SOFTWARE'],
        'python_version': sys.version,
        'session': str(session_key),
        'user': users.get_current_user(),
        'login_url': users.create_login_url(session_url),
        'logout_url': users.create_logout_url(session_url),
        'tabWidth': self.request.get('tabWidth').lower() or 'undefined',
        # was: 'searches': searches_query (unbounded Query object)
        'searches': search_results,
        'saved_searches': saved_searches
    }
    rendered = webapp.template.render(template_file, vars, debug=_DEBUG)
    self.response.out.write(rendered)
def get_value_for_datastore(self, model_instance):
    """Return the datastore values of all wrapped fake models.

    Each value is wrapped in db.Text when the property is unindexed.
    """
    wrapped_models = getattr(model_instance, self.name)
    values = [m.get_value_for_datastore() for m in wrapped_models]
    if self.indexed:
        return values
    return [db.Text(v) for v in values]