def get_entries_by_search_term(search_term):
    with sqlite3.connect("./dailyjournal.db") as conn:
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        db_cursor.execute(
            """
            SELECT e.id, e.entry, e.concept, e.date, e.moodId, m.mood
            FROM Entries e
            JOIN Moods m ON m.id = e.moodId
            WHERE e.entry LIKE ?
            """,
            ("%" + search_term + "%",),  # trailing comma makes this a tuple
        )

        entries = []
        dataset = db_cursor.fetchall()

        for row in dataset:
            # Pass the columns in the Entry constructor's order; the original
            # skipped row['entry'] and passed row['moodId'] twice.
            entry = Entry(row['id'], row['concept'], row['entry'],
                          row['date'], row['moodId'])
            mood = Mood(row['moodId'], row['mood'])
            entry.mood = mood.__dict__
            entries.append(entry.__dict__)

    return json.dumps(entries)
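# Usage sketch for the search helper above (hypothetical data; assumes the
# Entry and Mood classes referenced in the snippet and a populated
# dailyjournal.db):
#
#   results = get_entries_by_search_term("gratitude")
#   # -> JSON array of entry dicts, each with a nested "mood" dict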
def load_testentries():
    """Load test entries into the database."""
    print("test entries")

    Entry.query.delete()

    test_entry1 = Entry(user_id=1, date="31-Oct-2015",
                        text_content="this is my test dream entry",
                        title="title", hours_slept=8, mood_awake=1,
                        clarity=1, lucidity=1, lucid_intent=1)
    test_entry2 = Entry(user_id=1, date="1-Nov-2015",
                        text_content="this is my second test dream entry",
                        title="title", hours_slept=8, mood_awake=1,
                        clarity=1, lucidity=1, lucid_intent=1)

    db.session.add(test_entry1)
    db.session.add(test_entry2)
    db.session.commit()
def deleteEntry(self):
    """Deletes an entry."""
    Entry.deleteEntry(self.currDate)
    self.entry.delete(1.0, tkinter.END)
    self.updateStatusBar("Last action: " + str(self.currDate) + " - Entry deleted")
def test_put(self):
    context = Context(http=MockHttp(HTTP_SRC_DIR),
                      entry="http://example.org/entry/67")
    entry = Entry(context)
    (headers, body) = entry.put(body="<entry></entry>")
    self.assertEqual(200, headers.status)
    self.assertFalse(entry.has_media())
    self.assertEqual(0, len(body))
def get(self, page_id="", operation=""):
    t_values = {}
    logging.info("PageManager get: page_id = %s, operation = %s" % (page_id, operation))

    # Find current_post based on page_id
    if page_id:
        current_post = Entry.get_by_id(long(page_id))
        if current_post:
            logging.info("found post %s from post id %s" % (page_id, current_post.title))
            if operation == "edit":
                t_values['current_post'] = current_post
            elif operation == "publish":
                current_post.is_external_page = True
                current_post.put()
                t_values['alert_message'] = "Post %s has been changed to public" % (current_post.title)
            elif operation == "unpublish":
                current_post.is_external_page = False
                current_post.put()
                t_values['alert_message'] = "Post %s has been changed to private" % (current_post.title)
            elif operation == "delete":
                current_post.delete()
                t_values['alert_message'] = "Post %s has been deleted" % (current_post.title)

    # Show all pages
    posts = Entry.all().filter("entrytype =", 'page')
    t_values['posts'] = posts
    return self.response.out.write(render_template("pages.html", t_values, "", True))
def test_get(self):
    context = Context(http=MockHttp(HTTP_SRC_DIR),
                      entry="http://example.org/entry/67")
    entry = Entry(context)
    (headers, body) = entry.get()
    self.assertEqual(200, headers.status)
    self.assertFalse(entry.has_media())
    self.assertEqual(entry.uri(), "http://example.org/entry/67")
def saveEntry(self):
    """Updates an entry."""
    contents = self.entry.get(1.0, tkinter.END)
    Entry.updateEntry(self.currDate, contents)
    self.updateStatusBar("Last action: " + str(self.currDate) + " - Entry updated")
def test_field_english_string_to_inches(self):
    self.assertEqual(120, Entry._field_english_string_to_inches("10'"))
    # Test when no ' character is provided as the feet symbol in a distance mark
    self.assertIsNone(Entry._field_english_string_to_inches("8"))
    self.assertEqual(1200.25, Entry._field_english_string_to_inches("100' 0.25"))
    self.assertEqual(90.5, Entry._field_english_string_to_inches("7' 6.5"))
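# A minimal sketch of what _field_english_string_to_inches might look like,
# inferred only from the assertions above; the project's real helper may
# handle more formats:
def _field_english_string_to_inches(mark):
    # The feet symbol (') is required; without it the mark is rejected
    if "'" not in mark:
        return None
    feet_part, _, inches_part = mark.partition("'")
    inches = float(inches_part) if inches_part.strip() else 0.0
    return float(feet_part) * 12 + inches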
def get(self, page_slug=""):
    if page_slug:
        t_values = {}
        posts = Entry.all().filter("is_external_page =", True) \
                           .filter("entrytype =", 'page') \
                           .filter("slug =", page_slug)
        if posts.count() == 1:
            logging.info("found one page with slug=%s" % (page_slug))
            posts = posts.fetch(limit=1)
            post = posts[0]
            t_values['post'] = post
            # Find all comments for this page
            comments = Comment.all().filter("entry =", post).order("date")
            t_values['comments'] = comments
        else:
            logging.warning("%d entries share the same slug %s" % (posts.count(), page_slug))

        links = Link.all().order("date")
        t_values['links'] = links
        categories = Category.all()
        t_values['categories'] = categories
        pages = Entry.all().filter("is_external_page =", True).filter("entrytype =", 'page').order("date")
        t_values['pages'] = pages
        return self.response.out.write(render_template("page.html", t_values, "basic", False))
    else:
        self.redirect(uri_for("weblog.index"))
def create_entry(request):
    # The ORM handles quoting; wrapping the values in literal quote
    # characters (as the original did) would store stray ' characters.
    title = request.json["title"]
    text = request.json["text"]
    entry = Entry(title=title, text=text, created_at=datetime.datetime.now())
    entry.save()
    return json.dumps(entry.id)
def test_put_media(self):
    context = Context(http=MockHttp(HTTP_SRC_DIR),
                      entry="http://example.org/images/77")
    entry = Entry(context)
    (headers, body) = entry.get()
    self.assertEqual(200, headers.status)
    self.assertTrue(entry.has_media())
    (headers, body) = entry.put_media(headers={}, body="")
    self.assertEqual(202, headers.status)  # We don't really expect 202 from a PUT, just testing.
def test_set_mark_with_no_mark_string_field_event(self):
    entry_lj = Entry(mde=self.mde_longjump, athlete=self.athlete)
    db.session.add(entry_lj)
    db.session.commit()
    entry_lj.set_mark()
    self.assertEqual(entry_lj.mark, 0)
    self.assertEqual(entry_lj.mark_type, "inches")
    self.assertEqual(entry_lj.mark_to_string(), "")
def test_set_mark_with_no_mark_string_track_event(self):
    entry_1600m = Entry(mde=self.mde_1600m, athlete=self.athlete)
    db.session.add(entry_1600m)
    db.session.commit()
    entry_1600m.set_mark()
    # Test when no mark is provided for the event
    self.assertEqual(entry_1600m.mark, INFINITY_SECONDS)
    self.assertEqual(entry_1600m.mark_type, "seconds")
    self.assertEqual(entry_1600m.mark_to_string(), "")
def test_get_media(self):
    context = Context(http=MockHttp(HTTP_SRC_DIR),
                      entry="http://example.org/images/77")
    entry = Entry(context)
    (headers, body) = entry.get()
    self.assertEqual(200, headers.status)
    self.assertTrue(entry.has_media())
    (headers, body) = entry.get_media()
    self.assertEqual(200, headers.status)
    # assertEqual, not assertTrue: assertTrue would treat the second
    # argument as a failure message and always pass.
    self.assertEqual(headers["content-type"], "image/jpg")
    self.assertEqual(7483, len(body))
def get(self):
    # Collect stats for this blog
    stats = {}
    stats['posts'] = Entry.all().filter("entrytype =", "post").filter("is_external_page =", True).count()
    stats['pages'] = Entry.all().filter("entrytype =", "page").filter("is_external_page =", True).count()
    stats['comments'] = Comment.all().count()
    stats['categories'] = Category.all().count()
    stats['links'] = Link.all().count()

    t_values = {}
    t_values['stats'] = stats
    return self.response.out.write(render_template("index.html", t_values, "", True))
def example_data():
    Entry.query.delete()
    Todo.query.delete()
    User.query.delete()

    # Users
    brighticorn = User(name='Brighticorn', user_id=1, email='*****@*****.**',
                       password='******', zipcode='94566')
    pairprogrammer = User(name='PairProgrammer', user_id=2, email='*****@*****.**',
                          password='******', zipcode='94588')

    # Entries
    be1 = Entry(user_id=1, date='2018-10-3', title='Entry Title',
                text='Entry body text', quote='Some random quote for the day',
                weather='Sunny')
    be2 = Entry(user_id=1, date='2018-10-4', title='Another Title',
                text='Some text to go in entry body', quote='another quote',
                weather='Cloudy')
    pe1 = Entry(user_id=2, date='2018-10-3', title='Testing same date',
                text='Entry body for testing same date', quote='some quote',
                weather='Snowy')
    pe2 = Entry(user_id=2, date='2018-10-16', title='Test Title',
                text='Some text for an entry body', quote='daily quote',
                weather='Sunny')

    # Todos
    bt1 = Todo(user_id=1, todo_id=1, todo='wash dishes')
    bt2 = Todo(user_id=1, todo_id=2, todo='laundry')
    pt1 = Todo(user_id=2, todo_id=3, todo='sweep/mop')

    db.session.add_all([brighticorn, pairprogrammer, be1, be2, pe1, pe2, bt1, bt2, pt1])
    db.session.commit()
def post(self):
    target = self.request.get('target')
    amount = int(self.request.get('amount'))
    location = self.request.get('location')
    date = datetime.datetime.strptime(self.request.get('date'), '%Y-%m-%d').date()
    succeed = True  # self.request.get('succeed')
    story = self.request.get('story')
    entry = Entry(target=target, amount=amount, location=location, date=date,
                  succeed=succeed, story=story, author=users.get_current_user())
    entry.put()
    self.redirect('/')
def post(self, user_id, feed_id, entry_id):
    """Updates a specific entry."""
    current_user = utils.get_current_user()
    if not current_user:
        self.error(403)
        return
    user = User.get_by_id(int(user_id))
    if user is None:
        self.error(404)
        return
    if current_user.key() != user.key():
        self.error(401)
        return
    feed = InputFeed.get_by_id(int(feed_id), parent=user)
    if feed is None:
        self.error(404)
        return
    entry = Entry.get_by_id(int(entry_id), parent=feed)
    if entry is None:
        self.error(404)
        return

    # request.get() returns '' when the parameter is absent, so test
    # truthiness rather than comparing against None.
    published = self.request.get('published')
    if published:
        entry.published = bool(int(published))
        entry.save()

    self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    self.response.out.write(json.dumps(entry.to_struct()))
def get(self):
    context = {}
    query = Entry.all().order("-updated")
    next = self.request.get("next")
    if next:
        # Cursor-style pagination: fetch one extra row to detect a next page
        dt = datetime.strptime(next, "%Y-%m-%dT%H:%M:%S")
        entries = query.filter("updated <=", dt).fetch(PAGESIZE + 1)
        if len(entries) == PAGESIZE + 1:
            context["next"] = entries[-1].updated.isoformat("T")
        else:
            context["next"] = None
        context["entries"] = entries[:PAGESIZE]
        self.response.out.write(self.render_sec_entries(context))
        return
    else:
        sec = {}
        entries = query.fetch(PAGESIZE + 1)
        if len(entries) == PAGESIZE + 1:
            context["next"] = entries[-1].updated.isoformat("T")
        else:
            context["next"] = None
        context["entries"] = entries[:PAGESIZE]
        sec["entries"] = self.render_sec_entries(context)
        sec["featured"] = self.render_sec_featured()
        path = templatepath("main.html")
        self.response.out.write(template.render(path, sec))
def delete(self, title):
    """Delete the specified tag for the current user."""
    user = utils.get_current_user()
    title = title.decode('utf-8')
    if not user:
        self.error(403)
        return
    m = Tag.all().ancestor(user).filter('title_lower =', title.lower()).get()
    if not m:
        # Original tag not found
        self.error(404)
        return

    # Remove the tag from all entries that carry it
    entries = Entry.all().filter('tags =', m.key())
    for entry in entries:
        logging.info(entry)
        entry.tags.remove(m.key())
        entry.save()

    m.delete()
    self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'
    self.response.headers['Access-Control-Allow-Origin'] = '*'
    self.response.out.write('ok')
def update_basic_info(update_categories=False, update_tags=False,
                      update_links=False, update_comments=False,
                      update_archives=False, update_pages=False):
    from model import Entry, Archive, Comment, Category, Tag, Link
    basic_info = ObjCache.get(is_basicinfo=True)
    if basic_info is not None:
        info = ObjCache.get_cache_value(basic_info.cache_key)
        if update_pages:
            info['menu_pages'] = Entry.all().filter('entrytype =', 'page')\
                                      .filter('published =', True)\
                                      .filter('entry_parent =', 0)\
                                      .order('menu_order').fetch(limit=1000)
        if update_archives:
            info['archives'] = Archive.all().order('-year').order('-month').fetch(12)
        if update_comments:
            info['recent_comments'] = Comment.all().order('-date').fetch(5)
        if update_links:
            info['blogroll'] = Link.all().filter('linktype =', 'blogroll').fetch(limit=1000)
        if update_tags:
            info['alltags'] = Tag.all().order('-tagcount').fetch(limit=100)
        if update_categories:
            info['categories'] = Category.all().fetch(limit=1000)
        logging.debug('basic_info updated')
        basic_info.update(info)
def get(self):
    entry_count = Entry.all().count(1000)
    tags = Tag.all().order('usetime')
    tags_count = tags.count(1000)
    tag_list = []
    for tag in tags:
        tag_count = tag.count_link + tag.count_note + tag.count_pic
        # Pick the dominant entry type for this tag
        if tag.count_link >= tag.count_note:
            if tag.count_link >= tag.count_pic:
                max_type = 'link'
            else:
                max_type = 'pic'
        else:
            if tag.count_pic >= tag.count_note:
                max_type = 'pic'
            else:
                max_type = 'note'
        tag_list.append({
            "info": tag,
            # Float division avoids the integer truncation (and possible
            # ZeroDivisionError) of the original expression
            "type": max_type,
            "level": tag_count / (float(entry_count) / tags_count)
        })
    template_values = {'tags': tag_list}
    path = os.path.join(os.path.dirname(__file__), 'templates/tag.html')
    self.response.out.write(template.render(path, template_values))
def get_single_entry(id):
    with sqlite3.connect("./dailyjournal.db") as conn:
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        # Use a ? parameter to inject a variable's value
        # into the SQL statement.
        db_cursor.execute(
            """
            SELECT e.id, e.concept, e.entry, e.date, e.mood_id
            FROM Entries e
            WHERE e.id = ?
            """,
            (id, ))

        data = db_cursor.fetchone()

        # Create an Entry instance from the current row
        entry = Entry(data['id'], data['concept'], data['entry'],
                      data['date'], data['mood_id'])

    return json.dumps(entry.__dict__)
def EntryHandler_postTest(self):
    weight = 75.0
    variance = 1.4
    currDate = "2012-10-10"
    currDt = dt.date(2012, 10, 10)
    nick = users.get_current_user().nickname()
    head = {"Content-Type": "application/x-www-form-urlencoded",
            "Accept": "text/plain"}
    payload = urlencode({"date": currDate,
                         "variance": float(variance),
                         "weight": float(weight)})

    # request = requests.Request("POST", "/users/%s/" % nick, data=payload)
    request = webapp2.Request.blank('/users/%s/entry/%s' % (nick, currDate))
    request.method = "POST"
    request.headers = head
    request.body = payload
    currUser = users.get_current_user()
    response = webapp2.Response()
    handler = EntryHandler()
    handler.initialize(request, response)
    handler.post(user=None, cd=None)

    self.setCurrentUser("*****@*****.**", "aaaaaaa")
    userId = users.get_current_user().user_id()
    db.delete(Entry.all())
    nick = users.get_current_user().nickname()
    handler.post(user=nick, cd='2012-10-10')

    Entry(weight=100.0, variance=5.0, date=currDt, user=currUser,
          parent=log_key(userId)).put()
    handler.post(user=nick, cd='2012-10-10')

    db.delete(Biometric.all())
    Biometric(height=150, target=73.3, parent=bio_key(currUser.user_id())).put()
    handler.post(user=nick, cd='2012-10-10')

    # PUT and DELETE tests
    handler.put(user=nick, cd='2012-10-10')
    handler.put(user=nick, cd='2012-10-15')
    handler.delete(user=nick, cd='2012-10-10')
def get(self, **kwargs):
    """Return the current status."""
    e = Entry.latest()
    if e:
        return e.as_dict()
    return {}
def add_entry(trip_id):
    """This is where the user can add an entry to their trip."""
    user_id = session["user_id"]

    if request.method == "POST":
        title = request.form["title"]
        text = request.form["entry"]
        all_numbers = datetime.now().timestamp()
        time_stamp = time.ctime(all_numbers)

        image_url = None
        if request.files:
            upload = request.files["file"]
            uploaded_file_info = cloudinary.uploader.upload(upload)
            image_url = uploaded_file_info['secure_url']

        entry = Entry(title=title, entry=text, user_id=user_id,
                      trip_id=trip_id, time_stamp=time_stamp,
                      user_picture=image_url)
        db.session.add(entry)
        db.session.commit()
        return redirect(f"/user_journal/{user_id}")
    else:
        return render_template("create_entry.html", trip_id=trip_id, user_id=user_id)
def add_entry() -> str:
    # Validation class
    vld = validation.Validation()

    none_validate = {
        'practice_id': 'No practice_id',
        'member_id': 'No member_id'
    }
    messages = vld.check_json(none_validate, 'null_check')
    if messages:
        return ','.join(messages)

    numeric_validate = {
        'practice_id': 'practice_id is not numeric',
        'member_id': 'member_id is not numeric'
    }
    messages = vld.check_json(numeric_validate, 'numeric_check')
    if messages:
        return ','.join(messages)

    entry = Entry(request.json['practice_id'], request.json['member_id'])
    db.session.add(entry)
    db.session.commit()
    return "Success"
def getTagsForUrl(url):
    try:
        content = resource.get(url).decodeBody().lower()
    except Exception:  # avoid a bare except: clause
        content = ""
    soup = BeautifulSoup(content)
    texts = soup.findAll(text=True)

    def visible(element):
        if element.parent.name in ['style', 'script', '[document]', 'head', 'title']:
            return False
        elif re.match('<!--.*-->', str(element)):
            return False
        return True

    visible_texts = filter(visible, texts)
    visibleText = " ".join(visible_texts)
    result = getTagsProposalsForText(visibleText)

    # Merge in any stable tags already stored for this URL
    entry = Entry.all().filter("url =", url).fetch(1)
    if len(entry) > 0:
        entryStableTags = entry[0].tags
        for t in entryStableTags:
            name = Tag.get(t).name
            if name not in result:
                result.append(name)
    return result
def get(self, user, tag_title):
    """Gets the RSS feed for a user, filtered by tag."""
    user = utils.get_user_model_by_id_or_nick(user)
    if not user:
        self.error(403)
        return
    tag_title = tag_title.decode('utf-8')
    tag = Tag.all().ancestor(user.key()).filter('title_lower =', tag_title.lower()).get()
    if not tag:
        self.error(404)
        return
    entries = Entry.all().filter('tags =', tag.key()) \
                         .filter('published =', True) \
                         .order('-time_published').fetch(20)
    entries = [e.to_struct() for e in entries]
    path = os.path.join(os.path.dirname(__file__), 'template.rss')
    self.response.headers['Content-Type'] = 'application/xml; charset=utf-8'
    self.response.out.write(template.render(path, {
        'entries': entries,
        'url': self.request.url,
        'title': tag_title,
    }))
def get_unread_entries(count, offset, priority, order):
    if count == 0:
        return []
    order = getattr(Entry.created_date, order)()
    return list(Entry.select().join(Feed).where(
        Feed.priority >= priority,
        Entry.is_read == False).order_by(order).offset(offset).limit(count))
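# Usage sketch (assumes the peewee models Feed and Entry referenced above;
# `order` names a field ordering method, 'asc' or 'desc'):
#
#   newest = get_unread_entries(count=20, offset=0, priority=1, order='desc')
#   oldest = get_unread_entries(count=20, offset=0, priority=1, order='asc')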
def get(self):
    task = self.request.get("task")
    if task == "cleanup":
        # Remove obsolete entries
        Entry.cleanup()
    elif task == "subscribe":
        # Periodically make a subscribe request.
        #
        # Preferentially subscribe to a newly added channel (i.e. status == None);
        # if there aren't any, confirm the least recently checked subscription.
        ch = Channel.all().filter("status =", None).get()
        if not ch:
            ch = Channel.all().filter("status =", "subscribed").order("lastcheck").get()
        if ch:  # guard against an empty Channel table
            ch.subscribe()
    else:
        self.error(404)
def get(self):
    offset = self.request.get('offset')
    if not offset:
        offset = 0
    e = Entry.all().order('addtime')
    e = e.fetch(10, int(offset))
    path = os.path.join(os.path.dirname(__file__), 'templates/export.tpl')
    self.response.out.write(template.render(path, {'e': e}))
def delete(self, id):
    # peewee's Entry.get() raises DoesNotExist rather than returning None,
    # so the lookup must be wrapped in try/except.
    try:
        e = Entry.get(Entry.id == id)
    except Entry.DoesNotExist:
        self.write({'status': 'error'})
        return
    e.delete_instance()
    self.write({'status': 'success'})
def create_entry(entry_list, roaster, score, note):
    entry = Entry(entry_list=entry_list, roaster=roaster, score=score, note=note)
    db.session.add(entry)
    db.session.commit()
    return entry
def get_all_entries():
    # Open a connection to the database
    with sqlite3.connect("./dailyjournal.db") as conn:
        # Just use these. It's a Black Box.
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        # Write the SQL query to get the information you want.
        # m.id is omitted: it duplicates e.mood_id, and its name would
        # collide with e.id in the row lookup below.
        db_cursor.execute("""
            SELECT e.id, e.concept, e.entry, e.date, e.mood_id, m.label
            FROM Entries e
            JOIN Moods m ON m.id = e.mood_id
        """)

        # Initialize an empty list to hold all entry representations
        entries = []

        # Convert rows of data into a Python list
        dataset = db_cursor.fetchall()

        # Iterate over the rows returned from the database
        for row in dataset:
            # Create an Entry instance from the current row. The database
            # fields are passed in the exact order of the parameters
            # defined in the Entry class.
            entry = Entry(row['id'], row['concept'], row['entry'],
                          row['date'], row['mood_id'])
            # Use mood_id here; row['id'] would resolve to e.id
            mood = Mood(row['mood_id'], row['label'])
            entry.mood = mood.__dict__
            entries.append(entry.__dict__)

    # Use the `json` package to properly serialize the list as JSON
    return json.dumps(entries)
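# Illustrative shape of the JSON this returns (attribute names assumed from
# the constructor order above; values are made up):
#
#   [{"id": 1, "concept": "...", "entry": "...", "date": "2021-01-01",
#     "mood_id": 2, "mood": {"id": 2, "label": "happy"}}]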
def parse_genome(genome_path):
    chrs = []
    names = set()
    with open(genome_path) as f:
        for line in f:
            line = line.strip().split()
            if len(line) == 1:
                # A single-field line starts a new chromosome
                chrs.append([])
            else:
                e = Entry(line[1], line[5], int(line[7]), int(line[9]),
                          int(line[9]) - int(line[7]))
                e.set_block_id(int(line[3]))
                chrs[-1].append(e)
                names.add(e.seq_id.split('.')[1])
    return chrs
def create_entry(user, blog, city, title):
    entry = Entry(user=user, blog=blog, city=city, title=title)
    db.session.add(entry)
    db.session.commit()
    return entry
def get(self, req_user=''):
    user_lang = 'en'

    # ---------------------- User Auth ----------------------
    user = users.get_current_user()
    nickname = ''
    if user:
        nickname = user.nickname()
    if nickname:
        user_info = User.all().filter('user', nickname)
        if user_info.count(1) > 0:
            user_info = user_info.get()
            user_lang = user_info.lang
        auth_url = users.create_logout_url(self.request.uri)
        auth_text = 'signout'
    else:
        auth_url = users.create_login_url(self.request.uri)
        auth_text = 'signin'

    entry_count = Entry.all().count(1000)
    if req_user:
        tag_user = req_user
        tags = Tag.all().filter("user", req_user)
    else:
        tag_user = '******'
        tags = Tag.all()
    tags_count = tags.count(1000)

    tag_list = []
    for tag in tags:
        tag_count = tag.count_link + tag.count_note + tag.count_pic
        # Pick the dominant entry type for this tag
        if tag.count_link >= tag.count_note:
            if tag.count_link >= tag.count_pic:
                max_type = 'link'
            else:
                max_type = 'pic'
        else:
            if tag.count_pic >= tag.count_note:
                max_type = 'pic'
            else:
                max_type = 'note'
        tag_list.append({
            "info": tag,
            "type": max_type,
            "level": int(round(tag_count / (float(entry_count) / tags_count)))
        })

    template_values = {
        'nickname': nickname,
        'req_user': req_user,
        'auth_url': auth_url,
        'auth_text': auth_text,
        'tag_user': tag_user,
        'tags': tag_list,
        'uri': self.request.uri
    }
    path = os.path.join(os.path.dirname(__file__), 'templates/' + user_lang + '/tag.html')
    self.response.out.write(template.render(path, template_values))
def get(self, page="1", cate_slug=""): t_values = {} page = int(page) logging.info("IndexHandler - get: page = %d, cate_slug = %s" % (page, cate_slug)) # find all entries by order query = Entry.all().filter("is_external_page =", True).filter("entrytype =", 'post').order("-date") # add category filter? if cate_slug: cates = Category.all().filter("slug =", cate_slug) if cates: query = query.filter("category =", cates[0]) # pagination total_posts = query.count() q_limit = Configuration["posts_per_page"] q_offset = (page - 1) * Configuration["posts_per_page"] logging.info("limit = %d, offset = %d" % (q_limit, q_offset)) # get entries entries = query.fetch(limit=q_limit, offset=q_offset) t_values['entries'] = entries # show entries for debug purpose # for entry in entries: # logging.info("entry title: %s, public = %s, cate = %s" % (entry.title, entry.is_external_page, entry.category.name)) logging.info("total posts = %d, current_page = %d, posts_per_page = %d" % (total_posts, page, Configuration['posts_per_page'])) t_values['navlist'] = generateNavList(total_posts, page, Configuration["posts_per_page"]) # logging.info(t_values['navlist']) # find all links links = Link.all().order("date") t_values['links'] = links # find all categories categories = Category.all() t_values['categories'] = categories # find all pages pages = Entry.all().filter("is_external_page =", True).filter("entrytype =", 'page').order("date") t_values['pages'] = pages # show index page return self.response.out.write(render_template("index.html", t_values, "basic", False))
def get(self, post_id="", operation=""):
    t_values = {}
    logging.info("PostManager get: post_id = %s, operation = %s" % (post_id, operation))

    # Find current_post based on post_id
    if post_id:
        current_post = Entry.get_by_id(long(post_id))
        if current_post:
            logging.info("found post %s from post id %s" % (post_id, current_post.title))
            if operation == "edit":
                t_values['current_post'] = current_post
            elif operation == "publish":
                if not current_post.is_external_page:
                    current_post.category.entrycount += 1
                    current_post.category.put()
                    current_post.is_external_page = True
                    current_post.put()
                    t_values['alert_message'] = "Post %s has been changed to public" % (current_post.title)
                else:
                    t_values['alert_message'] = "Post %s was public already" % (current_post.title)
            elif operation == "unpublish":
                if current_post.is_external_page:
                    current_post.category.entrycount -= 1
                    current_post.category.put()
                    current_post.is_external_page = False
                    current_post.put()
                    t_values['alert_message'] = "Post %s has been changed to private" % (current_post.title)
                else:
                    t_values['alert_message'] = "Post %s was private already" % (current_post.title)
            elif operation == "delete":
                if current_post.is_external_page:
                    current_post.category.entrycount -= 1
                    current_post.category.put()
                current_post.delete()
                t_values['alert_message'] = "Post %s has been deleted" % (current_post.title)

    # Show all posts
    posts = Entry.all().filter("entrytype =", 'post')
    t_values['posts'] = posts

    # Load all categories
    categories = Category.all().order("name")
    t_values['categories'] = categories
    return self.response.out.write(render_template("posts.html", t_values, "", True))
def save_entry(text):
    """Create an Entry."""
    entry = Entry(text=text)
    db.session.add(entry)
    db.session.commit()
    return entry
def get(self): """Gets all entries from all feeds this user subscribes to""" user = utils.get_current_user() if not user: self.error(403) return # Filter and sorting order = self.request.get('order') lang = self.request.get('lang') tag_title = self.request.get('tag') # Filter if tag_title: tag = Tag.all().ancestor(user.key()).filter('title_lower =', tag_title.lower()).get() entries = Entry.all().filter('tags =', tag.key()) else: entries = Entry.all().ancestor(user) if lang and lang != 'all': entries = entries.filter('language =', lang) # Sorting if order: if order == 'date-asc': entries = entries.order('time_published') elif order == 'date-desc': entries = entries.order('-time_published') elif order == 'title-asc': entries = entries.order('title') elif order == 'title-desc': entries = entries.order('-title') else: entries = entries.order('-time_published') entries = entries.fetch(25) self.response.headers['Content-Type'] = 'application/json; charset=utf-8' self.response.headers['Access-Control-Allow-Origin'] = '*' entries = [i.to_struct(include_tags=True) for i in entries] self.response.out.write(json.dumps(entries))
def update(self, silent=False, feed=None):
    request = self.db_session.query(Feed)
    if feed is not None:
        request = request.filter(Feed.name == feed)
    result = request.all()

    for feed in result:
        if feed.twitter:
            api = twitter.Api()
            twits = api.GetUserTimeline(feed.name)
            for twit in twits:
                # Discard replies to other twits
                if not twit.in_reply_to_user_id:
                    txt = twit.GetText()
                    id = "%s_%s" % (feed.name, twit.GetId())
                    e = Entry(id, "", txt, "")
                    self.db_session.add(e)
                    feed.entries.append(e)
                    try:
                        self.db_session.commit()
                        if not silent:
                            msg = "[twitter: %s] %s" % (feed.name, txt)
                            self.bot.say(msg)
                    except (FlushError, IntegrityError):
                        self.db_session.rollback()
        else:
            parsed = feedparser.parse(feed.url)
            for entry in parsed.entries:
                id = "%s_%s" % (feed.name, get_id(entry))
                t = get_time(entry)
                e = Entry(id, entry.link, t, entry.title)
                self.db_session.add(e)
                feed.entries.append(e)
                try:
                    self.db_session.commit()
                    if not silent:
                        msg = "[%s] %s : %s" % (feed.name, entry.title, entry.link)
                        self.bot.say(msg)
                except (FlushError, IntegrityError):
                    self.db_session.rollback()
def add_entry(self, eid, url, date, title, feed):
    f = self.db_session.query(Feed).filter(Feed.name == feed).all()
    if not f:
        # A bare `raise` outside an except block is invalid; raise a real error.
        raise ValueError("unknown feed: %s" % feed)
    feed = f[0]
    e = Entry(eid, url, date, title)
    self.db_session.add(e)
    feed.entries.append(e)
    self.db_session.add(feed)
    self.db_session.commit()
def add_entry():
    if not session.get('logged_in'):
        abort(401)
    form = request.form
    sys, dia, pulse = form['systolic'], form['diatolic'], form['pulse']
    when, side = form['time'], form['side']
    entry = Entry(sys, dia, pulse, side, when)
    db_session.add(entry)
    db_session.commit()
    flash("Successfully posted")
    return redirect(url_for('show_entries'))
def get_all_entries():
    entries = []
    for entry in Entry.select():
        result = {
            'id': entry.id,
            'title': entry.title,
            'text': entry.text,
            'created_at': entry.created_at,
            'updated_at': entry.updated_at
        }
        entries.append(result)
    return json.dumps(entries)
def test_seconds_to_time_string_static(self):
    self.assertEqual("23:23:23.23", Entry._seconds_to_string(84203.23))
    self.assertEqual("1:01:01.01", Entry._seconds_to_string(3661.01))
    self.assertEqual("59:59.59", Entry._seconds_to_string(3599.592))
    self.assertEqual("4:04.04", Entry._seconds_to_string(244.04))
    self.assertEqual("58.90", Entry._seconds_to_string(58.9))
    self.assertEqual("9.93", Entry._seconds_to_string(9.93))
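# A sketch of _seconds_to_string consistent with the assertions above;
# rounding behavior at unit boundaries (e.g. 3599.999) is an assumption:
def _seconds_to_string(seconds):
    hours, rem = divmod(seconds, 3600)
    minutes, secs = divmod(rem, 60)
    hours, minutes = int(hours), int(minutes)
    if hours:
        # e.g. 84203.23 -> "23:23:23.23"
        return "%d:%02d:%05.2f" % (hours, minutes, secs)
    if minutes:
        # e.g. 244.04 -> "4:04.04"
        return "%d:%05.2f" % (minutes, secs)
    # e.g. 58.9 -> "58.90"
    return "%.2f" % secs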
def get(self, format="json"): results = [] tagsRaw = getTagTerms(self.request.get("q").lower()) if len(tagsRaw) == 0 or tagsRaw[0] == "": simplewebapp.formatResponse(format, self, results) return entries = Entry.all().filter("tagsRaw IN ", tagsRaw).run() results = findEntries(entries, tagsRaw) simplewebapp.formatResponse(format, self, results)
def get(self, format):
    # Rebuild the tag table from scratch
    tags = Tag.all().run()
    db.delete(tags)
    firstLevelTags = [
        "ActionScript", "Asp", "BASIC", "C", "C++", "Clojure", "COBOL",
        "ColdFusion", "Erlang", "Fortran", "Groovy", "Haskell", "Java",
        "JavaScript", "Lisp", "Perl", "PHP", "Python", "Ruby", "Scala",
        "Scheme", "haxe", "nodejs",
        'framework', 'tool', 'wiki', 'tutorial', 'howto', 'library',
        'service', 'language'
    ]
    for tag in firstLevelTags:
        t = Tag(name=tag.lower())
        t.put()

    # Re-key every entry's tags against the rebuilt table
    entries = Entry.all()
    for e in entries:
        newtags = getTagKeys(e.tagsRaw)
        e.tags = newtags
        e.put()
    simplewebapp.formatResponse(format, self, "OK")
def test_time_string_to_seconds_static(self):
    self.assertAlmostEqual(11.45, Entry._time_string_to_seconds('11.45'))
    self.assertAlmostEqual(61.34, Entry._time_string_to_seconds('1:01.34'))
    self.assertAlmostEqual(7201.24, Entry._time_string_to_seconds('2:00:01.24'))
    self.assertAlmostEqual(3601.24, Entry._time_string_to_seconds('01:00:01.24'))
    with self.assertRaises(TmsError):
        Entry._time_string_to_seconds('17.4h')
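# A sketch of _time_string_to_seconds matching the assertions above;
# TmsError is the project's own exception type, assumed defined elsewhere:
def _time_string_to_seconds(mark):
    parts = mark.split(":")
    if len(parts) > 3:
        raise TmsError("bad time string: %s" % mark)
    try:
        values = [float(p) for p in parts]
    except ValueError:
        raise TmsError("bad time string: %s" % mark)
    seconds = 0.0
    for value in values:  # handles ss.ss, m:ss.ss, and h:mm:ss.ss
        seconds = seconds * 60 + value
    return seconds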
def get(self):
    '''Old CSV import code, kept for reference:

    i = 0
    n = self.request.get('n')
    if not n:
        n = '1'
    csvfile = 'data/data' + n + '.csv'
    self.response.headers['Content-Type'] = 'text/html'
    csvreader = csv.reader(file(os.path.join(os.path.dirname(__file__), csvfile)))
    for line in csvreader:
        e = Entry()
        e.title = line[0].decode('utf-8')
        e.url = line[1].decode('utf-8')
        e.content = line[2].decode('utf-8')
        e.private = False
        tag_name = line[3].decode('utf-8')
        t_q = Tag.all()
        t_q = t_q.filter('name =', tag_name)
        if t_q.count(1) > 0:
            t = t_q.get()
            t.count_link += 1
            t.usetime = datetime.datetime.now()
            t.put()
        else:
            t = Tag()
            t.name = tag_name
            t.count_link = 1
            t.usetime = datetime.datetime.now()
            t.put()
        e.tags.append(db.Category(tag_name))
        e.put()
        i += 1
        self.response.out.write('%d ' % i)
        self.response.out.write('%s ' % e.key())
        self.response.out.write('%s <br />' % line[0])
    '''
    n = self.request.get('n')
    if not n:
        n = 0
    else:
        n = int(n)
    # Generate 999 numbered test entries for page n
    for i in range(1, 1000):
        e = Entry()
        e.title = str(i + n * 999)
        e.pageid = n
        e.put()
        self.response.out.write('%d ' % i)
        self.response.out.write('%s <br />' % e.key())
def _generate_entries(feeds):
    from model import database as db, Entry
    if not feeds:
        return []
    entries = [
        Entry(url='http://test.example.com/entry/{0}'.format(i),
              title='entry title {0}'.format(i),
              is_read=random.choice([True, False]),
              feed=random.choice(feeds))
        for i in range(random.randint(0, 1000))
    ]
    with db.transaction():
        for e in entries:
            e.save()
    return entries
def search_entries(request):
    title = request.args.get('title')
    query = Entry.select().where(Entry.title.contains(title))
    entries = []
    for entry in query:
        result = {
            'id': entry.id,
            'title': entry.title,
            'text': entry.text,
            'created_at': entry.created_at,
            'updated_at': entry.updated_at
        }
        entries.append(result)
    return json.dumps(entries)
def load_entries():
    """Load entries from entries.item into the database."""
    print("Entries")

    for row in open("seed_data/entries.item"):
        row = row.rstrip()
        date, user_id, title, text, quote, weather = row.split("|")
        entry = Entry(date=date, user_id=user_id, title=title,
                      text=text, quote=quote, weather=weather)
        db.session.add(entry)

    db.session.commit()
def update_entry(entry: Entry) -> bool:
    """Update an entry in the database with new values; return whether the
    update succeeded."""
    if not entry_exists_by_obj(entry):
        print(f"Entry in {entry.table} with id {entry.id} does not exist")
        return False
    col_list, val_list = entry.get_attr_rep_lists()
    set_str = ", ".join(
        f"{col} = {val}" for col, val in zip(col_list, val_list))
    query = f"UPDATE {entry.table} SET {set_str} WHERE id = '{entry.id}'"
    conn.execute(query)
    conn.commit()
    return True
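# Design note: interpolating values directly into the UPDATE string is
# vulnerable to SQL injection if any value contains quotes. A hedged
# alternative sketch using sqlite3 placeholders (assumes the same helpers,
# with val_list holding raw values rather than quoted representations):
#
#   set_str = ", ".join(f"{col} = ?" for col in col_list)
#   query = f"UPDATE {entry.table} SET {set_str} WHERE id = ?"
#   conn.execute(query, (*val_list, entry.id))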
def worker(event, context):
    """Handler that ingests a batch of queue records into the database."""
    # Ignore scheduled warm-up invocations
    if event.get("source") in ["aws.events", "serverless-plugin-warmup"]:
        return {}

    for record in event['Records']:
        timestamp = float(record['attributes']['SentTimestamp']) / 1000
        body = record['body']
        hash_key = record['md5OfBody']
        entry = Entry(created_at=datetime.fromtimestamp(timestamp),
                      content=body,
                      hash_key=hash_key)
        db.session.add(entry)
    else:
        # for/else: commit once after the loop completes without a break
        db.session.commit()
    return None