def post(self, entry_id):
    entry = Entry.update_entry(entry_id, self.request2params(self.request))
    if entry is None:
        entry = Entry.get_entry(entry_id)
        return self.redirect('/admin/entry/%d?status=fail' % entry.key.id())
    time.sleep(0.1)
    return self.redirect('/admin/entry/%d?status=done' % entry.key.id())

def detail(slug):
    if session.get('logged_in'):
        query = Entry.select()
    else:
        query = Entry.public()
    entry = get_object_or_404(query, Entry.slug == slug)
    return render_template('detail.html', entry=entry)

def get_single_entry(id): with sqlite3.connect("dailyjournal.db") as conn: conn.row_factory = sqlite3.Row db_cursor = conn.cursor() db_cursor.execute( """ SELECT e.id, e.date, e.concept, e.entry, e.mood_id, e.instructor_id, m.label FROM entries e JOIN moods m ON e.mood_id = m.id WHERE e.id = ? """, (id, )) data = db_cursor.fetchone() entry = Entry(data["id"], data["date"], data["concept"], data["entry"], data["mood_id"], data["instructor_id"]) mood = Mood(data["mood_id"], data["label"]) entry.mood = mood.__dict__ return json.dumps(entry.__dict__)
def edit_entry(slug):
    '''Render the edit entry page and handle the POST for form submission.'''
    # Get the entry from the slug
    entry = Entry.get_entry_from_slug(slug)

    # If the form is submitted
    if request.method == 'POST':
        # If the edit is successful
        if entry.edit(
            title=request.form['title'],
            date=request.form['date'],
            time_spent=request.form['timeSpent'],
            learned=request.form['whatILearned'],
            resources=request.form['ResourcesToRemember'],
        ):
            flash('Entry #{} updated'.format(entry.id))
            # Reload the entry
            entry = Entry.get(Entry.id == entry.id)
            slug = entry.slugify_title()
            # Forward to the detail page
            return redirect(url_for('details_by_slug', slug=slug))

    # On 'GET' (or an unsuccessful edit), render the edit form
    return render_view(
        'edit.html',
        entry=entry,
        slug=slug,
    )

def entry(request):
    if request.session.has_key('logged_in'):
        now = datetime.datetime.now()
        if request.method == 'POST':
            form = EntryForm(request.POST)
            if form.is_valid():
                cd = form.cleaned_data
                now = datetime.datetime.now()
                strnow = now.strftime('%d/%m/%Y')
                author = cd['author']
                content = cd['content']
                e = Entry(date=now, strdate=strnow, author=author, content=content)
                e.save()
                return HttpResponseRedirect('/diary/submitted/')
        else:
            form = EntryForm()
        return render(request, 'new_entry.html', {'form': form, 'now': now})
    else:
        return render(request, 'login.html', {'user_login': '******'})

def edit(slug):
    if session.get('logged_in'):
        entry = Entry.get_entry(slug, public=False)
    else:
        entry = Entry.get_entry(slug, public=True)
    if entry is None:
        return not_found(404)

    if request.method == 'POST':
        if request.form.get('delete'):
            entry.delete()
            flash('Post deleted', 'danger')
            return redirect(url_for('news'))
        if request.form.get('title') and request.form.get('content'):
            entry.title = request.form['title']
            entry.content = request.form['content']
            entry.published = request.form.get('published') or False
            entry.save()
            flash('Entry saved successfully.', 'success')
            if entry.published:
                return redirect(url_for('detail', slug=entry.slug))
            else:
                return redirect(url_for('edit', slug=entry.slug))
        else:
            flash('Title and Content are required.', 'danger')

    return render_template('edit.html', entry=entry)

def add_entry(request):
    body = json.loads(request.body)
    entry = Entry(user=request.user, title=body['title'], text=body['text'])
    entry.save()
    data = json.dumps(Entry.serialize(entry))
    return HttpResponse(data, content_type="application/json")

def entries(request):
    queryset = Entry.objects.order_by('-publication_date')[:5]
    logged_in = False
    if request.session.get('logged_in'):
        logged_in = True
        if request.method == 'POST':
            form = EntryForm(request.POST)
            if form.is_valid():
                title = form.cleaned_data['title']
                author = User.objects.get(id=request.session.get('user'))
                text = form.cleaned_data['text']
                new_entry = Entry(text=text, title=title, author=author)
                new_entry.save()
                form = EntryForm()
        else:
            form = EntryForm()
    else:
        form = None
        logged_in = False
    e_dict = archive_dict()
    return render(request, 'entry_list.html', {
        'form': form,
        'list_of_entries': queryset,
        'logged_in': logged_in,
        'e_dict': e_dict,
    })

def get_single_entry(id): with sqlite3.connect("./dailyjournal.db") as conn: conn.row_factory = sqlite3.Row db_cursor = conn.cursor() db_cursor.execute( """ SELECT e.id, e.concept, e.entry, e.date, e.mood_id, m.id moodId, m.label FROM entries e JOIN Mood m ON e.mood_id = m.id WHERE e.id = ? """, (id, )) data = db_cursor.fetchone() entry = Entry(data['id'], data['concept'], data['entry'], data['date'], data['mood_id']) mood = Mood(data['moodId'], data['label']) entry.mood = mood.__dict__ return json.dumps(entry.__dict__)
def get(self):
    entry_id = self.request.get('entry_id')
    if entry_id:
        try:
            entry_id = int(entry_id)
        except ValueError:
            self.response.set_status(400)
            return
        entry = Entry.get_by_id(entry_id)
        if not entry:
            self.response.set_status(404)
            return
        self.send_json(entry.to_dict())
        return

    count = self.request.get('count')
    page = self.request.get('page')
    try:
        count = int(count) if count else 20
        page = int(page) if page else 0
    except ValueError:
        self.response.set_status(400)
        return
    entries = Entry.get_latest(count, page)
    self.send_json([e.to_dict() for e in entries])

def get_single_entry(id): with sqlite3.connect("./dailyjournal.db") as conn: conn.row_factory = sqlite3.Row db_cursor = conn.cursor() # Use a ? parameter to inject a variable's value # into the SQL statement. db_cursor.execute( """ SELECT e.id, e.concept, e.entry, e.date, e.moodId, m.label FROM entry e JOIN mood m ON m.id = e.moodId WHERE e.id = ? """, (id, )) # Load the single result into memory data = db_cursor.fetchone() # Create an entry instance from the current row entry = Entry(data['id'], data['concept'], data['entry'], data['date'], data['moodId']) mood = Mood(data['moodId'], data['label']) entry.mood = mood.__dict__ return json.dumps(entry.__dict__)
def syncWeibo(request):
    WBLogin = weiboLogin.weiboLogin()
    if WBLogin.login('*****@*****.**', '1234qwer') == 1:
        urlContent = urllib2.urlopen('http://game.weibo.com/club/forum-315-1').read()
        soup = BeautifulSoup(urlContent)
        ul = soup.find('ul', {'class': 'top_topics'})
        lis = ul.findAll('li', {'class': ''})
        for li in lis:
            item = {
                'user': li.find('div', {'class': 'col1'}).find('img')['alt'],
                'link': li.find('div', {'class': 'no_rep_line'}).findAll('a')[1]['href'],
                'tag': li.find('a', {'class': 'navy_tag'}).string,
                'title': li.find('div', {'class': 'no_rep_line'}).findAll('a')[1].string,
                'time': li.find('div', {'class': 'col2'}).find('span').string,
            }
            title = "[%s][%s]%s%s" % (item['time'], item['user'], item['tag'], item['title'])
            link = item['link']
            result = Entry.objects.filter(title=title)
            if len(result) == 0:
                entry = Entry(title=title, link=link, content='', feedid=0)
                entry.save()
                send_mail("*****@*****.**", title, link,
                          ("smtp.163.com", 25, "*****@*****.**", "1234qwer", False))
        return HttpResponse("Login success!")
    else:
        return HttpResponse("Login error!")

def test_edit_entry_bad_then_good(self):
    with unittest.mock.patch('builtins.input',
                             side_effect=['Spam', 'fourtwo', '', '42', 'Bey Day',
                                          '', '11/11/2011', 'n', 'y', '']):
        Entry.create(
            timestamp=datetime.datetime(2018, 1, 1, 1),
            user_name='Yellow Diamond',
            task_name='Stomp RQ',
            task_minutes=1,
            task_notes="mystery"
        )
        e_dict = {
            'user_name': 'Yellow Diamond',
            'task_name': 'Stomp RQ',
            'task_minutes': 1,
            'task_notes': "mystery",
            'timestamp': datetime.datetime(2018, 1, 1, 1)
        }
        self.assertIsNone(wlui.edit_entry(e_dict))
        CardCatalog().delete_entry({
            'user_name': 'Yellow Diamond',
            'task_name': 'Spam',
            'task_minutes': 42,
            'task_notes': "mystery",
            'timestamp': datetime.datetime(2011, 11, 11, 1)
        })

def post(self):
    key = self.get_argument("key", None)
    if key:
        entry = Entry.get(key)
        entry.title = self.get_argument("title")
        entry.markdown = self.get_argument("markdown")
        entry.html = markdown.markdown(self.get_argument("markdown"))
    else:
        title = self.get_argument("title")
        slug = unicodedata.normalize("NFKD", title).encode("ascii", "ignore")
        slug = re.sub(r"[^\w]+", " ", slug)
        slug = "-".join(slug.lower().strip().split())
        if not slug:
            slug = "entry"
        while True:
            existing = db.Query(Entry).filter("slug =", slug).get()
            if not existing or str(existing.key()) == key:
                break
            slug += "-2"
        entry = Entry(
            author=self.current_user,
            title=title,
            slug=slug,
            markdown=self.get_argument("markdown"),
            html=markdown.markdown(self.get_argument("markdown")),
        )
    entry.put()
    self.redirect("/entry/" + entry.slug)

def nowy_post():
    form = PostForm()
    if form.validate_on_submit():
        cleaned_data = bleach.clean(
            form.body.data,
            tags=bleach.sanitizer.ALLOWED_TAGS + [
                'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'iframe', 'img',
                'p', 'pre', 'src', 'u', 'sup', 'sub', 'strike', 'br'
            ],
            attributes=bleach.sanitizer.ALLOWED_ATTRIBUTES)
        post = Entry(
            title=form.title.data,
            body=cleaned_data,
            published=form.status.data,
        )
        db.session.add(post)
        db.session.commit()
        slug = slugify(str(post.id) + '-' + post.title)
        post.slug = slug
        db.session.commit()
        flash('Post added', 'success')
        return redirect(url_for('article', slug=slug))
    return render_template('create.html', form=form)

def get(self):
    feed_id, feed_url = self.request.get('feed_id'), self.request.get('feed_url')
    url_result = urllib2.urlopen(feed_url)
    feed_result = fd.parse(url_result)
    if feed_result.bozo == 1:
        logging.error('fetch error, id: %s, url: %s, error: %s',
                      feed_id, feed_url, feed_result.bozo_exception)
        return
    feed_update_time = feed_result.get('updated', datetime.utcnow())
    has_update = True
    f = Feed.get_by_id(int(feed_id))
    if f.is_allow_fetch(feed_update_time):
        for entry in feed_result.entries:
            if entry.published_parsed <= f.lastedPublishedTime:
                logging.info('no update, id: %s, url: %s', feed_id, feed_url)
                has_update = False
                break
            e = Entry(title=entry.title, url=entry.link, author=entry.author,
                      content=entry.content, publishedTime=entry.published_parsed)
            e.put()
            logging.debug('fetch entry, url: %s', entry.link)

def post(self):
    title = self.get_argument("title", default=None)
    if title is None or len(title) == 0:
        self.view("admin/article-new.html", menuIndex=1, error=u"Please enter a title")
        return
    email = self.get_current_user()
    user = User.gql("WHERE email = :1", email).get()
    source = self.get_argument("cleanSource", default=" ")
    html = self.get_argument("content", default=" ")
    slug = " "
    if source != " ":
        slug = source.replace("\r\n", " ").replace("\t", " ")[0:200]
    date = datetime.datetime.now() + datetime.timedelta(hours=8)
    entry = Entry(author=user.key(), slug=slug, title=title, html=html,
                  body_source=source, published=date, updated=date)
    try:
        entry.put()
        self.dispatch(msg=u"Created successfully", to=u"article list page",
                      toUrl="/admin/article", seconds=2)
    except:
        self.view("admin/article-new.html", menuIndex=1, error=u"Creation failed")

def generateFeedEntry(urlContent, feed):
    encodeType = feed['encoding']
    urlContent = urlContent.decode(encodeType).encode('utf-8')
    global_pattern = re.compile(
        feed['global_search_pattern'].replace('{%}', '(.*?)').replace('{*}', '.*?'),
        re.I | re.S | re.M)
    item_pattern = re.compile(
        feed['item_search_pattern'].replace('{%}', '(.*?)').replace('{*}', '.*?'),
        re.I | re.S | re.M)
    sub_pattern = re.compile(r'{%(.*?)}', re.I | re.S | re.M)
    globalMatch = global_pattern.search(urlContent)
    globalContent = globalMatch.group()
    globalContent = globalContent[0:len(globalContent) / 4]
    itemList = item_pattern.findall(globalContent)
    print len(itemList)
    for i in xrange(len(itemList) - 1, -1, -1):
        item = itemList[i]
        link = feed['item_link']
        title = feed['item_title']
        content = feed['item_content']
        link = sub_pattern.sub(lambda m: str(item[int(m.group(1)) - 1]), link)
        content = sub_pattern.sub(lambda m: str(item[int(m.group(1)) - 1]), content)
        title = sub_pattern.sub(lambda m: str(item[int(m.group(1)) - 1]), title)
        result = Entry.objects.filter(title=title)
        if len(result) == 0:
            entry = Entry(title=title, link=link, content=content, feedid=feed['id'])
            entry.save()
            if feed['mail_address'] != '':
                send_mail(feed['mail_address'], title, link,
                          ("smtp.163.com", 25, "*****@*****.**", "1234qwer", False))

def test_existing_id(self):
    client = Client()
    entry = Entry(title="Hello World!", body="Test Suite action!")
    entry.save()
    pk = entry.id
    response = client.get('/entry/' + str(pk))
    self.assertEqual(response.status_code, 200)

def add_entry():
    fname = request.form['filename']
    rtype = request.form['type']
    keys = request.form['keywords']
    text = request.form.get('text', u'')
    status = request.form.get('status', 0)

    _id = process_slug(fname)
    if g.files.get(_id):
        raise Exception('Entry duplicated.')
    elif not _id:
        raise Exception('Entry ID is required.')

    entry = Entry(
        {
            '_id': _id,
            'type': rtype,
            'keywords': _parse_input_keys(keys),
            'status': parse_int(status),
            'text': text,
            'messages': [],
        },
        make_file_path(_id))
    entry.save()
    g.files[_id] = entry

    return_url = url_for('.entry', _id=entry['_id'])
    return redirect(return_url)

def feed_push_update(feed_key):
    feed = ndb.Key(urlsafe=feed_key).get()
    if not feed:
        raise ndb.Return(("No feed", 404))

    data = request.stream.read()

    if feed.hub_secret:
        server_signature = request.headers.get('X-Hub-Signature', None)
        signature = hmac.new(feed.hub_secret, data).hexdigest()
        if server_signature != signature:
            logger.warn('Got PuSH subscribe POST for feed key=%s w/o valid signature: '
                        'sent=%s != expected=%s',
                        feed_key, server_signature, signature)
            raise ndb.Return('')

    logger.info('Got PuSH body: %s', data)
    logger.info('Got PuSH headers: %s', request.headers)

    parsed_feed = feedparser.parse(data)
    new_guids, old_guids = yield Entry.process_parsed_feed(parsed_feed, feed, overflow=False)
    yield Entry.publish_for_feed(feed, skip_queue=False)
    raise ndb.Return('')

def resave_entries():
    """Fetch and re-save all entries."""
    entries = db.get_all_entries()
    for item in entries:
        print("Updating entry {} from user {}".format(item.get('id'), item.get('created_by')))
        entry = Entry(id=item.get('id'), created_by=item.get('created_by'))
        entry.save()

def add_entry():
    if 'user_id' not in session:
        return redirect("/")
    else:
        Entry.add_new_entry(request.form)
        flash("Successfully logged hydration.", "log_success")
        return redirect('/home')

def get_all_entries():
    with sqlite3.connect("./dailyjournal.db") as conn:
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        db_cursor.execute("""
        SELECT
            e.id,
            e.concept,
            e.entry,
            e.date,
            e.mood_id,
            m.id moodId,
            m.label
        FROM entries e
        JOIN Mood m ON e.mood_id = m.id
        """)

        entries = []
        dataset = db_cursor.fetchall()

        for row in dataset:
            entry = Entry(row['id'], row['concept'], row['entry'],
                          row['date'], row['mood_id'])
            mood = Mood(row['moodId'], row['label'])
            entry.mood = mood.__dict__
            entries.append(entry.__dict__)

    return json.dumps(entries)

def setUp(self):
    models.initialize()
    Entry.create(timestamp=datetime.datetime(2018, 7, 2, 8),
                 user_name='White Diamond',
                 task_name='Unknown',
                 task_minutes=1200,
                 task_notes="mystery")

def __build_expanded_entry_from_row(self, row):
    entry = Entry(row['id'], row['date'], row['entry'], row['moodId'])
    mood = Mood(row['moodId'], row['mood_value'], row['mood_label'])
    entry.mood = mood.__dict__
    return entry.__dict__

def test_add_entry(self, mock):
    mock.side_effect = [
        "Test Name", "Test Task", 120, "Test Notes", "y", "q"
    ]
    self.add_task_test.get_input()
    results = Entry.get(Entry.select().where(
        Entry.task.contains("Test Task")))
    self.assertIsNotNone(results)

def testUndoMyEntry(self):
    try:
        s = get_object_or_404(ProgramSlot, pk=1)
        u = get_object_or_404(User, pk=1)
        e = Entry(pk=2, notes="", slot=s, user=u)
        e.save()
    except Http404 as e:
        self.fail("Something doesn't exist.")

def index():
    en_tags = Entry.all_tags()
    en = Entry.all_en()
    tags = []
    tags.extend(["".join(x) for x in en_tags])
    unique_tags = []
    for _index in tags:
        unique_tags.extend(_index.split(";"))
    return render_template('index.html', tag_entries=set(unique_tags), entries=en)

def index():
    search_query = request.args.get('q')
    if search_query:
        query = Entry.search(search_query)
    else:
        query = Entry.public().order_by(Entry.timestamp.desc())
    return object_list('items/index.html', query,
                       search=search_query, check_bounds=False)

def new():
    # Create the form
    form = EntryForm(request.form)

    # If we're posting and the form is filled out correctly,
    # create the new entry.
    # Otherwise return the empty form.
    if request.method == 'POST' and form.validate():
        Entry.create(**form.data)
        return redirect('/')
    return render_template('new.html', form=form)

def entry_view(entry_id):
    template_name = 'index.html'
    if Entry.select().where(Entry.id == entry_id,
                            Entry.is_active == True,
                            Entry.parent == None).exists():
        entries = Entry.select().where(Entry.id == entry_id, Entry.is_active == True)
        responses = Entry.select().where(Entry.parent << entries)
    else:
        abort(404)
    return views.render(template_name, entries=entries, responses=responses, is_entry=True)

def setUp(self):
    models.initialize()
    self.view_test = View.__new__(View)
    self.deletion_entry = Entry.create(name="Test Name",
                                       task="Test Task",
                                       minutes=120,
                                       notes="Test Notes")
    self.entries_test = Entry.get(Entry.select().where(
        Entry.task.contains("test task")))
    self.entry = self.entries_test

def delete_feed(feed_id):
    """Delete a feed."""
    feed = Feed.get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    Entry.delete_for_feed(feed)
    feed.key.delete()
    return jsonify(status='ok')

def remove_view(entry_id):
    user = Session.get_user()
    if Entry.select().where(Entry.id == entry_id,
                            Entry.user == user,
                            Entry.is_active == True).exists():
        # Fetch the requested entry (matching the existence check above)
        # and mark it inactive.
        entry = Entry.get(Entry.id == entry_id,
                          Entry.user == user,
                          Entry.is_active == True)
        entry.is_active = False
        entry.save()
    else:
        abort(404)
    return redirect(URLS['index'])

def detail(slug):
    # TODO: implement drafts and publication handling
    if session.get('logged_in'):
        entry = Entry.get_entry(slug, public=False)
    else:
        entry = Entry.get_entry(slug, public=True)
    if entry is None:
        return not_found(404)
    return render_template('detail.html', entry=entry)

def get_all_entries():
    # Open a connection to the database
    with sqlite3.connect("./dailyjournal.db") as conn:
        # Return rows as dictionary-like sqlite3.Row objects
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        # Write the SQL query to get the information you want
        db_cursor.execute("""
        SELECT
            e.id,
            e.concept,
            e.entry,
            e.date,
            e.mood_id,
            m.label mood_label
        FROM Entry e
        JOIN Mood m ON m.id = e.mood_id
        """)

        # Initialize an empty list to hold all entry representations
        entries = []

        # fetchall() loads every row that matches the query into memory
        dataset = db_cursor.fetchall()

        # Iterate the list of rows returned from the database
        for row in dataset:
            # Create an Entry instance from the current row.
            # The database fields are passed in the exact order of the
            # parameters defined in the Entry class; use bracket notation
            # to get the value of each key.
            entry = Entry(row['id'], row['concept'], row['entry'],
                          row['date'], row['mood_id'])

            # Create a Mood instance from the current row
            mood = Mood(row['mood_id'], row['mood_label'])

            # __dict__ holds an object's (writable) attributes as a dictionary.
            # Attach the dictionary representation of the mood to the entry.
            entry.mood = mood.__dict__

            # Add the dictionary representation of the entry to the list
            entries.append(entry.__dict__)

    # Use the `json` package to properly serialize the list of dictionaries as JSON
    return json.dumps(entries)

def index(tag=None):
    '''Home page, listing all entries or only those with a specific tag.'''
    if tag:
        entries = []
        for entry in Entry.select():
            for x in entry.tags.split(' '):
                if x == tag:
                    entries.append(entry)
    else:
        entries = Entry.select()
    return render_template('index.html', entries=entries)

def index():
    search_query = request.args.get('q')
    if search_query:
        query = Entry.search(search_query)
    else:
        query = Entry.public().order_by(Entry.timestamp.desc())

    # The `object_list` helper will take a base query and then handle
    # paginating the results if there are more than 20. For more info see
    # the docs:
    # http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#object_list
    return object_list('index.html', query,
                       search=search_query, check_bounds=False)

def EntryFromDict(dico, feed_url):
    dico = normalize_entry_dict(dico, feed_url)
    entry = Entry(dico['title'], dico['content'])
    entry.url = dico['url']
    entry.link = dico['link']
    entry.title = dico['title']
    entry.content = dico['content']
    entry.mimetype = dico['mimetype']
    entry.created = dico['created']
    entry.updated = dico['updated']
    entry.public = False
    return entry

def delete_element():
    id = request.args.get('element')
    element = Entry.get_by_id(id)
    if element.image != "":
        if os.path.isfile(element.image):
            os.remove(element.image)
    if Entry.delete_by_id(id):
        return jsonify({'status': 200})
    else:
        return jsonify({'status': 500})

def index():
    search_query = request.args.get("q")
    if search_query:
        query = Entry.search(search_query)
    else:
        query = Entry.public().order_by(Entry.timestamp.desc())

    # The `object_list` helper will take a base query and then handle
    # paginating the results if there are more than 20. For more info see
    # the docs:
    # http://docs.peewee-orm.com/en/latest/peewee/playhouse.html#object_list
    return object_list("index.html", query,
                       search=search_query, check_bounds=False)

def edit(id):
    # Get the selected entry and create the form bound to it
    entry = Entry.get(Entry.id == id)
    form = EntryForm(request.form, entry)

    # If we're posting and the form is filled out correctly,
    # update the entry.
    # Otherwise render the form with the entry's current values.
    if request.method == 'POST' and form.validate():
        print(form.data)
        Entry.update(**form.data).where(Entry.id == id).execute()
        return redirect('/')
    return render_template('edit.html', entry=entry, form=form)

def get_all_entries(value=""):
    with sqlite3.connect("./dailyjournal.db") as conn:
        # Return rows as dictionary-like sqlite3.Row objects
        conn.row_factory = sqlite3.Row
        db_cursor = conn.cursor()

        # Write the SQL query to get the information you want;
        # filter on the entry text when a search value is given
        if len(value) > 0:
            db_cursor.execute("""
            SELECT
                e.id,
                e.date,
                e.entry,
                e.mood_id,
                e.concepts,
                m.label
            FROM entries e
            JOIN Moods m ON e.mood_id = m.id
            WHERE e.entry LIKE ?
            """, ('%' + value + '%', ))
        else:
            db_cursor.execute("""
            SELECT
                e.id,
                e.date,
                e.entry,
                e.mood_id,
                e.concepts,
                m.label
            FROM entries e
            JOIN Moods m ON e.mood_id = m.id
            """)

        # Initialize an empty list to hold all entry representations
        entries = []

        # Convert rows of data into a Python list
        dataset = db_cursor.fetchall()

        # Iterate the list of rows returned from the database
        for row in dataset:
            entry = Entry(row['id'], row['date'], row['entry'],
                          row['mood_id'], row['concepts'])
            mood = Mood("", row['label'])
            entry.mood = mood.__dict__
            entries.append(entry.__dict__)

    # Use the `json` package to properly serialize the list as JSON
    return json.dumps(entries)

def index():
    search_query = request.args.get('q')
    if search_query:
        query = Entry.search(search_query)
    else:
        query = Entry.public().order_by(Entry.last_mod_date.desc())
    try:
        return object_list('index.html', query)
    except NotFound:
        # peewee raises a 404 `NotFound` exception when no results are found,
        # but we would rather show the page with no results
        return render_template('index.html', object_list=[])

def add_entry():
    if not session.get('logged_in'):
        abort(401)
    entry = Entry(
        title=request.form['title'],
        text=request.form['text'],
    )
    entry.save()
    flash('New entry was successfully posted')
    return redirect(url_for('show_entries'))

def newentry():
    """Create a new entry"""
    user = session["user"]
    assert user
    new_entry = Entry(None, user.username)
    new_entry.save()
    logging.info("Creating a new entry with id {} for user {}".format(
        new_entry.id, user.username))
    if request.method == "GET":
        return redirect(url_for("page"))
    else:
        return new_entry

def post(self):
    current_user = users.get_current_user()
    location = self.request.get('user-location')
    date = self.request.get('user-date')
    details = self.request.get('user-details')
    links = self.request.get('user-links')
    journal_entry = Entry(entry_location=location,
                          entry_date=date,
                          entry_details=details,
                          entry_links=links)
    journal_entry.user_id = current_user.user_id()
    journal_entry.put()
    self.redirect('/memories')

def init(post=None):
    """Initialises a data entry from a blog post."""
    user = User.objects.get(username="******")
    posts = Post.objects.all()
    if post:
        posts = [posts[post]]
    for index, post in enumerate(posts):
        try:
            # entry = Entry(**{'post': post, 'user': user})
            entry = Entry(post, user)
            entry.save()
        except Exception as e:
            print("data.models.init: %s (%s %s)" % (e, index, post.title))

def fetch_old_entries(feed):
    # example url:
    # http://www.google.com/reader/public/atom/feed/http://feeds.feedburner.com/thesimpledollar?n=1000
    feed_url = 'http://feeds.feedburner.com/thesimpledollar'
    google_url = ('http://www.google.com/reader/public/atom/feed/' +
                  sanitize_url(feed_url) + '?n=' + str(max_entries))
    # print "Fetching feed from", feed_url, "..."
    f = feedparser.parse(google_url)
    # print "Fetched", len(f.entries), "entries of", f.feed.title
    # print "Creating new feed..."
    for i in f.entries:
        e = Entry(title=i.title, link=i.link, description=i.description,
                  date=i.updated_parsed)
        e.feed = feed
        e.save()

def delete_entry():
    """Delete the entry"""
    user = session["user"]
    entry_id = request.form["entry_id"]
    assert user
    assert entry_id
    assert isinstance(entry_id, basestring)
    entry = Entry(entry_id, user.username)
    assert entry.created_by == user.username
    entry = entry.delete()
    return "ok"

def check_for_updates(records):
    for r in records:
        name = r['vocab']
        for e in r['entries']:
            timestamp = datetime.strptime(e['timestamp'], '%m/%d/%Y %H:%M:%S')
            m = Entry.objects.filter(name=name, timestamp=timestamp)
            if len(m) == 0:
                entry = Entry(name=name, timestamp=timestamp)
                entry.save()
                print '{} ({}) is new!'.format(name, str(timestamp))
                tweet_about_it(name, e)
            else:
                print '{} ({}) already exists!'.format(name, str(timestamp))

def save_feed_preview(feed_id):
    """Preview a saved feed"""
    form = FeedUpdate(request.args)
    if not form.validate():
        return jsonify_error(message="Invalid update data")
    feed = Feed.get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    form.populate_obj(feed)
    feed.preview = True
    preview_entries = Entry.entry_preview(
        Entry.latest_published(feed).fetch(3), feed, format=True)
    return jsonify(status='ok', data=preview_entries)