def delete(ad_id):
    """Delete the ad with the given id.

    Responds 404 when no such ad exists; otherwise removes it, commits,
    and returns an empty body with HTTP 204.
    """
    existing = Ad.get(id=ad_id)
    if not existing:
        abort(404)
    existing.delete()
    db.commit()
    return '', 204
def get_cmt_from_cid(self, str_cid, uid):
    """Return the user's comment entity for the ad identified by str_cid.

    Looks up the UserCmt keyed by uid + str_cid.  If the user has no
    comment for this ad yet, returns a fresh (not yet saved) UserCmt
    seeded with the ad's feeds; if the ad itself is unknown, logs a
    warning and returns False.
    """
    key_name = uid + str_cid
    cmt = UserCmt.get_by_key_name(key_name)
    if not cmt:
        ad = Ad.get_by_id(int(str_cid))
        if not ad:
            # logging.warn is a deprecated alias; logging.warning is the
            # documented spelling
            logging.warning('%s for unknown Ad cid=%s from_uid=%s' %
                            (self.get_action_name(), str_cid, uid))
            return False
        # create a new user comment entity for this Ad
        return UserCmt(key_name=key_name, feeds=ad.feeds)
    return cmt
def get_cmt_from_cid(self, str_cid, uid):
    """Return the user's comment entity for the ad identified by str_cid.

    Looks up the UserCmt keyed by uid + str_cid.  If the user has no
    comment for this ad yet, returns a fresh (not yet saved) UserCmt
    seeded with the ad's feeds; if the ad itself is unknown, logs a
    warning and returns False.
    """
    key_name = uid + str_cid
    cmt = UserCmt.get_by_key_name(key_name)
    if not cmt:
        ad = Ad.get_by_id(int(str_cid))
        if not ad:
            # logging.warn is a deprecated alias; logging.warning is the
            # documented spelling
            logging.warning("%s for unknown Ad cid=%s from_uid=%s" %
                            (self.get_action_name(), str_cid, uid))
            return False
        # create a new user comment entity for this Ad
        return UserCmt(key_name=key_name, feeds=ad.feeds)
    return cmt
def show(ad_id):
    """Look up a single ad by id and return it serialized as JSON.

    Responds 404 when the ad does not exist.
    """
    # fetch the requested ad first; bail out early if it is missing
    ad = Ad.get(id=ad_id)
    if not ad:
        abort(404)
    # serialize the found ad straight to a JSON string
    return AdSchema().dumps(ad)
def update(ad_id):
    """Apply the request's JSON body to an existing ad.

    Responds 404 when the ad does not exist and 422 (with the validation
    errors) when the payload fails schema validation; otherwise returns
    the updated ad serialized as JSON.
    """
    schema = AdSchema()
    ad = Ad.get(id=ad_id)
    if not ad:
        abort(404)
    try:
        # validate/deserialize the payload, then copy it onto the entity
        fields = schema.load(request.get_json())
        ad.set(**fields)
        db.commit()
    except ValidationError as err:
        body = {'message': 'Validation failed', 'errors': err.messages}
        return jsonify(body), 422
    return schema.dumps(ad)
def create():
    """Create a new ad from the request's JSON body.

    Responds 201 with the serialized ad on success, or 422 (with the
    validation errors) when the payload fails schema validation.
    """
    schema = AdSchema()
    try:
        # validate and deserialize the incoming JSON into a dict
        fields = schema.load(request.get_json())
        # build the ad, recording the authenticated user as its creator
        ad = Ad(**fields, createdBy=g.current_user)
        db.commit()
    except ValidationError as err:
        body = {'message': 'Validation failed', 'errors': err.messages}
        return jsonify(body), 422
    return schema.dumps(ad), 201
offset = int(updated_str[-5:][:2]) except: logging.error('unable to extract UTC offset for link=%s: %s' % (link, updated_str)) offset = 0 try: dt = datetime.strptime(updated_str[:len(updated_str)-6], '%Y-%m-%dT%H:%M:%S') updated = dt + timedelta(hours=offset/100) except ValueError: logging.error('unable to parse the datetime for link=%s: %s' % (link, updated_str)) updated = now if not title: logging.warn('Got Ad (%s) with no title from RSS feed' % link) elif not desc: logging.warn('Got Ad (%s) with no desc from RSS feed' % link) else: ad = Ad(key=ad_key, feeds=[fhid], title=title, desc=desc, update_dt=updated, url=link) ads.append(ad) # determine which ads already exist in the datastore ads_to_put = [] keys = [ad.key() for ad in ads] existing_ads = db.get(keys) for i in xrange(len(keys)): ad = ads[i] e_ad = existing_ads[i] if e_ad is None: ads_to_put.append(ad) elif ad.feeds[0] not in e_ad.feeds: # If the ad is not listed as being in feeds we already know it # to be in, then ad the existing feed list to the new ad. Do # it this way so that we store the latest ad info just retrieved.
draw3(Math.floor(Math.random() * 500), 20 + Math.floor(Math.random() * 500), Math.floor(Math.random() * 200), 20) } drawComics() """, description= """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec purus libero, iaculis sit amet nunc a, varius interdum turpis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Aliquam ornare ultrices libero, vitae malesuada erat interdum sit amet. Quisque vel felis dignissim, efficitur felis auctor, consequat magna. Phasellus tortor elit, dignissim ac euismod id, fermentum ut turpis. Nullam ex orci, elementum ac felis quis, interdum consequat eros. Etiam pharetra diam ut tincidunt consectetur. Nunc sit amet bibendum diam, et laoreet sapien. Nam aliquam convallis libero ac maximus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed vitae finibus arcu.\n \n Vivamus cursus placerat ipsum eget suscipit. Sed malesuada diam nec fermentum rutrum. Pellentesque sagittis hendrerit ligula vel placerat. Suspendisse blandit libero sed sapien lacinia, nec finibus leo feugiat. Duis porta sit amet magna nec interdum. Praesent vitae convallis arcu, vitae interdum erat. Vivamus nisi nulla, tincidunt eu ultricies sit amet, placerat et lectus. Integer at erat volutpat, ultricies sapien luctus, pulvinar mauris. Cras eget luctus elit.""", medium=application) Ad(name="Looking for an Illustartor", createdBy=company, created="2018/07/02", description= """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec purus libero, iaculis sit amet nunc a, varius interdum turpis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Aliquam ornare ultrices libero, vitae malesuada erat interdum sit amet. Quisque vel felis dignissim, efficitur felis auctor, consequat magna. Phasellus tortor elit, dignissim ac euismod id, fermentum ut turpis. Nullam ex orci, elementum ac felis quis, interdum consequat eros. 
Etiam pharetra diam ut tincidunt consectetur. Nunc sit amet bibendum diam, et laoreet sapien. Nam aliquam convallis libero ac maximus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed vitae finibus arcu.\n \n Vivamus cursus placerat ipsum eget suscipit. Sed malesuada diam nec fermentum rutrum. Pellentesque sagittis hendrerit ligula vel placerat. Suspendisse blandit libero sed sapien lacinia, nec finibus leo feugiat. Duis porta sit amet magna nec interdum. Praesent vitae convallis arcu, vitae interdum erat. Vivamus nisi nulla, tincidunt eu ultricies sit amet, placerat et lectus. Integer at erat volutpat, ultricies sapien luctus, pulvinar mauris. Cras eget luctus elit.""", medium=illustration) Ad(name="Need music for a project", createdBy=cyberdyne, created="2018/07/02", description= """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec purus libero, iaculis sit amet nunc a, varius interdum turpis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Aliquam ornare ultrices libero, vitae malesuada erat interdum sit amet. Quisque vel felis dignissim, efficitur felis auctor, consequat magna. Phasellus tortor elit, dignissim ac euismod id, fermentum ut turpis. Nullam ex orci, elementum ac felis quis, interdum consequat eros. Etiam pharetra diam ut tincidunt consectetur. Nunc sit amet bibendum diam, et laoreet sapien. Nam aliquam convallis libero ac maximus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Sed vitae finibus arcu.\n \n Vivamus cursus placerat ipsum eget suscipit. Sed malesuada diam nec fermentum rutrum. Pellentesque sagittis hendrerit ligula vel placerat. Suspendisse blandit libero sed sapien lacinia, nec finibus leo feugiat. Duis porta sit amet magna nec interdum. Praesent vitae convallis arcu, vitae interdum erat. Vivamus nisi nulla, tincidunt eu ultricies sit amet, placerat et lectus. 
Integer at erat volutpat, ultricies sapien luctus, pulvinar mauris. Cras eget luctus elit.""", medium=music) db.commit()
def get(self):
    """Render the ad search-results page for the logged-in user.

    Query-string parameters read here:
      f    -- feed key name to view ('manual' for hand-added ads; empty
              selects an overall view across all feeds)
      t    -- view type: 'newest', 'hidden', or empty (rated/noted ads)
      next -- datastore query cursor for pagination
      page -- page number (display only)

    Redirects to '/' when not logged in and to '/tracker' for invalid
    parameter combinations or feeds that no longer exist.
    """
    session = is_logged_in(self)
    if not session:
        return self.redirect('/')
    uid = session['my_id']
    now = datetime.datetime.now()
    feed_key_name = self.request.get('f')
    t = self.request.get('t')
    # overall view = no specific feed requested and not the 'newest' listing
    overall_view = (not feed_key_name and t != 'newest')
    if feed_key_name == 'manual':
        # viewing ads the user added by hand (no backing Feed entity)
        fhid = 'manual'
        age = desc = None
        updating_shortly = False
        if t == 'hidden':
            name = "Manually-Added Ads that were Hidden"
        elif t == 'newest':
            # 'newest' does not apply to the manual list
            return self.redirect('/tracker')
        else:
            name = "Manually-Added Ads"
    elif feed_key_name:
        fhid = Feed.hashed_id_from_pk(feed_key_name)
        # get the user's name for this feed
        name = get_search_name(self, feed_key_name)
        if name is None:
            return self.redirect('/tracker')  # user is no longer tracking this feed
        elif name is False:
            return self.redirect('/')  # login related error
        # compute how old the data is
        feed_dt_updated = dt_feed_last_updated(feed_key_name)
        if not feed_dt_updated:
            return self.redirect('/tracker?err=That%20feed%20no%20longer%20exists.')
        age = str_age(feed_dt_updated, now)
        td = now - feed_dt_updated
        # stale when older than MAX_AGE_MIN minutes
        updating_shortly = td.days>0 or td.seconds>MAX_AGE_MIN*60
        if updating_shortly:
            age += ' - update in progress'
        # update the feed if we haven't retrieved the latest ads recently
        updating = update_feed_if_needed(feed_key_name)
        if updating is None:
            return self.redirect('/tracker?err=The%20requested%20feed%20does%20not%20exist.')
    elif overall_view:
        age = desc = fhid = None
        updating_shortly = False
        if t == 'hidden':
            name = "All Hidden Ads"
        else:
            name = "All Rated/Noted Ads"
    else:
        # t=newest and feed=all doesn't make sense together
        return self.redirect('/tracker')
    # determine which set of ads to show
    next = self.request.get('next')
    if t == 'newest':
        # show the newest ads (regardless of whether the user has commented on them or not)
        q = Ad.all().filter('feeds =', fhid).order('-update_dt')
        if next:
            q.with_cursor(next)
        ads = q.fetch(ADS_PER_PAGE)
        # get user comments on these ads, if any
        user_ad_keys = [db.Key.from_path('UserCmt', '%s%s' % (uid, a.cid))
                        for a in ads]
        user_ad_notes = db.get(user_ad_keys)
        title_extra = 'Newest Ads'
    else:
        # show ads this user has commented on/rated (whether to show hidden ads or not depends on t)
        hidden = (t == 'hidden')
        q = UserCmt.all()
        q.filter('uid =', session['my_id'])
        if fhid:
            q.filter('feeds =', fhid)
        if hidden:
            q.filter('dt_hidden >', DT_PRESITE).order('-dt_hidden')
        else:
            q.filter('dt_hidden =', None).order('-rating')
        if next:
            q.with_cursor(next)
        user_ad_notes = q.fetch(ADS_PER_PAGE)
        # get the ads associated with these comments
        ad_keys = [db.Key.from_path('Ad', uan.cid) for uan in user_ad_notes]
        ads = db.get(ad_keys)
        if t == 'hidden':
            title_extra = "Ignored Ads"
        else:
            title_extra = "Ads I've Rated"
    # put the ads and their comments together
    # NOTE(review): zip pairs positionally; db.get returns None for missing
    # entities, so either element of a pair may be None — template must cope
    ad_infos = zip(ads, user_ad_notes)
    # check that each UserCmt.feeds field is up to date with Ad.feeds (can
    # only do this when we're searching by Ad, i.e., t=newest)
    if t == 'newest':
        # TODO: only mark as outdated if they are inequal EXCEPT 'manual'
        # TODO: when updating cmt.feeds, don't copy over 'manual' (user-specific)
        # TODO: reconsider this code ...
        outdated = [(ad,cmt) for ad, cmt in ad_infos if cmt and ad.feeds!=cmt.feeds]
        if outdated:
            # update any out of date comments
            for ad,cmt in outdated:
                cmt.feeds = ad.feeds
            db.put([cmt for ad,cmt in outdated])
    # whether there may be more ads
    more = (len(ads) == ADS_PER_PAGE)
    if more:
        # a cursor identical to the one we started from means no progress
        more = q.cursor()
        if not more or more==str(next):
            more = None
    # get a description of the search we're viewing
    if fhid and fhid!='manual':
        tmp_feed = Feed(key_name=feed_key_name)
        tmp_feed.extract_values()
        desc = tmp_feed.desc()
    # page number is cosmetic; fall back to 1 on anything unparseable
    if not next:
        page = 1
    else:
        try:
            page = int(self.request.get('page', 1))
        except ValueError:
            page = 1;
    self.response.headers['Content-Type'] = 'text/html'
    self.response.out.write(MakoLoader.render('search_view.html', request=self.request, ADS_PER_PAGE=ADS_PER_PAGE, ads=ad_infos, more=more, age=age, now=now, search_desc=desc, title_extra=title_extra, page=page, name=name, updating_shortly=updating_shortly, overall_view=overall_view))
def index():
    """Return every ad in the database as a JSON list."""
    # `many=True` because we are serializing a collection of ads;
    # `dumps` turns the query result straight into a JSON string
    return AdSchema(many=True).dumps(Ad.select())
def post(self):
    """Manually add a craigslist ad (by URL) to the user's tracked list.

    Validates the submitted 'ad_url' field, extracts the craigslist ad
    ID from it, downloads and stores the Ad entity if it is not already
    in the datastore, tags the ad with the special 'manual' feed, and
    creates or updates the user's UserCmt entity for it.  Redirects back
    with an error map on failure or an informational message on success.
    """
    session = is_logged_in(self)
    if not session:
        return self.redirect(REDIR_URL)
    req = self.request
    errors = {}
    ad_url = validate_string(req, errors, 'ad_url', 'Craigslist Ad URL')
    if ad_url:
        # normalize to an absolute http URL before pattern checks
        if ad_url[:7] != 'http://':
            ad_url = 'http://' + ad_url
        m = RE_URL_CHECK.match(ad_url)
        if not m:
            errors['ad_url'] = 'This URL does not appear to be a valid craigslist.org webpage.'
        else:
            m = RE_ID.match(ad_url)
            if not m:
                errors['ad_url'] = 'Could not extract the ID from Ad URL'
            else:
                cid = int(m.group(1))
    # NOTE(review): assumes validate_string records an error when ad_url is
    # missing/invalid, so cid is always bound past this guard — confirm
    if len(errors):
        return self.redirect_to_self(GET_PARAMS, errors)
    # efficiency: get Ad and UserCmt at the same time
    to_put = []
    ad_key = db.Key.from_path('Ad', cid)
    cmt_key = db.Key.from_path('UserCmt', '%s%s' % (session['my_id'], cid))
    ad, cmt = db.get([ad_key, cmt_key])
    # download the ad if we don't already have it in our db
    if not ad:
        ret = self.fetch_and_parse_page(ad_url)
        if not ret:
            errors['ad_url'] = 'Unable to download the webpage'
            return self.redirect_to_self(GET_PARAMS, errors)
        title, desc, dt = ret
        ad = Ad(key=ad_key, feeds=['manual'], title=title, desc=desc,
                update_dt=dt, url=ad_url)
        to_put = [ad]
    elif 'manual' not in ad.feeds:
        # existing ad from a feed: also mark it as manually tracked
        ad.feeds.insert(0, 'manual')
        to_put = [ad]
    # create UserCmt
    if not cmt:
        cmt = UserCmt(key=cmt_key, feeds=ad.feeds)
        to_put.append(cmt)
    elif 'manual' in cmt.feeds:
        # nothing to do — the user already tracks this ad manually
        return self.redirect(
            '/tracker?info=You%20are%20already%20manually%20tracking%20that%20ad.'
        )
    elif cmt.feeds != ad.feeds:
        # keep the comment's feed list in sync with the ad's
        cmt.feeds = ad.feeds
        to_put.append(cmt)
    # save the new entities
    if to_put:
        db.put(to_put)
    # redirect the user to the feed page
    self.redirect('/tracker?info=Added%20Ad%20%23' + str(cid) +
                  '%20to%20your%20manually%20specified%20list.')