def get(self, **kwargs):
    """Render the page editor for a Facebook page the current user administers.

    Looks up the page identified by the 'pageid' URL kwarg among the user's
    pages; if found, renders the edit template with that page's widgets and
    options, otherwise redirects to the dashboard.
    """
    admin = False
    user = self.current_user
    page_id = kwargs.get('pageid')
    options_dict = {}
    try:
        # All pages the user administers (user.pages holds datastore keys).
        pages = Page.get(user.pages)
        for p in pages:
            if p.id == page_id:
                admin = True
                page = p
        # Widgets for this page, excluding soft-deleted ones, in display order.
        # NOTE(review): if the loop above never matched, `page` is unbound here
        # and the resulting NameError is swallowed by the bare except below.
        widgets = Widget.all().filter('page =', page).filter('deleted = ', False).order('order')
        options = Option.all().filter('type_reference =', page)
        for option in options:
            options_dict[option.name] = {'id': str(option.key().id()), 'value': option.value}
    except:
        # Best-effort: any failure (bad page id, datastore error) falls through
        # with empty state; the `admin` flag then decides the response.
        page = None
        widgets = None
        options_dict = None
    if admin:
        #page = Page.get_by_key_name(str(page_id))
        #add_app_url = 'https://www.facebook.com/add.php?api_key=a284fdd504b5191923362afabc0ea6c7&pages=1&page=141947329155355'
        # Blobstore upload target used by the editor's file-upload widget.
        upload_url = blobstore.create_upload_url('/upload')
        # Obfuscate the page id before exposing it to the template.
        page_id = encrypt(page_id).encode('hex')
        self.render("app/edit.html", admin=True, page=page, upload_url=upload_url, page_id=page_id, widgets=widgets, options=options_dict)
    else:
        self.redirect('/dashboard')
def do_import():
    """Process one URL from a Page's pending stylesheet-import queue.

    Returns short status strings consumed by the task-queue caller.
    NOTE(review): this block appears truncated in this view — the trailing
    ``finally:`` clause has no visible body.
    """
    page = Page.get(request.form.get('page_key', ''))
    if not page or page.import_state != IMPORTING:
        return 'NO_IMPORTER'  # We're done
    importer = Importer.gql('WHERE page=:1', page.key()).get()
    if not importer:
        # This requires a request to fetch the page and parse the URLs.
        # It also enqueues the next run.
        create_importer(page)
        return 'CREATED'
    if importer.urls:
        # Take the next stylesheet URL off the queue and try to import it.
        url = importer.urls.pop(0)
        parser = None
        try:
            resp = urlfetch.fetch(url, deadline=10)
            if resp.status_code == 200:
                parser = CSSParser()
                sheet = parser.parseString(resp.content, href=url)
                style = sheet.cssText
                # Append the fetched stylesheet with a provenance marker.
                importer.style += '\n\n/* Imported from %s */\n%s' % (url, style)
            else:
                raise Exception('Error fetching %s' % url)
        except Exception, e:
            import traceback
            # Record the failure on the importer and log the full traceback.
            importer.errors.append('Error importing %s' % url)
            logging.error('Error importing for Page %s from %s:\n%s\n%s', page.key().id(), url, e, traceback.format_exc())
        finally:
def _stats():
    """Render the internal stats dashboard (restricted access).

    Allowed for GAE admins, @fogcreek.com accounts, and whitelisted IPs.
    """
    if not (users.is_current_user_admin() or users.get_current_user().email().endswith('@fogcreek.com') or request.remote_addr in ['127.0.0.1', '71.190.247.30']):
        abort(404)
    # Live totals straight from the datastore (keys-only queries for speed).
    user_count = GqlQuery('SELECT __key__ FROM UserSettings').count(None)
    site_count = GqlQuery(
        'SELECT __key__ FROM Site WHERE example=false').count(None)
    now = datetime.now()
    # The last 14 days, oldest first.
    days = list(reversed([now - timedelta(days) for days in range(14)]))
    day_views = [
        get_period_and_count('css:all', PeriodType.DAY, day) for day in days
    ]
    day_users = [
        get_period_and_count('user:all', PeriodType.DAY, day) for day in days
    ]
    day_sites = [
        get_period_and_count('site:all', PeriodType.DAY, day) for day in days
    ]
    # overwrite today's cached numbers with the live count we just got from the database
    day_users[-1] = day_users[-1][0], user_count
    day_sites[-1] = day_sites[-1][0], site_count
    # get the top referrers
    period_type = PeriodType.DAY
    fetch_limit = 50
    query = LivecountCounter.all().order('-count')
    query.filter('period_type = ', period_type)
    query.filter('period = ', PeriodType.find_scope(period_type, datetime.now()))
    top_counters = query.fetch(fetch_limit)
    top_referrers = []
    for counter in top_counters:
        name = counter.name
        # Counter names look like 'css:page:<referrer>:<page_key>'; skip our own domain.
        if name.startswith('css:page') and not name.startswith(
                'css:page:www.webputty.net'):
            parts = name.split(':')
            page = None
            preview_size = 0
            published_size = 0
            try:
                page = Page.get(parts[3])
            except Exception:
                logging.warn("_stats counldn't find matching page: %s", parts[3])
            if page:
                preview_size = len(page.compressed_css(True))
                published_size = len(page.compressed_css(False))
            top_referrers.append((parts[2], counter.count, parts[3], preview_size, published_size))
    return render_template('_stats.html', user_count=user_count, site_count=site_count, day_views=day_views, day_users=day_users, day_sites=day_sites, top_referrers=top_referrers)
def get(self, **kwargs):
    """Render the upgrade page with the user's pages and split first/last name.

    Fixes: ``name[len(name)-1]`` replaced with the idiomatic ``name[-1]``, and
    an empty/whitespace-only ``user.name`` no longer raises IndexError — both
    name parts default to ''.
    """
    user = self.current_user
    # Split the display name into words; guard the empty case so the
    # indexing below cannot raise IndexError.
    name_parts = user.name.split()
    fname = name_parts[0] if name_parts else ''
    lname = name_parts[-1] if name_parts else ''
    pages = Page.get(user.pages)
    page_id = self.request.get("p")
    self.render("app/upgrade.html", admin=True, pages=pages, page_id=page_id, user=user, fname=fname, lname=lname)
def show_page(pagename):
    """Render the page whose slug matches *pagename*, or 404 if none exists.

    Fixes: peewee's ``Model.get`` raises ``DoesNotExist`` instead of returning
    None, so the original ``page is None`` branch was dead code, and the bare
    ``except`` around everything converted template-rendering bugs into 404s.
    Only the lookup failure is caught now.
    """
    try:
        page = Page.get(Page.slug == str(pagename))
    except Page.DoesNotExist:
        # No row with this slug.
        abort(404)
    return render_template('page.html', page=page)
def save_page(language, query, link, rank, title):
    """Insert a Page row, or refresh the existing row on a duplicate.

    Assumes a unique constraint over (language, link, query) backs the
    IntegrityError path — TODO confirm against the Page model definition.
    """
    try:
        pg = Page.create(language=language, query=query, link=link, rank=rank, title=title)
    except peewee.IntegrityError:
        # Row already exists: fetch it and update the mutable fields.
        pg = Page.get(Page.language == language, Page.link == link, Page.query == query)
        pg.language = language
        pg.rank = rank
        pg.title = title
        pg.save()
def post(self):
    """Create or update a Page from the admin form, then redirect to the list.

    On any error, re-renders the form with the submitted values and the
    exception so the user can retry.
    """
    item = None
    vals = {}
    try:
        # get all the incoming values
        section = Section.get( self.request.get('section') )
        name = self.request.get('name').strip()
        title = self.request.get('title').strip()
        content = self.request.get('content')
        type = self.request.get('type')
        label_raw = self.request.get('label_raw').strip()
        attribute_raw = util.make_attr_raw_string( {
            'index-entry' : self.request.get('index_entry'),
            'has-comments' : self.request.get('has_comments'),
            'comments-open' : self.request.get('comments_open'),
        } ).strip()
        # some pre-processing of the input params
        if name == '':
            # Derive a URL slug from the title when no name was supplied.
            name = util.urlify(self.request.get('title'))
        if self.request.get('key'):
            # Editing an existing page: load it and overwrite its fields.
            item = Page.get( self.request.get('key') )
            item.section = section
            item.name = name
            item.title = title
            item.content = content
            item.type = type
            item.label_raw = label_raw
            item.attribute_raw = attribute_raw
        else:
            # Creating a brand-new page.
            item = Page(
                section = section,
                name = name,
                title = title,
                content = content,
                type = type,
                label_raw = label_raw,
                attribute_raw = attribute_raw,
            )
        # update and save this page
        item.set_derivatives()
        item.put()
        # once saved, regenerate certain section properties
        section.regenerate()
        # also, check that this section doesn't have duplicate content
        Task(
            params={ 'section_key': str(section.key()), 'name': item.name },
            countdown=30,
        ).add( queue_name='section-check-duplicate-nodes' )
        self.redirect('.')
    except Exception, err:
        # Re-render the form with the raw POST data and the error.
        vals['item'] = self.request.POST
        vals['err'] = err
        vals['sections'] = Section.all()
        vals['types'] = models.type_choices
        self.template( 'page-form.html', vals, 'admin' );
def get_page_object(page_id):
    """Return the Page for *page_id*, creating it from the API on a miss.

    Checks the local DB first; when no row exists, fetches the raw page
    data from the API and persists a new Page before returning it.
    """
    try:
        page = Page.get(Page.page_id == page_id)
    except DoesNotExist:
        api_data = get_raw_page_by_id(page_id)
        page = Page.create(page_id=api_data['pageid'], page_title=api_data['title'])
    return page
def get(self):
    """Render the page add/edit form, pre-populated when a 'key' is given."""
    key = self.request.get('key')
    # A page key means we are editing an existing page; otherwise blank form.
    item = Page.get(key) if key else None
    context = {
        'item': item,
        'sections': Section.all(),
        'types': models.type_choices,
    }
    self.template('page-form.html', context, 'admin')
def get(self):
    """Render the delete-confirmation form for the page named by 'key'.

    Fix: the original used a bare ``except:``, which also traps
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    Redirects back to the listing when the key is missing or invalid.
    """
    try:
        if self.request.get('key'):
            item = Page.get(self.request.get('key'))
            vals = {'item': item}
            self.template('page-del.html', vals, 'admin')
        else:
            self.redirect('.')
    except Exception:
        # Bad/stale key (or template failure): fall back to the listing.
        self.redirect('.')
def post(self):
    """Delete the page identified by the 'key' request parameter.

    Fixes: bare ``except:`` clauses narrowed to ``except Exception``, and a
    missing/empty 'key' now redirects back to the listing instead of
    producing no response at all.
    """
    try:
        key = self.request.get('key')
        item = Page.get(key) if key else None
        if item is not None:
            try:
                item.delete()
                self.redirect('.')
            except Exception:
                # Deletion failed: show the form again with an error message.
                vals = {'item': item, 'err': 'There was an error when deleting this page, please try again'}
                self.template('page-del.html', vals, 'admin')
        else:
            self.redirect('.')
    except Exception:
        self.redirect('.')
def order_pages(language, random_seed):
    '''
    Random seed should be deterministic, but different for each language.
    '''
    # NOTE(review): hash(str) is randomized per process on Python 3 unless
    # PYTHONHASHSEED is pinned — confirm this runs where str hashing is stable.
    random.seed(random_seed * hash(language))
    pages = (Page.select()
             .group_by(Page.link)
             .where(
                 Page.language == language,
                 Page.has_example == 1,
             ))
    # Shuffle the ids, then map back to the rows already fetched instead of
    # issuing one extra Page.get() query per id (the original was N+1).
    pages_by_id = {p.id: p for p in pages}
    ids = list(pages_by_id)
    random.shuffle(ids)
    return [pages_by_id[id_] for id_ in ids]
def get(self, page_title, revision=None):
    """Serve the wiki edit form for *page_title*.

    revision: None edits the newest revision; "new"/"/new" shows a blank
    form; any other value is parsed as a numeric revision like "/3".
    """
    page_title = page_title.strip('/')
    user = self.authenticate_session_id()
    if not user:
        # Editing requires a logged-in session.
        self.redirect("/login")
    elif revision is None:
        p = Page.get_newest(page_title)
        if p:
            self.render("edit.html", user=user.username, title=p.title, content=p.content)
        # NOTE(review): when no revision of the page exists, nothing is
        # rendered here at all — presumably the "new" path was intended;
        # confirm against the routing.
    elif revision == "/new" or revision == "new":
        self.render("edit.html", user=user.username, title=page_title)
    else:
        # Strip the leading slash and load the specific revision.
        revision = int(revision.strip('/'))
        p = Page.get(page_title, revision)
        self.render("edit.html", user=user.username, title=page_title, content=p.content)
def order_pages(language, random_seed):
    '''
    Random seed should be deterministic, but different for each language.
    '''
    # NOTE(review): hash(str) is randomized per process on Python 3 unless
    # PYTHONHASHSEED is pinned — confirm this runs where str hashing is stable.
    random.seed(random_seed * hash(language))
    pages = (Page.select()
             .group_by(Page.link)
             .where(
                 Page.language == language,
                 (Page.purpose == 'targeted end use') | (Page.purpose == 'miscellany end use')
             ))
    # Shuffle the ids, then map back to the rows already fetched instead of
    # issuing one extra Page.get() query per id (the original was N+1).
    pages_by_id = {p.id: p for p in pages}
    ids = list(pages_by_id)
    random.shuffle(ids)
    return [pages_by_id[id_] for id_ in ids]
def _stats():
    """Render the internal stats dashboard (restricted access).

    Allowed for GAE admins, @fogcreek.com accounts, and whitelisted IPs.
    """
    if not (users.is_current_user_admin() or users.get_current_user().email().endswith('@fogcreek.com') or request.remote_addr in ['127.0.0.1', '71.190.247.30']):
        abort(404)
    # Live totals straight from the datastore (keys-only queries for speed).
    user_count = GqlQuery('SELECT __key__ FROM UserSettings').count(None)
    site_count = GqlQuery('SELECT __key__ FROM Site WHERE example=false').count(None)
    now = datetime.now()
    # The last 14 days, oldest first.
    days = list(reversed([now-timedelta(days) for days in range(14)]))
    day_views = [get_period_and_count('css:all', PeriodType.DAY, day) for day in days]
    day_users = [get_period_and_count('user:all', PeriodType.DAY, day) for day in days]
    day_sites = [get_period_and_count('site:all', PeriodType.DAY, day) for day in days]
    # overwrite today's cached numbers with the live count we just got from the database
    day_users[-1] = day_users[-1][0], user_count
    day_sites[-1] = day_sites[-1][0], site_count
    # get the top referrers
    period_type = PeriodType.DAY
    fetch_limit = 50
    query = LivecountCounter.all().order('-count')
    query.filter('period_type = ', period_type)
    query.filter('period = ', PeriodType.find_scope(period_type, datetime.now()))
    top_counters = query.fetch(fetch_limit)
    top_referrers = []
    for counter in top_counters:
        name = counter.name
        # Counter names look like 'css:page:<referrer>:<page_key>'; skip our own domain.
        if name.startswith('css:page') and not name.startswith('css:page:www.webputty.net'):
            parts = name.split(':')
            page = None
            preview_size = 0
            published_size = 0
            try:
                page = Page.get(parts[3])
            except Exception:
                logging.warn("_stats counldn't find matching page: %s", parts[3])
            if page:
                preview_size = len(page.compressed_css(True))
                published_size = len(page.compressed_css(False))
            top_referrers.append((parts[2], counter.count, parts[3], preview_size, published_size))
    return render_template('_stats.html', user_count=user_count, site_count=site_count, day_views=day_views, day_users=day_users, day_sites=day_sites, top_referrers=top_referrers)
def get(self, page_title, revision=None): user = self.authenticate_session_id() #remove slash at beginning page_title = page_title.strip('/') if revision is None: p = Page.get_newest(page_title) else: #change to int revision = int(revision.strip('/')) p = Page.get(page_title, revision) if p: md = markdown.Markdown(safe_mode='escape', output_format='html5', extensions=['attr_list', 'fenced_code', 'codehilite']) content = md.convert(p.content) if user: self.render("wikipage.html", user=user.username, content=content, title=p.title, creator=p.created_by, last_editor=p.edited_by, version=p.revision) else: self.render("wikipage.html", content=content, title=p.title, creator=p.created_by, last_editor=p.edited_by, version=p.revision) else: self.redirect("/_edit/" + page_title + "/new") """
def upload_style():
    """Push the CSS for the posted 'page_key' to the CDN; always answer 'OK'."""
    page_key = request.form.get('page_key', None)
    if not page_key:
        # No key posted: nothing to upload, still report success.
        return 'OK'
    Page.get(page_key).upload_to_cdn()
    return 'OK'
def get(self, **kwargs):
    '''Get Users Pages From Facebook'''
    # Refresh the local Page cache from the Graph API, then render the
    # dashboard. Both halves are best-effort: failures are swallowed so a
    # Graph outage still shows the (possibly stale) dashboard.
    try:
        # Pages the user administers, via the Graph API "accounts" edge.
        fb_users_pages = self.graph.get_connections("me", "accounts")
        fb_page_ids = []
        for p in fb_users_pages['data']:
            #if p['category'] != 'Application' or p['id'] == '141947329155355':
            try:
                if p['name']:
                    fb_page_ids.append(p["id"])
            except:
                pass
        fb_pages = self.graph.get_objects(fb_page_ids)
        '''Update Pages Cache'''
        batch = []
        for k, fb_page in fb_pages.items():
            # Graph fields are optional; default each missing one to None.
            try:
                picture = fb_page["picture"]
            except KeyError:
                picture = None
            try:
                fan_count = fb_page["fan_count"]
            except KeyError:
                fan_count = None
            try:
                has_added_app = fb_page["has_added_app"]
            except KeyError:
                has_added_app = None
            try:
                category = fb_page["category"]
            except KeyError:
                category = None
            page = Page.get_by_key_name(fb_page["id"])
            if not page:
                # First time we've seen this page: create a cache entity.
                page = Page(key_name=str(fb_page["id"]),
                            id=str(fb_page["id"]),
                            name=fb_page["name"],
                            link=fb_page["link"],
                            category=category,
                            picture=picture,
                            fan_count=str(fan_count),
                            has_added_app=has_added_app
                            )
                batch.append(page)
            else:
                # Known page: refresh the volatile fields only.
                page.picture = picture
                page.fan_count = str(fan_count)
                page.has_added_app = has_added_app
                batch.append(page)
        if batch:
            page_keys = db.put(batch)
            user = self.current_user
            user.pages = page_keys
            db.put(user)
    except:
        pass
    """Get Users Pages"""
    try:
        user = self.current_user
        pages = Page.get(user.pages)
        self.render("app/dashboard.html", admin=True, pages=pages)
    except:
        pass
def post(self, **kwargs):
    """AJAX dispatcher for editor actions, selected by the 'method' kwarg.

    Writes 'True' or 'False' to the response body to signal success.
    Methods handled: deletewidget, savepageorder, savewidget, saveoption,
    upgradedowngrade.
    """
    user = self.current_user
    admin = False
    method = kwargs.get('method')
    if method == 'deletewidget':
        # Soft-delete: flag the widget rather than removing the entity.
        key_name = self.request.get("wid")
        widget = Widget.get_by_key_name(key_name)
        if widget:
            widget.deleted = True
            widget.last_modified_by = user
            try:
                db.put(widget)
                self.response.out.write('True')
            except:
                self.response.out.write('False')
    if method == 'savepageorder':
        # Persist the widget ordering sent as a comma-separated key list.
        page_order = self.request.get('pageorder')
        page_order = page_order.split(',')
        batch = []
        for k, v in enumerate(page_order):
            widget = Widget.get_by_key_name(v)
            if widget:
                widget.order = k
                widget.last_modified_by = user
                batch.append(widget)
        try:
            db.put(batch)
            self.response.out.write('True')
        except:
            self.response.out.write('False')
    if method == 'savewidget':
        page = Page.get_by_key_name(self.request.get('pageid'))
        key_name = self.request.get('wid')
        widget = Widget.get_by_key_name(key_name)
        # Embed-type widgets resolve their embed code asynchronously.
        if self.request.get('wtype') == 'embedly':
            fields = simplejson.loads(self.request.get('wcontents'))
            #get_embedly_code({'id':self.request.get('wid'),"url":fields['embedly_url'],"type":"embedly"})
            deferred.defer(get_embedly_code, {'id': self.request.get('wid'), "url": fields['embedly_url'], "type": "embedly"})
        if self.request.get('wtype') == 'googlemaps':
            fields = simplejson.loads(self.request.get('wcontents'))
            deferred.defer(get_embedly_code, {'id': self.request.get('wid'), "url": fields['googlemaps_link'], "type": "googlemaps"})
        if not widget:
            widget = Widget(key_name=key_name,
                            id=key_name,
                            type=self.request.get('wtype'),
                            name=self.request.get('wname'),
                            page=page,
                            contents=self.request.get('wcontents'),
                            last_modified_by=user
                            )
            # Mirror each JSON field onto the entity; a '__text' suffix in
            # the key marks values that must be stored as db.Text.
            fields = simplejson.loads(self.request.get('wcontents'))
            for k, v in fields.iteritems():
                db_type = k.split('__')
                try:
                    db_type = db_type[1]
                except IndexError:
                    db_type = None
                logging.info(db_type)
                if db_type == 'text':
                    setattr(widget, k, db.Text(v))
                else:
                    setattr(widget, k, v)
        else:
            # Existing widget: refresh its fields from the request.
            widget.name = self.request.get('wname')
            widget.contents = self.request.get('wcontents')
            widget.last_modified_by = user
            fields = simplejson.loads(self.request.get('wcontents'))
            for k, v in fields.iteritems():
                db_type = k.split('__')
                try:
                    db_type = db_type[1]
                except IndexError:
                    db_type = None
                logging.info(db_type)
                if db_type == 'text':
                    setattr(widget, k, db.Text(v))
                else:
                    setattr(widget, k, v)
        try:
            db.put(widget)
            self.response.out.write('True')
        except:
            self.response.out.write('False')
    if method == 'saveoption':
        try:
            option = Option.get_by_id(int(self.request.get('id')))
        except:
            option = None
        if self.request.get('otype') == 'page':
            link = Page.get_by_key_name(self.request.get('opageid'))
        if not option:
            # NOTE(review): `link` is only bound when otype == 'page'; any
            # other otype raises NameError here — confirm intended.
            option = Option(name=self.request.get('oname'),
                            value=self.request.get('ovalue'),
                            type=self.request.get('otype'),
                            type_reference=link
                            )
        else:
            option.value = self.request.get('ovalue')
        try:
            db.put(option)
            self.response.out.write('True')
        except:
            self.response.out.write('False')
    if method == 'upgradedowngrade':
        try:
            # Update the FastSpring subscription quantity via their REST API,
            # then flag the selected pages as upgraded.
            username = self.get_config('saasy', 'username')
            password = self.get_config('saasy', 'password')
            product = self.get_config('saasy', 'product')
            qty = str(self.request.get('qty'))
            basic_auth = base64.b64encode('%s:%s' % (username, password))
            xml_data = "<subscription><productPath>/%s</productPath><quantity>%s</quantity><no-end-date/></subscription>" % (product, qty)
            subscriber_info = simplejson.loads(self.current_user.subscriber_info)
            url = "https://api.fastspring.com/company/seedprod/subscription/%s" % subscriber_info['reference']
            response = urlfetch.fetch(url=url, payload=xml_data, headers={'Authorization': 'Basic %s' % basic_auth, 'Content-Type': 'application/xml'}, method=urlfetch.PUT)
            if response.status_code == 200:
                # Update Pages
                upgraded_pages = self.request.get('pages').split(',')
                pages = Page.get(user.pages)
                batch = []
                for p in pages:
                    if p.id in upgraded_pages:
                        p.upgraded = '1'
                        p.upgraded_by = user
                    else:
                        # Only downgrade pages this same user upgraded.
                        if p.upgraded_by:
                            if p.upgraded_by.id == user.id:
                                p.upgraded = '0'
                                p.upgraded_by = None
                    batch.append(p)
                db.put(batch)
                self.response.out.write('True')
            else:
                self.response.out.write('False')
        except:
            self.response.out.write('False')