def email_star_broadcast(self, delay=0, span=600):
    """Email notifications for recent Star and Broadcast feed items.

    Scans both collections for items created in the window
    [now - delay - span, now - delay] that still have send_email=True,
    mails each one (unless the initiator is the entity's owner), and
    marks it sent.

    Returns a stats dict: {'matched': ..., 'send_count': ...}.
    """
    # FIX: take one timestamp up front — the original called now() twice,
    # so the $gt and $lt bounds were computed from slightly different
    # instants, letting items at the window edge be skipped or repeated
    # across consecutive cron runs.
    n = now()
    spec = {
        'send_email': True,
        'created': {"$gt": n - delay - span, "$lt": n - delay}
    }
    stats = {'send_count': 0, 'matched': 0}
    mailer = newhive.mail.Feed(db=self.db, jinja_env=self.jinja_env)

    def send(item):
        stats['matched'] += 1
        # Don't notify users about their own actions.
        if item.initiator.id == item.entity.owner.id:
            return
        mailer.send(item)
        stats['send_count'] += 1
        item.update(send_email=False, email_sent=now())

    for item in self.db.Star.search(spec):
        send(item)
    for item in self.db.Broadcast.search(spec):
        send(item)
    return stats
def site_referral_reminder(self, delay=48 * 3600, span=60):
    """Send reminder emails for unredeemed site referrals.

    Targets referrals issued by the site user whose 'to' field contains
    an email address (matches '@'), created in the window
    [now - delay - span, now - delay], that have no 'user_created' or
    'reuse' marker. At most one reminder is sent per distinct address
    per run; each send is recorded by pushing a timestamp onto
    'reminder_sent'.

    Returns {'send_count': n}.
    """
    # FIX: single timestamp for both window bounds (was two now() calls,
    # giving inconsistent $gt/$lt edges).
    n = now()
    spec = {
        'user_created': {'$exists': False},
        'reuse': {'$exists': False},
        'user': self.db.User.site_user.id,
        'created': {'$gt': n - delay - span, '$lt': n - delay},
        'to': re.compile(r'@')
    }
    stats = {'send_count': 0}
    mailer = newhive.mail.SiteReferralReminder(db=self.db, jinja_env=self.jinja_env)
    # FIX: use a set for duplicate-address checks — was a list, making
    # each membership test an O(n) scan.
    sent_emails = set()
    for referral in self.db.Referral.search(spec):
        address = referral.get('to')
        if address not in sent_emails:
            mailer.send(referral)
            referral.update_cmd({'$push': {'reminder_sent': now()}})
            stats['send_count'] += 1
            sent_emails.add(address)
    return stats
def day(self, key, days_ago, day_span=1):
    """Restrict `key` (a timestamp-valued field) to a day-granular window.

    `days_ago` positions the window start that many days in the past;
    `day_span` is the window length in days. Returns self for chaining.
    """
    seconds_per_day = 86400
    start = now() - days_ago * seconds_per_day
    end = start + day_span * seconds_per_day
    self.bt(key, start, end)
    return self
def image_from_instance(instance_name, no_reboot=True): conn = get_ec2_con() instance = get_instance(instance_name) if len(instance.groups) == 1: group = instance.groups[0].name else: print "Instance belongs to multiple security groups: {}".format( [g.name for g in instance.groups]) group = raw_input("enter category for image: ").strip() name = "{group} {date}".format(group=group, date=local_date()) description = "image generated by newhive.manage.ec2.image_from_instance from server {name} on {date}" description = description.format(name=server_name, date=local_date()) image_id = conn.create_image(instance.id, name, description, no_reboot) image = None while not image: time.sleep(0.5) try: image = conn.get_image(image_id) except conn.ResponseError: print "retrying" image = None image.add_tag('Name', name) image.add_tag('category', group) image.add_tag('created', int(now())) return image
def threaded_wget(self, url, time_out=0, pipe=None):
    # NOTE(review): this copy appears truncated — a fuller version of the
    # same method exists in the LoadTest class below and continues past
    # the except; here execution stops right after the fetch, leaving
    # `res`, `error`, and `time_start` unused. Confirm whether this
    # standalone duplicate is dead code.
    if False and time_out:  # `False and` permanently disables this path
        # If given a maximum execution time, call back into self,
        # and join with a timeout. If joined thread succeeds, it takes
        # care of itself, otherwise handle errors on this thread.
        pipe = {}
        t = threading.Thread(target=self.threaded_wget, args=(url, 0, pipe))
        t.daemon = True
        t.start()
        t.join(time_out)
        if t.isAlive():
            append_log(url, "timeout")
            self.error_count += 1
            self.running_queries -= 1
            # Signal the disowned worker to discard its result.
            pipe['kill'] = True
        return
    time_start = now()
    # debug("fetching: " + url)
    error = False
    try:
        res = urlopen(url, None, time_out)
    except Exception, e:
        error = True
def add_work(self):
    """Claim up to 10 files awaiting resample and append them to the queue."""
    # TODO: Need to write maintenance script to clean up files which
    # failed resamples, namely: resample_time older than 6 hours AND has no resamples
    batch = list(db.File.search({'resample_time': 0}).limit(10))
    claimed_at = now()
    # Stamp each file's resample_time so subsequent queries skip it.
    for record in batch:
        record.update(resample_time=claimed_at)
    self.queue.extend(batch)
def user_invites_reminder(self, delay=0, span=0):
    """Mail a reminder to users whose referral count still equals
    config.initial_invite_count, created within
    [now - span - delay, now - delay].

    Returns {'send_count': n}.
    """
    mailer = newhive.mail.UserInvitesReminder(db=self.db, jinja_env=self.jinja_env)
    stats = {'send_count': 0}
    # FIX: single timestamp so both window bounds are consistent
    # (original evaluated now() twice).
    n = now()
    spec = {
        'created': {'$gt': n - span - delay, '$lt': n - delay},
        'referrals': config.initial_invite_count
    }
    for user in self.db.User.search(spec):
        mailer.send(user)
        stats['send_count'] += 1
    return stats
def settings_update(self, tdata, owner_name=None, **args):
    """ Doubles as post handler and settings page api route for settings """
    owner = tdata.user
    request = tdata.request
    # Current subscription list; reused below to mark checkbox state.
    subscribed = owner.get('email_subscriptions', [])
    email_lists = map(
        lambda email_list: {
            'id': 'email_' + email_list.name,
            'subscribed': email_list.name in subscribed,
            'description': ui_str.email_subscription_ui[email_list.name],
            'name': email_list.name
        }, mail.MetaMailer.unsubscribable('user'))
    # if user submitted form
    if len(request.form.keys()):
        # update user email and password.
        if request.form.get('email'):
            update = dict(email=request.form.get('email'))
        # Require the existing password before changing email or password.
        # NOTE(review): `update` is only bound when an email was posted;
        # the new_password / update['email'] accesses below raise
        # UnboundLocalError when the form omits 'email' — confirm the
        # form always submits it.
        if ((request.form.get('email') != owner['email'])
                or request.form.get('new_password')) and (
                not owner.cmp_password(request.form.get('password'))):
            return {
                'error': 'Password given does not match existing password'
            }
        if request.form.get('new_password'):
            password = request.form.get('new_password', '')
            # check_password returns a truthy error message on failure.
            if owner.check_password(password):
                return {'error': owner.check_password(password)}
            update.update({'password': password})
        if update['email'] and update['email'] != owner.get('email'):
            request_date = now()
            # owner.update(email_confirmation_request_date=request_date)
            # Best-effort: a failed confirmation email is silently ignored.
            try:
                mail.EmailConfirmation(db=self.db, jinja_env=self.jinja_env).send(
                    owner, update['email'], request_date)
            except Exception, e:
                pass
                # return { 'error': 'Email not sent' };
        # message = message + ui.email_change_success_message + " "
        # update email subscriptions
        subscribed = []
        for email_list in email_lists:
            if request.form.get(email_list['id']):
                subscribed.append(email_list['name'])
                email_list['subscribed'] = True
            else:
                email_list['subscribed'] = False
        update['email_subscriptions'] = subscribed
        owner.update(**update)
def _email_milestone_send(self, expr, mailer):
    """Record a newly reached view milestone on expr and mail it.

    Returns the milestone number when one fired, otherwise False.
    """
    milestone = self._milestone_check(expr)
    if not milestone:
        return False
    history = expr.get('milestones', {})
    history[str(milestone)] = now()
    # updated=False: don't bump the expression's updated timestamp.
    expr.update(milestones=history, updated=False)
    mailer.send(expr, milestone)
    return milestone
def loadtest(self, max_count=9999, qps=5., generate_url=test_url):
    """Fire up to max_count fetches at roughly `qps` queries/second on
    daemon threads, then report error rate and achieved QPS.

    Returns True when fewer than 2% of queries failed.
    """
    global log
    log = Log()
    self.error_count = 0
    self.success_count = 0
    self.running_queries = 0
    count = 0
    # time_out and max_threads come from module-level globals.
    time_out = max_time
    time_start = now()
    for count in xrange(max_count):
        calc_qps = 0 if not count else count / (now() - time_start)
        # Throttle: wait while we're ahead of the target rate or have too
        # many live threads.
        while calc_qps > qps or threading.active_count() > max_threads:
            calc_qps = 0 if not count else count / (now() - time_start)
            # # debug("waiting for %s threads:" % (threading.active_count() - max_threads))
            # debug("qps: %f" % calc_qps)
            time.sleep(.1)
        url = generate_url(count)
        t = threading.Thread(target=self.threaded_wget, args=(url, time_out))
        t.daemon = True
        t.start()
        self.running_queries += 1
    # Drain: wait for all worker threads to finish.
    while threading.active_count() > 1:
        time.sleep(.1)
    # count holds the last loop index; +1 converts to a total.
    count += 1
    total_time = now() - time_start
    final_qps = self.success_count / total_time
    print
    print "Loadtest complete (%f seconds)" % total_time
    print "(%d/%d) errors/total: %f QPS" % (self.error_count, count, final_qps)
    print log.histogram
    # Passing condition is that 98% of queries succeeded.
    return (self.error_count < count * .02)
def get_exprs(query_and=None):
    """Return expressions pending a snapshot, most recently updated first.

    query_and: optional extra query dict AND-ed with the pending-snapshot
    criteria. When the module-level `test` flag is set, results are
    restricted to owner_name 'abram'.
    """
    time_last = now() - 10 * 60  # Don't re-snapshot within 10 minutes
    q = mq(**snapshots_pending(time_last))
    and_exp = []
    if query_and:
        # BUG FIX: was `and_expr.append(query_and)` — a typo'd name that
        # raised NameError whenever a query_and was supplied.
        and_exp.append(query_and)
    if test:
        and_exp.append({'owner_name': 'abram'})
    if and_exp:
        and_exp.append(q)
        q = {'$and': and_exp}
    expressions_to_snapshot = db.Expr.search(q, sort=[('updated', -1)])
    return expressions_to_snapshot
class LoadTest(unittest.TestCase): def setUp(self): self.error_count = 0 self.success_count = 0 def threaded_wget(self, url, time_out=0, pipe=None): if False and time_out: # If given a maximum execution time, call back into self, # and join with a timeout. If joined thread succeeds, it takes # care of itself, otherwise handle errors on this thread. pipe = {} t = threading.Thread(target=self.threaded_wget, args=(url, 0, pipe)) t.daemon = True t.start() t.join(time_out) if t.isAlive(): append_log(url, "timeout") self.error_count += 1 self.running_queries -= 1 pipe['kill'] = True return time_start = now() # debug("fetching: " + url) error = False try: res = urlopen(url, None, time_out) except Exception, e: error = True if pipe and pipe.get('kill'): return if error: self.error_count += 1 append_log(url, "error") elif res.getcode() >= 400: self.error_count += 1 append_log(url, "timeout") else: self.success_count += 1 append_log(url, now() - time_start) self.running_queries -= 1
def _milestone_check(self, expr):
    """Return the new view milestone expr has just reached, or False.

    A milestone fires only when it exceeds the highest one already
    recorded, at most once per 24 hours, and only when it is at least
    the owner's median expression view count.
    """
    def highest_reached(view_count):
        # Scan thresholds largest-first (reversed() implies
        # config.milestones is sorted ascending — matches original use).
        for threshold in reversed(config.milestones):
            if threshold <= view_count:
                return threshold
        return 0

    history = expr.get('milestones')
    if history:
        previous = max(int(k) for k in history.keys())
        elapsed = now() - max(history.values())
    else:
        previous = 0
        elapsed = float('inf')

    candidate = highest_reached(expr.get('views', 0))
    if candidate <= previous or elapsed <= 86400:
        return False
    median_views = user_expression_summary(expr.owner).views.median()
    if candidate >= median_views:
        return candidate
    return False
def cron(self, tdata, request, response, method_name=None, **args):
    """ Reads internal crontab, list of tuples of the format:
        (Cron Format String, Method Name, Method Options Dictionary)
        Cron Format String is a simplified cron format of the form: min hour

        Dispatches to the named handler with int-coerced 'delay'/'span'
        query args, returning its stats as JSON. Requires HTTPS and the
        shared key; anything else (including an unknown method) gets 404.
    """
    # BUG FIX: getattr without a default raised AttributeError for an
    # unknown method_name (and TypeError when method_name is None) before
    # the 404/key checks could run; resolve safely instead.
    method = getattr(self, method_name, None) if method_name else None
    if not request.is_secure or not method or (request.args.get('key') != self.key):
        return self.serve_404(tdata)
    opts_serial = dfilter(request.args, ['delay', 'span'])
    opts = dict((k, int(v)) for k, v in opts_serial.iteritems())
    status = method(**opts)
    status.update({'timestamp': now(), 'args': opts})
    return self.serve_json(response, status)
def send(item):
    # NOTE(review): appears to be a stray module-level copy of the `send`
    # closure defined inside email_star_broadcast — `stats` and `mailer`
    # are not defined at this scope, so calling it here raises NameError.
    # Confirm this is dead code and remove.
    stats['matched'] += 1
    # Skip self-notifications (initiator owns the entity).
    if item.initiator.id == item.entity.owner.id:
        return
    mailer.send(item)
    stats['send_count'] += 1
    item.update(send_email=False, email_sent=now())
def admin_query(self, tdata, db_args={}, **kwargs):
    """Admin-only ad-hoc database query endpoint.

    Builds a query from request args (q, day, sort, order, limit,
    fields, collection), with two special modes ('top_tags',
    'top_lovers') and a help text. Returns cards, CSV-style rows, or a
    text result depending on the args. Non-admins get {}.
    """
    if not self.flags.get('admin'):
        return {}
    # BUG FIX: db_args has a mutable default and was mutated in place via
    # .update(...), so settings leaked across requests. Work on a copy.
    db_args = dict(db_args) if db_args else {}
    parse = json.loads
    args = tdata.request.args
    collection = collection_of(self.db, args.get('collection', 'Expr').capitalize())
    q = mq(parse(args.get('q', '{}')))
    if args.get('day'):
        (time_prop, days_ago, day_span) = args['day'].split(',')
        days_ago, day_span = float(days_ago), float(day_span)
        q.day(time_prop, days_ago, day_span)
    fields = None
    if args.get('fields'):
        fields = args['fields'].split(',')
    db_args.update(spec=q,
                   sort=args.get('sort', 'created'),
                   order=parse(args.get('order', '-1')),
                   limit=parse(args.get('limit', '0' if fields else '20')),
                   fields=fields)
    # Renamed from `help` to avoid shadowing the builtin.
    help_text = """ Query parameters:
    q: database query, e.g., {"owner_name": "zach"}
    day: time_property,days_ago,day_span
    sort: default updated
    order: default -1
    fields: prop1,prop2,prop3 (outputs fields in CSV format)
    collection: 'user' | 'feed' | 'trash' | 'expr' (default)
    special: 'top_tags' | 'top_lovers' | None
    help: ...this...

    Examples:
    Get list of emails from recent signups in the last 14 days
    /home/admin/query?day=created,14,14&collection=user&fields=email,name,fullname

    Show users with given email
    /home/admin/query?q={"email":"*****@*****.**"}&collection=user
    """
    # Special cases
    special = args.get('special')
    if special == 'top_tags':
        if tdata.request.args.get('help', False) != False:
            return {'text_result': 'limit: default 1000'}
        db_args.update(limit=parse(args.get('limit', '1000')))
        common = self.db.tags_by_frequency(collection=collection, **db_args)
        return {
            'data': "\n".join([x[0] + ": " + str(x[1]) for x in common])
        }
    elif special == 'top_lovers':
        if tdata.request.args.get('help', False) != False:
            return {'text_result': 'last_days: default 30'}
        last_days = parse(args.get('last_days', '30'))
        loves_by_users = Counter()
        for r in self.db.Feed.search(
                mq(class_name='Star', entity_class='Expr').gt(
                    'created', now() - 86400 * last_days)):
            loves_by_users[r['initiator_name']] += 1
        resp = json.dumps(loves_by_users.most_common())
        # Put each [name, count] pair on its own line for readability.
        return {'text_result': re.sub(r'\],', '],\n', resp)}
    if tdata.request.args.get('help', False) != False:
        return {'data': help_text}
    data = {}
    res = collection.paginate(**db_args)
    if fields:
        rows = [[r.get(k, '') for k in fields] for r in res]
        data['data'] = '\n'.join([','.join(row) for row in rows])
    else:
        data['cards'] = list(res)
    return data
def main(db): print "running migration: setting initial expression view milestones" t0 = now() for expr in db.Expr.search({}): migrate(expr) print "migration complete in {} seconds".format(now() - t0)
def test_email_confirmation(self):
    """Smoke test: the confirmation mailer sends without raising."""
    recipient = '*****@*****.**'
    self.mailer.send(self.test_user, recipient, request_date=now())
def launch_instance(name, category='dev', git_branch='master', **kwargs):
    """Launch an EC2 instance from the latest image for `category`, tag it,
    add DNS CNAMEs, then ssh in and check out `git_branch`.

    Returns the boto instance object.
    """
    conn = get_ec2_con()
    image = get_latest_image(category)
    # Default kwargs for run_instance
    run_args = {'instance_type': 't1.micro'}
    if category in [g.name for g in conn.get_all_security_groups()]:
        run_args.update({'security_groups': [category]})
    # override defaults
    run_args.update(kwargs)
    print "Launching instance with ami: {} and args \n{}".format(
        image.id, run_args)
    reservation = conn.run_instances(image.id, **run_args)
    instance = reservation.instances[0]
    # Poll for instance startup
    print "\nWhile instance is launching, enter remote login credentials:"
    # NOTE(review): the following region is redacted/mangled in the source
    # ("******" placeholders). It presumably prompted for username and
    # password and polled instance.update() while status was pending —
    # recover the original from version control before editing.
    username = raw_input("username: "******"password: "******"instance status pending"
    time.sleep(5)
    status = instance.update()
    print "instance status {}".format(status)
    print "\nsetting server metadata and name for ec2 console"
    instance.add_tag("Name", name)
    instance.add_tag('created', int(now()))
    instance.add_tag('terminatable', 'true')
    print "adding dns routes for {name}.newhive.com and {name}.newhiveexpression.com".format(
        name=name)
    manage.route53.add_cname('newhive.com', name, instance.public_dns_name,
                             no_confirmation=True)
    manage.route53.add_cname('tnh.me', name, instance.public_dns_name,
                             no_confirmation=True)
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    def connect():
        # Returns True once an ssh session can be established.
        try:
            ssh.connect(instance.public_dns_name, username=username,
                        password=password)
            return True
        except Exception as e:
            return False

    print "\nWaiting 20 seconds for server to startup, then attempting to establish ssh connection"
    time.sleep(20)
    connected = connect()
    while not connected:
        print "retrying ssh connection"
        time.sleep(5)
        connected = connect()

    def remote_exec(command):
        # Run a command remotely, echoing its stdout with a '... ' prefix.
        stdin, stdout, stderr = ssh.exec_command(command)
        print "\nRemote Server:"
        for line in stdout:
            print '... ' + line.strip('\n')
        print
        ssh.close()

    command = ";".join([
        'echo',
        'echo "Checking out branch {git_branch}"',
        'cd /var/www/newhive',
        'git fetch',
        'git checkout {git_branch}',
        'git pull origin {git_branch}',
        'echo',
        'echo "setting server name"',
        'sudo /var/www/newhive/bin/set_server_name {name}',
        'echo',
        'echo "restarting apache"',
        'sudo apache2ctl graceful',
    ]).format(git_branch=git_branch, name=name)
    remote_exec(command)
    return instance
def handle(request):
    """Top-level WSGI request handler: canonicalize host, route, dispatch,
    and serve a 500 page on any unhandled exception."""
    time_start = now()
    print(request)
    environ = copy.copy(request.environ)
    # Redirect www. links to naked URL
    if request.host.startswith('www.'):
        return base_controller.redirect(Response(),
                                        re.sub('//www.', '//', request.url, 1),
                                        permanent=True)
    # Redirect thenewhive.com links to newhive.com
    if re.search('thenewhive.com', environ['HTTP_HOST']):
        return base_controller.redirect(Response(),
                                        re.sub(r'//((.+\.)?)thenewhive\.com',
                                               r'//\1newhive.com', request.url),
                                        permanent=True)
    prefix, site = split_domain(environ['HTTP_HOST'])
    # Convert any ip v4 address into a specific dns
    # site = re.sub('([0-9]+\.){3}[0-9]+','site',site) #//!!
    # Convert the specified DNS into the shorthand DNS (without search dns)
    # site = re.sub('(.*)\.(office|cos)\.newhive\.com','\g<1>',site) #//!!
    environ['HTTP_HOST'] = site
    if prefix and prefix not in config.live_prefixes:
        # NOTE(review): the redirect Response built here is discarded —
        # there is no `return`, so execution falls through to normal
        # routing. Confirm whether a `return` is missing.
        base_controller.redirect(
            Response(),
            'https://' + site + '/' + prefix + environ['PATH_INFO'])
    stats = False
    # stats = True
    if stats:
        pass
        # statprof.start()
        # if not yappi.is_running():
        #     yappi.start()
    try:
        (controller, handler), args = routes.bind_to_environ(environ).match()
    except exceptions.NotFound as e:
        err = True
        # On dev servers, retry routing against the configured server name.
        if not config.live_server:
            try:
                err = False
                #dev = config.dev_prefix + '.' if config.dev_prefix else ''
                environ['HTTP_HOST'] = config.server_name + ':' + environ[
                    'SERVER_PORT']
                (controller, handler), args = routes.bind_to_environ(environ).match()
            except exceptions.NotFound as e:
                err = True
        if err:
            print "Gap in routing table!"
            print request
            return base_controller.serve_500(
                base_controller.new_transaction(request),
                exception=e, json=False)
    except RequestRedirect as e:
        # bugbug: what's going on here anyway?
        raise Exception('redirect not implemented: from: ' + request.url +
                        ', to: ' + e.new_url)
    # print controller
    # print handler
    try:
        if stats:
            pr = cProfile.Profile()
            pr.enable()
            doflags(
                functools.partial(controller.dispatch, handler, request, **args),
                ("iterations", "feed_max"), [5], [2000, 1000, 500, 100])
        response = controller.dispatch(handler, request, **args)
        if stats:
            pr.disable()
            s = io.StringIO()
            ps = pstats.Stats(pr)
            ps.sort_stats('cumulative')
            ps.print_stats(25)
            ps.dump_stats(os.path.join(config.src_home, 'stats'))
            # To view stats graphically, use:
            # alias gprof='gprof2dot.py -f pstats stats | dot -Tpng -o output.png;open output.png'
    except:
        # Catch-all boundary: render a 500 page for any dispatch failure.
        (blah, exception, traceback) = sys.exc_info()
        response = base_controller.serve_500(
            base_controller.new_transaction(request),
            exception=exception, traceback=traceback, json=False)
    print "time %s ms" % (1000. * (now() - time_start))
    if stats and yappi.is_running():
        # statprof.stop()
        # statprof.display()
        yappi.stop()
        yappi.print_stats(sys.stdout, yappi.SORTTYPE_TTOT,
                          yappi.SORTORDER_DESC, 25)
        yappi.clear_stats()
    # this allows unsecure pages to make API calls to https
    # response.headers.add('Access-Control-Allow-Origin', config.abs_url().strip('/'))
    # TODO-security: CONSIDER. Allow pages on custom domains to make API calls
    response.headers.add('Access-Control-Allow-Origin', '*')
    response.headers.add('Access-Control-Allow-Headers',
                         'Content-Type, Authorization, X-Requested-With')
    return response
def test_high_frequency_milestones(self):
    """A milestone reached within 24h of the previous one must not fire."""
    one_hour_ago = now() - 3600
    self.expr.update(milestones={'20': one_hour_ago}, views=80)
    result = self.f(self.expr, self.mailer)
    self.assertIs(result, False)
class Expr(ModelController): model_name = 'Expr' def fetch_naked(self, tdata, request, response, expr_id=None, owner_name=None, expr_name=None, **args): # Request must come from content_domain, as this serves untrusted content if expr_id: # hack for overlap of /owner_name and /expr_id routes expr_obj = self.db.Expr.fetch(expr_id) or self.db.Expr.named( expr_id, '') else: expr_obj = self.db.Expr.named(owner_name, expr_name) return self.serve_naked(tdata, request, response, expr_obj) def expr_custom_domain(self, tdata, request, response, path='', **args): url = request.host + ('/' if path else '') + path expr = self.db.Expr.find({'url': url}) tdata.context['domain'] = request.host return self.serve_naked(tdata, request, response, expr) def serve_naked(self, tdata, request, response, expr_obj): if not expr_obj: return self.serve_404(tdata) # for custom pages using external files custom_html = expr_obj.get('index_url') if custom_html: custom_html = request.scheme + ':' + re.sub( '^.*?//', '//', custom_html) return self.redirect(response, custom_html) bg = expr_obj.get('background') if bg and bg.get('file_id') and not bg.get('dimensions'): f = self.db.File.fetch(bg.get('file_id')) dimensions = f.get('dimensions') if dimensions: bg['dimensions'] = dimensions expr_obj.update(updated=False, background=bg) else: f.update(resample_time=0) if (expr_obj.get('auth') == 'password' and not expr_obj.cmp_password(request.form.get('password')) and not expr_obj.cmp_password(request.args.get('pw'))): expr_obj = {'auth': 'password'} expr_client = expr_obj else: expr_client = expr_obj.client_view(mode='page') # TODO: consider allowing analytics for content frame. 
viewport = [ int(x) for x in request.args.get('viewport', '1000x750').split('x') ] snapshot_mode = request.args.get('snapshot') is not None tdata.context.update( html=expr_to_html(expr_obj, snapshot_mode, viewport=viewport), expr=expr_obj, use_ga=False, ) body_style = '' if snapshot_mode: body_style = 'overflow: hidden;' if expr_obj.get('clip_x'): body_style = 'overflow-x: hidden;' if expr_obj.get('clip_y'): body_style += 'overflow-y: hidden;' if body_style: tdata.context['css'] = 'body {' + body_style + '}' tdata.context.update(expr=expr_obj, expr_client=expr_client) return self.serve_page(tdata, 'pages/expr.html') def embed(self, tdata, request, response, owner_name=None, expr_name=None, expr_id=None, **args): expr = (self.db.Expr.fetch(expr_id) if expr_id else self.db.Expr.named( owner_name, expr_name)) if not expr: return self.serve_404(tdata) tdata.context.update( expr=expr, embed=True, content_url=abs_url(domain=self.config.content_domain, secure=tdata.request.is_secure) + expr.id, icon=False, route_args=args, barebones=True) tdata.context.update(self.cards_for_expr(tdata, expr)) return self.serve_page(tdata, 'pages/embed.html') def cards_for_expr(self, tdata, expr): collection = {} expr_ids = [] query = tdata.request.args.get('q') if query: query_obj = self.db.parse_query(query) if len(query_obj.get('tags', [])) and query_obj.get('user'): expr_ids = self.db.User.named(query_obj['user']).get_tag( query_obj['tags'][0]) collection['name'] = query_obj['tags'][0] collection['username'] = query_obj['user'] else: cards = self.db.query(query, search_id=expr.id) else: # get implicit collection from expr['tags'] collection = expr.primary_collection if collection.get('items'): expr_ids = collection.pop('items') at_card = expr_ids.index(expr.id) if expr.id in expr_ids else False cards = [r.client_view() for r in self.db.Expr.search(expr_ids)] page_data = dict(cards=cards, at_card=at_card) if collection: page_data['collection'] = collection return page_data def 
save(self, tdata, request, response, **args): """ Parses JSON object from POST variable 'exp' and stores it in database. If the name (url) does not match record in database, create a new record.""" autosave = (request.form.get('autosave') == "1") try: expr = self.db.Expr.new(json.loads(request.form.get('expr', '0'))) except: expr = False if not expr: raise ValueError('Missing or malformed expr') # Name of the expression before rename orig_name = expr.get('orig_name') res = self.db.Expr.fetch(expr.id) allowed_attributes = [ 'name', 'url', 'title', 'apps', 'dimensions', 'auth', 'password', 'tags', 'background', 'thumb', 'images', 'value', 'remix_value', 'remix_value_add', 'container', 'clip_x', 'clip_y', 'layout_coord', 'groups', 'globals' ] # TODO: fixed expressions, styles, and scripts, need to be done right # if tdata.user.is_admin: # allowed_attributes.extend(['fixed_width', 'script', 'style']) upd = dfilter(expr, allowed_attributes) upd['name'] = upd.get('name', '').lower().strip('/ ') draft = res and (res.get('draft') == True) if draft and orig_name: res['name'] = upd['name'] # Create data and upload to s3 # TODO: proper versioning def module_names(app): """ Returns: comma-separated lists of module names """ names = [m.get('name') for m in app.get('modules')] return ",".join([""] + names) def module_modules(app): """ Returns: comma-separated lists of module imports """ def path(module): path = module.get('path') for app in upd.get('apps', []): if path == app.get('id') or path == app.get('name'): path = app.get('file_id') if not path: raise "Not found" path = 'media/' + path break return path try: names = ["'" + path(m) + "'" for m in app.get('modules')] except Exception, e: return False return ",".join([""] + names) apps = deque(upd.get('apps', [])) while len(apps): # extract files from sketch and code objects app = apps.popleft() ok = True file_data = None suffix = "" file_id = app.get('file_id') #if app['type'] == 'hive.code' and app['code_type'] == 'js': # 
file_data = app.get('content') # modules = module_modules(app) # ok = ok and file_id and (modules != False) # if ok: # # expand to full module code # data = ("define(['jquery'%s], function($%s" # + ") {\nvar self = {}\n%s\nreturn self\n})" # ) % (modules, module_names(app), file_data) # name = "code" # mime = "application/javascript" # suffix = ".js" if app['type'] == 'hive.sketch': # deal with inline base64 encoded images from Sketch app file_data = base64.decodestring( app.get('content').get('src').split(',', 1)[1]) name = 'sketch' mime = 'image/png' if not file_data: continue if not ok: # dependencies not visited apps.append(app) # sync files to s3 f = os.tmpfile() f.write(file_data) file_res = None # TODO-feature-versioning goes here # If file exists, overwrite it if file_id: file_res = self.db.File.fetch(file_res) if file_res: file_res.update_file(f) else: file_res = self.db.File.create( dict(owner=tdata.user.id, tmp_file=f, name=name, mime=mime, suffix=suffix)) f.close() app_upd = {'file_id': file_res.id} #if app['type'] == 'hive.code': # app_upd.update({'code_url' : file_res.url }) if app['type'] == 'hive.sketch': app_upd.update({'type': 'hive.image', 'content': file_res.url}) app.update(app_upd) def record_expr_save(res): self.db.ActionLog.create(tdata.user, "new_expression_save", data={'expr_id': res.id}) tdata.user.flag('expr_new') duplicate = False if not res or upd['name'] != res['name']: """ we're creating a new page """ if autosave: # TODO-autosave: create anonymous expression if upd.get('name', '') == '': upd['name'] = self.db.Expr.unused_name( tdata.user, time.strftime("%Y_%m_%d")) upd['auth'] = "private" upd['tags'] += " #draft" upd['draft'] = True res = tdata.user.expr_create(upd) return self.serve_json(response, {"autosave": 1, "expr": res}) # remix: ensure that the remixed tag is saved with a remix if upd.get('remix_parent_id'): upd['tags'] += " #remixed" # + remix_name try: res = tdata.user.expr_create(upd) except DuplicateKeyError: duplicate = 
True else: record_expr_save(res) else: """ we're updating a page """ if not res['owner'] == tdata.user.id: raise exceptions.Unauthorized( 'Nice try. You no edit stuff you no own') if autosave: if draft: upd['auth'] = 'password' res.update(**upd) else: upd['updated'] = now() res.update(updated=False, draft=upd) return self.serve_json(response, {'autosave': 1}) if draft: upd['draft'] = False try: res.update(**upd) except DuplicateKeyError: duplicate = True else: self.db.UpdatedExpr.create(res.owner, res) self.db.ActionLog.create(tdata.user, "update_expression", data={'expr_id': res.id}) def rename_error(): return self.serve_json( response, { 'error': 'rename', 'rename_existing': self.db.Expr.unused_name(tdata.user, upd['name']), 'name_existing': upd['name'] }) if duplicate: if expr.get('rename_existing'): existing = self.db.Expr.named(tdata.user['name'], upd['name']) try: existing.update(updated=False, name=expr.get('rename_existing')) res = tdata.user.expr_create(upd) except DuplicateKeyError: return rename_error() else: record_expr_save(res) else: return rename_error() # autosave: Remove "draft" on first save if draft and not autosave: new_tags = res['tags_index'] if "draft" in new_tags: new_tags.remove("draft") res.update(tags=tag_string(new_tags)) if (not self.config.snapshot_async and (upd.get('apps') or upd.get('background'))): res.threaded_snapshot(retry=120) # TODO-cleanup: create client_view for full expression record, instead # of just feed cards res['id'] = res.id return self.serve_json(response, res)
def recent_snapshot_fails(days=1):
    """Expressions updated in the last `days` days whose snapshot has
    failed between 5 and 12 times."""
    cutoff = now() - days * 86400
    query = mq().bt('snapshot_fails', 5, 12).gt('updated', cutoff)
    return db.Expr.search(query)
def snapshots_pending(time_last=False):
    """Query for expressions eligible for a (re-)snapshot.

    Excludes password-protected expressions, those that have already
    failed 6+ times, and those whose last failure is newer than
    `time_last` (defaulting to now).
    """
    if not time_last:
        time_last = now()
    # Adjacent literals concatenate; the runtime string is unchanged.
    criteria = (
        '!this.password && (!this.snapshot_fails || this.snapshot_fails < 6)'
        '&& (!this.snapshot_fail_time || '
        'this.snapshot_fail_time < ' + str(time_last) + ')'
    )
    return mq(snapshot_needed=True).js(criteria)