def now_playing():
    try:
        ret = {}
        # gather show infos
        show = Show.get_active_show()
        if show:
            user = show.get_active_user()
            if show.end:
                end = to_timestamp(to_user_timezone(show.end))
            else:
                end = None
            ret['show'] = {'id': show.show,
                           'name': show.name,
                           'begin': to_timestamp(to_user_timezone(show.begin)),
                           'now': to_timestamp(to_user_timezone(now())),
                           'end': end,
                           'logo': show.get_logo(),
                           'type': Show.FLAGS.name(show.flags),
                           'user': {'countryball': iso_country_to_countryball(user.country)}}
            if show.series:
                ret['series'] = {'name': show.series.name}
            link_users = []
            for ushow in show.users:
                link_users.append(make_user_link(ushow.user))
            ret['users'] = {'links': natural_join(link_users)}

        # gather track infos
        track = Track.current_track()
        if track:
            ret['track'] = {'title': track.title.name,
                            'artist': track.title.artist.name}

        # gather nextshow infos
        if show and show.end:
            filter_begin = show.end
        else:
            filter_begin = now()
        if request.args.get('full') == 'true':
            nextshow = Show.query.filter(Show.begin >= filter_begin).order_by(Show.begin.asc()).first()
            if nextshow:
                ret['nextshow'] = {'name': nextshow.name,
                                   'begin': to_timestamp(to_user_timezone(nextshow.begin)),
                                   'logo': nextshow.get_logo()}
                if nextshow.series:
                    ret['nextshow']['series'] = nextshow.series.name

        # get listenerinfo for disco
        listeners = Listener.get_current_listeners()
        ret['listener'] = {}
        for listener in listeners:
            ret['listener'][listener.listener] = {'listener': listener.listener,
                                                  'country': listener.country,
                                                  'countryball': iso_country_to_countryball(listener.country)}
        return jsonify({'success': True, 'data': ret})
    except Exception as e:
        return jsonify({'success': False, 'data': unicode(e)})

def show_edit(show):
    s = Show.query.get(show)
    if s is None:
        return 'no show found'  # TODO: proper error page
    if request.args.get('inline'):
        template = '/shows/showform-inline.html'
    else:
        template = '/shows/showform.html'
    tags = []
    for tag in s.tags:
        tags.append(tag.tag.name)
    return render_template(template,
                           show={'name': s.name,
                                 'description': s.description,
                                 'series': s.series,
                                 'users': s.users,
                                 'tags': ",".join(tags),
                                 'begin': to_user_timezone(s.begin).strftime('%s'),
                                 'logo': s.logo,
                                 'show': s.show,
                                 'duration': (s.end - s.begin).total_seconds() / 60},
                           imgur={'client': CONFIG.get('site', 'imgur-client')},
                           format=get_datetime_format())

def date_histogram(data, getter, ignore_year=False):
    """Get the date histogram from given objects.

    :param data: Iterable objects to be analyzed.
    :param getter: A :func:`callable` object to get a :class:`~datetime.datetime`
        from an object.
    :param ignore_year: Ignore the year in the date. Only month and day will be
        used in the histogram.
    :return: A sorted :class:`list` of :class:`tuple` ((year, month, day), freq),
        or ((month, day), freq) if `ignore_year` is set.
    """
    ret = {}
    if ignore_year:
        get_key = lambda dt: (dt.month, dt.day)
    else:
        get_key = lambda dt: (dt.year, dt.month, dt.day)
    for obj in data:
        key = get_key(to_user_timezone(getter(obj)))
        if key in ret:
            ret[key] += 1
        else:
            ret[key] = 1
    return sorted(ret.items())

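A minimal usage sketch, not part of the original source: it assumes date_histogram (and its Flask-Babel to_user_timezone import) is defined in the current module, and it spins up a throwaway Flask app with Flask-Babel's default UTC timezone so the conversion has a request context to run in.

# Hypothetical demo objects; only the `created` attribute matters here.
import datetime
from collections import namedtuple

from flask import Flask
from flask_babel import Babel

app = Flask(__name__)
Babel(app)  # default timezone is UTC

Post = namedtuple('Post', 'created')
posts = [Post(datetime.datetime(2014, 5, 1, 12, 0)),
         Post(datetime.datetime(2014, 5, 1, 18, 30)),
         Post(datetime.datetime(2014, 5, 2, 9, 15))]

with app.test_request_context('/'):
    print(date_histogram(posts, lambda p: p.created))
    # -> [((2014, 5, 1), 2), ((2014, 5, 2), 1)]
    print(date_histogram(posts, lambda p: p.created, ignore_year=True))
    # -> [((5, 1), 2), ((5, 2), 1)]
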
def show_edit(show): s = Show.query.get(show) if s is None: return "no show found" if request.args.get("inline"): template = "/shows/showform-inline.html" else: template = "/shows/showform.html" tags = [] for tag in s.tags: tags.append(tag.tag.name) return render_template( template, show={ "name": s.name, "description": s.description, "series": s.series, "users": s.users, "tags": ",".join(tags), "begin": to_user_timezone(s.begin).strftime("%s"), "logo": s.logo, "show": s.show, "duration": (s.end - s.begin).total_seconds() / 60, }, imgur={"client": CONFIG.get("site", "imgur-client")}, format=get_datetime_format(), )
def listenerdata(start, stop):
    from rfk.site import app
    app.logger.warn(start)
    app.logger.warn(stop)
    stop = parse_datetimestring(stop)
    start = parse_datetimestring(start)
    app.logger.warn(start)
    app.logger.warn(stop)
    ret = {'data': {}, 'shows': []}
    streams = Stream.query.all()
    for stream in streams:
        ret['data'][str(stream.mount)] = []
        # seed an initial data point from just before the start of the range
        stats = stream.statistic.get(stop=start, num=1, reverse=True)
        c = 0
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append(
            (to_timestamp(to_user_timezone(start)), int(c)))

    # fill in the actual datapoints
    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(start=start, stop=stop)
        for stat in stats:
            ret['data'][str(stream.mount)].append(
                (to_timestamp(to_user_timezone(stat.timestamp)), int(stat.value)))

    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(stop=stop, num=1, reverse=True)
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append(
            (to_timestamp(to_user_timezone(stop)), int(c)))

    # get the shows for the graph
    shows = Show.query.filter(between(Show.begin, start, stop)
                              | between(Show.end, start, stop)).order_by(Show.begin.asc()).all()
    for show in shows:
        sstart = to_timestamp(to_user_timezone(show.begin))
        if show.end:
            send = to_timestamp(to_user_timezone(show.end))
        else:
            send = to_timestamp(to_user_timezone(now()))
        ret['shows'].append({'name': show.name, 'b': sstart, 'e': send})
    return jsonify(ret)

def liquidsoap_log():
    try:
        client = LiquidDaemonClient()
        client.connect()
        offset = request.args.get('offset')
        if offset is not None:
            offset = int(offset)
        offset, log = client.get_log(offset)
        client.close()
        lines = []
        for line in log:
            ts = to_user_timezone(datetime.utcfromtimestamp(int(line[0])))
            lines.append((ts.isoformat(), line[3]))
        return jsonify({'log': lines, 'offset': offset})
    except Exception as e:
        return jsonify({'error': str(e)})

def logs(page=1, user_id=False):
    form = DownloadLogsForm()
    query = Log.query
    if user_id:
        user = User.by_id(user_id)
        query = query.filter_by(user=user)
    if request.method == 'POST' and form.validate():
        data = request.form
        access_start = data.get('access_start', False)
        access_end = data.get('access_end', False)
        button_action = data.get('button-action', 'show-table')
        if access_start:
            query = query.filter(Log.time >= access_start)
        if access_end:
            query = query.filter(Log.time < access_end)
        query = query.order_by(desc(Log.time))
        if button_action == 'show-table':
            results = query.paginate(page, current_app.config["USER_PER_PAGE"])
        else:
            results = query.all()
            csv = log_spec_to_csv(
                logs=results,
                csv_headers=current_app.config['LOG_CSV_HEADER']
            )
            filename = 'geobox-access-%s.csv' % (
                to_user_timezone(datetime.utcnow()).strftime('%Y%m%d-%H%M%S'))
            resp = Response(
                csv,
                headers={
                    'Content-type': 'application/octet-stream',
                    'Content-disposition': 'attachment; filename=%s' % filename})
            return resp
    else:
        results = query.paginate(page, current_app.config["USER_PER_PAGE"])
    if user_id:
        return render_template('admin/user_log.html', user=user, logs=results)
    return render_template('admin/logs.html', form=form, logs=results)

def timesince(dt, default="just now"):
    """
    Returns a string representing "time since",
    e.g. 3 days ago, 5 hours ago etc.
    """
    now = to_user_timezone(datetime.now())
    diff = now - dt

    periods = (
        (diff.days / 365, "year", "years"),
        (diff.days / 30, "month", "months"),
        (diff.days / 7, "week", "weeks"),
        (diff.days, "day", "days"),
        (diff.seconds / 3600, "hour", "hours"),
        (diff.seconds / 60, "minute", "minutes"),
    )

    for period, singular, plural in periods:
        if period:
            return "%d %s ago" % (period, singular if period == 1 else plural)

    return default

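A minimal sketch, an assumption not shown in the original source: timesince could be exposed to templates as a Jinja2 filter on a hypothetical Flask app, so templates can write {{ entry.timestamp|timesince }}. Note that the function expects an already timezone-aware datetime, which is why show_entries further down passes to_user_timezone(x.timestamp).

from flask import Flask

app = Flask(__name__)  # hypothetical app object, not from the original source
app.add_template_filter(timesince, 'timesince')  # assumes timesince is in scope
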
def make_charts_data(hw):
    """Make hwcharts data object."""
    ACCEPTED_AND_REJECTED = ('Accepted', 'Rejected')

    # Query all the submissions for this homework
    handins = (db.session.query(Handin).options(db.defer('partials')).
               join(User).
               filter(Handin.hwid == hw.uuid).
               filter(Handin.state.in_(ACCEPTED_AND_REJECTED)).
               filter(User.is_admin == 0))

    # The date histogram to count everyday submissions.
    def ListAdd(target, addition):
        for i, v in enumerate(addition):
            target[i] += v
        return target

    date_bucket = {}
    date_author_bucket = {}
    for obj in handins:
        dt = to_user_timezone(obj.get_ctime())
        key = dt.month, dt.day
        value = (1, int(obj.is_accepted()), int(not obj.is_accepted()))
        # We count the day freq
        if key in date_bucket:
            ListAdd(date_bucket[key], value)
        else:
            date_bucket[key] = list(value)
        # We count the day author freq
        if key not in date_author_bucket:
            date_author_bucket[key] = {obj.user.name}
        else:
            date_author_bucket[key].add(obj.user.name)
    date_author_bucket = {k: len(v) for k, v in date_author_bucket.iteritems()}

    # Cache the submission count of each user
    user_submit_bucket = {}
    for obj in handins:
        name = obj.user.name
        value = (1, int(obj.is_accepted()), int(not obj.is_accepted()))
        if name not in user_submit_bucket:
            user_submit_bucket[name] = list(value)
        else:
            ListAdd(user_submit_bucket[name], value)

    # Get the frequency of user submissions
    user_submit = {}
    for __, (total, __, __) in user_submit_bucket.iteritems():
        user_submit.setdefault(total, 0)
        user_submit[total] += 1

    # Get the score frequency of Accepted submissions
    user_finalscores = {}
    for obj in handins:
        name = obj.user.name
        score = obj.score or 0.0
        if name not in user_finalscores:
            user_finalscores[name] = score
        elif score > user_finalscores[name]:
            user_finalscores[name] = score
    final_score = group_histogram(user_finalscores.itervalues(),
                                  lambda v: round_score(v))

    # Count the Accepted and Rejected submissions.
    acc_reject = group_histogram(handins, lambda d: d.state)

    # Count the number of the reasons for Rejected
    reject_brief = group_histogram((h for h in handins if not h.is_accepted()),
                                   lambda d: unicode(d.result))

    # Generate the JSON data
    json_obj = {
        'day_freq': sorted(date_bucket.items()),
        'day_author': sorted(date_author_bucket.items()),
        'acc_reject': [(k, acc_reject.get(k, 0)) for k in ACCEPTED_AND_REJECTED],
        'reject_brief': sorted(reject_brief.items()),
        'user_submit': sorted(user_submit.items()),
        'final_score': [(str(v[0]), v[1]) for v in sorted(final_score.items())],
    }
    return json_obj

def hwcharts(hwid):
    """The admin page to view various charts of a given homework.

    All users except the administrators will be considered when generating
    the charts.

    :route: /admin/hwcharts/<hwid>/
    :method: GET
    :template: admin.hwcharts.html
    """
    ACCEPTED_AND_REJECTED = ('Accepted', 'Rejected')
    g.scripts.deps('chart.js')

    # Query the given homework
    hw = g.homeworks.get_by_uuid(hwid)
    if hw is None:
        raise NotFound(lazy_gettext('Requested homework not found.'))

    # Query all the submissions for this homework
    handins = (db.session.query(Handin).join(User).
               filter(Handin.hwid == hwid).
               filter(Handin.state.in_(ACCEPTED_AND_REJECTED)).
               filter(User.is_admin == 0)).all()

    # The date histogram to count everyday submissions.
    def ListAdd(target, addition):
        for i, v in enumerate(addition):
            target[i] += v
        return target

    date_bucket = {}
    date_author_bucket = {}
    for obj in handins:
        dt = to_user_timezone(obj.get_ctime())
        key = dt.month, dt.day
        value = (1, int(obj.is_accepted()), int(not obj.is_accepted()))
        # We count the day freq
        if key in date_bucket:
            ListAdd(date_bucket[key], value)
        else:
            date_bucket[key] = list(value)
        # We count the day author freq
        if key not in date_author_bucket:
            date_author_bucket[key] = {obj.user.name}
        else:
            date_author_bucket[key].add(obj.user.name)
    date_author_bucket = {k: len(v) for k, v in date_author_bucket.iteritems()}

    # Cache the submission count of each user
    user_submit_bucket = {}
    for obj in handins:
        name = obj.user.name
        value = (1, int(obj.is_accepted()), int(not obj.is_accepted()))
        if name not in user_submit_bucket:
            user_submit_bucket[name] = list(value)
        else:
            ListAdd(user_submit_bucket[name], value)

    # Get the frequency of user submissions
    user_submit = {}
    for __, (total, __, __) in user_submit_bucket.iteritems():
        user_submit.setdefault(total, 0)
        user_submit[total] += 1

    # Get the score frequency of Accepted submissions
    user_finalscores = {}
    for obj in handins:
        name = obj.user.name
        score = obj.score or 0.0
        if name not in user_finalscores:
            user_finalscores[name] = score
        elif score > user_finalscores[name]:
            user_finalscores[name] = score
    final_score = group_histogram(
        user_finalscores.itervalues(),
        lambda v: round_score(v)
    )

    # Count the Accepted and Rejected submissions.
    acc_reject = group_histogram(
        handins,
        lambda d: d.state
    )

    # Count the number of the reasons for Rejected
    reject_brief = group_histogram(
        (h for h in handins if not h.is_accepted()),
        lambda d: unicode(d.result)
    )

    # Generate the JSON data
    json_obj = {
        'day_freq': sorted(date_bucket.items()),
        'day_author': sorted(date_author_bucket.items()),
        'acc_reject': [
            (k, acc_reject.get(k, 0)) for k in ACCEPTED_AND_REJECTED
        ],
        'reject_brief': sorted(reject_brief.items()),
        'user_submit': sorted(user_submit.items()),
        'final_score': [
            (str(v[0]), v[1]) for v in sorted(final_score.items())
        ],
    }
    json_text = json.dumps(json_obj)

    # Render the page
    return render_template('admin.hwcharts.html', chart_data=json_text, hw=hw)

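group_histogram and round_score are project helpers that are not shown here. Inferred from the call sites above (the result must support .get(key, 0) and .items()), a plausible minimal group_histogram could look like the sketch below; the project's real implementation may differ.

def group_histogram(data, key_func):
    """Hypothetical helper: count objects grouped by key_func(obj) -> {key: count}."""
    ret = {}
    for obj in data:
        key = key_func(obj)
        ret[key] = ret.get(key, 0) + 1
    return ret
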
def calendar_week():
    now = to_user_timezone(datetime.datetime.utcnow()).date()
    return calendar_week_spec(int(now.strftime('%Y')),
                              int(now.strftime('%W')) + 1)

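A small standalone illustration (not from the original source) of the %W arithmetic used above: %W numbers weeks with Monday as the first day, starting at 00 for any days before the year's first Monday, and the view adds 1, presumably to match a 1-based week convention in calendar_week_spec.

import datetime

d = datetime.date(2014, 1, 6)  # the first Monday of 2014
week = (int(d.strftime('%Y')), int(d.strftime('%W')) + 1)
# -> (2014, 2): %W yields '01' for the week starting on the first Monday
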
def show_entries():
    db = get_db()
    entries = (DinoUpdate.query.filter(DinoUpdate.user_id == session['userid'])
               .order_by(DinoUpdate.timestamp.desc()))
    entries = [(json.loads(x.update), timesince(to_user_timezone(x.timestamp)))
               for x in entries]
    return render_template('show_entries.html', entries=entries)