def user_contact(request):
    """Update the logged-in member's contact info (phone, email, birthday).

    POST only; expects ContactForm fields. The birthday is validated as a
    'M/D' numeric prefix and normalized to zero-padded 'MM/DD' before
    saving. Returns a redirect to /group/contact/ on success, HTTP 400 on
    any validation failure or non-POST request.
    """
    if request.method != 'POST':
        return error400(request)
    form = ContactForm(request.POST)
    if not form.is_valid():
        return error400(request)

    email = form.cleaned_data['contact_email']
    phone = form.cleaned_data['contact_phone']
    bday = form.cleaned_data['contact_bday']
    match = re.match(r'[0-9]{1,2}/[0-9]{1,2}', bday)
    if match is None:
        return error400(request)
    # BUGFIX: the original used match.string, which is the *entire* input
    # (including any trailing junk), not the validated portion; group(0)
    # keeps only the 'M/D' prefix the pattern actually matched.
    bday = match.group(0)
    # Normalize to the canonical zero-padded 'MM/DD' form.
    month, day = bday.split('/')
    bday = '%s/%s' % (month.zfill(2), day.zfill(2))

    member = Member.objects.get(sunet_id=user_sunetid(request))
    member.phone = phone
    member.email = email
    member.bday = bday
    member.save()
    return HttpResponseRedirect('/group/contact/')
def result(request):
    """Render the design results page for a job.

    Requires a 16-character hexadecimal 'job_id' GET parameter. An optional
    'json' flag (any value except 'false') returns the raw JSON payload via
    result_json() instead of the rendered page. POST is rejected with 403;
    missing/invalid ids yield 400, unknown ids 404.
    """
    if request.method == 'POST':
        return error403(request)
    if 'job_id' not in request.GET:
        return error400(request)
    job_id = request.GET.get('job_id')
    if not job_id:
        return HttpResponseRedirect('/')
    # len == 16 combined with a 16-hex-digit match anchored at position 0
    # guarantees the whole string is hexadecimal.
    if len(job_id) != 16 or (not re.match(r'[0-9a-fA-F]{16}', job_id)):
        return error400(request)
    if 'json' in request.GET and request.GET.get('json').lower() != 'false':
        return result_json(job_id)
    try:
        job_list_entry = JobIDs.objects.get(job_id=job_id)
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so system-exiting
        # exceptions (KeyboardInterrupt/SystemExit) are no longer swallowed.
        return error404(request)
    # Pick the form class matching the job's dimensionality ("1"/"2"/"3").
    form = Design1DForm() if job_list_entry.type == "1" else (
        Design2DForm() if job_list_entry.type == "2" else Design3DForm())
    context = {
        'result_job_id': job_id,
        '%sd_form' % job_list_entry.type: form,
    }
    return render(request,
                  PATH.HTML_PATH['design_%sd' % job_list_entry.type],
                  context)
def result(request):
    """Return the JSON result payload for a job, with CORS headers.

    NOTE(review): this redefines `result` and therefore shadows the earlier
    view of the same name at import time — confirm which one the URLconf
    actually references.
    """
    if request.method != 'GET':
        return error403(request)
    if 'job_id' not in request.GET:
        return error400(request)
    job_id = request.GET.get('job_id')
    if len(job_id) != 16 or not re.match('[0-9a-fA-F]{16}', job_id):
        return error400(request)
    response = result_json(job_id)
    # Reflect the caller's origin. NOTE(review): unlike submit(), this does
    # not validate against ALLOWED_ORIGIN — confirm that is intentional.
    response['Access-Control-Allow-Origin'] = request.META.get('HTTP_ORIGIN')
    response['Access-Control-Allow-Methods'] = 'GET'
    return response
def test(request): # print request.META raise ValueError return error400(request) # send_notify_emails('test', 'test') # send_mail('text', 'test', EMAIL_HOST_USER, [EMAIL_NOTIFY]) return HttpResponse(content="", status=200)
def dash_dropbox(request):
    """Serve cached Dropbox dashboard data.

    Requires 'qs' (dataset name) and 'tqx' (Google Visualization request
    id) GET parameters. Static datasets are returned as JSON text; the
    pickled 'history' response has the request id substituted in.
    """
    if 'qs' not in request.GET or 'tqx' not in request.GET:
        return error400(request)
    qs = request.GET.get('qs')
    req_id = request.GET.get('tqx').replace('reqId:', '')
    if qs in ['sizes', 'folders']:
        # BUGFIX: context managers close the cache file handles
        # deterministically instead of leaking them until GC.
        with open('%s/cache/dropbox/%s.json' % (MEDIA_ROOT, qs), 'r') as f:
            return simplejson.dumps(simplejson.load(f))
    elif qs == 'history':
        with open('%s/cache/dropbox/history.pickle' % MEDIA_ROOT, 'rb') as f:
            results = pickle.load(f)
        return results.replace('__REQ_ID__', req_id)
    else:
        return error400(request)
def dash_slack(request):
    """Serve cached Slack dashboard data keyed by 'qs'; 'tqx' carries the
    Google Visualization request id substituted into pickled responses."""
    if 'qs' not in request.GET or 'tqx' not in request.GET:
        return error400(request)
    qs = request.GET.get('qs')
    req_id = request.GET.get('tqx').replace('reqId:', '')
    if qs in ['users', 'home', 'channels', 'files']:
        # BUGFIX: context managers close the cache file handles
        # deterministically instead of leaking them until GC.
        with open('%s/cache/slack/%s.json' % (MEDIA_ROOT, qs), 'r') as f:
            return simplejson.dumps(simplejson.load(f))
    elif qs in ['plot_files', 'plot_msgs']:
        with open('%s/cache/slack/%s.pickle' % (MEDIA_ROOT, qs), 'rb') as f:
            results = pickle.load(f)
        return results.replace('__REQ_ID__', req_id)
    else:
        return error400(request)
def dash_ga(request):
    """Serve cached Google Analytics dashboard data for GA view 'id';
    'qs' picks the dataset and 'tqx' carries the gviz request id."""
    if not ('qs' in request.GET and 'id' in request.GET and
            'tqx' in request.GET):
        return error400(request)
    qs = request.GET.get('qs')
    id = request.GET.get('id')
    req_id = request.GET.get('tqx').replace('reqId:', '')
    if qs == 'init':
        # BUGFIX: context managers close the cache file handles
        # deterministically instead of leaking them until GC.
        with open('%s/cache/ga/init.json' % MEDIA_ROOT, 'r') as f:
            return simplejson.dumps(simplejson.load(f))
    elif qs in ['sessions', 'percentNewSessions']:
        with open('%s/cache/ga/%s_%s.pickle' % (MEDIA_ROOT, id, qs),
                  'rb') as f:
            results = pickle.load(f)
        return results.replace('__REQ_ID__', req_id)
    else:
        return error400(request)
def refresh_stat(request, keyword):
    """Force-refresh a cached statistic and bounce back to an admin page.

    'sys' re-runs the versions command, 'backup' refreshes backup stats,
    and 'dash' re-caches a dashboard interval ('int=' of 3/15/30 in the
    raw query string); bad intervals yield 400.
    """
    keyword = keyword.strip('/')
    if keyword == 'sys':
        call_command('versions', '--force')
        return HttpResponseRedirect('/admin/')
    if keyword == 'backup':
        get_backup_stat()
        return HttpResponseRedirect('/admin/backup/')
    if keyword == 'dash':
        if 'QUERY_STRING' not in request.META:
            return error400(request)
        flag = request.META['QUERY_STRING'].replace('int=', '')
        if flag not in ('3', '15', '30'):
            return error400(request)
        call_command('cache', item=[int(flag)])
    # Unknown keywords fall through to the plain admin redirect, matching
    # the 'dash' success path.
    return HttpResponseRedirect('/admin/')
def get_browse(request, keyword):
    """Return the cached browse-statistics JSON for a known keyword as a
    pretty-printed application/json response; unknown keywords yield 400."""
    if keyword not in ('general', 'puzzle', 'eterna'):
        return error400(request)
    # BUGFIX: context manager closes the cache file handle promptly
    # instead of leaking it until GC.
    with open('%s/cache/stat_browse_%s.json' % (MEDIA_ROOT, keyword),
              'r') as f:
        json = simplejson.load(f)
    return HttpResponse(
        simplejson.dumps(json, sort_keys=True, indent=' ' * 4),
        content_type='application/json')
def dash_aws(request):
    """Serve cached AWS dashboard data for resource 'id' of type 'tp';
    'qs' picks the dataset and 'tqx' carries the gviz request id."""
    if not ('qs' in request.GET and 'id' in request.GET and
            'tp' in request.GET and 'tqx' in request.GET):
        return error400(request)
    qs = request.GET.get('qs')
    id = request.GET.get('id')
    tp = request.GET.get('tp')
    req_id = request.GET.get('tqx').replace('reqId:', '')
    if qs == 'init':
        # BUGFIX: context managers close the cache file handles
        # deterministically instead of leaking them until GC.
        with open('%s/cache/aws/init.json' % MEDIA_ROOT, 'r') as f:
            return simplejson.dumps(simplejson.load(f))
    elif qs in ['cpu', 'net', 'lat', 'req', 'disk']:
        with open('%s/cache/aws/%s_%s_%s.pickle' % (MEDIA_ROOT, tp, id, qs),
                  'rb') as f:
            results = pickle.load(f)
        return results.replace('__REQ_ID__', req_id)
    else:
        return error400(request)
def review(request):
    """Apply a moderation decision to an RMDB entry and redirect to its
    detail page; any non-POST or invalid-form path yields HTTP 400."""
    if request.method == 'POST':
        form = ReviewForm(request.POST)
        if form.is_valid():
            rmdb_id = form.cleaned_data['rmdb_id']
            review_entry(form.cleaned_data['new_stat'], rmdb_id)
            return HttpResponseRedirect('/detail/%s' % rmdb_id)
    return error400(request)
def search(request):
    """Render the search-results page. The query string comes from the GET
    form and is UTF-8 encoded; an invalid form renders with an empty query.
    POST requests are rejected with HTTP 400."""
    if request.method == 'POST':
        return error400(request)
    form = SearchForm(request.GET)
    sstring = ''
    if form.is_valid():
        sstring = form.cleaned_data['sstring'].encode('utf-8', 'ignore')
    return render(request, PATH.HTML_PATH['search_res'],
                  {'sstring': sstring})
def dash_git(request):
    """Serve cached GitHub dashboard data for 'org'/'repo'; 'qs' picks the
    dataset and 'tqx' carries the gviz request id."""
    if not ('qs' in request.GET and 'repo' in request.GET and
            'org' in request.GET and 'tqx' in request.GET):
        return error400(request)
    qs = request.GET.get('qs')
    repo = request.GET.get('repo')
    org = request.GET.get('org')
    req_id = request.GET.get('tqx').replace('reqId:', '')
    if qs == 'init':
        # BUGFIX: context managers close the cache file handles
        # deterministically instead of leaking them until GC.
        with open('%s/cache/git/init.json' % MEDIA_ROOT, 'r') as f:
            return simplejson.dumps(simplejson.load(f))
    elif qs in ['c', 'ad', 'num']:
        with open('%s/cache/git/%s+%s_%s.pickle' % (MEDIA_ROOT, org, repo,
                                                    qs), 'rb') as f:
            results = pickle.load(f)
        return results.replace('__REQ_ID__', req_id)
    else:
        return error400(request)
def git_hook(request):
    """GitHub webhook endpoint: verify the HMAC-SHA1 payload signature and
    run the 'dist' management command.

    Non-POST -> 404; missing webhook headers -> 400; bad signature -> 403;
    a failing 'dist' run -> 500; success -> 201.
    """
    if request.method != 'POST':
        return error404(request)
    if ('HTTP_X_HUB_SIGNATURE' not in request.META) or (
            'HTTP_X_GITHUB_DELIVERY' not in request.META) or (
            'HTTP_X_GITHUB_EVENT' not in request.META):
        return error400(request)
    signature = request.META['HTTP_X_HUB_SIGNATURE']
    mac = hmac.new(env('GITHOOK_SECRET'), msg=request.body, digestmod=sha1)
    # compare_digest is constant-time, preventing timing attacks on the
    # signature check.
    if not hmac.compare_digest('sha1=' + str(mac.hexdigest()),
                               str(signature)):
        return error403(request)
    try:
        call_command('dist')
    except Exception:
        # BUGFIX: parenthesized single-argument print — output identical on
        # Python 2, and no longer a syntax error on Python 3.
        print(traceback.format_exc())
        return error500(request)
    return HttpResponse(content="", status=201)
def submit(request):
    """Accept a design-job submission from a whitelisted origin and route
    it to the 1D/2D/3D pipeline, adding CORS headers to the response.

    Unlisted origins -> 403; non-POST or a missing/unknown 'type' -> 400.
    """
    origin = request.META.get('HTTP_ORIGIN')
    if not ('HTTP_ORIGIN' in request.META and origin in ALLOWED_ORIGIN):
        return error403(request)
    if request.method != 'POST' or 'type' not in request.POST or (
            request.POST.get('type') not in ('1', '2', '3')):
        return error400(request)
    job_type = int(request.POST.get('type'))
    # 'type' was validated above, so the lookup always succeeds.
    runner = {1: design_1d_run, 2: design_2d_run, 3: design_3d_run}[job_type]
    response = runner(request)
    response['Access-Control-Allow-Origin'] = origin
    response['Access-Control-Allow-Methods'] = 'POST, OPTIONS'
    return response
def link(request, tag):
    """Serve the Primerize source zip for release `tag` to a registered
    downloader, matched by first/last name and email GET parameters.

    Unknown downloaders, missing parameters, or a missing archive all
    yield HTTP 401; an empty tag yields 400. The zip itself is delivered
    via the X-Sendfile header.
    """
    if not tag:
        return error400(request)
    if ('first_name' in request.GET and 'last_name' in request.GET and
            'email' in request.GET):
        records = SourceDownloader.objects.filter(
            first_name=request.GET.get('first_name'),
            last_name=request.GET.get('last_name'),
            email=request.GET.get('email'))
        # BUGFIX: dropped the stray `print len(records)` debug statement
        # (production noise, and a syntax error under Python 3).
        if len(records):
            tag = tag.replace('/', '')
            file_name = '%s/dist/Primerize-%s.zip' % (MEDIA_ROOT, tag)
            if os.path.exists(file_name):
                response = HttpResponse(content_type='application/zip')
                response['Content-Disposition'] = (
                    'attachment; filename=Primerize-%s.zip' % tag)
                response['X-Sendfile'] = smart_str(file_name)
                return response
            else:
                return error401(request)
    return error401(request)
def user_email(request):
    """Forward a 'contact admin' form submission to the admin mailbox,
    appending the request's HTTP headers and raw body for context.

    Returns a JSON {'messages': 'success'|'invalid'} payload; non-POST
    requests yield HTTP 400.
    """
    if request.method != 'POST':
        return error400(request)
    form = EmailForm(request.POST)
    if form.is_valid():
        em_from = form.cleaned_data['email_from']
        em_subject = form.cleaned_data['email_subject']
        em_content = form.cleaned_data['email_content']
        # Reconstruct the interesting request metadata for the email body.
        http_header = '(CONTENT_TYPE, %s)\n(CONTENT_LENGTH, %s)\n' % (
            request.META.get('CONTENT_TYPE'),
            request.META.get('CONTENT_LENGTH'))
        for key, value in request.META.items():
            if key.startswith('HTTP_'):
                http_header += '(%s, %s)\n' % (key, request.META.get(key))
        http_header += request.body
        em_content = (
            'Contact Admin from %s Website Internal\n\nFrom: %s: %s\n'
            'Subject: %s\n%s\n\nREQUEST:\n%s' % (
                env('SERVER_NAME'), request.user, em_from, em_subject,
                em_content, http_header))
        send_mail('{%s} SYSTEM: Internal Email Notice' % env('SERVER_NAME'),
                  em_content, EMAIL_HOST_USER, [EMAIL_NOTIFY])
        messages = 'success'
    else:
        messages = 'invalid'
    return HttpResponse(
        simplejson.dumps({'messages': messages}, sort_keys=True,
                         indent=' ' * 4),
        content_type='application/json')
def cache_aws(request):
    """Build AWS dashboard data via boto.

    NOTE(review): `request` is indexed like a dict (request['qs']) rather
    than used as a Django HttpRequest — callers appear to pass a plain
    mapping of query parameters; confirm against the cache command.

    'qs' == 'init': inventories EC2 instances (with a 2h average
    CPUCreditBalance), EBS volumes, and ELBs (with a healthy-host check
    that Slack-notifies the on-duty admin on a healthy->unhealthy
    transition), returning a dict of 'ec2'/'ebs'/'elb' lists plus a
    row-aligned 'table'. Any other 'qs' builds CloudWatch query args for
    that metric, scoped to resource 'id' of type 'tp', and delegates to
    aws_call().
    """
    if request['qs'] == 'init':
        dict_aws = {'ec2': [], 'elb': [], 'ebs': [], 'table': []}
        conn = boto.ec2.connect_to_region(
            AWS['REGION'],
            aws_access_key_id=AWS['ACCESS_KEY_ID'],
            aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
            is_secure=True)
        resvs = conn.get_only_instances()
        for i, resv in enumerate(resvs):
            sub_conn = boto.ec2.cloudwatch.connect_to_region(
                AWS['REGION'],
                aws_access_key_id=AWS['ACCESS_KEY_ID'],
                aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
                is_secure=True)
            # Last 2 hours of CPUCreditBalance at 10-minute granularity.
            data = sub_conn.get_metric_statistics(
                600,
                datetime.utcnow() - timedelta(hours=2),
                datetime.utcnow(),
                'CPUCreditBalance', 'AWS/EC2', 'Average',
                {'InstanceId': resv.id}, 'Count')
            avg = 0
            for d in data:
                avg += d[u'Average']
            if len(data):
                avg = avg / len(data)
            name = resv.tags['Name'] if 'Name' in resv.tags else ''
            dict_aws['ec2'].append({
                'name': name,
                'type': resv.instance_type,
                'dns': resv.dns_name,
                'status': resv.state_code,
                'arch': resv.architecture,
                'region': resv.placement,
                'credit': '%.1f' % avg,
                'id': resv.id
            })
        resvs = conn.get_all_volumes()
        for i, resv in enumerate(resvs):
            name = resv.tags['Name'] if 'Name' in resv.tags else ''
            dict_aws['ebs'].append({
                'name': name,
                'size': resv.size,
                'type': resv.type,
                'region': resv.zone,
                'encrypted': resv.encrypted,
                'status': resv.status,
                'id': resv.id
            })
        conn = boto.ec2.elb.connect_to_region(
            AWS['REGION'],
            aws_access_key_id=AWS['ACCESS_KEY_ID'],
            aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
            is_secure=True)
        resvs = conn.get_all_load_balancers()
        for i, resv in enumerate(resvs):
            sub_conn = boto.ec2.cloudwatch.connect_to_region(
                AWS['REGION'],
                aws_access_key_id=AWS['ACCESS_KEY_ID'],
                aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
                is_secure=True)
            # The ELB counts as healthy only if every 5-minute window in the
            # last 30 minutes reported at least one healthy host.
            data = sub_conn.get_metric_statistics(
                300,
                datetime.utcnow() - timedelta(minutes=30),
                datetime.utcnow(),
                'HealthyHostCount', 'AWS/ELB', 'Maximum',
                {'LoadBalancerName': resv.name}, 'Count')
            status = True
            for d in data:
                if d[u'Maximum'] < 1:
                    status = False
                    break
            dict_aws['elb'].append({
                'name': resv.name,
                'dns': resv.dns_name,
                'region': ', '.join(resv.availability_zones),
                'status': status
            })
            # Notify only on a healthy -> unhealthy transition: the previous
            # cached status (from init.json, if present) must be healthy.
            if (not status) and BOT['SLACK']['ADMIN']['MSG_AWS_WARN']:
                last_status = True
                if os.path.exists('%s/cache/aws/init.json' % MEDIA_ROOT):
                    init = simplejson.load(
                        open('%s/cache/aws/init.json' % MEDIA_ROOT, 'r'))
                    for elb in init['elb']:
                        if elb['name'] == resv.name:
                            last_status = elb['status']
                            break
                if last_status:
                    result = dash_duty(0)
                    ppls = result['ppls']
                    (who, _) = find_slack_id(ppls['monthly']['amazon']['main'])
                    send_notify_slack('@' + who, '', [{
                        "fallback": 'AWS WARNING',
                        "mrkdwn_in": ["text"],
                        "color": "ff69bc",
                        "text": '*`WARNING`*: AWS ELB Server `%s` has *NO* healthy host! @ _%s_\n' % (resv.name, time.ctime())
                    }])
        dict_aws['ec2'] = sorted(dict_aws['ec2'],
                                 key=operator.itemgetter(u'name'))
        dict_aws['ebs'] = sorted(dict_aws['ebs'],
                                 key=operator.itemgetter(u'name'))
        dict_aws['elb'] = sorted(dict_aws['elb'],
                                 key=operator.itemgetter(u'name'))
        # Zip the three lists row-wise into 'table' for side-by-side display;
        # shorter lists simply omit their cell in later rows.
        for i in xrange(
                max(len(dict_aws['ec2']), len(dict_aws['elb']),
                    len(dict_aws['ebs']))):
            temp = {}
            if i < len(dict_aws['ec2']):
                temp.update({
                    'ec2': {
                        'name': dict_aws['ec2'][i]['name'],
                        'status': dict_aws['ec2'][i]['status'],
                        'id': dict_aws['ec2'][i]['id']
                    }
                })
            if i < len(dict_aws['ebs']):
                temp.update({
                    'ebs': {
                        'name': dict_aws['ebs'][i]['name'],
                        'status': dict_aws['ebs'][i]['status'],
                        'id': dict_aws['ebs'][i]['id']
                    }
                })
            if i < len(dict_aws['elb']):
                temp.update({
                    'elb': {
                        'name': dict_aws['elb'][i]['name'],
                        'status': dict_aws['elb'][i]['status']
                    }
                })
            dict_aws['table'].append(temp)
        return dict_aws
    else:
        qs = request['qs']
        id = request['id']
        tp = request['tp']
        conn = boto.ec2.cloudwatch.connect_to_region(
            AWS['REGION'],
            aws_access_key_id=AWS['ACCESS_KEY_ID'],
            aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
            is_secure=True)
        if tp in ['ec2', 'elb', 'ebs']:
            # All chart queries cover the last 24h at 1h granularity.
            args = {
                'period': 3600,
                'start_time': datetime.utcnow() - timedelta(days=1),
                'end_time': datetime.utcnow()
            }
        else:
            return error400(request)
        if qs == 'lat':
            args.update({
                'metric': ['Latency'],
                'namespace': 'AWS/ELB',
                'cols': ['Maximum'],
                'dims': {},
                'unit': 'Seconds',
                'calc_rate': False
            })
        elif qs == 'req':
            args.update({
                'metric': ['RequestCount'],
                'namespace': 'AWS/ELB',
                'cols': ['Sum'],
                'dims': {},
                'unit': 'Count',
                'calc_rate': False
            })
        elif qs == 'net':
            args.update({
                'metric': ['NetworkIn', 'NetworkOut'],
                'namespace': 'AWS/EC2',
                'cols': ['Sum'],
                'dims': {},
                'unit': 'Bytes',
                'calc_rate': True
            })
        elif qs == 'cpu':
            args.update({
                'metric': ['CPUUtilization'],
                'namespace': 'AWS/EC2',
                'cols': ['Average'],
                'dims': {},
                'unit': 'Percent',
                'calc_rate': False
            })
        elif qs == 'disk':
            args.update({
                'metric': ['VolumeWriteBytes', 'VolumeReadBytes'],
                'namespace': 'AWS/EBS',
                'cols': ['Sum'],
                'dims': {},
                'unit': 'Bytes',
                'calc_rate': True
            })
        # Scope the query to the requested resource based on namespace.
        if args['namespace'] == 'AWS/ELB':
            args['dims'] = {'LoadBalancerName': id}
        elif args['namespace'] == 'AWS/EC2':
            args['dims'] = {'InstanceId': id}
        elif args['namespace'] == 'AWS/EBS':
            args['dims'] = {'VolumeId': id}
        return aws_call(conn, args, qs)
def git_stats(request):
    """gviz/JSON endpoint for GitHub repository statistics.

    'qs' selects the dataset: 'init' (per-contributor totals as HTML
    labels), 'num' (repo metadata plus issue/PR/watcher/branch/fork/
    download counts), 'c' (daily commit activity), 'ad' (weekly
    additions/deletions), or 'au' (per-contributor commits/adds/deletes).
    'tqx' carries the Google Visualization request id. GitHub's statistics
    endpoints return None while being computed server-side, hence the
    sleep-and-retry loops (up to ~6 attempts).
    """
    if 'qs' in request.GET and 'tqx' in request.GET:
        qs = request.GET.get('qs')
        req_id = request.GET.get('tqx').replace('reqId:', '')
        gh = Github(login_or_token=GIT["ACCESS_TOKEN"])
        repo_name = GIT["REPOSITORY"]
        repo = gh.get_repo(repo_name)
        if qs in ['init', 'num']:
            if qs == 'init':
                data = []
                i = 0
                # Poll until GitHub finishes computing contributor stats.
                contribs = repo.get_stats_contributors()
                while (contribs is None and i <= 5):
                    time.sleep(1)
                    contribs = repo.get_stats_contributors()
                    i += 1
                if contribs is None:
                    return error500(request)
                for contrib in contribs:
                    # Sum weekly additions/deletions per contributor.
                    a, d = (0, 0)
                    for w in contrib.weeks:
                        a += w.a
                        d += w.d
                    au = '(None)' if not contrib.author else '<i>%s</i> <span style="color:#888">(%s)</span>' % (contrib.author.login, contrib.author.name)
                    data.append({u'Contributors': au, u'Commits': contrib.total, u'Additions': a, u'Deletions': d})
                data = sorted(data, key=operator.itemgetter(u'Commits'))
                return simplejson.dumps({'contrib': data}, sort_keys=True, indent=' ' * 4)
            else:
                # qs == 'num': repo metadata, timestamps localized to
                # TIME_ZONE from GitHub's UTC values.
                created_at = repo.created_at.replace(tzinfo=pytz.utc).astimezone(pytz.timezone(TIME_ZONE)).strftime('%Y-%m-%d %H:%M:%S')
                pushed_at = repo.pushed_at.replace(tzinfo=pytz.utc).astimezone(pytz.timezone(TIME_ZONE)).strftime('%Y-%m-%d %H:%M:%S')
                # NOTE(review): these len() counts only cover the first page
                # of each listing (GitHub paginates) — confirm acceptable.
                num_issues = len(requests.get('https://api.github.com/repos/' + repo_name + '/issues?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                num_pulls = len(requests.get('https://api.github.com/repos/' + repo_name + '/pulls?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                num_watchers = len(requests.get('https://api.github.com/repos/' + repo_name + '/watchers?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                num_branches = len(requests.get('https://api.github.com/repos/' + repo_name + '/branches?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                num_forks = len(requests.get('https://api.github.com/repos/' + repo_name + '/forks?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                num_downloads = len(requests.get('https://api.github.com/repos/' + repo_name + '/downloads?access_token=%s' % GIT['ACCESS_TOKEN']).json())
                return simplejson.dumps({'created_at': created_at, 'pushed_at': pushed_at, 'num_watchers': num_watchers, 'num_pulls': num_pulls, 'num_issues': num_issues, 'num_branches': num_branches, 'num_forks': num_forks, 'num_downloads': num_downloads}, sort_keys=True, indent=' ' * 4)
        else:
            # Time-series datasets rendered through gviz_api.
            data = []
            desp = {'Timestamp': ('date', 'Timestamp')}
            stats = ['Timestamp']
            if qs == 'c':
                i = 0
                contribs = repo.get_stats_commit_activity()
                while (contribs is None and i <= 5):
                    time.sleep(1)
                    contribs = repo.get_stats_commit_activity()
                    i += 1
                if contribs is None:
                    return error500(request)
                fields = ['Commits']
                # One row per day, offset from each stat week's start.
                for contrib in contribs:
                    for i, day in enumerate(contrib.days):
                        data.append({u'Timestamp': (contrib.week + timedelta(days=i)).date(), u'Commits': day})
            elif qs == 'ad':
                i = 0
                contribs = repo.get_stats_code_frequency()
                while (contribs is None and i <= 5):
                    time.sleep(1)
                    contribs = repo.get_stats_code_frequency()
                    i += 1
                if contribs is None:
                    return error500(request)
                fields = ['Additions', 'Deletions']
                for contrib in contribs:
                    data.append({u'Timestamp': contrib.week.date(), u'Additions': contrib.additions, u'Deletions': contrib.deletions})
            elif qs == 'au':
                i = 0
                contribs = repo.get_stats_contributors()
                while (contribs is None and i <= 5):
                    time.sleep(1)
                    contribs = repo.get_stats_contributors()
                    i += 1
                if contribs is None:
                    return error500(request)
                fields = ['Commits', 'Additions', 'Deletions']
                for contrib in contribs:
                    a, d = (0, 0)
                    for w in contrib.weeks:
                        a += w.a
                        d += w.d
                    au = contrib.author.login if contrib.author else '(None)'
                    data.append({u'Contributors': au, u'Commits': contrib.total, u'Additions': a, u'Deletions': d})
                # 'au' keys rows by contributor, not timestamp.
                stats = ['Contributors']
                desp['Contributors'] = ('string', 'Name')
                del desp['Timestamp']
            else:
                return error400(request)
            for field in fields:
                stats.append(field)
                desp[field] = ('number', field)
            data = sorted(data, key=operator.itemgetter(stats[0]))
            data_table = gviz_api.DataTable(desp)
            data_table.LoadData(data)
            results = data_table.ToJSonResponse(columns_order=stats, order_by='Timestamp', req_id=req_id)
            return results
    else:
        return error400(request)
def aws_stats(request):
    """gviz/JSON endpoint for AWS CloudWatch statistics of the configured
    primary EC2 instance, EBS volume, and ELB.

    'qs' == 'init' returns static metadata for all three resources;
    otherwise 'qs' names a metric group and 'sp' a time span ('7d' or
    '48h'), and the CloudWatch query is delegated to aws_call(). Requires
    'qs', 'sp', and 'tqx' GET parameters.
    """
    if 'qs' in request.GET and 'sp' in request.GET and 'tqx' in request.GET:
        qs = request.GET.get('qs')
        sp = request.GET.get('sp')
        req_id = request.GET.get('tqx').replace('reqId:', '')
        if qs == 'init':
            conn = boto.ec2.connect_to_region(
                AWS['REGION'],
                aws_access_key_id=AWS['ACCESS_KEY_ID'],
                aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
                is_secure=True)
            # Cherry-pick display fields from each resource's __dict__.
            resv = conn.get_only_instances(instance_ids=AWS['EC2_INSTANCE_ID'])
            stat = resv[0].__dict__
            stat1 = {k: stat[k] for k in ('id', 'instance_type', 'private_dns_name', 'public_dns_name', 'vpc_id', 'subnet_id', 'image_id', 'architecture')}
            resv = conn.get_all_volumes(volume_ids=AWS['EBS_VOLUME_ID'])
            stat = resv[0].__dict__
            stat2 = {k: stat[k] for k in ('id', 'type', 'size', 'zone', 'snapshot_id', 'encrypted')}
            conn = boto.ec2.elb.connect_to_region(
                AWS['REGION'],
                aws_access_key_id=AWS['ACCESS_KEY_ID'],
                aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
                is_secure=True)
            resv = conn.get_all_load_balancers(load_balancer_names=AWS['ELB_NAME'])
            stat = resv[0].__dict__
            stat3 = {k: stat[k] for k in ('dns_name', 'vpc_id', 'subnets', 'health_check')}
            stat3['health_check'] = str(stat3['health_check']).replace('HealthCheck:', '')
            return simplejson.dumps({'ec2': stat1, 'ebs': stat2, 'elb': stat3}, sort_keys=True, indent=' ' * 4)
        else:
            conn = boto.ec2.cloudwatch.connect_to_region(
                AWS['REGION'],
                aws_access_key_id=AWS['ACCESS_KEY_ID'],
                aws_secret_access_key=AWS['SECRET_ACCESS_KEY'],
                is_secure=True)
            # Span: 7 days at 3h periods, or 48h at 12-minute periods.
            if sp == '7d':
                args = {'period': 10800, 'start_time': datetime.utcnow() - timedelta(days=7), 'end_time': datetime.utcnow()}
            elif sp == '48h':
                args = {'period': 720, 'start_time': datetime.utcnow() - timedelta(hours=48), 'end_time': datetime.utcnow()}
            else:
                return error400(request)
            # Map the requested metric group onto CloudWatch query args;
            # 'dims' is filled in below based on the chosen namespace.
            if qs == 'latency':
                args.update({'metric': ['Latency'], 'namespace': 'AWS/ELB', 'cols': ['Maximum'], 'dims': {}, 'unit': 'Seconds', 'calc_rate': False})
            elif qs == 'request':
                args.update({'metric': ['RequestCount'], 'namespace': 'AWS/ELB', 'cols': ['Sum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == '23xx':
                args.update({'metric': ['HTTPCode_Backend_2XX', 'HTTPCode_Backend_3XX'], 'namespace': 'AWS/ELB', 'cols': ['Sum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == '45xx':
                args.update({'metric': ['HTTPCode_Backend_4XX', 'HTTPCode_Backend_5XX'], 'namespace': 'AWS/ELB', 'cols': ['Sum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == 'host':
                args.update({'metric': ['HealthyHostCount', 'UnHealthyHostCount'], 'namespace': 'AWS/ELB', 'cols': ['Minimum', 'Maximum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == 'status':
                args.update({'metric': ['BackendConnectionErrors', 'StatusCheckFailed_Instance', 'StatusCheckFailed_System'], 'namespace': 'AWS/EC2', 'cols': ['Sum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == 'network':
                args.update({'metric': ['NetworkIn', 'NetworkOut'], 'namespace': 'AWS/EC2', 'cols': ['Sum'], 'dims': {}, 'unit': 'Bytes', 'calc_rate': True})
            elif qs == 'cpu':
                args.update({'metric': ['CPUUtilization'], 'namespace': 'AWS/EC2', 'cols': ['Average'], 'dims': {}, 'unit': 'Percent', 'calc_rate': False})
            elif qs == 'credit':
                args.update({'metric': ['CPUCreditUsage', 'CPUCreditBalance'], 'namespace': 'AWS/EC2', 'cols': ['Average'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == 'volops':
                args.update({'metric': ['VolumeWriteOps', 'VolumeReadOps'], 'namespace': 'AWS/EBS', 'cols': ['Sum'], 'dims': {}, 'unit': 'Count', 'calc_rate': False})
            elif qs == 'volbytes':
                args.update({'metric': ['VolumeWriteBytes', 'VolumeReadBytes'], 'namespace': 'AWS/EBS', 'cols': ['Sum'], 'dims': {}, 'unit': 'Bytes', 'calc_rate': True})
            else:
                return error400(request)
    else:
        return error400(request)
    # Reached only via the non-'init' branch above (every other path has
    # already returned); scope the query to the configured resource for
    # the chosen namespace, then run it.
    if args['namespace'] == 'AWS/ELB':
        args['dims'] = {'LoadBalancerName': AWS['ELB_NAME']}
    elif args['namespace'] == 'AWS/EC2':
        args['dims'] = {'InstanceId': AWS['EC2_INSTANCE_ID']}
    elif args['namespace'] == 'AWS/EBS':
        args['dims'] = {'VolumeId': AWS['EBS_VOLUME_ID']}
    return aws_call(conn, args, qs, req_id)
def ga_stats(request):
    """gviz/JSON endpoint for Google Analytics statistics.

    Exchanges the stored OAuth refresh token for an access token on every
    call, then serves one of: 'init' (30-day totals with deltas vs the
    previous 30 days), 'chart' (sessions over span 'sp'), 'pie' (breakdown
    by user type / browser / pageview), or 'geo' (sessions by country).
    'tqx' carries the gviz request id.
    """
    if 'qs' in request.GET and 'tqx' in request.GET:
        qs = request.GET.get('qs')
        req_id = request.GET.get('tqx').replace('reqId:', '')
        access_token = requests.post('https://www.googleapis.com/oauth2/v3/token?refresh_token=%s&client_id=%s&client_secret=%s&grant_type=refresh_token' % (GA['REFRESH_TOKEN'], GA['CLIENT_ID'], GA['CLIENT_SECRET'])).json()['access_token']
        stats = {}
        # Pre-encoded ':' and ',' for the GA metric query strings.
        url_colon = urllib.quote(':')
        url_comma = urllib.quote(',')
        if qs == 'init':
            # Totals for the last 30 days and the 30 days before that.
            temp = requests.get('https://www.googleapis.com/analytics/v3/data/ga?ids=ga%s%s&start-date=30daysAgo&end-date=yesterday&metrics=ga%ssessionDuration%sga%sbounceRate%sga%spageviewsPerSession%sga%spageviews%sga%ssessions%sga%susers&access_token=%s' % (url_colon, GA['ID'], url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, access_token)).json()['totalsForAllResults']
            temp_prev = requests.get('https://www.googleapis.com/analytics/v3/data/ga?ids=ga%s%s&start-date=60daysAgo&end-date=30daysAgo&metrics=ga%ssessionDuration%sga%sbounceRate%sga%spageviewsPerSession%sga%spageviews%sga%ssessions%sga%susers&access_token=%s' % (url_colon, GA['ID'], url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, url_comma, url_colon, access_token)).json()['totalsForAllResults']
            for i, key in enumerate(temp):
                # Strip the 'ga:' prefix from the metric name.
                ga_key = key[3:]
                if ga_key in ['bounceRate', 'pageviewsPerSession']:
                    prev = '%.2f' % (float(temp[key]) - float(temp_prev[key]))
                    curr = '%.2f' % float(temp[key])
                elif ga_key == 'sessionDuration':
                    # NOTE(review): divides by 1000 as if the value were in
                    # milliseconds — confirm units against the GA API.
                    diff = int(float(temp[key]) / 1000) - int(float(temp_prev[key]) / 1000)
                    prev = str(timedelta(seconds=abs(diff)))
                    if diff < 0:
                        prev = '-%s' % prev
                    curr = str(timedelta(seconds=int(float(temp[key]) / 1000)))
                else:
                    prev = '%d' % (int(temp[key]) - int(temp_prev[key]))
                    curr = '%d' % int(temp[key])
                stats.update({ga_key: curr, (ga_key + '_prev'): prev})
            return simplejson.dumps(stats, sort_keys=True, indent=' ' * 4)
        elif 'sp' in request.GET:
            sp = request.GET.get('sp')
            if qs == 'chart':
                # Defaults for daily granularity; '24h' switches to hourly.
                (dm, strpt) = ('date', '%Y%m%d')
                if sp == '24h':
                    (d1, d2, dm, strpt, ts) = ('yesterday', 'today', 'dateHour', '%Y%m%d%H', 'datetime')
                elif sp == '7d':
                    (d1, d2, ts) = ('7daysAgo', 'today', 'date')
                elif sp == '1m':
                    (d1, d2, ts) = ('30daysAgo', 'yesterday', 'date')
                elif sp == '3m':
                    (d1, d2, ts) = ('90daysAgo', 'yesterday', 'date')
                else:
                    return error400(request)
                i = 0
                # GA can briefly return a row-less payload; retry up to 3x.
                while True:
                    temp = requests.get('https://www.googleapis.com/analytics/v3/data/ga?ids=ga%s%s&start-date=%s&end-date=%s&metrics=ga%ssessions&dimensions=ga%s%s&access_token=%s' % (url_colon, GA['ID'], d1, d2, url_colon, url_colon, dm, access_token)).json()
                    if 'rows' in temp:
                        temp = temp['rows']
                        break
                    time.sleep(2)
                    i += 1
                    if i == 3:
                        return error500(request)
                data = []
                stats = ['Timestamp', 'Sessions']
                desp = {'Timestamp': (ts, 'Timestamp'), 'Sessions': ('number', 'Sessions')}
                for row in temp:
                    ts = datetime.strptime(row[0], strpt)
                    ts = ts if sp == '24h' else ts.date()
                    data.append({u'Timestamp': ts, 'Sessions': float(row[1])})
                data = sorted(data, key=operator.itemgetter(stats[0]))
                data_table = gviz_api.DataTable(desp)
                data_table.LoadData(data)
                return data_table.ToJSonResponse(columns_order=stats, order_by='Timestamp', req_id=req_id)
            elif qs == 'pie':
                # (metric, dimension, display column) per pie chart type.
                if sp == 'session':
                    (me, dm, field) = ('sessions', 'userType', 'Sessions')
                elif sp == 'user':
                    (me, dm, field) = ('users', 'userType', 'Visitors')
                elif sp == 'browser':
                    (me, dm, field) = ('users', 'browser', 'Browsers')
                elif sp == 'pageview':
                    (me, dm, field) = ('pageviews', 'userType', 'Page Views')
                else:
                    return error400(request)
                i = 0
                while True:
                    temp = requests.get('https://www.googleapis.com/analytics/v3/data/ga?ids=ga%s%s&start-date=30daysAgo&end-date=yesterday&metrics=ga%s%s&dimensions=ga%s%s&access_token=%s' % (url_colon, GA['ID'], url_colon, me, url_colon, dm, access_token)).json()
                    if 'rows' in temp:
                        temp = temp['rows']
                        break
                    time.sleep(2)
                    i += 1
                    if i == 3:
                        return error500(request)
                data = []
                stats = ['Category', field]
                desp = {'Category': ('string', 'Category'), field: ('number', field)}
                for row in temp:
                    data.append({'Category': row[0], field: float(row[1])})
                data = sorted(data, key=operator.itemgetter(stats[0]))
                if sp == 'browser':
                    # Keep only the top 4 browsers by share.
                    data = sorted(data, key=operator.itemgetter(stats[1]), reverse=True)[:min(len(data), 4)]
                data_table = gviz_api.DataTable(desp)
                data_table.LoadData(data)
                # NOTE(review): order_by='Timestamp' names a column this
                # table does not have — confirm gviz_api tolerates it.
                return data_table.ToJSonResponse(columns_order=stats, order_by='Timestamp', req_id=req_id)
            else:
                return error400(request)
        elif qs == 'geo':
            i = 0
            while True:
                temp = requests.get('https://www.googleapis.com/analytics/v3/data/ga?ids=ga%s%s&start-date=30daysAgo&end-date=yesterday&metrics=ga%ssessions&dimensions=ga%scountry&access_token=%s' % (url_colon, GA['ID'], url_colon, url_colon, access_token)).json()
                if 'rows' in temp:
                    temp = temp['rows']
                    break
                time.sleep(2)
                i += 1
                if i == 3:
                    return error500(request)
            data = []
            stats = ['Country', 'Sessions']
            desp = {'Country': ('string', 'Country'), 'Sessions': ('number', 'Sessions')}
            for row in temp:
                data.append({'Country': row[0], 'Sessions': float(row[1])})
            data = sorted(data, key=operator.itemgetter(stats[0]))
            data_table = gviz_api.DataTable(desp)
            data_table.LoadData(data)
            # NOTE(review): order_by='Timestamp' names a column this table
            # does not have — confirm gviz_api tolerates it.
            return data_table.ToJSonResponse(columns_order=stats, order_by='Timestamp', req_id=req_id)
        else:
            return error400(request)
    else:
        return error400(request)