def dq_subsdump_oracle():
    """JSON feed of per-table daily row counts from the FC subsdump table.

    POST supplies ``start_date``/``end_date`` (YYYY-MM-DD); any other
    method defaults to the trailing month ending today.

    Returns:
        flask.Response: JSON ``{"dates": [...], "data": {table: [counts]}}``
        where each count list is aligned with ``dates``.
    """
    if request.method == "POST":
        start_date = datetime.strptime(request.form["start_date"],
                                       "%Y-%m-%d").date()
        end_date = datetime.strptime(request.form["end_date"],
                                     "%Y-%m-%d").date()
    else:
        # GET (or anything else): default to the last month.  The original
        # used ``elif "GET"`` which left the dates unbound for other verbs.
        end_date = date.today()
        start_date = end_date - relativedelta(months=1)
    dates = [start_date + timedelta(days=x)
             for x in range((end_date - start_date).days + 1)]
    lookup = db.session.query(subsdump).filter(
        and_(subsdump.file_date >= start_date,
             subsdump.file_date <= end_date,
             subsdump.db == "FC")).all()
    result_set = {"dates": [d.strftime("%Y-%m-%d") for d in dates],
                  "data": {}}
    # O(1) date lookups instead of list.index per row.
    date_index = {d: i for i, d in enumerate(dates)}
    for row in lookup:
        # Lazily create the table's series, then record the day's count
        # (the original duplicated the assignment in both if/else branches).
        if row.table_name not in result_set["data"]:
            result_set["data"][row.table_name] = init_list(len(dates), 0)
        result_set["data"][row.table_name][date_index[row.file_date]] = row.count
    return jsonify(result_set)
def dqchecks_exce_oracle_excel():
    """Aggregate OCS-manifest vs Oracle T1 sums per period and CDR type.

    POST form supplies ``start_date``, ``end_date`` and ``period``
    (``day``/``month``/``year``).  Sums manifest, T1 and variance per
    (period, cdr_type) and collects them into ``cdr_dict`` via
    ``insert_cdr``.

    NOTE(review): ``cdr_dict`` is built but never returned or exported
    (the function implicitly returns None) — compare with
    ``dqchecks_hive_excel`` which renders a workbook; confirm intent.
    """
    # Route is presumably POST-only; form fields are only read on POST.
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    # Map the requested granularity onto a SQL expression.  An unexpected
    # value leaves ``period`` unbound (NameError), matching the original.
    if period_select == "day":
        period = manifest_oracle_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_oracle_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_oracle_monitoring.file_date)
    dates = db.session.query(period).filter(
        and_(manifest_oracle_monitoring.file_date >= start_date,
             manifest_oracle_monitoring.file_date <= end_date)).group_by(
                 period).all()
    lookup = db.session.query(
        period, manifest_oracle_monitoring.cdr_type,
        func.sum(manifest_oracle_monitoring.ocs_manifest),
        func.sum(manifest_oracle_monitoring.t1_oracle),
        func.sum(manifest_oracle_monitoring.variance)).filter(
            and_(manifest_oracle_monitoring.file_date >= start_date,
                 manifest_oracle_monitoring.file_date <= end_date)).group_by(
                     period, manifest_oracle_monitoring.cdr_type).all()
    len_date = len(dates)
    dates = [d[0] for d in dates]
    cdr_dict = {}
    for row in lookup:
        # Lazily create the per-CDR series, then record this period's sums
        # (the original duplicated the insert_cdr call in both branches).
        if row.cdr_type not in cdr_dict:
            cdr_dict[row.cdr_type] = {
                "manifest": init_list(len_date),
                "t1": init_list(len_date),
                "variance": init_list(len_date)
            }
        insert_cdr(cdr_dict[row.cdr_type], dates.index(row[0]),
                   row[2], row[3], row[4])
def topsku_day_js():
    """JSON feed of per-brand top-up amounts/counts for one day.

    POST supplies ``sku_date``; otherwise today's date is used.  Results
    are grouped per (txn_date, processing_hr, brand); ``insert_sku``
    places each row into one of 6 processing-hour slots, and slot totals
    are accumulated across brands.

    Returns:
        flask.Response: JSON ``{"totals": {...}, "brands": {...}}``.
    """
    if request.method == "POST":
        query_date = request.form["sku_date"]
    else:
        query_date = date.today()
    lookup = db.session.query(
        top_sku_talendfc.txn_date, top_sku_talendfc.processing_hr,
        top_sku_talendfc.brand,
        func.sum(top_sku_talendfc.txn_amount),
        func.sum(top_sku_talendfc.topup_cnt)).filter(
            top_sku_talendfc.txn_date == query_date).group_by(
                top_sku_talendfc.txn_date, top_sku_talendfc.processing_hr,
                top_sku_talendfc.brand).all()
    sku_dict = {
        "totals": {
            "total_amt_hr": init_list(6, 0),
            "total_cnt_hr": init_list(6, 0)
        },
        "brands": OrderedDict()
    }
    for row in lookup:
        # Create the brand's series on first sight, then record the slot
        # (the original duplicated the insert_sku call in both branches).
        if row.brand not in sku_dict["brands"]:
            sku_dict["brands"][row.brand] = {
                "amount": init_list(6, 0),
                "count": init_list(6, 0)
            }
        insert_sku(sku_dict["brands"][row.brand], row.processing_hr,
                   row[3], row[4])
    # Accumulate per-slot totals across brands, treating None as zero.
    totals = sku_dict["totals"]
    for brand_data in sku_dict["brands"].values():
        for i in range(6):
            amt = brand_data["amount"][i]
            cnt = brand_data["count"][i]
            totals["total_amt_hr"][i] += float(0 if amt is None else amt)
            totals["total_cnt_hr"][i] += int(0 if cnt is None else cnt)
    return jsonify(sku_dict)
def bca_monitoring_dq_pcodes():
    """JSON feed of per-brand BCA DQ p-code metrics over a date range.

    POST supplies ``start_date``/``end_date`` (YYYY-MM-DD); any other
    method defaults to the trailing month ending today.

    Returns:
        flask.Response: JSON ``{"dates": [...], "data": {brand: {...}}}``
        with ``total_topup``/``topup_count``/``total_count`` series aligned
        to ``dates``.
    """
    if request.method == "POST":
        start_date = datetime.strptime(request.form["start_date"],
                                       "%Y-%m-%d").date()
        end_date = datetime.strptime(request.form["end_date"],
                                     "%Y-%m-%d").date()
    else:
        # GET (or anything else): default to the last month.  The original
        # ``elif "GET"`` left the dates unbound for other verbs.
        end_date = date.today()
        start_date = end_date - relativedelta(months=1)
    dates = [start_date + timedelta(days=x)
             for x in range((end_date - start_date).days + 1)]
    lookup = db.session.query(bca_dq_pcodes).filter(
        and_(bca_dq_pcodes.effective_date >= start_date,
             bca_dq_pcodes.effective_date <= end_date)).all()
    result_set = {"dates": [d.strftime("%Y-%m-%d") for d in dates],
                  "data": {}}
    # O(1) date lookups instead of three list.index calls per row.
    date_index = {d: i for i, d in enumerate(dates)}
    for row in lookup:
        # Lazily create the brand's series, then write all three metrics
        # (the original duplicated the assignments in both branches).
        if row.brand not in result_set["data"]:
            result_set["data"][row.brand] = {
                "total_topup": init_list(len(dates)),
                "topup_count": init_list(len(dates)),
                "total_count": init_list(len(dates))
            }
        i = date_index[row.effective_date]
        series = result_set["data"][row.brand]
        series["total_topup"][i] = row.total_topup
        series["topup_count"][i] = row.count_topup
        series["total_count"][i] = row.total_count
    return jsonify(result_set)
def topsku_week_js():
    """JSON feed of weekly top-up amount/count totals (processing_hr == 1).

    POST supplies ``start_date``/``end_date``; otherwise the current week
    (starting the day after last weekday boundary) is used.

    Returns:
        flask.Response: JSON with ``dates`` plus stringified ``amounts``
        and ``counts`` series aligned to those dates.
    """
    if request.method == "POST":
        start_date = datetime.strptime(request.form["start_date"],
                                       "%Y-%m-%d").date()
        end_date = datetime.strptime(request.form["end_date"],
                                     "%Y-%m-%d").date()
    else:
        today = date.today()
        # weekday()+1 days back lands on the most recent Sunday.
        start_date = today - timedelta(days=today.weekday() + 1)
        end_date = start_date + timedelta(days=6)
    dates = [start_date + timedelta(days=x)
             for x in range((end_date - start_date).days + 1)]
    lookup = db.session.query(
        top_sku_talendfc.txn_date,
        func.sum(top_sku_talendfc.txn_amount),
        func.sum(top_sku_talendfc.topup_cnt)).filter(
            and_(top_sku_talendfc.txn_date >= start_date,
                 top_sku_talendfc.txn_date <= end_date,
                 top_sku_talendfc.processing_hr == 1)).group_by(
                     top_sku_talendfc.txn_date).all()
    sku_dict = {
        "dates": [d.strftime("%Y-%m-%d") for d in dates],
        "amounts": init_list(len(dates)),
        "counts": init_list(len(dates))
    }
    date_index = {d: i for i, d in enumerate(dates)}
    for row in lookup:
        # The original wrapped this in a bare ``except: continue``; an
        # explicit membership test skips out-of-range dates without
        # swallowing unrelated exceptions.
        idx = date_index.get(row.txn_date)
        if idx is None:
            continue
        sku_dict["amounts"][idx] = str(row[1])
        sku_dict["counts"][idx] = str(row[2])
    return jsonify(sku_dict)
def sprint2_api():
    """Durations/file-count feed for the sprint-2 dashboard.

    POST: daily averages of duration/file_count over the posted date
    range, grouped per (file_date, cdr_type).
    Otherwise: a rolling 24-hour window of hourly rows — exactly
    yesterday when called at hour 0, else from this hour yesterday
    through the previous hour today.

    Returns:
        flask.Response: JSON ``{"dates": [...], "data": {cdr_type:
        {"duration": [...], "count": [...]}}}``.
    """
    # The CBS CDR types of interest; hoisted so the tuple is not repeated
    # in every query.
    cdr_types = ("cbs_cdr_com", "cbs_cdr_mon", "cbs_cdr_cm",
                 "cbs_cdr_adj", "cbs_cdr_first", "cbs_cdr_vou")
    if request.method == "POST":
        # Aggregate per day of the date range posted by the form.
        start_date = datetime.strptime(request.form["start_date"],
                                       "%Y-%m-%d").date()
        end_date = datetime.strptime(request.form["end_date"],
                                     "%Y-%m-%d").date()
        dates = [start_date + timedelta(days=x)
                 for x in range((end_date - start_date).days + 1)]
        lookup = db.session.query(
            durations.file_date, durations.cdr_type,
            func.avg(durations.average_duration),
            func.avg(durations.file_count)) \
            .filter(and_(durations.file_date >= start_date,
                         durations.file_date <= end_date,
                         durations.cdr_type.in_(cdr_types))) \
            .group_by(durations.file_date, durations.cdr_type).all()
        result = {"dates": [d.strftime("%Y-%m-%d") for d in dates],
                  "data": {}}
        for l in lookup:
            # Lazily create the CDR's series, then fill the day's slot
            # (the original duplicated these writes in both branches).
            if l.cdr_type not in result["data"]:
                result["data"][l.cdr_type] = {
                    "duration": init_list(len(dates), 0),
                    "count": init_list(len(dates), 0),
                }
            i = dates.index(l.file_date)
            result["data"][l.cdr_type]["duration"][i] = str(l[2])
            result["data"][l.cdr_type]["count"][i] = str(l[3])
    else:
        # 24-hour view.
        dt_now = datetime.now()
        if dt_now.hour == 0:
            # At midnight "the last 24 hours" is exactly yesterday.
            yesterday = date.today() - timedelta(days=1)
            lookup = db.session.query(
                durations.file_date, durations.cdr_type, durations.hour,
                durations.average_duration, durations.file_count) \
                .filter(and_(durations.file_date == yesterday,
                             durations.cdr_type.in_(cdr_types))).all()
            start_dt = datetime.combine(yesterday, time(0, 0))
        else:
            # Window: [this hour yesterday .. previous hour today].
            # Use the already-captured dt_now instead of re-calling
            # datetime.now(), which could tick over an hour boundary.
            start_hour = dt_now.hour
            end_hour = dt_now.hour - 1
            start_date = (dt_now - timedelta(hours=24)).date()
            end_date = dt_now.date()
            lookup = db.session.query(
                durations.file_date, durations.cdr_type, durations.hour,
                durations.average_duration, durations.file_count) \
                .filter(durations.cdr_type.in_(cdr_types)) \
                .filter(or_(and_(durations.file_date == start_date,
                                 durations.hour >= start_hour),
                            and_(durations.file_date == end_date,
                                 durations.hour <= end_hour))).all()
            start_dt = datetime.combine(start_date, time(start_hour, 0))
        dates = [start_dt + timedelta(hours=x) for x in range(24)]
        result = {"dates": [d.strftime("%Y-%m-%d %I %p") for d in dates],
                  "data": {}}
        for l in lookup:
            dt = datetime.combine(l.file_date, time(l.hour, 0))
            if l.cdr_type not in result["data"]:
                result["data"][l.cdr_type] = {
                    "duration": init_list(len(dates), 0),
                    "count": init_list(len(dates), 0),
                }
            i = dates.index(dt)
            result["data"][l.cdr_type]["duration"][i] = str(l[3])
            result["data"][l.cdr_type]["count"][i] = str(l[4])
    return jsonify(result)
def dqchecks_manvsoracle_js():
    """Per-hour manifest/T1/variance series for every CDR type on one day.

    POST supplies ``cdr_date``; otherwise today's date is used.  Replaces
    the original 30 parallel locals and 10-arm elif chain with a single
    dict keyed ``"<cdr>_manifest" / "<cdr>_t1" / "<cdr>_variance"`` —
    the JSON payload keys are unchanged.

    Returns:
        flask.Response: JSON mapping those keys to hour-indexed lists.
    """
    cdr_types = ("com", "vou", "first", "mon", "cm", "adj",
                 "data", "voice", "sms", "clr")
    if request.method == "POST":
        query_date = request.form["cdr_date"]
    else:
        query_date = date.today()
    results = db.session.query(manifest_oracle_monitoring).filter(
        manifest_oracle_monitoring.file_date == query_date)
    # Pre-create every series so absent CDR types still appear in the
    # payload, exactly as the original's unconditional init_list() calls.
    result_set = {}
    for t in cdr_types:
        result_set["{}_manifest".format(t)] = init_list()
        result_set["{}_t1".format(t)] = init_list()
        result_set["{}_variance".format(t)] = init_list()
    for r in results:
        # Rows with an unknown cdr_type are ignored, as before.
        if r.cdr_type in cdr_types:
            hr = r.processing_hour
            result_set["{}_manifest".format(r.cdr_type)][hr] = str(r.ocs_manifest)
            result_set["{}_t1".format(r.cdr_type)][hr] = str(r.t1_oracle)
            result_set["{}_variance".format(r.cdr_type)][hr] = str(r.variance)
    return jsonify(result_set)
def dqchecks_overview_oracle_js():
    """Variance-per-period overview JSON for the Oracle bashing dashboard.

    POST supplies ``start_date``/``end_date``/``period``; GET defaults to
    the trailing four months at daily granularity.  Sums the variance per
    (period, cdr_type) and emits one ``variance_<cdr>`` series per CDR
    type, aligned with ``date_list`` — payload keys unchanged from the
    original.

    Returns:
        flask.Response: JSON with ``date_list`` and the variance series.
    """
    cdr_types = ("com", "vou", "first", "mon", "cm", "adj",
                 "data", "voice", "sms", "clr")
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    else:
        # Default view: trailing four months, daily granularity.
        date_today = date.today()
        start_date = date_today - relativedelta(months=4)
        end_date = date_today
        period_select = "day"
    # Map the requested granularity onto a SQL expression; an unexpected
    # value leaves ``period`` unbound (NameError), matching the original.
    if period_select == "day":
        period = manifest_oracle_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_oracle_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_oracle_monitoring.file_date)
    dates = db.session.query(period).filter(
        and_(manifest_oracle_monitoring.file_date >= start_date,
             manifest_oracle_monitoring.file_date <= end_date)).group_by(
                 period).all()
    variances = db.session.query(
        period, manifest_oracle_monitoring.cdr_type,
        func.sum(manifest_oracle_monitoring.variance)).filter(
            and_(manifest_oracle_monitoring.file_date >= start_date,
                 manifest_oracle_monitoring.file_date <= end_date)).group_by(
                     period, manifest_oracle_monitoring.cdr_type).all()
    len_date = len(dates)
    date_list = init_list(len_date)
    date_index = {}
    for i, d in enumerate(dates):
        date_list[i] = format_date(d[0], period_select)
        date_index[d[0]] = i
    # One series per CDR type; a single pass over the variance rows
    # replaces the original O(dates * variances) nested elif scan.
    series = {t: init_list(len_date) for t in cdr_types}
    for v in variances:
        if v.cdr_type in series and v[0] in date_index:
            series[v.cdr_type][date_index[v[0]]] = str(v[2])
    result_set = {"date_list": date_list}
    for t in cdr_types:
        result_set["variance_{}".format(t)] = series[t]
    return jsonify(result_set)
def dqchecks_hive_excel():
    """Build and stream an .xlsx workbook bashing OCS manifest vs Hive T1.

    POST form supplies ``start_date``, ``end_date`` and ``period``
    (``day``/``month``/``year``).  Per-CDR sums are laid out as small
    4-column tables (Date | MANIFEST | T1 | VARIANCE) side by side, wrapping
    to a new band of rows when the sheet gets too wide, then returned as an
    attachment.

    Returns:
        flask.Response: the workbook bytes with an xlsx MIME type and a
        Content-Disposition attachment header.
    """
    # Route is presumably POST-only; form fields are only read on POST.
    if request.method == "POST":
        start_date = request.form["start_date"]
        end_date = request.form["end_date"]
        period_select = request.form["period"]
    # Map the requested granularity onto a SQL expression; an unexpected
    # value leaves ``period`` unbound (NameError), matching the original.
    if period_select == "day":
        period = manifest_hive_monitoring.file_date
    elif period_select == "month":
        period = func.month(manifest_hive_monitoring.file_date)
    elif period_select == "year":
        period = func.year(manifest_hive_monitoring.file_date)
    dates = db.session.query(period).filter(
        and_(manifest_hive_monitoring.file_date >= start_date,
             manifest_hive_monitoring.file_date <= end_date)).group_by(
                 period).all()
    lookup = db.session.query(
        period, manifest_hive_monitoring.cdr_type,
        func.sum(manifest_hive_monitoring.ocs_manifest),
        func.sum(manifest_hive_monitoring.t1_hive),
        func.sum(manifest_hive_monitoring.variance)).filter(
            and_(manifest_hive_monitoring.file_date >= start_date,
                 manifest_hive_monitoring.file_date <= end_date)).group_by(
                     period, manifest_hive_monitoring.cdr_type).all()
    len_date = len(dates)
    dates = [d[0] for d in dates]
    cdr_dict = {}
    for row in lookup:
        # Lazily create the per-CDR series, then record this period's sums
        # (the original duplicated the insert_cdr call in both branches).
        if row.cdr_type not in cdr_dict:
            cdr_dict[row.cdr_type] = {
                "manifest": init_list(len_date),
                "t1": init_list(len_date),
                "variance": init_list(len_date)
            }
        insert_cdr(cdr_dict[row.cdr_type], dates.index(row[0]),
                   row[2], row[3], row[4])
    # Render the workbook into an in-memory buffer.
    output = io.BytesIO()
    workbook = Workbook()
    workbook_name = "Manifest Hive Bashing {}".format(
        datetime.now().strftime("%Y-%m-%d %H-%M-%S"))
    ws = workbook.create_sheet('Manifest vs T1 Bashing Validation', 0)
    center = Alignment(horizontal='center')
    x_pos = 1
    y_pos = 1
    temp_y = 1   # top row of the current band of CDR tables
    x_lim = 19   # rightmost column before wrapping to a new band
    for c in cdr_dict:
        # Header: merged "Date" cell plus the CDR name over its 3 columns.
        ws.cell(row=y_pos, column=x_pos, value="Date").alignment = center
        ws.merge_cells(start_row=y_pos, start_column=x_pos,
                       end_row=y_pos + 1, end_column=x_pos)
        ws.cell(row=y_pos, column=x_pos + 1, value=c).alignment = center
        ws.merge_cells(start_row=y_pos, start_column=x_pos + 1,
                       end_row=y_pos, end_column=x_pos + 3)
        ws.cell(row=y_pos + 1, column=x_pos + 1,
                value="MANIFEST").alignment = center
        ws.cell(row=y_pos + 1, column=x_pos + 2,
                value="T1").alignment = center
        ws.cell(row=y_pos + 1, column=x_pos + 3,
                value="VARIANCE").alignment = center
        # One data row per period; y_pos advances with every row written.
        # NOTE(review): strftime assumes period "day" (date objects) —
        # month/year groupings yield ints; confirm callers only use "day".
        for i, d in enumerate(dates):
            ws.cell(row=y_pos + 2, column=x_pos,
                    value=d.strftime("%m/%d/%y"))
            ws.cell(row=y_pos + 2, column=x_pos + 1,
                    value=cdr_dict[c]["manifest"][i])
            ws.cell(row=y_pos + 2, column=x_pos + 2,
                    value=cdr_dict[c]["t1"][i])
            ws.cell(row=y_pos + 2, column=x_pos + 3,
                    value=cdr_dict[c]["variance"][i])
            y_pos += 1
        # Place the next table to the right, or wrap to a new band below.
        if x_pos + 3 < x_lim:
            y_pos = temp_y
            x_pos += 5
        else:
            y_pos = temp_y + (len(dates) + 3)
            temp_y = y_pos
            x_pos = 1
    workbook.save(output)
    output.seek(0)
    filename = workbook_name
    return Response(
        output,
        # Standard xlsx MIME type — the original omitted the "vnd." tree
        # prefix, which some clients reject.
        mimetype=
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        headers={
            "Content-Disposition":
            "attachment;filename={}.xlsx".format(filename)
        })