def visit(self, func_name):
    """Build the SQL-expression factory table for *func_name*.

    Constructs a dispatch dict of lambdas keyed first by dialect
    ('entry' = dialect-independent defaults, then 'oracle',
    'postgresql', 'db2', 'hana', 'mssql' overrides) and then by logical
    function name.  Each lambda turns its column-expression argument(s)
    into a SQLAlchemy expression.

    NOTE(review): no lookup/return of ``func_entry`` is visible in this
    block — presumably the selection logic (e.g. dialect fallback to
    'entry') follows; confirm against the full source.
    """
    from sqlalchemy.sql import func, case
    func_entry = {
        # Dialect-independent builders.
        'entry': {
            # TIMESTAMPADD(unit, 1, x): advance x by one calendar unit.
            'next_year': lambda x: func.TIMESTAMPADD(text('year'), self.visit_param(1), x),
            'next_quarter': lambda x: func.TIMESTAMPADD(text('quarter'), self.visit_param(1), x),
            'next_month': lambda x: func.TIMESTAMPADD(text('month'), self.visit_param(1), x),
            'next_day': lambda x: func.TIMESTAMPADD(text('day'), self.visit_param(1), x),
            'cast': lambda x, y: func.cast(x, y),
            'cast_day': lambda x: func.cast(x, get_type('date')),
            # Renders as year-month, e.g. '2020-07' (via the dialect's
            # 'year'/'month' builders below).
            'cast_month': lambda x: self.visit('concat')(
                self.visit('year')(x), self.visit_param('-'), self.visit('month')(x)
            ),
            # Renders as year + quarter + literal 'Q' suffix.
            'cast_quarter': lambda x: self.visit('concat')(
                self.visit('year')(x), self.visit('quarter')(x), self.visit_param('Q')
            ),
            'cast_year': lambda x: self.visit('year')(x)
        },
        'oracle': {
            'if': lambda x, y, z: case([(x, y)], else_=z),
            # Fold args pairwise through CONCAT (Oracle CONCAT is binary).
            'concat': lambda *args: reduce(func.concat, args),
            'year': lambda x: func.to_char(x, self.visit_param('yyyy')),
            'month': lambda x: func.to_char(x, self.visit_param('mm')),
            'day': lambda x: func.to_char(x, self.visit_param('dd'))
        },
        'postgresql': {
            'if': lambda x, y, z: case([(x, y)], else_=z),
            'year': lambda x: func.to_char(x, self.visit_param('yyyy')),
            'month': lambda x: func.to_char(x, self.visit_param('mm')),
            'day': lambda x: func.to_char(x, self.visit_param('dd'))
        },
        'db2': {
            'concat': lambda *args: reduce(func.concat, args),
            'if': lambda x, y, z: case([(x, y)], else_=z)
        },
        'hana': {
            'if': lambda x, y, z: case([(x, y)], else_=z),
            'concat': lambda *args: reduce(func.concat, args),
            'year': lambda x: func.to_char(x, self.visit_param('yyyy')),
            'month': lambda x: func.to_char(x, self.visit_param('mm')),
            'day': lambda x: func.to_char(x, self.visit_param('dd'))
        },
        'mssql': {
            'if': lambda x, y, z: case([(x, y)], else_=z),
            # MSSQL: cast every operand to string first, then use '+'
            # string concatenation instead of CONCAT.
            'concat': lambda *args: reduce(lambda a, b: a+b, map(
                lambda x: func.cast(x, get_type('string')), args
            ))
        }
    }
def selectReportMonthDetails(userId, year, month, startDay, endDay):
    """Fetch every day of the selected month together with its logged work time.

    :param userId: id of the registered user
    :param year: report year
    :param month: report month
    :param startDay: first date of the month
    :param endDay: last date of the month
    """
    # Series of day offsets (relative to today) covering the whole month.
    offsets = db.session.query(
        func.generate_series(
            func.date(startDay) - func.CURRENT_DATE(),
            func.date(endDay) - func.CURRENT_DATE()).label('i')).subquery()
    # Map each offset back to its day-of-month number.
    day_numbers = db.session.query(
        func.cast(func.date_part('day', func.CURRENT_DATE() + offsets.c.i),
                  Integer).label('day')).subquery()
    # Total logged time = normal + overtime + holiday hours.
    total_hours = (__model.normal_working_hours + __model.overtime_hours +
                   __model.holiday_work_hours)
    return db.session.query(
        day_numbers.c.day,
        __model.rest_flg,
        db.func.to_char(total_hours, '999D99').label('workTime'),
    ).outerjoin(
        __model,
        and_(day_numbers.c.day == __model.work_day,
             __model.user_id == userId,
             __model.work_year == year,
             __model.work_month == month)).order_by(day_numbers.c.day).all()
def analyze_day(day="2014-08-22"):
    """Compute average delay per (arrival stop, hour) for one day and save it as JSON.

    :param day: calendar date (YYYY-MM-DD string) to filter trips by.
    """
    hour = func.cast(func.substr(trips.c.joreohitusaika_time, 1, 2), sa.Integer)
    cols = [trips.c.tulopysakki, hour,
            func.avg(trips.c.ohitusaika_ero).label('delay_avg')]
    # Bug fix: the original did `new_conds = conditions` followed by
    # `.append(...)`, which aliased (not copied) the module-level
    # `conditions` list — every call permanently appended another day
    # filter to the shared list.  Build a fresh list instead.
    new_conds = conditions + [trips.c.tapahtumapaiva == day]
    conds = and_(*new_conds)
    groupcols = [trips.c.tulopysakki, hour]
    ts = run(cols, conds, groupcols, n_limit=None)
    save_multi_json(ts, "../site/hourly_stop_delays_%s.json" % day)
def get_stop_time_matrix():
    """Aggregate total stop time per (arrival stop, hour-of-week) and dump a CSV matrix."""
    # Hour-of-week index: weekday (strftime '%w', 0-6) * 24 + hour of day.
    hour_of_week = (func.strftime('%w', trips.c.tapahtumapaiva) * 24 +
                    func.cast(func.substr(trips.c.tuloaika_time, 1, 2), sa.Integer))
    selected = [trips.c.tulopysakki, hour_of_week,
                func.sum(trips.c.pysakkiaika).label('stop_sum')]
    rows = run(selected, and_(*conditions),
               [trips.c.tulopysakki, hour_of_week], n_limit=None)
    # One column per distinct stop, one row per hour of the week (7 * 24 = 168).
    unique_stops = list({row[0] for row in rows})
    col_index = {stop: pos for pos, stop in enumerate(unique_stops)}
    matrix = np.zeros((168, len(unique_stops)))
    for stop, wh, total in rows:
        matrix[wh, col_index[stop]] = total
    # First CSV line is the stop ids (column headers), then one line per hour.
    with open('../site/stop_time_matrix.csv', 'w') as f:
        f.write(",".join(map(str, unique_stops)) + '\n')
        for hour_row in matrix:
            f.write(",".join(map(str, hour_row)) + '\n')
def assign_order():
    """Show the assignment form (GET) or create a maintenance order (POST)."""
    if request.method == "GET":
        return render_template("order/assign_order.html", techs=getTech())
    serial = request.form.get("serial")
    place = request.form.get("place")
    tech_code = request.form.get("tech")
    # Orders may only be assigned to operational devices.
    lookup = device_essentials.select().where(
        device_essentials.c.serial == serial).where(
        device_essentials.c.status == "operational").limit(1)
    device = db.execute(lookup).fetchone()
    if device is None:
        return apology("Device serial is wrong or device is scraped", 400)
    # The per-device-type PPM table that will receive the order extras.
    extras_table = ppm_map[device[2]]
    new_order = order_essentials.insert().values(
        serial=serial,
        place=place,
        type=device[2],
        department=session.get("department"),
        tech_code=tech_code,
        date_issued=func.cast(func.now(), DATE))
    try:
        db.execute(new_order)
    except sqlalchemy.exc.IntegrityError:
        print("Error duplicate enteries")
        return apology("Can't enter duplicate enteries", 403)
    # Recover the autogenerated order code of the row just inserted.
    latest = order_essentials.select().with_only_columns(
        [func.max(order_essentials.c.code)]).where(
        order_essentials.c.serial == serial).limit(1)
    r_code = db.execute(latest).fetchone()[0]
    db.execute(extras_table.insert().values(r_code=r_code))
    return render_template("control/main.html",
                           message="Order assigned successfully")
def removeDevice():
    """Scrap a device: mark it obsolete, stamp its removal date, archive a
    scrap report, and delete its maintenance schedule.  Returns None."""
    code = request.form.get("code")
    cause = request.form.get("cause")
    report = {
        "code": code,
        "cause": cause,
        "date": func.cast(func.now(), DATE),
    }
    mark_obsolete = device_essentials.update().where(
        device_essentials.c.code == code).values(status="obselete")
    stamp_removal = device_extras.update().where(
        device_extras.c.d_code == code).values(
        remove_date=func.cast(func.now(), DATE))
    # Pull the identifying fields into the scrap report before updating.
    essentials_row = db.execute(
        device_essentials.select().with_only_columns(
            [device_essentials.c.serial, device_essentials.c.type]).where(
            device_essentials.c.code == code).limit(1)).fetchone()
    report["device_serial"] = essentials_row[0]
    report["device_type"] = essentials_row[1]
    extras_row = db.execute(
        device_extras.select().with_only_columns(
            [device_extras.c.name, device_extras.c.manufacturer]).where(
            device_extras.c.d_code == code).limit(1)).fetchone()
    report["device_name"] = extras_row[0]
    report["device_manufacturer"] = extras_row[1]
    db.execute(mark_obsolete)
    db.execute(stamp_removal)
    db.execute(report_scrap.insert().values(**report))
    db.execute(maintain_dates.delete().where(
        maintain_dates.c.device_code == code))
    return None
def submit_order():
    """Render an open order for a technician (GET) or record its completion (POST).

    GET expects an ``id`` query argument and shows the order only if it is
    assigned to the logged-in technician and not yet responded to.
    POST marks the order responded, stores the checked extras as 1-flags in
    the device-type PPM table, and pushes the next maintenance date 30 days out.
    """
    if request.method == "GET":
        order_id = request.args.get("id")
        rows = []
        # check if a wrong argument is passed
        if order_id is None:
            return apology("Invalid request", 400)
        try:
            order_id = int(order_id)
        except (TypeError, ValueError):
            # Narrowed from a bare `except:` — only conversion failures
            # should produce the 400, not unrelated errors.
            return apology("Invalid request", 400)
        tech_code = db.execute(users_tech.select().with_only_columns(
            [users_tech.c['r_code']]).where(
            users_tech.c['username'] == session.get("username"))).fetchone()[0]
        selOrder = order_essentials.select().where(
            order_essentials.c.tech_code == tech_code).where(
            order_essentials.c.date_responded == None).where(
            order_essentials.c.code == order_id).limit(1)
        order_ess = db.execute(selOrder).fetchone()
        # Bug fix: the None check used to come *after* order_ess was indexed,
        # so a missing order raised TypeError instead of returning 404.
        if order_ess is None:
            return apology("Order not found", 404)
        serial = order_ess[1]
        place = order_ess[2]
        device_type = order_ess[3]
        date_issued = order_ess[6].strftime('%d-%m-%Y')
        device_code = db.execute(device_essentials.select().with_only_columns(
            [device_essentials.c['code']]).where(
            device_essentials.c['serial'] == serial)).fetchone()[0]
        tech_name = db.execute(tech_essentials.select().with_only_columns(
            [tech_essentials.c['name']]).where(
            tech_essentials.c['code'] == order_ess[5])).fetchone()[0]
        extras_table = ppm_map[device_type]
        order_extra = extras_table.c.keys()
        # Skip the first column (r_code) and the last two; render the rest
        # as ("label with spaces", "column_name") pairs for the template.
        size = len(order_extra) - 1
        for index in range(1, size):
            rows.append([order_extra[index].replace("_", " "),
                         order_extra[index]])
        return render_template("order/submit_order.html",
                               serial=serial,
                               place=place,
                               device_type=device_type,
                               device_code=device_code,
                               date_issued=date_issued,
                               tech_name=tech_name,
                               order_id=order_id,
                               rows=rows)
    elif request.method == "POST":
        code = request.form.get("code")
        device_type = request.form.get("device_type")
        device_code = request.form.get("device_code")
        updateDictionary = {}
        ignored_keys = ["code", "notes", "device_code", "device_type"]
        # Every other submitted form key is a checked extra -> flag it as 1.
        for key in request.form.keys():
            if key not in ignored_keys:
                updateDictionary.update({key: func.cast(1, Integer)})
        up_command = order_essentials.update().where(
            order_essentials.c.code == code).values(
            date_responded=func.cast(func.now(), DATE))
        db.execute(up_command)
        extras_table = ppm_map[device_type]
        if len(updateDictionary) != 0:
            up_command = extras_table.update().where(
                extras_table.c.r_code == code).values(**updateDictionary)
            db.execute(up_command)
        # Next preventive maintenance is due 30 days from today.
        up_command = maintain_dates.update().where(
            maintain_dates.c.device_code == device_code).values(
            maint_date=datetime.date.today() + datetime.timedelta(days=30))
        db.execute(up_command)
        session["message"] = "Order submitted successfully"
        return redirect("/due_orders")
def update_entries(session, date, logger=None):
    """Create receiver coverage stats for Melissas ognrange.

    Aggregates the day's aircraft beacons per (short MGRS square, receiver)
    and upserts the result into ReceiverCoverage: existing rows are UPDATEd
    first, then still-missing rows are INSERTed from a SELECT.

    :param session: SQLAlchemy session to query and write through.
    :param date: the day to aggregate (also stored as the coverage date).
    :param logger: optional logger; defaults to ``app.logger``.
    :return: human-readable summary string "ReceiverCoverage: N inserted, M updated".
    """
    if logger is None:
        logger = app.logger
    logger.info("Compute receiver coverages.")
    (start, end) = date_to_timestamps(date)
    # Filter aircraft beacons: only the requested day, and only beacons
    # where position square, receiver and device are all known.
    sq = (session.query(
        AircraftBeacon.location_mgrs_short,
        AircraftBeacon.receiver_id,
        AircraftBeacon.signal_quality,
        AircraftBeacon.altitude,
        AircraftBeacon.device_id).filter(
            and_(between(AircraftBeacon.timestamp, start, end),
                 AircraftBeacon.location_mgrs_short != null(),
                 AircraftBeacon.receiver_id != null(),
                 AircraftBeacon.device_id != null())).subquery())
    # ... and group them by reduced MGRS, receiver and date,
    # computing signal/altitude extremes and beacon/device counts.
    query = (session.query(
        sq.c.location_mgrs_short,
        sq.c.receiver_id,
        func.cast(date, Date).label("date"),
        func.max(sq.c.signal_quality).label("max_signal_quality"),
        func.min(sq.c.altitude).label("min_altitude"),
        func.max(sq.c.altitude).label("max_altitude"),
        func.count(sq.c.altitude).label("aircraft_beacon_count"),
        func.count(func.distinct(sq.c.device_id)).label("device_count"),
    ).group_by(sq.c.location_mgrs_short, sq.c.receiver_id).subquery())
    # if a receiver coverage entry exist --> update it
    upd = (update(ReceiverCoverage).where(
        and_(
            ReceiverCoverage.location_mgrs_short == query.c.location_mgrs_short,
            ReceiverCoverage.receiver_id == query.c.receiver_id,
            ReceiverCoverage.date == date)).values({
                "max_signal_quality": query.c.max_signal_quality,
                "min_altitude": query.c.min_altitude,
                "max_altitude": query.c.max_altitude,
                "aircraft_beacon_count": query.c.aircraft_beacon_count,
                "device_count": query.c.device_count,
            }))
    result = session.execute(upd)
    update_counter = result.rowcount
    # Commit the updates before the insert pass so the NOT EXISTS check
    # below sees the updated rows.
    session.commit()
    logger.debug(
        "Updated receiver coverage entries: {}".format(update_counter))
    # if a receiver coverage entry doesnt exist --> insert it
    new_coverage_entries = session.query(query).filter(~exists().where(
        and_(
            ReceiverCoverage.location_mgrs_short == query.c.location_mgrs_short,
            ReceiverCoverage.receiver_id == query.c.receiver_id,
            ReceiverCoverage.date == date)))
    # Column order here must match the SELECT column order of
    # new_coverage_entries above.
    ins = insert(ReceiverCoverage).from_select(
        (
            ReceiverCoverage.location_mgrs_short,
            ReceiverCoverage.receiver_id,
            ReceiverCoverage.date,
            ReceiverCoverage.max_signal_quality,
            ReceiverCoverage.min_altitude,
            ReceiverCoverage.max_altitude,
            ReceiverCoverage.aircraft_beacon_count,
            ReceiverCoverage.device_count,
        ),
        new_coverage_entries,
    )
    result = session.execute(ins)
    insert_counter = result.rowcount
    session.commit()
    finish_message = "ReceiverCoverage: {} inserted, {} updated".format(
        insert_counter, update_counter)
    logger.debug(finish_message)
    return finish_message
"bigquery": aggregations_bigquery, } ####################### # Conversions are a callable on a column expression that yields a # nonaggregated column expression # for instance, quarter(sales_date) => func.date_trunc('quarter', MyTable.sales_date) ####################### conversions = { "month": lambda fld: func.date_trunc("month", fld), "week": lambda fld: func.date_trunc("week", fld), "year": lambda fld: func.date_trunc("year", fld), "quarter": lambda fld: func.date_trunc("quarter", fld), "string": lambda fld: func.cast(fld, String()), "int": lambda fld: func.cast(fld, Integer()), } conversions_redshift = { # age doesn't work on all databases "age": lambda fld: postgres_age(fld), } conversions_bigquery = { "month": lambda fld: func.date_trunc(fld, text("month")), "week": lambda fld: func.date_trunc(fld, text("week")), "year": lambda fld: func.date_trunc(fld, text("year")), "quarter": lambda fld: func.date_trunc(fld, text("quarter")), "age": lambda fld: bq_age(fld),
def query_work_day_stats(
    company_id,
    start_date=None,
    end_date=None,
    first=None,
    after=None,
    tzname="Europe/Paris",
):
    """Aggregate per-user, per-day work statistics for a company.

    Builds a three-stage SQL aggregation:
      1. explode each activity into one row per local calendar day it spans
         (via generate_series in the tzname timezone), joined with its
         mission and any expenditures;
      2. group by (user, day, mission), clamping start/end times to the
         local day boundaries and summing per-activity-type durations;
      3. group by (user, day), de-duplicating the activity x expenditure
         join fan-out through the n_exp_dups / n_act_dups divisors.

    :param company_id: company whose missions/activities are aggregated.
    :param start_date: optional inclusive lower bound (local date).
    :param end_date: optional inclusive upper bound (local date).
    :param first: optional page size; enables cursor pagination.
    :param after: optional "datetime+user_id" cursor string; results strictly
        before it are returned.
    :param tzname: IANA timezone used to define day boundaries.
    :return: tuple (results, has_next_page).
    """
    tz = gettz(tzname)
    if after:
        # Cursor decodes to (max day start, user id); clamp end_date to it.
        max_time, user_id_ = parse_datetime_plus_id_cursor(after)
        max_date = max_time.date()
        end_date = min(max_date, end_date) if end_date else max_date
    # Stage 1: activities x missions (x expenditures, outer) exploded into
    # one row per local day the activity overlaps.
    query = (Activity.query.join(Mission).join(
        Expenditure,
        and_(
            Activity.user_id == Expenditure.user_id,
            Activity.mission_id == Expenditure.mission_id,
        ),
        isouter=True,
    ).with_entities(
        Activity.id,
        Activity.user_id,
        Activity.mission_id,
        Mission.name,
        Activity.start_time,
        Activity.end_time,
        Activity.type,
        Expenditure.id.label("expenditure_id"),
        Expenditure.type.label("expenditure_type"),
        # One row per local day between the activity's (local) start day and
        # its end (or now, for still-running activities).
        func.generate_series(
            func.date_trunc(
                "day",
                func.timezone(
                    tzname,
                    func.timezone("UTC", Activity.start_time),
                ),
            ),
            func.timezone(
                tzname,
                func.coalesce(
                    func.timezone("UTC", Activity.end_time),
                    func.now(),
                ),
            ),
            "1 day",
        ).label("day"),
    ).filter(
        Mission.company_id == company_id,
        ~Activity.is_dismissed,
        # Zero-length activities carry no duration; drop them.
        Activity.start_time != Activity.end_time,
    ))
    query = _apply_time_range_filters(
        query,
        to_datetime(start_date, tz_for_date=tz),
        to_datetime(end_date,
                    tz_for_date=tz,
                    convert_dates_to_end_of_day_times=True),
    )
    has_next_page = False
    if first:
        # Over-fetch activities (5x the requested page, min 200) since
        # several activity rows collapse into one work-day row later.
        activity_first = max(first * 5, 200)
        query = query.order_by(desc("day"), desc(
            Activity.user_id)).limit(activity_first + 1)
        has_next_page = query.count() > activity_first
    query = query.subquery()
    # Stage 2: group by (user, day, mission); clamp times to the local day
    # window and sum per-type durations in epoch seconds.
    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day, query.c.mission_id,
        query.c.name).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.timezone("UTC",
                          func.timezone(tzname,
                                        query.c.day)).label("utc_day_start"),
            query.c.mission_id.label("mission_id"),
            query.c.name.label("mission_name"),
            # Earliest activity start, clamped to the local day start (UTC).
            func.min(
                func.greatest(
                    query.c.start_time,
                    func.timezone("UTC", func.timezone(tzname, query.c.day)),
                )).label("start_time"),
            # Latest activity end (or now), clamped to the local day end.
            func.max(
                func.least(
                    func.timezone(
                        "UTC",
                        func.timezone(
                            tzname, query.c.day + func.cast("1 day", Interval)),
                    ),
                    func.coalesce(query.c.end_time, func.now()),
                )).label("end_time"),
            # Running = an open-ended activity on today's (local) date.
            func.bool_or(
                and_(
                    query.c.end_time.is_(None),
                    query.c.day == func.current_date(),
                )).label("is_running"),
            # Per-activity-type duration: clamped end minus clamped start,
            # in epoch seconds, summed only over rows of that type.
            *[
                func.sum(
                    case(
                        [(
                            query.c.type == a_type.value,
                            extract(
                                "epoch",
                                func.least(
                                    func.timezone(
                                        "UTC",
                                        func.timezone(
                                            tzname,
                                            query.c.day +
                                            func.cast("1 day", Interval),
                                        ),
                                    ),
                                    func.coalesce(query.c.end_time,
                                                  func.now()),
                                ) - func.greatest(
                                    query.c.start_time,
                                    func.timezone(
                                        "UTC",
                                        func.timezone(tzname, query.c.day),
                                    ),
                                ),
                            ),
                        )],
                        else_=0,
                    )).label(f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
            # Join fan-out correction factors: each activity row is repeated
            # once per expenditure and vice versa.
            func.greatest(func.count(distinct(query.c.expenditure_id)),
                          1).label("n_exp_dups"),
            func.count(distinct(query.c.id)).label("n_act_dups"),
            *[
                func.sum(
                    case(
                        [(query.c.expenditure_type == e_type.value, 1)],
                        else_=0,
                    )).label(f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).subquery())
    # Stage 3: collapse missions into one row per (user, day), dividing by
    # the duplication factors computed above.
    query = (db.session.query(query).group_by(
        query.c.user_id, query.c.day).with_entities(
            query.c.user_id.label("user_id"),
            query.c.day,
            func.array_agg(distinct(
                query.c.mission_name)).label("mission_names"),
            func.min(query.c.start_time).label("start_time"),
            func.max(query.c.end_time).label("end_time"),
            func.bool_or(query.c.is_running).label("is_running"),
            *[
                func.sum(
                    getattr(query.c, f"{a_type.value}_duration") /
                    query.c.n_exp_dups).cast(Integer).label(
                        f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
            *[
                func.sum(
                    getattr(query.c, f"n_{e_type.value}_expenditures") /
                    query.c.n_act_dups).cast(Integer).label(
                        f"n_{e_type.value}_expenditures")
                for e_type in ExpenditureType
            ],
        ).order_by(desc("day"), desc("user_id")).subquery())
    # Final projection: add service_duration (wall-clock span) and
    # total_work_duration (sum of all per-type durations).
    query = db.session.query(query).with_entities(
        *query.c,
        extract("epoch",
                query.c.end_time - query.c.start_time).label("service_duration"),
        reduce(
            lambda a, b: a + b,
            [
                getattr(query.c, f"{a_type.value}_duration")
                for a_type in ActivityType
            ],
        ).label("total_work_duration"),
    )
    results = query.all()
    if after:
        # Apply the cursor: keep only rows strictly before (max_date, user_id_).
        results = [
            r for r in results if r.day.date() < max_date
            or (r.day.date() == max_date and r.user_id < user_id_)
        ]
    if first:
        if has_next_page:
            # The last work day may be incomplete because we didn't fetch all the activities => remove it
            results = results[:-1]
        if len(results) > first:
            results = results[:first]
            has_next_page = True
    return results, has_next_page
def GetResponseRate(self, request, context):
    """Return a coarse response-rate bucket and percentile response times for a user.

    Computes, over the user's received host requests: the count ``n``, the
    fraction responded to, and the 33rd/66th percentile response times
    (unanswered requests are counted as a 1000-day response).  The result is
    bucketed into insufficient_data / low / some / most / almost_all, with
    response times rounded to whole minutes.
    """
    with session_scope() as session:
        # this subquery gets the time that the request was sent
        t = (select(Message.conversation_id, Message.time).where(
            Message.message_type == MessageType.chat_created).subquery())
        # this subquery gets the time that the user responded to the request
        # (their first message in the conversation)
        s = (select(Message.conversation_id,
                    func.min(Message.time).label("time")).where(
                        Message.author_id == request.user_id).group_by(
                            Message.conversation_id).subquery())
        res = session.execute(
            select(
                User.id,
                # number of requests received
                func.count().label("n"),
                # percentage of requests responded to
                # (greatest(..., 1.0) guards against division by zero)
                (func.count(s.c.time) /
                 func.cast(func.greatest(func.count(t.c.time), 1.0), Float)
                 ).label("response_rate"),
                # the 33rd percentile response time
                percentile_disc(0.33).within_group(
                    func.coalesce(s.c.time - t.c.time, timedelta(days=1000))
                ).label("response_time_p33"),
                # the 66th percentile response time
                percentile_disc(0.66).within_group(
                    func.coalesce(s.c.time - t.c.time, timedelta(days=1000))
                ).label("response_time_p66"),
            ).where_users_visible(context).where(
                User.id == request.user_id).outerjoin(
                    HostRequest,
                    HostRequest.host_user_id == User.id).outerjoin(
                        t,
                        t.c.conversation_id == HostRequest.conversation_id
                    ).outerjoin(
                        s,
                        s.c.conversation_id == HostRequest.conversation_id
                    ).group_by(User.id)).one_or_none()
        if not res:
            context.abort(grpc.StatusCode.NOT_FOUND, errors.USER_NOT_FOUND)
        _, n, response_rate, response_time_p33, response_time_p66 = res
        # Fewer than 3 requests: not enough data to publish a rate.
        if n < 3:
            return requests_pb2.GetResponseRateRes(
                insufficient_data=requests_pb2.
                ResponseRateInsufficientData(), )
        if response_rate <= 0.33:
            return requests_pb2.GetResponseRateRes(
                low=requests_pb2.ResponseRateLow(), )
        # Coarsen percentile times to whole minutes before exposing them.
        response_time_p33_coarsened = Duration_from_timedelta(
            timedelta(
                seconds=round(response_time_p33.total_seconds() / 60) * 60))
        if response_rate <= 0.66:
            return requests_pb2.GetResponseRateRes(
                some=requests_pb2.ResponseRateSome(
                    response_time_p33=response_time_p33_coarsened), )
        response_time_p66_coarsened = Duration_from_timedelta(
            timedelta(
                seconds=round(response_time_p66.total_seconds() / 60) * 60))
        if response_rate <= 0.90:
            return requests_pb2.GetResponseRateRes(
                most=requests_pb2.ResponseRateMost(
                    response_time_p33=response_time_p33_coarsened,
                    response_time_p66=response_time_p66_coarsened), )
        else:
            return requests_pb2.GetResponseRateRes(
                almost_all=requests_pb2.ResponseRateAlmostAll(
                    response_time_p33=response_time_p33_coarsened,
                    response_time_p66=response_time_p66_coarsened), )