def startstop():
    """Return JSON temperature statistics (max/min/avg of tobs) for a fixed
    vacation window (2017-01-01 .. 2017-01-07).

    Four result groups are appended in order: the single aggregate over the
    bounded range, a per-day breakdown of that range, the single aggregate
    with no upper bound, and a per-day breakdown with no upper bound.

    NOTE(review): the dates are hard-coded; presumably this was meant to be a
    parameterized /<start>/<end> route -- confirm against the app's routing.
    """
    startdt = "2017-01-01"
    endt = "2017-01-07"

    def _stats_query(end_date=None, per_day=False):
        # Build the aggregate query: (date, MAX, MIN, AVG) of tobs,
        # optionally bounded above and/or grouped per day.
        q = session.query(
            Measurement.date,
            func.Max(Measurement.tobs),
            func.Min(Measurement.tobs),
            func.avg(Measurement.tobs),
        ).filter(Measurement.date >= startdt)
        if end_date is not None:
            q = q.filter(Measurement.date <= end_date)
        if per_day:
            q = q.group_by(Measurement.date)
        return q

    def _rows_to_dicts(rows, label_fn):
        # Map each result row to the response dict shape used by this route.
        return [
            {
                "Daterange": label_fn(row),
                "TMax": row[1],
                "TMin": row[2],
                "TAvg": row[3],
            }
            for row in rows
        ]

    all_vtobs = []
    # Single aggregate over the bounded vacation range.
    all_vtobs += _rows_to_dicts(
        _stats_query(end_date=endt),
        lambda row: f" for the entire vacation Date range {startdt} and {endt} Max, Avg and Min Temps",
    )
    # Per-day breakdown over the bounded range.
    all_vtobs += _rows_to_dicts(
        _stats_query(end_date=endt, per_day=True),
        lambda row: f" for the Date range {startdt} and {endt} Max, Avg and Min Temps for the day {row.date} of vacation",
    )
    # Single aggregate with no upper bound.
    all_vtobs += _rows_to_dicts(
        _stats_query(),
        lambda row: f" for the Entire Date range {startdt} and No End Date Max, Avg and Min Temps",
    )
    # Per-day breakdown with no upper bound.
    all_vtobs += _rows_to_dicts(
        _stats_query(per_day=True),
        lambda row: f" for the Date range {startdt} and No End Date Max, Avg and Min Temps for the day {row.date}",
    )
    # Original code had a bare `return` separated from `jsonify(all_vtobs)`,
    # which returned None; return the JSON payload explicitly.
    return jsonify(all_vtobs)
def _query(cls, invoice):
    """Build a query for MAX(SequenceNumber.index) over invoice-type tasks
    of this sequence kind.

    :param invoice: unused in this implementation; presumably kept for
        signature parity with sibling ``_query`` classmethods -- TODO confirm.
    :returns: an unexecuted SQLAlchemy query yielding the maximum index.
    """
    # Local import, presumably to avoid a circular import with the task models.
    from autonomie.models.task import Task
    q = DBSESSION().query(func.Max(SequenceNumber.index))
    # NOTE(review): Task is filtered here but no explicit join between Task
    # and SequenceNumber is visible in this block -- verify that the join is
    # implied by a relationship elsewhere, otherwise this could produce a
    # cartesian product.
    q = q.filter(Task.type_.in_(('invoice', 'cancelinvoice')))
    # Restrict to rows belonging to this sequence (cls.db_key names it).
    q = q.filter_by(sequence=cls.db_key)
    return q
def tobs():
    """Return JSON [{"date": ..., "tobs": ...}, ...] for station USC00519281,
    covering the year preceding the most recent date in the Measurement table.
    """
    from datetime import date, timedelta

    # Create our session (link) from Python to the DB.
    session = Session(engine)
    try:
        # Most recent date stored in the table (ISO 'YYYY-MM-DD' string).
        last_date = session.query(func.Max(Measurement.date)).first()[0]

        # One calendar year before the last date, computed with real date
        # arithmetic. (The previous approach decremented the fourth digit of
        # the string and, per its own comment, broke on the first year of a
        # decade; this handles every year.)
        last = date.fromisoformat(last_date)
        try:
            one_year_from_last = last.replace(year=last.year - 1).isoformat()
        except ValueError:
            # Feb 29 has no prior-year counterpart; step back 366 days instead.
            one_year_from_last = (last - timedelta(days=366)).isoformat()

        # All observations from the most active station during that year.
        # Materialize with .all() so iteration happens before the session is
        # closed (the original iterated a lazy query after close()).
        results = session.query(Measurement.date, Measurement.tobs)\
            .filter(Measurement.station == 'USC00519281')\
            .filter(Measurement.date > one_year_from_last)\
            .order_by(Measurement.date.asc())\
            .all()
    finally:
        session.close()

    return jsonify([{"date": d, "tobs": t} for d, t in results])
def spider_dashboard(project_id):
    """Render the spider dashboard for a project: every spider instance with
    its most recent run time and its average run duration.

    :param project_id: primary key of the Project (404 when absent).
    """
    Project.query.get_or_404(project_id)
    session['project_id'] = project_id

    # Most recent JobExecution.date_created per spider. Outer joins keep
    # spiders that have never run (their value is None).
    last_runtime_query = db.session.query(
        SpiderInstance.spider_name,
        func.Max(JobExecution.date_created).label('last_runtime'),
    ).outerjoin(JobInstance, JobInstance.spider_name == SpiderInstance.spider_name)\
        .outerjoin(JobExecution).filter(SpiderInstance.project_id == project_id)\
        .group_by(SpiderInstance.id)
    last_runtime = {name: value for name, value in last_runtime_query}

    # Average run duration, as a float number of days (julianday difference),
    # restricted to executions with both a start and an end time.
    # (`.isnot(None)` is the SQLAlchemy idiom for IS NOT NULL; `!= None`
    # works but trips linters.)
    avg_runtime_query = db.session.query(
        SpiderInstance.spider_name,
        func.Avg(func.julianday(JobExecution.end_time)
                 - func.julianday(JobExecution.start_time)).label('avg_runtime'),
    ).outerjoin(JobInstance, JobInstance.spider_name == SpiderInstance.spider_name)\
        .outerjoin(JobExecution).filter(SpiderInstance.project_id == project_id)\
        .filter(JobExecution.end_time.isnot(None),
                JobExecution.start_time.isnot(None))\
        .group_by(SpiderInstance.id)
    avg_runtime = {name: value for name, value in avg_runtime_query}

    spiders = []
    for spider in SpiderInstance.query.filter(
            SpiderInstance.project_id == project_id).all():
        spider.last_runtime = last_runtime.get(spider.spider_name)
        avg_days = avg_runtime.get(spider.spider_name)
        if avg_days is not None:
            # timedelta accepts the fractional day count directly.
            spider.avg_runtime = str(datetime.timedelta(days=avg_days))
        spiders.append(spider)
    return render_template("spider_dashboard.html", spiders=spiders)
def processa_resumo(engine, origem, destino, chaves):
    """Process incremental summary rows from table *origem* into *destino*.

    Reads every *origem* row created on/after the latest ``create_date``
    already present in *destino*, and applies each one via
    ``execute_movimento`` (semantics of that helper are defined elsewhere --
    presumably insert/update/delete driven by ``tipoMovimento``; confirm).

    :param engine: SQLAlchemy engine; everything runs in one transaction.
    :param origem: source Table object (must have create_date, tipoMovimento).
    :param destino: destination Table object (must have create_date).
    :param chaves: column names identifying a row in *destino*.
    :returns: number of source rows processed.
    """
    with engine.begin() as conn:
        # Resume point: the newest create_date already loaded into destino.
        s = select([func.Max(destino.c.create_date)])
        c = conn.execute(s).fetchone()
        start_date = 0
        if c and c[0] is not None:
            start_date = c[0]
        print('Start date %s' % start_date)
        # NOTE(review): >= (not >) means rows at exactly start_date are
        # re-processed on every run -- presumably execute_movimento is
        # idempotent for them; confirm.
        s = select([origem]
                   ).where(origem.c.create_date >= start_date)
        cont = 0
        for row in conn.execute(s):
            cont += 1
            # Equality conditions locating this row in destino by its keys.
            chaves_valores = [destino.c[chave] == row[chave]
                              for chave in chaves]
            tipoMovimento = row[origem.c.tipoMovimento]
            result_proxy = execute_movimento(conn, destino, chaves_valores,
                                             tipoMovimento,
                                             destino.c.keys(), row)
        return cont
def data_ultimo_arquivo_baixado(engine):
    """Return the most recent ``filename_date`` recorded in ArquivoBaixado.

    :param engine: SQLAlchemy engine used to run the query.
    :returns: the maximum filename_date, or None when the table is empty
        (SQL MAX over zero rows yields NULL).
    """
    query = select([func.Max(ArquivoBaixado.c.filename_date)])
    with engine.begin() as connection:
        first_row = connection.execute(query).fetchone()
    return first_row[0]
app = Flask(__name__) #I was getting a thread error, found this connect_arg fix, not sure what causes the error need to research later. engine = create_engine("sqlite:///Resources/hawaii.sqlite", connect_args={'check_same_thread': False}) Base = automap_base() Base.prepare(engine, reflect=True) Station = Base.classes.station Measurement = Base.classes.measurement session = Session(engine) #Get most recent date used later recent_date = session.query(func.Max(Measurement.date)).first()[0] print(recent_date) start_date = dt.datetime.strptime(recent_date, '%Y-%m-%d').date() - dt.timedelta(days=365) @app.route("/") def home(): return (f"App Routes: <br/>" f"/api/v1.0/precipitation<br/>" f"/api/v1.0/stations<br/>" f"/api/v1.0/tobs<br/>" f"/api/v1.0/<start> EX Date format: 2017-01-01<br/>" f"/api/v1.0/<start>/<end> EX Date format: 2017-01-01<br/>")