def fetch_blog_posts(
    page: int = 1,
    page_size: int = 20,
    search: str = "",
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    tags: List[str] = Query(None),
    session: Session = Depends(deps.get_database_session),
):
    """Return one page of blog posts, optionally filtered by date range and tags.

    Args:
        page: 1-based page number.
        page_size: number of posts per page.
        search: search phrase (not implemented yet — see TODO below).
        start_date: keep posts created OR updated on/after this date.
        end_date: keep posts created OR updated on/before this date.
        tags: tag identifiers; each tag that exists further narrows the result
            to posts containing it.
        session: database session (dependency-injected).

    Returns:
        List of BlogPost rows for the requested page.
    """
    offset = (page - 1) * page_size
    limit = page_size

    posts_query = session.query(models.BlogPost)

    if start_date:
        posts_query = posts_query.filter(
            or_(
                func.DATE(models.BlogPost.created_at) >= start_date,
                func.DATE(models.BlogPost.updated_at) >= start_date,
            ))
    if end_date:
        # BUG FIX: the upper bound previously compared against start_date,
        # so end_date was silently ignored (or filtered incorrectly).
        posts_query = posts_query.filter(
            or_(
                func.DATE(models.BlogPost.created_at) <= end_date,
                func.DATE(models.BlogPost.updated_at) <= end_date,
            ))

    # TODO implement search

    if tags:
        for tag in tags:
            blog_tag: models.BlogTag = session.query(models.BlogTag).get(tag)
            if blog_tag:
                # Unknown tags are skipped rather than failing the request.
                posts_query = posts_query.filter(
                    models.BlogPost.tags.contains(blog_tag))

    return posts_query.offset(offset).limit(limit).all()
def date_query(keyword):
    """Return (event_id, title, img_root) rows for launched events that have
    at least one slot dated within keyword['from_date']..keyword['to_date'],
    grouped by event and ordered by title."""
    query = db.session.query(Event.event_id, Event.title, Event.img_root)
    query = query.join(EventSlot, Event.event_id == EventSlot.event_id)
    query = query.filter(
        Event.is_launched,
        func.DATE(EventSlot.event_date) >= keyword['from_date'],
        func.DATE(EventSlot.event_date) <= keyword['to_date'],
    )
    return query.group_by(Event.event_id).order_by(Event.title).all()
def index():
    """Dashboard view: assembles billing/attendance totalizers and renders index.html.

    NOTE(review): `confirmado`, `naocompareceu`, `previsto` and `atendimento`
    are handed to the template as *un-executed* Query objects (no .all());
    only `total` and `recebimentos` are materialized here.
    """
    try:
        #data_atual = date.today()
        # Total service value of confirmed appointments.
        confirmado = Agenda.query.with_entities(
            func.sum(Agenda.valor_servico).label('valor')).filter(
                Agenda.confirmado == 1).join(Servico.agenda)
        # Value of past appointments that were neither confirmed nor marked
        # as absences (no-shows). Note this sums Servico.valor, whereas
        # `confirmado` sums Agenda.valor_servico — verify intentional.
        naocompareceu = Agenda.query.with_entities(
            func.sum(Servico.valor).label('valor')).filter(
                Agenda.confirmado == 0, Agenda.ausencia == 0,
                func.DATE(Agenda.data_agenda_inicio) < date.today()).join(
                    Servico.agenda)
        # Forecast value: every appointment not marked as an absence.
        previsto = Agenda.query.with_entities(
            func.sum(Servico.valor).label('valor')).filter(
                Agenda.ausencia == 0).join(Servico.agenda)
        # Count of today's appointments, excluding absences.
        atendimento = Agenda.query.with_entities(
            func.count(Agenda.id).label('totalagenda')).filter(
                func.DATE(Agenda.data_agenda_inicio) == date.today(),
                Agenda.ausencia == 0)
        # Grand total of confirmed, non-absent service values; used below as
        # the denominator for the per-payment-method percentages.
        total = Agenda.query.with_entities(
            func.sum(Agenda.valor_servico).label('valor')
        ).join(
            FormaPagamento
        ).filter(
            #func.DATE(Agenda.data_agenda_inicio) == date.today(),
            Agenda.confirmado == True,
            Agenda.ausencia == 0).all()
        # Build the totalizers of confirmed receipts grouped by payment
        # method, each with its percentage share of the grand total.
        recebimentos = Agenda.query.with_entities(
            FormaPagamento.descricao.label('descricao'),
            func.sum(Agenda.valor_servico).label('valorservico'),
            (func.sum(Agenda.valor_servico) * 100 / total[0][0]).label('perc')
        ).join(
            FormaPagamento
        ).filter(
            #func.DATE(Agenda.data_agenda_inicio) == date.today(),
            Agenda.confirmado == True,
            Agenda.ausencia == 0).group_by(FormaPagamento.descricao).all()
        return render_template('index.html',
                               confirmado=confirmado,
                               naocompareceu=naocompareceu,
                               previsto=previsto,
                               atendimento=atendimento,
                               recebimentos=recebimentos,
                               total=total[0][0])
    except Exception as ex:
        # Any failure (including total[0][0] being None when no rows match)
        # degrades to a flash message instead of a 500.
        flash('Não foi possível carregar os dados {}'.format(ex), 'error')
        return render_template('index.html', form='')
def event_times_query(eid, date):
    """Return (slot_id, time, vacancy) for every active slot of event `eid`
    that falls on the given `date`, in chronological order."""
    columns = (
        EventSlot.slot_id,
        func.TIME(EventSlot.event_date).label('time'),
        EventSlot.vacancy.label('vacancy'),
    )
    query = db.session.query(*columns).filter(
        EventSlot.event_id == eid,
        EventSlot.is_active,
        func.DATE(EventSlot.event_date) == date,
    )
    return query.order_by(EventSlot.event_date).all()
def created_at_date(cls, date):
    """Return every row of `cls` whose created_at date equals `date`.

    Raises:
        AttributeError: if the model defines no `created_at` column.
    """
    if not hasattr(cls, "created_at"):
        raise AttributeError('No "created_at" for created_recently')
    column = getattr(cls, "created_at")
    return cls.query.filter(func.DATE(column) == date).all()
def sort_notes(phrase=None):
    """Group the current user's notes into {year: {month: {day: [Note, ...]}}}.

    Args:
        phrase: optional case-insensitive filter; only notes whose text or
            name contains it are considered.

    Returns:
        Nested dict keyed by the 'YYYY', 'MM', 'DD' parts of each note's date,
        with the matching Note rows (restricted to this user's notes) as leaves.
    """
    user_notes = Note.query.filter_by(user_id=current_user.id).all()
    if phrase:
        needle = phrase.lower()
        user_notes = [
            note for note in user_notes
            if needle in note.text.lower() or needle in note.note_name.lower()
        ]

    # Most-recent-first, matching the original reversed iteration order.
    user_notes = list(reversed(user_notes))
    user_notes_ids = [note.id for note in user_notes]

    years = {}
    for note in user_notes:
        year, month, day = str(note.date).split(' ')[0].split('-')
        # BUG FIX: the previous three-pass version attached a day bucket to
        # *every* year containing the same month number (it never compared
        # the year), creating spurious — often empty — entries. A date now
        # only populates its own year/month, in a single pass.
        months = years.setdefault(year, {})
        days = months.setdefault(month, {})
        days[day] = Note.query.filter(
            func.DATE(Note.date) == f'{year}-{month}-{day}').filter(
                Note.id.in_(user_notes_ids)).all()

    return years
def news(): raterlist = db.session.execute( '''select rater.name, count(rating) as ratings from rater join rating on rater.user_id=rating.user_id group by rater.name''').fetchall() # restaurants restaurants = db.session().query( Restaurant ,Rating , Location).\ filter( func.extract('year',Rating.date) == '2015').\ filter( Location.business_id == Restaurant.business_id)\ .limit(5).all() restaurant, _, location = zip(*restaurants) # raters raters = db.session().query(Rater, func.min( Rating.mood), Location, Restaurant).filter( func.upper(Rater.name).like( 'Peter'.upper()) # since 0 is Staff in our description ).filter(Rating.business_id == Location.business_id).filter( Restaurant.business_id == Rating.business_id).group_by( Rater.user_id, Rating.mood, Location.location_id, Restaurant.business_id, Rating.date).order_by(asc(Rating.mood), func.DATE(Rating.date)).limit(20).all() # rest_spec rest_spec = raters[len(raters) - 1][3] other_critics = db.session.query( Restaurant, Rating.date, Rating.mood).filter( Restaurant.business_id == rest_spec.business_id).group_by( Restaurant.business_id, Rating.id, Rating.date).order_by(Rating.date).limit(10).all() Best_cate = db.session.execute(''' select rating,food_cate.cate_type from rating join ( select unnest(restaurant.food_type) as cate_type, restaurant.business_id as b_id from restaurant ) as food_cate on rating.business_id = food_cate.b_id where food_cate.cate_type='{}' group by ( food_cate.cate_type , rating.mood , rating.* ) having ( rating.mood >= avg(rating.mood) ) order by rating.mood desc '''.format("Active Life")).fetchall() os.system("clear") return render_template('news.html', restaurant=restaurant[0], location=location[0], johns=raters[0], others=other_critics)
def news_dates():
    """Return a JSON list of distinct publication dates, newest first,
    each formatted as {'date': 'YYYY/MM/DD'}."""
    print("here")
    date_rows = db.session.query(
        cast(sentiment_results.publishedAt, Date)).distinct().order_by(
            func.DATE(sentiment_results.publishedAt)).all()
    # Rows come back ascending; iterate them backwards for newest-first.
    news_data = [
        {'date': row[0].strftime('%Y/%m/%d')} for row in reversed(date_rows)
    ]
    return jsonify(news_data)
def get_favorite_files(username):
    """
    get all files a user has favorited

    :param username: the username of the user for whom to get the favorites
    :return: List of files as dictionaries
    """
    rows = db.session.query(
        Favorite.file_id, Favorite.user_username, File.name, File.type,
        File.file_hash,
        func.DATE(File.upload_date).label("upload_date"), File.display_path,
        File.uploader_username, File.subject_id).filter(
            Favorite.user_username == username).join(
                File, Favorite.file_id == File.file_id).all()

    files_to_show = []
    for row in rows:
        entry = dict(zip(row.keys(), row))
        # Skip favorites whose backing file is gone from the file store.
        if not file_exists(entry["file_hash"]):
            continue
        entry['size'] = get_file_size(entry['file_hash'])
        entry['downloadpath'] = "{}/{}".format(app.config['FILESTORE_PATH'],
                                               entry['file_hash'])
        files_to_show.append(entry)
    return files_to_show
def data_loader(*args):
    """Load call and event records for any dates not yet fully loaded.

    Finds pending dates, stamps them to claim them, pulls the matching rows
    from the external database, inserts the ones missing locally, and marks
    each date's tables as loaded. Returns a short status string.
    """
    # Get dates that aren't fully loaded
    dates_query = TablesLoadedModel.query.filter(
        or_(
            TablesLoadedModel.calls_loaded.is_(False),
            TablesLoadedModel.events_loaded.is_(False)
        )
    )

    # Filter to fresh dates or dates with grace time to prevent
    # multiple loads for the same date
    dates_query = dates_query.filter(
        or_(
            TablesLoadedModel.last_updated.is_(None),
            TablesLoadedModel.last_updated + timedelta(minutes=2)
            < func.DATETIME(datetime.utcnow())
        )
    )

    # Minimize stressing the system by preventing massive queries
    dates_to_load = dates_query.limit(
        current_app.config.get('MAX_INTERVAL', 3)
    ).all()

    # Stamp the claimed dates right away so a concurrent run is excluded by
    # the grace-time filter above.
    for tl_model in dates_to_load:
        tl_model.update(last_updated=datetime.utcnow())

    TablesLoadedModel.session.commit()

    if not len(dates_to_load) > 0:
        logger.info("No tables to load.")
        return "Success: No tasks."

    logger.info(dumps({
        "Message": "Loading data.",
        "Load Interval": ", ".join([str(tl_model.loaded_date)
                                    for tl_model in dates_to_load])
    }, indent=2, default=str))

    # Check if events and calls are needed for that date
    calls_interval = [
        tl_model.loaded_date for tl_model in dates_to_load
        if tl_model.calls_loaded == False
    ]
    call_events_interval = [
        tl_model.loaded_date for tl_model in dates_to_load
        if tl_model.events_loaded == False
    ]

    ext_uri = current_app.config.get('EXTERNAL_DATABASE_URI')
    if not ext_uri:
        logger.error("Error: External database connection not set.\n"
                     "Add 'EXTERNAL_DATABASE_URI' to your config with\n"
                     "the address to your database.")
        return "Error: No external connection available."

    ext_session = get_external_session(ext_uri)

    # Map each local model to the dates it still needs loaded.
    load_info = {
        CallTableModel: calls_interval,
        EventTableModel: call_events_interval
    }

    try:
        for table, loading_interval in load_info.items():
            # Get the data from the source database
            results = ext_session.query(table).filter(
                func.DATE(table.start_time).in_(loading_interval)
            )

            # Slice the data up by date to keep track of dates loaded
            grouped_data = {}
            for r in results.all():
                record = r.__dict__
                record_date = record['start_time'].date()
                dates_records = grouped_data.get(record_date, [])
                dates_records.append(record)
                grouped_data[record_date] = dates_records

            matching_key = get_pk(table)

            # Add the records from the external database to the local
            # database. Add record by record grouped by date. Check if
            # the record exists before adding.
            for date, gr in grouped_data.items():
                for rec in gr:
                    primary_key = rec.get(matching_key)
                    if not primary_key:
                        logger.error("Could not identify primary key for foreign record.\n"
                                     "{dump}".format(dump=dumps(gr, indent=2, default=str)))
                        continue

                    record = table.find(primary_key)
                    if not record:
                        # Strip SQLAlchemy's bookkeeping attribute before
                        # constructing the local row from the raw dict.
                        table.create(
                            **{
                                entry: rec[entry]
                                for entry in rec.keys()
                                if entry != '_sa_instance_state'
                            }
                        )
                    else:
                        logger.warning("Record Exists: {rec}".format(rec=record))

                # Flag this date's table as loaded and persist immediately.
                tl_model = TablesLoadedModel.find(date)
                if table.__tablename__ == "c_call":
                    tl_model.update(calls_loaded=True)
                if table.__tablename__ == "c_event":
                    tl_model.update(events_loaded=True)
                TablesLoadedModel.session.commit()

            table.session.commit()
    except Exception as err:
        logger.error("Error: Major failure loading data.")
        return dumps(err, indent=4, default=str)
    else:
        logger.info("Success: Loaded all data for task request.")
        return "Success: Tables loaded."
    finally:
        # Always close the connection to the external database
        ext_session.close()
        logger.info("Closed external data connection.")
def event_dates_query(eid):
    """Return (date, vacancy) rows for every active slot of event `eid`,
    in chronological order."""
    columns = (
        func.DATE(EventSlot.event_date).label('date'),
        EventSlot.vacancy.label('vacancy'),
    )
    query = db.session.query(*columns).filter(
        EventSlot.event_id == eid,
        EventSlot.is_active,
    )
    return query.order_by(EventSlot.event_date).all()
def pedido_dia(self, hoy):
    """Return a query (not executed) for enabled entities whose `fecha`
    falls on the given day.

    :param hoy: day as a 'dd/mm/YYYY' string.
    """
    target_day = datetime.strptime(hoy, '%d/%m/%Y')
    query = self.db.query(self.entity)
    query = query.filter(func.DATE(self.entity.fecha) == target_day)
    return query.filter(self.entity.enabled == True)
def account_generate(firm_id, search_date, send=True):
    """Generate (or regenerate) the monthly payment account for a firm.

    Builds commission totals from the firm's completed reports for the month
    around `search_date`, adds GPRS charges, renders the account PDF and,
    unless `send` is False, e-mails it to the firm's account addresses.

    Returns True on success, False when the firm is missing, there are no
    reports, or the PDF could not be generated.
    """
    log = logging.getLogger('task')
    firm = Firm.query.get(firm_id)
    if not firm:
        log.error('Not found firm with id %s' % firm_id)
        return False

    # Terms owned by the firm plus terms it leases; only commission-bearing
    # terms participate in the account.
    firm_term = FirmTerm.get_list_by_firm_id(firm.id, False)
    leased_term = FirmTerm.get_list_by_firm_id(firm.id, True)
    comission_terms = []
    for term_id in (firm_term | leased_term):
        term = Term.query.get(term_id)
        if not term.has_comission:
            continue
        comission_terms.append(term.id)

    # Back up 20 days so the month interval covers the intended period even
    # when search_date falls early in a month — TODO confirm intent.
    interval_date = search_date - timedelta(days=20)
    interval = date_helper.get_date_interval(interval_date, 'month')

    # Completed reports for the firm's commission terms within the interval.
    query = Report.query.filter(Report.term_firm_id == firm.id)
    query = query.filter(Report.status == Report.STATUS_COMPLETE)
    query = query.filter(Report.term_id.in_(comission_terms))
    query = query.filter(
        Report.creation_date.between(interval[0], interval[1]))
    reports = query.all()
    if not len(reports):
        log.debug('Empty report for firm id %s, date %s' %
                  (firm_id, search_date))
        return False

    # Reuse an account already generated for this day, if any, so repeated
    # runs update it instead of duplicating.
    query = PaymentAccount.query.filter(PaymentAccount.firm_id == firm.id)
    query = query.filter(
        PaymentAccount.status == PaymentAccount.STATUS_GENERATED)
    query = query.filter(
        func.DATE(PaymentAccount.generated_date) == func.DATE(search_date))
    account = query.first()
    if not account:
        account = PaymentAccount()
        account.firm_id = firm.id
        account.generated_date = search_date

    account.items_count = len(reports)
    account.summ = 0
    for report in reports:
        if firm.transaction_percent > 0:
            # Percentage commission; transaction_percent is stored in
            # hundredths of a percent (divided by 100 twice).
            comission = float(report.amount) * \
                (float(firm.transaction_percent) / 100 / 100)
            # A configured flat commission acts as a per-report minimum.
            if firm.transaction_comission > 0 and comission < firm.transaction_comission:
                comission = firm.transaction_comission
            account.summ = account.summ + comission
        elif firm.transaction_comission > 0:
            account.summ = account.summ + firm.transaction_comission

    account.items_count = len(reports)
    # Average price per item; len(reports) > 0 is guaranteed above.
    account.item_price = int(
        round(float(account.summ) / account.items_count))

    # GPRS surcharge: one rate per owned GPRS term configured recently
    # enough (within Term.USED_LAST_MONTH seconds of the account date).
    account.gprs_terms_count = 0
    if firm.gprs_rate:
        terms_used = 0
        gprs_summ = 0
        for term_id in firm_term:
            term = Term.query.get(term_id)
            delta = account.generated_date - term.config_date
            if not term.has_gprs:
                continue
            if delta.total_seconds() > Term.USED_LAST_MONTH:
                continue
            terms_used += 1
            gprs_summ += firm.gprs_rate
        if terms_used:
            account.gprs_terms_count = terms_used
            account.summ += gprs_summ

    account.filename = PaymentAccount.get_filename(firm_id, search_date)
    account.save()
    # Failed PDF generation rolls the account back entirely.
    if not account.generate_pdf():
        log.error('Not generate account for firm id %s, date %s' %
                  (firm_id, search_date))
        account.delete()
        return False
    account.save()

    if not send:
        return True

    # Queue one e-mail per configured address with the PDF attached.
    emails = firm.decode_field(firm.account_email)
    for email in emails:
        mail.send.delay(AccountMessage,
                        to=email,
                        attach='%s/%s' % (app.config['PDF_FOLDER'],
                                          account.filename),
                        date_text=account.get_month_year())
    return True
def find(cls, date):
    """Return the first row whose loaded_date equals DATE(date), or None."""
    target = func.DATE(date)
    return cls.query.filter(cls.loaded_date == target).first()
def get_events_by_time(start_time, end_time):
    """Return all events whose date lies within [start_time, end_time].

    Args:
        start_time: inclusive lower bound as a 'YYYY-MM-DD' string.
        end_time: inclusive upper bound as a 'YYYY-MM-DD' string.

    Raises:
        ValueError: if either argument is not a valid 'YYYY-MM-DD' string.
    """
    start_time = datetime.strptime(start_time, '%Y-%m-%d')
    end_time = datetime.strptime(end_time, '%Y-%m-%d')
    # BUG FIX: the bounds were strict (> / <), so events falling exactly on
    # the requested start or end day were silently excluded; a date-range
    # lookup is conventionally inclusive of both endpoints.
    return Events.query.filter(func.DATE(Events.date) >= start_time).filter(
        func.DATE(Events.date) <= end_time).all()
def today_events():
    """Return every event whose date is today."""
    today = date.today()
    return Events.query.filter(func.DATE(Events.date) == today).all()