def delete_note(note_id):
    """Delete the note with the given id.

    Returns HTTP 200 with {"success": true} when a row was deleted,
    HTTP 404 when no note with ``note_id`` exists.
    Rolls back the session and re-raises on any database error.
    """
    try:
        # Query.delete() returns the number of rows deleted; the original
        # ignored it and had an unreachable 404 branch after ``raise``.
        deleted = Session.query(Note).filter(Note.id == note_id).delete()
        Session.commit()
    except Exception:
        # keep the session usable for later requests, then propagate
        Session.rollback()
        raise
    if deleted:
        return make_response(jsonify(success=True), 200)
    return make_response(jsonify(success=False), 404)
async def delete_by_id(fizz_id: int, session: Session) -> List[int]:
    """Delete the Fizz row with ``fizz_id``; return [fizz_id] if deleted, else []."""
    match = session.query(Fizz).filter(Fizz.fizz_id == fizz_id).first()
    if match is None:
        return []
    session.delete(match)
    session.commit()
    return [fizz_id]
def save_market_to_db(msg_data: dict):
    """Persist a user's market snapshot.

    - Fetch (or create) the User record by user_name and refresh its fields.
    - Delete all of the user's existing Market rows.
    - Recreate Market rows from ``msg_data['market']['resources']``,
      creating Resource rows on demand.

    :param msg_data: payload with 'user' and 'market' sections
        (assumes keys user.user_name/telegram_id/name/last_update and
        market.free_slots/resources — TODO confirm against the sender)
    """
    # fix: removed leftover debug print statements
    session = Session()
    user = get_or_create(session, User, user_name=msg_data['user']['user_name'])
    user.free_slots = int(msg_data['market']['free_slots'])
    user.telegram_id = int(msg_data['user']['telegram_id'])
    user.name = str(msg_data['user']['name'])

    # drop every existing market row of this user before re-inserting
    for market in session.query(Market).filter_by(user=user).all():
        session.delete(market)
    session.commit()

    res_dict = msg_data['market']['resources']
    current_resources = [
        get_or_create(session, Resource, name=resource)
        for resource in res_dict
    ]
    current_markers = [
        Market(
            user=user,
            resource=res,
            count=res_dict[res.name],
            last_update=msg_data['user']['last_update'],
        )
        for res in current_resources
    ]
    session.add_all(current_markers)
    session.commit()
def options_v4():
    """Return the options the user has when creating a new alias.

    Same as v3 but also returns a time-based signed suffix next to each
    suffix, to be used with /v2/alias/custom/new.

    Input: a valid api-key in the "Authentication" header and an optional
    "hostname" query arg.
    Output (cf README):
        can_create: bool
        suffixes: [[suffix, signed_suffix]]
        prefix_suggestion: str
        recommendation: Optional dict (alias, hostname)
    """
    user = g.user
    hostname = request.args.get("hostname")

    ret = {
        "can_create": user.can_create_new_alias(),
        "suffixes": [],
        "prefix_suggestion": "",
    }

    if hostname:
        # recommend the most recently used alias on this hostname, if any
        row = (
            Session.query(AliasUsedOn, Alias, User)
            .filter(
                AliasUsedOn.alias_id == Alias.id,
                Alias.user_id == user.id,
                AliasUsedOn.hostname == hostname,
            )
            .order_by(desc(AliasUsedOn.created_at))
            .first()
        )
        if row:
            _, alias, _ = row
            LOG.d("found alias %s %s %s", alias, hostname, user)
            ret["recommendation"] = {"alias": alias.email, "hostname": hostname}

        # suggest a prefix from the bare domain name, ignoring TLD and
        # subdomain, e.g. www.groupon.com -> groupon
        ret["prefix_suggestion"] = convert_to_id(tldextract.extract(hostname).domain)

    suffixes = get_available_suffixes(user)

    # custom domain should be put first
    ret["suffixes"] = [[s.suffix, s.signed_suffix] for s in suffixes]

    return jsonify(ret)
def get_alias_log(alias: Alias, page_id=0) -> [AliasLog]:
    """Return one page (PAGE_LIMIT rows) of activity logs for *alias*, newest first."""
    rows = (
        Session.query(Contact, EmailLog)
        .filter(Contact.id == EmailLog.contact_id)
        .filter(Contact.alias_id == alias.id)
        .order_by(EmailLog.id.desc())
        .limit(PAGE_LIMIT)
        .offset(page_id * PAGE_LIMIT)
    )

    entries: [AliasLog] = [
        AliasLog(
            website_email=contact.website_email,
            reverse_alias=contact.website_send_to(),
            alias=alias.email,
            when=email_log.created_at,
            is_reply=email_log.is_reply,
            blocked=email_log.blocked,
            bounced=email_log.bounced,
            email_log=email_log,
            contact=contact,
        )
        for contact, email_log in rows
    ]

    # rows are fetched by id desc; re-sort by timestamp to be safe
    return sorted(entries, key=lambda entry: entry.when, reverse=True)
def alias_log(alias_id, page_id): alias = Alias.get(alias_id) # sanity check if not alias: flash("You do not have access to this page", "warning") return redirect(url_for("dashboard.index")) if alias.user_id != current_user.id: flash("You do not have access to this page", "warning") return redirect(url_for("dashboard.index")) logs = get_alias_log(alias, page_id) base = ( Session.query(Contact, EmailLog) .filter(Contact.id == EmailLog.contact_id) .filter(Contact.alias_id == alias.id) ) total = base.count() email_forwarded = ( base.filter(EmailLog.is_reply.is_(False)) .filter(EmailLog.blocked.is_(False)) .count() ) email_replied = base.filter(EmailLog.is_reply.is_(True)).count() email_blocked = base.filter(EmailLog.blocked.is_(True)).count() last_page = ( len(logs) < PAGE_LIMIT ) # lightweight pagination without counting all objects return render_template("dashboard/alias_log.html", **locals())
def get_alias_info(alias: Alias) -> AliasInfo:
    """Aggregate forward/reply/blocked counters for *alias* from its email logs."""
    alias_info = AliasInfo(
        alias=alias,
        nb_blocked=0,
        nb_forward=0,
        nb_reply=0,
        mailbox=alias.mailbox,
        mailboxes=[alias.mailbox],
    )

    email_log_rows = (
        Session.query(Contact, EmailLog)
        .filter(Contact.alias_id == alias.id)
        .filter(EmailLog.contact_id == Contact.id)
    )
    for _, email_log in email_log_rows:
        if email_log.is_reply:
            alias_info.nb_reply += 1
        elif email_log.blocked:
            alias_info.nb_blocked += 1
        else:
            alias_info.nb_forward += 1

    return alias_info
def check_mailbox_valid_domain():
    """detect if there's mailbox that's using an invalid domain

    For every verified, enabled mailbox: reset its failure counter when
    its email domain is still usable, otherwise increment the counter,
    warn the owner at 5 consecutive failures and disable the mailbox
    after more than 10 failures (if the mailbox is actively used).
    """
    mailbox_ids = (Session.query(Mailbox.id).filter(
        Mailbox.verified.is_(True), Mailbox.disabled.is_(False)).all())
    mailbox_ids = [e[0] for e in mailbox_ids]

    # iterate over id instead of mailbox directly
    # as a mailbox can be deleted during the sleep time
    for mailbox_id in mailbox_ids:
        mailbox = Mailbox.get(mailbox_id)
        # a mailbox has been deleted
        if not mailbox:
            continue

        if email_can_be_used_as_mailbox(mailbox.email):
            LOG.d("Mailbox %s valid", mailbox)
            mailbox.nb_failed_checks = 0
        else:
            mailbox.nb_failed_checks += 1
            nb_email_log = nb_email_log_for_mailbox(mailbox)

            LOG.w(
                "issue with mailbox %s domain. #alias %s, nb email log %s",
                mailbox,
                mailbox.nb_alias(),
                nb_email_log,
            )

            # send a warning
            if mailbox.nb_failed_checks == 5:
                # don't email the failing mailbox itself
                if mailbox.user.email != mailbox.email:
                    send_email(
                        mailbox.user.email,
                        f"Mailbox {mailbox.email} is disabled",
                        render(
                            "transactional/disable-mailbox-warning.txt.jinja2",
                            mailbox=mailbox,
                        ),
                        render(
                            "transactional/disable-mailbox-warning.html",
                            mailbox=mailbox,
                        ),
                        retries=3,
                    )

            # alert if too much fail and nb_email_log > 100
            if mailbox.nb_failed_checks > 10 and nb_email_log > 100:
                mailbox.disabled = True

                if mailbox.user.email != mailbox.email:
                    send_email(
                        mailbox.user.email,
                        f"Mailbox {mailbox.email} is disabled",
                        render("transactional/disable-mailbox.txt.jinja2", mailbox=mailbox),
                        render("transactional/disable-mailbox.html", mailbox=mailbox),
                        retries=3,
                    )

        # persist counter/disabled changes per mailbox
        # NOTE(review): commit placement inferred from collapsed source —
        # confirm it belongs inside the loop rather than after it
        Session.commit()
def get_multi(self, offset: int = 0, limit: int = 10, lazy: bool = True) -> T.List[DbBaseModel]:
    """Return a page of ``self.model`` rows.

    NOTE(review): when ``lazy`` is True the un-executed Query object is
    returned rather than a list, despite the annotation — confirm callers
    depend on this before tightening it.
    """
    page = Session.query(self.model).offset(offset).limit(limit)
    return page if lazy else page.all()
def get_note(note_id):
    """Fetch a single note by id from the DB and return it as JSON.

    Raises if no note with that id exists (the query uses ``.one()``).
    """
    note = Session.query(Note).filter(Note.id == note_id).one()
    return jsonify({
        "id": note.id,
        "title": note.title,
        "description": note.description,
    })
async def delete_by_id(widget_id: int, session: Session) -> List[int]:
    """Delete the Widget with ``widget_id``; return [widget_id] on success, [] if absent."""
    found = (
        session.query(Widget)
        .filter(Widget.widget_id == widget_id)
        .first()
    )
    if found is None:
        return []
    session.delete(found)
    session.commit()
    return [widget_id]
def notify_hibp():
    """Send aggregated email reports for HIBP breaches.

    For every user owning at least one breached alias that hasn't been
    notified yet, send one email listing all their breached aliases, then
    record the aliases in HibpNotifiedAlias so they're not re-notified.
    """
    # users that have at least one breached, not-yet-notified alias
    alias_query = (
        Session.query(Alias)
        .options(joinedload(Alias.hibp_breaches))
        .filter(Alias.hibp_breaches.any())
        .filter(Alias.id.notin_(Session.query(HibpNotifiedAlias.alias_id)))
        .distinct(Alias.user_id)
        .all()
    )
    user_ids = [alias.user_id for alias in alias_query]

    for user in User.filter(User.id.in_(user_ids)):
        breached_aliases = (
            Session.query(Alias)
            .options(joinedload(Alias.hibp_breaches))
            .filter(Alias.hibp_breaches.any(), Alias.user_id == user.id)
            .all()
        )

        # fix: was an f-string carrying %s placeholders — the f-prefix did
        # nothing; use plain lazy %-style logging args
        LOG.d(
            "Send new breaches found email to %s for %s breaches aliases",
            user,
            len(breached_aliases),
        )

        send_email(
            user.email,
            "You were in a data breach",
            render(
                "transactional/hibp-new-breaches.txt.jinja2",
                user=user,
                breached_aliases=breached_aliases,
            ),
            render(
                "transactional/hibp-new-breaches.html",
                user=user,
                breached_aliases=breached_aliases,
            ),
            retries=3,
        )

        # add the breached aliases to HibpNotifiedAlias to avoid sending another email
        for alias in breached_aliases:
            HibpNotifiedAlias.create(user_id=user.id, alias_id=alias.id)
        Session.commit()
def get_stats(user: User) -> Stats:
    """Compute per-user email statistics: alias, forward, reply and block counts."""

    def _count_email_logs(is_reply: bool, blocked: bool) -> int:
        # every counter excludes bounced messages
        return (
            Session.query(EmailLog)
            .filter_by(
                user_id=user.id,
                is_reply=is_reply,
                blocked=blocked,
                bounced=False,
            )
            .count()
        )

    return Stats(
        nb_alias=Alias.filter_by(user_id=user.id).count(),
        nb_forward=_count_email_logs(is_reply=False, blocked=False),
        nb_reply=_count_email_logs(is_reply=True, blocked=False),
        nb_block=_count_email_logs(is_reply=False, blocked=True),
    )
def update_note(note_id):
    """Update a note's title and description.

    Returns HTTP 202 when a row was updated, 404 when no note with
    ``note_id`` exists, and 400 when the JSON payload is missing the
    required fields (previously this crashed with a KeyError → 500).
    Rolls back the session and re-raises on database errors.
    """
    response = request.get_json()
    try:
        payload = {
            "title": response["title"],
            "description": response["description"],
        }
    except (TypeError, KeyError):
        # no/invalid JSON body or missing required fields
        return make_response(jsonify(success=False), 400)

    try:
        # Query.update() returns the number of rows matched; the original
        # ignored it and had an unreachable 404 branch after ``raise``.
        updated = Session.query(Note).filter(Note.id == note_id).update(payload)
        Session.commit()
    except Exception:
        Session.rollback()
        raise
    if updated:
        return make_response(jsonify(success=True), 202)
    return make_response(jsonify(success=False), 404)
def transfer(alias, new_user, new_mailboxes: [Mailbox]):
    """Transfer *alias* to *new_user*, attaching it to *new_mailboxes*.

    Re-points all Contact/AliasUsedOn/ClientUser rows to the new owner,
    replaces the alias's mailboxes, records the original owner on first
    transfer, emails the previous owner, then commits.

    NOTE(review): ``new_mailboxes.pop()`` mutates the caller's list and
    makes its *last* element the primary mailbox — confirm callers expect
    both effects.
    """
    # cannot transfer alias which is used for receiving newsletter
    if User.get_by(newsletter_alias_id=alias.id):
        raise Exception(
            "Cannot transfer alias that's used to receive newsletter")

    # update user_id
    Session.query(Contact).filter(Contact.alias_id == alias.id).update(
        {"user_id": new_user.id})

    Session.query(AliasUsedOn).filter(AliasUsedOn.alias_id == alias.id).update(
        {"user_id": new_user.id})

    Session.query(ClientUser).filter(ClientUser.alias_id == alias.id).update(
        {"user_id": new_user.id})

    # remove existing mailboxes from the alias
    Session.query(AliasMailbox).filter(
        AliasMailbox.alias_id == alias.id).delete()

    # set mailboxes
    alias.mailbox_id = new_mailboxes.pop().id
    for mb in new_mailboxes:
        AliasMailbox.create(alias_id=alias.id, mailbox_id=mb.id)

    # alias has never been transferred before
    if not alias.original_owner_id:
        alias.original_owner_id = alias.user_id

    # inform previous owner
    old_user = alias.user
    send_email(
        old_user.email,
        f"Alias {alias.email} has been received",
        render(
            "transactional/alias-transferred.txt",
            alias=alias,
        ),
        render(
            "transactional/alias-transferred.html",
            alias=alias,
        ),
    )

    # now the alias belongs to the new user
    alias.user_id = new_user.id

    # set some fields back to default
    alias.disable_pgp = False
    alias.pinned = False

    Session.commit()
def get_notes():
    """Fetch all notes from the DB and return them as a JSON list."""
    serialized = []
    for note in Session.query(Note).all():
        serialized.append(
            {"id": note.id, "title": note.title, "description": note.description}
        )
    return jsonify(serialized)
def aliases_for_mailbox(mailbox: Mailbox) -> [Alias]:
    """get list of aliases for a given mailbox

    Combines aliases whose primary mailbox is *mailbox* with those linked
    through the AliasMailbox association table, de-duplicated.
    """
    found = set(Alias.filter(Alias.mailbox_id == mailbox.id).all())

    secondary = (
        Session.query(Alias)
        .join(AliasMailbox, Alias.id == AliasMailbox.alias_id)
        .filter(AliasMailbox.mailbox_id == mailbox.id)
    )
    found.update(secondary)

    return list(found)
def get_alias_info_v2(alias: Alias, mailbox=None) -> AliasInfo:
    """Aggregate counters and latest-activity info for *alias*.

    Counts forwarded/replied/blocked email logs and records the most recent
    email log with its contact. ``mailbox`` defaults to the alias's primary
    mailbox.
    """
    if not mailbox:
        mailbox = alias.mailbox

    alias_info = AliasInfo(
        alias=alias,
        nb_blocked=0,
        nb_forward=0,
        nb_reply=0,
        mailbox=mailbox,
        mailboxes=[mailbox],
    )

    # include secondary mailboxes, then de-duplicate:
    # alias.mailbox_id can also appear in the AliasMailbox table
    alias_info.mailboxes.extend(alias._mailboxes)
    alias_info.mailboxes = list(set(alias_info.mailboxes))

    latest_activity: Arrow = alias.created_at
    latest_email_log = None
    latest_contact = None

    rows = (
        Session.query(Contact, EmailLog)
        .filter(Contact.alias_id == alias.id)
        .filter(EmailLog.contact_id == Contact.id)
    )
    for contact, email_log in rows:
        if email_log.is_reply:
            alias_info.nb_reply += 1
        elif email_log.blocked:
            alias_info.nb_blocked += 1
        else:
            alias_info.nb_forward += 1

        # track the most recent activity across all logs
        if email_log.created_at > latest_activity:
            latest_activity = email_log.created_at
            latest_email_log = email_log
            latest_contact = contact

    alias_info.latest_contact = latest_contact
    alias_info.latest_email_log = latest_email_log

    return alias_info
def fill_up_email_log_alias(): """Fill up email_log.alias_id column""" # split all emails logs into 1000-size trunks nb_email_log = EmailLog.count() LOG.d("total trunks %s", nb_email_log // 1000 + 2) for trunk in reversed(range(1, nb_email_log // 1000 + 2)): nb_update = 0 for email_log, contact in (Session.query( EmailLog, Contact).filter(EmailLog.contact_id == Contact.id).filter( EmailLog.id <= trunk * 1000).filter( EmailLog.id > (trunk - 1) * 1000).filter( EmailLog.alias_id.is_(None))): email_log.alias_id = contact.alias_id nb_update += 1 LOG.d("finish trunk %s, update %s email logs", trunk, nb_update) Session.commit()
def alias_creation_report() -> List[Tuple[str, int]]:
    """return the accounts that have created most aliases in the last 7 days, e.g.
    (email1, 30, 2021-3-21)
    (email2, 20, 2021-3-20)

    NOTE: despite the ``List[Tuple[str, int]]`` annotation, each element is
    actually a 3-tuple ``(email, count, date)``.

    Produce this query

    ```
    SELECT count(*) AS c, users.email, date(alias.created_at) AS d
    FROM alias, users
    WHERE alias.user_id = users.id
        AND alias.created_at > '2021-3-22'
    GROUP BY users.email, d
    HAVING count(*) > 50
    ORDER BY c DESC;
    ```

    """
    min_dt = arrow.now().shift(days=-7)
    query = (Session.query(
        User.email,
        func.count(Alias.id).label("count"),
        func.date(Alias.created_at).label("date"),
    ).join(
        Alias, Alias.user_id == User.id).filter(Alias.created_at > min_dt).group_by(
            User.email, "date").having(func.count(Alias.id) > 50).order_by(desc("count")))

    res = []
    # row order is (email, count, date) — count in the middle
    for email, count, date in query:
        res.append((email, count, date))

    return res
def rate_limited_for_alias(alias: Alias) -> bool:
    """Return True when *alias* received too much activity in the last minute."""
    window_start = arrow.now().shift(minutes=-1)

    # number of email-log entries on this alias inside the window
    nb_activity = (
        Session.query(EmailLog)
        .join(Contact, EmailLog.contact_id == Contact.id)
        .filter(
            Contact.alias_id == alias.id,
            EmailLog.created_at > window_start,
        )
        .group_by(EmailLog.id)
        .count()
    )

    exceeded = nb_activity > MAX_ACTIVITY_DURING_MINUTE_PER_ALIAS
    if exceeded:
        LOG.w(
            "Too much forward on alias %s. Nb Activity %s",
            alias,
            nb_activity,
        )
    return exceeded
def get_alias_infos_with_pagination(user, page_id=0, query=None) -> [AliasInfo]:
    """Return one page of AliasInfo for *user*, newest aliases first.

    When *query* is given, only aliases whose email or note matches it
    (case-insensitive substring) are included.
    """
    aliases = (
        Session.query(Alias)
        .options(joinedload(Alias.mailbox))
        .filter(Alias.user_id == user.id)
        .order_by(Alias.created_at.desc())
    )

    if query:
        pattern = f"%{query}%"
        aliases = aliases.filter(
            or_(Alias.email.ilike(pattern), Alias.note.ilike(pattern))
        )

    aliases = aliases.limit(PAGE_LIMIT).offset(page_id * PAGE_LIMIT)

    return [get_alias_info(alias) for alias in aliases]
def get_all_markets():
    """Render every user's market inventory as a multi-line HTML-ish string.

    Each user gets a bold header with their free-slot count, followed by
    one "<resource> - <count>" line per market row, separated by blank
    lines. Returns 'None' when there are no markets at all.
    """
    session = Session()
    all_markets = session.query(Market).all()

    # user_name -> [free_slots, "<resource> - <count>", ...]
    per_user = {m.user.user_name: [m.user.free_slots] for m in all_markets}
    for market in all_markets:
        try:
            res_name = market.resource.name
        except AttributeError:
            # a market row may reference a missing resource
            res_name = 'ErrorName'
        per_user[market.user.user_name].append(f'{res_name} - {market.count}')

    lines = []
    for user_name, entries in per_user.items():
        free_slots, *resource_lines = entries
        lines.append(f'<b>{user_name}</b> 📦 ({free_slots})')
        lines.extend(resource_lines)
        lines.append('\n')

    return '\n'.join(lines) or 'None'
def bounce_report() -> List[Tuple[str, int]]:
    """return the accounts that have most bounces, e.g.
    (email1, 30)
    (email2, 20)

    Produce this query

    ```
    SELECT count(*) AS c, users.email
    FROM email_log, users
    WHERE email_log.user_id = users.id
        AND email_log.created_at > '2021-3-20'
        and email_log.bounced = true
    GROUP BY users.email
    ORDER BY c DESC;
    ```
    """
    min_dt = arrow.now().shift(days=-1)
    rows = (
        Session.query(User.email, func.count(EmailLog.id).label("count"))
        .join(EmailLog, EmailLog.user_id == User.id)
        .filter(EmailLog.bounced, EmailLog.created_at > min_dt)
        .group_by(User.email)
        # skip accounts with only a handful of bounces
        .having(func.count(EmailLog.id) > 5)
        .order_by(desc("count"))
    )

    return [(email, count) for email, count in rows]
def all_bounce_report() -> str:
    """
    Return a report for all mailboxes that have most bounces.
    Using this query to get mailboxes that have bounces. For each mailbox in the list, return the first bounce info.

    ```
    SELECT email, count(*) AS nb_bounce
    FROM bounce
    WHERE created_at > '2021-10-16'
    GROUP BY email
    ORDER BY nb_bounce DESC
    ```

    """
    min_dt = arrow.now().shift(days=-1)
    rows = (
        Session.query(Bounce.email, func.count(Bounce.id).label("nb_bounce"))
        .filter(Bounce.created_at > min_dt)
        .group_by(Bounce.email)
        # not return mailboxes that have too little bounces
        .having(func.count(Bounce.id) > 3)
        .order_by(desc("nb_bounce"))
    )

    # build the report as parts and join once instead of string +=
    parts = []
    for email, count in rows:
        parts.append(f"{email}: {count} bounces. ")
        most_recent: Bounce = (
            Bounce.filter(Bounce.email == email)
            .order_by(Bounce.created_at.desc())
            .first()
        )
        # most_recent.info can be very verbose
        parts.append(
            f"Most recent cause: \n{most_recent.info[:1000] if most_recent.info else 'N/A'}"
        )
        parts.append("\n----\n")

    return "".join(parts)
async def put(widget: WidgetSchema, session: Session = Depends(get_db)) -> WidgetSchema:
    """Update Single Widget"""
    existing = session.query(Widget).get(widget.widget_id)
    updated = await WidgetService.update(existing, widget, session)
    return updated
async def get_by_id(fizz_id: int, session: Session) -> FizzSchema:
    """Load the Fizz row with ``fizz_id`` and map it onto a FizzSchema.

    NOTE(review): raises AttributeError when no row exists — confirm
    callers guard against missing ids.
    """
    row = session.query(Fizz).get(fizz_id)
    return FizzSchema(**vars(row))
def get(self, oid: int) -> T.Union[DbBaseModel, None]:
    """Fetch a single ``self.model`` row by primary key; None when not found."""
    query = Session.query(self.model)
    return query.get(oid)
async def put(fizz: FizzSchema, session: Session = Depends(get_db)) -> FizzSchema:
    """Update Single Fizz"""
    target = session.query(Fizz).get(fizz.fizz_id)
    result = await FizzService.update(target, fizz, session)
    return result
def get_products(session: Session = Depends(get_db)) -> List[Product]:
    """Return every Product row from the database."""
    products = session.query(Product).all()
    return products