def tag_read_api(public_id):
    """Return the tag with the given public id as JSON, or 404."""
    try:
        valid_public_id(public_id)
        tag = (
            g.db_session.query(Tag)
            .filter(Tag.public_id == public_id,
                    Tag.namespace_id == g.namespace.id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError('No tag found')
    return g.encoder.jsonify(tag)
def thread_api_update(public_id):
    """Apply the JSON request body as an update to an existing thread."""
    try:
        valid_public_id(public_id)
        thread = (
            g.db_session.query(Thread)
            .filter(Thread.public_id == public_id,
                    Thread.namespace_id == g.namespace.id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find thread `{0}` ".format(public_id))
    data = request.get_json(force=True)
    update_thread(thread, data, g.db_session)
    return g.encoder.jsonify(thread)
def raw_message_api(public_id):
    """Return the base64-encoded raw RFC 2822 contents of a message.

    Raises NotFoundError if the message does not exist or its raw body
    was never stored.
    """
    try:
        valid_public_id(public_id)
        message = g.db_session.query(Message).filter(
            Message.public_id == public_id,
            Message.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0}".format(public_id))
    # Bug fix: the original checked `full_body is None` twice in a row; the
    # second check's else-branch (the one that logged the error) was
    # unreachable. Keep a single check that logs before raising.
    if message.full_body is None:
        g.log.error("Message without full_body attribute: id='{0}'".format(
            message.id))
        raise NotFoundError(
            "Couldn't find raw contents for message `{0}` ".format(public_id))
    b64_contents = base64.b64encode(message.full_body.data)
    return g.encoder.jsonify({"rfc2822": b64_contents})
def calendar_read_api(public_id):
    """Get all data for an existing calendar."""
    valid_public_id(public_id)
    try:
        calendar = (
            g.db_session.query(Calendar)
            .filter(Calendar.public_id == public_id,
                    Calendar.namespace_id == g.namespace.id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find calendar {0}".format(public_id))
    return g.encoder.jsonify(calendar)
def event_delete_api(public_id):
    """Delete an event and schedule the corresponding remote deletion."""
    valid_public_id(public_id)
    try:
        # Eager-load the calendar since we read several of its attributes.
        event = (
            g.db_session.query(Event)
            .filter_by(public_id=public_id, namespace_id=g.namespace.id)
            .options(subqueryload(Event.calendar))
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.calendar.read_only:
        raise NotFoundError(
            'Cannot delete event {} from read_only calendar.'.format(
                public_id))
    schedule_action('delete_event', event, g.namespace.id, g.db_session,
                    event_uid=event.uid, calendar_name=event.calendar.name)
    events.crud.delete(g.namespace, g.db_session, public_id)
    return g.encoder.jsonify(None)
def get_calendar(calendar_public_id, namespace, db_session):
    """Look up a calendar by public id.

    A ``None`` id selects the account's default calendar.
    """
    if calendar_public_id is None:
        account = db_session.query(Account).filter(
            Account.id == namespace.account_id).one()
        return account.default_calendar
    valid_public_id(calendar_public_id)
    try:
        return db_session.query(Calendar).filter(
            Calendar.public_id == calendar_public_id,
            Calendar.namespace_id == namespace.id).one()
    except NoResultFound:
        raise NotFoundError('Calendar {} not found'.format(calendar_public_id))
def message_update_api(public_id):
    """Apply the JSON request body as an update to an existing message."""
    try:
        valid_public_id(public_id)
        message = (
            g.db_session.query(Message)
            .filter(Message.public_id == public_id,
                    Message.namespace_id == g.namespace.id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0} ".format(public_id))
    data = request.get_json(force=True)
    update_message(message, data, g.db_session)
    return g.encoder.jsonify(message)
def calendar_update_api(public_id):
    """Update a calendar from the JSON request body.

    Read-only calendars cannot be modified.
    """
    calendar = get_calendar(public_id, g.namespace, g.db_session)
    if calendar.read_only:
        raise InputError("Cannot update a read_only calendar.")
    data = request.get_json(force=True)
    result = events.crud.update_calendar(
        g.namespace, g.db_session, public_id, data)
    if result is None:
        raise NotFoundError("Couldn't find calendar {0}".format(public_id))
    return g.encoder.jsonify(result)
def thread_api(public_id):
    """Fetch a single thread, honoring the ``view=expanded`` query arg."""
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    # A fresh encoder is needed because the expand flag is per-request.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    try:
        valid_public_id(public_id)
        thread = (
            g.db_session.query(Thread)
            .filter(Thread.public_id == public_id,
                    Thread.namespace_id == g.namespace.id)
            .one()
        )
        return encoder.jsonify(thread)
    except NoResultFound:
        raise NotFoundError("Couldn't find thread `{0}`".format(public_id))
def message_read_api(public_id):
    """Fetch one message; serve raw RFC 2822 when the client asks for it."""
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    try:
        valid_public_id(public_id)
        message = (
            g.db_session.query(Message)
            .filter(Message.public_id == public_id,
                    Message.namespace_id == g.namespace.id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0} ".format(public_id))
    if request.headers.get('Accept', None) == 'message/rfc822':
        # The raw body is only available when full_body was stored.
        if message.full_body is None:
            g.log.error("Message without full_body attribute: id='{0}'"
                        .format(message.id))
            raise NotFoundError(
                "Couldn't find raw contents for message `{0}` "
                .format(public_id))
        return Response(message.full_body.data, mimetype='message/rfc822')
    return encoder.jsonify(message)
def get_calendar(calendar_public_id, namespace, db_session):
    """Return the calendar with the given public id, or raise NotFoundError."""
    valid_public_id(calendar_public_id)
    try:
        return db_session.query(Calendar).filter(
            Calendar.public_id == calendar_public_id,
            Calendar.namespace_id == namespace.id,
        ).one()
    except NoResultFound:
        raise NotFoundError("Calendar {} not found".format(calendar_public_id))
def get_sending_draft(draft_public_id, namespace_id, db_session):
    """Return a draft that is mid multi-send, validating its state."""
    valid_public_id(draft_public_id)
    try:
        draft = (
            db_session.query(Message)
            .filter(Message.public_id == draft_public_id,
                    Message.namespace_id == namespace_id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find multi-send draft {}".format(draft_public_id))
    if draft.is_sent or not draft.is_sending:
        raise InputError(
            'Message {} is not a multi-send draft'.format(draft_public_id))
    return draft
def file_download_api(public_id):
    """Download a file's raw contents as an octet-stream attachment.

    Derives a filename from the stored attachment properties and encodes
    it safely for the Content-Disposition header.
    """
    valid_public_id(public_id)
    try:
        f = g.db_session.query(Block).filter(
            Block.public_id == public_id,
            Block.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find file {0} ".format(public_id))

    # Here we figure out the filename.extension given the
    # properties which were set on the original attachment
    # TODO consider using werkzeug.secure_filename to sanitize?
    if f.content_type:
        ct = f.content_type.lower()
    else:
        # TODO Detect the content-type using the magic library
        # and set ct = the content type, which is used below
        g.log.error("Content type not set! Defaulting to text/plain")
        ct = 'text/plain'

    if f.filename:
        name = f.filename
    else:
        # No stored filename: synthesize one from the content type.
        g.log.debug("No filename. Generating...")
        if ct in common_extensions:
            name = 'attachment.{0}'.format(common_extensions[ct])
        else:
            g.log.error("Unknown extension for content-type: {0}"
                        .format(ct))
            # HACK just append the major part of the content type
            name = 'attachment.{0}'.format(ct.split('/')[0])

    # TODO the part.data object should really behave like a stream we can read
    # & write to
    response = make_response(f.data)
    response.headers['Content-Type'] = 'application/octet-stream'  # ct

    # Werkzeug will try to encode non-ascii header values as latin-1. Try that
    # first; if it fails, use RFC2047/MIME encoding. See
    # https://tools.ietf.org/html/rfc7230#section-3.2.4.
    try:
        name = name.encode('latin-1')
    except UnicodeEncodeError:
        name = email.header.Header(name, 'utf-8').encode()
    response.headers['Content-Disposition'] = \
        'attachment; filename={0}'.format(name)

    g.log.info(response.headers)
    return response
def tag_read_api(public_id):
    """Fetch a tag, annotated with its unread and total thread counts."""
    try:
        valid_public_id(public_id)
        tag = g.db_session.query(Tag).filter(
            Tag.public_id == public_id,
            Tag.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError('No tag found')
    unread_tag = (
        g.db_session.query(Tag)
        .filter_by(namespace_id=g.namespace.id, name='unread')
        .first()
    )
    if unread_tag:
        # Unread count = threads carrying both this tag and 'unread'.
        tag.unread_count = tag.intersection(unread_tag.id, g.db_session)
    tag.thread_count = tag.count_threads()
    return g.encoder.jsonify(tag)
def calendar_update(account_public_id):
    """Handle a Google push notification for an account's calendar list."""
    request.environ["log_context"]["account_public_id"] = account_public_id
    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = db_session.query(GmailAccount).filter(
                GmailAccount.public_id == account_public_id).one()
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        # valid_public_id signals a malformed id with ValueError.
        raise InputError("Invalid public ID")
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find account `{0}`".format(account_public_id))
def event_update(calendar_public_id):
    """Handle a Google push notification for a single calendar."""
    try:
        valid_public_id(calendar_public_id)
        calendar = g.db_session.query(Calendar).filter(
            Calendar.public_id == calendar_public_id).one()
        calendar.handle_gpush_notification()
        return resp(200)
    except ValueError:
        # valid_public_id signals a malformed id with ValueError.
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing calendar',
                   calendar_public_id=calendar_public_id)
        raise NotFoundError(
            "Couldn't find calendar `{0}`".format(calendar_public_id))
def delete_account(namespace_public_id):
    """Mark an existing account for deletion."""
    try:
        with global_session_scope() as db_session:
            namespace = db_session.query(Namespace).filter(
                Namespace.public_id == namespace_public_id).one()
            account = namespace.account
            account.mark_for_deletion()
            db_session.commit()
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find account `{0}` ".format(namespace_public_id))
    encoder = APIEncoder()
    return encoder.jsonify({})
def calendar_update(account_public_id):
    """Handle a Google push notification for an account's calendars."""
    try:
        valid_public_id(account_public_id)
        account = g.db_session.query(GmailAccount).filter(
            GmailAccount.public_id == account_public_id).one()
        account.handle_gpush_notification()
        return resp(200)
    except ValueError:
        # valid_public_id signals a malformed id with ValueError.
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing account',
                   account_public_id=account_public_id)
        raise NotFoundError(
            "Couldn't find account `{0}`".format(account_public_id))
def event_update_api(public_id):
    """Update an event's fields and schedule the remote-side sync.

    Read-only and recurring events are rejected. Participants without a
    status default to 'noreply'. No-op updates short-circuit without
    bumping the sequence number or touching the database.
    """
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']
    valid_public_id(public_id)
    try:
        event = g.db_session.query(Event).filter(
            Event.public_id == public_id,
            Event.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.read_only:
        raise InputError('Cannot update read_only event.')
    if (isinstance(event, RecurringEvent) or
            isinstance(event, RecurringEventOverride)):
        raise InputError('Cannot update a recurring event yet.')
    data = request.get_json(force=True)
    valid_event_update(data, g.namespace, g.db_session)
    # Default every submitted participant to 'noreply' if no status given.
    if 'participants' in data:
        for p in data['participants']:
            if 'status' not in p:
                p['status'] = 'noreply'
    # Don't update an event if we don't need to.
    if noop_event_update(event, data):
        return g.encoder.jsonify(event)
    # Only whitelisted attributes may be patched through this endpoint.
    for attr in ['title', 'description', 'location', 'when', 'participants']:
        if attr in data:
            setattr(event, attr, data[attr])
    # Bump the iCalendar sequence number and persist before scheduling the
    # syncback action, so the action sees the committed state.
    event.sequence_number += 1
    g.db_session.commit()
    schedule_action('update_event', event, g.namespace.id, g.db_session,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(event)
def event_update(calendar_public_id):
    """Handle a Google push notification for a calendar, with throttling."""
    request.environ["log_context"]["calendar_public_id"] = calendar_public_id
    try:
        valid_public_id(calendar_public_id)
        # Throttle per calendar to 0.5 requests/second; throttled
        # notifications are simply dropped.
        allowed, tokens, sleep = limitlion.throttle(
            "gcal:{}".format(calendar_public_id), rps=0.5)
        if allowed:
            with global_session_scope() as db_session:
                calendar = (db_session.query(Calendar).filter(
                    Calendar.public_id == calendar_public_id).one())
                calendar.handle_gpush_notification()
                db_session.commit()
        # NOTE(review): 200 is returned even when throttled — presumably so
        # the push service does not keep retrying; confirm this is intended.
        return resp(200)
    except ValueError:
        # valid_public_id signals a malformed id with ValueError.
        raise InputError("Invalid public ID")
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find calendar `{0}`".format(calendar_public_id))
def get_draft(draft_public_id, version, namespace_id, db_session):
    """Load a draft by public id, enforcing optimistic version agreement."""
    valid_public_id(draft_public_id)
    if version is None:
        raise InputError('Must specify draft version')
    try:
        draft = (
            db_session.query(Message)
            .filter(Message.public_id == draft_public_id,
                    Message.namespace_id == namespace_id)
            .one()
        )
    except NoResultFound:
        raise NotFoundError("Couldn't find draft {}".format(draft_public_id))
    if draft.is_sent or not draft.is_draft:
        raise InputError('Message {} is not a draft'.format(draft_public_id))
    if draft.version != version:
        # Stale client copy: someone else updated the draft since it was read.
        raise ConflictError(
            'Draft {0}.{1} has already been updated to version {2}'.format(
                draft_public_id, version, draft.version))
    return draft
def start():
    """Per-request setup: db session, logger, namespace, encoder, parser."""
    g.db_session = InboxSession(engine)
    g.log = get_logger()
    try:
        valid_public_id(g.namespace_public_id)
        g.namespace = (
            g.db_session.query(Namespace)
            .filter(Namespace.public_id == g.namespace_public_id)
            .one()
        )
        g.encoder = APIEncoder(g.namespace.public_id)
    except NoResultFound:
        raise NotFoundError("Couldn't find namespace `{0}` ".format(
            g.namespace_public_id))
    # Base parser with the pagination arguments shared by list endpoints.
    g.parser = reqparse.RequestParser(argument_class=ValidatableArgument)
    g.parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit,
                          location='args')
    g.parser.add_argument('offset', default=0, type=offset, location='args')
def tag_delete_api(public_id):
    """Delete a tag; only user-created tags may be deleted."""
    try:
        valid_public_id(public_id)
        tag = g.db_session.query(Tag).filter(
            Tag.public_id == public_id,
            Tag.namespace_id == g.namespace.id).one()
        if not tag.user_created:
            raise InputError('delete non user-created tag.')
        g.db_session.delete(tag)
        g.db_session.commit()
        # This is essentially what our other API endpoints do after deleting.
        # Effectively no error == success
        return g.encoder.jsonify(None)
    except NoResultFound:
        raise NotFoundError('No tag found')
def event_delete_api(public_id):
    """Cancel an event locally and schedule the remote-side deletion.

    The event is marked 'cancelled' rather than removed so the deletion
    flows through the events / delta-sync APIs the same way remote
    deletions do. Optionally emails a cancellation invite.
    """
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']
    valid_public_id(public_id)
    try:
        event = g.db_session.query(Event).filter_by(
            public_id=public_id, namespace_id=g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.calendar.read_only:
        raise InputError(
            'Cannot delete event {} from read_only calendar.'.format(
                public_id))
    # Set the local event status to 'cancelled' rather than deleting it,
    # in order to be consistent with how we sync deleted events from the
    # remote, and consequently return them through the events, delta sync APIs
    event.sequence_number += 1
    event.status = 'cancelled'
    g.db_session.commit()
    account = g.namespace.account
    # FIXME @karim: do this in the syncback thread instead.
    # Gmail handles cancellation invites itself; only send for others.
    if notify_participants and account.provider != 'gmail':
        ical_file = generate_icalendar_invite(
            event, invite_type='cancel').to_ical()
        send_invite(ical_file, event, account, invite_type='cancel')
    schedule_action('delete_event', event, g.namespace.id, g.db_session,
                    event_uid=event.uid, calendar_name=event.calendar.name,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(None)
def file_delete_api(public_id):
    """Delete a file unless it is referenced as a message attachment."""
    valid_public_id(public_id)
    try:
        blk = g.db_session.query(Block).filter(
            Block.public_id == public_id,
            Block.namespace_id == g.namespace.id).one()
        attached = (
            g.db_session.query(Block).join(Part)
            .filter(Block.public_id == public_id)
            .first()
        )
        if attached is not None:
            raise InputError("Can't delete file that is attachment.")
        g.db_session.delete(blk)
        g.db_session.commit()
        # This is essentially what our other API endpoints do after deleting.
        # Effectively no error == success
        return g.encoder.jsonify(None)
    except NoResultFound:
        raise NotFoundError("Couldn't find file {0} ".format(public_id))
def calendar_update(account_public_id):
    """Handle a push notification to refresh an account's calendar list."""
    g.log.info('Received request to update Google calendar list',
               account_public_id=account_public_id)
    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = db_session.query(GmailAccount).filter(
                GmailAccount.public_id == account_public_id).one()
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        # valid_public_id signals a malformed id with ValueError.
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing account',
                   account_public_id=account_public_id)
        raise NotFoundError(
            "Couldn't find account `{0}`".format(account_public_id))
def event_read_api(public_id):
    """Get all data for an existing event.

    The participant_id/action/rsvp arguments are parsed but currently
    unused: the RSVP-by-link flow below is disabled pending the
    participants refactor (T687).
    """
    valid_public_id(public_id)
    g.parser.add_argument('participant_id', type=valid_public_id,
                          location='args')
    g.parser.add_argument('action', type=valid_event_action, location='args')
    g.parser.add_argument('rsvp', type=valid_rsvp, location='args')
    args = strict_parse_args(g.parser, request.args)
    # FIXME karim -- re-enable this after landing the participants refactor
    # (T687)
    # if 'action' in args:
    #     # Participants are able to RSVP to events by clicking on links (e.g.
    #     # that are emailed to them). Therefore, the RSVP action is invoked
    #     # via a GET.
    #     if args['action'] == 'rsvp':
    #         try:
    #             participant_id = args.get('participant_id')
    #             if not participant_id:
    #                 return err(404, "Must specify a participant_id with rsvp")
    #             participant = g.db_session.query(Participant).filter_by(
    #                 public_id=participant_id).one()
    #             participant.status = args['rsvp']
    #             g.db_session.commit()
    #             result = events.crud.read(g.namespace, g.db_session,
    #                                       public_id)
    #             if result is None:
    #                 return err(404, "Couldn't find event with id {0}"
    #                            .format(public_id))
    #             return g.encoder.jsonify(result)
    #         except NoResultFound:
    #             return err(404, "Couldn't find participant with id `{0}` "
    #                        .format(participant_id))
    result = events.crud.read(g.namespace, g.db_session, public_id)
    if result is None:
        raise NotFoundError("Couldn't find event id {0}".format(public_id))
    return g.encoder.jsonify(result)
def file_download_api(public_id):
    """Download a file's raw contents as an octet-stream attachment.

    Derives a filename from the stored attachment properties and encodes
    it safely for the Content-Disposition header.
    """
    valid_public_id(public_id)
    try:
        f = g.db_session.query(Block).filter(
            Block.public_id == public_id,
            Block.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find file {0} ".format(public_id))

    # Here we figure out the filename.extension given the
    # properties which were set on the original attachment
    # TODO consider using werkzeug.secure_filename to sanitize?
    if f.content_type:
        ct = f.content_type.lower()
    else:
        # TODO Detect the content-type using the magic library
        # and set ct = the content type, which is used below
        g.log.error("Content type not set! Defaulting to text/plain")
        ct = 'text/plain'

    if f.filename:
        name = f.filename
    else:
        g.log.debug("No filename. Generating...")
        if ct in common_extensions:
            name = 'attachment.{0}'.format(common_extensions[ct])
        else:
            g.log.error("Unknown extension for content-type: {0}"
                        .format(ct))
            # HACK just append the major part of the content type
            name = 'attachment.{0}'.format(ct.split('/')[0])

    # TODO the part.data object should really behave like a stream we can read
    # & write to
    response = make_response(f.data)
    response.headers['Content-Type'] = 'application/octet-stream'  # ct

    # Bug fix: Werkzeug encodes header values as latin-1, so a filename with
    # characters outside latin-1 used to raise UnicodeEncodeError here. Try
    # latin-1 first; if it fails, fall back to RFC2047/MIME encoding. See
    # https://tools.ietf.org/html/rfc7230#section-3.2.4.
    import email.header  # local import: only needed for filename encoding
    try:
        name = name.encode('latin-1')
    except UnicodeEncodeError:
        name = email.header.Header(name, 'utf-8').encode()
    response.headers['Content-Disposition'] = \
        'attachment; filename={0}'.format(name)

    g.log.info(response.headers)
    return response
def event_delete_api(public_id):
    """Cancel an event locally and schedule its remote deletion."""
    valid_public_id(public_id)
    try:
        event = g.db_session.query(Event).filter_by(
            public_id=public_id, namespace_id=g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.calendar.read_only:
        raise NotFoundError(
            'Cannot delete event {} from read_only calendar.'.format(
                public_id))
    # Mark the event 'cancelled' instead of deleting it, so the deletion is
    # surfaced through the events / delta sync APIs, like remote deletions.
    event.status = 'cancelled'
    g.db_session.commit()
    schedule_action('delete_event', event, g.namespace.id, g.db_session,
                    event_uid=event.uid, calendar_name=event.calendar.name,
                    calendar_uid=event.calendar.uid)
    return g.encoder.jsonify(None)