def ns_all(): """ Return all namespaces """ # We do this outside the blueprint to support the case of an empty # public_id. However, this means the before_request isn't run, so we need # to make our own session with session_scope() as db_session: parser = reqparse.RequestParser(argument_class=ValidatableArgument) parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit, location='args') parser.add_argument('offset', default=0, type=int, location='args') parser.add_argument('email_address', type=bounded_str, location='args') args = strict_parse_args(parser, request.args) query = db_session.query(Namespace) if args['email_address']: query = query.join(Account) query = query.filter_by(email_address=args['email_address']) query = query.limit(args['limit']) if args['offset']: query = query.offset(args['offset']) namespaces = query.all() encoder = APIEncoder(legacy_nsid=request.path.startswith('/n')) return encoder.jsonify(namespaces)
def draft_query_api():
    """List drafts, optionally restricted to a single thread."""
    g.parser.add_argument('thread', type=valid_public_id, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    matching = filtering.drafts(
        g.namespace.id,
        parsed['thread'],
        parsed['limit'],
        parsed['offset'],
        g.db_session)
    return g.encoder.jsonify(matching)
def event_search_api():
    """Query events filtered by id, calendar, text fields, and time bounds."""
    g.parser.add_argument('event_id', type=valid_public_id, location='args')
    g.parser.add_argument('calendar_id', type=valid_public_id,
                          location='args')
    g.parser.add_argument('title', type=bounded_str, location='args')
    g.parser.add_argument('description', type=bounded_str, location='args')
    g.parser.add_argument('location', type=bounded_str, location='args')
    g.parser.add_argument('starts_before', type=timestamp, location='args')
    g.parser.add_argument('starts_after', type=timestamp, location='args')
    g.parser.add_argument('ends_before', type=timestamp, location='args')
    g.parser.add_argument('ends_after', type=timestamp, location='args')
    args = strict_parse_args(g.parser, request.args)
    # All filtering is delegated to the shared filtering helper;
    # source='local' restricts results to locally-synced events.
    results = filtering.events(
        namespace_id=g.namespace.id,
        account_id=g.namespace.account_id,
        event_public_id=args['event_id'],
        calendar_public_id=args['calendar_id'],
        title=args['title'],
        description=args['description'],
        location=args['location'],
        starts_before=args['starts_before'],
        starts_after=args['starts_after'],
        ends_before=args['ends_before'],
        ends_after=args['ends_after'],
        limit=args['limit'],
        offset=args['offset'],
        source='local',
        db_session=g.db_session)
    return g.encoder.jsonify(results)
def stream_changes():
    """Stream delta events to the client as server-sent events.

    `cursor` marks where to resume ('0' means the start of the transaction
    log); `timeout` bounds how long the stream stays open (default 1800s);
    `exclude_types` suppresses the given object types.
    """
    g.parser.add_argument('timeout', type=float, location='args')
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    g.parser.add_argument('exclude_types', type=valid_delta_object_types,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    timeout = args['timeout'] or 1800
    transaction_pointer = None
    cursor = args['cursor']
    if cursor == '0':
        transaction_pointer = 0
    else:
        # Resolve the public cursor to an internal transaction id.
        query_result = g.db_session.query(Transaction.id).filter(
            Transaction.namespace_id == g.namespace.id,
            Transaction.public_id == cursor).first()
        if query_result is None:
            raise InputError('Invalid cursor {}'.format(args['cursor']))
        transaction_pointer = query_result[0]
    exclude_types = args.get('exclude_types')
    # Hack to not keep a database session open for the entire (long) request
    # duration.
    g.db_session.close()
    # TODO make transaction log support the `expand` feature
    generator = delta_sync.streaming_change_generator(
        g.namespace.id, transaction_pointer=transaction_pointer,
        poll_interval=1, timeout=timeout,
        exclude_types=exclude_types)
    return Response(generator, mimetype='text/event-stream')
def ns_all(): """ Return all namespaces """ # We do this outside the blueprint to support the case of an empty # public_id. However, this means the before_request isn't run, so we need # to make our own session with global_session_scope() as db_session: parser = reqparse.RequestParser(argument_class=ValidatableArgument) parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit, location='args') parser.add_argument('offset', default=0, type=int, location='args') parser.add_argument('email_address', type=bounded_str, location='args') args = strict_parse_args(parser, request.args) query = db_session.query(Namespace) if args['email_address']: query = query.join(Account) query = query.filter_by(email_address=args['email_address']) query = query.limit(args['limit']) if args['offset']: query = query.offset(args['offset']) namespaces = query.all() encoder = APIEncoder(legacy_nsid=request.path.startswith('/n')) return encoder.jsonify(namespaces)
def contact_api():
    """List contacts for the namespace; `view` may be 'count' or 'ids'."""
    g.parser.add_argument('filter', type=bounded_str, default='',
                          location='args')
    g.parser.add_argument('view', type=bounded_str, location='args')
    params = strict_parse_args(g.parser, request.args)
    view_mode = params['view']

    # Select only what the requested view needs.
    if view_mode == 'count':
        query = g.db_session.query(func.count(Contact.id))
    else:
        entity = Contact.id if view_mode == 'ids' else Contact
        query = g.db_session.query(entity)

    query = query.filter(Contact.namespace_id == g.namespace.id)
    if params['filter']:
        query = query.filter(Contact.email_address == params['filter'])
    query = query.order_by(asc(Contact.id))

    if view_mode == 'count':
        return g.encoder.jsonify({"count": query.scalar()})

    page = query.limit(params['limit']).offset(params['offset']).all()
    return g.encoder.jsonify(page)
def stream_changes():
    """Stream delta events as server-sent events.

    `cursor` marks where to resume ('0' = start of the transaction log);
    `timeout` bounds how long the stream stays open (default 3600s);
    `exclude_types` suppresses the given object types.
    """
    g.parser.add_argument('timeout', type=float, location='args')
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    g.parser.add_argument('exclude_types', type=valid_delta_object_types,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    timeout = args['timeout'] or 3600
    transaction_pointer = None
    cursor = args['cursor']
    if cursor == '0':
        transaction_pointer = 0
    else:
        # Resolve the public cursor to an internal transaction id.
        query_result = g.db_session.query(Transaction.id).filter(
            Transaction.namespace_id == g.namespace.id,
            Transaction.public_id == cursor).first()
        if query_result is None:
            raise InputError('Invalid cursor {}'.format(args['cursor']))
        transaction_pointer = query_result[0]
    exclude_types = args.get('exclude_types')
    # Hack to not keep a database session open for the entire (long) request
    # duration.
    g.db_session.close()
    generator = delta_sync.streaming_change_generator(
        g.namespace.id, transaction_pointer=transaction_pointer,
        poll_interval=1, timeout=timeout,
        exclude_types=exclude_types)
    return Response(generator, mimetype='text/event-stream')
def calendar_search_api(): """ Calendar events! """ g.parser.add_argument('filter', type=bounded_str, default='', location='args') g.parser.add_argument('view', type=view, location='args') args = strict_parse_args(g.parser, request.args) term_filter_string = '%{}%'.format(args['filter']) term_filter = or_( Calendar.name.like(term_filter_string), Calendar.description.like(term_filter_string)) eager = subqueryload(Calendar.events) if view == 'count': query = g.db_session(func.count(Calendar.id)) elif view == 'ids': query = g.db_session.query(Calendar.id) else: query = g.db_session.query(Calendar) results = query.filter(Calendar.namespace_id == g.namespace.id, term_filter).order_by(asc(Calendar.id)) if view == 'count': return g.encoder.jsonify({"count": results.one()[0]}) results = results.limit(args['limit']) if view != 'ids': results = results.options(eager) results = results.offset(args['offset']).all() return g.encoder.jsonify(results)
def suspend_sync():
    """Stop syncing the account whose namespace public id is `account_id`,
    record why/when it was disabled, and broadcast a sync_suspended event."""
    g.parser.add_argument('account_id', required=True, type=valid_public_id,
                          location='form')
    # NOTE(review): the argument is declared with location='form' but parsed
    # from request.args — confirm which location callers actually use.
    args = strict_parse_args(g.parser, request.args)
    namespace_public_id = args['account_id']
    # First resolve the namespace id via the global (unsharded) session...
    with global_session_scope() as db_session:
        namespace = db_session.query(Namespace) \
            .filter(Namespace.public_id == namespace_public_id).one()
        namespace_id = namespace.id
    # ...then perform the update on the shard that owns the namespace.
    with session_scope(namespace_id) as db_session:
        namespace = db_session.query(Namespace) \
            .filter(Namespace.public_id == namespace_public_id).one()
        account = namespace.account
        account.sync_should_run = False
        account._sync_status[
            'sync_disabled_reason'] = 'suspend_account API endpoint called'
        account._sync_status['sync_disabled_on'] = datetime.utcnow()
        account._sync_status['sync_disabled_by'] = 'api'
        db_session.commit()
        # Let other services in this zone know the account was suspended.
        shared_queue = shared_sync_event_queue_for_zone(config.get('ZONE'))
        shared_queue.send_event({'event': 'sync_suspended', 'id': account.id})
    # 204: suspended (or already suspended); no body.
    return make_response(('', 204, {}))
def message_search_api():
    """Search messages.

    GET uses the account's search provider with the `q` parameter;
    POST sends a structured query/sort document to the search engine.
    """
    g.parser.add_argument('q', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    if request.method == 'GET':
        if not args['q']:
            err_string = ('GET HTTP method must include query'
                          ' url parameter')
            g.log.error(err_string)
            return err(400, err_string)
        search_client = get_search_client(g.namespace.account)
        results = search_client.search_messages(g.db_session, args['q'])
    else:
        # POST body carries a structured query and an optional sort clause.
        data = request.get_json(force=True)
        query = data.get('query')
        validate_search_query(query)
        sort = data.get('sort')
        validate_search_sort(sort)
        try:
            search_engine = NamespaceSearchEngine(g.namespace_public_id)
            results = search_engine.messages.search(query=query, sort=sort,
                                                    max_results=args.limit,
                                                    offset=args.offset)
        except SearchEngineError as e:
            g.log.error('Search error: {0}'.format(e))
            return err(501, 'Search error')
    return g.encoder.jsonify(results)
def contact_search_api():
    """Substring-search locally-synced contacts by name or email address.

    Supports 'count' and 'ids' views via the `view` query parameter.
    """
    g.parser.add_argument('filter', type=bounded_str, default='',
                          location='args')
    g.parser.add_argument('view', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    term_filter_string = '%{}%'.format(args['filter'])
    term_filter = or_(
        Contact.name.like(term_filter_string),
        Contact.email_address.like(term_filter_string))
    if args['view'] == 'count':
        results = g.db_session.query(func.count(Contact.id))
    elif args['view'] == 'ids':
        results = g.db_session.query(Contact.id)
    else:
        results = g.db_session.query(Contact)

    results = results.filter(Contact.namespace_id == g.namespace.id,
                             Contact.source == 'local',
                             term_filter).order_by(asc(Contact.id))

    if args['view'] == 'count':
        # BUG FIX: .all() on a count query returns [(n,)]; return the scalar
        # count itself, matching the other count views in this API.
        return g.encoder.jsonify({"count": results.scalar()})

    results = results.limit(args['limit']).offset(args['offset']).all()
    return g.encoder.jsonify(results)
def contact_search_api():
    """Substring-search locally-synced contacts by name or email address.

    Supports 'count' and 'ids' views via the `view` query parameter.
    """
    g.parser.add_argument('filter', type=bounded_str, default='',
                          location='args')
    g.parser.add_argument('view', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    term_filter_string = '%{}%'.format(args['filter'])
    term_filter = or_(Contact.name.like(term_filter_string),
                      Contact.email_address.like(term_filter_string))
    if args['view'] == 'count':
        results = g.db_session.query(func.count(Contact.id))
    elif args['view'] == 'ids':
        results = g.db_session.query(Contact.id)
    else:
        results = g.db_session.query(Contact)

    results = results.filter(Contact.namespace_id == g.namespace.id,
                             Contact.source == 'local', term_filter). \
        order_by(asc(Contact.id))

    if args['view'] == 'count':
        # BUG FIX: .all() on a count query returns [(n,)]; return the scalar
        # count itself, matching the other count views in this API.
        return g.encoder.jsonify({"count": results.scalar()})

    results = results.limit(args['limit']).offset(args['offset']).all()
    return g.encoder.jsonify(results)
def tag_query_api():
    """List tags for the namespace, filterable by name or public id.

    Supports 'count' and 'ids' views via the `view` query parameter.
    """
    g.parser.add_argument('tag_name', type=bounded_str, location='args')
    g.parser.add_argument('tag_id', type=valid_public_id, location='args')
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)

    # Select only what the requested view needs.
    if args['view'] == 'count':
        query = g.db_session.query(func.count(Tag.id))
    elif args['view'] == 'ids':
        query = g.db_session.query(Tag.public_id)
    else:
        query = g.db_session.query(Tag)

    query = query.filter(Tag.namespace_id == g.namespace.id)
    if args['tag_name']:
        query = query.filter_by(name=args['tag_name'])
    if args['tag_id']:
        query = query.filter_by(public_id=args['tag_id'])

    if args['view'] == 'count':
        return g.encoder.jsonify({"count": query.one()[0]})

    query = query.order_by(Tag.id)
    query = query.limit(args['limit'])
    if args['offset']:
        query = query.offset(args['offset'])
    if args['view'] == 'ids':
        # Unwrap single-column rows into bare public ids.
        results = [x[0] for x in query.all()]
    else:
        results = query.all()
    return g.encoder.jsonify(results)
def event_search_api():
    """Query events by id, calendar, text fields, time bounds, and view."""
    g.parser.add_argument('event_id', type=valid_public_id, location='args')
    g.parser.add_argument('calendar_id', type=valid_public_id,
                          location='args')
    g.parser.add_argument('title', type=bounded_str, location='args')
    g.parser.add_argument('description', type=bounded_str, location='args')
    g.parser.add_argument('location', type=bounded_str, location='args')
    g.parser.add_argument('starts_before', type=timestamp, location='args')
    g.parser.add_argument('starts_after', type=timestamp, location='args')
    g.parser.add_argument('ends_before', type=timestamp, location='args')
    g.parser.add_argument('ends_after', type=timestamp, location='args')
    g.parser.add_argument('view', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    # All filtering is delegated to the shared filtering helper;
    # source='local' restricts results to locally-synced events.
    results = filtering.events(namespace_id=g.namespace.id,
                               event_public_id=args['event_id'],
                               calendar_public_id=args['calendar_id'],
                               title=args['title'],
                               description=args['description'],
                               location=args['location'],
                               starts_before=args['starts_before'],
                               starts_after=args['starts_after'],
                               ends_before=args['ends_before'],
                               ends_after=args['ends_after'],
                               limit=args['limit'],
                               offset=args['offset'],
                               source='local',
                               view=args['view'],
                               db_session=g.db_session)
    return g.encoder.jsonify(results)
def sync_deltas():
    """Return deltas strictly after the given transaction cursor."""
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    args = strict_parse_args(g.parser, request.args)
    cursor = args['cursor']
    if cursor == '0':
        # '0' means "from the very beginning of the transaction log".
        start_pointer = 0
    else:
        try:
            start_pointer, = g.db_session.query(Transaction.id). \
                filter(Transaction.public_id == cursor,
                       Transaction.namespace_id == g.namespace.id).one()
        except NoResultFound:
            return err(404, 'Invalid cursor parameter')
    deltas, _ = delta_sync.format_transactions_after_pointer(
        g.namespace.id, start_pointer, g.db_session, args['limit'])
    response = {
        'cursor_start': cursor,
        'deltas': deltas,
    }
    if deltas:
        response['cursor_end'] = deltas[-1]['cursor']
    else:
        # No changes.
        response['cursor_end'] = cursor
    return g.encoder.jsonify(response)
def message_read_api(public_id):
    """Return a single message; with Accept: message/rfc822, return the raw
    message source instead of the JSON representation."""
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    # Dedicated encoder so the 'expanded' view takes effect for this response.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')

    try:
        valid_public_id(public_id)
        message = g.db_session.query(Message).filter(
            Message.public_id == public_id,
            Message.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0} ".format(public_id))
    if request.headers.get('Accept', None) == 'message/rfc822':
        if message.full_body is not None:
            return Response(message.full_body.data,
                            mimetype='message/rfc822')
        else:
            # Raw source was requested but is not stored for this message.
            g.log.error("Message without full_body attribute: id='{0}'"
                        .format(message.id))
            raise NotFoundError(
                "Couldn't find raw contents for message `{0}` "
                .format(public_id))
    return encoder.jsonify(message)
def stream_changes():
    """Stream delta events as server-sent events.

    `cursor` marks where to resume ('0' = start of the transaction log);
    `timeout` bounds how long the stream stays open (default 3600s).
    """
    g.parser.add_argument('timeout', type=float, location='args')
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    args = strict_parse_args(g.parser, request.args)
    timeout = args['timeout'] or 3600
    transaction_pointer = None
    cursor = args['cursor']
    if cursor == '0':
        transaction_pointer = 0
    else:
        # Resolve the public cursor to an internal transaction id.
        query_result = g.db_session.query(Transaction.id).filter(
            Transaction.namespace_id == g.namespace.id,
            Transaction.public_id == cursor).first()
        if query_result is None:
            return err(400, 'Invalid cursor {}'.format(args['cursor']))
        transaction_pointer = query_result[0]

    # Hack to not keep a database session open for the entire (long) request
    # duration.
    g.db_session.close()
    generator = delta_sync.streaming_change_generator(
        g.namespace.id, transaction_pointer=transaction_pointer,
        poll_interval=1, timeout=timeout)
    return Response(generator, mimetype='text/event-stream')
def contact_api():
    """List contacts; `view` may be 'count' or 'ids'."""
    g.parser.add_argument('filter', type=bounded_str, default='',
                          location='args')
    g.parser.add_argument('view', type=bounded_str, location='args')
    params = strict_parse_args(g.parser, request.args)
    view_mode = params['view']

    # Select only what the requested view needs.
    if view_mode == 'count':
        query = g.db_session.query(func.count(Contact.id))
    else:
        entity = Contact.public_id if view_mode == 'ids' else Contact
        query = g.db_session.query(entity)

    query = query.filter(Contact.namespace_id == g.namespace.id)
    if params['filter']:
        query = query.filter(Contact.email_address == params['filter'])
    query = query.order_by(asc(Contact.id))

    if view_mode == 'count':
        return g.encoder.jsonify({"count": query.scalar()})

    rows = query.limit(params['limit']).offset(params['offset']).all()
    if view_mode == 'ids':
        # Rows are one-element tuples; unwrap to bare public ids.
        return g.encoder.jsonify([row[0] for row in rows])
    return g.encoder.jsonify(rows)
def sync_deltas():
    """Return deltas after `cursor`, optionally excluding object types."""
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    g.parser.add_argument('exclude_types', type=valid_delta_object_types,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    cursor = args['cursor']
    if cursor == '0':
        # '0' means "from the very beginning of the transaction log".
        start_pointer = 0
    else:
        try:
            start_pointer, = g.db_session.query(Transaction.id). \
                filter(Transaction.public_id == cursor,
                       Transaction.namespace_id == g.namespace.id).one()
        except NoResultFound:
            raise InputError('Invalid cursor parameter')
    exclude_types = args.get('exclude_types')
    deltas, _ = delta_sync.format_transactions_after_pointer(
        g.namespace.id, start_pointer, g.db_session, args['limit'],
        delta_sync._format_transaction_for_delta_sync, exclude_types)
    response = {
        'cursor_start': cursor,
        'deltas': deltas,
    }
    if deltas:
        response['cursor_end'] = deltas[-1]['cursor']
    else:
        # No changes.
        response['cursor_end'] = cursor
    return g.encoder.jsonify(response)
def sync_deltas():
    """Return deltas after `cursor`, long-polling up to `timeout` seconds.

    Requests routed via /delta/longpoll sleep and re-poll until changes
    appear or the timeout elapses; other requests return immediately.
    """
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    g.parser.add_argument('exclude_types', type=valid_delta_object_types,
                          location='args')
    g.parser.add_argument('include_types', type=valid_delta_object_types,
                          location='args')
    g.parser.add_argument('timeout', type=int,
                          default=LONG_POLL_REQUEST_TIMEOUT, location='args')
    # TODO(emfree): should support `expand` parameter in delta endpoints.
    args = strict_parse_args(g.parser, request.args)
    exclude_types = args.get('exclude_types')
    include_types = args.get('include_types')
    cursor = args['cursor']
    timeout = args['timeout']

    if include_types and exclude_types:
        # BUG FIX: the implicitly-concatenated message was missing the space
        # between "include_types" and "and exclude_types".
        return err(400, "Invalid Request. Cannot specify both include_types "
                        "and exclude_types")

    if cursor == '0':
        start_pointer = 0
    else:
        try:
            start_pointer, = g.db_session.query(Transaction.id). \
                filter(Transaction.public_id == cursor,
                       Transaction.namespace_id == g.namespace.id).one()
        except NoResultFound:
            raise InputError('Invalid cursor parameter')

    # The client wants us to wait until there are changes
    g.db_session.close()  # hack to close the flask session
    poll_interval = 1

    start_time = time.time()
    while time.time() - start_time < timeout:
        with session_scope() as db_session:
            deltas, _ = delta_sync.format_transactions_after_pointer(
                g.namespace, start_pointer, db_session, args['limit'],
                exclude_types, include_types)

        response = {
            'cursor_start': cursor,
            'deltas': deltas,
        }
        if deltas:
            response['cursor_end'] = deltas[-1]['cursor']
            return g.encoder.jsonify(response)
        # No changes. perhaps wait
        elif '/delta/longpoll' in request.url_rule.rule:
            gevent.sleep(poll_interval)
        else:  # Return immediately
            response['cursor_end'] = cursor
            return g.encoder.jsonify(response)

    # BUG FIX: if the timeout elapses (or timeout <= 0 so the loop body never
    # ran), build the response here instead of referencing a possibly-unbound
    # variable from the loop body.
    response = {
        'cursor_start': cursor,
        'deltas': [],
        'cursor_end': cursor,
    }
    return g.encoder.jsonify(response)
def sync_deltas():
    """Return transaction-log entries after the supplied cursor."""
    g.parser.add_argument("cursor", type=valid_public_id, location="args",
                          required=True)
    parsed = strict_parse_args(g.parser, request.args)
    try:
        entries = delta_sync.get_entries_from_public_id(
            g.namespace.id, parsed["cursor"], g.db_session, parsed["limit"])
        return g.encoder.jsonify(entries)
    except ValueError:
        # The cursor does not correspond to a known transaction.
        return err(404, "Invalid cursor parameter")
def event_create_api():
    """Create a calendar event from the JSON request body and schedule the
    provider-side syncback action."""
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']

    data = request.get_json(force=True)
    calendar = get_calendar(data.get('calendar_id'),
                            g.namespace, g.db_session)

    if calendar.read_only:
        raise InputError("Can't create events on read_only calendar.")

    valid_event(data)

    title = data.get('title', '')
    description = data.get('description')
    location = data.get('location')
    when = data.get('when')
    busy = data.get('busy')
    # client libraries can send explicit key = None automagically
    if busy is None:
        busy = True
    participants = data.get('participants')
    if participants is None:
        participants = []
    for p in participants:
        # Participants without an explicit status default to 'noreply'.
        if 'status' not in p:
            p['status'] = 'noreply'
    event = Event(calendar=calendar,
                  namespace=g.namespace,
                  uid=uuid.uuid4().hex,
                  provider_name=g.namespace.account.provider,
                  raw_data='',
                  title=title,
                  description=description,
                  location=location,
                  busy=busy,
                  when=when,
                  read_only=False,
                  is_owner=True,
                  participants=participants,
                  sequence_number=0,
                  source='local')
    g.db_session.add(event)
    # Flush so the event gets an id before the syncback action is queued.
    g.db_session.flush()
    schedule_action('create_event', event, g.namespace.id, g.db_session,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(event)
def event_search_api():
    """Return the namespace's locally-synced events, oldest first."""
    g.parser.add_argument('filter', type=str, default='', location='args')
    params = strict_parse_args(g.parser, request.args)
    query = (g.db_session.query(Event)
             .filter(Event.account_id == g.namespace.account_id,
                     Event.source == 'local')
             .order_by(asc(Event.id)))
    page = query.limit(params['limit']).offset(params['offset']).all()
    return g.encoder.jsonify(page)
def sync_deltas():
    """Return transaction-log entries newer than the given cursor."""
    g.parser.add_argument('cursor', type=valid_public_id, location='args',
                          required=True)
    query_args = strict_parse_args(g.parser, request.args)
    try:
        page = delta_sync.get_entries_from_public_id(
            g.namespace.id, query_args['cursor'], g.db_session,
            query_args['limit'])
        return g.encoder.jsonify(page)
    except ValueError:
        # The cursor does not correspond to a known transaction.
        return err(404, 'Invalid cursor parameter')
def event_create_api():
    """Create a calendar event from the JSON request body and schedule the
    provider-side syncback action."""
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']

    data = request.get_json(force=True)
    calendar = get_calendar(data.get('calendar_id'),
                            g.namespace, g.db_session)

    if calendar.read_only:
        raise InputError("Can't create events on read_only calendar.")

    valid_event(data)

    title = data.get('title', '')
    description = data.get('description')
    location = data.get('location')
    when = data.get('when')
    busy = data.get('busy')
    # client libraries can send explicit key = None automagically
    if busy is None:
        busy = True
    participants = data.get('participants')
    if participants is None:
        participants = []
    for p in participants:
        # Participants without an explicit status default to 'noreply'.
        if 'status' not in p:
            p['status'] = 'noreply'
    event = Event(
        calendar=calendar,
        namespace=g.namespace,
        uid=uuid.uuid4().hex,
        provider_name=g.namespace.account.provider,
        raw_data='',
        title=title,
        description=description,
        location=location,
        busy=busy,
        when=when,
        read_only=False,
        is_owner=True,
        participants=participants,
        sequence_number=0,
        source='local')
    g.db_session.add(event)
    # Flush so the event gets an id before the syncback action is queued.
    g.db_session.flush()
    schedule_action('create_event', event, g.namespace.id, g.db_session,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(event)
def message_query_api():
    """Query messages with a rich filter set.

    `view` controls serialization ('count', 'ids', or 'expanded'); the
    legacy `tag` parameter is folded into `in` for backwards compatibility.
    """
    g.parser.add_argument('subject', type=bounded_str, location='args')
    g.parser.add_argument('to', type=bounded_str, location='args')
    g.parser.add_argument('from', type=bounded_str, location='args')
    g.parser.add_argument('cc', type=bounded_str, location='args')
    g.parser.add_argument('bcc', type=bounded_str, location='args')
    g.parser.add_argument('any_email', type=bounded_str, location='args')
    g.parser.add_argument('started_before', type=timestamp, location='args')
    g.parser.add_argument('started_after', type=timestamp, location='args')
    g.parser.add_argument('last_message_before', type=timestamp,
                          location='args')
    g.parser.add_argument('last_message_after', type=timestamp,
                          location='args')
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('in', type=bounded_str, location='args')
    g.parser.add_argument('thread_id', type=valid_public_id, location='args')
    g.parser.add_argument('unread', type=strict_bool, location='args')
    g.parser.add_argument('starred', type=strict_bool, location='args')
    g.parser.add_argument('view', type=view, location='args')
    # For backwards-compatibility -- remove after deprecating tags API.
    g.parser.add_argument('tag', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    # For backwards-compatibility -- remove after deprecating tags API.
    in_ = args['in'] or args['tag']

    messages = filtering.messages_or_drafts(
        namespace_id=g.namespace.id,
        drafts=False,
        subject=args['subject'],
        thread_public_id=args['thread_id'],
        to_addr=args['to'],
        from_addr=args['from'],
        cc_addr=args['cc'],
        bcc_addr=args['bcc'],
        any_email=args['any_email'],
        started_before=args['started_before'],
        started_after=args['started_after'],
        last_message_before=args['last_message_before'],
        last_message_after=args['last_message_after'],
        filename=args['filename'],
        in_=in_,
        unread=args['unread'],
        starred=args['starred'],
        limit=args['limit'],
        offset=args['offset'],
        view=args['view'],
        db_session=g.db_session)

    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    return encoder.jsonify(messages)
def message_search_api():
    """Full-text message search via the namespace's search index."""
    g.parser.add_argument('q', type=bounded_str, location='args')
    params = strict_parse_args(g.parser, request.args)
    try:
        engine = NamespaceSearchEngine(g.namespace_public_id)
        hits = engine.messages.search(query=params.q,
                                      max_results=params.limit,
                                      offset=params.offset)
    except SearchInterfaceError:
        # The search backend is unavailable or not configured.
        return err(501, 'Search endpoint not available')
    return g.encoder.jsonify(hits)
def thread_api(public_id):
    """Fetch a single thread by public id, honoring the `view` parameter."""
    g.parser.add_argument('view', type=view, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    # Use a new encoder object with the expand parameter set.
    expanded = parsed['view'] == 'expanded'
    encoder = APIEncoder(g.namespace.public_id, expanded)
    try:
        valid_public_id(public_id)
        thread = g.db_session.query(Thread).filter(
            Thread.public_id == public_id,
            Thread.namespace_id == g.namespace.id).one()
        return encoder.jsonify(thread)
    except NoResultFound:
        raise NotFoundError("Couldn't find thread `{0}`".format(public_id))
def files_api():
    """List the namespace's files, filtered by filename or owning message."""
    # TODO perhaps return just if content_disposition == 'attachment'
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('message', type=valid_public_id, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    matching = filtering.files(namespace_id=g.namespace.id,
                               message_public_id=parsed['message'],
                               filename=parsed['filename'],
                               limit=parsed['limit'],
                               offset=parsed['offset'],
                               db_session=g.db_session)
    return g.encoder.jsonify(matching)
def contact_rankings():
    """Return contact scores sorted high-to-low, reusing the cached rankings
    when they are fresh unless `force_recalculate` is set."""
    g.parser.add_argument('force_recalculate', type=strict_bool,
                          location='args')
    g.parser.add_argument('alias', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)
    try:
        dpcache = g.db_session.query(DataProcessingCache).filter(
            DataProcessingCache.namespace_id == g.namespace.id).one()
    except NoResultFound:
        # First request for this namespace: start an empty cache row.
        dpcache = DataProcessingCache(namespace_id=g.namespace.id)

    last_updated = dpcache.contact_rankings_last_updated
    cached_data = dpcache.contact_rankings
    use_cached_data = (not (is_stale(last_updated) or cached_data is None)
                       and args['force_recalculate'] is not True)

    # With folders update, how we get these messages should change (?)
    from_email = g.namespace.email_address

    if not use_cached_data:
        # Full recalculation: consider the entire message history.
        last_updated = None
    messages = filtering.messages_for_contact_scores(
        g.db_session, g.namespace.id, from_email, last_updated)
    if args['alias'] is not None:
        # Also count messages sent from the alias address.
        messages.extend(
            filtering.messages_for_contact_scores(
                g.db_session, g.namespace.id, args['alias'], last_updated
            )
        )

    if use_cached_data:
        # Merge scores for messages since the last update into the cache.
        new_guys = calculate_contact_scores(messages, time_dependent=False)
        result = cached_data
        # result['use_cached_data'] = -1  # debug
        for k, v in new_guys.items():
            if k in result:
                result[k] += v
            else:
                result[k] = v
    else:
        result = calculate_contact_scores(messages)
        # Persist the freshly computed rankings for subsequent requests.
        dpcache.contact_rankings = result
        g.db_session.add(dpcache)
        g.db_session.commit()

    result = sorted(result.items(), key=lambda x: x[1], reverse=True)
    # result.append(('total messages fetched', len(messages)))  # debug
    return g.encoder.jsonify(result)
def event_search_api():
    """Return locally-synced events with participants eagerly loaded."""
    g.parser.add_argument("filter", type=str, default="", location="args")
    params = strict_parse_args(g.parser, request.args)
    base = g.db_session.query(Event).filter(
        Event.account_id == g.namespace.account_id,
        Event.source == "local")
    # Eager-load participants to avoid per-event queries at serialize time.
    page = (base.order_by(asc(Event.id))
                .limit(params["limit"])
                .options(subqueryload(Event.participants_by_email))
                .offset(params["offset"])
                .all())
    return g.encoder.jsonify(page)
def files_api():
    """Return file attachments for this namespace.

    Optional query parameters narrow results to a filename or to the files
    attached to a single message.
    """
    # TODO perhaps return just if content_disposition == 'attachment'
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('message', type=valid_public_id, location='args')
    query_args = strict_parse_args(g.parser, request.args)
    file_records = filtering.files(
        namespace_id=g.namespace.id,
        message_public_id=query_args['message'],
        filename=query_args['filename'],
        limit=query_args['limit'],
        offset=query_args['offset'],
        db_session=g.db_session)
    return g.encoder.jsonify(file_records)
def contact_search_api():
    """Substring-search locally-synced contacts by name or email address."""
    g.parser.add_argument("filter", type=bounded_str, default="",
                          location="args")
    params = strict_parse_args(g.parser, request.args)
    pattern = "%{}%".format(params["filter"])
    term_match = or_(Contact.name.like(pattern),
                     Contact.email_address.like(pattern))
    query = g.db_session.query(Contact) \
        .filter(Contact.account_id == g.namespace.account_id,
                Contact.source == "local",
                term_match) \
        .order_by(asc(Contact.id))
    page = query.limit(params["limit"]).offset(params["offset"]).all()
    return g.encoder.jsonify(page)
def contact_search_api():
    """Substring-search locally-synced contacts by name or email."""
    g.parser.add_argument('filter', type=bounded_str, default='',
                          location='args')
    params = strict_parse_args(g.parser, request.args)
    pattern = '%{}%'.format(params['filter'])
    matches_term = or_(Contact.name.like(pattern),
                       Contact.email_address.like(pattern))
    contacts = (g.db_session.query(Contact)
                .filter(Contact.account_id == g.namespace.account_id,
                        Contact.source == 'local',
                        matches_term)
                .order_by(asc(Contact.id))
                .limit(params['limit'])
                .offset(params['offset'])
                .all())
    return g.encoder.jsonify(contacts)
def enable_sync():
    """Re-enable syncing for the account whose namespace public id is given
    in `account_id`, provided no other host currently holds the sync lease."""
    g.parser.add_argument('account_id', required=True, type=valid_public_id,
                          location='form')
    # NOTE(review): the argument is declared with location='form' but parsed
    # from request.args — confirm which location callers actually use.
    args = strict_parse_args(g.parser, request.args)
    account_id = None
    namespace_public_id = args['account_id']
    # Resolve the shard-local account id through the global session first.
    with global_session_scope() as db_session:
        namespace = db_session.query(Namespace) \
            .filter(Namespace.public_id == namespace_public_id).one()
        account_id = namespace.account.id

    with session_scope(account_id) as db_session:
        try:
            # Row-lock the account so concurrent enable/disable serialize.
            account = db_session.query(Account).with_for_update() \
                .filter(Account.id == account_id).one()
            lease_period = timedelta(minutes=1)
            time_ended = account.sync_status.get('sync_end_time')
            time_now = datetime.utcnow()
            # Only flip the flag if no sync host owns the account, it is not
            # already running, and any previous lease has expired.
            if account.sync_host is None and account.sync_state != 'running' \
                    and (time_ended is None or
                         time_now > time_ended + lease_period):
                account.sync_should_run = True
                if account.provider == 'gmail':
                    # Mark the stored OAuth credentials usable again.
                    creds = account.auth_credentials
                    for c in creds:
                        c.is_valid = True
            db_session.commit()
            resp = json.dumps(account.sync_status,
                              default=json_util.default)
            return make_response((resp, 200, {
                'Content-Type': 'application/json'
            }))
        except NotSupportedError as e:
            resp = simplejson.dumps({
                'message': str(e),
                'type': 'custom_api_error'
            })
            return make_response((resp, 400, {
                'Content-Type': 'application/json'
            }))
def draft_query_api():
    """Query drafts with the same filter set as the message query API."""
    g.parser.add_argument('subject', type=bounded_str, location='args')
    g.parser.add_argument('to', type=bounded_str, location='args')
    g.parser.add_argument('cc', type=bounded_str, location='args')
    g.parser.add_argument('bcc', type=bounded_str, location='args')
    g.parser.add_argument('any_email', type=bounded_str, location='args')
    g.parser.add_argument('started_before', type=timestamp, location='args')
    g.parser.add_argument('started_after', type=timestamp, location='args')
    g.parser.add_argument('last_message_before', type=timestamp,
                          location='args')
    g.parser.add_argument('last_message_after', type=timestamp,
                          location='args')
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('in', type=bounded_str, location='args')
    g.parser.add_argument('thread_id', type=valid_public_id, location='args')
    g.parser.add_argument('unread', type=strict_bool, location='args')
    g.parser.add_argument('starred', type=strict_bool, location='args')
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    # drafts=True selects drafts; from_addr is fixed to None here.
    drafts = filtering.messages_or_drafts(
        namespace_id=g.namespace.id,
        drafts=True,
        subject=args['subject'],
        thread_public_id=args['thread_id'],
        to_addr=args['to'],
        from_addr=None,
        cc_addr=args['cc'],
        bcc_addr=args['bcc'],
        any_email=args['any_email'],
        started_before=args['started_before'],
        started_after=args['started_after'],
        last_message_before=args['last_message_before'],
        last_message_after=args['last_message_after'],
        filename=args['filename'],
        in_=args['in'],
        unread=args['unread'],
        starred=args['starred'],
        limit=args['limit'],
        offset=args['offset'],
        view=args['view'],
        db_session=g.db_session)
    return g.encoder.jsonify(drafts)
def event_update_api(public_id):
    """Update the mutable fields of an event and schedule remote syncback.

    Raises NotFoundError when no event with `public_id` exists in this
    namespace, and InputError for read-only or recurring events (updating
    recurring events is not supported yet).
    """
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']
    valid_public_id(public_id)
    try:
        event = g.db_session.query(Event).filter(
            Event.public_id == public_id,
            Event.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.read_only:
        raise InputError('Cannot update read_only event.')
    # Idiom fix: one isinstance call with a tuple of classes instead of
    # two calls joined with `or`.
    if isinstance(event, (RecurringEvent, RecurringEventOverride)):
        raise InputError('Cannot update a recurring event yet.')
    data = request.get_json(force=True)
    valid_event_update(data, g.namespace, g.db_session)
    # Participants without an explicit status default to 'noreply'.
    if 'participants' in data:
        for p in data['participants']:
            if 'status' not in p:
                p['status'] = 'noreply'
    # Don't update an event if we don't need to.
    if noop_event_update(event, data):
        return g.encoder.jsonify(event)
    for attr in ['title', 'description', 'location', 'when', 'participants']:
        if attr in data:
            setattr(event, attr, data[attr])
    # Every effective update bumps the event's sequence number.
    event.sequence_number += 1
    g.db_session.commit()
    schedule_action('update_event', event, g.namespace.id, g.db_session,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(event)
def event_read_api(public_id):
    """Get all data for an existing event."""
    try:
        valid_public_id(public_id)
    except InputError:
        return err(400, 'Invalid event id {}'.format(public_id))
    g.parser.add_argument('participant_id', type=valid_public_id,
                          location='args')
    g.parser.add_argument('action', type=valid_event_action, location='args')
    g.parser.add_argument('rsvp', type=valid_rsvp, location='args')
    args = strict_parse_args(g.parser, request.args)
    # NOTE(review): reqparse includes every declared argument in `args`
    # (value None when absent), so this membership test is presumably
    # always true; the effective check is the 'rsvp' comparison below.
    if 'action' in args:
        # Participants are able to RSVP to events by clicking on links (e.g.
        # that are emailed to them). Therefore, the RSVP action is invoked
        # via a GET.
        if args['action'] == 'rsvp':
            try:
                participant_id = args.get('participant_id')
                if not participant_id:
                    return err(404,
                               "Must specify a participant_id with rsvp")
                participant = g.db_session.query(Participant).filter_by(
                    public_id=participant_id).one()
                # Record the participant's new RSVP status and persist it.
                participant.status = args['rsvp']
                g.db_session.commit()
                result = events.crud.read(g.namespace, g.db_session,
                                          public_id)
                if result is None:
                    return err(
                        404,
                        "Couldn't find event with id {0}".format(public_id))
                return g.encoder.jsonify(result)
            except NoResultFound:
                return err(
                    404, "Couldn't find participant with id `{0}` ".format(
                        participant_id))
    # Plain read path (no RSVP action requested).
    result = events.crud.read(g.namespace, g.db_session, public_id)
    if result is None:
        return err(404, "Couldn't find event with id {0}".format(public_id))
    return g.encoder.jsonify(result)
def files_api():
    """List file attachments matching the query-string filters."""
    for arg_name, arg_type in (('filename', bounded_str),
                               ('message_id', valid_public_id),
                               ('content_type', bounded_str),
                               ('view', view)):
        g.parser.add_argument(arg_name, type=arg_type, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    matching_files = filtering.files(namespace_id=g.namespace.id,
                                     message_public_id=parsed['message_id'],
                                     filename=parsed['filename'],
                                     content_type=parsed['content_type'],
                                     limit=parsed['limit'],
                                     offset=parsed['offset'],
                                     view=parsed['view'],
                                     db_session=g.db_session)
    return g.encoder.jsonify(matching_files)
def message_search_api():
    """Run a full-text search over messages in the current namespace."""
    parsed = strict_parse_args(g.parser, request.args)
    body = request.get_json(force=True)
    search_query = body.get('query')
    validate_search_query(search_query)
    try:
        engine = NamespaceSearchEngine(g.namespace_public_id)
        hits = engine.messages.search(query=search_query,
                                      max_results=parsed.limit,
                                      offset=parsed.offset)
    except SearchEngineError as exc:
        g.log.error('Search error: {0}'.format(exc))
        return err(501, 'Search error')
    return g.encoder.jsonify(hits)
def stream_changes():
    """Stream change events to the client as a text/event-stream response."""
    g.parser.add_argument('timeout', type=float, location='args')
    g.parser.add_argument('cursor', type=valid_public_id, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    # Default to an hour-long stream when no timeout is supplied.
    poll_timeout = parsed['timeout'] or 3600
    cursor = parsed['cursor']
    start_pointer = None
    if cursor is not None:
        # Translate the public cursor into the internal transaction id.
        row = g.db_session.query(Transaction.id).filter(
            Transaction.namespace_id == g.namespace.id,
            Transaction.public_id == cursor).first()
        if row is not None:
            start_pointer = row[0]
    event_stream = streaming_change_generator(
        g.namespace.id, g.namespace.public_id,
        transaction_pointer=start_pointer, poll_interval=1,
        timeout=poll_timeout)
    return Response(event_stream, mimetype='text/event-stream')
def event_read_api(public_id):
    """Get all data for an existing event."""
    try:
        valid_public_id(public_id)
    except InputError:
        return err(400, 'Invalid event id {}'.format(public_id))
    # Register the RSVP-related query arguments from a small table.
    for arg_name, arg_type in (('participant_id', valid_public_id),
                               ('action', valid_event_action),
                               ('rsvp', valid_rsvp)):
        g.parser.add_argument(arg_name, type=arg_type, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    if 'action' in parsed:
        # RSVP is invoked via GET so that participants can respond simply
        # by clicking links emailed to them.
        if parsed['action'] == 'rsvp':
            try:
                participant_id = parsed.get('participant_id')
                if not participant_id:
                    return err(404,
                               "Must specify a participant_id with rsvp")
                participant = g.db_session.query(Participant).filter_by(
                    public_id=participant_id).one()
                participant.status = parsed['rsvp']
                g.db_session.commit()
                result = events.crud.read(g.namespace, g.db_session,
                                          public_id)
                if result is None:
                    return err(404, "Couldn't find event with id {0}"
                               .format(public_id))
                return g.encoder.jsonify(result)
            except NoResultFound:
                return err(404, "Couldn't find participant with id `{0}` "
                           .format(participant_id))
    result = events.crud.read(g.namespace, g.db_session, public_id)
    if result is None:
        return err(404, "Couldn't find event with id {0}".
                   format(public_id))
    return g.encoder.jsonify(result)
def thread_query_api():
    """List threads in the namespace matching the query-string filters."""
    filter_specs = [
        ('subject', bounded_str), ('to', bounded_str), ('from', bounded_str),
        ('cc', bounded_str), ('bcc', bounded_str), ('any_email', bounded_str),
        ('started_before', timestamp), ('started_after', timestamp),
        ('last_message_before', timestamp), ('last_message_after', timestamp),
        ('filename', bounded_str), ('thread_id', valid_public_id),
        ('tag', bounded_str), ('view', view),
    ]
    for arg_name, arg_type in filter_specs:
        g.parser.add_argument(arg_name, type=arg_type, location='args')
    parsed = strict_parse_args(g.parser, request.args)
    threads = filtering.threads(
        namespace_id=g.namespace.id,
        subject=parsed['subject'],
        thread_public_id=parsed['thread_id'],
        to_addr=parsed['to'],
        from_addr=parsed['from'],
        cc_addr=parsed['cc'],
        bcc_addr=parsed['bcc'],
        any_email=parsed['any_email'],
        started_before=parsed['started_before'],
        started_after=parsed['started_after'],
        last_message_before=parsed['last_message_before'],
        last_message_after=parsed['last_message_after'],
        filename=parsed['filename'],
        tag=parsed['tag'],
        limit=parsed['limit'],
        offset=parsed['offset'],
        view=parsed['view'],
        db_session=g.db_session)
    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, parsed['view'] == 'expanded')
    return encoder.jsonify(threads)
def files_api():
    """Return file metadata filtered by the request's query parameters."""
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('message_id', type=valid_public_id, location='args')
    g.parser.add_argument('content_type', type=bounded_str, location='args')
    g.parser.add_argument('view', type=view, location='args')
    query_args = strict_parse_args(g.parser, request.args)
    result_set = filtering.files(
        namespace_id=g.namespace.id,
        message_public_id=query_args['message_id'],
        filename=query_args['filename'],
        content_type=query_args['content_type'],
        limit=query_args['limit'],
        offset=query_args['offset'],
        view=query_args['view'],
        db_session=g.db_session)
    return g.encoder.jsonify(result_set)
def event_delete_api(public_id):
    """Cancel an event and schedule its deletion on the remote backend.

    Raises NotFoundError if the event doesn't exist in this namespace and
    InputError if its calendar is read-only.  Returns a JSON null body.
    """
    g.parser.add_argument('notify_participants', type=strict_bool,
                          location='args')
    args = strict_parse_args(g.parser, request.args)
    notify_participants = args['notify_participants']
    valid_public_id(public_id)
    try:
        event = g.db_session.query(Event).filter_by(
            public_id=public_id, namespace_id=g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find event {0}".format(public_id))
    if event.calendar.read_only:
        raise InputError(
            'Cannot delete event {} from read_only calendar.'.format(
                public_id))
    # Set the local event status to 'cancelled' rather than deleting it,
    # in order to be consistent with how we sync deleted events from the
    # remote, and consequently return them through the events, delta sync
    # APIs
    event.sequence_number += 1
    event.status = 'cancelled'
    g.db_session.commit()
    account = g.namespace.account
    # FIXME @karim: do this in the syncback thread instead.
    # NOTE(review): Gmail is skipped here — presumably the provider sends
    # its own cancellation notices; confirm before relying on this.
    if notify_participants and account.provider != 'gmail':
        ical_file = generate_icalendar_invite(
            event, invite_type='cancel').to_ical()
        send_invite(ical_file, event, account, invite_type='cancel')
    # Queue the remote deletion for the syncback worker.
    schedule_action('delete_event', event, g.namespace.id, g.db_session,
                    event_uid=event.uid, calendar_name=event.calendar.name,
                    calendar_uid=event.calendar.uid,
                    notify_participants=notify_participants)
    return g.encoder.jsonify(None)
def event_read_api(public_id):
    """Get all data for an existing event."""
    # NOTE(review): unlike the other read handlers in this file, an
    # InputError from valid_public_id propagates here instead of being
    # mapped to a 400 response.
    valid_public_id(public_id)
    g.parser.add_argument('participant_id', type=valid_public_id,
                          location='args')
    g.parser.add_argument('action', type=valid_event_action, location='args')
    g.parser.add_argument('rsvp', type=valid_rsvp, location='args')
    args = strict_parse_args(g.parser, request.args)
    # FIXME karim -- re-enable this after landing the participants refactor (T687)
    #if 'action' in args:
    #    # Participants are able to RSVP to events by clicking on links (e.g.
    #    # that are emailed to them). Therefore, the RSVP action is invoked via
    #    # a GET.
    #    if args['action'] == 'rsvp':
    #        try:
    #            participant_id = args.get('participant_id')
    #            if not participant_id:
    #                return err(404, "Must specify a participant_id with rsvp")
    #            participant = g.db_session.query(Participant).filter_by(
    #                public_id=participant_id).one()
    #            participant.status = args['rsvp']
    #            g.db_session.commit()
    #            result = events.crud.read(g.namespace, g.db_session,
    #                                      public_id)
    #            if result is None:
    #                return err(404, "Couldn't find event with id {0}"
    #                           .format(public_id))
    #            return g.encoder.jsonify(result)
    #        except NoResultFound:
    #            return err(404, "Couldn't find participant with id `{0}` "
    #                       .format(participant_id))
    result = events.crud.read(g.namespace, g.db_session, public_id)
    if result is None:
        raise NotFoundError("Couldn't find event id {0}".format(public_id))
    return g.encoder.jsonify(result)
def calendar_search_api():
    """List calendars in the namespace; supports 'count' and 'ids' views.

    'count' returns {"count": N}; 'ids' returns only calendar ids; any
    other view returns full Calendar objects, ordered by id and paginated
    by the standard limit/offset arguments.
    """
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    # Bug fix: the original compared the module-level `view` type-validator
    # function against 'count'/'ids' (always False), so those views never
    # took effect.  Compare the parsed argument value instead.
    requested_view = args['view']
    if requested_view == 'count':
        query = g.db_session.query(func.count(Calendar.id))
    elif requested_view == 'ids':
        query = g.db_session.query(Calendar.id)
    else:
        query = g.db_session.query(Calendar)
    results = query.filter(Calendar.namespace_id == g.namespace.id). \
        order_by(asc(Calendar.id))
    if requested_view == 'count':
        return g.encoder.jsonify({"count": results.one()[0]})
    results = results.limit(args['limit'])
    results = results.offset(args['offset']).all()
    return g.encoder.jsonify(results)