Example 1
def streaming_change_generator(namespace_id, poll_interval, timeout,
                               transaction_pointer, exclude_types=None):
    """
    Poll the transaction log for the given `namespace_id` until `timeout`
    expires, and yield each time new entries are detected.

    Arguments
    ---------
    namespace_id: int
        Id of the namespace for which to check changes.
    poll_interval: float
        How often, in seconds, to check for changes.
    timeout: float
        How many seconds to allow the connection to remain open.
    transaction_pointer: int
        Yield transaction rows starting after the transaction with id equal
        to `transaction_pointer`.
    exclude_types: list, optional
        If given, do not yield deltas for these object types.

    """
    encoder = APIEncoder()
    start_time = time.time()
    while time.time() - start_time < timeout:
        with session_scope() as db_session:
            deltas, new_pointer = format_transactions_after_pointer(
                namespace_id, transaction_pointer, db_session, 100,
                _format_transaction_for_delta_sync, exclude_types)
        if new_pointer is not None and new_pointer != transaction_pointer:
            transaction_pointer = new_pointer
            for delta in deltas:
                yield encoder.cereal(delta) + '\n'
        else:
            gevent.sleep(poll_interval)
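A generator like this is normally handed straight to a streaming HTTP response. Below is a minimal sketch of a consumer, assuming a Flask `app` object; the route path, cursor handling, and mimetype are illustrative assumptions, not the project's actual endpoint.

from flask import Response, g, request

@app.route('/delta/streaming')  # hypothetical route on an existing `app`
def stream_deltas():
    # The client supplies its last-seen transaction id as a cursor.
    pointer = int(request.args.get('cursor', '0'))
    generator = streaming_change_generator(
        g.namespace.id,              # namespace to watch
        poll_interval=1.0,           # check the transaction log every second
        timeout=1800,                # hold the connection open 30 minutes
        transaction_pointer=pointer)
    # Each chunk yielded by the generator is one JSON-encoded delta + '\n'.
    return Response(generator, mimetype='application/json')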
Example 2
def format_output(public_snapshot, include_body):
    # Because we're using a snapshot of the message API representation in the
    # transaction log, we can just return that directly (without the 'body'
    # field if include_body is False).
    encoder = APIEncoder()
    return encoder.cereal({k: v for k, v in public_snapshot.iteritems()
                           if k != 'body' or include_body})
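For illustration, here is how format_output behaves on a made-up snapshot; the field names and values are hypothetical.

# Hypothetical snapshot of a message's API representation.
snapshot = {'id': 'abc123', 'subject': 'Hello', 'body': '<html>...</html>'}

format_output(snapshot, include_body=False)
# -> JSON string for {'id': 'abc123', 'subject': 'Hello'} (no 'body' key)
format_output(snapshot, include_body=True)
# -> JSON string including the 'body' field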
Example 3
def message_read_api(public_id):
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')

    try:
        valid_public_id(public_id)
        message = g.db_session.query(Message).filter(
            Message.public_id == public_id,
            Message.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0} ".format(public_id))

    if request.headers.get('Accept', None) == 'message/rfc822':
        if message.full_body is not None:
            return Response(message.full_body.data,
                            mimetype='message/rfc822')
        else:
            g.log.error("Message without full_body attribute: id='{0}'"
                        .format(message.id))
            raise NotFoundError(
                "Couldn't find raw contents for message `{0}` "
                .format(public_id))

    return encoder.jsonify(message)
Example 4
def ns_all():
    """ Return all namespaces """
    # We do this outside the blueprint to support the case of an empty
    # public_id.  However, this means the before_request isn't run, so we need
    # to make our own session
    with global_session_scope() as db_session:
        parser = reqparse.RequestParser(argument_class=ValidatableArgument)
        parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit,
                            location='args')
        parser.add_argument('offset', default=0, type=int, location='args')
        parser.add_argument('email_address', type=bounded_str, location='args')
        args = strict_parse_args(parser, request.args)

        query = db_session.query(Namespace)
        if args['email_address']:
            query = query.join(Account)
            query = query.filter_by(email_address=args['email_address'])

        query = query.limit(args['limit'])
        if args['offset']:
            query = query.offset(args['offset'])

        namespaces = query.all()
        encoder = APIEncoder(legacy_nsid=request.path.startswith('/n'))
        return encoder.jsonify(namespaces)
Example 5
File: srv.py Project: ghotiv/inbox
def ns_all():
    """ Return all namespaces """
    # We do this outside the blueprint to support the case of an empty
    # public_id.  However, this means the before_request isn't run, so we need
    # to make our own session
    with session_scope() as db_session:
        namespaces = db_session.query(Namespace).all()
        encoder = APIEncoder()
        return encoder.jsonify(namespaces)
Example 6
def streaming_change_generator(
    namespace,
    poll_interval,
    timeout,
    transaction_pointer,
    exclude_types=None,
    include_types=None,
    exclude_folders=True,
    exclude_metadata=True,
    exclude_account=True,
    expand=False,
    is_n1=False,
):
    """
    Poll the transaction log for the given `namespace` until `timeout`
    expires, and yield each time new entries are detected.

    Arguments
    ---------
    namespace: Namespace
        The namespace for which to check changes.
    poll_interval: float
        How often, in seconds, to check for changes.
    timeout: float
        How many seconds to allow the connection to remain open.
    transaction_pointer: int
        Yield transaction rows starting after the transaction with id equal
        to `transaction_pointer`.

    """
    encoder = APIEncoder(is_n1=is_n1)
    start_time = time.time()
    while time.time() - start_time < timeout:
        with session_scope(namespace.id) as db_session:
            deltas, new_pointer = format_transactions_after_pointer(
                namespace,
                transaction_pointer,
                db_session,
                100,
                exclude_types,
                include_types,
                exclude_folders,
                exclude_metadata,
                exclude_account,
                expand=expand,
                is_n1=is_n1,
            )

        if new_pointer is not None and new_pointer != transaction_pointer:
            transaction_pointer = new_pointer
            for delta in deltas:
                yield encoder.cereal(delta) + "\n"
        else:
            yield "\n"
            gevent.sleep(poll_interval)
Example 7
        def g():
            encoder = APIEncoder()

            with session_scope(self.account_id) as db_session:
                for imap_uids in self._search(db_session, search_query):
                    query = db_session.query(Message) \
                        .join(ImapUid) \
                        .filter(ImapUid.account_id == self.account_id,
                                ImapUid.msg_uid.in_(imap_uids))\
                        .order_by(desc(Message.received_date))

                    yield encoder.cereal(query.all()) + '\n'
Example 8
def message_query_api():
    g.parser.add_argument('subject', type=bounded_str, location='args')
    g.parser.add_argument('to', type=bounded_str, location='args')
    g.parser.add_argument('from', type=bounded_str, location='args')
    g.parser.add_argument('cc', type=bounded_str, location='args')
    g.parser.add_argument('bcc', type=bounded_str, location='args')
    g.parser.add_argument('any_email', type=bounded_str, location='args')
    g.parser.add_argument('started_before', type=timestamp, location='args')
    g.parser.add_argument('started_after', type=timestamp, location='args')
    g.parser.add_argument('last_message_before', type=timestamp,
                          location='args')
    g.parser.add_argument('last_message_after', type=timestamp,
                          location='args')
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('in', type=bounded_str, location='args')
    g.parser.add_argument('thread_id', type=valid_public_id, location='args')
    g.parser.add_argument('unread', type=strict_bool, location='args')
    g.parser.add_argument('starred', type=strict_bool, location='args')
    g.parser.add_argument('view', type=view, location='args')

    # For backwards-compatibility -- remove after deprecating tags API.
    g.parser.add_argument('tag', type=bounded_str, location='args')
    args = strict_parse_args(g.parser, request.args)

    # For backwards-compatibility -- remove after deprecating tags API.
    in_ = args['in'] or args['tag']

    messages = filtering.messages_or_drafts(
        namespace_id=g.namespace.id,
        drafts=False,
        subject=args['subject'],
        thread_public_id=args['thread_id'],
        to_addr=args['to'],
        from_addr=args['from'],
        cc_addr=args['cc'],
        bcc_addr=args['bcc'],
        any_email=args['any_email'],
        started_before=args['started_before'],
        started_after=args['started_after'],
        last_message_before=args['last_message_before'],
        last_message_after=args['last_message_after'],
        filename=args['filename'],
        in_=in_,
        unread=args['unread'],
        starred=args['starred'],
        limit=args['limit'],
        offset=args['offset'],
        view=args['view'],
        db_session=g.db_session)

    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    return encoder.jsonify(messages)
Example 9
def thread_api(public_id):
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    try:
        valid_public_id(public_id)
        thread = g.db_session.query(Thread).filter(
            Thread.public_id == public_id,
            Thread.namespace_id == g.namespace.id).one()
        return encoder.jsonify(thread)
    except NoResultFound:
        raise NotFoundError("Couldn't find thread `{0}`".format(public_id))
Example 10
        def g():
            encoder = APIEncoder()

            with session_scope(self.account_id) as db_session:
                for imap_uids in self._search(db_session, search_query):
                    query = db_session.query(Thread) \
                        .join(Message) \
                        .join(ImapUid) \
                        .filter(ImapUid.account_id == self.account_id,
                                ImapUid.msg_uid.in_(imap_uids),
                                Thread.id == Message.thread_id)\
                        .order_by(desc(Message.received_date))

                    yield encoder.cereal(query.all()) + '\n'
Example 11
        def g():
            encoder = APIEncoder()

            with session_scope(self.account_id) as db_session:
                try:
                    for imap_uids in self._search(db_session, search_query):
                        query = (
                            db_session.query(Message).join(ImapUid).filter(
                                ImapUid.account_id == self.account_id,
                                ImapUid.msg_uid.in_(imap_uids),
                            ).order_by(desc(Message.received_date)))
                        yield encoder.cereal(query.all()) + "\n"
                except Exception as e:
                    self.log.error("Error while streaming messages", error=e)
Example 12
def delete_account(namespace_public_id):
    """ Mark an existing account for deletion. """
    try:
        with global_session_scope() as db_session:
            namespace = (db_session.query(Namespace).filter(
                Namespace.public_id == namespace_public_id).one())
            account = namespace.account
            account.mark_for_deletion()
            db_session.commit()
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find account `{0}` ".format(namespace_public_id))

    encoder = APIEncoder()
    return encoder.jsonify({})
Example 13
def global_deltas():
    """
    Return the namespaces with recent transactions.

    Also returns `txnid_start` and `txnid_end`, which can be fed back in as the
    optional `txnid` parameter. `txnid` acts as a cursor, only returning
    namespaces with transactions newer than the given `txnid`.
    """
    from inbox.ignition import redis_txn
    from inbox.models.transaction import TXN_REDIS_KEY

    txnid = request.args.get("txnid", "0")

    try:
        start_pointer = int(txnid)
    except ValueError:
        raise InputError("Invalid cursor parameter")

    txns = redis_txn.zrangebyscore(
        TXN_REDIS_KEY,
        "({}".format(start_pointer),  # don't include start pointer
        "+inf",
        withscores=True,
        score_cast_func=int,
    )
    response = {
        "txnid_start": start_pointer,
        "txnid_end": max([t[1] for t in txns] or [start_pointer]),
        "deltas": [t[0] for t in txns],
    }
    return APIEncoder().jsonify(response)
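Because `txnid` acts as a cursor, a caller can poll this endpoint in a loop, feeding each response's `txnid_end` back in as the next request's `txnid`. A minimal client sketch follows; the URL, the use of `requests`, and `handle_namespace` are illustrative assumptions.

import time

import requests

cursor = 0
while True:
    payload = requests.get('https://api.example.com/deltas',
                           params={'txnid': cursor}).json()
    for namespace_public_id in payload['deltas']:
        handle_namespace(namespace_public_id)  # hypothetical change handler
    cursor = payload['txnid_end']  # advance past everything already seen
    time.sleep(30)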
Example 14
def send_draft_copy(account, draft, custom_body, recipient):
    """
    Sends a copy of this draft to the recipient, using the specified body
    rather than the one on the draft object, and not marking the draft as
    sent. Used within multi-send to send messages to individual recipients
    with customized bodies.
    """
    # Create the response to send on success by serializing the draft. After
    # serializing, we substitute the new custom body (which the recipient
    # will get and which should be returned in this response) for the
    # existing body (which we still need to retain in the draft for when
    # it's saved to the sent folder).
    response_on_success = encode(draft)
    response_on_success["body"] = custom_body
    response_on_success = APIEncoder().jsonify(response_on_success)

    # Now send the draft to the specified recipient. The send_custom method
    # will write the custom body into the message in place of the one in the
    # draft.
    try:
        sendmail_client = get_sendmail_client(account)
        sendmail_client.send_custom(draft, custom_body, [recipient])
    except SendMailException as exc:
        kwargs = {}
        if exc.failures:
            kwargs["failures"] = exc.failures
        if exc.server_error:
            kwargs["server_error"] = exc.server_error
        return err(exc.http_code, exc.args[0], **kwargs)

    return response_on_success
Example 15
def send_draft_copy(account, draft, custom_body, recipient):
    """
    Sends a copy of this draft to the recipient, using the specified body
    rather than the one on the draft object, and not marking the draft as
    sent. Used within multi-send to send messages to individual recipients
    with customized bodies.
    """
    # Create the response to send on success by serializing the draft. Before
    # serializing, we temporarily swap in the new custom body (which the
    # recipient will get and which should be returned in this response) in
    # place of the existing body (which we still need to retain in the draft
    # for when it's saved to the sent folder). We restore the existing body
    # once serialization is done.
    original_body = draft.body
    draft.body = custom_body
    response_on_success = APIEncoder().jsonify(draft)
    draft.body = original_body

    # Now send the draft to the specified recipient. The send_custom method
    # will write the custom body into the message in place of the one in the
    # draft.
    try:
        sendmail_client = get_sendmail_client(account)
        sendmail_client.send_custom(draft, custom_body, [recipient])
    except SendMailException as exc:
        kwargs = {}
        if exc.failures:
            kwargs['failures'] = exc.failures
        if exc.server_error:
            kwargs['server_error'] = exc.server_error
        return err(exc.http_code, exc.message, **kwargs)

    return response_on_success
Example 16
def streaming_change_generator(namespace,
                               poll_interval,
                               timeout,
                               transaction_pointer,
                               exclude_types=None,
                               include_types=None,
                               exclude_folders=True,
                               legacy_nsid=False,
                               expand=False):
    """
    Poll the transaction log for the given `namespace` until `timeout`
    expires, and yield each time new entries are detected.

    Arguments
    ---------
    namespace: Namespace
        The namespace for which to check changes.
    poll_interval: float
        How often, in seconds, to check for changes.
    timeout: float
        How many seconds to allow the connection to remain open.
    transaction_pointer: int
        Yield transaction rows starting after the transaction with id equal
        to `transaction_pointer`.

    """
    encoder = APIEncoder(legacy_nsid=legacy_nsid)
    start_time = time.time()
    while time.time() - start_time < timeout:
        with session_scope(namespace.id) as db_session:
            deltas, new_pointer = format_transactions_after_pointer(
                namespace,
                transaction_pointer,
                db_session,
                100,
                exclude_types,
                include_types,
                exclude_folders,
                legacy_nsid=legacy_nsid,
                expand=expand)

        if new_pointer is not None and new_pointer != transaction_pointer:
            transaction_pointer = new_pointer
            for delta in deltas:
                yield encoder.cereal(delta) + '\n'
        else:
            yield '\n'
            gevent.sleep(poll_interval)
Example 17
def thread_query_api():
    g.parser.add_argument('subject', type=bounded_str, location='args')
    g.parser.add_argument('to', type=bounded_str, location='args')
    g.parser.add_argument('from', type=bounded_str, location='args')
    g.parser.add_argument('cc', type=bounded_str, location='args')
    g.parser.add_argument('bcc', type=bounded_str, location='args')
    g.parser.add_argument('any_email', type=bounded_str, location='args')
    g.parser.add_argument('started_before', type=timestamp, location='args')
    g.parser.add_argument('started_after', type=timestamp, location='args')
    g.parser.add_argument('last_message_before',
                          type=timestamp,
                          location='args')
    g.parser.add_argument('last_message_after',
                          type=timestamp,
                          location='args')
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('thread_id', type=valid_public_id, location='args')
    g.parser.add_argument('tag', type=bounded_str, location='args')
    g.parser.add_argument('view', type=view, location='args')

    args = strict_parse_args(g.parser, request.args)

    threads = filtering.threads(
        namespace_id=g.namespace.id,
        subject=args['subject'],
        thread_public_id=args['thread_id'],
        to_addr=args['to'],
        from_addr=args['from'],
        cc_addr=args['cc'],
        bcc_addr=args['bcc'],
        any_email=args['any_email'],
        started_before=args['started_before'],
        started_after=args['started_after'],
        last_message_before=args['last_message_before'],
        last_message_after=args['last_message_after'],
        filename=args['filename'],
        tag=args['tag'],
        limit=args['limit'],
        offset=args['offset'],
        view=args['view'],
        db_session=g.db_session)

    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    return encoder.jsonify(threads)
Example 18
def thread_query_api():
    g.parser.add_argument('subject', type=bounded_str, location='args')
    g.parser.add_argument('to', type=bounded_str, location='args')
    g.parser.add_argument('from', type=bounded_str, location='args')
    g.parser.add_argument('cc', type=bounded_str, location='args')
    g.parser.add_argument('bcc', type=bounded_str, location='args')
    g.parser.add_argument('any_email', type=bounded_str, location='args')
    g.parser.add_argument('started_before', type=timestamp, location='args')
    g.parser.add_argument('started_after', type=timestamp, location='args')
    g.parser.add_argument('last_message_before', type=timestamp,
                          location='args')
    g.parser.add_argument('last_message_after', type=timestamp,
                          location='args')
    g.parser.add_argument('filename', type=bounded_str, location='args')
    g.parser.add_argument('thread_id', type=valid_public_id, location='args')
    g.parser.add_argument('tag', type=bounded_str, location='args')
    g.parser.add_argument('view', type=view, location='args')

    args = strict_parse_args(g.parser, request.args)

    threads = filtering.threads(
        namespace_id=g.namespace.id,
        subject=args['subject'],
        thread_public_id=args['thread_id'],
        to_addr=args['to'],
        from_addr=args['from'],
        cc_addr=args['cc'],
        bcc_addr=args['bcc'],
        any_email=args['any_email'],
        started_before=args['started_before'],
        started_after=args['started_after'],
        last_message_before=args['last_message_before'],
        last_message_after=args['last_message_after'],
        filename=args['filename'],
        tag=args['tag'],
        limit=args['limit'],
        offset=args['offset'],
        view=args['view'],
        db_session=g.db_session)

    # Use a new encoder object with the expand parameter set.
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    return encoder.jsonify(threads)
Example 19
    def __init__(self, poll_interval=1, chunk_size=22):
        self.workers = defaultdict(set)
        self.log = get_logger()
        self.poll_interval = poll_interval
        self.chunk_size = chunk_size
        self.minimum_id = -1
        self.poller = None
        self.polling = False
        self.encoder = APIEncoder()
        self._on_startup()
Example 20
    def __init__(self, poll_interval=30, chunk_size=100):
        self.poll_interval = poll_interval
        self.chunk_size = chunk_size

        self.encoder = APIEncoder()

        self.transaction_pointer = None

        self.log = log.new(component='search-index')
        Greenlet.__init__(self)
Example 21
def message_read_api(public_id):
    g.parser.add_argument('view', type=view, location='args')
    args = strict_parse_args(g.parser, request.args)
    encoder = APIEncoder(g.namespace.public_id, args['view'] == 'expanded')
    try:
        valid_public_id(public_id)
        message = g.db_session.query(Message).filter(
            Message.public_id == public_id,
            Message.namespace_id == g.namespace.id).one()
    except NoResultFound:
        raise NotFoundError("Couldn't find message {0} ".format(public_id))
    if request.headers.get('Accept', None) == 'message/rfc822':
        if message.full_body is not None:
            return Response(message.full_body.data,
                            mimetype='message/rfc822')
        else:
            g.log.error("Message without full_body attribute: id='{0}'"
                        .format(message.id))
            raise NotFoundError(
                "Couldn't find raw contents for message `{0}` "
                .format(public_id))
    return encoder.jsonify(message)
Example 22
def create_account():
    """ Create a new account """
    data = request.get_json(force=True)

    if data["type"] == "generic":
        auth_handler = GenericAuthHandler()
        account_data = _get_account_data_for_generic_account(data)
    elif data["type"] == "gmail":
        auth_handler = GoogleAuthHandler()
        account_data = _get_account_data_for_google_account(data)
    elif data["type"] == "microsoft":
        auth_handler = MicrosoftAuthHandler()
        account_data = _get_account_data_for_microsoft_account(data)
    else:
        raise ValueError("Account type not supported.")

    with global_session_scope() as db_session:
        account = auth_handler.create_account(account_data)
        db_session.add(account)
        db_session.commit()

        encoder = APIEncoder()
        return encoder.jsonify(account.namespace)
Example 23
def ns_all():
    """ Return all namespaces """
    # We do this outside the blueprint to support the case of an empty
    # public_id.  However, this means the before_request isn't run, so we need
    # to make our own session
    with global_session_scope() as db_session:
        parser = reqparse.RequestParser(argument_class=ValidatableArgument)
        parser.add_argument("limit", default=DEFAULT_LIMIT, type=limit, location="args")
        parser.add_argument("offset", default=0, type=int, location="args")
        parser.add_argument("email_address", type=bounded_str, location="args")
        args = strict_parse_args(parser, request.args)

        query = db_session.query(Namespace)
        if args["email_address"]:
            query = query.join(Account)
            query = query.filter_by(email_address=args["email_address"])

        query = query.limit(args["limit"])
        if args["offset"]:
            query = query.offset(args["offset"])

        namespaces = query.all()
        encoder = APIEncoder()
        return encoder.jsonify(namespaces)
Example 24
def confim_oauth_user():
    response = {}
    encoder = APIEncoder()
    data = request.get_json(force=True)
    email_address = data.get('email_address')
    token = data.get('token')
    target = data.get('target', 0)

    if not email_address:
        response['error'] = 'Missing key - "email_address"!'
        return encoder.jsonify(response)
    if not token:
        response['error'] = 'Missing key - "token"!'
        return encoder.jsonify(response)

    shard_id = target << 48

    with session_scope(shard_id) as db_session:
        account = db_session.query(Account).filter_by(
            email_address=email_address).first()
        if account is None:
            response['error'] = 'Don\'t have this account!'
            return encoder.jsonify(response)
        auth_info = {}
        provider = provider_from_address(email_address)
        auth_info['provider'] = provider
        auth_handler = handler_from_provider(provider)
        try:
            auth_response = auth_handler._get_authenticated_user(token)
            auth_response['contacts'] = True
            auth_response['events'] = True
            auth_info.update(auth_response)
        except OAuthError:
            response['error'] = "Invalid authorization code, try again..."
            return encoder.jsonify(response)
        account = auth_handler.update_account(account, auth_info)
        try:
            if auth_handler.verify_account(account):
                db_session.add(account)
                db_session.commit()
                response['data'] = 'OK. Authenticated account for {}'.format(
                    email_address)
        except NotSupportedError as e:
            response['error'] = str(e)
            return encoder.jsonify(response)
    return encoder.jsonify(response)
Example 25
def send_draft(account, draft, db_session, schedule_remote_delete):
    """Send the draft with id = `draft_id`."""
    try:
        sendmail_client = get_sendmail_client(account)
        sendmail_client.send(draft)
    except SendMailException as exc:
        kwargs = {}
        if exc.failures:
            kwargs['failures'] = exc.failures
        if exc.server_error:
            kwargs['server_error'] = exc.server_error
        return err(exc.http_code, exc.message, **kwargs)

    # We want to return success to the API client if the message was sent, even
    # if there are errors in post-send updating. Otherwise the client may think
    # the send has failed. So wrap the rest of the work in try/except.
    try:
        if account.provider == 'icloud':
            # Special case because iCloud doesn't save sent messages.
            schedule_action('save_sent_email', draft, draft.namespace.id,
                            db_session)
        if schedule_remote_delete:
            schedule_action('delete_draft',
                            draft,
                            draft.namespace.id,
                            db_session,
                            inbox_uid=draft.inbox_uid,
                            message_id_header=draft.message_id_header)

        # Update message
        draft.is_sent = True
        draft.is_draft = False
        draft.received_date = datetime.utcnow()

        # Update thread
        sent_tag = account.namespace.tags['sent']
        draft_tag = account.namespace.tags['drafts']
        thread = draft.thread
        thread.apply_tag(sent_tag)
        # Remove the drafts tag from the thread if there are no more drafts.
        if not draft.thread.drafts:
            thread.remove_tag(draft_tag)
        thread.update_from_message(None, draft)
    except Exception as e:
        log.error('Error in post-send processing', error=e, exc_info=True)

    return APIEncoder().jsonify(draft)
Example 26
def send_raw_mime(account, db_session, msg):
    # Prepare a response so that we can immediately return it on success, and
    # not potentially have queries fail after sending.
    response_on_success = APIEncoder().jsonify(msg)
    try:
        sendmail_client = get_sendmail_client(account)
        sendmail_client.send_raw(msg)

    except SendMailException as exc:
        kwargs = {}
        if exc.failures:
            kwargs['failures'] = exc.failures
        if exc.server_error:
            kwargs['server_error'] = exc.server_error
        return err(exc.http_code, exc.message, **kwargs)

    return response_on_success
Example 27
def start():
    g.db_session = InboxSession(engine)

    g.log = get_logger()
    try:
        g.namespace = g.db_session.query(Namespace) \
            .filter(Namespace.public_id == g.namespace_public_id).one()

        g.encoder = APIEncoder(g.namespace.public_id)
    except NoResultFound:
        return err(404, "Couldn't find namespace with id `{0}` ".format(
            g.namespace_public_id))

    g.parser = reqparse.RequestParser(argument_class=ValidatableArgument)
    g.parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit,
                          location='args')
    g.parser.add_argument('offset', default=0, type=int, location='args')
Example 28
def send_draft(account, draft, db_session):
    """Send the draft with id = `draft_id`."""
    # Update message state and prepare a response so that we can immediately
    # return it on success, and not potentially have queries fail after
    # sending. Note that changes are flushed here, but committed in the API's
    # after_request handler only on 200 OK (hence only if sending succeeds).
    update_draft_on_send(account, draft, db_session)
    response_on_success = APIEncoder().jsonify(draft)
    try:
        sendmail_client = get_sendmail_client(account)
        sendmail_client.send(draft)
    except SendMailException as exc:
        kwargs = {}
        if exc.failures:
            kwargs['failures'] = exc.failures
        if exc.server_error:
            kwargs['server_error'] = exc.server_error
        return err(exc.http_code, exc.message, **kwargs)

    return response_on_success
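The comment above depends on the API committing the flushed changes only when the request succeeds. A minimal sketch of that commit-on-success pattern, assuming a Flask `after_request` hook; this is an assumption for illustration, not the project's actual handler.

@app.after_request
def commit_on_success(response):
    # Persist the flushed draft updates only if the request succeeded;
    # otherwise roll them back so a failed send leaves the draft untouched.
    if response.status_code == 200:
        g.db_session.commit()
    else:
        g.db_session.rollback()
    return response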
Example 29
class WebhookService(object):
    """Asynchronously consumes the transaction log and executes registered
    webhooks."""
    def __init__(self, poll_interval=1, chunk_size=22):
        self.workers = defaultdict(set)
        self.log = get_logger()
        self.poll_interval = poll_interval
        self.chunk_size = chunk_size
        self.minimum_id = -1
        self.poller = None
        self.polling = False
        self.encoder = APIEncoder()
        self._on_startup()

    @property
    def all_active_workers(self):
        worker_sets = self.workers.values()
        if not worker_sets:
            return set()
        return set.union(*worker_sets)

    def register_hook(self, namespace_id, parameters):
        """Register a new webhook.

        Parameters
        ----------
        namespace_id: int
            ID for the namespace to apply the webhook on.
        parameters: dictionary
            Dictionary of the hook parameters.
        """

        # TODO(emfree) do more meaningful parameter validation here
        # (or in the calling code in the API)

        if urlparse.urlparse(parameters.get('callback_url')).scheme != 'https':
            raise ValueError('callback_url MUST be https!')

        with session_scope() as db_session:
            lens = Lens(
                namespace_id=namespace_id,
                subject=parameters.get('subject'),
                thread_public_id=parameters.get('thread'),
                to_addr=parameters.get('to'),
                from_addr=parameters.get('from'),
                cc_addr=parameters.get('cc'),
                bcc_addr=parameters.get('bcc'),
                any_email=parameters.get('any_email'),
                started_before=parameters.get('started_before'),
                started_after=parameters.get('started_after'),
                last_message_before=parameters.get('last_message_before'),
                last_message_after=parameters.get('last_message_after'),
                filename=parameters.get('filename'))

            hook = Webhook(
                namespace_id=namespace_id,
                lens=lens,
                callback_url=parameters.get('callback_url'),
                failure_notify_url=parameters.get('failure_notify_url'),
                include_body=parameters.get('include_body', False),
                active=parameters.get('active', True),
                min_processed_id=self.minimum_id)

            db_session.add(hook)
            db_session.add(lens)
            db_session.commit()
            if hook.active:
                self._start_hook(hook, db_session)
            return self.encoder.cereal(hook, pretty=True)


    def start_hook(self, hook_public_id):
        with session_scope() as db_session:
            hook = db_session.query(Webhook). \
                filter_by(public_id=hook_public_id).one()
            self._start_hook(hook, db_session)

    def _start_hook(self, hook, db_session):
        self.log.info('Starting hook with public id {}'.format(hook.public_id))
        if any(worker.id == hook.id for worker in self.all_active_workers):
            # Hook already has a worker
            return 'OK hook already running'
        hook.min_processed_id = self.minimum_id
        hook.active = True
        namespace_id = hook.namespace_id
        worker = WebhookWorker(hook)
        self.workers[namespace_id].add(worker)
        if not worker.started:
            worker.start()
        db_session.commit()
        if not self.polling:
            self._start_polling()
        return 'OK hook started'

    def stop_hook(self, hook_public_id):
        self.log.info('Stopping hook with public id {}'.format(hook_public_id))
        with session_scope() as db_session:
            hook = db_session.query(Webhook). \
                filter_by(public_id=hook_public_id).one()
            hook.active = False
            db_session.commit()
            for worker in self.workers[hook.namespace_id]:
                if worker.public_id == hook_public_id:
                    self.workers[hook.namespace_id].remove(worker)
                    worker.kill()
                    break

        if not set.union(*self.workers.values()):
            # Kill the transaction log poller if there are no active hooks.
            self._stop_polling()
        return 'OK hook stopped'

    def _on_startup(self):
        self._load_hooks()
        for worker in itertools.chain(*self.workers.values()):
            if not worker.started:
                worker.start()
        # Needed for workers to actually start up.
        gevent.sleep(0)
        if self.all_active_workers:
            self._start_polling()

    def _start_polling(self):
        self.log.info('Start polling')
        self.minimum_id = min(hook.min_processed_id for hook in
                              self.all_active_workers)
        self.poller = gevent.spawn(self._poll)
        self.polling = True

    def _stop_polling(self):
        self.log.info('Stop polling')
        self.poller.kill()
        self.polling = False

    def _poll(self):
        """Poll the transaction log forever and publish events. Only runs when
        there are actually active webhooks."""
        while True:
            self._process_log()
            gevent.sleep(self.poll_interval)

    def _process_log(self):
        """Scan the transaction log `self.chunk_size` entries at a time,
        publishing matching events to registered hooks."""
        with session_scope() as db_session:
            self.log.info('Scanning tx log from id: {}'.
                          format(self.minimum_id))
            unprocessed_txn_count = db_session.query(
                func.count(Transaction.id)).filter(
                Transaction.table_name == 'message',
                Transaction.id > self.minimum_id).scalar()
            if unprocessed_txn_count:
                self.log.debug('Total of {0} transactions to process'.
                               format(unprocessed_txn_count))

            max_tx_id, = db_session.query(func.max(Transaction.id)).one()
            if max_tx_id is None:
                max_tx_id = 0
            for pointer in range(self.minimum_id, max_tx_id, self.chunk_size):
                # TODO(emfree) add the right index to make this query more
                # performant.
                for transaction in db_session.query(Transaction). \
                        filter(Transaction.table_name == 'message',
                               Transaction.command == 'insert',
                               Transaction.id > pointer,
                               Transaction.id <= pointer + self.chunk_size). \
                        order_by(asc(Transaction.id)):
                    namespace_id = transaction.namespace_id
                    for worker in self.workers[namespace_id]:
                        if worker.match(transaction):
                            worker.enqueue(EventData(transaction))
                    self.minimum_id = transaction.id
            self.log.debug('Processed tx. setting min id to {0}'.
                           format(self.minimum_id))

    def _load_hooks(self):
        """Load stored hook parameters from the database. Run once on
        startup."""
        with session_scope() as db_session:
            all_hooks = db_session.query(Webhook).filter_by(active=True).all()
            for hook in all_hooks:
                namespace_id = hook.namespace_id
                self.workers[namespace_id].add(WebhookWorker(hook))
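Taken together, the service is driven by a handful of calls. A hypothetical usage sketch; the namespace id, callback URL, and hook public id are made up.

service = WebhookService(poll_interval=1, chunk_size=22)
serialized_hook = service.register_hook(
    namespace_id=1,
    parameters={'callback_url': 'https://example.com/hook',
                'subject': 'invoice'})
# Later, pause and resume delivery by public id:
service.stop_hook('hook-public-id')
service.start_hook('hook-public-id')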
Example 30
def index():
    with global_session_scope() as db_session:
        if "namespace_id" in request.args:
            try:
                namespace = (
                    db_session.query(Namespace)
                    .filter(Namespace.public_id == request.args["namespace_id"])
                    .one()
                )
            except NoResultFound:
                return APIEncoder().jsonify([])
        else:
            namespace = None

        accounts = db_session.query(ImapAccount).with_polymorphic([GenericAccount])

        if namespace:
            accounts = accounts.filter(Account.namespace == namespace)
        else:
            # Get all account IDs that aren't deleted
            account_ids = [
                result[0]
                for result in db_session.query(ImapAccount.id, ImapAccount._sync_status)
                if result[1].get("sync_disabled_reason") != "account deleted"
            ]

            # This is faster than fetching all accounts.
            accounts = accounts.filter(ImapAccount.id.in_(account_ids))

        accounts = list(accounts)

        folder_data = _get_folder_data(db_session, accounts)
        calendar_data = _get_calendar_data(db_session, namespace)
        heartbeat = get_ping_status(account_ids=[acc.id for acc in accounts])

        data = []

        for account in accounts:
            if account.id in heartbeat:
                account_heartbeat = heartbeat[account.id]
                account_folder_data = folder_data[account.id]
                account_calendar_data = calendar_data[account.id]

                events_alive = False

                for folder_status in account_heartbeat.folders:
                    folder_status_id = int(folder_status.id)
                    if folder_status_id in account_folder_data:
                        account_folder_data[folder_status_id].update(
                            {
                                "alive": folder_status.alive,
                                "heartbeat_at": folder_status.timestamp,
                            }
                        )
                    elif folder_status_id == EVENT_SYNC_FOLDER_ID:
                        events_alive = folder_status.alive

                email_alive = all(f["alive"] for f in account_folder_data.values())

                alive = True
                if account.sync_email and not email_alive:
                    alive = False
                if account.sync_events and not events_alive:
                    alive = False

                email_initial_sync = any(
                    f["state"] == "initial" for f in account_folder_data.values()
                )
                events_initial_sync = any(
                    c["state"] == "initial" for c in account_calendar_data
                )
                initial_sync = email_initial_sync or events_initial_sync

                total_uids = sum(
                    f["remote_uid_count"] or 0 for f in account_folder_data.values()
                )
                remaining_uids = sum(
                    f["download_uid_count"] or 0 for f in account_folder_data.values()
                )
                if total_uids:
                    progress = 100.0 / total_uids * (total_uids - remaining_uids)
                else:
                    progress = None
            else:
                alive = False
                email_alive = False
                events_alive = False
                email_initial_sync = None
                events_initial_sync = None
                initial_sync = None
                progress = None

            sync_status = account.sync_status
            is_running = sync_status["state"] == "running"
            if (
                is_running
                and not sync_status.get("sync_start_time")
                and not sync_status.get("sync_error")
            ):
                sync_status_str = "starting"
            elif is_running and alive:
                if initial_sync:
                    sync_status_str = "initial"
                else:
                    sync_status_str = "running"
            elif is_running:
                # Nylas is syncing, but not all heartbeats are reporting.
                sync_status_str = "delayed"
            else:
                # Nylas is no longer syncing this account.
                sync_status_str = "dead"

            data.append(
                {
                    "account_private_id": account.id,
                    "namespace_private_id": account.namespace.id,
                    "account_id": account.public_id,
                    "namespace_id": account.namespace.public_id,
                    "events_alive": events_alive,
                    "email_alive": email_alive,
                    "alive": alive,
                    "email_initial_sync": email_initial_sync,
                    "events_initial_sync": events_initial_sync,
                    "initial_sync": initial_sync,
                    "provider_name": account.provider,
                    "email_address": account.email_address,
                    "folders": sorted(
                        folder_data[account.id].values(), key=itemgetter("name")
                    ),
                    "calendars": sorted(
                        calendar_data[account.id], key=itemgetter("name")
                    ),
                    "sync_email": account.sync_email,
                    "sync_events": account.sync_events,
                    "sync_status": sync_status_str,
                    "sync_error": sync_status.get("sync_error"),
                    "sync_end_time": sync_status.get("sync_end_time"),
                    "sync_disabled_reason": sync_status.get("sync_disabled_reason"),
                    "sync_host": account.sync_host,
                    "progress": progress,
                    "throttled": account.throttled,
                    "created_at": account.created_at,
                    "updated_at": account.updated_at,
                }
            )

        return APIEncoder().jsonify(data)
Example 31
        def g():
            encoder = APIEncoder()

            with session_scope(self.account_id) as db_session:
                yield encoder.cereal(
                    self.search_threads(db_session, search_query)) + '\n'
Example 32
class WebhookService(object):
    """Asynchronously consumes the transaction log and executes registered
    webhooks."""
    def __init__(self, poll_interval=1, chunk_size=22):
        self.workers = defaultdict(set)
        self.log = get_logger()
        self.poll_interval = poll_interval
        self.chunk_size = chunk_size
        self.minimum_id = 0
        self.poller = None
        self.polling = False
        self.encoder = APIEncoder()
        self._on_startup()

    @property
    def all_active_workers(self):
        worker_sets = self.workers.values()
        if not worker_sets:
            return set()
        return set.union(*worker_sets)

    def register_hook(self, namespace_id, parameters):
        """Register a new webhook.

        Parameters
        ----------
        namespace_id: int
            ID for the namespace to apply the webhook on.
        parameters: dictionary
            Dictionary of the hook parameters.
        """

        # TODO(emfree) do more meaningful parameter validation here
        # (or in the calling code in the API)

        if urlparse.urlparse(parameters.get('callback_url')).scheme != 'https':
            raise ValueError('callback_url MUST be https!')

        with session_scope() as db_session:
            lens = Lens(
                namespace_id=namespace_id,
                subject=parameters.get('subject'),
                thread_public_id=parameters.get('thread'),
                to_addr=parameters.get('to'),
                from_addr=parameters.get('from'),
                cc_addr=parameters.get('cc'),
                bcc_addr=parameters.get('bcc'),
                any_email=parameters.get('any_email'),
                started_before=parameters.get('started_before'),
                started_after=parameters.get('started_after'),
                last_message_before=parameters.get('last_message_before'),
                last_message_after=parameters.get('last_message_after'),
                filename=parameters.get('filename'))

            hook = Webhook(
                namespace_id=namespace_id,
                lens=lens,
                callback_url=parameters.get('callback_url'),
                failure_notify_url=parameters.get('failure_notify_url'),
                include_body=parameters.get('include_body', False),
                active=parameters.get('active', True),
                min_processed_id=self.minimum_id - 1)

            db_session.add(hook)
            db_session.add(lens)
            db_session.commit()
            if hook.active:
                self._start_hook(hook, db_session)
            return self.encoder.cereal(hook, pretty=True)

    def start_hook(self, hook_public_id):
        with session_scope() as db_session:
            hook = db_session.query(Webhook). \
                filter_by(public_id=hook_public_id).one()
            self._start_hook(hook, db_session)

    def _start_hook(self, hook, db_session):
        self.log.info('Starting hook with public id {}'.format(hook.public_id))
        if any(worker.id == hook.id for worker in self.all_active_workers):
            # Hook already has a worker
            return 'OK hook already running'
        hook.min_processed_id = self.minimum_id - 1
        hook.active = True
        namespace_id = hook.namespace_id
        worker = WebhookWorker(hook)
        self.workers[namespace_id].add(worker)
        if not worker.started:
            worker.start()
        db_session.commit()
        if not self.polling:
            self._start_polling()
        return 'OK hook started'

    def stop_hook(self, hook_public_id):
        self.log.info('Stopping hook with public id {}'.format(hook_public_id))
        with session_scope() as db_session:
            hook = db_session.query(Webhook). \
                filter_by(public_id=hook_public_id).one()
            hook.active = False
            db_session.commit()
            for worker in self.workers[hook.namespace_id]:
                if worker.public_id == hook_public_id:
                    self.workers[hook.namespace_id].remove(worker)
                    worker.kill()
                    break

        if not set.union(*self.workers.values()):
            # Kill the transaction log poller if there are no active hooks.
            self._stop_polling()
        return 'OK hook stopped'

    def _on_startup(self):
        self._load_hooks()
        for worker in itertools.chain(*self.workers.values()):
            if not worker.started:
                worker.start()
        # Needed for workers to actually start up.
        gevent.sleep(0)
        if self.all_active_workers:
            self._start_polling()

    def _start_polling(self):
        self.log.info('Start polling')
        self.minimum_id = min(hook.min_processed_id + 1
                              for hook in self.all_active_workers)
        self.poller = gevent.spawn(self._poll)
        self.polling = True

    def _stop_polling(self):
        self.log.info('Stop polling')
        self.poller.kill()
        self.polling = False

    def _poll(self):
        """Poll the transaction log forever and publish events. Only runs when
        there are actually active webhooks."""
        while True:
            self._process_log()
            gevent.sleep(self.poll_interval)

    def _process_log(self):
        """Scan the transaction log `self.chunk_size` entries at a time,
        publishing matching events to registered hooks."""
        with session_scope() as db_session:
            self.log.info('Scanning tx log from id: {}'.format(
                self.minimum_id))
            unprocessed_txn_count = db_session.query(
                func.count(Transaction.id)).filter(
                    Transaction.table_name == 'message',
                    Transaction.id > self.minimum_id).scalar()
            if unprocessed_txn_count:
                self.log.debug('Total of {0} transactions to process'.format(
                    unprocessed_txn_count))

            max_tx_id, = db_session.query(func.max(Transaction.id)).one()
            if max_tx_id is None:
                max_tx_id = 0
            query = db_session.query(Transaction). \
                filter(Transaction.table_name == 'message',
                       Transaction.command == 'insert'). \
                order_by(asc(Transaction.id))
            for transaction in safer_yield_per(query, Transaction.id,
                                               self.minimum_id,
                                               self.chunk_size):
                namespace_id = transaction.namespace_id
                for worker in self.workers[namespace_id]:
                    if worker.match(transaction):
                        worker.enqueue(EventData(transaction))
                self.minimum_id = transaction.id + 1
            self.log.debug('Processed tx. setting min id to {0}'.format(
                self.minimum_id))

    def _load_hooks(self):
        """Load stored hook parameters from the database. Run once on
        startup."""
        with session_scope() as db_session:
            all_hooks = db_session.query(Webhook).filter_by(active=True).all()
            for hook in all_hooks:
                namespace_id = hook.namespace_id
                self.workers[namespace_id].add(WebhookWorker(hook))
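`safer_yield_per` is not defined in these examples. Below is a sketch of what such a helper might look like, assuming it pages through an id-ordered query in chunks so the whole transaction log never has to be loaded at once; the exact signature and semantics are assumptions.

def safer_yield_per(query, id_field, start_id, count):
    """Yield rows with id_field >= start_id, `count` rows per round trip.

    Assumes `query` is ordered ascending by `id_field`.
    """
    cur_id = start_id
    while True:
        results = query.filter(id_field >= cur_id).limit(count).all()
        if not results:
            return
        for result in results:
            yield result
        cur_id = results[-1].id + 1  # assumes the id column is named `id`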
Example 33
        def g():
            encoder = APIEncoder()

            with session_scope(self.account_id) as db_session:
                yield encoder.cereal(self.search_threads(db_session, search_query)) + '\n'
Example 34
def add_new_user():
    response = {}
    encoder = APIEncoder()

    data = request.get_json(force=True)
    email_address = data.get('email_address')
    password = data.get('password')
    auth_details = data.get('auth_details')
    reauth = data.get('reauth')
    target = data.get('target', 0)

    if not email_address:
        response['error'] = 'Missing key - "email_address"!'
        return encoder.jsonify(response)

    shard_id = target << 48

    with session_scope(shard_id) as db_session:
        account = db_session.query(Account).filter_by(
            email_address=email_address).first()
        if account is not None and not reauth:
            response['error'] = 'Already have this account!'
            return encoder.jsonify(response)

        auth_info = {}
        provider = provider_from_address(email_address)
        if 'gmail' in provider:
            auth_handler = handler_from_provider(provider)
            response['oauth_url'] = auth_handler.get_oauth_url(email_address)
            response['links'] = {'confirm_url': request.url + '/confirm_oauth'}
            namespace = Namespace()
            account = GmailAccount(namespace=namespace)
            account.sync_should_run = False
            account.refresh_token = '_placeholder_'
            account.email_address = email_address
        else:
            if not password:
                response['error'] = 'Missing key - "password"!'
                return encoder.jsonify(response)
            auth_info['email'] = email_address
            auth_info['password'] = password
            if provider != 'unknown':
                auth_handler = handler_from_provider(provider)
                auth_info['provider'] = provider
                try:
                    if reauth:
                        account = auth_handler.update_account(
                            account, auth_info)
                    else:
                        account = auth_handler.create_account(
                            email_address, auth_info)
                except Exception as e:
                    response['error'] = str(e)
            else:
                auth_info['provider'] = 'custom'
                auth_handler = handler_from_provider('custom')
                if not auth_details:
                    auth_info.update(
                        try_fill_config_data(email_address, password))
                else:
                    auth_info.update(auth_details)
                try:
                    if reauth:
                        account = auth_handler.update_account(
                            account, auth_info)
                    else:
                        account = auth_handler.create_account(
                            email_address, auth_info)
                except Exception as e:
                    response['error'] = str(e)
            try:
                auth_handler.verify_account(account)
                response['data'] = 'OK. Authenticated account for {}'.format(
                    email_address)
            except Exception as e:
                response['error'] = str(e)
        db_session.add(account)
        db_session.commit()
    return encoder.jsonify(response)