Example #1
def auth():
    """ Check for account ID on all non-root URLS """
    if request.path in ('/accounts', '/accounts/', '/') \
                       or request.path.startswith('/w/'):
        return

    if not request.authorization or not request.authorization.username:
        return make_response((
            "Could not verify access credential.", 401,
            {'WWW-Authenticate': 'Basic realm="API '
             'Access Token Required"'}))

    namespace_public_id = request.authorization.username

    with global_session_scope() as db_session:
        try:
            valid_public_id(namespace_public_id)
            namespace = db_session.query(Namespace) \
                .filter(Namespace.public_id == namespace_public_id).one()
            g.namespace_id = namespace.id
            g.account_id = namespace.account.id
        except NoResultFound:
            return make_response((
                "Could not verify access credential.", 401,
                {'WWW-Authenticate': 'Basic realm="API '
                 'Access Token Required"'}))
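
Every example on this page uses global_session_scope() as a context manager that
yields a database session. For orientation, the snippet below is a minimal sketch
of that idiom over a plain SQLAlchemy sessionmaker; it is an illustration only,
and the engine URL and factory names are placeholders rather than the project's
actual implementation.

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Placeholder engine/session factory; the real project configures these elsewhere.
engine = create_engine("sqlite:///example.db")
Session = sessionmaker(bind=engine)


@contextmanager
def global_session_scope():
    """Yield a session, committing on success and rolling back on error."""
    session = Session()
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
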
Example #2
def user_console(user_email_address):
    with global_session_scope() as db_session:
        account = db_session.query(Account).filter_by(
            email_address=user_email_address).one()

        if account.provider == 'eas':
            banner = """
        You can access the account instance with the 'account' variable.
        """
        else:
            with writable_connection_pool(account.id, pool_size=1).get()\
                    as crispin_client:
                if account.provider == 'gmail' \
                        and 'all' in crispin_client.folder_names():
                    crispin_client.select_folder(
                        crispin_client.folder_names()['all'][0],
                        uidvalidity_cb)

                banner = """
        You can access the crispin instance with the 'crispin_client' variable,
        and the account instance with the 'account' variable.

        IMAPClient docs are at:

            http://imapclient.readthedocs.org/en/latest/#imapclient-class-reference
        """

        IPython.embed(banner1=banner)
Example #3
    def _process_log(self):
        with global_session_scope() as db_session:
            # Only actions on accounts associated with this sync-engine
            query = db_session.query(ActionLog).join(Namespace).join(Account).\
                filter(ActionLog.discriminator == 'actionlog',
                       ActionLog.status == 'pending',
                       Account.sync_host == platform.node(),
                       Account.sync_should_run).\
                order_by(ActionLog.id).\
                options(contains_eager(ActionLog.namespace, Namespace.account))

            running_action_ids = [worker.action_log_id for worker in
                                  self.workers]
            if running_action_ids:
                query = query.filter(~ActionLog.id.in_(running_action_ids))
            for log_entry in query:
                namespace = log_entry.namespace
                self.log.info('delegating action',
                              action_id=log_entry.id,
                              msg=log_entry.action)
                semaphore = self.account_semaphores[namespace.account_id]
                worker = SyncbackWorker(action_name=log_entry.action,
                                        semaphore=semaphore,
                                        action_log_id=log_entry.id,
                                        record_id=log_entry.record_id,
                                        account_id=namespace.account_id,
                                        provider=namespace.account.verbose_provider,
                                        retry_interval=self.retry_interval,
                                        extra_args=log_entry.extra_args)
                self.workers.add(worker)
                worker.start()
Example #4
    def _process_log(self):
        with global_session_scope() as db_session:
            # Only actions on accounts associated with this sync-engine
            query = db_session.query(ActionLog).join(Namespace).join(Account).\
                filter(ActionLog.discriminator == 'actionlog',
                       ActionLog.status == 'pending',
                       Account.sync_host == platform.node(),
                       Account.sync_should_run).\
                order_by(ActionLog.id).\
                options(contains_eager(ActionLog.namespace, Namespace.account))

            running_action_ids = [worker.action_log_id for worker in
                                  self.workers]
            if running_action_ids:
                query = query.filter(~ActionLog.id.in_(running_action_ids))
            for log_entry in query:
                namespace = log_entry.namespace
                self.log.info('delegating action',
                              action_id=log_entry.id,
                              msg=log_entry.action)
                semaphore = self.account_semaphores[namespace.account_id]
                worker = SyncbackWorker(action_name=log_entry.action,
                                        semaphore=semaphore,
                                        action_log_id=log_entry.id,
                                        record_id=log_entry.record_id,
                                        account_id=namespace.account_id,
                                        provider=namespace.account.provider,
                                        retry_interval=self.retry_interval,
                                        extra_args=log_entry.extra_args)
                self.workers.add(worker)
                worker.start()
Example #5
def suspend_sync():
    g.parser.add_argument('account_id',
                          required=True,
                          type=valid_public_id,
                          location='form')
    args = strict_parse_args(g.parser, request.args)

    namespace_public_id = args['account_id']
    with global_session_scope() as db_session:
        namespace = db_session.query(Namespace) \
          .filter(Namespace.public_id == namespace_public_id).one()
        namespace_id = namespace.id

    with session_scope(namespace_id) as db_session:
        namespace = db_session.query(Namespace) \
            .filter(Namespace.public_id == namespace_public_id).one()
        account = namespace.account

        account.sync_should_run = False
        account._sync_status[
            'sync_disabled_reason'] = 'suspend_account API endpoint called'
        account._sync_status['sync_disabled_on'] = datetime.utcnow()
        account._sync_status['sync_disabled_by'] = 'api'

        db_session.commit()

        shared_queue = shared_sync_event_queue_for_zone(config.get('ZONE'))
        shared_queue.send_event({'event': 'sync_suspended', 'id': account.id})

    return make_response(('', 204, {}))
Example #6
def modify_account(namespace_public_id):
    """
    Modify an existing account

    This updates the account's credentials using the auth handler for its
    provider type.
    """

    data = request.get_json(force=True)

    with global_session_scope() as db_session:
        namespace = (db_session.query(Namespace).filter(
            Namespace.public_id == namespace_public_id).one())
        account = namespace.account

        if isinstance(account, GenericAccount):
            auth_handler = GenericAuthHandler()
            account_data = _get_account_data_for_generic_account(data)
        elif isinstance(account, GmailAccount):
            auth_handler = GoogleAuthHandler()
            account_data = _get_account_data_for_google_account(data)
        elif isinstance(account, OutlookAccount):
            auth_handler = MicrosoftAuthHandler()
            account_data = _get_account_data_for_microsoft_account(data)
        else:
            raise ValueError("Account type not supported.")

        account = auth_handler.update_account(account, account_data)
        db_session.add(account)
        db_session.commit()

        encoder = APIEncoder()
        return encoder.jsonify(account.namespace)
Example #7
    def account_ids_owned(self):
        with global_session_scope() as db_session:
            return {
                r[0]
                for r in db_session.query(Account.id).filter(
                    Account.sync_host == self.process_identifier).all()
            }
Example #8
def ns_all():
    """ Return all namespaces """
    # We do this outside the blueprint to support the case of an empty
    # public_id.  However, this means the before_request isn't run, so we need
    # to make our own session
    with global_session_scope() as db_session:
        parser = reqparse.RequestParser(argument_class=ValidatableArgument)
        parser.add_argument('limit', default=DEFAULT_LIMIT, type=limit,
                            location='args')
        parser.add_argument('offset', default=0, type=int, location='args')
        parser.add_argument('email_address', type=bounded_str, location='args')
        args = strict_parse_args(parser, request.args)

        query = db_session.query(Namespace)
        if args['email_address']:
            query = query.join(Account)
            query = query.filter_by(email_address=args['email_address'])

        query = query.limit(args['limit'])
        if args['offset']:
            query = query.offset(args['offset'])

        namespaces = query.all()
        encoder = APIEncoder(legacy_nsid=request.path.startswith('/n'))
        return encoder.jsonify(namespaces)
Example #9
def event_update(calendar_public_id):
    try:
        valid_public_id(calendar_public_id)
        with global_session_scope() as db_session:
            calendar = db_session.query(Calendar) \
                .filter(Calendar.public_id == calendar_public_id) \
                .one()
            if calendar.gpush_last_ping is not None:
                time_since_last_ping = (
                    datetime.utcnow() - calendar.gpush_last_ping
                ).total_seconds()

                # Limit write volume, and de-herd, in case we're getting many
                # concurrent updates for the same calendar.
                if time_since_last_ping < 10 + random.randrange(0, 10):
                    return resp(200)

            calendar.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing calendar',
                   calendar_public_id=calendar_public_id)
        raise NotFoundError("Couldn't find calendar `{0}`"
                            .format(calendar_public_id))
Example #10
def ns_all():
    """ Return all namespaces """
    # We do this outside the blueprint to support the case of an empty
    # public_id.  However, this means the before_request isn't run, so we need
    # to make our own session
    with global_session_scope() as db_session:
        parser = reqparse.RequestParser(argument_class=ValidatableArgument)
        parser.add_argument("limit",
                            default=DEFAULT_LIMIT,
                            type=limit,
                            location="args")
        parser.add_argument("offset", default=0, type=int, location="args")
        parser.add_argument("email_address", type=bounded_str, location="args")
        args = strict_parse_args(parser, request.args)

        query = db_session.query(Namespace)
        if args["email_address"]:
            query = query.join(Account)
            query = query.filter_by(email_address=args["email_address"])

        query = query.limit(args["limit"])
        if args["offset"]:
            query = query.offset(args["offset"])

        namespaces = query.all()
        encoder = APIEncoder()
        return encoder.jsonify(namespaces)
Example #11
def event_update(calendar_public_id):
    g.log.info('Received request to update Google calendar',
               calendar_public_id=calendar_public_id)
    try:
        valid_public_id(calendar_public_id)
        with global_session_scope() as db_session:
            calendar = db_session.query(Calendar) \
                .filter(Calendar.public_id == calendar_public_id) \
                .one()
            if calendar.gpush_last_ping is not None:
                time_since_last_ping = (
                    datetime.utcnow() -
                    calendar.gpush_last_ping).total_seconds()

                # Limit write volume, and de-herd, in case we're getting many
                # concurrent updates for the same calendar.
                if time_since_last_ping < 10 + random.randrange(0, 10):
                    return resp(200)

            calendar.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing calendar',
                   calendar_public_id=calendar_public_id)
        raise NotFoundError(
            "Couldn't find calendar `{0}`".format(calendar_public_id))
Example #12
def find_account_shard(email_address):
    with global_session_scope() as db_session:
        account = db_session.query(Account).filter_by(
            email_address=email_address).first()
        if account is not None:
            return account.id

    return None
Example #13
def main(hostname):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        account_ids = db_session.query(Account.id).filter(Account.sync_host == hostname)

        print("Accounts being synced by {}:".format(hostname))
        for account_id in account_ids:
            print(account_id[0])
        db_session.commit()
Example #14
    def account_ids_to_sync(self):
        with global_session_scope() as db_session:
            return {r[0] for r in db_session.query(Account.id).
                    filter(Account.sync_should_run,
                           or_(and_(Account.desired_sync_host == self.process_identifier,
                                    Account.sync_host == None),  # noqa
                               and_(Account.desired_sync_host == None,  # noqa
                                    Account.sync_host == self.process_identifier),
                               and_(Account.desired_sync_host == self.process_identifier,
                                    Account.sync_host == self.process_identifier))).all()}
Example #15
def main(min_id, max_id, shard_id):
    maybe_enable_rollbar()

    generic_accounts = []
    failed = []

    if min_id is not None or max_id is not None:
        # Get the list of running Gmail accounts.
        with global_session_scope() as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == "running")

            if min_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id > min_id)

            if max_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id <= max_id)

            generic_accounts = [acc.id for acc in generic_accounts]

            db_session.expunge_all()

    elif shard_id is not None:
        with session_scope_by_shard_id(shard_id) as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == "running")

            generic_accounts = [acc.id for acc in generic_accounts]
            db_session.expunge_all()

    print("Total accounts", len(generic_accounts))

    for account_id in generic_accounts:
        try:
            with session_scope(account_id) as db_session:
                account = db_session.query(GenericAccount).get(account_id)
                print("Updating", account.email_address)

                with connection_pool(account.id).get() as crispin_client:
                    account.folder_prefix = crispin_client.folder_prefix
                    account.folder_separator = crispin_client.folder_separator

                db_session.commit()
        except Exception:
            failed.append(account_id)

    print("Processed accounts:")
    print(generic_accounts)

    print("Failed accounts:")
    print(failed)
Example #16
def main(dry_run, number, hostname, process):
    """
    Unschedule all accounts assigned to a given sync host.
    Intended primarily for use when decommissioning sync instances or for
    manually unloading an overloaded sync instance.

    """
    maybe_enable_rollbar()

    if not number:
        message = (
            "You have not provided a --number option. This will "
            "unschedule ALL syncs on the host. Proceed? [Y/n] "
        )
        if raw_input(message).strip().lower() == "n":
            print("Will not proceed")
            return

    if not dry_run:
        message = (
            "It is unsafe to unassign hosts while mailsync processes are running. "
            "Have you shut down the appropriate mailsync processes on {}? [Y/n]".format(
                hostname
            )
        )
        if raw_input(message).strip().lower() == "n":
            print("Bailing out")
            return

    with global_session_scope() as db_session:
        if process is not None:
            hostname = ":".join([hostname, process])
        to_unschedule = db_session.query(Account.id).filter(
            Account.sync_host.like("{}%".format(hostname))
        )
        if number:
            to_unschedule = to_unschedule.limit(number)
        to_unschedule = [id_ for id_, in to_unschedule.all()]
        if number:
            to_unschedule = to_unschedule[:number]

    for account_id in to_unschedule:
        with session_scope(account_id) as db_session:
            if dry_run:
                print("Would unassign", account_id)
            else:
                account = db_session.query(Account).get(account_id)
                print("Unassigning", account.id)
                account.desired_sync_host = None
                account.sync_host = None
                db_session.commit()
Example #17
def main(min_id, max_id, shard_id):
    generic_accounts = []
    failed = []

    if min_id is not None or max_id is not None:
        # Get the list of running Gmail accounts.
        with global_session_scope() as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == 'running')

            if min_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id > min_id)

            if max_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id <= max_id)

            generic_accounts = [acc.id for acc in generic_accounts]

            db_session.expunge_all()

    elif shard_id is not None:
        with session_scope_by_shard_id(shard_id) as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == 'running')

            generic_accounts = [acc.id for acc in generic_accounts]
            db_session.expunge_all()

    print "Total accounts: %d" % len(generic_accounts)

    for account_id in generic_accounts:
        try:
            with session_scope(account_id) as db_session:
                account = db_session.query(GenericAccount).get(account_id)
                print "Updating %s" % account.email_address

                with connection_pool(account.id).get() as crispin_client:
                    account.folder_prefix = crispin_client.folder_prefix
                    account.folder_separator = crispin_client.folder_separator

                db_session.commit()
        except Exception:
            failed.append(account_id)

    print "Processed accounts:"
    print generic_accounts

    print "Failed accounts:"
    print failed
Example #18
def auth():
    """ Check for account ID on all non-root URLS """
    if (
        request.path == "/"
        or request.path.startswith("/accounts")
        or request.path.startswith("/w/")
        or request.path.startswith("/metrics")
    ):
        return

    if not request.authorization or not request.authorization.username:

        AUTH_ERROR_MSG = (
            "Could not verify access credential.",
            401,
            {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'},
        )

        auth_header = request.headers.get("Authorization", None)

        if not auth_header:
            return make_response(AUTH_ERROR_MSG)

        parts = auth_header.split()

        if len(parts) != 2 or parts[0].lower() != "bearer" or not parts[1]:
            return make_response(AUTH_ERROR_MSG)
        namespace_public_id = parts[1]

    else:
        namespace_public_id = request.authorization.username

    with global_session_scope() as db_session:
        try:
            valid_public_id(namespace_public_id)
            namespace = (
                db_session.query(Namespace)
                .filter(Namespace.public_id == namespace_public_id)
                .one()
            )
            g.namespace_id = namespace.id
            g.account_id = namespace.account.id
        except NoResultFound:
            return make_response(
                (
                    "Could not verify access credential.",
                    401,
                    {"WWW-Authenticate": 'Basic realm="API ' 'Access Token Required"'},
                )
            )
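
The handler above accepts the namespace public ID either as the HTTP Basic
username or as a Bearer token. A client-side sketch using requests follows; the
host, endpoint, and ID are hypothetical placeholders.

import requests

BASE_URL = "http://localhost:5555"  # hypothetical API host
NAMESPACE_PUBLIC_ID = "your-namespace-public-id"  # hypothetical public ID

# Basic auth: the namespace public ID goes in the username, password left empty.
requests.get(BASE_URL + "/threads", auth=(NAMESPACE_PUBLIC_ID, ""))

# Bearer auth: the same ID is sent as a bearer token.
requests.get(
    BASE_URL + "/threads",
    headers={"Authorization": "Bearer " + NAMESPACE_PUBLIC_ID},
)
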
Example #19
def user_console(user_email_address):
    with global_session_scope() as db_session:
        result = (db_session.query(Account).filter_by(
            email_address=user_email_address).all())

        account = None

        if len(result) == 1:
            account = result[0]
        elif len(result) > 1:
            print("\n{} accounts found for that email.\n".format(len(result)))
            for idx, acc in enumerate(result):
                print("[{}] - {} {} {}".format(
                    idx,
                    acc.provider,
                    acc.namespace.email_address,
                    acc.namespace.public_id,
                ))
            choice = int(input("\nWhich # do you want to select? "))
            account = result[choice]

        if account is None:
            print(
                "No account found with email '{}'".format(user_email_address))
            return

        if account.provider == "eas":
            banner = """
        You can access the account instance with the 'account' variable.
        """
            IPython.embed(banner1=banner)
        else:
            with writable_connection_pool(account.id,
                                          pool_size=1).get() as crispin_client:
                if (account.provider == "gmail"
                        and "all" in crispin_client.folder_names()):
                    crispin_client.select_folder(
                        crispin_client.folder_names()["all"][0],
                        uidvalidity_cb)

                banner = """
        You can access the crispin instance with the 'crispin_client' variable,
        and the account instance with the 'account' variable.

        IMAPClient docs are at:

            http://imapclient.readthedocs.org/en/latest/#imapclient-class-reference
        """

                IPython.embed(banner1=banner)
Example #20
def calendar_update(account_public_id):
    request.environ["log_context"]["account_public_id"] = account_public_id
    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = (db_session.query(GmailAccount).filter(
                GmailAccount.public_id == account_public_id).one())
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError("Invalid public ID")
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find account `{0}`".format(account_public_id))
Example #21
def delete_account(namespace_public_id):
    """ Mark an existing account for deletion. """
    try:
        with global_session_scope() as db_session:
            namespace = (db_session.query(Namespace).filter(
                Namespace.public_id == namespace_public_id).one())
            account = namespace.account
            account.mark_for_deletion()
            db_session.commit()
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find account `{0}` ".format(namespace_public_id))

    encoder = APIEncoder()
    return encoder.jsonify({})
Example #22
def calendar_update(account_public_id):
    request.environ['log_context']['account_public_id'] = account_public_id
    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = db_session.query(GmailAccount) \
                .filter(GmailAccount.public_id == account_public_id) \
                .one()
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        raise NotFoundError("Couldn't find account `{0}`"
                            .format(account_public_id))
Example #23
def enable_sync():
    g.parser.add_argument('account_id',
                          required=True,
                          type=valid_public_id,
                          location='form')
    args = strict_parse_args(g.parser, request.args)

    account_id = None

    namespace_public_id = args['account_id']
    with global_session_scope() as db_session:
        namespace = db_session.query(Namespace) \
            .filter(Namespace.public_id == namespace_public_id).one()
        account_id = namespace.account.id

    with session_scope(account_id) as db_session:
        try:
            account = db_session.query(Account).with_for_update() \
                .filter(Account.id == account_id).one()

            lease_period = timedelta(minutes=1)
            time_ended = account.sync_status.get('sync_end_time')
            time_now = datetime.utcnow()

            if account.sync_host is None and account.sync_state != 'running' \
                and (time_ended is None or time_now > time_ended + lease_period):
                account.sync_should_run = True

                if account.provider == 'gmail':
                    creds = account.auth_credentials
                    for c in creds:
                        c.is_valid = True

                db_session.commit()

            resp = json.dumps(account.sync_status, default=json_util.default)
            return make_response((resp, 200, {
                'Content-Type': 'application/json'
            }))
        except NotSupportedError as e:
            resp = simplejson.dumps({
                'message': str(e),
                'type': 'custom_api_error'
            })
            return make_response((resp, 400, {
                'Content-Type': 'application/json'
            }))
Example #24
def event_update(calendar_public_id):
    try:
        valid_public_id(calendar_public_id)
        with global_session_scope() as db_session:
            calendar = db_session.query(Calendar) \
                .filter(Calendar.public_id == calendar_public_id) \
                .one()
            calendar.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing calendar',
                   calendar_public_id=calendar_public_id)
        raise NotFoundError(
            "Couldn't find calendar `{0}`".format(calendar_public_id))
Example #25
def user_console(user_email_address):
    with global_session_scope() as db_session:
        result = db_session.query(Account).filter_by(
            email_address=user_email_address).all()

        account = None

        if len(result) == 1:
            account = result[0]
        elif len(result) > 1:
            print "\n{} accounts found for that email.\n".format(len(result))
            for idx, acc in enumerate(result):
                print "[{}] - {} {} {}".format(idx, acc.provider,
                                               acc.namespace.email_address,
                                               acc.namespace.public_id)
            choice = int(raw_input("\nWhich # do you want to select? "))
            account = result[choice]

        if account is None:
            print "No account found with email '{}'".format(user_email_address)
            return

        if account.provider == 'eas':
            banner = """
        You can access the account instance with the 'account' variable.
        """
            IPython.embed(banner1=banner)
        else:
            with writable_connection_pool(account.id, pool_size=1).get()\
                    as crispin_client:
                if account.provider == 'gmail' \
                        and 'all' in crispin_client.folder_names():
                    crispin_client.select_folder(
                        crispin_client.folder_names()['all'][0],
                        uidvalidity_cb)

                banner = """
        You can access the crispin instance with the 'crispin_client' variable,
        and the account instance with the 'account' variable.

        IMAPClient docs are at:

            http://imapclient.readthedocs.org/en/latest/#imapclient-class-reference
        """

                IPython.embed(banner1=banner)
Example #26
def event_update(calendar_public_id):
    try:
        valid_public_id(calendar_public_id)
        with global_session_scope() as db_session:
            calendar = db_session.query(Calendar) \
                .filter(Calendar.public_id == calendar_public_id) \
                .one()
            calendar.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing calendar',
                   calendar_public_id=calendar_public_id)
        raise NotFoundError("Couldn't find calendar `{0}`"
                            .format(calendar_public_id))
Example #27
def calendar_update(account_public_id):

    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = db_session.query(GmailAccount) \
                .filter(GmailAccount.public_id == account_public_id) \
                .one()
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing account',
                   account_public_id=account_public_id)
        raise NotFoundError(
            "Couldn't find account `{0}`".format(account_public_id))
Example #28
def event_update(calendar_public_id):
    request.environ["log_context"]["calendar_public_id"] = calendar_public_id
    try:
        valid_public_id(calendar_public_id)
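        # Token-bucket throttle (rps=0.5): handle at most roughly one notification
        # every two seconds per calendar; excess pings are acknowledged with a 200
        # below without touching the database.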
        allowed, tokens, sleep = limitlion.throttle(
            "gcal:{}".format(calendar_public_id), rps=0.5)
        if allowed:
            with global_session_scope() as db_session:
                calendar = (db_session.query(Calendar).filter(
                    Calendar.public_id == calendar_public_id).one())
                calendar.handle_gpush_notification()
                db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError("Invalid public ID")
    except NoResultFound:
        raise NotFoundError(
            "Couldn't find calendar `{0}`".format(calendar_public_id))
Example #29
def calendar_update(account_public_id):

    try:
        valid_public_id(account_public_id)
        with global_session_scope() as db_session:
            account = db_session.query(GmailAccount) \
                .filter(GmailAccount.public_id == account_public_id) \
                .one()
            account.handle_gpush_notification()
            db_session.commit()
        return resp(200)
    except ValueError:
        raise InputError('Invalid public ID')
    except NoResultFound:
        g.log.info('Getting push notifications for non-existing account',
                   account_public_id=account_public_id)
        raise NotFoundError("Couldn't find account `{0}`"
                            .format(account_public_id))
Example #30
def auth():
    """ Check for account ID on all non-root URLS """
    if request.path in ('/accounts', '/accounts/', '/') \
            or request.path.startswith('/w/') \
            or request.path.startswith('/c/'):
        return

    if not request.authorization or not request.authorization.username:

        AUTH_ERROR_MSG = ("Could not verify access credential.", 401, {
            'WWW-Authenticate':
            'Basic realm="API '
            'Access Token Required"'
        })

        auth_header = request.headers.get('Authorization', None)

        if not auth_header:
            return make_response(AUTH_ERROR_MSG)

        parts = auth_header.split()

        if (len(parts) != 2 or parts[0].lower() != 'bearer' or not parts[1]):
            return make_response(AUTH_ERROR_MSG)
        namespace_public_id = parts[1]

    else:
        namespace_public_id = request.authorization.username

    with global_session_scope() as db_session:
        try:
            valid_public_id(namespace_public_id)
            namespace = db_session.query(Namespace) \
                .filter(Namespace.public_id == namespace_public_id).one()
            g.namespace_id = namespace.id
            g.account_id = namespace.account.id
        except NoResultFound:
            return make_response(("Could not verify access credential.", 401, {
                'WWW-Authenticate':
                'Basic realm="API '
                'Access Token Required"'
            }))
Example #31
def main():
    """
    Detects accounts with sync_state and sync_host inconsistent with
    sync_should_run bit. (At one point, this could happen if, say, an account
    was _started_ on a new host without being first stopped on its previous
    host.)

    """
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        for acc in (db_session.query(Account).options(
                load_only("sync_state", "sync_should_run", "sync_host",
                          "desired_sync_host")).filter(
                              Account.sync_state == "stopped")):

            if acc.desired_sync_host is not None:
                print("account {} assigned to {} but has sync_state 'stopped'"
                      " ({}, {})".format(acc.id, acc.sync_host,
                                         acc.sync_should_run, acc.sync_host))
Example #32
def main(account_id, desired_host, dry_run, toggle_sync):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        account = db_session.query(Account).get(int(account_id))

        print("Before sync host: {}".format(account.sync_host))
        print("Before desired sync host: {}".format(account.desired_sync_host))
        print("Before sync should run: {}".format(account.sync_should_run))

        if dry_run:
            return
        account.desired_sync_host = desired_host
        if toggle_sync:
            account.sync_should_run = not account.sync_should_run

        print("After sync host: {}".format(account.sync_host))
        print("After desired sync host: {}".format(account.desired_sync_host))
        print("After sync should run: {}".format(account.sync_should_run))
        db_session.commit()
Example #33
def auth_user(request):
    """
        Authentication for user-specific routes, for example
        getting messages for one user
    """
    if not request.authorization or not request.authorization.username:
        AUTH_ERROR_MSG = ("Could not verify access credential.", 401, {
            'WWW-Authenticate':
            'Basic realm="API '
            'Access Token Required"'
        })

        auth_header = request.headers.get('Authorization', None)

        if not auth_header:
            return make_response(AUTH_ERROR_MSG)

        parts = auth_header.split()

        if len(parts) != 2 or parts[0].lower() != 'bearer' or not parts[1]:
            return make_response(AUTH_ERROR_MSG)
        namespace_public_id = parts[1]
    else:
        namespace_public_id = request.authorization.username

    with global_session_scope() as db_session:
        try:
            valid_public_id(namespace_public_id)
            namespace = db_session.query(Namespace) \
                .filter(Namespace.public_id == namespace_public_id).one()
            g.namespace_id = namespace.id
            g.account_id = namespace.account.id
        except NoResultFound:
            return make_response(("Could not verify access credential.", 401, {
                'WWW-Authenticate':
                'Basic realm="API '
                'Access Token Required"'
            }))
Example #34
def auth():
    """ Check for account ID on all non-root URLS """
    if request.path in ('/accounts', '/accounts/', '/') \
            or request.path.startswith('/w/'):
        return

    if not request.authorization or not request.authorization.username:

        AUTH_ERROR_MSG = ("Could not verify access credential.", 401,
                          {'WWW-Authenticate': 'Basic realm="API '
                              'Access Token Required"'})

        auth_header = request.headers.get('Authorization', None)

        if not auth_header:
            return make_response(AUTH_ERROR_MSG)

        parts = auth_header.split()

        if (len(parts) != 2 or parts[0].lower() != 'bearer' or not parts[1]):
            return make_response(AUTH_ERROR_MSG)
        namespace_public_id = parts[1]

    else:
        namespace_public_id = request.authorization.username

    with global_session_scope() as db_session:
        try:
            valid_public_id(namespace_public_id)
            namespace = db_session.query(Namespace) \
                .filter(Namespace.public_id == namespace_public_id).one()
            g.namespace_id = namespace.id
            g.account_id = namespace.account.id
        except NoResultFound:
            return make_response((
                "Could not verify access credential.", 401,
                {'WWW-Authenticate': 'Basic realm="API '
                 'Access Token Required"'}))
Example #35
def main(num_accounts):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        accounts = (db_session.query(Account).filter(
            Account.sync_should_run == true()).order_by(
                func.rand()).limit(num_accounts).all())

        accounts = [acc.id for acc in accounts][:num_accounts]
        db_session.expunge_all()

    pool = Pool(size=100)
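    # Run process_account concurrently for every sampled account; Pool here is
    # presumably gevent.pool.Pool, so the per-account work is expected to be I/O-bound.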
    results = pool.map(process_account, accounts)

    global_results = dict()
    for ret in results:
        for key in ret:
            if key not in global_results:
                global_results[key] = 0

            global_results[key] += ret[key]

    print(global_results)
Example #36
def main(type, id, public_id):
    maybe_enable_rollbar()

    type = type.lower()

    if type not in cls_for_type:
        print("Error: unknown type '{}'".format(type))
        sys.exit(-1)

    cls = cls_for_type[type]

    if public_id is None and id is None:
        print("Error: you should specify an id or public id to query.")
        sys.exit(-1)

    with global_session_scope() as db_session:
        if public_id:
            obj = db_session.query(cls).filter(
                cls.public_id == public_id).one()
            print(obj.id)
        elif id:
            obj = db_session.query(cls).filter(cls.id == id).one()
            print(obj.public_id)
Example #37
def create_account():
    """ Create a new account """
    data = request.get_json(force=True)

    if data["type"] == "generic":
        auth_handler = GenericAuthHandler()
        account_data = _get_account_data_for_generic_account(data)
    elif data["type"] == "gmail":
        auth_handler = GoogleAuthHandler()
        account_data = _get_account_data_for_google_account(data)
    elif data["type"] == "microsoft":
        auth_handler = MicrosoftAuthHandler()
        account_data = _get_account_data_for_microsoft_account(data)
    else:
        raise ValueError("Account type not supported.")

    with global_session_scope() as db_session:
        account = auth_handler.create_account(account_data)
        db_session.add(account)
        db_session.commit()

        encoder = APIEncoder()
        return encoder.jsonify(account.namespace)
Example #38
def main(type, id, public_id, account_id, namespace_id, readwrite):
    maybe_enable_rollbar()

    type = type.lower()

    if type not in cls_for_type:
        print("Error: unknown type '{}'".format(type))
        sys.exit(-1)

    cls = cls_for_type[type]

    if not any([id, public_id, account_id, namespace_id]):
        print("Error: you should specify an id or public id to query.")
        sys.exit(-1)

    with global_session_scope() as db_session, db_session.no_autoflush:
        qu = db_session.query(cls)

        if public_id:
            qu = qu.filter(cls.public_id == public_id)
        elif id:
            qu = qu.filter(cls.id == id)

        if account_id:
            qu = qu.filter(cls.account_id == account_id)
        elif namespace_id:
            qu = qu.filter(cls.namespace_id == namespace_id)

        obj = qu.one()  # noqa: F841

        banner = """The object you queried is accessible as `obj`.
Note that the db session is read-only unless you start this script with --readwrite."""
        IPython.embed(banner1=banner)

        if readwrite is False:
            print("Rolling-back db session.")
            db_session.rollback()
Example #39
    def account_ids_owned(self):
        with global_session_scope() as db_session:
            return {r[0] for r in db_session.query(Account.id).
                    filter(Account.sync_host == self.process_identifier).all()}
Example #40
def index():
    with global_session_scope() as db_session:
        if "namespace_id" in request.args:
            try:
                namespace = (
                    db_session.query(Namespace)
                    .filter(Namespace.public_id == request.args["namespace_id"])
                    .one()
                )
            except NoResultFound:
                return APIEncoder().jsonify([])
        else:
            namespace = None

        accounts = db_session.query(ImapAccount).with_polymorphic([GenericAccount])

        if namespace:
            accounts = accounts.filter(Account.namespace == namespace)
        else:
            # Get all account IDs that aren't deleted
            account_ids = [
                result[0]
                for result in db_session.query(ImapAccount.id, ImapAccount._sync_status)
                if result[1].get("sync_disabled_reason") != "account deleted"
            ]

            # This is faster than fetching all accounts.
            accounts = accounts.filter(ImapAccount.id.in_(account_ids))

        accounts = list(accounts)

        folder_data = _get_folder_data(db_session, accounts)
        calendar_data = _get_calendar_data(db_session, namespace)
        heartbeat = get_ping_status(account_ids=[acc.id for acc in accounts])

        data = []

        for account in accounts:
            if account.id in heartbeat:
                account_heartbeat = heartbeat[account.id]
                account_folder_data = folder_data[account.id]
                account_calendar_data = calendar_data[account.id]

                events_alive = False

                for folder_status in account_heartbeat.folders:
                    folder_status_id = int(folder_status.id)
                    if folder_status_id in account_folder_data:
                        account_folder_data[folder_status_id].update(
                            {
                                "alive": folder_status.alive,
                                "heartbeat_at": folder_status.timestamp,
                            }
                        )
                    elif folder_status_id == EVENT_SYNC_FOLDER_ID:
                        events_alive = folder_status.alive

                email_alive = all(f["alive"] for f in account_folder_data.values())

                alive = True
                if account.sync_email and not email_alive:
                    alive = False
                if account.sync_events and not events_alive:
                    alive = False

                email_initial_sync = any(
                    f["state"] == "initial" for f in account_folder_data.values()
                )
                events_initial_sync = any(
                    c["state"] == "initial" for c in account_calendar_data
                )
                initial_sync = email_initial_sync or events_initial_sync

                total_uids = sum(
                    f["remote_uid_count"] or 0 for f in account_folder_data.values()
                )
                remaining_uids = sum(
                    f["download_uid_count"] or 0 for f in account_folder_data.values()
                )
                if total_uids:
                    progress = 100.0 / total_uids * (total_uids - remaining_uids)
                else:
                    progress = None
            else:
                alive = False
                email_initial_sync = None
                events_initial_sync = None
                initial_sync = None
                progress = None

            sync_status = account.sync_status
            is_running = sync_status["state"] == "running"
            if (
                is_running
                and not sync_status.get("sync_start_time")
                and not sync_status.get("sync_error")
            ):
                sync_status_str = "starting"
            elif is_running and alive:
                if initial_sync:
                    sync_status_str = "initial"
                else:
                    sync_status_str = "running"
            elif is_running:
                # Nylas is syncing, but not all heartbeats are reporting.
                sync_status_str = "delayed"
            else:
                # Nylas is no longer syncing this account.
                sync_status_str = "dead"

            data.append(
                {
                    "account_private_id": account.id,
                    "namespace_private_id": account.namespace.id,
                    "account_id": account.public_id,
                    "namespace_id": account.namespace.public_id,
                    "events_alive": events_alive,
                    "email_alive": email_alive,
                    "alive": alive,
                    "email_initial_sync": email_initial_sync,
                    "events_initial_sync": events_initial_sync,
                    "initial_sync": initial_sync,
                    "provider_name": account.provider,
                    "email_address": account.email_address,
                    "folders": sorted(
                        folder_data[account.id].values(), key=itemgetter("name")
                    ),
                    "calendars": sorted(
                        calendar_data[account.id], key=itemgetter("name")
                    ),
                    "sync_email": account.sync_email,
                    "sync_events": account.sync_events,
                    "sync_status": sync_status_str,
                    "sync_error": sync_status.get("sync_error"),
                    "sync_end_time": sync_status.get("sync_end_time"),
                    "sync_disabled_reason": sync_status.get("sync_disabled_reason"),
                    "sync_host": account.sync_host,
                    "progress": progress,
                    "throttled": account.throttled,
                    "created_at": account.created_at,
                    "updated_at": account.updated_at,
                }
            )

        return APIEncoder().jsonify(data)