Exemplo n.º 1
0
def get_redis_client(host=None, port=6379, db=STATUS_DATABASE):
    """Return a StrictRedis client backed by a shared connection pool.

    When *host* is falsy, both host and port are read from config (the
    passed-in *port* is ignored in that case).
    """
    if host:
        resolved_host, resolved_port = host, port
    else:
        resolved_host = str(config.get_required('REDIS_HOSTNAME'))
        resolved_port = int(config.get_required('REDIS_PORT'))

    pool = _get_connection_pool(resolved_host, resolved_port, db)
    return StrictRedis(resolved_host, resolved_port, db, connection_pool=pool)
Exemplo n.º 2
0
def make_default_account(db, config):
    """Create, persist, and return a default Gmail test account together
    with matching (fake) OAuth credentials."""
    import platform
    from inbox.models.backends.gmail import GmailAccount
    from inbox.models.backends.gmail import GmailAuthCredentials
    from inbox.auth.gmail import OAUTH_SCOPE
    from inbox.models import Namespace

    namespace = Namespace()
    account = GmailAccount(sync_host=platform.node(),
                           email_address='*****@*****.**')
    account.namespace = namespace
    account.create_emailed_events_calendar()
    account.refresh_token = 'faketoken'

    creds = GmailAuthCredentials()
    creds.client_id = config.get_required('GOOGLE_OAUTH_CLIENT_ID')
    creds.client_secret = config.get_required('GOOGLE_OAUTH_CLIENT_SECRET')
    creds.refresh_token = 'faketoken'
    creds.g_id_token = 'foo'
    creds.created_at = datetime.utcnow()
    creds.updated_at = datetime.utcnow()
    creds.gmailaccount = account
    creds.scopes = OAUTH_SCOPE

    db.session.add(account)
    db.session.add(creds)
    db.session.commit()
    return account
Exemplo n.º 3
0
def log(request, config):
    """
    Returns root server logger. For others loggers, use this fixture
    for setup but then call inbox.log.get_logger().

    Testing log file is removed at the end of the test run!

    """
    import logging
    from inbox.util.file import mkdirp
    root_logger = logging.getLogger()
    # Fix: iterate over a *copy* of the handler list. removeHandler()
    # mutates root_logger.handlers, and removing items from a list while
    # iterating it skips every other handler.
    for handler in list(root_logger.handlers):
        root_logger.removeHandler(handler)

    logdir = config.get_required('LOGDIR')
    mkdirp(logdir)
    logfile = config.get_required('TEST_LOGFILE')
    file_handler = logging.FileHandler(logfile, encoding='utf-8')
    root_logger.addHandler(file_handler)
    root_logger.setLevel(logging.DEBUG)

    def remove_logs():
        # Best-effort cleanup; the file may already be gone.
        try:
            os.remove(logfile)
        except OSError:
            pass

    request.addfinalizer(remove_logs)
def upgrade():
    """Migration: copy plaintext secrets into Secret._secret (encrypting
    them when ENCRYPT_SECRETS is set) and tag each secret's type as
    'password' or 'token' based on which account table references it.
    """
    from inbox.config import config
    import nacl.secret
    import nacl.utils
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    # Reflect the live schema: migrations must not depend on the current
    # model definitions.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    class GenericAccount(Base):
        __table__ = Base.metadata.tables['genericaccount']

    with session_scope(ignore_soft_deletes=False, versioned=False) as \
            db_session:
        secrets = db_session.query(Secret).filter(
            Secret.secret.isnot(None)).all()

        # Join on the genericaccount and optionally easaccount tables to
        # determine which secrets should have type 'password'.
        generic_query = db_session.query(Secret.id).join(
            GenericAccount).filter(Secret.id == GenericAccount.password_id)
        password_secrets = [id_ for id_, in generic_query]
        if engine.has_table('easaccount'):
            class EASAccount(Base):
                __table__ = Base.metadata.tables['easaccount']

            eas_query = db_session.query(Secret.id).join(
                EASAccount).filter(Secret.id == EASAccount.password_id)
            password_secrets.extend([id_ for id_, in eas_query])

        for s in secrets:
            # Python 2: encode unicode to UTF-8 bytes before encrypting.
            plain = s.secret.encode('utf-8') if isinstance(s.secret, unicode) \
                else s.secret
            if config.get_required('ENCRYPT_SECRETS'):
                # NOTE(review): nacl.encoding is referenced but only
                # nacl.secret / nacl.utils are imported above -- confirm
                # nacl.encoding is importable at runtime.
                s._secret = nacl.secret.SecretBox(
                    key=config.get_required('SECRET_ENCRYPTION_KEY'),
                    encoder=nacl.encoding.HexEncoder
                ).encrypt(
                    plaintext=plain,
                    nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))

                # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
                s.encryption_scheme = 1
            else:
                s._secret = plain

            if s.id in password_secrets:
                s.type = 'password'
            else:
                s.type = 'token'

            db_session.add(s)

        db_session.commit()
Exemplo n.º 5
0
def get_redis_client(host=None, port=6379, db=STATUS_DATABASE):
    """Build a pooled StrictRedis client; host/port fall back to config."""
    if not host:
        host, port = (str(config.get_required('REDIS_HOSTNAME')),
                      int(config.get_required('REDIS_PORT')))
    return StrictRedis(host, port, db,
                       connection_pool=_get_connection_pool(host, port, db))
Exemplo n.º 6
0
Arquivo: roles.py Projeto: dlitz/inbox
    def _save_to_s3(self, data):
        """Upload *data* to S3 under its sha256 key; skip if it already
        exists with a matching hash."""
        assert len(data) > 0, 'Need data to save!'

        # Boto pools connections at the class level
        conn = S3Connection(config.get_required('AWS_ACCESS_KEY_ID'),
                            config.get_required('AWS_SECRET_ACCESS_KEY'))
        bucket = conn.get_bucket(
            config.get_required('MESSAGE_STORE_BUCKET_NAME'), validate=False)

        # If the key already exists, verify it holds the same content and
        # skip the upload.
        existing = bucket.get_key(self.data_sha256)
        if existing:
            assert existing.get_metadata('data_sha256') == self.data_sha256, \
                "Block hash doesn't match what we previously stored on s3!"
            return

        new_obj = Key(bucket)
        new_obj.set_metadata('data_sha256', self.data_sha256)
        new_obj.key = self.data_sha256
        new_obj.set_contents_from_string(data)
Exemplo n.º 7
0
def reset_db(dry_run):
    """Reset invalid autoincrement counters on every enabled shard."""
    maybe_enable_rollbar()

    hosts = config.get_required("DATABASE_HOSTS")
    users = config.get_required("DATABASE_USERS")
    # Do not include disabled shards since application services do not use them.
    manager = EngineManager(hosts, users, include_disabled=False)

    for host in hosts:
        for shard in host["SHARDS"]:
            if shard.get("DISABLED"):
                continue
            shard_id = int(shard["ID"])
            schema = shard["SCHEMA_NAME"]

            print("Resetting invalid autoincrements for database: {}".format(
                schema))
            reset_tables = reset_invalid_autoincrements(
                manager.engines[shard_id], schema, shard_id, dry_run)
            if dry_run:
                print("dry_run=True")
            if reset_tables:
                print("Reset tables: {}".format(", ".join(reset_tables)))
            else:
                print("Schema {} okay".format(schema))
Exemplo n.º 8
0
def create_test_db():
    """ Creates new, empty test databases. """
    from inbox.config import config

    database_hosts = config.get_required("DATABASE_HOSTS")
    database_users = config.get_required("DATABASE_USERS")
    schemas = [
        (
            shard["SCHEMA_NAME"],
            host["HOSTNAME"],
            database_users[host["HOSTNAME"]]["USER"],
            database_users[host["HOSTNAME"]]["PASSWORD"],
        )
        for host in database_hosts
        for shard in host["SHARDS"]
    ]
    # The various test databases necessarily have "test" in their name.
    assert all("test" in s for s, h, u, p in schemas)

    for name, host, user, password in schemas:
        cmd = (
            "DROP DATABASE IF EXISTS {name}; "
            "CREATE DATABASE IF NOT EXISTS {name} "
            "DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE "
            "utf8mb4_general_ci".format(name=name)
        )

        # Pass an argv list with shell=False: the SQL text reaches mysql
        # verbatim, with no shell-quoting pitfalls and no injection risk
        # from schema names or credentials embedded in a shell string.
        subprocess.check_call(
            ["mysql", "-h", host, "-u" + user, "-p" + password, "-e", cmd]
        )
Exemplo n.º 9
0
def log(request, config):
    """
    Returns root server logger. For others loggers, use this fixture
    for setup but then call inbox.log.get_logger().

    Testing log file is removed at the end of the test run!

    """
    import logging
    from inbox.util.file import mkdirp
    root_logger = logging.getLogger()
    # Fix: snapshot the handler list before removing. removeHandler()
    # mutates root_logger.handlers, and mutating a list during iteration
    # skips entries.
    for handler in list(root_logger.handlers):
        root_logger.removeHandler(handler)

    logdir = config.get_required('LOGDIR')
    mkdirp(logdir)
    logfile = config.get_required('TEST_LOGFILE')
    fileHandler = logging.FileHandler(logfile, encoding='utf-8')
    root_logger.addHandler(fileHandler)
    root_logger.setLevel(logging.DEBUG)

    def remove_logs():
        # Best-effort cleanup; ignore a missing file.
        try:
            os.remove(logfile)
        except OSError:
            pass
    request.addfinalizer(remove_logs)
Exemplo n.º 10
0
def get_doc_service():
    """Return a boto3 CloudSearch domain client for the document service."""
    endpoint = 'https://{0}'.format(doc_service_url)
    return boto3.client(
        "cloudsearchdomain",
        region_name="us-west-2",
        aws_access_key_id=config.get_required('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=config.get_required('AWS_SECRET_ACCESS_KEY'),
        endpoint_url=endpoint)
Exemplo n.º 11
0
def main(shard_num):
    """Open an interactive mysql shell connected to shard *shard_num*."""
    maybe_enable_rollbar()

    users = config.get_required("DATABASE_USERS")

    creds = dict(hostname=None, username=None, password=None, db_name=None)

    for database in config.get_required("DATABASE_HOSTS"):
        for shard in database["SHARDS"]:
            if shard["ID"] == shard_num:
                hostname = database["HOSTNAME"]
                creds["hostname"] = hostname
                creds["username"] = users[hostname]["USER"]
                creds["password"] = users[hostname]["PASSWORD"]
                creds["db_name"] = shard["SCHEMA_NAME"]
                break

    for key, value in creds.items():
        if value is None:
            print("Error: {key} is None".format(key=key))
            sys.exit(-1)

    # Short mysql options take their value attached with NO space:
    # the previous "-D " form passed a database name with a leading space.
    proc = subprocess.Popen([
        "mysql",
        "-h" + creds["hostname"],
        "-u" + creds["username"],
        "-D" + creds["db_name"],
        "-p" + creds["password"],
        "--safe-updates",
    ])
    proc.wait()
Exemplo n.º 12
0
def run_migrations_online():
    """Run migrations in 'online' mode.

    Open a real connection on the target shard's engine and run the
    migrations inside a transaction, closing the connection afterwards.
    """
    manager = EngineManager(config.get_required('DATABASE_HOSTS'),
                            config.get_required('DATABASE_USERS'),
                            include_disabled=True)

    connection = manager.engines[shard_id].connect()
    # Set sane lock wait timeout value.
    connection.execute('SET @@lock_wait_timeout=15')
    context.configure(connection=connection,
                      target_metadata=target_metadata)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()
Exemplo n.º 13
0
def configure_general_logging():
    """
    Configure the general server logger to output to screen and server.log.

    Logs are output to a directory configurable via LOGDIR.

    """
    logdir = config.get_required('LOGDIR')
    loglevel = config.get_required('LOGLEVEL')

    mkdirp(logdir)

    # configure properties that should cascade
    inbox_root_logger = logging.getLogger('inbox')
    inbox_root_logger.setLevel(int(loglevel))
    # don't pass messages up to the root root logger
    inbox_root_logger.propagate = False

    # log everything to screen (or main logfile if redirecting)
    inbox_root_logger.addHandler(get_tty_handler())

    logger = get_logger()

    # Fix: iterate over a copy of the handler list. removeHandler()
    # mutates logger.handlers, and removing from the live list while
    # iterating it skips every other handler.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

    logfile = os.path.join(logdir, 'server.log')
    file_handler = logging.FileHandler(logfile, encoding='utf-8')
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    return logger
Exemplo n.º 14
0
def make_default_account(db, config):
    """Build, persist, and return a default Gmail test account (with a
    port-qualified sync host) plus fake auth credentials."""
    import platform
    from inbox.models.backends.gmail import GmailAccount
    from inbox.models.backends.gmail import GmailAuthCredentials
    from inbox.auth.gmail import OAUTH_SCOPE
    from inbox.models import Namespace

    ns = Namespace()
    account = GmailAccount(
        sync_host='{}:{}'.format(platform.node(), 0),
        email_address='*****@*****.**')
    account.namespace = ns
    account.create_emailed_events_calendar()
    account.refresh_token = 'faketoken'

    creds = GmailAuthCredentials()
    creds.client_id = config.get_required('GOOGLE_OAUTH_CLIENT_ID')
    creds.client_secret = config.get_required('GOOGLE_OAUTH_CLIENT_SECRET')
    creds.refresh_token = 'faketoken'
    creds.g_id_token = 'foo'
    creds.created_at = datetime.utcnow()
    creds.updated_at = datetime.utcnow()
    creds.gmailaccount = account
    creds.scopes = OAUTH_SCOPE

    for obj in (account, creds):
        db.session.add(obj)
    db.session.commit()
    return account
Exemplo n.º 15
0
def get_redis_client(db):
    """Return the process-wide StrictRedis client, creating it lazily.

    NOTE(review): the client is cached globally after the first call, so
    *db* is honored only on first use -- confirm callers always pass the
    same db.
    """
    global redis_client
    if redis_client is None:
        redis_client = StrictRedis(
            str(config.get_required('REDIS_HOSTNAME')),
            int(config.get_required('REDIS_PORT')),
            db)
    return redis_client
Exemplo n.º 16
0
def get_queue():
    """Return the action queue backed by the configured Redis server."""
    # The queue label is set via config to allow multiple distinct Inbox
    # instances to hit the same Redis server without interfering with each
    # other.
    connection = StrictRedis(host=config.get_required('REDIS_HOST'),
                             port=config.get_required('REDIS_PORT'),
                             db=0)
    return Queue(config.get_required('ACTION_QUEUE_LABEL'),
                 connection=connection)
Exemplo n.º 17
0
def get_service_urls():
    """Return (search_service_url, doc_service_url) for the CloudSearch
    domain."""
    client = boto3.client(
        'cloudsearch', region_name="us-west-2",
        aws_access_key_id=config.get_required('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=config.get_required('AWS_SECRET_ACCESS_KEY'))
    domain_config = get_domain_config(client, CLOUDSEARCH_DOMAIN)
    return (domain_config['SearchService']['Endpoint'],
            domain_config['DocService']['Endpoint'])
Exemplo n.º 18
0
Arquivo: sync.py Projeto: nbolt/inbox
def get_heartbeat_config():
    """Return (alive_threshold, alive_threshold_eas), loading each from
    config on first use and caching it in a module global."""
    global g_alive_threshold, g_alive_threshold_eas

    if g_alive_threshold is None:
        g_alive_threshold = int(config.get_required('ALIVE_THRESHOLD'))
    if g_alive_threshold_eas is None:
        g_alive_threshold_eas = int(config.get_required('ALIVE_THRESHOLD_EAS'))

    return (g_alive_threshold, g_alive_threshold_eas)
Exemplo n.º 19
0
    def __init__(self, secret_name):
        """Set up the encryption scheme; a null scheme when ENCRYPT_SECRETS
        is off, otherwise a static-key SecretBox keyed from config under
        *secret_name*."""
        self._closed = False

        if config.get_required("ENCRYPT_SECRETS"):
            self.default_scheme = EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
            self._secret_box = nacl.secret.SecretBox(
                key=config.get_required(secret_name),
                encoder=nacl.encoding.HexEncoder)
        else:
            self.default_scheme = EncryptionScheme.NULL
            self._secret_box = None
Exemplo n.º 20
0
    def __init__(self, secret_name):
        """Initialize encryption state.

        :param secret_name: config key holding the hex-encoded static key.
        """
        # Lifecycle flag; presumably flipped by a close()/teardown defined
        # elsewhere -- confirm.
        self._closed = False

        # With encryption disabled, use the null scheme and no secret box.
        if not config.get_required('ENCRYPT_SECRETS'):
            self.default_scheme = EncryptionScheme.NULL
            self._secret_box = None
            return

        # Static-key SecretBox; the key is hex-encoded in config.
        self.default_scheme = EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
        self._secret_box = nacl.secret.SecretBox(
            key=config.get_required(secret_name),
            encoder=nacl.encoding.HexEncoder)
Exemplo n.º 21
0
Arquivo: roles.py Projeto: dlitz/inbox
    def _get_from_s3(self):
        """Fetch and return this blob's bytes from S3, keyed by sha256."""
        assert self.data_sha256, "Can't get data with no hash!"

        # Boto pools connections at the class level
        conn = S3Connection(config.get_required('AWS_ACCESS_KEY_ID'),
                            config.get_required('AWS_SECRET_ACCESS_KEY'))
        bucket = conn.get_bucket(
            config.get_required('MESSAGE_STORE_BUCKET_NAME'), validate=False)

        data_obj = bucket.get_key(self.data_sha256)
        assert data_obj, 'No data returned!'
        return data_obj.get_contents_as_string()
Exemplo n.º 22
0
def main_engine(pool_size=DB_POOL_SIZE,
                max_overflow=DB_POOL_MAX_OVERFLOW,
                echo=False):
    """Create the main SQLAlchemy engine for the configured MySQL database.

    :param pool_size: connection-pool size.
    :param max_overflow: extra connections allowed beyond the pool size.
    :param echo: if True, SQLAlchemy logs all emitted SQL.
    :returns: the configured Engine, with a checkout listener that reports
        pool stats to statsd.
    """
    database_name = config.get_required('MYSQL_DATABASE')
    # pool_recycle=3600 recycles connections hourly; the gevent 'waiter'
    # connect arg keeps the MySQL driver cooperative under gevent.
    engine = create_engine(engine_uri(database_name),
                           listeners=[ForceStrictMode()],
                           isolation_level='READ COMMITTED',
                           echo=echo,
                           pool_size=pool_size,
                           pool_recycle=3600,
                           max_overflow=max_overflow,
                           connect_args={
                               'charset': 'utf8mb4',
                               'waiter': gevent_waiter
                           })

    @event.listens_for(engine, 'checkout')
    def receive_checkout(dbapi_connection, connection_record,
                         connection_proxy):
        '''Log checkedout and overflow when a connection is checked out'''
        # Dots would create spurious statsd name segments.
        hostname = gethostname().replace(".", "-")
        process_name = str(config.get("PROCESS_NAME", "unknown"))

        statsd_client.gauge(
            ".".join([
                "dbconn", database_name, hostname, process_name, "checkedout"
            ]), connection_proxy._pool.checkedout())

        statsd_client.gauge(
            ".".join(
                ["dbconn", database_name, hostname, process_name, "overflow"]),
            connection_proxy._pool.overflow())

    return engine
Exemplo n.º 23
0
def setup_test_db():
    """
    Creates a new, empty test database with table structure generated
    from declarative model classes; returns an engine for that database.

    """
    from inbox.config import config
    from inbox.ignition import engine_manager
    from inbox.ignition import init_db

    # Hardcode this part instead of reading from config because the idea of a
    # general-purpose 'DROP DATABASE' function is unsettling
    for name in ('test', 'test_1'):
        cmd = 'DROP DATABASE IF EXISTS {name}; ' \
              'CREATE DATABASE IF NOT EXISTS {name} ' \
              'DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE ' \
              'utf8mb4_general_ci'.format(name=name)

        # Pass an argv list with shell=False so the SQL text is handed to
        # mysql verbatim -- no shell quoting/injection pitfalls.
        subprocess.check_call(
            ['mysql', '-uinboxtest', '-pinboxtest', '-e', cmd])

    database_hosts = config.get_required('DATABASE_HOSTS')
    for host in database_hosts:
        for shard in host['SHARDS']:
            key = shard['ID']
            init_db(engine_manager.engines[key], key)
Exemplo n.º 24
0
 def secret(self):
     """Return the decrypted secret for SecretBox-encrypted rows.

     Implicitly returns None for any other encryption scheme.
     """
     if self.encryption_scheme != \
             EncryptionScheme.SECRETBOX_WITH_STATIC_KEY:
         return None
     box = nacl.secret.SecretBox(
         key=config.get_required('SECRET_ENCRYPTION_KEY'),
         encoder=nacl.encoding.HexEncoder)
     return box.decrypt(self._secret)
Exemplo n.º 25
0
 def _data_file_directory(self):
     """Return this blob's on-disk directory, fanned out six levels deep
     by the leading hex chars of its sha256 so no folder grows too large."""
     assert self.data_sha256
     digest = self.data_sha256
     nested = [digest[i] for i in range(6)]
     return os.path.join(config.get_required('MSG_PARTS_DIRECTORY'), *nested)
Exemplo n.º 26
0
def main_engine(pool_size=DB_POOL_SIZE, max_overflow=DB_POOL_MAX_OVERFLOW,
                pool_timeout=DB_POOL_TIMEOUT, echo=False):
    """Create the main SQLAlchemy engine for the configured MySQL database.

    :param pool_size: connection-pool size.
    :param max_overflow: extra connections allowed beyond the pool size.
    :param pool_timeout: seconds to wait for a pooled connection.
    :param echo: if True, SQLAlchemy logs all emitted SQL.
    :returns: the configured Engine, with a checkout listener that reports
        pool stats to statsd.
    """
    database_name = config.get_required('MYSQL_DATABASE')
    # pool_recycle=3600 recycles connections hourly; the gevent 'waiter'
    # connect arg keeps the MySQL driver cooperative under gevent.
    engine = create_engine(engine_uri(database_name),
                           listeners=[ForceStrictMode()],
                           isolation_level='READ COMMITTED',
                           echo=echo,
                           pool_size=pool_size,
                           pool_timeout=pool_timeout,
                           pool_recycle=3600,
                           max_overflow=max_overflow,
                           connect_args={'charset': 'utf8mb4',
                                         'waiter': gevent_waiter})

    @event.listens_for(engine, 'checkout')
    def receive_checkout(dbapi_connection, connection_record,
                         connection_proxy):
        '''Log checkedout and overflow when a connection is checked out'''
        # Dots would create spurious statsd name segments.
        hostname = gethostname().replace(".", "-")
        process_name = str(config.get("PROCESS_NAME", "unknown"))

        statsd_client.gauge(".".join(
            ["dbconn", database_name, hostname, process_name,
             "checkedout"]),
            connection_proxy._pool.checkedout())

        statsd_client.gauge(".".join(
            ["dbconn", database_name, hostname, process_name,
             "overflow"]),
            connection_proxy._pool.overflow())

    return engine
Exemplo n.º 27
0
Arquivo: roles.py Projeto: wmv/inbox
 def _data_file_directory(self):
     """Return the on-disk directory for this blob's data file."""
     assert self.data_sha256
     # Nest it 6 items deep so we don't have folders with too many files.
     h = self.data_sha256
     root = config.get_required('MSG_PARTS_DIRECTORY')
     return os.path.join(root,
                         h[0], h[1], h[2], h[3], h[4], h[5])
Exemplo n.º 28
0
Arquivo: roles.py Projeto: dlitz/inbox
    def data(self, value):
        """Setter: record size and sha256 for *value*, encrypt it, and
        persist it to S3 or local disk depending on STORE_MSG_ON_S3."""
        # Cache value in memory unencrypted. Otherwise message-parsing incurs
        # a disk or S3 roundtrip.
        self._data = value

        assert value is not None, \
            "Blob can't have NoneType data (can be zero-length, though!)"
        assert type(value) is not unicode, 'Blob bytes must be encoded'

        self.size = len(value)
        self.data_sha256 = sha256(value).hexdigest()

        # Encrypt before saving
        self.encryption_scheme = EncryptionScheme.SECRETBOX_WITH_STATIC_KEY

        # The nonce is derived from the content hash, so encryption here is
        # deterministic: identical plaintext yields identical ciphertext.
        encrypted_value = nacl.secret.SecretBox(
            key=config.get_required('BLOCK_ENCRYPTION_KEY'),
            encoder=nacl.encoding.HexEncoder
        ).encrypt(
            plaintext=value,
            nonce=self.data_sha256[:nacl.secret.SecretBox.NONCE_SIZE],
            encoder=nacl.encoding.HexEncoder)

        # Zero-length blobs are cached in memory but never written out.
        if self.size > 0:
            if STORE_MSG_ON_S3:
                self._save_to_s3(encrypted_value)
            else:
                self._save_to_disk(encrypted_value)
        else:
            log.warning('Not saving 0-length {1} {0}'.format(
                self.id, self.__class__.__name__))
Exemplo n.º 29
0
Arquivo: roles.py Projeto: dlitz/inbox
    def data(self):
        """Getter: return this blob's decrypted bytes from the in-memory
        cache, S3, or disk, verifying them against the stored sha256."""
        encrypted = True
        if self.size == 0:
            log.warning('block size is 0')
            # NOTE: This is a placeholder for "empty bytes". If this doesn't
            # work as intended, it will trigger the hash assertion later.
            value = ""
            encrypted = False
        elif hasattr(self, '_data'):
            # On initial download we temporarily store data unencrypted
            # in memory
            value = self._data
            encrypted = False
        elif STORE_MSG_ON_S3:
            value = self._get_from_s3()
        else:
            value = self._get_from_disk()

        if value is None:
            log.error("Couldn't find data!")
            return value

        # Decrypt if reqd.
        if encrypted and self.encryption_scheme == \
                EncryptionScheme.SECRETBOX_WITH_STATIC_KEY:
            value = nacl.secret.SecretBox(
                key=config.get_required('BLOCK_ENCRYPTION_KEY'),
                encoder=nacl.encoding.HexEncoder
            ).decrypt(
                value,
                encoder=nacl.encoding.HexEncoder)

        # Integrity check: the data must match the hash recorded at save time.
        assert self.data_sha256 == sha256(value).hexdigest(), \
            "Returned data doesn't match stored hash!"
        return value
Exemplo n.º 30
0
def get_sentry_client():
    """Return the module-level Sentry client, creating and caching it
    lazily on first use."""
    # Fix: the original declared `global sentry_client` (a different name
    # from the `_sentry_client` it checked) and never cached the client it
    # built, so every call constructed a fresh raven.Client.
    global _sentry_client
    if _sentry_client is None:
        sentry_dsn = config.get_required('SENTRY_DSN')
        _sentry_client = raven.Client(
            sentry_dsn, processors=('inbox.log.TruncatingProcessor', ))
    return _sentry_client
Exemplo n.º 31
0
Arquivo: log.py Projeto: rbs-pli/inbox
def configure_logging(is_prod):
    """Install a stdout handler on the root logger (idempotently) and, if
    configured, create the global Sentry client."""
    tty_handler = logging.StreamHandler(sys.stdout)
    if not is_prod:
        # Use a more human-friendly format.
        formatter = colorlog.ColoredFormatter(
            '%(log_color)s[%(levelname)s]%(reset)s %(message)s',
            reset=True,
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red'
            })
    else:
        formatter = logging.Formatter('%(message)s')
    tty_handler.setFormatter(formatter)
    # Marker attribute so our own handlers can be recognized later.
    tty_handler._inbox = True

    # Configure the root logger.
    root_logger = logging.getLogger()
    # Fix: iterate a copy of the handler list -- removeHandler() mutates
    # root_logger.handlers, and removing from the live list while
    # iterating skips handlers.
    for handler in list(root_logger.handlers):
        # If the handler was previously installed, remove it so that repeated
        # calls to configure_logging() are idempotent.
        if getattr(handler, '_inbox', False):
            root_logger.removeHandler(handler)
    root_logger.addHandler(tty_handler)
    # Set loglevel DEBUG if config value is missing.
    root_logger.setLevel(config.get('LOGLEVEL', 10))

    if config.get('SENTRY_EXCEPTIONS'):
        sentry_dsn = config.get_required('SENTRY_DSN')
        global sentry_client
        sentry_client = raven.Client(
            sentry_dsn, processors=('inbox.log.TruncatingProcessor', ))
Exemplo n.º 32
0
Arquivo: log.py Projeto: dlitz/inbox
def configure_logging(is_prod):
    """Install a stdout handler on the root logger (idempotently) and, if
    configured, create the global Sentry client."""
    tty_handler = logging.StreamHandler(sys.stdout)
    if not is_prod:
        # Use a more human-friendly format.
        formatter = colorlog.ColoredFormatter(
            '%(log_color)s[%(levelname)s]%(reset)s %(message)s',
            reset=True, log_colors={'DEBUG': 'cyan', 'INFO': 'green',
                                    'WARNING': 'yellow', 'ERROR': 'red',
                                    'CRITICAL': 'red'})
    else:
        formatter = logging.Formatter('%(message)s')
    tty_handler.setFormatter(formatter)
    # Marker attribute so our own handlers can be recognized later.
    tty_handler._inbox = True

    # Configure the root logger.
    root_logger = logging.getLogger()
    # Fix: iterate a snapshot -- removeHandler() mutates
    # root_logger.handlers, and mutating during iteration skips entries.
    for handler in list(root_logger.handlers):
        # If the handler was previously installed, remove it so that repeated
        # calls to configure_logging() are idempotent.
        if getattr(handler, '_inbox', False):
            root_logger.removeHandler(handler)
    root_logger.addHandler(tty_handler)
    # Set loglevel DEBUG if config value is missing.
    root_logger.setLevel(config.get('LOGLEVEL', 10))

    if config.get('SENTRY_EXCEPTIONS'):
        sentry_dsn = config.get_required('SENTRY_DSN')
        global sentry_client
        sentry_client = raven.Client(
            sentry_dsn,
            processors=('inbox.log.TruncatingProcessor',))
Exemplo n.º 33
0
def get_sentry_client():
    """Return the cached Sentry client, creating it on first use."""
    # Fix: declare/assign the *same* global that is checked (the original
    # declared `sentry_client` but tested `_sentry_client`), and cache the
    # client instead of rebuilding it on every call.
    global _sentry_client
    if _sentry_client is None:
        sentry_dsn = config.get_required('SENTRY_DSN')
        _sentry_client = raven.Client(
            sentry_dsn,
            processors=('inbox.log.TruncatingProcessor',))
    return _sentry_client
Exemplo n.º 34
0
def get_open_shards():
    """Return the IDs of shards that are OPEN and not DISABLED.

    Can't use engine_manager.engines here because it does not track shard
    state (open/closed).
    """
    open_shards = []
    for host in config.get_required('DATABASE_HOSTS'):
        for shard in host['SHARDS']:
            if shard['OPEN'] and not shard.get('DISABLED'):
                open_shards.append(shard['ID'])
    return open_shards
Exemplo n.º 35
0
def get_open_shards():
    """Return the IDs of shards that are OPEN and not DISABLED."""
    # Can't use engine_manager.engines here because it does not track
    # shard state (open/ closed)
    database_hosts = config.get_required("DATABASE_HOSTS")
    open_shards = []
    for host in database_hosts:
        open_shards.extend(shard["ID"] for shard in host["SHARDS"]
                           if shard["OPEN"] and not shard.get("DISABLED"))

    return open_shards
Exemplo n.º 36
0
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    The context is configured with an engine (an Engine is acceptable
    where a bare URL would do); context.execute() emits SQL to the script
    output, so no DBAPI needs to be available.
    """
    manager = EngineManager(config.get_required('DATABASE_HOSTS'),
                            config.get_required('DATABASE_USERS'),
                            include_disabled=True)
    context.configure(engine=manager.engines[shard_id])

    with context.begin_transaction():
        context.run_migrations()
Exemplo n.º 37
0
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # include_disabled=True: offline SQL generation covers disabled shards too.
    engine_manager = EngineManager(
        config.get_required("DATABASE_HOSTS"), config.get_required("DATABASE_USERS"), include_disabled=True
    )
    context.configure(engine=engine_manager.engines[shard_id])

    with context.begin_transaction():
        context.run_migrations()
Exemplo n.º 38
0
 def _data_file_directory(h):
     """Return the six-level nested storage directory for hex digest *h*.

     NOTE(review): the first parameter is named *h* (not *self*) and is
     indexed like a hex digest -- presumably invoked as a static helper;
     confirm against callers.
     """
     root = config.get_required("MSG_PARTS_DIRECTORY")
     return os.path.join(root, h[0], h[1], h[2], h[3], h[4], h[5])
Exemplo n.º 39
0
def upgrade():
    """Migration: drop block-level encryption bookkeeping and store secrets
    decrypted in a new plaintext `secret` column, adding `acl_id` and
    converting `type` to an integer.
    """
    # Block table
    op.drop_column("block", "encryption_scheme")

    # Secret table
    op.add_column("secret", sa.Column("acl_id", sa.Integer(), nullable=False))

    op.alter_column(
        "secret",
        "type",
        type_=sa.Integer(),
        existing_server_default=None,
        existing_nullable=False,
    )

    op.add_column("secret",
                  sa.Column("secret", sa.String(length=512), nullable=True))

    import nacl.secret
    import nacl.utils

    from inbox.config import config
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope

    engine = main_engine(pool_size=1, max_overflow=0)
    # Reflect the live schema: migrations must not depend on the current
    # model definitions.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    key = config.get_required("SECRET_ENCRYPTION_KEY")

    class Secret(Base):
        __table__ = Base.metadata.tables["secret"]

    with session_scope(versioned=False) as db_session:
        # Scheme 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY.
        secrets = (db_session.query(Secret).filter(
            Secret.encryption_scheme == 1,
            Secret._secret.isnot(None)).order_by(Secret.id).all())

        for s in secrets:
            encrypted = s._secret

            # NOTE(review): nacl.encoding is referenced though only
            # nacl.secret / nacl.utils are imported here -- confirm it is
            # importable at runtime.
            s.secret = nacl.secret.SecretBox(
                key=key, encoder=nacl.encoding.HexEncoder).decrypt(encrypted)

            # Picked arbitrarily
            s.acl_id = 0
            s.type = 0

            db_session.add(s)

        db_session.commit()

    op.drop_column("secret", "_secret")
    op.drop_column("secret", "encryption_scheme")
Exemplo n.º 40
0
def main():
    """Verify the schema of every enabled database shard."""
    maybe_enable_rollbar()

    hosts = config.get_required("DATABASE_HOSTS")
    users = config.get_required("DATABASE_USERS")
    # Do not include disabled shards since application services do not use them.
    manager = EngineManager(hosts, users, include_disabled=False)

    enabled_shards = (
        shard
        for host in hosts
        for shard in host["SHARDS"]
        if not shard.get("DISABLED")
    )
    for shard in enabled_shards:
        shard_id = int(shard["ID"])
        schema = shard["SCHEMA_NAME"]
        print("Verifying database: {}".format(schema))
        verify_db(manager.engines[shard_id], schema, shard_id)
Exemplo n.º 41
0
class OutlookAccount(ImapAccount, OAuthAccount):
    """An Outlook/Microsoft account synced over IMAP with OAuth auth.

    Joined-table inheritance from ImapAccount (shared primary key), plus
    the OAuthAccount mixin for token handling.
    """

    # OAuth app credentials are read once from config at class definition
    # time, so the process must be restarted to pick up config changes.
    OAUTH_CLIENT_ID = config.get_required("MICROSOFT_OAUTH_CLIENT_ID")
    OAUTH_CLIENT_SECRET = config.get_required("MICROSOFT_OAUTH_CLIENT_SECRET")

    # Shares its primary key with the parent ImapAccount row.
    id = Column(ForeignKey(ImapAccount.id, ondelete="CASCADE"),
                primary_key=True)

    __mapper_args__ = {"polymorphic_identity": "outlookaccount"}

    client_id = Column(String(256))
    scope = Column(String(512))

    # TODO: These fields are unused.
    client_secret = Column(String(256))
    family_name = Column(String(256))
    given_name = Column(String(256))
    gender = Column(String(16))
    o_id = Column(String(32))  # `id`
    o_id_token = Column(String(1024))  # `id_token`
    link = Column(String(256))
    locale = Column(String(8))

    @property
    def provider(self):
        """Provider slug for this backend (module-level PROVIDER constant)."""
        return PROVIDER

    @property
    def category_type(self):
        """Outlook organizes mail into folders (not labels)."""
        return "folder"

    @property
    def thread_cls(self):
        # Imported lazily to avoid a circular import at module load time.
        from inbox.models.backends.imap import ImapThread

        return ImapThread

    @property
    def actionlog_cls(self):
        # Imported lazily to avoid a circular import at module load time.
        from inbox.models.action_log import ActionLog

        return ActionLog
Exemplo n.º 42
0
def upgrade():
    """Alembic migration: decrypt stored secrets back into plaintext.

    Schema changes: drops `block.encryption_scheme`, adds `secret.acl_id`
    and a plaintext `secret.secret` column, then removes the encrypted
    `secret._secret` and `secret.encryption_scheme` columns after the data
    migration below.
    """
    # Block table
    op.drop_column('block', 'encryption_scheme')

    # Secret table
    op.add_column('secret', sa.Column('acl_id', sa.Integer(), nullable=False))

    op.alter_column('secret', 'type', type_=sa.Integer(),
                    existing_server_default=None,
                    existing_nullable=False)

    op.add_column('secret',
                  sa.Column('secret', sa.String(length=512), nullable=True))

    import nacl.secret
    import nacl.utils
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.config import config

    engine = main_engine(pool_size=1, max_overflow=0)
    # Reflect the live schema rather than importing models, so the migration
    # is independent of the current model definitions.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    key = config.get_required('SECRET_ENCRYPTION_KEY')

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    with session_scope(versioned=False) as \
            db_session:
        # Only rows encrypted with scheme 1 (SECRETBOX_WITH_STATIC_KEY) and
        # a non-NULL ciphertext need decrypting.
        secrets = db_session.query(Secret).filter(
            Secret.encryption_scheme == 1,
            Secret._secret.isnot(None)).order_by(Secret.id).all()

        for s in secrets:
            encrypted = s._secret

            # Decrypt with the static key; ciphertext is hex-encoded.
            s.secret = nacl.secret.SecretBox(
                key=key,
                encoder=nacl.encoding.HexEncoder
            ).decrypt(encrypted)

            # Picked arbitrarily
            s.acl_id = 0
            s.type = 0

            db_session.add(s)

        db_session.commit()

    # Drop the now-unused encrypted columns.
    op.drop_column('secret', '_secret')
    op.drop_column('secret', 'encryption_scheme')
Exemplo n.º 43
0
Arquivo: sync.py Projeto: nbolt/inbox
def get_redis_client():
    """Return the process-wide StrictRedis client, creating it lazily.

    Connection parameters are read from config on first use and cached in
    module-level globals (redis_hostname, redis_port, redis_database).

    Raises:
        ValueError: if REDIS_DATABASE is outside the allowed 1-15 range.
    """
    global redis_client
    if redis_client is None:
        global redis_hostname
        if redis_hostname is None:
            redis_hostname = str(config.get_required('REDIS_HOSTNAME'))

        global redis_port
        if redis_port is None:
            redis_port = int(config.get_required('REDIS_PORT'))

        global redis_database
        if redis_database is None:
            redis_database = int(config.get_required('REDIS_DATABASE'))
            # Raise instead of assert: asserts are stripped under
            # `python -O`, which would silently skip this validation.
            if not 1 <= redis_database <= 15:
                raise ValueError(
                    'REDIS_DATABASE must be between 1 and 15, got %d'
                    % redis_database)

        redis_client = StrictRedis(host=redis_hostname,
                                   port=redis_port,
                                   db=redis_database)

    return redis_client
Exemplo n.º 44
0
def get_shard_schemas():
    """Return a mapping of shard id -> schema name for all enabled shards."""
    # Can't use engine_manager.engines here because it does not track
    # shard schemas.
    database_hosts = config.get_required("DATABASE_HOSTS")
    return {
        shard["ID"]: shard["SCHEMA_NAME"]
        for host in database_hosts
        for shard in host["SHARDS"]
        if not shard.get("DISABLED")
    }
Exemplo n.º 45
0
def get_shard_schemas():
    """Return a mapping of shard id -> schema name for all enabled shards."""
    # Can't use engine_manager.engines here because it does not track
    # shard schemas.
    shard_schemas = {}
    for host in config.get_required('DATABASE_HOSTS'):
        enabled = (s for s in host['SHARDS'] if not s.get('DISABLED'))
        for shard in enabled:
            shard_schemas[shard['ID']] = shard['SCHEMA_NAME']
    return shard_schemas
def upgrade():
    """Alembic migration: encrypt plaintext secrets in one shard.

    The target shard is chosen via alembic's `-x shard_id=N` argument.
    Rows with `encryption_scheme == 0` (plaintext) are encrypted in place
    with the static SecretBox key when ENCRYPT_SECRETS is enabled;
    otherwise they are normalized to bytes and left plaintext.
    """
    from inbox.config import config
    import nacl.secret
    import nacl.utils
    from inbox.ignition import engine_manager
    from inbox.models.session import session_scope

    # Shard selected at invocation time: alembic -x shard_id=N
    shard_id = int(context.get_x_argument(as_dictionary=True).get('shard_id'))
    engine = engine_manager.engines[shard_id]
    # Reflect the live schema so the migration doesn't depend on current
    # model definitions.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    # shard_id << 48 produces a key in the shard's id space for
    # session_scope's shard routing.
    with session_scope(shard_id << 48, versioned=False) as db_session:
        # Scheme 0 == plaintext; only those rows need encrypting.
        secrets = db_session.query(Secret).filter(
            Secret._secret.isnot(None),
            Secret.encryption_scheme == 0).all()

        for s in secrets:
            # Python 2: normalize unicode values to UTF-8 bytes first.
            plain = s._secret.encode('utf-8') if isinstance(s._secret, unicode) \
                else s._secret
            if config.get_required('ENCRYPT_SECRETS'):

                # Fresh random nonce per secret; ciphertext is hex-encoded.
                s._secret = nacl.secret.SecretBox(
                    key=config.get_required('SECRET_ENCRYPTION_KEY'),
                    encoder=nacl.encoding.HexEncoder
                ).encrypt(
                    plaintext=plain,
                    nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))

                # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
                s.encryption_scheme = 1
            else:
                s._secret = plain

            db_session.add(s)

        db_session.commit()
Exemplo n.º 47
0
def upgrade():
    """Alembic migration: move `secret.secret` into encrypted `_secret`.

    Drops the unused `acl_id` column, converts `type` to an enum, adds the
    `encryption_scheme` and BLOB `_secret` columns, encrypts every existing
    secret with the static SecretBox key, then drops the old plaintext
    `secret` column.
    """
    # Can just drop this, wasn't really used before
    op.drop_column('secret', 'acl_id')

    op.alter_column('secret', 'type', type_=sa.Enum('password', 'token'),
                    existing_server_default=None,
                    existing_nullable=False)
    op.add_column('secret', sa.Column('encryption_scheme', sa.Integer(),
                  server_default='0', nullable=False))

    # Change name, type
    op.add_column('secret', sa.Column('_secret', sa.BLOB(),
                                      nullable=False))

    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    from inbox.config import config
    from inbox.models.util import EncryptionScheme

    engine = main_engine(pool_size=1, max_overflow=0)
    # Reflect the live schema so the migration doesn't depend on current
    # model definitions.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    with session_scope(ignore_soft_deletes=False, versioned=False) as \
            db_session:
        secrets = db_session.query(Secret).all()

        for s in secrets:
            # Python 2: normalize unicode values to ASCII bytes first.
            plain = s.secret.encode('ascii') if isinstance(s.secret, unicode) \
                else s.secret

            # Fresh random nonce per secret; both key and ciphertext are
            # hex-encoded.
            s._secret = nacl.secret.SecretBox(
                key=config.get_required('SECRET_ENCRYPTION_KEY'),
                encoder=nacl.encoding.HexEncoder
            ).encrypt(
                plaintext=plain,
                nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE),
                encoder=nacl.encoding.HexEncoder)

            s.encryption_scheme = EncryptionScheme.SECRETBOX_WITH_STATIC_KEY

            # Picked arbitrarily
            s.type = 'password'

            db_session.add(s)

        db_session.commit()

    # Plaintext column no longer needed once everything is encrypted.
    op.drop_column('secret', 'secret')
Exemplo n.º 48
0
def generate_open_shard_key():
    """
    Return the key that can be passed into session_scope() for an open shard,
    picked at random.

    """
    open_shards = [
        shard['ID']
        for host in config.get_required('DATABASE_HOSTS')
        for shard in host['SHARDS']
        if shard['OPEN'] and not shard.get('DISABLED')
    ]

    # TODO[k]: Always pick min()instead?
    return random.choice(open_shards) << 48
Exemplo n.º 49
0
    def secret(self, secret):
        """
        Encrypt and store *secret* using the static-key SecretBox scheme.

        The secret must be a byte sequence.
        The type must be specified as 'password'/'token'.

        """
        if not isinstance(secret, bytes):
            raise TypeError('Invalid secret')

        self.encryption_scheme = EncryptionScheme.SECRETBOX_WITH_STATIC_KEY

        box = nacl.secret.SecretBox(
            key=config.get_required('SECRET_ENCRYPTION_KEY'),
            encoder=nacl.encoding.HexEncoder)
        nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
        self._secret = box.encrypt(plaintext=secret, nonce=nonce)
Exemplo n.º 50
0
def setup_test_db():
    """
    Creates new, empty test databases with table structures generated
    from declarative model classes.

    """
    from inbox.config import config
    from inbox.ignition import engine_manager, init_db

    create_test_db()

    # Initialize every shard on every configured host.
    for host in config.get_required("DATABASE_HOSTS"):
        for shard in host["SHARDS"]:
            shard_id = shard["ID"]
            init_db(engine_manager.engines[shard_id], shard_id)
Exemplo n.º 51
0
def create_test_db():
    """Create new, empty test databases, dropping any existing ones.

    Raises:
        ValueError: if any configured schema name lacks "test" in it,
            to avoid clobbering a real database.
    """
    from inbox.config import config

    database_hosts = config.get_required('DATABASE_HOSTS')
    schemas = [shard['SCHEMA_NAME'] for host in database_hosts for
               shard in host['SHARDS']]
    # The various test databases necessarily have "test" in their name.
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would silently drop the safety check.
    if not all('test' in s for s in schemas):
        raise ValueError(
            'Refusing to drop non-test database(s): {}'.format(schemas))

    for name in schemas:
        cmd = 'DROP DATABASE IF EXISTS {name}; ' \
              'CREATE DATABASE IF NOT EXISTS {name} ' \
              'DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE ' \
              'utf8mb4_general_ci'.format(name=name)

        # Pass argv directly (shell=False) so schema names from config can
        # never be interpreted by the shell.
        subprocess.check_call(
            ['mysql', '-uinboxtest', '-pinboxtest', '-e', cmd])
Exemplo n.º 52
0
def create_test_db():
    """Create new, empty test databases, dropping any existing ones.

    Raises:
        ValueError: if any configured schema name lacks "test" in it,
            to avoid clobbering a real database.
    """
    from inbox.config import config

    database_hosts = config.get_required('DATABASE_HOSTS')
    schemas = [shard['SCHEMA_NAME'] for host in database_hosts for
               shard in host['SHARDS']]
    # The various test databases necessarily have "test" in their name.
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would silently drop the safety check.
    if not all('test' in s for s in schemas):
        raise ValueError(
            'Refusing to drop non-test database(s): {}'.format(schemas))

    for name in schemas:
        cmd = 'DROP DATABASE IF EXISTS {name}; ' \
              'CREATE DATABASE IF NOT EXISTS {name} ' \
              'DEFAULT CHARACTER SET utf8mb4 DEFAULT COLLATE ' \
              'utf8mb4_general_ci'.format(name=name)

        # Pass argv directly (shell=False) so schema names from config can
        # never be interpreted by the shell.
        subprocess.check_call(
            ['mysql', '-uinboxtest', '-pinboxtest', '-e', cmd])
Exemplo n.º 53
0
def verify_backfilled_index(namespace_id, created_before=None):
    """
    Verify that a backfilled namespace is correctly indexed into Elasticsearch.

    Elasticsearch is queried for the documents whose ids == the public_ids of
    the last thread, message that fits the `namespace_id`, `created_before`
    criteria specified. Raises an IndexException if the namespace_id was
    not indexed successfully.

    Note: This check is not accurate for verifying index creation via the
    search-index-service.

    """
    # Only the first configured ES host is queried.
    es_host = config.get_required('ELASTICSEARCH_HOSTS')[0]

    with session_scope() as db_session:
        # The ES index name is the namespace's public id.
        namespace_public_id = db_session.query(
            Namespace.public_id).get(namespace_id)

        for obj_type in [Thread, Message]:
            filters = [obj_type.namespace_id == namespace_id]

            if created_before:
                filters.append(obj_type.created_at <= created_before)

            # Pick an object to query Elasticsearch for.
            # Note this is the last object, rather than the first, in
            # order for the check to be accurate -
            # we bulk_index in chunks; if any chunk fails, an exception is
            # raised causing subsequent chunks to not be indexed.
            id_, _ = db_session.query(func.max(
                obj_type.id)).filter(*filters).one()
            public_id = db_session.query(obj_type.public_id).get(id_)

            # Query Elasticsearch.
            # _count with q=id:<public_id> should match exactly one document
            # if the object was indexed.
            url = 'http://{}:{}/{}/{}/_count?q=id:{}'.format(
                es_host['host'], es_host['port'], namespace_public_id,
                obj_type.__tablename__, public_id)

            response = requests.get(url)

            # Any HTTP failure or a count != 1 means the object is missing
            # from the index.
            if response.status_code != 200 or response.json()['count'] != 1:
                raise IndexException(namespace_id, namespace_public_id,
                                     obj_type.__tablename__, public_id)
Exemplo n.º 54
0
def make_default_account(db, config):
    """Create, persist, and return a fake Gmail account for tests."""
    import platform

    from inbox.models import Namespace
    from inbox.models.backends.gmail import GmailAccount

    sync_host = "{}:{}".format(platform.node(), 0)
    account = GmailAccount(
        sync_host=sync_host,
        email_address="*****@*****.**",
    )
    account.namespace = Namespace()
    account.client_id = config.get_required("GOOGLE_OAUTH_CLIENT_ID")
    account.create_emailed_events_calendar()
    account.refresh_token = "faketoken"

    db.session.add(account)
    db.session.commit()
    return account
Exemplo n.º 55
0
    def encrypt(self, plaintext, encryption_scheme=None):
        """
        Encrypt the specified secret.  If no encryption_scheme is specified
        (recommended), a reasonable default will be used.

        Returns (ciphertext, encryption_scheme)

        Raises:
            ValueError: if the oracle is closed, the scheme value is out of
                range/unsupported, or encryption is disabled in config.
            TypeError: if plaintext is unicode or the scheme is not an Enum.
        """
        if self._closed:
            raise ValueError("Connection to crypto oracle already closed")

        # default args
        if encryption_scheme is None:
            encryption_scheme = self.default_scheme

        # sanity check
        if isinstance(plaintext, unicode):
            raise TypeError("plaintext should be bytes, not unicode")
        if not isinstance(encryption_scheme, enum.Enum):
            raise TypeError("encryption_scheme should be an Enum")
        # Scheme values must fit in a signed 32-bit integer.
        if not 0 <= encryption_scheme.value <= 2**31 - 1:
            raise ValueError("encryption_scheme value out of range")
        if encryption_scheme != EncryptionScheme.NULL and not config.get_required(
                "ENCRYPT_SECRETS"):
            raise ValueError("ENCRYPT_SECRETS not enabled in config")

        # encrypt differently depending on the scheme
        if encryption_scheme == EncryptionScheme.NULL:
            # No-op scheme: ciphertext is the plaintext itself.
            ciphertext = plaintext

        elif encryption_scheme == EncryptionScheme.SECRETBOX_WITH_STATIC_KEY:
            # Fresh random nonce per encryption.
            ciphertext = self._secret_box.encrypt(
                plaintext=plaintext,
                nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE),
            )

        else:
            # Format .value: "%d" on a plain (non-Int) Enum member raises
            # TypeError, which would mask this ValueError.
            raise ValueError("encryption_scheme not supported: %d" %
                             encryption_scheme.value)

        return (ciphertext, encryption_scheme.value)