Example #1
def main(shard_num):
    maybe_enable_rollbar()

    users = config.get_required("DATABASE_USERS")

    creds = dict(hostname=None, username=None, password=None, db_name=None)

    for database in config.get_required("DATABASE_HOSTS"):
        for shard in database["SHARDS"]:
            if shard["ID"] == shard_num:
                creds["hostname"] = database["HOSTNAME"]
                hostname = creds["hostname"]
                creds["username"] = users[hostname]["USER"]
                creds["password"] = users[hostname]["PASSWORD"]
                creds["db_name"] = shard["SCHEMA_NAME"]
                break

    for key in creds.keys():
        if creds[key] is None:
            print("Error: {key} is None".format(key=key))
            sys.exit(-1)

    proc = subprocess.Popen([
        "mysql",
        "-h" + creds["hostname"],
        "-u" + creds["username"],
        "-D " + creds["db_name"],
        "-p" + creds["password"],
        "--safe-updates",
    ])
    proc.wait()
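
For context, the lookup above assumes DATABASE_HOSTS and DATABASE_USERS have roughly the following shape. This is an illustrative sketch reconstructed from the keys the code reads; the hostnames and credentials are placeholders.

# Hypothetical config shape inferred from the lookups above; values are placeholders.
DATABASE_HOSTS = [
    {
        "HOSTNAME": "mysql-master-1.example.com",
        "SHARDS": [
            {"ID": 0, "SCHEMA_NAME": "inbox_0"},
            {"ID": 1, "SCHEMA_NAME": "inbox_1", "DISABLED": True},
        ],
    },
]
DATABASE_USERS = {
    "mysql-master-1.example.com": {"USER": "inboxapp", "PASSWORD": "secret"},
}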
Example #2
def main(host, port, account_id, folder_id, device_id):
    maybe_enable_rollbar()

    print("Clearing heartbeat status...")
    n = clear_heartbeat_status(account_id, folder_id, device_id, host, port)
    print("{} folders cleared.".format(n))
    exit(0)
Example #3
def main(revision_id):
    maybe_enable_rollbar()

    alembic_ini_filename = os.environ.get("ALEMBIC_INI_PATH", "alembic.ini")
    assert os.path.isfile(alembic_ini_filename), (
        "Missing alembic.ini file at {}".format(alembic_ini_filename))

    database_hosts = config.get_required("DATABASE_HOSTS")

    for host in database_hosts:
        for shard in host["SHARDS"]:
            key = shard["ID"]

            if shard.get("DISABLED"):
                # Do not include disabled shards since application services
                # do not use them.
                continue

            try:
                print("Stamping shard_id {}".format(key))
                alembic_cfg = alembic.config.Config(alembic_ini_filename)
                alembic_cfg.set_main_option("shard_id", str(key))
                alembic.command.stamp(alembic_cfg, revision_id)
                print("Stamped shard_id {}\n".format(key))
            except alembic.util.CommandError as e:
                print("FAILED to stamp shard_id {} with error: {}".format(
                    key, str(e)))
                continue
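
The stamp command hands the shard to Alembic through the shard_id main option; a multi-shard env.py would typically read it back with get_main_option. The fragment below is a sketch under that assumption; get_engine_for_shard is a hypothetical helper and not part of the original code, while the Alembic calls themselves are standard env.py API.

# Sketch of an env.py fragment consuming the "shard_id" option set above.
from alembic import context

shard_id = int(context.config.get_main_option("shard_id"))
engine = get_engine_for_shard(shard_id)  # hypothetical helper resolving the shard's engine

with engine.connect() as connection:
    context.configure(connection=connection)
    with context.begin_transaction():
        context.run_migrations()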
Example #4
def main():
    from inbox.config import config, secrets_path

    maybe_enable_rollbar()

    # If the config contains encryption keys, don't override.
    if config.get("SECRET_ENCRYPTION_KEY"):
        raise Exception("Encryption keys already present in secrets config "
                        "file {0}".format(secrets_path))

    # Generate keys
    data = {
        "SECRET_ENCRYPTION_KEY":
        binascii.hexlify(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)),
        "BLOCK_ENCRYPTION_KEY":
        binascii.hexlify(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)),
    }

    # Our secrets config file contains our database credentials etc.,
    # so it better exist.
    # Update it
    try:
        with open(secrets_path, "a") as f:
            print(
                "Writing keys to secrets config file {0}".format(secrets_path))
            yaml.dump(data, f, default_flow_style=False)
    except IOError:
        raise Exception(
            "Check file write permissions on config file {0}".format(
                secrets_path))

    # Update the config dict
    config.update(data)
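
One caveat if this runs under Python 3: binascii.hexlify returns bytes, and yaml.dump serializes bytes with a !!binary tag rather than as plain hex strings. Decoding first, as sketched below, would keep the secrets file human-readable; this is a variant, not the original code.

# Variant sketch: decode the hex digest so YAML stores a plain string.
def random_hex_key():
    return binascii.hexlify(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)).decode("ascii")

data = {
    "SECRET_ENCRYPTION_KEY": random_hex_key(),
    "BLOCK_ENCRYPTION_KEY": random_hex_key(),
}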
Example #5
def reset_db(dry_run):
    maybe_enable_rollbar()

    database_hosts = config.get_required("DATABASE_HOSTS")
    database_users = config.get_required("DATABASE_USERS")
    # Do not include disabled shards since application services do not use them.
    engine_manager = EngineManager(database_hosts,
                                   database_users,
                                   include_disabled=False)

    for host in database_hosts:
        for shard in host["SHARDS"]:
            if shard.get("DISABLED"):
                continue
            key = int(shard["ID"])
            engine = engine_manager.engines[key]
            schema = shard["SCHEMA_NAME"]

            print("Resetting invalid autoincrements for database: {}".format(
                schema))
            reset_tables = reset_invalid_autoincrements(
                engine, schema, key, dry_run)
            if dry_run:
                print("dry_run=True")
            if reset_tables:
                print("Reset tables: {}".format(", ".join(reset_tables)))
            else:
                print("Schema {} okay".format(schema))
Example #6
def main(host, port, database):
    maybe_enable_rollbar()

    connection_pool = BlockingConnectionPool(
        host=host,
        port=port,
        db=database,
        max_connections=MAX_CONNECTIONS,
        timeout=WAIT_TIMEOUT,
        socket_timeout=SOCKET_TIMEOUT,
    )

    client = StrictRedis(host, port, database, connection_pool=connection_pool)
    batch_client = client.pipeline()

    count = 0
    for name in client.scan_iter(count=100):
        if name == "ElastiCacheMasterReplicationTimestamp":
            continue
        batch_client.delete(name)
        count += 1

    batch_client.execute()
    print("{} heartbeats deleted!".format(count))
    exit(0)
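
Because the pipeline above is executed only once after the scan completes, every queued DELETE is buffered in memory until the end. For large keyspaces, a chunked variant along these lines (a sketch, not the original behaviour) flushes periodically:

# Variant sketch: flush the pipeline every 1000 queued deletes.
count = 0
for name in client.scan_iter(count=100):
    if name == "ElastiCacheMasterReplicationTimestamp":
        continue
    batch_client.delete(name)
    count += 1
    if count % 1000 == 0:
        batch_client.execute()
batch_client.execute()  # flush the remainder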
Example #7
def main(shard_id, dry_run):
    maybe_enable_rollbar()

    if shard_id is not None:
        backfix_shard(shard_id, dry_run)
    else:
        for shard_id in engine_manager.engines:
            backfix_shard(shard_id, dry_run)
Example #8
def delete_namespace_indexes(namespace_ids):
    """
    Delete the CloudSearch indexes for a list of namespaces, specified by id.

    """
    maybe_enable_rollbar()

    delete_indexes(namespace_ids)
Example #9
def console(email_address, client):
    """ REPL for Nylas. """
    maybe_enable_rollbar()

    if client:
        start_client_console(email_address)
    else:
        start_console(email_address)
Example #10
def main(shard_id, id_start, dry_run):
    maybe_enable_rollbar()

    if shard_id is not None:
        process_shard(shard_id, dry_run, id_start)
    else:
        for shard_id in engine_manager.engines:
            process_shard(shard_id, dry_run, id_start)
Example #11
def main(shard_id):
    maybe_enable_rollbar()

    if shard_id is not None:
        update_categories_for_shard(shard_id)
        update_folders_and_labels_for_shard(shard_id)
    else:
        for key in engine_manager.engines:
            update_categories_for_shard(key)
            update_folders_and_labels_for_shard(key)
Example #12
def main(hostname):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        account_ids = db_session.query(Account.id).filter(Account.sync_host == hostname)

        print("Accounts being synced by {}:".format(hostname))
        for account_id in account_ids:
            print(account_id[0])
        db_session.commit()
Example #13
def main():
    maybe_enable_rollbar()

    setproctitle("scheduler")
    zones = {h.get("ZONE") for h in config["DATABASE_HOSTS"]}
    threads = []
    for zone in zones:
        populator = QueuePopulator(zone)
        threads.append(gevent.spawn(populator.run))

    gevent.joinall(threads)
Example #14
def main(namespace_ids):
    """
    Idempotently index the given namespace_ids.

    """
    maybe_enable_rollbar()

    for namespace_id in namespace_ids:
        log.info("indexing namespace {namespace_id}".format(
            namespace_id=namespace_id))
        index_namespace(namespace_id)
Example #15
def main(host, port):
    maybe_enable_rollbar()

    if host:
        status_client = _get_redis_client(host, port, STATUS_DATABASE)
        report_client = _get_redis_client(host, port, REPORT_DATABASE)
    else:
        status_client = get_redis_client(STATUS_DATABASE)
        report_client = get_redis_client(REPORT_DATABASE)
    status_client.flushdb()
    report_client.flushdb()
    exit(0)
Example #16
def run(throttle, dry_run):
    maybe_enable_rollbar()

    print("Python", sys.version, file=sys.stderr)

    pool = []

    for host in config["DATABASE_HOSTS"]:
        log.info("Spawning delete process for host", host=host["HOSTNAME"])
        pool.append(gevent.spawn(delete_account_data, host, throttle, dry_run))

    gevent.joinall(pool)
Example #17
def main(min_id, max_id, shard_id):
    maybe_enable_rollbar()

    generic_accounts = []
    failed = []

    if min_id is not None or max_id is not None:
        # Get the list of running Gmail accounts.
        with global_session_scope() as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == "running")

            if min_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id > min_id)

            if max_id is not None:
                generic_accounts = generic_accounts.filter(
                    GenericAccount.id <= max_id)

            generic_accounts = [acc.id for acc in generic_accounts]

            db_session.expunge_all()

    elif shard_id is not None:
        with session_scope_by_shard_id(shard_id) as db_session:
            generic_accounts = db_session.query(GenericAccount).filter(
                GenericAccount.sync_state == "running")

            generic_accounts = [acc.id for acc in generic_accounts]
            db_session.expunge_all()

    print("Total accounts", len(generic_accounts))

    for account_id in generic_accounts:
        try:
            with session_scope(account_id) as db_session:
                account = db_session.query(GenericAccount).get(account_id)
                print("Updating", account.email_address)

                with connection_pool(account.id).get() as crispin_client:
                    account.folder_prefix = crispin_client.folder_prefix
                    account.folder_separator = crispin_client.folder_separator

                db_session.commit()
        except Exception:
            failed.append(account_id)

    print("Processed accounts:")
    print(generic_accounts)

    print("Failed accounts:")
    print(failed)
Example #18
def delete_account_data(account_id, dry_run, yes, throttle):
    maybe_enable_rollbar()

    with session_scope(account_id) as db_session:
        account = db_session.query(Account).get(account_id)

        if not account:
            print("Account with id {} does NOT exist.".format(account_id))
            return

        email_address = account.email_address
        namespace_id = account.namespace.id

        if account.sync_should_run or not account.is_marked_for_deletion:
            print("Account with id {} NOT marked for deletion.\n"
                  "Will NOT delete, goodbye.".format(account_id))
            return -1

    if not yes:
        question = (
            "Are you sure you want to delete all data for account with "
            "id: {}, email_address: {} and namespace_id: {}? [yes / no]".
            format(account_id, email_address, namespace_id))

        answer = raw_input(question).strip().lower()

        if answer != "yes":
            print("Will NOT delete, goodbye.")
            return 0

    print("Deleting account with id: {}...".format(account_id))
    start = time.time()

    # Delete data in database
    try:
        print("Deleting database data")
        delete_namespace(namespace_id, dry_run=dry_run, throttle=throttle)
    except Exception as e:
        print("Database data deletion failed! Error: {}".format(str(e)))
        return -1

    database_end = time.time()
    print("Database data deleted. Time taken: {}".format(database_end - start))

    # Delete liveness data
    print("Deleting liveness data")
    clear_heartbeat_status(account_id)

    end = time.time()
    print("All data deleted successfully! TOTAL time taken: {}".format(end -
                                                                       start))
    return 0
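
The confirmation prompt relies on raw_input, which only exists on Python 2; the Python 3 equivalent is the input builtin. A small compatibility shim, sketched below, covers both interpreters (the same applies to the other prompts in these examples):

# Compatibility sketch: pick the right prompt function for the running interpreter.
try:
    read_input = raw_input  # Python 2
except NameError:
    read_input = input  # Python 3

answer = read_input(question).strip().lower()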
Example #19
def run(days_ago, limit, throttle, dry_run):
    maybe_enable_rollbar()

    print("Python", sys.version, file=sys.stderr)

    pool = []

    for host in config["DATABASE_HOSTS"]:
        pool.append(
            gevent.spawn(purge_old_transactions, host, days_ago, limit,
                         throttle, dry_run))

    gevent.joinall(pool)
Example #20
def main(dry_run, number, hostname, process):
    """
    Unschedule all accounts assigned to a given sync host.
    Intended primarily for use when decomissioning sync instances or for
    manually unloading an overloaded sync instance.

    """
    maybe_enable_rollbar()

    if not number:
        message = (
            "You have not provided a --number option. This will "
            "unschedule ALL syncs on the host. Proceed? [Y/n] "
        )
        if raw_input(message).strip().lower() == "n":
            print("Will not proceed")
            return

    if not dry_run:
        message = (
            "It is unsafe to unassign hosts while mailsync processes are running. "
            "Have you shut down the appropriate mailsync processes on {}? [Y/n]".format(
                hostname
            )
        )
        if raw_input(message).strip().lower() == "n":
            print("Bailing out")
            return

    with global_session_scope() as db_session:
        if process is not None:
            hostname = ":".join([hostname, process])
        to_unschedule = db_session.query(Account.id).filter(
            Account.sync_host.like("{}%".format(hostname))
        )
        if number:
            to_unschedule = to_unschedule.limit(number)
        to_unschedule = [id_ for id_, in to_unschedule.all()]
        if number:
            to_unschedule = to_unschedule[:number]

    for account_id in to_unschedule:
        with session_scope(account_id) as db_session:
            if dry_run:
                print("Would unassign", account_id)
            else:
                account = db_session.query(Account).get(account_id)
                print("Unassigning", account.id)
                account.desired_sync_host = None
                account.sync_host = None
                db_session.commit()
Example #21
def main(prod, start_syncback, enable_tracer, config, port, enable_profiler):
    """ Launch the Nylas API service. """
    level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL"))
    configure_logging(log_level=level)

    maybe_enable_rollbar()

    if config is not None:
        config_path = os.path.abspath(config)
        load_overrides(config_path)

    if prod:
        start(port, start_syncback, enable_tracer, enable_profiler)
    else:
        preflight()
        from werkzeug.serving import run_with_reloader

        run_with_reloader(lambda: start(port, start_syncback, enable_tracer,
                                        enable_profiler))
Example #22
def main():
    maybe_enable_rollbar()

    database_hosts = config.get_required("DATABASE_HOSTS")
    database_users = config.get_required("DATABASE_USERS")
    # Do not include disabled shards since application services do not use them.
    engine_manager = EngineManager(database_hosts,
                                   database_users,
                                   include_disabled=False)

    for host in database_hosts:
        for shard in host["SHARDS"]:
            if shard.get("DISABLED"):
                continue
            key = int(shard["ID"])
            engine = engine_manager.engines[key]
            schema = shard["SCHEMA_NAME"]

            print("Verifying database: {}".format(schema))
            verify_db(engine, schema, key)
Example #23
def main(account_id, desired_host, dry_run, toggle_sync):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        account = db_session.query(Account).get(int(account_id))

        print("Before sync host: {}".format(account.sync_host))
        print("Before desired sync host: {}".format(account.desired_sync_host))
        print("Before sync should run: {}".format(account.sync_should_run))

        if dry_run:
            return
        account.desired_sync_host = desired_host
        if toggle_sync:
            account.sync_should_run = not account.sync_should_run

        print("After sync host: {}".format(account.sync_host))
        print("After desired sync host: {}".format(account.desired_sync_host))
        print("After sync should run: {}".format(account.sync_should_run))
        db_session.commit()
Example #24
def main():
    """
    Detects accounts with sync_state and sync_host inconsistent with
    sync_should_run bit. (At one point, this could happen if, say, an account
    was _started_ on a new host without being first stopped on its previous
    host.)

    """
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        for acc in (db_session.query(Account).options(
                load_only("sync_state", "sync_should_run", "sync_host",
                          "desired_sync_host")).filter(
                              Account.sync_state == "stopped")):

            if acc.desired_sync_host is not None:
                print("account {} assigned to {} but has sync_state 'stopped'"
                      " ({}, {})".format(acc.id, acc.sync_host,
                                         acc.sync_should_run, acc.sync_host))
Example #25
def main(prod, config, process_num, syncback_id, enable_tracer,
         enable_profiler):
    """ Launch the actions syncback service. """
    setproctitle("syncback-{}".format(process_num))

    maybe_enable_rollbar()

    print("Python", sys.version, file=sys.stderr)

    if config is not None:
        config_path = os.path.abspath(config)
        load_overrides(config_path)
    level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL"))
    configure_logging(log_level=level)
    reconfigure_logging()

    total_processes = int(os.environ.get("SYNCBACK_PROCESSES", 1))

    def start():
        # Start the syncback service, and just hang out forever
        syncback = SyncbackService(syncback_id, process_num, total_processes)

        if enable_profiler:
            inbox_config["DEBUG_PROFILING_ON"] = True

        port = 16384 + process_num
        enable_profiler_api = inbox_config.get("DEBUG_PROFILING_ON")
        frontend = SyncbackHTTPFrontend(port, enable_tracer,
                                        enable_profiler_api)
        frontend.start()

        syncback.start()
        syncback.join()

    if prod:
        start()
    else:
        preflight()
        from werkzeug.serving import run_with_reloader

        run_with_reloader(start)
Example #26
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-u", "--with-users", action="store_true", dest="with_users", default=False
    )
    args = parser.parse_args()
    from inbox.ignition import init_db, main_engine

    maybe_enable_rollbar()

    engine = main_engine(pool_size=1)

    # Always keep the 'alembic_version' table
    keep_tables = ["alembic_version"]
    reset_columns = {}

    # '--with-users' NOT specified
    if not args.with_users:
        keep_tables += [
            "user",
            "namespace",
            "account",
            "imapaccount",
            "user_session",
            "easaccount",
            "folder",
            "gmailaccount",
            "outlookaccount",
            "genericaccount",
            "secret",
            "calendar",
            "easdevice",
        ]

        reset_columns = {"easaccount": ["eas_account_sync_key", "eas_state"]}

    drop_everything(engine, keep_tables=keep_tables, reset_columns=reset_columns)
    # recreate dropped tables
    init_db(engine)
    sys.exit(0)
Example #27
def main(email_address, reauth, target, provider):
    """ Auth an email account. """
    preflight()

    maybe_enable_rollbar()

    shard_id = target << 48

    with session_scope(shard_id) as db_session:
        account = (db_session.query(Account).filter_by(
            email_address=email_address).first())
        if account is not None and not reauth:
            sys.exit("Already have this account!")

        if not provider:
            provider = provider_from_address(email_address)

            # Resolve unknown providers into either custom IMAP or EAS.
            if provider == "unknown":
                is_imap = raw_input(
                    "IMAP account? [Y/n] ").strip().lower() != "n"
                provider = "custom" if is_imap else "eas"

        auth_handler = handler_from_provider(provider)
        account_data = auth_handler.interactive_auth(email_address)

        if reauth:
            account = auth_handler.update_account(account, account_data)
        else:
            account = auth_handler.create_account(account_data)

        try:
            if auth_handler.verify_account(account):
                db_session.add(account)
                db_session.commit()
        except NotSupportedError as e:
            sys.exit(str(e))

    print("OK. Authenticated account for {}".format(email_address))
Example #28
def main(num_accounts):
    maybe_enable_rollbar()

    with global_session_scope() as db_session:
        accounts = (db_session.query(Account).filter(
            Account.sync_should_run == true()).order_by(
                func.rand()).limit(num_accounts).all())

        accounts = [acc.id for acc in accounts][:num_accounts]
        db_session.expunge_all()

    pool = Pool(size=100)
    results = pool.map(process_account, accounts)

    global_results = dict()
    for ret in results:
        for key in ret:
            if key not in global_results:
                global_results[key] = 0

            global_results[key] += ret[key]

    print(global_results)
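
The merge loop at the end is a plain counter merge; collections.Counter from the standard library expresses the same aggregation more compactly. An equivalent sketch:

# Equivalent aggregation sketch using collections.Counter.
from collections import Counter

global_results = Counter()
for ret in results:
    global_results.update(ret)  # adds per-key counts
print(dict(global_results))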
Example #29
def main(type, id, public_id):
    maybe_enable_rollbar()

    type = type.lower()

    if type not in cls_for_type:
        print("Error: unknown type '{}'".format(type))
        sys.exit(-1)

    cls = cls_for_type[type]

    if public_id is None and id is None:
        print("Error: you should specify an id or public id to query.")
        sys.exit(-1)

    with global_session_scope() as db_session:
        if public_id:
            obj = db_session.query(cls).filter(
                cls.public_id == public_id).one()
            print(obj.id)
        elif id:
            obj = db_session.query(cls).filter(cls.id == id).one()
            print(obj.public_id)
Example #30
def main(prod, config):
    """ Launch the contact search index service. """
    level = os.environ.get("LOGLEVEL", inbox_config.get("LOGLEVEL"))
    configure_logging(log_level=level)

    maybe_enable_rollbar()

    if config is not None:
        from inbox.util.startup import load_overrides

        config_path = os.path.abspath(config)
        load_overrides(config_path)

    # import here to make sure config overrides are loaded
    from inbox.transactions.search import ContactSearchIndexService

    if not prod:
        preflight()

    contact_search_indexer = ContactSearchIndexService()

    contact_search_indexer.start()
    contact_search_indexer.join()