def bootstrap_db(heroku_app, config, zip_path=None):
    """Pre-populate the app's database from a zip archive, if one was given.

    :param heroku_app: app descriptor exposing a ``db_url`` attribute used to
        build the SQLAlchemy engine.
    :param config: configuration mapping; consulted for ``archive_path`` when
        *zip_path* is not passed explicitly.
    :param zip_path: optional path to the zip archive to ingest. Defaults to
        the configured ``archive_path`` (new, backward-compatible parameter).
    :returns: ``None``. When no archive is available this is a no-op.
    """
    if zip_path is None:
        # BUG FIX: the original body referenced ``zip_path`` without ever
        # defining it, guaranteeing a NameError on every call. Fall back to
        # the configured archive path instead.
        zip_path = config.get("archive_path", None)
    if zip_path is None:
        # No archive supplied: nothing to ingest (per the function's stated
        # purpose of pre-populating *if* an archive was given).
        return
    log("Ingesting dataset from {}...".format(os.path.basename(zip_path)))
    engine = db.create_db_engine(heroku_app.db_url)
    data.bootstrap_db_from_zip(zip_path, engine)
def deploy(mode, server, dns_host, config_options, archive_path):  # pragma: no cover
    """Deploy a dallinger experiment docker image to a server using ssh.

    :param mode: run mode, stored in the experiment config as ``mode``.
    :param server: key into ``CONFIGURED_HOSTS`` identifying the target host.
    :param dns_host: DNS name under which the experiment is served; derived
        from the ssh host when falsy.
    :param config_options: extra config overrides merged into the experiment
        config dict just before deployment.
    :param archive_path: optional zip archive; when given, its database dump
        is loaded into the freshly created experiment database.
    """
    config = get_config()
    config.load()
    server_info = CONFIGURED_HOSTS[server]
    ssh_host = server_info["host"]
    ssh_user = server_info.get("user")
    # TLS is enabled for any remote host; localhost uses Caddy's internal CA.
    HAS_TLS = ssh_host != "localhost"
    # We abuse the mturk contact_email_on_error to provide an email for
    # let's encrypt certificate registration.
    email_addr = config.get("contact_email_on_error")
    if HAS_TLS:
        # parseaddr()[1] is the bare address part; a missing "@" means the
        # value can't be used for ACME registration, so abort early.
        if "@" not in parseaddr(email_addr)[1]:
            print(f"Email address absent or invalid. Value {email_addr} found")
            print("Run `dallinger email-test` to verify your configuration")
            raise click.Abort
    tls = "tls internal" if not HAS_TLS else f"tls {email_addr}"
    if not dns_host:
        dns_host = get_dns_host(ssh_host)
    executor = Executor(ssh_host, user=ssh_user)
    executor.run("mkdir -p ~/dallinger/caddy.d")

    # Upload the shared server stack (Caddy + postgresql) and bring it up.
    sftp = get_sftp(ssh_host, user=ssh_user)
    sftp.putfo(BytesIO(DOCKER_COMPOSE_SERVER), "dallinger/docker-compose.yml")
    sftp.putfo(
        BytesIO(CADDYFILE.format(host=dns_host, tls=tls).encode()),
        "dallinger/Caddyfile",
    )
    executor.run("docker-compose -f ~/dallinger/docker-compose.yml up -d")
    print("Launched http and postgresql servers. Starting experiment")

    experiment_uuid = str(uuid4())
    # Reuse the id from the archive when redeploying from one, so the restored
    # data matches; otherwise derive a fresh short id from the uuid.
    if archive_path:
        experiment_id = get_experiment_id_from_archive(archive_path)
    else:
        experiment_id = f"dlgr-{experiment_uuid[:8]}"
    dashboard_password = token_urlsafe(8)
    image = config.get("docker_image_name", None)
    cfg = config.as_dict()
    for key in "aws_access_key_id", "aws_secret_access_key":
        # AWS credentials are not included by default in to_dict() result
        # but can be extracted explicitly from a config object
        cfg[key.upper()] = config[key]
    cfg.update(
        {
            "FLASK_SECRET_KEY": token_urlsafe(16),
            "AWS_DEFAULT_REGION": config["aws_region"],
            "dashboard_password": dashboard_password,
            "mode": mode,
            "CREATOR": f"{USER}@{HOSTNAME}",
            "DALLINGER_UID": experiment_uuid,
            "ADMIN_USER": "******",
        }
    )
    cfg.update(config_options)
    del cfg["host"]  # The uppercase variable will be used instead

    # Per-experiment compose file, parameterized with the db password.
    executor.run(f"mkdir -p dallinger/{experiment_id}")
    postgresql_password = token_urlsafe(16)
    sftp.putfo(
        BytesIO(
            get_docker_compose_yml(
                cfg, experiment_id, image, postgresql_password
            ).encode()
        ),
        f"dallinger/{experiment_id}/docker-compose.yml",
    )
    # We invoke the "ls" command in the context of the `web` container.
    # docker-compose will honour `web`'s dependencies and block
    # until postgresql is ready. This way we can be sure we can start
    # creating the database.
    executor.run(
        f"docker-compose -f ~/dallinger/{experiment_id}/docker-compose.yml run --rm web ls"
    )
    # Drop any leftover database/user from a previous deployment of the same
    # experiment id, then recreate them from scratch.
    print("Cleaning up db/user")
    executor.run(
        fr"""docker-compose -f ~/dallinger/docker-compose.yml exec -T postgresql psql -U dallinger -c 'DROP DATABASE IF EXISTS "{experiment_id}";'"""
    )
    executor.run(
        fr"""docker-compose -f ~/dallinger/docker-compose.yml exec -T postgresql psql -U dallinger -c 'DROP USER IF EXISTS "{experiment_id}"; '"""
    )
    print(f"Creating database {experiment_id}")
    executor.run(
        fr"""docker-compose -f ~/dallinger/docker-compose.yml exec -T postgresql psql -U dallinger -c 'CREATE DATABASE "{experiment_id}"'"""
    )
    # quote() protects the embedded single-quoted password from the remote shell.
    create_user_script = f"""CREATE USER "{experiment_id}" with encrypted password '{postgresql_password}'"""
    executor.run(
        f"docker-compose -f ~/dallinger/docker-compose.yml exec -T postgresql psql -U dallinger -c {quote(create_user_script)}"
    )
    grant_roles_script = (
        f'grant all privileges on database "{experiment_id}" to "{experiment_id}"'
    )

    if archive_path is not None:
        # Restore the archived database dump directly over a tunneled
        # connection, then grant the experiment user access to the restored
        # objects (which were created as the admin user).
        print(f"Loading database data from {archive_path}")
        with remote_postgres(server_info, experiment_id) as db_uri:
            engine = create_db_engine(db_uri)
            bootstrap_db_from_zip(archive_path, engine)
            with engine.connect() as conn:
                conn.execute(grant_roles_script)
                conn.execute(f'GRANT USAGE ON SCHEMA public TO "{experiment_id}"')
                conn.execute(
                    f'GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA PUBLIC TO "{experiment_id}"'
                )
    # NOTE(review): the grant below also runs when the archive branch already
    # executed grant_roles_script above; presumably a harmless repeat — confirm.
    executor.run(
        f"docker-compose -f ~/dallinger/docker-compose.yml exec -T postgresql psql -U dallinger -c {quote(grant_roles_script)}"
    )
    executor.run(
        f"docker-compose -f ~/dallinger/{experiment_id}/docker-compose.yml up -d"
    )
    if archive_path is None:
        # Fresh deployment: let the app create its own schema.
        print(f"Experiment {experiment_id} started. Initializing database")
        executor.run(
            f"docker-compose -f ~/dallinger/{experiment_id}/docker-compose.yml exec -T web dallinger-housekeeper initdb"
        )
        print("Database initialized")

    # We give caddy the alias for the service. If we scale up the service
    # container caddy will send requests to all of them in a round robin
    # fashion.
    caddy_conf = f"{experiment_id}.{dns_host} {{\n {tls}\n reverse_proxy {experiment_id}_web:5000\n}}"
    sftp.putfo(
        BytesIO(caddy_conf.encode()),
        f"dallinger/caddy.d/{experiment_id}",
    )
    # Tell caddy we changed something in the configuration
    executor.reload_caddy()

    print("Launching experiment")
    # verify=HAS_TLS: certificate checks are skipped for the localhost
    # (internal CA) case only.
    response = get_retrying_http_client().post(
        f"https://{experiment_id}.{dns_host}/launch", verify=HAS_TLS
    )
    print(response.json()["recruitment_msg"])
    print("To display the logs for this experiment you can run:")
    print(
        f"ssh {ssh_user}@{ssh_host} docker-compose -f '~/dallinger/{experiment_id}/docker-compose.yml' logs -f"
    )
    print(
        f"You can now log in to the console at https://{experiment_id}.{dns_host}/dashboard as user {cfg['ADMIN_USER']} using password {cfg['dashboard_password']}"
    )