def test_s3_env():
    """Run S3Job with AWS credentials injected as Spark driver environment variables."""
    credentials = s3.get_credentials()
    submit_args = [
        "--conf spark.mesos.driverEnv.AWS_ACCESS_KEY_ID={}".format(credentials.access_key),
        "--conf spark.mesos.driverEnv.AWS_SECRET_ACCESS_KEY={}".format(credentials.secret_key),
        "--class S3Job",
    ]

    s3.upload_file(os.path.join(THIS_DIR, 'resources', 'linecount.txt'))

    # download/read linecount.txt only
    utils.run_tests(
        app_url=utils.dcos_test_jar_url(),
        app_args="--readUrl {} --countOnly".format(s3.s3n_url('linecount.txt')),
        expected_output="Read 3 lines",
        args=submit_args)

    # download/read linecount.txt, reupload as linecount-env.txt
    utils.run_tests(
        app_url=utils.dcos_test_jar_url(),
        app_args="--readUrl {} --writeUrl {}".format(
            s3.s3n_url('linecount.txt'),
            s3.s3n_url('linecount-env.txt')),
        expected_output="Read 3 lines",
        args=submit_args)
    # The write run must have produced at least one object under that key.
    assert len(list(s3.list("linecount-env.txt"))) > 0
def test_s3_secrets():
    """Run S3Job with AWS credentials delivered through DC/OS secrets."""
    s3.upload_file(os.path.join(THIS_DIR, 'resources', 'linecount.txt'))
    credentials = s3.get_credentials()

    def _store_secret(path, val):
        # Recreate from scratch so a stale secret from an earlier run
        # cannot make `security secrets create` fail.
        sdk_security.delete_secret(path)
        rc, stdout, stderr = sdk_cmd.run_raw_cli(
            "security secrets create /{} -v {}".format(path, val))
        assert rc == 0, "Failed to create secret {}, stderr: {}, stdout: {}".format(
            path, stderr, stdout)

    access_key_path = "aws_access_key_id"
    secret_key_path = "aws_secret_access_key"
    _store_secret(access_key_path, credentials.access_key)
    _store_secret(secret_key_path, credentials.secret_key)

    submit_args = [
        "--conf spark.mesos.containerizer=mesos",
        "--conf spark.mesos.driver.secret.names=/{key},/{secret}".format(
            key=access_key_path, secret=secret_key_path),
        "--conf spark.mesos.driver.secret.envkeys=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY",
        "--class S3Job",
    ]

    try:
        # download/read linecount.txt only
        utils.run_tests(
            app_url=utils.dcos_test_jar_url(),
            app_args="--readUrl {} --countOnly".format(s3.s3n_url('linecount.txt')),
            expected_output="Read 3 lines",
            args=submit_args)

        # download/read linecount.txt, reupload as linecount-secret.txt:
        utils.run_tests(
            app_url=utils.dcos_test_jar_url(),
            app_args="--readUrl {} --writeUrl {}".format(
                s3.s3n_url('linecount.txt'),
                s3.s3n_url('linecount-secret.txt')),
            expected_output="Read 3 lines",
            args=submit_args)
        assert len(list(s3.list("linecount-secret.txt"))) > 0
    finally:
        # Never leave AWS credentials behind in the cluster secret store.
        sdk_security.delete_secret(access_key_path)
        sdk_security.delete_secret(secret_key_path)
def test_s3():
    """End-to-end S3Job runs, exercising both credential delivery paths:
    DC/OS secrets (read + write) and driver environment variables (count only,
    with the app args given in both orders to exercise S3Job's arg parsing).
    """
    def make_credential_secret(envvar, secret_path):
        # Recreate from scratch: a secret left over from a prior (failed)
        # run would otherwise make `security secrets create` fail.
        # Matches the delete-then-create behavior of test_s3_secrets' helper.
        sdk_security.delete_secret(secret_path.lstrip("/"))
        rc, stdout, stderr = sdk_cmd.run_raw_cli(
            "security secrets create {p} -v {e}".format(p=secret_path, e=os.environ[envvar]))
        assert rc == 0, "Failed to create secret {secret} from envvar {envvar}, stderr: {err}, stdout: {out}".format(
            secret=secret_path, envvar=envvar, err=stderr, out=stdout)

    LOGGER.info("Creating AWS secrets")
    aws_access_key_secret_path = "aws_access_key_id"
    aws_secret_access_key_path = "aws_secret_access_key"
    make_credential_secret(
        envvar="AWS_ACCESS_KEY_ID",
        secret_path="/{}".format(aws_access_key_secret_path))
    make_credential_secret(
        envvar="AWS_SECRET_ACCESS_KEY",
        secret_path="/{}".format(aws_secret_access_key_path))

    linecount_path = os.path.join(THIS_DIR, 'resources', 'linecount.txt')
    s3.upload_file(linecount_path)

    # Shared --conf args for the two environment-variable-based runs below
    # (previously duplicated verbatim).
    env_credential_args = [
        "--conf", "spark.mesos.driverEnv.AWS_ACCESS_KEY_ID={}".format(
            os.environ["AWS_ACCESS_KEY_ID"]),
        "--conf", "spark.mesos.driverEnv.AWS_SECRET_ACCESS_KEY={}".format(
            os.environ["AWS_SECRET_ACCESS_KEY"]),
        "--class", "S3Job"
    ]

    try:
        # Run 1: credentials via DC/OS secrets; read linecount.txt, write linecount-out.
        app_args = "--readUrl {} --writeUrl {}".format(
            s3.s3n_url('linecount.txt'),
            s3.s3n_url("linecount-out"))
        secret_args = [
            "--conf", "spark.mesos.containerizer=mesos",
            "--conf", "spark.mesos.driver.secret.names=/{key},/{secret}".format(
                key=aws_access_key_secret_path, secret=aws_secret_access_key_path),
            "--conf", "spark.mesos.driver.secret.envkeys=AWS_ACCESS_KEY_ID,AWS_SECRET_ACCESS_KEY",
            "--class", "S3Job"
        ]
        utils.run_tests(app_url=utils._scala_test_jar_url(),
                        app_args=app_args,
                        expected_output="Read 3 lines",
                        args=secret_args)
        assert len(list(s3.list("linecount-out"))) > 0

        # Run 2: credentials via driver env vars; count only.
        utils.run_tests(app_url=utils._scala_test_jar_url(),
                        app_args="--readUrl {} --countOnly".format(s3.s3n_url('linecount.txt')),
                        expected_output="Read 3 lines",
                        args=env_credential_args)

        # Run 3: identical to run 2 but with the app args reversed
        # (--countOnly before --readUrl), to exercise argument parsing.
        utils.run_tests(app_url=utils._scala_test_jar_url(),
                        app_args="--countOnly --readUrl {}".format(s3.s3n_url('linecount.txt')),
                        expected_output="Read 3 lines",
                        args=env_credential_args)
    finally:
        # Clean up the credential secrets even on failure, mirroring
        # test_s3_secrets — otherwise AWS keys linger in the secret store
        # and the next run's `create` would fail.
        sdk_security.delete_secret(aws_access_key_secret_path)
        sdk_security.delete_secret(aws_secret_access_key_path)