def dagster_postgres_tests():
    tests = []
    for version in SupportedPythons:
        coverage = ".coverage.dagster-postgres.{version}.$BUILDKITE_BUILD_ID".format(
            version=version
        )
        tests.append(
            StepBuilder("dagster-postgres tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "cd python_modules/libraries/dagster-postgres/dagster_postgres_tests/",
                "docker-compose stop",
                "docker-compose rm -f",
                "docker-compose up -d --remove-orphans",
                "export POSTGRES_TEST_DB_HOST=`docker inspect --format '{{ .NetworkSettings.IPAddress }}' test-postgres-db`",
                "pushd ../",
                "pip install tox",
                "tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
                "popd",
                "docker-compose stop",
                "docker-compose rm -f",
            )
            .on_integration_image(version, ['BUILDKITE'])
            .build()
        )
    return tests

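# All of the functions in this file assemble Buildkite steps through a shared
# StepBuilder fluent interface defined elsewhere in the package. The class below
# is a minimal, hypothetical sketch of that interface (suffixed "Sketch" so it
# cannot shadow the real builder), inferred only from the call sites in this
# file; the actual implementation may differ.
class StepBuilderSketch:
    def __init__(self, label, key=None):
        # A Buildkite step is ultimately just a dict serialized into pipeline YAML.
        self._step = {"label": label}
        if key is not None:
            self._step["key"] = key

    def run(self, *commands):
        self._step["commands"] = list(commands)
        return self

    def depends_on(self, step_keys):
        self._step["depends_on"] = step_keys
        return self

    def with_timeout(self, num_minutes):
        self._step["timeout_in_minutes"] = num_minutes
        return self

    # on_integration_image()/on_python_image() would attach docker plugin config
    # (image name plus forwarded env vars), and on_queue()/on_medium_instance()
    # would set agent targeting; both are omitted from this sketch.

    def build(self):
        return self._step
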
def coverage_step():
    return (
        StepBuilder("coverage")
        .run(
            "mkdir -p tmp",
            'buildkite-agent artifact download ".coverage*" tmp/',
            'buildkite-agent artifact download "lcov.*" tmp/',
            "cd tmp",
            "coverage debug sys",
            "coverage debug data",
            "coverage combine",
            "coveralls-lcov -v -n lcov.* > coverage.js.json",
            "coveralls",  # add '--merge=coverage.js.json' to report JS coverage
        )
        .on_python_image(
            "coverage-image:v1",
            [
                "COVERALLS_REPO_TOKEN",  # exported by /env in ManagedSecretsBucket
                "CI_NAME",
                "CI_BUILD_NUMBER",
                "CI_BUILD_URL",
                "CI_BRANCH",
                "CI_PULL_REQUEST",
            ],
        )
        .build()
    )

def events_demo_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.events-demo.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder('events-demo tests ({version})'.format(version=TOX_MAP[version]))
            .on_integration_image(
                version, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION']
            )
            .on_medium_instance()
            .run(
                "mkdir -p /tmp/dagster/events",
                "pushd scala_modules",
                "sbt events/assembly",
                "cp ./events/target/scala-2.11/events-assembly-0.1.0-SNAPSHOT.jar /tmp/dagster/events/",
                "popd",
                "pushd examples",
                "pip install tox",
                "tox -vv -c event.tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .build()
        )
    return tests

def publish_integration_images():
    python_versions = [
        ''.join(python_version.split('.')[:2])
        for python_version in SupportedPythons + [SupportedPython.V3_8]
    ]
    publish_date = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%S")
    return [
        StepBuilder("Integration Image %s" % python_version)
        .run(
            "aws ecr get-login --no-include-email --region us-west-1 | sh",
            "cd /workdir/.buildkite/images/",
            "make VERSION=\"{publish_date}\" build-integration-{python_version}".format(
                publish_date=publish_date, python_version=python_version
            ),
            "make VERSION=\"{publish_date}\" push-integration-{python_version}".format(
                publish_date=publish_date, python_version=python_version
            ),
        )
        .on_integration_image(
            SupportedPython.V3_7,
            [
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
                'BUILDKITE_SECRETS_BUCKET',
            ],
        )
        .on_queue(BuildkiteQueue.DOCKER)
        .with_timeout(30)
        .build()
        for python_version in python_versions
    ]

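# TOX_MAP, used throughout this file, maps a SupportedPython version string to
# the tox environment name passed to `tox -e`. A hypothetical reconstruction,
# consistent with how publish_integration_images() above shortens versions by
# joining the first two dotted components (e.g. "3.7.x" -> "37"); the real
# mapping is defined elsewhere in the package:
TOX_MAP_SKETCH = {
    version: "py" + "".join(version.split(".")[:2]) for version in SupportedPythons
}
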
def deploy_scala():
    # GCP tests need appropriate credentials
    creds_local_file = "/tmp/gcp-key-elementl-dev.json"
    version = SupportedPython.V3_7
    return (
        StepBuilder("scala deploy")
        .run(
            "pip install --upgrade google-cloud-storage",
            r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
            + creds_local_file,
            "export GOOGLE_APPLICATION_CREDENTIALS=" + creds_local_file,
            "pushd scala_modules",
            "make deploy",
        )
        .on_integration_image(
            version,
            [
                'AWS_SECRET_ACCESS_KEY',
                'AWS_ACCESS_KEY_ID',
                'AWS_DEFAULT_REGION',
                'BUILDKITE_SECRETS_BUCKET',
                'GCP_PROJECT_ID',
                'GCP_DEPLOY_BUCKET',
            ],
        )
        .on_queue(BuildkiteQueue.MEDIUM)
        .build()
    )

def dask_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.dagster-dask.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder("dagster-dask tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pushd python_modules/dagster-dask/dagster_dask_tests/dask-docker",
                "./build.sh " + version,
                # Run the docker-compose dask cluster
                "export PYTHON_VERSION=\"{ver}\"".format(ver=version),
                "docker-compose up -d --remove-orphans",
                # hold onto your hats, this is docker networking at its best. First, we figure out
                # the name of the currently running container...
                "export CONTAINER_ID=`cut -c9- < /proc/1/cpuset`",
                r'export CONTAINER_NAME=`docker ps --filter "id=\${CONTAINER_ID}" --format "{{.Names}}"`',
                # then, we dynamically bind this container into the dask user-defined bridge
                # network to make the dask containers visible...
                r"docker network connect dask \${CONTAINER_NAME}",
                # Now, we grab the IP address of the dask-scheduler container from within the dask
                # bridge network and export it; this will let the tox tests talk to the scheduler.
                "export DASK_ADDRESS=`docker inspect --format '{{ .NetworkSettings.Networks.dask.IPAddress }}' dask-scheduler`",
                "popd",
                "pushd python_modules/dagster-dask/",
                "pip install tox",
                "tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_integration_image(
                version, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION']
            )
            .on_medium_instance()
            .build()
        )
    return tests

def airline_demo_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.airline-demo.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder('airline-demo tests ({version})'.format(version=TOX_MAP[version]))
            .on_integration_image(version)
            .on_medium_instance()
            .run(
                "cd examples",
                # Build the image we use for airflow in the demo tests
                "./build_airline_demo_image.sh",
                "mkdir -p /home/circleci/airflow",
                # Run the postgres db. We are in docker running docker
                # so this will be a sibling container.
                "docker-compose stop",
                "docker-compose rm -f",
                "docker-compose up -d",
                # Can't use host networking on buildkite and communicate via localhost
                # between these sibling containers, so pass along the ip.
                "export DAGSTER_AIRLINE_DEMO_DB_HOST=`docker inspect --format '{{ .NetworkSettings.IPAddress }}' airline-demo-db`",
                "tox -vv -c airline.tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .build()
        )
    return tests

def dagster_postgres_tests():
    tests = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for version in SupportedPythons + [SupportedPython.V3_8]:
        coverage = ".coverage.dagster-postgres.{version}.$BUILDKITE_BUILD_ID".format(
            version=version
        )
        tests.append(
            StepBuilder("libraries/dagster-postgres tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "cd python_modules/libraries/dagster-postgres/dagster_postgres_tests/",
                "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit
                "docker-compose -f docker-compose-multi.yml up -d",  # clean up in hooks/pre-exit
                network_buildkite_container('postgres'),
                connect_sibling_docker_container(
                    'postgres', 'test-postgres-db', 'POSTGRES_TEST_DB_HOST'
                ),
                network_buildkite_container('postgres_multi'),
                connect_sibling_docker_container(
                    'postgres_multi',
                    'test-run-storage-db',
                    'POSTGRES_TEST_RUN_STORAGE_DB_HOST',
                ),
                connect_sibling_docker_container(
                    'postgres_multi',
                    'test-event-log-storage-db',
                    'POSTGRES_TEST_EVENT_LOG_STORAGE_DB_HOST',
                ),
                "pushd ../",
                "tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
                "popd",
            )
            .on_integration_image(version)
            .build()
        )
    return tests

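# dagster_postgres_tests() above (and several functions below) call
# network_buildkite_container() and connect_sibling_docker_container(), which
# are defined elsewhere in the package. The sketches below (suffixed "_sketch"
# so they cannot shadow the real helpers) generalize the inline docker
# networking commands in the older dask_tests() revision above; note that the
# later dask_tests() revision below concatenates the helpers' results as lists,
# so the real signatures evolved across revisions.
def network_buildkite_container_sketch(network_name):
    # Attach the currently running Buildkite container to the named user-defined
    # bridge network, making the docker-compose sibling containers reachable.
    return " && ".join(
        [
            "export CONTAINER_ID=`cut -c9- < /proc/1/cpuset`",
            r'export CONTAINER_NAME=`docker ps --filter "id=\${CONTAINER_ID}" --format "{{.Names}}"`',
            r"docker network connect {network} \${{CONTAINER_NAME}}".format(network=network_name),
        ]
    )


def connect_sibling_docker_container_sketch(network_name, container_name, env_variable):
    # Export the sibling container's IP on the shared bridge network, since host
    # networking is unavailable on Buildkite and localhost won't reach siblings.
    return (
        "export {env_variable}=`docker inspect --format "
        "'{{{{ .NetworkSettings.Networks.{network}.IPAddress }}}}' {container}`".format(
            env_variable=env_variable, network=network_name, container=container_name
        )
    )
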
def version_equality_checks(version=SupportedPython.V3_7): return [ StepBuilder("version equality checks for libraries") .on_integration_image(version) .run("pip install -e python_modules/automation", "dagster-release version") .build() ]
def airline_demo_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.airline-demo.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder('airline-demo tests ({version})'.format(version=TOX_MAP[version]))
            .on_integration_image(version)
            .run(
                "cd examples",
                # Build the image we use for airflow in the demo tests
                "./build_airline_demo_image.sh",
                "mkdir -p /home/circleci/airflow",
                # Run the postgres db. We are in docker running docker
                # so this will be a sibling container.
                "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit
                # Can't use host networking on buildkite and communicate via localhost
                # between these sibling containers, so pass along the ip.
                network_buildkite_container('postgres'),
                connect_sibling_docker_container(
                    'postgres', 'test-postgres-db', 'POSTGRES_TEST_DB_HOST'
                ),
                "tox -vv -c airline.tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .build()
        )
    return tests

def k8s_tests():
    tests = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for version in SupportedPythons + [SupportedPython.V3_8]:
        coverage = ".coverage.dagster-k8s.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder("dagster-k8s ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pip install kubernetes",
                "curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash",
                "export DAGSTER_DOCKER_IMAGE_TAG=$${BUILDKITE_BUILD_ID}-" + version,
                "export DAGSTER_DOCKER_REPOSITORY=\"$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com\"",
                "pushd python_modules/libraries/dagster-k8s/",
                "tox -vv -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
                "popd",
            )
            .depends_on(["dagster-test-images-{version}".format(version=TOX_MAP[version])])
            .on_integration_image(
                version,
                [
                    'AWS_ACCOUNT_ID',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'BUILDKITE_SECRETS_BUCKET',
                ],
            )
            .build()
        )
    return tests

def version_equality_checks(version=SupportedPython.V3_7): return [ StepBuilder("version equality checks for libraries"). on_integration_image(version).run( "pip install -r bin/requirements.txt", "python bin/version_check.py").build() ]
def python_modules_tox_tests(directory, supported_pythons=None):
    label = directory.replace("/", "-")
    tests = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    supported_pythons = supported_pythons or SupportedPythons + [SupportedPython.V3_8]
    for version in supported_pythons:
        # pyspark doesn't support Python 3.8 yet
        # See: https://github.com/dagster-io/dagster/issues/1960
        if ('pyspark' in label or 'aws' in label) and version == SupportedPython.V3_8:
            continue
        coverage = ".coverage.{label}.{version}.$BUILDKITE_BUILD_ID".format(
            label=label, version=version
        )
        tests.append(
            StepBuilder("{label} tests ({ver})".format(label=label, ver=TOX_MAP[version]))
            .run(
                "eval $(ssh-agent)",
                "cd python_modules/{directory}".format(directory=directory),
                "tox -vv -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_integration_image(
                version,
                ['AWS_DEFAULT_REGION', 'TWILIO_TEST_ACCOUNT_SID', 'TWILIO_TEST_AUTH_TOKEN'],
            )
            .build()
        )
    return tests

def dask_tests():
    tests = []
    # We don't test dagster-dask against Python 3.5 because Dask dropped support
    # for 3.5 with Dask 2.7.0 (2019-11-08)
    for version in SupportedPython3sEx35:
        coverage = ".coverage.dagster-dask.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder("dagster-dask tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pushd python_modules/libraries/dagster-dask/dagster_dask_tests/dask-docker",
                "./build.sh " + version,
                # Run the docker-compose dask cluster
                "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit
                network_buildkite_container('dask'),
                connect_sibling_docker_container('dask', 'dask-scheduler', 'DASK_ADDRESS'),
                "popd",
                "pushd python_modules/libraries/dagster-dask/",
                "tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_integration_image(
                version, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION']
            )
            .build()
        )
    return tests

def dagit_steps():
    return [
        StepBuilder("dagit webapp tests")
        .run(
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "pip install -e python_modules/dagster -qqq",
            "pip install -e python_modules/dagster-graphql -qqq",
            "pip install -e python_modules/libraries/dagster-cron -qqq",
            "pip install -e python_modules/libraries/dagster-slack -qqq",
            "pip install -e python_modules/dagit -qqq",
            "pip install -e examples/legacy_examples -qqq",
            "cd js_modules/dagit",
            "yarn install",
            "yarn run ts",
            "yarn run jest --collectCoverage --watchAll=false",
            "yarn run check-prettier",
            "yarn run check-lint",
            "yarn run download-schema",
            "yarn run generate-types",
            "git diff --exit-code",
            "mv coverage/lcov.info lcov.dagit.$BUILDKITE_BUILD_ID.info",
            "buildkite-agent artifact upload lcov.dagit.$BUILDKITE_BUILD_ID.info",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
    ]

def python_modules_tox_tests(directory, prereqs=None, env=None):
    label = directory.replace("/", "-")
    tests = []
    for version in SupportedPythons:
        coverage = ".coverage.{label}.{version}.$BUILDKITE_BUILD_ID".format(
            label=label, version=version
        )
        tox_command = []
        if prereqs:
            tox_command += prereqs
        tox_command += [
            "pip install tox;",
            "cd python_modules/{directory}".format(directory=directory),
            "tox -vv -e {ver}".format(ver=TOX_MAP[version]),
            "mv .coverage {file}".format(file=coverage),
            "buildkite-agent artifact upload {file}".format(file=coverage),
        ]
        env_vars = ['AWS_DEFAULT_REGION'] + (env or [])
        builder = (
            StepBuilder("{label} tests ({ver})".format(label=label, ver=TOX_MAP[version]))
            .run(*tox_command)
            .on_python_image(version, env_vars)
        )
        tests.append(builder.build())
    return tests

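# Hypothetical usage of the python_modules_tox_tests() revision directly above;
# the directory, prereq command, and env var below are illustrative only, not
# taken from the repo:
example_library_steps = python_modules_tox_tests(
    "libraries/dagster-slack",   # hypothetical directory under python_modules/
    prereqs=["apt-get update"],  # hypothetical setup commands run before tox
    env=["SLACK_TEST_TOKEN"],    # hypothetical env var forwarded to the step
)
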
def coverage_step():
    return (
        StepBuilder("coverage")
        .run(
            "mkdir -p tmp",
            'buildkite-agent artifact download ".coverage*" tmp/',
            'buildkite-agent artifact download "lcov.*" tmp/',
            "cd tmp",
            "coverage debug sys",
            "coverage debug data",
            "coverage combine",
            # coveralls-lcov is currently not working - fails with:
            # converter.rb:63:in `initialize': No such file or directory @ rb_sysopen - jest/mocks/dagre_layout.worker.ts
            # "coveralls-lcov -v -n lcov.* > coverage.js.json",
            "coveralls",  # add '--merge=coverage.js.json' to report JS coverage
        )
        .on_python_image(
            "buildkite-coverage:py3.8.7-{version}".format(version=COVERAGE_IMAGE_VERSION),
            [
                "COVERALLS_REPO_TOKEN",  # exported by /env in ManagedSecretsBucket
                "CI_NAME",
                "CI_BUILD_NUMBER",
                "CI_BUILD_URL",
                "CI_BRANCH",
                "CI_PULL_REQUEST",
            ],
        )
        .build()
    )

def coverage_step():
    return (
        StepBuilder("coverage")
        .run(
            "apt-get update",
            "apt-get -qq -y install lcov ruby-full",
            "pip install coverage coveralls coveralls-merge",
            "gem install coveralls-lcov",
            "mkdir -p tmp",
            'buildkite-agent artifact download ".coverage*" tmp/',
            'buildkite-agent artifact download "lcov.*" tmp/',
            "cd tmp",
            "coverage combine",
            "coveralls-lcov -v -n lcov.* > coverage.js.json",
            "coveralls",  # add '--merge=coverage.js.json' to report JS coverage
        )
        .on_python_image(
            SupportedPython.V3_7,
            [
                'COVERALLS_REPO_TOKEN',  # exported by /env in ManagedSecretsBucket
                'CI_NAME',
                'CI_BUILD_NUMBER',
                'CI_BUILD_URL',
                'CI_BRANCH',
                'CI_PULL_REQUEST',
            ],
        )
        .build()
    )

def gcp_tests():
    tests = []
    for version in SupportedPythons:
        coverage = ".coverage.libraries-dagster-gcp.{version}.$BUILDKITE_BUILD_ID".format(
            version=version
        )
        tests.append(
            StepBuilder("libraries-dagster-gcp tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
                + GCP_CREDS_LOCAL_FILE,
                "export GOOGLE_APPLICATION_CREDENTIALS=" + GCP_CREDS_LOCAL_FILE,
                "cd python_modules/libraries/dagster-gcp",
                "tox -vv -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_integration_image(
                version,
                [
                    'BUILDKITE_SECRETS_BUCKET',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'GCP_PROJECT_ID',
                ],
            )
            .build()
        )
    return tests

def gcp_tests():
    # GCP tests need appropriate credentials
    creds_local_file = "/tmp/gcp-key-elementl-dev.json"
    tests = []
    for version in SupportedPythons:
        coverage = ".coverage.libraries-dagster-gcp.{version}.$BUILDKITE_BUILD_ID".format(
            version=version
        )
        tests.append(
            StepBuilder("libraries-dagster-gcp tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pip install awscli",
                "aws s3 cp s3://${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
                + creds_local_file,
                "export GOOGLE_APPLICATION_CREDENTIALS=" + creds_local_file,
                "pip install tox;",
                "cd python_modules/libraries/dagster-gcp",
                "tox -vv -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_python_image(version, ['BUILDKITE_SECRETS_BUCKET', 'GCP_PROJECT_ID'])
            .build()
        )
    return tests

def dask_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.dagster-dask.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder("dagster-dask tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pushd python_modules/dagster-dask/dagster_dask_tests/dask-docker",
                "./build.sh " + version,
                # Run the docker-compose dask cluster
                *wrap_with_docker_compose_steps(
                    network_buildkite_container('dask')
                    + connect_sibling_docker_container('dask', 'dask-scheduler', 'DASK_ADDRESS')
                    + [
                        "popd",
                        "pushd python_modules/dagster-dask/",
                        "tox -e {ver}".format(ver=TOX_MAP[version]),
                        "mv .coverage {file}".format(file=coverage),
                        "buildkite-agent artifact upload {file}".format(file=coverage),
                        "popd",
                        "pushd python_modules/dagster-dask/dagster_dask_tests/dask-docker",
                    ]
                )
            )
            .on_integration_image(
                version, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION']
            )
            .build()
        )
    return tests

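# wrap_with_docker_compose_steps() (used just above) is defined elsewhere in
# the package. A hypothetical sketch (suffixed "_sketch" so it cannot shadow
# the real helper), assuming it brackets the wrapped commands with the same
# docker-compose up/teardown sequence that older revisions of these functions
# inline by hand:
def wrap_with_docker_compose_steps_sketch(steps_to_wrap):
    # Bring up the compose cluster, run the wrapped steps, then tear down.
    return (
        ["docker-compose up -d --remove-orphans"]
        + steps_to_wrap
        + ["docker-compose stop", "docker-compose rm -f"]
    )
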
def airflow_tests():
    tests = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for version in SupportedPythons + [SupportedPython.V3_8]:
        coverage = ".coverage.dagster-airflow.{version}.$BUILDKITE_BUILD_ID".format(
            version=version
        )
        tests.append(
            StepBuilder("[dagster-airflow] ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pip install awscli",
                "aws ecr get-login --no-include-email --region us-west-1 | sh",
                r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
                + GCP_CREDS_LOCAL_FILE,
                "export GOOGLE_APPLICATION_CREDENTIALS=" + GCP_CREDS_LOCAL_FILE,
                "./.buildkite/scripts/dagster_airflow.sh {ver}".format(ver=TOX_MAP[version]),
                "pushd python_modules/dagster-airflow/",
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
                "popd",
            )
            .depends_on(["dagster-airflow-images"])
            .on_integration_image(
                version,
                [
                    'AIRFLOW_HOME',
                    'AWS_ACCOUNT_ID',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'BUILDKITE_SECRETS_BUCKET',
                    'GOOGLE_APPLICATION_CREDENTIALS',
                ],
            )
            .build()
        )
    return tests

def publish_airflow_images():
    '''These images are used by the dagster-airflow tests. We build them here and not in the
    main build pipeline to speed it up, because they change very rarely.
    '''
    return [
        StepBuilder("[dagster-airflow] images", key="dagster-airflow-images")
        .run(
            "pip install awscli",
            "aws ecr get-login --no-include-email --region us-west-1 | sh",
            r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
            + GCP_CREDS_LOCAL_FILE,
            "export GOOGLE_APPLICATION_CREDENTIALS=" + GCP_CREDS_LOCAL_FILE,
            "export DAGSTER_AIRFLOW_DOCKER_IMAGE=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-airflow-demo:$${BUILDKITE_BUILD_ID}",
            # Build and deploy dagster-airflow docker images
            "pushd python_modules/dagster-airflow/dagster_airflow_tests/test_project",
            "./build.sh",
            "docker tag dagster-airflow-demo $${DAGSTER_AIRFLOW_DOCKER_IMAGE}",
            "docker push $${DAGSTER_AIRFLOW_DOCKER_IMAGE}",
            "popd",
        )
        .on_integration_image(
            SupportedPython.V3_7,
            [
                'AIRFLOW_HOME',
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
                'BUILDKITE_SECRETS_BUCKET',
            ],
        )
        .build()
    ]

def helm_steps():
    base_paths = "'helm/dagster/*.yml' 'helm/dagster/*.yaml'"
    base_paths_ignored = "':!:helm/dagster/templates/*.yml' ':!:helm/dagster/templates/*.yaml'"
    return [
        StepBuilder("yamllint helm")
        .run(
            "pip install yamllint",
            "yamllint -c .yamllint.yaml --strict `git ls-files {base_paths} {base_paths_ignored}`".format(
                base_paths=base_paths, base_paths_ignored=base_paths_ignored
            ),
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("validate helm schema")
        .run(
            "pip install -e python_modules/automation",
            "dagster-helm schema --command=apply",
            "git diff --exit-code",
            "helm lint helm/dagster -f helm/dagster/values.yaml",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
    ]

def releasability_tests(version=SupportedPython.V3_7):
    return [
        StepBuilder("releasability test")
        .run(
            "pip install -r bin/requirements.txt",
            "pip install -r bin/dev-requirements.txt",
            "cd bin",
            "SLACK_RELEASE_BOT_TOKEN='dummy' pytest",
        )
        .on_integration_image(version)
        .build()
    ]

def next_docs_build_tests():
    return [
        StepBuilder("next docs build tests")
        .run(
            "pip install -e python_modules/automation",
            "pip install -r docs-requirements.txt -qqq",
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "cd docs",
            "make NODE_ENV=production VERSION=master full_docs_build",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("next docs tests")
        .run(
            "pip install -e python_modules/automation",
            "pip install -r docs-requirements.txt -qqq",
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "cd docs",
            "make buildnext",
            "cd next",
            "yarn test",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
    ]

def python_steps():
    steps = []
    steps += publish_test_images()
    steps += pylint_steps()
    steps += [
        StepBuilder("isort")
        .run(
            "pip install isort>=4.3.21",
            "make isort",
            "git diff --exit-code",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("black")
        # See: https://github.com/dagster-io/dagster/issues/1999
        .run("make check_black")
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("mypy examples")
        .run(
            "pip install mypy",
            # start small by making sure the local code type checks
            "mypy examples/airline_demo/airline_demo "
            "examples/legacy_examples/dagster_examples/bay_bikes "
            "examples/docs_snippets/docs_snippets/intro_tutorial/basics/e04_quality/custom_types_mypy* "
            "--ignore-missing-imports",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("Validate Library Docs")
        .run(
            "pip install -e python_modules/automation",
            "dagster-docs validate-libraries",
        )
        .on_integration_image(SupportedPython.V3_7)
        .build(),
    ]
    for m in DAGSTER_PACKAGES_WITH_CUSTOM_TESTS:
        steps += m.get_tox_build_steps()
    steps += extra_library_tests()
    # https://github.com/dagster-io/dagster/issues/2785
    steps += pipenv_smoke_tests()
    steps += version_equality_checks()
    steps += next_docs_build_tests()
    steps += examples_tests()
    steps += integration_tests()
    return steps

def flyte_tests():
    tests = []
    for version in SupportedPython3s:
        coverage = ".coverage.examples.{version}.$BUILDKITE_BUILD_ID".format(version=version)
        tests.append(
            StepBuilder("dagster-flyte tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pushd python_modules/libraries/dagster-flyte",
                "tox -e {ver}".format(ver=TOX_MAP[version]),
                "mv .coverage {file}".format(file=coverage),
                "buildkite-agent artifact upload {file}".format(file=coverage),
            )
            .on_integration_image(version)
            .build()
        )
    tests.append(
        StepBuilder("dagster-flyte build example")
        .run(
            "cd python_modules/libraries/dagster-flyte/examples",
            "make docker_build",
        )
        .on_integration_image(SupportedPython.V3_6)
        .build()
    )
    return tests

def releasability_tests():
    tests = []
    for version in [SupportedPython.V3_7]:
        tests.append(
            StepBuilder("releasability tests ({ver})".format(ver=TOX_MAP[version]))
            .run(
                "pip install -r bin/requirements.txt",
                "pip install -r bin/dev-requirements.txt",
                "cd bin",
                "SLACK_RELEASE_BOT_TOKEN='dummy' pytest",
            )
            .on_integration_image(version)
            .build()
        )
    return tests

def automation_tests():
    tests = []
    version = SupportedPython.V3_7
    coverage = ".coverage.automation.{version}.$BUILDKITE_BUILD_ID".format(version=version)
    tests.append(
        StepBuilder("automation tests ({ver})".format(ver=TOX_MAP[version]))
        .run(
            "pushd python_modules/automation",
            "tox -e {ver}".format(ver=TOX_MAP[version]),
            "mv .coverage {file}".format(file=coverage),
            "buildkite-agent artifact upload {file}".format(file=coverage),
        )
        .on_integration_image(version)
        .build()
    )
    return tests