Exemple #1
0
def coverage_step():
    """Build the Buildkite step that merges coverage artifacts and reports to Coveralls."""
    # Shell commands executed inside the coverage image, in order.
    commands = [
        "mkdir -p tmp",
        'buildkite-agent artifact download ".coverage*" tmp/',
        'buildkite-agent artifact download "lcov.*" tmp/',
        "cd tmp",
        "coverage debug sys",
        "coverage debug data",
        "coverage combine",
        "coveralls-lcov -v -n lcov.* > coverage.js.json",
        # add '--merge=coverage.js.json' to report JS coverage
        "coveralls",
    ]
    # Environment variables forwarded into the container.
    env_vars = [
        "COVERALLS_REPO_TOKEN",  # exported by /env in ManagedSecretsBucket
        "CI_NAME",
        "CI_BUILD_NUMBER",
        "CI_BUILD_URL",
        "CI_BRANCH",
        "CI_PULL_REQUEST",
    ]
    builder = StepBuilder("coverage")
    builder = builder.run(*commands)
    builder = builder.on_python_image("coverage-image:v1", env_vars)
    return builder.build()
Exemple #2
0
def dagster_postgres_tests():
    """Build Buildkite test steps for libraries/dagster-postgres, one per supported Python.

    Each step brings up sibling postgres containers via docker-compose, wires the
    current container into their networks, runs tox, and uploads the per-build
    coverage artifact.
    """
    tests = []
    for version in SupportedPythons:
        # Unique coverage artifact name per Python version and Buildkite build.
        coverage = ".coverage.dagster-postgres.{version}.$BUILDKITE_BUILD_ID".format(
            version=version)
        tests.append(
            StepBuilder("libraries/dagster-postgres tests ({ver})".format(
                ver=TOX_MAP[version])).
            run(
                "cd python_modules/libraries/dagster-postgres/dagster_postgres_tests/",
                # NOTE(review): the wrapper is applied twice — presumably the inner call
                # handles docker-compose-multi.yml and the outer call the default compose
                # file — but confirm against wrap_with_docker_compose_steps' signature.
                *wrap_with_docker_compose_steps(
                    wrap_with_docker_compose_steps(
                        # Expose the single-db postgres container to this container.
                        network_buildkite_container('postgres') +
                        connect_sibling_docker_container(
                            'postgres', 'test-postgres-db',
                            'POSTGRES_TEST_DB_HOST') +
                        # Expose the multi-db containers (run storage + event log storage).
                        network_buildkite_container('postgres_multi') +
                        connect_sibling_docker_container(
                            'postgres_multi',
                            'test-run-storage-db',
                            'POSTGRES_TEST_RUN_STORAGE_DB_HOST',
                        ) + connect_sibling_docker_container(
                            'postgres_multi',
                            'test-event-log-storage-db',
                            'POSTGRES_TEST_EVENT_LOG_STORAGE_DB_HOST',
                        ) + [
                            # tox runs from the package root, one directory up.
                            "pushd ../",
                            "pip install tox",
                            "tox -e {ver}".format(ver=TOX_MAP[version]),
                            "mv .coverage {file}".format(file=coverage),
                            "buildkite-agent artifact upload {file}".format(
                                file=coverage),
                            "popd",
                        ],
                        filename='docker-compose-multi.yml',
                        remove_orphans=False,
                    ))).on_integration_image(version).build())
    return tests
Exemple #3
0
def examples_tests():
    """Create one Buildkite test step for the examples directory per supported Python 3."""
    steps = []
    for py_version in SupportedPython3s:
        tox_env = TOX_MAP[py_version]
        coverage_file = ".coverage.examples.{version}.$BUILDKITE_BUILD_ID".format(
            version=py_version
        )
        commands = [
            "pushd examples",
            "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit,
            # Can't use host networking on buildkite and communicate via localhost
            # between these sibling containers, so pass along the ip.
            network_buildkite_container('postgres'),
            connect_sibling_docker_container(
                'postgres', 'test-postgres-db', 'POSTGRES_TEST_DB_HOST'
            ),
            "tox -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
        ]
        step = (
            StepBuilder("examples tests ({ver})".format(ver=tox_env))
            .run(*commands)
            .on_integration_image(py_version)
            .build()
        )
        steps.append(step)
    return steps
Exemple #4
0
def dagit_steps():
    """Return the Buildkite step that runs the dagit webapp (JS/TS) test suite."""
    # Editable installs needed before the webapp can generate types / download schema.
    install_commands = [
        "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
        "pip install -e python_modules/dagster -qqq",
        "pip install -e python_modules/dagster-graphql -qqq",
        "pip install -e python_modules/libraries/dagster-cron -qqq",
        "pip install -e python_modules/libraries/dagster-slack -qqq",
        "pip install -e python_modules/dagit -qqq",
        "pip install -e examples/legacy_examples -qqq",
    ]
    # Webapp checks: typecheck, jest with coverage, formatting/lint, codegen drift check.
    webapp_commands = [
        "cd js_modules/dagit",
        "yarn install",
        "yarn run ts",
        "yarn run jest --collectCoverage --watchAll=false",
        "yarn run check-prettier",
        "yarn run check-lint",
        "yarn run download-schema",
        "yarn run generate-types",
        "git diff --exit-code",
        "mv coverage/lcov.info lcov.dagit.$BUILDKITE_BUILD_ID.info",
        "buildkite-agent artifact upload lcov.dagit.$BUILDKITE_BUILD_ID.info",
    ]
    step = (
        StepBuilder("dagit webapp tests")
        .run(*(install_commands + webapp_commands))
        .on_integration_image(SupportedPython.V3_7)
        .build()
    )
    return [step]
Exemple #5
0
def python_modules_tox_tests(directory):
    """Build one tox test step per supported Python for the given python_modules subdirectory."""
    label = directory.replace("/", "-")
    steps = []
    for py in SupportedPythons:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.{label}.{version}.$BUILDKITE_BUILD_ID".format(
            label=label, version=py)
        builder = StepBuilder(
            "{label} tests ({ver})".format(label=label, ver=tox_env))
        builder = builder.run(
            "pip install tox",
            "eval $(ssh-agent)",
            "cd python_modules/{directory}".format(directory=directory),
            "tox -vv -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
        )
        builder = builder.on_integration_image(py, [
            'AWS_DEFAULT_REGION', 'TWILIO_TEST_ACCOUNT_SID',
            'TWILIO_TEST_AUTH_TOKEN'
        ])
        steps.append(builder.build())

    return steps
Exemple #6
0
def publish_integration_images():
    """Build and push one integration Docker image per supported Python version.

    Returns a list of Buildkite steps, one per entry in
    ``SupportedPythons + [SupportedPython.V3_8]``.

    Fix: the ``push-integration-*`` make target previously interpolated the full
    dotted version string while ``build-integration-*`` used the condensed
    major+minor form (e.g. ``37``), so the built target and the pushed target
    never matched. Both now use the condensed form, consistent with the other
    ``publish_integration_images`` implementation in this file.
    """
    return [
        StepBuilder("Integration Image %s" % python_version).run(
            # See: https://buildkite.com/docs/pipelines/build-meta-data
            "export IMAGE_VERSION=$$(buildkite-agent meta-data get \"integration-image-version\")",
            "pip install awscli",
            "aws ecr get-login --no-include-email --region us-west-1 | sh",
            "cd /workdir/.buildkite/images/",
            # Condense e.g. "3.7" -> "37" to match the makefile's target names.
            "make VERSION=\"$$IMAGE_VERSION\" build-integration-{python_version}"
            .format(python_version=''.join(python_version.split('.')[:2])),
            "make VERSION=\"$$IMAGE_VERSION\" push-integration-{python_version}"
            .format(python_version=''.join(python_version.split('.')[:2])),
        ).on_integration_image(
            SupportedPython.V3_7,
            [
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
                'BUILDKITE_SECRETS_BUCKET',
            ],
        ).on_queue(BuildkiteQueue.DOCKER).with_timeout(30).build()
        for python_version in SupportedPythons + [SupportedPython.V3_8]
    ]
Exemple #7
0
def events_demo_tests():
    """Return Buildkite steps running the Scala events demo tests on each supported Python 3."""
    steps = []
    for py in SupportedPython3s:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.events-demo.{version}.$BUILDKITE_BUILD_ID".format(
            version=py)
        aws_env = [
            'AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID',
            'AWS_DEFAULT_REGION'
        ]
        step = (
            StepBuilder('events-demo tests ({version})'.format(version=tox_env))
            .on_integration_image(py, aws_env)
            .run(
                "mkdir -p /tmp/dagster/events",
                # Build the events assembly jar and stage it where the tests expect it.
                "pushd scala_modules",
                "sbt events/assembly",
                "cp ./events/target/scala-2.11/events-assembly-0.1.0-SNAPSHOT.jar /tmp/dagster/events/",
                "popd",
                "pushd examples",
                "tox -vv -c event.tox -e {ver}".format(ver=tox_env),
                "mv .coverage {file}".format(file=coverage_file),
                "buildkite-agent artifact upload {file}".format(file=coverage_file),
            )
            .build()
        )
        steps.append(step)
    return steps
Exemple #8
0
def dagit_tests():
    """Return Buildkite steps that run the dagit Python test suite per supported Python."""
    steps = []
    for py in SupportedPythons:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagit.{version}.$BUILDKITE_BUILD_ID".format(
            version=py)
        commands = [
            "apt-get update",
            "apt-get install -y xdg-utils",
            "pushd python_modules",
            "make rebuild_dagit",
            "popd",
            "pip install tox;",
            "cd python_modules/dagit",
            "tox -vv -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
        ]
        step = (
            StepBuilder("dagit tests ({ver})".format(ver=tox_env))
            .run(*commands)
            .on_integration_image(py)
            .on_queue(BuildkiteQueue.MEDIUM)
            .build()
        )
        steps.append(step)

    return steps
Exemple #9
0
def publish_test_images():
    '''This set of tasks builds and pushes Docker images, which are used by the dagster-airflow and
    the dagster-k8s tests
    '''
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for py in SupportedPythons:
        # The key is referenced via depends_on() by steps that need these images.
        step_key = "dagster-test-images-{version}".format(version=TOX_MAP[py])
        builder = StepBuilder(
            "test images {version}".format(version=py), key=step_key)
        builder = builder.run(
            # credentials
            "aws ecr get-login --no-include-email --region us-west-1 | sh",
            "export GOOGLE_APPLICATION_CREDENTIALS=\"/tmp/gcp-key-elementl-dev.json\"",
            "aws s3 cp s3://$${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json $${GOOGLE_APPLICATION_CREDENTIALS}",
            #
            # build and tag test image
            "export TEST_IMAGE=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-docker-buildkite:$${BUILDKITE_BUILD_ID}-"
            + py,
            "./python_modules/dagster-test/dagster_test/test_project/build.sh "
            + py + " $${TEST_IMAGE}",
            #
            # push the built image
            "echo -e \"--- \033[32m:docker: Pushing Docker image\033[0m\"",
            "docker push $${TEST_IMAGE}",
        )
        builder = builder.on_integration_image(
            py,
            [
                'AIRFLOW_HOME',
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
                'BUILDKITE_SECRETS_BUCKET',
            ],
        )
        steps.append(builder.build())
    return steps
Exemple #10
0
def celery_tests():
    """Return Buildkite steps for the dagster-celery test suite, one per Python version."""
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for py in SupportedPythons + [SupportedPython.V3_8]:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagster-celery.{version}.$BUILDKITE_BUILD_ID".format(
            version=py)
        builder = StepBuilder("[dagster-celery] ({ver})".format(ver=tox_env))
        builder = builder.on_integration_image(
            py,
            [
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
            ],
        )
        builder = builder.run(
            "export DAGSTER_DOCKER_IMAGE_TAG=$${BUILDKITE_BUILD_ID}-" + py,
            "export DAGSTER_DOCKER_REPOSITORY=\"$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com\"",
            "pushd python_modules/libraries/dagster-celery",
            # Run the rabbitmq db. We are in docker running docker
            # so this will be a sibling container.
            "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit,
            # Can't use host networking on buildkite and communicate via localhost
            # between these sibling containers, so pass along the ip.
            network_buildkite_container('rabbitmq'),
            connect_sibling_docker_container('rabbitmq', 'test-rabbitmq',
                                             'DAGSTER_CELERY_BROKER_HOST'),
            "tox -vv -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
        )
        builder = builder.depends_on(
            ["dagster-test-images-{version}".format(version=tox_env)])
        steps.append(builder.build())
    return steps
Exemple #11
0
def python_modules_tox_tests(directory, supported_pythons=None):
    """Build tox test steps for a python_modules subdirectory across the given Pythons.

    When supported_pythons is falsy, all supported Pythons plus 3.8 are used.
    """
    label = directory.replace("/", "-")
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    if not supported_pythons:
        supported_pythons = SupportedPythons + [SupportedPython.V3_8]
    for py in supported_pythons:

        # pyspark doesn't support Python 3.8 yet
        # See: https://github.com/dagster-io/dagster/issues/1960
        needs_38_skip = 'pyspark' in label or 'aws' in label
        if needs_38_skip and py == SupportedPython.V3_8:
            continue

        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.{label}.{version}.$BUILDKITE_BUILD_ID".format(
            label=label, version=py)
        step = (
            StepBuilder("{label} tests ({ver})".format(label=label, ver=tox_env))
            .run(
                "eval $(ssh-agent)",
                "cd python_modules/{directory}".format(directory=directory),
                "tox -vv -e {ver}".format(ver=tox_env),
                "mv .coverage {file}".format(file=coverage_file),
                "buildkite-agent artifact upload {file}".format(file=coverage_file),
            )
            .on_integration_image(
                py,
                [
                    'AWS_DEFAULT_REGION',
                    'TWILIO_TEST_ACCOUNT_SID',
                    'TWILIO_TEST_AUTH_TOKEN',
                ],
            )
            .build()
        )
        steps.append(step)

    return steps
Exemple #12
0
def celery_tests():
    """Return Buildkite steps for the dagster-celery test suite, one per Python version."""
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for py in SupportedPythons + [SupportedPython.V3_8]:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagster-celery.{version}.$BUILDKITE_BUILD_ID".format(
            version=py)
        step = (
            StepBuilder("[dagster-celery] ({ver})".format(ver=tox_env))
            .on_integration_image(py)
            .run(
                "pushd python_modules/dagster-celery",
                # Run the rabbitmq db. We are in docker running docker
                # so this will be a sibling container.
                "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit,
                # Can't use host networking on buildkite and communicate via localhost
                # between these sibling containers, so pass along the ip.
                network_buildkite_container('rabbitmq'),
                connect_sibling_docker_container('rabbitmq', 'test-rabbitmq',
                                                 'DAGSTER_CELERY_BROKER_HOST'),
                "tox -vv -e {ver}".format(ver=tox_env),
                "mv .coverage {file}".format(file=coverage_file),
                "buildkite-agent artifact upload {file}".format(file=coverage_file),
            )
            .build()
        )
        steps.append(step)
    return steps
Exemple #13
0
def deploy_scala():
    """Return the Buildkite step that builds and deploys the scala_modules artifacts."""
    # GCP tests need appropriate credentials
    creds_local_file = "/tmp/gcp-key-elementl-dev.json"
    py = SupportedPython.V3_7

    env_vars = [
        'AWS_SECRET_ACCESS_KEY',
        'AWS_ACCESS_KEY_ID',
        'AWS_DEFAULT_REGION',
        'BUILDKITE_SECRETS_BUCKET',
        'GCP_PROJECT_ID',
        'GCP_DEPLOY_BUCKET',
    ]
    builder = StepBuilder("scala deploy")
    builder = builder.run(
        "pip install awscli",
        "pip install --upgrade google-cloud-storage",
        r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
        + creds_local_file,
        "export GOOGLE_APPLICATION_CREDENTIALS=" + creds_local_file,
        "pushd scala_modules",
        "make deploy",
    )
    builder = builder.on_integration_image(py, env_vars)
    return builder.on_queue(BuildkiteQueue.MEDIUM).build()
Exemple #14
0
def dagster_postgres_tests():
    """Build Buildkite steps for libraries/dagster-postgres across all supported Pythons."""
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for py in SupportedPythons + [SupportedPython.V3_8]:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagster-postgres.{version}.$BUILDKITE_BUILD_ID".format(
            version=py
        )
        commands = [
            "cd python_modules/libraries/dagster-postgres/dagster_postgres_tests/",
            "docker-compose up -d --remove-orphans",  # clean up in hooks/pre-exit,
            "docker-compose -f docker-compose-multi.yml up -d",  # clean up in hooks/pre-exit,
            # Wire this container into the sibling postgres containers' networks.
            network_buildkite_container('postgres'),
            connect_sibling_docker_container(
                'postgres', 'test-postgres-db', 'POSTGRES_TEST_DB_HOST'
            ),
            network_buildkite_container('postgres_multi'),
            connect_sibling_docker_container(
                'postgres_multi', 'test-run-storage-db', 'POSTGRES_TEST_RUN_STORAGE_DB_HOST',
            ),
            connect_sibling_docker_container(
                'postgres_multi',
                'test-event-log-storage-db',
                'POSTGRES_TEST_EVENT_LOG_STORAGE_DB_HOST',
            ),
            "pushd ../",
            "tox -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
            "popd",
        ]
        step = (
            StepBuilder("libraries/dagster-postgres tests ({ver})".format(ver=tox_env))
            .run(*commands)
            .on_integration_image(py)
            .build()
        )
        steps.append(step)
    return steps
Exemple #15
0
def airflow_tests():
    """Return Buildkite steps for dagster-airflow; each depends on the matching test-image step."""
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    for py in SupportedPythons:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagster-airflow.{version}.$BUILDKITE_BUILD_ID".format(
            version=py)
        builder = StepBuilder("dagster-airflow ({ver})".format(ver=tox_env))
        builder = builder.run(
            "aws ecr get-login --no-include-email --region us-west-1 | sh",
            r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json "
            + GCP_CREDS_LOCAL_FILE,
            "export GOOGLE_APPLICATION_CREDENTIALS=" + GCP_CREDS_LOCAL_FILE,
            "./.buildkite/scripts/dagster_airflow.sh %s %s" % (py, tox_env),
            "pushd python_modules/dagster-airflow/",
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
            "popd",
        )
        builder = builder.depends_on(
            ["dagster-test-images-{version}".format(version=tox_env)])
        builder = builder.on_integration_image(
            py,
            [
                'AIRFLOW_HOME',
                'AWS_ACCOUNT_ID',
                'AWS_ACCESS_KEY_ID',
                'AWS_SECRET_ACCESS_KEY',
                'BUILDKITE_SECRETS_BUCKET',
                'GOOGLE_APPLICATION_CREDENTIALS',
            ],
        )
        steps.append(builder.build())
    return steps
Exemple #16
0
def coverage_step():
    """Return the Buildkite step that installs coverage tooling, merges all coverage
    artifacts from earlier steps, and reports to Coveralls."""
    commands = [
        "apt-get update",
        "apt-get -qq -y install lcov ruby-full",
        "pip install coverage coveralls coveralls-merge",
        "gem install coveralls-lcov",
        "mkdir -p tmp",
        'buildkite-agent artifact download ".coverage*" tmp/',
        'buildkite-agent artifact download "lcov.*" tmp/',
        "cd tmp",
        "coverage combine",
        "coveralls-lcov -v -n lcov.* > coverage.js.json",
        # add '--merge=coverage.js.json' to report JS coverage
        "coveralls",
    ]
    env_vars = [
        'COVERALLS_REPO_TOKEN',  # exported by /env in ManagedSecretsBucket
        'CI_NAME',
        'CI_BUILD_NUMBER',
        'CI_BUILD_URL',
        'CI_BRANCH',
        'CI_PULL_REQUEST',
    ]
    return (
        StepBuilder("coverage")
        .run(*commands)
        .on_integration_image(SupportedPython.V3_7, env_vars)
        .build()
    )
Exemple #17
0
def dask_tests():
    """Return Buildkite steps running dagster-dask against a docker-compose dask cluster."""
    steps = []
    for py in SupportedPython3s:
        tox_env = TOX_MAP[py]
        coverage_file = ".coverage.dagster-dask.{version}.$BUILDKITE_BUILD_ID".format(version=py)
        commands = [
            "pushd python_modules/dagster-dask/dagster_dask_tests/dask-docker",
            "./build.sh " + py,
            # Run the docker-compose dask cluster
            "export PYTHON_VERSION=\"{ver}\"".format(ver=py),
            "docker-compose up -d --remove-orphans",
            # hold onto your hats, this is docker networking at its best. First, we figure out
            # the name of the currently running container...
            "export CONTAINER_ID=`cut -c9- < /proc/1/cpuset`",
            r'export CONTAINER_NAME=`docker ps --filter "id=\${CONTAINER_ID}" --format "{{.Names}}"`',
            # then, we dynamically bind this container into the dask user-defined bridge
            # network to make the dask containers visible...
            r"docker network connect dask \${CONTAINER_NAME}",
            # Now, we grab the IP address of the dask-scheduler container from within the dask
            # bridge network and export it; this will let the tox tests talk to the scheduler.
            "export DASK_ADDRESS=`docker inspect --format '{{ .NetworkSettings.Networks.dask.IPAddress }}' dask-scheduler`",
            "popd",
            "pushd python_modules/dagster-dask/",
            "pip install tox",
            "tox -e {ver}".format(ver=tox_env),
            "mv .coverage {file}".format(file=coverage_file),
            "buildkite-agent artifact upload {file}".format(file=coverage_file),
        ]
        step = (
            StepBuilder("dagster-dask tests ({ver})".format(ver=tox_env))
            .run(*commands)
            .on_integration_image(
                py, ['AWS_SECRET_ACCESS_KEY', 'AWS_ACCESS_KEY_ID', 'AWS_DEFAULT_REGION']
            )
            .on_queue(BuildkiteQueue.MEDIUM)
            .build()
        )
        steps.append(step)
    return steps
Exemple #18
0
def publish_integration_images():
    """Build and push integration images, tagged with the current UTC timestamp, per Python."""
    # Condense e.g. "3.7" -> "37" to match the makefile's target names.
    short_versions = [
        ''.join(full_version.split('.')[:2])
        for full_version in SupportedPythons + [SupportedPython.V3_8]
    ]
    publish_date = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H%M%S")

    steps = []
    for python_version in short_versions:
        step = (
            StepBuilder("Integration Image %s" % python_version)
            .run(
                r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/dockerhub-creds /tmp/dockerhub-creds",
                "export DOCKERHUB_PASSWORD=`cat /tmp/dockerhub-creds`",
                "export DOCKERHUB_USERNAME=elementldevtools",
                "aws ecr get-login --no-include-email --region us-west-1 | sh",
                "cd /workdir/.buildkite/images/",
                "make VERSION=\"{publish_date}\" build-integration-{python_version}".format(
                    publish_date=publish_date, python_version=python_version
                ),
                "make VERSION=\"{publish_date}\" push-integration-{python_version}".format(
                    publish_date=publish_date, python_version=python_version
                ),
            )
            .on_integration_image(
                SupportedPython.V3_7,
                [
                    'AWS_ACCOUNT_ID',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'BUILDKITE_SECRETS_BUCKET',
                ],
            )
            .on_queue(BuildkiteQueue.DOCKER)
            .with_timeout(30)
            .build()
        )
        steps.append(step)
    return steps
Exemple #19
0
def pipenv_smoke_tests():
    """Smoke-test that dagster and dagit install cleanly under pipenv.

    On release builds (check_for_release()) the step runs no commands.
    """
    steps = []
    # See: https://github.com/dagster-io/dagster/issues/1960
    # See: https://github.com/dagster-io/dagster/issues/2079
    for py in SupportedPython3s:
        is_release = check_for_release()
        if is_release:
            smoke_commands = []
        else:
            smoke_commands = [
                "mkdir /tmp/pipenv_smoke_tests",
                "pushd /tmp/pipenv_smoke_tests",
                "pipenv install -e /workdir/python_modules/dagster",
                "pipenv install -e /workdir/python_modules/dagit",
            ]
        step = (
            StepBuilder("pipenv smoke tests ({ver})".format(ver=TOX_MAP[py]))
            .run(*smoke_commands)
            .on_integration_image(py)
            .build()
        )
        steps.append(step)

    return steps
Exemple #20
0
            StepBuilder("releasibility tests ({ver})".format(
                ver=TOX_MAP[version])).run(
                    "pip install -r bin/requirements.txt",
                    "pip install -r bin/dev-requirements.txt",
                    "cd bin",
                    "SLACK_RELEASE_BOT_TOKEN='dummy' pytest",
                ).on_integration_image(version).build())

    return tests


if __name__ == "__main__":
    steps = pylint_steps() + [
        StepBuilder("isort").run(
            "pip install isort>=4.3.21",
            "isort -rc examples python_modules",  # -sg seems to be broken
            "isort -rc -l 78 examples/dagster_examples/intro_tutorial",
            "git diff --exit-code",
        ).on_integration_image(SupportedPython.V3_7).build(),
        StepBuilder("black").run("pip install black==19.10b0",
                                 "make check_black").on_integration_image(
                                     SupportedPython.V3_7).build(),
        StepBuilder("docs snapshot test").run(
            "pip install -r .read-the-docs-requirements.txt -qqq",
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "pip install -e python_modules/dagster -qqq",
            "pip install -e python_modules/dagstermill -qqq",
            "pytest -vv docs",
        ).on_integration_image(SupportedPython.V3_7).build(),
        StepBuilder("dagit webapp tests").run(
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "pip install -e python_modules/dagster -qqq",
Exemple #21
0
def version_equality_checks(version=SupportedPython.V3_7):
    """Return the step that verifies all library versions agree (bin/version_check.py)."""
    builder = StepBuilder("version equality checks for libraries")
    builder = builder.on_integration_image(version)
    builder = builder.run(
        "pip install -r bin/requirements.txt",
        "python bin/version_check.py",
    )
    return [builder.build()]


if __name__ == "__main__":
    steps = []
    steps += publish_test_images()

    steps += [
        StepBuilder("dagster-flyte build example").run(
            "cd python_modules/libraries/dagster-flyte/examples",
            "make docker_build").on_integration_image(
                SupportedPython.V3_6).build()
    ]

    steps += pylint_steps()
    steps += [
        StepBuilder("isort").run(
            "pip install isort>=4.3.21",
            "make isort",
            "git diff --exit-code",
        ).on_integration_image(SupportedPython.V3_7).build(),
        StepBuilder("black")
        # See: https://github.com/dagster-io/dagster/issues/1999
        .run("make check_black").on_integration_image(
            SupportedPython.V3_7).build(),
        StepBuilder("docs snapshot test").run(
Exemple #22
0
            .run(
                "make install_dev_python_modules",
                "pylint -j 0 `git ls-files '%s/*.py'` --rcfile=.pylintrc" % path,
            )
            .on_integration_image(SupportedPython.V3_7)
            .on_queue(BuildkiteQueue.MEDIUM)
            .build()
        )
    return res


if __name__ == "__main__":
    steps = pylint_steps() + [
        StepBuilder("isort")
        .run(
            "pip install isort>=4.3.21", "isort -rc examples python_modules", "git diff --exit-code"
        )
        .on_python_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("black")
        # black 18.9b0 doesn't support py27-compatible formatting of the below invocation (omitting
        # the trailing comma after **check.opt_dict_param...) -- black 19.3b0 supports multiple
        # python versions, but currently doesn't know what to do with from __future__ import
        # print_function -- see https://github.com/ambv/black/issues/768
        .run("pip install black==18.9b0", "make check_black")
        .on_python_image(SupportedPython.V3_7)
        .build(),
        StepBuilder("docs snapshot test")
        .run(
            "pip install -r .read-the-docs-requirements.txt -qqq",
            "pip install -r python_modules/dagster/dev-requirements.txt -qqq",
            "pip install -e python_modules/dagster -qqq",
Exemple #23
0

if __name__ == "__main__":
    # Docker image publication steps come first, then the nightly release steps.
    steps = publish_docker_images()
    nightly_step = (
        StepBuilder('publish nightlies')
        .on_integration_image(
            SupportedPython.V3_7,
            ['SLACK_RELEASE_BOT_TOKEN', 'PYPI_USERNAME', 'PYPI_PASSWORD'],
        )
        .run(
            # Configure git
            'git config --global user.email "$GITHUB_EMAIL"',
            'git config --global user.name "$GITHUB_NAME"',
            # Merge Master
            'git fetch --all',
            'git branch -D master',
            'git checkout --track origin/master',
            'git reset --hard origin/master',
            'git checkout --track origin/nightly',
            'git checkout nightly',
            'git reset --hard origin/nightly',
            'GIT_MERGE_AUTOEDIT=no git merge --strategy recursive --strategy-option theirs master',
            'git push',
            # Install reqs
            'pip install -r bin/requirements.txt',
            # Create ~/.pypirc
            '.buildkite/scripts/pypi.sh',
            # Publish
            'python bin/publish.py publish --nightly --autoclean',
        )
        .build()
    )
    tag_cleanup_step = (
        StepBuilder('clean phabricator tags')
        .run('git tag | grep phabricator | xargs git push -d origin')
        .build()
    )
    steps = steps + [nightly_step, tag_cleanup_step]
    print(yaml.dump({"env": {}, "steps": steps}, default_flow_style=False))
Exemple #24
0
def publish_test_images():
    '''This set of tasks builds and pushes Docker images, which are used by the dagster-airflow and
    the dagster-k8s tests.

    For every supported Python version, two steps are produced: the full test image
    (keyed ``dagster-test-images-<tox env>``) and the core test image (keyed
    ``dagster-core-test-images-<tox env>``); other steps reference these keys via
    depends_on.
    '''
    tests = []
    for version in SupportedPythons:
        # Full test image step; the key is consumed by depends_on() elsewhere.
        key = "dagster-test-images-{version}".format(version=TOX_MAP[version])
        tests.append(
            StepBuilder("test images {version}".format(version=version),
                        key=key)
            # these run commands are coupled to the way the test-image-builder is built
            # see .buildkite/images/test_image_builder/Dockerfile
            .run(
                # credentials
                "/scriptdir/aws.pex ecr get-login --no-include-email --region us-west-1 | sh",
                "export GOOGLE_APPLICATION_CREDENTIALS=\"/tmp/gcp-key-elementl-dev.json\"",
                "/scriptdir/aws.pex s3 cp s3://$${BUILDKITE_SECRETS_BUCKET}/gcp-key-elementl-dev.json $${GOOGLE_APPLICATION_CREDENTIALS}",
                #
                # build and tag test image
                "export TEST_IMAGE=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-docker-buildkite:$${BUILDKITE_BUILD_ID}-"
                + version,
                "./python_modules/dagster-test/dagster_test/test_project/build.sh "
                + version + " $${TEST_IMAGE}",
                #
                # push the built image
                "echo -e \"--- \033[32m:docker: Pushing Docker image\033[0m\"",
                "docker push $${TEST_IMAGE}",
            ).on_python_image(
                'test-image-builder:v2',
                [
                    'AIRFLOW_HOME',
                    'AWS_ACCOUNT_ID',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'BUILDKITE_SECRETS_BUCKET',
                ],
            ).build())

        # Core test image step, built from the buildkite-unit base image.
        key = "dagster-core-test-images-{version}".format(
            version=TOX_MAP[version])
        tests.append(
            StepBuilder("core test images {version}".format(version=version),
                        key=key)
            # these run commands are coupled to the way the test-image-builder is built
            # see .buildkite/images/test_image_builder/Dockerfile
            .run(
                # credentials
                "/scriptdir/aws.pex ecr get-login --no-include-email --region us-west-1 | sh",
                # set the base image
                "export BASE_IMAGE=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/buildkite-unit:py"
                + version + "-" + UNIT_IMAGE_VERSION,
                # build and tag test image
                "export TEST_IMAGE=$${AWS_ACCOUNT_ID}.dkr.ecr.us-west-1.amazonaws.com/dagster-core-docker-buildkite:$${BUILDKITE_BUILD_ID}-"
                + version,
                "./python_modules/dagster-test/build_core.sh " + version +
                " $${TEST_IMAGE}",
                #
                # push the built image
                "echo -e \"--- \033[32m:docker: Pushing Docker image\033[0m\"",
                "docker push $${TEST_IMAGE}",
            ).on_python_image(
                'test-image-builder:v2',
                [
                    'AWS_ACCOUNT_ID',
                    'AWS_ACCESS_KEY_ID',
                    'AWS_SECRET_ACCESS_KEY',
                    'BUILDKITE_SECRETS_BUCKET',
                ],
            ).build())
    return tests
Exemple #25
0
sys.path.append(SCRIPT_PATH)

if __name__ == "__main__":
    # Single step: reset the nightly branch to master and publish the nightly release.
    nightly_publish_step = (
        StepBuilder('publish nightlies')
        .on_integration_image(
            SupportedPython.V3_7,
            ['SLACK_RELEASE_BOT_TOKEN', 'PYPI_USERNAME', 'PYPI_PASSWORD'],
        )
        .run(
            # Configure git
            'git config --global user.email "$GITHUB_EMAIL"',
            'git config --global user.name "$GITHUB_NAME"',
            # Merge Master
            'git fetch --all',
            'git branch -D master',
            'git checkout --track origin/master',
            'git reset --hard origin/master',
            # Reset nightly branch
            'git push --delete origin nightly',
            'git checkout -b nightly',
            'git push --set-upstream origin nightly',
            # Install reqs
            'pip install -r bin/requirements.txt',
            # Create ~/.pypirc
            '.buildkite/scripts/pypi.sh',
            # Publish
            'export PYTHONDONTWRITEBYTECODE=1',
            'pip install -e python_modules/automation',
            'dagster-release publish --nightly --autoclean',
        )
        .build()
    )
    steps = [nightly_publish_step]
    print(yaml.dump({"env": {}, "steps": steps}, default_flow_style=False))