Example #1
def destroy(self) -> None:
    run_process([
        'gcloud', 'compute', '--project', self.gcp_project_id, 'routers',
        'nats', 'delete', self.nat_name, '--router', self.router.name,
        '--quiet'
    ])
    self.router.secure_destroy()
Example #2
def pip_compile(
        requirements: Path,
        *,
        dry_run=False,
        verbose=False,
        upgrade=False,
        upgrade_package="",
        prereleases=False,
        rebuild=False,
        extra_args=(),
):
    """Wraps 'pip-tools' command. Include hash of source file into the generated one."""

    requirements_txt = requirements.with_suffix(".txt")
    requirements_in = requirements.with_suffix(".in")
    logger.info("Compile requirements file %s ...", requirements_in)

    with tempfile.NamedTemporaryFile('w+t',
                                     prefix=f"{requirements_in.stem}-",
                                     suffix=".txt",
                                     delete=False) as txt_file:
        txt_path = Path(txt_file.name)

        if requirements_txt.exists() and not rebuild:
            txt_path.write_bytes(requirements_txt.read_bytes())

        bf_commons.run_process([
            "pip-compile",
            "--no-header",
            *(["-o", txt_path] if not dry_run else []),
            *(["--dry-run"] if dry_run else []),
            *(["--rebuild"] if rebuild else []),
            *(["--upgrade"] if upgrade else []),
            *(["--pre"] if prereleases else []),
            *(["--upgrade-package", upgrade_package]
              if upgrade_package else []),
            *(["-v"] if verbose else ["-q"]),
            *extra_args,
            str(requirements_in),
        ],
                               check=True)

        reqs_content = txt_path.read_text()

    if dry_run:
        return

    source_hash = compute_requirements_in_hash(requirements_in)
    with open(requirements_txt, 'w+t') as out:
        logger.info("Write pip requirements file: %s", requirements_txt)
        out.write(
            textwrap.dedent(f"""\
            # *** autogenerated: don't edit ***
            # $source-hash: {source_hash}
            # $source-file: {requirements_in}
            #
            # run 'bigflow build-requirements {requirements_in}' to update this file

        """))
        out.write(reqs_content)
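
A minimal usage sketch for the pip_compile wrapper above. The import path (bigflow.build.pip) and the requirements file location are assumptions for illustration, not taken from the example:

from pathlib import Path

from bigflow.build.pip import pip_compile  # assumed module path for the pip.py shown here

# Regenerates requirements.txt next to requirements.in and upgrades all pinned versions.
# with_suffix() inside pip_compile swaps the extension, so the '.in' suffix passed here
# is replaced with '.txt' for the output file.
pip_compile(Path("resources/requirements.in"), upgrade=True, verbose=True)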
Example #3
def _export_docker_image_to_file(tag: str, target_dir: Path, version: str):
    image_target_path = target_dir / f"image-{version}.tar"
    logger.info("Exporting the image to %s ...", image_target_path)
    bf_commons.run_process([
        "docker", "image", "save", "-o", image_target_path,
        bf_commons.get_docker_image_id(tag)
    ])
Example #4
def destroy(self) -> None:
    run_process([
        'gcloud', 'composer', 'environments', 'delete', self.composer_name,
        '--location', self.region, '--project', self.gcp_project_id,
        '--quiet'
    ])
    self.cloud_nat.secure_destroy()
Example #5
def _read_setuppy_args(path_to_setup: Path) -> dict:
    logger.info("Read project options from %s", path_to_setup)
    with tempfile.NamedTemporaryFile("r+b") as f:
        bf_commons.run_process(
            ["python", path_to_setup, DUMP_PARAMS_SETUPPY_CMDARG, f.name],
            cwd=str(path_to_setup.parent))
        return pickle.load(f)
Example #6
def create(self) -> None:
    self.router.create()
    run_process([
        'gcloud', 'compute', '--project', self.gcp_project_id, 'routers',
        'nats', 'create', self.nat_name, f'--router={self.router.name}',
        '--auto-allocate-nat-external-ips', '--nat-all-subnet-ip-ranges',
        '--enable-logging'
    ])
Example #7
def _cli_build_package():
    validate_project_setup()
    bf_commons.run_process([
        "python",
        bigflow.build.dev.find_setuppy(),
        "build_project",
        "--build-package",
    ])
Example #8
def _cli_build(args):
    validate_project_setup()
    cmd = [
        "python",
        bigflow.build.dev.find_setuppy(),
        "build_project",
    ]
    if _is_workflow_selected(args):
        cmd.append('--workflow')
        cmd.append(args.workflow)
    if _is_starttime_selected(args):
        cmd.append('--start-time')
        cmd.append(args.start_time)
    bf_commons.run_process(cmd)
Example #9
def load_image_from_tar(image_tar_path: str) -> str:
    logger.info("Load docker image from %s...", image_tar_path)
    for line in bf_commons.run_process(
        ['docker', 'load', '-i', image_tar_path]).split('\n'):
        if 'Loaded image ID:' in line:
            return line.split()[-1].split(':')[-1]
    raise ValueError(f"Can't load image: {image_tar_path}")
Example #10
def _read_setuppy_args(path_to_setup: Path) -> dict:

    logger.info("Read project options from %s", path_to_setup)
    with tempfile.NamedTemporaryFile("r+b") as f:
        bf_commons.run_process(
            ["python", path_to_setup, DUMP_PARAMS_SETUPPY_CMDARG, f.name],
            cwd=str(path_to_setup.parent))
        params = pickle.load(f)

    legacy_project_name = _read_project_name_from_setup_legacy(
        path_to_setup.parent)
    if legacy_project_name and params.get('name') != legacy_project_name:
        logging.error(
            "Project name mismatch: setup.PROJECT_NAME == %r, "
            "but setup(name=%r). It is recommended to remove 'PROJECT_NAME' variable from 'project_setup.py'",
            legacy_project_name, params.get('name'))

    return params
Example #11
def validate_project_setup():
    cmd = [
        "python",
        bigflow.build.dev.find_setuppy(),
        "build_project",
        "--validate-project-setup",
    ]
    output = bf_commons.run_process(cmd)

    if bigflow.build.dist.SETUP_VALIDATION_MESSAGE not in output:
        raise ValueError('The `setup.py` is invalid. Check the documentation for how to create a valid `setup.py`: https://github.com/allegro/bigflow/blob/master/docs/build.md')
Example #12
File: pip.py, Project: anjensan/bigflow
def pip_compile(
        req: Path,
        verbose=False,
        extra_args=(),
):
    """Wraps 'pip-tools' command. Include hash of source file into the generated one."""

    req_txt = req.with_suffix(".txt")
    req_in = req.with_suffix(".in")
    logger.info("Compile requirements file %s ...", req_in)

    with tempfile.NamedTemporaryFile('w+t',
                                     prefix=f"{req_in.stem}-",
                                     suffix=".txt",
                                     delete=False) as txt_file:
        bf_commons.run_process([
            "pip-compile",
            "--no-header",
            "-o",
            txt_file.name,
            *(["-v"] if verbose else ["-q"]),
            *extra_args,
            str(req_in),
        ])
        with open(txt_file.name) as ff:
            reqs_content = ff.readlines()

    source_hash = bf_commons.generate_file_hash(req_in)

    with open(req_txt, 'w+t') as out:
        logger.info("Write pip requirements file: %s", req_txt)
        out.write(
            textwrap.dedent(f"""\
            # *** AUTO GENERATED: DON'T EDIT ***
            # $source-hash: {source_hash}
            # $source-file: {req_in}
            #
            # run 'bigflow build-requirements {req_in}' to update this file

        """))
        out.writelines(reqs_content)
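
The '# $source-hash' header written above makes it possible to detect a stale requirements.txt without rerunning pip-compile. A hedged sketch of such a check, reusing bf_commons.generate_file_hash from the example (the helper name requirements_txt_is_stale is made up for illustration):

def requirements_txt_is_stale(req_in: Path, req_txt: Path) -> bool:
    # Compare the hash recorded in the generated file with the current hash of the '.in' file.
    if not req_txt.exists():
        return True
    current_hash = bf_commons.generate_file_hash(req_in)
    for line in req_txt.read_text().splitlines():
        if line.startswith("# $source-hash:"):
            return line.split(":", 1)[1].strip() != current_hash
    return True  # no hash header found -- treat the file as stale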
Example #13
def run_tests(project_spec: BigflowProjectSpec):
    logger.info('Running tests...')
    output_dir = "build/junit-reports"
    try:
        bf_commons.run_process([
            "python",
            "-m",
            "xmlrunner",
            "discover",
            "-s",
            ".",
            "-t",
            project_spec.project_dir,
            "-o",
            output_dir,
        ])
    except subprocess.CalledProcessError:
        logger.error("Test suite was FAILED")
        exit(1)

    logger.info("Test suite was PASSED")
Example #14
def _deploy_image_loaded_to_local_registry(
    build_ver,
    docker_repository,
    image_id,
    auth_method,
    vault_endpoint,
    vault_secret,
):
    tag_image(image_id, docker_repository, build_ver)

    docker_image = docker_repository + ":" + build_ver
    docker_image_latest = docker_repository + ":latest"

    logger.info("Deploying docker image tag=%s auth_method=%s", docker_image,
                auth_method)

    if auth_method == 'local_account':
        bf_commons.run_process(['gcloud', 'auth', 'configure-docker'])
    elif auth_method == 'vault':
        oauthtoken = get_vault_token(vault_endpoint, vault_secret)
        bf_commons.run_process(
            [
                'docker', 'login', '-u', 'oauth2accesstoken',
                '--password-stdin', 'https://eu.gcr.io'
            ],
            input=oauthtoken,
        )
    else:
        raise ValueError('unsupported auth_method: ' + auth_method)

    bf_commons.run_process(
        ['docker', 'push', docker_image, docker_image_latest])

    return docker_image
Example #15
File: reflect.py, Project: anjensan/bigflow
def _build_dist_package(
    project_name: str,
    suffix: str,
    cmdname: str,
    exargs: List[str],
):
    """Locates and runs 'bdist_*' command on 'setup.py'"""

    if project_name is None:
        project_name = infer_project_name(stack=3)

    with tempfile.TemporaryDirectory() as workdir:

        setuppy = materialize_setuppy(project_name, workdir)
        distdir = Path(workdir) / "dist"
        logger.info("Run setup.py %s", cmdname)

        fd, result_path = tempfile.mkstemp(suffix=suffix)
        result_path = Path(result_path)
        os.close(fd)

        bfc.run_process(
            [
                "python",
                setuppy,
                cmdname,
                "--dist-dir",
                distdir,
                *(exargs or []),
            ],
            cwd=str(setuppy.parent),
        )
        result_tmp = _expect_single_file(distdir, "*" + suffix)

        logger.debug("Rename %s to %s", result_tmp, result_path)
        result_tmp.rename(result_path)

        logger.info("Built package located at %s", result_path)
        return result_path
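
A hedged call sketch for the private helper above; the project name and extra arguments are illustration values, not from the source:

# Build a wheel for a hypothetical project; cmdname can be any setuptools 'bdist_*' command.
wheel_path = _build_dist_package(
    project_name="my_project",
    suffix=".whl",
    cmdname="bdist_wheel",
    exargs=[],
)
logger.info("Wheel available at %s", wheel_path)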
Example #16
def read_setuppy_args(path_to_setup: Union[Path, str, None] = None) -> dict:
    """Loads `setup.py`, returns all parameters of `bigflow.build.setup()` function.

    This function doesn't unpack 'embeeded sdist' archive when package is installed via pip.
    You could use `bigflow.build.materialize_setuppy` for such purposes, although it is not recommended"""

    path_to_setup = path_to_setup or find_setuppy()
    logger.info("Read project options from %s", path_to_setup)
    with tempfile.NamedTemporaryFile("r+b") as f:
        bf_commons.run_process(
            ["python", path_to_setup, DUMP_PARAMS_SETUPPY_CMDARG, f.name],
            cwd=str(path_to_setup.parent))
        params = pickle.load(f)

    legacy_project_name = _read_project_name_from_setup_legacy(
        path_to_setup.parent)
    if legacy_project_name and params.get('name') != legacy_project_name:
        logging.error(
            "Project name mismatch: setup.PROJECT_NAME == %r, "
            "but setup(name=%r). It is recommended to remove 'PROJECT_NAME' variable from 'project_setup.py'",
            legacy_project_name, params.get('name'))

    return params
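
A short usage sketch of read_setuppy_args; the import path is a guess based on the project layout above, not confirmed by the example:

from bigflow.build.reflect import read_setuppy_args  # assumed module path

# With no argument the function falls back to find_setuppy() and returns the
# keyword arguments that the project's setup.py passed to bigflow.build.setup().
params = read_setuppy_args()
print(params.get("name"), params.get("version"))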
Example #17
def run_tests(project_dir: Path, build_dir: Path, test_package: Path):
    output_dir = build_dir / 'junit-reports'
    try:
        return bf_commons.run_process([
            "python",
            "-m",
            "xmlrunner",
            "discover",
            "-s",
            test_package,
            "-t",
            project_dir,
            "-o",
            output_dir,
        ])
    except subprocess.CalledProcessError:
        raise ValueError("Test suite failed.")
Example #18
def destroy(self) -> None:
    run_process([
        'gcloud', 'compute', '--project', self.gcp_project_id, 'routers',
        'delete', self.router_name, '--region', self.region, '--quiet'
    ])
Example #19
def tag_image(image_id, repository, tag):
    return bf_commons.run_process(
        ["docker", "tag", image_id, f"{repository}:{tag}"])
Example #20
def build_docker_image(project_dir: Path, tag: str):
    print('Building a Docker image. It might take a while.')
    bf_commons.run_process(f'docker build {project_dir} --tag {tag}')
Example #21
def create(self) -> None:
    self.cloud_nat.create()
    run_process(
        _composer_create_command(self.composer_name, self.gcp_project_id,
                                 self.region, self.zone,
                                 self.environment_name))
Example #22
def _build_docker_image(project_dir: Path, tag: str):
    logger.debug("Run docker build...")
    bf_commons.run_process(f'docker build {project_dir} --tag {tag}')
Example #23
def create(self) -> None:
    run_process([
        'gcloud', 'compute', '--project', self.gcp_project_id, 'routers',
        'create', self.router_name, '--network', 'default', '--region',
        self.region
    ])