def response(namespace):
    """
    """
    ocl = OpenShiftCmdClient()
    """
    ocl.login(
        server=OPENSHIFT_URL,
        token=ocl.get_token_from_mounted_secret(
            secret_mount_path=SERVICE_ACCOUNT_SECRET_MOUNT_PATH
        )
    )
    """
    gc = GitClient(git_url=INDEX_GIT_URL, git_branch=INDEX_GIT_BRANCH)
    gc.fresh_clone()
    index_location = path.join(gc.actual_clone_location, "index.d")
    ir = IndexReader(index_location, namespace)
    projects = []
    prjs = ir.read_projects()
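    # Wrap each index entry in a Project object for the response.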
    for p in prjs:
        projects.append(
            Project(app_id=p.app_id,
                    job_id=p.job_id,
                    desired_tag=p.desired_tag))
    projects_in_namespace = Projects(meta=meta_obj(), projects=projects)
    return projects_in_namespace
def response(namespace):
    """
    """
    ocl = OpenShiftCmdClient()
    """
    ocl.login(
        server=OPENSHIFT_URL,
        token=ocl.get_token_from_mounted_secret(
            secret_mount_path=SERVICE_ACCOUNT_SECRET_MOUNT_PATH
        )
    )
    """
    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    projects = []
    prjs = ir.read_projects()
    for p in prjs:
        projects.append(
            Project(app_id=p.app_id,
                    job_id=p.job_id,
                    desired_tag=p.desired_tag))
    projects_in_namespace = Projects(meta=meta_obj(), projects=projects)

    return projects_in_namespace
def response(namespace, app_id, job_id, desired_tag):
    """
    """
    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
    job_name = Project.pipeline_name(
        app_id=app_id, job_id=job_id, desired_tag=desired_tag
    )
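    # Look up the latest Jenkins build number for this pipeline using the
    # mounted service account token.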
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace
    )
    latest_build_number = str(ojbi.get_latest_build_number(
        ordered_job_list=[
            namespace,
            "{}-{}".format(
                namespace,
                job_name
            )
        ]
    ))

    if not latest_build_number:
        latest_build_number = "0"

    target_file_path = ""
    source_repo = ""
    source_branch = ""
    pre_build_exists = False

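    # Find the matching project entry in the index to pull its source
    # details.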
    for p in prjs:
        if p.app_id == app_id and p.job_id == job_id and \
                p.desired_tag == desired_tag:
            source_repo = p.git_url
            source_branch = p.git_branch
            target_file_path = "{}/{}".format(p.git_path, p.target_file)
            pre_build_exists = bool(
                p.pre_build_script and p.pre_build_context)
            break

    if source_repo == "":
        return {}

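    # Drop a trailing ".git" from the repository URL.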
    if source_repo.endswith(".git"):
        source_repo = source_repo[:-4]

    return TargetFile(
        meta=meta_obj(), prebuild=pre_build_exists,
        target_file_path=target_file_path, source_repo=source_repo,
        source_branch=source_branch, latest_build_number=latest_build_number
    )
def response(namespace, appid, jobid, desired_tag, build_number):
    """
    """
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace)

    job_name = Project.pipeline_name(app_id=appid,
                                     job_id=jobid,
                                     desired_tag=desired_tag)
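    # Jenkins jobs are named "<namespace>-<pipeline name>".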
    jenkins_job_name = "{}-{}".format(namespace, job_name)
    logs_info = ojbi.get_build_logs(
        ordered_job_list=[namespace, jenkins_job_name],
        build_number=build_number)

    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
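    # Check the index entry to see whether a prebuild stage was configured
    # for this project.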
    prebuild_exists = "false"
    for p in prjs:
        if p.app_id == appid and p.job_id == jobid and \
                p.desired_tag == desired_tag:
            if p.pre_build_context and p.pre_build_script:
                prebuild_exists = "true"
            break

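    # Extract the per-stage logs from the full Jenkins console log using
    # the stage names.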
    prebuild_logs = process_log(logs_info, "Prebuild source repo") if \
        prebuild_exists == "true" else "Prebuild not requested"
    lint_logs = process_log(logs_info, "Lint the Dockerfile")
    build_logs = process_log(logs_info, "Build the container image")

    # TODO: Update this once logs are separated per scanner
    scan_logs = process_log(logs_info, "Scan the image")
    extracted_scan_logs = ScannerLogs(logs=scan_logs,
                                      description="All Scanners logs")
    all_scan_logs = AllScannerLogs(scanner_name=[extracted_scan_logs])
    logs = PrebuildLintBuildScanLogs(prebuild=str(prebuild_logs),
                                     lint=str(lint_logs),
                                     build=str(build_logs),
                                     scan=all_scan_logs)

    return BuildLogs(meta=meta_obj(),
                     pre_build=prebuild_exists,
                     status=ojbi.get_build_status(
                         ordered_job_list=[namespace, jenkins_job_name],
                         build_number=build_number),
                     failed_stage="TODO",
                     logs=logs)
def response(namespace, app_id, job_id, desired_tag):
    """
    """
    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
    job_name = Project.pipeline_name(app_id=app_id,
                                     job_id=job_id,
                                     desired_tag=desired_tag)
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace)
    latest_build_number = str(
        ojbi.get_latest_build_number(
            ordered_job_list=[namespace, "{}-{}".format(namespace, job_name)]))

    if not latest_build_number:
        latest_build_number = "0"

    target_file_path = ""
    source_repo = ""
    source_branch = ""
    pre_build_exists = "false"
    for p in prjs:
        if p.app_id == app_id and p.job_id == job_id and \
                p.desired_tag == desired_tag:
            source_repo = p.git_url
            source_branch = p.git_branch
            target_file_path = "{}/{}".format(p.git_path, p.target_file)
            if p.pre_build_script and p.pre_build_context:
                pre_build_exists = "true"
            break

    if source_repo == "":
        return {}

    if source_repo.endswith(".git"):
        source_repo = source_repo[:-4]

    return TargetFile(meta=meta_obj(),
                      prebuild=pre_build_exists,
                      target_file_path=target_file_path,
                      source_repo=source_repo,
                      source_branch=source_branch,
                      latest_build_number=latest_build_number)
def response(namespace, app_id, job_id):
    """
    """
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace
    )

    gc = GitClient(
        git_url=INDEX_GIT_URL,
        git_branch=INDEX_GIT_BRANCH
    )
    gc.fresh_clone()
    index_location = path.join(gc.actual_clone_location, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
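    # Collect every desired tag defined in the index for this app_id and
    # job_id.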
    tags = []
    for p in prjs:
        if p.app_id == app_id and p.job_id == job_id:
            tags.append(p.desired_tag)

    ajtds = []
    if len(tags) == 0:
        return {}

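    # Report the status of the last Jenkins build for each tag's pipeline.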
    for tag in tags:
        jenkins_job_name = "{}-{}".format(
            namespace,
            Project.pipeline_name(
                app_id=app_id, job_id=job_id, desired_tag=tag
            )
        )
        build_status = ojbi.get_build_status(
            ordered_job_list=[
                namespace,
                jenkins_job_name
            ],
            build_number="lastBuild"
        )
        image = "{}/{}".format(app_id, job_id)
        ajtd = AppIdJobIdTag(image=image, desired_tag=tag,
                             build_status=build_status)
        ajtds.append(ajtd)

    return AppIdJobIdTags(
        meta=meta_obj(), app_id=app_id,
        job_id=job_id, tags=ajtds
    )
def response(namespace, app_id, job_id):
    """
    """
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace
    )

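    # Reuse the existing index clone, refreshing it before reading the
    # project entries.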
    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
    tags = []
    for p in prjs:
        if p.app_id == app_id and p.job_id == job_id:
            tags.append(p.desired_tag)

    ajtds = []
    if len(tags) == 0:
        return {}

    for tag in tags:
        jenkins_job_name = "{}-{}".format(
            namespace,
            Project.pipeline_name(
                app_id=app_id, job_id=job_id, desired_tag=tag
            )
        )
        build_status = ojbi.get_build_status(
            ordered_job_list=[
                namespace,
                jenkins_job_name
            ],
            build_number="lastBuild"
        )
        image = "{}/{}".format(app_id, job_id)
        ajtd = AppIdJobIdTag(image=image, desired_tag=tag,
                             build_status=build_status)
        ajtds.append(ajtd)

    return AppIdJobIdTags(
        meta=meta_obj(), app_id=app_id,
        job_id=job_id, tags=ajtds
    )
def response(namespace, appid, jobid, desired_tag, build_number):
    """
    """
    ojbi = OpenshiftJenkinsBuildInfo(
        JENKINS_URL,
        token_from_mount=SERVICE_ACCOUNT_SECRET_MOUNT_PATH,
        namespace=namespace
    )

    job_name = Project.pipeline_name(
        app_id=appid, job_id=jobid, desired_tag=desired_tag
    )
    jenkins_job_name = "{}-{}".format(
        namespace,
        job_name
    )
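    # Fetch the full console log of the requested build from Jenkins.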
    logs_info = ojbi.get_build_logs(
        ordered_job_list=[
            namespace,
            jenkins_job_name
        ],
        build_number=build_number
    )

    check_index_seed_job_update(namespace=namespace)
    index_location = path.join(INDEX_CLONE_LOCATION, "index.d")
    ir = IndexReader(index_location, namespace)
    prjs = ir.read_projects()
    prebuild_exists = False
    for p in prjs:
        if p.app_id == appid and p.job_id == jobid and \
                p.desired_tag == desired_tag:
            prebuild_exists = bool(
                p.pre_build_context and p.pre_build_script)
            break

    prebuild_logs = process_log(logs_info, "Prebuild source repo") if \
        prebuild_exists else "Prebuild not requested"
    lint_logs = process_log(logs_info, "Lint the Dockerfile")
    build_logs = process_log(logs_info, "Build the container image")

    # TODO: Update this once logs are separated per scanner
    scan_logs = process_log(logs_info, "Scan the image")
    extracted_scan_logs = ScannerLogs(
        logs=scan_logs, description="All Scanners logs"
    )
    all_scan_logs = AllScannerLogs(
        scanner_name=[extracted_scan_logs]
    )
    logs = PrebuildLintBuildScanLogs(
        prebuild=str(prebuild_logs),
        lint=str(lint_logs),
        build=str(build_logs),
        scan=all_scan_logs
    )

    return BuildLogs(
        meta=meta_obj(),
        pre_build=prebuild_exists,
        status=ojbi.get_build_status(
            ordered_job_list=[
                namespace,
                jenkins_job_name
            ],
            build_number=build_number
        ),
        failed_stage="TODO",
        logs=logs
    )