Example no. 1
0
    def create_artifact_index_file(self, project_id=None, pipeline_id=None):
        """Merge the build-log artifacts of all build jobs into one index file.

        Downloads the per-job build log artifact from every job listed in
        ``self.case_group.BUILD_JOB_NAMES``, tags each entry with the CI job id,
        and dumps the combined list as JSON to the case group's index file.

        :param project_id: GitLab project id; defaults to env ``CI_PROJECT_ID``.
        :param pipeline_id: GitLab pipeline id; defaults to env ``CI_PIPELINE_ID``.
        """
        if project_id is None:
            project_id = os.getenv('CI_PROJECT_ID')
        if pipeline_id is None:
            pipeline_id = os.getenv('CI_PIPELINE_ID')
        gitlab_inst = gitlab_api.Gitlab(project_id)

        artifact_index_list = []
        for build_job_name in self.case_group.BUILD_JOB_NAMES:
            job_info_list = gitlab_inst.find_job_id(build_job_name,
                                                    pipeline_id=pipeline_id)
            for job_info in job_info_list:
                # Could be None if "parallel_num" not defined for the job
                parallel_num = job_info['parallel_num'] or 1
                raw_data = gitlab_inst.download_artifact(
                    job_info['id'],
                    [self.format_build_log_path(parallel_num)])[0]
                # one JSON object per line in the downloaded build log
                build_info_list = [
                    json.loads(line)
                    for line in raw_data.decode().splitlines()
                ]
                for build_info in build_info_list:
                    build_info['ci_job_id'] = job_info['id']
                    artifact_index_list.append(build_info)
        artifact_index_file = self.case_group.get_artifact_index_file()
        # exist_ok=True replaces the manual errno.EEXIST check; any other
        # OSError (permissions, bad path) still propagates, matching the
        # style already used elsewhere in this file
        os.makedirs(os.path.dirname(artifact_index_file), exist_ok=True)

        with open(artifact_index_file, 'w') as f:
            json.dump(artifact_index_list, f)
def update_submodule(git_module_file, submodules_to_update):
    """Parse a ``.gitmodules`` file and download archives for the selected submodules.

    :param git_module_file: path to the ``.gitmodules`` file to parse.
    :param submodules_to_update: collection of submodule paths to restrict the
        update to; a falsy value means "update all submodules".
    """
    gitlab_inst = gitlab_api.Gitlab()
    submodules = []
    with open(git_module_file, "r") as f:
        data = f.read()
    # Fix: the original called match.end() unconditionally and raised
    # AttributeError when the file contained no [submodule] section at all.
    match = SUBMODULE_PATTERN.search(data)
    while match:
        # each section runs from this header to the next one (or to EOF)
        next_match = SUBMODULE_PATTERN.search(data, pos=match.end())
        end_pos = next_match.start() if next_match else len(data)
        path_match = PATH_PATTERN.search(data, pos=match.end(), endpos=end_pos)
        url_match = URL_PATTERN.search(data, pos=match.end(), endpos=end_pos)
        path = path_match.group(1)
        url = url_match.group(1)

        # falsy submodules_to_update means "no filter": update everything
        if not submodules_to_update or path in submodules_to_update:
            submodules.append(SubModule(gitlab_inst, path, url))

        match = next_match

    # start from a clean temp folder; ignore_errors covers "doesn't exist"
    shutil.rmtree(SUBMODULE_ARCHIVE_TEMP_FOLDER, ignore_errors=True)

    for submodule in submodules:
        submodule.download_archive()
Example no. 3
0
def create_artifact_index_file(project_id=None, pipeline_id=None):
    """Merge example-build log artifacts from CI into ``ARTIFACT_INDEX_FILE``.

    Downloads the per-job build log from every job in
    ``EXAMPLE_BUILD_JOB_NAMES``, tags each entry with its CI job id, and
    writes the combined list as JSON.

    :param project_id: GitLab project id; defaults to env ``CI_PROJECT_ID``.
    :param pipeline_id: GitLab pipeline id; defaults to env ``CI_PIPELINE_ID``.
    """
    if project_id is None:
        project_id = os.getenv("CI_PROJECT_ID")
    if pipeline_id is None:
        pipeline_id = os.getenv("CI_PIPELINE_ID")
    gitlab_inst = gitlab_api.Gitlab(project_id)
    artifact_index_list = []

    def format_build_log_path(job_info):
        # job_info passed explicitly instead of being read from the enclosing
        # loop variable at call time, which was easy to break when refactoring
        return "build_examples/list_job_{}.json".format(job_info["parallel_num"])

    for build_job_name in EXAMPLE_BUILD_JOB_NAMES:
        job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id)
        for job_info in job_info_list:
            raw_data = gitlab_inst.download_artifact(job_info["id"], [format_build_log_path(job_info)])[0]
            # one JSON object per line in the downloaded build log
            build_info_list = [json.loads(line) for line in raw_data.splitlines()]
            for build_info in build_info_list:
                build_info["ci_job_id"] = job_info["id"]
                artifact_index_list.append(build_info)
    # exist_ok=True only tolerates an existing directory; the original
    # "except OSError: pass" also silently swallowed permission errors
    os.makedirs(os.path.dirname(ARTIFACT_INDEX_FILE), exist_ok=True)

    with open(ARTIFACT_INDEX_FILE, "w") as f:
        json.dump(artifact_index_list, f)
Example no. 4
0
 def __init__(self, dest_root_path, artifact_index_file, app_path, config_name, target):
     """Load the artifact index and locate the entry matching app/config/target.

     :param dest_root_path: root directory downloaded artifacts will go under.
     :param artifact_index_file: path to an existing JSON index file
         (as produced by ``create_artifact_index_file``) — TODO confirm producer.
     :param app_path: app path to match in the index; may be None.
     :param config_name: config name to match in the index; may be None.
     :param target: target to match in the index.
     """
     # NOTE(review): asserts are stripped under "python -O"; these are
     # precondition checks, not user-input validation
     assert gitlab_api
     # at least one of app_path or config_name is not None. otherwise we can't match artifact
     assert app_path or config_name
     assert os.path.exists(artifact_index_file)
     self.gitlab_inst = gitlab_api.Gitlab(os.getenv("CI_PROJECT_ID"))
     self.dest_root_path = dest_root_path
     with open(artifact_index_file, "r") as f:
         artifact_index = json.load(f)
     self.artifact_info = self._find_artifact(artifact_index, app_path, config_name, target)
Example no. 5
0
def try_to_download_artifacts(bin_path: str) -> None:
    """Fetch the needed SSC binaries for *bin_path* from the first CI build
    job that actually has them, stopping at the first success.

    bin_path: "SSC/ssc_bin/ESP32[C3]/SSC[_APP]"
    """
    gitlab_inst = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID'))
    # the chip directory (second-to-last path component) selects the build job
    build_job_name = SSC_BUILD_JOB_MAP[bin_path.split('/')[-2]]
    job_list = gitlab_inst.find_job_id(build_job_name,
                                       pipeline_id=os.getenv('CI_PIPELINE_ID'))
    files_to_download = [os.path.join(bin_path, name) for name in NEEDED_FILES]
    for job_info in job_list:
        try:
            gitlab_inst.download_artifact(job_info['id'], files_to_download, IDF_PATH)
        except gitlab.exceptions.GitlabError as err:
            if err.response_code == 404:
                # this job has no matching artifacts; try the next one
                continue
            raise
        print('Downloaded {} from {}'.format(bin_path, job_info['id']))
        break
Example no. 6
0
def download_all_test_results(result_path: str,
                              include_retried: bool = False) -> None:
    """Download artifacts of every target/host test job in the current
    pipeline into *result_path* and write an ``index.json`` summary there.

    :param result_path: output directory; wiped and recreated if it exists.
    :param include_retried: also list jobs that were retried.
    """
    if os.path.exists(result_path):
        shutil.rmtree(result_path)
    os.makedirs(result_path, exist_ok=True)

    gitlab_inst = gitlab_api.Gitlab(CI_PROJECT_ID)
    pipeline = gitlab_inst.project.pipelines.get(CI_PIPELINE_ID)
    jobs = pipeline.jobs.list(all=True,
                              per_page=100,
                              include_retried='true' if include_retried else 'false')

    job_info_list = []
    for job in jobs:
        if job.stage not in ('target_test', 'host_test'):
            continue
        log_path = ''
        if job.status not in ('success', 'failed'):
            print('Job {}({}) is not finished'.format(job.id, job.name))
        elif not hasattr(job, 'artifacts_file'):
            print('Job {}({}) has no artifacts.'.format(job.id, job.name))
        else:
            log_path = os.path.join(result_path, 'job_{}'.format(job.id))
            print('Downloading artifacts from: {}'.format(job.name))
            os.makedirs(log_path, exist_ok=True)
            gitlab_inst.download_artifacts(job.id, log_path)

        # jobs without artifacts are still listed, with an empty log_path
        job_info_list.append({
            'id': job.id,
            'name': job.name,
            'tag_list': job.tag_list,
            'status': job.status,
            'stage': job.stage,
            'web_url': job.web_url,
            'commit_url': job.commit['web_url'],
            'log_path': log_path,
        })

    with open(os.path.join(result_path, 'index.json'), 'w') as f:
        f.write(json.dumps({'jobs': job_info_list}, indent=1, sort_keys=True))
Example no. 7
0
def create_artifact_index_file(project_id=None,
                               pipeline_id=None,
                               case_group=ExampleGroup):
    """Merge the build-log artifacts of a case group into its index file.

    Downloads the per-job build log from every job in
    ``case_group.BUILD_JOB_NAMES``, tags each entry with its CI job id, and
    writes the combined list as JSON to the case group's index file.

    :param project_id: GitLab project id; defaults to env ``CI_PROJECT_ID``.
    :param pipeline_id: GitLab pipeline id; defaults to env ``CI_PIPELINE_ID``.
    :param case_group: case group class providing BUILD_JOB_NAMES,
        BUILD_LOCAL_DIR and the artifact index file location.
    """
    if project_id is None:
        project_id = os.getenv("CI_PROJECT_ID")
    if pipeline_id is None:
        pipeline_id = os.getenv("CI_PIPELINE_ID")
    gitlab_inst = gitlab_api.Gitlab(project_id)
    artifact_index_list = []

    def format_build_log_path(job_info):
        # job_info passed explicitly instead of being read from the enclosing
        # loop variable at call time, which was easy to break when refactoring.
        # "parallel_num" could be None if not defined for the job
        return "{}/list_job_{}.json".format(case_group.BUILD_LOCAL_DIR,
                                            job_info["parallel_num"] or 1)

    for build_job_name in case_group.BUILD_JOB_NAMES:
        job_info_list = gitlab_inst.find_job_id(build_job_name,
                                                pipeline_id=pipeline_id)
        for job_info in job_info_list:
            raw_data = gitlab_inst.download_artifact(
                job_info["id"], [format_build_log_path(job_info)])[0]
            # one JSON object per line in the downloaded build log
            build_info_list = [
                json.loads(line) for line in raw_data.splitlines()
            ]
            for build_info in build_info_list:
                build_info["ci_job_id"] = job_info["id"]
                artifact_index_list.append(build_info)
    artifact_index_file = get_artifact_index_file(case_group=case_group)
    # exist_ok=True only tolerates an existing directory; the original
    # "except OSError: pass" also silently swallowed permission errors
    os.makedirs(os.path.dirname(artifact_index_file), exist_ok=True)

    with open(artifact_index_file, "w") as f:
        json.dump(artifact_index_list, f)