def main():
    parser = argparse.ArgumentParser(description='Create a requirement report')
    addStandardArguments(parser)
    parser.add_argument('--organization', dest='organization',
                        help='github organization owning the repo (e.g. NASA-PDS)')
    parser.add_argument('--repository', dest='repository',
                        help='github repository name')
    parser.add_argument('--dev', dest='dev',
                        nargs='?',
                        const=True, default=False,
                        help="Generate requirements with impacts related to latest dev/snapshot version")
    parser.add_argument('--output', dest='output',
                        help='directory where version/REQUIREMENTS.md file is created')
    parser.add_argument('--format', dest='format', default='md',
                        help='markdown (md) or html')
    parser.add_argument('--token', dest='token',
                        help='github personal access token')
    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel, format="%(levelname)s %(message)s")

    try:
        requirements = Requirements(args.organization, args.repository, token=args.token, dev=args.dev)
        requirement_file = requirements.write_requirements(root_dir=args.output, format=args.format)
        print(requirement_file)
    except NoAppropriateVersionFoundException as e:
        print('')  # no requirement file produced; emit an empty line to stdout instead
        _logger.error(e)
        sys.exit(0)  # we don't want the github action to fail after that
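
Every example in this listing calls addStandardArguments(parser) and then reads args.loglevel; the helper itself is not shown here. A minimal sketch of what it presumably provides (hypothetical; the packaged version may add more flags):

import logging

def addStandardArguments(parser):
    # Hypothetical sketch: the shared helper only needs to leave an
    # ``args.loglevel`` attribute behind for logging.basicConfig() to use.
    parser.add_argument('--verbose', dest='loglevel', action='store_const',
                        const=logging.DEBUG, default=logging.INFO,
                        help='enable debug-level logging')
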
Example #2
def main():
    parser = argparse.ArgumentParser(description='Create new snapshot release')
    addStandardArguments(parser)
    parser.add_argument('--token',
                        dest='token',
                        help='github personal access token')
    parser.add_argument('--path',
                        dest='path',
                        default='./output/',
                        help='directory where the summary will be created')
    parser.add_argument(
        '--format',
        dest='format',
        default='rst',
        help='format of the summary, accepted formats are md and rst')
    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    token = args.token or GITHUB_TOKEN
    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    build_summaries(token, args.path, args.format)
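
This example falls back to a module-level GITHUB_TOKEN constant instead of reading the environment inline; presumably it is bound once at import time, along these lines (an assumption, since the constant is not shown in the snippet):

import os

# Hypothetical module-level fallback matching the ``args.token or GITHUB_TOKEN`` pattern above.
GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
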
Example #3
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--github_org',
                        help='github org',
                        default=DEFAULT_GITHUB_ORG)
    parser.add_argument(
        '--github_repos',
        nargs='*',
        help=
        'github repo names. if not specified, tool will include all repos in org, by default.'
    )
    parser.add_argument('--token', help='github token.')
    parser.add_argument('--label-name', help='Add new label with this name.')
    parser.add_argument('--label-color', help='Color in hex')
    parser.add_argument('--create', action='store_true', help='create labels')
    parser.add_argument('--delete', action='store_true', help='remove labels')
    parser.add_argument(
        '--config_file',
        help=
        'YAML config file containing many label-name + label-color combinations.'
    )

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    token = args.token or os.environ.get('GITHUB_TOKEN')
    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    if bool(args.label_name) != bool(args.label_color):
        raise Exception("Must specify both label name and label color")

    labels_obj = Labels(args.github_org, args.github_repos, token)

    if args.label_name and args.label_color:
        if args.create:
            labels_obj.create_labels_for_org(args.label_name, args.label_color)
        elif args.delete:
            labels_obj.delete_labels_for_org({args.label_name: ''})
    elif args.config_file:
        with open(args.config_file) as _file:
            _yml = yaml.load(_file, Loader=yaml.FullLoader)
            if args.delete:
                labels_obj.delete_labels_for_org(_yml['labels'])
            elif args.create:
                for name, color in _yml['labels'].items():
                    labels_obj.create_labels_for_org(name, color)
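
The --config_file option is loaded with yaml.load() and iterated as _yml['labels'].items(), which implies a top-level labels mapping of name to hex color. A sketch of that structure, expressed as the dict the loader would return (names and colors are illustrative only):

# Hypothetical shape of the --config_file contents once loaded; GitHub's API
# expects the hex color without a leading '#'.
example_config = {
    'labels': {
        'bug': 'd73a4a',
        'enhancement': 'a2eeef',
    }
}
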
Example #4
def main():
    parser = argparse.ArgumentParser(
        description='empty commit on a repo branch')
    addStandardArguments(parser)
    parser.add_argument(
        '--repo',
        dest='repo',
        help='repository full name with owner, e.g. nasa-pds/pdsen-corral')
    parser.add_argument('--token',
                        dest='token',
                        help='github personal access token')
    parser.add_argument('--branch', dest='branch', help='branch name')
    parser.add_argument('--message', dest='message', help='commit message')
    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    # read organization and repository name
    ping_repo_branch(args.repo, args.branch, args.message, token=args.token)
Example #5
def main():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--github-org',
                        help='github org',
                        default=DEFAULT_GITHUB_ORG)
    parser.add_argument(
        '--github-repos',
        nargs='*',
        help=
        'github repo names. if not specified, tool will include all repos in org by default.'
    )
    parser.add_argument('--length',
                        type=int,
                        default=21,
                        help='milestone length in number of days.')
    parser.add_argument('--token', help='github token.')
    parser.add_argument('--create',
                        action='store_true',
                        help='create milestone.')
    parser.add_argument('--delete',
                        action='store_true',
                        help='delete milestone.')
    parser.add_argument('--close',
                        action='store_true',
                        help='close milestone.')
    parser.add_argument('--due-date',
                        help='Due date of first sprint. Format: YYYY-MM-DD')
    parser.add_argument(
        '--sprint-name-file',
        help=('yaml file containing list of sprint names. tool will create '
              'as many milestones as specified in file.'))
    parser.add_argument('--sprint-names',
                        nargs='*',
                        help='create one sprint with this name')
    parser.add_argument(
        '--prepend-number',
        type=int,
        help=
        'specify number to prepend sprint names or to start with. e.g. 01.foo')

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    token = args.token or os.environ.get('GITHUB_TOKEN')
    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    _sprint_names = args.sprint_names

    if args.sprint_name_file:
        with open(args.sprint_name_file) as f:
            _sprint_names = f.read().splitlines()

    if not _sprint_names:
        _logger.error(
            'One of --sprint-names or --sprint-name-file must be specified.')
        sys.exit(1)

    _due_date = None
    if args.create:
        if not args.due_date:
            _logger.error('--due-date must be specified.')
            sys.exit(1)
        else:
            _due_date = datetime.datetime.strptime(
                args.due_date, '%Y-%m-%d') + datetime.timedelta(hours=8)

    _sprint_number = args.prepend_number
    for n in _sprint_names:
        _sprint_name = n.replace(' ', '.')

        if not _sprint_name:
            continue

        if _sprint_number is not None:
            _sprint_name = f"{str(_sprint_number).zfill(2)}.{_sprint_name}"
            _sprint_number += 1

        # connect to github
        gh = login(token=token)
        for _repo in gh.repositories_by(args.github_org):
            if args.github_repos and _repo.name not in args.github_repos:
                continue

            if args.create:
                _logger.info(
                    f"+++ milestone: {_sprint_name}, due: {_due_date}")
                try:
                    _logger.info(f"CREATE repo: {_repo.name}")
                    _repo.create_milestone(
                        _sprint_name,
                        due_on=_due_date.strftime('%Y-%m-%dT%H:%M:%SZ'))
                except exceptions.UnprocessableEntity:
                    # milestone already exists with this name
                    _logger.info(
                        f"CREATE repo: {_repo.name}, already exists. skipping..."
                    )
            elif args.close:
                _logger.info(f"+++ milestone: {_sprint_name}")
                _milestone = get_milestone(_repo, _sprint_name)
                if _milestone:
                    _logger.info(f"CLOSE repo: {_repo.name}")
                    remove_closed_issues_from_sprint_backlog(_repo, _milestone)
                    defer_open_issues(_repo, _milestone)
                    _milestone.update(state='closed')
                else:
                    _logger.info(f"CLOSE repo: {_repo.name}, skipping...")
            elif args.delete:
                _logger.info(f"+++ milestone: {_sprint_name}")
                _milestone = get_milestone(_repo, _sprint_name)
                if _milestone:
                    _logger.info(f"DELETE repo: {_repo.name}")
                    _milestone.delete()
                else:
                    _logger.info(f"DELETE repo: {_repo.name}, skipping...")
            else:
                _logger.warning("NONE: no action specified")

        if _due_date:
            # Increment due date for next milestone
            _due_date = _due_date + datetime.timedelta(days=args.length)
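
get_milestone is referenced above but not defined in this snippet. Assuming it matches milestones by title with github3.py, a plausible sketch is:

def get_milestone(repo, title):
    # Hypothetical helper: return the milestone whose title matches, else None.
    for milestone in repo.milestones(state='all'):
        if milestone.title == title:
            return milestone
    return None
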
Example #6
def main():

    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--deploy_dir',
                        help='directory to deploy the validate tool on the file system',
                        default='/tmp')
    parser.add_argument('--token',
                        help='github token')
    parser.add_argument('--schemas',
                        help='path(s) to schemas to validate against')
    parser.add_argument('--schematrons',
                        help='path(s) to schematrons to validate against')
    parser.add_argument('--skip_content_validation',
                        help='validate: skip content validation',
                        action='store_true', default=False)
    parser.add_argument('--failure_expected', dest='failure_expected',
                        help='validate expected to fail',
                        action='store_true', default=False)
    parser.add_argument('--datapath',
                        help='path(s) to data to validate',
                        required=True)
    parser.add_argument('--output_log_path',
                        help='path(s) to output validate run log file',
                        default=os.path.join('tmp', 'logs'))
    parser.add_argument('--with_pds4_version',
                        help=('force the following PDS4 version. software will '
                              'download and validate with this version of the '
                              'PDS4 Information Model. this version should be '
                              'the semantic numbered version. e.g. 1.14.0.0'))
    parser.add_argument('--development_release',
                        help=('flag to indicate this should be tested with a '
                              'development release of the PDS4 Standard.'),
                        action='store_true', default=False)

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel, format="%(levelname)s %(message)s")

    token = args.token or os.environ.get('GITHUB_TOKEN')

    if not token:
        _logger.error('Github token must be provided or set as environment variable (GITHUB_TOKEN).')
        sys.exit(1)

    try:
        validate_args = ['-R', 'pds4.label']

        if args.skip_content_validation:
            validate_args.append('--skip-content-validation')

        schemas = []
        if args.schemas:
            schemas.extend(glob.glob(args.schemas, recursive=True))

        schematrons = []
        if args.schematrons:
            # validate_args.append('-S')
            schematrons.extend(glob.glob(args.schematrons, recursive=True))

        if args.development_release:
            if not args.with_pds4_version:
                parser.error('--with_pds4_version must be specified when using --development_release')

        if args.with_pds4_version:
            download_schemas(DOWNLOAD_PATH, args.with_pds4_version, dev_release=args.development_release)
            schemas.extend(glob.glob(os.path.join(DOWNLOAD_PATH, '*.xsd')))
            schematrons.extend(glob.glob(os.path.join(DOWNLOAD_PATH, '*.sch')))

        if schemas:
            validate_args.append('-x')
            validate_args.extend(schemas)

        if schematrons:
            validate_args.append('-S')
            validate_args.extend(schematrons)

        validate_args.append('-t')
        validate_args.extend(glob.glob(args.datapath, recursive=True))

        pkg = download_asset(get_latest_release(token), args.deploy_dir, startswith="validate", file_extension='.zip')
        sw_dir = unzip_asset(pkg, args.deploy_dir)

        exec_validate(os.path.join(sw_dir, 'bin', 'validate'), validate_args, log_path=args.output_log_path)
    except CalledProcessError:
        if not args.failure_expected:
            _logger.error('FAILED: Validate failed unexpectedly. See output logs.')
            sys.exit(1)

    _logger.info('SUCCESS: Validation complete.')
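
exec_validate is also defined elsewhere; since the caller traps subprocess.CalledProcessError, it presumably runs the validate launcher with check=True. A hypothetical sketch under that assumption (the log file name is made up):

import os
import subprocess


def exec_validate(executable, validate_args, log_path='.'):
    # Hypothetical sketch: run the validate launcher, capture output to a log
    # file, and let CalledProcessError propagate on a non-zero exit.
    os.makedirs(log_path, exist_ok=True)
    with open(os.path.join(log_path, 'validate_report.txt'), 'w') as log:
        subprocess.run([executable] + validate_args, stdout=log,
                       stderr=subprocess.STDOUT, check=True)
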
Example #7
def main():

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--github_token', help='github API token')
    parser.add_argument('--zenhub_token', help='zenhub API token')
    parser.add_argument('--build_number', help='build number', required=True)
    parser.add_argument('--delivery_date',
                        help='EN delivery to I&T date',
                        required=True)
    parser.add_argument('--trr_date', help='EN TRR date', required=True)
    parser.add_argument('--ddr_date', help='EN DDR date', required=True)
    parser.add_argument('--release_date', help='EN release date', required=True)
    parser.add_argument('--projects_config',
                        help='Path to config file with project information',
                        required=True)

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    # set output filename
    output_fname = 'plan.rst'

    # get github token or throw error
    github_token = args.github_token or os.environ.get('GITHUB_TOKEN')
    if not github_token:
        _logger.error('github API token must be provided or set as environment'
                      ' variable (GITHUB_TOKEN).')
        sys.exit(1)

    # get zenhub token or throw error
    zenhub_token = args.zenhub_token or os.environ.get('ZENHUB_TOKEN')
    if not zenhub_token:
        _logger.error('zenhub API token must be provided or set as environment'
                      ' variable (ZENHUB_TOKEN).')
        sys.exit(1)

    try:
        gh = GithubConnection.getConnection(token=github_token)
        org = gh.organization(GITHUB_ORG)
        repos = org.repositories()

        issues = []
        repo_dict = {}
        zen = Zenhub(zenhub_token)
        for repo in repos:
            if not issues:
                issues = zen.get_issues_by_release(repo.id,
                                                   f'B{args.build_number}')

            repo_dict[repo.id] = {'repo': repo, 'issues': []}

        # Build up dictionary of repos + issues in release
        for issue in issues:
            repo_dict[issue['repo_id']]['issues'].append(issue['issue_number'])

        # Create project-based dictionary
        with open(args.projects_config) as _file:
            _conf = load(_file, Loader=FullLoader)

        # get project info
        projects = _conf['projects']

        # get key dates info
        key_dates = _conf['key_dates']

        # Loop through repos
        plan_output = ''
        maintenance_output = ''
        ddwg_plans = ''
        for repo_id in repo_dict:
            r = repo_dict[repo_id]['repo']
            issues = repo_dict[repo_id]['issues']
            repo_output = ''
            if issues:
                for issue_num in issues:
                    gh_issue = gh.issue(org.login,
                                        repo_dict[repo_id]['repo'].name,
                                        issue_num)
                    zen_issue = zen.issue(repo_id, issue_num)

                    # we only want release themes in the plan (is_epic + label:theme)
                    labels = get_labels(gh_issue)

                    # Custom handling for pds4-information-model SCRs
                    if 'CCB-' in gh_issue.title:
                        ddwg_plans += f'* `{r.name}#{issue_num} <{gh_issue.html_url}>`_ **{gh_issue.title}**\n'

                    elif is_theme(labels, zen_issue):
                        repo_output += f'* `{r.name}#{issue_num} <{gh_issue.html_url}>`_ **{gh_issue.title}**\n'

                        # proj_id = get_project(projects, gh_issue, labels)
                        # append_to_project(projects[proj_id], f'* `{r.name}#{issue_num} <{gh_issue.html_url}>`_ **{gh_issue.title}**\n')

                        for child in zen.get_epic_children(
                                gh, org, repo_id, issue_num):
                            child_repo = child['repo']
                            child_issue = child['issue']
                            repo_output += f'   * `{child_repo.name}#{child_issue.number} <{child_issue.html_url}>`_ {child_issue.title}\n'

                            # append_to_project(projects[proj_id], f'   * `{child_repo.name}#{child_issue.number} <{child_issue.html_url}>`_ {child_issue.title}\n')
                    # print(repo_output)

            repo_info = REPO_INFO.format(r.name, '#' * len(r.name),
                                         r.description, r.homepage
                                         or r.html_url + '#readme', r.html_url,
                                         r.html_url, r.html_url, r.html_url,
                                         r.html_url)
            # only output the header
            if repo_output:
                plan_output += repo_info
                plan_output += repo_output

        with open(output_fname, 'w') as f_out:

            template_kargs = {
                'output': output_fname,
                'build_number': args.build_number,
                'scr_date': key_dates['scr_date'],
                'doc_update_date': key_dates['doc_update_date'],
                'delivery_date': key_dates['delivery_date'],
                'trr_date': key_dates['trr_date'],
                'beta_test_date': key_dates['beta_test_date'],
                'dldd_int_date': key_dates['dldd_int_date'],
                'doc_review_date': key_dates['doc_review_date'],
                'ddr_date': key_dates['ddr_date'],
                'release_date': key_dates['release_date'],
                'pds4_changes': ddwg_plans,
                'planned_changes': plan_output
            }
            template = Template(
                resource_string(__name__, 'plan.template.rst').decode("utf-8"))
            rst_str = template.render(template_kargs)
            f_out.write(rst_str)

    except Exception as e:
        traceback.print_exc()
        sys.exit(1)

    _logger.info('SUCCESS: Release Plan generated successfully.')
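
The --projects_config file must supply both a projects section and a key_dates section; the key_dates keys are exactly those consumed when rendering plan.template.rst above. A hypothetical example of the loaded structure (dates are placeholders):

# Hypothetical shape of --projects_config after yaml load().
example_conf = {
    'projects': {},  # project info; structure not exercised in this snippet
    'key_dates': {
        'scr_date': '2021-04-01',
        'doc_update_date': '2021-04-15',
        'delivery_date': '2021-05-03',
        'trr_date': '2021-05-17',
        'beta_test_date': '2021-06-01',
        'dldd_int_date': '2021-06-14',
        'doc_review_date': '2021-07-01',
        'ddr_date': '2021-07-15',
        'release_date': '2021-08-02',
    }
}
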
Example #8
def main():

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument(
        '--deploy_dir',
        help='directory to deploy the validate tool on the file system',
        default='/tmp')
    parser.add_argument('--ldd_output_path',
                        help='directory to output generated LDDs',
                        required=True)
    parser.add_argument('--ingest_ldd_src_dir',
                        help='/path/to/src/ dir for IngestLDD file',
                        required=True)
    parser.add_argument('--token', help='github token')
    parser.add_argument('--output_log_path',
                        help='path(s) to output validate run log file',
                        default=os.path.join('tmp', 'logs'))
    parser.add_argument(
        '--with_pds4_version',
        help=('force the following PDS4 version. software will '
              'download and validate with this version of the '
              'PDS4 Information Model. this version should be '
              'the semantic numbered version. e.g. 1.14.0.0'))
    parser.add_argument(
        '--use_lddtool_unstable',
        help=('force the use of the latest unstable LDDTool release. '
              'by default, uses latest stable release'),
        action='store_true',
        default=False)

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    token = args.token or os.environ.get('GITHUB_TOKEN')

    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    try:

        lddtool_args = [LDDTOOL_DEFAULT_ARGS]

        if args.with_pds4_version:
            lddtool_args.extend(
                ['-V',
                 convert_pds4_version_to_alpha(args.with_pds4_version)])

        # Get the IngestLDDs
        # lddtool_args.extend()

        # cleanup the LDD Output area before generating LDDs
        prep_ldd_output_path(args.ldd_output_path)

        pkg = download_asset(get_latest_release(token,
                                                dev=args.use_lddtool_unstable),
                             args.deploy_dir,
                             startswith='lddtool',
                             file_extension='.zip')
        sw_dir = unzip_asset(pkg, args.deploy_dir)

        # Generate dependency LDDs
        ingest_ldds = find_dependency_ingest_ldds(args.ingest_ldd_src_dir)
        for ingest in ingest_ldds:
            # execute LDDTool
            exec_lddtool(os.path.join(sw_dir, 'bin', 'lddtool'),
                         args.ldd_output_path,
                         lddtool_args, [ingest],
                         log_path=args.output_log_path)

        # Generate final LDDs
        ingest_ldds.extend(find_primary_ingest_ldd(args.ingest_ldd_src_dir))

        # execute LDDTool
        exec_lddtool(os.path.join(sw_dir, 'bin', 'lddtool'),
                     args.ldd_output_path,
                     lddtool_args,
                     ingest_ldds,
                     log_path=args.output_log_path)

    except CalledProcessError:
        _logger.error('FAILED: LDDTool failed unexpectedly. See output logs.')
        sys.exit(1)
    except Exception as e:
        traceback.print_exc()
        sys.exit(1)

    _logger.info('SUCCESS: LDD Generation complete.')
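
prep_ldd_output_path is only described by the "cleanup the LDD Output area" comment above; a minimal hypothetical equivalent:

import os
import shutil


def prep_ldd_output_path(path):
    # Hypothetical helper: remove any previous output directory, then recreate it empty.
    if os.path.isdir(path):
        shutil.rmtree(path)
    os.makedirs(path)
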
Example #9
def main():
    """Main."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--ldd_output_path',
                        help='directory to output generated LDDs',
                        required=True)
    parser.add_argument('--ingest_ldd_src_dir',
                        help='/path/to/src/ dir for IngestLDD file',
                        required=True)
    parser.add_argument('--dev',
                        help='flag to indicate this is a dev release',
                        action='store_true',
                        default=False)
    parser.add_argument('--token', help='github token')
    parser.add_argument('--repo_name',
                        help='full name of github repo (e.g. user/repo)')
    parser.add_argument(
        '--workspace',
        help=(
            'path of workspace. defaults to current working directory if this '
            'or GITHUB_WORKSPACE not specified'))
    parser.add_argument('--pds4_version', help='pds4 IM version')

    args, unknown = parser.parse_known_args()
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")
    if args.loglevel != logging.DEBUG:
        # Quiet github3 logging
        logging.getLogger('github3').setLevel(level=logging.WARNING)

    token = args.token or os.environ.get('GITHUB_TOKEN')

    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    repo_full_name = args.repo_name or os.environ.get('GITHUB_REPOSITORY')
    if not repo_full_name:
        _logger.error(
            'Github repository must be provided or set as environment variable (GITHUB_REPOSITORY).'
        )
        sys.exit(1)
    org = repo_full_name.split('/')[0]
    repo_name = repo_full_name.split('/')[1]

    workspace = args.workspace or os.environ.get('GITHUB_WORKSPACE')
    if not workspace:
        workspace = os.getcwd()
        os.environ['GITHUB_WORKSPACE'] = workspace

    try:
        ingest_ldd, namespace_id, ldd_version = get_info(
            args.ingest_ldd_src_dir)

        ldd_dict = find_ldds(args.ldd_output_path, namespace_id, ldd_version,
                             args.pds4_version)

        assets = package_assets(ingest_ldd, ldd_dict, namespace_id)

        tagger = {"name": "PDSEN CI Bot", "email": "*****@*****.**"}
        gh = github3.login(token=token)
        repo = gh.repository(org, repo_name)

        delete_snapshot_releases(repo, SNAPSHOT_TAG_SUFFIX)
        for release_name in ldd_dict.keys():
            if 'dependencies' not in release_name:
                if args.dev:
                    tag_name = release_name + SNAPSHOT_TAG_SUFFIX
                else:
                    tag_name = release_name

                # Check tag exists before continuing
                tags = Tags(org, repo_name, token=token)
                if not tags.get_tag(tag_name):
                    release = create_release(repo, "main", tag_name, tagger,
                                             args.dev)
                    _logger.info("upload assets")
                    ldd_upload_assets(release, assets)
                else:
                    _logger.warning(
                        f"tag {tag_name} already exists. skipping...")

    except Exception:
        traceback.print_exc()
        sys.exit(1)

    _logger.info('SUCCESS: LDD release complete.')
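
delete_snapshot_releases appears here and again in Example #11 with a (repo, suffix) signature. Assuming it prunes earlier snapshot releases by tag suffix, a github3.py sketch could look like:

def delete_snapshot_releases(repo, suffix):
    # Hypothetical sketch: drop existing releases whose tag carries the
    # snapshot suffix so a fresh snapshot release can be published.
    for release in repo.releases():
        if release.tag_name.endswith(suffix):
            release.delete()
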
Example #10
def main():
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description=__doc__)
    addStandardArguments(parser)
    parser.add_argument('--github-org',
                        help='github org',
                        default=DEFAULT_GITHUB_ORG)
    parser.add_argument('--github-repos',
                        nargs='*',
                        help='github repo names. if not specified, tool will include all repos in org by default.')
    parser.add_argument('--token',
                        help='github token.')
    parser.add_argument('--issue_state',
                        choices=['open', 'closed', 'all'],
                        default='all',
                        help='Return open, closed, or all issues')
    parser.add_argument('--start-time',
                        help='Start datetime for tickets to find. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.')
    parser.add_argument('--end-time',
                        help='End datetime for tickets to find. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.')
    parser.add_argument('--format', default='md',
                        help='rst or md or metrics')

    parser.add_argument('--build', default=None,
                        help='build label, for example B11.1 or B12.0')

    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel, format="%(levelname)s %(message)s")

    _logger.info('Working on build %s', args.build)

    if args.format == 'md':
        create_md_issue_report(
            args.github_org,
            args.github_repos,
            issue_state=args.issue_state,
            start_time=args.start_time,
            token=args.token
        )

    elif args.format == 'rst':

        rst_rdd_report = RstRddReport(
            args.github_org,
            start_time=args.start_time,
            end_time=args.end_time,
            build=args.build,
            token=args.token
        )

        rst_rdd_report.create(args.github_repos, 'pdsen_issues.rst')

    elif args.format == 'metrics':

        rdd_metrics = MetricsRddReport(
            args.github_org,
            start_time=args.start_time,
            end_time=args.end_time,
            build=args.build,
            token=args.token
        )

        rdd_metrics.create(args.github_repos)

    else:
        _logger.error("unsupported format %s, must be rst or md or metrics", args.format)
Example #11
def release_publication(suffix, get_version, upload_assets, prefix='v'):
    """
    Script made to work in the context of a github action.
    """
    parser = argparse.ArgumentParser(description='Create new release')
    addStandardArguments(parser)
    parser.add_argument('--token',
                        dest='token',
                        help='github personal access token')
    parser.add_argument('--repo_name',
                        help='full name of github repo (e.g. user/repo)')
    parser.add_argument(
        '--workspace',
        help=
        'path of workspace. defaults to current working directory if this or GITHUB_WORKSPACE not specified'
    )
    parser.add_argument('--snapshot',
                        action="store_true",
                        help="Mark release as a SNAPSHOT release.")
    args, unknown = parser.parse_known_args()
    print(
        f'🪵 Setting log level to {args.loglevel}, debug happens to be {logging.DEBUG}',
        file=sys.stderr)
    logging.basicConfig(level=args.loglevel,
                        format="%(levelname)s %(message)s")

    # read organization and repository name
    repo_full_name = args.repo_name or os.environ.get('GITHUB_REPOSITORY')
    if not repo_full_name:
        _logger.error(
            'Github repository must be provided or set as environment variable (GITHUB_REPOSITORY).'
        )
        sys.exit(1)

    workspace = args.workspace or os.environ.get('GITHUB_WORKSPACE')
    if not workspace:
        workspace = os.getcwd()
        os.environ['GITHUB_WORKSPACE'] = workspace

    token = args.token or os.environ.get('GITHUB_TOKEN')
    if not token:
        _logger.error(
            'Github token must be provided or set as environment variable (GITHUB_TOKEN).'
        )
        sys.exit(1)

    repo_full_name_array = repo_full_name.split("/")
    org = repo_full_name_array[0]
    repo_name = repo_full_name_array[1]

    tag_name = prefix + get_version(workspace)
    print(f'Using tag_name «{tag_name}»', file=sys.stdout)
    tagger = {"name": "PDSEN CI Bot", "email": "*****@*****.**"}

    gh = github3.login(token=token)
    repo = gh.repository(org, repo_name)

    delete_snapshot_releases(repo, suffix)
    if tag_name.endswith(suffix) or args.snapshot:
        if not tag_name.endswith(suffix):
            tag_name = tag_name + suffix
        create_snapshot_release(repo, repo_name, "main", tag_name, tagger,
                                upload_assets)
    else:
        create_release(repo, repo_name, "main", tag_name, tagger,
                       upload_assets)
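
Unlike the other entry points, release_publication is parameterized by get_version and upload_assets callables, so a language-specific wrapper supplies them. A hypothetical Maven-flavoured wrapper (the names, the pom parsing, and the upload signature are all assumptions, not part of the snippet above):

import os
import xml.etree.ElementTree as ET


def maven_get_version(workspace):
    # Hypothetical: read the <version> element from pom.xml in the workspace.
    ns = {'m': 'http://maven.apache.org/POM/4.0.0'}
    tree = ET.parse(os.path.join(workspace, 'pom.xml'))
    return tree.getroot().find('m:version', ns).text


def maven_upload_assets(*args, **kwargs):
    # Hypothetical no-op: the real callable's signature depends on how
    # create_release / create_snapshot_release invoke it, which is not shown here.
    pass


def main():
    # The '-SNAPSHOT' suffix and the default 'v' tag prefix are illustrative.
    release_publication('-SNAPSHOT', maven_get_version, maven_upload_assets)
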