def process_requirement(req, dependency_type):
    """Resolve a requirement to a pinned ``<name>==<version>`` via PyPI.

    Depending on *dependency_type*, picks either the oldest ("minimum")
    or the newest ("Latest") released version that satisfies the
    requirement's specifier. Returns "" when nothing on PyPI matches.
    """
    # Split the raw requirement into package name and version specifier.
    pkg_name, spec = parse_req(req)

    # All released versions, ordered oldest -> newest.
    client = PyPIClient()
    versions = [str(v) for v in client.get_ordered_versions(pkg_name)]
    logging.info("Versions available on PyPI for %s: %s", pkg_name, versions)

    # Some packages carry a hard minimum we refuse to go below.
    if pkg_name in MINIMUM_VERSION_SUPPORTED_OVERRIDE:
        floor = parse_version(MINIMUM_VERSION_SUPPORTED_OVERRIDE[pkg_name])
        versions = [v for v in versions if parse_version(v) >= floor]

    # Scan oldest-first to find the minimum dependency, newest-first to
    # find the latest required version.
    candidates = reversed(versions) if dependency_type == "Latest" else versions

    # Return the first candidate accepted by the specifier, pinned.
    for version in candidates:
        if version in spec:
            logging.info("Found %s version %s that matches specifier %s", dependency_type, version, spec)
            return pkg_name + "==" + version

    logging.error("No version is found on PyPI for package %s that matches specifier %s", pkg_name, spec)
    return ""
def is_required_version_on_pypi(package_name, spec):
    """Return the released versions of *package_name* that satisfy *spec*."""
    client = PyPIClient()
    matching = []
    for released in client.get_ordered_versions(package_name):
        text = str(released)
        if text in spec:
            matching.append(text)
    return matching
# Esempio n. 3 ("Example no. 3" — scraper artifact, kept as a comment)
def is_required_version_on_pypi(package_name, spec):
    """Return the released versions of *package_name* that satisfy *spec*.

    Best-effort: when PyPI cannot be queried (e.g. the package was never
    released), an empty list is returned instead of raising.
    """
    client = PyPIClient()
    try:
        pypi_results = client.get_ordered_versions(package_name)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; the best-effort fallback is preserved.
        pypi_results = []

    # Keep only the versions the specifier accepts, as strings.
    versions = [str(v) for v in pypi_results if str(v) in spec]
    return versions
# Esempio n. 4 ("Example no. 4" — scraper artifact, kept as a comment)
def is_required_version_on_pypi(package_name, spec):
    """Return the released versions of *package_name* that satisfy *spec*.

    Logs an error and returns an empty list when PyPI cannot be queried.
    """
    # Delayed import until sdk tools are installed on the virtual env.
    from pypi_tools.pypi import PyPIClient
    client = PyPIClient()
    versions = []
    try:
        versions = [str(v) for v in client.get_ordered_versions(package_name) if str(v) in spec]
    except Exception:
        # Narrowed from a bare `except:` (no longer traps SystemExit).
        # Bug fix: logging uses %-style lazy formatting, not "{}" str.format
        # placeholders — the original never interpolated the package name.
        logging.error("Package %s is not found on PyPI", package_name)
    return versions
def change_log_generate(package_name):
    """Build the changelog for *package_name*.

    If the package cannot be found on PyPI (never released), return the
    canned "Initial Release" entry; otherwise diff the PyPI release
    against the latest local code report via change_log_main.
    """
    # Delayed import until sdk tools are installed on the virtual env.
    from pypi_tools.pypi import PyPIClient
    client = PyPIClient()
    try:
        # Only used as an existence probe — the result is discarded.
        client.get_ordered_versions(package_name)
    except Exception:
        # Narrowed from a bare `except:`; any lookup failure is treated
        # as "not released yet".
        return "  - Initial Release"
    else:
        return change_log_main(f"{package_name}:pypi",
                               f"{package_name}:latest")
def get_release_tag(dep_pkg_name, isLatest):
    """Find the release tag ``<pkg_name>_<version>`` for *dep_pkg_name*.

    When *isLatest* is truthy the newest released version is chosen;
    otherwise the oldest GA (non-prerelease) version is chosen, and at
    least two GA releases must exist. Returns None when the package is
    not on PyPI or no suitable version is found.
    """
    # get versions from pypi and find latest
    # delayed import until sdk tools are installed on virtual env
    from pypi_tools.pypi import PyPIClient

    client = PyPIClient()
    versions = []
    try:
        versions = [str(v) for v in client.get_ordered_versions(dep_pkg_name)]
        logging.info("Versions available on PyPI for %s are: %s",
                     dep_pkg_name, versions)
    except Exception:
        # Narrowed from a bare `except:`; any lookup failure is treated
        # as "package not published".
        logging.error("Package %s is not available on PyPI", dep_pkg_name)
        return None

    # Drop versions at or below the excluded threshold for this package.
    if dep_pkg_name in EXCLUDED_PACKAGE_VERSIONS:
        excluded = parse(EXCLUDED_PACKAGE_VERSIONS[dep_pkg_name])
        versions = [v for v in versions if parse(v) > excluded]
        logging.info("Filtered versions for %s is: %s", dep_pkg_name, versions)

    if not versions:
        logging.info("Released version info for package %s is not available",
                     dep_pkg_name)
        # This is not a hard error. We can run into this situation when a
        # new package is added to repo and not yet released.
        return None

    # Pick newest (reverse the oldest-first ordering) or oldest GA version.
    logging.info("Looking for %s released version",
                 "Latest" if isLatest else "Oldest")
    if isLatest:
        versions.reverse()
    else:
        # find oldest GA version by filtering out all preview versions
        versions = [v for v in versions if not parse(v).is_prerelease]
        if len(versions) < 2:
            logging.info("Only one or no released GA version found for package %s",
                         dep_pkg_name)
            return None

    version = versions[0]

    # create tag in <pkg_name>_version format
    tag_name = "{0}_{1}".format(dep_pkg_name, version)
    logging.info("Release tag for package [%s] is [%s]", dep_pkg_name, tag_name)
    return tag_name
# Esempio n. 7 ("Example no. 7" — scraper artifact, kept as a comment)
def main(input_parameter: str,
         version: Optional[str] = None,
         no_venv: bool = False,
         pypi: bool = False,
         last_pypi: bool = False,
         output: Optional[str] = None):
    """Build Autorest code report(s) for a package.

    When a PyPI version is requested (``version``/``pypi``/``last_pypi``)
    and venvs are allowed, each version is installed into a fresh venv and
    this same script is re-invoked inside it with ``--no-venv``; the
    subprocess writes the report files and this call returns None.
    Otherwise reports are generated in-process for every autorest-generated
    module found, written under ``<package>/code_reports/<version>/``, and
    the list of written paths is returned (plus a merged report when there
    is more than one module).

    :param input_parameter: package spec understood by parse_input.
    :param version: explicit version to report on; "latest" (installed
        code) when no PyPI lookup is requested.
    :param no_venv: never spawn a venv, even when a version is given.
    :param pypi: report on every version published on PyPI.
    :param last_pypi: report only on the most recent PyPI version.
    :param output: optional explicit output file path; falls back to
        report.json / merged_report.json inside the computed folder.
    """
    package_name, module_name = parse_input(input_parameter)
    path_to_package = resolve_package_directory(package_name)

    if (version or pypi or last_pypi) and not no_venv:
        if version:
            versions = [version]
        else:
            # Enumerate released versions on PyPI (oldest -> newest).
            _LOGGER.info(f"Download versions of {package_name} on PyPI")
            from pypi_tools.pypi import PyPIClient
            client = PyPIClient()
            versions = [
                str(v) for v in client.get_ordered_versions(package_name)
            ]
            _LOGGER.info(f"Got {versions}")
            if last_pypi:
                _LOGGER.info(f"Only keep last PyPI version")
                # NOTE(review): filter_track2_versions presumably drops
                # track1-only releases — confirm against its definition.
                versions = filter_track2_versions(package_name, versions)
                versions = [versions[-1]]

        for version in versions:
            _LOGGER.info(
                f"Installing version {version} of {package_name} in a venv")
            with create_venv_with_package([f"{package_name}=={version}"
                                           ]) as venv:
                # Re-run this very script with the venv's interpreter so the
                # report reflects the installed version.
                args = [
                    venv.env_exe, __file__, "--no-venv", "--version", version,
                    input_parameter
                ]
                if output is not None:
                    args.append("--output=" + output)
                try:
                    subprocess.check_call(args)
                except subprocess.CalledProcessError:
                    # If it fail, just assume this version is too old to get an Autorest report
                    _LOGGER.warning(
                        f"Version {version} seems to be too old to build a report (probably not Autorest based)"
                    )
        # Files have been written by the subprocess
        return

    modules = find_autorest_generated_folder(module_name)
    result = []
    version = version or "latest"
    output_folder = Path(path_to_package) / Path("code_reports") / Path(
        version)
    output_folder.mkdir(parents=True, exist_ok=True)

    for module_name in modules:
        _LOGGER.info(f"Working on {module_name}")

        report = create_report(module_name)

        # Sub-modules get their own <module>.json; the top-level module
        # uses the explicit --output path or report.json.
        module_for_path = get_sub_module_part(package_name, module_name)
        if module_for_path:
            output_filename = output_folder / Path(module_for_path + ".json")
        else:
            if output is not None:
                output_filename = output
            else:
                output_filename = output_folder / Path("report.json")

        with open(output_filename, "w") as fd:
            json.dump(report, fd, indent=2)
            _LOGGER.info(f"Report written to {output_filename}")
        result.append(output_filename)

    # Combine the per-module reports into one merged report.
    if len(result) > 1:
        merged_report = merge_report(result)
        if output is not None:
            output_filename = output
        else:
            output_filename = output_folder / Path("merged_report.json")
        with open(output_filename, "w") as fd:
            json.dump(merged_report, fd, indent=2)
            _LOGGER.info(f"Merged report written to {output_filename}")

    return result
# Esempio n. 8 ("Example no. 8" — scraper artifact, kept as a comment)
    sp.call(
        fr"docker create -it --rm -h Change_log --name Change_log -v {docker_path}:/_ l601306339/autorest"
    )
    sp.call("docker start Change_log")

    # install azure tools
    sp.call(f'{docker_cmd}  "python _/scripts/dev_setup.py -p azure-core"  ')

    # get all azure-mgmt-package paths
    in_files = glob.glob(str(Path(f'{docker_path}/sdk/*/azure-mgmt-*')))
    for i in in_files:
        path = Path(i)
        service_name = path.parts[-1]

        # get package version in pypi
        client = PyPIClient()
        versions = [str(v) for v in client.get_ordered_versions(service_name)]
        if len(versions) >= 2:
            older_version = versions[-2]
            last_version = versions[-1]

            # generate code_report
            cmd_last_version = fr'{docker_cmd} "cd _/ && python -m packaging_tools.code_report  {service_name} --version={last_version}"'
            cmd_older_version = fr'{docker_cmd} "cd _/ && python -m packaging_tools.code_report {service_name} --version={older_version}"'
            try:
                last_code_report_info = create_code_report(
                    cmd_last_version, service_name)
                older_code_report_info = create_code_report(
                    cmd_older_version, service_name)

                # get code_report path
    args = parser.parse_args()
    in_venv = args.in_venv
    stable_version = args.stable_version

    pkg_dir = os.path.abspath(args.target_package)
    package_name = os.path.basename(pkg_dir)
    logging.basicConfig(level=logging.INFO)
    if package_name not in RUN_BREAKING_CHANGES_PACKAGES:
        _LOGGER.info(
            f"{package_name} opted out of breaking changes checks. "
            f"See http://aka.ms/azsdk/breaking-changes-tool to opt-in.")
        exit(0)

    # TODO need to parse setup.py here to get the top module/namespace since not always the same.
    #  e.g. azure-storage-file-share and azure.storage.fileshare
    target_module = package_name.replace("-", ".")
    if not stable_version:

        from pypi_tools.pypi import PyPIClient
        client = PyPIClient()

        try:
            stable_version = str(client.get_relevant_versions(package_name)[1])
        except IndexError:
            _LOGGER.warning(
                f"No stable version for {package_name} on PyPi. Exiting...")
            exit(0)

    main(package_name, target_module, stable_version, in_venv, pkg_dir)
def main(
    input_parameter: str,
    version: Optional[str] = None,
    no_venv: bool = False,
    pypi: bool = False,
    last_pypi: bool = False,
    output: Optional[str] = None,
    metadata_path: Optional[str] = None,
):
    """Build Autorest code report(s) for a package, tracking written paths.

    When a PyPI version is requested (``version``/``pypi``/``last_pypi``)
    and venvs are allowed, each version is installed into a fresh venv and
    this same script is re-invoked inside it with ``--no-venv``; the
    subprocess reports the files it wrote via a temporary metadata JSON
    file, which is read back here and aggregated into the result.
    Otherwise reports are generated in-process for every autorest-generated
    module found and written under ``<package>/code_reports/<version>/``.

    :param input_parameter: package spec understood by parse_input.
    :param version: explicit version to report on; "latest" (installed
        code) when no PyPI lookup is requested.
    :param no_venv: never spawn a venv, even when a version is given.
    :param pypi: report on every version published on PyPI.
    :param last_pypi: report only on the most recent PyPI version.
    :param output: optional explicit output file path; falls back to
        report.json / merged_report.json inside the computed folder.
    :param metadata_path: when set (by the parent process), write a JSON
        ``{"reports_path": [...]}`` manifest of the produced reports here.
    :return: list of written report paths (as strings).
    """

    output_msg = output if output else "default folder"
    _LOGGER.info(
        f"Building code report of {input_parameter} for version {version} in {output_msg} ({no_venv}/{pypi}/{last_pypi})"
    )
    package_name, module_name = parse_input(input_parameter)
    path_to_package = resolve_package_directory(package_name)

    output_filename = ""
    result = []
    if (version or pypi or last_pypi) and not no_venv:
        if version:
            versions = [version]
        else:
            # Enumerate released versions on PyPI (oldest -> newest).
            _LOGGER.info(f"Download versions of {package_name} on PyPI")
            from pypi_tools.pypi import PyPIClient

            client = PyPIClient()
            versions = [
                str(v) for v in client.get_ordered_versions(package_name)
            ]
            _LOGGER.info(f"Got {versions}")
            if last_pypi:
                _LOGGER.info(f"Only keep last PyPI version")
                versions = [versions[-1]]

        for version in versions:
            _LOGGER.info(
                f"Installing version {version} of {package_name} in a venv")
            with create_venv_with_package([
                    f"{package_name}=={version}"
            ]) as venv, tempfile.TemporaryDirectory() as temp_dir:
                # Deliberately rebinds the metadata_path parameter: the
                # subprocess writes its manifest to this temp file, which
                # is read back below.
                metadata_path = str(Path(temp_dir, f"metadata_{version}.json"))
                # Re-run this very script with the venv's interpreter so
                # the report reflects the installed version.
                args = [
                    venv.env_exe,
                    __file__,
                    "--no-venv",
                    "--version",
                    version,
                    "--metadata",
                    metadata_path,
                    input_parameter,
                ]
                if output is not None:
                    args.append("--output=" + output)
                try:
                    subprocess.check_call(args)
                except subprocess.CalledProcessError:
                    # If it fail, just assume this version is too old to get an Autorest report
                    _LOGGER.warning(
                        f"Version {version} seems to be too old to build a report (probably not Autorest based)"
                    )
                # Files have been written by the subprocess
                # NOTE(review): if the subprocess failed before writing the
                # manifest, this open() raises — presumably acceptable here.
                with open(metadata_path, "r") as metadata_fd:
                    result.extend(json.load(metadata_fd)["reports_path"])
        # Files have been written by the subprocess
        return result

    modules = find_autorest_generated_folder(module_name)
    version = version or "latest"
    output_folder = Path(path_to_package) / Path("code_reports") / Path(
        version)
    output_folder.mkdir(parents=True, exist_ok=True)

    for module_name in modules:
        _LOGGER.info(f"Working on {module_name}")

        report = create_report(module_name)

        # Sub-modules get their own <module>.json; the top-level module
        # uses the explicit --output path or report.json.
        module_for_path = get_sub_module_part(package_name, module_name)
        if module_for_path:
            output_filename = output_folder / Path(module_for_path + ".json")
        else:
            if output is not None:
                output_filename = output
            else:
                output_filename = output_folder / Path("report.json")

        with open(output_filename, "w") as fd:
            json.dump(report, fd, indent=2)
            _LOGGER.info(f"Report written to {output_filename}")
        result.append(str(output_filename))

    # Combine the per-module reports; only the merged path is returned.
    if len(result) > 1:
        merged_report = merge_report(result)
        if output is not None:
            output_filename = output
        else:
            output_filename = output_folder / Path("merged_report.json")
        with open(output_filename, "w") as fd:
            json.dump(merged_report, fd, indent=2)
            _LOGGER.info(f"Merged report written to {output_filename}")
        result = [str(output_filename)]

    # Hand the produced paths back to a parent process, when asked to.
    if metadata_path:
        metadata = {"reports_path": result}  # Prepare metadata
        with open(metadata_path, "w") as metadata_fd:
            _LOGGER.info(f"Writing metadata: {metadata}")
            json.dump(metadata, metadata_fd)

    return result