# Example #1
def run_main(args):
    """Entry point for the regression run.

    Resolves a temp directory, clones the azure-sdk-for-python repo into it
    (unless already cloned), builds the package dependency map, and runs a
    ``RegressionTest`` for every targeted package.

    :param args: parsed command-line arguments (expects ``temp_dir``,
        ``service``, ``glob_string``, ``whl_dir``, ``verify_latest``,
        ``mark_arg`` attributes).
    """
    # Resolve the temp directory: explicit --temp-dir wins, otherwise use a
    # sibling folder of root_dir named TEMP_FOLDER_NAME.
    if args.temp_dir:
        temp_dir = args.temp_dir
    else:
        temp_dir = os.path.abspath(
            os.path.join(root_dir, "..", TEMP_FOLDER_NAME))

    code_repo_root = os.path.join(temp_dir, GIT_REPO_NAME)
    # Make sure root_dir where script is running is not same as code repo which will be reverted to old released branch to run test
    if root_dir == code_repo_root:
        logging.error(
            "Invalid path to clone github code repo. Temporary path can not be same as current source root directory"
        )
        exit(1)

    # Make sure temp path exists. makedirs (rather than mkdir) so that a
    # user-supplied --temp-dir with missing parent directories still works,
    # and exist_ok avoids a racy exists-then-create check.
    os.makedirs(temp_dir, exist_ok=True)

    # Scope the package glob to a single service folder when requested.
    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    targeted_packages = process_glob_string(args.glob_string, target_dir, "",
                                            "Regression")
    # Nothing matched the glob: nothing to test, exit successfully.
    if len(targeted_packages) == 0:
        exit(0)

    # clone code repo only if it doesn't exist
    if not os.path.exists(code_repo_root):
        clone_repo(temp_dir, AZURE_SDK_FOR_PYTHON_GIT_URL)
    else:
        logging.info(
            "Path {} already exists. Skipping step to clone github repo".
            format(code_repo_root))

    # find package dependency map for azure sdk
    pkg_dependency = find_package_dependency(AZURE_GLOB_STRING, code_repo_root)

    # Create regression text context. One context object will be reused for all packages
    context = RegressionContext(args.whl_dir, temp_dir,
                                str_to_bool(args.verify_latest), args.mark_arg)

    for pkg_path in targeted_packages:
        context.init_for_pkg(pkg_path)
        RegressionTest(context, pkg_dependency).run()
    logging.info("Regression test is completed successfully")
def find_package_dependency(glob_string, repo_root_dir):
    """Build a map from each required azure package name to the list of
    package roots (under *repo_root_dir*) that declare it in install_requires.

    :param glob_string: glob used to locate candidate packages.
    :param repo_root_dir: root directory of the cloned code repo.
    :return: dict mapping azure package name -> list of dependent package paths.
    """
    dependency_map = {}
    for pkg_root in process_glob_string(glob_string, repo_root_dir, "",
                                        "Regression"):
        _, _, _, requires = parse_setup(pkg_root)

        # Names of azure-* packages pulled from this package's install requires.
        azure_requires = [
            name for name in (parse_require(req)[0] for req in requires)
            if name.startswith("azure")
        ]

        for dep_name in azure_requires:
            dependency_map.setdefault(dep_name, []).append(pkg_root)

    logging.info("Package dependency: {}".format(dependency_map))
    return dependency_map
# Example #3
def get_installed_packages(pkg_name_to_exclude):
    """Return the names of installed azure sdk packages that belong to this
    repo, excluding *pkg_name_to_exclude* and anything in EXCLUDED_PKGS.

    :param pkg_name_to_exclude: package name to filter out of the result.
    :return: list of installed azure package names.
    """
    # Names (version suffix stripped) of every installed azure-* distribution.
    installed_pkgs = [
        line.split("==")[0]
        for line in freeze.freeze()
        if line.startswith("azure-")
    ]

    # Valid Azure SDK packages in the repo; mgmt and namespace packages are
    # skipped.
    valid_azure_packages = [
        path.basename(pkg_path)
        for pkg_path in process_glob_string("", root_dir)
        if "mgmt" not in pkg_path and "-nspkg" not in pkg_path
    ]

    # Keep only repo packages, dropping the current package and any
    # explicitly excluded package.
    pkg_names = [
        name
        for name in installed_pkgs
        if name in valid_azure_packages
        and name != pkg_name_to_exclude
        and name not in EXCLUDED_PKGS
    ]

    logging.info("Installed azure sdk packages: %s", pkg_names)
    return pkg_names
def find_package_dependency(glob_string, repo_root_dir, dependent_service):
    """Build a map from each required azure package name to the package roots
    that depend on it, restricted to packages under one service folder.

    :param glob_string: glob used to locate candidate packages.
    :param repo_root_dir: root directory of the code repo.
    :param dependent_service: service folder name used to filter packages
        (matched as ``sdk/<service>`` inside each package path).
    :return: dict mapping azure package name -> list of dependent package paths.
    """
    service_path_fragment = os.path.join('sdk', dependent_service.lower())

    dependency_map = {}
    for pkg_root in process_glob_string(glob_string, repo_root_dir, "",
                                        "Regression"):
        # Only consider packages that live under the requested service folder.
        if service_path_fragment not in pkg_root:
            continue

        _, _, _, requires = parse_setup(pkg_root)

        # Names of azure-* packages pulled from this package's install requires.
        azure_requires = [
            name for name in (parse_require(req)[0] for req in requires)
            if name.startswith("azure")
        ]

        for dep_name in azure_requires:
            dependency_map.setdefault(dep_name, []).append(pkg_root)

    return dependency_map
        "-x",
        "--xdist",
        default=False,
        help=("Flag that enables xdist (requires pip install)"),
        action="store_true"
    )

    args = parser.parse_args()

    # We need to support both CI builds of everything and individual service
    # folders. This logic allows us to do both.
    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    targeted_packages = process_glob_string(args.glob_string, target_dir)
    extended_pytest_args = []

    if len(targeted_packages) == 0:
        exit(0)

    if args.xdist:
        extended_pytest_args.extend(["-n", "8", "--dist=loadscope"])

    if args.runtype != "none":
        execute_global_install_and_test(args, targeted_packages, extended_pytest_args)
    else:
        prep_and_run_tox(targeted_packages, args, extended_pytest_args)
# Example #6
        dest="package_filter_string",
        help=(
            "An additional string used to filter the set of artifacts by a simple CONTAINS clause."
        ),
    )

    args = parser.parse_args()

    # We need to support both CI builds of everything and individual service
    # folders. This logic allows us to do both.
    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    # Skip nspkg and metapackage from version check.
    # Change log file may be missing for these two types
    # process glob helper methods filter nspkg and metapackages with filter type "Docs"
    targeted_packages = process_glob_string(
        args.glob_string, target_dir, args.package_filter_string, "Docs"
    )
    change_missing = verify_packages(targeted_packages)
    if len(change_missing) > 0:
        logging.error("Below packages do not have change log")
        logging.error("***************************************************")
        for pkg_name in change_missing.keys():
            logging.error("{0} - {1}".format(pkg_name, change_missing[pkg_name]))

        sys.exit(1)
        dest="filter_type",
        default='Build',
        help="Filter type to identify eligible packages. for e.g. packages filtered in Build can pass filter type as Build,",
        choices=['Build', "Docs", "Regression", "Omit_management"]
    )


    args = parser.parse_args()

    # We need to support both CI builds of everything and individual service
    # folders. This logic allows us to do both.
    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    targeted_packages = process_glob_string(args.glob_string, target_dir, "", args.filter_type)
    extended_pytest_args = []

    if len(targeted_packages) == 0:
        exit(0)

    if args.xdist:
        extended_pytest_args.extend(["-n", "8", "--dist=loadscope"])

    if args.runtype != "none":
        execute_global_install_and_test(args, targeted_packages, extended_pytest_args)
    else:
        prep_and_run_tox(targeted_packages, args, extended_pytest_args)
# Example #8
def get_setup_py_paths(glob_string, base_path):
    """Return the setup.py path for every package matching *glob_string*
    under *base_path*, skipping excluded package roots.

    :param glob_string: glob used to locate candidate packages.
    :param base_path: directory to search beneath.
    :return: list of ``<package root>/setup.py`` paths.
    """
    result = []
    for pkg_root in process_glob_string(glob_string, base_path):
        if path_excluded(pkg_root):
            continue
        result.append(path.join(pkg_root, 'setup.py'))
    return result
        "--json",
        help=
        ("Location of the matrix configuration which has a DependentServices dimension object."
         ),
    )

    args = parser.parse_args()

    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    targeted_packages = [
        os.path.basename(path_name) for path_name in process_glob_string(
            args.glob_string, target_dir, "", "Regression")
    ]
    deps = find_package_dependency(AZURE_GLOB_STRING, root_dir, "")
    package_set = []

    for key in list(deps.keys()):
        if key not in targeted_packages:
            deps.pop(key)
        else:
            package_set.extend(deps[key])

    service_list = set([parse_service(pkg) for pkg in package_set])

    try:
        with open(args.json, "r") as f:
            settings_json = f.read()
# Example #10
        action="store_true",
        help="Flag that indicates to omit any management packages except any management packages that should not be filtered. for e.g azure-mgmt-core",
    )

    args = parser.parse_args()

    # We need to support both CI builds of everything and individual service
    # folders. This logic allows us to do both.
    if args.service:
        service_dir = os.path.join("sdk", args.service)
        target_dir = os.path.join(root_dir, service_dir)
    else:
        target_dir = root_dir

    if args.omit_management:
        targeted_packages = process_glob_string(args.glob_string, target_dir, "", "Omit_management")
    else:
        targeted_packages = process_glob_string(args.glob_string, target_dir)
    extended_pytest_args = []

    if len(targeted_packages) == 0:
        exit(0)

    if args.xdist:
        extended_pytest_args.extend(["-n", "8", "--dist=loadscope"])

    if args.runtype != "none":
        execute_global_install_and_test(args, targeted_packages, extended_pytest_args)
    else:
        prep_and_run_tox(targeted_packages, args, extended_pytest_args)
        default = 'python',
        help = 'The name of the python that should run the build. This is for usage in special cases like the "Special_Python_Distro_Tests" Job in /.azure-pipelines/client.yml. Defaults to "python"')

    parser.add_argument(
        'glob_string',
        nargs='?',
        help = ('A comma separated list of glob strings that will target the top level directories that contain packages.'
                'Examples: All = "azure-*", Single = "azure-keyvault", Targeted Multiple = "azure-keyvault,azure-mgmt-resource"'))

    parser.add_argument(
        '--junitxml',
        dest = 'test_results',
        help = ('The folder where the test results will be stored in xml format.'
                'Example: --junitxml="junit/test-results.xml"'))

    parser.add_argument(
        '--disablecov',
        help = ('Flag that disables code coverage.'),
        action='store_true')

    args = parser.parse_args()
    targeted_packages = process_glob_string(args.glob_string, root_dir)
    test_results_arg = []
    if args.test_results:
        test_results_arg.extend(['--junitxml', args.test_results])

    if args.disablecov:
        test_results_arg.append('--no-cov')

    prep_and_run_tests(targeted_packages, args.python_version, test_results_arg)