def add_extension(extensions):
    """Install one or more dev extensions into the active virtual environment.

    :param extensions: list of extension names to install, or ['*'] to
        install every extension found in the registered extension repos.
    :raises CLIError: if any requested extension cannot be found.
    """
    require_virtual_env()
    ext_paths = get_ext_repo_paths()
    all_extensions = find_files(ext_paths, 'setup.py')
    if extensions == ['*']:
        # install everything except packages already under site-packages
        paths_to_add = [os.path.dirname(path) for path in all_extensions
                        if 'site-packages' not in path]
    else:
        # work on a copy so the caller's list is not mutated
        remaining = list(extensions)
        paths_to_add = []
        for path in all_extensions:
            folder = os.path.dirname(path)
            long_name = os.path.basename(folder)
            if long_name in remaining:
                paths_to_add.append(folder)
                remaining.remove(long_name)
        # raise error if any extension wasn't found
        if remaining:
            raise CLIError('extension(s) not found: {}'.format(' '.join(remaining)))
    for path in paths_to_add:
        result = pip_cmd('install -e {}'.format(path), "Adding extension '{}'...".format(path))
        if result.error:
            raise result.error  # pylint: disable=raising-bad-type
def remove_extension(extensions):
    """Uninstall dev extensions and remove their egg-info/dist-info folders.

    :param extensions: list of installed extension names, or ['*'] to
        remove every installed dev extension.
    :raises CLIError: if any requested extension is not installed.
    """
    require_virtual_env()
    ext_paths = get_ext_repo_paths()
    installed_paths = find_files(ext_paths, '*.*-info')
    paths_to_remove = []
    names_to_remove = []
    if extensions == ['*']:
        paths_to_remove = [os.path.dirname(path) for path in installed_paths]
        names_to_remove = [os.path.basename(os.path.dirname(path)) for path in installed_paths]
    else:
        # work on a copy so the caller's list is not mutated
        remaining = list(extensions)
        for path in installed_paths:
            folder = os.path.dirname(path)
            long_name = os.path.basename(folder)
            if long_name in remaining:
                paths_to_remove.append(folder)
                names_to_remove.append(long_name)
                remaining.remove(long_name)
        # raise error if any extension not installed
        if remaining:
            raise CLIError('extension(s) not installed: {}'.format(' '.join(remaining)))
    # removes any links that may have been added to site-packages.
    for ext in names_to_remove:
        pip_cmd('uninstall {} -y'.format(ext))
    for path in paths_to_remove:
        for d in os.listdir(path):
            # delete the egg-info and dist-info folders to make the extension
            # invisible to the CLI and azdev
            if d.endswith('egg-info') or d.endswith('dist-info'):
                path_to_remove = os.path.join(path, d)
                display("Removing '{}'...".format(path_to_remove))
                shutil.rmtree(path_to_remove)
def build_extensions(extensions, dist_dir='dist'):
    """Build WHL packages for the given extensions.

    :param extensions: list of extension names to build.
    :param dist_dir: output folder for built wheels (relative to cwd).
    :raises CLIError: if any requested extension cannot be found.
    """
    require_virtual_env()
    ext_paths = get_ext_repo_paths()
    all_extensions = find_files(ext_paths, 'setup.py')
    # work on a copy so the caller's list is not mutated
    remaining = list(extensions)
    paths_to_build = []
    for path in all_extensions:
        folder = os.path.dirname(path)
        long_name = os.path.basename(folder)
        if long_name in remaining:
            paths_to_build.append(folder)
            remaining.remove(long_name)
    # raise error if any extension wasn't found
    if remaining:
        raise CLIError('extension(s) not found: {}'.format(' '.join(remaining)))
    original_cwd = os.getcwd()
    dist_dir = os.path.join(original_cwd, dist_dir)
    try:
        for path in paths_to_build:
            os.chdir(path)
            command = 'setup.py bdist_wheel -b bdist -d {}'.format(dist_dir)
            result = py_cmd(command, "Building extension '{}'...".format(path), is_module=False)
            if result.error:
                raise result.error  # pylint: disable=raising-bad-type
    finally:
        # always restore the working directory, even if a build fails
        os.chdir(original_cwd)
def create_extension(ext_name, azure_rest_api_specs=const.GITHUB_SWAGGER_REPO_URL, branch=None, use=None):
    """Scaffold a new CLI extension from an azure-rest-api-specs swagger README."""
    if branch and not azure_rest_api_specs.startswith('http'):
        raise CLIError('Cannot specify azure-rest-api-specs repo branch when using local one.')
    if not branch:
        # ask GitHub which branch is the default for the specs repo
        repo_info = json.load(request.urlopen('https://api.github.com/repos/Azure/azure-rest-api-specs'))
        branch = repo_info.get('default_branch')

    require_virtual_env()

    candidates = get_ext_repo_paths()
    ext_repo = next(
        (p for p in candidates
         if p.endswith(const.EXT_REPO_NAME) or p.endswith(const.EXT_REPO_NAME + '\\')),
        None)
    if not ext_repo:
        raise CLIError('Unable to find `{}` repo. Have you cloned it and added '
                       'with `azdev extension repo add`?'.format(const.EXT_REPO_NAME))
    if not os.path.isdir(ext_repo):
        raise CLIError("Invalid path {} in .azure config.".format(ext_repo))

    readme_path = _get_swagger_readme_file_path(ext_name, azure_rest_api_specs, branch)
    _generate_extension(ext_name, ext_repo, readme_path, use)
    _add_extension(ext_name, ext_repo)
    _display_success_message(ext_name, ext_name)
def generate_extension_ref_docs(output_dir=None, output_type=None):
    """Generate reference documentation for public CLI extensions."""
    require_virtual_env()
    # require that azure cli installed
    require_azure_cli()

    output_dir = _process_ref_doc_output_dir(output_dir)

    heading('Generate CLI Extensions Reference Docs')
    display("Docs will be placed in {}.".format(output_dir))
    display("Generating Docs for public extensions. Installed extensions will not be affected...")

    _generate_ref_docs_for_public_exts(output_type, output_dir)
    display("\nThe {} files are in {}".format(output_type, output_dir))
def remove_extension_repo(repos):
    """Remove extension repo path(s) from the configured dev sources.

    :param repos: list of repo paths to remove.
    :returns: the updated list of registered extension repos.
    """
    require_virtual_env()
    az_config = get_azure_config()
    env_config = get_azdev_config()
    dev_sources = az_config.get('extension', 'dev_sources', None)
    dev_sources = dev_sources.split(',') if dev_sources else []
    for repo in repos:
        try:
            dev_sources.remove(os.path.abspath(repo))
        except ValueError:
            # BUGFIX: list.remove raises ValueError (not CLIError) for a
            # missing element; catching CLIError let the error escape.
            logger.warning(
                "Repo '%s' was not found in the list of repositories to search.",
                os.path.abspath(repo))
    az_config.set_value('extension', 'dev_sources', ','.join(dev_sources))
    env_config.set_value('ext', 'repo_paths', ','.join(dev_sources))
    return list_extension_repos()
def add_extension_repo(repos):
    """Register extension repo path(s) as dev sources and return the updated list."""
    from azdev.operations.setup import _check_repo
    require_virtual_env()

    az_config = get_azure_config()
    env_config = get_azdev_config()
    sources = az_config.get('extension', 'dev_sources', None)
    sources = sources.split(',') if sources else []
    for repo in repos:
        abs_repo = os.path.abspath(repo)
        _check_repo(abs_repo)
        if abs_repo not in sources:
            sources.append(abs_repo)
    joined = ','.join(sources)
    az_config.set_value('extension', 'dev_sources', joined)
    env_config.set_value('ext', 'repo_paths', joined)
    return list_extension_repos()
def verify_versions():
    """Compare local CLI module versions against PyPI and report modules
    that need a version bump.

    Exits the process with status 1 if any module requires a bump.
    :raises CLIError: if no modules are available to check.
    """
    import tempfile
    import shutil
    require_virtual_env()
    require_azure_cli()

    heading('Verify CLI Versions')

    path_table = get_path_table()
    modules = list(path_table['core'].items())
    modules = [x for x in modules if x[0] not in EXCLUDED_MODULES]
    if not modules:
        raise CLIError('No modules selected to test.')
    display('MODULES: {}'.format(', '.join([x[0] for x in modules])))

    results = {}
    original_cwd = os.getcwd()
    temp_dir = tempfile.mkdtemp()
    try:
        for mod, mod_path in modules:
            if not mod.startswith(COMMAND_MODULE_PREFIX) and mod != 'azure-cli':
                mod = '{}{}'.format(COMMAND_MODULE_PREFIX, mod)
            results[mod] = {}
            results.update(_compare_module_against_pypi(results, temp_dir, mod, mod_path))
    finally:
        # always clean up the scratch dir and restore the working directory,
        # even if a comparison raises
        shutil.rmtree(temp_dir)
        os.chdir(original_cwd)

    logger.info('Module'.ljust(40) + 'Local Version'.rjust(20) + 'Public Version'.rjust(20))  # pylint: disable=logging-not-lazy
    for mod, data in results.items():
        logger.info(mod.ljust(40) + data['local_version'].rjust(20) + data['public_version'].rjust(20))

    bump_mods = {k: v for k, v in results.items() if v['status'] == 'BUMP'}
    subheading('RESULTS')
    if bump_mods:
        logger.error('The following modules need their versions bumped. '
                     'Scroll up for details: %s', ', '.join(bump_mods.keys()))
        # typo fix: "resuling" -> "resulting"
        logger.warning('\nNote that before changing versions, you should consider '
                       'running `git clean` to remove untracked files from your repo. '
                       'Files that were once tracked but removed from the source may '
                       'still be on your machine, resulting in false positives.')
        sys.exit(1)
    else:
        display('OK!')
def list_extensions():
    """Enumerate dev extensions across the registered repos with their install status."""
    from glob import glob
    require_virtual_env()

    azure_config = get_azure_config()
    dev_sources = azure_config.get('extension', 'dev_sources', None)
    dev_sources = dev_sources.split(',') if dev_sources else []
    installed_names = [x['name'] for x in _get_installed_dev_extensions(dev_sources)]

    results = []
    for ext_path in find_files(dev_sources, 'setup.py'):
        # skip non-extension packages that may be in the extension folder
        # (for example, from a virtual environment)
        probe = os.path.join(os.path.split(ext_path)[0], '{}*'.format(EXTENSION_PREFIX))
        if not glob(probe):
            continue
        # ignore anything in site-packages folder
        if 'site-packages' in ext_path:
            continue
        folder = os.path.dirname(ext_path)
        long_name = os.path.basename(folder)
        status = 'Installed' if long_name in installed_names else ''
        results.append({'name': long_name, 'install': status, 'path': folder})
    return results
def run_tests(tests, xml_path=None, discover=False, in_series=False,
              run_live=False, profile=None, last_failed=False, pytest_args=None,
              git_source=None, git_target=None, git_repo=None):
    """Run CLI tests, optionally filtered by a git diff, and summarize results."""
    require_virtual_env()

    DEFAULT_RESULT_FILE = 'test_results.xml'
    DEFAULT_RESULT_PATH = os.path.join(get_azdev_config_dir(), DEFAULT_RESULT_FILE)

    from .pytest_runner import get_test_runner

    heading('Run Tests')

    original_profile = _get_profile(profile)
    if not profile:
        profile = original_profile

    path_table = get_path_table()
    test_index = _get_test_index(profile, discover)

    if not tests:
        tests = list(path_table['mod'].keys()) + list(path_table['core'].keys()) + list(path_table['ext'].keys())
    if tests == ['CLI']:
        tests = list(path_table['mod'].keys()) + list(path_table['core'].keys())
    elif tests == ['EXT']:
        tests = list(path_table['ext'].keys())

    # filter out tests whose modules haven't changed
    tests = _filter_by_git_diff(tests, test_index, git_source, git_target, git_repo)

    if tests:
        display('\nTESTS: {}\n'.format(', '.join(tests)))

    # resolve the path at which to dump the XML results
    xml_path = xml_path or DEFAULT_RESULT_PATH
    if not xml_path.endswith('.xml'):
        xml_path = os.path.join(xml_path, DEFAULT_RESULT_FILE)

    # process environment variables
    if run_live:
        logger.warning('RUNNING TESTS LIVE')
        os.environ[ENV_VAR_TEST_LIVE] = 'True'

    def _find_test(index, name):
        # try progressively longer dotted-name suffixes until one is indexed
        parts = name.split('.')
        last_err = KeyError()
        for i in range(len(parts)):
            candidate = '.'.join(parts[(-1 - i):])
            try:
                found = index[candidate]
            except KeyError as ex:
                last_err = ex
                continue
            if candidate != name:
                logger.info("Test found using just '%s'. The rest of the name was ignored.\n", candidate)
            return found
        raise last_err

    # lookup test paths from index
    test_paths = []
    for t in tests:
        try:
            test_paths.append(os.path.normpath(_find_test(test_index, t)))
        except KeyError:
            logger.warning("'%s' not found. If newly added, re-run with --discover", t)
            continue

    # Tests have been collected. Now run them.
    if not test_paths:
        raise CLIError('No tests selected to run.')
    runner = get_test_runner(parallel=not in_series, log_path=xml_path, last_failed=last_failed)
    exit_code = runner(test_paths=test_paths, pytest_args=pytest_args)
    _summarize_test_results(xml_path)

    # attempt to restore the original profile
    if profile != original_profile:
        result = raw_cmd('az cloud update --profile {}'.format(original_profile),
                         "Restoring profile '{}'.".format(original_profile))
        if result.exit_code != 0:
            logger.warning("Failed to restore profile '%s'.", original_profile)

    sys.exit(0 if not exit_code else 1)
def setup(cli_path=None, ext_repo_path=None, ext=None, deps=None):
    """Set up the Azure CLI dev environment: install the CLI and any requested extensions."""
    require_virtual_env()

    start = time.time()

    heading('Azure CLI Dev Setup')

    ext_to_install = []
    if not any([cli_path, ext_repo_path, ext]):
        # nothing specified on the command line: prompt interactively
        cli_path, ext_repo_path, ext_to_install = _interactive_setup()
    else:
        if cli_path == "pypi":
            cli_path = None
        # otherwise assume programmatic setup
        if cli_path:
            CLI_SENTINEL = 'azure-cli.pyproj'
            if cli_path == Flag:
                cli_path = find_file(CLI_SENTINEL)
            if not cli_path:
                raise CLIError(
                    'Unable to locate your CLI repo. Things to check:'
                    '\n Ensure you have cloned the repo. '
                    '\n Specify the path explicitly with `-c PATH`. '
                    '\n If you run with `-c` to autodetect, ensure you are running '
                    'this command from a folder upstream of the repo.')
            if cli_path != 'EDGE':
                cli_path = _check_path(cli_path, CLI_SENTINEL)
            display('Azure CLI:\n {}\n'.format(cli_path))
        else:
            display('Azure CLI:\n PyPI\n')

        # must add the necessary repo to add an extension
        if ext and not ext_repo_path:
            raise CLIError('usage error: --repo EXT_REPO [EXT_REPO ...] [--ext EXT_NAME ...]')

        get_azure_config().set_value('extension', 'dev_sources', '')
        if ext_repo_path:
            # add extension repo(s)
            add_extension_repo(ext_repo_path)
            display('Azure CLI extension repos:\n {}'.format(
                '\n '.join([os.path.abspath(x) for x in ext_repo_path])))

        if ext == ['*']:
            ext_to_install = [x['path'] for x in list_extensions()]
        elif ext:
            # add extension(s)
            available_extensions = [x['name'] for x in list_extensions()]
            not_found = [x for x in ext if x not in available_extensions]
            if not_found:
                raise CLIError(
                    "The following extensions were not found. Ensure you have added "
                    "the repo using `--repo/-r PATH`.\n {}".format('\n '.join(not_found)))
            ext_to_install = [x['path'] for x in list_extensions() if x['name'] in ext]

    if ext_to_install:
        display('\nAzure CLI extensions:\n {}'.format('\n '.join(ext_to_install)))

    dev_sources = get_azure_config().get('extension', 'dev_sources', None)

    # save data to config files
    config = get_azdev_config()
    config.set_value('ext', 'repo_paths', dev_sources if dev_sources else '_NONE_')
    config.set_value('cli', 'repo_path', cli_path if cli_path else '_NONE_')

    # install packages
    subheading('Installing packages')

    # upgrade to latest pip
    pip_cmd('install --upgrade pip -q', 'Upgrading pip...')

    _install_cli(cli_path, deps=deps)
    _install_extensions(ext_to_install)
    _copy_config_files()

    end = time.time()
    elapsed_min = int((end - start) / 60)
    elapsed_sec = int(end - start) % 60
    display('\nElapsed time: {} min {} sec'.format(elapsed_min, elapsed_sec))

    subheading('Finished dev setup!')
def run_tests(tests, xml_path=None, discover=False, in_series=False,
              run_live=False, profile=None, last_failed=False, pytest_args=None,
              no_exit_first=False, git_source=None, git_target=None, git_repo=None,
              cli_ci=False):
    """Discover and run CLI tests under an optional profile context."""
    require_virtual_env()

    DEFAULT_RESULT_FILE = 'test_results.xml'
    DEFAULT_RESULT_PATH = os.path.join(get_azdev_config_dir(), DEFAULT_RESULT_FILE)

    heading('Run Tests')

    path_table = get_path_table()

    test_index = _get_test_index(profile or current_profile(), discover)

    if not tests:
        tests = list(path_table['mod'].keys()) + list(path_table['core'].keys()) + list(path_table['ext'].keys())
    if tests == ['CLI']:
        tests = list(path_table['mod'].keys()) + list(path_table['core'].keys())
    elif tests == ['EXT']:
        tests = list(path_table['ext'].keys())

    # filter out tests whose modules haven't changed
    modified_mods = _filter_by_git_diff(tests, test_index, git_source, git_target, git_repo)

    if modified_mods:
        display('\nTest on modules: {}\n'.format(', '.join(modified_mods)))

    if cli_ci is True:
        ctx = CLIAzureDevOpsContext(git_repo, git_source, git_target)
        modified_mods = ctx.filter(test_index)

    # resolve the path at which to dump the XML results
    xml_path = xml_path or DEFAULT_RESULT_PATH
    if not xml_path.endswith('.xml'):
        xml_path = os.path.join(xml_path, DEFAULT_RESULT_FILE)

    # process environment variables
    if run_live:
        logger.warning('RUNNING TESTS LIVE')
        os.environ[ENV_VAR_TEST_LIVE] = 'True'

    def _find_test(index, name):
        # match on the shortest dotted-name suffix present in the index
        parts = name.split('.')
        last_err = KeyError()
        for i in range(len(parts)):
            candidate = '.'.join(parts[(-1 - i):])
            try:
                found = index[candidate]
            except KeyError as ex:
                last_err = ex
                continue
            if candidate != name:
                logger.info("Test found using just '%s'. The rest of the name was ignored.\n", candidate)
            return found
        raise last_err

    # lookup test paths from index
    test_paths = []
    for t in modified_mods:
        try:
            test_paths.append(os.path.normpath(_find_test(test_index, t)))
        except KeyError:
            logger.warning("'%s' not found. If newly added, re-run with --discover", t)
            continue

    exit_code = 0

    # Tests have been collected. Now run them.
    if not test_paths:
        logger.warning('No tests selected to run.')
        sys.exit(exit_code)

    with ProfileContext(profile):
        runner = get_test_runner(parallel=not in_series, log_path=xml_path,
                                 last_failed=last_failed, no_exit_first=no_exit_first)
        exit_code = runner(test_paths=test_paths, pytest_args=pytest_args)

    sys.exit(0 if not exit_code else 1)
def publish_extensions(extensions, storage_account, storage_account_key,
                       storage_container, dist_dir='dist', update_index=False, yes=False):
    """Build extension WHLs and upload them to Azure blob storage.

    :param extensions: names of extensions to build and publish.
    :param storage_account: target storage account name.
    :param storage_account_key: access key for the storage account.
    :param storage_container: blob container to upload the WHLs to.
    :param dist_dir: local folder built wheels are placed in (cleared first).
    :param update_index: when True, update index.json with the uploaded URLs.
    :param yes: skip the overwrite-confirmation prompt.
    """
    from azure.storage.blob import BlockBlobService

    require_virtual_env()

    heading('Publish Extensions')

    require_azure_cli()

    # rebuild the extensions
    subheading('Building WHLs')
    try:
        shutil.rmtree(dist_dir)
    except Exception as ex:  # pylint: disable=broad-except
        # best effort: the folder may not exist yet
        logger.debug("Unable to clear folder '%s'. Error: %s", dist_dir, ex)
    build_extensions(extensions, dist_dir=dist_dir)

    whl_files = find_files(dist_dir, '*.whl')
    uploaded_urls = []

    subheading('Uploading WHLs')
    # the client is loop-invariant; create it once instead of per wheel
    client = BlockBlobService(account_name=storage_account, account_key=storage_account_key)
    for whl_path in whl_files:
        whl_file = os.path.split(whl_path)[-1]
        exists = client.exists(container_name=storage_container, blob_name=whl_file)

        # check if extension already exists unless user opted not to
        if not yes:
            if exists:
                if not prompt_y_n(
                        "{} already exists. You may need to bump the extension version. Replace?".format(whl_file),
                        default='n'):
                    logger.warning("Skipping '%s'...", whl_file)
                    continue
        # upload the WHL file
        client.create_blob_from_path(container_name=storage_container, blob_name=whl_file,
                                     file_path=os.path.abspath(whl_path))
        url = client.make_blob_url(container_name=storage_container, blob_name=whl_file)
        logger.info(url)
        uploaded_urls.append(url)

    if update_index:
        subheading('Updating Index')
        update_extension_index(uploaded_urls)
    subheading('Published WHLs')
    for url in uploaded_urls:
        display(url)
    if not update_index:
        logger.warning('You still need to update the index for your changes!')
        logger.warning(' az extension update-index <URL>')
def update_extension_index(extensions):
    """Add or update entries in the extension repo's index.json for the
    given published WHL URLs.

    :param extensions: list of URLs to published WHL files.
    :raises CLIError: if index.json cannot be found or a URL is unusable.
    """
    import re
    import tempfile

    from .util import get_ext_metadata, get_whl_from_url

    require_virtual_env()

    ext_repos = get_ext_repo_paths()
    index_path = next((x for x in find_files(ext_repos, 'index.json') if 'azure-cli-extensions' in x), None)
    if not index_path:
        raise CLIError("Unable to find 'index.json' in your extension repos. Have "
                       "you cloned 'azure-cli-extensions' and added it to you repo "
                       "sources with `azdev extension repo add`?")

    # BUGFIX: escape the dots so the version match is strict (previously
    # `\d+.\d+.\d+` let '.' match any character)
    NAME_REGEX = r'.*/([^/]*)-\d+\.\d+\.\d+'

    for extension in extensions:
        # Get the URL
        extension = extension[extension.index('https'):]
        # Get extension WHL from URL
        if not extension.endswith('.whl') or not extension.startswith('https:'):
            raise CLIError('usage error: only URL to a WHL file currently supported.')

        # TODO: extend to consider other options
        ext_path = extension

        # Extract the extension name
        try:
            extension_name = re.findall(NAME_REGEX, ext_path)[0]
            extension_name = extension_name.replace('_', '-')
        except IndexError as ex:
            # chain the original error for easier debugging
            raise CLIError('unable to parse extension name') from ex

        # TODO: Update this!
        extensions_dir = tempfile.mkdtemp()
        ext_dir = tempfile.mkdtemp(dir=extensions_dir)
        whl_cache_dir = tempfile.mkdtemp()
        whl_cache = {}
        ext_file = get_whl_from_url(ext_path, extension_name, whl_cache_dir, whl_cache)

        with open(index_path, 'r') as infile:
            curr_index = json.loads(infile.read())
        entry = {
            'downloadUrl': ext_path,
            'sha256Digest': _get_sha256sum(ext_file),
            'filename': ext_path.split('/')[-1],
            'metadata': get_ext_metadata(ext_dir, ext_file, extension_name)
        }

        # membership test directly on the dict instead of .keys()
        if extension_name not in curr_index['extensions']:
            logger.info("Adding '%s' to index...", extension_name)
            curr_index['extensions'][extension_name] = [entry]
        else:
            logger.info("Updating '%s' in index...", extension_name)
            curr_index['extensions'][extension_name].append(entry)

        # update index and write back to file
        with open(os.path.join(index_path), 'w') as outfile:
            outfile.write(json.dumps(curr_index, indent=4, sort_keys=True))
def list_extension_repos():
    """Return the configured extension dev-source repos as a list.

    Returns the raw (falsy) config value unchanged when no dev sources are set.
    """
    require_virtual_env()
    sources = get_azure_config().get('extension', 'dev_sources', None)
    if not sources:
        return sources
    return sources.split(',')