def _copy_vendored_sdk(src_path, dest_path):
    import shutil

    try:
        version_files = find_files(src_path, 'version.py')
        if not version_files:
            version_files = find_files(src_path, '_version.py')
        client_location = version_files[0]
    except IndexError:
        raise CLIError('Unable to find client files.')

    # copy the client files and folders to the root of vendored_sdks for easier access
    client_dir = os.path.dirname(client_location)
    shutil.rmtree(dest_path)
    shutil.copytree(client_dir, dest_path)
def _add_to_codeowners(repo_path, prefix, name, github_alias):
    # add the user's GitHub alias to the CODEOWNERS file for new packages
    if not github_alias:
        display('\nWhat is the GitHub alias of the person responsible for maintaining this package?')
        while not github_alias:
            github_alias = prompt('Alias: ')

    # accept a raw alias or @alias
    github_alias = '@{}'.format(github_alias) if not github_alias.startswith('@') else github_alias

    try:
        codeowners = find_files(repo_path, 'CODEOWNERS')[0]
    except IndexError:
        raise CLIError('unexpected error: unable to find CODEOWNERS file.')

    if prefix == EXTENSION_PREFIX:
        new_line = '/src/{}{}/ {}'.format(prefix, name, github_alias)
    else:
        # ensure Linux-style separators when run on Windows
        new_line = '/{} {}'.format(os.path.join('', _MODULE_ROOT_PATH, name, ''), github_alias).replace('\\', '/')

    with open(codeowners, 'a') as f:
        f.write(new_line)
        f.write('\n')
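# Illustrative sketch, not part of the original module: shows the CODEOWNERS line
# produced above for an extension package. The 'azext_' prefix stands in for
# EXTENSION_PREFIX, and the package name and alias are hypothetical.
_example_codeowners_line = '/src/{}{}/ {}'.format('azext_', 'widget', '@somealias')
assert _example_codeowners_line == '/src/azext_widget/ @somealias'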
def remove_extension(extensions):
    ext_paths = get_ext_repo_paths()
    installed_paths = find_files(ext_paths, '*.*-info')
    paths_to_remove = []
    names_to_remove = []
    if extensions == ['*']:
        paths_to_remove = [os.path.dirname(path) for path in installed_paths]
        names_to_remove = [os.path.basename(os.path.dirname(path)) for path in installed_paths]
    else:
        for path in installed_paths:
            folder = os.path.dirname(path)
            long_name = os.path.basename(folder)
            if long_name in extensions:
                paths_to_remove.append(folder)
                names_to_remove.append(long_name)
                extensions.remove(long_name)

        # raise error if any extension not installed
        if extensions:
            raise CLIError('extension(s) not installed: {}'.format(' '.join(extensions)))

    # removes any links that may have been added to site-packages
    for ext in names_to_remove:
        pip_cmd('uninstall {} -y'.format(ext))

    for path in paths_to_remove:
        for d in os.listdir(path):
            # delete the egg-info and dist-info folders to make the extension invisible to the CLI and azdev
            if d.endswith('egg-info') or d.endswith('dist-info'):
                path_to_remove = os.path.join(path, d)
                display("Removing '{}'...".format(path_to_remove))
                shutil.rmtree(path_to_remove)
def build_extensions(extensions, dist_dir='dist'):
    ext_paths = get_ext_repo_paths()
    all_extensions = find_files(ext_paths, 'setup.py')

    paths_to_build = []
    for path in all_extensions:
        folder = os.path.dirname(path)
        long_name = os.path.basename(folder)
        if long_name in extensions:
            paths_to_build.append(folder)
            extensions.remove(long_name)

    # raise error if any extension wasn't found
    if extensions:
        raise CLIError('extension(s) not found: {}'.format(' '.join(extensions)))

    original_cwd = os.getcwd()
    dist_dir = os.path.join(original_cwd, dist_dir)
    for path in paths_to_build:
        os.chdir(path)
        command = 'setup.py bdist_wheel -b bdist -d {}'.format(dist_dir)
        result = py_cmd(command, "Building extension '{}'...".format(path), is_module=False)
        if result.error:
            raise result.error  # pylint: disable=raising-bad-type
    os.chdir(original_cwd)
def add_extension(extensions):
    ext_paths = get_ext_repo_paths()
    all_extensions = find_files(ext_paths, 'setup.py')

    if extensions == ['*']:
        paths_to_add = [os.path.dirname(path) for path in all_extensions if 'site-packages' not in path]
    else:
        paths_to_add = []
        for path in all_extensions:
            folder = os.path.dirname(path)
            long_name = os.path.basename(folder)
            if long_name in extensions:
                paths_to_add.append(folder)
                extensions.remove(long_name)

        # raise error if any extension wasn't found
        if extensions:
            raise CLIError('extension(s) not found: {}'.format(' '.join(extensions)))

    for path in paths_to_add:
        result = pip_cmd('install -e {}'.format(path), "Adding extension '{}'...".format(path))
        if result.error:
            raise result.error  # pylint: disable=raising-bad-type
def publish_extensions(extensions, storage_subscription, storage_account, storage_container,
                       dist_dir='dist', update_index=False, yes=False):

    heading('Publish Extensions')

    require_azure_cli()

    # rebuild the extensions
    subheading('Building WHLs')
    try:
        shutil.rmtree(dist_dir)
    except Exception as ex:  # pylint: disable=broad-except
        logger.debug("Unable to clear folder '%s'. Error: %s", dist_dir, ex)
    build_extensions(extensions, dist_dir=dist_dir)

    whl_files = find_files(dist_dir, '*.whl')
    uploaded_urls = []

    subheading('Uploading WHLs')
    for whl_path in whl_files:
        whl_file = os.path.split(whl_path)[-1]

        # check whether the extension already exists, unless the user opted out of the check
        if not yes:
            command = 'az storage blob exists --subscription {} --account-name {} -c {} -n {}'.format(
                storage_subscription, storage_account, storage_container, whl_file)
            exists = json.loads(cmd(command).result)['exists']
            if exists:
                if not prompt_y_n(
                        "{} already exists. You may need to bump the extension version. Replace?".format(whl_file),
                        default='n'):
                    logger.warning("Skipping '%s'...", whl_file)
                    continue

        # upload the WHL file
        command = 'az storage blob upload --subscription {} --account-name {} -c {} -n {} -f {}'.format(
            storage_subscription, storage_account, storage_container, whl_file, os.path.abspath(whl_path))
        cmd(command, "Uploading '{}'...".format(whl_file))
        command = 'az storage blob url --subscription {} --account-name {} -c {} -n {} -otsv'.format(
            storage_subscription, storage_account, storage_container, whl_file)
        url = cmd(command).result
        logger.info(url)
        uploaded_urls.append(url)

    if update_index:
        subheading('Updating Index')
        update_extension_index(uploaded_urls)

    subheading('Published')
    display(uploaded_urls)
    if not update_index:
        logger.warning('You still need to update the index for your changes with `az extension update-index`.')
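# Illustrative sketch, not part of the original module: how the blob-existence check
# above renders into an `az` invocation. Every value below is hypothetical; the real
# call parses the command's JSON result and reads its 'exists' field.
_example_exists_cmd = 'az storage blob exists --subscription {} --account-name {} -c {} -n {}'.format(
    'my-subscription', 'mystorageaccount', 'extensions', 'widget-0.1.0-py2.py3-none-any.whl')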
def list_extensions():
    from glob import glob

    azure_config = get_azure_config()
    dev_sources = azure_config.get('extension', 'dev_sources', None)
    dev_sources = dev_sources.split(',') if dev_sources else []
    installed = _get_installed_dev_extensions(dev_sources)
    installed_names = [x['name'] for x in installed]
    results = []

    for ext_path in find_files(dev_sources, 'setup.py'):
        # skip non-extension packages that may be in the extension folder (for example, from a virtual environment)
        try:
            glob_pattern = os.path.join(os.path.split(ext_path)[0], '{}*'.format(EXTENSION_PREFIX))
            _ = glob(glob_pattern)[0]
        except IndexError:
            continue

        folder = os.path.dirname(ext_path)
        long_name = os.path.basename(folder)
        if long_name not in installed_names:
            results.append({'name': long_name, 'install': '', 'path': folder})
        else:
            results.append({'name': long_name, 'install': 'Installed', 'path': folder})

    return results
def update_extension_index(extension):
    import json
    import re
    import tempfile

    from .util import get_ext_metadata, get_whl_from_url

    ext_repos = get_ext_repo_paths()
    index_path = next((x for x in find_files(ext_repos, 'index.json') if 'azure-cli-extensions' in x), None)
    if not index_path:
        raise CLIError("Unable to find 'index.json' in your extension repos. Have "
                       "you cloned 'azure-cli-extensions' and added it to your repo "
                       "sources with `azdev extension repo add`?")

    NAME_REGEX = r'.*/([^/]*)-\d+.\d+.\d+'

    # Get extension WHL from URL
    if not extension.endswith('.whl') or not extension.startswith('https:'):
        raise ValueError('usage error: only a URL to a WHL file is currently supported.')
    # TODO: extend to consider other options
    ext_path = extension

    # Extract the extension name
    try:
        extension_name = re.findall(NAME_REGEX, ext_path)[0]
        extension_name = extension_name.replace('_', '-')
    except IndexError:
        raise ValueError('unable to parse extension name')

    extensions_dir = tempfile.mkdtemp()
    ext_dir = tempfile.mkdtemp(dir=extensions_dir)
    whl_cache_dir = tempfile.mkdtemp()
    whl_cache = {}
    ext_file = get_whl_from_url(ext_path, extension_name, whl_cache_dir, whl_cache)

    with open(index_path, 'r') as infile:
        curr_index = json.loads(infile.read())

    entry = {
        'downloadUrl': ext_path,
        'sha256Digest': _get_sha256sum(ext_file),
        'filename': ext_path.split('/')[-1],
        'metadata': get_ext_metadata(ext_dir, ext_file, extension_name)
    }

    if extension_name not in curr_index['extensions'].keys():
        logger.info("Adding '%s' to index...", extension_name)
        curr_index['extensions'][extension_name] = [entry]
    else:
        logger.info("Updating '%s' in index...", extension_name)
        curr_index['extensions'][extension_name].append(entry)

    # update the index and write it back to the file
    with open(index_path, 'w') as outfile:
        outfile.write(json.dumps(curr_index, indent=4, sort_keys=True))
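# Illustrative sketch, not part of the original module: the shape of the entry appended
# to index.json above. Every value below is hypothetical; 'metadata' holds whatever
# get_ext_metadata extracts from the wheel.
_example_index_entry = {
    'downloadUrl': 'https://example.com/widget-0.1.0-py2.py3-none-any.whl',
    'sha256Digest': '<sha256 hex digest of the wheel>',
    'filename': 'widget-0.1.0-py2.py3-none-any.whl',
    'metadata': {}
}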
def _add_to_doc_map(repo_path, name):
    try:
        doc_source_file = find_files(repo_path, 'doc_source_map.json')[0]
    except IndexError:
        raise CLIError('unexpected error: unable to find doc_source_map.json file.')

    doc_source = None
    with open(doc_source_file, 'r') as f:
        doc_source = json.loads(f.read())

    # ensure Linux-style separators when run on Windows
    doc_source[name] = str(os.path.join(_MODULE_ROOT_PATH, name, '_help.py')).replace('\\', '/')

    with open(doc_source_file, 'w') as f:
        f.write(json.dumps(doc_source, indent=4))
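# Illustrative sketch, not part of the original module: the mapping written to
# doc_source_map.json above, using a hypothetical module name and leaving
# _MODULE_ROOT_PATH as a placeholder rather than guessing its value.
_example_doc_source_entry = {'widget': '<_MODULE_ROOT_PATH>/widget/_help.py'}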
def _get_module_versions(results, modules):
    version_pattern = re.compile(r'.*(?P<ver>\d+.\d+.\d+).*')
    for mod, mod_path in modules:
        if not mod.startswith(COMMAND_MODULE_PREFIX) and mod != 'azure-cli':
            mod = '{}{}'.format(COMMAND_MODULE_PREFIX, mod)

        setup_path = find_files(mod_path, 'setup.py')
        with open(setup_path[0], 'r') as f:
            local_version = 'Unknown'
            for line in f.readlines():
                if line.strip().startswith('VERSION'):
                    local_version = version_pattern.match(line).group('ver')
                    break
            results[mod]['local_version'] = local_version
    return results
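# Illustrative sketch, not part of the original module: the kind of setup.py line the
# version pattern above extracts from. The version shown is hypothetical.
import re
_example_version_pattern = re.compile(r'.*(?P<ver>\d+.\d+.\d+).*')
assert _example_version_pattern.match('VERSION = "2.0.67"').group('ver') == '2.0.67'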
def update_setup_py(pin=False):

    require_azure_cli()

    heading('Update azure-cli setup.py')

    path_table = get_path_table()
    azure_cli_path = path_table['core']['azure-cli']
    azure_cli_setup_path = find_files(azure_cli_path, SETUP_PY_NAME)[0]

    modules = list(path_table['core'].items()) + list(path_table['mod'].items())
    modules = [x for x in modules if x[0] not in EXCLUDED_MODULES]

    results = {mod[0]: {} for mod in modules}
    results = _get_module_versions(results, modules)

    _update_setup_py(results, azure_cli_setup_path, pin)

    display('OK!')
def list_extensions():
    azure_config = get_azure_config()
    dev_sources = azure_config.get('extension', 'dev_sources', None)
    dev_sources = dev_sources.split(',') if dev_sources else []
    installed = _get_installed_dev_extensions(dev_sources)
    installed_names = [x['name'] for x in installed]
    results = []

    for ext_path in find_files(dev_sources, 'setup.py'):
        folder = os.path.dirname(ext_path)
        long_name = os.path.basename(folder)
        if long_name not in installed_names:
            results.append({'name': long_name, 'inst': '', 'path': folder})
        else:
            results.append({'name': long_name, 'inst': 'Y', 'path': folder})
    return results
def _check_setup_py(results, update, pin):
    # only audit or update setup.py when all modules are being considered;
    # otherwise, edge cases could arise
    if update is None:
        return results

    # retrieve current versions from azure-cli's setup.py file
    azure_cli_path = get_path_table(include_only='azure-cli')['core']['azure-cli']
    azure_cli_setup_path = find_files(azure_cli_path, SETUP_PY_NAME)[0]
    with open(azure_cli_setup_path, 'r') as f:
        setup_py_version_regex = re.compile(r"(?P<quote>[\"'])(?P<mod>[^'=]*)(==(?P<ver>[\d.]*))?(?P=quote)")
        for line in f.readlines():
            if line.strip().startswith("'azure-cli-"):
                match = setup_py_version_regex.match(line.strip())
                mod = match.group('mod')
                if mod == 'azure-cli-command-modules-nspkg':
                    mod = 'azure-cli-command_modules-nspkg'
                try:
                    results[mod]['setup_version'] = match.group('ver')
                except KeyError:
                    # anything that appears in setup.py but is not a module is
                    # inherently a mismatch
                    results[mod] = {
                        'local_version': 'Unavailable',
                        'public_version': 'Unknown',
                        'setup_version': match.group('ver'),
                        'status': 'MISMATCH'
                    }

    if update:
        _update_setup_py(results, azure_cli_setup_path, pin)
    else:
        display('\nAuditing azure-cli setup.py against local module versions...')
        for mod, data in results.items():
            if mod == 'azure-cli':
                continue
            setup_version = data['setup_version']
            if not setup_version:
                logger.warning('The azure-cli setup.py file is not using pinned versions. Aborting audit.')
                break
            elif setup_version != data['local_version']:
                data['status'] = 'MISMATCH'
    return results
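# Illustrative sketch, not part of the original module: the dependency regex above
# accepts both pinned and unpinned entries from azure-cli's setup.py. The version
# shown is hypothetical.
import re
_example_dep_regex = re.compile(r"(?P<quote>[\"'])(?P<mod>[^'=]*)(==(?P<ver>[\d.]*))?(?P=quote)")
assert _example_dep_regex.match("'azure-cli-core==2.0.67',").group('ver') == '2.0.67'
assert _example_dep_regex.match("'azure-cli-core',").group('ver') is None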
def publish_extensions(extensions, storage_account, storage_account_key, storage_container,
                       dist_dir='dist', update_index=False, yes=False):

    from azure.multiapi.storage.v2018_11_09.blob import BlockBlobService

    heading('Publish Extensions')

    require_azure_cli()

    # rebuild the extensions
    subheading('Building WHLs')
    try:
        shutil.rmtree(dist_dir)
    except Exception as ex:  # pylint: disable=broad-except
        logger.debug("Unable to clear folder '%s'. Error: %s", dist_dir, ex)
    build_extensions(extensions, dist_dir=dist_dir)

    whl_files = find_files(dist_dir, '*.whl')
    uploaded_urls = []

    subheading('Uploading WHLs')
    for whl_path in whl_files:
        whl_file = os.path.split(whl_path)[-1]
        client = BlockBlobService(account_name=storage_account, account_key=storage_account_key)
        exists = client.exists(container_name=storage_container, blob_name=whl_file)

        # check whether the extension already exists, unless the user opted out of the check
        if not yes:
            if exists:
                if not prompt_y_n(
                        "{} already exists. You may need to bump the extension version. Replace?".format(whl_file),
                        default='n'):
                    logger.warning("Skipping '%s'...", whl_file)
                    continue

        # upload the WHL file
        client.create_blob_from_path(container_name=storage_container, blob_name=whl_file,
                                     file_path=os.path.abspath(whl_path))
        url = client.make_blob_url(container_name=storage_container, blob_name=whl_file)
        logger.info(url)
        uploaded_urls.append(url)

    if update_index:
        subheading('Updating Index')
        update_extension_index(uploaded_urls)

    subheading('Published WHLs')
    for url in uploaded_urls:
        display(url)
    if not update_index:
        logger.warning('You still need to update the index for your changes!')
        logger.warning('    az extension update-index <URL>')
def _check_setup_py(results, update):
    # only audit or update setup.py when all modules are being considered;
    # otherwise, edge cases could arise
    if update is None:
        return results

    # retrieve current versions from azure-cli's setup.py file
    azure_cli_path = get_path_table(include_only='azure-cli')['core']['azure-cli']
    azure_cli_setup_path = find_files(azure_cli_path, SETUP_PY_NAME)[0]
    with open(azure_cli_setup_path, 'r') as f:
        setup_py_version_regex = re.compile(r"(?P<quote>[\"'])(?P<mod>[^'=]*)==(?P<ver>[\d.]*)(?P=quote)")
        for line in f.readlines():
            if line.strip().startswith("'azure-cli-"):
                match = setup_py_version_regex.match(line.strip())
                mod = match.group('mod')
                if mod == 'azure-cli-command-modules-nspkg':
                    mod = 'azure-cli-command_modules-nspkg'
                try:
                    results[mod]['setup_version'] = match.group('ver')
                except KeyError:
                    # anything that appears in setup.py but is not a module is
                    # inherently a mismatch
                    results[mod] = {
                        'local_version': 'Unavailable',
                        'public_version': 'Unknown',
                        'setup_version': match.group('ver'),
                        'status': 'MISMATCH'
                    }

    if not update:
        display('\nAuditing azure-cli setup.py against local module versions...')
        for mod, data in results.items():
            if mod == 'azure-cli':
                continue
            if data['local_version'] != data['setup_version']:
                data['status'] = 'MISMATCH'
        return results

    # update azure-cli's setup.py file with the collected versions
    logger.warning('\nUpdating azure-cli setup.py with collected module versions...')
    old_lines = []
    with open(azure_cli_setup_path, 'r') as f:
        old_lines = f.readlines()

    with open(azure_cli_setup_path, 'w') as f:
        start_line = 'DEPENDENCIES = ['
        end_line = ']'
        write_versions = False
        for line in old_lines:
            if line.strip() == start_line:
                write_versions = True
                f.write(line)
                for mod, data in results.items():
                    if mod == 'azure-cli' or data['local_version'] == 'Unavailable':
                        continue
                    f.write("    '{}=={}',\n".format(mod, data['local_version']))
                continue
            elif line.strip() == end_line:
                write_versions = False
            elif write_versions:
                # skip the existing pinned versions until the closing bracket is found
                continue
            f.write(line)
    return results
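# Illustrative sketch, not part of the original module: roughly what the rewritten
# DEPENDENCIES block in azure-cli's setup.py looks like after the update branch above
# runs. The package names are real azure-cli modules; the pinned versions are hypothetical.
_example_dependencies_block = (
    "DEPENDENCIES = [\n"
    "    'azure-cli-core==2.0.67',\n"
    "    'azure-cli-vm==2.2.0',\n"
    "]\n"
)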