def test_metadata(self):
    self.maxDiff = None
    extensions_dir = tempfile.mkdtemp()
    for ext_name, exts in self.index['extensions'].items():
        for item in exts:
            ext_dir = tempfile.mkdtemp(dir=extensions_dir)
            ext_file = get_whl_from_url(item['downloadUrl'], item['filename'],
                                        self.whl_cache_dir, self.whl_cache)
            metadata = get_ext_metadata(ext_dir, ext_file, ext_name)

            # Due to https://github.com/pypa/wheel/issues/195 we prevent whls built with 0.31.0 or greater.
            # 0.29.0, 0.30.0 are the two previous versions before that release.
            supported_generators = ['bdist_wheel (0.29.0)', 'bdist_wheel (0.30.0)']
            self.assertIn(metadata.get('generator'), supported_generators,
                          "{}: 'generator' should be one of {}. "
                          "Build the extension with a different version of the 'wheel' package "
                          "(e.g. `pip install wheel==0.30.0`). "
                          "This is due to https://github.com/pypa/wheel/issues/195".format(ext_name, supported_generators))

            self.assertDictEqual(metadata, item['metadata'],
                                 "Metadata for {} in index doesn't match the expected of: \n"
                                 "{}".format(item['filename'],
                                             json.dumps(metadata, indent=2, sort_keys=True,
                                                        separators=(',', ': '))))

            run_requires = metadata.get('run_requires')
            if run_requires:
                deps = run_requires[0]['requires']
                self.assertTrue(all(verify_dependency(dep) for dep in deps),
                                "Dependencies of {} use disallowed extension dependencies. "
                                "Remove these dependencies: {}".format(item['filename'], deps))
    shutil.rmtree(extensions_dir)
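
# A minimal sketch of the verify_dependency helper used above; this is an
# assumption, not the repo's actual implementation. It mirrors the inline
# check in the other tests: a dependency is allowed only if it does not pull
# in another azure- prefixed package.
def verify_dependency(dep):
    # 'dep' is a requirement string such as 'requests' or 'azure-mgmt-storage==2.0.0'
    return not dep.startswith('azure-')
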
def test_source_wheels(self):
    # Test we can build all sources into wheels and that metadata from the wheel is valid
    built_whl_dir = tempfile.mkdtemp()
    source_extensions = [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH)
                         if os.path.isdir(os.path.join(SRC_PATH, n))]
    for s in source_extensions:
        if not os.path.isfile(os.path.join(s, 'setup.py')):
            continue
        try:
            check_call(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir],
                       cwd=s, stdout=PIPE, stderr=PIPE)
        except CalledProcessError as err:
            self.fail("Unable to build extension {} : {}".format(s, err))
    for filename in os.listdir(built_whl_dir):
        ext_file = os.path.join(built_whl_dir, filename)
        ext_dir = tempfile.mkdtemp(dir=built_whl_dir)
        ext_name = WHEEL_INFO_RE(filename).groupdict().get('name')
        metadata = get_ext_metadata(ext_dir, ext_file, ext_name)
        run_requires = metadata.get('run_requires')
        if run_requires and ext_name not in SKIP_DEP_CHECK:
            deps = run_requires[0]['requires']
            self.assertTrue(all(not dep.startswith('azure-') for dep in deps),
                            "Dependencies of {} use disallowed extension dependencies. "
                            "Remove these dependencies: {}".format(filename, deps))
    shutil.rmtree(built_whl_dir)
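
# WHEEL_INFO_RE above is called like a function and returns a match object, so
# it is assumed to be the bound .match of a compiled regex over the PEP 427
# wheel filename format ({name}-{version}[-{build}]-{python}-{abi}-{platform}.whl).
# A rough sketch of such a pattern (the repo's actual regex may differ):
import re

WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
    ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl|\.dist-info)$""",
    re.VERBOSE).match

# e.g. WHEEL_INFO_RE('my_ext-0.1.0-py2.py3-none-any.whl').groupdict()['name'] == 'my_ext'
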
def test_metadata(self):
    self.maxDiff = None
    extensions_dir = tempfile.mkdtemp()
    for ext_name, exts in self.index['extensions'].items():
        for item in exts:
            ext_dir = tempfile.mkdtemp(dir=extensions_dir)
            ext_file = get_whl_from_url(item['downloadUrl'], item['filename'],
                                        self.whl_cache_dir, self.whl_cache)
            metadata = get_ext_metadata(ext_dir, ext_file, ext_name)

            self.assertDictEqual(metadata, item['metadata'],
                                 "Metadata for {} in index doesn't match the expected of: \n"
                                 "{}".format(item['filename'],
                                             json.dumps(metadata, indent=2, sort_keys=True,
                                                        separators=(',', ': '))))

            run_requires = metadata.get('run_requires')
            if run_requires and ext_name not in SKIP_DEP_CHECK:
                deps = run_requires[0]['requires']
                self.assertTrue(all(not dep.startswith('azure-') for dep in deps),
                                "Dependencies of {} use disallowed extension dependencies. "
                                "Remove these dependencies: {}".format(item['filename'], deps))
    shutil.rmtree(extensions_dir)
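
# SKIP_DEP_CHECK above is assumed to be a collection of extension names that
# are deliberately exempt from the azure- dependency rule; the entry below is
# a hypothetical placeholder, not taken from the repo:
SKIP_DEP_CHECK = ['my-exempt-extension']
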
def main():
    # Get extension WHL from URL
    whl_path = None
    try:
        whl_path = sys.argv[1]
    except IndexError:
        pass
    if not whl_path or not whl_path.endswith('.whl') or not whl_path.startswith('https:'):
        raise ValueError('incorrect usage: update_script <URL TO WHL FILE>')

    # Extract the extension name
    try:
        extension_name = re.findall(NAME_REGEX, whl_path)[0]
        extension_name = extension_name.replace('_', '-')
    except IndexError:
        raise ValueError('unable to parse extension name')

    extensions_dir = tempfile.mkdtemp()
    ext_dir = tempfile.mkdtemp(dir=extensions_dir)
    whl_cache_dir = tempfile.mkdtemp()
    whl_cache = {}
    ext_file = get_whl_from_url(whl_path, extension_name, whl_cache_dir, whl_cache)

    with open('./src/index.json', 'r') as infile:
        curr_index = json.loads(infile.read())

    try:
        entry = curr_index['extensions'][extension_name]
    except KeyError:
        # A missing dict key raises KeyError (not IndexError).
        raise ValueError('{} not found in index.json'.format(extension_name))

    entry[0]['downloadUrl'] = whl_path
    entry[0]['sha256Digest'] = get_sha256sum(ext_file)
    entry[0]['filename'] = whl_path.split('/')[-1]
    entry[0]['metadata'] = get_ext_metadata(ext_dir, ext_file, extension_name)

    # update index and write back to file
    curr_index['extensions'][extension_name] = entry
    with open('./src/index.json', 'w') as outfile:
        outfile.write(json.dumps(curr_index, indent=4, sort_keys=True))
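
# get_sha256sum above is assumed to compute the integrity digest recorded in
# index.json; a minimal sketch using hashlib:
import hashlib

def get_sha256sum(a_file):
    sha256 = hashlib.sha256()
    with open(a_file, 'rb') as f:
        sha256.update(f.read())
    return sha256.hexdigest()

# Hypothetical invocation of the update script (URL is illustrative only):
#   python update_script.py https://example.com/downloads/my_ext-0.1.0-py2.py3-none-any.whl
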
def test_source_wheels(self):
    # Test we can build all sources into wheels and that metadata from the wheel is valid
    built_whl_dir = tempfile.mkdtemp()
    source_extensions = [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH)
                         if os.path.isdir(os.path.join(SRC_PATH, n))]
    for s in source_extensions:
        if not os.path.isfile(os.path.join(s, 'setup.py')):
            continue
        try:
            check_output(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir], cwd=s)
        except CalledProcessError as err:
            self.fail("Unable to build extension {} : {}".format(s, err))
    for filename in os.listdir(built_whl_dir):
        ext_file = os.path.join(built_whl_dir, filename)
        ext_dir = tempfile.mkdtemp(dir=built_whl_dir)
        ext_name = WHEEL_INFO_RE(filename).groupdict().get('name')
        metadata = get_ext_metadata(ext_dir, ext_file, ext_name)
        run_requires = metadata.get('run_requires')
        if run_requires:
            deps = run_requires[0]['requires']
            self.assertTrue(all(verify_dependency(dep) for dep in deps),
                            "Dependencies of {} use disallowed extension dependencies. "
                            "Remove these dependencies: {}".format(filename, deps))
    shutil.rmtree(built_whl_dir)
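
# get_ext_metadata above is assumed to unpack the wheel and load the JSON
# metadata from its .dist-info directory (the 'run_requires' key asserted on
# in these tests lives in that file); a simplified sketch of that behaviour:
import json
import os
import zipfile

def get_ext_metadata(ext_dir, ext_file, ext_name):
    with zipfile.ZipFile(ext_file, 'r') as zip_ref:
        zip_ref.extractall(ext_dir)
    dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')]
    if not dist_info_dirs:
        raise ValueError('{}: no .dist-info directory found in {}'.format(ext_name, ext_file))
    metadata_path = os.path.join(ext_dir, dist_info_dirs[0], 'metadata.json')
    with open(metadata_path) as f:
        return json.load(f)
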
def test_metadata(self):
    skipable_extension_thresholds = {
        'ip-group': '0.1.2',
        'vm-repair': '0.3.1',
        'mixed-reality': '0.0.2',
        'subscription': '0.1.4',
        'managementpartner': '0.1.3',
        'log-analytics': '0.2.1'
    }

    historical_extensions = {
        'keyvault-preview': '0.1.3',
        'log-analytics': '0.2.1'
    }

    extensions_dir = tempfile.mkdtemp()
    for ext_name, exts in self.index['extensions'].items():
        # only test the latest version
        item = max(exts, key=lambda ext: version.parse(ext['metadata']['version']))
        ext_dir = tempfile.mkdtemp(dir=extensions_dir)
        ext_file = get_whl_from_url(item['downloadUrl'], item['filename'],
                                    self.whl_cache_dir, self.whl_cache)
        print(ext_file)

        ext_version = item['metadata']['version']
        try:
            metadata = get_ext_metadata(ext_dir, ext_file, ext_name)    # check file exists
        except ValueError as ex:
            if ext_name in skipable_extension_thresholds:
                threshold_version = skipable_extension_thresholds[ext_name]
                if version.parse(ext_version) <= version.parse(threshold_version):
                    continue
                else:
                    raise ex
            else:
                raise ex

        try:
            self.assertIn('azext.minCliCoreVersion', metadata)    # check key properties exists
        except AssertionError as ex:
            if ext_name in historical_extensions:
                threshold_version = historical_extensions[ext_name]
                if version.parse(ext_version) <= version.parse(threshold_version):
                    continue
                else:
                    raise ex
            else:
                raise ex

        # Due to https://github.com/pypa/wheel/issues/195 we prevent whls built with 0.31.0 or greater.
        # 0.29.0, 0.30.0 are the two previous versions before that release.
        supported_generators = ['bdist_wheel (0.29.0)', 'bdist_wheel (0.30.0)']
        self.assertIn(metadata.get('generator'), supported_generators,
                      "{}: 'generator' should be one of {}. "
                      "Build the extension with a different version of the 'wheel' package "
                      "(e.g. `pip install wheel==0.30.0`). "
                      "This is due to https://github.com/pypa/wheel/issues/195".format(ext_name, supported_generators))

        self.assertDictEqual(metadata, item['metadata'],
                             "Metadata for {} in index doesn't match the expected of: \n"
                             "{}".format(item['filename'],
                                         json.dumps(metadata, indent=2, sort_keys=True,
                                                    separators=(',', ': '))))

    shutil.rmtree(extensions_dir)
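
# get_whl_from_url above is assumed to download each wheel once and reuse it
# via the whl_cache dict on later calls; a rough sketch with requests (retry
# handling and error reporting in the real helper may differ):
import os
import requests

def get_whl_from_url(url, filename, tmp_dir, whl_cache=None):
    whl_cache = whl_cache if whl_cache is not None else {}
    if url in whl_cache:
        return whl_cache[url]
    r = requests.get(url, stream=True)
    assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code)
    ext_file = os.path.join(tmp_dir, filename)
    with open(ext_file, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive chunks
                f.write(chunk)
    whl_cache[url] = ext_file
    return ext_file
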