def check_config(path, profile, expected_switches=[]):
    conf = content_of(path)
    assert_match('.*Build_Profile : Build_Profile_Kind := "%s"' % profile,
                 conf)
    for sw in expected_switches:
        assert_match('.*"%s"' % sw, conf)

def check_error(var_def, expected):
    make_manifest(var_def)
    p = run_alr('show', 'hello_world',
                complain_on_error=False, debug=False, quiet=True)
    assert_match('ERROR:.*' + expected + '\n', p.out)

def check(pin, error):
    """
    Insert a pin at the end of the manifest, verify that it is rejected, and
    remove it from the manifest. Check the error produced against the one
    given.
    """
    with open(alr_manifest(), "a") as manifest:
        manifest.write("\n[[pins]]\n" + pin + "\n")

    # Remove lockfile to ensure reload
    if os.path.exists(alr_lockfile()):
        os.remove(alr_lockfile())

    p = run_alr("pin", complain_on_error=False)
    assert p.status != 0, "Unexpected success for pin: " + pin
    assert_match(".*Cannot continue with invalid session.*" + error + ".*\n",
                 p.out)

    # Restore the manifest (drop the blank line, "[[pins]]" and the pin
    # appended above)
    lines = lines_of(alr_manifest())
    del lines[-3:]
    with open(alr_manifest(), "w") as manifest:
        manifest.write("".join(lines))

    # Verify the manifest is OK again
    run_alr("pin")

def bad_action_check(type, command, name, error_re):
    # Test in a new crate, as the manifest is going to be broken
    init_local_crate("abc")
    add_action(type=type, command=command, name=name)
    p = run_alr("show", complain_on_error=False)
    assert p.status != 0, "Unexpected success"
    assert_match(error_re, p.out)
    chdir("..")
    rmtree("abc")

def run(i, error):
    config_dir = 'alr-config-{}'.format(i)
    prepare_env(config_dir, os.environ)
    prepare_indexes(config_dir, '.',
                    {'bad_index_{}'.format(i): {'in_fixtures': False}})
    p = run_alr("search", "--crates", complain_on_error=False, debug=False)
    assert_match('ERROR: {}\n'.format(error), p.out)

def run(i, error):
    config_dir = 'alr-config-{}'.format(i)
    prepare_env(config_dir, os.environ)
    prepare_indexes(config_dir, '.',
                    {'bad_index_{}'.format(i): {'in_fixtures': False}})
    p = run_alr('list', complain_on_error=False, debug=False)
    assert_match('ERROR: {}\n'
                 'ERROR: alr encountered an unexpected error,'
                 ' re-run with -d for details.\n$'.format(error),
                 p.out)

def check_child(version, output, pinned):
    # Verify output
    assert_match('.*\n'
                 'Dependencies \(solution\):\n'
                 ' libchild=' + version +
                 (' \(pinned\)' if pinned else "") + '\n'
                 ' libparent=1\.0\.0\n'
                 '.*\n',
                 output, flags=re.S)

    # Verify lockfile
    check_line_in(alr_lockfile(), 'name = "libchild"')
    check_line_in(alr_lockfile(), f'version = "{version}"')

def check_run(release, match=""):
    p = run_alr('get', release, complain_on_error=True, quiet=True)

    # Enter working copy and try to run the default executable
    os.chdir(target)
    p = run_alr('run', complain_on_error=not match, quiet=not match)

    # Check output when a pattern was given:
    if match:
        assert_match(match, p.out, flags=re.S)
    # Otherwise the run already worked as expected

    os.chdir('..')
    shutil.rmtree(target)

def check_child(version, output, pinned):
    # Verify output
    assert_match('.*\n'
                 'Dependencies \(solution\):\n'
                 ' libchild=' + version +
                 (" \(pinned\)" if pinned else "") + '.*\n',
                 output, flags=re.S)

    # Verify lockfile
    check_line_in(alr_lockfile(), 'name = "libchild"')
    check_line_in(alr_lockfile(), f'version = "{version}"')

    # Verify dependency folders
    assert os.path.exists('alire/cache/dependencies/libchild_' + version +
                          '_filesystem')

def should_work(commit="", branch=""):
    os.mkdir("nest")
    os.chdir("nest")
    for crate in ["xxx", "yyy"]:
        init_local_crate(crate)
        alr_pin("zzz", url=url, commit=commit, branch=branch)
        os.chdir("..")
    os.chdir("xxx")
    alr_pin("yyy", path="../yyy")
    p = run_alr("pin")
    assert_match(escape("yyy file:../yyy") + ".*\n" +
                 escape("zzz file:alire/cache/pins/zzz") + ".*" + escape(url),
                 p.out)

    # Clean up for next trial
    os.chdir("..")
    os.chdir("..")
    git_blast("nest")

def do_checks(path_to_dependency):
    flag_post_fetch = path_to_dependency + "/test_post_fetch"
    flag_pre_build = path_to_dependency + "/test_pre_build"
    flag_post_build = path_to_dependency + "/test_post_build"

    # Immediately after adding the dependency, this is the situation:
    check(flag_post_fetch, True)
    check(flag_pre_build, False)
    check(flag_post_build, False)

    # Remove post-fetch to check it doesn't come back unexpectedly
    os.remove(flag_post_fetch)

    # Build with error, so only pre-build runs but not post-build
    Path(f"{path_to_dependency}/src/empty.adb").touch()
    p = run_alr('build', complain_on_error=False)
    assert_match(".*compilation of empty.adb failed.*", p.out)

    # Post-build shouldn't be here because of the build failure
    check(flag_post_fetch, False)
    check(flag_pre_build, True)
    check(flag_post_build, False)

    os.remove(flag_pre_build)
    os.remove(f"{path_to_dependency}/src/empty.adb")

    # Build without error
    run_alr('build')

    # pre/post-build expected for a successful build
    check(flag_post_fetch, False)
    check(flag_pre_build, True)
    check(flag_post_build, True)

    return  # NOTE: the update checks below are not reached due to this return

    # Updating dependencies causes the post-fetch action to run:
    run_alr('update')
    check(flag_post_fetch, True)
    check(flag_pre_build, True)
    check(flag_post_build, True)

def should_not_work(commits=["", ""], branches=["", ""], match_error="FAIL"):
    # Commits and branches must contain two values that go into each crate pin
    os.mkdir("nest")
    os.chdir("nest")
    crates = ["xxx", "yyy"]
    for i in [0, 1]:
        init_local_crate(crates[i])
        alr_pin("zzz", url=url, commit=commits[i], branch=branches[i])
        os.chdir("..")
    os.chdir("xxx")
    alr_pin("yyy", path="../yyy", update=False)
    p = run_alr("pin", complain_on_error=False)
    assert_match(match_error, p.out)

    # Clean up for next trial
    os.chdir("..")
    os.chdir("..")
    shutil.rmtree("nest")

import os
import re

from drivers.alr import run_alr
from drivers.asserts import assert_match

# Initialize project
run_alr('init', '--bin', 'xxx')
os.chdir('xxx')

# Add dependency on hello^1. Solution is hello=1.0.1 --> libhello=1.1.0
run_alr('with', 'hello^1')

# Add dependency on superhello*. Solution is superhello=1.0 --> libhello=1.0.1
# This implies a downgrade from libhello=1.1.0 to libhello=1.0.1, which is the
# only possible combination of libhello^1.0 & libhello~1.0
run_alr('with', 'superhello')

# Add more dependencies, without a proper release
run_alr('with', 'wip', '--use', '/fake')
run_alr('with', 'unobtanium', '--force')

# Verify printout (except for the test-dependent path)
# Note that superhello was auto-narrowed down to ^1, but the missed ones
# were not
p = run_alr('with', '--tree')
assert_match(re.escape('''xxx=0.0.0
+-- hello=1.0.1 (^1)
| +-- libhello=1.0.1 (^1.0)
+-- superhello=1.0.0 (^1.0.0)
| +-- libhello=1.0.1 (~1.0)
+-- unobtanium* (direct,missed) (*)
+-- wip* (direct,linked,pin=''') + '.*' + re.escape(') (*)'),
             p.out, flags=re.S)

print('SUCCESS')

""" Check that a bad crate file is warned about """ from drivers.alr import run_alr from drivers.asserts import assert_match import os import re # Create a new crate run_alr('init', '--bin', 'xxx') # And muck its tomlfile os.chdir('xxx') with open("alire.toml", "a") as myfile: myfile.write("SHOULND'T BE HERE") # Verify that the expected error is given p = run_alr('show', complain_on_error=False) assert_match('.*Cannot continue with invalid session.*' 'Failed to load.*', p.out, flags=re.S) print('SUCCESS')
from glob import glob
from drivers.alr import run_alr
from drivers.asserts import assert_match

import re
import platform

# 1st test: showing available information on all platforms
p = run_alr('show', 'crate', '--external')
assert_match(".*Executable make --version .*"
             "(case Toolchain is SYSTEM => False, USER => False).*",
             p.out, flags=re.S)

# 2nd test: showing available information on the current platform
p = run_alr('show', 'crate', '--external', '--system')
assert_match(".*Executable make --version .* False.*", p.out, flags=re.S)

# 3rd test: the crate is not detected because it is unavailable. It would be
# detectable otherwise (make is installed in all test images)
p = run_alr('show', '--no-tty', 'crate', '--external-detect', quiet=False)
assert_match("Not found: crate\*.*", p.out, flags=re.S)

# Tests with different default arguments that must all succeed
run_alr("--force", "publish")
verify_manifest()
run_alr("--force", "publish", ".")
verify_manifest()
run_alr("--force", "publish", ".", "master")
verify_manifest()
run_alr("--force", "publish", ".", "HEAD")
verify_manifest()

# Verify that a dirty repo precludes publishing
with open("lasagna", "wt") as file:
    file.write("wanted\n")
assert run(["git", "add", "lasagna"]).returncode == 0
p = run_alr("--force", "publish", complain_on_error=False)
assert_match(".*You have unstaged changes.*", p.out)

# Even if changes are committed but not pushed
assert run(["git", "add", "."]).returncode == 0
assert run(["git", "commit", "-a", "-m", "please"]).returncode == 0
p = run_alr("--force", "publish", complain_on_error=False)
assert_match(".*Your branch is ahead of remote.*", p.out)

print('SUCCESS')

import os
import re

from drivers.alr import run_alr
from drivers.asserts import assert_match
from drivers.helpers import dir_separator
from glob import glob

# Retrieve a crate
run_alr('get', 'hello=1')
target = glob('hello*')[0]

# Initialize a workspace
run_alr('init', '--bin', 'xxx')
os.chdir('xxx')

# Pin the hello crate as a local dir dependency
run_alr('with', 'hello', '--use', '..' + dir_separator() + target)

# Verify that hello dependencies are detected and used
p = run_alr('with', '--solve')
assert_match('''.*Dependencies \(solution\):
 hello=1\.0\.0 .*
 libhello=1\.1\.0 .*''',  # we skip non-relevant details
             p.out, flags=re.S)

print('SUCCESS')

""" Test alr edit with two project files defined. """ from glob import glob from drivers.alr import run_alr from drivers.asserts import assert_match import os import shutil # Get the "libhello" project and enter its directory run_alr('get', 'libhello') os.chdir(glob('libhello*')[0]) gs = shutil.which('gnatstudio') if gs is None: # GNATstudio not in PATH: Check that we get an error saying GS not # available p = run_alr('edit', complain_on_error=False) assert_match(".*GNATstudio not available or not in PATH.*", p.out) else: # GNATstudio in PATH: Check that we get an error when multiple project # files are defined p = run_alr('edit', complain_on_error=False) assert_match(".*Please specify a project file with --project=.*", p.out) print('SUCCESS')
"""
Check that updating an incomplete solution is doable, resulting in no changes
"""

import re
import os

from drivers.alr import run_alr
from drivers.asserts import assert_match
from glob import glob

# Add a dependency and force it missing by pinning it to a non-existing
# version
run_alr('init', '--bin', 'xxx')
os.chdir('xxx')
run_alr('with', 'libhello')
run_alr('pin', '--force', 'libhello=3')

# See that updating succeeds
run_alr('update')

# Check that the solution is still the expected one, and also that the
# original dependency is included in the restrictions
p = run_alr('with', '--solve')
assert_match(
    '.*Dependencies \(external\):\n'
    ' ' + re.escape('libhello(=3.0.0) & (^2.0.0) (direct,missed,pin=3.0.0)') +
    '.*',
    p.out, flags=re.S)

print('SUCCESS')

import os
import re

from drivers.alr import run_alr
from drivers.asserts import assert_match
from glob import glob

# Initialize test crate
run_alr('init', '--bin', 'xxx')
os.chdir('xxx')

# 1st test, adding an entirely nonexistent crate
run_alr('with', 'unobtanium', '--force')

# 2nd test, adding a dependency that exists but with a missing version
run_alr('with', 'libhello^3', '--force')

# 3rd test, adding a dependency that has missing dependencies
run_alr('with', 'hello^3', '--force')

# Check that the solution contains the requested dependencies
p = run_alr('with', '--solve')
assert_match('.*Dependencies \(solution\):\n'
             ' hello=3\.0\.0.*'        # skip origin
             'Dependencies \(external\):\n'
             ' libhello\^3.*'          # skip flags
             ' unobtanium\*.*',
             p.out, flags=re.S)

print('SUCCESS')

""" Check behavior of unknown enum values in manifests """ from drivers.alr import run_alr from drivers.asserts import assert_match import glob import os # Verify that we can list the index, despite containing an unknown distro value run_alr("search", "--crates") # Verify that checking the index strictly does fail p = run_alr("index", "--check", complain_on_error=False) assert_match(".*invalid enumeration value:.*", p.out) # Verify that we can retrieve and load such a crate's manifest run_alr("get", "crate") os.chdir(glob.glob("crate_*")[0]) run_alr("show") # Verify that adding bad values to our local manifest won't slip by with open("alire.toml", "at") as manifest: manifest.write(""" [available.'case(distribution)'] ubuntu = true nonexistent-distro = false\n""") p = run_alr("show", complain_on_error=False) assert_match(".*invalid enumeration value:.*", p.out)
# Run it not quietly, to ensure that at the normal verbosity level the output
# is not broken by some log message
p = run_alr('printenv', quiet=False)
assert_eq(0, p.status)

expected_gpr_path = []
expected_gpr_path += [['.*', 'hello_1.0.1_filesystem']]
expected_gpr_path += [['.*', 'alire', 'cache', 'dependencies',
                       'libhello_1\.0\.0_filesystem']]

# Join the path components with the platform separator (escaped for use in a
# regex on Windows)
for i, path in enumerate(expected_gpr_path):
    if platform.system() == 'Windows':
        expected_gpr_path[i] = "\\\\".join(path)
    else:
        expected_gpr_path[i] = "/".join(path)

expected_gpr_path = os.pathsep.join(expected_gpr_path)

assert_match('export ALIRE="True"\n'
             '.*'
             'export GPR_PROJECT_PATH="' + expected_gpr_path + '"\n'
             '.*'
             'export TEST_GPR_EXTERNAL="gpr_ext_B"\n'
             '.*',
             p.out, flags=re.S)

print('SUCCESS')

from drivers.alr import run_alr
from drivers.asserts import assert_match
from drivers.helpers import contents, content_of, init_git_repo, zip_dir
from shutil import copyfile, rmtree
from zipfile import ZipFile

import os

# Prepare a repo and a zipball to be used as "remote", without a manifest
run_alr("init", "--bin", "xxx")

# Remove the alire cache
rmtree(os.path.join("xxx", "alire"))

# Remove the manifest
os.remove(os.path.join("xxx", "alire.toml"))

# Create the zip
zip_dir("xxx", "xxx.zip")

# A "remote" source archive. We force to allow the test to skip the remote
# check. Curl requires an absolute path to work.
target = os.path.join(os.getcwd(), "xxx.zip")
p = run_alr("publish", f"file:{target}", "--force", "--skip-build",
            complain_on_error=False)

# Should fail reporting the missing manifest
assert_match(".*Remote sources are missing the 'alire.toml' manifest file.*",
             p.out)

print('SUCCESS')

def check_output(dump):
    assert_match('''stderr: PROGRAM_ERROR
stderr: Raising forcibly
stderr: raised PROGRAM_ERROR : Raising forcibly.*
''',
                 dump)

import os
import re

from glob import glob
from drivers.alr import run_alr
from drivers.asserts import assert_eq, assert_match

# Get the "hello" project and enter its directory, without solving
# dependencies
run_alr('get', 'hello', '--only')
os.chdir(glob('hello*')[0])

# Verify that it has no solution
p = run_alr('with', '--solve')
assert_eq('Dependencies (direct):\n'
          ' libhello^1.0\n'
          'Dependencies (solution):\n'
          ' No solving attempted\n',
          p.out)

# Verify that it has no pins
p = run_alr('pin')
assert_eq('There are no pins\n', p.out)

# Verify that updating it fixes the solution
run_alr('update')
p = run_alr('with', '--solve')
assert_match('.*\n'                          # Skip dependencies
             'Dependencies \(solution\):\n'
             ' libhello=1\.0\.0.*\n'
             '.*',                           # Skip graph
             p.out, flags=re.S)

print('SUCCESS')

# Get and check post-fetch action
run_alr('get', 'hello_world')
os.chdir("hello_world_0.1.0_filesystem/")

check_expected('./test_post_fetch')
check_not_expected('./test_pre_build')
check_not_expected('./test_post_build')

# Remove post-fetch to check it doesn't come back
os.remove('./test_post_fetch')

# Build with error
os.mkdir('src')
Path('src/empty.adb').touch()
p = run_alr('build', complain_on_error=False)
assert_match(".*compilation of empty.adb failed.*", p.out)

# Post-build shouldn't be here because of the build failure
check_not_expected('./test_post_fetch')
check_expected('./test_pre_build')
check_not_expected('./test_post_build')

os.remove('./test_pre_build')
os.remove('src/empty.adb')

# Build without error
run_alr('build', complain_on_error=False)

# pre/post-build expected for a successful build
check_not_expected('./test_post_fetch')
check_expected('./test_pre_build')

def check_config(path, profile):
    conf = content_of(path)
    assert_match('.*Build_Profile : Build_Profile_Kind := "%s"' % profile,
                 conf)

""" Tests that the "trusted repos" list is applied """ from drivers.alr import run_alr from drivers.asserts import assert_match # Try with obvious bad site and slight variations before/after for domain in ["badsite.com", "ggithub.com", "github.comm"]: p = run_alr("publish", f"http://{domain}/repo.git", "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef", complain_on_error=False) assert_match(f".*Origin is hosted on unknown site: {domain}.*", p.out) # Try that having credentials doesn't interfere with the previous check and # that the domain was recognized properly for domain in ["badsite.com", "ggithub.com", "github.comm"]: for creds in ["user", "user:passwd"]: p = run_alr("publish", f"http://{creds}@{domain}/repo.git", "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef", complain_on_error=False) assert_match(f".*Origin is hosted on unknown site: {domain}.*", p.out) print('SUCCESS')
def make_path(parts):
    # Join path components, escaping the separator for regex use on Windows
    if platform.system() == 'Windows':
        return "\\\\".join(parts)
    else:
        return "/".join(parts)


expected_hello_path = make_path(['.*', 'hello_1.0.1_filesystem'])
expected_libhello_path = make_path(
    ['.*', 'alire', 'cache', 'dependencies', 'libhello_1\.0\.0_filesystem'])

expected_gpr_path = os.pathsep.join(
    [expected_hello_path, expected_libhello_path])

assert_match('export ALIRE="True"\n'
             '.*'
             'export GPR_PROJECT_PATH="' + expected_gpr_path + '"\n'
             '.*'
             'export HELLO_ALIRE_PREFIX="' + expected_hello_path + '"\n'
             '.*'
             'export LIBHELLO_ALIRE_PREFIX="' + expected_libhello_path + '"\n'
             '.*'
             'export TEST_GPR_EXTERNAL="gpr_ext_B"\n'
             '.*',
             p.out, flags=re.S)

print('SUCCESS')

""" Check detection of manifest in wrong shelf """ import re from drivers.alr import run_alr from drivers.asserts import assert_match p = run_alr("list", complain_on_error=False) assert_match('.*ERROR: Mismatch between manifest and shelf:.*', p.out, flags=re.S) print('SUCCESS')