def test_unreachable_file():
    files = {
        'root.file': """
from fo.o import aa
from bar import bb
""",
        'bar': '[ap] = 2'
    }

    # Failed to load an imported (non-root) module.
    with pytest.raises(ImportLoaderError) as e:
        collect_imports('root.file', read_file_from_dict(files))
    assert f"""
{get_location_marks(files['root.file'], e.value.location)}
{e.value.message}
""".startswith("""
from fo.o import aa
     ^**^
Could not load module 'fo.o'.
Error: """)

    # Failed to load the root module.
    with pytest.raises(ImportLoaderError) as e:
        collect_imports('bad.root', read_file_from_dict(files))
    assert e.value.message.startswith("Could not load module 'bad.root'.")

def test_shallow_tree_graph():
    files = {
        'root.file': """
from a import aa
from b import bb
""",
        'a': '[ap] = 1',
        'b': '[ap] = 2'
    }

    expected_res = {name: parse_file(code) for name, code in files.items()}
    assert collect_imports('root.file',
                           read_file_from_dict(files)) == expected_res
    assert set(collect_imports('a',
                               read_file_from_dict(files)).keys()) == {'a'}

def verify_exception(code: str, error: str, exc_type=PreprocessorError):
    return generic_verify_exception(
        code=code,
        error=error,
        pass_manager=starknet_pass_manager(
            prime=DEFAULT_PRIME,
            read_module=read_file_from_dict(TEST_MODULES)),
        exc_type=exc_type)

def preprocess_str(code: str) -> StarknetPreprocessedProgram:
    preprocessed = preprocess_str_ex(
        code=code,
        pass_manager=starknet_pass_manager(
            prime=DEFAULT_PRIME,
            read_module=read_file_from_dict(TEST_MODULES)))
    assert isinstance(preprocessed, StarknetPreprocessedProgram)
    return preprocessed

def verify_exception(files: Dict[str, str], main_file: str, error: str):
    """
    Verifies that parsing the code results in the given error.
    """
    with pytest.raises(LocationError) as e:
        collect_imports(main_file, read_file_from_dict(files))
    # Remove line and column information from the error using a regular expression.
    assert re.sub(':[0-9]+:[0-9]+: ', ':?:?: ', str(e.value)) == error.strip()

def test_circular_dep():
    # Singleton circle.
    with pytest.raises(UsingCycleError) as e:
        collect_imports('a', read_file_from_dict({'a': 'from a import b'}))
    assert str(e.value) == """\
Found circular imports dependency:
a imports
a"""

    # Big circle.
    with pytest.raises(UsingCycleError) as e:
        collect_imports(
            'a0',
            read_file_from_dict(
                {f'a{i}': f'from a{(i+1) % 9} import b'
                 for i in range(10)}))
    assert str(e.value) == """\
Found circular imports dependency:
a0 imports
a1 imports
a2 imports
a3 imports
a4 imports
a5 imports
a6 imports
a7 imports
a8 imports
a0"""

def test_unparsable_import():
    files = {
        'root.file': """
from foo import bar
""",
        'foo': 'this is not cairo code'
    }

    with pytest.raises(ParserError):
        collect_imports('root.file', read_file_from_dict(files))

def test_import_identifiers():
    # Define files used in this test.
    files = {
        '.': """
from a.b.c import alpha as x
from a.b.c import beta
from a.b.c import xi
""",
        'a.b.c': """
from tau import xi
const alpha = 0
const beta = 1
const gamma = 2
""",
        'tau': """
const xi = 42
"""
    }

    # Prepare auxiliary functions for tests.
    scope = ScopedName.from_string

    def get_full_name(name, curr_scope=''):
        try:
            return program.identifiers.search(
                accessible_scopes=[scope(curr_scope)],
                name=scope(name)).get_canonical_name()
        except IdentifierError:
            return None

    # Preprocess program.
    program = preprocess_codes(codes=[(files['.'], '.')],
                               prime=PRIME,
                               read_module=read_file_from_dict(files),
                               main_scope=scope('__main__'))

    # Verify identifiers are resolved correctly.
    assert get_full_name('x', '__main__') == scope('a.b.c.alpha')
    assert get_full_name('beta', '__main__') == scope('a.b.c.beta')
    assert get_full_name('xi', '__main__') == scope('tau.xi')

    assert get_full_name('alpha', 'a.b.c') == scope('a.b.c.alpha')
    assert get_full_name('beta', 'a.b.c') == scope('a.b.c.beta')
    assert get_full_name('gamma', 'a.b.c') == scope('a.b.c.gamma')
    assert get_full_name('xi', 'a.b.c') == scope('tau.xi')

    assert get_full_name('xi', 'tau') == scope('tau.xi')

    # Verify inaccessible identifiers.
    assert get_full_name('alpha', '__main__') is None
    assert get_full_name('gamma', '__main__') is None
    assert get_full_name('a.b.c.alpha', '__main__') is None
    assert get_full_name('tau.xi', '__main__') is None
def test_lang_directive():
    files = {
        'a': """
from c import x
""",
        'b': """
%lang other_lang
from c import x
""",
        'c': """
%lang lang
from d_lang import x
from d_no_lang import x
""",
        'd_lang': """
%lang lang
const x = 0
""",
        'd_no_lang': """
const x = 0
""",
        'e': """
%lang lang  # First line.
%lang lang  # Second line.
"""
    }

    # Make sure that starting from 'c' does not raise an exception.
    collect_imports('c', read_file_from_dict(files))

    verify_exception(
        files, 'a', """
a:?:?: Importing modules with %lang directive 'lang' must be from a module with the same directive.
from c import x
     ^
""")

    verify_exception(
        files, 'b', """
b:?:?: Importing modules with %lang directive 'lang' must be from a module with the same directive.
from c import x
     ^
""")

    verify_exception(
        files, 'e', """
e:?:?: Found two %lang directives
%lang lang  # Second line.
^********^
""")

def test_import():
    files = {
        '.': """
from a import f as g
call g
""",
        'a': """
func f():
  jmp f
end
"""
    }
    program = preprocess_codes(codes=[(files['.'], '.')],
                               prime=PRIME,
                               read_module=read_file_from_dict(files))

    assert program.format() == """\

def verify_exception(code: str,
                     error: str,
                     files: Dict[str, str] = {},
                     main_scope: ScopedName = ScopedName(),
                     exc_type=PreprocessorError,
                     preprocessor_cls: Optional[Type[Preprocessor]] = None):
    """
    Verifies that compiling the code results in the given error.
    """
    with pytest.raises(exc_type) as e:
        preprocess_codes([(code, '')],
                         prime=PRIME,
                         read_module=read_file_from_dict(files),
                         main_scope=main_scope,
                         preprocessor_cls=preprocessor_cls)
    # Remove line and column information from the error using a regular expression.
    assert re.sub(':[0-9]+:[0-9]+: ', 'file:?:?: ',
                  str(e.value)) == error.strip()
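
# Hedged usage note for the helper above: expected-error strings follow the
# 'file:?:?:' convention produced by the re.sub call, so a hypothetical call
# would look like the following (snippet and message are placeholders, not
# taken from the original suite):
#
#     verify_exception('[ap] = x', """
# file:?:?: <message about the undefined identifier 'x'>
# [ap] = x
#        ^
# """)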

def test_dag():
    files = {
        'root.file': """
from a import aa
from b import bb
""",
        'a': """
from common.first import some1
from common.second import some2
""",
        'b': """
from common.first import some1
from common.second import some2
""",
        'common.first': '[ap] = 1',
        'common.second': '[ap] = 2',
    }

    expected_res = {name: parse_file(code) for name, code in files.items()}
    assert collect_imports('root.file',
                           read_file_from_dict(files)) == expected_res

def verify_exception(code: str,
                     error: str,
                     files: Dict[str, str] = {},
                     main_scope: Optional[ScopedName] = None,
                     exc_type=PreprocessorError,
                     pass_manager: Optional[PassManager] = None):
    """
    Verifies that compiling the code results in the given error.
    """
    if main_scope is None:
        main_scope = TEST_SCOPE

    if pass_manager is None:
        pass_manager = default_pass_manager(
            prime=PRIME, read_module=read_file_from_dict(files))

    with pytest.raises(exc_type) as e:
        preprocess_codes(codes=[(code, '')],
                         pass_manager=pass_manager,
                         main_scope=main_scope)
    # Remove line and column information from the error using a regular expression.
    assert re.sub(':[0-9]+:[0-9]+', 'file:?:?', str(e.value)) == error.strip()

def test_topological_order():
    """
    Build a dependencies DAG over the vertices 0..99 and a list of files named 'a0'..'a99'
    such that a<i> imports a<j> directly if and only if i -> j is an edge of the DAG.
    The DAG is constructed by having every node point to 3 other nodes with higher indices.
    We test that collect_imports('a0') returns the dictionary in a correct order,
    by scanning it and ensuring that when we see some file, all of its dependencies
    were seen before it.
    """

    N_VERTICES = 100
    N_NEIGHBORS = 3

    # Initialize the dependencies DAG. A list of int lists:
    # j is in the i-th list iff i -> j is an edge of the DAG.
    dependencies = [[] for _ in range(N_VERTICES)]
    for i in range(N_VERTICES - N_NEIGHBORS):
        dependencies[i] = sample(range(i + 1, N_VERTICES), N_NEIGHBORS)

    # Construct files.
    files: Dict[str, str] = {}
    for i in range(N_VERTICES):
        # Build the i-th file.
        files[f'a{i}'] = '\n'.join(
            [f'from a{j} import nothing' for j in dependencies[i]])

    # Collect packages.
    packages = collect_imports('a0', read_file_from_dict(files))

    # Test order: when a package is seen, all of its dependencies must have been
    # seen before it.
    seen = [False] * N_VERTICES
    for pkg in packages:
        curr_id = int(pkg[1:])
        for j in dependencies[curr_id]:
            assert seen[j]
        seen[curr_id] = True

def test_long_path_graph():
    files = {f'a{i}': f'from a{i+1} import b' for i in range(10)}
    files['a9'] = '[ap] = 0'

    expected_res = {name: parse_file(code) for name, code in files.items()}
    assert collect_imports('a0', read_file_from_dict(files)) == expected_res
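
# The chain above also pins down the iteration order documented by
# test_topological_order: a module appears only after everything it imports.
# An illustrative sketch of that property for the a0 -> a1 -> ... -> a9 chain
# (not part of the original suite), assuming collect_imports returns its dict
# in dependency order:
def test_long_path_graph_order_sketch():
    files = {f'a{i}': f'from a{i+1} import b' for i in range(10)}
    files['a9'] = '[ap] = 0'
    names = list(collect_imports('a0', read_file_from_dict(files)).keys())
    assert names == [f'a{i}' for i in range(9, -1, -1)]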