def test_load_cached_sources_list():
    from rosdep2.sources_list import load_cached_sources_list, update_sources_list
    tempdir = tempfile.mkdtemp()

    # test behavior on empty cache
    assert [] == load_cached_sources_list(sources_cache_dir=tempdir)
    
    # pull in cache data
    sources_list_dir = get_test_dir()
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir, error_handler=None)
    assert retval
    
    # now test with cached data
    retval = load_cached_sources_list(sources_cache_dir=tempdir)
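    # the cache written above yields one cached DataSource per source entry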
    assert len(retval) == 3, retval
    source0 = retval[0]
    source1 = retval[1]
    source2 = retval[2]
    
    # source0 is the untagged 'default' source; source1 holds the python rosdep data
    assert 'python' in source1.rosdep_data
    assert not source0.tags
    
    # this should be the 'non-existent' source
    assert source2.rosdep_data == {}
    assert source2.tags == ['ubuntu']
Example 2
def test_load_cached_sources_list():
    from rosdep2.sources_list import load_cached_sources_list, update_sources_list
    tempdir = tempfile.mkdtemp()

    # test behavior on empty cache
    assert [] == load_cached_sources_list(sources_cache_dir=tempdir)

    # pull in cache data
    sources_list_dir = get_test_dir()
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir,
                                 error_handler=None)
    assert retval

    # now test with cached data
    retval = load_cached_sources_list(sources_cache_dir=tempdir)
    assert len(retval) == 3
    source0 = retval[0]
    source1 = retval[1]
    source2 = retval[2]

    # source0 is the untagged 'default' source; source1 holds the python rosdep data
    assert 'python' in source1.rosdep_data
    assert not source0.tags

    # this should be the 'non-existent' source
    assert source2.rosdep_data is None
    assert source2.tags == ['ubuntu']
def test_update_sources_list():
    from rosdep2.sources_list import update_sources_list, InvalidData, compute_filename_hash, PICKLE_CACHE_EXT
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
    try:
        from urllib.request import pathname2url
    except ImportError:
        from urllib import pathname2url
    sources_list_dir = get_test_dir()
    index_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), 'rosdistro', 'index.yaml'))
    index_url = 'file://' + pathname2url(index_path)
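    # point the rosdistro index at the local test copy via ROSDISTRO_INDEX_URL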
    os.environ['ROSDISTRO_INDEX_URL'] = index_url
    tempdir = tempfile.mkdtemp()
    # use a subdirectory of test dir to make sure rosdep creates the necessary substructure
    tempdir = os.path.join(tempdir, 'newdir')

    errors = []

    def error_handler(loc, e):
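        # collect (source, exception) pairs for downloads that fail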
        errors.append((loc, e))

    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir,
                                 error_handler=error_handler)
    assert retval
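    # one (DataSource, cache file path) pair is returned per successfully downloaded source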
    assert len(retval) == 2, retval
    # one of our sources is intentionally bad, this should be a softfail
    assert len(errors) == 1, errors
    assert errors[0][0].url == 'https://badhostname.willowgarage.com/rosdep.yaml'

    source0, path0 = retval[0]
    assert source0.origin.endswith('20-default.list'), source0
    hash1 = compute_filename_hash(GITHUB_URL)
    hash2 = compute_filename_hash(BADHOSTNAME_URL)
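    # cache files are named with a hash of their source URL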
    filepath = os.path.join(tempdir, hash1)
    assert filepath == path0, '%s vs %s' % (filepath, path0)
    with open(filepath + PICKLE_CACHE_EXT, 'rb') as f:
        data = pickle.loads(f.read())
        assert 'cmake' in data

    # verify that cache index exists. contract specifies that even
    # failed downloads are specified in the index, just in case old
    # download data is present.
    with open(os.path.join(tempdir, 'index'), 'r') as f:
        index = f.read().strip()
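    # each index line is '<type> <url> <tags>'; even the failed bad-host source gets a line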
    expected = "#autogenerated by rosdep, do not edit. use 'rosdep update' instead\n"\
               'yaml %s \n'\
               'yaml %s python\n'\
               'yaml %s ubuntu' % (GITHUB_URL, GITHUB_PYTHON_URL, BADHOSTNAME_URL)
    assert expected == index, '\n[%s]\nvs\n[%s]' % (expected, index)
Example 4
def test_update_sources_list():
    from rosdep2.sources_list import update_sources_list, InvalidData, compute_filename_hash, PICKLE_CACHE_EXT
    try:
        import cPickle as pickle
    except ImportError:
        import pickle
    try:
        from urllib.request import pathname2url
    except ImportError:
        from urllib import pathname2url
    sources_list_dir = get_test_dir()
    index_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'rosdistro', 'index.yaml'))
    index_url = 'file://' + pathname2url(index_path)
    os.environ['ROSDISTRO_INDEX_URL'] = index_url
    tempdir = tempfile.mkdtemp()
    # use a subdirectory of test dir to make sure rosdep creates the necessary substructure
    tempdir = os.path.join(tempdir, 'newdir')

    errors = []

    def error_handler(loc, e):
        errors.append((loc, e))
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir, error_handler=error_handler)
    assert retval
    assert len(retval) == 2, retval
    # one of our sources is intentionally bad, this should be a softfail
    assert len(errors) == 1, errors
    assert errors[0][0].url == 'https://badhostname.willowgarage.com/rosdep.yaml'

    source0, path0 = retval[0]
    assert source0.origin.endswith('20-default.list'), source0
    hash1 = compute_filename_hash(GITHUB_URL)
    hash2 = compute_filename_hash(BADHOSTNAME_URL)
    filepath = os.path.join(tempdir, hash1)
    assert filepath == path0, '%s vs %s' % (filepath, path0)
    with open(filepath + PICKLE_CACHE_EXT, 'rb') as f:
        data = pickle.loads(f.read())
        assert 'cmake' in data

    # verify that cache index exists. contract specifies that even
    # failed downloads are specified in the index, just in case old
    # download data is present.
    with open(os.path.join(tempdir, 'index'), 'r') as f:
        index = f.read().strip()
    expected = "#autogenerated by rosdep, do not edit. use 'rosdep update' instead\n"\
               'yaml %s \n'\
               'yaml %s python\n'\
               'yaml %s ubuntu' % (GITHUB_URL, GITHUB_PYTHON_URL, BADHOSTNAME_URL)
    assert expected == index, '\n[%s]\nvs\n[%s]' % (expected, index)
Example 5
def test_update_sources_list():
    from rosdep2.sources_list import update_sources_list, InvalidData, compute_filename_hash
    sources_list_dir = get_test_dir()
    tempdir = tempfile.mkdtemp()
    # use a subdirectory of test dir to make sure rosdep creates the necessary substructure
    tempdir = os.path.join(tempdir, 'newdir')

    errors = []

    def error_handler(loc, e):
        errors.append((loc, e))

    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir,
                                 error_handler=error_handler)
    assert retval
    assert len(retval) == 2, retval
    # one of our sources is intentionally bad, this should be a softfail
    assert len(errors) == 1, errors
    assert errors[0][0].url == 'https://badhostname.willowgarage.com/rosdep.yaml'

    source0, path0 = retval[0]
    assert source0.origin.endswith('20-default.list'), source0
    hash1 = compute_filename_hash(GITHUB_URL)
    hash2 = compute_filename_hash(BADHOSTNAME_URL)
    filepath = os.path.join(tempdir, hash1)
    assert filepath == path0, "%s vs %s" % (filepath, path0)
    with open(filepath, 'r') as f:
        data = yaml.safe_load(f)
        assert 'cmake' in data

    # verify that cache index exists. contract specifies that even
    # failed downloads are specified in the index, just in case old
    # download data is present.
    with open(os.path.join(tempdir, 'index'), 'r') as f:
        index = f.read().strip()
    expected = """#autogenerated by rosdep, do not edit. use 'rosdep update' instead
yaml %s 
yaml %s python
yaml %s ubuntu""" % (GITHUB_URL, GITHUB_PYTHON_URL, BADHOSTNAME_URL)
    assert expected == index, "\n[%s]\nvs\n[%s]" % (expected, index)
def test_update_sources_list():
    from rosdep2.sources_list import update_sources_list, InvalidData, compute_filename_hash, PICKLE_CACHE_EXT
    import cPickle
    sources_list_dir = get_test_dir()
    tempdir = tempfile.mkdtemp()
    # use a subdirectory of test dir to make sure rosdep creates the necessary substructure
    tempdir = os.path.join(tempdir, 'newdir')

    errors = []
    def error_handler(loc, e):
        errors.append((loc, e))
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir, error_handler=error_handler)
    assert retval
    assert len(retval) == 2, retval
    # one of our sources is intentionally bad, this should be a softfail
    assert len(errors) == 1, errors
    assert errors[0][0].url == 'https://badhostname.willowgarage.com/rosdep.yaml'

    source0, path0 = retval[0]
    assert source0.origin.endswith('20-default.list'), source0
    hash1 = compute_filename_hash(GITHUB_URL)
    hash2 = compute_filename_hash(BADHOSTNAME_URL)
    filepath = os.path.join(tempdir, hash1)
    assert filepath == path0, "%s vs %s"%(filepath, path0)
    with open(filepath+PICKLE_CACHE_EXT, 'r') as f:
        data = cPickle.loads(f.read())
        assert 'cmake' in data

    # verify that cache index exists. contract specifies that even
    # failed downloads are specified in the index, just in case old
    # download data is present.
    with open(os.path.join(tempdir, 'index'), 'r') as f:
        index = f.read().strip()
    expected = """#autogenerated by rosdep, do not edit. use 'rosdep update' instead
yaml %s 
yaml %s python
yaml %s ubuntu"""%(GITHUB_URL, GITHUB_PYTHON_URL, BADHOSTNAME_URL)
    assert expected == index, "\n[%s]\nvs\n[%s]"%(expected, index)
Example 7
def test_SourcesListLoader_create_default():
    from rosdep2.sources_list import update_sources_list, SourcesListLoader, DataSourceMatcher
    # create temp dir for holding sources cache
    tempdir = tempfile.mkdtemp()

    # pull in cache data
    sources_list_dir = get_test_dir()
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir,
                                 error_handler=None)
    assert retval

    # now test with cached data
    matcher = rosdep2.sources_list.DataSourceMatcher(['ubuntu', 'lucid'])
    loader = SourcesListLoader.create_default(matcher,
                                              sources_cache_dir=tempdir)
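    # create_default loads the cached sources, keeping only those that match the given tags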
    assert loader.sources
    sources0 = loader.sources
    assert not any([s for s in loader.sources if not matcher.matches(s)])

    loader = SourcesListLoader.create_default(matcher,
                                              sources_cache_dir=tempdir)
    assert sources0 == loader.sources

    # now test with different matcher
    matcher2 = rosdep2.sources_list.DataSourceMatcher(['python'])
    loader2 = SourcesListLoader.create_default(matcher2,
                                               sources_cache_dir=tempdir)
    assert loader2.sources
    # - should have filtered down to python-only
    assert sources0 != loader2.sources
    assert not any([s for s in loader2.sources if not matcher2.matches(s)])

    # test API

    # very simple, always raises RNF
    try:
        loader.get_rosdeps('foo')
    except rospkg.ResourceNotFound:
        pass
    try:
        loader.get_view_key('foo')
    except rospkg.ResourceNotFound:
        pass

    assert [] == loader.get_loadable_resources()
    all_sources = [x.url for x in loader.sources]
    assert all_sources == loader.get_loadable_views()

    # test get_source early to make sure model matches expected
    try:
        loader.get_source('foo')
        assert False, "should have raised"
    except rospkg.ResourceNotFound:
        pass
    s = loader.get_source(GITHUB_URL)
    assert s.url == GITHUB_URL

    # get_view_dependencies
    # - loader doesn't know the view name, so assume everything
    assert all_sources == loader.get_view_dependencies('foo')
    # - actual views don't depend on anything
    assert [] == loader.get_view_dependencies(GITHUB_URL)

    # load_view
    from rosdep2.model import RosdepDatabase
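    # load_view imports the GitHub source's rosdep data into the database under its URL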
    for verbose in [True, False]:
        rosdep_db = RosdepDatabase()
        loader.load_view(GITHUB_URL, rosdep_db, verbose=verbose)
        assert rosdep_db.is_loaded(GITHUB_URL)
        assert [] == rosdep_db.get_view_dependencies(GITHUB_URL)
        entry = rosdep_db.get_view_data(GITHUB_URL)
        assert 'cmake' in entry.rosdep_data
        assert GITHUB_URL == entry.origin

    #  - coverage: repeating load_view should be a no-op
    loader.load_view(GITHUB_URL, rosdep_db)
def test_SourcesListLoader_create_default():
    from rosdep2.sources_list import update_sources_list, SourcesListLoader, DataSourceMatcher
    # create temp dir for holding sources cache
    tempdir = tempfile.mkdtemp()

    # pull in cache data
    sources_list_dir = get_test_dir()
    retval = update_sources_list(sources_list_dir=sources_list_dir,
                                 sources_cache_dir=tempdir, error_handler=None)
    assert retval
    
    # now test with cached data
    matcher = rosdep2.sources_list.DataSourceMatcher(['ubuntu', 'lucid'])
    loader = SourcesListLoader.create_default(matcher, sources_cache_dir=tempdir)
    assert loader.sources
    sources0 = loader.sources
    assert not any([s for s in loader.sources if not matcher.matches(s)])
    
    loader = SourcesListLoader.create_default(matcher, sources_cache_dir=tempdir)
    assert sources0 == loader.sources
    
    # now test with different matcher
    matcher2 = rosdep2.sources_list.DataSourceMatcher(['python'])
    loader2 = SourcesListLoader.create_default(matcher2, sources_cache_dir=tempdir)
    assert loader2.sources
    # - should have filtered down to python-only
    assert sources0 != loader2.sources
    assert not any([s for s in loader2.sources if not matcher2.matches(s)])

    # test API

    # very simple, always raises RNF
    try:
        loader.get_rosdeps('foo')
    except rospkg.ResourceNotFound: pass
    try:
        loader.get_view_key('foo')
    except rospkg.ResourceNotFound: pass

    assert [] == loader.get_loadable_resources()
    all_sources = [x.url for x in loader.sources]
    assert all_sources == loader.get_loadable_views()
    
    # test get_source early to make sure model matches expected
    try:
        loader.get_source('foo')
        assert False, "should have raised"
    except rospkg.ResourceNotFound: pass
    s = loader.get_source(GITHUB_URL)
    assert s.url == GITHUB_URL

    # get_view_dependencies
    # - loader doesn't know the view name, so assume everything
    assert all_sources == loader.get_view_dependencies('foo')
    # - actual views don't depend on anything
    assert [] == loader.get_view_dependencies(GITHUB_URL)    

    # load_view
    from rosdep2.model import RosdepDatabase
    for verbose in [True, False]:
        rosdep_db = RosdepDatabase()
        loader.load_view(GITHUB_URL, rosdep_db, verbose=verbose)
        assert rosdep_db.is_loaded(GITHUB_URL)
        assert [] == rosdep_db.get_view_dependencies(GITHUB_URL)
        entry = rosdep_db.get_view_data(GITHUB_URL)
        assert 'cmake' in entry.rosdep_data
        assert GITHUB_URL == entry.origin

    #  - coverage: repeating load_view should be a no-op
    loader.load_view(GITHUB_URL, rosdep_db)