Example 1
import copy

# Note: bru, get_library, and merge_key are helpers from the surrounding
# project and are not shown in this excerpt.
def fix_annoying_dependency_cycle(module_names, target_module, version):
    """ Merge the bru & gyp files of all module_names into a single module
        named target_module, seeding the merged formula and gyp from
        module_names[0]. """

    print('merging dependency cycle:', module_names, 'into', target_module)
    library = get_library()
    formulas = list(map(
        lambda module_name: library.load_formula(module_name, version), 
        module_names))
    gyps = list(map(
        lambda formula: library.load_gyp(formula),
        formulas))

    # Merge the *.bru files.
    # Replace each source formula with a formula that has only a single
    # dependency on the merged formula, and no tar.gz urls:
    target_formula = copy.deepcopy(formulas[0])
    target_formula['module'] = target_module
    target_formula['url'] = []
    for merge_source in formulas:
        if not isinstance(merge_source['url'], list):
            merge_source['url'] = [ merge_source['url'] ]
        merge_key(merge_source, target_formula, 'url')
        merge_key(merge_source, target_formula, 'dependencies')
        merge_source['url'] = []
        merge_source['dependencies'] = { target_module: version }
        bru.save_formula(merge_source)
    #xxx TODO: remove deps to merge_sources from target_formula
    bru.save_formula(target_formula)
    
    # Merge the *.gyp files.
    # Replace the original gyp files with one that depends on the merged
    # dependency only, and forwards all its settings via 
    # gyp's export_dependent_settings.
    target_gyp = copy.deepcopy(gyps[0])
    merged_target = target_gyp['targets'][0]
    merged_target['target_name'] = target_module
    merged_target['include_dirs'] = []
    merged_target['sources'] = []
    merge_dep = '../{}/{}.gyp:*'.format(
        target_module, target_module)
    for formula, gyp in zip(formulas, gyps):
        targets = gyp['targets']
        assert len(targets) == 1
        source = targets[0]
        for key in ['all_dependent_settings', 'direct_dependent_settings', 
                    'include_dirs', 'sources']:
            if key in source:
                merge_key(source, merged_target, key)
                del source[key]
        source['dependencies'] = [ merge_dep ]
        source['export_dependent_settings'] = [ merge_dep ]
        bru.save_gyp(formula, gyp)
    #xxx TODO: remove deps to merge_sources from target_gyp
    bru.save_gyp(target_formula, target_gyp)
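
The function above relies on a merge_key helper that is not shown in this excerpt. A minimal sketch of what such a helper could look like, assuming the merged values are either lists (extended without duplicates) or dicts (updated), and that a missing key in the target simply receives a copy of the source value:

import copy

def merge_key(source, target, key):
    """ Hypothetical sketch of the merge_key helper used above; the real
        implementation in the bru project may differ. """
    if key not in source:
        return
    value = source[key]
    if key not in target:
        target[key] = copy.deepcopy(value)
    elif isinstance(target[key], list):
        # extend the target list, skipping items it already contains
        for item in value:
            if item not in target[key]:
                target[key].append(item)
    elif isinstance(target[key], dict):
        target[key].update(value)
    else:
        raise ValueError('cannot merge key {!r}'.format(key))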
Example 2
import copy

# Note: bru, get_library, and merge_key are helpers from the surrounding
# project and are not shown in this excerpt.
def fix_annoying_dependency_cycle(module_names, target_module, version):
    """ Merge the bru & gyp files of all module_names into a single module
        named target_module, seeding the merged formula and gyp from
        module_names[0]. """

    print('merging dependency cycle:', module_names, 'into', target_module)
    library = get_library()
    formulas = list(
        map(lambda module_name: library.load_formula(module_name, version),
            module_names))
    gyps = list(map(lambda formula: library.load_gyp(formula), formulas))

    # Merge the *.bru files.
    # Replace each source formula with a formula that has only a single
    # dependency on the merged formula, and no tar.gz urls:
    target_formula = copy.deepcopy(formulas[0])
    target_formula['module'] = target_module
    target_formula['url'] = []
    for merge_source in formulas:
        if not isinstance(merge_source['url'], list):
            merge_source['url'] = [merge_source['url']]
        merge_key(merge_source, target_formula, 'url')
        merge_key(merge_source, target_formula, 'dependencies')
        merge_source['url'] = []
        merge_source['dependencies'] = {target_module: version}
        bru.save_formula(merge_source)
    #xxx TODO: remove deps to merge_sources from target_formula
    bru.save_formula(target_formula)

    # Merge the *.gyp files.
    # Replace the original gyp files with one that depends on the merged
    # dependency only, and forwards all its settings via
    # gyp's export_dependent_settings.
    target_gyp = copy.deepcopy(gyps[0])
    merged_target = target_gyp['targets'][0]
    merged_target['target_name'] = target_module
    merged_target['include_dirs'] = []
    merged_target['sources'] = []
    merge_dep = '../{}/{}.gyp:*'.format(target_module, target_module)
    for formula, gyp in zip(formulas, gyps):
        targets = gyp['targets']
        assert len(targets) == 1
        source = targets[0]
        for key in [
                'all_dependent_settings', 'direct_dependent_settings',
                'include_dirs', 'sources'
        ]:
            if key in source:
                merge_key(source, merged_target, key)
                del source[key]
        source['dependencies'] = [merge_dep]
        source['export_dependent_settings'] = [merge_dep]
        bru.save_gyp(formula, gyp)
    #xxx TODO: remove deps to merge_sources from target_gyp
    bru.save_gyp(target_formula, target_gyp)
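
For illustration, a hypothetical invocation of the function above. The module names, merged module name, and version are made up; they would have to match formulas that actually exist in the local library:

fix_annoying_dependency_cycle(
    module_names=['boost-core', 'boost-assert', 'boost-throw_exception'],
    target_module='boost-core-group',
    version='1.57.0')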
Example 3
import os
import re
from collections import OrderedDict

# Note: bru is a helper module from the surrounding project and is not
# shown in this excerpt.
def import_boost(boost_lib, version):
    """ param boost_lib like "asio", version like "1.57.0" """

    assert re.fullmatch(r"[a-z0-9_]+", boost_lib)
    bru_module_name = "boost-" + boost_lib
    tar_url = "https://github.com/boostorg/{}/archive/boost-{}.tar.gz".format(
        boost_lib, version)
    bru_modules = "./bru_modules"
    bru.unpack_dependency(bru_modules, bru_module_name, version, tar_url)

    # The modularized boost dep was unpacked into ./bru_modules,
    # so let's inspect it.

    # This is the dir the tar was extracted into:
    tar_root_dir = os.path.join(bru_modules, bru_module_name, version)

    # This is one level below tar_root_dir; it's the dir that should
    # have an ./include dir.
    tar_content_dir = os.path.join(tar_root_dir, boost_lib + "-boost-" + version)

    assert os.path.exists(tar_content_dir)
    assert os.path.exists(os.path.join(tar_content_dir, "include"))
    print("downloaded " + boost_lib + " to " + tar_content_dir)

    formula = OrderedDict([
        ("homepage", "http://www.boost.org/"),
        ("url", "https://github.com/boostorg/{}/archive/boost-{}.tar.gz"\
            .format(boost_lib, version)),
        ("module", bru_module_name),
        ("version", version),
    ])

    # some boost libs have a src dir (e.g. boost-regex), but most don't. The 
    # gyp target will need to know if the lib is #include-only or not:
    include_dir = os.path.join(tar_content_dir, "include")
    src_dir = os.path.join(tar_content_dir, "src")
    assert os.path.exists(include_dir)
    has_src_dir = os.path.exists(src_dir)

    def get_dir_relative_to_gyp(path):
        return os.path.relpath(path, start=os.path.dirname(tar_root_dir))

    gyp_target = OrderedDict([
        ('target_name', bru_module_name),
        ('type', 'static_library' if has_src_dir else 'none'),
        ('include_dirs', [get_dir_relative_to_gyp(include_dir)]),
        # I wish I could use direct_dependent_settings here, but I cannot:
        ('all_dependent_settings', {
            'include_dirs': [get_dir_relative_to_gyp(include_dir)]
        })
    ])
    if has_src_dir:
        gyp_target['sources'] = [ 
            os.path.join(get_dir_relative_to_gyp(src_dir), "*.cpp") ]

        def has_subdirs(dir):
            for file in os.listdir(dir):
                if os.path.isdir(os.path.join(dir, file)):
                    return True
            return False
        # src_dir should be flat, otherwise we'd have to use a different
        # 'sources' expression in the gyp file. One boost dir that violates 
        # this is boost.context, with an asm subdir
        libs_with_known_src_subdirs = [
            'context',   # asm subdir
            'coroutine', # posix and windows subdirs
            'date_time', # posix_time
            'locale',
            'math',
            'mpi',
            'python',
            'thread',
            'wave',
        ]
        assert not has_subdirs(src_dir) or boost_lib in libs_with_known_src_subdirs

    gyp = { "targets": [ gyp_target ] }

    # here we could in theory also determine deps between boost modules
    # automatically by finding #include statements in cpp and hpp files, and
    # by searching all local boost_modules for which boost module provides
    # each #include. But let's rather have a more general tool do that for
    # arbitrary libraries, not just for boost.
    # See scan_deps.py for that tool.

    library_root = "./library"
    if not os.path.isdir(library_root):
        raise Exception("expected to run script in repo root with " +
                        library_root + " dir")
    
    if not os.path.exists(os.path.join(library_root, bru_module_name)):
        print('saving', bru_module_name, 'to library')
        bru.save_formula(formula)
        bru.save_gyp(formula, gyp)
    else:
        print('skipping existing module', bru_module_name)
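
A hypothetical driver for import_boost, pulling in a handful of modularized boost libs in one go. The list of libs and the version are just examples; each lib name must correspond to a github.com/boostorg repository:

if __name__ == '__main__':
    # import a few modularized boost libs into ./bru_modules and ./library
    for boost_lib in ['asio', 'regex', 'system']:
        import_boost(boost_lib, '1.57.0')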