Example #1
from conda_build import variants


def test_combine_variants():
    v1 = {'python': '2.7.*', 'extend_keys': ['dict', 'list'], 'list': 'steve',
          'dict': {'some': 'value'}}
    v2 = {'python': '3.5.*', 'list': 'frank', 'dict': {'some': 'other', 'test': 'value'}}
    combined = variants.combine_variants(v1, v2)
    assert combined['python'] == '3.5.*'
    assert set(combined['list']) == {'steve', 'frank'}
    assert len(combined['dict']) == 2
    assert combined['dict']['some'] == 'other'
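For reference, here is a minimal sketch of the merge semantics this test exercises. It is a simplified stand-in, not conda-build's actual implementation: keys listed under extend_keys accumulate across variants (lists are concatenated, dicts are merged), while every other key is overwritten by the later variant.

def combine_variants_sketch(v1, v2):
    # Simplified illustration of the behavior the test expects; the real
    # implementation is conda_build.variants.combine_variants.
    extend_keys = set(v1.get('extend_keys', [])) | set(v2.get('extend_keys', []))
    combined = dict(v1)
    for key, value in v2.items():
        if key not in extend_keys:
            combined[key] = value  # plain keys: the later variant wins
        elif isinstance(value, dict):
            merged = dict(combined.get(key, {}))
            merged.update(value)  # dict values are merged; later wins on conflicts
            combined[key] = merged
        else:
            existing = combined.get(key, [])
            if not isinstance(existing, list):
                existing = [existing]
            addition = value if isinstance(value, list) else [value]
            combined[key] = existing + addition  # list values accumulate
    return combined

Run against the v1/v2 fixtures above, this sketch yields 'python' == '3.5.*', 'list' == ['steve', 'frank'], and a two-key 'dict' with 'some' == 'other', matching the assertions.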
Example #2
# Assumed context: these helpers live in conda-build's own modules.
import os

from conda_build import exceptions
from conda_build.exceptions import DependencyNeedsBuildingError
from conda_build.render import finalize_metadata
from conda_build.variants import combine_variants


def distribute_variants(metadata, variants, index, permit_unsatisfiable_variants=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True  # default; parsing below may refine this per-variant
    unsatisfiable_variants = []
    packages_needing_building = set()

    for variant in variants:
        mv = metadata.copy()
        # deep copy the sensitive parts to decouple metadata objects
        mv.config = metadata.config.copy()
        mv.config.variant = combine_variants(variant, mv.config.variant)
        mv.final = False

        # TODO: may need to compute new build id, or at least remove any envs before building
        #    another variant

        if 'target_platform' in variant:
            mv.config.host_subdir = variant['target_platform']
        if not need_reparse_in_env:
            try:
                mv.parse_until_resolved()
                need_source_download = (bool(mv.meta.get('source')) and
                                        not mv.needs_source_for_render and
                                        not os.listdir(mv.config.work_dir))
                # this is a bit wasteful.  We don't store the output here - we'll have to recompute
                #    it later.  We don't store it, so that we can have per-subpackage exclusions
                #    from the hash.  Since finalizing brings in *all* build-time packages, not
                #    just the ones from the recipe, it is impossible to remove them in the general
                #    case.  Instead, we just leave the recipe unfinalized until then, so that by
                #    excluding one higher-level package (e.g. python), we also won't include its
                #    deps in the hash
                finalize_metadata(mv, index)
            except DependencyNeedsBuildingError as e:
                unsatisfiable_variants.append(variant)
                packages_needing_building.update(set(e.packages))
                if permit_unsatisfiable_variants:
                    rendered_metadata[mv.dist()] = (mv, need_source_download,
                                                    need_reparse_in_env)
                continue
            except exceptions.UnableToParseMissingSetuptoolsDependencies:
                need_reparse_in_env = True

        # computes hashes based on whatever the current specs are - not the final specs
        #    This is a deduplication step.  Any variants that end up identical because a
        #    given variant is not used in a recipe are effectively ignored, though we still pay
        #    the price to parse for that variant.
        rendered_metadata[mv.build_id()] = (mv, need_source_download, need_reparse_in_env)
    # Returns a list of 3-tuples:
    #    (metadata, need_source_download, need_reparse_in_env)
    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    return list(rendered_metadata.values())
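A design choice worth noting in distribute_variants: rendered_metadata is keyed by mv.build_id(), so two variants whose differing keys are never consumed by the recipe hash to the same id and collapse into one entry. A toy illustration of that dict-keyed deduplication (the build-id strings below are made up, not conda-build output):

# Two variants that differ only in a key the recipe never uses map to the
# same (hypothetical) build id, so the dict keeps just one of them.
rendered = {}
for variant, build_id in [({'python': '3.5'}, 'py35h1a2b3c'),
                          ({'python': '3.5', 'unused': 'x'}, 'py35h1a2b3c'),
                          ({'python': '2.7'}, 'py27h9z8y7x')]:
    rendered[build_id] = variant  # later duplicates overwrite earlier entries
assert len(rendered) == 2  # the unused-key variant was deduplicated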
Example #3
# (Same assumed conda-build imports as in Example #2.)
def distribute_variants(metadata,
                        variants,
                        index,
                        permit_unsatisfiable_variants=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True  # default; parsing below may refine this per-variant
    unsatisfiable_variants = []
    packages_needing_building = set()

    for variant in variants:
        mv = metadata.copy()
        # deep copy the sensitive parts to decouple metadata objects
        mv.config = metadata.config.copy()
        mv.config.variant = combine_variants(variant, mv.config.variant)
        mv.final = False

        # TODO: may need to compute new build id, or at least remove any envs before building
        #    another variant

        if 'target_platform' in variant:
            mv.config.host_subdir = variant['target_platform']
        if not need_reparse_in_env:
            try:
                mv.parse_until_resolved()
                need_source_download = (bool(mv.meta.get('source'))
                                        and not mv.needs_source_for_render
                                        and not os.listdir(mv.config.work_dir))
                mv = finalize_metadata(mv, index)
            except DependencyNeedsBuildingError as e:
                unsatisfiable_variants.append(variant)
                packages_needing_building.update(set(e.packages))
                if permit_unsatisfiable_variants:
                    rendered_metadata[mv.build_id()] = (mv,
                                                        need_source_download,
                                                        need_reparse_in_env)
                continue
            except exceptions.UnableToParseMissingSetuptoolsDependencies:
                need_reparse_in_env = True

        # computes hashes based on whatever the current specs are - not the final specs
        #    This is a deduplication step.  Any variants that end up identical because a
        #    given variant is not used in a recipe are effectively ignored, though we still pay
        #    the price to parse for that variant.
        rendered_metadata[mv.build_id()] = (mv, need_source_download,
                                            need_reparse_in_env)
    # Returns a list of 3-tuples:
    #    (metadata, need_source_download, need_reparse_in_env)
    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    return list(rendered_metadata.values())