def test_git_version(container, expected):
    """Check the git inside the container: newer than the distro's
    required minimum, but still below 3.0.0."""
    stdout, _ = container.exec(["git", "--version"])
    match = re.match(r'git version (\d+\.\d+\.\d+)', stdout.strip())
    running = Version(match.group(1))
    required = Version(git_versions[expected.distro.version.full_version])
    assert required < running
    assert running < Version('3.0.0')
Example #2
0
def expected(request) -> Expected:
    """Build the Expected fixture by parsing the docker image name given
    on the command line together with the envfile values the images were
    generated from."""
    # Decompose the image reference into its naming components.
    image_name = request.config.option.image
    match = re.match(
        r'((?P<domain>[\w\-.]+)\/)?'
        r'(?P<username>[\w\-.]+)\/'
        r'((?P<compiler>gcc|clang)(?P<version>\d+))?'
        r'(?P<service_base>base)?'
        r'-(?P<distro>[a-z]+)(?P<distro_version>[\d.]+)'
        r'(-(?P<suffix>cci))?'
        r'(-(?P<service>builder|jenkins))?'
        r'(:(?P<tag>[\w\-.]+))?', image_name)

    # Values used when the docker images were generated.
    env_values = get_envfile_values()

    result = Expected(
        Distro(match.group('distro'), Version(match.group('distro_version'))),
        match.group('username'),
        match.group('tag'),
        Version(env_values.get('PYTHON_VERSION')),
        Version(env_values.get('CMAKE_VERSION_FULL')))
    result.conan = Version(env_values.get('CONAN_VERSION'))
    result.compiler_versions = get_compiler_versions()
    result.suffix = match.group('suffix')

    # Images named like "gcc11-..." carry a specific compiler expectation.
    if match.group('compiler'):
        compiler_name = match.group('compiler')
        major = match.group('version')
        full_version = env_values.get(f"{compiler_name.upper()}{major}_VERSION")
        result.compiler = Compiler(compiler_name, Version(full_version))

    print(result)
    return result
Example #3
0
def check_fixed_in(fixed_in, version_series):
    """Return True when the bug's fix version belongs to *version_series*.

    An unset (None) fix version counts as belonging to every series.
    """
    if fixed_in is None:
        return True
    version = fixed_in if isinstance(fixed_in, Version) else Version(fixed_in)
    return version.is_in_series(version_series)
Example #4
0
def check_fixed_in(fixed_in, version_series):
    """Decide whether a bug's fix landed within the given version series;
    a missing (None) fix version matches any series."""
    if fixed_in is None:
        return True
    coerced = Version(fixed_in) if not isinstance(fixed_in, Version) else fixed_in
    return coerced.is_in_series(version_series)
def test_version(v1, op, v2):
    """Assert that comparison *op* (GT/LT/EQ) holds between the two
    version strings once parsed into Version objects."""
    left = Version(v1)
    right = Version(v2)
    if op == GT:
        assert left > right
    elif op == LT:
        assert left < right
    elif op == EQ:
        assert left == right
Example #6
0
class TenantAddForm(View):
    """The form on the Add page"""
    # Provider dropdown; options are identified by their EMS record id.
    cloud_provider = BootstrapSelect(id='ems_id')
    name = Input('name')
    # The submit button's label changed between product versions, so the
    # right widget is picked at runtime based on the appliance version.
    save_button = VersionPick({
        Version.lowest(): Button('Save'),
        Version.latest(): Button('Add')
    })
    reset_button = Button('Reset')
    cancel_button = Button('Cancel')
Example #7
0
def get_compiler_versions():
    """Collect compiler versions declared in the envfile.

    Scans the envfile for keys shaped like ``GCC<major>_VERSION`` or
    ``CLANG<major>_VERSION`` and returns a defaultdict mapping the
    compiler name ('gcc' / 'clang') to the list of Version objects found.
    """
    env_values = get_envfile_values()
    compiler_versions = defaultdict(list)
    # One pre-compiled pattern replaces the two per-iteration re.match
    # calls of the original; the dead `else: pass` branch is removed.
    pattern = re.compile(r'(GCC|CLANG)\d+_VERSION')
    for key, value in env_values.items():
        m = pattern.match(key)
        if m:
            compiler_versions[m.group(1).lower()].append(Version(value))
    return compiler_versions
Example #8
0
def templates(request, group_id=None, prov_id=None):
    """Render the templates overview page.

    Without a ``group_id`` this redirects to the first group (or home when
    no groups exist).  Renders via ``locals()``, so every local variable
    below becomes part of the template context.
    """
    if group_id is None:
        try:
            # Default to the lowest-id group.
            return redirect("group_templates", group_id=Group.objects.order_by("id")[0].id)
        except IndexError:
            # No Group
            messages.info(request, "No group present, redirected to the homepage.")
            return go_home(request)
    else:
        try:
            group = Group.objects.get(id=group_id)
        except ObjectDoesNotExist:
            messages.warning(request, "Group '{}' does not exist.".format(group_id))
            return redirect("templates")
    if prov_id is not None:
        try:
            provider = Provider.objects.get(id=prov_id)
        except ObjectDoesNotExist:
            messages.warning(request, "Provider '{}' does not exist.".format(prov_id))
            return redirect("templates")
    else:
        provider = None
    groups = Group.objects.order_by("id")
    mismatched_versions = MismatchVersionMailer.objects.order_by("id")
    # The table is pre-built with rowspan bookkeeping so the template can
    # merge cells that repeat across consecutive rows.
    prepared_table = []
    zstream_rowspans = {}
    version_rowspans = {}
    date_version_rowspans = {}
    # NOTE(review): the .items()/.sort() pattern implies Python 2 — on
    # Python 3 dict views have no .sort(); confirm before porting.
    items = group.zstreams_versions.items()
    items.sort(key=lambda pair: Version(pair[0]), reverse=True)
    for zstream, versions in items:
        for version in versions:
            for template in Template.objects.filter(
                    template_group=group, version=version, exists=True,
                    ready=True).order_by('-date', 'provider'):
                # First row for a zstream carries the label; later rows
                # only grow its rowspan and get None appended instead.
                if zstream in zstream_rowspans:
                    zstream_rowspans[zstream] += 1
                    zstream_append = None
                else:
                    zstream_rowspans[zstream] = 1
                    zstream_append = zstream

                # Same merge bookkeeping per version.
                if version in version_rowspans:
                    version_rowspans[version] += 1
                    version_append = None
                else:
                    version_rowspans[version] = 1
                    version_append = version

                # Date cells merge per (date, version) pair.
                datetuple = (template.date, version)
                if datetuple in date_version_rowspans:
                    date_version_rowspans[datetuple] += 1
                    date_append = None
                else:
                    date_version_rowspans[datetuple] = 1
                    date_append = template.date
                prepared_table.append((
                    zstream_append, version_append, date_append, datetuple, template.provider,
                    template))
    return render(request, 'appliances/templates.html', locals())
Example #9
0
def handle_client(version, queue, nr, packet, args):
    """Drive one client connection through its protocol stages.

    Stage 0 accepts only a handshake, stage 1 accepts only a login, and
    every later stage forwards packets to the main event queue.
    """
    if nr.stage == 0:
        if packet != 'handshake':
            return
        if args[0] == network.HANDSHAKE:
            nr.stage = 1
        else:
            nr.sendp.disconnect('Bad handshake')
    elif nr.stage == 1:
        if packet != 'login':
            return
        verj, vern, username, _passkey, room = args
        client_version = Version('Vyolet', (verj, vern))
        # Equality (rather than !=) kept deliberately in case Version
        # only defines __eq__.
        if not (client_version == version):
            nr.sendp.disconnect('Version Mismatch')
            return
        # TODO: verify passkey here
        if room != 'default':
            nr.sendp.disconnect('Invalid room')
            return
        nr.username = username
        nr.stage = 2
        queue.put((events.LOGIN, (username, nr.sendp)))
        nr.sendp.login_confirm()
    # Main packet handlers (stage >= 2)
    elif packet == 'disconnect':
        queue.put((events.LOGOUT, (nr.username, )))
    else:
        queue.put((events.UCMD, (nr.username, packet, args)))
def appliance_preupdate(old_version, appliance):
    """Requests appliance from sprout based on old_versions, edits partitions and adds
    repo file for update"""
    # (The docstring above was originally a stray string statement placed
    # after the first lines of code; moved to the top so Python actually
    # treats it as the function's docstring.)
    series = appliance.version.series()
    update_url = "update_url_{}".format(series.replace('.', ''))

    sp = SproutClient.from_config()
    available_versions = set(sp.call_method('available_cfme_versions'))
    # Keep only versions on the requested stream, newest first; the
    # newest match is the one provisioned below.
    usable = sorted(
        (Version(a) for a in available_versions if a.startswith(old_version)),
        reverse=True)
    try:
        apps, pool_id = sp.provision_appliances(count=1,
                                                preconfigured=True,
                                                lease_time=180,
                                                version=str(usable[0]))
    except Exception as e:
        logger.exception(
            "Couldn't provision appliance with following error:{}".format(e))
        raise SproutException('No provision available')

    apps[0].db.extend_partition()
    urls = process_url(cfme_data['basic_info'][update_url])
    output = build_file(urls)
    # Write the generated repo file locally, make sure it hit the disk,
    # then push it to the appliance.
    with tempfile.NamedTemporaryFile('w') as f:
        f.write(output)
        f.flush()
        os.fsync(f.fileno())
        apps[0].ssh_client.put_file(f.name, '/etc/yum.repos.d/update.repo')
    yield apps[0]
    # Teardown after the consumer is done with the yielded appliance.
    apps[0].ssh_client.close()
    sp.destroy_pool(pool_id)
Example #11
0
    def __getattr__(self, attr):
        """This proxies the attribute queries to the Bug object and modifies its result.

        If the field looked up is specified as loose field, it will be converted to Version.
        If the field is string and it has zero length, or the value is specified as "not specified",
        it will return None.
        """
        value = getattr(self._bug, attr)
        if attr in self.loose:
            # Loose fields may come back as a sequence; take the first
            # element.  (basestring implies Python 2 — confirm before
            # porting.)
            if isinstance(value,
                          Sequence) and not isinstance(value, basestring):
                value = value[0]
            value = value.strip()
            if not value:
                return None
            if value.lower() in NONE_FIELDS:
                return None
            # We have to strip any leading non-number characters to correctly match
            value = re.sub(r"^[^0-9]+", "", value)
            if not value:
                return None
            return Version(value)
        # Non-loose string fields: blank / whitespace-only means "unset".
        if isinstance(value, basestring):
            if len(value.strip()) == 0:
                return None
            else:
                return value
        else:
            return value
Example #12
0
def pytest_generate_tests(metafunc):
    """Parametrize tests with a mock "_host_provider" built from the first
    host of each virtualcenter provider with version >= 5.0."""
    argnames, argvalues, idlist = testgen.provider_by_type(
        metafunc, ['virtualcenter'])
    argnames = argnames + ["_host_provider"]

    new_idlist = []
    new_argvalues = []

    for i, argvalue_tuple in enumerate(argvalues):
        args = dict(zip(argnames, argvalue_tuple))
        if args['provider'].type != "virtualcenter":
            continue
        hosts = args['provider'].data.get("hosts", [])
        if not hosts:
            continue

        version = args['provider'].data.get("version", None)
        if version is None:
            # No version, no test
            continue
        if Version(version) < "5.0":
            # Ignore lesser than 5
            continue

        host = hosts[0]
        creds = credentials[host["credentials"]]
        ip_address = resolve_hostname(host["name"])
        cred = VMwareProvider.Credential(principal=creds["username"],
                                         secret=creds["password"],
                                         verify_secret=creds["password"])
        # Mock provider data: copy the real provider's data and point it
        # at the single chosen host.
        provider_data = {}
        provider_data.update(args['provider'].data)
        provider_data["name"] = host["name"]
        provider_data["hostname"] = host["name"]
        provider_data["ipaddress"] = ip_address
        provider_data["credentials"] = host["credentials"]
        provider_data.pop("host_provisioning", None)
        provider_data["hosts"] = [host]
        provider_data["discovery_range"] = {}
        provider_data["discovery_range"]["start"] = ip_address
        provider_data["discovery_range"]["end"] = ip_address
        host_provider = VMwareProvider(
            name=host["name"],
            hostname=host["name"],
            ip_address=ip_address,
            credentials={'default': cred},
            provider_data=provider_data,
        )
        # Extend this provider's argvalue row in place, then keep only the
        # rows that made it past the filters above.
        argvalues[i].append(host_provider)
        idlist[i] = "{}/{}".format(args['provider'].key, host["name"])
        new_idlist.append(idlist[i])
        new_argvalues.append(argvalues[i])

    testgen.parametrize(metafunc,
                        argnames,
                        new_argvalues,
                        ids=new_idlist,
                        scope="module")
Example #13
0
def pytest_generate_tests(metafunc):
    """Parametrize tests with a mock "_host_provider" (endpoint-based
    variant) built from the first host of each VMwareProvider with
    version >= 5.0."""
    argnames, argvalues, idlist = testgen.providers_by_class(
        metafunc, [VMwareProvider])
    argnames = argnames + ["_host_provider"]

    new_idlist = []
    new_argvalues = []

    for i, argvalue_tuple in enumerate(argvalues):
        args = dict(zip(argnames, argvalue_tuple))
        # TODO
        # All this should be replaced with a proper ProviderFilter passed to testgen.providers()
        if args['provider'].type != "virtualcenter":
            continue
        hosts = args['provider'].data.get("hosts", [])
        if not hosts:
            continue

        version = args['provider'].data.get("version", None)
        if version is None:
            # No version, no test
            continue
        if Version(version) < "5.0":
            # Ignore lesser than 5
            continue

        host = hosts[0]
        ip_address = resolve_hostname(host["name"])
        endpoint = DefaultEndpoint(credentials=host["credentials"],
                                   hostname=host["name"])
        # Mock provider data: copy the real provider's data and point it
        # at the single chosen host.
        provider_data = {}
        provider_data.update(args['provider'].data)
        provider_data["name"] = host["name"]
        provider_data["hostname"] = host["name"]
        provider_data["ipaddress"] = ip_address
        provider_data.pop("host_provisioning", None)
        provider_data["hosts"] = [host]
        provider_data["discovery_range"] = {}
        provider_data["discovery_range"]["start"] = ip_address
        provider_data["discovery_range"]["end"] = ip_address
        host_provider = VMwareProvider(name=host["name"],
                                       ip_address=ip_address,
                                       endpoints=endpoint,
                                       provider_data=provider_data)
        # Extend this provider's argvalue row in place, then keep only the
        # rows that made it past the filters above.
        argvalues[i].append(host_provider)
        idlist[i] = "{}/{}".format(args['provider'].key, host["name"])
        new_idlist.append(idlist[i])
        new_argvalues.append(argvalues[i])

    testgen.parametrize(metafunc,
                        argnames,
                        new_argvalues,
                        ids=new_idlist,
                        scope="module")
def pytest_generate_tests(metafunc):
    """Parametrize with a mock host provider built from a *random* host of
    every virtualcenter entry in cfme_data (version >= 5.0 only).

    NOTE(review): ``iteritems`` implies Python 2 — confirm before porting.
    """
    arg_names = "provider", "provider_data", "original_provider_key"
    arg_values = []
    arg_ids = []
    for provider_key, provider in cfme_data.get("management_systems", {}).iteritems():
        if provider["type"] != "virtualcenter":
            continue
        hosts = provider.get("hosts", [])
        if not hosts:
            continue

        version = provider.get("version", None)
        if version is None:
            # No version, no test
            continue
        if Version(version) < "5.0":
            # Ignore lesser than 5
            continue

        # Unlike the sibling hooks above, a random host is picked here.
        host = random.choice(hosts)
        creds = credentials[host["credentials"]]
        ip_address = resolve_hostname(host["name"])
        cred = VMwareProvider.Credential(
            principal=creds["username"],
            secret=creds["password"],
            verify_secret=creds["password"]
        )
        # Mock provider data: copy the real provider's data and point it
        # at the single chosen host.
        provider_data = {}
        provider_data.update(provider)
        provider_data["name"] = host["name"]
        provider_data["hostname"] = host["name"]
        provider_data["ipaddress"] = ip_address
        provider_data["credentials"] = host["credentials"]
        provider_data.pop("host_provisioning", None)
        provider_data["hosts"] = [host]
        provider_data["discovery_range"] = {}
        provider_data["discovery_range"]["start"] = ip_address
        provider_data["discovery_range"]["end"] = ip_address
        host_provider = VMwareProvider(
            name=host["name"],
            hostname=host["name"],
            ip_address=ip_address,
            credentials={'default': cred},
            provider_data=provider_data,
        )
        arg_values.append([host_provider, provider_data, provider_key])
        arg_ids.append("{}/random_host".format(provider_key))
    metafunc.parametrize(arg_names, arg_values, ids=arg_ids, scope="module")
Example #15
0
 def default_release(self):
     """Return the product's default release parsed into a Version
     (taken from the cached ``_data`` mapping)."""
     return Version(self._data["default_release"])
Example #16
0
 def upstream_version(self):
     """Latest version of the default product, falling back to the
     configured bugzilla upstream_version (default "9.9") when no
     default product is set."""
     product = self.default_product
     if product is None:
         return Version(
             cfme_data.get("bugzilla", {}).get("upstream_version", "9.9"))
     return product.latest_version
Example #17
0
 def version(self):
     """Return the vSphere 'about' version string as a Version object."""
     return Version(self.api.si.content.about.version)
def load_node_and_link(node_save_info_list, line_save_info_list, global_info):
    """Rebuild nodes, links and junctions from saved dict data.

    The file format version (maj_ver/min_ver from *global_info*) gates
    which optional fields are read.  Returns the tuple
    (node_set, link_set, junction_set).
    """
    file_ver = Version(global_info['maj_ver'], global_info['min_ver'])
    # The basic type being loaded is a dict.

    # Build the node_set from node_save_info_list
    node_set = NodeSet()
    link_set = LineSet()
    junction_set = JunctionSet()

    # Create the nodes
    for save_info in node_save_info_list:
        idx = save_info['idx']
        point = save_info['point']
        # NOTE(review): bare except — older files simply lack these keys;
        # probably should be `except KeyError`.
        try:
            node_type = save_info['node_type']
        except:
            node_type = None

        try:
            on_stop_line = save_info['on_stop_line']
        except:
            on_stop_line = None

        node = Node(idx)
        node.point = np.array(point)
        node.node_type = node_type
        node.on_stop_line = on_stop_line

        # Create junctions (done while creating the nodes)
        if file_ver >= Version(2, 5):
            # From 2.5 on, 'junction' is a list of junction ids.
            junction_list = save_info['junction']

            if junction_list is None:
                continue
            elif len(junction_list) == 0:
                node.junctions = list()
            else:
                for junction_id in junction_list:
                    if junction_id in junction_set.junctions.keys():
                        repeated_jc = junction_set.junctions[junction_id]
                        repeated_jc.add_jc_node(node)
                    else:
                        new_junction = Junction(junction_id)
                        new_junction.add_jc_node(node)

                        junction_set.append_junction(new_junction)

        elif file_ver >= Version(2, 3):
            # Between 2.3 and 2.5, 'junction' was a single id (or None).
            junction_id = save_info['junction']

            if junction_id is not None:
                if junction_id in junction_set.junctions.keys():
                    repeated_jc = junction_set.junctions[junction_id]
                    repeated_jc.add_jc_node(node)
                else:
                    new_junction = Junction(junction_id)
                    new_junction.add_jc_node(node)

                    junction_set.append_junction(new_junction)

        node_set.append_node(node, create_new_key=False)

    # Create the links
    for save_info in line_save_info_list:
        idx = save_info['idx']
        from_node = node_set.nodes[save_info['from_node_idx']] if save_info[
            'from_node_idx'] in node_set.nodes else None
        to_node = node_set.nodes[save_info['to_node_idx']] if save_info[
            'to_node_idx'] in node_set.nodes else None
        points = save_info['points']
        lazy_init = save_info['lazy_init']
        link_type = save_info['link_type']
        # NOTE(review): bare except — falls back to defaults when the
        # width-related keys are absent in older files.
        try:
            force_width_start = save_info['force_width_start']
            width_start = save_info['width_start']
            force_width_end = save_info['force_width_end']
            width_end = save_info['width_end']
            enable_side_border = save_info['enable_side_border']
        except:
            force_width_start, width_start, force_width_end, width_end = Link.get_default_width_related_values(
            )
            enable_side_border = False

        # First set up the link using only the values above
        link = Link(idx=idx, lazy_point_init=lazy_init)
        link.set_from_node(from_node)
        link.set_to_node(to_node)
        link.set_width_related_values(force_width_start, width_start,
                                      force_width_end, width_end)
        link.set_points(np.array(points))
        link.link_type = link_type
        link.enable_side_border = enable_side_border

        # Files of version 2.2 and later carry max speed information
        if file_ver >= Version(2, 2):
            link.set_max_speed_kph(save_info['max_speed'])

        if file_ver >= Version(2, 4):
            link.road_id = save_info['road_id']
            link.ego_lane = save_info['ego_lane']
            link.lane_change_dir = save_info['lane_change_dir']
            link.hov = save_info['hov']

        if file_ver >= Version(2, 6):
            link.geometry = save_info['geometry']

        link.can_move_left_lane = save_info[
            'can_move_left_lane'] if 'can_move_left_lane' in save_info else False
        link.can_move_right_lane = save_info[
            'can_move_right_lane'] if 'can_move_right_lane' in save_info else False
        link.road_type = save_info[
            'road_type'] if 'road_type' in save_info else None
        link.related_signal = save_info[
            'related_signal'] if 'related_signal' in save_info else None
        link.its_link_id = save_info[
            'its_link_id'] if 'its_link_id' in save_info else None

        link_set.append_line(link, create_new_key=False)

    # Second pass: now that all links exist, wire up cross-references.
    for save_info in line_save_info_list:
        idx = save_info['idx']
        link = link_set.lines[idx]

        # Configure the following for each link
        if not link.is_it_for_lane_change():
            # If not a lane-change link, set the links reachable via lane change
            if save_info['left_lane_change_dst_link_idx'] is not None:
                dst_link = link_set.lines[
                    save_info['left_lane_change_dst_link_idx']]
                link.set_left_lane_change_dst_link(dst_link)
                if link.link_type in ['1', '2', '3']:
                    link.can_move_left_lane = False
                else:
                    link.can_move_left_lane = True

            if save_info['right_lane_change_dst_link_idx'] is not None:
                dst_link = link_set.lines[
                    save_info['right_lane_change_dst_link_idx']]
                link.set_right_lane_change_dst_link(dst_link)
                if link.link_type in ['1', '2', '3']:
                    link.can_move_right_lane = False
                else:
                    link.can_move_right_lane = True

        else:
            # If it is a lane-change link,

            # first convert lane_ch_link_path (stored as indices) into
            # references to the actual link objects
            lane_ch_link_path_idx = save_info['lane_ch_link_path']
            lane_ch_link_path = []
            for idx in lane_ch_link_path_idx:
                lane_ch_link_path.append(link_set.lines[idx])

            # then use these to set the link's internal values
            link.set_values_for_lane_change_link(lane_ch_link_path)

    # Compute the cost for every link
    for key, link in link_set.lines.items():
        link.calculate_cost()

    return node_set, link_set, junction_set
def main(appliance, jenkins_url, jenkins_user, jenkins_token, job_name):
    """Collect Jenkins coverage artifacts matching the appliance version,
    merge them on the appliance, and pull the merged HTML report locally.

    Returns 0-ish falsy on success via normal flow, or a distinct nonzero
    code (1-8) identifying which step failed.
    """
    appliance_version = str(appliance.version).strip()
    print('Looking for appliance version {} in {}'.format(
        appliance_version, job_name))
    client = jenkins.Jenkins(jenkins_url,
                             username=jenkins_user,
                             password=jenkins_token)
    build_numbers = get_build_numbers(client, job_name)
    if not build_numbers:
        print('No builds for job {}'.format(job_name))
        return 1

    # Find the builds with appliance version
    eligible_build_numbers = set()
    for build_number in build_numbers:
        try:
            artifacts = client.get_build_info(job_name,
                                              build_number)['artifacts']
            if not artifacts:
                raise ValueError()
        except (KeyError, ValueError):
            print('No artifacts for {}/{}'.format(job_name, build_number))
            continue

        artifacts = group_list_dict_by(artifacts, 'fileName')
        if 'appliance_version' not in artifacts:
            print('appliance_version not in artifacts of {}/{}'.format(
                job_name, build_number))
            continue

        build_appliance_version = download_artifact(
            jenkins_user, jenkins_token, jenkins_url, job_name, build_number,
            artifacts['appliance_version']['relativePath']).strip()

        # Builds are scanned newest-first; once an older version appears
        # there is nothing further worth checking.
        if Version(build_appliance_version) < Version(appliance_version):
            print('Build {} already has lower version ({})'.format(
                build_number, build_appliance_version))
            print('Ending here')
            break

        if 'coverage-results.tgz' not in artifacts:
            print('coverage-results.tgz not in artifacts of {}/{}'.format(
                job_name, build_number))
            continue

        if build_appliance_version == appliance_version:
            # Typo fixed: "waas" -> "was".
            print('Build {} was found to contain what is needed'.format(
                build_number))
            eligible_build_numbers.add(build_number)
        else:
            print(
                'Skipping build {} because it does not have correct version ({})'
                .format(build_number, build_appliance_version))

    if not eligible_build_numbers:
        print('Could not find coverage reports for {} in {}'.format(
            appliance_version, job_name))
        return 2

    # Stop the evm service, not needed at all
    print('Stopping evmserverd')
    appliance.evmserverd.stop()
    # Install the coverage tools on the appliance
    print('Installing simplecov')
    appliance.coverage._install_simplecov()
    # Upload the merger
    print('Installing coverage merger')
    appliance.coverage._upload_coverage_merger()
    with appliance.ssh_client as ssh:
        if not ssh.run_command('mkdir -p /var/www/miq/vmdb/coverage'):
            print(
                'Could not create /var/www/miq/vmdb/coverage on the appliance!'
            )
            return 3
        # Download all the coverage reports
        for build_number in eligible_build_numbers:
            print('Downloading the coverage report from build {}'.format(
                build_number))
            download_url = jenkins_artifact_url(
                jenkins_user, jenkins_token, jenkins_url, job_name,
                build_number, 'log/coverage/coverage-results.tgz')
            cmd = ssh.run_command(
                'curl -k -o /var/www/miq/vmdb/coverage/tmp.tgz {}'.format(
                    quote(download_url)))
            if not cmd:
                print('Could not download! - {}'.format(str(cmd)))
                return 4
            print('Extracting the coverage report from build {}'.format(
                build_number))
            extract_command = ' && '.join([
                'cd /var/www/miq/vmdb/coverage',
                'tar xf tmp.tgz --strip-components=1',
                'rm -f tmp.tgz',
            ])
            cmd = ssh.run_command(extract_command)
            if not cmd:
                print('Could not extract! - {}'.format(str(cmd)))
                return 5

        # Now run the merger
        print('Running the merger')
        cmd = ssh.run_command(
            'cd /var/www/miq/vmdb; time bin/rails runner coverage_merger.rb')
        if not cmd:
            print('Failure running the merger - {}'.format(str(cmd)))
            return 6
        else:
            print('Coverage report generation was successful')
            print(str(cmd))
        print('Packing the generated HTML')
        cmd = ssh.run_command(
            'cd /var/www/miq/vmdb/coverage; tar cfz /tmp/merged.tgz merged')
        if not cmd:
            print('Could not compress! - {}'.format(str(cmd)))
            return 7
        print('Grabbing the generated HTML')
        ssh.get_file('/tmp/merged.tgz', log_path.strpath)
        print('Decompressing the generated HTML')
        rc = subprocess.call([
            'tar', 'xf',
            log_path.join('merged.tgz').strpath, '-C', log_path.strpath
        ])
        if rc == 0:
            print('Done!')
        else:
            print('Failure to extract')
            return 8
Example #20
0
 def version(self):
     """Return the cached version string parsed into a Version object."""
     return Version(self._version)
Example #21
0
 def versions(self):
     """All version names recorded in the cached data, parsed into
     Version objects and sorted ascending; names listed in NONE_FIELDS
     are skipped."""
     return sorted(
         Version(entry["name"])
         for entry in self._data["versions"]
         if entry["name"] not in NONE_FIELDS)
Example #22
0
def generate(env, version=None, abi=None, topdir=None, verbose=0):
    """Add Builders and construction variables for Intel C/C++ compiler
    to an Environment.
    args:
      version: (string) compiler version to use, like "80"
      abi:     (string) 'win32' or whatever Itanium version wants
      topdir:  (string) compiler top dir, like
                         "c:\Program Files\Intel\Compiler70"
                        If topdir is used, version and abi are ignored.
      verbose: (int)    if >0, prints compiler version used.
    """
    if not (is_mac or is_linux or is_windows):
        # can't handle this platform
        return

    # Seed the environment with the platform's default toolchain settings;
    # the Intel-specific values below then override compiler names and paths.
    if is_windows:
        SCons.Tool.msvc.generate(env)
    elif is_linux:
        SCons.Tool.gcc.generate(env)
    elif is_mac:
        SCons.Tool.gcc.generate(env)

    # if version is unspecified, use latest
    vlist = detect_installed_versions()
    if not version:
        if vlist:
            version = vlist[0]
    else:
        # User may have specified '90' but we need to get actual dirname '9.0'.
        # get_version_from_list does that mapping.
        v = get_version_from_list(version, vlist)
        if not v:
            raise SCons.Errors.UserError("Invalid Intel compiler version %s: "%version + \
                  "installed versions are %s"%(', '.join(vlist)))
        version = v

    # Record what was resolved so build scripts can inspect it later.
    env['COMPILER_VERSION_DETECTED']  = Version(version)
    env['COMPILER_VERSION_INSTALLED'] = vlist

    # if abi is unspecified, use ia32
    # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here)
    abi = check_abi(abi)
    if abi is None:
        if is_mac or is_linux:
            # Check if we are on 64-bit linux, default to 64 then.
            uname_m = os.uname()[4]
            if uname_m == 'x86_64':
                abi = 'x86_64'
            else:
                abi = 'ia32'
        else:
            if is_win64:
                abi = 'em64t'
            else:
                abi = 'ia32'

    # Locate the compiler install root unless the caller supplied topdir.
    if version and not topdir:
        try:
            topdir = get_intel_compiler_top(version, abi)
        except (SCons.Util.RegError, IntelCError):
            topdir = None

    if not topdir:
        # Normally this is an error, but it might not be if the compiler is
        # on $PATH and the user is importing their env.
        class ICLTopDirWarning(SCons.Warnings.Warning):
            pass
        if (is_mac or is_linux) and not env.Detect('icc') or \
           is_windows and not env.Detect('icl'):

            SCons.Warnings.enableWarningClass(ICLTopDirWarning)
            SCons.Warnings.warn(ICLTopDirWarning,
                                "Failed to find Intel compiler for version='%s', abi='%s'"%
                                (str(version), str(abi)))
        else:
            # should be cleaned up to say what this other version is
            # since in this case we have some other Intel compiler installed
            SCons.Warnings.enableWarningClass(ICLTopDirWarning)
            SCons.Warnings.warn(ICLTopDirWarning,
                                "Can't find Intel compiler top dir for version='%s', abi='%s'"%
                                    (str(version), str(abi)))

    if topdir:
        # Map every accepted ABI synonym onto the bin/lib subdirectory name
        # used by compiler install layouts v11 and newer.
        archdir={'x86_64': 'intel64',
                 'amd64' : 'intel64',
                 'em64t' : 'intel64',
                 'x86'   : 'ia32',
                 'i386'  : 'ia32',
                 'ia32'  : 'ia32'
        }[abi] # for v11 and greater
        if os.path.exists(os.path.join(topdir, 'bin', archdir)):
            bindir="bin/%s"%archdir
            libdir="lib/%s"%archdir
        else:
            # Older layouts keep binaries directly under bin/ and lib/.
            bindir="bin"
            libdir="lib"
        if verbose:
            # NOTE(review): Python 2 "print" statement — this tool targets Python 2.
            print "Intel C compiler: using version %s (%g), abi %s, in '%s/%s'"%\
                  (repr(version), linux_ver_normalize(version),abi,topdir,bindir)
            if is_linux:
                # Show the actual compiler version by running the compiler.
                os.system('%s/%s/icc --version'%(topdir,bindir))
            if is_mac:
                # Show the actual compiler version by running the compiler.
                os.system('%s/%s/icc --version'%(topdir,bindir))

        env['INTEL_C_COMPILER_TOP'] = topdir
        if is_linux:
            # Prepend the install's include/lib/bin dirs to the build env.
            paths={'INCLUDE'         : 'include',
                   'LIB'             : libdir,
                   'PATH'            : bindir,
                   'LD_LIBRARY_PATH' : libdir}
            for p in paths.keys():
                env.PrependENVPath(p, os.path.join(topdir, paths[p]))
        if is_mac:
            # Same path setup as Linux; kept as a separate branch in the
            # original code.
            paths={'INCLUDE'         : 'include',
                   'LIB'             : libdir,
                   'PATH'            : bindir,
                   'LD_LIBRARY_PATH' : libdir}
            for p in paths.keys():
                env.PrependENVPath(p, os.path.join(topdir, paths[p]))
        if is_windows:
            #       env key    reg valname   default subdir of top
            paths=(('INCLUDE', 'IncludeDir', 'Include'),
                   ('LIB'    , 'LibDir',     'Lib'),
                   ('PATH'   , 'BinDir',     bindir))
            # We are supposed to ignore version if topdir is set, so set
            # it to the empty string if it's not already set.
            if version is None:
                version = ''
            # Each path has a registry entry, use that or default to subdir
            for p in paths:
                try:
                    path=get_intel_registry_value(p[1], version, abi)
                    # These paths may have $(ICInstallDir)
                    # which needs to be substituted with the topdir.
                    path=path.replace('$(ICInstallDir)', topdir + os.sep)
                except IntelCError:
                    # Couldn't get it from registry: use default subdir of topdir
                    env.PrependENVPath(p[0], os.path.join(topdir, p[2]))
                else:
                    env.PrependENVPath(p[0], path.split(os.pathsep))
                    # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]]))

    # Select the Intel driver program names for this platform.
    if is_windows:
        env['CC']        = 'icl'
        env['CXX']       = 'icl'
        env['LINK']      = 'xilink'
        env['AR']        = 'xilib'
    else:
        env['CC']        = 'icc'
        env['CXX']       = 'icpc'
        # Don't reset LINK here;
        # use smart_link which should already be here from link.py.
        #env['LINK']      = '$CC'
        env['AR']        = 'xiar'
        env['LD']        = 'xild' # not used by default

    # This is not the exact (detailed) compiler version,
    # just the major version as determined above or specified
    # by the user.  It is a float like 80 or 90, in normalized form for Linux
    # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0)
    if version:
        env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version)

    if is_windows:
        # Look for license file dir
        # in system environment, registry, and default location.
        envlicdir = os.environ.get("INTEL_LICENSE_FILE", '')
        K = ('SOFTWARE\Intel\Licenses')
        try:
            k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K)
            reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0]
        except (AttributeError, SCons.Util.RegError):
            reglicdir = ""
        defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses'

        licdir = None
        for ld in [envlicdir, reglicdir]:
            # If the string contains an '@', then assume it's a network
            # license (port@system) and good by definition.
            if ld and (ld.find('@') != -1 or os.path.exists(ld)):
                licdir = ld
                break
        if not licdir:
            # Fall back to the default install location, warning if even
            # that does not exist.
            licdir = defaultlicdir
            if not os.path.exists(licdir):
                class ICLLicenseDirWarning(SCons.Warnings.Warning):
                    pass
                SCons.Warnings.enableWarningClass(ICLLicenseDirWarning)
                SCons.Warnings.warn(ICLLicenseDirWarning,
                                    "Intel license dir was not found."
                                    "  Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)."
                                    "  Using the default path as a last resort."
                                        % (envlicdir, reglicdir, defaultlicdir))
        env['ENV']['INTEL_LICENSE_FILE'] = licdir

    # In our CentOS 6 machines, we are using the custom gcc 4.8.2 toolchain from
    # http://people.centos.org/tru/devtools-2/readme. We should point to it in
    # order to use C++11 features.
    if sa.system.is_linux:
      gcc_toolchain = '/opt/rh/devtoolset-2/root/usr'
      if os.path.exists(gcc_toolchain):
         env['GCC_SUPPORT_ROOT'] = gcc_toolchain
         env.PrependENVPath('PATH', os.path.join(gcc_toolchain, 'bin'))