Example 1
0
    def test_prepare_toolchain_no_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain correctly deals with no app_config

        :param mock_config_init: mock of Config __init__
        :return:
        """
        mock_config_init.return_value = None

        prepare_toolchain(self.src_paths, self.target, self.toolchain_name)

        mock_config_init.assert_called_with(self.target,
                                            self.src_paths,
                                            app_config=None)
Example 2
0
    def test_prepare_toolchain_no_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain correctly deals with no app_config

        :param mock_config_init: mock of Config __init__
        :return:
        """
        mock_config_init.return_value = namedtuple("Config", "target")(
            namedtuple("Target",
                       "init_hooks name features core")(lambda _, __ : None,
                                                        "Junk", [], "Cortex-M3"))

        prepare_toolchain(self.src_paths, self.target, self.toolchain_name)

        mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                 app_config=None)
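
Examples 1 and 2 are methods of a unittest.TestCase; they rely on a setUp fixture that provides self.src_paths, self.target and self.toolchain_name, and on a mock.patch decorator that supplies mock_config_init. A minimal sketch of that scaffolding, assuming both the patch target path and the fixture values (neither is given in this listing):

import unittest

from mock import patch  # unittest.mock on Python 3

from tools.build_api import prepare_toolchain


@patch('tools.build_api.Config')  # assumed patch target: Config as imported by build_api
class BuildApiTests(unittest.TestCase):
    """Hypothetical fixture for the prepare_toolchain tests in this listing."""

    def setUp(self):
        self.src_paths = ['.']           # any source directory
        self.target = 'K64F'             # placeholder mbed target name
        self.toolchain_name = 'GCC_ARM'  # placeholder toolchain name

    # the test_prepare_toolchain_* methods shown above would live here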
Example 3
0
    def test_prepare_toolchain_no_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain correctly deals with no app_config

        :param mock_config_init: mock of Config __init__
        :return:
        """
        mock_target = make_mock_target(lambda _, __ : None,
                                       "Junk", [], "Cortex-M3", TOOLCHAINS)
        mock_config_init.return_value = namedtuple(
            "Config", "target has_regions name")(mock_target, False, None)

        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name)

        mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                 app_config=None)
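
Example 3 (and several later examples) calls a make_mock_target helper that is not defined in this listing. A plausible sketch, assuming it simply packs its arguments into a namedtuple exposing the attributes prepare_toolchain reads from a target; the field list is an assumption, and Example 7 shows the real helper also accepting a build-tools-metadata argument:

from collections import namedtuple

# Hypothetical helper: the real one lives in the test module and may differ.
MockTarget = namedtuple(
    "Target", "init_hooks name features core supported_toolchains")


def make_mock_target(init_hooks, name, features, core, supported_toolchains):
    """Bundle the attributes prepare_toolchain expects to find on a target."""
    return MockTarget(init_hooks, name, features, core, supported_toolchains)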
Example 4
0
    def test_prepare_toolchain_no_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain correctly deals with no app_config

        :param mock_config_init: mock of Config __init__
        :return:
        """
        mock_target = make_mock_target(lambda _, __ : None,
                                       "Junk", [], "Cortex-M3", TOOLCHAINS)
        mock_config_init.return_value = namedtuple(
            "Config", "target has_regions name")(mock_target, False, None)

        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name)

        mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                 app_config=None)
Example 5
0
    def test_prepare_toolchain_no_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain correctly deals with no app_config

        :param mock_config_init: mock of Config __init__
        :return:
        """
        mock_config_init.return_value = namedtuple("Config", "target")(
            namedtuple("Target",
                       "init_hooks name features core")(lambda _, __: None,
                                                        "Junk", [],
                                                        "Cortex-M3"))

        prepare_toolchain(self.src_paths, self.target, self.toolchain_name)

        mock_config_init.assert_called_once_with(self.target,
                                                 self.src_paths,
                                                 app_config=None)
Example 6
0
    def test_prepare_toolchain_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain uses app_config correctly

        :param mock_config_init: mock of Config __init__
        :return:
        """
        app_config = "app_config"
        mock_config_init.return_value = None

        prepare_toolchain(self.src_paths,
                          self.target,
                          self.toolchain_name,
                          app_config=app_config)

        mock_config_init.assert_called_with(self.target,
                                            self.src_paths,
                                            app_config=app_config)
Example 7
0
    def test_prepare_toolchain_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain uses app_config correctly

        :param mock_config_init: mock of Config __init__
        :return:
        """
        app_config = "app_config"
        mock_target = make_mock_target(lambda _ : None,
                                       "Junk", [], "Cortex-M3", TOOLCHAINS, mock_build_tools_metadata)
        mock_config_init.return_value = namedtuple(
            "Config", "target has_regions name")(mock_target, False, None)

        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name,
                          app_config=app_config)

        mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                 app_config=app_config)
Example 8
0
    def test_prepare_toolchain_app_config(self, mock_config_init):
        """
        Test that prepare_toolchain uses app_config correctly

        :param mock_config_init: mock of Config __init__
        :return:
        """
        app_config = "app_config"
        mock_target = namedtuple("Target",
                                 "init_hooks name features core")(lambda _, __ : None,
                                                                  "Junk", [], "Cortex-M3")
        mock_config_init.return_value = namedtuple("Config",
                                                   "target has_regions")(
                                                       mock_target,
                                                       False)

        prepare_toolchain(self.src_paths, None, self.target, self.toolchain_name,
                          app_config=app_config)

        mock_config_init.assert_called_once_with(self.target, self.src_paths,
                                                 app_config=app_config)
Example 9
0
    def test_always_complete_build(self, *_):
        with MagicMock() as notify:
            toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
                                          self.toolchain_name, notify=notify)

            res = scan_resources(self.src_paths, toolchain)

            toolchain.RESPONSE_FILES = False
            toolchain.config_processed = True
            toolchain.config_file = "junk"
            toolchain.compile_sources(res)

            assert any('percent' in msg[0] and msg[0]['percent'] == 100.0
                       for _, msg, _ in notify.mock_calls if msg)
Example 10
0
    def test_always_complete_build(self, *_):
        notify = MockNotifier()
        toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
                                        self.toolchain_name, notify=notify)

        res = scan_resources(self.src_paths, toolchain)

        toolchain.RESPONSE_FILES = False
        toolchain.config_processed = True
        toolchain.config_file = "junk"
        toolchain.compile_sources(res)

        assert any('percent' in msg and msg['percent'] == 100.0
                   for msg in notify.messages if msg)
Example 11
0
    def test_always_complete_build(self, *_):
        notify = MockNotifier()
        toolchain = prepare_toolchain(self.src_paths, self.build_path, self.target,
                                      self.toolchain_name, notify=notify)

        res = Resources(MockNotifier()).scan_with_toolchain(
            self.src_paths, toolchain)

        toolchain.RESPONSE_FILES = False
        toolchain.config_processed = True
        toolchain.config_file = "junk"
        toolchain.compile_sources(res)

        assert any('percent' in msg and msg['percent'] == 100.0
                   for msg in notify.messages if msg)
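
Examples 10 and 11 inspect notify.messages after compile_sources runs. A minimal stand-in that would satisfy that assertion, assuming all the test needs is a notifier that stores every event dict it receives (the real MockNotifier in the mbed tools derives from a Notifier base class that adds convenience methods such as progress and cc_info, all funnelling into notify):

class MockNotifier(object):
    """Hypothetical stand-in: collect every event emitted during a build."""

    def __init__(self):
        self.messages = []

    def notify(self, message):
        self.messages.append(message)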
Example 12
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        options = {}

        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        print('\nCreate a System Workbench for STM32 managed project')
        print('Project name: {0}'.format(self.project_name))
        print('Target:       {0}'.format(self.toolchain.target.name))
        print('Toolchain:    {0}'.format(self.TOOLCHAIN) + '\n')

        self.resources.win_to_unix()

        config_header = self.toolchain.get_config_header()
        if config_header:
            config_header = relpath(config_header, self.resources.file_basepath[config_header])

        libraries = []
        for lib in self.resources.libraries:
            library, _ = splitext(basename(lib))
            libraries.append(library[3:])

        self.system_libraries = [
            'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
        ]

        profiles = self.get_all_profiles()
        # Escape double quotes for the generated XML project files.
        self.as_defines = [s.replace('"', '&quot;')
                           for s in self.toolchain.get_symbols(True)]
        self.c_defines = [s.replace('"', '&quot;')
                          for s in self.toolchain.get_symbols()]
        self.cpp_defines = self.c_defines

        self.include_path = []
        for s in self.resources.inc_dirs:
            self.include_path.append("../" + self.filter_dot(s))
        print('Include folders: {0}'.format(len(self.include_path)))

        self.compute_exclusions()

        print('Exclude folders: {0}'.format(len(self.excluded_folders)))

        ld_script = self.filter_dot(self.resources.linker_script)
        print('Linker script:   {0}'.format(ld_script))

        lib_dirs = [self.filter_dot(s) for s in self.resources.lib_dirs]

        preproc_cmd = basename(self.toolchain.preproc[0]) + " " + " ".join(self.toolchain.preproc[1:])

        for id in ['debug', 'release']:
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            profile = profiles[id]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            toolchain = prepare_toolchain(
                src_paths, "", self.toolchain.target.name, self.TOOLCHAIN, build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines
            opts['as']['defines'] = self.as_defines

            self.process_sw_options(opts, flags)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs]

            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = "linker-script-" + id + ".ld"

            # Unique IDs used in multiple places.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            options[id] = opts

        ctx = {
            'name': self.project_name,
            'platform': platform,
            'include_paths': self.include_path,
            'config_header': config_header,
            'exclude_paths': '|'.join(self.excluded_folders),
            'ld_script': ld_script,
            'library_paths': lib_dirs,
            'object_files': self.resources.objects,
            'libraries': libraries,
            'board_name': self.BOARDS[self.target.upper()]['name'],
            'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
            'cpp_cmd': preproc_cmd,
            'options': options,
            # id property of 'u' will generate new random identifier every time
            # when called.
            'u': u
        }

        self.__gen_dir('.settings')
        self.gen_file('sw4stm32/language_settings_commom.tmpl',
                      ctx, '.settings/language.settings.xml')
        self.gen_file('sw4stm32/project_common.tmpl', ctx, '.project')
        self.gen_file('sw4stm32/cproject_common.tmpl', ctx, '.cproject')
        self.gen_file('sw4stm32/makefile.targets.tmpl', ctx,
                      'makefile.targets', trim_blocks=True, lstrip_blocks=True)
        self.gen_file('sw4stm32/launch.tmpl', ctx, self.project_name +
                      ' ' + options['debug']['name'] + '.launch')
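
The context dictionaries above hand the templates an object u whose id attribute must produce a fresh identifier on every access, as the inline comment notes. A sketch of such a helper; the nine-digit random format is an assumption:

import random


class UID(object):
    """Yield a new identifier each time the 'id' property is read."""

    @property
    def id(self):
        # Nine random decimal digits, zero-padded; the exact format is assumed.
        return "%0.9u" % random.randint(0, 999999999)


# Module-level instance referenced as 'u' by generate() and create_jinja_ctx().
u = UID()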
Example 13
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        options = {}

        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        print('\nCreate a System Workbench for STM32 managed project')
        print('Project name: {0}'.format(self.project_name))
        print('Target:       {0}'.format(self.toolchain.target.name))
        print('Toolchain:    {0}'.format(self.TOOLCHAIN) + '\n')

        self.resources.win_to_unix()

        config_header = self.filter_dot(self.toolchain.get_config_header())

        libraries = []
        for lib in self.resources.libraries:
            library, _ = splitext(basename(lib))
            libraries.append(library[3:])

        self.system_libraries = ['stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys']

        profiles = self.get_all_profiles()
        # Escape double quotes for the generated XML project files.
        self.as_defines = [
            s.replace('"', '&quot;') for s in self.toolchain.get_symbols(True)
        ]
        self.c_defines = [
            s.replace('"', '&quot;') for s in self.toolchain.get_symbols()
        ]
        self.cpp_defines = self.c_defines
        print('Symbols: {0}'.format(len(self.c_defines)))

        self.include_path = []
        for s in self.resources.inc_dirs:
            self.include_path.append("../" + self.filter_dot(s))
        print('Include folders: {0}'.format(len(self.include_path)))

        self.compute_exclusions()

        print('Exclude folders: {0}'.format(len(self.excluded_folders)))

        ld_script = self.filter_dot(self.resources.linker_script)
        print('Linker script:   {0}'.format(ld_script))

        lib_dirs = [self.filter_dot(s) for s in self.resources.lib_dirs]

        preproc_cmd = basename(self.toolchain.preproc[0]) + " " + " ".join(
            self.toolchain.preproc[1:])

        for id in ['debug', 'release']:
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            # TODO: Add prints to log or console in verbose mode.
            #print ('\nBuild configuration: {0}'.format(opts['name']))

            profile = profiles[id]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            toolchain = prepare_toolchain(src_paths,
                                          "",
                                          self.toolchain.target.name,
                                          self.TOOLCHAIN,
                                          build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            # TODO: Add prints to log or console in verbose mode.
            # print 'Common flags:', ' '.join(flags['common_flags'])
            # print 'C++ flags:', ' '.join(flags['cxx_flags'])
            # print 'C flags:', ' '.join(flags['c_flags'])
            # print 'ASM flags:', ' '.join(flags['asm_flags'])
            # print 'Linker flags:', ' '.join(flags['ld_flags'])

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines
            opts['as']['defines'] = self.as_defines

            self.process_sw_options(opts, flags)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs
            ]

            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = "linker-script-" + id + ".ld"

            # Unique IDs used in multiple places.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            options[id] = opts

        ctx = {
            'name': self.project_name,
            'platform': platform,
            'include_paths': self.include_path,
            'config_header': config_header,
            'exclude_paths': '|'.join(self.excluded_folders),
            'ld_script': ld_script,
            'library_paths': lib_dirs,
            'object_files': self.resources.objects,
            'libraries': libraries,
            'board_name': self.BOARDS[self.target.upper()]['name'],
            'mcu_name': self.BOARDS[self.target.upper()]['mcuId'],
            'cpp_cmd': preproc_cmd,
            'options': options,
            # id property of 'u' will generate new random identifier every time
            # when called.
            'u': u
        }

        self.__gen_dir('.settings')
        self.gen_file('sw4stm32/language_settings_commom.tmpl', ctx,
                      '.settings/language.settings.xml')
        self.gen_file('sw4stm32/project_common.tmpl', ctx, '.project')
        self.gen_file('sw4stm32/cproject_common.tmpl', ctx, '.cproject')
        self.gen_file('sw4stm32/makefile.targets.tmpl',
                      ctx,
                      'makefile.targets',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file(
            'sw4stm32/launch.tmpl', ctx,
            self.project_name + ' ' + options['debug']['name'] + '.launch')
Example 14
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        print
        print 'Create a GNU ARM Eclipse C++ managed project'
        print 'Project name: {0}'.format(self.project_name)
        print 'Target: {0}'.format(self.toolchain.target.name)
        print 'Toolchain: {0}'.format(self.TOOLCHAIN)

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        libraries = []
        # print 'libraries'
        # print self.resources.libraries
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        self.system_libraries = [
            'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
        ]

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')
        # print build_folders

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]
        # print 'objects'
        # print objects

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs]
        print 'Include folders: {0}'.format(len(self.include_path))

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines
        print 'Symbols: {0}'.format(len(self.c_defines))

        self.ld_script = self.filter_dot(
            self.resources.linker_script)
        print 'Linker script: {0}'.format(self.ld_script)

        self.options = {}
        for id in profile_ids:

            # There are 4 categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            print
            print 'Build configuration: {0}'.format(opts['name'])

            profile = profiles[id]
            profile_toolchain = profile[self.TOOLCHAIN]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(
                src_paths, "", target_name, self.TOOLCHAIN, build_profile=profile_toolchain)

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            print 'Common flags:', ' '.join(flags['common_flags'])
            print 'C++ flags:', ' '.join(flags['cxx_flags'])
            print 'C flags:', ' '.join(flags['c_flags'])
            print 'ASM flags:', ' '.join(flags['asm_flags'])
            print 'Linker flags:', ' '.join(flags['ld_flags'])

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = self.ld_script

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }

        # TODO: it would be good to have jinja stop if one of the
        # expected context values is not defined.
        self.gen_file('gnuarmeclipse/.project.tmpl', jinja_ctx,
                      '.project', trim_blocks=True, lstrip_blocks=True)
        self.gen_file('gnuarmeclipse/.cproject.tmpl', jinja_ctx,
                      '.cproject', trim_blocks=True, lstrip_blocks=True)
        self.gen_file('gnuarmeclipse/makefile.targets.tmpl', jinja_ctx,
                      'makefile.targets', trim_blocks=True, lstrip_blocks=True)

        if not exists('.mbedignore'):
            print
            print 'Create .mbedignore'
            with open('.mbedignore', 'w') as f:
                for bf in build_folders:
                    print bf + '/'
                    f.write(bf + '/\n')

        print
        print 'Done. Import the \'{0}\' project in Eclipse.'.format(self.project_name)
Example 15
0
    def create_jinja_ctx(self):

        self.validate_resources()

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        libraries = []
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        self.system_libraries = [
            'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
        ]

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs]

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines

        self.ld_script = self.filter_dot(
            self.resources.linker_script)

        self.options = {}
        for id in profile_ids:

            # There are 4 categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()


            profile = profiles[id]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(
                src_paths, "", target_name, self.TOOLCHAIN, build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir
            toolchain.config = self.toolchain.config
            toolchain.set_config_data(self.toolchain.config.get_config_data())

            flags = self.toolchain_flags(toolchain)

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = join(id.capitalize(),
                                        "linker-script-%s.ld" % id)
            opts['cpp_cmd'] = '"{}"'.format(toolchain.preproc[0]) + " " + " ".join(toolchain.preproc[1:])

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,
            'ld_script': self.ld_script,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }
        return jinja_ctx
Example 16
0
def main():
    library_names = {
        'cpputest': "CppUTest",
        'usb_host': "USB Host support",
        'usb': "USB Device support",
        'ublox': "U-blox drivers",
        'rtos': "RTOS abstraction layer",
        'dsp': "DSP Library",
        'rpc': "RPC Support",
        'fat': "FAT File System support",
        'eth': "Ethernet support",
        'rtx': "Keil RTX RTOS",
        'features': 'Device features'
    }

    print("Parsing targets...")

    supported_targets = TARGET_NAMES
    copy._deepcopy_dispatch[type(re.compile('x'))] = copy_regex

    # IPV6 is configured, but not listed as supported. Ignore it.
    supported_targets = [t for t in supported_targets if "Super_Target" not in t]

    rootNode = ElementTree.Element('ParsedTargetList')
    targetListNode = append_node(rootNode, 'Targets')
    targetNumber = 1

    for target in supported_targets:
        print('\t' + target + ' (' + str(targetNumber) + '/' + str(len(supported_targets)) + ')...')
        targetNumber = targetNumber + 1
        targetNode = append_node(targetListNode, 'Target')
        targetNode.append(make_node('ID', target))

        toolchain = ba.prepare_toolchain([ROOT], "", target, 'GCC_ARM', silent=True)

        fullRes = toolchain.scan_resources(ROOT)
        fullRes.toolchain = toolchain

        hexFiles = bootloader_scanner.LocateHexFiles(toolchain, fullRes)

        #Find optional libraries (that have mbed_lib.json file in the root directory)
        mbed_library_dirs = [os.path.dirname(j) for j in fullRes.json_files if os.path.basename(j) == 'mbed_lib.json']

        #Treat the MBED platform library as a part of the SDK and exclude the TARGET_xxx subdirs inside library dirs
        mbed_library_dirs = [lib for lib in mbed_library_dirs if lib != os.path.join(ROOT, 'platform') and not os.path.basename(lib).startswith("TARGET_")]

        #Now rescan everything except the optional libraries under the 'features' directory
        minimal_res = toolchain.scan_resources(ROOT, exclude_paths = [os.path.join(ROOT, 'features')] + mbed_library_dirs)

        targetNode.append(make_node('Features', ";".join(toolchain.target.features)))
        toolchain.target.features = []
        toolchain.config.load_resources(minimal_res)

        baseCfg = BuildConfiguration(toolchain, minimal_res, hexFiles)
        targetNode.append(baseCfg.ToXML('BaseConfiguration'))
        targetNode.append(make_node('CFLAGS', ";".join(toolchain.cpu[:])))

        derivedCfgListNode = append_node(targetNode, 'DerivedConfigurations')

        for libDir in mbed_library_dirs:
            libNode = append_node(derivedCfgListNode, 'DerivedConfiguration')
            libNode.append(make_node('Library', libDir))

            libToolchain = deepcopy(toolchain)
            libRes = libToolchain.scan_resources(libDir)
            libToolchain.config.load_resources(libRes)
            libCfg = BuildConfiguration(libToolchain, libRes)
            libNode.append(libCfg.ToXML('Configuration'))

        for feature in copy.copy(fullRes.features):
            featureNode = append_node(derivedCfgListNode, 'DerivedConfiguration')
            featureNode.append(make_node('Feature', feature))

            featureToolchain = deepcopy(toolchain)
            featureRes = fullRes.features[feature]
            featureToolchain.config.load_resources(featureRes)
            featureCfg = BuildConfiguration(featureToolchain, featureRes)
            featureNode.append(featureCfg.ToXML('Configuration'))

        for lib in LIBRARIES:
            if lib['id'] in ['rtos', 'rtx']:
                continue  # Already handled via mbed_library_dirs
            sourceDirs = lib['source_dir']
            if isinstance(sourceDirs, str):
                sourceDirs = [sourceDirs]

            libNode = append_node(derivedCfgListNode, 'DerivedConfiguration')
            libNode.append(make_node('Library', lib['id']))
            libNode.append(make_node('LibraryName', library_names.get(lib['id'])))

            cfgListNode = append_node(libNode, 'ConfigurationsToMerge')

            for srcDir in sourceDirs:
                libToolchain = deepcopy(toolchain)
                libRes = libToolchain.scan_resources(srcDir)
                libToolchain.config.load_resources(libRes)
                libCfg = BuildConfiguration(libToolchain, libRes)
                cfgListNode.append(libCfg.ToXML('BuildConfiguration'))

    rootNode.attrib['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
    rootNode.attrib['xmlns:xsd'] = 'http://www.w3.org/2001/XMLSchema'
    xml_str = ElementTree.tostring(rootNode)
    with open(join(ROOT, 'ParsedTargets.xml'), 'w') as xml_file:
        xml_file.write(xml_str.encode('utf-8'))
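
Example 16 registers a copy_regex handler so that deepcopy() can clone toolchains holding compiled regular expressions, but the handler itself is not shown. A minimal sketch, assuming it is safe to share the compiled pattern because pattern objects are immutable:

import copy
import re


def copy_regex(pattern, memo):
    """deepcopy dispatch hook: compiled regexes are immutable, so reuse them."""
    return pattern


# Registered once before any deepcopy(toolchain) call, as main() does above.
copy._deepcopy_dispatch[type(re.compile('x'))] = copy_regex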
Example 17
0
    def create_jinja_ctx(self):

        self.validate_resources()

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        libraries = []
        library_files = []
        for lib in self.libraries:
            library_files.append(self.filter_dot(lib))
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        self.system_libraries = ['stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys']

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs
        ]

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines

        self.ld_script = self.filter_dot(self.resources.linker_script)

        self.options = {}
        for id in profile_ids:

            # There are 4 categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            profile = profiles[id]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(src_paths,
                                          "",
                                          target_name,
                                          self.TOOLCHAIN,
                                          build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir
            toolchain.config = self.toolchain.config
            toolchain.set_config_data(self.toolchain.config.get_config_data())

            flags = self.toolchain_flags(toolchain)

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs
            ]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = libraries
            opts['ld']['user_library_files'] = library_files
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = join(id.capitalize(),
                                        "linker-script-%s.ld" % id)
            opts['cpp_cmd'] = '"{}"'.format(
                toolchain.preproc[0]) + " " + " ".join(toolchain.preproc[1:])

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,
            'ld_script': self.ld_script,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }
        return jinja_ctx
Example 18
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        print
        print 'Create a GNU ARM Eclipse C++ managed project'
        print 'Project name: {0}'.format(self.project_name)
        print 'Target: {0}'.format(self.toolchain.target.name)
        print 'Toolchain: {0}'.format(self.TOOLCHAIN)

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        libraries = []
        # print 'libraries'
        # print self.resources.libraries
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        self.system_libraries = ['stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys']

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')
        # print build_folders

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]
        # print 'objects'
        # print objects

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs
        ]
        print 'Include folders: {0}'.format(len(self.include_path))

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines
        print 'Symbols: {0}'.format(len(self.c_defines))

        self.ld_script = self.filter_dot(self.resources.linker_script)
        print 'Linker script: {0}'.format(self.ld_script)

        self.options = {}
        for id in profile_ids:

            # There are 4 categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            print
            print 'Build configuration: {0}'.format(opts['name'])

            profile = profiles[id]
            profile_toolchain = profile[self.TOOLCHAIN]

            # A small hack, do not bother with src_path again,
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(src_paths,
                                          "",
                                          target_name,
                                          self.TOOLCHAIN,
                                          build_profile=profile_toolchain)

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            print 'Common flags:', ' '.join(flags['common_flags'])
            print 'C++ flags:', ' '.join(flags['cxx_flags'])
            print 'C flags:', ' '.join(flags['c_flags'])
            print 'ASM flags:', ' '.join(flags['asm_flags'])
            print 'Linker flags:', ' '.join(flags['ld_flags'])

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs
            ]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = self.ld_script

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }

        # TODO: it would be good to have jinja stop if one of the
        # expected context values is not defined.
        self.gen_file('gnuarmeclipse/.project.tmpl',
                      jinja_ctx,
                      '.project',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file('gnuarmeclipse/.cproject.tmpl',
                      jinja_ctx,
                      '.cproject',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file('gnuarmeclipse/makefile.targets.tmpl',
                      jinja_ctx,
                      'makefile.targets',
                      trim_blocks=True,
                      lstrip_blocks=True)

        if not exists('.mbedignore'):
            print
            print 'Create .mbedignore'
            with open('.mbedignore', 'w') as f:
                for bf in build_folders:
                    print bf + '/'
                    f.write(bf + '/\n')

        print
        print 'Done. Import the \'{0}\' project in Eclipse.'.format(
            self.project_name)
Example 19
0
def export_project(src_paths, export_path, target, ide, libraries_paths=None,
                   linker_script=None, notify=None, verbose=False, name=None,
                   inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
                   config=None, macros=None, zip_proj=None, inc_repos=False,
                   build_profile=None):
    """Generates a project file and creates a zip archive if specified

    Positional Arguments:
    src_paths - a list of paths from which to find source files
    export_path - a path specifying the location of generated project files
    target - the mbed board/mcu for which to generate the executable
    ide - the ide for which to generate the project fields

    Keyword Arguments:
    libraries_paths - paths to additional libraries
    linker_script - path to the linker script for the specified target
    notify - function is passed all events, and expected to handle notification
      of the user, emit the events to a log, etc.
    verbose - assigns the notify function to toolchains print_notify_verbose
    name - project name
    inc_dirs - additional include directories
    jobs - number of threads
    silent - silent build - no output
    extra_verbose - assigns the notify function to toolchains
      print_notify_verbose
    config - toolchain's config object
    macros - User-defined macros
    zip_proj - string name of the zip archive you wish to create (omit this
      argument if you do not wish to create an archive)
    """

    # Convert src_path to a list if needed
    if isinstance(src_paths, dict):
        paths = sum(src_paths.values(), [])
    elif isinstance(src_paths, list):
        paths = src_paths[:]
    else:
        paths = [src_paths]

    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        paths.extend(libraries_paths)

    if not isinstance(src_paths, dict):
        src_paths = {"": paths}

    # Export Directory
    if not exists(export_path):
        makedirs(export_path)

    _, toolchain_name = get_exporter_toolchain(ide)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        paths, "", target, toolchain_name, macros=macros, jobs=jobs,
        notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, build_profile=build_profile)
    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    # Call unified scan_resources
    resource_dict = {loc: scan_resources(path, toolchain, inc_dirs=inc_dirs)
                     for loc, path in src_paths.iteritems()}
    resources = Resources()
    toolchain.build_dir = export_path
    config_header = toolchain.get_config_header()
    resources.headers.append(config_header)
    resources.file_basepath[config_header] = dirname(config_header)

    if zip_proj:
        subtract_basepath(resources, export_path)
        for loc, res in resource_dict.iteritems():
            temp = copy.deepcopy(res)
            subtract_basepath(temp, export_path, loc)
            resources.add(temp)
    else:
        for _, res in resource_dict.iteritems():
            resources.add(res)

    # Change linker script if specified
    if linker_script is not None:
        resources.linker_script = linker_script

    files, exporter = generate_project_files(resources, export_path,
                                             target, name, toolchain, ide,
                                             macros=macros)
    files.append(config_header)
    if zip_proj:
        for resource in resource_dict.values():
            for label, res in resource.features.iteritems():
                if label not in toolchain.target.features:
                    resource.add(res)
        if isinstance(zip_proj, basestring):
            zip_export(join(export_path, zip_proj), name, resource_dict, files,
                       inc_repos)
        else:
            zip_export(zip_proj, name, resource_dict, files, inc_repos)

    return exporter
Example 20
0
def export_project(src_paths, export_path, target, ide, libraries_paths=None,
                   linker_script=None, notify=None, verbose=False, name=None,
                   inc_dirs=None, jobs=1, silent=False, extra_verbose=False,
                   config=None, macros=None, zip_proj=None, inc_repos=False,
                   build_profile=None, app_config=None):
    """Generates a project file and creates a zip archive if specified

    Positional Arguments:
    src_paths - a list of paths from which to find source files
    export_path - a path specifying the location of generated project files
    target - the mbed board/mcu for which to generate the executable
    ide - the ide for which to generate the project fields

    Keyword Arguments:
    libraries_paths - paths to additional libraries
    linker_script - path to the linker script for the specified target
    notify - function is passed all events, and expected to handle notification
      of the user, emit the events to a log, etc.
    verbose - assigns the notify function to toolchains print_notify_verbose
    name - project name
    inc_dirs - additional include directories
    jobs - number of threads
    silent - silent build - no output
    extra_verbose - assigns the notify function to toolchains
      print_notify_verbose
    config - toolchain's config object
    macros - User-defined macros
    zip_proj - string name of the zip archive you wish to create (omit this
      argument if you do not wish to create an archive)
    """

    # Convert src_path to a list if needed
    if isinstance(src_paths, dict):
        paths = sum(src_paths.values(), [])
    elif isinstance(src_paths, list):
        paths = src_paths[:]
    else:
        paths = [src_paths]

    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        paths.extend(libraries_paths)

    if not isinstance(src_paths, dict):
        src_paths = {"": paths}

    # Export Directory
    if not exists(export_path):
        makedirs(export_path)

    _, toolchain_name = get_exporter_toolchain(ide)

    ###################################
    # mbed Classic/2.0/library support #

    # Find build system profile
    profile = None
    targets_json = None
    for path in paths:
        profile = find_build_profile(path) or profile
        if profile:
            targets_json = join(dirname(dirname(abspath(__file__))), 'legacy_targets.json')
        else:
            targets_json = find_targets_json(path) or targets_json

    # Apply targets.json to active targets
    if targets_json:
        if not silent:
            print("Using targets from %s" % targets_json)
        set_targets_json_location(targets_json)

    # Apply profile to toolchains
    if profile:
        def init_hook(self):
            profile_data = get_toolchain_profile(self.name, profile)
            if not profile_data:
                return
            if not silent:
                self.info("Using toolchain %s profile %s" % (self.name, profile))

            for k, v in profile_data.items():
                if k in self.flags:
                    self.flags[k] = v
                else:
                    setattr(self, k, v)

        mbedToolchain.init = init_hook

    # mbed Classic/2.0/library support #
    ###################################

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(
        paths, "", target, toolchain_name, macros=macros, jobs=jobs,
        notify=notify, silent=silent, verbose=verbose,
        extra_verbose=extra_verbose, config=config, build_profile=build_profile,
        app_config=app_config)
    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    # Call unified scan_resources
    resource_dict = {loc: scan_resources(path, toolchain, inc_dirs=inc_dirs)
                     for loc, path in src_paths.iteritems()}
    resources = Resources()
    toolchain.build_dir = export_path
    config_header = toolchain.get_config_header()
    resources.headers.append(config_header)
    resources.file_basepath[config_header] = dirname(config_header)

    if zip_proj:
        subtract_basepath(resources, ".")
        for loc, res in resource_dict.iteritems():
            temp = copy.deepcopy(res)
            subtract_basepath(temp, ".", loc)
            resources.add(temp)
    else:
        for _, res in resource_dict.iteritems():
            resources.add(res)

    # Change linker script if specified
    if linker_script is not None:
        resources.linker_script = linker_script

    files, exporter = generate_project_files(resources, export_path,
                                             target, name, toolchain, ide,
                                             macros=macros)
    files.append(config_header)
    if zip_proj:
        for resource in resource_dict.values():
            for label, res in resource.features.iteritems():
                if label not in toolchain.target.features:
                    resource.add(res)
        if isinstance(zip_proj, basestring):
            zip_export(join(export_path, zip_proj), name, resource_dict, files,
                       inc_repos)
        else:
            zip_export(zip_proj, name, resource_dict, files, inc_repos)
    else:
        for exported in files:
            if not exists(join(export_path, basename(exported))):
                copyfile(exported, join(export_path, basename(exported)))

    return exporter
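
A hedged usage sketch of export_project as defined in Example 20; the import path, IDE name, target and file names below are placeholders rather than values taken from this listing:

from tools.project_api import export_project  # assumed import path

# Hypothetical invocation: generate an IDE project and bundle it into a zip.
exporter = export_project(
    src_paths=['.'],                   # where to look for sources
    export_path='export/K64F_uvision',
    target='K64F',
    ide='uvision',
    name='my_project',
    zip_proj='my_project.zip',         # omit to skip creating the archive
)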
Example 21
0
    def extract_project_info(self, generate_config=False):
        """Extract comprehensive information in order to build a PlatformIO project

        src_paths - a list of paths that contain needed files to build project
        build_path - a path where mbed_config.h will be created
        target - suitable mbed target name
        framework_path - path to the root folder of the mbed framework package
        app_config - path to mbed_app.json
        ignore_dirs - doesn't work with GCC at the moment?
        """
        # Default values for mbed build api functions
        if self.custom_target_path and isfile(
                join(self.custom_target_path, "custom_targets.json")):
            print("Detected custom target file")
            Target.add_extra_targets(source_dir=self.custom_target_path)
            update_target_data()
        target = self.get_target_config()
        build_profile = self.get_build_profile()

        jobs = 1  # how many compilers we can run at once
        name = None  # the name of the project
        dependencies_paths = None  # libraries location to include when linking
        macros = None  # additional macros
        inc_dirs = None  # additional dirs where include files may be found
        ignore = self.ignore_dirs  # list of paths to add to mbedignore
        clean = False  # Rebuild everything if True

        # For cases when project and framework are on different
        # logic drives (Windows only)
        backup_cwd = os.getcwd()
        os.chdir(self.framework_path)

        # Convert src_path to a list if needed
        if not isinstance(self.src_paths, list):
            self.src_paths = [self.src_paths]
        self.src_paths = [relpath(s) for s in self.src_paths]

        # Pass all params to the unified prepare_toolchain()
        self.toolchain = prepare_toolchain(
            self.src_paths, self.build_path, target, self.toolchain_name,
            macros=macros, clean=clean, jobs=jobs, notify=self.notify,
            app_config=self.app_config, build_profile=build_profile,
            ignore=ignore)

        # The first path will give the name to the library
        if name is None:
            name = basename(normpath(abspath(self.src_paths[0])))

        # Disabled for legacy libraries
        # for src_path in self.src_paths:
        #     if not exists(src_path):
        #         error_msg = "The library src folder doesn't exist:%s", src_path
        #         raise Exception(error_msg)


        self.resources = MbedResourcesFixedPath(self.framework_path, self.notify).scan_with_toolchain(
            self.src_paths, self.toolchain, dependencies_paths,
            inc_dirs=inc_dirs)

        src_files = (
            self.resources.s_sources +
            self.resources.c_sources +
            self.resources.cpp_sources
        )

        if generate_config:
            self.generate_mbed_config_file()

        # Restore the project cwd
        os.chdir(backup_cwd)

        result = {
            "src_files": src_files,
            "inc_dirs": self.resources.inc_dirs,
            "ldscript": [self.resources.linker_script],
            "objs": self.resources.objects,
            "build_flags": {k: sorted(v) for k, v in self.toolchain.flags.items()},
            "libs": [basename(l) for l in self.resources.libraries],
            "lib_paths": self.resources.lib_dirs,
            "syslibs": self.toolchain.sys_libs,
            "build_symbols": self.process_symbols(
                self.toolchain.get_symbols()),
            "hex": self.resources.hex_files,
            "bin": self.resources.bin_files
        }

        return result
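
For orientation, here is a minimal sketch of how the returned dictionary might be consumed. `builder` is a hypothetical, already-configured instance of the class shown above, and the keys used are exactly those assembled in the `result` dict:

def summarize_project_info(builder):
    """Print a short summary of the dict returned by extract_project_info().

    `builder` is assumed to be an already-configured instance of the class
    above (src_paths, build_path, target, toolchain_name, ... set elsewhere).
    """
    info = builder.extract_project_info(generate_config=True)
    print("Sources: %d files" % len(info["src_files"]))
    print("Linker script: %s" % info["ldscript"][0])
    for group, flags in sorted(info["build_flags"].items()):
        print("%s: %s" % (group, " ".join(flags)))
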
Esempio n. 22
0
    def create_jinja_ctx(self):
        self.options = {}
        flags = {}
        self.validate_resources()
        # Convert all Backslashes to Forward Slashes
        self.resources.win_to_unix()

        self.ld_script = self.filter_dot(self.resources.linker_script)

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()
        for prof_id in profile_ids:
            # There are several categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['defines'] = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = prof_id
            opts['name'] = opts['id'].capitalize()

            profile = profiles[prof_id]

            # A small hack: do not bother with src_path again;
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name

            toolchain = prepare_toolchain(src_paths,
                                          "",
                                          target_name,
                                          self.TOOLCHAIN,
                                          build_profile=[profile])

            flags = self.toolchain_flags(toolchain)

            opts['defines'] = self.get_defines_and_remove_from_flags(
                flags, 'common_flags')
            opts['forced_includes'] = self.get_includes_and_remove_from_flags(
                flags, 'common_flags')
            opts['common'] = flags['common_flags']
            opts['as'] = flags['asm_flags']
            opts['c'] = flags['c_flags']
            opts['cpp'] = flags['cxx_flags']
            opts['ld'] = flags['ld_flags']

            self.options[prof_id] = opts

        sources = []  # list of strings

        forced_includes = self.get_includes_and_remove_from_flags(
            flags, 'c_flags')
        forced_includes += self.get_includes_and_remove_from_flags(
            flags, 'cxx_flags')

        # Remove Duplicates
        forced_includes = list(set(forced_includes))

        c_std = self.get_c_std_and_remove_from_flag(flags, 'c_flags')
        cpp_std = self.get_c_std_and_remove_from_flag(flags, 'cxx_flags')

        # Make one list of all resources
        for r_type in ['c_sources', 's_sources', 'cpp_sources']:
            sources.extend(getattr(self.resources, r_type))

        # Remove all leading './'
        c_sources = [
            self.filter_dot(field) for field in self.resources.c_sources
        ]
        cpp_sources = [
            self.filter_dot(field) for field in self.resources.cpp_sources
        ]
        s_sources = [
            self.filter_dot(field) for field in self.resources.s_sources
        ]
        headers = [self.filter_dot(field) for field in self.resources.headers]
        sources = [self.filter_dot(field) for field in sources]
        include_paths = [
            self.filter_dot(field) for field in self.resources.inc_dirs
        ]

        sys_libs = [
            self.prepare_sys_lib(lib) for lib in self.toolchain.sys_libs
        ]
        preproc = " ".join([basename(self.toolchain.preproc[0])] +
                           self.toolchain.preproc[1:] + self.toolchain.ld[1:])

        if 'nbproject' in include_paths:
            include_paths.remove('nbproject')

        jinja_ctx = {
            'name': self.project_name,
            'target': self.toolchain.target.name,
            'elf_location': join('BUILD', self.project_name) + '.elf',
            'c_symbols': self.toolchain.get_symbols(),
            'asm_symbols': self.toolchain.get_symbols(True),
            'c_flags': flags['c_flags'],
            'cxx_flags': flags['cxx_flags'],
            'ld_flags': self.flags['ld_flags'],
            'asm_flags': self.flags['asm_flags'],
            'common_flags': self.flags['common_flags'],
            'include_paths': include_paths,
            'forced_includes': forced_includes,
            'c_sources': c_sources,
            'cpp_sources': cpp_sources,
            's_sources': s_sources,
            'headers': headers,
            'headers_folder': self.get_netbeans_file_list(sorted(headers)),
            'sources_folder': self.get_netbeans_file_list(sorted(sources)),
            'options': self.options,
            'c_std': self.get_netbeans_c_std(c_std),
            'cpp_std': self.get_netbeans_cpp_std(cpp_std),
            'linker_script': self.ld_script,
            'linker_libs': sys_libs,
            'pp_cmd': preproc,
            'cc_cmd': self.toolchain.cc[0],
            'cppc_cmd': self.toolchain.cppc[0],
            'asm_cmd': self.toolchain.asm[0],
            'ld_cmd': self.toolchain.ld[0],
            'elf2bin_cmd': self.toolchain.elf2bin
        }
        return jinja_ctx
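
The context returned above is meant to be rendered through Jinja2 templates (the other exporters on this page do so via gen_file()). Below is a minimal stand-alone sketch of rendering it with plain Jinja2; the template string is purely illustrative and `jinja_ctx` is assumed to be the dict returned by create_jinja_ctx():

# Illustrative only: render a tiny template with the context built above.
# The real exporter uses its own templates; this just shows the shape of
# the data. `jinja_ctx` is assumed to come from create_jinja_ctx().
from jinja2 import Template

template = Template(
    "Project {{ name }} for {{ target }}\n"
    "CC       = {{ cc_cmd }}\n"
    "CFLAGS   = {{ c_flags | join(' ') }}\n"
    "INCLUDES = {% for inc in include_paths %}-I{{ inc }} {% endfor %}\n"
)
print(template.render(**jinja_ctx))
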
Esempio n. 23
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        self.libraries = []
        # print 'libraries'
        # print self.resources.libraries
        for lib in self.resources.libraries:
            l, _ = splitext(basename(lib))
            self.libraries.append(l[3:])

        self.system_libraries = [
            'stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys'
        ]

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')
        # print build_folders

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]
        # print 'objects'
        # print objects

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs]

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines

        self.ld_script = self.filter_dot(
            self.resources.linker_script)

        self.options = {}
        profile_ids.remove('develop')
        for id in profile_ids:

            # There are several categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            print

            profile = profiles[id]

            # A small hack: do not bother with src_path again;
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(
                src_paths, "", target_name, self.TOOLCHAIN, build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)
            self.excluded_folders = [item.replace("\\", "/") for item in self.excluded_folders]

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = self.libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = "linker-script-%s.ld" % id
            opts['cpp_cmd'] = " ".join(toolchain.preproc)

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            u = UID()
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,
            'ld_script': self.ld_script,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }

        self.gen_file('mcuxpresso/.project.tmpl', jinja_ctx,
                      '.project', trim_blocks=True, lstrip_blocks=True)
        self.gen_file('mcuxpresso/{0}_cproject.tmpl'.format(target_name), jinja_ctx,
                      '.cproject', trim_blocks=True, lstrip_blocks=True)
        self.gen_file('mcuxpresso/makefile.targets.tmpl', jinja_ctx,
                      'makefile.targets', trim_blocks=True, lstrip_blocks=True)
        self.gen_file_nonoverwrite('mcuxpresso/mbedignore.tmpl', jinja_ctx,
                                   '.mbedignore')

        print('Done. Import the \'{0}\' project in MCUXpresso.'.format(
            self.project_name))
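
The UID helper is not defined in this snippet; judging from the comment above ({{u.id}} is expected to yield a fresh value on every access), a minimal compatible sketch could look like the following (hypothetical, not the project's actual implementation):

# Hypothetical sketch of a UID helper compatible with the usage above:
# every access to `.id` returns a new unique identifier, so both the
# Python code (uid['config'] = u.id, ...) and the template ({{u.id}})
# receive distinct values.
import uuid

class UID(object):
    @property
    def id(self):
        return uuid.uuid4().hex
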
Esempio n. 24
0
def export_project(src_paths,
                   export_path,
                   target,
                   ide,
                   libraries_paths=None,
                   linker_script=None,
                   notify=None,
                   verbose=False,
                   name=None,
                   inc_dirs=None,
                   jobs=1,
                   silent=False,
                   extra_verbose=False,
                   config=None,
                   macros=None,
                   zip_proj=None,
                   inc_repos=False,
                   build_profile=None,
                   app_config=None):
    """Generates a project file and creates a zip archive if specified

    Positional Arguments:
    src_paths - a list of paths from which to find source files
    export_path - a path specifying the location of generated project files
    target - the mbed board/mcu for which to generate the executable
    ide - the IDE for which to generate the project files

    Keyword Arguments:
    libraries_paths - paths to additional libraries
    linker_script - path to the linker script for the specified target
    notify - function is passed all events, and expected to handle notification
      of the user, emit the events to a log, etc.
    verbose - assigns the notify function to toolchains print_notify_verbose
    name - project name
    inc_dirs - additional include directories
    jobs - number of threads
    silent - silent build - no output
    extra_verbose - assigns the notify function to toolchains
      print_notify_verbose
    config - toolchain's config object
    macros - User-defined macros
    zip_proj - string name of the zip archive you wish to create (omit the
      argument if you do not wish to create an archive)
    """

    # Convert src_paths to a list if needed
    if isinstance(src_paths, dict):
        paths = sum(src_paths.values(), [])
    elif isinstance(src_paths, list):
        paths = src_paths[:]
    else:
        paths = [src_paths]

    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        paths.extend(libraries_paths)

    if not isinstance(src_paths, dict):
        src_paths = {"": paths}

    # Export Directory
    if not exists(export_path):
        makedirs(export_path)

    _, toolchain_name = get_exporter_toolchain(ide)

    ####################################
    # mbed Classic/2.0/library support #

    # Find build system profile
    profile = None
    targets_json = None
    for path in paths:
        profile = find_build_profile(path) or profile
        if profile:
            targets_json = join(dirname(dirname(abspath(__file__))),
                                'legacy_targets.json')
        else:
            targets_json = find_targets_json(path) or targets_json

    # Apply targets.json to active targets
    if targets_json:
        if not silent:
            print("Using targets from %s" % targets_json)
        set_targets_json_location(targets_json)

    # Apply profile to toolchains
    if profile:

        def init_hook(self):
            profile_data = get_toolchain_profile(self.name, profile)
            if not profile_data:
                return
            if not silent:
                self.info("Using toolchain %s profile %s" %
                          (self.name, profile))

            for k, v in profile_data.items():
                if self.flags.has_key(k):
                    self.flags[k] = v
                else:
                    setattr(self, k, v)

        mbedToolchain.init = init_hook

    # mbed Classic/2.0/library support #
    ####################################

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(paths,
                                  "",
                                  target,
                                  toolchain_name,
                                  macros=macros,
                                  jobs=jobs,
                                  notify=notify,
                                  silent=silent,
                                  verbose=verbose,
                                  extra_verbose=extra_verbose,
                                  config=config,
                                  build_profile=build_profile,
                                  app_config=app_config)
    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    # Call unified scan_resources
    resource_dict = {
        loc: scan_resources(path, toolchain, inc_dirs=inc_dirs)
        for loc, path in src_paths.iteritems()
    }
    resources = Resources()
    toolchain.build_dir = export_path
    config_header = toolchain.get_config_header()
    resources.headers.append(config_header)
    resources.file_basepath[config_header] = dirname(config_header)

    if zip_proj:
        subtract_basepath(resources, ".")
        for loc, res in resource_dict.iteritems():
            temp = copy.deepcopy(res)
            subtract_basepath(temp, ".", loc)
            resources.add(temp)
    else:
        for _, res in resource_dict.iteritems():
            resources.add(res)

    # Change linker script if specified
    if linker_script is not None:
        resources.linker_script = linker_script

    files, exporter = generate_project_files(resources,
                                             export_path,
                                             target,
                                             name,
                                             toolchain,
                                             ide,
                                             macros=macros)
    files.append(config_header)
    if zip_proj:
        for resource in resource_dict.values():
            for label, res in resource.features.iteritems():
                if label not in toolchain.target.features:
                    resource.add(res)
        if isinstance(zip_proj, basestring):
            zip_export(join(export_path, zip_proj), name, resource_dict, files,
                       inc_repos)
        else:
            zip_export(zip_proj, name, resource_dict, files, inc_repos)
    else:
        for exported in files:
            if not exists(join(export_path, basename(exported))):
                copyfile(exported, join(export_path, basename(exported)))

    return exporter
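
For reference, a minimal call sketch based only on the signature and docstring above; the target name, IDE identifier and paths are illustrative placeholders, not values taken from the snippet:

# Illustrative placeholders throughout; adjust target/ide/paths to your setup.
exporter = export_project(
    ["./source", "./mbed-os"],   # src_paths
    "./export",                  # export_path
    "K64F",                      # target (placeholder)
    "gcc_arm",                   # ide (placeholder exporter name)
    name="my_project",
    macros=["MY_FEATURE=1"],
    zip_proj="my_project.zip",   # omit to skip creating a zip archive
)
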
Esempio n. 25
0
# Can NOT be the current directory, or it screws up some internal regexes inside mbed tools.
# That was a fun hour to debug...

config_header_dir = os.path.join(generated_path, "config-headers")
pathlib.Path(config_header_dir).mkdir(parents=True, exist_ok=True) # create dir if not exists

notifier = TerminalNotifier(True, False)

# create a different toolchain for each profile so that we can detect the flags needed in each configuration
profile_toolchains = []
for profile_json_path in profile_jsons:
    with open(profile_json_path) as profile_file:

        print(">> Collecting data for config " + profile_json_path)
        profile_data = json.load(profile_file)
        profile_toolchain = build_api.prepare_toolchain(src_paths=[mbed_os_dir], build_dir=config_header_dir, target=target_name, toolchain_name=toolchain_name, build_profile=[profile_data])
        # each toolchain must then scan the mbed dir to pick up more configs
        resources = Resources(notifier).scan_with_toolchain(src_paths=[mbed_os_dir], toolchain=profile_toolchain, exclude=True)
        profile_toolchain.RESPONSE_FILES=False


        profile_toolchains.append(profile_toolchain)


# Profiles seem to only set flags, so for the remaining operations we can use any toolchain
toolchain = profile_toolchains[0]
print("Generated config header: " + toolchain.get_config_header())


print("Using settings from these JSON files:\n " + "\n ".join(resources.get_file_paths(FileType.JSON)))
Esempio n. 26
0
    def create_jinja_ctx(self):
        self.options = {}
        flags = {}
        self.validate_resources()
        # Convert all Backslashes to Forward Slashes
        self.resources.win_to_unix()

        self.ld_script = self.filter_dot(
            self.resources.linker_script)

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()
        for prof_id in profile_ids:
            # There are several categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['defines'] = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = prof_id
            opts['name'] = opts['id'].capitalize()

            profile = profiles[prof_id]

            # A small hack: do not bother with src_path again;
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name

            toolchain = prepare_toolchain(
                src_paths, "", target_name, self.TOOLCHAIN, build_profile=[profile])

            flags = self.toolchain_flags(toolchain)

            opts['defines'] = self.get_defines_and_remove_from_flags(flags, 'common_flags')
            opts['forced_includes'] = self.get_includes_and_remove_from_flags(flags, 'common_flags')
            opts['common'] = flags['common_flags']
            opts['as'] = flags['asm_flags']
            opts['c'] = flags['c_flags']
            opts['cpp'] = flags['cxx_flags']
            opts['ld'] = flags['ld_flags']

            self.options[prof_id] = opts

        sources = []  # list of strings

        forced_includes = self.get_includes_and_remove_from_flags(flags, 'c_flags')
        forced_includes += self.get_includes_and_remove_from_flags(flags, 'cxx_flags')

        # Remove Duplicates
        forced_includes = list(set(forced_includes))

        c_std = self.get_c_std_and_remove_from_flag(flags, 'c_flags')
        cpp_std = self.get_c_std_and_remove_from_flag(flags, 'cxx_flags')

        # Make one list of all resources
        for r_type in ['c_sources', 's_sources', 'cpp_sources']:
            sources.extend(getattr(self.resources, r_type))

        # Remove all leading './'
        c_sources = [self.filter_dot(field) for field in self.resources.c_sources]
        cpp_sources = [self.filter_dot(field) for field in self.resources.cpp_sources]
        s_sources = [self.filter_dot(field) for field in self.resources.s_sources]
        headers = [self.filter_dot(field) for field in self.resources.headers]
        sources = [self.filter_dot(field) for field in sources]
        include_paths = [self.filter_dot(field) for field in self.resources.inc_dirs]

        sys_libs = [self.prepare_sys_lib(lib) for lib
                    in self.toolchain.sys_libs]
        preproc = " ".join([basename(self.toolchain.preproc[0])] +
                           self.toolchain.preproc[1:] +
                           self.toolchain.ld[1:])

        if 'nbproject' in include_paths:
            include_paths.remove('nbproject')

        jinja_ctx = {
            'name': self.project_name,
            'target': self.toolchain.target.name,
            'elf_location': join('BUILD', self.project_name) + '.elf',
            'c_symbols': self.toolchain.get_symbols(),
            'asm_symbols': self.toolchain.get_symbols(True),
            'c_flags': flags['c_flags'],
            'cxx_flags': flags['cxx_flags'],
            'ld_flags': self.flags['ld_flags'],
            'asm_flags': self.flags['asm_flags'],
            'common_flags': self.flags['common_flags'],
            'include_paths': include_paths,
            'forced_includes': forced_includes,
            'c_sources': c_sources,
            'cpp_sources': cpp_sources,
            's_sources': s_sources,
            'headers': headers,
            'headers_folder': self.get_netbeans_file_list(sorted(headers)),
            'sources_folder': self.get_netbeans_file_list(sorted(sources)),
            'options': self.options,
            'c_std': self.get_netbeans_c_std(c_std),
            'cpp_std': self.get_netbeans_cpp_std(cpp_std),
            'linker_script': self.ld_script,
            'linker_libs': sys_libs,
            'pp_cmd': preproc,
            'cc_cmd': self.toolchain.cc[0],
            'cppc_cmd': self.toolchain.cppc[0],
            'asm_cmd': self.toolchain.asm[0],
            'ld_cmd': self.toolchain.ld[0],
            'elf2bin_cmd': self.toolchain.elf2bin
        }
        return jinja_ctx
Esempio n. 27
0
    def generate(self):
        """
        Generate the .project and .cproject files.
        """
        if not self.resources.linker_script:
            raise NotSupportedException("No linker script found.")

        self.resources.win_to_unix()

        # TODO: use some logger to display additional info if verbose

        libraries = []
        # print 'libraries'
        # print self.resources.libraries
        for lib in self.libraries:
            l, _ = splitext(basename(lib))
            libraries.append(l[3:])

        self.system_libraries = ['stdc++', 'supc++', 'm', 'c', 'gcc', 'nosys']

        # Read in all profiles, we'll extract compiler options.
        profiles = self.get_all_profiles()

        profile_ids = [s.lower() for s in profiles]
        profile_ids.sort()

        # TODO: get the list from existing .cproject
        build_folders = [s.capitalize() for s in profile_ids]
        build_folders.append('BUILD')
        # print build_folders

        objects = [self.filter_dot(s) for s in self.resources.objects]
        for bf in build_folders:
            objects = [o for o in objects if not o.startswith(bf + '/')]
        # print 'objects'
        # print objects

        self.compute_exclusions()

        self.include_path = [
            self.filter_dot(s) for s in self.resources.inc_dirs
        ]

        self.as_defines = self.toolchain.get_symbols(True)
        self.c_defines = self.toolchain.get_symbols()
        self.cpp_defines = self.c_defines

        self.ld_script = self.filter_dot(self.resources.linker_script)

        self.options = {}
        profile_ids.remove('develop')
        for id in profile_ids:

            # There are several categories of options: a category common to
            # all tools and a specific category for each of the tools.
            opts = {}
            opts['common'] = {}
            opts['as'] = {}
            opts['c'] = {}
            opts['cpp'] = {}
            opts['ld'] = {}

            opts['id'] = id
            opts['name'] = opts['id'].capitalize()

            print

            profile = profiles[id]

            # A small hack: do not bother with src_path again;
            # pass an empty string to avoid crashing.
            src_paths = ['']
            target_name = self.toolchain.target.name
            toolchain = prepare_toolchain(src_paths,
                                          "",
                                          target_name,
                                          self.TOOLCHAIN,
                                          build_profile=[profile])

            # Hack to fill in build_dir
            toolchain.build_dir = self.toolchain.build_dir

            flags = self.toolchain_flags(toolchain)

            # Most GNU ARM Eclipse options have a parent,
            # either debug or release.
            if '-O0' in flags['common_flags'] or '-Og' in flags['common_flags']:
                opts['parent_id'] = 'debug'
            else:
                opts['parent_id'] = 'release'

            self.process_options(opts, flags, libraries)

            opts['as']['defines'] = self.as_defines
            opts['c']['defines'] = self.c_defines
            opts['cpp']['defines'] = self.cpp_defines

            opts['common']['include_paths'] = self.include_path
            opts['common']['excluded_folders'] = '|'.join(
                self.excluded_folders)
            self.excluded_folders = [
                item.replace("\\", "/") for item in self.excluded_folders
            ]

            opts['ld']['library_paths'] = [
                self.filter_dot(s) for s in self.resources.lib_dirs
            ]

            opts['ld']['object_files'] = objects
            opts['ld']['user_libraries'] = libraries
            opts['ld']['system_libraries'] = self.system_libraries
            opts['ld']['script'] = "linker-script-%s.ld" % id
            opts['cpp_cmd'] = " ".join(toolchain.preproc)

            # Unique IDs used in multiple places.
            # Those used only once are implemented with {{u.id}}.
            u = UID()
            uid = {}
            uid['config'] = u.id
            uid['tool_c_compiler'] = u.id
            uid['tool_c_compiler_input'] = u.id
            uid['tool_cpp_compiler'] = u.id
            uid['tool_cpp_compiler_input'] = u.id

            opts['uid'] = uid

            self.options[id] = opts

        jinja_ctx = {
            'name': self.project_name,
            'ld_script': self.ld_script,

            # Compiler & linker command line options
            'options': self.options,

            # Must be an object with an `id` property, which
            # will be called repeatedly, to generate multiple UIDs.
            'u': u,
        }

        self.gen_file('mcuxpresso/.project.tmpl',
                      jinja_ctx,
                      '.project',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file('mcuxpresso/{0}_cproject.tmpl'.format(target_name),
                      jinja_ctx,
                      '.cproject',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file('mcuxpresso/makefile.targets.tmpl',
                      jinja_ctx,
                      'makefile.targets',
                      trim_blocks=True,
                      lstrip_blocks=True)
        self.gen_file_nonoverwrite('mcuxpresso/mbedignore.tmpl', jinja_ctx,
                                   '.mbedignore')

        print('Done. Import the \'{0}\' project in MCUXpresso.'.format(
            self.project_name))
Esempio n. 28
0
def main():
    ignore_targets = {
        # Not compiled with the mbed-cli
        'ELEKTOR_COCORICO': 'Wrong target configuration, no \'device_has\' attribute',
        'KL26Z': 'undefined reference to \'init_data_bss\'',
        'LPC11U37_501': 'fatal error: device.h: No such file or directory',
        'LPC11U68': 'multiple definition of \'__aeabi_atexit\'',
        'SAMG55J19': 'error: \'s\' undeclared here: #define OPTIMIZE_HIGH __attribute__((optimize(s)))',
        'LPC810': 'region \'FLASH\' overflowed by 2832 bytes',
        'LPC2368': 'undefined reference to \'__get_PRIMASK\'',
        'LPC2460': 'undefined reference to \'__get_PRIMASK\'',
        'MTM_MTCONNECT04S_BOOT': 'fatal error: device.h: No such file or directory',
        'MTM_MTCONNECT04S_OTA': 'fatal error: device.h: No such file or directory',

        # Hex merge problem targets
        'NRF51_MICROBIT_BOOT': 'Hex file problem',
        'ARCH_BLE': 'Hex file problem',
        'RBLAB_NRF51822': 'Hex file problem',
        'RBLAB_BLENANO': 'Hex file problem',
        'NRF51822_BOOT': 'Hex file problem',
        'NRF51_MICROBIT': 'Hex file problem',
        'WALLBOT_BLE': 'Hex file problem',
        'WALLBOT_BLE_OTA': 'Hex file problem',
        'MTM_MTCONNECT04S': 'Hex file problem',
        'MTM_MTCONNECT04S_BOOT': 'Hex file problem',
        'TY51822R3_BOOT': 'Hex file problem',
        'NRF51822_OTA': 'Hex file problem',
        'RBLAB_NRF51822_OTA': 'Hex file problem',
        'NRF51822_Y5_MBUG': 'Hex file problem',
        'NRF51822': 'Hex file problem',
        'ARCH_BLE_BOOT': 'Hex file problem',
        'RBLAB_BLENANO_BOOT': 'Hex file problem',
        'TY51822R3_OTA': 'Hex file problem',
        'SEEED_TINY_BLE': 'Hex file problem',
        'RBLAB_NRF51822_BOOT': 'Hex file problem',
        'NRF51_DK_LEGACY': 'Hex file problem',
        'DELTA_DFCM_NNN40_OTA': 'Hex file problem',
        'TY51822R3': 'Hex file problem',
        'NRF51_DONGLE_LEGACY': 'Hex file problem',
        'DELTA_DFBM_NQ620': 'Hex file problem',
        'WALLBOT_BLE_BOOT': 'Hex file problem',
        'DELTA_DFCM_NNN40': 'Hex file problem',
        'SEEED_TINY_BLE_OTA': 'Hex file problem',
        'ARCH_LINK_OTA': 'Hex file problem',
        'NRF51_DK_BOOT': 'Hex file problem',
        'NRF51_DONGLE': 'Hex file problem',
        'DELTA_DFCM_NNN40_BOOT': 'Hex file problem',
        'NRF51_MICROBIT_B_OTA': 'Hex file problem',
        'NRF51_MICROBIT_B_BOOT': 'Hex file problem',
        'SEEED_TINY_BLE_BOOT': 'Hex file problem',
        'ARCH_LINK': 'Hex file problem',
        'NRF51_MICROBIT_B': 'Hex file problem',
        'NRF51_DK_OTA': 'Hex file problem',
        'RBLAB_BLENANO_OTA': 'Hex file problem',
        'ARCH_LINK_BOOT': 'Hex file problem',
        'ARCH_BLE_OTA': 'Hex file problem',
        'HRM1017': 'Hex file problem',
        'NRF52_DK': 'Hex file problem',
        'NRF51_DONGLE_OTA': 'Hex file problem',
        'NRF51_DONGLE_BOOT': 'Hex file problem',
        'NRF51_MICROBIT_OTA': 'Hex file problem',
        'NRF51_DK': 'Hex file problem',
        'HRM1017_BOOT': 'Hex file problem',
        'HRM1017_OTA': 'Hex file problem',

        # LED Blink problem targets
        'LPC1549': 'error: \'sleep\' was not declared in this scope',
        'NUMAKER_PFM_M453': 'multiple definition of \'__wrap__sbrk\'',
        'NUMAKER_PFM_NUC472': 'fatal error: mbedtls/config.h: No such file or directory',
        'RZ_A1H': 'error: \'sleep\' was not declared in this scope',
        'VK_RZ_A1H': 'error: \'sleep\' was not declared in this scope',

        # LED Blink RTOS problem targets
        'KL05Z': 'region \'RAM\' overflowed by 3020 bytes',
        'EFM32HG_STK3400': 'region RAM overflowed with stack',
        'VK_RZ_A1H': 'multiple definition of \'eth_arch_enetif_init\'',
        'LPC812': 'region \'RAM\' overflowed by 3108 bytes',
        'MAXWSNENV': 'undefined reference to *',
        'ARM_BEETLE_SOC': 'undefined reference to *',

        # USB Device problem targets
        'LPC1347': 'region \'RAM\' overflowed by 156 bytes',
        'MAX32620HSP': 'undefined reference to *',
        'EFM32HG_STK3400': ' region \'RAM\' overflowed by 516 bytes',
        'MAXWSNENV': 'undefined reference to *',
        'KL27Z': 'undefined reference to \'USBHAL\' + region \'m_data\' overflowed by 88 bytes',

    }

    with open(os.path.join(script_path, 'linker_data.json')) as linker_data:
        linker_data = json.load(linker_data)

    source_condition_map = {}
    header_condition_map = {}
    symbol_condition_map = {}
    include_dir_condition_map = {}
    src_dir_to_lib_map = {}
    resources_map = {}
    lib_builder_map = {}

    library_names = {
        'cpputest': "CppUTest",
        'usb_host': "USB Host support",
        'usb': "USB Device support",
        'ublox': "U-blox drivers",
        'rtos': "RTOS abstraction layer",
        'dsp': "DSP Library",
        'rpc': "RPC Support",
        'fat': "FAT File System support",
        'eth': "Ethernet support",
        'rtx': "Keil RTX RTOS",
        'features': 'Device features'
    }

    print("Parsing targets...")
    xml = ElementTree.parse(os.path.join(script_path, 'bsp_template.xml'))
    mcus = xml.find("SupportedMCUs")
    family = xml.find("MCUFamilies/MCUFamily")

    targets_count = 0
    for target in Exporter.TARGETS:
        print('\t' + target + '...')

        toolchain = ba.prepare_toolchain(ROOT, target, 'GCC_ARM')

        # Scan src_path for config files
        res = toolchain.scan_resources(ROOT, exclude_paths=[os.path.join(ROOT, 'rtos'), os.path.join(ROOT, 'features')])
        res.toolchain = toolchain
        # for path in src_paths[1:]:
        #     resources.add(toolchain.scan_resources(path))

        res.headers += [MBED_HEADER, ROOT]
        # res += toolchain.scan_resources(os.path.join(ROOT, 'events'))

        toolchain.config.load_resources(res)

        target_lib_macros = toolchain.config.config_to_macros(toolchain.config.get_config_data())
        toolchain.set_config_data(toolchain.config.get_config_data())
        toolchain.config.validate_config()

        res.relative_to(ROOT, False)
        res.win_to_unix()

        for items, object_map, is_path in [
            [res.c_sources + res.cpp_sources + res.s_sources, source_condition_map, True],
            [res.headers, header_condition_map, True],
            [res.inc_dirs, include_dir_condition_map, True],
            [toolchain.get_symbols(), symbol_condition_map, False],
            [target_lib_macros, symbol_condition_map, False]]:
            for fn in items:
                if is_path:
                    fn = "$$SYS:BSP_ROOT$$/" + fn.replace("\\", "/")
                object_map.setdefault(fn, []).append(target)
        targets_count += 1
        resources_map[target] = res

        for lib in LIBRARIES:
            sources = lib['source_dir']
            if isinstance(sources, str):
                sources = [sources]
            for src in sources:
                lib_toolchain = ba.prepare_toolchain(ROOT, target, 'GCC_ARM')
                # ignore rtx while scanning rtos
                exclude_paths = [os.path.join(ROOT, 'rtos', 'rtx')] if lib['id'] != 'rtos' else []
                lib_res = lib_toolchain.scan_resources(src, exclude_paths=exclude_paths)
                lib_toolchain.config.load_resources(lib_res)
                lib_macros = lib_toolchain.config.config_to_macros(lib_toolchain.config.get_config_data())
                new_lib = copy.copy(lib)
                macros = new_lib.get('macros', None)
                if macros is None:
                    macros = lib_macros
                else:
                    macros += lib_macros
                new_lib['macros'] = macros
                lib_res.relative_to(ROOT, False)
                lib_res.win_to_unix()
                lib_builder_map.setdefault(new_lib['id'], LibraryBuilder(new_lib, target)).append_resources(
                    target, lib_res, macros)
                src_dir_to_lib_map[src] = new_lib['id']

        # Add specific features as a library
        features_path = os.path.join(ROOT, 'features')
        features_toolchain = ba.prepare_toolchain(features_path, target, 'GCC_ARM')
        features_resources = features_toolchain.scan_resources(features_path)
        features_toolchain.config.load_resources(features_resources)
        new_macros = features_toolchain.config.config_to_macros(features_toolchain.config.get_config_data())
        features_macros = [x for x in new_macros if x not in target_lib_macros]
        # if 'MBED_CONF_LWIP_ADDR_TIMEOUT=5' in features_macros:
        #     features_macros.remove('MBED_CONF_LWIP_ADDR_TIMEOUT=5')
        #     features_macros.append('MBED_CONF_LWIP_ADDR_TIMEOUT=$$com.sysprogs.bspoptions.lwip.addr_timeout$$')
        if 'MBED_CONF_LWIP_IPV6_ENABLED=0' in features_macros:
            features_macros.remove('MBED_CONF_LWIP_IPV6_ENABLED=0')
            features_macros.append('MBED_CONF_LWIP_IPV6_ENABLED=$$com.sysprogs.bspoptions.lwip.ipv6_en$$')
        if 'MBED_CONF_LWIP_IPV4_ENABLED=1' in features_macros:
            features_macros.remove('MBED_CONF_LWIP_IPV4_ENABLED=1')
            features_macros.append('MBED_CONF_LWIP_IPV4_ENABLED=$$com.sysprogs.bspoptions.lwip.ipv4_en$$')

        features_resources.relative_to(ROOT, False)
        features_resources.win_to_unix()
        features_lib = {
            'id': 'features',
            'source_dir': os.path.join(ROOT, 'features'),
            'build_dir': tools.libraries.RTOS_LIBRARIES,
            'dependencies': [tools.libraries.MBED_LIBRARIES, tools.libraries.MBED_RTX, tools.libraries.RTOS_LIBRARIES],
            'macros': features_macros
        }
        for feature in toolchain.config.get_features():
            if feature in features_resources.features:
                features_resources += features_resources.features[feature]
        lib_builder_map.setdefault('features', LibraryBuilder(features_lib, target)).append_resources(
            target, features_resources, features_macros)
        src_dir_to_lib_map[features_path] = 'features'

    for fw in lib_builder_map.values():
        fw.DependencyIDs = set([])
        for dep in fw.Dependencies:
            id = src_dir_to_lib_map.get(dep)
            if id is not None:
                fw.DependencyIDs.add(id)

    # Set flags different for each target
    include_ignored_targets = '--alltargets' in sys.argv
	
    for target in Exporter.TARGETS:
        res = resources_map.get(target, None)
        if res is None:
            print('Target ignored: ' + target + ': No resources')
            continue
        if res.linker_script is None:
            print('Target ignored: ' + target + ': No linker script')
            continue
        if not include_ignored_targets and target in ignore_targets:
            print('Target ' + target + ' ignored: ' + ignore_targets[target])
            continue

        mcu = ElementTree.Element('MCU')
        mcu.append(make_node('ID', target))
        mcu.append(make_node('HierarchicalPath', 'Mbed'))
        mcu.append(make_node('FamilyID', family.find('ID').text))

        props_list = provide_node(provide_node(provide_node(provide_node(mcu, "ConfigurableProperties"),
                                                            "PropertyGroups"), "PropertyGroup"), "Properties")

        if 'FEATURE_LWIP=1' in symbol_condition_map:
            if target in symbol_condition_map['FEATURE_LWIP=1']:
                prop_node = ElementTree.SubElement(props_list, "PropertyEntry", {"xsi:type": "Enumerated"})
                prop_node.extend([make_node('Name', 'LWIP IPV6 config'),
                                  make_node('UniqueID', 'com.sysprogs.bspoptions.lwip.ipv6_en'),
                                  make_node('DefaultEntryIndex', '1')])
                list_node = ElementTree.SubElement(prop_node, 'SuggestionList')
                ElementTree.SubElement(list_node, "Suggestion").extend([make_node("UserFriendlyName", "enable"),
                                                                        make_node("InternalValue", '1')])
                ElementTree.SubElement(list_node, "Suggestion").extend([make_node("UserFriendlyName", "disable"),
                                                                        make_node("InternalValue", '0')])

                prop_node = ElementTree.SubElement(props_list, "PropertyEntry", {"xsi:type": "Enumerated"})
                prop_node.extend([make_node("Name", "LWIP IPV4 config"),
                                  make_node("UniqueID", "com.sysprogs.bspoptions.lwip.ipv4_en"),
                                  make_node("DefaultEntryIndex", "0")])
                list_node = ElementTree.SubElement(prop_node, "SuggestionList")
                ElementTree.SubElement(list_node, "Suggestion").extend([make_node("UserFriendlyName", "enable"),
                                                                        make_node("InternalValue", '1')])
                ElementTree.SubElement(list_node, "Suggestion").extend([make_node("UserFriendlyName", "disable"),
                                                                        make_node("InternalValue", '0')])

        flags = append_node(mcu, "CompilationFlags")
        for (node, dict) in [[append_node(mcu, "AdditionalSourceFiles"), source_condition_map],
                             [append_node(mcu, "AdditionalHeaderFiles"), header_condition_map],
                             [append_node(flags, "IncludeDirectories"), include_dir_condition_map],
                             [append_node(flags, "PreprocessorMacros"), symbol_condition_map]]:
            for (filename, targets) in dict.items():
                if len(list(set(targets))) < targets_count and target in targets:
                    node.append(make_node("string", filename))

        flagList = res.toolchain.cpu[:]
        if "-mfloat-abi=softfp" in flagList:
            flagList.remove("-mfloat-abi=softfp")
            flagList.append("$$com.sysprogs.bspoptions.arm.floatmode$$")
            prop_node = ElementTree.SubElement(props_list, "PropertyEntry", {"xsi:type": "Enumerated"})
            prop_node.extend([make_node("Name", "Floating point support"),
                           make_node("UniqueID", "com.sysprogs.bspoptions.arm.floatmode"),
                           make_node("DefaultEntryIndex", "2")])
            list_node = ElementTree.SubElement(prop_node, "SuggestionList")
            ElementTree.SubElement(list_node, "Suggestion").extend(
                [make_node("UserFriendlyName", "Software"), make_node("InternalValue", "-mfloat-abi=soft")])
            ElementTree.SubElement(list_node, "Suggestion").extend(
                [make_node("UserFriendlyName", "Hardware"), make_node("InternalValue", "-mfloat-abi=hard")])
            ElementTree.SubElement(list_node, "Suggestion").extend(
                [make_node("UserFriendlyName", "Hardware with Software interface"),
                 make_node("InternalValue", "-mfloat-abi=softfp")])
            ElementTree.SubElement(list_node, "Suggestion").extend(
                [make_node("UserFriendlyName", "Unspecified"), make_node("InternalValue", "")])

        ElementTree.SubElement(flags, "COMMONFLAGS").text = " ".join(flagList)
        ElementTree.SubElement(flags, "LinkerScript").text = "$$SYS:BSP_ROOT$$/" + res.linker_script

        mems = parse_linker_script(os.path.join(ROOT, res.linker_script))
        ram_size = str(sum([m.Size for m in mems if ("RAM" in m.Name.upper())]))
        flash_size = str(sum([m.Size for m in mems if ("FLASH" in m.Name.upper())]))
        if target in linker_data:
            ram_size = linker_data[target]['RAM']
            flash_size = linker_data[target]['FLASH']
        else:
            print('No RAM and FLASH size for target ' + target)
        mcu.append(make_node("RAMSize", ram_size))
        mcu.append(make_node("FLASHSize", flash_size))

        mem_list = ElementTree.SubElement(ElementTree.SubElement(mcu, "MemoryMap"), "Memories")
        for mem in mems:
            mem_el = ElementTree.SubElement(mem_list, "MCUMemory")
            mem_el.append(make_node("Name", mem.Name))
            mem_el.append(make_node("Address", str(mem.Start)))
            mem_el.append(make_node("Size", str(mem.Size)))
            if mem.Name.upper() == "FLASH":
                mem_el.append(make_node("Flags", "IsDefaultFLASH"))
            if mem.Name.upper() == "RAM":
                mem_el.append(make_node("LoadedFromMemory", "FLASH"))

        mcus.append(mcu)

    # Set flags shared between targets
    flags = append_node(family, "CompilationFlags")
    for (node, dict) in [[append_node(family, "AdditionalSourceFiles"), source_condition_map],
                         [append_node(family, "AdditionalHeaderFiles"), header_condition_map],
                         [append_node(flags, "IncludeDirectories"), include_dir_condition_map],
                         [append_node(flags, "PreprocessorMacros"), symbol_condition_map]]:
        for (filename, targets) in dict.items():
            if len(list(set(targets))) == targets_count:
                node.append(make_node("string", filename))

    family.find("AdditionalSourceFiles").append(make_node("string", "$$SYS:BSP_ROOT$$/stubs.cpp"))
    cond_list = xml.find("FileConditions")
    flag_cond_list = xml.find("ConditionalFlags")

    # Add frameworks
    for lib in lib_builder_map.values():
        fw = ElementTree.SubElement(xml.find("Frameworks"), "EmbeddedFramework")
        if len(lib.SupportedTargets) != targets_count:
            fw.append(make_node("MCUFilterRegex", "|".join(lib.SupportedTargets.keys())))

        fw.append(make_node("ID", "com.sysprogs.arm.mbed." + lib.ID))
        fw.append(make_node("ProjectFolderName", lib.ID))
        fw.append(make_node("UserFriendlyName", library_names.get(lib.ID, lib.ID + " library")))
        ElementTree.SubElement(fw, "AdditionalSourceFiles").extend(
            [make_node("string", fn) for fn in lib.source_condition_map.keys()])
        ElementTree.SubElement(fw, "AdditionalHeaderFiles").extend(
            [make_node("string", fn) for fn in lib.header_condition_map.keys()])
        ElementTree.SubElement(fw, "AdditionalIncludeDirs").extend(
            [make_node("string", fn) for (fn, cond) in lib.include_dir_condition_map.items() if
             len(cond) == len(lib.SupportedTargets)])
        ElementTree.SubElement(fw, "AdditionalPreprocessorMacros").extend(
            [make_node("string", fn) for fn in lib.macros_condition_map.keys()])
        if len(lib.DependencyIDs) > 0:
            ElementTree.SubElement(fw, "RequiredFrameworks").extend(
                [make_node("string", "com.sysprogs.arm.mbed." + id) for id in lib.DependencyIDs])
        # ET.SubElement(ET.SubElement(fw, "AdditionalSystemVars"), "SysVarEntry").extend([make_node("Key", "com.sysprogs.arm.mbed." + lib.ID + ".included"), make_node("Value", "1")])

        for (fn, cond) in lib.source_condition_map.items() + lib.header_condition_map.items():
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError("Source file condition list longer than the framework condition list. "
                                     "Check how the framework conditions are formed.")
            file_cond_node = ElementTree.SubElement(cond_list, "FileCondition")
            h_cond_node = ElementTree.SubElement(file_cond_node, "ConditionToInclude", {"xsi:type": "MatchesRegex"})
            h_cond_node.append(make_node("Expression", "$$SYS:MCU_ID$$"))
            h_cond_node.append(make_node("Regex", "|".join(cond)))
            file_cond_node.append(make_node("FilePath", fn))

        for (inc_dir, cond) in lib.include_dir_condition_map.items():
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError("Source file condition list longer than the framework condition list. "
                                     "Check how the framework conditions are formed.")
            flag_cond_node = ElementTree.SubElement(flag_cond_list, "ConditionalToolFlags")
            cond_list_node = ElementTree.SubElement(
                ElementTree.SubElement(flag_cond_node, "FlagCondition", {"xsi:type": "And"}), "Arguments")
            ElementTree.SubElement(cond_list_node, "Condition", {"xsi:type": "ReferencesFramework"}).append(
                make_node("FrameworkID", "com.sysprogs.arm.mbed." + lib.ID))
            ElementTree.SubElement(cond_list_node, "Condition", {"xsi:type": "MatchesRegex"}).extend(
                [make_node("Expression", "$$SYS:MCU_ID$$"), make_node("Regex", "|".join(cond))])
            flags_node = ElementTree.SubElement(flag_cond_node, "Flags")
            include_dir_list_node = ElementTree.SubElement(flags_node, "IncludeDirectories")
            include_dir_list_node.append(make_node("string", inc_dir))

        for (macro, cond) in lib.macros_condition_map.items():
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError('The number of macros is larger than the number of supported targets')
            macro_cond_node = ElementTree.SubElement(flag_cond_list, "ConditionalToolFlags")
            macro_list_node = ElementTree.SubElement(
                ElementTree.SubElement(macro_cond_node, "FlagCondition", {"xsi:type": "And"}), "Arguments")
            ElementTree.SubElement(macro_list_node, "Condition", {"xsi:type": "ReferencesFramework"}).append(
                make_node("FrameworkID", "com.sysprogs.arm.mbed." + lib.ID))
            ElementTree.SubElement(macro_list_node, "Condition", {"xsi:type": "MatchesRegex"}).extend(
                [make_node("Expression", "$$SYS:MCU_ID$$"), make_node("Regex", "|".join(cond))])
            macro_flags_node = ElementTree.SubElement(macro_cond_node, 'Flags')
            macros_node = ElementTree.SubElement(macro_flags_node, 'PreprocessorMacros')
            macros_node.append(make_node('string', macro))

    samples = xml.find('Examples')
    for (root, dirs, files) in os.walk(os.path.join(ROOT, 'samples')):
        for subdir in dirs:
            samples.append(make_node('string', 'samples/' + basename(subdir)))

    xml.getroot().attrib['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
    xml.getroot().attrib['xmlns:xsd'] = 'http://www.w3.org/2001/XMLSchema'
    root_node = minidom.parseString(ElementTree.tostring(xml.getroot()))
    xml_str = '\n'.join([line for line in root_node.toprettyxml(indent=' '*2).split('\n') if line.strip()])
    with open(join(ROOT, 'BSP.xml'), 'w') as xml_file:
        xml_file.write(xml_str.encode('utf-8'))
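
The XML helpers make_node(), append_node() and provide_node() are used throughout this snippet but not defined in it; minimal versions consistent with that usage might look like the following (hypothetical reconstructions, not the script's actual code):

# Hypothetical reconstructions of the helpers used above, based only on how
# they are called in this snippet.
from xml.etree import ElementTree

def make_node(tag, text):
    """Return a new <tag> element whose text content is `text`."""
    node = ElementTree.Element(tag)
    node.text = text
    return node

def append_node(parent, tag):
    """Append a new, empty <tag> element to `parent` and return it."""
    return ElementTree.SubElement(parent, tag)

def provide_node(parent, tag):
    """Return the existing <tag> child of `parent`, creating it if absent."""
    node = parent.find(tag)
    if node is None:
        node = ElementTree.SubElement(parent, tag)
    return node
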
Esempio n. 29
0
def export_project(src_paths,
                   export_path,
                   target,
                   ide,
                   libraries_paths=None,
                   linker_script=None,
                   notify=None,
                   verbose=False,
                   name=None,
                   inc_dirs=None,
                   jobs=1,
                   silent=False,
                   extra_verbose=False,
                   config=None,
                   macros=None,
                   zip_proj=None,
                   inc_repos=False,
                   build_profile=None):
    """Generates a project file and creates a zip archive if specified

    Positional Arguments:
    src_paths - a list of paths from which to find source files
    export_path - a path specifying the location of generated project files
    target - the mbed board/mcu for which to generate the executable
    ide - the IDE for which to generate the project files

    Keyword Arguments:
    libraries_paths - paths to additional libraries
    linker_script - path to the linker script for the specified target
    notify - function is passed all events, and expected to handle notification
      of the user, emit the events to a log, etc.
    verbose - assigns the notify function to toolchains print_notify_verbose
    name - project name
    inc_dirs - additional include directories
    jobs - number of threads
    silent - silent build - no output
    extra_verbose - assigns the notify function to toolchains
      print_notify_verbose
    config - toolchain's config object
    macros - User-defined macros
    zip_proj - string name of the zip archive you wish to create (omit the
      argument if you do not wish to create an archive)
    """

    # Convert src_paths to a list if needed
    if isinstance(src_paths, dict):
        paths = sum(src_paths.values(), [])
    elif isinstance(src_paths, list):
        paths = src_paths[:]
    else:
        paths = [src_paths]

    # Extend src_paths with libraries_paths
    if libraries_paths is not None:
        paths.extend(libraries_paths)

    if not isinstance(src_paths, dict):
        src_paths = {"": paths}

    # Export Directory
    if not exists(export_path):
        makedirs(export_path)

    _, toolchain_name = get_exporter_toolchain(ide)

    # Pass all params to the unified prepare_toolchain()
    toolchain = prepare_toolchain(paths,
                                  export_path,
                                  target,
                                  toolchain_name,
                                  macros=macros,
                                  jobs=jobs,
                                  notify=notify,
                                  silent=silent,
                                  verbose=verbose,
                                  extra_verbose=extra_verbose,
                                  config=config,
                                  build_profile=build_profile)
    # The first path will give the name to the library
    if name is None:
        name = basename(normpath(abspath(src_paths[0])))

    # Call unified scan_resources
    resource_dict = {
        loc: scan_resources(path, toolchain, inc_dirs=inc_dirs)
        for loc, path in src_paths.iteritems()
    }
    resources = Resources()
    toolchain.build_dir = export_path
    config_header = toolchain.get_config_header()
    resources.headers.append(config_header)
    resources.file_basepath[config_header] = dirname(config_header)

    if zip_proj:
        subtract_basepath(resources, export_path)
        for loc, res in resource_dict.iteritems():
            temp = copy.deepcopy(res)
            subtract_basepath(temp, export_path, loc)
            resources.add(temp)
    else:
        for _, res in resource_dict.iteritems():
            resources.add(res)

    # Change linker script if specified
    if linker_script is not None:
        resources.linker_script = linker_script

    files, exporter = generate_project_files(resources,
                                             export_path,
                                             target,
                                             name,
                                             toolchain,
                                             ide,
                                             macros=macros)
    files.append(config_header)
    if zip_proj:
        for resource in resource_dict.values():
            for label, res in resource.features.iteritems():
                if label not in toolchain.target.features:
                    resource.add(res)
        if isinstance(zip_proj, basestring):
            zip_export(join(export_path, zip_proj), name, resource_dict, files,
                       inc_repos)
        else:
            zip_export(zip_proj, name, resource_dict, files, inc_repos)

    return exporter
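A hypothetical driver for the function above might look like the sketch below; the source paths, target name, and IDE name are placeholders rather than values taken from this example:

# Hypothetical usage sketch for export_project(); paths and names are placeholders.
exporter = export_project(
    ['mbed-os', 'source'],      # src_paths: directories scanned for sources
    'export/uvision',           # export_path: where the project files land
    'K64F',                     # target board/MCU
    'uvision',                  # ide name understood by get_exporter_toolchain()
    name='my_app',
    macros=['MY_MACRO=1'],
    zip_proj='my_app.zip')      # omit zip_proj to skip creating an archive
# The returned exporter object describes the generated project.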
Example n. 30
0
def main():
    ignore_targets = {
        # Targets that do not compile with mbed-cli
        'ELEKTOR_COCORICO':
        'Wrong target configuration, no \'device_has\' attribute',
        'KL26Z':
        'undefined reference to \'init_data_bss\'',
        'LPC11U37_501':
        'fatal error: device.h: No such file or directory',
        'LPC11U68':
        'multiple definition of \'__aeabi_atexit\'',
        'SAMG55J19':
        'error: \'s\' undeclared here: #define OPTIMIZE_HIGH __attribute__((optimize(s)))',
        'LPC810':
        'region \'FLASH\' overflowed by 2832 bytes',
        'LPC2368':
        'undefined reference to \'__get_PRIMASK\'',
        'LPC2460':
        'undefined reference to \'__get_PRIMASK\'',
        'MTM_MTCONNECT04S_BOOT':
        'fatal error: device.h: No such file or directory',
        'MTM_MTCONNECT04S_OTA':
        'fatal error: device.h: No such file or directory',

        # Hex merge problem targets
        'NRF51_MICROBIT_BOOT':
        'Hex file problem',
        'ARCH_BLE':
        'Hex file problem',
        'RBLAB_NRF51822':
        'Hex file problem',
        'RBLAB_BLENANO':
        'Hex file problem',
        'NRF51822_BOOT':
        'Hex file problem',
        'NRF51_MICROBIT':
        'Hex file problem',
        'WALLBOT_BLE':
        'Hex file problem',
        'WALLBOT_BLE_OTA':
        'Hex file problem',
        'MTM_MTCONNECT04S':
        'Hex file problem',
        'MTM_MTCONNECT04S_BOOT':
        'Hex file problem',
        'TY51822R3_BOOT':
        'Hex file problem',
        'NRF51822_OTA':
        'Hex file problem',
        'RBLAB_NRF51822_OTA':
        'Hex file problem',
        'NRF51822_Y5_MBUG':
        'Hex file problem',
        'NRF51822':
        'Hex file problem',
        'ARCH_BLE_BOOT':
        'Hex file problem',
        'RBLAB_BLENANO_BOOT':
        'Hex file problem',
        'TY51822R3_OTA':
        'Hex file problem',
        'SEEED_TINY_BLE':
        'Hex file problem',
        'RBLAB_NRF51822_BOOT':
        'Hex file problem',
        'NRF51_DK_LEGACY':
        'Hex file problem',
        'DELTA_DFCM_NNN40_OTA':
        'Hex file problem',
        'TY51822R3':
        'Hex file problem',
        'NRF51_DONGLE_LEGACY':
        'Hex file problem',
        'DELTA_DFBM_NQ620':
        'Hex file problem',
        'WALLBOT_BLE_BOOT':
        'Hex file problem',
        'DELTA_DFCM_NNN40':
        'Hex file problem',
        'SEEED_TINY_BLE_OTA':
        'Hex file problem',
        'ARCH_LINK_OTA':
        'Hex file problem',
        'NRF51_DK_BOOT':
        'Hex file problem',
        'NRF51_DONGLE':
        'Hex file problem',
        'DELTA_DFCM_NNN40_BOOT':
        'Hex file problem',
        'NRF51_MICROBIT_B_OTA':
        'Hex file problem',
        'NRF51_MICROBIT_B_BOOT':
        'Hex file problem',
        'SEEED_TINY_BLE_BOOT':
        'Hex file problem',
        'ARCH_LINK':
        'Hex file problem',
        'NRF51_MICROBIT_B':
        'Hex file problem',
        'NRF51_DK_OTA':
        'Hex file problem',
        'RBLAB_BLENANO_OTA':
        'Hex file problem',
        'ARCH_LINK_BOOT':
        'Hex file problem',
        'ARCH_BLE_OTA':
        'Hex file problem',
        'HRM1017':
        'Hex file problem',
        'NRF52_DK':
        'Hex file problem',
        'NRF51_DONGLE_OTA':
        'Hex file problem',
        'NRF51_DONGLE_BOOT':
        'Hex file problem',
        'NRF51_MICROBIT_OTA':
        'Hex file problem',
        'NRF51_DK':
        'Hex file problem',
        'HRM1017_BOOT':
        'Hex file problem',
        'HRM1017_OTA':
        'Hex file problem',

        # LED Blink problem targets
        'LPC1549':
        'error: \'sleep\' was not declared in this scope',
        'NUMAKER_PFM_M453':
        'multiple definition of \'__wrap__sbrk\'',
        'NUMAKER_PFM_NUC472':
        'fatal error: mbedtls/config.h: No such file or directory',
        'RZ_A1H':
        'error: \'sleep\' was not declared in this scope',
        'VK_RZ_A1H':
        'error: \'sleep\' was not declared in this scope',

        # LED Blink RTOS problem targets
        'KL05Z':
        'region \'RAM\' overflowed by 3020 bytes',
        'EFM32HG_STK3400':
        'region RAM overflowed with stack',
        'VK_RZ_A1H':
        'multiple definition of \'eth_arch_enetif_init\'',
        'LPC812':
        'region \'RAM\' overflowed by 3108 bytes',
        'MAXWSNENV':
        'undefined reference to *',
        'ARM_BEETLE_SOC':
        'undefined reference to *',

        # USB Device problem targets
        'LPC1347':
        'region \'RAM\' overflowed by 156 bytes',
        'MAX32620HSP':
        'undefined reference to *',
        'EFM32HG_STK3400':
        ' region \'RAM\' overflowed by 516 bytes',
        'MAXWSNENV':
        'undefined reference to *',
        'KL27Z':
        'undefined reference to \'USBHAL\' + region \'m_data\' overflowed by 88 bytes',
    }

    with open(os.path.join(script_path, 'linker_data.json')) as linker_file:
        linker_data = json.load(linker_file)

    source_condition_map = {}
    header_condition_map = {}
    symbol_condition_map = {}
    include_dir_condition_map = {}
    src_dir_to_lib_map = {}
    resources_map = {}
    lib_builder_map = {}

    library_names = {
        'cpputest': "CppUTest",
        'usb_host': "USB Host support",
        'usb': "USB Device support",
        'ublox': "U-blox drivers",
        'rtos': "RTOS abstraction layer",
        'dsp': "DSP Library",
        'rpc': "RPC Support",
        'fat': "FAT File System support",
        'eth': "Ethernet support",
        'rtx': "Keil RTX RTOS",
        'features': 'Device features'
    }

    print("Parsing targets...")
    xml = ElementTree.parse(os.path.join(script_path, 'bsp_template.xml'))
    mcus = xml.find("SupportedMCUs")
    family = xml.find("MCUFamilies/MCUFamily")

    targets_count = 0
    for target in Exporter.TARGETS:
        print('\t' + target + '...')

        toolchain = ba.prepare_toolchain(ROOT, target, 'GCC_ARM')

        # Scan the mbed root, excluding the rtos and features directories
        res = toolchain.scan_resources(ROOT,
                                       exclude_paths=[
                                           os.path.join(ROOT, 'rtos'),
                                           os.path.join(ROOT, 'features')
                                       ])
        res.toolchain = toolchain
        # for path in src_paths[1:]:
        #     resources.add(toolchain.scan_resources(path))

        res.headers += [MBED_HEADER, ROOT]
        # res += toolchain.scan_resources(os.path.join(ROOT, 'events'))

        toolchain.config.load_resources(res)

        target_lib_macros = toolchain.config.config_to_macros(
            toolchain.config.get_config_data())
        toolchain.set_config_data(toolchain.config.get_config_data())
        toolchain.config.validate_config()

        res.relative_to(ROOT, False)
        res.win_to_unix()

        for items, object_map, is_path in [
                [res.c_sources + res.cpp_sources + res.s_sources,
                 source_condition_map, True],
                [res.headers, header_condition_map, True],
                [res.inc_dirs, include_dir_condition_map, True],
                [toolchain.get_symbols(), symbol_condition_map, False],
                [target_lib_macros, symbol_condition_map, False]]:
            for fn in items:
                if is_path:
                    fn = "$$SYS:BSP_ROOT$$/" + fn.replace("\\", "/")
                object_map.setdefault(fn, []).append(target)
        targets_count += 1
        resources_map[target] = res

        for lib in LIBRARIES:
            sources = lib['source_dir']
            if isinstance(sources, str):
                sources = [sources]
            for src in sources:
                lib_toolchain = ba.prepare_toolchain(ROOT, target, 'GCC_ARM')
                # ignore rtx while scanning rtos
                exclude_paths = [os.path.join(ROOT, 'rtos', 'rtx')
                                 ] if lib['id'] != 'rtos' else []
                lib_res = lib_toolchain.scan_resources(
                    src, exclude_paths=exclude_paths)
                lib_toolchain.config.load_resources(lib_res)
                lib_macros = lib_toolchain.config.config_to_macros(
                    lib_toolchain.config.get_config_data())
                new_lib = copy.copy(lib)
                macros = new_lib.get('macros', None)
                if macros is None:
                    macros = lib_macros
                else:
                    macros += lib_macros
                new_lib['macros'] = macros
                lib_res.relative_to(ROOT, False)
                lib_res.win_to_unix()
                lib_builder_map.setdefault(
                    new_lib['id'],
                    LibraryBuilder(new_lib, target)).append_resources(
                        target, lib_res, macros)
                src_dir_to_lib_map[src] = new_lib['id']

        # Add specific features as a library
        features_path = os.path.join(ROOT, 'features')
        features_toolchain = ba.prepare_toolchain(features_path, target,
                                                  'GCC_ARM')
        features_resources = features_toolchain.scan_resources(features_path)
        features_toolchain.config.load_resources(features_resources)
        new_macros = features_toolchain.config.config_to_macros(
            features_toolchain.config.get_config_data())
        features_macros = [x for x in new_macros if x not in target_lib_macros]
        # if 'MBED_CONF_LWIP_ADDR_TIMEOUT=5' in features_macros:
        #     features_macros.remove('MBED_CONF_LWIP_ADDR_TIMEOUT=5')
        #     features_macros.append('MBED_CONF_LWIP_ADDR_TIMEOUT=$$com.sysprogs.bspoptions.lwip.addr_timeout$$')
        if 'MBED_CONF_LWIP_IPV6_ENABLED=0' in features_macros:
            features_macros.remove('MBED_CONF_LWIP_IPV6_ENABLED=0')
            features_macros.append(
                'MBED_CONF_LWIP_IPV6_ENABLED=$$com.sysprogs.bspoptions.lwip.ipv6_en$$'
            )
        if 'MBED_CONF_LWIP_IPV4_ENABLED=1' in features_macros:
            features_macros.remove('MBED_CONF_LWIP_IPV4_ENABLED=1')
            features_macros.append(
                'MBED_CONF_LWIP_IPV4_ENABLED=$$com.sysprogs.bspoptions.lwip.ipv4_en$$'
            )

        features_resources.relative_to(ROOT, False)
        features_resources.win_to_unix()
        features_lib = {
            'id': 'features',
            'source_dir': os.path.join(ROOT, 'features'),
            'build_dir': tools.libraries.RTOS_LIBRARIES,
            'dependencies': [
                tools.libraries.MBED_LIBRARIES, tools.libraries.MBED_RTX,
                tools.libraries.RTOS_LIBRARIES
            ],
            'macros': features_macros
        }
        for feature in toolchain.config.get_features():
            if feature in features_resources.features:
                features_resources += features_resources.features[feature]
        lib_builder_map.setdefault(
            'features',
            LibraryBuilder(features_lib, target)).append_resources(
                target, features_resources, features_macros)
        src_dir_to_lib_map[features_path] = 'features'

    for fw in lib_builder_map.values():
        fw.DependencyIDs = set([])
        for dep in fw.Dependencies:
            id = src_dir_to_lib_map.get(dep)
            if id is not None:
                fw.DependencyIDs.add(id)

    # Set flags that differ between targets
    include_ignored_targets = '--alltargets' in sys.argv

    for target in Exporter.TARGETS:
        res = resources_map.get(target, None)
        if res is None:
            print('Target ignored: ' + target + ': No resources')
            continue
        if res.linker_script is None:
            print('Target ignored: ' + target + ': No linker script')
            continue
        if not include_ignored_targets and target in ignore_targets:
            print('Target ' + target + ' ignored: ' + ignore_targets[target])
            continue

        mcu = ElementTree.Element('MCU')
        mcu.append(make_node('ID', target))
        mcu.append(make_node('HierarchicalPath', 'Mbed'))
        mcu.append(make_node('FamilyID', family.find('ID').text))

        props_list = provide_node(
            provide_node(
                provide_node(provide_node(mcu, "ConfigurableProperties"),
                             "PropertyGroups"), "PropertyGroup"), "Properties")

        if 'FEATURE_LWIP=1' in symbol_condition_map:
            if target in symbol_condition_map['FEATURE_LWIP=1']:
                prop_node = ElementTree.SubElement(props_list, "PropertyEntry",
                                                   {"xsi:type": "Enumerated"})
                prop_node.extend([
                    make_node('Name', 'LWIP IPV6 config'),
                    make_node('UniqueID',
                              'com.sysprogs.bspoptions.lwip.ipv6_en'),
                    make_node('DefaultEntryIndex', '1')
                ])
                list_node = ElementTree.SubElement(prop_node, 'SuggestionList')
                ElementTree.SubElement(list_node, "Suggestion").extend([
                    make_node("UserFriendlyName", "enable"),
                    make_node("InternalValue", '1')
                ])
                ElementTree.SubElement(list_node, "Suggestion").extend([
                    make_node("UserFriendlyName", "disable"),
                    make_node("InternalValue", '0')
                ])

                prop_node = ElementTree.SubElement(props_list, "PropertyEntry",
                                                   {"xsi:type": "Enumerated"})
                prop_node.extend([
                    make_node("Name", "LWIP IPV4 config"),
                    make_node("UniqueID",
                              "com.sysprogs.bspoptions.lwip.ipv4_en"),
                    make_node("DefaultEntryIndex", "0")
                ])
                list_node = ElementTree.SubElement(prop_node, "SuggestionList")
                ElementTree.SubElement(list_node, "Suggestion").extend([
                    make_node("UserFriendlyName", "enable"),
                    make_node("InternalValue", '1')
                ])
                ElementTree.SubElement(list_node, "Suggestion").extend([
                    make_node("UserFriendlyName", "disable"),
                    make_node("InternalValue", '0')
                ])

        flags = append_node(mcu, "CompilationFlags")
        for (node, cond_map) in [
            [append_node(mcu, "AdditionalSourceFiles"), source_condition_map],
            [append_node(mcu, "AdditionalHeaderFiles"), header_condition_map],
            [append_node(flags, "IncludeDirectories"),
             include_dir_condition_map],
            [append_node(flags, "PreprocessorMacros"), symbol_condition_map]
        ]:
            for (filename, targets) in cond_map.items():
                if len(set(targets)) < targets_count and target in targets:
                    node.append(make_node("string", filename))

        flagList = res.toolchain.cpu[:]
        if "-mfloat-abi=softfp" in flagList:
            flagList.remove("-mfloat-abi=softfp")
            flagList.append("$$com.sysprogs.bspoptions.arm.floatmode$$")
            prop_node = ElementTree.SubElement(props_list, "PropertyEntry",
                                               {"xsi:type": "Enumerated"})
            prop_node.extend([
                make_node("Name", "Floating point support"),
                make_node("UniqueID", "com.sysprogs.bspoptions.arm.floatmode"),
                make_node("DefaultEntryIndex", "2")
            ])
            list_node = ElementTree.SubElement(prop_node, "SuggestionList")
            ElementTree.SubElement(list_node, "Suggestion").extend([
                make_node("UserFriendlyName", "Software"),
                make_node("InternalValue", "-mfloat-abi=soft")
            ])
            ElementTree.SubElement(list_node, "Suggestion").extend([
                make_node("UserFriendlyName", "Hardware"),
                make_node("InternalValue", "-mfloat-abi=hard")
            ])
            ElementTree.SubElement(list_node, "Suggestion").extend([
                make_node("UserFriendlyName",
                          "Hardware with Software interface"),
                make_node("InternalValue", "-mfloat-abi=softfp")
            ])
            ElementTree.SubElement(list_node, "Suggestion").extend([
                make_node("UserFriendlyName", "Unspecified"),
                make_node("InternalValue", "")
            ])

        ElementTree.SubElement(flags, "COMMONFLAGS").text = " ".join(flagList)
        ElementTree.SubElement(
            flags,
            "LinkerScript").text = "$$SYS:BSP_ROOT$$/" + res.linker_script

        mems = parse_linker_script(os.path.join(ROOT, res.linker_script))
        ram_size = str(sum([m.Size for m in mems
                            if ("RAM" in m.Name.upper())]))
        flash_size = str(
            sum([m.Size for m in mems if ("FLASH" in m.Name.upper())]))
        if target in linker_data:
            ram_size = linker_data[target]['RAM']
            flash_size = linker_data[target]['FLASH']
        else:
            print('No RAM and FLASH sizes for target ' + target)
        mcu.append(make_node("RAMSize", ram_size))
        mcu.append(make_node("FLASHSize", flash_size))

        mem_list = ElementTree.SubElement(
            ElementTree.SubElement(mcu, "MemoryMap"), "Memories")
        for mem in mems:
            mem_el = ElementTree.SubElement(mem_list, "MCUMemory")
            mem_el.append(make_node("Name", mem.Name))
            mem_el.append(make_node("Address", str(mem.Start)))
            mem_el.append(make_node("Size", str(mem.Size)))
            if mem.Name.upper() == "FLASH":
                mem_el.append(make_node("Flags", "IsDefaultFLASH"))
            if mem.Name.upper() == "RAM":
                mem_el.append(make_node("LoadedFromMemory", "FLASH"))

        mcus.append(mcu)

    # Set flags shared between targets
    flags = append_node(family, "CompilationFlags")
    for (node, cond_map) in [
        [append_node(family, "AdditionalSourceFiles"), source_condition_map],
        [append_node(family, "AdditionalHeaderFiles"), header_condition_map],
        [append_node(flags, "IncludeDirectories"), include_dir_condition_map],
        [append_node(flags, "PreprocessorMacros"), symbol_condition_map]
    ]:
        for (filename, targets) in cond_map.items():
            if len(set(targets)) == targets_count:
                node.append(make_node("string", filename))

    family.find("AdditionalSourceFiles").append(
        make_node("string", "$$SYS:BSP_ROOT$$/stubs.cpp"))
    cond_list = xml.find("FileConditions")
    flag_cond_list = xml.find("ConditionalFlags")

    # Add frameworks
    for lib in lib_builder_map.values():
        fw = ElementTree.SubElement(xml.find("Frameworks"),
                                    "EmbeddedFramework")
        if len(lib.SupportedTargets) != targets_count:
            fw.append(
                make_node("MCUFilterRegex",
                          "|".join(lib.SupportedTargets.keys())))

        fw.append(make_node("ID", "com.sysprogs.arm.mbed." + lib.ID))
        fw.append(make_node("ProjectFolderName", lib.ID))
        fw.append(
            make_node("UserFriendlyName",
                      library_names.get(lib.ID, lib.ID + " library")))
        ElementTree.SubElement(fw, "AdditionalSourceFiles").extend([
            make_node("string", fn) for fn in lib.source_condition_map.keys()
        ])
        ElementTree.SubElement(fw, "AdditionalHeaderFiles").extend([
            make_node("string", fn) for fn in lib.header_condition_map.keys()
        ])
        ElementTree.SubElement(fw, "AdditionalIncludeDirs").extend([
            make_node("string", fn)
            for (fn, cond) in lib.include_dir_condition_map.items()
            if len(cond) == len(lib.SupportedTargets)
        ])
        ElementTree.SubElement(fw, "AdditionalPreprocessorMacros").extend([
            make_node("string", fn) for fn in lib.macros_condition_map.keys()
        ])
        if len(lib.DependencyIDs) > 0:
            ElementTree.SubElement(fw, "RequiredFrameworks").extend([
                make_node("string", "com.sysprogs.arm.mbed." + id)
                for id in lib.DependencyIDs
            ])
        # ET.SubElement(ET.SubElement(fw, "AdditionalSystemVars"), "SysVarEntry").extend([make_node("Key", "com.sysprogs.arm.mbed." + lib.ID + ".included"), make_node("Value", "1")])

        for (fn, cond) in (lib.source_condition_map.items() +
                           lib.header_condition_map.items()):
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError(
                    "Source file condition list longer than the framework condition list. "
                    "Check how the framework conditions are formed.")
            file_cond_node = ElementTree.SubElement(cond_list, "FileCondition")
            h_cond_node = ElementTree.SubElement(file_cond_node,
                                                 "ConditionToInclude",
                                                 {"xsi:type": "MatchesRegex"})
            h_cond_node.append(make_node("Expression", "$$SYS:MCU_ID$$"))
            h_cond_node.append(make_node("Regex", "|".join(cond)))
            file_cond_node.append(make_node("FilePath", fn))

        for (inc_dir, cond) in lib.include_dir_condition_map.items():
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError(
                    "Include directory condition list longer than the "
                    "framework condition list. "
                    "Check how the framework conditions are formed.")
            flag_cond_node = ElementTree.SubElement(flag_cond_list,
                                                    "ConditionalToolFlags")
            cond_list_node = ElementTree.SubElement(
                ElementTree.SubElement(flag_cond_node, "FlagCondition",
                                       {"xsi:type": "And"}), "Arguments")
            ElementTree.SubElement(cond_list_node, "Condition", {
                "xsi:type": "ReferencesFramework"
            }).append(
                make_node("FrameworkID", "com.sysprogs.arm.mbed." + lib.ID))
            ElementTree.SubElement(cond_list_node, "Condition", {
                "xsi:type": "MatchesRegex"
            }).extend([
                make_node("Expression", "$$SYS:MCU_ID$$"),
                make_node("Regex", "|".join(cond))
            ])
            flags_node = ElementTree.SubElement(flag_cond_node, "Flags")
            include_dir_list_node = ElementTree.SubElement(
                flags_node, "IncludeDirectories")
            include_dir_list_node.append(make_node("string", inc_dir))

        for (macro, cond) in lib.macros_condition_map.items():
            if len(cond) == len(lib.SupportedTargets):
                continue
            if len(cond) > len(lib.SupportedTargets):
                raise AssertionError(
                    'The number of macro conditions is larger than the '
                    'number of supported targets')
            macro_cond_node = ElementTree.SubElement(flag_cond_list,
                                                     "ConditionalToolFlags")
            macro_list_node = ElementTree.SubElement(
                ElementTree.SubElement(macro_cond_node, "FlagCondition",
                                       {"xsi:type": "And"}), "Arguments")
            ElementTree.SubElement(macro_list_node, "Condition", {
                "xsi:type": "ReferencesFramework"
            }).append(
                make_node("FrameworkID", "com.sysprogs.arm.mbed." + lib.ID))
            ElementTree.SubElement(macro_list_node, "Condition", {
                "xsi:type": "MatchesRegex"
            }).extend([
                make_node("Expression", "$$SYS:MCU_ID$$"),
                make_node("Regex", "|".join(cond))
            ])
            macro_flags_node = ElementTree.SubElement(macro_cond_node, 'Flags')
            macros_node = ElementTree.SubElement(macro_flags_node,
                                                 'PreprocessorMacros')
            macros_node.append(make_node('string', macro))

    samples = xml.find('Examples')
    for (root, dirs, files) in os.walk(os.path.join(ROOT, 'samples')):
        for subdir in dirs:
            samples.append(make_node('string', 'samples/' + basename(subdir)))

    root_el = xml.getroot()
    root_el.attrib['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
    root_el.attrib['xmlns:xsd'] = 'http://www.w3.org/2001/XMLSchema'
    root_node = minidom.parseString(ElementTree.tostring(root_el))
    xml_str = '\n'.join([
        line for line in root_node.toprettyxml(indent=' ' * 2).split('\n')
        if line.strip()
    ])
    with open(join(ROOT, 'BSP.xml'), 'w') as xml_file:
        xml_file.write(xml_str.encode('utf-8'))
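The script above leans on a few XML helpers (make_node, provide_node, append_node) that are not shown in this example. A rough sketch of what they would need to do, inferred purely from how they are called above, is:

from xml.etree import ElementTree

def make_node(tag, text):
    # Sketch: return a new element <tag> with the given text content.
    node = ElementTree.Element(tag)
    node.text = text
    return node

def append_node(parent, tag):
    # Sketch: append a new, empty child <tag> to parent and return it.
    return ElementTree.SubElement(parent, tag)

def provide_node(parent, tag):
    # Sketch: return the existing child <tag>, creating it if it is missing.
    node = parent.find(tag)
    if node is None:
        node = ElementTree.SubElement(parent, tag)
    return node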