Example #1
    def test_task_copy_files(self, d):
        os.chdir(d.path)

        file_path1 = os.path.join(d.path, 'test-copy-1.txt')
        file_path2 = os.path.join(d.path, 'test-copy-2.txt')
        to_path = os.path.join(d.path, 'files')

        d.write(file_path1, 'sample data'.encode('utf-8'))
        d.write(file_path2, 'sample data'.encode('utf-8'))

        FileUtil.create_dir(to_path)

        task = Task(task_type=Task.TYPE_COPY_FILES,
                    task_name='Sample copy files task',
                    task_params={
                        'from': os.path.join(d.path, '*.txt'),
                        'to': to_path
                    })

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)
        task.run(process_data=process_data,
                 template_data=template_data,
                 working_dir=d.path)

        self.assertTrue(
            os.path.exists(os.path.join(to_path, 'test-copy-1.txt')))
        self.assertTrue(
            os.path.exists(os.path.join(to_path, 'test-copy-2.txt')))
Example #2
    def test_write_to_file(self, d):
        os.chdir(d.path)

        filename = 'new-file.txt'

        FileUtil.write_to_file('.', filename, 'content test')

        self.assertTrue(os.path.isfile(filename))
        self.assertEqual(os.path.getsize(filename), 12)
Example #3
File: repository.py Project: nut799/ezored
    def get_vendor_dir(self):
        if self.rep_type == Repository.TYPE_GITHUB:
            return os.path.join(FileUtil.get_current_dir(),
                                Constants.VENDOR_DIR, self.get_dir_name())
        elif self.rep_type == Repository.TYPE_LOCAL:
            return os.path.join(FileUtil.get_current_dir(),
                                Constants.VENDOR_DIR, self.get_dir_name())
        else:
            return ''
Example #4
    def test_create_dir(self, d):
        os.chdir(d.path)

        dir_name_1 = 'new-dir-1'
        dir_name_2 = 'new-dir-2'

        FileUtil.create_dir(dir_name_1)
        FileUtil.create_dir(os.path.join(dir_name_1, dir_name_2))

        self.assertTrue(os.path.isdir(dir_name_1))
        self.assertTrue(os.path.isdir(os.path.join(dir_name_1, dir_name_2)))
Example #5
    def initialize(self):
        from ezored.models.logger import Logger
        from ezored.models.constants import Constants
        from ezored.models.util.file_util import FileUtil

        Logger.i('Cleaning...')

        FileUtil.remove_dir(Constants.TEMP_DIR)
        FileUtil.remove_dir(Constants.VENDOR_DIR)

        Logger.i('Finished')
Example #6
    def test_remove_file(self, d):
        os.chdir(d.path)

        filename = 'new-file.txt'

        FileUtil.write_to_file('.', filename, 'content test')

        self.assertTrue(os.path.isfile(filename))

        FileUtil.remove_file(filename)

        self.assertFalse(os.path.isfile(filename))
Example #7
    def test_task_run_all_tasks(self, d):
        os.chdir(d.path)

        file_content = """
file = open("test-target-file.txt", "w") 
file.write("{0}") 
file.close()
        """

        file_content = file_content.format(Constants.PROJECT_NAME)

        file_path = os.path.join(d.path, 'test-file.py')
        target_file_path = os.path.join(d.path, 'test-target-file.txt')

        d.write(file_path, file_content.encode('utf-8'))

        task = Task(task_type=Task.TYPE_RUN,
                    task_name='Sample run task - run all tasks',
                    task_params={'args': ['python', file_path]})

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)

        Task.run_all_tasks([task],
                           process_data=process_data,
                           template_data=template_data,
                           working_dir=d.path)

        content = FileUtil.read_file(target_file_path)

        self.assertEqual(Constants.PROJECT_NAME, content)
Example #8
    def test_reset(self, d):
        os.chdir(d.path)

        current_dir = FileUtil.get_current_dir()

        process_data = ProcessData()
        process_data.reset()

        process_data.project_name = Constants.PROJECT_NAME

        self.assertEqual(process_data.project_name, Constants.PROJECT_NAME)
        self.assertEqual(process_data.project_home_dir, current_dir)

        self.assertEqual(process_data.dependency_name, '')
        self.assertEqual(process_data.dependency_source_dir, '')
        self.assertEqual(process_data.dependency_build_dir, '')
        self.assertEqual(process_data.dependency_temp_dir, '')
        self.assertEqual(process_data.dependency_vendor_dir, '')

        self.assertEqual(process_data.target_name, '')
        self.assertEqual(process_data.target_source_dir, '')
        self.assertEqual(process_data.target_build_dir, '')
        self.assertEqual(process_data.target_temp_dir, '')
        self.assertEqual(process_data.target_vendor_dir, '')

        self.assertEqual(process_data.temp_dir,
                         os.path.join(current_dir, Constants.TEMP_DIR))
        self.assertEqual(process_data.build_dir,
                         os.path.join(current_dir, Constants.BUILD_DIR))
        self.assertEqual(process_data.vendor_dir,
                         os.path.join(current_dir, Constants.VENDOR_DIR))
Example #9
File: repository.py Project: nut799/ezored
    def build(self, process_data):
        Logger.i('Building repository: {0}...'.format(self.get_name()))

        vendor_file_data = self.load_vendor_file_data()

        if 'vendor' in vendor_file_data:
            vendor_data = vendor_file_data['vendor']

            if 'build' in vendor_data:
                vendor_data_build = vendor_data['build']

                exitcode, stderr, stdout = FileUtil.run(
                    vendor_data_build, self.get_temp_dir(),
                    process_data.get_environ())

                if exitcode == 0:
                    Logger.i('Build finished for repository: {0}'.format(
                        self.get_name()))
                else:
                    if stdout:
                        Logger.i('Build output for repository: {0}'.format(
                            self.get_name()))
                        Logger.clean(stdout)

                    if stderr:
                        Logger.i(
                            'Error output while build repository: {0}'.format(
                                self.get_name()))
                        Logger.clean(stderr)

                    Logger.f('Failed to build repository: {0}'.format(
                        self.get_name()))
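The exit-code handling above follows the same pattern used by the task and target build examples later in this listing: FileUtil.run returns an (exitcode, stderr, stdout) tuple, the captured output is logged only on failure, and Logger.f reports the fatal error. A minimal sketch of that pattern as a standalone helper; the helper name is hypothetical, and the import paths are the ones shown in Example #5.

# Illustrative helper (hypothetical), condensing the run-and-report pattern above.
# FileUtil.run is assumed to return (exitcode, stderr, stdout), as in the example.
from ezored.models.logger import Logger
from ezored.models.util.file_util import FileUtil


def run_and_report(args, working_dir, environ, label):
    exitcode, stderr, stdout = FileUtil.run(args, working_dir, environ)

    if exitcode == 0:
        Logger.i('Finished: {0}'.format(label))
    else:
        if stdout:
            Logger.i('Output for: {0}'.format(label))
            Logger.clean(stdout)

        if stderr:
            Logger.i('Error output for: {0}'.format(label))
            Logger.clean(stderr)

        Logger.f('Failed: {0}'.format(label))

    return exitcode == 0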
Example #10
    def test_normalize_path_from_list(self):
        paths = ['C:\\ezored\\Test1', 'C:\\ezored\\Test2']
        normalized = FileUtil.normalize_path_from_list(paths)
        expected1 = 'C:/ezored/Test1'
        expected2 = 'C:/ezored/Test2'

        self.assertEqual(normalized[0], expected1)
        self.assertEqual(normalized[1], expected2)
Example #11
    def test_find_files(self, d):
        os.chdir(d.path)

        FileUtil.write_to_file('.', 'file1.txt', '')
        FileUtil.write_to_file('.', 'file2.txt', '')
        FileUtil.write_to_file('.', 'file3.log', '')

        files_txt = FileUtil.find_files('file*.txt')
        files_log = FileUtil.find_files('file*.log')

        self.assertEqual(len(files_txt), 2)
        self.assertEqual(len(files_log), 1)
Example #12
    def download_from_git(self):
        # download
        Logger.i('Downloading repository: {0}...'.format(self.get_name()))

        force_download = False

        rep_path, rep_type, rep_version = self.get_git_data()

        download_filename = self.get_download_filename()
        download_dest_dir = Constants.TEMP_DIR
        download_dest_path = os.path.join(download_dest_dir, download_filename)

        downloaded_version = GitUtil.get_current_downloaded_repository_version(
            download_dest_path)

        if rep_type == Constants.GIT_TYPE_BRANCH:
            force_download = True

        if downloaded_version is not None:
            if downloaded_version != rep_version:
                Logger.i(
                    'Repository downloaded version ({0}) is different from configured version ({1}), '
                    'downloading configured version...'.format(
                        downloaded_version.strip(), rep_version.strip()))

                force_download = True

        # skip if exists
        if not force_download and os.path.isdir(download_dest_path):
            Logger.i('Repository already downloaded: {0}'.format(
                self.get_name()))
        else:
            FileUtil.remove_dir(download_dest_path)

            GitUtil.download(rep_path, rep_type, rep_version,
                             download_dest_path)

            # check if file was downloaded
            if os.path.isdir(download_dest_path):
                Logger.i('Repository downloaded: {0}'.format(self.get_name()))
            else:
                Logger.f('Problems when download repository: {0}'.format(
                    self.get_name()))
Example #13
    def prepare_from_process_data(self, process_data):
        if process_data:
            process_data.set_target_data(
                name=self.get_name(),
                temp_dir=self.repository.get_temp_dir(),
                vendor_dir=self.repository.get_vendor_dir(),
                source_dir=self.repository.get_source_dir(),
                build_dir=os.path.join(FileUtil.get_current_dir(),
                                       Constants.BUILD_DIR, self.get_name()),
            )

            if self.repository:
                self.repository.prepare_from_process_data(process_data)
Example #14
    def test_temp_working_dir(self, d):
        os.chdir(d.path)

        repository = Repository.from_dict({
            'type': Constants.REPOSITORY_TYPE_TAR,
            'path': 'http://ezored.com/downloads/dependency-sample.tar.gz',
        })

        temp_working_dir = repository.get_temp_dir()

        self.assertEqual(
            temp_working_dir,
            os.path.join(FileUtil.get_current_dir(), Constants.TEMP_DIR, repository.get_temp_dir())
        )
Example #15
    def reset(self):
        self.project_name = ''
        self.project_home_dir = FileUtil.normalize_path(
            FileUtil.get_current_dir())

        self.target_temp_dir = ''
        self.target_source_dir = ''
        self.target_vendor_dir = ''
        self.target_build_dir = ''
        self.target_name = ''

        self.dependency_temp_dir = ''
        self.dependency_source_dir = ''
        self.dependency_vendor_dir = ''
        self.dependency_build_dir = ''
        self.dependency_name = ''

        self.temp_dir = FileUtil.normalize_path(
            os.path.join(self.project_home_dir, Constants.TEMP_DIR))
        self.build_dir = FileUtil.normalize_path(
            os.path.join(self.project_home_dir, Constants.BUILD_DIR))
        self.vendor_dir = FileUtil.normalize_path(
            os.path.join(self.project_home_dir, Constants.VENDOR_DIR))
Example #16
    def test_git_temp_working_dir(self, d):
        os.chdir(d.path)

        repository = Repository.from_dict({
            'type': Constants.REPOSITORY_TYPE_GIT,
            'path': 'https://github.com/ezored/dependency-sample.git',
            'version': 't:1.0.0',
        })

        temp_working_dir = repository.get_temp_dir()

        self.assertEqual(
            temp_working_dir,
            os.path.join(FileUtil.get_current_dir(), Constants.TEMP_DIR,
                         repository.get_temp_dir()))
Example #17
    def parse(self, process_data):
        if process_data:
            Logger.d('Parsing target data...')

            self.project_name = process_data.parse_text(self.project_name)

            self.header_search_paths = process_data.parse_text_list(self.header_search_paths)
            self.header_search_paths = FileUtil.normalize_path_from_list(self.header_search_paths)

            self.library_search_paths = process_data.parse_text_list(self.library_search_paths)
            self.library_search_paths = FileUtil.normalize_path_from_list(self.library_search_paths)

            self.source_groups = process_data.parse_sourge_group_list(self.source_groups)

            self.library_links = process_data.parse_text_list(self.library_links)
            self.framework_links = process_data.parse_text_list(self.framework_links)

            self.c_flags = process_data.parse_text_list(self.c_flags)
            self.cxx_flags = process_data.parse_text_list(self.cxx_flags)
            self.compiler_options = process_data.parse_text_list(self.compiler_options)

            self.tasks = process_data.parse_task_list(self.tasks)
        else:
            Logger.d('Cannot parse target data with invalid source')
Example #18
    def test_github_temp_working_dir(self, d):
        os.chdir(d.path)

        repository = Repository.from_dict({
            'type': 'github',
            'name': 'ezored/dependency-sample',
            'version': 't:1.0.0',
        })

        temp_working_dir = repository.get_temp_dir()

        self.assertEqual(
            temp_working_dir,
            os.path.join(FileUtil.get_current_dir(), Constants.TEMP_DIR,
                         repository.get_dir_name()))
Example #19
File: repository.py Project: nut799/ezored
    def download_from_github(self):
        # download
        Logger.i('Downloading repository: {0}...'.format(self.get_name()))

        download_url = self.get_download_url()
        download_filename = self.get_download_filename()
        download_dest_dir = Constants.TEMP_DIR
        download_dest_path = os.path.join(Constants.TEMP_DIR,
                                          download_filename)
        unpacked_dir = self.get_temp_dir()
        unpack_dir = Constants.TEMP_DIR
        force_download = False

        _, git_data_type, git_data_version = self.get_git_data()

        if git_data_type == Repository.GIT_TYPE_BRANCH:
            force_download = True

        # skip if exists
        if not force_download and os.path.isfile(download_dest_path):
            Logger.i('Repository already downloaded: {0}'.format(
                self.get_name()))
        else:
            FileUtil.remove_file(download_dest_path)

            DownloadUtil.download_file(download_url, download_dest_dir,
                                       download_filename)

            # check if file was downloaded
            if os.path.isfile(download_dest_path):
                Logger.i('Repository downloaded: {0}'.format(self.get_name()))
            else:
                Logger.f('Problems when download repository: {0}'.format(
                    self.get_name()))

        # unpack
        Logger.i('Unpacking repository: {0}...'.format(self.get_name()))

        if not force_download and os.path.isdir(unpacked_dir):
            Logger.i('Repository already unpacked: {0}...'.format(
                self.get_name()))
        else:
            FileUtil.remove_dir(unpacked_dir)

            # untar file
            FileUtil.create_dir(unpack_dir)

            tar = tarfile.open(download_dest_path)
            tar.extractall(path=unpack_dir)
            tar.close()

            if os.path.isdir(unpacked_dir):
                Logger.i('Repository unpacked: {0}'.format(self.get_name()))
            else:
                Logger.f('Problems when unpack repository: {0}'.format(
                    self.get_name()))
Example #20
    def get_vendor_dir(self):
        if self.rep_type == Constants.REPOSITORY_TYPE_GIT:
            return FileUtil.normalize_path(
                os.path.join(FileUtil.get_current_dir(), Constants.VENDOR_DIR,
                             self.get_dir_name()))
        elif self.rep_type == Constants.REPOSITORY_TYPE_LOCAL:
            return FileUtil.normalize_path(
                os.path.join(FileUtil.get_current_dir(), Constants.VENDOR_DIR,
                             self.get_dir_name()))
        elif self.rep_type == Constants.REPOSITORY_TYPE_ZIP:
            return FileUtil.normalize_path(
                os.path.join(FileUtil.get_current_dir(), Constants.VENDOR_DIR,
                             self.get_dir_name()))
        elif self.rep_type == Constants.REPOSITORY_TYPE_TAR:
            return FileUtil.normalize_path(
                os.path.join(FileUtil.get_current_dir(), Constants.VENDOR_DIR,
                             self.get_dir_name()))
        else:
            return ''
Example #21
    def download_from_zip(self):
        # download
        Logger.i('Downloading repository: {0}...'.format(self.get_name()))

        download_url = self.get_download_url()
        download_filename = self.get_download_filename()
        download_dest_dir = Constants.TEMP_DIR
        download_dest_path = os.path.join(download_dest_dir, download_filename)
        unpacked_dir = self.get_temp_dir()
        unpack_dir = download_dest_dir

        # skip if exists
        if os.path.isfile(download_dest_path):
            Logger.i('Repository already downloaded: {0}'.format(
                self.get_name()))
        else:
            FileUtil.remove_file(download_dest_path)

            DownloadUtil.download_file(download_url, download_dest_dir,
                                       download_filename)

            # check if file was downloaded
            if os.path.isfile(download_dest_path):
                Logger.i('Repository downloaded: {0}'.format(self.get_name()))
            else:
                Logger.f('Problems when download repository: {0}'.format(
                    self.get_name()))

        # unpack
        Logger.i('Unpacking repository: {0}...'.format(self.get_name()))

        if os.path.isdir(unpacked_dir):
            Logger.i('Repository already unpacked: {0}...'.format(
                self.get_name()))
        else:
            FileUtil.remove_dir(unpacked_dir)

            # unpack file
            FileUtil.create_dir(unpack_dir)

            zipref = zipfile.ZipFile(download_dest_path, 'r')
            zipref.extractall(path=unpack_dir)
            zipref.close()

            if os.path.isdir(unpacked_dir):
                Logger.i('Repository unpacked: {0}'.format(self.get_name()))
            else:
                Logger.f('Problems when unpack repository: {0}'.format(
                    self.get_name()))
Example #22
    def test_target_github_parse_file(self, d):
        os.chdir(d.path)

        project_file_data = """
config:
  name: EzoRed
targets:
  - name: github-test 
    repository:
      name: ezored/target-github-test
      type: github
      version: b:master
dependencies:
  - repository:
      name: ezored/dependency-github-test
      type: github
      version: b:master      
"""

        d.write(Constants.PROJECT_FILE, project_file_data.encode('utf-8'))

        output = popen(['ezored', 'dependency', 'update', '-d'],
                       stdout=PIPE).communicate()[0]
        output = str(output)
        print(output)

        output = popen(['ezored', 'target', 'build', 'github-test', '-d'],
                       stdout=PIPE).communicate()[0]
        output = str(output)
        print(output)

        required = 'Build finished for target: github-test'
        self.assertTrue(required in output)

        file_to_read = os.path.join('vendor', 'target-github-test-master',
                                    'file-to-parse.txt')
        self.assertTrue(os.path.exists(file_to_read))

        content = FileUtil.read_file(file_to_read)
        self.assertEqual(content, Constants.PROJECT_NAME)
Example #23
    def test_task_parse_file(self, d):
        os.chdir(d.path)

        file_path = os.path.join(d.path, '*.txt')

        d.write(file_path, '{{ name }}'.encode('utf-8'))

        task = Task(task_type=Task.TYPE_PARSE_FILE,
                    task_name='Sample parse file task',
                    task_params={'file': file_path})

        process_data = ProcessData()
        template_data = {'name': Constants.PROJECT_NAME}

        task.parse(process_data)
        task.run(process_data=process_data,
                 template_data=template_data,
                 working_dir=d.path)

        content = FileUtil.read_file(file_path)

        self.assertEqual(Constants.PROJECT_NAME, content)
Example #24
File: task.py Project: nicoddemus/ezored
    def run(self, process_data, template_data, working_dir):
        Logger.d('Running task: {0}...'.format(self.get_name()))

        if process_data:
            if self.type == self.TYPE_COPY_FILE:
                from_path = self.params['from'] if self.params['from'] else None
                to_path = self.params['to'] if self.params['to'] else None

                FileUtil.copy_file(from_path=from_path, to_path=to_path)

            elif self.type == self.TYPE_COPY_FILES:
                to_path = self.params['to'] if self.params['to'] else None
                file_pattern = self.params[
                    'from'] if 'from' in self.params else None
                file_pattern = process_data.parse_text(file_pattern)
                found_files = FileUtil.find_files(file_pattern)

                for f in found_files:
                    if f:
                        FileUtil.copy_file(from_path=f,
                                           to_path=os.path.join(
                                               to_path, os.path.basename(f)))

            elif self.type == self.TYPE_PARSE_FILE:
                file_pattern = self.params[
                    'file'] if 'file' in self.params else None
                file_pattern = process_data.parse_text(file_pattern)
                found_files = FileUtil.find_files(file_pattern)

                for f in found_files:
                    if f:
                        template_file = os.path.abspath(f)
                        template_loader = jinja2.FileSystemLoader(
                            searchpath=os.path.dirname(template_file))
                        template_env = jinja2.Environment(
                            loader=template_loader)
                        template = template_env.get_template(
                            os.path.basename(template_file))
                        templ_result = template.render(template_data)

                        FileUtil.write_to_file(os.path.dirname(template_file),
                                               os.path.basename(template_file),
                                               str(templ_result))

            elif self.type == self.TYPE_RUN:
                run_args = self.params[
                    'args'] if 'args' in self.params else None

                if run_args:
                    exitcode, stderr, stdout = FileUtil.run(
                        run_args, working_dir,
                        process_data.get_merged_data_for_runner())

                    if exitcode == 0:
                        Logger.i('Run finished for task: {0}'.format(
                            self.get_name()))
                    else:
                        if stdout:
                            Logger.i('Run output for task: {0}'.format(
                                self.get_name()))
                            Logger.clean(stdout)

                        if stderr:
                            Logger.i('Error output while run task: {0}'.format(
                                self.get_name()))
                            Logger.clean(stderr)

                        Logger.f('Failed to run task: {0}'.format(
                            self.get_name()))

            else:
                Logger.f('Invalid task type')
        else:
            Logger.d('Process data is invalid to run task')
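Read together with the tests earlier in this listing, run() shows which task_params keys each task type consumes: TYPE_COPY_FILE and TYPE_COPY_FILES read 'from' and 'to', TYPE_PARSE_FILE reads 'file', and TYPE_RUN reads 'args'. A brief hedged sketch constructing one task of each type; the Task import path is assumed (it is not shown in the scraped snippets) and the parameter values are placeholders.

# Assumed import path, following the ezored.models.* layout from Example #5.
from ezored.models.task import Task

# 'from'/'to' for copy tasks, as in Examples #1 and #24.
copy_task = Task(task_type=Task.TYPE_COPY_FILES,
                 task_name='Copy text files',
                 task_params={'from': '*.txt', 'to': 'files'})

# 'file' for parse-file tasks, as in Example #23 (the file is rendered as a Jinja2 template).
parse_task = Task(task_type=Task.TYPE_PARSE_FILE,
                  task_name='Parse template file',
                  task_params={'file': 'template.txt'})

# 'args' for run tasks, as in Example #7.
run_task = Task(task_type=Task.TYPE_RUN,
                task_name='Run a script',
                task_params={'args': ['python', 'script.py']})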
Example #25
    def remove(self):
        Logger.d('Removing files for target: {0}...'.format(self.get_name()))
        vendor_dir = self.repository.get_vendor_dir()
        FileUtil.remove_dir(vendor_dir)
Example #26
    def test_normalize_path(self):
        normalized = FileUtil.normalize_path('C:\\ezored\\Test')
        expected = 'C:/ezored/Test'

        self.assertEqual(normalized, expected)
Example #27
    def download_file(url, dest=None, filename=None):
        """
        Download and save a file specified by url to dest directory.
        """
        Logger.d('New download request: {0}'.format(url))
        Logger.d('Destination: {0}'.format(dest))
        Logger.d('Filename: {0}'.format(filename))

        req = urllib2.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
        u = urllib2.urlopen(req)

        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)

        if not filename:
            filename = DownloadUtil.get_filename_from_url(path)

        if dest:
            FileUtil.create_dir(dest)
            filename = os.path.join(dest, filename)

        Logger.d('Getting file metadata...')

        with open(filename, 'wb') as f:
            meta = u.info()
            meta_func = meta.getheaders if hasattr(
                meta, 'getheaders') else meta.get_all
            meta_length = meta_func('Content-Length')
            file_size = None
            pbar = None

            if meta_length:
                file_size = int(meta_length[0])

            if file_size:
                Logger.d('File size in bytes: {0}'.format(file_size))
                Logger.clean('')
                pbar = tqdm(total=file_size)

            file_size_dl = 0
            block_sz = 8192

            if not pbar:
                Logger.d('Downloading, please wait...')

            while True:
                dbuffer = u.read(block_sz)

                if not dbuffer:
                    break

                dbuffer_len = len(dbuffer)
                file_size_dl += dbuffer_len
                f.write(dbuffer)

                if pbar:
                    pbar.update(dbuffer_len)

            if pbar:
                pbar.close()
                Logger.clean('')

            return filename
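A minimal usage sketch for download_file, matching the call sites in the repository download examples above. The DownloadUtil import path is an assumption, the Constants import is the one shown in Example #5, and the URL is the sample one from Example #14.

from ezored.models.constants import Constants
# Assumed import path for DownloadUtil, following the ezored.models.util.* layout.
from ezored.models.util.download_util import DownloadUtil

# Downloads into Constants.TEMP_DIR and returns the saved file path,
# as download_file does above when 'dest' is given.
saved_path = DownloadUtil.download_file(
    'http://ezored.com/downloads/dependency-sample.tar.gz',
    dest=Constants.TEMP_DIR,
    filename='dependency-sample.tar.gz')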
Example #28
    def build(self, target_name):
        from ezored.models.logger import Logger
        from ezored.models.project import Project

        project = Project.create_from_project_file()

        process_data = ProcessData()
        process_data.reset()
        process_data.project_name = project.get_config_value('name')

        if target_name:
            Logger.i('Build only target: {0}'.format(target_name))
        else:
            Logger.i('Build all targets')

        target_found = False

        for target in project.targets:
            can_build = False

            if not target_name:
                can_build = True
            elif target.get_name() == target_name:
                can_build = True

            if can_build:
                Logger.d('Getting target data by target name: {0}...'.format(
                    target_name))
                target_found = True

                # targets need be deleted to be always fresh with target data from dependencies
                target.remove()

                # build the target repository after download
                target.prepare_from_process_data(process_data)
                target.repository.download()
                target.repository.build(process_data)

                # get all target data from project dependencies
                target_data = TargetData()
                target_data.project_config = project.config

                for dependency in project.dependencies:
                    dependency.prepare_from_process_data(process_data)

                    new_target_data = dependency.get_target_data_by_target_name_and_parse(
                        target.get_name(), process_data)

                    target_data.merge(new_target_data)

                # back to target data
                target.prepare_from_process_data(process_data)

                # copy files from dependencies to target directory
                FileUtil.copy_files_from_list(target_data.copy_files)

                # parse files path and it content
                target_project_file_data = target.load_target_project_file_data(
                )

                if 'target' in target_project_file_data:
                    target_project_data = target_project_file_data['target']

                    # parse files
                    if 'parse_files' in target_project_data:
                        target_project_data_parse_files = target_project_data[
                            'parse_files']

                        if target_project_data_parse_files:
                            Logger.d('Files to parse from target: {0}'.format(
                                len(target_project_data_parse_files)))

                            target_project_data_parse_files = process_data.parse_text_list(
                                target_project_data_parse_files)

                            for target_project_data_parse_file in target_project_data_parse_files:
                                template_loader = jinja2.FileSystemLoader(
                                    searchpath='/')
                                template_env = jinja2.Environment(
                                    loader=template_loader)
                                template_file = target_project_data_parse_file
                                template = template_env.get_template(
                                    template_file)
                                templ_result = template.render(
                                    target=target_data)

                                FileUtil.write_to_file(
                                    os.path.dirname(
                                        target_project_data_parse_file),
                                    os.path.basename(
                                        target_project_data_parse_file),
                                    str(templ_result))
                        else:
                            Logger.d('No files need to parse from target: {0}'.
                                     format(target.get_name()))

                    # build target
                    if 'build' in target_project_data:
                        Logger.i('Building target: {0}...'.format(
                            target.get_name()))

                        target_project_data_build = target_project_data[
                            'build']

                        exitcode, stderr, stdout = FileUtil.run(
                            target_project_data_build,
                            target.repository.get_vendor_dir(),
                            process_data.get_environ())

                        if exitcode == 0:
                            Logger.i('Build finished for target: {0}'.format(
                                target.get_name()))
                        else:
                            if stdout:
                                Logger.i('Build output for target: {0}'.format(
                                    target.get_name()))
                                Logger.clean(stdout)

                            if stderr:
                                Logger.i(
                                    'Error output while build target: {0}'.
                                    format(target.get_name()))
                                Logger.clean(stderr)

                            Logger.f('Failed to build target: {0}'.format(
                                target.get_name()))

        if not target_found:
            Logger.f('Target not found: {0}'.format(target_name))
Example #29
    def get_target_data_by_target_name_and_parse(self, target_name,
                                                 process_data):
        Logger.d('Getting target data from dependency: {0}...'.format(
            self.get_name()))

        target_file_data = self.repository.load_target_data_file()

        if target_file_data:
            if 'targets' in target_file_data:
                targets_data = target_file_data['targets']

                for target_data_item in targets_data:
                    current_target_name = target_data_item['name']

                    if self.match_name(pattern=current_target_name,
                                       name=target_name):
                        # get target data
                        target_data = TargetData()

                        if 'data' in target_data_item:
                            target_data_dict = target_data_item['data']

                            if 'header_search_paths' in target_data_dict:
                                if target_data_dict['header_search_paths']:
                                    target_data.header_search_paths.extend(
                                        FileUtil.normalize_path_from_list(
                                            target_data_dict[
                                                'header_search_paths']))

                            if 'library_search_paths' in target_data_dict:
                                if target_data_dict['library_search_paths']:
                                    target_data.library_search_paths.extend(
                                        FileUtil.normalize_path_from_list(
                                            target_data_dict[
                                                'library_search_paths']))

                            if 'c_flags' in target_data_dict:
                                if target_data_dict['c_flags']:
                                    target_data.c_flags.extend(
                                        target_data_dict['c_flags'])

                            if 'cxx_flags' in target_data_dict:
                                if target_data_dict['cxx_flags']:
                                    target_data.cxx_flags.extend(
                                        target_data_dict['cxx_flags'])

                            if 'library_links' in target_data_dict:
                                if target_data_dict['library_links']:
                                    target_data.library_links.extend(
                                        target_data_dict['library_links'])

                            if 'framework_links' in target_data_dict:
                                if target_data_dict['framework_links']:
                                    target_data.framework_links.extend(
                                        target_data_dict['framework_links'])

                            if 'tasks' in target_data_dict:
                                if target_data_dict['tasks']:
                                    for target_data_task in target_data_dict[
                                            'tasks']:
                                        task = Task.from_dict(target_data_task)
                                        target_data.tasks.append(task)

                            # create source group if have files for it
                            target_data_header_files = []
                            target_data_source_files = []

                            if 'header_files' in target_data_dict:
                                if target_data_dict['header_files']:
                                    for file_data in target_data_dict[
                                            'header_files']:
                                        # find all files
                                        source_file_to_find = SourceFile.from_dict(
                                            file_data)

                                        if source_file_to_find:
                                            # process file pattern before
                                            file_pattern = source_file_to_find.file
                                            file_pattern = process_data.parse_text(
                                                file_pattern)

                                            found_files = FileUtil.find_files(
                                                file_pattern)
                                            found_files = FileUtil.normalize_path_from_list(
                                                found_files)

                                            # create new source file for each found file
                                            for f in found_files:
                                                target_data_header_files.append(
                                                    SourceFile(
                                                        source_file=f,
                                                        compile_flags=
                                                        source_file_to_find.
                                                        compile_flags))

                            if 'source_files' in target_data_dict:
                                if target_data_dict['source_files']:
                                    for file_data in target_data_dict[
                                            'source_files']:
                                        # find all files
                                        source_file_to_find = SourceFile.from_dict(
                                            file_data)

                                        if source_file_to_find:
                                            # process file pattern before
                                            file_pattern = source_file_to_find.file
                                            file_pattern = process_data.parse_text(
                                                file_pattern)

                                            found_files = FileUtil.find_files(
                                                file_pattern)
                                            found_files = FileUtil.normalize_path_from_list(
                                                found_files)

                                            # create new source file for each found file
                                            for f in found_files:
                                                target_data_source_files.append(
                                                    SourceFile(
                                                        source_file=FileUtil.
                                                        normalize_path(f),
                                                        compile_flags=
                                                        source_file_to_find.
                                                        compile_flags))

                            if len(target_data_header_files) > 0 or len(
                                    target_data_source_files) > 0:
                                target_data_source_group = SourceGroup()
                                target_data_source_group.name = self.get_name()
                                target_data_source_group.header_files = target_data_header_files
                                target_data_source_group.source_files = target_data_source_files

                                target_data.source_groups.append(
                                    target_data_source_group)

                            # parse all things
                            target_data.parse(process_data)
                            return target_data