Example #1
    def test_task_invalid_type(self):
        task = Task(task_type='invalid_type_xyz')

        with pytest.raises(SystemExit) as error:
            task.get_name()

        self.assertEqual(error.type, SystemExit)
        self.assertEqual(error.value.code, 1)
Example #2
    def test_task_parse_invalid_type(self):
        task = Task(task_name='Sample task', task_type='invalid_type_xyz')

        with pytest.raises(SystemExit) as error:
            task.parse(process_data=ProcessData())

        self.assertEqual(error.type, SystemExit)
        self.assertEqual(error.value.code, 1)
Example #3
    def test_task_run_invalid_type(self):
        task = Task(task_name='Sample task', task_type='invalid_type_xyz')

        with pytest.raises(SystemExit) as error:
            task.run(process_data=ProcessData(),
                     template_data={},
                     working_dir=None)

        self.assertEqual(error.type, SystemExit)
        self.assertEqual(error.value.code, 1)
Example #4
    def test_task_get_name(self):
        # run
        task = Task(task_type=Task.TYPE_RUN)

        self.assertEqual(task.get_name(), 'Run')

        # parse file
        task = Task(task_type=Task.TYPE_PARSE_FILE)

        self.assertEqual(task.get_name(), 'Parse file')

        # copy file
        task = Task(task_type=Task.TYPE_COPY_FILE)

        self.assertEqual(task.get_name(), 'Copy file')
Example #5
    def test_task_run_all_tasks(self, d):
        os.chdir(d.path)

        file_content = """
file = open("test-target-file.txt", "w") 
file.write("{0}") 
file.close()
        """

        file_content = file_content.format(Constants.PROJECT_NAME)

        file_path = os.path.join(d.path, 'test-file.py')
        target_file_path = os.path.join(d.path, 'test-target-file.txt')

        d.write(file_path, file_content.encode('utf-8'))

        task = Task(task_type=Task.TYPE_RUN,
                    task_name='Sample run task - run all tasks',
                    task_params={'args': ['python', file_path]})

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)

        Task.run_all_tasks([task],
                           process_data=process_data,
                           template_data=template_data,
                           working_dir=d.path)

        content = FileUtil.read_file(target_file_path)

        self.assertEqual(Constants.PROJECT_NAME, content)
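
Example 5 parses a single task and hands it to Task.run_all_tasks as a one-element list. The sketch below is not taken from the test suite; all paths are placeholders, and it assumes run_all_tasks treats a longer, mixed list of parsed tasks the same way:

    run_task = Task(task_type=Task.TYPE_RUN,
                    task_name='Run a script',
                    task_params={'args': ['python', 'build.py']})  # placeholder script

    copy_task = Task(task_type=Task.TYPE_COPY_FILE,
                     task_name='Copy the result',
                     task_params={'from': 'out.txt', 'to': 'dist/out.txt'})  # placeholder paths

    process_data = ProcessData()

    for task in (run_task, copy_task):
        task.parse(process_data)

    Task.run_all_tasks([run_task, copy_task],
                       process_data=process_data,
                       template_data={},
                       working_dir='.')  # placeholder working directory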
Example #6
    def test_task_copy_files(self, d):
        os.chdir(d.path)

        file_path1 = os.path.join(d.path, 'test-copy-1.txt')
        file_path2 = os.path.join(d.path, 'test-copy-2.txt')
        to_path = os.path.join(d.path, 'files')

        d.write(file_path1, 'sample data'.encode('utf-8'))
        d.write(file_path2, 'sample data'.encode('utf-8'))

        FileUtil.create_dir(to_path)

        task = Task(task_type=Task.TYPE_COPY_FILES,
                    task_name='Sample copy files task',
                    task_params={
                        'from': os.path.join(d.path, '*.txt'),
                        'to': to_path
                    })

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)
        task.run(process_data=process_data,
                 template_data=template_data,
                 working_dir=d.path)

        self.assertTrue(
            os.path.exists(os.path.join(to_path, 'test-copy-1.txt')))
        self.assertTrue(
            os.path.exists(os.path.join(to_path, 'test-copy-2.txt')))
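
The 'from' parameter in Example 6 is a glob pattern, so every '.txt' file in the temporary directory is copied into the 'files' directory, which is what the assertions check. As a rough standalone illustration of that behaviour using only the standard library (this is not ezored's implementation; the paths are placeholders):

    import glob
    import shutil

    # copy every file matching the pattern into the destination directory
    for path in glob.glob('/tmp/sample/*.txt'):
        shutil.copy(path, '/tmp/sample/files')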
Example #7
    def test_task_run_generate_error(self, d):
        os.chdir(d.path)

        file_content = """
print("Sample task")
raise Exception('Sample task')
        """

        file_path = os.path.join(d.path, 'test-file.py')
        d.write(file_path, file_content.encode('utf-8'))

        task = Task(task_type=Task.TYPE_RUN,
                    task_name='Sample run task',
                    task_params={'args': ['python', file_path]})

        with pytest.raises(SystemExit) as error:
            task.run(process_data=ProcessData(),
                     template_data={},
                     working_dir=d.path)

        self.assertEqual(error.type, SystemExit)
        self.assertEqual(error.value.code, 1)
Example #8
    def test_task_create_from_dict(self):
        task_name = 'Sample task'
        task_type = Task.TYPE_RUN
        task_params = {'args': ['python']}

        dict_data = {
            'name': task_name,
            'type': task_type,
            'params': task_params
        }

        task = Task.from_dict(dict_data)

        self.assertEqual(task_name, task.name)
        self.assertEqual(task_type, task.type)
        self.assertEqual(task_params, task.params)
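
The dictionary keys used here ('name', 'type', 'params') are the same shape Task.from_dict consumes when Examples 12 and 16 build tasks from target data files, so a task created this way can go through the usual parse/run cycle. A minimal sketch under that assumption (the paths and working directory are placeholders, not values from the tests):

    task = Task.from_dict({
        'name': 'Copy settings file',
        'type': Task.TYPE_COPY_FILE,
        'params': {
            'from': 'settings.template.txt',  # placeholder source path
            'to': 'settings.txt'              # placeholder destination path
        }
    })

    process_data = ProcessData()

    task.parse(process_data)
    task.run(process_data=process_data,
             template_data={},
             working_dir='.')  # placeholder working directory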
Example #9
    def test_task_parse_file(self, d):
        os.chdir(d.path)

        file_path = os.path.join(d.path, '*.txt')

        d.write(file_path, '{{ name }}'.encode('utf-8'))

        task = Task(task_type=Task.TYPE_PARSE_FILE,
                    task_name='Sample parse file task',
                    task_params={'file': file_path})

        process_data = ProcessData()
        template_data = {'name': Constants.PROJECT_NAME}

        task.parse(process_data)
        task.run(process_data=process_data,
                 template_data=template_data,
                 working_dir=d.path)

        content = FileUtil.read_file(file_path)

        self.assertEqual(Constants.PROJECT_NAME, content)
Example #10
    def test_task_run_invalid_binary(self, d):
        os.chdir(d.path)

        task = Task(task_type=Task.TYPE_RUN,
                    task_name='Sample run task - invalid binary',
                    task_params={'args': ['dont_exists_xyz']})

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)

        error_type = OSError

        if sys.version_info >= (3, ):
            error_type = FileNotFoundError

        with pytest.raises(error_type) as error:
            task.run(process_data=process_data,
                     template_data=template_data,
                     working_dir=d.path)

        self.assertEqual(error.type, error_type)
Example #11
    def test_task_copy_file(self, d):
        os.chdir(d.path)

        from_path = os.path.join(d.path, 'test-copy.txt')
        to_path = os.path.join(d.path, 'test-copy2.txt')

        d.write(from_path, 'sample data'.encode('utf-8'))

        task = Task(task_type=Task.TYPE_COPY_FILE,
                    task_name='Sample copy file task',
                    task_params={
                        'from': from_path,
                        'to': to_path
                    })

        process_data = ProcessData()
        template_data = {}

        task.parse(process_data)
        task.run(process_data=process_data,
                 template_data=template_data,
                 working_dir=d.path)

        self.assertTrue(os.path.exists(to_path))
Example #12
    def execute_command(self, target_command, target_name):
        from ezored.models.logger import Logger
        from ezored.models.project import Project
        import importlib
        import sys

        project = Project.create_from_project_file()

        if target_name:
            Logger.i('Execute command "{0}" only on target "{1}"'.format(
                target_command, target_name))
        else:
            Logger.i(
                'Execute command "{0}" on all targets'.format(target_command))

        target_found = False
        total_targets = len(project.targets)

        if total_targets > 0:
            for target in project.targets:
                process_data = ProcessData()
                process_data.reset()
                process_data.project_name = project.get_config_value('name')

                can_build = False

                if not target_name:
                    can_build = True
                elif target.get_name() == target_name:
                    can_build = True

                if can_build:
                    Logger.d(
                        'Getting target data by target name "{0}"...'.format(
                            target.get_name()))
                    target_found = True

                    # targets need to be deleted so they always have fresh target data from dependencies
                    target.remove()

                    # build the target repository after download
                    target.prepare_from_process_data(process_data)
                    target.repository.download()
                    target.repository.build(project=project,
                                            process_data=process_data)

                    # get all target data from project dependencies
                    target_data = TargetData()
                    target_data.project_home = target.repository.get_vendor_dir()
                    target_data.project_config = project.config

                    for dependency in project.dependencies:
                        dependency.prepare_from_process_data(process_data)

                        new_target_data = dependency.get_target_data_by_target_name_and_parse(
                            target.get_name(), process_data)

                        target_data.merge(new_target_data)

                    # back to target data
                    target.prepare_from_process_data(process_data)

                    # process target data and execute required command
                    target_data_file = target.repository.load_target_data_file()

                    if 'target' in target_data_file:
                        target_project_data = target_data_file['target']

                        # target tasks
                        if 'tasks' in target_project_data:
                            target_tasks_data = target_project_data['tasks']

                            for target_task_data in target_tasks_data:
                                task = Task.from_dict(target_task_data)
                                task.parse(process_data)
                                target_data.tasks.append(task)

                        # run all tasks
                        Task.run_all_tasks(
                            tasks=target_data.tasks,
                            process_data=process_data,
                            template_data={'target': target_data},
                            working_dir=target.repository.get_vendor_dir())

                        # execute command on target
                        Logger.i('Executing command "{0}" on target "{1}"...'.
                                 format(target_command, target.get_name()))

                        sys_path = list(sys.path)
                        original_cwd = os.getcwd()

                        try:
                            sys.path.insert(0,
                                            target.repository.get_vendor_dir())

                            target_module = importlib.import_module(
                                Constants.TARGET_MODULE_NAME)
                            command = getattr(target_module,
                                              'do_' + target_command)

                            command(
                                params={
                                    'project': project,
                                    'target': target,
                                    'target_data': target_data,
                                    'process_data': process_data,
                                })

                            del sys.modules[Constants.TARGET_MODULE_NAME]
                            del target_module
                            del command

                            Logger.i('Command "{0}" finished for target "{1}"'.
                                     format(target_command, target.get_name()))
                        except Exception as e:
                            Logger.e(
                                'Error while calling "{0}" on target "{1}": {2}'.
                                format(target_command, target.get_name(),
                                       str(e)))
                            raise

                        sys.path = sys_path
                        os.chdir(original_cwd)

            if not target_found:
                Logger.f('Target not found: {0}'.format(target_name))
        else:
            Logger.i('Your project does not have targets')
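
The command dispatch at the end of Example 12 follows a simple convention: the target's vendor directory is pushed onto sys.path, the target module is imported with importlib, and a function named 'do_' + command is resolved with getattr and called. Below is a stripped-down sketch of that pattern; the function and its arguments are illustrative, not part of ezored, and it restores sys.path in a finally block so cleanup also happens when the command raises:

    import importlib
    import sys

    def call_target_command(module_dir, module_name, command, params):
        # make the target module importable, resolve do_<command> and call it
        saved_path = list(sys.path)
        sys.path.insert(0, module_dir)

        try:
            module = importlib.import_module(module_name)
            handler = getattr(module, 'do_' + command)
            return handler(params=params)
        finally:
            # restore the import path and drop the cached module
            sys.path = saved_path
            sys.modules.pop(module_name, None)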
Example #13
    def test_task_parse_invalid_process_data(self):
        task = Task(task_type=Task.TYPE_RUN)

        task.parse(process_data=None)
Example #14
    def test_task_run_invalid_process_data(self):
        task = Task(task_name='Sample task', task_type=Task.TYPE_RUN)

        task.run(process_data=None, template_data={}, working_dir=None)
Example #15
    def test_task_run_all_tasks_with_invalid_list(self):
        Task.run_all_tasks(tasks=None,
                           process_data=None,
                           template_data={},
                           working_dir=None)
Example #16
    def get_target_data_by_target_name_and_parse(self, target_name,
                                                 process_data):
        Logger.d('Getting target data from dependency: {0}...'.format(
            self.get_name()))

        target_file_data = self.repository.load_target_data_file()

        if target_file_data:
            if 'targets' in target_file_data:
                targets_data = target_file_data['targets']

                for target_data_item in targets_data:
                    current_target_name = target_data_item['name']

                    if self.match_name(pattern=current_target_name,
                                       name=target_name):
                        # get target data
                        target_data = TargetData()

                        if 'data' in target_data_item:
                            target_data_dict = target_data_item['data']

                            if 'header_search_paths' in target_data_dict:
                                if target_data_dict['header_search_paths']:
                                    target_data.header_search_paths.extend(
                                        FileUtil.normalize_path_from_list(
                                            target_data_dict[
                                                'header_search_paths']))

                            if 'library_search_paths' in target_data_dict:
                                if target_data_dict['library_search_paths']:
                                    target_data.library_search_paths.extend(
                                        FileUtil.normalize_path_from_list(
                                            target_data_dict[
                                                'library_search_paths']))

                            if 'c_flags' in target_data_dict:
                                if target_data_dict['c_flags']:
                                    target_data.c_flags.extend(
                                        target_data_dict['c_flags'])

                            if 'cxx_flags' in target_data_dict:
                                if target_data_dict['cxx_flags']:
                                    target_data.cxx_flags.extend(
                                        target_data_dict['cxx_flags'])

                            if 'library_links' in target_data_dict:
                                if target_data_dict['library_links']:
                                    target_data.library_links.extend(
                                        target_data_dict['library_links'])

                            if 'framework_links' in target_data_dict:
                                if target_data_dict['framework_links']:
                                    target_data.framework_links.extend(
                                        target_data_dict['framework_links'])

                            if 'tasks' in target_data_dict:
                                if target_data_dict['tasks']:
                                    for target_data_task in target_data_dict[
                                            'tasks']:
                                        task = Task.from_dict(target_data_task)
                                        target_data.tasks.append(task)

                            # create a source group if there are files for it
                            target_data_header_files = []
                            target_data_source_files = []

                            if 'header_files' in target_data_dict:
                                if target_data_dict['header_files']:
                                    for file_data in target_data_dict[
                                            'header_files']:
                                        # find all files
                                        source_file_to_find = SourceFile.from_dict(
                                            file_data)

                                        if source_file_to_find:
                                            # parse the file pattern before searching for files
                                            file_pattern = source_file_to_find.file
                                            file_pattern = process_data.parse_text(
                                                file_pattern)

                                            found_files = FileUtil.find_files(
                                                file_pattern)
                                            found_files = FileUtil.normalize_path_from_list(
                                                found_files)

                                            # create new source file for each found file
                                            for f in found_files:
                                                target_data_header_files.append(
                                                    SourceFile(
                                                        source_file=f,
                                                        compile_flags=
                                                        source_file_to_find.
                                                        compile_flags))

                            if 'source_files' in target_data_dict:
                                if target_data_dict['source_files']:
                                    for file_data in target_data_dict[
                                            'source_files']:
                                        # find all files
                                        source_file_to_find = SourceFile.from_dict(
                                            file_data)

                                        if source_file_to_find:
                                            # parse the file pattern before searching for files
                                            file_pattern = source_file_to_find.file
                                            file_pattern = process_data.parse_text(
                                                file_pattern)

                                            found_files = FileUtil.find_files(
                                                file_pattern)
                                            found_files = FileUtil.normalize_path_from_list(
                                                found_files)

                                            # create new source file for each found file
                                            for f in found_files:
                                                target_data_source_files.append(
                                                    SourceFile(
                                                        source_file=FileUtil.
                                                        normalize_path(f),
                                                        compile_flags=
                                                        source_file_to_find.
                                                        compile_flags))

                            if len(target_data_header_files) > 0 or len(
                                    target_data_source_files) > 0:
                                target_data_source_group = SourceGroup()
                                target_data_source_group.name = self.get_name()
                                target_data_source_group.header_files = target_data_header_files
                                target_data_source_group.source_files = target_data_source_files

                                target_data.source_groups.append(
                                    target_data_source_group)

                            # parse all things
                            target_data.parse(process_data)
                            return target_data