Example #1
    def do_baseline_run(self,
                        target_config: TargetConfiguration) -> ms.RunResult:
        log.get_logger().log('LocalRunner::do_baseline_run')
        accu_runtime = 0.0

        if not target_config.has_args_for_invocation():
            log.get_logger().log(
                'LocalRunner::do_baseline_run: BEGIN not target_config.has_args_for_invocation()'
            )
            # This runner only takes into account the first argument string (if not already set)
            args = self._config.get_args(target_config.get_build(),
                                         target_config.get_target())
            log.get_logger().log('LocalRunner::do_baseline_run: args: ' +
                                 str(args))
            target_config.set_args_for_invocation(args[0])
            log.get_logger().log(
                'LocalRunner::do_baseline_run: END not target_config.has_args_for_invocation()'
            )

        # TODO Better evaluation of the obtained timings.
        for y in range(0, self._num_repetitions):
            log.get_logger().log(
                'LocalRunner::do_baseline_run: Running iteration ' + str(y),
                level='debug')
            accu_runtime += self.run(target_config, InstrumentConfig(), True)

        run_result = ms.RunResult(accu_runtime, self._num_repetitions)
        log.get_logger().log('[Vanilla][RUNTIME] Vanilla avg: ' +
                             str(run_result.get_average()) + '\n',
                             level='perf')

        return run_result
Example #2
    def get_param_mapping(self, target_config: TargetConfiguration) -> str:
        if not target_config.has_args_for_invocation():
            return '.'

        args = target_config.get_args_for_invocation()
        param_str = ''
        # TODO FIX ME!
        if isinstance(args, list):
            L.get_logger().log(
                'ExtrapProfileSink::get_param_mapping: isinstance of list')
            param_str = str(args[1]) + str(args[2]) + '.' + str(args[4]) + str(args[0])

        elif not isinstance(args, tuple):
            L.get_logger().log(
                'ExtrapProfileSink::get_param_mapping: not isinstance of tuple'
            )
            param_str = str(args)  # PiraArgument knows how to unparse to string

        else:
            for v in args:
                param_str += str(v)  # cast defensively; tuple entries may not be strings

        L.get_logger().log('ExtrapProfileSink::get_param_mapping: ' +
                           param_str)
        return param_str
Example #3
    def check_and_prepare(self, experiment_dir: str,
                          target_config: TargetConfiguration,
                          instr_config: InstrumentConfig) -> str:
        cur_ep_dir = self.get_extrap_dir_name(
            target_config, instr_config.get_instrumentation_iteration())
        if not u.is_valid_file_name(cur_ep_dir):
            log.get_logger().log(
                'ExtrapProfileSink::check_and_prepare: Generated directory name no good. Abort\n'
                + cur_ep_dir,
                level='error')
        else:
            if u.check_provided_directory(cur_ep_dir):
                new_dir_name = cur_ep_dir + '_' + u.generate_random_string()
                log.get_logger().log(
                    'ExtrapProfileSink::check_and_prepare: Moving old experiment directory to: '
                    + new_dir_name,
                    level='info')
                u.rename(cur_ep_dir, new_dir_name)

            u.create_directory(cur_ep_dir)
            cubex_name = (experiment_dir + '/' + target_config.get_flavor() +
                          '-' + target_config.get_target() + '.cubex')
            log.get_logger().log(cubex_name)

            if not u.is_file(cubex_name):
                log.get_logger().log(
                    'ExtrapProfileSink::check_and_prepare: Returned experiment cube name is no file: '
                    + cubex_name)
            else:
                return cubex_name

        raise ProfileSinkException(
            'ExtrapProfileSink: Could not create target directory or Cube dir bad.'
        )
Example #4
  def __init__(self, target_config: TargetConfiguration, instrument: bool, instr_file: str = None) -> None:
    if target_config is None:
      raise BuilderException('Builder::ctor: Target Configuration was None')

    self.target_config = target_config
    self.directory = target_config.get_place()
    self.old_cwd = ''
    self.build_instr = instrument
    self.instrumentation_file = instr_file
    self._compile_time_filtering = target_config.is_compile_time_filtering()
    self.error = None
Example #5
    def set_up(self, target_config: TargetConfiguration,
               instrumentation_config: InstrumentConfig,
               compile_time_filter: bool) -> None:
        if not target_config.is_compile_time_filtering():
            scorep_filter_file = self.prepare_scorep_filter_file(
                target_config.get_instr_file())
            self.set_filter_file(scorep_filter_file)

        self._set_up(target_config.get_build(), target_config.get_target(),
                     target_config.get_flavor(),
                     instrumentation_config.get_instrumentation_iteration(),
                     instrumentation_config.is_instrumentation_run())
Example #6
    def do_baseline_run(self,
                        target_config: TargetConfiguration) -> ms.RunResult:
        log.get_logger().log('LocalScalingRunner::do_baseline_run')
        args = self._config.get_args(target_config.get_build(),
                                     target_config.get_target())
        run_result = ms.RunResult()
        for arg_cfg in args:
            target_config.set_args_for_invocation(arg_cfg)
            rr = super().do_baseline_run(target_config)
            run_result.add_from(rr)

        return run_result
Example #7
  def setUp(self):
    self.cfg_loader = cln.ConfigurationLoader()
    self.cfg = self.cfg_loader.load_conf('input/unit_input_004.json')
    self.target_cfg = TargetConfiguration('/this/is/top_dir', '/this/is/top_dir',
                                          'item01', 'item01-flavor01', '')
    self.instr_cfg = InstrumentConfig(True, 0)
Example #8
    def process(self, exp_dir: str, target_config: TargetConfiguration,
                instr_config: InstrumentConfig) -> None:
        L.get_logger().log('ExtrapProfileSink::process: ' +
                           str(instr_config.get_instrumentation_iteration()))
        # Compare by value: _VALUE caches the invocation args of the previous call
        if (instr_config.get_instrumentation_iteration() > self._iteration
                or target_config.get_args_for_invocation() != self._VALUE):
            self._iteration = instr_config.get_instrumentation_iteration()
            self._repetition = -1
            self._VALUE = ()

        self._repetition += 1
        self._VALUE = target_config.get_args_for_invocation()
        src_cube_name = self.check_and_prepare(exp_dir, target_config,
                                               instr_config)
        self._sink_target = self.get_extrap_dir_name(target_config,
                                                     self._iteration)

        self.do_copy(src_cube_name, self._sink_target)
Example #9
def execute_with_config(runner: Runner, analyzer: A, pira_iters: int, target_config: TargetConfiguration) -> None:
  try:
    log.get_logger().log('run_setup phase.', level='debug')
    instrument = False
    pira_iterations = pira_iters 

    # Build without any instrumentation
    vanilla_builder = B(target_config, instrument)
    tracker = tt.TimeTracker()
    tracker.m_track('Vanilla Build', vanilla_builder, 'build')

    # Run without instrumentation for baseline
    log.get_logger().log('Running baseline measurements', level='info')
    vanilla_rr = runner.do_baseline_run(target_config)
    log.get_logger().log(
        'Pira::execute_with_config: RunResult: ' + str(vanilla_rr) + ' | avg: ' + str(vanilla_rr.get_average()),
        level='debug')
    instr_file = ''

    for x in range(0, pira_iterations):
      log.get_logger().log('Running instrumentation iteration ' + str(x), level='info')

      # Only run the pgoe to get the functions name
      iteration_tracker = tt.TimeTracker()

      # Analysis Phase
      instr_file = analyzer.analyze(target_config, x)
      log.get_logger().log('[WHITELIST] $' + str(x) + '$ ' + str(util.lines_in_file(instr_file)), level='perf')
      util.shell('stat ' + instr_file)

      # After baseline measurement is complete, do the instrumented build/run
      # This is only necessary in every iteration when run in compile-time mode.
      if x == 0 or target_config.is_compile_time_filtering():
        instrument = True
        instr_builder = B(target_config, instrument, instr_file)
        tracker.m_track('Instrument Build', instr_builder, 'build')

      # Run Phase
      log.get_logger().log('Running profiling measurements', level='info')
      instr_rr = runner.do_profile_run(target_config, x)

      # Compute overhead of instrumentation
      ovh_percentage = instr_rr.compute_overhead(vanilla_rr)
      log.get_logger().log('[RUNTIME] $' + str(x) + '$ ' + str(instr_rr.get_average()), level='perf')
      log.get_logger().log('[OVERHEAD] $' + str(x) + '$ ' + str(ovh_percentage), level='perf')

      iteration_tracker.stop()
      user_time, system_time = iteration_tracker.get_time()
      log.get_logger().log('[ITERTIME] $' + str(x) + '$ ' + str(user_time) + ', ' + str(system_time), level='perf')

  except Exception as e:
    log.get_logger().log(
        'Pira::execute_with_config: Problem during preparation of run.\nMessage:\n' + str(e), level='error')
    raise RuntimeError(str(e))
Example #10
    def do_profile_run(self,
                       target_config: TargetConfiguration,
                       instr_iteration: int,
                       compile_time_filtering: bool = True) -> ms.RunResult:
        log.get_logger().log('LocalScalingRunner::do_profile_run')
        # We run as many experiments as we have input data configs
        # TODO: How to handle the model parameter <-> input parameter relation, do we care?
        args = self._config.get_args(target_config.get_build(),
                                     target_config.get_target())
        # TODO: How to handle multiple MeasurementResult items? We get a vector of these after this function.
        run_result = ms.RunResult()
        for arg_cfg in args:
            # Call the runner method with the correct arguments.
            target_config.set_args_for_invocation(arg_cfg)
            rr = super().do_profile_run(target_config, instr_iteration,
                                        compile_time_filtering)
            run_result.add_from(rr)

        # At this point we have all the data we need to construct an Extra-P model

        return run_result
Example #11
    def run(self, target_config: TargetConfiguration,
            instrument_config: InstrumentConfig,
            compile_time_filtering: bool) -> float:
        """ Implements the actual invocation """
        functor_manager = fm.FunctorManager()
        run_functor = functor_manager.get_or_load_functor(
            target_config.get_build(), target_config.get_target(),
            target_config.get_flavor(), 'run')
        default_provider = defaults.BackendDefaults()
        kwargs = default_provider.get_default_kwargs()
        kwargs['util'] = util
        kwargs['LD_PRELOAD'] = default_provider.get_MPI_wrap_LD_PRELOAD()
        runtime = 0.0

        if run_functor.get_method()['active']:
            run_functor.active(target_config.get_target(), **kwargs)
            log.get_logger().log(
                'For the active functor we can barely measure runtime',
                level='warn')
            runtime = 1.0

        try:
            util.change_cwd(target_config.get_place())

            invoke_arguments = target_config.get_args_for_invocation()
            kwargs['args'] = invoke_arguments
            if invoke_arguments is not None:
                log.get_logger().log('LocalBaseRunner::run: (args) ' +
                                     str(invoke_arguments))

            command = run_functor.passive(target_config.get_target(), **kwargs)
            _, runtime = util.shell(command, time_invoc=True)
            log.get_logger().log(
                'LocalBaseRunner::run::passive_invocation -> Returned runtime: '
                + str(runtime),
                level='debug')

        except Exception as e:
            log.get_logger().log('LocalBaseRunner::run Exception\n' + str(e),
                                 level='error')
            raise RuntimeError('LocalBaseRunner::run caught exception. ' +
                               str(e))

        # TODO: Insert the data into the database
        return runtime
Example #12
  def test_analyze_local(self):
    ld = C.SimplifiedConfigurationLoader()
    cfg = ld.load_conf('../inputs/configs/basic_config_005.json')

    analyzer = A.Analyzer(cfg)
    fm = F.FunctorManager(cfg)

    a_f = fm.get_or_load_functor('/tmp', 'test_item', 'ct', 'analyze')
    self.assertIsNotNone(a_f)
    self.assertTrue(a_f.get_method()['passive'])
    self.assertEqual(a_f.get_it(), 0)


    tc = TargetConfiguration(cfg.get_place('/tmp'), '/tmp', 'test_item', 'ct', 'asdf')
    with self.assertRaises(RuntimeError) as assert_cm:
      analyzer.analyze(tc, 0)
    rt_err = assert_cm.exception
    self.assertEqual(str(rt_err), 'Analyzer::analyze: Profile Sink in Analyzer not set!')

    analyzer.set_profile_sink(TestProfileSink())
    analyzer.analyze(tc, 0)
    self.assertEqual(a_f.get_it(), 1)
Example #13
    def do_profile_run(self,
                       target_config: TargetConfiguration,
                       instr_iteration: int,
                       compile_time_filtering: bool = True) -> ms.RunResult:
        log.get_logger().log(
            'LocalRunner::do_profile_run: Received instrumentation file: ' +
            target_config.get_instr_file(),
            level='debug')
        scorep_helper = ms.ScorepSystemHelper(self._config)
        instrument_config = InstrumentConfig(True, instr_iteration)
        scorep_helper.set_up(target_config, instrument_config,
                             compile_time_filtering)
        runtime = 0.0

        if not target_config.has_args_for_invocation():
            # This runner only takes into account the first argument string (if not already set)
            args = self._config.get_args(target_config.get_build(),
                                         target_config.get_target())
            target_config.set_args_for_invocation(args[0])

        for y in range(0, self._num_repetitions):
            log.get_logger().log(
                'LocalRunner::do_profile_run: Running instrumentation iteration '
                + str(y),
                level='debug')
            runtime = runtime + self.run(target_config, instrument_config,
                                         compile_time_filtering)
            # Enable further processing of the resulting profile
            self._sink.process(scorep_helper.get_exp_dir(), target_config,
                               instrument_config)

        run_result = ms.RunResult(runtime, self._num_repetitions)
        log.get_logger().log('[Instrument][RUNTIME] $' + str(instr_iteration) +
                             '$ ' + str(run_result.get_average()),
                             level='perf')
        return run_result
Example #14
def execute_with_config(runner: Runner, analyzer: A, pira_iters: int,
                        target_config: TargetConfiguration,
                        csv_config: CSVConfiguration) -> None:
    try:
        instrument = False
        pira_iterations = pira_iters
        hybrid_filtering = target_config.is_hybrid_filtering()
        compile_time_filtering = target_config.is_compile_time_filtering()
        hybrid_filter_iters = target_config.get_hybrid_filter_iters()

        rr_exporter = E.RunResultExporter()

        # Build without any instrumentation
        L.get_logger().log(
            'Building vanilla version for baseline measurements', level='info')
        vanilla_builder = BU(target_config, instrument)
        tracker = T.TimeTracker()
        tracker.m_track('Vanilla Build', vanilla_builder, 'build')

        # Run without instrumentation for baseline
        L.get_logger().log('Running baseline measurements', level='info')
        vanilla_rr = runner.do_baseline_run(target_config)
        L.get_logger().log('Pira::execute_with_config: RunResult: ' +
                           str(vanilla_rr) + ' | avg: ' +
                           str(vanilla_rr.get_average()),
                           level='debug')
        instr_file = ''

        if (csv_config.should_export()):
            rr_exporter.add_row('Vanilla', vanilla_rr)

        for x in range(0, pira_iterations):
            L.get_logger().log('Running instrumentation iteration ' + str(x),
                               level='info')

            # Only run the pgoe to get the functions name
            iteration_tracker = T.TimeTracker()

            # Analysis Phase
            instr_file = analyzer.analyze(target_config, x)
            L.get_logger().log('[WHITELIST] $' + str(x) + '$ ' +
                               str(U.lines_in_file(instr_file)),
                               level='perf')
            U.shell('stat ' + instr_file)

            # After baseline measurement is complete, do the instrumented build/run
            # This is only necessary in every iteration when run in compile-time mode.
            # For hybrid-filtering this is done after the specified amount of iterations
            if ((hybrid_filtering and (x % hybrid_filter_iters == 0))
                    or x == 0 or compile_time_filtering):
                instrument = True
                instr_builder = BU(target_config, instrument, instr_file)
                tracker.m_track('Instrument Build', instr_builder, 'build')

            # Run Phase
            L.get_logger().log('Running profiling measurements', level='info')
            instr_rr = runner.do_profile_run(target_config, x)

            if (csv_config.should_export()):
                rr_exporter.add_row('Instrumented ' + str(x), instr_rr)

            # Compute overhead of instrumentation
            ovh_percentage = instr_rr.compute_overhead(vanilla_rr)
            L.get_logger().log('[RUNTIME] $' + str(x) + '$ ' +
                               str(instr_rr.get_average()),
                               level='perf')
            L.get_logger().log('[OVERHEAD] $' + str(x) + '$ ' +
                               str(ovh_percentage),
                               level='perf')

            iteration_tracker.stop()
            user_time, system_time = iteration_tracker.get_time()
            L.get_logger().log('[ITERTIME] $' + str(x) + '$ ' +
                               str(user_time) + ', ' + str(system_time),
                               level='perf')

        if (csv_config.should_export()):
            file_name = (target_config.get_target() + '_' +
                         target_config.get_flavor() + '.csv')
            csv_file = os.path.join(csv_config.get_csv_dir(), file_name)
            try:
                U.make_dir(csv_config.get_csv_dir())
                rr_exporter.export(csv_file, csv_config.get_csv_dialect())
            except Exception as e:
                L.get_logger().log(
                    'Pira::execute_with_config: Problem writing CSV file\nMessage:\n'
                    + str(e),
                    level='error')

    except Exception as e:
        L.get_logger().log(
            'Pira::execute_with_config: Problem during preparation of run.\nMessage:\n'
            + str(e),
            level='error')
        raise RuntimeError(str(e))
Example #15
def main(arguments) -> None:
    """ Main function for pira framework. Used to invoke the various components. """
    show_pira_invoc_info(arguments)

    invoc_cfg = process_args_for_invoc(arguments)
    use_extra_p, extrap_config = process_args_for_extrap(arguments)

    home_dir = U.get_cwd()
    U.set_home_dir(home_dir)

    U.make_dir(invoc_cfg.get_pira_dir())
    BackendDefaults(invoc_cfg)

    csv_config = process_args_for_csv(arguments)

    try:
        if arguments.config_version == 1:
            config_loader = CLoader()
        else:
            config_loader = SCLoader()

        configuration = config_loader.load_conf(invoc_cfg.get_path_to_cfg())
        checker.check_configfile(configuration, arguments.config_version)

        if B.check_queued_job():
            # FIXME: Implement
            L.get_logger().log(
                'In this version of PIRA it is not yet implemented',
                level='error')
            assert (False)

        else:
            # This branch is running PIRA actively on the local machine.
            # It is blocking, and the user can track the progress in the terminal.
            L.get_logger().log('Running the local case')

            # The FunctorManager manages loaded functors and generates the respective names
            F.FunctorManager(configuration)
            dbm = D.DBManager(D.DBManager.db_name + '.' + D.DBManager.db_ext)
            dbm.create_cursor()
            analyzer = A(configuration)

            runner_factory = PiraRunnerFactory(invoc_cfg, configuration)
            runner = runner_factory.get_simple_local_runner()
            if use_extra_p:
                L.get_logger().log('Running with Extra-P runner')
                runner = runner_factory.get_scalability_runner(extrap_config)

            if runner.has_sink():
                analyzer.set_profile_sink(runner.get_sink())

            # A build/place is a top-level directory
            for build in configuration.get_builds():
                L.get_logger().log('Build: ' + str(build))
                app_tuple = (U.generate_random_string(), build, '', '')
                dbm.insert_data_application(app_tuple)

                # An item is a target/software in that directory
                for item in configuration.get_items(build):
                    L.get_logger().log('Running for item ' + str(item))

                    # A flavor is a specific version to build
                    if configuration.has_local_flavors(build, item):
                        for flavor in configuration.get_flavors(build, item):
                            L.get_logger().log('Running for local flavor ' +
                                               flavor,
                                               level='debug')

                            # prepare database, and get a unique handle for current item.
                            db_item_id = dbm.prep_db_for_build_item_in_flavor(
                                configuration, build, item, flavor)
                            # Create configuration object for the item currently processed.
                            place = configuration.get_place(build)
                            t_config = TargetConfiguration(
                                place, build, item, flavor, db_item_id,
                                invoc_cfg.is_compile_time_filtering(),
                                invoc_cfg.get_hybrid_filter_iters())

                            # Execute using a local runner, given the generated target description
                            execute_with_config(runner, analyzer,
                                                invoc_cfg.get_pira_iters(),
                                                t_config, csv_config)

                    # If global flavor
                    else:
                        # TODO: Implement
                        L.get_logger().log(
                            'In this version of PIRA it is not yet implemented',
                            level='error')
                        assert (False)

        U.change_cwd(home_dir)

    except RuntimeError as rt_err:
        U.change_cwd(home_dir)
        L.get_logger().log('Runner.run caught exception. Message: ' +
                           str(rt_err),
                           level='error')
        L.get_logger().dump_tape()
        sys.exit(-1)
Example #16
  def test_config_empty_sink(self):
    analyzer = A.Analyzer(self._pira_two_cfg)
    tc = TargetConfiguration(self._it_dir, self._it_dir, self._it_dir, 'dflt', 'asdf')
    with self.assertRaises(RuntimeError):
      analyzer.analyze(tc, 0)
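
Taken together, the examples above show the life cycle of a TargetConfiguration: it is constructed from a place, build, item, and flavor (Examples #7, #12, #15) and then handed to a runner, builder, or analyzer. Below is a minimal usage sketch that is not taken from the PIRA sources; it mirrors the constructor call of Example #7. The import path and the runner object are assumptions and would need to match your checkout.

# Minimal usage sketch (assumption: TargetConfiguration lives in lib/Configuration.py;
# a configured runner instance is assumed to exist and is only referenced in comments).
from lib.Configuration import TargetConfiguration  # assumed module path

# Constructor arguments as used in Example #7: place, build, item, flavor, db_item_id
target_cfg = TargetConfiguration('/this/is/top_dir',   # place: top-level directory
                                 '/this/is/top_dir',   # build: often the same path as place
                                 'item01',             # item: target/software in that directory
                                 'item01-flavor01',    # flavor: specific build variant
                                 '')                   # db_item_id: empty when no DB handle is used

# Example #15 additionally passes filtering options:
#   TargetConfiguration(place, build, item, flavor, db_item_id,
#                       compile_time_filtering, hybrid_filter_iters)

# A runner (e.g. the LocalRunner of Example #1) then consumes the configuration:
#   run_result = runner.do_baseline_run(target_cfg)
#   print(run_result.get_average())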