Example #1
    def __init__(self, sim_name, results_directory, testbed=None):
        self.sim_name = sim_name
        self.results_directory = results_directory

        self.normalised_values = self.normalised_parameters()
        self.normalised_values += (('time_after_first_normal', '1'),
                                   ('AttackerDistance', 'max_source_distance_meters'))

        self.filtered_values = self.filtered_parameters()

        self.values = self.results_header()

        self.values['dropped no sink delivery'] = lambda x: str(x.dropped_no_sink_delivery)
        self.values['dropped hit upper bound'] = lambda x: str(x.dropped_hit_upper_bound)
        self.values['dropped duplicates'] = lambda x: str(x.dropped_duplicates)

        if testbed:
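            # The testbed may be given by name, in which case the matching module is loaded from data.testbed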
            if isinstance(testbed, str):
                testbed = submodule_loader.load(data.testbed, testbed)

            if hasattr(testbed, "testbed_header"):
                self.values.update(testbed.testbed_header(self))

            if hasattr(testbed, "testbed_normalised"):
                self.normalised_values += testbed.testbed_normalised(self)
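A testbed can extend this header through two optional hooks, which the constructor above probes with hasattr. A minimal sketch of a hypothetical testbed module supplying both (the column name and attributes are illustrative, not taken from any real testbed definition):

# Hypothetical testbed module: both hooks are optional and only used when present.
def testbed_header(analysis):
    # Extra per-testbed columns, mapping a column name to an extractor function
    return {'average rssi': lambda x: str(x.average_rssi)}

def testbed_normalised(analysis):
    # Extra (name, denominator) pairs appended to the normalised values
    return (('energy_impact', 'num_nodes'),)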
Example #2
    def __init__(self, sim_name, output_directory):
        self.output_directory = output_directory

        self.sim_name = sim_name

        sim = submodule_loader.load(simulator.sim, self.sim_name)

        self._key_names_base = sim.global_parameter_names
Example #3
def main():
    import importlib
    import sys

    import simulator.sim
    from data import submodule_loader

    module = sys.argv[1]

    # Dynamically import the algorithm's Arguments submodule (e.g. algorithm.protectionless.Arguments)
    Arguments = importlib.import_module(f"{module}.Arguments")

    a = Arguments.Arguments()
    a.parse(sys.argv[2:])

    sim = submodule_loader.load(simulator.sim, a.args.sim)

    result = sim.run_simulation(module, a)

    sys.exit(result)
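Every example on this page follows the same pattern: a package object plus a string selects the concrete module. A simplified sketch of what submodule_loader.load is assumed to do (the real helper in the data package may differ, and "tossim" is only an illustrative backend name):

import importlib

def load(package, name):
    # e.g. load(simulator.sim, "tossim") would import and return simulator.sim.tossim
    return importlib.import_module(f"{package.__name__}.{name}")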
Example #4
def build(module, a):
    import data.cycle_accurate
    from data.run.driver.avrora_builder import Runner as Builder

    from data import submodule_loader

    target = module.replace(".", "/") + ".txt"

    avrora = submodule_loader.load(data.cycle_accurate, "avrora")

    builder = Builder(avrora, max_buffer_size=a.args.max_buffer_size)
    builder.total_job_size = 1
    
    # add_job is given the job arguments (module, a) and the target output file
    builder.add_job((module, a), target)
Example #5
    def _java_prepare_command(self, sim_name):

        sim = submodule_loader.load(simulator.sim, sim_name)

        if not sim.cluster_need_java:
            return ""

        if self.java_prepare_command is None:
            raise RuntimeError(
                f"{sim_name} needs Java to run, but the cluster doesn't know how to load it"
            )

        # Nothing special to do
        if self.java_prepare_command is True:
            return ""

        return self.java_prepare_command
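A minimal sketch of the contract this method relies on, using a hypothetical cluster definition (the class name and shell command are illustrative):

class ExampleCluster(object):
    # None -> this cluster cannot provide Java (a RuntimeError is raised when the simulator needs it)
    # True -> Java is already available, so no preparation command is needed
    # str  -> the shell command to run so that Java becomes available
    java_prepare_command = "module load java"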
Example #6
    def __init__(self, sim_name, result_file, parameters, results, results_filter=None,
                 source_period_normalisation=None, network_size_normalisation=None):
        self.sim_name = sim_name
        self.parameter_names = tuple(parameters)
        self.result_names = tuple(results)
        self.result_file_name = result_file

        self.data = {}

        sim = submodule_loader.load(simulator.sim, sim_name)

        self.global_parameter_names = sim.global_parameter_names[:-1]

        # Create attributes that will store all the values seen for a given parameter
        for param in self.global_parameter_names:
            setattr(self, self.name_to_attr(param), set())

        self._read_results(result_file, results_filter, source_period_normalisation, network_size_normalisation)
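This appears to be the results.Results constructor invoked in Example #11. A hedged usage sketch (the simulator name, file path, and chosen parameter and result names are placeholders):

results = Results("tossim",
                  "results/protectionless-tossim.csv",  # placeholder path
                  parameters=('source period',),
                  results=('captured', 'received ratio'))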
Example #7
    def common_results_header(self, local_parameter_names):
        d = OrderedDict()

        # Include the number of simulations that were analysed
        d['repeats'] = lambda x: str(x.number_of_repeats)

        # Give everyone access to the number of nodes in the simulation
        d['num nodes'] = lambda x: str(x.configuration.size())

        sim = submodule_loader.load(simulator.sim, self.sim_name)

        # Add the options that all simulations must include, plus the local parameter names
        for parameter in sim.global_parameter_names + local_parameter_names:

            param_underscore = parameter.replace(" ", "_")

            d[parameter] = lambda x, name=param_underscore: x.opts[name]

        return d
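Each value in the returned OrderedDict maps one aggregated result to a string column. A self-contained toy sketch of that consumption pattern (FakeResult and the single 'repeats' column are stand-ins for illustration):

from collections import OrderedDict

# Stand-in for one aggregated result with the attribute the extractor expects
class FakeResult:
    number_of_repeats = 5

header = OrderedDict()
header['repeats'] = lambda x: str(x.number_of_repeats)

row = [extract(FakeResult()) for extract in header.values()]  # ['5'], one string per column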
Example #8
def build(module, a):
    import data.cycle_accurate
    from data.run.driver.cooja_builder import Runner as Builder

    from data import submodule_loader

    target = module.replace(".", "/") + ".txt"

    cooja = submodule_loader.load(data.cycle_accurate, "cooja")

    builder = Builder(cooja,
                      max_buffer_size=a.args.max_buffer_size,
                      platform=a.args.platform.platform(),
                      quiet=True)
    builder.total_job_size = 1

    # add_job is given the job arguments (module, a) and the target output file
    builder.add_job((module, a), target)

    # 0 For successful build result
    return 0
Example #9
    def __init__(self,
                 sim_name,
                 driver,
                 algorithm_module,
                 result_path,
                 skip_completed_simulations=True,
                 safety_periods=None,
                 safety_period_equivalence=None):
        self.sim_name = sim_name
        self.driver = driver
        self.algorithm_module = algorithm_module
        self._result_path = result_path
        self._skip_completed_simulations = skip_completed_simulations
        self._safety_periods = safety_periods
        self._safety_period_equivalence = safety_period_equivalence

        self._sim = submodule_loader.load(simulator.sim, self.sim_name)

        self._global_parameter_names = self._sim.global_parameter_names

        if not os.path.isdir(self._result_path):
            raise RuntimeError(f"{self._result_path} is not a directory")

        self._existing_results = {}
Example #10
    def __init__(self, sim_name):
        self.sim_name = sim_name
        self._sim = submodule_loader.load(simulator.sim, self.sim_name)
        self._progress = Progress("building file")
        self.total_job_size = None
        self._jobs_executed = 0
Example #11
    def _run_min_max_ilp_versus(self, args):
        graph_parameters = {
            'normal latency': ('Normal Message Latency (ms)', 'left top'),
            #            'ssd': ('Sink-Source Distance (hops)', 'left top'),
            'captured': ('Capture Ratio (%)', 'right top'),
            #            'normal': ('Normal Messages Sent', 'left top'),
            'sent': ('Total Messages Sent', 'left top'),
            'received ratio': ('Receive Ratio (%)', 'left bottom'),
            'attacker distance': ('Attacker-Source Distance (meters)', 'left top'),
            'norm(sent,time taken)': ('Messages Sent per Second', 'left top'),
            #            'norm(norm(sent,time taken),network size)': ('Messages Sent per Second per Node', 'left top'),
            #            'norm(normal,time taken)': ('Messages Sent per Second', 'left top'),
        }

        custom_yaxis_range_max = {
            'captured': 25,
            'received ratio': 100,
            'attacker distance': 120,
            'normal latency': 4000,
            'norm(sent,time taken)': 8000,
            'norm(norm(sent,time taken),network size)': 15,
        }

        def filter_params(all_params):
            return (all_params['source period'] == '0.125'
                    or all_params['noise model'] == 'meyer-heavy'
                    or all_params['configuration'] != 'SourceCorner')

        def adaptive_filter_params(all_params):
            return filter_params(all_params) or all_params['approach'] in {
                "PB_SINK_APPROACH", "PB_ATTACKER_EST_APPROACH"
            }

        def ilprouting_filter_params(all_params):
            return filter_params(all_params) or all_params["pr direct to sink"] != "0.2"

        protectionless_analysis = protectionless.Analysis.Analyzer(
            args.sim, protectionless.results_path(args.sim))

        protectionless_results = results.Results(
            args.sim,
            protectionless.result_file_path(args.sim),
            parameters=protectionless.local_parameter_names,
            results=list(
                set(graph_parameters.keys())
                & set(protectionless_analysis.results_header().keys())),
            results_filter=filter_params)

        adaptive_spr_notify_results = results.Results(
            args.sim,
            self.algorithm_module.result_file_path(args.sim),
            parameters=self.algorithm_module.local_parameter_names,
            results=graph_parameters.keys(),
            results_filter=filter_params)

        adaptive_results = results.Results(
            args.sim,
            adaptive.result_file_path(args.sim),
            parameters=adaptive.local_parameter_names,
            results=graph_parameters.keys(),
            results_filter=adaptive_filter_params)

        ilprouting_results = results.Results(
            args.sim,
            ilprouting.result_file_path(args.sim),
            parameters=ilprouting.local_parameter_names,
            results=graph_parameters.keys(),
            results_filter=ilprouting_filter_params)

        sim = submodule_loader.load(simulator.sim, args.sim)

        def graph_min_max_versus(result_name, xaxis):
            name = f'min-max-ilp-versus-{result_name}-{xaxis}'

            if result_name == "attacker distance":
                # Just get the distance of attacker 0 from node 0 (the source in SourceCorner)
                def yextractor(yvalue):
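                    # Print the raw value before extracting the (0, 0) entry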
                    print(yvalue)
                    return scalar_extractor(yvalue, key=(0, 0))
            else:
                yextractor = scalar_extractor

            vary = [
                'approach', 'approach',
                ('buffer size', 'max walk length', 'pr direct to sink',
                 'msg group size')
            ]

            g = min_max_versus.Grapher(args.sim,
                                       self.algorithm_module.graphs_path(args.sim),
                                       name,
                                       xaxis=xaxis,
                                       yaxis=result_name,
                                       vary=vary,
                                       yextractor=yextractor)

            g.xaxis_label = xaxis.title()
            g.yaxis_label = graph_parameters[result_name][0]
            g.key_position = graph_parameters[result_name][1]

            g.xaxis_font = "',16'"
            g.yaxis_font = "',16'"
            g.xlabel_font = "',14'"
            g.ylabel_font = "',14'"
            g.line_width = 3
            g.point_size = 1
            g.nokey = True
            g.legend_font_size = 16

            #g.min_label = ['Static - Lowest']
            #g.max_label = ['Static - Highest']
            g.comparison_label = ['Dynamic', 'DynamicSpr', 'ILPRouting']
            g.vary_label = ''

            if xaxis == 'network size':
                g.xvalues_to_tic_label = lambda x: f'"{x}x{x}"'

            if result_name in custom_yaxis_range_max:
                g.yaxis_range_max = custom_yaxis_range_max[result_name]

            def vvalue_converter(name):
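                # Map raw parameter values to readable legend labels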
                if isinstance(name, tuple):
                    (buffer_size, max_walk_length, pr_direct_to_sink,
                     msg_group_size) = name

                    return f"Group Size {msg_group_size}"

                try:
                    return {
                        "PB_FIXED1_APPROACH": "Fixed1",
                        "PB_FIXED2_APPROACH": "Fixed2",
                        "PB_RND_APPROACH": "Rnd",
                    }[name]
                except KeyError:
                    return name

            g.vvalue_label_converter = vvalue_converter

            # Want to pretend SeqNosOOOReactiveAttacker is SeqNosReactiveAttacker
            def correct_data_key(data_key):
                data_key = list(data_key)
                attacker_index = sim.global_parameter_names.index('attacker model')
                data_key[attacker_index] = "SeqNosReactiveAttacker()"
                return tuple(data_key)

            g.correct_data_key = correct_data_key

            g.generate_legend_graph = True

            if result_name in protectionless_results.result_names:
                g.create([], [
                    adaptive_results, adaptive_spr_notify_results,
                    ilprouting_results
                ],
                         baseline_results=protectionless_results)
            else:
                g.create([], [
                    adaptive_results, adaptive_spr_notify_results,
                    ilprouting_results
                ])

            summary.GraphSummary(
                os.path.join(self.algorithm_module.graphs_path(args.sim), name),
                os.path.join(algorithm.results_directory_name,
                             f'{self.algorithm_module.name}-{name}'.replace(" ", "_"))
            ).run()

        for result_name in graph_parameters.keys():
            graph_min_max_versus(result_name, 'network size')
Example #12
    def _get_testbed(self):
        return submodule_loader.load(data.testbed, self.args.testbed)
Example #13
def main(argv):
    # Print a traceback in the case of segfaults
    faulthandler.enable()

    if __debug__:
        if len(argv) <= 1:
            print(
                "Please provide the algorithm module as the first parameter. (e.g., algorithm.protectionless)",
                file=sys.stderr)
            return 1

    module = argv[1]

    if __debug__:
        if not (module.startswith('algorithm.')
                or module.startswith('cluster.')):
            print(
                "You can only run algorithms in the 'algorithm' or 'cluster' module.",
                file=sys.stderr)
            return 2

    algorithm_module = algorithm.import_algorithm(module, extras=["Arguments"])

    a = algorithm_module.Arguments.Arguments()
    a.parse(argv[2:])

    sim = submodule_loader.load(simulator.sim, a.args.sim)

    if a.args.mode in ("SINGLE", "GUI", "RAW", "PARALLEL"):
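        # Build the algorithm for the chosen backend before it is run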
        sim.build(module, a)

    # Make the mode SINGLE, as PROFILE behaves like SINGLE except that the code is not rebuilt
    if a.args.mode == "PROFILE":
        a.args.mode = "SINGLE"

    # Set the thread count, but only for jobs that need it
    if hasattr(a.args, "thread_count") and a.args.thread_count is None:
        import psutil
        # Set the number of usable CPUs
        a.args.thread_count = len(psutil.Process().cpu_affinity())

    # When doing cluster array jobs only print out this header information on the first job
    if a.args.mode != "CLUSTER" or a.args.job_id is None or a.args.job_id == 1:
        from datetime import datetime

        metrics_class = MetricsCommon.import_algorithm_metrics(
            module, a.args.sim, a.args.extra_metrics)

        # Print out the versions of slp-algorithms-tinyos and tinyos being used
        print(f"@version:python={VersionDetection.python_version()}")
        print(f"@version:numpy={VersionDetection.numpy_version()}")

        print(
            f"@version:slp-algorithms={VersionDetection.slp_algorithms_version()}"
        )

        sim.print_version()

        # Print other potentially useful meta data
        print(f"@date:{str(datetime.now())}")
        print(f"@host:{os.uname()}")

        # Record what algorithm is being run and under what simulator
        print(f"@module:{module}")
        print(f"@sim:{a.args.sim}")

        # Print out the argument settings
        sim.print_arguments(module, a)

        # Print the header for the results
        metrics_class.print_header()

        # Make sure this header has been written
        sys.stdout.flush()

    # Because of the way TOSSIM is architected, each individual simulation
    # needs to be run in a separate process.
    if a.args.mode in ("GUI", "SINGLE", "RAW"):
        sim.run_simulation(module, a, print_warnings=True)
    else:
        _run_parallel(sim, module, a, argv)