Example #1
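    # Registers the Test Harness action endpoints with the Olympus server, wires error/status/validation
    # event listeners (plus optional raw analytics listeners), and creates the StateManager.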
    def __init__(self):
        ig.get_olympus().route_add(path='/action/adaptAndValidateApplication', method='POST',
                                   callback=self.adapt_and_validate_application)

        ig.get_olympus().route_add(path='/action/validateBaselineApplication', method='POST',
                                   callback=self.validate_baseline_application)

        ig.get_event_router().subscribe_listener(event_tag_or_type=EventType.ERROR, listener=th_client.process_error)
        ig.get_event_router().subscribe_listener(event_tag_or_type=EventType.STATUS, listener=th_client.process_status)
        ig.get_event_router().set_log_events_to_file(event_tag_or_type=EventType.ERROR,
                                                     filepath=get_configuration().logFile,
                                                     transformer=th_client.THErrorStringTransformer)
        ig.get_event_router().subscribe_listener(event_tag_or_type=EventTags.ValidationTestsFinished,
                                                 listener=self.receive_validation_finished)

        if get_configuration().testAdapter.reportRawData:
            ig.get_event_router().subscribe_listener(
                event_tag_or_type=EventTags.AnalyticsEventServerNetworkTrafficMeasuredBytes,
                listener=self._raw_event_appender)

            ig.get_event_router().subscribe_listener(
                event_tag_or_type=EventTags.AnalyticsEventServerNetworkTrafficCalculatedBytesPerSec,
                listener=self._raw_event_appender)

        self._sm = StateManager()
Example #2
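    # Stores the event router, registers the HTTP routes, and reads the listen host/port from the
    # test adapter configuration; the listener sets start empty and catchall is disabled.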
    def __init__(self, event_router):
        super(Olympus, self).__init__()

        self.event_router = event_router
        self._define_routes()
        self._host = get_configuration().testAdapter.url
        self._port = get_configuration().testAdapter.port
        self._analytics_event_listeners = set()
        self._validation_results_listeners = set()
        self._das_status_message_listeners = set()
        self.catchall = False
Example #3
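    # Copies the given source file into the artifact root (optionally under target_subpath, which is
    # created on demand), keeping only the file's basename as the destination name.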
    def archive_file(self, source_filepath: str, target_subpath: str = None):
        artifact_root = get_configuration().artifactRoot
        source = path_helper(True, get_configuration().immortalsRoot, source_filepath)

        if target_subpath is None:
            target = path_helper(False, artifact_root, source_filepath[source_filepath.rfind('/') + 1:])
        else:
            target_path = os.path.join(artifact_root, target_subpath)
            if not os.path.exists(target_path):
                os.makedirs(target_path)

            target = path_helper(False,
                                 artifact_root,
                                 os.path.join(target_path, source_filepath[source_filepath.rfind('/') + 1:]))

        shutil.copyfile(source, target)
Example #4
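    # Launches das/start.sh through bash from the harness directory, appending its stdout/stderr to
    # per-scenario start-stdout/start-stderr log files in the global log directory; ResourceWarning is ignored.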
    def _start_das(self):
        # target_override_filepath = _construct_override_file(test_scenario=test_scenario, overrides=overrides)

        if self.immortals_root is None:
            immortals_root = os.path.join('../')
        else:
            immortals_root = os.path.abspath(self.immortals_root)
            print(immortals_root)

        self.das_stdout = open(
            os.path.join(
                get_configuration().globals.globalLogDirectory,
                self._current_test_scenario.scenarioIdentifier +
                '-start-stdout.txt'), 'a')
        self.das_stderr = open(
            os.path.join(
                get_configuration().globals.globalLogDirectory,
                self._current_test_scenario.scenarioIdentifier +
                '-start-stderr.txt'), 'a')
        try:
            self.das_process = Popen([
                'bash',
                os.path.join(
                    root_configuration.get_configuration().immortalsRoot,
                    'das/start.sh'), "-v", "DEBUG"
            ],
                                     cwd=os.path.abspath(
                                         os.path.join(immortals_root,
                                                      'harness')),
                                     stdin=PIPE,
                                     stderr=self.das_stderr,
                                     stdout=self.das_stdout)
        except ResourceWarning:
            # Ignore this...
            pass
Example #5
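# Builds the mission_perturb.py argument list from the scenario configuration (message rates, server
# bandwidth, client count, and present resources) and runs it in the models/scenario directory to
# generate the TTL deployment model.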
def execute_ttl_generation(scenario_configuration, output_file=None):
    client = scenario_configuration.clients[0]
    res = client.presentResources

    params = [
        'python3', './py/mission_perturb.py', '--session',
        scenario_configuration.sessionIdentifier, '--output',
        'deployment_model.ttl', '--template', './template/gme-template.ttl',
        '--pli-client-msg-rate',
        str(60000 / int(client.latestSABroadcastIntervalMS)),
        '--image-client-msg-rate',
        str(60000 / int(client.imageBroadcastIntervalMS)),
        '--server-bandwidth',
        str(scenario_configuration.server.bandwidth), '--client-device-count',
        str(client.count), '--android-bluetooth-resource',
        'yes' if 'bluetooth' in res else 'no', '--android-usb-resource',
        'yes' if 'usb' in res else 'no', '--android-internal-gps-resource',
        'yes' if 'internalGps' in res else 'no', '--android-ui-resource',
        'yes' if 'userInterface' in res else 'no', '--gps-satellite-resource',
        'yes' if 'gpsSatellites' in res else 'no', '--mission-trusted-comms',
        'yes' if 'trustedLocations' in client.requiredProperties else 'no'
    ]

    if output_file is not None:
        params.append('--output')
        params.append(os.path.abspath(output_file))

    tpr.global_subprocess.run(args=params,
                              cwd=os.path.join(get_configuration().immortalsRoot,
                                               'models/scenario'))
Example #6
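    # Adjusts the tornado.access log level according to debugMode, then serves this Bottle app through
    # an ImmortalsBokehBottleServer on the configured host and port.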
    def start(self):
        if get_configuration().debugMode:
            logging.getLogger('tornado.access').setLevel(logging.WARNING)
        else:
            logging.getLogger('tornado.access').setLevel(logging.INFO)

        ts = ImmortalsBokehBottleServer(host=self._host, port=self._port)
        bottle.run(app=self, server=ts, debug=False)
Example #7
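# Lazily constructs the singleton DAS bridge under a lock, pointing it at the configured dasService
# websocket port, and starts it in a background thread before returning it.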
def get_das_bridge() -> DasBridge:
    global _das_bridge
    with _das_bridge_init_lock:
        if _das_bridge is None:
            _das_bridge = _DasBridgeImpl(
                target_addr='127.0.0.1',
                target_port=get_configuration().dasService.websocketPort)
            _das_bridge.start_in_thread()
    return _das_bridge
Example #8
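    # When visualization is enabled, attaches a Bokeh VisualizationService to the Tornado IOLoop;
    # otherwise the IO loop thread and Bokeh server are left unset.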
    def __init__(self, host='127.0.0.1', port=8080, **config):
        super(ImmortalsBokehBottleServer, self).__init__(host, port, **config)
        if get_configuration().visualization.enabled:
            self._io_loop = IOLoop.instance()
            self._bokeh_server = VisualizationService(
                io_loop=self._io_loop, event_router=ig.get_event_router())
        else:
            self._io_loop_thread = None
            self._bokeh_server = None
Example #9
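    # Writes (or appends) a string to a file under the artifact root, creating the parent directory first.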
    def archive_to_file(self, str_to_write: str, target_subpath: str, clobber_existing: bool = True):
        target_path = os.path.join(get_configuration().artifactRoot, target_subpath)
        full_target_path = os.path.dirname(target_path)
        if not os.path.exists(full_target_path):
            os.makedirs(full_target_path)

        with open(target_path, 'w' if clobber_existing else 'a') as f:
            f.write(str_to_write)
Example #10
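    # Dispatches an event to its registered submission listeners, logging the payload when debug routing
    # is enabled; ERROR events are delivered inside nested try/except blocks so a failing listener cannot
    # prevent the remaining listeners from running.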
    def _submit(self, event_tag: EventTag, data: object, recurse_errors: bool):
        with self._lock:
            if get_configuration().debug.routing:
                if isinstance(data, Serializable):
                    msg = 'EVENT ' + event_tag.identifier + ' submitted with data: \n' + data.to_json_str_pretty()
                elif isinstance(data, dict):
                    msg = 'EVENT ' + event_tag.identifier + ' submitted with data: \n' + json.dumps(data,
                                                                                                    indent=4,
                                                                                                    separators=(
                                                                                                        ',', ': '))
                else:
                    msg = 'EVENT ' + event_tag.identifier + ' submitted with data: ' + str(data)

                logging.debug(msg)
            submission_listeners = self._event_tag_submission_listeners

        # Errors need to bubble up somehow at all costs, so care must be taken to ensure every listener executes
        if event_tag.event_type == EventType.ERROR:
            try:
                for l in submission_listeners[event_tag]:
                    try:
                        l(event_tag, data)
                        _FileLogger.flush()
                    except Exception:
                        if recurse_errors:
                            try:
                                self._submit(event_tag=EventTags.THErrorGeneral,
                                             data=traceback.format_exc(),
                                             recurse_errors=False)
                            except:
                                pass
            except:
                pass

        else:
            if event_tag in submission_listeners:
                for l in submission_listeners[event_tag]:
                    if get_configuration().debug.routing:
                        logging.debug('    SENDING TO ' + str(l))
                        l(event_tag, data)
                        logging.debug('    ' + str(l) + ' Finished')
                    else:
                        l(event_tag, data)
Example #11
    # Wraps a command processor (falling back to the subprocess module) and opens tagged stdout/stderr
    # logging endpoints in the given log directory, defaulting to the configured artifact root.
    def __init__(self, command_processor: Optional['ImmortalsSubprocess'],
                 log_tag: str, log_dirpath: str = None):
        self.__command_processor = command_processor if command_processor is not None else subprocess

        if log_dirpath is None:
            log_dirpath = get_configuration().artifactRoot

        endpoint = get_std_endpoint(log_dirpath=log_dirpath, file_tag=log_tag)
        self.__err = endpoint.err
        self.__out = endpoint.out
Example #12
    # Bandwidth validator setup: collects the ATAKLite client instance identifiers, derives the sampling
    # window from the image/PLI broadcast rates, the reporting interval, and the configured duration
    # multiplier, and reads the bandwidth constraint (kbit/s) from the deployment model.
    def __init__(self, gif: GmeInterchangeFormat, runner_configuration: ScenarioRunnerConfiguration):
        super().__init__(gif=gif, runner_configuration=runner_configuration)
        self._offline_clients: List[str] = [a.instanceIdentifier
                                            for a in runner_configuration.scenario.deploymentApplications
                                            if a.applicationIdentifier.lower() == 'ataklite']
        self._timestamp_list: List[int] = []
        self._byte_list: List[int] = []

        self._max_hit_bandwidth_kilobits_per_second: int = -1

        validation_reporting_interval_secs = \
            get_configuration().validation.bandwidthMonitorReportingIntervalMS / 1000

        self._sample_span_seconds: int = 0

        client_count = triples_helper.get_android_client_count(gif)
        image_broadcast_interval_ms = triples_helper.get_image_rate_ms(gif)
        pli_broadcast_interval_ms = triples_helper.get_pli_rate_ms(gif)

        for q in range(client_count):
            self._sample_span_seconds = \
                max(self._sample_span_seconds, image_broadcast_interval_ms, pli_broadcast_interval_ms)

        self._sample_span_seconds /= 1000
        sample_delay_seconds = max(len(self._offline_clients) * 2, self._sample_span_seconds // 2)

        self._sample_span_seconds *= get_configuration().validation.bandwidthValidatorSampleDurationMultiplier

        # Wait for half the sample span for things to settle before starting to measure data
        self._lower_idx: int = sample_delay_seconds
        self._upper_idx: int = int((self._lower_idx + (self._sample_span_seconds / validation_reporting_interval_secs)))

        logging.debug('Bandwidth Sampling Starting Lower Idx: ' + str(self._lower_idx))
        logging.info('Bandwidth Sampling Starting Upper Idx: ' + str(self._upper_idx))

        self._maximum_bandwidth_kbits_per_second: int = triples_helper.get_bandwidth_constraint_kbit_per_second(gif)

        self._image_send_count = 0
        self._image_receive_count = 0
        self._run_time_ms = max(60, 3 * self._sample_span_seconds)
Example #13
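    # Announces that adaptation has started, regenerates and archives the TTL deployment model, POSTs it
    # to the local DAS deployment-model endpoint, and stores the parsed AdaptationResult.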
    def perform_adaptation(state_manager: StateManager):
        ig.get_event_router().submit(event_tag=EventTags.THStatusAdapting,
                                     data=state_manager.get_test_adapter_state())

        # TODO: This should be done outside of the FredScript, but requires the DAS supporting JSON triples
        execute_ttl_generation(state_manager.get_ll_p1_input())
        deployment_file = ph(True, get_configuration().immortalsRoot, 'models/scenario/deployment_model.ttl')

        ig.get_event_router().archive_file(deployment_file)

        with open(deployment_file, 'rb') as deployment_model_file:
            payload = deployment_model_file.read()
        headers = {'Content-Type': 'text/plain'}
        req = requests.post('http://localhost:8080/bbn/das/deployment-model', headers=headers, data=payload)

        ar = AdaptationResult.from_dict(json.loads(req.text))

        state_manager.set_adaptation_result(ar)
Example #14
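# Builds a ScenarioRunnerConfiguration from the deployment model: loads the base configuration resource,
# adds a MARTI server application for each server platform, and constructs the ATAKLite client
# configurations for each Android platform with per-client identifiers, broadcast-rate overrides, and the
# list of available resources.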
def load_phase01_scenario_runner_configuration(deployment_model: GmeInterchangeFormat) -> ScenarioRunnerConfiguration:
    client_count = get_android_client_count(deployment_model)
    pli_rate_ms = get_pli_rate_ms(deployment_model)
    image_rate_ms = get_image_rate_ms(deployment_model)

    config_dict = resourcemanager.load_configuration_dict('scenario_runner_configuration')

    vp = {
        'sessionIdentifier': deployment_model.sessionIdentifier,
        'runtimeRoot': get_configuration().runtimeRoot,
    }

    src: ScenarioRunnerConfiguration = ScenarioRunnerConfiguration.from_dict(d=config_dict, value_pool=vp)

    vp['deploymentDirectory'] = src.deploymentDirectory

    global_resources: List[Resource] = \
        [k for k in deployment_model.availableResources if not isinstance(k, ExecutionPlatform)]

    for resource in deployment_model.availableResources:
        if isinstance(resource, ServerPlatform):
            server_config: ApplicationConfig = JavaApplicationConfig.from_dict(
                d=resourcemanager.load_configuration_dict('server_marti'),
                value_pool=vp
            )
            src.scenario.deploymentApplications.append(server_config)

        elif isinstance(resource, AndroidPlatform):
            present_resources: List[str] = get_resource_identifiers(global_resources + resource.platformResources)

            # Construct the clients and add them to the scenario runner configuration
            client_j = resourcemanager.load_configuration_dict('client_ataklite')

            ccid = 0
            for q in range(client_count):
                # client_configuration = scenario_configuration.clients[j]  # type: ATAKLiteClient

                # Zero-pad the client index to three digits (e.g. '007')
                i_str = str(q).zfill(3)

                client_vp = vp.copy()
                client_vp.update({
                    'CCID': str(ccid),
                    'CID': i_str,
                })

                client: AndroidApplicationConfig = AndroidApplicationConfig.from_dict(client_j,
                                                                                      value_pool=client_vp)

                for f in client.filesToCopy:  # type: FileCopyConfiguration

                    if f.targetFilepath == '/sdcard/ataklite/ATAKLite-Config.json':

                        override_file_map = {k.sourceFilepath: k for k in client.configurationOverrides}

                        if f.sourceFilepath in override_file_map:
                            override_obj = override_file_map[f.sourceFilepath]
                        else:
                            override_obj = JsonFileOverrideConfiguration(f.sourceFilepath, {})
                            client.configurationOverrides.append(override_obj)

                        override_obj.overridePairs['callsign'] = client.instanceIdentifier
                        override_obj.overridePairs['latestSABroadcastIntervalMS'] = pli_rate_ms
                        override_obj.overridePairs['imageBroadcastIntervalMS'] = image_rate_ms

                    elif f.targetFilepath == '/sdcard/ataklite/env.json':

                        override_file_map = {k.sourceFilepath: k for k in client.configurationOverrides}

                        if f.sourceFilepath in override_file_map:
                            override_obj = override_file_map[f.sourceFilepath]
                        else:
                            override_obj = JsonFileOverrideConfiguration(f.sourceFilepath, {})
                            client.configurationOverrides.append(override_obj)

                        override_obj.overridePairs['availableResources'] = present_resources

                src.scenario.deploymentApplications.append(client)
            ccid += 1

    return src
Example #15
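# Test Harness client module: assembles the Test Harness base URL and the error/ready/status/action
# endpoint URLs from configuration for posting DAS status, errors, and results back to the Test Harness.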
import json

import requests

import pymmortals.immortalsglobals as ig
from pymmortals.datatypes.root_configuration import get_configuration
from pymmortals.datatypes.routing import EventType, EventTag, EventTags
from pymmortals.datatypes.test_harness_api import LLTestActionDone, LLDasReady, LLDasStatusEvent, LLDasErrorEvent
from pymmortals.generated.mil.darpa.immortals.core.api.ll.phase1.testadapterstate import TestAdapterState
from pymmortals.routing.eventrouter import EventTransformer
from pymmortals.utils import get_th_timestamp

_BASE_URL = get_configuration().testHarness.protocol + get_configuration().testHarness.url + ':' + \
            str(get_configuration().testHarness.port)

_JSON_HEADERS = {'Content-Type': 'application/json'}

URL_TH_ERROR = _BASE_URL + '/error'
URL_TH_READY = _BASE_URL + '/ready'
URL_TH_STATUS = _BASE_URL + '/status'
URL_TH_ACTION = _BASE_URL + '/action'
URL_TH_DONE = URL_TH_ACTION + '/done'


def _post_network_request(event_tag: EventTag, url: str, data: str):
    details_str = 'TA SENDING POST ' + url.replace(_BASE_URL, '') + ' with '

    if event_tag.event_type == EventType.STATUS:
        if url.startswith(URL_TH_ACTION):
            details_str += '/action/' + url[url.rfind('/') + 1:]
            send_data = data
Example #16
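# Bottle/Tornado server module: imports the visualization service only when enabled in configuration,
# disables Bottle's catchall error handling, and defines the ImmortalsBokehBottleServer adapter that
# attaches a Bokeh VisualizationService to the Tornado IOLoop when visualization is enabled.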
import logging
from threading import Thread

import bottle
from bottle import Bottle
from pymmortals import threadprocessrouter as tpr
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIContainer

from pymmortals.datatypes.root_configuration import get_configuration
from pymmortals.generated.mil.darpa.immortals.core.api.ll.phase1.analyticsevent import AnalyticsEvent
from pymmortals.routing.eventrouter import EventRouter
from .datatypes.routing import EventTags

if get_configuration().visualization.enabled:
    from pymmortals import immortalsglobals as ig
    from .visualization.visualizationservice import VisualizationService

bottle.app.catchall = False


class ImmortalsBokehBottleServer(bottle.ServerAdapter):
    def __init__(self, host='127.0.0.1', port=8080, **config):
        super(ImmortalsBokehBottleServer, self).__init__(host, port, **config)
        if get_configuration().visualization.enabled:
            self._io_loop = IOLoop.instance()
            self._bokeh_server = VisualizationService(
                io_loop=self._io_loop, event_router=ig.get_event_router())
        else:
            self._io_loop_thread = None
Example #17
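    # Returns the test duration: the configured minimum, extended to cover the longest-running pending validator.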
    def get_duration(self):
        duration = get_configuration().validation.minimumTestDurationMS
        for v in list(self._running_pending_validators.values()):
            duration = max(duration, v.run_time_ms())

        return duration
Example #18
        obj = _ThreadProcessInstance(thread=thread, shutdown_method=shutdown_method, shutdown_args=shutdown_args,
                                     identifier=str(thread_method))

        if shutdown_method is None:
            thread.daemon = True

        _thread_process_stack.append(obj)

        thread.start()

    return thread


def start_timer(duration_seconds, shutdown_method=None, shutdown_args=(), halt_on_shutdown=True):
    t = Timer(
        duration_seconds,
        shutdown_method,
        shutdown_args
    )
    obj = _ThreadProcessInstance(halt_on_shutdown=halt_on_shutdown, timer=t, identifier='Timer')
    _thread_process_stack.append(obj)
    t.start()
    return t


global_subprocess = \
    ImmortalsSubprocess(command_processor=subprocess, log_dirpath=get_configuration().artifactRoot, log_tag='global')

ig.add_exit_handler(exit_handler)