Example No. 1
    def __init__(self, **kwargs):
        """ 
            Create a default message.
            kwargs can be used to specify message components
            contenttype, data, groups, nodes, docks, sequence, timestamp, src, srcdock, hmac
        """
        self.msgid = 0
        self.flags = 0
        self.contenttype = kwargs.pop('contenttype', MAGIMessage.NONE)
        self.data = kwargs.pop('data', None)

        self.dstgroups = helpers.toSet(kwargs.pop('groups', set()))
        self.dstnodes = helpers.toSet(kwargs.pop('nodes', set()))
        self.dstdocks = helpers.toSet(kwargs.pop('docks', set()))

        self.sequence = kwargs.pop('sequence', None)
        self.sequenceid = kwargs.pop('sequenceid', None)
        self.timestamp = kwargs.pop('timestamp', None)
        self.hosttime = kwargs.pop('hosttime', None)

        self.src = kwargs.pop('src', None)
        self.srcdock = kwargs.pop('srcdock', None)
        self.hmac = kwargs.pop('hmac', None)

        # Internals, not used on the wire
        self._receivedon = None  # interface we were received on
        self._appendedto = set()  # marks when message is appended to an outgoing queue
        self._routed = None  # marks when a message is routed to particular transports
        self._userargs = {}  # for messages entering locally, the user delivery args
        self._orighdrlen = 0  # for statistics

        if len(kwargs) > 0:
            log.error("Unknown arguments for MAGIMessage (%s)", kwargs)
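Every example on this page funnels its input through helpers.toSet to normalize a value (None, a single string, or any iterable) into a set. The real MAGI implementation is not shown here; the following is a minimal sketch of the behaviour these call sites appear to assume, using a hypothetical to_set stand-in rather than the actual helpers.toSet.

# Hypothetical stand-in for helpers.toSet, inferred only from how the
# call sites on this page use it; the real MAGI implementation may differ.
def to_set(value=None):
    """Normalize value into a set.

    None (or anything falsy) becomes an empty set, a single string becomes
    a one-element set (split on commas, an assumption for command-line style
    input such as options.nodes), and any other iterable is passed to set().
    """
    if not value:
        return set()
    if isinstance(value, str):
        return set(part.strip() for part in value.split(',') if part.strip())
    return set(value)

# to_set(None)            -> set()
# to_set('node1, node2')  -> {'node1', 'node2'}
# to_set(['a', 'b', 'a']) -> {'a', 'b'}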
Example No. 2
	def __init__(self, **kwargs):
		""" 
			Create a default message. 
			kwargs can be used to specify message components
			contenttype, data, groups, nodes, docks, sequence, timestamp, src, srcdock, hmac
		"""
		self.msgid = 0
		self.flags = 0
		self.contenttype = kwargs.pop('contenttype', MAGIMessage.NONE)
		self.data = kwargs.pop('data', None)

		self.dstgroups = helpers.toSet(kwargs.pop('groups', set()))
		self.dstnodes  = helpers.toSet(kwargs.pop('nodes', set()))
		self.dstdocks  = helpers.toSet(kwargs.pop('docks', set()))

		self.sequence  = kwargs.pop('sequence', None)
		self.sequenceid  = kwargs.pop('sequenceid', None)
		self.timestamp = kwargs.pop('timestamp', None)
		self.hosttime = kwargs.pop('hosttime', None)

		self.src       = kwargs.pop('src', None)
		self.srcdock   = kwargs.pop('srcdock', None)
		self.hmac      = kwargs.pop('hmac', None)

		# Internals, not used on the wire
		self._receivedon = None   # interface we were received on
		self._appendedto = set()  # marks when message is appended to an outgoing queue
		self._routed = None		  # marks when a message is routed to particular transports
		self._userargs = {}		  # for messages entering locally, the user delivery args
		self._orighdrlen = 0      # for statistics

		if len(kwargs) > 0:
			log.error("Unknown arguments for MAGIMessage (%s)", kwargs)
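The constructor above follows a common Python pattern: pop each recognised keyword out of kwargs with a default, then treat whatever is left over as an error. A minimal, self-contained illustration of that pattern (the Message class and its fields below are hypothetical, not part of MAGI):

import logging

log = logging.getLogger(__name__)

class Message(object):
    """Toy illustration of the pop-with-default constructor pattern."""

    def __init__(self, **kwargs):
        self.data = kwargs.pop('data', None)         # optional payload
        self.groups = set(kwargs.pop('groups', ()))  # normalized to a set
        # Anything still left in kwargs was not a recognised component.
        if kwargs:
            log.error("Unknown arguments for Message (%s)", kwargs)

m = Message(data='hello', groups=['g1'], bogus=1)  # logs the unknown 'bogus' argument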
Example No. 3
    def getData(self,
                msg,
                agents=None,
                nodes=None,
                filters=dict(),
                timestampChunks=None,
                visited=set()):
        """
            Request to fetch data
        """
        functionName = self.getData.__name__
        helpers.entrylog(log, functionName, locals())

        agents_ = helpers.toSet(agents)
        nodes_ = helpers.toSet(nodes)

        if not nodes_:
            nodes_ = config.getTopoGraph().nodes()

        if not agents_:
            if nodes:
                agents_ = self.getSensorAgents(nodes[0])
            else:
                raise AttributeError(
                    "Cannot query for an empty set of collections.")

        if timestampChunks is None:
            timestampChunks = [(0, time.time())]

        data = dict()
        for agent in agents_:
            data[agent] = dict()
            for node in nodes_:
                filters_copy = filters.copy()
                filters_copy['host'] = node
                nodedata = []
                for tsChunk in timestampChunks:
                    nodedata = nodedata + database.getData(
                        agent, filters_copy, tsChunk, database.configHost(),
                        database.ROUTER_SERVER_PORT)
                data[agent][node] = nodedata

        args = {
            "agents": agents,
            "nodes": nodes,
            "filters": filters,
            "timestampChunks": timestampChunks,
            "visited": visited,
            "data": data
        }
        call = {'version': 1.0, 'method': 'putData', 'args': args}
        log.debug('Creating data message')
        msg = MAGIMessage(nodes=msg.src,
                          docks='dataman',
                          contenttype=MAGIMessage.PICKLE,
                          data=pickle.dumps(call))
        log.debug('Sending message')
        self.messenger.send(msg)

        helpers.exitlog(log, functionName)
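The reply built above is a plain dictionary (version, method, args) serialized with pickle and addressed to the dataman dock of the requesting node. Below is a minimal sketch of how a receiver might decode and dispatch such a payload; only the dictionary layout comes from the code above, while the dispatcher and the putData handler are assumptions.

import pickle

# What getData() above serializes into msg.data:
call = {'version': 1.0, 'method': 'putData',
        'args': {'agents': ['pingsensor'], 'nodes': ['node-1'], 'data': {}}}
payload = pickle.dumps(call)

# Hypothetical receiver side (not MAGI code): decode the payload and route it
# to a handler named after the 'method' field.
def putData(agents=None, nodes=None, filters=None,
            timestampChunks=None, visited=None, data=None):
    print("received data for agents %s from nodes %s" % (agents, nodes))

decoded = pickle.loads(payload)
handler = {'putData': putData}[decoded['method']]
handler(**decoded['args'])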
Example No. 4
 def getData(self, msg, agents=None, nodes=None, filters=dict(), timestampChunks=None, visited=set()):
     """
         Request to fetch data
     """
     functionName = self.getData.__name__
     helpers.entrylog(log, functionName, locals())
     
     agents_ = helpers.toSet(agents)
     nodes_ = helpers.toSet(nodes)
     
     if not nodes_:
         nodes_ = config.getTopoGraph().nodes()
         
     if not agents_:
         if nodes:
             agents_ = self.getSensorAgents(nodes[0])
         else:
             raise AttributeError("Cannot query for an empty set of collections.")
     
     if timestampChunks is None:
         timestampChunks = [(0, time.time())]
     
     data = dict()
     for agent in agents_:
         data[agent] = dict()
         for node in nodes_:
             filters_copy = filters.copy()
             filters_copy['host'] = node
             nodedata = []
             for tsChunk in timestampChunks:
                 nodedata = nodedata + database.getData(agent, 
                                                        filters_copy, 
                                                        tsChunk, 
                                                        database.configHost(), 
                                                        database.ROUTER_SERVER_PORT)
             data[agent][node] = nodedata
     
     args = {
         "agents": agents,
         "nodes": nodes,
         "filters": filters,
         "timestampChunks": timestampChunks,
         "visited": visited,
         "data": data
     }
     call = {'version': 1.0, 'method': 'putData', 'args': args}
     log.debug('Creating data message')
     msg = MAGIMessage(nodes=msg.src, docks='dataman', contenttype=MAGIMessage.PICKLE, data=pickle.dumps(call))
     log.debug('Sending message')
     self.messenger.send(msg)
     
     helpers.exitlog(log, functionName)
Example No. 5
def recieveMessages(messaging, nodeSet, timeout=30):
    
    result = dict()
    nodes = helpers.toSet(value=nodeSet.copy())
    
    # Wait for timeout seconds before stopping 
    start = time.time()
    stop = start + int(timeout) 
    current = start

    # Wait in a loop for timeout seconds 
    while current < stop: 
        current = time.time()
        try:
            msg = messaging.nextMessage(True, timeout=1)
            log.debug(msg)
            if msg.src != CLIENT_NAME:
                log.info('Node %s' %(msg.src))
                result[msg.src] = yaml.load(msg.data)
                nodes.discard(msg.src)
        # If there are no messages in the Queue, just wait some more 
        except Queue.Empty:
            #check if there is need to wait any more
            if len(nodes) == 0:
                break
            
    return result
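The loop above polls the messenger with a one-second timeout per call until either an overall deadline passes or every expected node has replied. The same pattern in isolation, with a plain Queue standing in for the messenger (the node names and payloads are made up for illustration; on Python 3 the module is called queue):

import time
import Queue  # 'queue' on Python 3

incoming = Queue.Queue()
for node in ('node-1', 'node-2'):
    incoming.put((node, 'payload from %s' % node))

expected = set(['node-1', 'node-2'])
replies = {}
deadline = time.time() + 30  # overall timeout in seconds

while time.time() < deadline:
    try:
        src, payload = incoming.get(True, timeout=1)  # block at most 1s per poll
        replies[src] = payload
        expected.discard(src)
    except Queue.Empty:
        pass
    if not expected:  # stop early once every expected node has replied
        break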
Example No. 6
def recieveMessages(messaging, nodeSet, timeout=30):

    result = dict()
    nodes = helpers.toSet(value=nodeSet.copy())

    # Wait for timeout seconds before stopping
    start = time.time()
    stop = start + int(timeout)
    current = start

    # Wait in a loop for timeout seconds
    while current < stop:
        current = time.time()
        try:
            msg = messaging.nextMessage(True, timeout=1)
            log.debug(msg)
            if msg.src != CLIENT_NAME:
                log.info('Node %s' % (msg.src))
                result[msg.src] = yaml.load(msg.data)
                nodes.discard(msg.src)
        # If there are no messages in the Queue, just wait some more
        except Queue.Empty:
            #check if there is need to wait any more
            if len(nodes) == 0:
                break

    return result
Example No. 7
def getEventTriggers(triggers):
    triggers = helpers.toSet(triggers)
    eventTriggers = set()
    for trigger in triggers:
        if isinstance(trigger, TimeoutTrigger):
            continue
        elif isinstance(trigger, EventTrigger):
            eventTriggers.add(trigger)
        else:
            eventTriggers.update(getEventTriggers(trigger.triggers))
    return eventTriggers
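getEventTriggers flattens a collection of triggers into the set of plain event triggers it contains: TimeoutTriggers are skipped, EventTriggers are collected, and anything else is treated as a composite exposing a .triggers attribute (such as a nested TriggerList) and recursed into. Here is a self-contained sketch with minimal stand-in classes; these are not the MAGI trigger classes, just enough structure to show the recursion.

class EventTrigger(object):        # stand-in: a leaf trigger fired by a named event
    def __init__(self, event):
        self.event = event

class TimeoutTrigger(object):      # stand-in: ignored by getEventTriggers
    def __init__(self, timeout):
        self.timeout = timeout

class TriggerList(object):         # stand-in: composite exposing .triggers
    def __init__(self, triggers):
        self.triggers = triggers

def getEventTriggers(triggers):
    eventTriggers = set()
    for trigger in triggers:
        if isinstance(trigger, TimeoutTrigger):
            continue
        elif isinstance(trigger, EventTrigger):
            eventTriggers.add(trigger)
        else:
            eventTriggers.update(getEventTriggers(trigger.triggers))
    return eventTriggers

nested = [EventTrigger('GroupBuildDone'),
          TriggerList([EventTrigger('AgentLoadDone'), TimeoutTrigger(20000)])]
print(sorted(t.event for t in getEventTriggers(nested)))
# ['AgentLoadDone', 'GroupBuildDone']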
Example No. 8
    def __init__(self, triggerData):
        functionName = self.__init__.__name__
        helpers.entrylog(log, functionName, locals())

        Trigger.__init__(self, triggerData)
        self.event = triggerData.pop('event')
        self.nodes = helpers.toSet(triggerData.pop('nodes', None))
        self.count = triggerData.pop('count', max(len(self.nodes), 1))
        self.args = triggerData
        if not self.args:
            self.args = {'retVal': True}
Example No. 9
def getEventTriggers(triggers):
    triggers = helpers.toSet(triggers)
    eventTriggers = set()
    for trigger in triggers:
        if isinstance(trigger, TimeoutTrigger):
            continue
        elif isinstance(trigger, EventTrigger):
            eventTriggers.add(trigger)
        else:
            eventTriggers.update(getEventTriggers(trigger.triggers))
    return eventTriggers
Example No. 10
 def __init__(self, triggerData):
     functionName = self.__init__.__name__
     helpers.entrylog(log, functionName, locals())
 
     Trigger.__init__(self, triggerData)
     self.event = triggerData.pop('event')
     self.nodes = helpers.toSet(triggerData.pop('nodes', None))
     self.count = triggerData.pop('count', max(len(self.nodes), 1))
     self.args = triggerData
     if not self.args:
         self.args = {'retVal' : True}
Example No. 11
 if options.bridge:
     bridgeNode = options.bridge
     bridgePort = options.port
 elif options.config or (options.project and options.experiment):
     (bridgeNode, bridgePort) = helpers.getBridge(
                                     experimentConfigFile=options.config, 
                                     project=options.project, 
                                     experiment=options.experiment)
 else:
     optparser.print_help()
     optparser.error("Missing bridge information and "
                         "experiment configuration information")
         
 nodeSet = set() 
 if options.nodes:
     nodeSet = helpers.toSet(options.nodes)
 if options.aal:
     nodeSet.update(helpers.getNodesFromAAL(options.aal))
 if not nodeSet and (options.config or (options.project and options.experiment)):
     nodeSet.update(helpers.getMagiNodeList(
                                     experimentConfigFile=options.config, 
                                     project=options.project, 
                                     experiment=options.experiment))
     
 if options.logs:
     (status, result) = getLogsArchive(bridgeNode=bridgeNode, 
                                       bridgePort=bridgePort, 
                                       nodeSet=nodeSet, 
                                       outputdir=options.logoutdir)
     log.info("Received logs stored under %s" %(options.logoutdir))
     exit(0)
Example No. 12
    def __init__(self,
                 files=None,
                 data=None,
                 groupBuildTimeout=20000,
                 dagdisplay=False,
                 triggerCheck=False):
        """
            Create a new AAL object using either files or a
            string object (data).
            The init function parses the yaml file and creates
            the list of events and triggers that form each event stream.
            Additionally, it also creates the control graph that can be
            visualized later.
        """

        # TODO: currently the startup stream is always setup for an AAL
        # Later the experiment may or may not have a startup phase
        self.startup = True
        self.agentLoadTimeout = 200000

        try:
            yaml_file = cStringIO.StringIO()
            read_data = False
            for f in files:
                # we concatenate the given files.
                # This allows us to parse all the files as a single YAML
                # string. PyYAML does not support multidocument YAML
                # documents, otherwise we could separate these files explicitly
                # with the yaml document separator, '---'.
                with open(f, 'r') as fd:
                    yaml_file.write(fd.read())
                    read_data = True

            if not read_data:  # There is a more elegant way to do this.
                log.critical('Yaml Parse Error: reading event AAL files.')
                sys.exit(1)

            self.rawAAL = yaml.load(yaml_file.getvalue())

            #Pointer to streams
            self.setupStreams = []
            self.teardownStreams = []
            self.userEventStreams = []

            #Stream name to object map
            self.streamMap = dict()

            #Incoming event triggers keyed by stream name
            self.ieventtriggers = defaultdict(set)
            #Outgoing event triggers keyed by stream name
            self.oeventtriggers = defaultdict(set)

            # Sanity Check: does the AAL have the following directives.
            # if not, log that they are missing but continue
            for k in ['streamstarts', 'agents', 'groups', 'eventstreams']:
                if k not in self.rawAAL:
                    log.critical('missing required key in AAL: %s', k)

            # Add default group to address ALL nodes
            allNodes = set()
            for nodes in self.rawAAL['groups'].values():
                allNodes |= helpers.toSet(nodes)
            self.rawAAL['groups']['__ALL__'] = list(allNodes)

            # Add MAGI Daemon on all nodes as a default agent
            self.rawAAL['agents'].setdefault('__DAEMON__', {
                'group': '__ALL__',
                'dock': 'daemon'
            })

            # The AAL extra-YAML references
            self._resolveReferences()

            ##### STARTUP STREAM #####

            # Define startup stream
            # By default we add a startup stream
            if self.startup:
                # Stand up the experiment, load agents, build groups.

                groupBuildStream = Stream('groupBuildStream')
                self.setupStreams.append(groupBuildStream)
                self.streamMap['groupBuildStream'] = groupBuildStream

                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__':
                        continue  # all nodes by default receive messages sent to the '__ALL__' group
                    groupBuildStream.append(BuildGroupCall(name, nodes))

                # Add triggers for the BuildGroup calls
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__':
                        continue  # no GroupBuild message sent for '__ALL__' group
                    groupBuildStream.append(
                        TriggerList([{
                            'event': 'GroupBuildDone',
                            'group': name,
                            'nodes': nodes
                        }, {
                            'timeout': int(groupBuildTimeout),
                            'target': 'exit'
                        }]))

                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__':
                        continue  # all nodes by default receive messages sent to the '__ALL__' group
                    groupBuildStream.append(GroupPingCall(name))

                # Add triggers for the BuildGroup calls
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__':
                        continue  # no GroupBuild message sent for '__ALL__' group
                    groupBuildStream.append(
                        TriggerList([{
                            'event': 'GroupPong',
                            'group': name,
                            'nodes': nodes
                        }, {
                            'timeout': int(groupBuildTimeout),
                            'target': 'groupBuildStream'
                        }]))

                loadAgentStream = Stream('loadAgentStream')
                self.setupStreams.append(loadAgentStream)
                self.streamMap['loadAgentStream'] = loadAgentStream

                for name, agent in self.rawAAL['agents'].iteritems():
                    # for agents that need to be installed
                    if 'path' in agent or 'tardata' in agent:
                        # create an internal agent dock using unique name of agent.
                        # if specified in the AAL, do not do this.
                        if not 'dock' in agent:
                            agent['dock'] = name + '_dock'
                        if not 'code' in agent:
                            agent['code'] = name + '_code'

                        # Add event call to load agent
                        loadAgentStream.append(LoadAgentCall(name, **agent))

                    else:
                        if not 'dock' in agent:
                            agent['dock'] = 'daemon'

                # Now, add the load agent triggers
                for name, agent in self.rawAAL['agents'].iteritems():
                    # for agents that need to be installed
                    if 'path' in agent or 'tardata' in agent:

                        # Add triggers to ensure the agents are loaded correctly
                        # However, add them only after all the load agent events
                        timeout = agent.get('loadTimeout',
                                            self.agentLoadTimeout)
                        loadAgentStream.append(
                            TriggerList([{
                                'event': 'AgentLoadDone',
                                'agent': name,
                                'nodes': self.rawAAL['groups'][agent['group']]
                            }, {
                                'timeout': int(timeout),
                                'target': 'exit'
                            }]))

            ##### TEARDOWN STREAM #####

            # We always define a teardown stream as jumping to target exit
            # activates this stream
            # tear down the experiment, unload agents, leave groups.

            unloadAgentStream = Stream('unloadAgentStream')
            self.teardownStreams.append(unloadAgentStream)
            self.streamMap['unloadAgentStream'] = unloadAgentStream

            # Add unload agent events
            for name, agent in self.rawAAL['agents'].iteritems():
                if 'path' in agent or 'tardata' in agent:
                    unloadAgentStream.append(UnloadAgentCall(name, **agent))

            # Add triggers to ensure the agents are unloaded correctly
            # However, add them only after all the unload agent events
            # Use the same timeouts as setup stream
            for name, agent in self.rawAAL['agents'].iteritems():
                if 'path' in agent or 'tardata' in agent:
                    timeout = agent.get('loadTimeout', self.agentLoadTimeout)
                    unloadAgentStream.append(
                        TriggerList([{
                            'event': 'AgentUnloadDone',
                            'agent': name,
                            'nodes': self.rawAAL['groups'][agent['group']]
                        }, {
                            'timeout': int(timeout),
                            'target': 'exit'
                        }]))

            groupLeaveStream = Stream('groupLeaveStream')
            self.teardownStreams.append(groupLeaveStream)
            self.streamMap['groupLeaveStream'] = groupLeaveStream

            # Add leave group events
            for name, nodes in self.rawAAL['groups'].iteritems():
                if name == '__ALL__':
                    continue  # no GroupBuild message sent for '__ALL__' group
                groupLeaveStream.append(LeaveGroupCall(name, nodes))

            # Add triggers to ensure groups are left correctly
            for name, nodes in self.rawAAL['groups'].iteritems():
                if name == '__ALL__':
                    continue  # no LeaveGroup message sent for '__ALL__' group
                groupLeaveStream.append(
                    TriggerList([{
                        'event': 'GroupTeardownDone',
                        'group': name,
                        'nodes': nodes
                    }, {
                        'timeout': int(groupBuildTimeout),
                        'target': 'exit'
                    }]))

            ##### EVENT STREAMS #####

            for streamName, estream in self.rawAAL['eventstreams'].iteritems():
                newstream = Stream(streamName)
                self.userEventStreams.append(newstream)
                self.streamMap[streamName] = newstream
                for event in estream:
                    # The eventstream consists of triggers and events.
                    # First we process the type trigger, then event.
                    # we log errors if it is not an event or trigger.
                    if event['type'] == 'event':
                        agent = self.rawAAL['agents'][event['agent']]
                        newstream.append(EventMethodCall(agent, event))
                        if 'trigger' in event:
                            self.oeventtriggers[streamName].add(
                                event['trigger'])

                    elif event['type'] == 'trigger':
                        triggerList = TriggerList(event['triggers'])
                        newstream.append(triggerList)
                        self.ieventtriggers[streamName].update(
                            set([
                                trigger.event
                                for trigger in getEventTriggers(triggerList)
                            ]))

                    else:
                        log.warning("Skipping unknown stream entry type %s",
                                    event['type'])

            outGoingEventTriggers = set()
            for triggerSet in self.oeventtriggers.values():
                outGoingEventTriggers |= triggerSet

            inComingEventTriggers = set()
            for triggerSet in self.ieventtriggers.values():
                inComingEventTriggers |= triggerSet

            if inComingEventTriggers.issubset(outGoingEventTriggers):
                log.info('Incoming event triggers are a subset of outgoing event triggers')
            elif triggerCheck:
                log.error(
                    'Incoming event triggers are not a subset of outgoing event triggers'
                )
                raise AALParseError(
                    'Incoming event triggers are not a subset of outgoing event triggers'
                )

            self.cgraph = ControlGraph()
            for eventStream in self.userEventStreams:
                streamName = eventStream.name
                cluster = self.cgraph.createCluster(streamName)
                cluster.nodes.append({
                    'type': 'node',
                    'id': streamName,
                    'label': streamName,
                    'edgeto': set()
                })
                for event in eventStream:
                    if isinstance(event, EventObject):
                        self.cgraph.addEvent(streamName, event)
                    elif isinstance(event, TriggerList):
                        self.cgraph.addTrigger(streamName, event)
                self.cgraph.finishCluster(streamName)
            # the setup and tear-down event streams are presented as singleton events
            self.cgraph.finishControlgraph()

            if dagdisplay:
                print "dagdisplay True, creating graph"
                self.cgraph.writePng()
                #print self.cgraph

        except Exception, e:
            import traceback
            exc_type, exc_value, exc_tb = sys.exc_info()
            log.error(''.join(
                traceback.format_exception(exc_type, exc_value, exc_tb)))
            raise AALParseError("Exception while parsing AAL: %s" % str(e))
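After yaml.load, self.rawAAL is a plain dictionary. Restricted to the keys this constructor actually reads, a minimal AAL would deserialize to something like the structure below; the concrete values, and fields such as the event's method name, are assumptions about typical AAL content rather than anything shown above.

# Hypothetical result of yaml.load() for a minimal AAL, limited to the keys
# the constructor above consumes; values are purely illustrative.
rawAAL = {
    'streamstarts': ['main'],                     # streams kicked off at start
    'groups': {
        'client_group': ['node-1', 'node-2'],     # group name -> node list
    },
    'agents': {
        'pinger': {
            'group': 'client_group',              # used to derive trigger nodes
            'path': '/tmp/pinger',                # presence of 'path' => agent is loaded/unloaded
            'loadTimeout': 60000,                 # optional per-agent load timeout
        },
    },
    'eventstreams': {
        'main': [
            {'type': 'event',                     # handled by EventMethodCall
             'agent': 'pinger',
             'method': 'startPings',              # assumed field consumed by EventMethodCall
             'trigger': 'pingsStarted'},          # registers an outgoing trigger
            {'type': 'trigger',                   # handled by TriggerList
             'triggers': [{'event': 'pingsStarted', 'nodes': ['node-1', 'node-2']},
                          {'timeout': 30000, 'target': 'exit'}]},
        ],
    },
}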
Example No. 13
    if options.bridge:
        bridgeNode = options.bridge
        bridgePort = options.port
    elif options.config or (options.project and options.experiment):
        (bridgeNode,
         bridgePort) = helpers.getBridge(experimentConfigFile=options.config,
                                         project=options.project,
                                         experiment=options.experiment)
    else:
        optparser.print_help()
        optparser.error("Missing bridge information and "
                        "experiment configuration information")

    nodeSet = set()
    if options.nodes:
        nodeSet = helpers.toSet(options.nodes)
    if options.aal:
        nodeSet.update(helpers.getNodesFromAAL(options.aal))
    if not nodeSet and (options.config or
                        (options.project and options.experiment)):
        nodeSet.update(
            helpers.getMagiNodeList(experimentConfigFile=options.config,
                                    project=options.project,
                                    experiment=options.experiment))

    if options.logs:
        (status, result) = getLogsArchive(bridgeNode=bridgeNode,
                                          bridgePort=bridgePort,
                                          nodeSet=nodeSet,
                                          outputdir=options.logoutdir)
        log.info("Received logs stored under %s" % (options.logoutdir))
Example No. 14
    def __init__(self, files=None, data=None, groupBuildTimeout=20000, dagdisplay=False, triggerCheck=False):
        """
            Create a new AAL object using either files or a
            string object (data).
            The init function parses the yaml file and creates
            the list of events and triggers that form each event stream.
            Additionally, it also creates the control graph that can be
            visualized later.
        """

        # TODO: currently the startup stream is always setup for an AAL 
        # Later the experiment may or may not have a startup phase 
        self.startup = True 
        self.agentLoadTimeout = 200000
        
        try:
            yaml_file = cStringIO.StringIO()
            read_data = False
            for f in files:
                # we concatenate the given files. 
                # This allows us to parse all the files as a single YAML
                # string. PyYAML does not support multidocument YAML 
                # documents, otherwise we could separate these files explicitly
                # with the yaml document separator, '---'. 
                with open(f, 'r') as fd:
                    yaml_file.write(fd.read())
                    read_data = True
    
            if not read_data:  # There is a more elegant way to do this.
                log.critical('Yaml Parse Error: reading event AAL files.')
                sys.exit(1)
    
            self.rawAAL = yaml.load(yaml_file.getvalue())
            
            #Pointer to streams
            self.setupStreams = []
            self.teardownStreams = []
            self.userEventStreams = []
            
            #Stream name to object map
            self.streamMap = dict()
            
            #Incoming event triggers keyed by stream name
            self.ieventtriggers = defaultdict(set) 
            #Outgoing event triggers keyed by stream name
            self.oeventtriggers = defaultdict(set) 
    
            # Sanity Check: does the AAL have the following directives. 
            # if not, log that they are missing but continue 
            for k in ['streamstarts', 'agents', 'groups', 'eventstreams']:
                if k not in self.rawAAL:
                    log.critical('missing required key in AAL: %s', k)
    
            # Add default group to address ALL nodes
            allNodes = set()
            for nodes in self.rawAAL['groups'].values():
                allNodes |= helpers.toSet(nodes)
            self.rawAAL['groups']['__ALL__'] = list(allNodes)
            
            # Add MAGI Daemon on all nodes as a default agent
            self.rawAAL['agents'].setdefault('__DAEMON__', {'group' : '__ALL__',
                                                         'dock' : 'daemon'})
            
            # The AAL extra-YAML references
            self._resolveReferences()
            
            ##### STARTUP STREAM #####
            
            # Define startup stream 
            # By default we add a startup stream 
            if self.startup:
                # Stand up the experiment, load agents, build groups.
            
                groupBuildStream = Stream('groupBuildStream')
                self.setupStreams.append(groupBuildStream)
                self.streamMap['groupBuildStream'] = groupBuildStream
                
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__': continue # all nodes by default receive messages sent to the '__ALL__' group
                    groupBuildStream.append(BuildGroupCall(name, nodes))
    
                # Add triggers for the BuildGroup calls 
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__': continue # no GroupBuild message sent for '__ALL__' group
                    groupBuildStream.append(
                        TriggerList([
                            {'event': 'GroupBuildDone', 'group': name, 
                             'nodes': nodes},
                            {'timeout': int(groupBuildTimeout), 
                             'target': 'exit'}]))
                
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__': continue # all nodes by default receive messages sent to the '__ALL__' group
                    groupBuildStream.append(GroupPingCall(name))
                    
                # Add triggers for the BuildGroup calls 
                for name, nodes in self.rawAAL['groups'].iteritems():
                    if name == '__ALL__': continue # no GroupBuild message sent for '__ALL__' group
                    groupBuildStream.append(
                        TriggerList([
                            {'event': 'GroupPong', 'group': name, 
                             'nodes': nodes},
                            {'timeout': int(groupBuildTimeout), 
                             'target': 'groupBuildStream'}]))
                    
                loadAgentStream = Stream('loadAgentStream')
                self.setupStreams.append(loadAgentStream)
                self.streamMap['loadAgentStream'] = loadAgentStream
                
                for name, agent in self.rawAAL['agents'].iteritems():
                    # for agents that need to be installed
                    if 'path' in agent or 'tardata' in agent:
                        # create an internal agent dock using unique name of agent. 
                        # if specified in the AAL, do not do this. 
                        if not 'dock' in agent:
                            agent['dock'] = name + '_dock'
                        if not 'code' in agent:
                            agent['code'] = name + '_code' 
        
                        # Add event call to load agent
                        loadAgentStream.append(LoadAgentCall(name, **agent))
                        
                    else:
                        if not 'dock' in agent:
                            agent['dock'] = 'daemon'
                        
                # Now, add the load agent triggers
                for name, agent in self.rawAAL['agents'].iteritems():
                    # for agents that need to be installed
                    if 'path' in agent or 'tardata' in agent:
                        
                        # Add triggers to ensure the agents are loaded correctly 
                        # However, add them only after all the load agent events
                        timeout = agent.get('loadTimeout', self.agentLoadTimeout)
                        loadAgentStream.append(
                            TriggerList([
                                {'event': 'AgentLoadDone', 'agent': name, 
                                 'nodes': self.rawAAL['groups'][agent['group']]},
                                {'timeout': int(timeout), 'target': 'exit'} 
                                 ]))
    
            ##### TEARDOWN STREAM #####
            
            # We always define a teardown stream as jumping to target exit 
            # activates this stream 
            # tear down the experiment, unload agents, leave groups.
            
            unloadAgentStream = Stream('unloadAgentStream')
            self.teardownStreams.append(unloadAgentStream)
            self.streamMap['unloadAgentStream'] = unloadAgentStream
                
            # Add unload agent events
            for name, agent in self.rawAAL['agents'].iteritems():
                if 'path' in agent or 'tardata' in agent:
                    unloadAgentStream.append(UnloadAgentCall(name, **agent))
    
            # Add triggers to ensure the agents are unloaded correctly 
            # However, add them only after all the unload agent events
            # Use the same timeouts as setup stream
            for name, agent in self.rawAAL['agents'].iteritems():
                if 'path' in agent or 'tardata' in agent:
                    timeout = agent.get('loadTimeout', self.agentLoadTimeout)
                    unloadAgentStream.append(
                        TriggerList([
                                {'event': 'AgentUnloadDone', 'agent': name,
                                'nodes': self.rawAAL['groups'][agent['group']]},
                                {'timeout': int(timeout), 'target': 'exit'}
                                ]))
            
            groupLeaveStream = Stream('groupLeaveStream')
            self.teardownStreams.append(groupLeaveStream)
            self.streamMap['groupLeaveStream'] = groupLeaveStream
                
            # Add leave group events
            for name, nodes in self.rawAAL['groups'].iteritems():
                if name == '__ALL__': continue # no GroupBuild message sent for '__ALL__' group    
                groupLeaveStream.append(LeaveGroupCall(name, nodes))
                
            # Add triggers to ensure groups are left correctly
            for name, nodes in self.rawAAL['groups'].iteritems():
                if name == '__ALL__': continue # no LeaveGroup message sent for '__ALL__' group
                groupLeaveStream.append(
                    TriggerList([
                        {'event': 'GroupTeardownDone', 'group': name, 
                         'nodes': nodes},
                        {'timeout': int(groupBuildTimeout), 'target': 'exit'}
                        ]))
    
    
            ##### EVENT STREAMS #####
    
            for streamName, estream in self.rawAAL['eventstreams'].iteritems():
                newstream = Stream(streamName)
                self.userEventStreams.append(newstream)
                self.streamMap[streamName] = newstream
                for event in estream:
                    # The eventstream consists of triggers and events. 
                    # First we process the type trigger, then event. 
                    # we log errors if it is not an event or trigger.
                    if event['type'] == 'event':
                        agent = self.rawAAL['agents'][event['agent']]
                        newstream.append(EventMethodCall(agent, event))
                        if 'trigger' in event:
                            self.oeventtriggers[streamName].add(event['trigger'])
                            
                    elif event['type'] == 'trigger':
                        triggerList = TriggerList(event['triggers'])
                        newstream.append(triggerList)
                        self.ieventtriggers[streamName].update(set([trigger.event 
                                                             for trigger in 
                                                             getEventTriggers(triggerList)]))
                            
                    else:
                        log.warning("Skipping unknown stream entry type %s",
                                    event['type'])
            
            
            outGoingEventTriggers = set()
            for triggerSet in self.oeventtriggers.values():
                outGoingEventTriggers |= triggerSet
                
            inComingEventTriggers = set()
            for triggerSet in self.ieventtriggers.values():
                inComingEventTriggers |= triggerSet
                
            if inComingEventTriggers.issubset(outGoingEventTriggers):
                log.info('Incoming event triggers are a subset of outgoing event triggers')
            elif triggerCheck:
                log.error('Incoming event triggers are not a subset of outgoing event triggers')
                raise AALParseError('Incoming event triggers are not a subset of outgoing event triggers')
            
            self.cgraph = ControlGraph() 
            for eventStream in self.userEventStreams:
                streamName = eventStream.name
                cluster = self.cgraph.createCluster(streamName)
                cluster.nodes.append({'type' : 'node',
                                      'id' : streamName,
                                      'label' : streamName,
                                      'edgeto' : set()})
                for event in eventStream:
                    if isinstance(event, EventObject):
                        self.cgraph.addEvent(streamName, event)
                    elif isinstance(event, TriggerList):
                        self.cgraph.addTrigger(streamName, event)
                self.cgraph.finishCluster(streamName)
            # the setup and tear-down event streams are presented as singleton events
            self.cgraph.finishControlgraph()
    
            if dagdisplay:
                print "dagdisplay True, creating graph" 
                self.cgraph.writePng()    
                #print self.cgraph
                
        except Exception, e:
            import traceback
            exc_type, exc_value, exc_tb = sys.exc_info()
            log.error(''.join(traceback.format_exception(exc_type, exc_value, exc_tb)))
            raise AALParseError("Exception while parsing AAL: %s" %str(e))