Example #1
    def _load_resource_Dataset(self, action_cfg):
        # Skip resources that were already created (e.g. as part of a bulk preload)
        if action_cfg[KEY_ID] in self.resource_ids:
            return
        # Resolve the referenced schema definition, then apply any inline overrides on top of it
        schema_def = {}
        if "schema_def" in action_cfg:
            schema_def = DataSchemaParser.parse_schema_ref(action_cfg["schema_def"])
        if "schema_override" in action_cfg:
            dict_merge(schema_def, action_cfg["schema_override"], inplace=True)
        # Create the Dataset resource with the merged schema, then any configured associations
        res_id = self.basic_resource_create(action_cfg, RT.Dataset, "resource_registry", "create", support_bulk=True,
                                            set_attributes=dict(schema_definition=schema_def))
        self.basic_associations_create(action_cfg, action_cfg[KEY_ID], support_bulk=True)
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(
                first_name="John", last_name="Doe",
                username="******", password="******", email="*****@*****.**")

        # Create instrument
        agent_info=[dict(agent_type="data_agent",
                         config=dict(plugin="scion.agent.model.vmmon.vmmon_plugin.VMMON_DataAgentPlugin",
                                     sampling_interval=0.5, stream_name="basic_streams",
                                     auto_streaming=False))]
        inst_obj = Instrument(name="Sensor 1", description="CPU monitor",
                              location=GeospatialLocation(latitude=32.867079, longitude=-117.257324),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_vmmon_main")
        ds_obj = Dataset(name="Dataset Sensor 1",
                         schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

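        # No HDF5 file should exist for the dataset until the agent actually streams data into it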
        ds_filename = self.container.file_system.get("%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
        self.assertFalse(os.path.exists(ds_filename))

        inst_data_t0 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t0["dataset_id"], ds_id)
        self.assertEquals(inst_data_t0["num_rows"], 0)

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0
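        # The callback below counts packets and rows so the totals can be compared against the dataset contents later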
        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s", len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])
        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc, stream="basic_streams", callback=process_packet_cb)
        self.stream_sub.start()

        self.addCleanup(cleanup_stream_sub)

        # Start agent
        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id, process=self.scion_proc)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        sac.connect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        sac.start_streaming()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_STREAMING)

        # Retrieve data
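        # With a 0.5 s sampling interval, ~1.1 s of streaming should be enough for at least two persisted rows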
        gevent.sleep(1.1)
        self.assertTrue(os.path.exists(ds_filename))

        inst_data_t1 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t1["dataset_id"], ds_id)
        # self.assertEquals(inst_data_t1["variables"], ['time', 'cpu_percent'])
        self.assertIn("cpu_percent", inst_data_t1["data"])
        num_rows_t1 = inst_data_t1["num_rows"]
        self.assertGreaterEqual(num_rows_t1, 2)

        gevent.sleep(1)
        inst_data_t2 = self.scion_client.get_asset_data(inst_id)
        num_rows_t2 = inst_data_t2["num_rows"]
        self.assertGreater(num_rows_t2, num_rows_t1)

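        # Packet and row counts seen on the stream should closely track the rows persisted in the dataset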
        self.assertGreaterEqual(len(self.recv_packets), self.recv_rows)
        self.assertLessEqual(abs(self.recv_rows - num_rows_t2), 2)

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        sac.disconnect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))
Example #3
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(first_name="John",
                                                 last_name="Doe",
                                                 username="******",
                                                 password="******",
                                                 email="*****@*****.**")

        inst_ids = []
        ds_ids = []
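        # 'sources' is presumably a module-level list of ORB source selectors; it is not shown in this snippet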
        for source in sources:

            # Instrument
            agent_info = [
                dict(
                    agent_type="data_agent",
                    config=dict(
                        plugin=
                        "scion.agent.model.orb.orb_plugin.Orb_DataAgentPlugin",
                        sampling_interval=0.5,
                        stream_name="basic_streams",
                        auto_streaming=False))
            ]
            inst_obj = Instrument(
                name=source,
                description="Multiplexed generic compressed data frame packet",
                location=GeospatialLocation(latitude=42.867079,
                                            longitude=-127.257324),
                agent_info=agent_info)
            inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)
            inst_ids.append(inst_id)

            # Dataset
            schema_def = DataSchemaParser.parse_schema_ref("ds_orb_mgenc_m40")
            ds_obj = Dataset(name=source, schema_definition=schema_def)
            ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)
            self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

            ds_filename = self.container.file_system.get(
                "%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
            self.assertFalse(os.path.exists(ds_filename))

            inst_data_t0 = self.scion_client.get_asset_data(inst_id)
            self.assertEquals(inst_data_t0["dataset_id"], ds_id)
            self.assertEquals(inst_data_t0["num_rows"], 0)
            ds_ids.append(ds_id)

        # Install a data packet catcher
        # TODO

        # Start agent
        sacs = []
        for idx, source in enumerate(sources):
            self.assertFalse(
                StreamingAgentClient.is_agent_active(inst_ids[idx]))
            agent_pid = self.scion_client.start_agent(inst_ids[idx])
            self.assertTrue(StreamingAgentClient.is_agent_active(
                inst_ids[idx]))

            sac = StreamingAgentClient(resource_id=inst_ids[idx],
                                       process=self.scion_proc)
            agent_status = sac.get_status()
            self.assertEquals(agent_status["current_state"],
                              StreamingAgent.AGENTSTATE_INITIALIZED)

            sac.connect()
            agent_status = sac.get_status()
            self.assertEquals(agent_status["current_state"],
                              StreamingAgent.AGENTSTATE_CONNECTED)

            # Coming in from the agent config.
            streaming_args = {
                #'orb_name' : 'taexport.ucsd.edu:usarrayTA',
                'orb_name': 'ceusnexport.ucsd.edu:usarray',
                'select': source,
                '--timeout': 5,
                'sample_interval': 5
            }

            sac.start_streaming(streaming_args)
            agent_status = sac.get_status()
            self.assertEquals(agent_status["current_state"],
                              StreamingAgent.AGENTSTATE_STREAMING)
            sacs.append(sac)

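        # Let the live ORB feeds stream for two minutes; live content varies per run, so no data assertions are made here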
        gevent.sleep(120)

        # Take down agent
        for idx, sac in enumerate(sacs):
            #sac.stop_streaming()  # Not required to stop agent, just to test here
            #agent_status = sac.get_status()
            #self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

            #sac.disconnect()
            #agent_status = sac.get_status()
            #self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

            self.scion_client.stop_agent(inst_ids[idx])
            self.assertFalse(
                StreamingAgentClient.is_agent_active(inst_ids[idx]))
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(first_name="John",
                                                 last_name="Doe",
                                                 username="******",
                                                 password="******",
                                                 email="*****@*****.**")

        # Create instrument
        agent_info = [
            dict(agent_type="data_agent",
                 config=dict(
                     plugin=
                     "scion.agent.model.cdip.cdip_plugin.CDIP_DataAgentPlugin",
                     sampling_interval=10,
                     stream_name="basic_streams",
                     auto_streaming=False))
        ]
        inst_obj = Instrument(name="TA_121A/MGENC/M40",
                              description="CDIP buoy data",
                              location=GeospatialLocation(
                                  latitude=37.94831666666667,
                                  longitude=-123.4675),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_cdip01_main")
        ds_obj = Dataset(name="Dataset Sensor", schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

        ds_filename = self.container.file_system.get(
            "%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
        self.assertFalse(os.path.exists(ds_filename))

        inst_data_t0 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t0["dataset_id"], ds_id)
        self.assertEquals(inst_data_t0["num_rows"], 0)

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0

        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s",
                     len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])
            #log.info('Packet data: ' + str(packet.data))
        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc,
                                           stream="basic_streams",
                                           callback=process_packet_cb)
        self.stream_sub.start()

        self.addCleanup(cleanup_stream_sub)

        # Start agent
        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id,
                                   process=self.scion_proc)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_INITIALIZED)

        sac.connect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_CONNECTED)

        # Coming in from the agent config.
        streaming_args = {
            'url': 'http://cdip.ucsd.edu/data_access/justdar.cdip?029+pm',
            'sampling_interval': 10
        }

        sac.start_streaming(streaming_args)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_STREAMING)

        # Set to progressively higher values for real data stream tests.
        gevent.sleep(20)

        # Retrieve data
        self.assertTrue(os.path.exists(ds_filename))

        inst_data = self.scion_client.get_asset_data(inst_id)
        """
        {'data': {'Dp': [[1465682100000, 325]],
            'Hs': [[1465682100000, 3.03]],
            'Ta': [[1465682100000, 6.92]],
            'Temp': [[1465682100000, 12.2]],
            'Tp': [[1465682100000, 9.09]]},
         'dataset_id': '08bc829159e6401182462b713b180dbe',
         'num_rows': 1,
         'ts_generated': '1465685467675',
         'var_def': [{'base_type': 'ntp_time',
              'description': 'NTPv4 timestamp',
              'name': 'time',
              'storage_dtype': 'i8',
              'unit': ''},
             {'base_type': 'float',
              'description': 'Significant wave height',
              'name': 'Hs',
              'storage_dtype': 'f8',
              'unit': 'meters'},
             {'base_type': 'float',
              'description': 'Peak wave period',
              'name': 'Tp',
              'storage_dtype': 'f8',
              'unit': 'seconds'},
             {'base_type': 'int',
              'description': 'Peak wave direction',
              'name': 'Dp',
              'storage_dtype': 'i4',
              'unit': 'degrees'},
             {'base_type': 'float',
              'description': 'Average wave period',
              'name': 'Ta',
              'storage_dtype': 'f8',
              'unit': 'seconds'},
             {'base_type': 'float',
              'description': 'Surface temperature',
              'name': 'Temp',
              'storage_dtype': 'f8',
              'unit': 'celcius'}],
         'variables': ['time', 'Hs', 'Tp', 'Dp', 'Ta', 'Temp']}
        """
        num_rows = inst_data["num_rows"]
        log.info('CDIP test produced %i data rows.' % num_rows)

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_CONNECTED)

        sac.disconnect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_INITIALIZED)

        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(
                first_name="John", last_name="Doe",
                username="******", password="******", email="*****@*****.**")

        # Create instrument
        data_filename = os.path.join(os.path.split(__file__)[0], "testdata/orb_replay_data.yml")
        agent_info=[dict(agent_type="data_agent",
                         config=dict(plugin="scion.agent.model.orb.orb_replay_plugin.ORBReplay_DataAgentPlugin",
                                     sampling_interval=0.2, stream_name="basic_streams",
                                     replay_file=data_filename, replay_scenario="basic",
                                     auto_streaming=False))]
        inst_obj = Instrument(name="ORB Sensor 1", description="Seismic stream",
                              location=GeospatialLocation(latitude=32.867079, longitude=-117.257324),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_orb01_main")
        ds_obj = Dataset(name="ORB Dataset 1",
                         schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

        ds_filename = self.container.file_system.get("%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0
        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s", len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])
        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc, stream="basic_streams", callback=process_packet_cb)
        self.stream_sub.start()

        self.addCleanup(cleanup_stream_sub)

        # Start agent
        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id, process=self.scion_proc)
        sac.connect()
        sac.start_streaming()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_STREAMING)

        # Retrieve data
        gevent.sleep(1.5)
        self.assertTrue(os.path.exists(ds_filename))
        inst_data_t2 = self.scion_client.get_asset_data(inst_id)
        print "T2", inst_data_t2

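        # The 'basic' replay scenario produces a fixed 40-row series with 100 ms timestamp spacing, so exact values can be asserted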
        num_rows_t2 = inst_data_t2["num_rows"]
        self.assertEquals(num_rows_t2, 40)
        sample_data = inst_data_t2["data"]["sample_vector"]
        self.assertEquals(sample_data[0][1], 100)
        self.assertEquals(sample_data[39][1], 409)
        self.assertEquals(sample_data[1][0] - sample_data[0][0], 100)
        self.assertEquals(sample_data[39][0] - sample_data[0][0], 3900)

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        sac.disconnect()
        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

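        # Exercise data_filter options: max_rows keeps only the most recent rows, start_time/end_time bound the window, and start_time_include toggles inclusion of the start boundary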
        inst_data = self.scion_client.get_asset_data(inst_id, data_filter=dict(max_rows=20))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 300)

        inst_data = self.scion_client.get_asset_data(inst_id, data_filter=dict(start_time="1460694001000"))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 30)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 200)

        # Note: Time filter applies before expansion
        inst_data = self.scion_client.get_asset_data(inst_id, data_filter=dict(start_time="1460694001000", start_time_include=False))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 300)

        inst_data = self.scion_client.get_asset_data(inst_id, data_filter=dict(start_time="1460694001000", max_rows=10))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 10)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 400)

        inst_data = self.scion_client.get_asset_data(inst_id, data_filter=dict(start_time="1460694001000", end_time="1460694002000"))
        print inst_data
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 200)
Example #6
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(
                first_name="John", last_name="Doe",
                username="******", password="******", email="*****@*****.**")

        # Create instrument
        agent_info=[dict(agent_type="data_agent",
                         config=dict(plugin="scion.agent.model.orb.orb_plugin.Orb_DataAgentPlugin",
                                     sampling_interval=0.5, stream_name="basic_streams",
                                     auto_streaming=False))]
        inst_obj = Instrument(name="TA_121A/MGENC/M40", description="Multiplexed generic compressed data frame packet",
                              location=GeospatialLocation(latitude=42.867079, longitude=-127.257324),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_orb_mgenc_m40")
        ds_obj = Dataset(name="Dataset Sensor 3",
                         schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

        ds_filename = self.container.file_system.get("%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
        self.assertFalse(os.path.exists(ds_filename))

        inst_data_t0 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t0["dataset_id"], ds_id)
        self.assertEquals(inst_data_t0["num_rows"], 0)

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0
        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s", len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])
            #log.info('Packet data: ' + str(packet.data))
        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc, stream="basic_streams", callback=process_packet_cb)
        self.stream_sub.start()
        
        self.addCleanup(cleanup_stream_sub)

        # Start agent
        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id, process=self.scion_proc)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        sac.connect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        # Coming in from the agent config.
        streaming_args = { 
            'orb_name' : 'taexport.ucsd.edu:usarrayTA',
            #'select' : 'TA_109C/MGENC/M40',
            'select' : 'TA_121A/MGENC/M40',
            '--timeout' : 5,
            'sample_interval' : 5 
        }

        sac.start_streaming(streaming_args)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_STREAMING)

        gevent.sleep(180)

        # Retrieve data
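        # The live USArray feed varies per run, so the detailed data assertions below are left disabled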
        """
        gevent.sleep(1)
        self.assertTrue(os.path.exists(ds_filename))

        inst_data_t1 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t1["dataset_id"], ds_id)
        self.assertEquals(inst_data_t1["variables"], ['time', 'cpu_percent'])
        self.assertIn("cpu_percent", inst_data_t1["data"])
        num_rows_t1 = inst_data_t1["num_rows"]
        self.assertGreaterEqual(num_rows_t1, 2)

        gevent.sleep(1)
        inst_data_t2 = self.scion_client.get_asset_data(inst_id)
        num_rows_t2 = inst_data_t2["num_rows"]
        self.assertGreater(num_rows_t2, num_rows_t1)

        self.assertGreaterEqual(len(self.recv_packets), self.recv_rows)
        self.assertLessEqual(abs(self.recv_rows - num_rows_t2), 2)
        """

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        sac.disconnect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))
Example #7
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(first_name="John",
                                                 last_name="Doe",
                                                 username="******",
                                                 password="******",
                                                 email="*****@*****.**")

        # Create instrument
        data_filename = os.path.join(
            os.path.split(__file__)[0], "testdata/orb_replay_data.yml")
        agent_info = [
            dict(
                agent_type="data_agent",
                config=dict(
                    plugin=
                    "scion.agent.model.orb.orb_replay_plugin.ORBReplay_DataAgentPlugin",
                    sampling_interval=0.2,
                    stream_name="basic_streams",
                    replay_file=data_filename,
                    replay_scenario="basic",
                    auto_streaming=False))
        ]
        inst_obj = Instrument(name="ORB Sensor 1",
                              description="Seismic stream",
                              location=GeospatialLocation(
                                  latitude=32.867079, longitude=-117.257324),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_orb01_main")
        ds_obj = Dataset(name="ORB Dataset 1", schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

        ds_filename = self.container.file_system.get(
            "%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0

        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s",
                     len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])

        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc,
                                           stream="basic_streams",
                                           callback=process_packet_cb)
        self.stream_sub.start()

        self.addCleanup(cleanup_stream_sub)

        # Start agent
        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id,
                                   process=self.scion_proc)
        sac.connect()
        sac.start_streaming()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"],
                          StreamingAgent.AGENTSTATE_STREAMING)

        # Retrieve data
        gevent.sleep(1.5)
        self.assertTrue(os.path.exists(ds_filename))
        inst_data_t2 = self.scion_client.get_asset_data(inst_id)
        print "T2", inst_data_t2

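        # Replay of the fixed 'basic' scenario: 40 rows at 100 ms spacing, allowing exact assertions below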
        num_rows_t2 = inst_data_t2["num_rows"]
        self.assertEquals(num_rows_t2, 40)
        sample_data = inst_data_t2["data"]["sample_vector"]
        self.assertEquals(sample_data[0][1], 100)
        self.assertEquals(sample_data[39][1], 409)
        self.assertEquals(sample_data[1][0] - sample_data[0][0], 100)
        self.assertEquals(sample_data[39][0] - sample_data[0][0], 3900)

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        sac.disconnect()
        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

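        # Check the data_filter variants (max_rows, start_time/end_time bounds, start_time_include) against the known replay series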
        inst_data = self.scion_client.get_asset_data(
            inst_id, data_filter=dict(max_rows=20))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 300)

        inst_data = self.scion_client.get_asset_data(
            inst_id, data_filter=dict(start_time="1460694001000"))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 30)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 200)

        # Note: Time filter applies before expansion
        inst_data = self.scion_client.get_asset_data(
            inst_id,
            data_filter=dict(start_time="1460694001000",
                             start_time_include=False))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 300)

        inst_data = self.scion_client.get_asset_data(
            inst_id, data_filter=dict(start_time="1460694001000", max_rows=10))
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 10)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 400)

        inst_data = self.scion_client.get_asset_data(
            inst_id,
            data_filter=dict(start_time="1460694001000",
                             end_time="1460694002000"))
        print inst_data
        self.assertEquals(len(inst_data["data"]["sample_vector"]), 20)
        self.assertEquals(inst_data["data"]["sample_vector"][0][1], 200)
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(
                first_name="John", last_name="Doe",
                username="******", password="******", email="*****@*****.**")

        # Create instrument
        agent_info=[dict(agent_type="data_agent",
                         config=dict(plugin="scion.agent.model.cdip.cdip_plugin.CDIP_DataAgentPlugin",
                                     sampling_interval=10, stream_name="basic_streams",
                                     auto_streaming=False))]
        inst_obj = Instrument(name="TA_121A/MGENC/M40", description="CDIP buoy data",
                              location=GeospatialLocation(latitude=37.94831666666667, longitude=-123.4675),
                              agent_info=agent_info)
        inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)

        # Create dataset
        schema_def = DataSchemaParser.parse_schema_ref("ds_cdip01_main")
        ds_obj = Dataset(name="Dataset Sensor",
                         schema_definition=schema_def)
        ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)

        self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

        ds_filename = self.container.file_system.get("%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
        self.assertFalse(os.path.exists(ds_filename))

        inst_data_t0 = self.scion_client.get_asset_data(inst_id)
        self.assertEquals(inst_data_t0["dataset_id"], ds_id)
        self.assertEquals(inst_data_t0["num_rows"], 0)

        # Install a data packet catcher
        self.recv_packets, self.recv_rows = [], 0
        def process_packet_cb(packet, route, stream):
            if not isinstance(packet, DataPacket):
                log.warn("Received a non DataPacket message")
            self.recv_packets.append(packet)
            self.recv_rows += len(packet.data["data"])
            log.info("Received data packet #%s: rows=%s, cols=%s", len(self.recv_packets), len(packet.data["data"]),
                     packet.data["cols"])
            #log.info('Packet data: ' + str(packet.data))
        def cleanup_stream_sub():
            if self.stream_sub:
                self.stream_sub.stop()
                self.stream_sub = None

        self.stream_sub = StreamSubscriber(process=self.scion_proc, stream="basic_streams", callback=process_packet_cb)
        self.stream_sub.start()
        
        self.addCleanup(cleanup_stream_sub)

        # Start agent
        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))

        agent_pid = self.scion_client.start_agent(inst_id)

        self.assertTrue(StreamingAgentClient.is_agent_active(inst_id))

        sac = StreamingAgentClient(resource_id=inst_id, process=self.scion_proc)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        sac.connect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        # Coming in from the agent config.
        streaming_args = { 
            'url' : 'http://cdip.ucsd.edu/data_access/justdar.cdip?029+pm',
            'sampling_interval' : 10
        }

        sac.start_streaming(streaming_args)
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_STREAMING)

        # Set to progressively higher values for real data stream tests.
        gevent.sleep(20)

        # Retrieve data
        self.assertTrue(os.path.exists(ds_filename))

        inst_data = self.scion_client.get_asset_data(inst_id)
        """
        {'data': {'Dp': [[1465682100000, 325]],
            'Hs': [[1465682100000, 3.03]],
            'Ta': [[1465682100000, 6.92]],
            'Temp': [[1465682100000, 12.2]],
            'Tp': [[1465682100000, 9.09]]},
         'dataset_id': '08bc829159e6401182462b713b180dbe',
         'num_rows': 1,
         'ts_generated': '1465685467675',
         'var_def': [{'base_type': 'ntp_time',
              'description': 'NTPv4 timestamp',
              'name': 'time',
              'storage_dtype': 'i8',
              'unit': ''},
             {'base_type': 'float',
              'description': 'Significant wave height',
              'name': 'Hs',
              'storage_dtype': 'f8',
              'unit': 'meters'},
             {'base_type': 'float',
              'description': 'Peak wave period',
              'name': 'Tp',
              'storage_dtype': 'f8',
              'unit': 'seconds'},
             {'base_type': 'int',
              'description': 'Peak wave direction',
              'name': 'Dp',
              'storage_dtype': 'i4',
              'unit': 'degrees'},
             {'base_type': 'float',
              'description': 'Average wave period',
              'name': 'Ta',
              'storage_dtype': 'f8',
              'unit': 'seconds'},
             {'base_type': 'float',
              'description': 'Surface temperature',
              'name': 'Temp',
              'storage_dtype': 'f8',
              'unit': 'celcius'}],
         'variables': ['time', 'Hs', 'Tp', 'Dp', 'Ta', 'Temp']}
        """
        num_rows = inst_data["num_rows"]
        log.info('CDIP test produced %i data rows.' % num_rows)

        # Take down agent
        sac.stop_streaming()  # Not required to stop agent, just to test here
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

        sac.disconnect()
        agent_status = sac.get_status()
        self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

        self.scion_client.stop_agent(inst_id)

        self.assertFalse(StreamingAgentClient.is_agent_active(inst_id))
    def test_scion_agent(self):
        # Create user
        actor_id = self.scion_client.define_user(
                first_name="John", last_name="Doe",
                username="******", password="******", email="*****@*****.**")

        inst_ids = []
        ds_ids = []
        for source in sources:
   
          # Instrument
          agent_info=[dict(agent_type="data_agent",
            config=dict(plugin="scion.agent.model.orb.orb_plugin.Orb_DataAgentPlugin",
            sampling_interval=0.5, stream_name="basic_streams", auto_streaming=False))]
          inst_obj = Instrument(name=source, description="Multiplexed generic compressed data frame packet",
            location=GeospatialLocation(latitude=42.867079, longitude=-127.257324), agent_info=agent_info)
          inst_id, _ = self.rr.create(inst_obj, actor_id=actor_id)
          inst_ids.append(inst_id)

          # Dataset
          schema_def = DataSchemaParser.parse_schema_ref("ds_orb_mgenc_m40")
          ds_obj = Dataset(name=source, schema_definition=schema_def)
          ds_id, _ = self.rr.create(ds_obj, actor_id=actor_id)
          self.rr.create_association(inst_id, PRED.hasDataset, ds_id)

          ds_filename = self.container.file_system.get("%s/%s%s.hdf5" % (DS_BASE_PATH, DS_FILE_PREFIX, ds_id))
          self.assertFalse(os.path.exists(ds_filename))

          inst_data_t0 = self.scion_client.get_asset_data(inst_id)
          self.assertEquals(inst_data_t0["dataset_id"], ds_id)
          self.assertEquals(inst_data_t0["num_rows"], 0)
          ds_ids.append(ds_id)

        # Install a data packet catcher
        # TODO

        # Start agent
        sacs = []
        for idx, source in enumerate(sources):
          self.assertFalse(StreamingAgentClient.is_agent_active(inst_ids[idx]))
          agent_pid = self.scion_client.start_agent(inst_ids[idx])
          self.assertTrue(StreamingAgentClient.is_agent_active(inst_ids[idx]))

          sac = StreamingAgentClient(resource_id=inst_ids[idx], process=self.scion_proc)
          agent_status = sac.get_status()
          self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

          sac.connect()
          agent_status = sac.get_status()
          self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

          # Coming in from the agent config.
          streaming_args = { 
              #'orb_name' : 'taexport.ucsd.edu:usarrayTA',
              'orb_name' : 'ceusnexport.ucsd.edu:usarray',
              'select' : source,
              '--timeout' : 5,
              'sample_interval' : 5 
          }

          sac.start_streaming(streaming_args)
          agent_status = sac.get_status()
          self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_STREAMING)
          sacs.append(sac)

        gevent.sleep(120)

        # Take down agent
        for idx, sac in enumerate(sacs):
          #sac.stop_streaming()  # Not required to stop agent, just to test here
          #agent_status = sac.get_status()
          #self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_CONNECTED)

          #sac.disconnect()
          #agent_status = sac.get_status()
          #self.assertEquals(agent_status["current_state"], StreamingAgent.AGENTSTATE_INITIALIZED)

          self.scion_client.stop_agent(inst_ids[idx])
          self.assertFalse(StreamingAgentClient.is_agent_active(inst_ids[idx]))