def test_AllocationPersistence(self):
    """Verify allocations survive a DomainManager restart.

    Allocations are split across two connected domains; after killing and
    re-launching domain 1's DomainManager from its persistence database,
    the reported allocation set must be unchanged.
    """
    self.launchDeviceManager("/nodes/test_multiDomain_exec/DeviceManager.dcd.xml", domainManager=self._domainManager_1, debug=self.debuglevel)
    self.launchDeviceManager("/nodes/test_multiDomain_uses/DeviceManager.dcd.xml", domainManager=self._domainManager_2, debug=self.debuglevel)
    self._domainManager_1.registerRemoteDomainManager(self._domainManager_2)
    allocMgr_1 = self._domainManager_1._get_allocationMgr()

    # Make a couple of allocation requests that we know will have to be
    # split across the two domains
    execcap = {'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb':1000,
               'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9':50.0}
    usescap = {'DCE:8cad8ca5-c155-4d1d-ae40-e194aa1d855f':1}
    requests = [_packageRequest('exec', properties.props_from_dict(execcap)),
                _packageRequest('uses', properties.props_from_dict(usescap))]
    results = dict((r.requestID, r) for r in allocMgr_1.allocate(requests))
    self.assertEqual(len(requests), len(results))
    usesId = results['uses'].allocationID
    execId = results['exec'].allocationID

    # Save the current allocation state
    pre = dict((al.allocationID, al) for al in allocMgr_1.allocations([]))

    # Kill the DomainManager
    os.kill(self._domainBooter_1.pid, signal.SIGTERM)
    if not self.waitTermination(self._domainBooter_1):
        self.fail("Domain Manager Failed to Die")

    # Re-launch and check that the allocation state remains the same
    self.launchDomainManager(endpoint='giop:tcp::5679', dbURI=self._dbfile, debug=self.debuglevel)
    post = dict((al.allocationID, al) for al in allocMgr_1.allocations([]))
    self.assertEqual(len(pre), len(post))
    self.assertEqual(pre.keys(), post.keys())
    for allocId, status in pre.iteritems():
        self.assert_(_compareAllocations(status, post[allocId]))
def test_AllocationManagerAllocationIterators(self):
    """
    Verifies that the AllocationManager's allocation iterators return the
    same sets of allocations as the corresponding attributes.
    """
    nb1, execDevNode1 = self.launchDeviceManager("/nodes/test_multiDomain_exec/DeviceManager.dcd.xml", domainManager=self._domainManager_1)
    self.assertNotEqual(execDevNode1, None)
    nb2, basicDevNode1 = self.launchDeviceManager("/nodes/test_multiDomain_uses/DeviceManager.dcd.xml", domainManager=self._domainManager_2)
    self.assertNotEqual(basicDevNode1, None)

    # Connect the domains to each other
    self._domainManager_1.registerRemoteDomainManager(self._domainManager_2)
    allocMgr = self._domainManager_1._get_allocationMgr()

    # Make a couple of allocation requests that we know will have to be
    # split across the two domains
    execcap = {'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb':1000,
               'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9':50.0}
    usescap = {'DCE:8cad8ca5-c155-4d1d-ae40-e194aa1d855f':1}
    requests = [allocMgrHelpers.createRequest('exec', properties.props_from_dict(execcap)),
                allocMgrHelpers.createRequest('uses', properties.props_from_dict(usescap))]
    results = allocMgr.allocate(requests)
    self.assertEqual(len(requests), len(results))

    # Check local allocations: iterator contents must match the
    # localAllocations([]) attribute query
    local_iter = _iteratorFetch(allocMgr.listAllocations(CF.AllocationManager.LOCAL_ALLOCATIONS, 1))
    local_list = allocMgr.localAllocations([])
    self.assertTrue(allocMgrHelpers.compareAllocationStatusSequence(local_iter, local_list))

    # Check all allocations
    all_iter = _iteratorFetch(allocMgr.listAllocations(CF.AllocationManager.ALL_ALLOCATIONS, 1))
    all_list = allocMgr.allocations([])
    self.assertTrue(allocMgrHelpers.compareAllocationStatusSequence(all_iter, all_list))
def _test_stream_id(self, sink):
    """Check that packets carry the default stream ID until a new one is
    configured, then carry the configured ID (packet-based variant)."""
    self._generate_config()
    self.config_params["shape"] = "constant"
    default_stream_id = "SigGen Stream"
    test_stream_id = "unit_test_stream_id"
    self.config_params.pop("stream_id")  # Verify that default stream id value is used
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_packets = self._get_received_packets(start_time, rx_len_sec, sink)
    for p in rx_packets:  # Data returned is list of test_utils.BufferedPacket
        self.assertEqual(p[3].streamID, default_stream_id)
    self.comp_obj.configure(props_from_dict({"stream_id": test_stream_id}))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_packets = self._get_received_packets(start_time, rx_len_sec, sink)
    for p in rx_packets:  # Data returned is list of test_utils.BufferedPacket
        self.assertEqual(p[3].streamID, test_stream_id)
def test_keyword_change(self):
    """Change keyword-affecting properties mid-stream and validate that the
    first half of the output reflects the original config and the second
    half reflects the updated config."""
    print "\n... Staring mid stream keyword change test"
    self._generate_config()
    self.comp_obj.configure(props_from_dict(self.config_dict))
    self._generate_keywords()
    self._send_data(last_pkt_eos=False)
    expected_data1 = self.expected_data
    time.sleep(1)  # Allow SCAAudioTagger to process the already sent data before changing config parameters
    self.updated_config = copy.deepcopy(self.config_dict)
    self.updated_config["encoding"] = "PCM_SIGNED"
    self.updated_config["channels"] = 1
    self.comp_obj.configure(props_from_dict(self.updated_config))
    self._send_data(last_pkt_eos=True)
    expected_data2 = self.expected_data
    rx_data = self._get_received_data()
    # First half was produced under the original config, second half under
    # the updated config
    rx_data1 = rx_data[:len(rx_data)/2]
    rx_data2 = rx_data[len(rx_data)/2:]
    print "Received SRI keywords: %s" % props_to_dict(rx_data1[-1].sri.keywords)
    print "Received SRI keywords: %s" % props_to_dict(rx_data2[-1].sri.keywords)
    self._validate_data(rx_data1, expected_data1, expected_frame_size=1, config_dict=self.config_dict)
    self._validate_data(rx_data2, expected_data2, expected_frame_size=1, config_dict=self.updated_config)
def _test_stream_id(self, sink):
    """Check that SRI carries the default stream ID until a new one is
    configured, then carries the configured ID (SRI/data-based variant)."""
    self._generate_config()
    self.config_params["shape"] = "constant"
    default_stream_id = "SigGen Stream"
    test_stream_id = "unit_test_stream_id"
    self.config_params.pop("stream_id")  # Verify that default stream id value is used
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    print "\nReceived Data 1 Time Range:"
    print rx_data[0].T
    print rx_data[-1].T
    for p in rx_data:  # Data returned is list of test_utils.BufferedPacket
        self.assertEqual(p.sri.streamID, default_stream_id)
    self.comp_obj.configure(props_from_dict({"stream_id":test_stream_id}))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    print "\nReceived Data 2 Time Range:"
    print rx_data[0].T
    print rx_data[-1].T
    for p in rx_data:  # Data returned is list of test_utils.BufferedPacket
        self.assertEqual(p.sri.streamID, test_stream_id)
def test_configure_latency(self):
    """Measure configure() latency over several iterations and assert the
    average stays below MAX_LATENCY seconds."""
    print "\n... Starting Test Configure latency"
    self._generate_config()
    self.config_params["shape"] = "constant"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    test_stream_id = 'unit_test_stream_%s'
    MAX_LATENCY = 0.1  # max allowed average seconds per configure() call
    ITERATIONS = 10
    total_time = 0.0
    for i in xrange(ITERATIONS):
        start_time = time.time()
        self.comp_obj.configure(props_from_dict({"stream_id": test_stream_id % i}))
        stop_time = time.time()
        total_time += stop_time - start_time
        time.sleep(0.1)
    self.assertTrue(total_time < ITERATIONS * MAX_LATENCY,
                    "Average latency (%s) of configure call exceeds max allowed (%s)" % (total_time / ITERATIONS, MAX_LATENCY))
def testNoneImport(self):
    """Do a test with import configured to None rather than a sequence """
    print "\n... running testNoneImport"
    self.comp.configure(props_from_dict({'import': None}))
    # A None value should be reported back as an empty sequence on query
    props = props_to_dict(self.comp.query(props_from_dict({})))
    self.assertEqual(props['import'], [],
                     '"import" property set to "%s" (instead of empty sequence) after configured with "None".' % props['import'])
def test_allocationsMethod(self): nb, devMgr = self.launchDeviceManager( '/nodes/test_SADUsesDevice/DeviceManager.dcd.xml', debug=self.debuglevel) # Check that there are no allocations reported allocs = self._allocMgr.allocations([]) self.assertEqual(len(allocs), 0) # Make a single allocation request and check that it looks right props = properties.props_from_dict({'simple_alloc': 1}) request = [_packageRequest('test1', props)] response = self._allocMgr.allocate(request) self.assertEqual(len(request), len(response)) self.assertEqual(request[0].requestID, response[0].requestID) # Save allocation IDs for later checks allocIDs = [resp.allocationID for resp in response] # Check that the reported allocations match expectations allocs = self._allocMgr.allocations([]) self.assertEqual(len(allocs), 1) self.assertEqual(allocs[0].allocationID, allocIDs[0]) # Make two more allocation requests request = [('external', { 'simple_alloc': 1 }), ('matching', { 'DCE:ac73446e-f935-40b6-8b8d-4d9adb6b403f': 2, 'DCE:7f36cdfb-f828-4e4f-b84f-446e17f1a85b': 'BasicTestDevice' })] request = [ _packageRequest(k, properties.props_from_dict(v)) for k, v in request ] response = self._allocMgr.allocate(request) self.assertEqual(len(request), len(response)) allocIDs.extend(resp.allocationID for resp in response) allocs = self._allocMgr.allocations([]) self.assertEqual(len(allocs), 3) # Try to retrieve an invalid allocation ID, making sure it throws an # exception self.assertRaises(CF.AllocationManager.InvalidAllocationId, self._allocMgr.allocations, ['missing']) # Check that we can retrieve a specific allocation allocs = self._allocMgr.allocations(allocIDs[-1:]) self.assertEqual(len(allocs), 1)
def test_AllocationManagerAllocationIterators(self):
    """
    Verifies that the AllocationManager's allocation iterators return the
    same sets of allocations as the corresponding attributes.
    """
    nb1, execDevNode1 = self.launchDeviceManager(
        "/nodes/test_multiDomain_exec/DeviceManager.dcd.xml",
        domainManager=self._domainManager_1)
    self.assertNotEqual(execDevNode1, None)
    nb2, basicDevNode1 = self.launchDeviceManager(
        "/nodes/test_multiDomain_uses/DeviceManager.dcd.xml",
        domainManager=self._domainManager_2)
    self.assertNotEqual(basicDevNode1, None)

    # Connect the domains to each other
    self._domainManager_1.registerRemoteDomainManager(self._domainManager_2)
    allocMgr = self._domainManager_1._get_allocationMgr()

    # Make a couple of allocation requests that we know will have to be
    # split across the two domains
    execcap = {
        'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb': 1000,
        'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9': 50.0
    }
    usescap = {'DCE:8cad8ca5-c155-4d1d-ae40-e194aa1d855f': 1}
    requests = [
        allocMgrHelpers.createRequest('exec', properties.props_from_dict(execcap)),
        allocMgrHelpers.createRequest('uses', properties.props_from_dict(usescap))
    ]
    results = allocMgr.allocate(requests)
    self.assertEqual(len(requests), len(results))

    # Check local allocations: iterator contents must match the
    # localAllocations([]) attribute query
    local_iter = _iteratorFetch(
        allocMgr.listAllocations(CF.AllocationManager.LOCAL_ALLOCATIONS, 1))
    local_list = allocMgr.localAllocations([])
    self.assertTrue(
        allocMgrHelpers.compareAllocationStatusSequence(local_iter, local_list))

    # Check all allocations
    all_iter = _iteratorFetch(
        allocMgr.listAllocations(CF.AllocationManager.ALL_ALLOCATIONS, 1))
    all_list = allocMgr.allocations([])
    self.assertTrue(
        allocMgrHelpers.compareAllocationStatusSequence(all_iter, all_list))
def test_write_toggle(self):
    """Run ten passes with the 'write' property disabled for the first five
    and enabled from pass 5 on; files must only appear while enabled.

    NOTE(review): nesting reconstructed from collapsed source — confirm
    the send/validate statements sit at loop level as written here.
    """
    print "\n... Staring write toggle test"
    config_dict = self._generate_config()
    data_filename = config_dict['filename']
    if os.path.exists(data_filename):
        os.system("rm %s" % (data_filename+"*"))
    while os.path.exists(data_filename):
        time.sleep(self.t_delta)
    self._generate_keywords()
    config_dict["write"] = False
    self.comp.configure(props_from_dict({"write": False}))  # Ensure write property is configured before filename (otherwise, file may exist before write is disabled)
    self.comp.configure(props_from_dict(config_dict))
    data_filename = config_dict['filename']
    base_filename = data_filename
    for i in range(10):
        filename = data_filename
        if i == 0:
            # On first pass, file is already created/ opened by DataWriter
            data_filename = filename
        else:
            # Probe for the first unused numbered filename suffix
            x = 0
            while True:
                if not os.path.exists(filename):
                    # File does not exist, good to go
                    data_filename = filename
                    break
                x += 1
                filename = base_filename + ".%d" % x
        sri_filename = data_filename + ".sri"
        self.keywords_dict["pass"] = i
        if i == 5:
            # Enable writing and wait for the property to take effect
            self.comp.configure(props_from_dict({"write": True}))
            while self.comp.write!=True:
                time.sleep(self.t_delta)
        self._send_data()
        self._sleepTillDone(data_filename)
        if i >= 5:
            # expecting output
            self._validate_data(data_filename, self.expected_data, config_dict["endian"])
            self._validate_metadata(sri_filename, self.t_delta, self.stream_id1, self.keywords_dict, self.first_pkt_time, self.last_pkt_time)
        else:
            # Verify that data was not written
            self.assertFalse(os.path.exists(data_filename))
            self.assertFalse(os.path.exists(sri_filename))
def test_same_stream_snaps(self):
    """Toggle writing on/off across six passes of one stream; each disable
    closes the current snapshot file, which is then validated, and a new
    numbered file is expected on the next enable."""
    print "\n... Staring multiple snapshots from one stream test"
    config_dict = self._generate_config()
    data_filename = config_dict['filename']
    if os.path.exists(data_filename):
        os.system("rm %s" % (data_filename+"*"))
    while os.path.exists(data_filename):
        time.sleep(self.t_delta)
    self._generate_keywords()
    self.comp.configure(props_from_dict(config_dict))
    data_filename = config_dict['filename']
    base_filename = data_filename
    count = 0
    expected_data = []
    results = []
    for i in xrange(6):
        # Disable file writing, this should cause the current file to be written out
        # and a new file to be created the next time write in enabled
        enabled_write = not ((i+1) % 2 == 0)
        self.comp.configure(props_from_dict({"write": enabled_write}))
        while props_to_dict(self.comp.query([]))['write']!=enabled_write:
            time.sleep(self.t_delta)
        first_pkt_time = None
        for j in xrange(3):
            self._send_data(last_pkt_eos=False)  # send data but don't send eos (not stopping streaming just yet)
            first_pkt_time = first_pkt_time or self.first_pkt_time
            last_pkt_time = self.last_pkt_time
            if enabled_write:
                expected_data.extend(self.expected_data)
                self._sleepTillDone(data_filename, expected_data=expected_data)
            else:
                time.sleep(self.t_data_wait)
        if not enabled_write:
            # File should have been closed at the start of this pass
            sri_filename = data_filename + ".sri"
            self._validate_data(data_filename, expected_data, config_dict["endian"])
            self._validate_metadata(sri_filename, self.t_delta, self.stream_id1, self.keywords_dict, first_pkt_time, last_pkt_time)
            # reset for the next file
            expected_data = []
            count += 1
            data_filename = base_filename + ".%d" % count
def myTestCase(self, data, upper_limit, lower_limit): """The main engine for all the test cases - configure, push data, and get output As applicable """ # Configure upper and lower limit self.comp.configure(props_from_dict({'upper_limit':upper_limit,'lower_limit':lower_limit})) kw = sb.SRIKeyword("testkw", 10.0,'double') # Push in data self.src1.push(data,EOS=False, streamID='myStreamID', sampleRate=200, complexData=False, SRIKeywords=[kw], loop=None) #data processing is asynchronos - so wait until the data is all processed count=0 while True: out = self.sink.getData() sri = self.sink.sri() if out: break if count==100: break time.sleep(.01) count+=1 return out,sri
def test_Allocations(self):
    """Verify allocation state is identical before and after a
    DomainManager kill/re-launch from its persistence database."""
    nb, domMgr = self.launchDomainManager(endpoint="giop:tcp::5679", dbURI=self._dbfile)
    self.launchDeviceManager("/nodes/test_BasicTestDevice_node/DeviceManager.dcd.xml")

    # Make a couple of different allocations
    allocMgr = domMgr._get_allocationMgr()
    memCapacityId = 'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb'
    bogoMipsId = 'DCE:5636c210-0346-4df7-a5a3-8fd34c5540a8'
    nicCapacityId = 'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9'
    allocations = { 'test1': {memCapacityId:2048, nicCapacityId:0.125},
                    'test2': {bogoMipsId:10000}}
    requests = [CF.AllocationManager.AllocationRequestType(k, properties.props_from_dict(v), [], [], 'test_Allocations') for k,v in allocations.iteritems()]
    results = allocMgr.allocate(requests)
    self.assertEqual(len(results), len(requests))

    # Save the allocation state prior to termination
    pre = dict((al.allocationID, al) for al in allocMgr.allocations([]))
    self.assertEqual(len(pre), len(results))

    # Kill the DomainManager
    os.kill(nb.pid, signal.SIGTERM)
    if not self.waitTermination(nb):
        self.fail("Domain Manager Failed to Die")

    # Re-launch and check that the allocation state remains the same;
    # implicitly tests that the AllocationManager reference is persistent
    self.launchDomainManager(endpoint='giop:tcp::5679', dbURI=self._dbfile)
    post = dict((al.allocationID, al) for al in allocMgr.allocations([]))
    self.assertEqual(len(pre), len(post))
    self.assertEqual(pre.keys(), post.keys())
    for allocId, status in pre.iteritems():
        self._compareAllocation(status, post[allocId])
def _test_signal_with_phase(self, shape, sink, signal_function, convert_function):
    """Compare 1s of received samples against the reference waveform built
    by signal_function (with per-sample phase step) and convert_function."""
    self._generate_config()
    self.config_params["shape"] = shape
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    # Phase advances by frequency/sample_rate per sample
    delta_phase = self.config_params["frequency"] / self.config_params["sample_rate"]
    expected_values = convert_function(signal_function(self.config_params["magnitude"], int(self.config_params["sample_rate"]), dp=delta_phase))
    n_expected = len(expected_values)
    # Compare only the overlapping prefix of received vs expected
    minlength = min([len(rx_data), len(expected_values)])
    rx_data = rx_data[:minlength]
    expected_values = expected_values[:minlength]
    for rx_val, exp_val in zip(rx_data, expected_values):
        #self.assertAlmostEqual(rx_val, exp_val, 5)
        self.assert_isclose(rx_val, exp_val, PRECISION, NUM_PLACES)
def _generateAlloc(self,tuner_type='RX_DIGITIZER', cf=100e6,sr=25e6,bw=20e6,rf_flow_id=''):
    """Build a FRONTEND tuner allocation property structure.

    A fresh UUID is generated as the allocation ID; bandwidth and sample
    rate tolerances are fixed at 100.0 and device control is requested.
    Returns the CORBA properties produced by props_from_dict.
    """
    alloc_id = str(uuid.uuid4())
    tuner_alloc = {
        'FRONTEND::tuner_allocation::tuner_type': tuner_type,
        'FRONTEND::tuner_allocation::allocation_id': alloc_id,
        'FRONTEND::tuner_allocation::center_frequency': float(cf),
        'FRONTEND::tuner_allocation::bandwidth': float(bw),
        'FRONTEND::tuner_allocation::bandwidth_tolerance': 100.0,
        'FRONTEND::tuner_allocation::sample_rate': float(sr),
        'FRONTEND::tuner_allocation::sample_rate_tolerance': 100.0,
        'FRONTEND::tuner_allocation::device_control': True,
        'FRONTEND::tuner_allocation::group_id': '',
        'FRONTEND::tuner_allocation::rf_flow_id': rf_flow_id,
    }
    # Wrap in the struct property namespace expected by FRONTEND devices
    return properties.props_from_dict({'FRONTEND::tuner_allocation': tuner_alloc})
def test_Allocations(self):
    """Verify allocation state is identical before and after a
    DomainManager kill/re-launch from its persistence database."""
    nb, domMgr = self.launchDomainManager(endpoint="giop:tcp::5679", dbURI=self._dbfile)
    self.launchDeviceManager("/nodes/test_BasicTestDevice_node/DeviceManager.dcd.xml")

    # Make a couple of different allocations
    allocMgr = domMgr._get_allocationMgr()
    memCapacityId = 'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb'
    bogoMipsId = 'DCE:5636c210-0346-4df7-a5a3-8fd34c5540a8'
    nicCapacityId = 'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9'
    allocations = { 'test1': {memCapacityId:2048, nicCapacityId:0.125},
                    'test2': {bogoMipsId:10000}}
    requests = [CF.AllocationManager.AllocationRequestType(k, properties.props_from_dict(v), [], [], 'test_Allocations') for k,v in allocations.iteritems()]
    results = allocMgr.allocate(requests)
    self.assertEqual(len(results), len(requests))

    # Save the allocation state prior to termination
    pre = dict((al.allocationID, al) for al in allocMgr.allocations([]))
    self.assertEqual(len(pre), len(results))

    # Kill the DomainManager
    os.kill(nb.pid, signal.SIGTERM)
    if not self.waitTermination(nb):
        self.fail("Domain Manager Failed to Die")

    # Re-launch and check that the allocation state remains the same;
    # implicitly tests that the AllocationManager reference is persistent
    self.launchDomainManager(endpoint='giop:tcp::5679', dbURI=self._dbfile)
    post = dict((al.allocationID, al) for al in allocMgr.allocations([]))
    self.assertEqual(len(pre), len(post))
    self.assertEqual(pre.keys(), post.keys())
    for allocId, status in pre.iteritems():
        self._compareAllocation(status, post[allocId])
def myTestCase(self, testEquation, data1,data2,checkResults=True):
    """The main engine for all the test cases - configure the equation,
    push data, and get output As applicable

    Returns the output data; may be empty if nothing arrived within the
    ~1s polling window.
    """
    if testEquation:
        print "\n... running myTestCase %s" %testEquation
        self.comp.configure(props_from_dict({'equation':testEquation}))
    if data1:
        self.src1.push(data1)
    if data2:
        self.src2.push(data2)
    #data processing is asynchronos - so wait until the data is all processed
    count=0
    while True:
        out = self.sink.getData()
        if out:
            break
        if count==100:
            # Give up after ~1 second (100 polls of 10ms)
            break
        time.sleep(.01)
        count+=1
    if checkResults:
        numOut = self.checkResults(testEquation,data1, data2, out)
        self.assertNotEqual(numOut,0)
    return out
def _test_double_start(self, sink):
    """Call start() on an already-started component and verify packet
    timestamps stay continuous across the second start (packet variant)."""
    self._generate_config()
    self.config_params["shape"] = "constant"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    time.sleep(1.)  # get one second before calling start a second time
    self.comp_obj.start()  # call start a second time
    stop_time = time.time() + 1.0  # get at least one second after calling start a second time
    rx_len_sec = stop_time - start_time
    rx_packets = self._get_received_packets(start_time, rx_len_sec, sink)
    self.assertTrue(len(rx_packets) > 0, "No packets received.")
    next_twsec = rx_packets[0][1].twsec
    next_tfsec = rx_packets[0][1].tfsec
    xdelta = 1. / self.comp.sample_rate
    for p in rx_packets:  # Data returned is list of test_utils.BufferedPacket
        self.assert_isclose(p[1].twsec, next_twsec, PRECISION, NUM_PLACES)
        self.assert_isclose(p[1].tfsec, next_tfsec, PRECISION, NUM_PLACES)
        # Advance expected timestamp by packet duration, carrying the
        # fractional-second overflow into whole seconds
        time_delta = xdelta * len(p[0])
        next_twsec += math.floor(time_delta)
        next_tfsec += time_delta - math.floor(time_delta)
        if next_tfsec >= 1.0:
            next_tfsec -= 1.0
            next_twsec += 1.0
def _test_double_start(self, sink):
    """Call start() on an already-started component and verify packet
    timestamps stay continuous across the second start (data variant)."""
    self._generate_config()
    self.config_params["shape"] = "constant"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    time.sleep(1.)  # get one second before calling start a second time
    self.comp_obj.start()  # call start a second time
    stop_time = time.time() + 1.0  # get at least one second after calling start a second time
    rx_len_sec = stop_time-start_time
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    self.assertTrue(len(rx_data)>0, "No packets received.")
    print "\nReceived Data Time Range:"
    print rx_data[0].T
    print rx_data[-1].T
    next_twsec = rx_data[0].T.twsec
    next_tfsec = rx_data[0].T.tfsec
    xdelta = 1./self.comp.sample_rate
    for p in rx_data:  # Data returned is list of test_utils.BufferedPacket
        self.assert_isclose(p.T.twsec, next_twsec, PRECISION, NUM_PLACES)
        self.assert_isclose(p.T.tfsec, next_tfsec, PRECISION, NUM_PLACES)
        # Advance expected timestamp by packet duration, carrying the
        # fractional-second overflow into whole seconds
        time_delta = xdelta * len(p.data)
        next_twsec += math.floor(time_delta)
        next_tfsec += time_delta - math.floor(time_delta)
        if next_tfsec >= 1.0:
            next_tfsec -= 1.0
            next_twsec += 1.0
def testSriFields(self):
    """Verify that a stream created from an SRI exposes every SRI field
    (axes, subsize, mode, blocking, keywords) through its accessors."""
    sri = bulkio.sri.create("test_sri")
    sri.xstart = -2.5
    sri.xdelta = 0.125
    sri.xunits = BULKIO.UNITS_FREQUENCY
    sri.subsize = 1024
    sri.ystart = 2.5
    sri.ydelta = 1.0
    sri.yunits = BULKIO.UNITS_TIME
    sri.mode = 1
    sri.blocking = 1
    sri.keywords = properties.props_from_dict({ 'string': 'value', 'number': 100 })

    # Create a stream from the SRI and compare accessors
    stream = self.port.createStream(sri)
    self.assertEqual(stream.streamID, sri.streamID)
    self.assertEqual(stream.xstart, sri.xstart)
    self.assertEqual(stream.xdelta, sri.xdelta)
    self.assertEqual(stream.xunits, sri.xunits)
    self.assertEqual(stream.subsize, sri.subsize)
    self.assertEqual(stream.ystart, sri.ystart)
    self.assertEqual(stream.ydelta, sri.ydelta)
    self.assertEqual(stream.yunits, sri.yunits)
    # mode=1 maps to complex; blocking=1 maps to a blocking stream
    self.failUnless(stream.complex)
    self.failUnless(stream.blocking)
    self.assertEqual(len(sri.keywords), len(stream.keywords))
    self.assertEqual('value', stream.getKeyword('string'))
    self.assertEqual(100, stream.getKeyword('number'))
def test_AllocationPersistence(self):
    """Verify allocations survive a DomainManager restart.

    Allocations are split across two connected domains; after killing and
    re-launching domain 1's DomainManager from its persistence database,
    the reported allocation set must be unchanged.
    """
    self.launchDeviceManager(
        "/nodes/test_multiDomain_exec/DeviceManager.dcd.xml",
        domainManager=self._domainManager_1,
        debug=self.debuglevel)
    self.launchDeviceManager(
        "/nodes/test_multiDomain_uses/DeviceManager.dcd.xml",
        domainManager=self._domainManager_2,
        debug=self.debuglevel)
    self._domainManager_1.registerRemoteDomainManager(self._domainManager_2)
    allocMgr_1 = self._domainManager_1._get_allocationMgr()

    # Make a couple of allocation requests that we know will have to be
    # split across the two domains
    execcap = {
        'DCE:8dcef419-b440-4bcf-b893-cab79b6024fb': 1000,
        'DCE:4f9a57fc-8fb3-47f6-b779-3c2692f52cf9': 50.0
    }
    usescap = {'DCE:8cad8ca5-c155-4d1d-ae40-e194aa1d855f': 1}
    requests = [
        _packageRequest('exec', properties.props_from_dict(execcap)),
        _packageRequest('uses', properties.props_from_dict(usescap))
    ]
    results = dict((r.requestID, r) for r in allocMgr_1.allocate(requests))
    self.assertEqual(len(requests), len(results))
    usesId = results['uses'].allocationID
    execId = results['exec'].allocationID

    # Save the current allocation state
    pre = dict((al.allocationID, al) for al in allocMgr_1.allocations([]))

    # Kill the DomainManager
    os.kill(self._domainBooter_1.pid, signal.SIGTERM)
    if not self.waitTermination(self._domainBooter_1):
        self.fail("Domain Manager Failed to Die")

    # Re-launch and check that the allocation state remains the same
    self.launchDomainManager(endpoint='giop:tcp::5679',
                             dbURI=self._dbfile,
                             debug=self.debuglevel)
    post = dict((al.allocationID, al) for al in allocMgr_1.allocations([]))
    self.assertEqual(len(pre), len(post))
    self.assertEqual(pre.keys(), post.keys())
    for allocId, status in pre.iteritems():
        self.assert_(_compareAllocations(status, post[allocId]))
def testImport(self):
    """Do a test with a valid import """
    print "\n... running testImport"
    self.comp.configure(props_from_dict({'import': ['time']}))
    # This produces the GMT year of seconds since epoch
    self.myTestCase("time.gmtime(a)[0]*1.0",
                    [float(x * 60 * 60 * 24 * 5) for x in xrange(1024)],
                    [])
def testBadEquation(self): """Do a test with a bad equation to verify we have an invalid configuration """ print "\n... running testBadEquation" print "FYI: A successful test will also cause a stack trace to be displayed" try: self.comp.configure(props_from_dict({'equation': "a+asdf+b"})) except CF.PropertySet.InvalidConfiguration, e: return
def get_available_hardware(self):
    '''Return the list of available RTL devices.

    Triggers a device-side rescan via the update_available_devices
    property, queries available_devices, and shortens each entry's
    namespaced keys to their final "::" segment, e.g.::

        [{'index': 0,
          'name': 'ezcap USB 2.0 DVB-T/DAB/FM dongle',
          'product': 'RTL2838UHIDIR',
          'serial': '000000000',
          'vendor': 'Realtek'}]
    '''
    def _strip_namespace(entry):
        # Keep only the last segment of each "a::b::c"-style key
        return dict((key.split('::')[-1], val) for key, val in entry.items())
    # Ask the device to rescan its hardware list before querying it
    self.device.configure(props_from_dict({'update_available_devices': True}))
    reply = props_to_dict(self.device.query(props_from_dict({'available_devices': None})))
    return [_strip_namespace(entry) for entry in reply['available_devices']]
def testBadEquation(self): """Do a test with a bad equation to verify we have an invalid configuration """ print "\n... running testBadEquation" print "FYI: A successful test will also cause a stack trace to be displayed" try: self.comp.configure(props_from_dict({'equation':"a+asdf+b"})) except CF.PropertySet.InvalidConfiguration, e: return
def _tryAllocation(self, props):
    """Attempt a single allocation of the given property dict.

    Any allocation that is granted is immediately deallocated so the
    device is left in its original state.  Returns True when the request
    was satisfied, False otherwise.
    """
    corba_props = properties.props_from_dict(props)
    request = [allocMgrHelpers.createRequest('test', corba_props)]
    response = self.am.allocate(request)
    if response:
        # Release whatever was granted so the probe has no side effects
        granted_ids = [r.allocationID for r in response]
        self.am.deallocate(granted_ids)
    return len(response) == len(request)
def testKeywords(self):
    """Exercise stream keyword set/get/erase/replace and verify each
    keyword change is propagated in the next pushed SRI."""
    stream = self.port.createStream("test_keywords")
    self._writeSinglePacket(stream, 1)
    self.assertEqual(1, len(self.stub.H))

    # Set/get keywords
    stream.setKeyword('integer', 250)
    stream.setKeyword('string', "value")
    stream.setKeyword('double', 101.1e6)
    stream.setKeyword('boolean', False)
    self.assertEqual(250, stream.getKeyword('integer'))
    self.assertEqual('value', stream.getKeyword('string'))
    self.assertEqual(101.1e6, stream.getKeyword('double'))
    self.assertEqual(False, stream.getKeyword('boolean'))

    # Set with a specific type
    stream.setKeyword('float', -1.25, 'float')
    self.assertEqual(-1.25, stream.getKeyword('float'))
    # The stored CORBA Any must carry the requested float typecode
    any_value = stream.keywords[-1].value
    self.assertEqual(CORBA.TC_float, any_value.typecode())

    # Erase and check for presence of keywords
    stream.eraseKeyword('string')
    self.failUnless(stream.hasKeyword('integer'))
    self.failIf(stream.hasKeyword('string'))
    self.failUnless(stream.hasKeyword('double'))
    self.failUnless(stream.hasKeyword('boolean'))

    # Write a packet to trigger an SRI update
    self.assertEqual(1, len(self.stub.H))
    self._writeSinglePacket(stream, 1)
    self.assertEqual(2, len(self.stub.H))
    keywords = properties.props_to_dict(self.stub.H[-1].keywords)
    self.assertEqual(len(stream.keywords), len(keywords))
    for key, value in keywords.iteritems():
        self.assertEqual(stream.getKeyword(key), value)

    # Replace keywords with a new set
    stream.keywords = properties.props_from_dict({ 'COL_RF': 100.0e6, 'CHAN_RF': 101.1e6 })
    self.assertEqual(2, len(stream.keywords))
    self.assertEqual(100.0e6, stream.getKeyword('COL_RF'))
    self.assertEqual(101.1e6, stream.getKeyword('CHAN_RF'))

    # Trigger another SRI update
    self.assertEqual(2, len(self.stub.H))
    self._writeSinglePacket(stream, 1)
    self.assertEqual(3, len(self.stub.H))
    keywords = properties.props_to_dict(self.stub.H[-1].keywords)
    self.assertEqual(len(stream.keywords), len(keywords))
    for key, value in keywords.iteritems():
        self.assertEqual(stream.getKeyword(key), value)
def test_complex(self): print "\n... Staring complex data test" self._generate_config() self.comp_obj.configure(props_from_dict(self.config_dict)) self._generate_keywords() self._send_data(complex=True) rx_data = self._get_received_data() print "Received SRI keywords: %s" % props_to_dict(rx_data[-1].sri.keywords) self._validate_data(rx_data, self.expected_data, expected_frame_size=2)
def test_configure_latency(self):
    """Measure configure() latency over several iterations and assert the
    average stays below MAX_LATENCY seconds."""
    print "\n... Starting Test Configure latency"
    self._generate_config()
    self.config_params["shape"] = "constant"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    test_stream_id = 'unit_test_stream_%s'
    MAX_LATENCY = 0.1  # max allowed average seconds per configure() call
    ITERATIONS = 10
    total_time = 0.0
    for i in xrange(ITERATIONS):
        start_time = time.time()
        self.comp_obj.configure(props_from_dict({"stream_id":test_stream_id%i}))
        stop_time = time.time()
        total_time += stop_time-start_time
        time.sleep(0.1)
    self.assertTrue(total_time < ITERATIONS*MAX_LATENCY,
                    "Average latency (%s) of configure call exceeds max allowed (%s)"%(total_time/ITERATIONS,MAX_LATENCY))
def test_same_stream_snaps_overwrite(self):
    """Toggle writing on/off across six passes of one stream with the
    'overwrite' option enabled; the same filename is validated after each
    disable rather than a new numbered file.

    NOTE(review): nesting reconstructed from collapsed source — confirm
    the time.sleep placements match the original layout.
    """
    print "\n... Staring multiple snapshots from one stream test, overwrite enabled"
    config_dict = self._generate_config()
    data_filename = config_dict['filename']
    config_dict['overwrite'] = True
    if os.path.exists(data_filename):
        os.system("rm %s" % (data_filename+"*"))
        time.sleep(2)
    self._generate_keywords()
    self.comp.configure(props_from_dict(config_dict))
    data_filename = config_dict['filename']
    base_filename = data_filename
    expected_data = []
    results = []
    for i in xrange(6):
        enabled_write = True
        if (i+1) % 2 == 0:
            enabled_write = False
            # Disable file writing, this should cause the current file to be written out
            # and a new file to be created the next time write in enabled
            time.sleep(self.t_data_wait)  # Hack to wait for DataWriter to finish writing to disk before we verify contents
        self.comp.configure(props_from_dict({"write": enabled_write}))
        first_pkt_time = None
        for j in xrange(3):
            time.sleep(self.t_data_wait)
            self._send_data(last_pkt_eos=False)  # send data but don't send eos (not stopping streaming just yet)
            first_pkt_time = first_pkt_time or self.first_pkt_time
            last_pkt_time = self.last_pkt_time
            if enabled_write:
                expected_data.extend(self.expected_data)
        if not enabled_write:
            # File should have been closed at the start of this pass
            sri_filename = data_filename + ".sri"
            self._validate_data(data_filename, expected_data, config_dict["endian"])
            self._validate_metadata(sri_filename, self.t_delta, self.stream_id1, self.keywords_dict, first_pkt_time, last_pkt_time)
            # reset for the next file
            expected_data = []
def _test_stream_id_eos(self, sink): self._generate_config() self.config_params["shape"] = "constant" test_stream_id = "unit_test_stream_id_eos" self.config_params.pop( "stream_id" ) # Verify that default stream id value is used initially self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep( 1. ) # Ensure SigGen is sending out the desired signal before continuing self.comp_obj.configure(props_from_dict({"stream_id": test_stream_id})) print "\nConfigured with new stream id:", test_stream_id received_packets = self._get_until_eos(10, sink) self.assertTrue(len(received_packets) > 0, "No packets received.") self.assertTrue(received_packets[-1][2], "No EOS before timeout.")
def get_available_hardware(self):
    '''
    Returns a list of the available RTL devices

    >> x.get_available_hardware()
    [
      { 'index': 0,
        'name': 'ezcap USB 2.0 DVB-T/DAB/FM dongle',
        'product': 'RTL2838UHIDIR',
        'serial': '000000000',
        'vendor': 'Realtek' }
    ]
    '''
    def strip_namespace(dev):
        # Property ids come back fully qualified; keep only the last '::' segment
        return dict((key.split('::')[-1], val) for key, val in dev.items())
    self.device.configure(props_from_dict(dict(update_available_devices=True)))
    queried = props_to_dict(self.device.query(props_from_dict(dict(available_devices=None))))
    return [strip_namespace(dev) for dev in queried['available_devices']]
def _test_stream_id_eos(self, sink): self._generate_config() self.config_params["shape"] = "constant" test_stream_id = "unit_test_stream_id_eos" self.config_params.pop("stream_id") # Verify that default stream id value is used initially self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing self.comp_obj.configure(props_from_dict({"stream_id":test_stream_id})) print "\nConfigured with new stream id:",test_stream_id rx_data = self._get_until_eos(10, sink) self.assertTrue(len(rx_data)>0, "No packets received.") print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T self.assertTrue(rx_data[-1].EOS, "No EOS before timeout.")
def test_allocationsMethod(self):
    """Exercise AllocationManager.allocations(): empty at start, tracks new
    allocations, rejects unknown ids, and supports lookup by specific id."""
    nb, devMgr = self.launchDeviceManager('/nodes/test_SADUsesDevice/DeviceManager.dcd.xml', debug=self.debuglevel)
    # Check that there are no allocations reported
    self.assertEqual(len(self._allocMgr.allocations([])), 0)
    # Make a single allocation request and check that it looks right
    props = properties.props_from_dict({'simple_alloc': 1})
    request = [_packageRequest('test1', props)]
    response = self._allocMgr.allocate(request)
    self.assertEqual(len(request), len(response))
    self.assertEqual(request[0].requestID, response[0].requestID)
    # Save allocation IDs for later checks
    allocIDs = [item.allocationID for item in response]
    # Check that the reported allocations match expectations
    current = self._allocMgr.allocations([])
    self.assertEqual(len(current), 1)
    self.assertEqual(current[0].allocationID, allocIDs[0])
    # Make two more allocation requests
    pending = [('external', {'simple_alloc': 1}),
               ('matching', {'DCE:ac73446e-f935-40b6-8b8d-4d9adb6b403f': 2,
                             'DCE:7f36cdfb-f828-4e4f-b84f-446e17f1a85b': 'BasicTestDevice'})]
    request = [_packageRequest(name, properties.props_from_dict(caps)) for name, caps in pending]
    response = self._allocMgr.allocate(request)
    self.assertEqual(len(request), len(response))
    allocIDs.extend(item.allocationID for item in response)
    self.assertEqual(len(self._allocMgr.allocations([])), 3)
    # Try to retrieve an invalid allocation ID, making sure it throws an
    # exception
    self.assertRaises(CF.AllocationManager.InvalidAllocationId,
                      self._allocMgr.allocations, ['missing'])
    # Check that we can retrieve a specific allocation
    self.assertEqual(len(self._allocMgr.allocations(allocIDs[-1:])), 1)
def myTestCase(self, data, upper_limit, lower_limit):
    """The main engine for all the test cases - configure, push data, and
    get output As applicable
    """
    # Configure upper and lower limit
    limits = {'upper_limit': upper_limit, 'lower_limit': lower_limit}
    self.comp.configure(props_from_dict(limits))
    keyword = sb.SRIKeyword("testkw", 10.0, 'double')
    # Push in data
    self.src1.push(data, EOS=False, streamID='myStreamID', sampleRate=200,
                   complexData=False, SRIKeywords=[keyword], loop=None)
    return self.waitForData()
def test_MultipleRequests(self):
    """Batch allocation semantics: full success, partial success (only the
    satisfiable requests come back), and distribution across devices."""
    nb, devMgr = self.launchDeviceManager('/nodes/test_SADUsesDevice/DeviceManager.dcd.xml', debug=self.debuglevel)
    # Try two requests that should succeed
    props = properties.props_from_dict({'simple_alloc': 1})
    request = [_packageRequest('test1', props), _packageRequest('test2', props)]
    response = self._allocMgr.allocate(request)
    self.assertEqual(len(request), len(response))
    self._allocMgr.deallocate([item.allocationID for item in response])
    # The second request should fail
    props = properties.props_from_dict({'simple_alloc': 8})
    request = [_packageRequest('test1', props), _packageRequest('test2', props)]
    response = self._allocMgr.allocate(request)
    good_requests = [item.requestID for item in response]
    self.assertTrue(len(request) > len(response))
    self.assertTrue('test1' in good_requests)
    self.assertFalse('test2' in good_requests)
    self._allocMgr.deallocate([item.allocationID for item in response])
    # The first and second requests should fail, but the third should succeed
    bad_props = {'simple_alloc': 12}
    good_props = {'simple_alloc': 8}
    pending = [('test1', bad_props), ('test2', bad_props), ('test3', good_props)]
    request = [_packageRequest(name, properties.props_from_dict(caps)) for name, caps in pending]
    response = self._allocMgr.allocate(request)
    good_requests = [item.requestID for item in response]
    self.assertTrue(len(request) > len(response))
    self.assertEqual(good_requests, ['test3'])
    self._allocMgr.deallocate([item.allocationID for item in response])
    # Ensure that different requests can be allocated to different devices
    pending = [('external', {'simple_alloc': 1}),
               ('matching', {'DCE:ac73446e-f935-40b6-8b8d-4d9adb6b403f': 2,
                             'DCE:7f36cdfb-f828-4e4f-b84f-446e17f1a85b': 'BasicTestDevice'})]
    request = [_packageRequest(name, properties.props_from_dict(caps)) for name, caps in pending]
    response = dict((item.requestID, item) for item in self._allocMgr.allocate(request))
    self.assertEqual(len(request), len(response))
    self.assertFalse(response['external'].allocatedDevice._is_equivalent(response['matching'].allocatedDevice))
    self._allocMgr.deallocate([item.allocationID for item in response.values()])
def test_MultipleRequests(self):
    """Batch allocation semantics (allocMgrHelpers variant): full success,
    partial success, and distribution across devices."""
    nb, devMgr = self.launchDeviceManager('/nodes/test_SADUsesDevice/DeviceManager.dcd.xml')
    # Try two requests that should succeed
    props = properties.props_from_dict({'simple_alloc': 1})
    request = [allocMgrHelpers.createRequest('test1', props),
               allocMgrHelpers.createRequest('test2', props)]
    response = self._allocMgr.allocate(request)
    self.assertEqual(len(request), len(response))
    self._allocMgr.deallocate([item.allocationID for item in response])
    # The second request should fail
    props = properties.props_from_dict({'simple_alloc': 8})
    request = [allocMgrHelpers.createRequest('test1', props),
               allocMgrHelpers.createRequest('test2', props)]
    response = self._allocMgr.allocate(request)
    good_requests = [item.requestID for item in response]
    self.assertTrue(len(request) > len(response))
    self.assertTrue('test1' in good_requests)
    self.assertFalse('test2' in good_requests)
    self._allocMgr.deallocate([item.allocationID for item in response])
    # The first and second requests should fail, but the third should succeed
    bad_props = {'simple_alloc': 12}
    good_props = {'simple_alloc': 8}
    pending = [('test1', bad_props), ('test2', bad_props), ('test3', good_props)]
    request = [allocMgrHelpers.createRequest(name, properties.props_from_dict(caps)) for name, caps in pending]
    response = self._allocMgr.allocate(request)
    good_requests = [item.requestID for item in response]
    self.assertTrue(len(request) > len(response))
    self.assertEqual(good_requests, ['test3'])
    self._allocMgr.deallocate([item.allocationID for item in response])
    # Ensure that different requests can be allocated to different devices
    pending = [('external', {'simple_alloc': 1}),
               ('matching', {'DCE:ac73446e-f935-40b6-8b8d-4d9adb6b403f': 2,
                             'DCE:7f36cdfb-f828-4e4f-b84f-446e17f1a85b': 'BasicTestDevice'})]
    request = [allocMgrHelpers.createRequest(name, properties.props_from_dict(caps)) for name, caps in pending]
    response = dict((item.requestID, item) for item in self._allocMgr.allocate(request))
    self.assertEqual(len(request), len(response))
    self.assertFalse(response['external'].allocatedDevice._is_equivalent(response['matching'].allocatedDevice))
    self._allocMgr.deallocate([item.allocationID for item in response.values()])
def testBadImport(self): """Do a test with various import values known to be bad to verify we have an invalid configuration """ print "\n... running testBadImport" print "FYI: A successful test will also cause stack traces to be displayed" for val in [6, 6.6, 'foo', 'time', [6], [6.6], ['foo'], [None], '']: try: self.comp.configure(props_from_dict({'import': val})) except CF.PropertySet.InvalidConfiguration, e: continue except Exception, e: print 'Configure of import with "%s" did not produce InvalidConfiguration exception, but should.' % val raise e
def _test_pulse(self, sink, type_cast):
    """Pulse shape: every received sample is either the configured magnitude
    (cast to the port's sample type) or zero."""
    self._generate_config()
    self.config_params["shape"] = "pulse"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    expected_value = type_cast(self.config_params["magnitude"])
    for sample in rx_data:
        self.assertTrue(sample == expected_value or sample == 0)
def _test_push_sri(self, sink):
    """Every received packet's SRI xdelta should correspond to the configured
    sample rate (packet tuple index 3 holds the SRI)."""
    self._generate_config()
    self.config_params.pop("stream_id")
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    for pkt in self._get_received_packets(start_time, rx_len_sec, sink):
        self.assert_isclose(self.config_params["sample_rate"], 1 / pkt[3].xdelta, PRECISION, NUM_PLACES)
def _test_push_sri(self, sink): self._generate_config() self.config_params.pop("stream_id") self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec= 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec, sink) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T for p in rx_data: #self.assertAlmostEqual(self.config_params["sample_rate"], 1 / p.sri.xdelta) self.assert_isclose(self.config_params["sample_rate"], 1 / p.sri.xdelta, PRECISION, NUM_PLACES)
def test_AllocationSubsetLocalRemote(self):
    """
    Test that AllocationManager can split usesdevice allocations across the
    local domain and a remote one.
    """
    self.launchDeviceManager("/nodes/MultiDomain1_node/DeviceManager.dcd.xml",
                             domainManager=self._domainManager_1)
    self.launchDeviceManager("/nodes/MultiDomain2_node/DeviceManager.dcd.xml",
                             domainManager=self._domainManager_2)
    # Register second domain with first (no need to do both directions)
    self._domainManager_1.registerRemoteDomainManager(self._domainManager_2)
    local_mgr = self._domainManager_1._get_allocationMgr()
    remote_mgr = self._domainManager_2._get_allocationMgr()
    # Check that the initial state of all allocations is empty
    for mgr in (local_mgr, remote_mgr):
        self.assertEqual(mgr.allocations([]), [])
        self.assertEqual(mgr.localAllocations([]), [])
    # Make a couple of allocation requests that we know will have to be
    # split across the local and remote domains
    usescap = {'count': 1}
    requests = [allocMgrHelpers.createRequest('test_%d' % ii, properties.props_from_dict(usescap))
                for ii in range(2)]
    # Both requests should be satisfied
    results = local_mgr.allocate(requests)
    self.assertEqual(len(requests), len(results))
    expected = set(item.requestID for item in requests)
    actual = set(item.requestID for item in results)
    self.assertEqual(expected, actual)
    # One allocation on the local domain, one on the remote domain
    self.assertEqual(len(local_mgr.localAllocations([])), 1)
    self.assertEqual(len(remote_mgr.localAllocations([])), 1)
def _test_pulse(self, sink, type_cast): self._generate_config() self.config_params["shape"] = "pulse" self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec, sink) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T expected_value = type_cast(self.config_params["magnitude"]) for p in rx_data: # Data returned is list of test_utils.BufferedPacket for value in p.data: self.assertTrue(value == expected_value or value == 0)
def test_single_file(self): print "\n... Staring single file write test" config_dict = self._generate_config() data_filename = config_dict['filename'] sri_filename = data_filename + ".sri" if os.path.exists(data_filename): os.remove(data_filename) time.sleep(2) self.comp.configure(props_from_dict(config_dict)) self._generate_keywords() self._send_data() time.sleep(self.t_data_wait) # Hack to wait for DataWriter to finish writing to disk before we verify contents self._validate_data(data_filename, self.expected_data, config_dict["endian"]) self._validate_metadata(sri_filename, self.t_delta, self.stream_id1, self.keywords_dict, self.first_pkt_time, self.last_pkt_time)
def runEnumTest(self, testValues):
    """Fill each requested property with its enum label -> value mapping;
    raise CF.UnknownProperties for any property ids we don't recognize."""
    unknown = []
    for prop in testValues:
        mapping = {}
        if prop.id == "floatenum":
            mapping['DEFAULT'] = enums.floatenum.DEFAULT
            mapping['OTHER'] = enums.floatenum.OTHER
        elif prop.id == "stringenum":
            mapping['START'] = enums.stringenum.START
            mapping['STOPPED'] = enums.stringenum.STOPPED
        elif prop.id == "structprop":
            mapping['structprop::number'] = {'ZERO': enums.structprop.number.ZERO,
                                             'ONE': enums.structprop.number.ONE,
                                             'TWO': enums.structprop.number.TWO}
            mapping['structprop::alpha'] = {'ABC': enums.structprop.alpha.ABC,
                                            'DEF': enums.structprop.alpha.DEF}
        elif prop.id == "structseq":
            mapping['structseq::number'] = {'POSITIVE': enums.structseq_struct.number.POSITIVE,
                                            'ZERO': enums.structseq_struct.number.ZERO,
                                            'NEGATIVE': enums.structseq_struct.number.NEGATIVE}
            mapping['structseq::text'] = {'HEADER': enums.structseq_struct.text.HEADER,
                                          'BODY': enums.structseq_struct.text.BODY,
                                          'FOOTER': enums.structseq_struct.text.FOOTER}
        else:
            unknown.append(prop)
        # Unknown ids still get an (empty) Any assigned, matching original behavior
        prop.value = properties.props_to_any(properties.props_from_dict(mapping))
    if unknown:
        raise CF.UnknownProperties(unknown)
    return testValues
def test_single_file(self): print "\n... Staring single file write test" config_dict = self._generate_config() data_filename = config_dict['filename'] sri_filename = data_filename + ".sri" if os.path.exists(data_filename): os.remove(data_filename) while os.path.exists(data_filename): time.sleep(self.t_delta) self.comp.configure(props_from_dict(config_dict)) self._generate_keywords() self._send_data() self._sleepTillDone(data_filename) self._validate_data(data_filename, self.expected_data, config_dict["endian"]) self._validate_metadata(sri_filename, self.t_delta, self.stream_id1, self.keywords_dict, self.first_pkt_time, self.last_pkt_time)
def test_constant(self): print "\n... Starting Test Constant" self._generate_config() self.config_params["shape"] = "constant" self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T expected_value = self.config_params["magnitude"] for p in rx_data: # Data returned is list of test_utils.BufferedPacket for value in p.data: self.assertEqual(value, expected_value)
def test_throttle(self): print "\n... Starting Throttle Test" self._generate_config() self.config_params["shape"] = "constant" self.config_params["throttle"] = True self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec) expected_num_packets = self.config_params["sample_rate"]/self.config_params["xfer_len"] n_packets = len(rx_data) print "Received %d Packets" % n_packets print "Expected %d Packets (tolerance is +/- 1)" % expected_num_packets self.assertTrue(n_packets >= expected_num_packets-1 and n_packets <= expected_num_packets+1) # Allow for +/- packet tolerance due to how we're getting the data
def myTestCase(self, testEquation, data1, data2, checkResults=True, data1Cx=False, data2Cx=False, validateSRI=True): """The main engine for all the test cases - configure the equation, push data, and get output As applicable """ streamID = "RandomStreamName" if testEquation: print "\n... running myTestCase %s" % testEquation self.comp.configure(props_from_dict({'equation': testEquation})) if data1: self.src1.push(data1, complexData=data1Cx, streamID=streamID) if data2: self.src2.push(data2, complexData=data2Cx, streamID=streamID) #data processing is asynchronos - so wait until the data is all processed count = 0 while True: out = self.sink.getData() if out: break if count == 100: break time.sleep(.01) count += 1 if checkResults: if isinstance(checkResults, list): numOut = 0 for got, expected in zip(out, checkResults): self.assertEqual(got, expected) numOut += 1 else: numOut = self.checkResults(testEquation, data1, data2, out) self.assertNotEqual(numOut, 0) if validateSRI: self.validateSRIHandling(streamID) return out
def _test_lrs(self, sink, convert_function): self._generate_config() self.config_params["shape"] = "lrs" self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec, sink) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T expected_values = convert_function(self.waveforms.generate_lrs(self.config_params["magnitude"], self.config_params["xfer_len"])) n_expected = len(expected_values) for p in rx_data: # Data returned is list of test_utils.BufferedPacket self.assertEqual(len(p.data), n_expected) for rx_val, exp_val in zip(p.data, expected_values): #self.assertAlmostEqual(rx_val, exp_val, 5) self.assert_isclose(rx_val, exp_val, PRECISION, NUM_PLACES)
def _test_constant(self, sink, type_cast):
    """Constant shape: every received sample is close to the configured
    magnitude (cast to the port's sample type)."""
    self._generate_config()
    self.config_params["shape"] = "constant"
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)  # Ensure SigGen is sending out the desired signal before continuing
    start_time = time.time()
    rx_len_sec = 1.  # Get 1s worth of data
    expected_value = type_cast(self.config_params["magnitude"])
    for sample in self._get_received_data(start_time, rx_len_sec, sink):
        self.assert_isclose(sample, expected_value, PRECISION, NUM_PLACES)
def _test_frequency(self, sink): self._generate_config() self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) start_time = time.time() rx_len_sec = 1. rx_data = self._get_received_data(start_time, rx_len_sec, sink) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T zero_crossings = 0 expected_zero_crossings = 2 * self.config_params["frequency"] * self.config_params["xfer_len"] / self.config_params["sample_rate"] # 2 * (zc/s /2) * (S/packet) / (S/s) = zc data = rx_data[0].data if abs(data[0]) <= 10**(-1*NUM_PLACES): data[0]=0.0 #same as (but less math): if isclose(data[0], 0, PRECISION, NUM_PLACES): data[0]=0.0 for i in xrange(len(data)-1): if abs(data[i+1]) <= 10**(-1*NUM_PLACES): data[i+1]=0.0 if (data[i] <= 0 and data[i+1] > 0) or (data[i] >= 0 and data[i+1] < 0): zero_crossings += 1 self.assertEqual(zero_crossings, expected_zero_crossings)
def _test_signal_with_phase(self, shape, signal_function): self._generate_config() self.config_params["shape"] = shape self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep(1.) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data rx_data = self._get_received_data(start_time, rx_len_sec) print "\nReceived Data Time Range:" print rx_data[0].T print rx_data[-1].T delta_phase = self.config_params["frequency"] / self.config_params["sample_rate"] expected_values = signal_function(self.config_params["magnitude"], self.config_params["xfer_len"], dp=delta_phase ) n_expected = len(expected_values) for p in rx_data: # Data returned is list of test_utils.BufferedPacket self.assertEqual(len(p.data), n_expected) for rx_val, exp_val in zip(p.data, expected_values): self.assertEqual(rx_val, exp_val)
def setUp(self):
    """Set up the unit test - this is run before every method that starts
    with test"""
    ossie.utils.testing.ScaComponentTestCase.setUp(self)
    self.src = sb.DataSource()
    self.sinkAM = sb.DataSink()
    self.sinkPM = sb.DataSink()
    self.sinkFM = sb.DataSink()
    # start all my components
    self.startComponent()
    self.comp.configure(props_from_dict({'freqDeviation': 10.0,
                                         'phaseDeviation': 20.0,
                                         'squelch': -2000.0,
                                         'debug': False}))
    self.comp.start()
    for helper in (self.src, self.sinkAM, self.sinkPM, self.sinkFM):
        helper.start()
def myTestCase(self, data, upper_limit, lower_limit):
    """The main engine for all the test cases - configure, push data, and
    get output As applicable
    """
    # Configure upper and lower limit
    limits = {'upper_limit': upper_limit, 'lower_limit': lower_limit}
    self.comp.configure(props_from_dict(limits))
    keyword = sb.SRIKeyword("testkw", 10.0, 'double')
    # Push in data
    self.src1.push(data, EOS=False, streamID='myStreamID', sampleRate=200,
                   complexData=False, SRIKeywords=[keyword], loop=None)
    return self.waitForData()
def set_target_hardware(self, rtlx):
    '''
    Sets the target RTL to use.  To choose a target device use a dictionary
    with the criteria to select (from the available rtl devices).
    Returns the target that was just set.

    > x.set_target_rtl(dict(index=0))
    [
      { 'index': 0,
        'name': None,
        'product': None,
        'serial': None,
        'vendor': None, }
    ]
    '''
    # the device has a bug in that if it gets the same value, it won't trigger
    # target device. So set it to some weird number and than change it again
    sentinel = dict(target_device=dict(index=-2))
    self.device.configure(sentinel)
    # FIXME: Validate rtlx has the right fields
    self.device.configure(dict(target_device=rtlx))
    query_props = props_from_dict(dict(target_device=None))
    return props_to_dict(self.device.query(query_props))
def setUp(self):
    """Set up the unit test - this is run before every method that starts
    with test"""
    ossie.utils.testing.ScaComponentTestCase.setUp(self)
    self.src = sb.DataSource()
    self.sinkAM = sb.DataSink()
    self.sinkPM = sb.DataSink()
    self.sinkFM = sb.DataSink()
    # start all my components
    self.startComponent()
    self.comp.configure(props_from_dict({'freqDeviation': 10.0,
                                         'phaseDeviation': 20.0,
                                         'squelch': -2000.0}))
    self.comp.start()
    for helper in (self.src, self.sinkAM, self.sinkPM, self.sinkFM):
        helper.start()
def set_target_hardware(self, rtlx):
    '''
    Sets the target RTL to use.  To choose a target device use a dictionary
    with the criteria to select (from the available rtl devices).
    Returns the target that was just set.

    > x.set_target_rtl(dict(index=0))
    [
      { 'index': 0,
        'name': None,
        'product': None,
        'serial': None,
        'vendor': None, }
    ]
    '''
    # the device has a bug in that if it gets the same value, it won't trigger
    # target device. So set it to some weird number and than change it again
    sentinel = dict(target_device=dict(index=-2))
    self.device.configure(sentinel)
    # FIXME: Validate rtlx has the right fields
    self.device.configure(dict(target_device=rtlx))
    query_props = props_from_dict(dict(target_device=None))
    return props_to_dict(self.device.query(query_props))
def _test_throttle(self, sink): self._generate_config() self.config_params["shape"] = "constant" self.config_params["throttle"] = True self.comp_obj.configure(props_from_dict(self.config_params)) time.sleep( 1. ) # Ensure SigGen is sending out the desired signal before continuing start_time = time.time() rx_len_sec = 1. # Get 1s worth of data expected_num_packets = self.config_params[ "sample_rate"] / self.config_params["xfer_len"] rx_data = self._get_received_data(start_time, rx_len_sec, sink) n_packets = len(rx_data) / self.config_params["xfer_len"] print "Received %d Packets" % n_packets print "Expected %d Packets (tolerance is +/- 1)" % expected_num_packets # Allow for +/- packet tolerance due to how we're getting the data self.assertTrue(n_packets >= expected_num_packets - 1) self.assertTrue(n_packets <= expected_num_packets + 1)
def _test_frequency(self, sink):
    """Count zero crossings in the first packet's worth of samples and compare
    to the expected count for the configured frequency and sample rate."""
    self._generate_config()
    self.comp_obj.configure(props_from_dict(self.config_params))
    time.sleep(1.)
    start_time = time.time()
    rx_len_sec = 1.
    rx_data = self._get_received_data(start_time, rx_len_sec, sink)
    # 2 * (zc/s /2) * (S/packet) / (S/s) = zc
    expected_zero_crossings = 2 * self.config_params["frequency"] * self.config_params["xfer_len"] / self.config_params["sample_rate"]
    data = rx_data[:self.config_params["xfer_len"]]
    # Snap near-zero samples to exactly zero so rounding noise doesn't hide
    # or fake a crossing (same as isclose(sample, 0, PRECISION, NUM_PLACES))
    if abs(data[0]) <= 10**(-1 * NUM_PLACES):
        data[0] = 0.0
    zero_crossings = 0
    for i in xrange(len(data) - 1):
        if abs(data[i + 1]) <= 10**(-1 * NUM_PLACES):
            data[i + 1] = 0.0
        if (data[i] <= 0 and data[i + 1] > 0) or (data[i] >= 0 and data[i + 1] < 0):
            zero_crossings += 1
    self.assertEqual(zero_crossings, expected_zero_crossings)