def test_001(self):
     
     src1_data = [0,2,-3,0,12,0]
     src2_data = [0,0,3,0,10,0]
     src3_data = (0,0,3,0,1,0)
     src4_data = (0,1,8,6,3,4)
     a = sb.DataSource(dataFormat="short")
     b = sb.DataSource(dataFormat="short")
     c = sb.DataSource(dataFormat="short")
     d = sb.DataSource(dataFormat="short")
     argmax = gr.argmax(gr.sizeof_short, len(src1_data), 4)
     a.connect(argmax,providesPortName="short_in_1")
     b.connect(argmax,providesPortName="short_in_2")
     c.connect(argmax,providesPortName="short_in_3")
     d.connect(argmax,providesPortName="short_in_4")
     dest1 = gr.vector_sink_s ()
     dest2 = gr.vector_sink_s ()
     argmax.connect(dest1,usesPortName="short_out_1")
     argmax.connect(dest2,usesPortName="short_out_2")
     sb.start()
     a.push(src1_data,EOS=True)
     b.push(src2_data,EOS=True)
     c.push(src3_data,EOS=True)
     d.push(src4_data,EOS=True)
     index = dest1.getData(eos_block=True)
     source = dest2.getData(eos_block=True)
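     # 12 (stream 1, index 4) is the largest value pushed on any input, so the
     # argmax block should report index 4 originating from source stream 0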
     self.assertEqual ( tuple(index), (4,))
     self.assertEqual ( tuple(source), (0,))
    def test_push_packet_cplx(self):
        in_sri = bulkio.sri.create()
        in_sri.streamID = "VECTOR-PUSHPACKET-CPLX"
        in_sri.mode = 1
        in_sri.xdelta  = 1/33.0
        dsource=sb.DataSource()
        dsink=sb.DataSink()
        c_spd_xml = test_dir + self.c_dir + '/' + self.c_name + '/' + self.c_name + '.spd.xml'
        print "Test Component:" + c_spd_xml
        test_comp=self.launch( c_spd_xml, execparams=self.execparams)
        data=self.seq

        dsource.connect(test_comp, providesPortName=self.c_inport )
        test_comp.connect(dsink, providesPortName=self.sink_inport, usesPortName=self.c_outport)
        sb.start()
        dsource.push( data, EOS=True, streamID=in_sri.streamID, sampleRate=33.0, complexData=(in_sri.mode==1) )
        adata=dsink.getData(eos_block=True)
        #print "Result data: " + str(len(adata))
        #print data
        #print adata
        self.assertEqual(len(data),len(adata),"PUSH PACKET CPLX FAILED....Data Vector Mismatch")

        #
        # check sri values
        #
        sri = dsink.sri()
        print "StreamID   in:" + str(in_sri.streamID)+ " arrive:" + str(sri.streamID) 
        self.assertEqual(sri.streamID,in_sri.streamID,"PUSH PACKET CPLX FAILED....SRI StreamID Mismatch")

        print "Mode in:" + str(in_sri.mode)+ " arrive:" + str(sri.mode) 
        self.assertEqual(sri.mode,in_sri.mode,"PUSH PACKET CPLX FAILED....SRI Mode Mismatch")

        print "SampleRate in:" + str(in_sri.xdelta)+ " arrive:" + str(sri.xdelta) 
        self.assertAlmostEqual(sri.xdelta,in_sri.xdelta, 3, msg="PUSH PACKET CPLX FAILED....SRI SampleRate Mismatch")
    def test_001(self):

        src_data1 = [0,2,-3,0,12,0,2]
        src_data2 = [0,3,-4,1,13,1,2]
        src_data3 = [0,4,-6,3,11,1,4]
        src_data4 = [0,5,-4,5,16,2,1]
        max_src_data = ( max(src_data1), max(src_data2), max(src_data3), max(src_data4))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="long")
        b = sb.DataSource(dataFormat="long")
        c = sb.DataSource(dataFormat="long")
        d = sb.DataSource(dataFormat="long")
        e = gr.max_(gr.sizeof_int, 4, len(src_data1))
        f = sb.DataSink()
        a.connect(e,providesPortName="long_in_1")
        b.connect(e,providesPortName="long_in_2")
        c.connect(e,providesPortName="long_in_3")
        d.connect(e,providesPortName="long_in_4")
        e.connect(f)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        c.push(src_data3,EOS=True)
        d.push(src_data4,EOS=True)
        result_data = f.getData(eos_block=True)
        
        self.assertEqual( expected_result, result_data[0] )
    def test_001(self):

        src_data1 = [0,0.2,-0.3,0.0,12,0.0,2.0]
        src_data2 = [0,0.3,-0.4,1.0,13.0,1.0,2.0]
        src_data3 = [0,0.4,-0.6,3.0,11.0,1.0,4.0]
        src_data4 = [0,0.5,-0.4,5.0,16.0,2.0,1.0]
        max_src_data = ( max(src_data1), max(src_data2), max(src_data3), max(src_data4))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="float")
        b = sb.DataSource(dataFormat="float")
        c = sb.DataSource(dataFormat="float")
        d = sb.DataSource(dataFormat="float")
        e = gr.max_(gr.sizeof_float, 4, len(src_data1))
        f = sb.DataSink()
        a.connect(e,providesPortName="float_in_1")
        b.connect(e,providesPortName="float_in_2")
        c.connect(e,providesPortName="float_in_3")
        d.connect(e,providesPortName="float_in_4")
        e.connect(f)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        c.push(src_data3,EOS=True)
        d.push(src_data4,EOS=True)
        result_data = f.getData(eos_block=True)
        
        self.assertEqual( expected_result, result_data[0] )
    def test_002(self):

        src_data1= [-100,-99,-98,-97,-96,-1]
        src_data2 = [-101,-98, -97,-95, -95,-1]
        src_data3 = [-98,-102, -95,-90, -91,-3]
        src_data4 = [-97,-101, -93,-91, -95,-1]
        max_src_data = ( max(src_data1), max(src_data2), max(src_data3), max(src_data4))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="long")
        b = sb.DataSource(dataFormat="long")
        c = sb.DataSource(dataFormat="long")
        d = sb.DataSource(dataFormat="long")
        e = gr.max_(gr.sizeof_int, 4, len(src_data1))
        f = sb.DataSink()
        a.connect(e,providesPortName="long_in_1")
        b.connect(e,providesPortName="long_in_2")
        c.connect(e,providesPortName="long_in_3")
        d.connect(e,providesPortName="long_in_4")
        e.connect(f)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        c.push(src_data3,EOS=True)
        d.push(src_data4,EOS=True)
        result_data = f.getData(eos_block=True)
        
        self.assertEqual(expected_result, result_data[0] )
    def _test_FileSource(self, format):
        filename = self._tempfileName('source_%s' % format)

        complexData = format.startswith('C')
        typecode = format[1]
        dataFormat, dataType = self.TYPEMAP[typecode]
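        # 'format' is a two-character MIDAS/BLUE type code: a leading 'C' marks
        # complex data (vs. 'S' for scalar) and the second character selects the
        # element type through TYPEMAP (e.g. 'CF' -> complex float)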

        indata = [dataType(x) for x in xrange(16)]
        if complexData:
            if dataFormat in ('float', 'double'):
                indata = [complex(x) for x in indata]
            else:
                indata = numpy.reshape(indata, (8,2))
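        # type 1000 BLUE header: one-dimensional data in the element format above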
        hdr = bluefile.header(1000, format)
        bluefile.write(filename, hdr, indata)

        source = sb.FileSource(filename, midasFile=True, dataFormat=dataFormat)
        sink = sb.DataSink()
        source.connect(sink)
        sb.start()
        outdata = sink.getData(eos_block=True)
        if complexData:
            self.assertEqual(sink.sri().mode, 1)
            if dataFormat in ('float', 'double'):
                outdata = bulkio_helpers.bulkioComplexToPythonComplexList(outdata)
            else:
                outdata = numpy.reshape(outdata, (len(outdata)/2,2))
        else:
            self.assertEqual(sink.sri().mode, 0)
        self.assertTrue(numpy.array_equal(indata, outdata), msg='%s != %s' % (indata, outdata))
    def testFloatPort(self):
        #######################################################################
        # Test FLOAT Functionality
        print "\n**TESTING FLOAT PORT"

        # Define test files
        dataFileIn = "./data.in"
        dataFileOut = "./data.out"

        # Create Test Data File if it doesn't exist
        if not os.path.isfile(dataFileIn):
            with open(dataFileIn, "wb") as dataIn:
                dataIn.write(os.urandom(1024))

        # Read in Data from Test File
        size = os.path.getsize(dataFileIn)
        with open(dataFileIn, "rb") as dataIn:
            data = list(struct.unpack("f" * (size / 4), dataIn.read(size)))
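        # reinterpret the raw bytes as 32-bit floats (size/4 values; 256 for the
        # 1024-byte file created above)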

        # Create Components and Connections
        comp = sb.launch("../FileWriter.spd.xml")
        comp.destination_uri = dataFileOut
        comp.advanced_properties.existing_file = "TRUNCATE"

        source = sb.DataSource(bytesPerPush=64, dataFormat="32f")
        source.connect(comp, providesPortName="dataFloat_in")

        # Start Components & Push Data
        sb.start()
        source.push(data)
        time.sleep(2)
        sb.stop()

        # Check that the input and output files are the same
        try:
            self.assertEqual(filecmp.cmp(dataFileIn, dataFileOut), True)
        except self.failureException as e:
            # unpacked bytes may be NaN, which could cause test to fail unnecessarily
            size = os.path.getsize(dataFileOut)
            with open(dataFileOut, "rb") as dataOut:
                data2 = list(struct.unpack("f" * (size / 4), dataOut.read(size)))
            for a, b in zip(data, data2):
                if a != b:
                    if a != a and b != b:
                        print "Difference in NaN format, ignoring..."
                    else:
                        print "FAILED:", a, "!=", b
                        raise e

        # Release the components and remove the generated files
        finally:
            comp.releaseObject()
            source.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileOut)

        print "........ PASSED\n"
        return
 def run(self):
     sb.start()
     for source in self.sources:
         # TODO: only do this if this is an sbSource
         # try statement is a little sloppy
         try:
             source.push()
         except AttributeError:
             pass
     # Give blocks time to get setup
     time.sleep(.1)
 def setUp(self):
     #set up 
     ossie.utils.testing.ScaComponentTestCase.setUp(self)
     self.src = sb.DataSource()
     self.sink = sb.DataSink()
     
     #connect 
     self.startComponent()
     #self.src.connect(self.comp)
     self.comp.connect(self.sink, 'floatIn')
     
     #starts sandbox
     sb.start()
    def test_vector(self):
        dsource=sb.DataSource()
        dsink=sb.DataSink()
        test_comp=sb.Component(self.cname)
        data=range(100)
        dsource.connect(test_comp, providesPortName=self.inport )
        test_comp.connect(dsink, providesPortName=self.sink_port_name, usesPortName=self.outport)
        sb.start()
        dsource.push(data,EOS=True)
        dest_data=dsink.getData(eos_block=True)
        sb.stop()

        self.assertEqual(data, dest_data)
    def testCharPort(self):
        #######################################################################
        # Test Char Functionality
        print "\n**TESTING CHAR PORT"

        # Define test files
        dataFileIn = "./data.in"
        dataFileOut = "./data.out"

        # Create Test Data File if it doesn't exist
        if not os.path.isfile(dataFileIn):
            with open(dataFileIn, "wb") as dataIn:
                dataIn.write(os.urandom(1024))

        # Read in Data from Test File
        size = os.path.getsize(dataFileIn)
        with open(dataFileIn, "rb") as dataIn:
            data = list(struct.unpack("b" * size, dataIn.read(size)))

        # Create Components and Connections
        comp = sb.launch("../FileWriter.spd.xml")
        comp.destination_uri = dataFileOut
        comp.advanced_properties.existing_file = "TRUNCATE"

        source = sb.DataSource(bytesPerPush=64, dataFormat="8t")
        source.connect(comp, providesPortName="dataChar_in")

        # Start Components & Push Data
        sb.start()
        source.push(data)
        time.sleep(2)
        sb.stop()

        # Check that the input and output files are the same
        try:
            self.assertEqual(filecmp.cmp(dataFileIn, dataFileOut), True)
        except self.failureException as e:
            comp.releaseObject()
            source.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileOut)
            raise e

        # Release the components and remove the generated files
        comp.releaseObject()
        source.releaseObject()
        os.remove(dataFileIn)
        os.remove(dataFileOut)

        print "........ PASSED\n"
        return
Example #13
	def run(self):
		print 'starting benchmark'
		sb.start()
		
		#first sample recorded should be a couple away from the start of BenchmarkGen
		#due to the high output rate to fill up the queue
		lastTP = self.bc.packets_per_time_avg*self.samplesAway
		
		#loop counter
		loopCount = 0
		#run until less than 1%
		while 1:
			timer = 0
			#run until time for benchmark runs out
			while timer <= self.bmTime: 
				#run until next value can be collected
				while self.bc.totalPackets <= lastTP: 
					sleep(.1)
					timer = timer + .1;
				#collect data from benchmark component
				self.samples.append(float(timer + loopCount*self.bmTime))
				self.outputRates.append(float(self.bc.avg_output_rate))
				self.times.append(float(self.bc.time))
				lastTP = lastTP + self.bc.packets_per_time_avg
			#calculating statistics
			N = len(self.samples)
			s = numpy.std(self.outputRates)
			x = numpy.mean(self.outputRates)
			value = 1.96*(s/numpy.sqrt(N))
			lower = x - value
			upper = x + value
			percent = (value / x) * 100
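			# 1.96 is the two-sided 95% confidence z-score, so `value` is the CI
			# half-width for the mean output rate and `percent` is that half-width
			# relative to the mean; the loop below exits once it drops to 1% or less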
			
			#debug
			if self.debug==1:
				print 'N is: ' + str(int(N))
				print 's is: ' + str(float(s))
				print 'x is: ' + str(float(x))
				print 'value is: ' + str(float(value))
				print '95% confidence level: ' + str(float(lower)) + ' - ' + str(float(upper))
				print 'percent away is: ' + str(float(percent))
			loopCount = loopCount + 1
			if(percent <= 1):
				break;
		
		self.printStats()
		if self.showPlot==1:
			self.plot()
		
		sb.reset()
		sb.stop()
 def test_add_ss (self):
     src_data = [1,2,3,4,5, 6,7,8]
     expected_result = [2,4,6,8,10,12,14,16]
     a = sb.DataSource(dataFormat="short")
     b = sb.DataSource(dataFormat="short")
     c = sb.Component("../components/add_ss_2i/add_ss_2i.spd.xml")
     d = sb.DataSink()
     a.connect(c,providesPortName="data_in_0")
     b.connect(c,providesPortName="data_in_1")
     c.connect(d)
     sb.start()
     a.push(src_data,EOS=True)
     b.push(src_data,EOS=True)
     result_data = d.getData(eos_block=True)
     self.assertEqual(expected_result, result_data)
 def test_float_to_complex_2 (self):
     src_data_0 = [0, 1, -1, 3, -3, 2, -4, -2]
     src_data_1 = [0, 0, 0, 4, -4, 4, -4, 4]
     expected_result = (0, 1, -1, 3+4j, -3-4j, 2+4j, -4-4j, -2+4j)
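     # src_data_0 carries the real parts and src_data_1 the imaginary parts,
     # so each expected output sample is src_data_0[i] + src_data_1[i]*1j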
     src0 = sb.DataSource()
     src1 = sb.DataSource()
     op = sb.launch('../components/float_to_complex_2i/float_to_complex_2i.spd.xml')
     dst = gr.vector_sink_c ()
     src0.connect(op, providesPortName='real_float_in')
     src1.connect(op, providesPortName='imag_float_in')
     op.connect(dst)
     sb.start()
     src0.push(src_data_0, EOS=True)
     src1.push(src_data_1, EOS=True)
     actual_result = dst.data ()
     self.assertComplexTuplesAlmostEqual (expected_result, actual_result)
 def test_001(self):
     
     src1_data = [0,0.2,-0.3,0,12,0]
     a = sb.DataSource(dataFormat="float")
     argmax = gr.argmax(gr.sizeof_float, len(src1_data),1 )
     a.connect(argmax,providesPortName="float_in")
     dest1 = gr.vector_sink_s ()
     dest2 = gr.vector_sink_s ()
     argmax.connect(dest1,usesPortName="short_out_1")
     argmax.connect(dest2,usesPortName="short_out_2")
     sb.start()
     a.push(src1_data,EOS=True)
     index = dest1.getData(eos_block=True)
     source = dest2.getData(eos_block=True)
     self.assertEqual ( tuple(index), (4,))
     self.assertEqual ( tuple(source), (0,))
Example #17
    def setUp(self):
        """ Set up unit test - run before every method that starts with test """
        ossie.utils.testing.ScaComponentTestCase.setUp(self)
        self.src = sb.DataSource()
        self.sink = sb.DataSink()

        # connect components
        self.startComponent()
        self.src.connect(self.comp)
        self.comp.connect(self.sink)

        # starts sandbox
        sb.start()

        # variables
        self.operand = 10
        self.LEN = 100
        self.dataIn = [float(x) for x in xrange(self.LEN)]
    def test_001(self):

        src_data1 = [0,0.2,-0.3,0.0,12,0.0,2.0]
        src_data2 = [0,0.3,-0.4,1.0,13.0,1.0,2.0]
        max_src_data = ( max(src_data1), max(src_data2))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="float")
        b = sb.DataSource(dataFormat="float")
        c = gr.max_(gr.sizeof_float, 2, len(src_data1))
        d = sb.DataSink()
        a.connect(c,providesPortName="float_in_1")
        b.connect(c,providesPortName="float_in_2")
        c.connect(d)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        result_data = d.getData(eos_block=True)
        
        self.assertEqual( expected_result, result_data[0] )
    def test_002(self):

        src_data1=[-100,-99,-98,-97,-96,-1]
        src_data2 = [-101,-98, -97,-95, -95,-1]
        max_src_data = ( max(src_data1), max(src_data2))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="float")
        b = sb.DataSource(dataFormat="float")
        c = gr.max_(gr.sizeof_float, 2, len(src_data1))
        d = sb.DataSink()
        a.connect(c,providesPortName="float_in_1")
        b.connect(c,providesPortName="float_in_2")
        c.connect(d)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        result_data = d.getData(eos_block=True)
        
        self.assertEqual(expected_result, result_data[0] )
    def testXmlPort(self):
        #######################################################################
        # Test XML Functionality
        print "\n**TESTING XML PORT"

        # Create Test Data
        dataFileOut = "./data.out"

        with open("data.xml", "rb") as file:
            inputData = file.read()

        # Connect DataSource to FileWriter
        comp = sb.launch("../FileWriter.spd.xml")
        comp.destination_uri = dataFileOut
        comp.advanced_properties.existing_file = "TRUNCATE"

        source = sb.DataSource(bytesPerPush=64, dataFormat="xml")
        source.connect(comp, providesPortName="dataXML_in")

        # Start Components & Push Data
        sb.start()
        source.push(inputData)
        time.sleep(2)
        sb.stop()

        # Check that the input and output files are the same
        try:
            self.assertEqual(filecmp.cmp("./data.xml", dataFileOut), True)
        except self.failureException as e:
            comp.releaseObject()
            source.releaseObject()
            os.remove(dataFileOut)
            raise e

        # Release the components and remove the generated files
        comp.releaseObject()
        source.releaseObject()
        os.remove(dataFileOut)

        print "........ PASSED\n"
        return
    def test_push_packet(self):
        ##print self.ctx
        dsource=sb.DataSource()
        dsink=sb.DataSink()
        c_spd_xml = test_dir + self.c_dir + '/' + self.c_name + '/' + self.c_name + '.spd.xml'
        print c_spd_xml
        test_comp=sb.launch( c_spd_xml, execparams=self.execparams)
        if self.seq:
            data=self.seq

        cmp_data = data
        if self.cmpData:
            cmp_data = self.cmpData

        dsource.connect(test_comp, providesPortName=self.c_inport )
        test_comp.connect(dsink, providesPortName=self.sink_inport, usesPortName=self.c_outport)
        sb.start()
        dsource.push(data,EOS=True)
        dest_data=dsink.getData(eos_block=True)
        sb.stop()
        self.assertEqual(cmp_data, dest_data)
    def _test_FileSink(self, format):
        filename = self._tempfileName('sink_%s' % format)

        complexData = format.startswith('C')
        typecode = format[1]
        dataFormat, dataType = self.TYPEMAP[typecode]
        indata = [dataType(x) for x in xrange(16)]

        source = sb.DataSource(dataFormat=dataFormat)
        sink = sb.FileSink(filename, midasFile=True)
        source.connect(sink)
        sb.start()
        source.push(indata, complexData=complexData, EOS=True)
        sink.waitForEOS()

        hdr, outdata = bluefile.read(filename)
        self.assertEqual(hdr['format'], format)
        if complexData:
            if dataFormat in ('double', 'float'):
                outdata = list(self._flatten(outdata))
            else:
                outdata = outdata.flatten()
        self.assertTrue(numpy.array_equal(indata, outdata), msg="Format '%s' %s != %s" % (format, indata, outdata))
    def test_002(self):

        src_data1=[-100,-99,-98,-97,-96,-1]
        src_data2 = [-101,-98, -97,-95, -95,-1]
        src_data3 = [-98,-102, -95,-90, -91,-3]
        max_src_data = ( max(src_data1), max(src_data2), max(src_data3))
        expected_result = max((max_src_data))

        a = sb.DataSource(dataFormat="short")
        b = sb.DataSource(dataFormat="short")
        c = sb.DataSource(dataFormat="short")
        d = gr.max_(gr.sizeof_short, 3, len(src_data1))
        e = sb.DataSink()
        a.connect(d,providesPortName="short_in_1")
        b.connect(d,providesPortName="short_in_2")
        c.connect(d,providesPortName="short_in_3")
        d.connect(e)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        c.push(src_data3,EOS=True)
        result_data = e.getData(eos_block=True)
        
        self.assertEqual(expected_result, result_data[0] )
    def test_001(self):

        src_data1 = [0,2,-3,0,12,0,2]
        src_data2 = [0,3,-4,1,13,1,2]
        src_data3 = [0,4,-6,3,11,1,4]
        max_src_data = ( max(src_data1), max(src_data2), max(src_data3))
        expected_result = float(max((max_src_data)))

        a = sb.DataSource(dataFormat="short")
        b = sb.DataSource(dataFormat="short")
        c = sb.DataSource(dataFormat="short")
        d = gr.max_(gr.sizeof_short, 3, len(src_data1))
        e = sb.DataSink()
        a.connect(d,providesPortName="short_in_1")
        b.connect(d,providesPortName="short_in_2")
        c.connect(d,providesPortName="short_in_3")
        d.connect(e)
        sb.start()
        a.push(src_data1,EOS=True)
        b.push(src_data2,EOS=True)
        c.push(src_data3,EOS=True)
        result_data = e.getData(eos_block=True)
        
        self.assertEqual( expected_result, result_data[0] )
    def test_sandboxComplexProps(self):
        from ossie.utils import sb

        # values from the component PRF file
        expectedDefaults = {
            "boolean": numpy.complex(False, True),
            "ulong": numpy.complex(4, 5),
            "short": numpy.complex(4, 5),
            "float": numpy.complex(4., 5.),
            "octet": numpy.complex(4, 5),
            "ushort": numpy.complex(4, 5),
            "double": numpy.complex(4., 5.),
            "long": numpy.complex(4, 5),
            "longlong": numpy.complex(4, 5),
            "ulonglong": numpy.complex(4, 5)
        }
        '''
            "cFloatSeq"       : component.complexFloatSeq,
            "cFloatStruct"    : component.complexFloatStruct,
            "cFloatStructSeq" : component.complexFloatStructSeq}
            "cFloatSeq"       : [CF.complexFloat(real=1.0, imag=0.0),
                                 CF.complexFloat(real=1.0, imag=0.0),
                                 CF.complexFloat(real=1.0, imag=0.0)],
            "cFloatStruct"    : {"complexFloatStructMember": CF.complexFloat(real=1.0, imag=0.0)},
            "cFloatStructSeq" : [{"complexFloatStructMember": CF.complexFloat(real=1.0, imag=0.0)}]}
        '''

        # Create an instance of the test component in all 3 languages
        components = {
            "cpp": sb.launch("TestComplexProps", impl="cpp"),
            "python": sb.launch("TestComplexProps", impl="python"),
            "java": sb.launch("TestComplexProps", impl="java")
        }

        sb.start()

        for language in components.keys():
            # allow for visual inspection of complex sequences
            # TODO: replace this with an automated comparison
            print language
            print components[language].complexFloatProp
            print "simple struct member"
            print components[language].FloatStruct.FloatStructMember
            components[language].FloatStruct.FloatStructMember = 9
            print components[language].FloatStruct.FloatStructMember
            print "complex struct member"
            print components[
                language].complexFloatStruct.complexFloatStructMember
            components[
                language].complexFloatStruct.complexFloatStructMember = complex(
                    9, 10)
            print components[
                language].complexFloatStruct.complexFloatStructMember

            print components[language].complexFloatSequence
            components[language].complexFloatSequence = [complex(6, 7)] * 3
            print components[language].complexFloatSequence
            print ""

        for componentKey in components.keys():
            # loop through all three languages and query for the default
            # property values
            defaults = self._queryDefaults(components[componentKey])
            for key in defaults.keys():
                # Loop through the default property values and compare them
                # to the expected values.
                self._compareComplexValues(defaults[key],
                                           expectedDefaults[key])

        sb.domainless._cleanUpLaunchedComponents()
Example #26
    def testSriBlockingToggle(self):
        print "\n-------- TESTING SriBlockingToggle --------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "SriBlockingToggle"
        fftSize = 4096
        self.comp.fftSize = fftSize

        #------------------------------------------------
        # Create a test signal.
        #------------------------------------------------
        # 4096 samples of random real data at a 65536 Hz sample rate
        cxData = False
        sample_rate = 65536.
        nsamples = 4096
        data = [random.random() for _ in xrange(nsamples)]

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # ---------- start with sri.blocking = False
        # Push Data
        sri = self.src.sri()
        sri.blocking = False
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      sri=sri)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame

        # Validate SRI Pushed Correctly
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                sriBlocking=sri.blocking)

        # ---------- change to sri.blocking = True
        # Push Data
        sri = self.src.sri()
        sri.blocking = True
        time.sleep(.5)
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      sri=sri)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame

        # Validate SRI Pushed Correctly
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                sriBlocking=sri.blocking)

        print "*PASSED"
Example #27
    def testRealData2(self):
        print "\n-------- TESTING w/REAL DATA2 --------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "RealData2"
        fftSize = 8192
        self.comp.fftSize = fftSize

        #------------------------------------------------
        # Create a test signal.
        #------------------------------------------------
        # 8192 samples of (3000Hz + 7000Hz) real signal at 32768 Hz
        sample_rate = 32768.
        nsamples = 8192.

        F_3KHz = 3000.
        A_3KHz = 10.0

        F_7KHz = 7000.
        A_7KHz = 5.0

        t = arange(nsamples) / sample_rate
        tmpData = A_7KHz * cos(2 * pi * F_7KHz * t) + A_3KHz * cos(
            2 * pi * F_3KHz * t)

        data = [float(x) for x in tmpData]

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # Push Data
        cxData = False
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        pyFFT = abs(scipy.fft(tmpData, fftSize))

        #Validate SRI Pushed Correctly
        self.validateSRIPushing(ID, cxData, sample_rate, fftSize)

        #Convert Redhawk interleaved complex data to python complex for fftOut
        fftOut = packCx(fftOut)

        # Adjust length of data for accurate comparisons
        pyFFT = pyFFT[0:fftSize / 2]
        fftOut = fftOut[0:fftSize / 2]
        psdOut = psdOut[0:fftSize / 2]
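        # the input is real, so its spectrum is conjugate-symmetric; only the
        # first fftSize/2 (positive-frequency) bins carry unique information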

        # Uncomment function below to see output plots:
        #plotFreqData(fftSize, sample_rate, pyFFT, fftOut, psdOut)

        # Find Max values and their corresponding frequency index
        pyFFTMax = max(pyFFT)
        fftOutMax = max(fftOut)
        psdOutMax = max(psdOut)
        pyMaxSquared = pyFFTMax**2

        pyFFTIndex = pyFFT.tolist().index(pyFFTMax)
        fftIndex = fftOut.index(fftOutMax)
        psdIndex = psdOut.index(psdOutMax)

        # Check that the component max values and index values are equal to python's
        threshold = 0.000001
        self.assertFalse(abs(pyFFTMax - fftOutMax) >= fftOutMax * threshold)
        self.assertFalse(
            abs(pyMaxSquared - psdOutMax) >= psdOutMax * threshold)
        self.assertFalse(abs(pyFFTIndex - fftIndex) >= 1.0)
        self.assertFalse(abs(pyFFTIndex - psdIndex) >= 1.0)

        print "*PASSED"
Example #28
    def testRFInfoPkt(self):

        #create rf_info uses port and connect to MSDD
        out_rf_info_port = frontend.OutRFInfoPort("out_control")
        in_rf_info_port = self.comp.getPort("RFInfo_in")
        out_rf_info_port.connectPort(in_rf_info_port, "test_rf_flow")

        bw = self.alloc_params['wbddc_bw']
        sr = self.alloc_params['wbddc_srate']
        # allocation params
        flow_id = "ca-710-flow-2"
        cf = 100e6

        # send info pkt
        pkt = _generateRFInfoPkt(cf, bw, rf_flow_id=flow_id)
        out_rf_info_port._set_rfinfo_pkt(pkt)

        # check rf_flow_id was set
        n = len(self.comp.frontend_tuner_status)
        expected = [flow_id] * n
        actual = [
            x["FRONTEND::tuner_status::rf_flow_id"]
            for x in self.comp.frontend_tuner_status
        ]
        self.assertEqual(expected, actual,
                         "Mismatch of RF Flow Ids for tuners")

        # allocation for sample rate and rf_flow_id
        alloc1 = frontend.createTunerAllocation(center_frequency=cf,
                                                sample_rate=sr,
                                                rf_flow_id=flow_id)
        ret = self.comp.allocateCapacity(alloc1)
        alloc1_aid = alloc1["FRONTEND::tuner_allocation"][
            "FRONTEND::tuner_allocation::allocation_id"]
        self.assertEqual(True, ret, "Allocation failed using rf_flow_id")

        alloc2_aid = alloc1_aid
        if self.alloc_params['nbddc_srate'] is not None:
            # dual channel we need to provide specific rate so the correct DDC is selected that matches the same rf_flow_id
            if '2w' in self.msdd_id:
                # allocation for center freq and rf_flow_id
                alloc2 = frontend.createTunerAllocation(
                    center_frequency=cf,
                    sample_rate=self.alloc_params['nbddc_srate'],
                    sample_rate_tolerance=1.0,
                    rf_flow_id=flow_id)
            else:
                # allocation for center freq and rf_flow_id
                alloc2 = frontend.createTunerAllocation(center_frequency=cf,
                                                        rf_flow_id=flow_id)
            ret = self.comp.allocateCapacity(alloc2)
            alloc2_aid = alloc2["FRONTEND::tuner_allocation"][
                "FRONTEND::tuner_allocation::allocation_id"]
            self.assertEqual(True, ret,
                             "Allocation failed using rf_flow_id again ")

        # verify that a valid rf_flow_id was propagated downstream
        sink = sb.StreamSink()
        sdds_in = sb.launch('rh.SourceSDDS',
                            properties={'interface': INTERFACE})
        sb.start()
        self.comp.connect(sdds_in,
                          connectionId=alloc2_aid,
                          usesPortName='dataSDDS_out')
        sdds_in.connect(sink, usesPortName="dataShortOut")

        kws = None
        try:
            sink_data = sink.read()
            kws = properties.props_to_dict(sink_data.sri.keywords)
        except:
            pass
        self.assertEqual(kws["FRONTEND::RF_FLOW_ID"], flow_id,
                         "Missing RF_FLOW_ID from keyword list")
Example #29
    def testMultipleStreamsDifferentPort(self):
        self.octetConnect()

        short_source = sb.DataSource()
        short_source.connect(self.comp, usesPortName='shortOut')

        sink = sb.DataSinkSDDS()
        ad_cb = SddsAttachDetachCB()
        sink.registerAttachCallback(ad_cb.attach_cb)
        sink.registerDetachCallback(ad_cb.detach_cb)
        self.comp.connect(sink)
        sb.start()

        goodData1 = 1024 * [1]
        deckedData = 512 * [2]
        goodData2 = 512 * [3]
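        # goodData1 flows through the octet path (1 byte per sample, hence the
        # '1024B' unpack below), while deckedData and goodData2 (512 shorts,
        # 1024 bytes) arrive as network-order unsigned shorts ('!512H')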

        # No data pushed, no attaches or detaches
        self.assertEqual(len(ad_cb.get_attach()), 0,
                         "Should not have received any attaches")

        # Push one good packet and confirm it was received
        self.source.push(goodData1,
                         EOS=False,
                         streamID=self.id(),
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            goodData1, list(struct.unpack('1024B',
                                          self.getPacket()[-1024:])))

        # Since we pushed, we should get an attach, no detach
        self.assertEqual(len(ad_cb.get_attach()), 1,
                         "Should have received 1 attach total")

        # Push a new stream, it should get decked, and disabled, confirm we receive no data and still have only a single attach
        short_source.push(deckedData,
                          EOS=False,
                          streamID="Decked Stream",
                          sampleRate=1.0,
                          complexData=False,
                          loop=False)
        self.assertEqual(
            len(self.getPacket()), 0,
            "Should not have passed on new stream, stream already active")
        self.assertEqual(len(ad_cb.get_attach()), 1,
                         "Should have received 1 attach total")

        # Push an EOS which should cause a detach, the decked stream to become active and the goodData pushed and us to have another attach called.
        self.source.push(goodData1,
                         EOS=True,
                         streamID=self.id(),
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            goodData1, list(struct.unpack('1024B',
                                          self.getPacket()[-1024:])))
        time.sleep(2)
        self.assertEqual(len(ad_cb.get_attach()), 2,
                         "Should have received 2 attach total")

        # Push decked data and EOS It.
        short_source.push(deckedData,
                          EOS=True,
                          streamID="Decked Stream",
                          sampleRate=1.0,
                          complexData=False,
                          loop=False)
        self.assertEqual(
            deckedData, list(struct.unpack('!512H',
                                           self.getPacket()[-1024:])))

        # Send a new stream, which means a new attach
        short_source.push(goodData2,
                          EOS=False,
                          streamID="New Stream",
                          sampleRate=1.0,
                          complexData=False,
                          loop=False)
        self.assertEqual(
            goodData2, list(struct.unpack('!512H',
                                          self.getPacket()[-1024:])))
        self.assertEqual(len(ad_cb.get_attach()), 3,
                         "Should have received 3 attach total")

        # Tear stuff down, confirm we get the final detach
        sb.release()
        self.assertEqual(len(ad_cb.get_attach()), 3,
                         "Should have received 3 attach total")
        self.assertEqual(len(ad_cb.get_detach()), 3,
                         "Should have received 3 detach total")
Example #30
    def testChanRfCxTogglePartial(self):
        print "\n-------- TESTING w/COMPLEX rfFreqUnitsTogglePartial CHAN_RF--------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "rfFreqUnitsTogglePartialChanRf"
        fftSize = 4096
        self.comp.fftSize = fftSize
        self.comp.rfFreqUnits = False

        #------------------------------------------------
        # Create a test signal.
        #------------------------------------------------
        # 4096 complex samples (2*4096 interleaved values) of random data at a 65536 Hz sample rate
        sample_rate = 65536.
        nsamples = 4096

        data = [random.random() for _ in xrange(2 * nsamples)]

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # Push Data
        cxData = True
        chanRfVal = 100e6
        #keywords = [sb.io_helpers.SRIKeyword('CHAN_RF',chanRfVal, 'float')]
        keywords = [sb.io_helpers.SRIKeyword('CHAN_RF', chanRfVal, 'double')]
        #keywords = [sb.io_helpers.SRIKeyword('CHAN_RF',chanRfVal, 'long')]
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      SRIKeywords=keywords)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        #pyFFT = abs(scipy.fft(tmpData, fftSize))

        #Validate SRI Pushed Correctly
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                chanRfVal,
                                SRIKeywords=keywords)

        # Push partial block
        time.sleep(.5)
        self.src.push(data[:nsamples],
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      SRIKeywords=keywords)
        time.sleep(.5)

        # Toggle property and push rest of partial block
        self.comp.rfFreqUnits = True
        time.sleep(.5)
        self.src.push(data[nsamples:],
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      SRIKeywords=keywords)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                chanRfVal,
                                SRIKeywords=keywords)

        print "*PASSED"
#------------------------------------------------
# Create components and connections
#------------------------------------------------
display(f,'* Creating Components and Connections\n')
filt = sb.launch('../FilterDecimate.spd.xml',execparams={"DEBUG_LEVEL":3})

inputS = sb.DataSource(bytesPerPush=64)
outputS = sb.DataSink()
inputS.connect(filt, providesPortName='dataFloat_in')
filt.connect(outputS, providesPortName='floatIn')

#------------------------------------------------
# Start Components
#------------------------------------------------
display(f,'* Starting Component\n\n') 
sb.start()

case = 0
for ii in lowpass_cases:
	case += 1
	passed_count += 1
	display(f, '*** TEST CASE ' + str(passed_count) + ' ***\n')

	#------------------------------------------------
	# Create a signal for demonstration.
	#------------------------------------------------
	# 16384 samples of (5000Hz + 15000Hz + 30000 Hz) real signal at 256 kHz
	display(f,'* Creating Test Signal with Parameters\n')
	sample_rate = 256000.
	display(f,'sample_rate = 256000\n')
	nsamples = 16384.
siggen = sb.launch('rh.SigGen')
sdds_out = sb.launch('rh.SinkSDDS')
sdds_out.network_settings.interface = 'lo'
sdds_out.network_settings.ip_address = '127.0.0.1'  #sender IP
sdds_out.network_settings.port = 29000
siggen.connect(sdds_out, providesPortName="dataFloatIn")

# test to make sure sig gen is running
sgplot = sb.LinePlot()
siggen.connect(sgplot, providesPortName="floatIn")

sdds_in = sb.launch("rh.SourceSDDS")
sdds_in.interface = "lo"

## SOURCESDDS connection method #1
# this is not necessary if you connect BULKIO ports
#sdds_in.attachment_override.ip_address='127.0.0.1' #sender IP
#sdds_in.attachment_override.port=29000
#sdds_in.attachment_override.enabled=True

## SOURCESDDS connection method #2
## connect SDDS sender (sink) to receiver (source) blocks
## this is not necessary if you do attach override
sdds_out.connect(sdds_in, providesPortName='dataSddsIn')

# see the data flowing through the source.
sddsplot = sb.LinePlot()
sdds_in.connect(sddsplot, providesPortName='floatIn')

sb.start()
    def timerTests(self, pkt_ts=False):
        #######################################################################
        # Test multiple recording timers

        # Define test files
        dataFileIn = "./data.in"
        dataFileOut = "./data.out"
        resultsFileOut = "./results.out"
        sample_rate = 128.0
        start_delay = 0.5
        stop_delay = 2.0

        # Create Test Data File if it doesn't exist
        if not os.path.isfile(dataFileIn):
            with open(dataFileIn, "wb") as dataIn:
                dataIn.write(os.urandom(1024))

        # Read in Data from Test File
        size = os.path.getsize(dataFileIn)
        with open(dataFileIn, "rb") as dataIn:
            data = list(struct.unpack("f" * (size / 4), dataIn.read(size)))

        # Create Components and Connections
        comp = sb.launch("../FileWriter.spd.xml")
        comp.destination_uri = dataFileOut
        comp.recording_enabled = False

        # Create timers
        ts_start = bulkio.bulkio_helpers.createCPUTimestamp()
        start1_wsec = ts_start.twsec + 2.0
        stop1_wsec = start1_wsec + 2.0
        start2_wsec = stop1_wsec + 2.0
        stop2_wsec = start2_wsec + 2.0
        timers = [
            {"recording_enable": True, "use_pkt_timestamp": pkt_ts, "twsec": start1_wsec, "tfsec": ts_start.tfsec},
            {"recording_enable": False, "use_pkt_timestamp": pkt_ts, "twsec": stop1_wsec, "tfsec": ts_start.tfsec},
            {"recording_enable": True, "use_pkt_timestamp": pkt_ts, "twsec": start2_wsec, "tfsec": ts_start.tfsec},
            {"recording_enable": False, "use_pkt_timestamp": pkt_ts, "twsec": stop2_wsec, "tfsec": ts_start.tfsec},
        ]
        # print timers
        comp.recording_timer = timers

        source = sb.DataSource(
            bytesPerPush=64.0, dataFormat="32f", startTime=ts_start.twsec + ts_start.tfsec + start_delay
        )
        source.connect(comp, providesPortName="dataFloat_in")

        # results will be shifted due to start_delay
        results_offset = int((start1_wsec - ts_start.twsec - start_delay) * sample_rate)
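        # e.g. start1_wsec = ts_start.twsec + 2.0, start_delay = 0.5 and
        # sample_rate = 128.0 give results_offset = (2.0 - 0.5) * 128 = 192 samples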

        # Start Components & Push Data
        sb.start()
        if pkt_ts:
            source.push(data * 5, sampleRate=sample_rate)  # 5*256 samples per push
            time.sleep(2)
        else:
            # meter to actual sample rate since based on cpu time
            end_ws = stop2_wsec + stop_delay
            num_samps = 16
            loop_delay = num_samps / sample_rate
            idx = results_offset  # necessary to achieve same results as pkt_ts, accounting for start_delay
            ts_now = bulkio.bulkio_helpers.createCPUTimestamp()
            while ts_now.twsec < end_ws:
                source.push(data[idx : idx + num_samps], sampleRate=sample_rate)  # num_samps (16) samples per push
                idx = (idx + num_samps) % len(data)
                time.sleep(loop_delay)
                ts_now = bulkio.bulkio_helpers.createCPUTimestamp()
        sb.stop()

        # Create Test Results Files
        results = data[results_offset:] + data[:results_offset]
        with open(resultsFileOut, "wb") as dataIn:
            dataIn.write(struct.pack("f" * len(results), *results))

        # Check that the input and output files are the same
        try:
            try:
                self.assertEqual(filecmp.cmp(resultsFileOut, dataFileOut), True)
            except self.failureException as e:
                # unpacked bytes may be NaN, which could cause test to fail unnecessarily
                size1 = os.path.getsize(dataFileOut)
                with open(dataFileOut, "rb") as dataOut1:
                    data1 = list(struct.unpack("f" * (size1 / 4), dataOut1.read(size1)))

                offset1 = results.index(max(results)) - data1.index(max(data1))
                # print 'offset1 is', offset1
                if offset1 != 0:
                    if abs(offset1) > num_samps:  # allow it to be off by one data push
                        print "FAILED: offset1 =", offset1
                        raise e
                    shifted_res1 = results[offset1:] + results[:offset1]
                else:
                    shifted_res1 = results
                for a, b in zip(shifted_res1, data1):
                    if a != b:
                        if a != a and b != b:
                            print "Difference in NaN format, ignoring..."
                        else:
                            print "1st FAILED:", a, "!=", b
                            raise e
            try:
                self.assertEqual(filecmp.cmp(resultsFileOut, dataFileOut + "-1"), True)
            except self.failureException as e:
                # unpacked bytes may be NaN, which could cause test to fail unnecessarily
                size2 = os.path.getsize(dataFileOut + "-1")
                with open(dataFileOut + "-1", "rb") as dataOut:
                    data2 = list(struct.unpack("f" * (size2 / 4), dataOut.read(size2)))

                offset2 = results.index(max(results)) - data2.index(max(data2))
                # print 'offset2 is', offset2
                if offset2 != 0:
                    if abs(offset2) > num_samps:  # allow it to be off by one data push
                        print "FAILED: offset2 =", offset2
                        raise e
                    shifted_res2 = results[offset2:] + results[:offset2]
                else:
                    shifted_res2 = results
                for a, b in zip(shifted_res2, data2):
                    if a != b:
                        if a != a and b != b:
                            print "Difference in NaN format, ignoring..."
                        else:
                            print "2nd FAILED:", a, "!=", b
                            raise e
        except:
            raise
        # Release the components and remove the generated files
        finally:
            comp.releaseObject()
            source.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileOut)
            os.remove(dataFileOut + "-1")
            os.remove(resultsFileOut)

        # TODO - validate timestamps, perhaps using BLUEFILEs

        print "........ PASSED\n"
        return
    def testBaseUri(self):
        #######################################################################
        # Test base uri w/ keyword substitution
        print "\n**TESTING URI w/ KW Substitution"

        # Define test files
        dataFileIn = "./data.in"
        STREAMID = "baseuritest"
        COL_RF = 1.2e6
        CHAN_RF1 = 1.25e6
        CHAN_RF2 = 1.15e6
        COLRF_HZ = "1200000Hz"
        CF_HZ1 = CHANRF_HZ1 = "1250000Hz"
        CF_HZ2 = CHANRF_HZ2 = "1150000Hz"
        MY_KEYWORD = "customkw"
        dataFileOut_template = "./%s.%s.%s.%s.%s.out"
        dataFileOut1 = dataFileOut_template % (STREAMID, CF_HZ1, COLRF_HZ, CHANRF_HZ1, MY_KEYWORD)
        dataFileOut2 = dataFileOut_template % (STREAMID, CF_HZ2, COLRF_HZ, CHANRF_HZ2, MY_KEYWORD)
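        # with the values above these expand to
        # "./baseuritest.1250000Hz.1200000Hz.1250000Hz.customkw.out" and
        # "./baseuritest.1150000Hz.1200000Hz.1150000Hz.customkw.out"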

        keywords1 = [
            sb.io_helpers.SRIKeyword("COL_RF", COL_RF, "double"),
            sb.io_helpers.SRIKeyword("CHAN_RF", CHAN_RF1, "double"),
            sb.io_helpers.SRIKeyword("MY_KEYWORD", MY_KEYWORD, "string"),
        ]

        keywords2 = [
            sb.io_helpers.SRIKeyword("COL_RF", COL_RF, "double"),
            sb.io_helpers.SRIKeyword("CHAN_RF", CHAN_RF2, "double"),
            sb.io_helpers.SRIKeyword("MY_KEYWORD", MY_KEYWORD, "string"),
        ]

        # Create Test Data File if it doesn't exist
        if not os.path.isfile(dataFileIn):
            with open(dataFileIn, "wb") as dataIn:
                dataIn.write(os.urandom(1024))

        # Read in Data from Test File
        size = os.path.getsize(dataFileIn)
        with open(dataFileIn, "rb") as dataIn:
            data = list(struct.unpack("f" * (size / 4), dataIn.read(size)))

        # Create Components and Connections
        comp = sb.launch("../FileWriter.spd.xml")
        comp.destination_uri = dataFileOut_template % (
            "%STREAMID%",
            "%CF_HZ%",
            "%COLRF_HZ%",
            "%CHANRF_HZ%",
            "%MY_KEYWORD%",
        )
        comp.advanced_properties.existing_file = "TRUNCATE"

        source = sb.DataSource(bytesPerPush=64, dataFormat="32f")
        source.connect(comp, providesPortName="dataFloat_in")

        # Start Components & Push Data
        sb.start()
        source.push(data, streamID=STREAMID, SRIKeywords=keywords1)
        time.sleep(2)
        source.push(data, streamID=STREAMID, SRIKeywords=keywords2)
        time.sleep(2)
        sb.stop()

        # Check that the input and output files are the same
        try:
            dataFileOut = dataFileOut1
            self.assertEqual(filecmp.cmp(dataFileIn, dataFileOut), True)
            dataFileOut = dataFileOut2
            self.assertEqual(filecmp.cmp(dataFileIn, dataFileOut), True)
        except self.failureException as e:
            # unpacked bytes may be NaN, which could cause test to fail unnecessarily
            size = os.path.getsize(dataFileOut)
            with open(dataFileOut, "rb") as dataOut:
                data2 = list(struct.unpack("f" * (size / 4), dataOut.read(size)))
            for a, b in zip(data, data2):
                if a != b:
                    if a != a and b != b:
                        print "Difference in NaN format, ignoring..."
                    else:
                        print "FAILED:", a, "!=", b
                        raise e

        # Release the components and remove the generated files
        finally:
            comp.releaseObject()
            source.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileOut1)
            os.remove(dataFileOut2)

        print "........ PASSED\n"
        return
Example #35
def main():
    f = open('unit_test.log', 'w')

    display(f, "*********************************")
    display(f, "******** BPSK Unit Test *********")
    display(f, "*********************************")

    # Launch the component and the input sources and output sink
    display(f, "\n******* Creating Component ******")
    test_component = sb.launch('../BPSK.spd.xml',
                               execparams={'DEBUG_LEVEL': 5})
    clockSource = sb.DataSource()
    dataSource = sb.DataSource()
    dataSink = sb.DataSink()

    # Connect the output of the clock source and the data source
    # to the inputs of the BPSK.  Connect the output of the BPSK
    # to the input of the data sink
    display(f, "\n****** Creating Connections *****")
    clockSource.connect(test_component, providesPortName='clockFloat_in')
    dataSource.connect(test_component, providesPortName='dataFloat_in')
    test_component.connect(dataSink, providesPortName='shortIn')
    display(f, "Connections created")

    display(f, "\n******** Generating Data ********")
    # Generate a simple sine wave for use as the clock and the
    # data
    convenient_time_data = linspace(0, 48 * pi, 2400)
    clock = sin(convenient_time_data).tolist()
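    # 0..48*pi over 2400 points is 24 full sine periods, i.e. 100 samples per
    # period, which is the bit duration assumed throughout the cases below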

    display(f, "Single Packet Case...")
    # Use 24 bits to generate a BPSK modulated signal
    single_packet_data = [
        0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1
    ]
    single_packet_keyed_data = []
    i = 0
    for j in single_packet_data:
        for k in range(i, i + 100):
            if j == 1:
                single_packet_keyed_data.append(clock[k])
            else:
                single_packet_keyed_data.append(-clock[k])

        i += 100

    display(f, "Two Packet Case...")
    # Use a 16 bit packet and an 8 bit packet to generate two
    # BPSK modulated signals using the same clock
    two_packet_data1 = [0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1]
    two_packet_data2 = [0, 1, 0, 1, 0, 1, 1, 1]
    two_packet_keyed_data1 = []
    two_packet_keyed_data2 = []
    i = 0
    for j in two_packet_data1:
        for k in range(i, i + 100):
            if j == 1:
                two_packet_keyed_data1.append(clock[k])
            else:
                two_packet_keyed_data1.append(-clock[k])

        i += 100

    for j in two_packet_data2:
        for k in range(i, i + 100):
            if j == 1:
                two_packet_keyed_data2.append(clock[k])
            else:
                two_packet_keyed_data2.append(-clock[k])

        i += 100

    display(f, "Two Packets, Unaligned Case...")
    # Reuse the one packet from above, but send it as two
    # that aren't lined up on the period boundary
    two_packet_unaligned_keyed_data1 = []
    two_packet_unaligned_keyed_data2 = []
    i = 0
    for j in single_packet_data[:14]:
        for k in range(i, i + 100):
            if j == 1:
                two_packet_unaligned_keyed_data1.append(clock[k])
            else:
                two_packet_unaligned_keyed_data1.append(-clock[k])

        i += 100

    for k in range(i, i + 100):
        # Put the first 27 samples into the first packet and the next
        # 73 into the second packet
        if k < (i + 27):
            if single_packet_data[14] == 1:
                two_packet_unaligned_keyed_data1.append(clock[k])
            else:
                two_packet_unaligned_keyed_data1.append(-clock[k])
        else:
            if single_packet_data[14] == 1:
                two_packet_unaligned_keyed_data2.append(clock[k])
            else:
                two_packet_unaligned_keyed_data2.append(-clock[k])

    i += 100

    for j in single_packet_data[15:]:
        for k in range(i, i + 100):
            if j == 1:
                two_packet_unaligned_keyed_data2.append(clock[k])
            else:
                two_packet_unaligned_keyed_data2.append(-clock[k])

        i += 100

    display(f, "\n******* Starting Components ******")
    sb.start()
    display(f, "Component started")
    display(f, "** Testing Single Packet Case **")
    # For now, the only accepted output rate of the BPSK is
    # 1187.5 bps.  Since 100 samples of the clock represents
    # a period, this will force the output to be one bit per
    # period
    clockSource.push(clock, False, 'Test', (100 * 1187.5), False, [], None)
    dataSource.push(single_packet_keyed_data, False, 'Test', (100 * 1187.5),
                    False, [], None)
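
    # For reference, the positional push() calls above appear to follow the
    # keyword form used in the other examples in this file; a sketch of the
    # same call with keywords (assuming the positional order is data, EOS,
    # streamID, sampleRate, complexData, SRIKeywords, loop):
    #
    #   dataSource.push(single_packet_keyed_data, EOS=False, streamID='Test',
    #                   sampleRate=(100 * 1187.5), complexData=False,
    #                   SRIKeywords=[], loop=None)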
    time.sleep(1)

    received_data = dataSink.getData()
    passed_test_1 = True
    test_1_message = ""

    displayList(f, "Received Data:  ", received_data)
    displayList(f, "Original Data:  ", single_packet_data)

    # Make sure that the received data and original data
    # are of the same length and that they match
    if len(received_data) == len(single_packet_data):
        for i in range(0, len(received_data)):
            if received_data[i] != single_packet_data[i]:
                passed_test_1 = False
                test_1_message = "received_data[" + str(
                    i) + "] != original_data[" + str(i) + "]"
                break
    else:
        passed_test_1 = False
        test_1_message = "len(received_data) != len(original_data)"

    #******************************************************
    display(f, "\n*** Testing Two Packet Case ****")

    clockSource.push(clock[:len(two_packet_keyed_data1)], False, 'Test',
                     (100 * 1187.5), False, [], None)
    dataSource.push(two_packet_keyed_data1, False, 'Test', (100 * 1187.5),
                    False, [], None)
    time.sleep(1)

    received_data = dataSink.getData()

    clockSource.push(clock[len(two_packet_keyed_data1):], False, 'Test',
                     (100 * 1187.5), False, [], None)
    dataSource.push(two_packet_keyed_data2, False, 'Test', (100 * 1187.5),
                    False, [], None)
    time.sleep(1)

    received_data += dataSink.getData()
    passed_test_2 = True
    test_2_message = ""

    displayList(f, "Received Data:  ", received_data)
    displayList(f, "Original Data1: ", two_packet_data1)
    displayList(f, "Original Data2: ", two_packet_data2)

    # Make sure that the received data and original data
    # are of the same length and that they match
    if len(received_data) == (len(two_packet_data1) + len(two_packet_data2)):
        for i in range(0, len(received_data)):
            if i < len(two_packet_data1):
                if received_data[i] != two_packet_data1[i]:
                    passed_test_2 = False
                    test_2_message = "received_data[" + str(
                        i) + "] != original_data1[" + str(i) + "]"
                    break
            else:
                if received_data[i] != two_packet_data2[i -
                                                        len(two_packet_data1)]:
                    passed_test_2 = False
                    test_2_message = "received_data[" + str(
                        i) + "] != original_data2[" + str(
                            i - len(two_packet_data1)) + "]"
    else:
        passed_test_2 = False
        test_2_message = "len(received_data) != len(original_data1) + len(original_data2)"

    #******************************************************
    display(f, "\n** Testing Two Packet, Unaligned Case **")
    clockSource.push(clock[:len(two_packet_unaligned_keyed_data1)], False,
                     'Test', (100 * 1187.5), False, [], None)
    dataSource.push(two_packet_unaligned_keyed_data1, False, 'Test',
                    (100 * 1187.5), False, [], None)
    time.sleep(1)

    received_data = dataSink.getData()

    clockSource.push(clock[len(two_packet_unaligned_keyed_data1):], False,
                     'Test', (100 * 1187.5), False, [], None)
    dataSource.push(two_packet_unaligned_keyed_data2, False, 'Test',
                    (100 * 1187.5), False, [], None)
    time.sleep(1)

    received_data += dataSink.getData()
    passed_test_3 = True
    test_3_message = ""

    displayList(f, "Received Data:  ", received_data)
    displayList(f, "Original Data:  ", single_packet_data)

    # Make sure that the received data and original data
    # are of the same length and that they match
    if len(received_data) == len(single_packet_data):
        for i in range(0, len(received_data)):
            if received_data[i] != single_packet_data[i]:
                passed_test_3 = False
                test_3_message = "received_data[" + str(
                    i) + "] != original_data[" + str(i) + "]"
                break
    else:
        passed_test_3 = False
        test_3_message = "len(received_data) != len(original_data1) + len(original_data2)"

    display(f, "\n******* Stopping Components ******")
    sb.stop()
    display(f, "Components stopped")

    # Display the results of the unit test
    if passed_test_1:
        display(
            f, "\nSingle Packet Test ...................." +
            u'\u2714'.encode('utf8'))
    else:
        display(
            f, "\nSingle Packet Test ...................." +
            u'\u2718'.encode('utf8') + '\t' + test_1_message)

    if passed_test_2:
        display(
            f, "Two Packet Test ......................." +
            u'\u2714'.encode('utf8'))
    else:
        display(
            f, "Two Packet Test ......................." +
            u'\u2718'.encode('utf8') + '\t' + test_2_message)

    if passed_test_3:
        display(
            f, "Two Packet, Unaligned Test ............" +
            u'\u2714'.encode('utf8'))
    else:
        display(
            f, "Two Packet, Unaligned Test ............" +
            u'\u2718'.encode('utf8') + '\t' + test_3_message)

    display(f, '\n')
    display(f, "Unit Test Complete")

    f.close()
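
# display() and displayList() are logging helpers assumed by the unit test
# above; a minimal sketch consistent with how they are called (write each
# message to the open log file and echo it to stdout). The real helpers may
# differ:
def display(f, message):
    f.write(str(message) + '\n')
    print(str(message))

def displayList(f, label, values):
    display(f, label + str(values))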
Example #36
def load_and_run_scenario(json_file, time_inc=1, wfm=""):
    """Load a scenario and run

    Parameters
    ----------
    json_file : str, dict
        The path to a JSON specifying the scenario.  This should include
        "components", "connections", "simulation"

    time_inc : float
        Time increment to run simulation.  After each increment, check
        the debug (throughput)

    wfm : str
        Specify a file to save the scenario to waveform.
        Don't save if empty string
    """
    if isinstance(json_file, str):
        settings = json.load(open(json_file), encoding='ascii')
        settings = convert_dict(settings)
    elif isinstance(json_file, dict):
        settings = json_file
    else:
        raise ValueError("Expecting a string json filepath or dict")

    # extract from dictionary (verify keys exist)
    comp_specs = settings.get("components", {})
    wave_specs = settings.get("waveforms", {})
    domain_specs = settings.get("domains", {})
    conns = settings["connections"]
    simm = settings["simulation"]
    debug = settings.get("debug", {})

    if domain_specs:
        setup_domains(domain_specs)

    # ---------------------------  load components  -------------------------
    comp_dict = component_helper.launch_components(sb, comp_specs)

    # --------------------------  load waveforms  ---------------------------
    wfm_dict = waveform_helper.launch_waveforms(wave_specs)

    # ----------------------  connect message sinks  ------------------------
    msg_sinks, msg_store = message_helper.connect_msg_sinks(
        sb, comp_dict, wfm_dict, debug)

    # -------------------------  setup connections  -------------------------
    for conn in conns:
        try:
            obj_1 = get_instance(conn[0], comp_dict, wfm_dict)
            port_1 = obj_1.getPort(str(conn[1]))
            obj_2 = get_instance(conn[2], comp_dict, wfm_dict)
            port_2 = obj_2.getPort(str(conn[3]))
            port_1.connectPort(port_2,
                "conn_%s_to_%s_"%(str(conn[0]), str(conn[2]))\
                + str(uuid.uuid1()))
        except Exception as e:
            print("Error running connection %s" % str(conn))
            raise
    # ---------------------------  setup debug  ---------------------------
    throughput_ports = throughput_helper.setup_throughput(
        debug.get("throughput", []),
        comp_dict=comp_dict,
        wfm_dict=wfm_dict)

    # --------------------------  save waveform  ----------------------------
    if wfm:
        raise NotImplementedError("Waveform Generation is not working")
        # NOTE: Generated waveform does not track component settings

        with open(wfm + ".sad.xml", "w") as fid:
            fid.write(sb.generateSADXML(wfm))

    # --------------------------  run simulation  ---------------------------
    if simm["type"].lower() in ["time"]:
        print("In time simulation")
        waveform_helper.start_waveforms(wfm_dict)
        component_helper.start_in_reverse_order(comp_dict)

        tic = time.time()
        while time.time() - tic < simm["value"]["duration"]:
            # show message being passed
            message_helper.show_messages(msg_sinks, msg_store)

            # show port throughput statistics
            throughput_helper.show_throughput(throughput_ports)

            # sleep a little
            time.sleep(time_inc)

        component_helper.stop_in_order(comp_dict)
        waveform_helper.stop_waveforms(wfm_dict)
        #sb.stop()

    elif simm["type"].lower() in ["user"]:
        # run till user hits enter
        sb.start()
        resp = user_prompt("Hit enter to exit")
        sb.stop()

    else:
        raise RuntimeError("Unexpected type of simulation")

    # save messages
    if msg_store:
        message_helper.save_messages(msg_store)

    # TODO: release components/waveforms/devices/domains
    waveform_helper.release_waveforms(wfm_dict)
    throughput_helper.close(throughput_ports)
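
# A minimal scenario sketch for load_and_run_scenario(); the instance and
# port names below are hypothetical placeholders, and only the dictionary
# layout reflects the keys read above ("components", "connections",
# "simulation", plus the optional "waveforms", "domains" and "debug"):
example_scenario = {
    "components": {},  # component specs, consumed by component_helper.launch_components()
    "connections": [
        # [from instance, uses port, to instance, provides port]
        ["source_inst", "dataFloat_out", "sink_inst", "dataFloat_in"],
    ],
    "simulation": {"type": "time", "value": {"duration": 10.0}},
}
# load_and_run_scenario(example_scenario, time_inc=1)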
Example #38
    def start(self):
        sb.start()
Example #39
    def testComplexData2(self):
        print "\n-------- TESTING w/COMPLEX DATA2 --------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "ComplexData2"
        fftSize = 4096
        self.comp.fftSize = fftSize

        #------------------------------------------------
        # Create test signal
        #------------------------------------------------
        # 4096 samples of a 7 kHz complex exponential at a 65536 Hz sample rate
        sample_rate = 65536.
        nsamples = 4096.

        F_7KHz = 7000.
        A_7KHz = 5.0

        t = arange(nsamples) / sample_rate
        tmpData = A_7KHz * np.exp(1j * 2 * pi * F_7KHz * t)

        #Convert signal from python complex to RedHawk interleaved complex
        data = unpackCx(tmpData)

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # Push Data
        cxData = True
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        pyFFT = abs(scipy.fft(tmpData, fftSize))

        #Validate SRI Pushed Correctly
        self.validateSRIPushing(ID, cxData, sample_rate, fftSize)

        #Convert Redhawk interleaved complex data to python complex for fftOut
        fftOut = packCx(fftOut)

        # Adjust length of data for accurate comparisons
        pyFFT = pyFFT[0:fftSize / 2]
        psdOut = psdOut[fftSize / 2:fftSize]
        fftOut = fftOut[fftSize / 2:fftSize]

        # Uncomment function below to see output plots:
        #plotFreqData(fftSize, sample_rate, pyFFT, fftOut, psdOut)

        # Normalize the data for accurate comparison with python fft
        pyFFTMax = max(pyFFT)
        fftOutMax = max(fftOut)
        psdOutMax = max(psdOut)
        pyMaxSquared = pyFFTMax**2

        pyFFTIndex = pyFFT.tolist().index(pyFFTMax)
        fftIndex = fftOut.index(fftOutMax)
        psdIndex = psdOut.index(psdOutMax)

        # additional checks due to floating point precision
        # look at value before peak to see if w/in tolerance
        # if so, use that as peak index to report the FIRST peak value
        rel_tol = 10**(-1 * PRECISION)
        abs_tol = 10**(-1 * NUM_PLACES)
        if isclose(pyFFT[pyFFTIndex - 1], pyFFT[pyFFTIndex], rel_tol, abs_tol):
            pyFFTIndex = pyFFTIndex - 1
        if isclose(fftOut[fftIndex - 1], fftOut[fftIndex], rel_tol, abs_tol):
            fftIndex = fftIndex - 1
        if isclose(psdOut[psdIndex - 1], psdOut[psdIndex], rel_tol, abs_tol):
            psdIndex = psdIndex - 1

        # Check that the component max values and index values are equal to python's
        self.assert_isclose(pyFFTMax, fftOutMax, PRECISION, NUM_PLACES)
        self.assert_isclose(pyMaxSquared, psdOutMax, PRECISION, NUM_PLACES)
        self.assertEqual(pyFFTIndex, fftIndex)
        self.assertEqual(pyFFTIndex, psdIndex)

        print "*PASSED"
Example #40
    def testEosEmpty(self):
        print "\n-------- TESTING EOS w/COMPLEX DATA EMPTY --------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "eosEmpty"
        fftSize = 4096
        self.comp.fftSize = fftSize
        self.comp.rfFreqUnits = False

        #------------------------------------------------
        # Create a test signal.
        #------------------------------------------------
        # 4096 random complex samples (interleaved) at a 65536 Hz sample rate
        sample_rate = 65536.
        nsamples = 4096

        data = [random.random() for _ in xrange(2 * nsamples)]

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # Push Data
        cxData = True
        colRfVal = 100e6
        keywords = [sb.io_helpers.SRIKeyword('COL_RF', colRfVal, 'float')]
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      SRIKeywords=keywords)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        #pyFFT = abs(scipy.fft(tmpData, fftSize))

        #Validate SRI Pushed Correctly
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                colRfVal,
                                SRIKeywords=keywords)

        time.sleep(.5)
        self.src.push([],
                      EOS=True,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData,
                      SRIKeywords=keywords)
        print 'pushed %s samples w/ EOS=true, fftSize=%s' % (0, fftSize)
        time.sleep(.5)

        # Get Output Data
        self.assertTrue(self.fftsink.eos())
        self.assertTrue(self.psdsink.eos())
        fftOut = self.fftsink.getData()  # should be empty
        psdOut = self.psdsink.getData()  # should be empty
        self.validateSRIPushing(ID,
                                cxData,
                                sample_rate,
                                fftSize,
                                colRfVal,
                                SRIKeywords=keywords)

        print "*PASSED"
Example #41
    def testBlueShortPortSwapped(self):
        #######################################################################
        # Test Bluefile Swapped SHORT Functionality
        print "\n**TESTING BLUEFILE Swapped + SHORT PORT"

        #Define test files
        dataFileIn = './bluefile.in'
        dataFileInSwap = './bluefile.in.swap'

        #Create Test Data File if it doesn't exist
        if not os.path.isfile(dataFileIn):
            tmpSink = bluefile_helpers.BlueFileWriter(dataFileIn,
                                                      BULKIO__POA.dataShort)
            tmpSink.start()
            tmpSri = createSri('bluefileShortSwapped', 5000)
            #kwVal = 1234
            #kwSwap = swap([kwVal], 'long')[0]
            #tmpSri.keywords = props_from_dict({'TEST_KW':kwSwap})
            tmpSri.keywords = props_from_dict({'TEST_KW': 1234})
            tmpSink.pushSRI(tmpSri)
            tmpTs = createTs()
            #tmpSink.pushPacket(swap(range(1024),'short'), tmpTs, True, 'bluefileShortSwapped')
            tmpSink.pushPacket(range(1024), tmpTs, True,
                               'bluefileShortSwapped')

        #Read in Data from Test File, modify header, and rewrite
        hdr, d = bluefile.read(dataFileIn, dict)
        hdr['file_name'] = dataFileInSwap
        hdr['head_rep'] = 'IEEE'
        hdr['data_rep'] = 'IEEE'
        bluefile.write(dataFileInSwap, hdr, d)

        #Read in Data from Swapped Test File
        hdr, d = bluefile.read(dataFileInSwap, dict)
        data = list(d)
        keywords = hdr['ext_header']

        #Create Components and Connections
        comp = sb.launch('../FileReader.spd.xml')
        comp.source_uri = dataFileInSwap
        comp.file_format = 'BLUEFILE'

        sink = sb.DataSink()
        comp.connect(sink, usesPortName='dataShort_out')

        #Start Components & Push Data
        sb.start()
        comp.playback_state = 'PLAY'
        time.sleep(2)
        readData = sink.getData()
        readKeywords = props_to_dict(sink.sri().keywords)
        sb.stop()

        #Check that the input and output files are the same
        try:
            self.assertEqual(data, readData)
        except self.failureException as e:
            comp.releaseObject()
            sink.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileInSwap)
            raise e

        #Check that the keywords are the same
        try:
            self.assertEqual(keywords, readKeywords)
        except self.failureException as e:
            comp.releaseObject()
            sink.releaseObject()
            os.remove(dataFileIn)
            os.remove(dataFileInSwap)
            raise e

        #Release the components and remove the generated files
        comp.releaseObject()
        sink.releaseObject()
        os.remove(dataFileIn)
        os.remove(dataFileInSwap)

        print "........ PASSED\n"
        return
    def testRFFlowID(self): 

        self.fei_dev, alloc_params = self.getAllocationContext(ALLOCATE_RCV)

        #create rf_info uses port and connect to MSDD
        out_rf_info_port=frontend.OutRFInfoPort("out_control")
        in_rf_info_port=self.fei_dev.getPort("RFInfo_in")
        out_rf_info_port.connectPort(in_rf_info_port,"test_rf_flow")

        # get params for allocations
        bw=alloc_params['wbddc_bw']
        sr=alloc_params['wbddc_srate']

        # allocation params
        flow_id = "ca-710-flow"
        cf=100e6

        # set rf flow id
        out_rf_info_port._set_rf_flow_id(flow_id)

        # check rf_flow_id was set
        n=len(self.fei_dev.frontend_tuner_status)
        expected=[flow_id]*n
        actual=[ x["FRONTEND::tuner_status::rf_flow_id"] for x in self.fei_dev.frontend_tuner_status ]
        self.assertEqual(expected,actual, "Mismatch of RF Flow Ids for tuners")

        # allocation for sample rate and rf_flow_id
        alloc1=frontend.createTunerAllocation(center_frequency=cf, sample_rate=sr, rf_flow_id=flow_id)
        ret=self.fei_dev.allocateCapacity(alloc1)
        self.alloc1=alloc1
        alloc1_aid =  alloc1["FRONTEND::tuner_allocation"]["FRONTEND::tuner_allocation::allocation_id"]
        self.assertEqual(True,ret, "Allocation failed using rf_flow_id")

        alloc2_aid = alloc1_aid
        if alloc_params['nbddc_srate'] is not None:
            # allocation for center freq and rf_flow_id
            alloc2=frontend.createTunerAllocation(center_frequency=cf, rf_flow_id=flow_id)
            ret=self.fei_dev.allocateCapacity(alloc2)
            alloc2_aid = alloc2["FRONTEND::tuner_allocation"]["FRONTEND::tuner_allocation::allocation_id"]
            self.assertEqual(True,ret, "Allocation failed using rf_flow_id again ")
            self.alloc2=alloc2

        # verify the valid rf_flow_id was propagated downstream
        sink=sb.StreamSink()
        sdds_in=sb.launch('rh.SourceSDDS', properties={
            'interface': INTERFACE,
            'advanced_optimizations': {
                'advanced_optimizations::sdds_pkts_per_bulkio_push': 5
            },
            'attachment_override': {
                'attachment_override:endianness': "1234"
            }
        })
        sb.start()
        self.fei_dev.connect(sdds_in, connectionId=alloc2_aid, usesPortName='dataSDDS_out')
        sdds_in.connect(sink, usesPortName="dataShortOut")

        kws=None
        try:
            sink_data=sink.read()
            kws=properties.props_to_dict(sink_data.sri.keywords)
        except:
            pass
        self.assertEqual( kws["FRONTEND::RF_FLOW_ID"] , flow_id, "Missing RF_FLOW_ID from keyword list")
Example #43
    def startFlow(self):
        sb.start()
        self.comp.playback_state = 'PLAY'
        time.sleep(2)
        sb.stop()
        return self.sink.getData()
    def test_sandboxComplexProps(self):
        from ossie.utils import sb

        # values from the component PRF file
        expectedDefaults = {
            "boolean"   : numpy.complex(False, True),
            "ulong"     : numpy.complex(4,5),
            "short"     : numpy.complex(4,5),
            "float"     : numpy.complex(4.,5.),
            "octet"     : numpy.complex(4,5),
            "ushort"    : numpy.complex(4,5),
            "double"    : numpy.complex(4.,5.),
            "long"      : numpy.complex(4,5),
            "longlong"  : numpy.complex(4,5),
            "ulonglong" : numpy.complex(4,5)}


        '''
            "cFloatSeq"       : component.complexFloatSeq,
            "cFloatStruct"    : component.complexFloatStruct,
            "cFloatStructSeq" : component.complexFloatStructSeq}
            "cFloatSeq"       : [CF.complexFloat(real=1.0, imag=0.0),
                                 CF.complexFloat(real=1.0, imag=0.0),
                                 CF.complexFloat(real=1.0, imag=0.0)],
            "cFloatStruct"    : {"complexFloatStructMember": CF.complexFloat(real=1.0, imag=0.0)},
            "cFloatStructSeq" : [{"complexFloatStructMember": CF.complexFloat(real=1.0, imag=0.0)}]}
        '''

        # Create an instance of the test component in each supported language
        components = {"cpp"   : sb.launch("TestComplexProps", impl="cpp"),
                      "python": sb.launch("TestComplexProps", impl="python")}
        if java_support:
            components["java"] = sb.launch("TestComplexProps", impl="java")

        sb.start()

        for language in components.keys():
            # allow for visual inspection of complex sequences
            # TODO: replace this with an automated comparison
            print language
            print components[language].complexFloatProp
            print "simple struct member"
            print components[language].FloatStruct.FloatStructMember
            components[language].FloatStruct.FloatStructMember = 9
            print components[language].FloatStruct.FloatStructMember
            print "complex struct member"
            print components[language].complexFloatStruct.complexFloatStructMember
            components[language].complexFloatStruct.complexFloatStructMember = complex(9,10)
            print components[language].complexFloatStruct.complexFloatStructMember


            print components[language].complexFloatSequence
            components[language].complexFloatSequence = [complex(6,7)]*3
            print components[language].complexFloatSequence
            print ""


        for componentKey in components.keys():
            # loop through all three languages and query for the default
            # property values
            defaults = self._queryDefaults(components[componentKey])
            for key in defaults.keys():
                # Loop through the default property values and compare them
                # to the expected values.
                self._compareComplexValues(defaults[key], expectedDefaults[key])

        sb.domainless._cleanUpLaunchedComponents()
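
        # _queryDefaults() and _compareComplexValues() are helpers defined
        # elsewhere in this test class; a minimal sketch of the comparison,
        # assuming it only checks real and imaginary parts (not necessarily
        # the original implementation):
        #
        #   def _compareComplexValues(self, actual, expected):
        #       self.assertAlmostEqual(actual.real, expected.real)
        #       self.assertAlmostEqual(actual.imag, expected.imag)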
Example #45
    def testRFFlowID(self):

        #create rf_info uses port and connect to MSDD
        out_rf_info_port = frontend.OutRFInfoPort("out_control")
        in_rf_info_port = self.comp.getPort("RFInfo_in")
        out_rf_info_port.connectPort(in_rf_info_port, "test_rf_flow")

        # get params for allocations
        bw = float(self.comp.frontend_tuner_status[0]
                   ["FRONTEND::tuner_status::available_bandwidth"])
        sr = float(self.comp.frontend_tuner_status[0]
                   ["FRONTEND::tuner_status::available_sample_rate"])

        # allocation params
        flow_id = "ca-710-flow"
        cf = 100e6

        # set rf flow id
        out_rf_info_port._set_rf_flow_id(flow_id)

        # check rf_flow_id was set
        n = len(self.comp.frontend_tuner_status)
        expected = [flow_id] * n
        actual = [
            x["FRONTEND::tuner_status::rf_flow_id"]
            for x in self.comp.frontend_tuner_status
        ]
        self.assertEqual(expected, actual,
                         "Mismatch of RF Flow Ids for tuners")

        # allocation for sample rate and rf_flow_id
        alloc1 = frontend.createTunerAllocation(center_frequency=cf,
                                                sample_rate=sr,
                                                rf_flow_id=flow_id)
        ret = self.comp.allocateCapacity(alloc1)
        alloc1_aid = alloc1["FRONTEND::tuner_allocation"][
            "FRONTEND::tuner_allocation::allocation_id"]
        self.assertEqual(True, ret, "Allocation failed using rf_flow_id")

        # allocation for center freq and rf_flow_id
        alloc2 = frontend.createTunerAllocation(center_frequency=cf,
                                                rf_flow_id=flow_id)
        ret = self.comp.allocateCapacity(alloc2)
        alloc2_aid = alloc2["FRONTEND::tuner_allocation"][
            "FRONTEND::tuner_allocation::allocation_id"]
        self.assertEqual(True, ret,
                         "Allocation failed using rf_flow_id again ")

        # verify the valid rf_flow_id was propagated downstream
        sink = sb.StreamSink()
        sdds_in = sb.launch('rh.SourceSDDS',
                            properties={'interface': INTERFACE})
        sb.start()
        self.comp.connect(sdds_in,
                          connectionId=alloc2_aid,
                          usesPortName='dataSDDS_out')
        sdds_in.connect(sink, usesPortName="dataShortOut")

        kws = None
        try:
            sink_data = sink.read()
            kws = properties.props_to_dict(sink_data.sri.keywords)
        except:
            pass
        self.assertEqual(kws["FRONTEND::RF_FLOW_ID"], flow_id,
                         "Missing RF_FLOW_ID from keyword list")
Example #46
    def testMultipleStreamsSamePort(self):
        self.octetConnect()
        sink = sb.DataSinkSDDS()
        ad_cb = SddsAttachDetachCB()
        sink.registerAttachCallback(ad_cb.attach_cb)
        sink.registerDetachCallback(ad_cb.detach_cb)
        self.comp.connect(sink)
        sb.start()

        goodData1 = 1024 * [1]
        deckedStream = 1024 * [2]
        goodData2 = 1024 * [3]

        # No data pushed, no attaches or detaches
        self.assertEqual(
            len(ad_cb.get_attach()), 0,
            "Should not have received any attaches but we have: %s " %
            len(ad_cb.get_attach()))
        #         self.assertEqual(len(ad_cb.get_detach()), 0, "Should not have received any detaches")

        # Push one good packet and confirm it was received
        self.source.push(goodData1,
                         EOS=False,
                         streamID=self.id(),
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            goodData1, list(struct.unpack('1024B',
                                          self.getPacket()[-1024:])))
        # Since we pushed, we should get an attach, no detach
        self.assertEqual(len(ad_cb.get_attach()), 1,
                         "Should have received 1 attach total")
        #         self.assertEqual(len(ad_cb.get_detach()), 0, "Should not have received any detaches")

        # Push a new stream, it should get ignored, confirm we receive no data and still have only a single attach
        self.source.push(deckedStream,
                         EOS=False,
                         streamID="Decked Stream",
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            len(self.getPacket()), 0,
            "Should not have passed on new stream, stream already active")
        self.assertEqual(len(ad_cb.get_attach()), 1,
                         "Should have received 1 attach total")
        #         self.assertEqual(len(ad_cb.get_detach()), 0, "Should not have received any detaches")

        # Push an EOS which should cause a detach
        self.source.push(goodData1,
                         EOS=True,
                         streamID=self.id(),
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            goodData1, list(struct.unpack('1024B',
                                          self.getPacket()[-1024:])))
        time.sleep(1)
        self.source.push(deckedStream,
                         EOS=False,
                         streamID="Decked Stream",
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        time.sleep(1)
        self.assertEqual(len(ad_cb.get_attach()), 2,
                         "Should have received 2 attach total")
        #         self.assertEqual(len(ad_cb.get_detach()), 1, "Should have received 1 detach total")

        # Send a new stream, which means a new attach
        self.source.push(goodData2,
                         EOS=False,
                         streamID="Another on the deck stream",
                         sampleRate=1.0,
                         complexData=False,
                         loop=False)
        self.assertEqual(
            deckedStream, list(struct.unpack('1024B',
                                             self.getPacket()[-1024:])))
        self.assertEqual(len(ad_cb.get_attach()), 2,
                         "Should have received 2 attach total")
        #         self.assertEqual(len(ad_cb.get_detach()), 1, "Should have received 1 detach total")

        # Tear stuff down, confirm we get the final detach
        sb.release()
        self.assertEqual(len(ad_cb.get_attach()), 2,
                         "Should have received 2 attach total")
        self.assertEqual(len(ad_cb.get_detach()), 2,
                         "Should have received 2 detach total")
Example #47
    def _test_FileSinkType2000(self, format, subsize):
        filename = self._tempfileName('sink_2000_%s' % format)

        complexData = format.startswith('C')
        typecode = format[1]
        dataFormat, dataType = self.TYPEMAP[typecode]

        sink = sb.FileSink(filename, midasFile=True)
        sb.start()

        # Manually create our own SRI, because DataSource doesn't support
        # setting the Y-axis fields
        sri = bulkio.sri.create('test_stream')
        if complexData:
            sri.mode = 1
        else:
            sri.mode = 0
        sri.subsize = subsize
        sri.ystart = subsize / -2.0
        sri.ydelta = 1.0
        sri.yunits = 3

        # Generate test data; unlike the type 1000 tests, we have to generate
        # input data compatible with CORBA because we're bypassing DataSource
        frames = 4
        samples = subsize * frames
        if complexData:
            samples *= 2
        if dataFormat == 'char':
            indata = numpy.arange(samples, dtype=numpy.int8)
            packet = indata.tostring()
        else:
            indata = [dataType(x) for x in xrange(samples)]
            packet = indata

        # Push the SRI and data directly to the sink's port
        port = sink.getPort(dataFormat + 'In')
        port.pushSRI(sri)
        port.pushPacket(packet, bulkio.timestamp.now(), True, sri.streamID)

        sink.waitForEOS()

        hdr, outdata = bluefile.read(filename)
        self.assertEqual(hdr['format'], format)
        self.assertEqual(hdr['subsize'], subsize)
        self.assertEqual(hdr['ystart'], sri.ystart)
        self.assertEqual(hdr['ydelta'], sri.ydelta)
        self.assertEqual(hdr['yunits'], sri.yunits)
        self.assertEqual(len(outdata), frames)

        if complexData:
            if dataFormat == 'float':
                indata = numpy.array(indata,
                                     dtype=numpy.float32).view(numpy.complex64)
                indata = numpy.reshape(indata, (-1, subsize))
            elif dataFormat == 'double':
                indata = numpy.array(indata, dtype=numpy.float64).view(
                    numpy.complex128)
                indata = numpy.reshape(indata, (-1, subsize))
            else:
                indata = numpy.reshape(indata, (-1, subsize, 2))
        else:
            indata = numpy.reshape(indata, (-1, subsize))

        self.assertTrue(numpy.array_equal(indata, outdata),
                        msg="Format '%s' %s != %s" % (format, indata, outdata))
    def test_bpsk_decode(self):
        nbits = 6000
        samples_per_symbol = 4

        # Generate random bit sequence and convert to BPSK symbols
        bits_in = [random.randint(0, 1) for x in range(nbits)]
        samples_in = symbols_to_samples(bits_to_symbols(bits_in),
                                        samples_per_symbol)

        # Add Gaussian noise and a slow rotation to the symbols
        sig_in = apply_rotation(add_noise(samples_in, 0.10), 0.01)

        # Convert from a list of complex pairs to a single list of floats
        sig_in = expand_complex(sig_in)

        source = sb.DataSource()
        sink = sb.DataSink()

        # FLL
        fll_ntaps = 55
        freq_recov = sb.Component(
            '../components/fll_band_edge_cc_4o/fll_band_edge_cc_4o.spd.xml',
            execparams=execparams)
        freq_recov.samples_per_symbol = samples_per_symbol
        freq_recov.rolloff = 0.35
        freq_recov.filter_size = fll_ntaps
        freq_recov.bandwidth = 2.0 * math.pi / 100.0

        # Timing recovery
        nfilts = 32
        time_recov = sb.Component(
            '../components/pfb_clock_sync_ccf_4o/pfb_clock_sync_ccf_4o.spd.xml',
            execparams=execparams)
        time_recov.sps = samples_per_symbol
        time_recov.loop_bw = 2 * math.pi / 100.0
        # Note: taps are hard-coded
        time_recov.taps = list(bpsk_taps.taps)
        time_recov.filter_size = nfilts
        time_recov.init_phase = nfilts / 2
        time_recov.max_rate_deviation = 1.5
        time_recov.osps = 1

        # BPSK symbol decode
        receiver = sb.Component(
            '../components/psk_demod_cb/psk_demod_cb.spd.xml',
            execparams=execparams)
        receiver.constellation = 1
        receiver.loop_bw = 2 * math.pi / 100.0
        receiver.fmin = -0.25
        receiver.fmax = 0.25

        # Connect components
        source.connect(freq_recov)
        freq_recov.connect(time_recov, usesPortName='data_complex_out')
        time_recov.connect(receiver, usesPortName='data_complex_out')
        receiver.connect(sink)

        # Push data through components, waiting for completion
        sb.start()
        source.push(sig_in, EOS=True, complexData=True)
        sym_out = sink.getData(eos_block=True)
        sb.stop()

        # The symbol is equivalent to the bit value; unpack from a char to
        # a number.
        bits_out = map(lambda x: struct.unpack('b', x)[0], sym_out)

        # The output data is delayed by 34 samples
        delay = 34

        # Verify that our delayed output data matches the input
        bit_errors = 0
        for ii in range(len(bits_out) - delay):
            if bits_out[ii + delay] != bits_in[ii]:
                bit_errors += 1

        self.failUnless(bit_errors == 0, '%d bit errors' % (bit_errors, ))
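
        # bits_to_symbols(), symbols_to_samples(), add_noise(),
        # apply_rotation() and expand_complex() are helpers defined elsewhere
        # in this test module.  As one example, a minimal sketch of
        # expand_complex(), which flattens complex samples into the
        # interleaved float list pushed through sb.DataSource (not
        # necessarily the original implementation):
        #
        #   def expand_complex(samples):
        #       flat = []
        #       for sample in samples:
        #           flat.extend([float(sample.real), float(sample.imag)])
        #       return flat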
def main(argv):
    sadfile = ''
    inputfile = ''
    dataformat = ''
    samplerate = ''
    iscomplex = False
    outputfile = ''
    try:
        opts, args = getopt.getopt(argv, "hs:i:f:r:co:", [
            "sadfile=", "ifile=", "dataformat=", "samplerate=", "complex",
            "ofile="
        ])
    except getopt.GetoptError:
        print 'runWaveform.py -s <sadfile> -i <inputfile> -f <dataFormat> -r <sampleRate> -c -o <outputfile>'
        sys.exit(2)
    for opt, arg in opts:
        #print 'evaluating opt - ',opt,' arg - ',arg
        if opt == '-h':
            print 'runWaveform.py -s <sadfile> -i <inputfile> -f <dataFormat> -r <sampleRate> -c -o <outputfile>'
            sys.exit()
        elif opt in ("-s", "--sadfile"):
            sadfile = arg
        elif opt in ("-i", "--ifile"):
            inputfile = arg
        elif opt in ("-f", "--dataformat"):
            dataformat = arg
        elif opt in ("-r", "--samplerate"):
            samplerate = arg
        elif opt in ("-c", "--complex"):
            iscomplex = True
        elif opt in ("-o", "--ofile"):
            outputfile = arg
            print 'setting outputfile', outputfile
    print 'Processing ', inputfile, " through waveform - ", sadfile
    sadFile = open(sadfile)
    sadFileString = sadFile.read()
    usesPort = ''
    usesComponent = ''
    providesPort = ''
    providesComponent = ''
    sadXML = parsers.sad.parseString(sadFileString)
    if sadXML.get_externalports():
        for port in sadXML.get_externalports().get_port():
            if port.get_usesidentifier():
                usesPort = port.get_usesidentifier()
                usesComponent = port.get_componentinstantiationref()
            elif port.get_providesidentifier():
                providesPort = port.get_providesidentifier()
                providesComponent = port.get_componentinstantiationref()
        if not usesPort and not providesPort:
            print 'Need uses and provides external ports'
            sys.exit()
    else:
        print 'No external ports'
        sys.exit()
    print usesPort, providesPort
    if not usesPort or not providesPort:
        print 'Require external uses & provides port'
        sys.exit()
    sb.loadSADFile(sadfile)
    fileSource = sb.FileSource(filename=inputfile,
                               dataFormat=dataformat,
                               sampleRate=samplerate)
    fileSink = sb.FileSink(filename=outputfile)
    #FIXME check file type matches external port
    fileSource.connect(sb.getComponent(providesComponent.get_refid()),
                       providesPortName=providesPort)
    sb.getComponent(usesComponent.get_refid()).connect(fileSink,
                                                       usesPortName=usesPort)
    sb.start()
    fileSink.waitForEOS()
    sb.stop()
    sb.release()
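
# Example command line for the option handling above (file names and values
# are placeholders only):
#
#   python runWaveform.py -s wave.sad.xml -i input.dat -f float -r 1e6 -c -o output.dat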
Example #51
    def testComplexData1(self):
        print "\n-------- TESTING w/COMPLEX DATA1 --------"
        #---------------------------------
        # Start component and set fftSize
        #---------------------------------
        sb.start()
        ID = "ComplexData1"
        fftSize = 8192
        self.comp.fftSize = fftSize

        #------------------------------------------------
        # Create test signal
        #------------------------------------------------
        # 8192 samples of a (3 kHz + 7 kHz) complex signal at a 32768 Hz sample rate
        sample_rate = 32768.
        nsamples = 8192.

        F_3KHz = 3000.
        A_3KHz = 10.0

        F_7KHz = 7000.
        A_7KHz = 5.0

        t = arange(nsamples) / sample_rate
        tmpData = A_7KHz * np.exp(1j * 2 * pi * F_7KHz * t) + A_3KHz * np.exp(
            1j * 2 * pi * F_3KHz * t)

        #Convert signal from python complex to RedHawk interleaved complex
        data = unpackCx(tmpData)

        #------------------------------------------------
        # Test Component Functionality.
        #------------------------------------------------
        # Push Data
        cxData = True
        self.src.push(data,
                      streamID=ID,
                      sampleRate=sample_rate,
                      complexData=cxData)
        time.sleep(.5)

        # Get Output Data
        fftOut = self.fftsink.getData()[0]  # use first frame
        psdOut = self.psdsink.getData()[0]  # use first frame
        pyFFT = abs(scipy.fft(tmpData, fftSize))

        #Validate SRI Pushed Correctly
        self.validateSRIPushing(ID, cxData, sample_rate, fftSize)

        #Convert Redhawk interleaved complex data to python complex for fftOut
        fftOut = packCx(fftOut)

        # Adjust length of data for accurate comparisons
        pyFFT = pyFFT[0:fftSize / 2]
        psdOut = psdOut[fftSize / 2:fftSize]
        fftOut = fftOut[fftSize / 2:fftSize]

        # Uncomment function below to see output plots:
        #plotFreqData(fftSize, sample_rate, pyFFT, fftOut, psdOut)

        # Normalize the data for accurate comparison with python fft
        pyFFTMax = max(pyFFT)
        fftOutMax = max(fftOut)
        psdOutMax = max(psdOut)
        pyMaxSquared = pyFFTMax**2

        pyFFTIndex = pyFFT.tolist().index(pyFFTMax)
        fftIndex = fftOut.index(fftOutMax)
        psdIndex = psdOut.index(psdOutMax)

        # Check that the component max values and index values are equal to python's
        threshold = 0.000001
        self.assertFalse(abs(pyFFTMax - fftOutMax) >= fftOutMax * threshold)
        self.assertFalse(
            abs(pyMaxSquared - psdOutMax) >= psdOutMax * threshold)
        self.assertFalse(abs(pyFFTIndex - fftIndex) >= 1.0)
        self.assertFalse(abs(pyFFTIndex - psdIndex) >= 1.0)

        print "*PASSED"