Example #1
0
    def work(self, input_items, output_items):
        """Consume one stream of samples and push the latest one to the
        compass widget, rate-limited so the GUI is updated at most once
        per self.update_res timer ticks."""
        samples = input_items[0]
        for sample in samples:
            now = gr.high_res_timer_now()
            if now > self.last_update + self.update_res:
                self.setCompassValue(sample)
                self.last_update = gr.high_res_timer_now()

        return len(samples)
Example #2
0
    def work(self, input_items, output_items):
        """Run every input sample through the display processing chain
        (expand factor, reference level, averaging, peak hold) and, unless
        display hold is active, push the result to the widget at most once
        per self.update_res timer ticks."""
        samples = input_items[0]
        for sample in samples:
            now = gr.high_res_timer_now()
            value = self.find_peak(
                self.calculate_average(
                    self.adjust_ref_level(
                        self.expand_factor(sample))))
            if not self.hold and now > self.last_update + self.update_res:
                self.last_update = gr.high_res_timer_now()
                self.setValue(value)

        return len(samples)
Example #3
0
def main():
    """Benchmark driver.

    Builds a list of test descriptions either from a JSON config file
    (args.file) or from the parallel args.iscripts/args.nitems/args.iters
    lists, imports each benchmark module, times every 'run*' method of the
    test class for 'iters' iterations (optionally collecting per-block work
    times over ControlPort), and pickles [sysinfo, tests, results] to
    args.ofile.

    NOTE(review): Python 2 code (print statements, xrange, dict.has_key).
    """
    parser = add_argparser()
    args = parser.parse_args()
    verbose = args.verbose
    kwargs = {}

    # ControlPort (Ice) communicator, used below to query per-block work
    # times from the running flowgraph when ctrlport support is enabled.
    ic = None
    if enable_ctrlport: 
        ic = Ice.initialize(sys.argv)

    # Require at least one source of test definitions.
    if(args.file is None and args.iscripts is None):
        print "Please specify either a config file or a list of scripts to run.\n"
        parser.print_help()
        sys.exit(1)

    if args.file is None:
        # Command-line branch: the three lists must be present and all the
        # same length as the script list.
        nscripts = len(args.iscripts)
        if(args.nitems is None or args.iters is None):
            print "Please specify a number of items and number of iterations for each benchmark script provided.\n"
            parser.print_help()
            sys.exit()
            
        if(len(args.nitems) != nscripts or len(args.iters) != nscripts):
            print "Please specify a number of items and number of iterations for each benchmark script provided.\n"
            parser.print_help()
            sys.exit()

        # NOTE(review): script_dir is never assigned on this branch but is
        # used for the module import below -- looks like a NameError waiting
        # to happen; confirm intended behavior.
        tests = []
        for n in xrange(nscripts):
            newtest = dict()
            newtest['module'] = args.iscripts[n]
            # Module name doubles as the test-class name on this branch.
            newtest['testname'] = args.iscripts[n]
            newtest['nitems'] = args.nitems[n]
            newtest['iters'] = args.iters[n]
            tests.append(newtest)

    else:
        # Config-file branch: the JSON file supplies the test list; the
        # scripts directory comes from --directory, then the file's
        # 'directory' key, then a default.
        f = json.load(open(args.file, 'rb'))
        if args.directory:
            script_dir = args.directory
        else:
            try:
                script_dir = f['directory']
            except KeyError:
                script_dir = "bm_scripts"
        tests = f['tests']

    # Pattern used to select test methods by name.
    # NOTE(review): 'run*' means 'ru' followed by zero or more 'n's; with
    # .match() this accepts any attribute starting with "ru" -- 'run.*'
    # (or str.startswith('run')) was probably intended.
    find_tests = re.compile('run*')
    tpms = gr.high_res_timer_tps()/1000.0 # ticks per millisecond

    # System description recorded alongside the results.
    procinfo = gr_profiler.cpuinfo()
    procver = gr_profiler.kversion()
    sysinfo = procinfo + procver
    #print sysinfo

    # results: {testname: {run-function-name: stats tuple}}
    results = {}

    for ntest, t in enumerate(tests):
        test_name = t['module'] + "." + t['testname']
        # Import the benchmark module; the test class inside it has the
        # same name as t['testname'].
        qa = __import__(script_dir + '.' + t['module'], globals(), locals(), t['testname'])
        iters = t['iters']
        nitems = t['nitems']
        # Optional per-test constructor kwargs.
        # NOTE(review): a test without 'kwargs' inherits the previous
        # test's kwargs because the variable is not reset each loop.
        if(t.has_key('kwargs')):
            kwargs = t['kwargs']
        fresults = {}

        # Turn off a test by setting iters = 0
        if iters == 0:
            continue

        # Get the name of the test class in the module
        test_suite = getattr(qa, t['testname'])

        # Search for all tests in the test class
        test_funcs = []
        for f in dir(test_suite):
            testf = find_tests.match(f)
            if testf:
                test_funcs.append(testf.string)

        obj = test_suite(nitems, **kwargs)

        # Run each test case iters number of iterations
        for f in test_funcs:
            print "\nRUNNING FUNCTION: {0}.{1}".format(str(test_name), str(f))

            # Per-iteration wall-clock ticks for the whole program run and
            # for the sum over all blocks (integer arrays of length iters).
            _program_time = numpy.array(iters*[0,])
            _all_blocks_time = numpy.array(iters*[0,])
            _nblocks = 0

            # Run function setup
            if hasattr(obj, f.replace("run_", "setup_")):
                _x = getattr(obj, f.replace("run_", "setup_"))()

            for i in xrange(iters):
                _start_time = gr.high_res_timer_now()
                _x = getattr(obj, f)
                _x()
                _end_time = gr.high_res_timer_now()

                _program_time[i] = _end_time - _start_time

                # Per-block work times via ControlPort, when available.
                times = {}
                if enable_ctrlport:
                    times = get_block_times(ic, obj.tb._tb.alias())

                # Lazily allocate one timing array per block the first time
                # any blocks are reported.
                if _nblocks == 0:
                    # NOTE(review): this 'n' is never used and shadows the
                    # loop variable from the command-line branch above.
                    n = len(times.keys())
                    _blocks_times = dict()
                    for bt in times:
                        _blocks_times[bt] = numpy.array(iters*[0,])

                _nblocks = len(times.keys())
                for bt in times:
                    _all_blocks_time[i] += times[bt]
                    _blocks_times[bt][i] = times[bt]

            # Convert tick statistics to milliseconds.
            pt_min = _program_time.min()/tpms
            pt_avg = _program_time.mean()/tpms
            pt_var = (_program_time/tpms).var()

            bt_min = _all_blocks_time.min()/tpms
            bt_avg = _all_blocks_time.mean()/tpms
            bt_var = (_all_blocks_time/tpms).var()

            # Same statistics, per individual block.
            bt_blks_min = dict()
            bt_blks_avg = dict()
            bt_blks_var = dict()
            for bt in _blocks_times:
                bt_blks_min[bt] = _blocks_times[bt].min()/tpms
                bt_blks_avg[bt] = _blocks_times[bt].mean()/tpms
                bt_blks_var[bt] = (_blocks_times[bt]/tpms).var()

            if(verbose):
                print "Num. Blocks:   {0}".format(_nblocks)
                print "Program Time:  {0:.2f} ms".format(pt_avg)
                print "     std dev:  {0:.2e} ms".format(numpy.sqrt(pt_var))
                print "Block Time:    {0:.2f} ms".format(bt_avg)
                print "     std dev:  {0:.2e} ms".format(numpy.sqrt(bt_var))
                print "Ratio:         {0:.2f}".format(bt_avg/pt_avg)
            fresults[f] = (pt_min, pt_avg, pt_var, bt_min, bt_avg, bt_var,
                           _nblocks, bt_blks_min, bt_blks_avg, bt_blks_var)
        results[t['testname']] = fresults

    #print ""
    #print results
    #print ""

    # NOTE(review): re-reads the last test's class; the result is unused.
    test_suite = getattr(qa, t['testname'])

    # Persist everything for later analysis/plotting.
    pickle.dump([sysinfo, tests, results], open(args.ofile, 'wb'))
    def get_audio_rate(self):
        """Return the currently configured audio sample rate."""
        return self.audio_rate

    def set_audio_rate(self, audio_rate):
        """Store the new audio sample rate and retune the arbitrary
        resampler to the ratio of the new rate over the 50 kHz channel
        rate."""
        self.audio_rate = audio_rate
        resamp_rate = self.audio_rate / 50e3
        self.pfb_arb_resampler_xxx_0.set_rate(resamp_rate)

    def get_audio_ntaps(self):
        """Return the configured number of audio filter taps."""
        return self.audio_ntaps

    def set_audio_ntaps(self, audio_ntaps):
        """Record the number of audio filter taps (no filters are rebuilt
        here; the value is only stored)."""
        self.audio_ntaps = audio_ntaps

if __name__ == '__main__':
    # Benchmark harness: sweep the scheduler's max_noutput_items setting
    # and print the flowgraph's measured average throughput after a fixed
    # five-second run at each value.
    parser = OptionParser(option_class=eng_option, usage="%prog: [options]")
    (options, args) = parser.parse_args()
    if gr.enable_realtime_scheduling() != gr.RT_OK:
        print "Error: failed to enable realtime scheduling."

    for noutput in range(64, 4096, 128):
        top = fm_channelize_latency()
        t_begin = gr.high_res_timer_now()
        top.start(noutput)
        time.sleep(5)
        t_end = gr.high_res_timer_now()
        throughput = top.west_timestamp_sink_f_0.pc_throughput_avg()
        top.stop()
        print "throughput is %i" % (throughput)