    def test_mapper(self):
        fn_stocks = os.path.join(self.data_dir, 'map_stock1.py')
        target.Stocks = read_test_data(fn_stocks)
        fn_dates = os.path.join(self.data_dir, 'map_date1.py')
        target.Dates = read_test_data(fn_dates)

        fn_map_input = os.path.join(self.data_dir, 'test_stock.csv')
        with open(fn_map_input, 'r') as f_map_input:
            fn_map_output = None
            with tempfile.NamedTemporaryFile(mode='w',
                                             suffix='.tmp',
                                             prefix='map',
                                             dir=self.data_dir,
                                             delete=False) as f_map_out:
                fn_map_output = f_map_out.name
                with StdioSwitcher(f_map_input, f_map_out):
                    target.mapper()
            fn_expected = os.path.join(self.data_dir, 'map_expected1.txt')

            # check result
            self.assertTrue(filecmp.cmp(fn_expected, fn_map_output),
                            'check {}'.format(fn_map_output))

            # delete output file
            if fn_map_output is not None:
                os.remove(fn_map_output)
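# Note: `StdioSwitcher` above is an external helper that drives the
# Hadoop-streaming-style `target.mapper()` by rebinding stdin/stdout.
# A minimal sketch of such a context manager (an assumption, not the
# project's actual implementation):
import sys

class StdioSwitcher:
    def __init__(self, new_stdin, new_stdout):
        self.new_stdin, self.new_stdout = new_stdin, new_stdout

    def __enter__(self):
        self.old_stdin, self.old_stdout = sys.stdin, sys.stdout
        sys.stdin, sys.stdout = self.new_stdin, self.new_stdout
        return self

    def __exit__(self, exc_type, exc, tb):
        sys.stdin, sys.stdout = self.old_stdin, self.old_stdout
        return False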
Example #3
 def __init__(self, options):
     self.sources = options['sources']
     self.verbose = options['verbose']
     self.scan_results_path = './intermediate/result.json'
     self.scan_results = {}
     self.file_types = ['.h', '.m', '.mm', '.pbxproj', '.pch']
     if 'rule' in options:
         self.mapper = mapper.mapper(rule_path=options['rule'])
     else:
         self.mapper = mapper.mapper()
     self.matcher = matcher.matcher()
Example #4
    def test_mapping(self):
        src_data = (0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1,
                    1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1) * 16
        src = blocks.vector_source_b(src_data, False)
        mapped = mapper.mapper(mapper.PSK8, ([0, 1, 3, 2, 6, 7, 5, 4]))
        demapped = mapper.demapper(mapper.PSK8, ([0, 1, 3, 2, 6, 7, 5, 4]))

        dst = blocks.vector_sink_b(1)
        self.tb.connect(src, mapped, demapped, dst)
        self.tb.run()
        self.assertEqual(src_data, dst.data())
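# Note: the symbol table [0, 1, 3, 2, 6, 7, 5, 4] above is the binary-reflected
# Gray sequence g(w) = w ^ (w >> 1). A pure-NumPy sketch of the same map/demap
# round trip (the MSB-first bit packing and the angle convention are
# assumptions, not gr-mapper's documented behaviour):
import numpy as np

greymap = np.array([0, 1, 3, 2, 6, 7, 5, 4])         # 3-bit word -> symbol index
bits = np.random.randint(0, 2, 3 * 64)
words = bits.reshape(-1, 3) @ np.array([4, 2, 1])    # pack each 3 bits, MSB first
symbols = np.exp(2j * np.pi * greymap[words] / 8.0)  # 8PSK constellation points

# demap: pick the nearest constellation point, then invert the permutation
ref = np.exp(2j * np.pi * np.arange(8) / 8.0)
idx = np.argmin(np.abs(symbols[:, None] - ref[None, :]), axis=1)
assert np.array_equal(words, np.argsort(greymap)[idx])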
Example #5
    def test_preamble(self):
        pream_len = 52
        pream = (mapper.preamble_generator(pream_len, 511,
                                           1033)).get_preamble()

        rand_src = blocks.vector_source_b(
            map(int, numpy.random.randint(0, 2, 1024)), True)
        head = blocks.head(gr.sizeof_char * 1, 1024)
        src_sink = blocks.vector_sink_b(1)
        pream_inst = mapper.preamble_insert_bb(pream_len * 10, (pream))
        bit2symb = mapper.mapper(mapper.BPSK, ([0, 1]))
        pream_sync = mapper.preamble_sync_cc(pream_len * 10, (pream),
                                             mapper.BPSK, ([0, 1]), .97, .90)
        symb2bit = mapper.demapper(mapper.BPSK, ([0, 1]))
        rec_sink = blocks.vector_sink_b(1)

        self.tb.connect((rand_src, 0), (head, 0))
        self.tb.connect((head, 0), (pream_inst, 0))
        self.tb.connect((head, 0), (src_sink, 0))
        self.tb.connect((pream_inst, 0), (bit2symb, 0))
        self.tb.connect((bit2symb, 0), (pream_sync, 0))
        self.tb.connect((pream_sync, 0), (symb2bit, 0))
        self.tb.connect((symb2bit, 0), (rec_sink, 0))

        self.tb.start()
        sleep(1)
        self.tb.stop()

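        # the preamble occupies pream_len of every pream_len*10 frame bits,
        # so compare only the pream_len*9 payload bits that survive the sync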
        data_space = pream_len * 9
        sd = src_sink.data()
        rd = rec_sink.data()
        self.assertEqual(sd[0:data_space], rd[0:data_space])
Example #6
def main():
    wsizex = 512
    wsizey = 512
    margin = 10
    root = Tk()
    root.title(" Simulador de Doenças ")
    world = MyWorld()
    # maps the world rectangle onto a viewport of wsizex x wsizey pixels .
    canvas = Canvas(root, width=512, height=512, background='dark grey')
    sp = SimulationPanel(world, canvas)
    sp.wvmap = mapper([0, 0, world.getWidth() - 1,
                       world.getHeight() - 1],
                      [margin, margin, wsizex - margin, wsizey - margin],
                      False, False)
    print([0, 0, world.getWidth() - 1,
           world.getHeight() - 1],
          [margin, margin, wsizex - margin, wsizey - margin])
    poll = Timer(root, sp.draw, 500)
    canvas.bind("<Configure>", sp.resize)
    root.bind("<Escape>", lambda _: root.destroy())
    root.bind("s", lambda _: poll.stop())
    root.bind("r", lambda _: poll.restart())
    root.bind("p", sp.printData)
    root.bind("<Button-1>", lambda e: sp.mousePressed(e))
    poll.run()
    root.mainloop()
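# The mapper([x0, y0, x1, y1], [X0, Y0, X1, Y1], ...) call above builds a
# window-to-viewport transform from world coordinates to pixels. A minimal
# sketch of that affine mapping (assumed behaviour, not the actual mapper
# class):
def world_to_viewport(x, y, world, view):
    wx0, wy0, wx1, wy1 = world
    vx0, vy0, vx1, vy1 = view
    sx = (vx1 - vx0) / float(wx1 - wx0)
    sy = (vy1 - vy0) / float(wy1 - wy0)
    return (vx0 + (x - wx0) * sx, vy0 + (y - wy0) * sy)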
    def __init__(self, sps=2.0, rolloff=0.35, preamble=[0,0,0,0,0,0,1,1,0,1,1,0,1,1,0,0,1,1,1,1,0,0,0,0],
                    modtype=mapper.QPSK, greymap=[0,1,3,2] ):
        gr.hier_block2.__init__(self, "preamble_correlator",
                                    gr.io_signature(1,1,gr.sizeof_gr_complex),
                                    gr.io_signature(1,1,gr.sizeof_float))
                                    #gr.io_signature(0,0,0))

        # vet preamble bits
        for b in preamble:
            assert(b >= 0 and b<=1);

        tb = gr.top_block();
        vs = blocks.vector_source_b( preamble );
        mp = mapper.mapper(modtype, greymap);  
        it = filter.interp_fir_filter_ccf(2, firdes.root_raised_cosine(1, 1.0, 1.0/sps, rolloff, 21))
        vk = blocks.vector_sink_c();
        tb.connect(vs,mp,it,vk);
        tb.run();
        self.taps = list(vk.data());
        self.taps.reverse();
        self.taps = map(lambda x: x.conjugate(), self.taps);
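        # the reversed, conjugated copy of the modulated preamble makes
        # self.flt a matched filter: its output peaks at the preamble location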

        self.flt = filter.fft_filter_ccc(1, self.taps);
        self.mag = blocks.complex_to_mag_squared();
        self.connect(self, self.flt, self.mag);

        # connect output
        self.connect(self.mag, self);
Example #8
    def test_preamble (self):
        pream_len = 52
        pream = (mapper.preamble_generator(pream_len,511,1033)).get_preamble()
        
        rand_src    = blocks.vector_source_b(map(int, numpy.random.randint(0, 2, 1024)), True)
        head        = blocks.head(gr.sizeof_char*1, 1024)
        src_sink    = blocks.vector_sink_b(1)
        pream_inst  = mapper.preamble_insert_bb(pream_len*10, (pream))
        bit2symb    = mapper.mapper(mapper.BPSK, ([0,1]))
        pream_sync  = mapper.preamble_sync_cc(pream_len*10, (pream), mapper.BPSK, ([0,1]), .97, .90)
        symb2bit    = mapper.demapper(mapper.BPSK, ([0,1]))
        rec_sink    = blocks.vector_sink_b(1)

        self.tb.connect((rand_src, 0), (head, 0))
        self.tb.connect((head, 0), (pream_inst, 0))
        self.tb.connect((head, 0), (src_sink, 0))
        self.tb.connect((pream_inst, 0), (bit2symb, 0))
        self.tb.connect((bit2symb, 0), (pream_sync, 0))
        self.tb.connect((pream_sync, 0), (symb2bit, 0))
        self.tb.connect((symb2bit, 0), (rec_sink, 0))
        
        self.tb.start()
        sleep(1)
        self.tb.stop()
        
        data_space = pream_len*9
        sd = src_sink.data()
        rd = rec_sink.data()
        self.assertEqual(sd[0:data_space],rd[0:data_space])
 def __init__(self,
              modtype,
              symvals,
              txname,
              samples_per_symbol=2,
              excess_bw=0.35):
     # this is the block of whitened source bits
     gr.hier_block2.__init__(self, txname,
                             gr.io_signature(1, 1, gr.sizeof_char),
                             gr.io_signature(1, 1, gr.sizeof_gr_complex))
     # the  block of GR symbol mapper
     self.mod = mapper.mapper(modtype, symvals)
      # the rest of the function is the PFB RRC filter interpolation block described in the Signal Modulation Source paper
     # pulse shaping filter
     nfilts = 32
     ntaps = nfilts * 11 * int(
         samples_per_symbol)  # make nfilts filters of ntaps each
     rrc_taps = filter.firdes.root_raised_cosine(
         nfilts,  # gain
         nfilts,  # sampling rate based on 32 filters in resampler
         1.0,  # symbol rate
         excess_bw,  # excess bandwidth (roll-off factor)
         ntaps)
     self.rrc_filter = filter.pfb_arb_resampler_ccf(samples_per_symbol,
                                                    rrc_taps)
     self.connect(self, self.mod, self.rrc_filter, self)
      # the original code read `self.rate = const.bits_per_symbol()`, but `const`
      # was an unresolved reference, so constellation is imported from
      # gnuradio.digital and queried with the chosen modulation instead
     self.rate = constellation.bits_per_symbol(modtype)
def extractwordsfromfile(path):
    content = []
    with open(path) as fl:
        for line in fl:
            words = m.mapper(line, ' ')
            content.extend(words)
    return content
Example #11
def master(urls):
    keywords = Counter()
    # call mapper on all urls and reduce to one master dict
    for url in urls:
        temp_dict = Counter(mapper(url))
        keywords = keywords + temp_dict
    # sort by count and return
    return sort(keywords)
Example #12
def initiate_case(batchSize):
    reference = decoder("data/KR233687.fasta")
    sequence = decoder("data/ERR1293055_first100.fastq")
    refKmer = kmer_maker(13, reference, True)
    seqKmer = kmer_maker(13, sequence, False)
    reference_trie = Trie()
    sternum = mapper(refKmer, seqKmer, reference_trie, batchSize)
    sternum.filter_matching()
    return sternum
Example #13
    def test_mapping (self):
        src_data = (0,0,0,1,0,0,1,0,1,1,1,1,1,1,0,0,1,0,0,1,1,0,0,1,)*16
        src = blocks.vector_source_b(src_data, False)
        mapped = mapper.mapper(mapper.PSK8, ([0,1,3,2,6,7,5,4]))
        demapped = mapper.demapper(mapper.PSK8, ([0,1,3,2,6,7,5,4]))

        dst = blocks.vector_sink_b(1)
        self.tb.connect(src, mapped, demapped, dst)
        self.tb.run()
        self.assertEqual(src_data, dst.data())
Example #14
def auto_sqm():
    global sqm
    global lat, lon, angle_min, angle_max
    global threshold_percent, threshold_mag
    global single, output

    lat, lon, sqm = read_sqm(args.file)
    angle_min, angle_max = sqm_angles(sqm, threshold_percent, threshold_mag,
                                      args.opening, single)
    print(angle_min, angle_max)
    result = algorithm(df, lat, lon, distance, angle_min, angle_max, adjacent)
    mapper(result, lon, lat, "sqm", output, indicator)

    if not result.empty:
        result.to_csv(output + '.csv', index=False)
        print('Result saved in: ' + output + '.csv')

    else:
        print('No matching records found')
Example #15
def auto_tas():
    global tas
    global lat, lon, angle_min, angle_max, distance
    global threshold_percent, threshold_mag, adjacent
    global single, output
    global m10

    lat, lon, tas = read_tas(args.file)
    angle_min, angle_max, m10 = tas_angles(tas, threshold_percent,
                                           threshold_mag, args.opening, m10,
                                           single)
    result = algorithm(df, lat, lon, distance, angle_min, angle_max, adjacent,
                       m10)
    mapper(result, lon, lat, "tas", output, indicator)

    if not result.empty:
        result.to_csv(output + '.csv', index=False)
        print("Result saved in: " + output + ".csv")
    else:
        print("No matching records found")
Example #16
    def __init__(self, xrf_prefix='13SDD1:', configfile=None):
        self._pvs = {}
        self.state = 'idle'
        self.xmap = None
        self.xsp3 = None
        self.xrdcam = None

        conf = self.mapconf = FastMapConfig(configfile)
        print(" Using Configfile : ", configfile)
        struck        = conf.get('general', 'struck')
        scaler        = conf.get('general', 'scaler')
        basedir       = conf.get('general', 'basedir')
        mapdb         = conf.get('general', 'mapdb')
        self.use_xrd  = conf.get('xrd_ad', 'use')
        self.use_xrf  = conf.get('xrf',   'use')
        self.xrf_type = conf.get('xrf', 'type')
        self.xrf_pref = conf.get('xrf', 'prefix')

        self.mapper = mapper(prefix=mapdb)
        self.scan_t0  = time.time()
        self.Connect_ENV_PVs()

        self.ROI_Written = False
        self.ENV_Written = False
        self.ROWS_Written = False
        self.xps = XPSTrajectory(**conf.get('xps'))
        self.dtime = debugtime()

        self.struck = Struck(struck, scaler=scaler)
        self.struck.read_all_mcas()

        print('Using xrf type/prefix= ', self.xrf_type, self.xrf_pref)
        if self.use_xrf:
            if self.xrf_type.startswith('xmap'):
                self.xmap = MultiXMAP(self.xrf_pref)
            elif self.xrf_type.startswith('xsp'):
                self.xsp3 = Xspress3(self.xrf_pref, fileroot='/T/')

        if self.use_xrd:
            filesaver = conf.get('xrd_ad', 'fileplugin')
            prefix    = conf.get('xrd_ad', 'prefix')
            xrd_type  = conf.get('xrd_ad', 'type')
            print(" Use XRD ", prefix, xrd_type, filesaver)
            self.xrdcam = PerkinElmer_AD(prefix, filesaver=filesaver)
            # self.xrdcam = Dexela_AD(prefix, filesaver=filesaver)

        self.positioners = {}
        for pname in conf.get('slow_positioners'):
            self.positioners[pname] = self.PV(pname)
        self.mapper.add_callback('Start', self.onStart)
        self.mapper.add_callback('Abort', self.onAbort)
        self.mapper.add_callback('basedir', self.onDirectoryChange)
        self.prepare_beam_ok()
Example #17
    def connect_mapper(self):
        "setup epics callbacks for PVs from mapper "
        mapper_pv = self.config['general']['mapdb']
        self.mapper = mapper(mapper_pv)
        self.mapper.add_callback('Start',self.onMapStart)
        self.mapper.add_callback('message',self.onMapMessage)
        self.mapper.add_callback('info',self.onMapInfo)
        self.mapper.add_callback('nrow',self.onMapRow)
        if self._pvs is None:
            self._pvs = {}
            for pvname,label in self.config['slow_positioners'].items():
                self._pvs[label] = epics.PV(pvname)

        os.chdir(nativepath(self.mapper.basedir))
        self.SetMotorLimits()
Example #18
 def test_001_t (self):
     source_data = np.random.randint(0, high=4, size=10, dtype=int)
     lut = np.array([(+1+1j), (-1+1j), (+1-1j), (-1-1j)], dtype=complex)
     expected_result = lut[source_data]
     source = blocks.vector_source_i(source_data)
     qpsk_mapper = mapper()
     sink = blocks.vector_sink_c()
     self.tb.connect(source, qpsk_mapper)
     self.tb.connect(qpsk_mapper, sink)
     self.tb.run()
     result_data = np.asarray(sink.data(), dtype=complex)
     # check that the mapped symbols match the LUT expectation
     comparison = expected_result == result_data
     self.assertTrue(comparison.all())
Example #19
 def __init__(self, modtype, symvals, txname, samples_per_symbol=2, excess_bw=0.35):
     gr.hier_block2.__init__(self, txname,
         gr.io_signature(1, 1, gr.sizeof_char),
         gr.io_signature(1, 1, gr.sizeof_gr_complex))
     self.mod = mapper.mapper(modtype, symvals)
     # pulse shaping filter
     nfilts = 32
     ntaps = nfilts * 11 * int(samples_per_symbol)    # make nfilts filters of ntaps each
     rrc_taps = filter.firdes.root_raised_cosine(
         nfilts,          # gain
         nfilts,          # sampling rate based on 32 filters in resampler
         1.0,             # symbol rate
         excess_bw, # excess bandwidth (roll-off factor)
         ntaps)
     self.rrc_filter = filter.pfb_arb_resampler_ccf(samples_per_symbol, rrc_taps)
     self.connect(self, self.mod, self.rrc_filter, self)
Example #20
def bowhandler(data, path, namer, qua):
    new = time.time()
    perm = np.load(path + namer + data + '.npy')
    feat = open(path + namer + data + 'bow.json', 'w')
    newname = 0
    for i in perm:
        val = namerdict[str(i)]
        qua.seek(val)
        line = qua.readline()
        qares = qareg.search(line)
        qares = mapper(qares.group(1))
        qares = reducer(qares, 1)
        feat.write(str(qares) + '\n')
        end = time.time()
    print('%1s%5s took: %6.3fs' % (namer, data, end - new))
    feat.close()
Example #21
def run():
    reference = decoder(args.reference)
    sequence = decoder(args.sequence)
    if int(args.method) == 3:
        spine = BWT(reference)
        refKmer = reference
    else:
        refKmer = kmer_maker(int(args.ksize), reference, True)
    seqKmer = kmer_maker(int(args.ksize), sequence, False)
    if int(args.method) == 1:  # mapping through Suffix Trie
        spine = Trie()
    elif int(args.method) == 2:  # mapping through Suffix Array
        spine = SA(reference)
    sternum = mapper(refKmer, seqKmer, spine, int(args.batchSize))
    sternum.filter_matching(int(args.minKcount), int(args.minPercentage))
    reporter(sternum, args.outputPrefix + "_" + str(args.method) + "_")
Example #22
    def __init__(self,
                 sps=2.0,
                 rolloff=0.35,
                 preamble=[
                     0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1,
                     1, 0, 0, 0, 0
                 ],
                 modtype=mapper.QPSK,
                 greymap=[0, 1, 3, 2]):
        gr.hier_block2.__init__(self, "preamble_correlator",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex),
                                gr.io_signature(1, 1, gr.sizeof_float))
        #gr.io_signature(0,0,0))

        # vet preamble bits
        for b in preamble:
            assert (b >= 0 and b <= 1)

        tb = gr.top_block()
        vs = blocks.vector_source_b(preamble)
        mp = mapper.mapper(modtype, greymap)
        it = filter.interp_fir_filter_ccf(
            2, firdes.root_raised_cosine(1, 1.0, 1.0 / sps, rolloff, 21))
        vk = blocks.vector_sink_c()
        tb.connect(vs, mp, it, vk)
        tb.run()
        self.taps = list(vk.data())
        self.taps.reverse()
        self.taps = [x.conjugate() for x in self.taps]

        self.flt = filter.fft_filter_ccc(1, self.taps)
        self.mag = blocks.complex_to_mag_squared()
        self.connect(self, self.flt, self.mag)

        # connect output
        self.connect(self.mag, self)
Example #23
def run_pacman(db, simulator='pynn'):
    """
    Runs PACMAN on the db passed.
     - pre-splitter cleans the *.dat files in binaries
     - splitter
     - post-splitter (empty)
     - grouper (disabled in pacman.cfg for this version)
     - mapper
     - post-mapper (stdp_table_generator, allocate_monitoring_cores) (pynn)
     - create_core_list
    
    Generates the following files:
       - routing
       - patch_router_for_robot
       - patch_routing_tables for unused chips
       - synapse generation
       - lookup_table_generator
       - neuron generation
       - spike_source_array structures (pynn)
       - ybug_file_writer
    
    """
    import splitter
    import grouper
    import mapper
    import binary_files_generation.stdp_table_generator as stdp_table_generator    
    import binary_files_generation.invoker as invoker
    import binary_files_generation.synapse_writer as synapse_writer
#    import binary_files_generation.data_structure_creator as data_structure_creator
    import binary_files_generation.neuron_writer as neuron_writer
    import binary_files_generation.ybug_file_writer as ybug_file_writer
    import binary_files_generation.routing_patcher as routing_patcher           #   needed to patch routing tables for unused chips and robots
    import binary_files_generation.generate_convolution_block as generate_convolution_block           #   needed to build SDRAM entries for convolutional cores
    t0 = time.time()
    
    def tstamp(t0): return(time.time() - t0)
    
    
    
    #### PRE-PACMAN
    if pacman_configuration.getboolean('pre-pacman', 'run') == True:
        print "\n[ pacman ] : Running pre-pacman plugins"
        print "[ pacman ] : Cleaning *.dat files in %s" % BINARIES_DIRECTORY
        import glob
        for f in glob.glob("%s/*.dat" % BINARIES_DIRECTORY):    os.remove(f)
        

    #### SPLITTING        
    print "\n[ pacman ] : Splitting....."
    t0 = time.time()
    splitter.split_populations(db)
    splitter.split_projections(db)
    splitter.split_probes(db)
    print "\n[ pacman ] : Splitting done, commit...", tstamp(t0)    
    db.commit() 

    #### POST-SPLITTING    
    if pacman_configuration.getboolean('post-splitter', 'run') == True:
        print "\n[ pacman ] : Running post-splitter plugins"

    #### GROUPER
    print "[ pacman ] : Running grouper"
    t0 = time.time()
    if pacman_configuration.getboolean('grouper', 'run'):
        raise NotImplementedError("Grouping is not supported. Please turn it off in pacman.cfg")
        # deprecated
        groups = grouper.get_groups(db)
        grouper(db, groups)
        print "[ pacman ] : grouping terminated, going with mapping"
        grouper.update_core_offsets(db)

    else:
        print "[ pacman ] : Bypassing Grouping"        
        grouper.bypass_grouping(db)

    print "\n[ pacman ] : Grouping done, commit...", tstamp(t0)
    db.commit()     

    #### MAPPER            
    print "[ pacman ] : Mapping..."    
    t0 = time.time()
    mapper.mapper(db)
    
    # check for post-mapper triggers
    if pacman_configuration.getboolean('post-mapper', 'run'):
        print "[ pacman ] : Running post-mapper plugins"

        # stdp table generation
        if pacman_configuration.getboolean('post-mapper', 'generate_stdp_tables') and simulator=='pynn':
            print "[ pacman ] : running compile_stdp_table_from_db"
            stdp_table_generator.compile_stdp_table_from_db(db)

        # allocating app monitoring
        if pacman_configuration.getboolean('post-mapper', 'allocate_monitoring_cores') and simulator=='pynn':
            print "[ pacman ] : running allocate_monitoring_cores"
            mapper.allocate_monitoring_cores(db)
    
    # core list creation
    mapper.create_core_list(db)

    print "[ pacman ] : Mapping done, commit...", tstamp(t0)
    db.commit() 
            
    #### BINARY FILES GENERATION    #####

    #### ROUTING
    print "\n[ pacman ] : Routing..."        
    t0 = time.time()    
    inv1  = invoker.invoker(db)
    inv1.file_gen()

    print "[ pacman ] : Routing done, commit...", tstamp(t0)
    db.commit() 
    
    if pacman_configuration.getboolean('routing', 'patch_routing_tables') == True:
        print "\n[ pacman ] : Patching routing tables to handle non used chips"
        routing_patcher.patch_routing_entries_missing_chips(db)

    if pacman_configuration.getboolean('routing', 'patch_routing_for_robot') == True:
        print "\n[ pacman ] : Patching routing tables for robotic use"
        routing_patcher.patch_router_for_robot(db)
        routing_patcher.patch_router_for_sensors(db)

    if pacman_configuration.getboolean('routing', 'patch_routing_for_output_board') == True:
        print "\n[ pacman ] : Patching routing tables for using with the output board"
        routing_patcher.patch_router_for_output_board(db)


    
    #### SDRAM
    print "\n[ pacman ] : Synapse generation..."
    t0 = time.time()
    synapse_writer.synapse_writer(db)
    
    print "[ pacman ] : Synapse generation done, commit...", tstamp(t0)   

    if pacman_configuration.getboolean('convolution', 'run') == True:
        print "\n[ pacman ] : Evaluating SDRAM entries for convolutional cores"
        generate_convolution_block.patch_SDRAM_for_convolution(db)

    
#    # FIXME horrible hack for avoiding db locked errors
#    try:    db.commit()
#    except: pass

    
#    #### LOOKUP TABLE GENERATOR
#    print "\n[ pacman ] : Lookup table generation..."
#    t0 = time.time()
#    curdir = os.getcwd()
#    os.chdir(PACMAN_DIR)
#    os.chdir(os.pardir)
#    os.chdir('binary_files_generation')
    
##    print "./lookup_table_generator %s %s/sql/model_library.db" % (db.db_abs_path, PACMAN_DIR)            
#    model_library_address = "%s/sql/model_library.db" % PACMAN_DIR
#    system_library_address = "%s/sql/%s" % (PACMAN_DIR, pacman_configuration.get('board', 'system_library'))
#    LEGACY_LOOKUP = pacman_configuration.getboolean('synapse_writer', 'run_legacy_lookup')
#    if LEGACY_LOOKUP:   os.system("./lookup_table_generator %s %s %s" % (db.db_abs_path, model_library_address, system_library_address))
#    print "[ pacman ] : Lookup table generation done, commit...", tstamp(t0)        
#    os.chdir(curdir)        
    
    ### DATA STRUCTURE CREATOR
    print "\n[ pacman ] : Neuron generation..."
    t0 = time.time()
#    data_structure_creator.write_neural_data_structures(db)
    neuron_writer.write_neural_data_structures(db)
    db.commit()    # not needed?
    print "[ pacman ] : Neuron generation done, commit...", tstamp(t0)
    
    
    if pacman_configuration.getboolean('neural_data_structure_creator', 'compile_spike_source_arrays') and simulator=='pynn':
        import binary_files_generation.generate_input_spikes as generate_input_spikes
        
        print "[ pacman ] : running compile_spike_source_arrays"
        generate_input_spikes.compile_spike_source_array(db)

    
    #### YBUG file writer
    print "\n[ pacman ] : yBug file writer..."
    t0 = time.time()
    
    SCRIPT_LOCATION = '%s/automatic.ybug' % BINARIES_DIRECTORY
    ybug_file_writer.write_ybug_file(db, SCRIPT_LOCATION)
    print "[ pacman ] : yBug file writer done...", tstamp(t0)
    
    #### DONE!
    if pacman_configuration.getboolean('pyNN.spiNNaker', 'run_simulation') == False and simulator=='pynn':     
        print "[ pacman ] : closing db...."
        db.close_connection()       # will close the connection to the db and commit the transaction    
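# Hypothetical invocation of the pipeline above (the db handle and the module
# layout are assumptions, not part of the original source):
#
#   import pacman
#   db = load_db('network.db')   # hypothetical loader for the network database
#   pacman.run_pacman(db, simulator='pynn')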
Example #24
import sys
from mapper import mapper
from reducer import reducer

file_name = sys.argv[1]
occurence = mapper(file_name)
combined_occurence = reducer(occurence)

for key, value in combined_occurence.items():
    print("<{0}, {1}>".format(key, value))
Example #25
from EscanWriter import EscanWriter
from mapper import mapper
import os


m = mapper('13XRM:map:')

basedir = m.basedir
scandir = m.workdir
fname   = m.filename

outfile = os.path.join(basedir, scandir, fname)
print(outfile)

saver   = EscanWriter(folder="%s/%s" %(basedir, scandir))

nlines = saver.process()

f = open(outfile, 'w')
f.write("%s\n" % '\n'.join(saver.buff))
f.close()


Example #26
import os
import sys
sys.path.insert(0, 'src')
import getopt
from mapper import mapper


def usage(status=0):
    print '''Usage: ./mapper_test.py'''
    sys.exit(status)


# main execution
if __name__ == '__main__':
    # parse options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h")
    except getopt.GetoptError as err:
        print err
        usage()

    for o, a in opts:
        usage(1)

    # call mapper with demo url
    keywords = mapper('http://michaelsills.com/sample_links.html')

    # print results
    for word, count in keywords.iteritems():
        print word + ' ' + str(count)
import mapper as m
import reducer as r

#lines = ["this is python class","hello this is shubham learning python","hi python is on","this is a python program for mapper and reducer"]
f1 = open("textfile.txt")
content = []
for w in f1.readlines():
    words = m.mapper(w, ' ')
    content = content + words
f1.close()

data = r.reducer(content)
fl = open(r'keyValueFile.csv', 'w')
for k, v in data.items():
    print("%s : %d" % (k, v))
    line = str(k) + ":" + str(v) + "\n"
    fl.write(line)
fl.close()
## Dot product of vectors v0 and v1.
dotProd = lambda v0, v1: sum([u * v for u, v in zip(v0, v1)])


## Z-component of the cross product of 2D vectors u and v (the signed area).
def crossProd(u, v):
    return u[0] * v[1] - v[0] * u[1]


## counter-clockwise rotation about the Z axis
ROT_Z = lambda z: [[cos(z), -sin(z), 0, 0], [sin(z), cos(z), 0, 0],
                   [0, 0, 1, 0], [0, 0, 0, 1]]

## Maps window coordinates to GIS coordinates.
map = mapper([-1, -1, 1, 1], [-1, -1, 1, 1])


def title(s):
    pass


def setup(width, height, startx=0, starty=0):
    pass


def bgcolor(r, g, b):
    pass


def window_width():
    pass
Example #29
# main execution
if __name__ == '__main__':
    # user input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "u:s:oh")
    except getopt.GetoptError as err:
        print err
        usage()

    for o, a in opts:
        if o == '-u':
            URL = a
        elif o == '-s':
            SORT = a
        elif o == '-o':
            OUTPUT = True
        else:
            usage(1)

    if URL == '' or URL not in db.url_map:
        usage(1)

    # get the list of unsorted words
    words = mapper(URL)
    # run the sorting algorithm
    sorted_words = sort(words, SORT)
    # print results
    if OUTPUT:
        for word in sorted_words:
            print word[0] + ' ' + str(word[1])
Example #30
import os, sys
import subprocess
from mapper import mapper
from reducer import reducer

#subprocess = subprocess.Popen("sudo find /", shell=True, stdout=subprocess.PIPE)
#subprocess_return = subprocess.stdout.read()
#print(subprocess_return)

file_name = sys.argv[1]
f = open(file_name, "r")

combined_occurrences = {}
for line in f.readlines():
    occurrence = mapper(line)
    combined_occurrence = reducer(occurrence)
    combined_occurrences.update(combined_occurrence)

max_len = -1
for key, value in combined_occurrences.items():
    if value > max_len:
        max_len = value
        longest_path = key

print("Longest Path:", longest_path)
print("Length:", max_len)
Example #31
reader = csv.DictReader(csvfile, fieldnames)

for row in reader:
    d[row['ISO']] = row

csvfile2 = open('data/MPI_subnational.csv', 'r')
fieldnames2 = ("ISO country code", "Country", "Sub-national region",
               "World region", "MPI National", "MPI Regional",
               "Headcount Ratio Regional", "Intensity of deprivation Regional")

next(csvfile2)
reader2 = csv.DictReader(csvfile2, fieldnames2)
for row in reader2:

    try:
        d[row['ISO country code']]['subnational'].append(row)
    except KeyError:
        d[row['ISO country code']]['subnational'] = [row]

t = []
for k in d:
    t.extend(mapper.mapper(k, d[k]))

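# shuffle step: group the mapped (key, value) pairs by key before reducing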
res = defaultdict(list)
for x in t:
    res[x[0]].append(x[1])

for x in res:
    for y in reducer.reducer(x, res[x]):
        print(y)
Example #32
#!/usr/bin/env python
#import actor
import mapper
import curses

global y
global x
global py
#global px
global moving
y = py = 1
x = 1
moving = True

global mapper
mapper = mapper.mapper()
# generate a map from mapper.py
global map
map = mapper.map

#Initialization of the curses window stdscr
stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
curses.curs_set(0)
stdscr.keypad(1)


def QuitGame():
    curses.nocbreak()
    stdscr.keypad(0)
Example #33
import json

import mapper

if __name__ == '__main__':
    with open('/dev/input') as fp:
        data = fp.read()
    try:
        record = json.loads(data)
    except ValueError:
        record = {'data': data}
    mapped = mapper.mapper(record)
    with open('/dev/out/reducer', 'a') as fp:
        json.dump(mapped, fp)
##################################################
pream_len = 52
samp_rate = 32000
pream = (mapper.preamble_generator(pream_len, 511, 1033)).get_preamble()

##################################################
# Blocks
##################################################
rand_src = blocks.vector_source_b(map(int, numpy.random.randint(0, 2, 1024)),
                                  True)
#head        = blocks.head(gr.sizeof_char*1, 1024)
src_sink = blocks.vector_sink_b(1)
pream_inst = mapper.preamble_insert_bb(pream_len * 10, (pream))
pre_sink = blocks.vector_sink_b(1)
bit2symb = mapper.mapper(mapper.BPSK, ([0, 1]))
sym_sink = blocks.vector_sink_c(1)
pream_sync = mapper.preamble_sync_cc(pream_len * 10, (pream), mapper.BPSK,
                                     ([0, 1]), .97, .90)
snc_sink = blocks.vector_sink_c(1)
symb2bit = mapper.demapper(mapper.BPSK, ([0, 1]))
rec_sink = blocks.vector_sink_b(1)

##################################################
# Connections
##################################################
#tb.connect((rand_src, 0), (head, 0))
tb.connect((rand_src, 0), (pream_inst, 0))
tb.connect((rand_src, 0), (src_sink, 0))
#tb.connect((head, 0), (pream_inst, 0))
#tb.connect((head, 0), (src_sink, 0))
Example #35
    def __init__(self):
        gr.top_block.__init__(self, "Sync Test")
        Qt.QWidget.__init__(self)
        self.setWindowTitle("Sync Test")
        try:
            self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
        except:
            pass
        self.top_scroll_layout = Qt.QVBoxLayout()
        self.setLayout(self.top_scroll_layout)
        self.top_scroll = Qt.QScrollArea()
        self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
        self.top_scroll_layout.addWidget(self.top_scroll)
        self.top_scroll.setWidgetResizable(True)
        self.top_widget = Qt.QWidget()
        self.top_scroll.setWidget(self.top_widget)
        self.top_layout = Qt.QVBoxLayout(self.top_widget)
        self.top_grid_layout = Qt.QGridLayout()
        self.top_layout.addLayout(self.top_grid_layout)

        self.settings = Qt.QSettings("GNU Radio", "sync_test")
        self.restoreGeometry(self.settings.value("geometry").toByteArray())


        ##################################################
        # Variables
        ##################################################
        self.pream_len = pream_len = 84
        self.samp_rate = samp_rate = 32000
        self.pream = pream = (mapper.preamble_generator(pream_len,511,1033)).get_preamble()
        self.SNR = SNR = 40
        self.Rotation = Rotation = 0
        self.Offset = Offset = 0

        ##################################################
        # Blocks
        ##################################################
        self._SNR_tool_bar = Qt.QToolBar(self)
        self._SNR_tool_bar.addWidget(Qt.QLabel("SNR"+": "))
        self._SNR_line_edit = Qt.QLineEdit(str(self.SNR))
        self._SNR_tool_bar.addWidget(self._SNR_line_edit)
        self._SNR_line_edit.returnPressed.connect(
        	lambda: self.set_SNR(eng_notation.str_to_num(self._SNR_line_edit.text().toAscii())))
        self.top_layout.addWidget(self._SNR_tool_bar)
        self._Rotation_layout = Qt.QVBoxLayout()
        self._Rotation_label = Qt.QLabel("Rotation")
        self._Rotation_slider = Qwt.QwtSlider(None, Qt.Qt.Horizontal, Qwt.QwtSlider.BottomScale, Qwt.QwtSlider.BgSlot)
        self._Rotation_slider.setRange(0, 2*pi, pi/100)
        self._Rotation_slider.setValue(self.Rotation)
        self._Rotation_slider.setMinimumWidth(200)
        self._Rotation_slider.valueChanged.connect(self.set_Rotation)
        self._Rotation_label.setAlignment(Qt.Qt.AlignBottom | Qt.Qt.AlignHCenter)
        self._Rotation_layout.addWidget(self._Rotation_label)
        self._Rotation_layout.addWidget(self._Rotation_slider)
        self.top_layout.addLayout(self._Rotation_layout)
        self._Offset_layout = Qt.QVBoxLayout()
        self._Offset_tool_bar = Qt.QToolBar(self)
        self._Offset_layout.addWidget(self._Offset_tool_bar)
        self._Offset_tool_bar.addWidget(Qt.QLabel("Offset"+": "))
        self._Offset_counter = Qwt.QwtCounter()
        self._Offset_counter.setRange(-100, 100, 1)
        self._Offset_counter.setNumButtons(2)
        self._Offset_counter.setValue(self.Offset)
        self._Offset_tool_bar.addWidget(self._Offset_counter)
        self._Offset_counter.valueChanged.connect(self.set_Offset)
        self._Offset_slider = Qwt.QwtSlider(None, Qt.Qt.Horizontal, Qwt.QwtSlider.BottomScale, Qwt.QwtSlider.BgSlot)
        self._Offset_slider.setRange(-100, 100, 1)
        self._Offset_slider.setValue(self.Offset)
        self._Offset_slider.setMinimumWidth(200)
        self._Offset_slider.valueChanged.connect(self.set_Offset)
        self._Offset_layout.addWidget(self._Offset_slider)
        self.top_layout.addLayout(self._Offset_layout)
        self.qtgui_const_sink_x_0_1 = qtgui.const_sink_c(
        	840-84, #size
        	"QT GUI Plot", #name
        	1 #number of inputs
        )
        self.qtgui_const_sink_x_0_1.set_update_time(0.10)
        self.qtgui_const_sink_x_0_1.set_y_axis(-2, 2)
        self.qtgui_const_sink_x_0_1.set_x_axis(-2, 2)
        self._qtgui_const_sink_x_0_1_win = sip.wrapinstance(self.qtgui_const_sink_x_0_1.pyqwidget(), Qt.QWidget)
        self.top_layout.addWidget(self._qtgui_const_sink_x_0_1_win)
        self.qtgui_const_sink_x_0 = qtgui.const_sink_c(
        	840-84, #size
        	"QT GUI Plot", #name
        	1 #number of inputs
        )
        self.qtgui_const_sink_x_0.set_update_time(0.10)
        self.qtgui_const_sink_x_0.set_y_axis(-2, 2)
        self.qtgui_const_sink_x_0.set_x_axis(-2, 2)
        self._qtgui_const_sink_x_0_win = sip.wrapinstance(self.qtgui_const_sink_x_0.pyqwidget(), Qt.QWidget)
        self.top_layout.addWidget(self._qtgui_const_sink_x_0_win)
        self.mapper_preamble_sync_cc_0 = mapper.preamble_sync_cc(pream_len*10, (pream), mapper.PSK8, ([0,1,2,3,4,5,6,7]), .97, .90)
        self.mapper_preamble_insert_bb_0 = mapper.preamble_insert_bb(pream_len*10, (pream))
        self.mapper_mapper_0 = mapper.mapper(mapper.PSK8, ([0,1,2,3,4,5,6,7]))
        self.mapper_demapper_0 = mapper.demapper(mapper.PSK8, ([0,1,2,3,4,5,6,7]))
        self.digital_costas_loop_cc_0 = digital.costas_loop_cc(1.5*pi/100, 8)
        self.channels_channel_model_0 = channels.channel_model(
        	noise_voltage=10**(-SNR/20.0),
        	frequency_offset=Offset,
        	epsilon=1.0,
        	taps=(exp( (0 + 1j*Rotation) ), ),
        	noise_seed=0,
        	block_tags=False
        )
        self.blocks_vector_sink_x_0_0 = blocks.vector_sink_b(1)
        self.blocks_unpack_k_bits_bb_0 = blocks.unpack_k_bits_bb(3)
        self.blocks_throttle_0 = blocks.throttle(gr.sizeof_char*1, samp_rate)
        self.analog_random_source_x_0 = blocks.vector_source_b(map(int, numpy.random.randint(0, 7, 10000)), True)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.mapper_preamble_insert_bb_0, 0), (self.mapper_mapper_0, 0))
        self.connect((self.mapper_preamble_sync_cc_0, 0), (self.mapper_demapper_0, 0))
        self.connect((self.mapper_mapper_0, 0), (self.channels_channel_model_0, 0))
        self.connect((self.channels_channel_model_0, 0), (self.digital_costas_loop_cc_0, 0))
        self.connect((self.digital_costas_loop_cc_0, 0), (self.mapper_preamble_sync_cc_0, 0))
        self.connect((self.mapper_preamble_sync_cc_0, 0), (self.qtgui_const_sink_x_0_1, 0))
        self.connect((self.analog_random_source_x_0, 0), (self.blocks_unpack_k_bits_bb_0, 0))
        self.connect((self.blocks_unpack_k_bits_bb_0, 0), (self.blocks_throttle_0, 0))
        self.connect((self.blocks_throttle_0, 0), (self.mapper_preamble_insert_bb_0, 0))
        self.connect((self.mapper_demapper_0, 0), (self.blocks_vector_sink_x_0_0, 0))
        self.connect((self.channels_channel_model_0, 0), (self.qtgui_const_sink_x_0, 0))
Example #36
sys.path.insert(0,'src')
import getopt
from mapper import mapper
import db

def usage(status=0):
    print '''Usage: ./mapper_test.py [options]...

Options:
    -u URL      path of website to analyze (ex: http://www.wsj.com/)
    -o          output data to stdout
    -h          help'''
    sys.exit(status)

# main execution
if __name__ == '__main__':
    try:
        opts,args = getopt.getopt(sys.argv[1:], "h")
    except getopt.GetoptError as err:
        print err
        usage()

    for o,a in opts:
        if o == '-h':
            usage(1)

    for url in db.urls:
        words = mapper(url)
        print url + '\t' + str(len(words))
        
##################################################
# Variables
##################################################
pream_len = 52
samp_rate = 32000
pream = (mapper.preamble_generator(pream_len, 511, 1033)).get_preamble()

##################################################
# Blocks
##################################################
rand_src    = blocks.vector_source_b(map(int, numpy.random.randint(0, 2, 1024)), True)
#head        = blocks.head(gr.sizeof_char*1, 1024)
src_sink    = blocks.vector_sink_b(1)
pream_inst  = mapper.preamble_insert_bb(pream_len*10, (pream))
pre_sink    = blocks.vector_sink_b(1)
bit2symb    = mapper.mapper(mapper.BPSK, ([0,1]))
sym_sink    = blocks.vector_sink_c(1)
pream_sync  = mapper.preamble_sync_cc(pream_len*10, (pream), mapper.BPSK, ([0,1]), .97, .90)
snc_sink    = blocks.vector_sink_c(1)
symb2bit    = mapper.demapper(mapper.BPSK, ([0,1]))
rec_sink    = blocks.vector_sink_b(1)

##################################################
# Connections
##################################################
#tb.connect((rand_src, 0), (head, 0))
tb.connect((rand_src, 0), (pream_inst, 0))
tb.connect((rand_src, 0), (src_sink, 0))
#tb.connect((head, 0), (pream_inst, 0))
#tb.connect((head, 0), (src_sink, 0))
tb.connect((pream_inst, 0), (bit2symb, 0))
Example #38
def core_mapping(in_file='/Users/tieqiangli/mapperinput/CorrelationArray1d_0708.csv',
                 # out_path='/Users/tieqiangli/mapperinput/output/',
                 intervals=15,
                 overlap=50):

    data = np.loadtxt(str(in_file), delimiter=',', dtype=float)
    
#    metricpar = {'metric': 'euclidean'}
#    
#    point_labels = np.array(['a','b','c','d'])
##    point_labels = np.array([600001,600002,600003,600004])
#    mask = [1,2,3,4]
    point_labels = None
    mask = None

#    data, point_labels = mapper.mask_data(data, mask, point_labels)
    
    '''
        Step 2: Metric
    '''
    intrinsic_metric = False
    if intrinsic_metric:
        is_vector_data = data.ndim != 1
        if is_vector_data:
            metric = 'Euclidean'
            if metric != 'Euclidean':
                raise ValueError('Not implemented')
        data = mapper.metric.intrinsic_metric(data, k=1, eps=1.0)
    is_vector_data = data.ndim != 1
    '''
        Step 3: Filter function
    '''
    if is_vector_data:
        metricpar = {'metric': 'euclidean'}
        f = mapper.filters.Gauss_density(data,
            metricpar=metricpar,
            sigma=1.0)
    else:
        f = mapper.filters.Gauss_density(data,
            sigma=1.0)
    '''
        Step 4: Mapper parameters
    '''
    cover = mapper.cover.cube_cover_primitive(intervals=intervals, overlap=overlap)
    cluster = mapper.single_linkage()
    if not is_vector_data:
        metricpar = {}
    mapper_output = mapper.mapper(data, f,
        cover=cover,
        cluster=cluster,
        point_labels=point_labels,
        cutoff=None,
        metricpar=metricpar)
    mapper.scale_graph(mapper_output, f, cover=cover,
                       weighting='inverse', maxcluster=100, expand_intervals=False, exponent=10,
                       simple=False)
#    cutoff = mapper.cutoff.first_gap(gap=0.1)
#    mapper_output.cutoff(cutoff, f, cover=cover, simple=False)
    
    '''
        Step 5: Save results
    '''
    
    # store the x-y coordinate of the mapper_output simplicial complex 
    # as well as the scale parameters in a dictionary
    temp_out = mapper_output.draw_2D()
    Output = {}
    Output['tag'] = 'scaleParam_' + str(intervals) + '_' + str(overlap)
    # vertex_pos should hold the x/y coordinates, though the right way of extracting them from mapper_output has not been verified
    Output['x'] = temp_out.vertex_pos[:, 0]
    Output['y'] = temp_out.vertex_pos[:, 1]
    Output['intervals'] = intervals
    Output['overlap'] = overlap
    
    return Output
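# Hypothetical invocation of core_mapping (the input path is the default above):
#
#   out = core_mapping(intervals=15, overlap=50)
#   print(out['tag'], len(out['x']), 'vertices')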
Example #39
while (True):
    # Capture frame-by-frame
    ret, frame = cap.read()

    # Our operations on the frame come here
    gray = (frame[:, :, 0] > 30)

    hot_points = grid[gray]

    cov = mapper.cover.balanced_cover_1d(30)
    flt = hot_points[:, 0].reshape(-1, 1).astype(float)
    pts = hot_points.astype(float)

    with Capturing() as output:
        mappered = mapper.mapper(pts, flt, cov, mapper.cutoff.histogram(30))

    canvas = np.zeros((gray.shape[0], gray.shape[1], 3), dtype=np.uint8)

    for node in mappered.nodes:
        color, = cm.viridis(np.random.rand(1)) * 255
        for x, y in hot_points[node.points]:
            canvas[x, y, 0] = color[0]
            canvas[x, y, 1] = color[1]
            canvas[x, y, 2] = color[2]

    # Display the resulting frame
    cv2.imshow('frame', canvas)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
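# The mapper.mapper(...) call above runs the topological Mapper algorithm on
# each frame: the 1-D filter (the points' x coordinate) is covered by 30
# overlapping intervals, points in each interval are clustered, and each
# cluster becomes one node, drawn on the canvas in its own color.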
Example #40
def mapper_cluster(in_file='/Users/tieqiangli/mapperinput/CorrelationArray1d_0708.csv',
                   out_path='/Users/tieqiangli/mapperinput/output/'):

    data = np.loadtxt(str(in_file), delimiter=',', dtype=float)
    
#    metricpar = {'metric': 'euclidean'}
#    
#    point_labels = np.array(['a','b','c','d'])
##    point_labels = np.array([600001,600002,600003,600004])
#    mask = [1,2,3,4]
    point_labels = None
    mask = None

#    data, point_labels = mapper.mask_data(data, mask, point_labels)
    
    '''
        Step 2: Metric
    '''
    intrinsic_metric = False
    if intrinsic_metric:
        is_vector_data = data.ndim != 1
        if is_vector_data:
            metric = 'Euclidean'
            if metric != 'Euclidean':
                raise ValueError('Not implemented')
        data = mapper.metric.intrinsic_metric(data, k=1, eps=1.0)
    is_vector_data = data.ndim != 1
    '''
        Step 3: Filter function
    '''
    if is_vector_data:
        metricpar = {'metric': 'euclidean'}
        f = mapper.filters.Gauss_density(data,
            metricpar=metricpar,
            sigma=1.0)
    else:
        f = mapper.filters.Gauss_density(data,
            sigma=1.0)
    '''
        Step 4: Mapper parameters
    '''
    cover = mapper.cover.cube_cover_primitive(intervals=5, overlap=90.0)
    cluster = mapper.single_linkage()
    if not is_vector_data:
        metricpar = {}
    mapper_output = mapper.mapper(data, f,
        cover=cover,
        cluster=cluster,
        point_labels=point_labels,
        cutoff=None,
        metricpar=metricpar)
    mapper.scale_graph(mapper_output, f, cover=cover,
                       weighting='inverse', maxcluster=100, expand_intervals=False, exponent=10,
                       simple=False)
#    cutoff = mapper.cutoff.first_gap(gap=0.1)
#    mapper_output.cutoff(cutoff, f, cover=cover, simple=False)
    
    '''
        Step 5: Save results
    '''
    t = op.basename(in_file)    
#    date_stamp = t[len(t)-8:len(t)-4]    
    
#    mapper_output.draw_scale_graph()
#    out_file = out_path + 'scale_graph_'  + '_' + t + '.pdf'
#    plt.savefig(out_file)
    
    minsizes = []
    mapper_output.draw_2D(minsizes=minsizes)
    out_file = out_path + 'mapper_output_' + '_' + t + '.pdf'
    plt.savefig(out_file)
mask = None
crop = mapper.crop
# Custom filter transformation

# End custom filter transformation
'''
'''
    Step 4: Mapper parameters
'''
cover = mapper.cover.cube_cover_primitive(intervals=3, overlap=20.0)
cluster = mapper.average_linkage()
if not is_vector_data:
    metricpar = {}
mapper_output = mapper.mapper(data, f,
    cover=cover,
    cluster=cluster,
    #point_labels=point_labels,
    cutoff=None,
    metricpar=metricpar)
cutoff = mapper.cutoff.first_gap(gap=0.1)
mapper_output.cutoff(cutoff, f, cover=cover, simple=False)
mapper_output.draw_scale_graph()
plt.savefig('scale_graph.pdf')
'''
    Step 5: Display parameters
'''
# Node coloring
#'''
from mapper.tools import qhull, shortest_path, pdfwriter, graphviz_node_pos, dict_values
nodes = mapper_output.nodes
vertices, vertex_pos = graphviz_node_pos(mapper_output.simplices, nodes)
vertices = np.array(vertices)
def setworldcoordinates(x0, y0, x1, y1):
    global map
    map = mapper([x0, y0, x1, y1], [-180, -90, 180, 90], True)
Example #43
    def __init__(self):
        gr.top_block.__init__(self, "Sync Test")
        Qt.QWidget.__init__(self)
        self.setWindowTitle("Sync Test")
        try:
            self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
        except:
            pass
        self.top_scroll_layout = Qt.QVBoxLayout()
        self.setLayout(self.top_scroll_layout)
        self.top_scroll = Qt.QScrollArea()
        self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
        self.top_scroll_layout.addWidget(self.top_scroll)
        self.top_scroll.setWidgetResizable(True)
        self.top_widget = Qt.QWidget()
        self.top_scroll.setWidget(self.top_widget)
        self.top_layout = Qt.QVBoxLayout(self.top_widget)
        self.top_grid_layout = Qt.QGridLayout()
        self.top_layout.addLayout(self.top_grid_layout)

        self.settings = Qt.QSettings("GNU Radio", "sync_test")
        self.restoreGeometry(self.settings.value("geometry").toByteArray())

        ##################################################
        # Variables
        ##################################################
        self.pream_len = pream_len = 84
        self.samp_rate = samp_rate = 32000
        self.pream = pream = (mapper.preamble_generator(pream_len, 511,
                                                        1033)).get_preamble()
        self.SNR = SNR = 40
        self.Rotation = Rotation = 0
        self.Offset = Offset = 0

        ##################################################
        # Blocks
        ##################################################
        self._SNR_tool_bar = Qt.QToolBar(self)
        self._SNR_tool_bar.addWidget(Qt.QLabel("SNR" + ": "))
        self._SNR_line_edit = Qt.QLineEdit(str(self.SNR))
        self._SNR_tool_bar.addWidget(self._SNR_line_edit)
        self._SNR_line_edit.returnPressed.connect(lambda: self.set_SNR(
            eng_notation.str_to_num(self._SNR_line_edit.text().toAscii())))
        self.top_layout.addWidget(self._SNR_tool_bar)
        self._Rotation_layout = Qt.QVBoxLayout()
        self._Rotation_label = Qt.QLabel("Rotation")
        self._Rotation_slider = Qwt.QwtSlider(None, Qt.Qt.Horizontal,
                                              Qwt.QwtSlider.BottomScale,
                                              Qwt.QwtSlider.BgSlot)
        self._Rotation_slider.setRange(0, 2 * pi, pi / 100)
        self._Rotation_slider.setValue(self.Rotation)
        self._Rotation_slider.setMinimumWidth(200)
        self._Rotation_slider.valueChanged.connect(self.set_Rotation)
        self._Rotation_label.setAlignment(Qt.Qt.AlignBottom
                                          | Qt.Qt.AlignHCenter)
        self._Rotation_layout.addWidget(self._Rotation_label)
        self._Rotation_layout.addWidget(self._Rotation_slider)
        self.top_layout.addLayout(self._Rotation_layout)
        self._Offset_layout = Qt.QVBoxLayout()
        self._Offset_tool_bar = Qt.QToolBar(self)
        self._Offset_layout.addWidget(self._Offset_tool_bar)
        self._Offset_tool_bar.addWidget(Qt.QLabel("Offset" + ": "))
        self._Offset_counter = Qwt.QwtCounter()
        self._Offset_counter.setRange(-100, 100, 1)
        self._Offset_counter.setNumButtons(2)
        self._Offset_counter.setValue(self.Offset)
        self._Offset_tool_bar.addWidget(self._Offset_counter)
        self._Offset_counter.valueChanged.connect(self.set_Offset)
        self._Offset_slider = Qwt.QwtSlider(None, Qt.Qt.Horizontal,
                                            Qwt.QwtSlider.BottomScale,
                                            Qwt.QwtSlider.BgSlot)
        self._Offset_slider.setRange(-100, 100, 1)
        self._Offset_slider.setValue(self.Offset)
        self._Offset_slider.setMinimumWidth(200)
        self._Offset_slider.valueChanged.connect(self.set_Offset)
        self._Offset_layout.addWidget(self._Offset_slider)
        self.top_layout.addLayout(self._Offset_layout)
        self.qtgui_const_sink_x_0_1 = qtgui.const_sink_c(
            840 - 84,  #size
            "QT GUI Plot",  #name
            1  #number of inputs
        )
        self.qtgui_const_sink_x_0_1.set_update_time(0.10)
        self.qtgui_const_sink_x_0_1.set_y_axis(-2, 2)
        self.qtgui_const_sink_x_0_1.set_x_axis(-2, 2)
        self._qtgui_const_sink_x_0_1_win = sip.wrapinstance(
            self.qtgui_const_sink_x_0_1.pyqwidget(), Qt.QWidget)
        self.top_layout.addWidget(self._qtgui_const_sink_x_0_1_win)
        self.qtgui_const_sink_x_0 = qtgui.const_sink_c(
            840 - 84,  #size
            "QT GUI Plot",  #name
            1  #number of inputs
        )
        self.qtgui_const_sink_x_0.set_update_time(0.10)
        self.qtgui_const_sink_x_0.set_y_axis(-2, 2)
        self.qtgui_const_sink_x_0.set_x_axis(-2, 2)
        self._qtgui_const_sink_x_0_win = sip.wrapinstance(
            self.qtgui_const_sink_x_0.pyqwidget(), Qt.QWidget)
        self.top_layout.addWidget(self._qtgui_const_sink_x_0_win)
        self.mapper_preamble_sync_cc_0 = mapper.preamble_sync_cc(
            pream_len * 10, (pream), mapper.PSK8, ([0, 1, 2, 3, 4, 5, 6, 7]),
            .97, .90)
        self.mapper_preamble_insert_bb_0 = mapper.preamble_insert_bb(
            pream_len * 10, (pream))
        self.mapper_mapper_0 = mapper.mapper(mapper.PSK8,
                                             ([0, 1, 2, 3, 4, 5, 6, 7]))
        self.mapper_demapper_0 = mapper.demapper(mapper.PSK8,
                                                 ([0, 1, 2, 3, 4, 5, 6, 7]))
        self.digital_costas_loop_cc_0 = digital.costas_loop_cc(
            1.5 * pi / 100, 8)
        self.channels_channel_model_0 = channels.channel_model(
            noise_voltage=10**(-SNR / 20.0),
            frequency_offset=Offset,
            epsilon=1.0,
            taps=(exp((0 + 1j * Rotation)), ),
            noise_seed=0,
            block_tags=False)
        self.blocks_vector_sink_x_0_0 = blocks.vector_sink_b(1)
        self.blocks_unpack_k_bits_bb_0 = blocks.unpack_k_bits_bb(3)
        self.blocks_throttle_0 = blocks.throttle(gr.sizeof_char * 1, samp_rate)
        self.analog_random_source_x_0 = blocks.vector_source_b(
            map(int, numpy.random.randint(0, 7, 10000)), True)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.mapper_preamble_insert_bb_0, 0),
                     (self.mapper_mapper_0, 0))
        self.connect((self.mapper_preamble_sync_cc_0, 0),
                     (self.mapper_demapper_0, 0))
        self.connect((self.mapper_mapper_0, 0),
                     (self.channels_channel_model_0, 0))
        self.connect((self.channels_channel_model_0, 0),
                     (self.digital_costas_loop_cc_0, 0))
        self.connect((self.digital_costas_loop_cc_0, 0),
                     (self.mapper_preamble_sync_cc_0, 0))
        self.connect((self.mapper_preamble_sync_cc_0, 0),
                     (self.qtgui_const_sink_x_0_1, 0))
        self.connect((self.analog_random_source_x_0, 0),
                     (self.blocks_unpack_k_bits_bb_0, 0))
        self.connect((self.blocks_unpack_k_bits_bb_0, 0),
                     (self.blocks_throttle_0, 0))
        self.connect((self.blocks_throttle_0, 0),
                     (self.mapper_preamble_insert_bb_0, 0))
        self.connect((self.mapper_demapper_0, 0),
                     (self.blocks_vector_sink_x_0_0, 0))
        self.connect((self.channels_channel_model_0, 0),
                     (self.qtgui_const_sink_x_0, 0))