Ejemplo n.º 1
0
class MapperTests(unittest.TestCase):
    """Unit tests for Mapper, driven by corpus files of (input, expected) cases."""

    def setUp(self):
        self.mapper = Mapper()
        self.utils = Utils()
        utils_for_tests = UtilsForTests()
        self.test_map_scale_to_white_keys = utils_for_tests.loadTestCorpus('test_corpus/test_to_white_keys_corpus')
        self.test_get_map = utils_for_tests.loadTestCorpus('test_corpus/test_get_map_corpus')

    def test_mapScaleToWhiteKeys(self):
        # Each corpus case is (scale, expected mapping dict).
        for case in self.test_map_scale_to_white_keys:
            mapped_scale = self.mapper.mapScaleToWhiteKeys(case[0])
            self.assertDictEqual(mapped_scale, case[1])

    def test_getMap(self):
        # Each corpus case is (note, scale, expected map dict).
        for case in self.test_get_map:
            mapped = self.mapper.getMap(case[0], case[1])
            self.assertDictEqual(mapped, case[2])

    @unittest.skip("Preformance test")
    def test_TimeitGetMap(self):
        setup = "from utils import Utils; from mapper import Mapper; mapper = Mapper(); utils = Utils();"
        # The statement string must not carry leading indentation: timeit
        # compiles it as module-level code, so an indented triple-quoted
        # block (as before) raises IndentationError.
        code_to_test = (
            "for scale in utils.getAvailableScales():\n"
            "    for note in utils.getNotes():\n"
            "        mapper.getMap(note, scale)\n"
        )
        result_first = timeit.repeat(code_to_test, setup=setup, repeat=100, number=100)
        # sum()/len() replaces functools.reduce for a plain average.
        result_avg = sum(result_first) / len(result_first)
        print("Result avg: " + str(result_avg))
Ejemplo n.º 2
0
def cards_to_database(database, cards_file):
    """
    Load card records from cards_file (comma-separated) into database.

    Rebuilds the ``cards`` table from scratch with indexes on time,
    statid and posid.  Rows whose statid is unknown to the mapper are
    skipped; the row time is normalised to epoch seconds (UTC).

    Raises ValueError on a row that does not have exactly 15 fields.
    """
    mapper = Mapper(configuration.map_file)
    conn = sqlite3.connect(database)
    try:
        cursor = conn.cursor()
        cursor.execute("DROP TABLE IF EXISTS cards")
        cursor.execute("""
                       CREATE TABLE IF NOT EXISTS cards
                      (posid TEXT,
                       time INTEGER,
                       statid TEXT)
                       """)
        cursor.execute("CREATE INDEX time_index ON cards(time)")
        cursor.execute("CREATE INDEX statid_index ON cards(statid)")
        cursor.execute("CREATE INDEX posid_index ON cards(posid)")
        with open(cards_file, 'r') as reader:
            for line in reader:
                parts = line.strip().split(',')
                # Explicit check instead of assert: asserts are stripped
                # when Python runs with -O.
                if len(parts) != 15:
                    raise ValueError("expected 15 fields, got %d" % len(parts))
                if not mapper.has_statid(parts[9]):
                    continue
                # Some rows lack seconds ("Y/m/d H:M"); pad them first.
                if parts[5].count(':') == 1:
                    parts[5] = parts[5] + ":00"
                parts[5] = datetime.strptime(parts[5], "%Y/%m/%d %H:%M:%S")
                parts[5] = calendar.timegm(parts[5].utctimetuple())
                cursor.execute("INSERT INTO cards VALUES (?, ?, ?)",
                               (parts[3], parts[5], parts[9]))
        cursor.close()
        conn.commit()
    finally:
        # Always release the connection, even when a row is malformed.
        conn.close()
def start_thememapper():
    """Start the thememapper HTTP server (and optionally the diazo server).

    Python 2 code (print statements).  Publishes the Mapper and Navigation
    instances as module globals, parses --port/--diazo/--diazo_port from the
    command line, then runs a Tornado IOLoop with autoreload watching the
    settings file.
    """
    global nav
    global mapper
    #initialize the necessary classes
    mapper = Mapper(get_settings())
    nav = Navigation()
    # Adds the ability to set config file and port through commandline
    p = optparse.OptionParser()
    p.add_option('--port', '-p', default=mapper.port,help='port thememapper should run at')
    p.add_option('--diazo', '-d', default=False,action="store_true",dest="diazo",help='force diazo server to run')
    p.add_option('--diazo_port', '-f', default=mapper.diazo_port,help='port diazo should run at')
    options = p.parse_args()[0]
    # Command-line values win over the mapper's configured defaults.
    mapper.port = options.port
    mapper.diazo_port = options.diazo_port
    #start thememapper
    # NOTE(review): string concatenation here implies mapper.port is a str —
    # confirm, since HTTPServer.listen also receives it unconverted.
    print "Starting thememapper on http://0.0.0.0:" + mapper.port
    HTTPServer(WSGIContainer(app)).listen(mapper.port)
    if options.diazo or mapper.diazo_run == 'True':
        try: 
            from thememapper.diazo import server
            print "Starting diazo on http://0.0.0.0:" + mapper.diazo_port
            HTTPServer(server.get_application(mapper)).listen(mapper.diazo_port)
        except ImportError: 
            print "You will need to install thememapper.diazo before being able to use this function." 
    ioloop = IOLoop.instance()
    # Restart automatically when the settings file changes.
    autoreload.watch(os.path.join(os.path.dirname(__file__), 'settings.properties'))
    autoreload.add_reload_hook(reload)
    autoreload.start(ioloop)
    ioloop.start()
Ejemplo n.º 4
0
def load_source(source, action='load'):
    """Load every release of ``source`` into the database via the OCDS mapper.

    Existing Release rows for the source are deleted first.  Mapping errors
    are grouped by error type and logged as a single aggregated message; on
    success a confirmation is logged.  Finally the source's last_retrieve
    timestamp is refreshed and the session committed.

    NOTE(review): ``source`` is guarded against None only for the delete,
    yet ``source.url`` / ``source.last_retrieve`` are used unconditionally
    below — confirm callers never pass None.
    """
    if source is not None:
        db.session.query(Release).filter(Release.source_id == source.id).delete()

    error_hash = {}
    mapper = Mapper(source)
    for release, error in mapper.to_ocds():
        if error is not None:
            # Group the offending line numbers by error type.
            error_hash.setdefault(error[0], []).append(error[1])
        else:
            load_ocds(release, type='dict', source=source)

    if error_hash:
        message = "Erreurs lors du chargement du fichier %s \n\n" % (source.url)
        message += "\n".join(["Erreur: %s pour les lignes: %s" % (error_type, lines) for error_type, lines in error_hash.items()])
        app.logger.error(message)
    else:
        app.logger.info("Succès du chargement du fichier : %s" % (source.url))

    source.last_retrieve = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    db.session.commit()
Ejemplo n.º 5
0
 def __init__(self, dbName, dictClasses=None):
     """Create a mapper backed by the sqlite objects database ``dbName``.

     dictClasses is forwarded to the base Mapper (a mapping used when
     deserialising stored objects).  Python 2 code (``except E, ex``).
     """
     Mapper.__init__(self, dictClasses)
     self.__initObjDict()
     self.__initUpdateDict()
     try:
         self.db = SqliteObjectsDb(dbName)
     except Exception, ex:
         # Wrap with context; NOTE(review): re-raising a new Exception
         # discards the original traceback.
         raise Exception("Error creating SqliteMapper, dbName: %s\n error: %s" % (dbName, ex))
Ejemplo n.º 6
0
 def test_given_an_image_with_no_points_a_point_map_returned(self):
     """An image containing none of the target colour maps every column to -1."""
     img = cv2.imread(os.path.join(self.test_data_path,'SimpleTestImage5.png'),1)
     expected = [-1 for i in range(0,20)]
     colour = [255,255,255]
     threshold = 0
     mapper = Mapper(colour,threshold)
     actual = mapper.get_points(img)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(expected,actual)
Ejemplo n.º 7
0
 def test_given_an_colour_image_and_specific_colour_a_point_map_returned(self):
     """Each column of the diagonal test image maps to its own row index."""
     img = cv2.imread(os.path.join(self.test_data_path,'SimpleTestImage2.png'),1)
     expected = [i for i in range(0,20)]
     colour = [255,128,0]
     threshold = 0
     mapper = Mapper(colour,threshold)
     actual = mapper.get_points(img)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(expected,actual)
Ejemplo n.º 8
0
 def test_given_a_threshold_items_in_threshold_work_for_blue(self):
     """Pixels within the blue threshold match; those outside map to -1."""
     img = cv2.imread(os.path.join(self.test_data_path,'BlueThresholdTest.png'),1)
     threshold = 20
     expected = [0,0,0,-1,-1]
     colour = [128,128,128]
     mapper = Mapper(colour, threshold)
     actual = mapper.get_points(img)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(expected,actual)
Ejemplo n.º 9
0
 def test_a_threshold_can_be_changed(self):
     """set_threshold() widens the match so previously missed pixels match."""
     img = cv2.imread(os.path.join(self.test_data_path,'GreenThresholdTest.png'),1)
     initial_threshold = 20
     new_threshold = 21
     expected = [0,0,0,0,0]
     colour = [128,128,128]
     mapper = Mapper(colour, initial_threshold)
     mapper.set_threshold(new_threshold)
     actual = mapper.get_points(img)
     # assertEquals is a deprecated alias; assertEqual is the supported name.
     self.assertEqual(expected,actual)
Ejemplo n.º 10
0
class TestMapper(object):
    """Tests for Mapper's option parsing, config loading and page geometry.

    Python 2 only: uses ``str.decode``, ``u''`` literals and the removed
    ``encoding`` keyword of ``json.dump``.
    """
    # Flag flipped by the ``empty`` stub to prove save_config was invoked.
    used = False

    def empty(self, *args, **kw):
        # Stub that replaces mapper.save_config; records that it was called.
        self.used = True

    def setUp(self):
        self.used = False
        self.opts = Opts()
        self.mapper = Mapper(self.opts)
        # Avoid writing a real config file during tests.
        self.mapper.save_config = self.empty

    def parse_opts_test(self):
        """Title from command-line opts ends up in the prepared config."""
        self.opts.title = 'название 1'.decode('utf-8')
        self.mapper.prepare()
        assert (self.mapper.config['title'] == u'название 1')
        assert self.used

    def load_config_test(self):
        """Title from a JSON config file ends up in the prepared config."""
        config = {'title': 'название 2'}
        f = NamedTemporaryFile()
        json.dump(config, f, encoding='utf-8')
        f.flush()
        self.opts.config = f.name
        self.mapper.prepare()
        assert (self.mapper.config['title'] == u'название 2')

    def get_pagesize_test(self):
        """Page size is case-insensitive; landscape flips; a3 doubles a4."""
        assert self.mapper.get_page_size('a4') == self.mapper.get_page_size('A4')
        x, y = self.mapper.get_page_size('a4')
        assert self.mapper.get_page_size('a4', True) == (y, x)
        assert self.mapper.get_page_size('a3', False) == (y, x * 2)

    def get_coords_test(self):
        """Comma-separated string parses to a [float, float] pair."""
        assert self.mapper.get_coords('235,117.2') == [235., 117.2]
Ejemplo n.º 11
0
 def __init__(self, dbName, dictClasses=None, tablePrefix=''):
     """Create a flat mapper over sqlite db ``dbName`` using ``tablePrefix``.

     Loads the existing object dictionary when the tables are already
     present.  Python 2 code (``except E, ex``).
     """
     Mapper.__init__(self, dictClasses)
     self._objTemplate = None
     try:
         self.db = SqliteFlatDb(dbName, tablePrefix)
         self.doCreateTables = self.db.missingTables()
         
         # Only pre-load the object dictionary from an existing schema.
         if not self.doCreateTables:
             self.__loadObjDict()
     except Exception, ex:
         # NOTE(review): re-raising a new Exception loses the traceback.
         raise Exception('Error creating SqliteFlatMapper, dbName: %s, tablePrefix: %s\n error: %s' % (dbName, tablePrefix, ex))
Ejemplo n.º 12
0
class Store(object):
    """Facade over an Archipelago-backed Blocker (content blocks) and
       Mapper (block maps).

       Required constructor parameters: block_size, hash_algorithm,
       archipelago_cfile.  (The docstring previously listed path,
       blockpool and mappool, which this backend does not read.)
    """

    def __init__(self, **params):
        pb = {'blocksize': params['block_size'],
              'hashtype': params['hash_algorithm'],
              'archipelago_cfile': params['archipelago_cfile'],
              }
        self.blocker = Blocker(**pb)
        pm = {'namelen': self.blocker.hashlen,
              'hashtype': params['hash_algorithm'],
              'archipelago_cfile': params['archipelago_cfile'],
              }
        self.mapper = Mapper(**pm)

    def map_get(self, name, size):
        """Retrieve the map stored under ``name`` (``size`` bytes)."""
        return self.mapper.map_retr(name, size)

    def map_put(self, name, map, size, block_size):
        """Store ``map`` under ``name``."""
        self.mapper.map_stor(name, map, size, block_size)

    def map_delete(self, name):
        # Deliberate no-op: map deletion is not supported by this backend.
        pass

    def map_copy(self, dst, src, size):
        """Copy the map ``src`` to ``dst``."""
        self.mapper.map_copy(dst, src, size)

    def block_get(self, hash):
        """Return the block with ``hash``, or None when absent."""
        blocks = self.blocker.block_retr((hash,))
        if not blocks:
            return None
        return blocks[0]

    def block_get_archipelago(self, hash):
        """Like block_get, but through the archipelago retrieval path."""
        blocks = self.blocker.block_retr_archipelago((hash,))
        if not blocks:
            return None
        return blocks[0]

    def block_put(self, data):
        """Store one block of data; returns its content hash."""
        hashes, _absent = self.blocker.block_stor((data,))
        return hashes[0]

    def block_update(self, hash, offset, data):
        """Apply ``data`` at ``offset`` within block ``hash``; return new hash."""
        h, _err = self.blocker.block_delta(hash, offset, data)
        return h

    def block_search(self, map):
        """Return presence information for the blocks referenced by ``map``."""
        return self.blocker.block_ping(map)
Ejemplo n.º 13
0
class Store(object):
    """Filesystem-backed Store.
       Required constructor parameters: path, block_size, hash_algorithm,
       umask, blockpool, mappool.

       Blocks live under <path>/blocks, maps under <path>/maps.
    """

    def __init__(self, **params):
        umask = params['umask']
        if umask is not None:
            os.umask(umask)

        path = params['path']
        if path and not os.path.exists(path):
            os.makedirs(path)
        if not os.path.isdir(path):
            raise RuntimeError("Cannot open path '%s'" % (path,))

        # join(path, 'blocks'): the original passed a single pre-concatenated
        # argument to os.path.join, which bypassed join entirely.
        p = {'blocksize': params['block_size'],
             'blockpath': os.path.join(path, 'blocks'),
             'hashtype': params['hash_algorithm'],
             'blockpool': params['blockpool']}
        self.blocker = Blocker(**p)
        p = {'mappath': os.path.join(path, 'maps'),
             'namelen': self.blocker.hashlen,
             'mappool': params['mappool']}
        self.mapper = Mapper(**p)

    def map_get(self, name):
        """Retrieve the map stored under ``name``."""
        return self.mapper.map_retr(name)

    def map_put(self, name, map):
        """Store ``map`` under ``name``."""
        self.mapper.map_stor(name, map)

    def map_delete(self, name):
        # Deliberate no-op: map deletion is not supported by this backend.
        pass

    def block_get(self, hash):
        """Return the block with ``hash``, or None when absent."""
        blocks = self.blocker.block_retr((hash,))
        if not blocks:
            return None
        return blocks[0]

    def block_put(self, data):
        """Store one block of data; returns its content hash."""
        hashes, _absent = self.blocker.block_stor((data,))
        return hashes[0]

    def block_update(self, hash, offset, data):
        """Apply ``data`` at ``offset`` within block ``hash``; return new hash."""
        h, _err = self.blocker.block_delta(hash, offset, data)
        return h

    def block_search(self, map):
        """Return presence information for the blocks referenced by ``map``."""
        return self.blocker.block_ping(map)
Ejemplo n.º 14
0
 def test_given_an_colour_image_and_specific_colour_a_point_map_returned(self):
     """The threshold array for the diagonal test image is a 255-scaled identity."""
     img = cv2.imread(os.path.join(self.test_data_path,'SimpleTestImage2.png'),1)
     # 255 on the diagonal (column == row), 0 everywhere else, 20x20.
     expected = [[255 if col == row else 0 for col in range(0,20)]
                 for row in range(0,20)]
     colour = [255,128,0]
     threshold = 0
     mapper = Mapper(colour,threshold)
     actual = mapper.get_threshold_array(img)
     self.assertNumpyArrayEquals(expected,actual)
Ejemplo n.º 15
0
    def test_a_colour_can_be_changed(self):
        """set_colour() replaces the target colour used by get_points()."""
        img = cv2.imread(os.path.join(self.test_data_path,'GreenThresholdTest.png'),1)
        threshold = 20
        initial_expected = [0,0,0,-1,-1]
        initial_colour = [128,128,128]
        new_expected = [-1,-1,-1,-1,-1]
        new_colour = [64,64,64]
        mapper = Mapper(initial_colour, threshold)
        initial_result = mapper.get_points(img)
        # assertEquals is a deprecated alias; assertEqual is the supported name.
        self.assertEqual(initial_expected,initial_result)

        mapper.set_colour(new_colour)
        new_result = mapper.get_points(img)
        self.assertEqual(new_expected,new_result)
Ejemplo n.º 16
0
 def __init__(self, lcg, map_size):
     """Initialise game state: no current player, game not started, empty
     player list; wires up Handlers and a Mapper sized by ``map_size``.

     map_size is unpacked into Mapper's positional arguments — presumably
     (width, height); confirm against Mapper's signature.
     """
     self.current_player_index = None
     self.game_started = False
     self._players = []
     self.lcg = lcg
     self.handlers = Handlers(self)
     self.mapper = Mapper(self, *map_size)
 def __init__(self, connection_config):
     """Connect a cloud driver chosen by the configured provider name.

     The provider name from the config is canonicalised, the provider
     Mapper built from it, and a live driver connection established.
     """
     self.connection_config = connection_config
     provider_name = self.connection_config['cloud_provider_name']
     provider_name = transfer_cloud_provider_name(provider_name)
     # Imported lazily to avoid a module-load cycle with mapper.
     from mapper import Mapper
     self.mapper = Mapper(provider_name)
     self.driver = self.mapper.connect(self.connection_config)
Ejemplo n.º 18
0
def map_result():
    """Flask view: map POSTed RDF/XML through the Mapper and return the result.

    Returns an 'application/rdf+xml' Response on success; falls through
    (implicitly returning None) on non-POST requests or on any failure.
    """
    if request.method == 'POST':
        rdf = request.form['data']
        try:
            gg = Graph()
            rdf_content = StringIO.StringIO(rdf.encode('utf-8'))
            gg.parse(rdf_content,  format="xml")
            mapper = Mapper(gg)
            #TODO-remove this test block after release
            batch_json = open("test_mapper_batch.json","r").read()
            result = mapper.mapping(batch_json)
            #End TODO-remove this test block after release
            return Response(result, mimetype='application/rdf+xml')
        except Exception:
            # Deliberate best-effort: swallow parse/mapping failures and fall
            # through.  Narrowed from a bare ``except:``, which also trapped
            # SystemExit and KeyboardInterrupt.
            pass
Ejemplo n.º 19
0
    def __init__(self, outputRepo, inputRepos=None):
        """Construct a Butler to manage an output (read/write) repository,
        attaching zero or more input (read-only) repositories."""

        self.mapper = Mapper.create(outputRepo, inputRepos)
        # Registry location comes from the mapper the factory produced.
        self.registryPath = self.mapper.registryPath
        # Accumulates provenance records over the Butler's lifetime.
        self.provenance = []
Ejemplo n.º 20
0
def job(grouperNum, chunksQueue, listSaveStateNameGrouper, listListLastCallNum):
    """Worker loop: map each queued chunk, then group it locally.

    Python 2 code (print statements).  Runs forever, pulling chunk ids from
    chunksQueue and recording per-worker grouping state in the two shared
    lists.  NOTE(review): ``directory`` and the *NameGenerator helpers are
    module-level globals not visible here — confirm they are defined.
    """
    print 'Starting worker ' + str(grouperNum)      
    while True:
        # Get new chunck to process
        chunk = chunksQueue.get()         
        # Work
        print 'Worker ' + str(grouperNum) + ' mapping chunk ' + str(chunk)
        MapIterator = MapChunkIterator(mapChunksNameGenerator(chunk)) # Iterator to iterate through the chunck        
        theContext = MapContext(groupChunksNameGenerator(chunk),MapIterator)        
        Mapper.map(theContext)
        print 'Worker ' + str(grouperNum) + ' grouping locally chunck ' + str(chunk)        
        # Each grouping pass gets a fresh, monotonically increasing call index.
        idx = listListLastCallNum[grouperNum]+1        
        theGrouper = Grouper(grouperNum,idx,idx-1,directory);        
        listSaveStateNameGrouper[grouperNum] = theGrouper.group(theContext)
        listListLastCallNum[grouperNum] = idx ;      
        # "Close" chunk
        chunksQueue.task_done()
Ejemplo n.º 21
0
 def __init__(self):
     """Initialise the empty DNS database and all callback registries."""
     #may want to enhance this with a pre-load file to prepopulate the DB
     self.db = {}                   # dictionary of DNSClassifierEntrys
     self.mapper = Mapper()
     self.new_callbacks = []        # For each new entry
     self.update_callbacks = []     # For each time an entry is updated
     self.all_callbacks = []        # When entry is updated or new
     self.class_callbacks = {}      # Dictionary of lists of callbacks per classification
Ejemplo n.º 22
0
    def __init__(self, b, n, k, mapper=None):
        """Creates a new Reed-Solomon Encoder/Decoder object configured with
        the given b, n and k values.
        b is the base to use, must be prime
        n is the length of a codeword, must be less than b
        k is the length of the message, must be less than n
        mapper is an class with encode and decode methods used to translate
        between strings and arrays of integers

        The code will have error correcting power s where 2s = n - k

        The typical RSCoder is RSCoder(256, 255, 223)
        """
        
        # NOTE(review): these guards accept n == 0 and k == 0 despite the
        # "must be positive" message — confirm whether >= 1 was intended.
        if n < 0 or k < 0:
            raise ValueError("n and k must be positive")
        if not n < b:
            raise ValueError("n must be less than b")
        if not k < n:
            raise ValueError("Codeword length n must be greater than message length k")

        # Fall back to the default alphabet mapper when none was supplied.
        if mapper is None:
            if b <= len(mapper_default_alphabet):
                self.mapper = Mapper(mapper_default_alphabet, mapper_default_equivs)
            else:
                raise ValueError("Base b too large for default mapper")
        else:
            self.mapper = mapper

        # α (a) is the generator of the field being used. This must be picked
        # appropriately if the field is changed. For integers mod p a generator
        # is a number such that for every n in ints mod p, There exists and l
        # Such that α^l=n mod p
        # For p=59 α=2 works (this can be verified easily through brute force
        self.PFint = PFint(b)
        self.a = self.PFint(findgen(b))

        self.b = b
        self.n = n
        self.k = k

        # Generate the generator polynomial for RS codes
        # g(x) = (x-α^1)(x-α^2)...(x-α^(n-k))
        # (Python 2 code: xrange.)

        g = Polynomial((self.PFint(1),))
        for l in xrange(1,n-k+1):
            p = Polynomial((self.PFint(1), -self.PFint(self.a)**l))
            g = g * p

        self.g = g

        # h(x) = (x-α^(n-k+1))...(x-α^n)
        h = Polynomial((self.PFint(1),))
        for l in xrange(n-k+1,n+1):
            p = Polynomial((self.PFint(1), self.PFint(self.a)**l))
            h = h * p
        self.h = h
Ejemplo n.º 23
0
class Nes(object):
    """Top-level NES emulator facade tying together CPU, PPU, ROM and mapper."""
    __metaclass__ = Singleton  # Python 2 style singleton metaclass

    def __init__(self):
        self.cpu = Cpu()
        self.ppu = Ppu()

        self.rom = None      # Rom instance, set by load()
        self.memory = None   # Mapper built from the ROM's mapper type

        self.is_running = False

    def reset(self):
        """Reset memory (when loaded), CPU and PPU to power-on state."""
        if self.memory:
            self.memory.reset()

        self.cpu.reset()
        self.ppu.reset()

    def start(self):
        pass

    def stop(self):
        pass

    def load(self, filename):
        """Load a ROM file; return True when the ROM header is valid.

        Raises Exception when the ROM declares an unknown mapper type.
        """
        if self.is_running:
            self.stop()

        self.rom = Rom()
        self.rom.load(filename)

        if self.rom.is_valid:
            self.memory = Mapper(self.rom.mapper_type)
            # ``is None`` replaces ``== None``.  NOTE(review): a plain
            # constructor cannot return None, so this guard only fires if
            # Mapper overrides __new__ to signal an unknown mapper type.
            if self.memory is None:
                raise Exception('Unknown mapper: %d' % self.rom.mapper_type)

            self.memory.load()
            self.ppu.set_mirroring(self.rom.get_mirrowing())

        return self.rom.is_valid

    def reload(self):
        """Re-load the currently loaded ROM from its file, if any."""
        if self.rom and self.rom.filename:
            self.load(self.rom.filename)
 def __init__(self,
              provider_config=None,
              is_verbose_output=False):
     """Build the manager's provider Mapper from the configured provider name.

     provider_config and is_verbose_output are forwarded to the base class.
     """
     super(ProviderManager, self).\
         __init__(provider_config,
                  is_verbose_output)
     provider_name = provider_config['connection']['cloud_provider_name']
     provider_name = transfer_cloud_provider_name(provider_name)
     # Imported lazily to avoid a module-load cycle with mapper.
     from mapper import Mapper
     self.mapper = Mapper(provider_name)
Ejemplo n.º 25
0
 def __init__(self, **params):
     """Build the Blocker and Mapper from archipelago parameters.

     Required params: block_size, hash_algorithm, archipelago_cfile.
     """
     pb = {'blocksize': params['block_size'],
           'hashtype': params['hash_algorithm'],
           'archipelago_cfile': params['archipelago_cfile'],
           }
     self.blocker = Blocker(**pb)
     # NOTE(review): unlike the Blocker params above, no 'hashtype' is
     # passed to Mapper here — confirm this is intentional.
     pm = {'namelen': self.blocker.hashlen,
           'archipelago_cfile': params['archipelago_cfile'],
           }
     self.mapper = Mapper(**pm)
Ejemplo n.º 26
0
 def __init__(self, station):
     """Initialise the scheduler for ``station`` with a settings-selected
     algorithm and empty per-iteration bookkeeping.
     """
     # selection of the algorithm to use
     self.algorithm = getattr(settings, 'SCHEDULER_ALGORITHM')()
     self.station = station
     self.current_user_imsi = None
     self.last_user_imsi = None
     self.start_time = None
     self.iteration_time = None
     self.mapper = Mapper()
     self.rbgs = None
     # NOTE(review): returning from __init__ is unconventional; this is
     # legal only because super().__init__() returns None.
     return super(Scheduler, self).__init__()
Ejemplo n.º 27
0
 def __init__(self, filename, dictClasses=None, rootName='ALL', **args):
     """Open (or create, rooted at ``rootName``) the XML store ``filename``
     and initialise the parsing/writing state of the mapper.
     """
     self.filename = filename        
     # Read an existing file, otherwise create a fresh document.
     if exists(filename):
         self._read()           
     else:
         self._create(rootName, **args)
     Mapper.__init__(self, dictClasses)
     # Objects map (id should be defined)  
     self.objDict = {}      
     # Store some objects during parsing
     # that have some Pointer attributes and need to be fixed later 
     self.pendingPtrDict = {}
     # This dictionary serve to define how to write classes
     # for example if the pair 'Integer': 'attribute' is present
     # all Integer will be store as attributes in the xml
     # possible values are: 
     #   'attribute', 'class_only', 'class_name', 'name_class', 'name_only'
     self.classTags = {} 
     # Counter to provide default objects id's
     self.objCount = 0
class LibcloudConnector(object):
    """Holds a libcloud driver connected through the provider Mapper."""

    def __init__(self, connection_config):
        self.connection_config = connection_config
        raw_name = self.connection_config['cloud_provider_name']
        canonical_name = transfer_cloud_provider_name(raw_name)
        # Imported lazily to avoid a module-load cycle with mapper.
        from mapper import Mapper
        self.mapper = Mapper(canonical_name)
        self.driver = self.mapper.connect(self.connection_config)

    def get_driver(self):
        """Return the connected libcloud driver instance."""
        return self.driver
Ejemplo n.º 29
0
class ScaleCache:
    """In-memory cache of mapped scales, keyed by note + scale name.

    NOTE(review): ``cache`` is a *class* attribute, so it is shared by every
    ScaleCache instance, and ``__del__`` on any instance clears it for all of
    them.  Preserved as-is — confirm the sharing is intentional.
    """

    cache = {}

    def __init__(self, cacheAll = True):
        self.mapper = Mapper()
        self.utils = Utils()
        if cacheAll:
            self.cacheAllScales()

    def cacheAllScales(self):
        """Pre-compute every (note, scale) combination."""
        for scale in self.utils.getAllAvailableScales():
            self.cacheHoleScale(scale)

    def cacheHoleScale(self, scale):
        """Cache one scale for every root note."""
        for note in self.utils.getNotes():
            self.cacheScale(note, scale)

    def cacheScale(self, note, scale):
        """Compute and store the mapping for one (note, scale) pair."""
        scale_to_map = self.mapper.getScaleToMap(note, scale)
        mapped_scale = self.mapper.getMap(scale_to_map)

        result = {'scale_to_map': scale_to_map, 'mapped_scale': mapped_scale}

        self.cache[note + scale] = result

    def checkInCache(self, note, scale):
        """Return True when the (note, scale) pair is already cached."""
        return (note + scale) in self.cache

    def clearCache(self):
        self.cache.clear()

    def getScaleFromCache(self, note, scale):
        """Return the cached mapping, computing it first on a miss."""
        if not self.checkInCache(note, scale):
            self.cacheScale(note, scale)
        return self.cache[note + scale]

    def __del__(self):
        # Clears the shared class-level cache when any instance is collected.
        self.clearCache()
Ejemplo n.º 30
0
class TestMapperMethod(unittest.TestCase):
    """Tests for Mapper.map: genomic coordinate -> (cds position, aa position).

    Requires the refFlat annotation file data/refFlatMm10.txt.
    """

    def setUp(self):
        self.mapper = Mapper("data/refFlatMm10.txt")

    
    def test_normal_case(self):
        """A coding-exon coordinate maps to its CDS and amino-acid positions."""
        cds_pos, aa_pos = self.mapper.map(101153495, "NM_146145")
        self.assertEqual(cds_pos, 3462)
        self.assertEqual(aa_pos, 1154)


    def test_coord_in_intron(self):
        """An intronic coordinate maps to (None, None)."""
        cds_pos, aa_pos = self.mapper.map(101153494, "NM_146145")
        self.assertEqual(cds_pos, None)
        self.assertEqual(aa_pos, None)


    def test_refseq_id_not_in_file(self):
        """An unknown RefSeq id maps to (None, None)."""
        cds_pos, aa_pos = self.mapper.map(101153495, "NM_899287")
        self.assertEqual(cds_pos, None)
        self.assertEqual(aa_pos, None)
Ejemplo n.º 31
0
# 8-bit color driver for 0.96 inch OLED
from ssd1331 import SSD1331
# Optional 16-bit color driver
# from ssd1331_16bit import SSD1331
from mapper import Mapper  # Maps temperature to rgb color
from amg88xx import AMG88XX

# For timer callback demo:
# import pyb

# Temperature range to cover (units presumably °C — confirm against AMG88XX docs)
TMAX = 30
TMIN = 15

# Instantiate color mapper
mapper = Mapper(TMIN, TMAX)

# Instantiate display (SPI bus 1 on pins X1-X3 of a pyboard-style board)
pdc = machine.Pin('X1', machine.Pin.OUT_PP, value=0)
pcs = machine.Pin('X2', machine.Pin.OUT_PP, value=1)
prst = machine.Pin('X3', machine.Pin.OUT_PP, value=1)
spi = machine.SPI(1)
ssd = SSD1331(spi, pcs, pdc, prst)
# Start from a blank screen.
ssd.fill(0)
ssd.show()

# Instantiate temperature sensor on I2C bus 1
i2c = machine.I2C(1)
sensor = AMG88XX(i2c)
sensor.ma_mode(True)  # Moving average mode
Ejemplo n.º 32
0
 def __init__(self):
     """Create the IP mapper and open a connection to the local 'ip' database."""
     self._mapper = Mapper()
     self._conn = psycopg2.connect("dbname='ip'")
Ejemplo n.º 33
0
class Uploader:
    """Uploads CSV datasets into per-dataset Postgres tables.

    Python 2 code (``reader.next()``).

    SECURITY NOTE(review): column names and row values from the uploaded CSV
    are interpolated directly into SQL strings below (``format``/``%``), and
    CSV content is untrusted input — this is a SQL injection risk.  Should be
    reworked to use parameterised queries and psycopg2 identifier quoting.
    """
    def __init__(self):
        self._mapper = Mapper()
        self._conn = psycopg2.connect("dbname='ip'")

    def create_dataset(self):
        """Insert a fresh datasets row and return its id."""
        cur = self._conn.cursor()
        cur.execute("INSERT INTO datasets (columns) VALUES ('{}') RETURNING *")
        results = cur.fetchall()
        return results[0][0]

    def upload_rows(self, f, dataset_id):
        """ Takes in a file, returns an list of dicts, each row being a dict. """
        # return [self._augment_row(r) for r in csv.DictReader(f)]

        # TODO update columsn in datasets table for dataset id

        cur = self._conn.cursor()
        reader = csv.DictReader(f)
        first = reader.next() # get header line

        # Make CSV header names safe-ish as SQL identifiers.
        sanitize = lambda k: k.replace('(', '_').replace(')', '_').replace(' ', '_').replace('-', '_')
        cols = set(map(sanitize, first.keys()))
        cols.remove('ip_address')
        cols = sorted(list(cols))

        # NOTE(review): updates *all* datasets rows — there is no WHERE
        # clause restricting it to dataset_id; confirm intent.
        cur.execute("UPDATE datasets SET columns = '%s';" % (lst2pgarr(['ip_address'] + cols)))

        cur.execute("CREATE TABLE dataset_{} ({});".format(
            dataset_id,
            ", ".join(['ip_address inet'] + (map(lambda c: c + " double precision", cols)))
        ))

        # NOTE(review): the first data row (consumed as ``first``) is never
        # inserted — confirm whether the file has a real header line.
        for r in reader:
            cols_in_order = ['ip_address'] + cols
            r = dict([(sanitize(key), val) for (key, val) in r.items()])
            vals = map(lambda c: r[sanitize(c)], cols_in_order)
            vals[0] = "'" + vals[0] + "'"
            # SECURITY NOTE(review): values interpolated unescaped into SQL.
            cur.execute("INSERT INTO dataset_{} ({}) VALUES ({});".format(
                dataset_id,
                ", ".join(cols_in_order),
                ", ".join(vals).replace(", ,", ", NULL,").replace(", ,", ", NULL,"),
            ))

        cur.execute("""
        CREATE VIEW dataset_view_{} AS (
            -- SELECT d.*, ip_to_zip.zip, u.unemp_rate, u.num_in_sample, p.population, p.land_sq_mi, p.density_per_sq_mile
            SELECT d.*, ip_to_zip.zip
            FROM dataset_{} d, ip_to_zip
            WHERE (
              ip_to_zip.ip >> d.ip_address::inet
            )
            -- LEFT JOIN unemployment u ON u.zip = ip_to_zip.zip
            -- LEFT JOIN popdense p ON u.zip = p.zip
        );
        CREATE INDEX ON dataset_{} USING gist ((ip_address::inet) inet_ops);
        """.format(dataset_id, dataset_id, dataset_id))
        self._conn.commit()

    def _augment_row(self, row):
        """Merge census data (via zip lookup) into a row; {} when zip unknown."""
        zip_code = self._mapper.ip_to_zip(row['ip_address'])
        if zip_code is None:
            return {}
        census_data = self._mapper.zip_to_census_data(zip_code)
        return merge_two_dicts(row, census_data)
Ejemplo n.º 34
0
class DNSClassifier:
    """DNS-derived database mapping IP addresses to classified Entry records.

    Parsed A-record responses are classified via Mapper and stored in
    ``self.db`` keyed by dotted-quad address.  Callback lists fire on new
    entries, on updates, on both, and per classification.
    """

    def __init__(self):
        #may want to enhance this with a pre-load file to prepopulate the DB
        self.db = {}                   # dictionary of DNSClassifierEntrys
        self.mapper = Mapper()
        self.new_callbacks = []        # For each new entry
        self.update_callbacks = []     # For each time an entry is updated
        self.all_callbacks = []        # When entry is updated or new
        self.class_callbacks = {}      # Dictionary of lists of callbacks per
                                       # classification

    def parse_new_DNS(self, packet):
        """Parse a DNS response packet and record/refresh its A records."""
        # Only look at responses with 'No error' reply code.
        # (The literal was 0000, which is a SyntaxError on Python 3; it is
        # simply 0.)
        dns_parsed = dns.parser(packet)
        if dns_parsed.qr and dns_parsed.rcode == 0:
            # skip the questions; ignore authorities; answers and
            # additional records may both carry useful A records
            for resp in (dns_parsed.answers + dns_parsed.additional):
                if resp.qtype == dns.rr.A_TYPE:
                    self._handle_a_record(resp)
                elif resp.qtype == dns.rr.AAAA_TYPE:
                    #placeholder
                    print("Found a AAAA")
                elif resp.qtype == dns.rr.CNAME_TYPE:
                    #placeholder
                    print("Found a CNAME!")
                elif resp.qtype == dns.rr.MX_TYPE:
                    #placeholder
                    print("Found an MX!")

    def _handle_a_record(self, resp):
        """Insert or update the DB entry for one A record; fire callbacks."""
        classification = self.mapper.searchType(resp.name)
        addr = addrconv.ipv4.bin_to_text(resp.rddata)

        if addr not in self.db:
            # New address: create the entry and notify new/classification
            # callbacks.
            self.db[addr] = Entry(addr, list(), classification, resp.ttl)
            self.db[addr].names.append(resp.name)
            for callback in self.new_callbacks:
                callback(addr, self.db[addr])
            if classification in self.class_callbacks:
                for callback in self.class_callbacks[classification]:
                    callback(addr, self.db[addr])
        else:
            # Known address: refresh expiry/classification, then notify
            # update callbacks (and classification callbacks on a change).
            self.db[addr].update_expiry(resp.ttl)
            old_class = self.db[addr].classification
            self.db[addr].classification = classification

            if resp.name not in self.db[addr].names:
                self.db[addr].names.append(resp.name)
            for callback in self.update_callbacks:
                callback(addr, self.db[addr])
            if old_class != classification:
                if classification in self.class_callbacks:
                    for callback in self.class_callbacks[classification]:
                        callback(addr, self.db[addr])

        for callback in self.all_callbacks:
            callback(addr, self.db[addr])

    def _clean_expiry_full(self):
        """Drop every expired entry.

        Iterates over a snapshot of the keys: deleting while iterating the
        live keys view raises RuntimeError on Python 3.
        """
        for key in list(self.db.keys()):
            entry = self.db[key]
            if entry.is_expired():
                del self.db[key]

    def clean_expired(self):
        """Public wrapper around the full expiry sweep."""
        self._clean_expiry_full()

    def print_entries(self):
        """Dump every entry via its own print_entry()."""
        for key in self.db:
            self.db[key].print_entry()

    def set_new_callback(self, cb):
        if cb not in self.new_callbacks:
            self.new_callbacks.append(cb)

    def remove_new_callback(self, cb):
        self.new_callbacks.remove(cb)

    def set_update_callback(self, cb):
        if cb not in self.update_callbacks:
            self.update_callbacks.append(cb)

    def remove_update_callback(self, cb):
        self.update_callbacks.remove(cb)

    #TODO: classication change callback?

    def set_all_callback(self, cb):
        # BUG FIX: membership used to be checked against update_callbacks,
        # which allowed duplicates in all_callbacks.
        if cb not in self.all_callbacks:
            self.all_callbacks.append(cb)

    def remove_all_callback(self, cb):
        self.all_callbacks.remove(cb)

    def set_classification_callback(self, cb, classification):
        print("set_classification_callback: " + str(cb))
        print("classification:              " + str(classification))
        if classification not in self.class_callbacks:
            self.class_callbacks[classification] = list()
        if cb not in self.class_callbacks[classification]:
            self.class_callbacks[classification].append(cb)

    def remove_classification_callback(self, cb, classification):
        if classification not in self.class_callbacks:
            return
        self.class_callbacks[classification].remove(cb)

    def find_by_ip(self, addr):
        """Returns the entry specified by the ip 'addr' if it exists
        """
        if addr in self.db:
            return self.db[addr]
        return None

    def get_classification(self, IP):
        """Returns the classification for the entry specified by the ip 'IP'
           if it exists
        """
        # BUG FIX: previously referenced the undefined name ``addr``
        # (NameError on every call).
        if IP in self.db:
            return self.db[IP].classification
        return None

    def find_by_classification(self, classification):
        """Returns a dictionary of database entries from a particular category  
           Dictionary will be ipaddr:dbentry
        """
        retdict = {}
        for key in self.db:
            if classification == self.db[key].classification:
                retdict[key] = self.db[key]
        return retdict

    def find_by_name(self, name):
        """Returns a dictionary of database entries for a particular webname
           Dictionary will be ipaddr:dbentry
        """
        retdict = {}
        for key in self.db:
            if name in self.db[key].names:
                retdict[key] = self.db[key]
        return retdict

    def has(self, ipaddr):
        """Returns True if we have an active (non-expired) record for the
           IP address, False otherwise.
        """
        # BUG FIX: previously returned the undefined name ``false`` for a
        # missing address, and the *inverted* expiry test (True for an
        # expired record) for a present one.
        if ipaddr not in self.db:
            return False
        return not self.db[ipaddr].is_expired()
Ejemplo n.º 35
0
import numpy as np

from mapper import Mapper

# Load the pre-computed spiral point cloud and run Mapper over it with a
# cover of 7 intervals (4% overlap) along the first coordinate.
spiral_cloud = np.load('point_clouds/spirals.npy')

mapper = Mapper(bins=7, overlap=0.04, coordinate=0)
graph = mapper.fit(spiral_cloud)

# Visualise every stage of the pipeline.
mapper.plot_vertices()
mapper.plot_intervals()
mapper.plot_clusters()
mapper.plot_graph()
mapper.plot_graph_in_plane()
mapper.plot_persistence_homology()
Ejemplo n.º 36
0
def test_standard_search():
    """Exercise Mapper.standard_search over a table of (text, SNOMED codes)
    expectations: direct matches, extra terms, a misspelling, acronyms,
    and user-defined terms.
    """
    expectations = [
        # direct matches: heart attack (22298006) and cancer (363346000)
        ('heart attack and cancer', [22298006, 363346000]),
        # extra_terms with diabetes (73211009)
        ('diabetes', [73211009]),
        # misspelling still resolves to diabetes
        ('diabetez', [73211009]),
        # acronym matcher
        ('t2dm', [44054006]),
        ('nash', [442685003]),
        # user defined terms
        ('antidepressant', [35489007]),
    ]
    for text, expected_codes in expectations:
        searcher = Mapper(text)
        searcher.standard_search()
        for code in expected_codes:
            assert (code in searcher.codes)
Ejemplo n.º 37
0
class Cryptsession(object):
    def __init__(self):
        #TODO Move frequency stuff to two separate models of same type. one for reference and one for ciphertext
        #map that stores the frequeny model for the plainText
        #self.symbol_ref = {}
        #self.bigram_ref = {}
        #load the reference frequencies from frequencies/<language>/
        self.reference = ReferenceModel.for_language("english")

        #map that stores the absolute frequencies of letters in the ciphertext
        #init the dictionary with 0s
        # self.symbol_counts = dict.fromkeys(unicode(string.ascii_uppercase),0.0)
        # #map that stores the frequency model for the ciphertext
        # #init the dictionary with 0s
        # self.symbol_freqs = dict.fromkeys(unicode(string.ascii_uppercase),0.0)
        #map to store the assumed mappings between symbol in ciphertext and symbol in plaintext
        #self.substitutions = {}
        self.mapper = Mapper()

        #blacklist of punctuation characters
        # self.blacklist = [u"É",u"!", u".", u",", u"|", u"?", u":", u";",u"+", u"-",u"\"",u"§",u"$",u"%",u"&",u"/",u"(",u")",u"=",u"[",u"]",u"{",u"}",u"@",u"1",u"2",u"3",u"4",u"5",u"6",u"7",u"8",u"9",u"0"]
        # #map that stores absolute word frequencies
        # self.word_counts = {}
        # self.word_freqs = {}
        # self.bigram_counts = {}
        # self.bigram_freqs = {}
        # self.trigram_counts = {}
        # self.trigram_freqs = {}

    def show_most_frequent_symbols(self, n=5):
        smbls = self.ciphertext.get_letter_freqs()
        smbls = sorted(smbls, key=lambda x: x[1], reverse=True)
        print "=== %d most frequent symbols ===" % n
        for i in range(n):
            symbol = smbls[i][0]
            out = u"{} ({:.2f} %)".format(symbol.upper(), smbls[i][1])
            #if there is a known mapping, print it
            if self.mapper.has_mapping_from(symbol):
                plain = self.mapper.get_mapping_from(symbol)
                out += u" --> {} ({:.2f} %)".format(
                    plain.lower(), self.reference.get_letter_freq(plain))
            print out

    def show_most_frequent_bigrams(self, n=5):
        bgrms = self.ciphertext.get_bigram_freqs()
        bgrms = sorted(bgrms, key=lambda x: x[1], reverse=True)
        print "=== %d most frequent bigrams ===" % n
        for i in range(n):
            bgrm = bgrms[i][0]
            out = u"{} ({:.2f} %)".format(bgrm.upper(), bgrms[i][1])
            #print bigram mapping (using current mappings)
            plainbgrm = u""
            #for each letter in the bigram
            for symbol in bgrm:
                #check if we have a mapping
                if self.mapper.has_mapping_from(symbol):
                    plainbgrm += self.mapper.get_mapping_from(symbol)
                else:
                    #if we do not have a mapping use ?
                    plainbgrm += u"?"
            #if none of the bigram letters has a mapping don't show bigram-mapping
            if plainbgrm.count(u"?") < len(bgrm):
                out += u" --> {}".format(plainbgrm.lower())
            print out

    def show_most_frequent_trigrams(self, n=5):
        trgrms = self.ciphertext.get_trigram_freqs()
        trgrms = sorted(trgrms, key=lambda x: x[1], reverse=True)
        print "=== %d most freqent trigrams ===" % n
        for i in range(n):
            trgrm = trgrms[i][0]
            out = u"{} ({:.2f} %)".format(trgrm.upper(), trgrms[i][1])
            #print trigram mapping (using current mappings)
            plaintrgrm = u""
            #for each letter in the trigram
            for symbol in trgrm:
                #check if we have a mapping
                if self.mapper.has_mapping_from(symbol):
                    plaintrgrm += self.mapper.get_mapping_from(symbol)
                else:
                    #if we do not have a mapping use ?
                    plaintrgrm += u"?"
            #if none of the trigram letters has a mapping don't show trigram-mapping
            if plaintrgrm.count(u"?") < len(trgrm):
                out += u" --> {}".format(plaintrgrm.lower())
            print out

    def show_most_frequent_words(self, n=5):
        cwords = self.c.word_counts.items()
        cwords = sorted(cwords, key=lambda x: x[1], reverse=True)
        print "=== %d most frequent words ===" % n
        for i in range(n):
            word = cwords[i][0]
            out = u"{} ({:.2f} %)".format(word.upper(), cwords[i][1])
            #print word mapping (using current mappings)
            plainword = u""
            #for each letter in the word
            for symbol in word:
                #check if we have a mapping
                if self.mapper.has_mapping_from(symbol):
                    plainword += self.mapper.get_mapping_from(symbol)
                else:
                    #if we do not have a mapping use ?
                    plainword += u"?"
            #if not at least half of the letters have a mapping don't show word-mapping
            if plainword.count(u"?") <= len(word) / 2:
                out += u" --> {}".format(plainword.lower())
            print out

    def show_plaintext(self):
        decrypted = ''
        for symbol in self.ciphertext.text:
            #check if there is a substitution-rule
            if self.mapper.has_mapping_from(symbol):
                #use it
                decrypted += self.mapper.get_mapping_from(symbol).lower()
            else:
                #use letter from ciphertext instead
                decrypted += symbol
        print decrypted

    def show_menu(self):
        choice = u''
        while True:
            print "======== Available Actions ========"
            print "[0] Read ciphertext from file"
            print "[1] Show ciphertext"
            #print "[2] Analyse ciphertext"
            print "[3] Show reference frequencies (symbols)"
            #TODO Show absolute frequencies
            print "[4] Show ciphertext frequencies (symbols)"
            print "[5] Shwo n most frequent symbols"
            print "[6] Show n most frequent bigrams"
            print "[7] Show n most frequent trigrams"
            print "[8] Show n most frequent words"
            print "[9] Create n substitution rules using symbol-frequencies "
            print "[10] Define substitution rule for ciphertext -> plaintext"
            print "[11] Remove substitution rule"
            print "[12] Show substitution rules"
            print "[13] Show decrypted text (uses substitution rules)"
            print "==================================="
            choice = input("Please choose: ")
            try:
                if choice == 0:
                    fn = raw_input("Path to ciphertext: ")
                    lan = raw_input(
                        "Language of ciphertext (german/english): ")
                    self.reference = ReferenceModel.for_language(lan)
                    self.ciphertext = AnalysisModel.from_file(
                        fn, self.reference)
                    self.show_most_frequent_symbols()
                    self.show_most_frequent_bigrams()
                    self.show_most_frequent_trigrams()
                elif choice == 1:
                    self.ciphertext.show_text()
                elif choice == 2:
                    self.analyze()
                elif choice == 3:
                    self.reference.show_letter_freqs()
                elif choice == 4:
                    self.ciphertext.show_letter_freqs()
                elif choice == 5:
                    n = raw_input("n: ").decode(sys.stdout.encoding)
                    self.show_most_frequent_symbols(int(n))
                elif choice == 6:
                    n = raw_input("n: ").decode(sys.stdout.encoding)
                    self.show_most_frequent_bigrams(int(n))
                elif choice == 8:
                    n = raw_input("n: ").decode(sys.stdout.encoding)
                    self.show_most_frequent_words(int(n))
                elif choice == 7:
                    n = raw_input("n: ").decode(sys.stdout.encoding)
                    self.show_most_frequent_trigrams(int(n))
                elif choice == 9:
                    n = raw_input("n: ").decode(sys.stdout.encoding)
                    self.mapper.generate_mappings(self.reference,
                                                  self.ciphertext, int(n))
                elif choice == 10:
                    ciph = raw_input("From: ").decode(sys.stdout.encoding)
                    plain = raw_input("To: ").decode(sys.stdout.encoding)
                    self.mapper.add_mapping(ciph, plain)
                elif choice == 11:
                    ciph = raw_input(
                        "Remove substitution for which letter?").decode(
                            sys.stdout.encoding)
                    self.mapper.remove_mapping(ciph)
                elif choice == 12:
                    self.mapper.show_mappings()
                elif choice == 13:
                    self.show_plaintext()
                elif choice == 14:
                    fn = raw_input("filename: ").decode(sys.stdout.encoding)
                    self.mapper.load_mappings(fn)
                elif choice == 15:
                    fn = raw_input("filename: ").decode(sys.stdout.encoding)
                    self.mapper.store_mappings(fn)
                elif choice == 16:
                    self.mapper.generate_candidates(self.reference,
                                                    self.ciphertext)
                elif choice == 'q':
                    system.exit(0)
                else:
                    print "Unknown option"
            except:
                raise
Ejemplo n.º 38
0
    def execute(self, map_func, reduce_func, kill_idx=-1):
        '''
        Executes the Master worker to complete the MapReduce task.

        Args:
            1. map_func - handle for UDF map function
            2. reduce_func - handle for UDF reduce function
            3. kill_idx - specifies the worker to be killed; used to
               simulate fault tolerance when >= 0 (-2 kills worker 1
               right after startup instead)

        Raises:
            ValueError - when a worker keeps timing out after
            self.max_attempts restarts.
        '''

        # Logic for coordinating mappers and reducers
        self.mappers = []
        self.reducers = []
        self.active_reducers = []

        # instantiate one mapper per input file
        for idx in range(len(self.input_file_paths)):
            self.mappers.append(
                Mapper(idx, self.R, self.input_file_paths[idx],
                       f'{self.TMP_DIR}/intermediate', map_func))

        print("Map phase:")
        self.phase_flag = 0
        # per-mapper process bookkeeping
        self.processes = [None] * self.M
        self.reducer_ids = [None] * self.M
        self.ps, self.cs = [None] * self.M, [None] * self.M
        self.mapper_status = [True] * self.M  # True -> still running
        self.attempts = [0] * self.M          # restarts used per worker

        for i, m in enumerate(self.mappers):
            # queues used for message passing (reducer ids + heartbeats)
            self.reducer_ids[i] = mp.Queue()
            self.cs[i] = mp.Queue()
            self.processes[i] = mp.Process(target=m.execute_map,
                                           args=(self.reducer_ids[i],
                                                 self.cs[i]))
            # execute mapper
            self.processes[i].start()
            # simulate process crash to test fault tolerance
            if (kill_idx == i):
                print(f"Killing process {i}")
                self.processes[i].kill()

        # Code for testing fault tolerance timeout
        if (kill_idx == -2):
            print(f"Killing process 1")
            self.processes[1].kill()

        # wait until all mappers have finished;
        # mapping_status stays True only once every mapper reported DONE
        mapping_status = False
        while (mapping_status == False):
            mapping_status = True
            for i, m in enumerate(self.mappers):
                curr_status = None
                while True:
                    try:
                        # heartbeat message. BUG FIX: guard the read with
                        # mapper_status (as the reduce phase below already
                        # did) — a finished mapper sends no more heartbeats,
                        # so the unguarded get() timed out and needlessly
                        # restarted workers that had already completed.
                        if (self.mapper_status[i] is True):
                            [curr_status,
                             timestamp] = self.cs[i].get(timeout=self.timeout)
                        break
                    except Exception:
                        # no message received, check if max attempts reached
                        if (self.attempts[i] < self.max_attempts):
                            # restart replacement worker, increment attempts
                            self.restart_process(i, self.M, kill_idx)
                            self.attempts[i] += 1
                        else:
                            # give up: tear down every mapper process
                            for j, _ in enumerate(self.mappers):
                                self.processes[j].kill()
                            raise ValueError(
                                "RETRY_ERROR: Maximum attempts reached, job failed"
                            )

                # BUG FIX: this status check was indented one level out, so
                # it ran once per while-pass and only ever inspected the
                # *last* mapper; it now sits inside the per-mapper loop,
                # mirroring the reduce phase below.
                if curr_status == 'DONE' and self.mapper_status[i] == True:
                    self.mapper_status[i] = False
                    # get all valid reducer_ids
                    self.active_reducers += self.reducer_ids[i].get()
                    # wait until the finished process is reaped
                    self.processes[i].join()
                elif curr_status == 'RUNNING':
                    mapping_status = False

        print("\nAll mappers have finished executing")
        print("\nReduce phase:")
        self.phase_flag = 1

        # similar to map phase, instantiate all reducers and processes
        self.active_reducers = (list(set(self.active_reducers)))
        self.processes = [None] * self.R
        self.ps, self.cs = [None] * self.R, [None] * self.R
        self.reducer_status = [True] * len(self.active_reducers)
        # BUG FIX: reset retry counters for the reduce phase — the original
        # reused the map-phase list (sized M, partially consumed), which
        # both unfairly charged reducers for mapper restarts and raised
        # IndexError whenever R > M.
        self.attempts = [0] * self.R

        for idx in (self.active_reducers):
            self.reducers.append(
                Reducer(idx, len(self.input_file_paths),
                        f'{self.TMP_DIR}/intermediate', self.OUT_DIR,
                        reduce_func))

        # setting up processes for reducers
        for i, r in enumerate(self.reducers):
            self.cs[i] = mp.Queue()
            self.processes[i] = mp.Process(target=r.execute_reduce,
                                           args=(self.cs[i], ))
            self.processes[i].start()
            # killing certain workers to test fault tolerance
            if (kill_idx == i):
                print(f"Killing process {i+1}")
                self.processes[i].kill()

        # check for heartbeat messages, similar to map phase
        reducing_status = False
        while reducing_status == False:
            reducing_status = True
            for i, r in enumerate(self.reducers):
                curr_status = None
                while True:
                    try:
                        # skip reducers that already finished
                        if (self.reducer_status[i] is True):
                            [curr_status,
                             timestamp] = self.cs[i].get(timeout=self.timeout)
                        break
                    except Exception:
                        if (self.attempts[i] < self.max_attempts):
                            self.restart_process(i, self.R, kill_idx)
                            self.attempts[i] += 1
                        else:
                            print("Max attempts reached, task not completed")
                            for j, _ in enumerate(self.reducers):
                                self.processes[j].kill()
                            raise ValueError(
                                "TIMEOUT ERROR: Max attempts reached, task not completed"
                            )

                if curr_status == 'DONE' and self.reducer_status[i] == True:
                    self.reducer_status[i] = False
                    self.processes[i].join()
                elif curr_status == 'RUNNING':
                    reducing_status = False

        print("\nAll reducing tasks have been completed")
    if data_type == '0':
        return int(float(data))
    if data_type == '1':
        return float(data)
    if data_type == '2':
        return string(data)


if __name__ == '__main__':
    #input file name
    #in_file = ["../data/attack/10.1.1.8Burst.txt","../data/attack/10.1.1.8CPUinsert.txt","../data/attack/10.1.1.8CPUVMInsert.txt","../data/attack/10.1.1.8Normal.txt"]
    #num_sample = 100
    #num_feature = 5

    #general format data
    mapper = Mapper("map_config.txt")
    general_data = mapper.get_general_data()
    print("general data:", general_data)

    #label
    label_file = "label.txt"
    with open(label_file) as f:
        content = f.readlines()
    label = [int(x.strip()) for x in content]
    label = numpy.array(label)

    #classify the features based on the labels
    total_data = {}
    label_type = []
    for i in range(label.shape[0]):
        if label[i] not in total_data.keys():
def main():
    """Offline training pipeline.

    Loads a raw click-event dump, caches the most popular items in redis,
    builds user/item id mappers, trains a collaborative-filtering model in
    bulk mode, and dumps mappers, model weights, embeddings and metadata
    to disk for the serving layer.
    """
    # load the data; pipe-separated dump, replayed oldest click first
    user_item_df = pd.read_csv('data/0000_part_00', sep='|')
    user_item_df = user_item_df.sort_values(
        'clicked_epoch', ascending=True).reset_index(drop=True)

    # getting popular items (top 10 by interaction count) and setting redis key
    popular_items = user_item_df.groupby(
        'sourceprodid', as_index=False).count().sort_values(
            "uuid", ascending=False).sourceprodid.iloc[:10]
    redis_client.set("popular_items", {"items": list(popular_items)})

    # constructing user and item mappers (raw ids -> dense integer codes);
    # the item mapper also carries per-item attribute columns
    user_mapper = Mapper(keys=list(user_item_df.uuid.unique()))
    item_groupby = user_item_df.groupby('sourceprodid', as_index=False).first()
    item_mapper = Mapper(
        keys=list(item_groupby['sourceprodid'].values),
        attributes=item_groupby[constants.ITEM_ATTRIBUTES].values.tolist())

    # translate raw ids into model input codes
    user_item_df['user_code'] = user_item_df['uuid'].map(
        user_mapper.mapper_dict)
    user_item_df['item_code'] = user_item_df['sourceprodid'].map(
        item_mapper.mapper_dict)
    # item mapper values appear to be indexable, with the dense code at
    # position 0 — NOTE(review): confirm against Mapper.mapper_dict.
    user_item_df['item_code'] = user_item_df.item_code.apply(lambda x: x[0])

    n_users = len(user_mapper)
    n_items = len(item_mapper)

    # implicit-feedback target; the event type is kept as its own feature
    user_item_df['target'] = 1.0
    event_id_mapper = {'pageView': 1, 'addToCart': 2, 'buy': 3}
    user_item_df['event_code'] = user_item_df.userevent.map(event_id_mapper)

    # embeddings initialised from N(0, 0.1)
    cf_model = SimpleCF(n_users,
                        n_items,
                        embedding_length=40,
                        init=torch.nn.init.normal_,
                        mean=0.,
                        std=.1)
    objective = weighted_mse_loss
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = Step(cf_model, objective, device=device, mode='bulk')

    features = ['user_code', 'item_code', 'event_code']
    target = ['target']
    data_set = TensorDataset(torch.tensor(user_item_df[features].values),
                             torch.tensor(user_item_df[target].values))
    # shuffle=False keeps events in click order (online-style replay)
    data_loader = DataLoader(data_set,
                             batch_size=constants.INITIAL_BATCH_SIZE,
                             shuffle=False)

    # training
    for epoch in range(constants.INITIAL_TRAINING_EPOCHS):
        print("Epoch: {}".format(epoch + 1))
        with tqdm(total=len(data_loader)) as pbar:
            for _, (features, target) in enumerate(data_loader):
                model.batch_fit(features, target)

                pbar.update(1)
    print("Done with training")

    # dump the mappers
    user_mapper.save(os.path.join(script_dir, 'mappers', 'user'))
    item_mapper.save(os.path.join(script_dir, 'mappers', 'item'))

    # dump the model weights
    model.save(os.path.join(script_dir, 'models', 'model_weights.pth'))

    # dump the embedding matrices, truncated to the ids actually in use
    for embedding, num_embeddings in [('user', len(user_mapper)),
                                      ('item', len(item_mapper))]:
        embeddings = np.array(model.model.get_embeddings(embedding)).astype(
            'float32')[:num_embeddings]
        np.save(
            os.path.join(script_dir, 'models', embedding + '_embeddings.npy'),
            embeddings)

    # dump the model metadata (embedding table sizes for the server)
    model_metadata = {}
    model_metadata[
        'num_users'] = model.model.user_embeddings.weight.data.shape[0]
    model_metadata[
        'num_items'] = model.model.item_embeddings.weight.data.shape[0]
    with open(os.path.join(script_dir, 'mappers', 'metadata.json'), 'w') as f:
        f.write(json.dumps(model_metadata))

    print("Done with dumping of model and mappers.")
Ejemplo n.º 41
0
class Overseer(object):
    """Coordinates one game instance: the player registry, the turn order,
    the map state, and dispatch of incoming websocket messages to the
    Handlers instance.
    """
    def __init__(self, lcg, map_size):
        # Index into self._players of the active player; None until
        # next_player() runs for the first time.
        self.current_player_index = None
        self.game_started = False
        # Raw player slots; entries become None when a player leaves
        # mid-game (see remove_player), hence the filtering `players`
        # property below.
        self._players = []
        self.lcg = lcg
        self.handlers = Handlers(self)
        self.mapper = Mapper(self, *map_size)

    def handle(self, environ, start_response):
        """WSGI entry point for one websocket connection.

        Wraps the socket in a Player, then reads newline-stripped JSON
        messages in a loop and delegates each one.  Connections arriving
        after the game has started are redirected to a fresh overseer.
        The loop ends — and the player is removed — on a socket error or
        an empty/blank line.
        """
        logger.info('Using overseer %s' % id(self))
        socket = environ["wsgi.websocket"]
        logger.debug(socket.__dict__)
        player = Player(socket, start_response)
        enter_teh_infiniteh_loopah = True
        if self.game_started:
            logger.info(
                '%s tried to connect, but game has already started' % player.id
            )
            logger.info('Delegating %s to new overseer...' % player.id)
            self.lcg.new_overseer()
            return self.lcg.redirect_to_overseer(environ, start_response)
        logger.info('%s connected' % player.id)
        while enter_teh_infiniteh_loopah:
            try:
                line = socket.receive()
            except socketerror:
                break
            if not line:
                break
            line = line.strip()
            if not line:
                break
            logger.debug('%s -> %s' % (player.id, line))
            try:
                parsed = json.loads(line)
            except ValueError:
                # not valid JSON: scold the client but keep the connection
                player.exception('What the hell are you sending to me?')
                continue
            try:
                self.delegate(player, parsed)
            except (GameError, ServerError) as e:
                # game-level errors are reported to the player, not fatal
                player.exception(e)
                logger.info('%s raised %s' % (player.id, e))
                continue
        self.remove_player(player)
        try:
            socket.close()
        except socketerror:  # whatever, I no more care about this socket
            pass
        logger.info('%s disconnected' % player.id)

    def delegate(self, who, msg):
        """Validate the message envelope and route it: msg['type'] picks
        the handler, msg['message'] is the payload.
        """
        if not ('type' in msg and 'message' in msg):
            raise ServerError('Not enough JSON fields')
        what = msg['type']
        message = msg['message']
        self.handlers.do(who, what, message)

    @property
    def players(self):
        # Active players only: skips None slots and unnamed connections.
        return [p for p in self._players if p and p.name]

    @property
    def current_player(self):
        # May be None if the active player's slot was nulled mid-game.
        return self._players[self.current_player_index]

    @current_player.setter
    def current_player(self, player):
        self.current_player_index = self._players.index(player)

    def next_player(self):
        """Advance the turn to the next player still in the game, refresh
        their action points, and notify everyone.
        """
        if self.current_player_index is None:
            # First turn of the game: start with slot 0.
            self.current_player_index = 0
        else:
            # Walk the slot list circularly until we hit a player who is
            # still connected and still present on the map.
            while self.current_player:
                i = (self.current_player_index+1) % len(self._players)
                self.current_player_index = i
                valid = (
                    self.current_player and
                    self.current_player in self.mapper.remaining_players
                )
                if valid:
                    break
        # 10 base action points, plus 2 per townhall beyond the first.
        townhall_count = self.mapper.get_townhalls_count(self.current_player)
        self.current_player.action_points = 10 + (townhall_count-1)*2
        for p in [p for p in self.players if p is not self.current_player]:
            p.send('nextPlayer', {'nickname': self.current_player.name})
        self.current_player.send('yourTurn', self.current_player.state)

    def remove_player(self, player):
        """Drop a player from the registry and tell the others.

        Before the game starts the slot is deleted outright; afterwards it
        is nulled so the remaining players keep their slot indices.
        """
        try:
            if not self.game_started:
                self._players.remove(player)
            else:
                i = self._players.index(player)
                self._players[i] = None
        except ValueError:  # lol
            pass
        # Notify others
        for p in self.players:
            p.send('playerLeft', {'nickname': player.name})

    def end_game(self, winner):
        """Announce the winner, compact the player list (dropping nulled
        slots), and reset turn state for a possible next game.
        """
        for p in self.players:
            p.send('gameEnd', {'winner': winner.name})
        self._players = [p for p in self.players]
        self.game_started = False
        self.current_player_index = None
Ejemplo n.º 42
0
class Navigator(object):
    '''
    The Navigator encapsulates the core Robot logic and is responsible for
    the following:
        - Maintaining the current location and heading values on behalf of
            the Robot given the movements and rotations issued while
            exploring.
        - Deciding the best rotation and movement to make while exploring
            the maze given the current state of the Navigator's Mapper instance.
        - Finding the optimal path through the maze.

    NOTE(review): this module is Python 2 (`print` statements below).
    '''
    def __init__(self, maze_dim):
        # Side length of the (square) maze, in cells.
        self.maze_dim = maze_dim

        # Robot pose: location is an (x, y) cell tuple; heading is one of
        # 'up', 'right', 'down', 'left'.
        self.start_location = (0, 0)
        self.location = self.start_location
        self.heading = 'up'
        self.latest_sensor_reading = None

        # Populated once found_optimal_path() succeeds.
        self.optimal_path = None
        self.optimal_steps = None
        self.goal_visited = False

        # State used to escape 'ping-pong' loops during exploration.
        self.take_additional_steps = False
        self.additional_step_instructions = []
        self.nodes_visited = [self.start_location]
        self.take_second_step = False
        self.second_step_instructions = None

        self.mapper = Mapper(maze_dim)

    def update_map(self, sensors):
        '''
        Save the latest sensor readings and the mapper to update its knowledge
        about the maze walls.
        '''
        self.latest_sensor_reading = sensors
        self.mapper.update_wall_knowledge(self.location, self.heading, sensors)

    def explore(self):
        '''
        Decide the rotation and movement the robot should make in order to
        maximise knowledge of the maze.
        '''
        if self.mapper.goal_location == self.location and not self.goal_visited:
            self.goal_visited = True

        step = self.calculate_next_step()
        rotation, movement = step

        # Debug trace: sensors ||| location | heading ||| chosen step.
        print "{} ||| {} | {} ||| {}".format(self.latest_sensor_reading,
                                             self.location, self.heading, step)
        self.mapper.pretty_print_maze_map(self.location, self.heading)

        # Apply the step to our own pose bookkeeping before returning it.
        self.location = self.calculate_node(self.location, self.heading, step)
        self.heading = self.calculate_heading(self.heading, rotation)

        self.nodes_visited.append(self.location)

        return step

    def calculate_next_step(self):
        '''
        In order to improve the efficiency of the Robot's traversal of the maze
        and to avoid the Robot getting stuck 'ping-ponging' between two nodes
        indefinitely, the Navigator will attempt to follow the first two steps
        of each calculated path through the maze. The only cases when this isn't
        done is when the path has only one step or when the second step becomes
        invalid as a result of the sensor readings following the first step.

        When attempting to calculate a new path, the target node is either the
        closest node with the greatest uncertainty or the goal node if its
        location is known, but has not yet been visited.
        '''
        loc = self.location
        heading = self.heading

        # Anti-ping-pong: replay queued steps as long as they remain valid.
        if self.take_additional_steps:
            next_step = self.additional_step_instructions.pop()
            next_node = self.calculate_node(loc, heading, next_step)
            if next_node != None and self.move_is_valid(loc, next_node):
                self.take_additional_steps = len(
                    self.additional_step_instructions) > 0
                return next_step
            else:
                self.take_additional_steps = False

        # Prefer the second step of the previously computed path, if still valid.
        second_step_node = None
        second_step = self.second_step_instructions

        if self.take_second_step and second_step != None:
            second_step_node = self.calculate_node(loc, heading, second_step)

        if second_step_node != None and self.move_is_valid(
                loc, second_step_node):
            self.take_second_step = False
            return second_step

        # Navigate to the location of the maze with least knowledge.
        target = self.closest_least_certain_node()
        # If the goal has been found, but not yet visited, go there instead.
        if not self.goal_visited and self.mapper.goal_found():
            target = self.mapper.goal_location
        maze_graph = self.convert_maze_map_to_graph()
        path = self.best_path_through_graph(maze_graph, loc, target)
        steps = self.convert_path_to_steps(path, heading)

        # If we detect a repeat loop, queue enough steps to break out of it.
        repeat_length = self.check_for_repeats_in_visited_nodes()
        if repeat_length > 0:
            self.take_additional_steps = True
            self.additional_step_instructions = steps[1:repeat_length + 1]

        if len(steps) > 1:
            self.take_second_step = True
            self.second_step_instructions = steps[1]
        else:
            self.second_step_instructions = None

        return steps[0]

    def check_for_repeats_in_visited_nodes(self):
        '''
        Check to see if the Robot is stuck 'ping-ponging' between a set of nodes. This checks for repeated paths of lengths between 2 and 6. The robot is considered to be in a stuck state if it follows a path, retraces its steps, and then follows the original path again. It is assumed that if this happens, the Robot would continue this pattern indefinitely.
        '''
        loop_lengths = range(2, 6)
        robot_is_stuck = False
        repeat_length = 0
        for length in loop_lengths:
            # Compare the last `length` nodes with the two preceding,
            # overlapping windows (the middle one reversed, i.e. retraced).
            first_path = self.nodes_visited[-length:]
            second_path = self.nodes_visited[-length * 2 + 1:-length + 1]
            second_path.reverse()
            third_path = self.nodes_visited[-length * 3 + 2:-length * 2 + 2]
            if first_path == second_path and second_path == third_path:
                repeat_length = length
                break

        return repeat_length

    def closest_least_certain_node(self):
        '''
        Find the node with the greatest uncertainty (greatest number of
        possible shapes) that is closest to the current location.
        '''
        uncertainties = self.mapper.cell_possibilities
        max_uncertainty = max([max(column) for column in uncertainties])
        peak_locations = []
        for i in range(self.maze_dim):
            for j in range(self.maze_dim):
                if uncertainties[i][j] == max_uncertainty:
                    peak_locations.append((i, j))
        # Of all equally-uncertain cells, keep the one nearest to us.
        closest_peak = peak_locations[0]
        if len(peak_locations) > 1:
            loc = self.location
            for k in range(len(peak_locations)):
                dist_a = self.distance_between_nodes(loc, closest_peak)
                dist_b = self.distance_between_nodes(loc, peak_locations[k])
                if dist_a > dist_b:
                    closest_peak = peak_locations[k]
        return closest_peak

    def convert_maze_map_to_graph(self,
                                  fastest_route=False,
                                  treat_unknown_as_walls=False):
        '''
        Convert the maze map to an undirected graph.
        - If fastest_route, allow the path to include steps with maximum strides
            even if this means moving past unvisited nodes.
        - If treat_unknown_as_walls, prevent the path from passing between nodes
            when the state of the wall / opening between them is unknown.
        '''
        graph = {}
        open_list = set([self.start_location])

        while len(open_list) > 0:
            # Pop the next element of the open_list and set it as the current
            # location.
            location = open_list.pop()
            # If the current location is a key in the graph, move to the next
            # iteration.
            if location in graph.keys():
                # NOTE(review): `next` here is the builtin, not a `continue`;
                # this branch is a no-op and control falls through to the
                # loop below. Locations appear to only enter open_list while
                # absent from the graph, so the branch looks unreachable —
                # confirm before changing.
                next
            else:
                graph[location] = []
            # From current location, add all valid movements from 1-3 in all
            # four directions to the graph and the open_list.
            x, y = location
            for direction in ['up', 'right', 'down', 'left']:
                for i in range(1, 4):
                    tx, ty = x, y
                    if direction == 'up':
                        tx = x + i
                    elif direction == 'right':
                        ty = y + i
                    elif direction == 'down':
                        tx = x - i
                    elif direction == 'left':
                        ty = y - i

                    target = (tx, ty)

                    if self.move_is_valid(location, target,
                                          treat_unknown_as_walls):
                        graph[location].append(target)
                        if target not in graph.keys():
                            open_list.add(target)
                        # Unless the path should include the fastest route,
                        # ensure that the graph does not allow skipping over
                        # unexplored nodes. This helps improve the efficacy
                        # of exploration.
                        if not fastest_route and self.mapper.cell_possibilities[
                                tx][ty] > 1:
                            break
                    else:
                        break

        return graph

    def move_is_valid(self, location, target, treat_unknown_as_walls=False):
        '''
        Will moving from location to target, given the current knowledge of the
        maze, result in hitting a wall?
        - If treat_unknown_as_walls, an attempt to move from location to target
            through a wall / opening of unknown state is considered invalid.
        '''
        valid_move = True
        x, y = location
        tx, ty = target

        # Wall-state codes checked against the mapper's walls grid.
        # NOTE(review): the 2*x / 2*x+1 indexing implies walls interleave
        # cells, and -1 presumably means 'unknown' — confirm against Mapper.
        wall_values = [1]
        if treat_unknown_as_walls:
            wall_values.append(-1)

        if y == ty:
            # Horizontal move: check every wall segment crossed along x.
            if tx < 0 or tx >= self.maze_dim:
                valid_move = False
            elif x < tx:
                for i in range(tx - x):
                    if self.mapper.walls[2 * (x + i + 1)][y] in wall_values:
                        valid_move = False
                        break
            else:
                for i in range(x - tx):
                    if self.mapper.walls[2 * (x - i)][y] in wall_values:
                        valid_move = False
                        break
        else:
            # Vertical move: check every wall segment crossed along y.
            if ty < 0 or ty >= self.maze_dim:
                valid_move = False
            elif y < ty:
                for i in range(ty - y):
                    if self.mapper.walls[2 * x + 1][y + i + 1] in wall_values:
                        valid_move = False
                        break
            else:
                for i in range(y - ty):
                    if self.mapper.walls[2 * x + 1][y - i] in wall_values:
                        valid_move = False
                        break

        return valid_move

    def best_path_through_graph(self,
                                graph,
                                start,
                                target,
                                print_path_costs=False):
        '''
        Use Dijkstra's algorithm to find the fastest path from start to target
        through the the given undirected graph.
        '''
        optimal_path = []

        # Make sure the target is in the graph
        if target in graph.keys():
            # Assign to every node a tentative distance value: set it to zero for
            # our initial node and to infinity for all other nodes.

            # Upper bound stand-in for infinity (more cells than any path).
            largest_possible_cost = self.maze_dim**2

            path_costs = {}

            # Used for sorting by path cost.
            cost_for_node = lambda n: path_costs[n]

            for node in graph.keys():
                path_costs[node] = largest_possible_cost
            path_costs[start] = 0

            # Set the initial node as current. Mark all other nodes unvisited.
            # Create a set of all the unvisited nodes called the unvisited set.
            current_node = start
            # NOTE(review): copying dict.keys() yields a list on Python 2
            # (this file uses Python 2 print statements); on Python 3 this
            # would copy a view that does not support remove().
            unvisited_list = copy.copy(graph.keys())

            while len(unvisited_list) > 0:
                # For the current node, consider all of its neighbours and
                # calculate their tentative distances. Compare the newly
                # calculated tentative distance to the current assigned value
                # and assign the smaller one otherwise, keep the current value.

                # Every edge has unit cost, hence +1.
                distance = path_costs[current_node] + 1
                for neighbour in graph[current_node]:
                    if path_costs[neighbour] > distance:
                        path_costs[neighbour] = distance

                # When we are done considering all of the neighbors of the current
                # node, mark the current node as visited and remove it from the
                # unvisited set. A visited node will never be checked again.

                unvisited_list.remove(current_node)

                if len(unvisited_list) > 0:
                    # Select the unvisited node that is marked with the
                    # smallest tentative distance, set it as the new
                    # "current node", and go back to the beginning of the loop.
                    current_node = sorted(unvisited_list, key=cost_for_node)[0]

            if print_path_costs:
                print 'Path costs for each explored space within the maze:'
                self.mapper.pretty_print_maze_map((0, 0), 'up', path_costs)

            optimal_path.append(target)
            current_node = target
            # Construct the optimal path by following the gradient of path costs
            # from the goal to the start.
            while start not in optimal_path:
                current_node = sorted(graph[current_node],
                                      key=cost_for_node)[0]
                optimal_path = [current_node] + optimal_path

        return optimal_path

    def convert_path_to_steps(self, path, initial_heading):
        '''
        Convert the given path to a list of step instructions
        (rotation, movement) given the initial heading.
        '''
        start = path.pop(0)
        heading = initial_heading
        steps = []
        deltas = self.convert_path_to_deltas_max_3(start, path)
        for delta_x, delta_y in deltas:
            up = heading == 'up'
            right = heading == 'right'
            down = heading == 'down'
            left = heading == 'left'
            rotation = 0
            # Moving against the current heading: reverse without rotating.
            if ((up and delta_y < 0) or (right and delta_x < 0)
                    or (down and delta_y > 0) or (left and delta_x > 0)):
                movement = -max(abs(delta_x), abs(delta_y))
            else:
                # Otherwise pick the 90-degree rotation (if any) that aligns
                # the heading with the movement direction.
                if delta_y == 0:
                    if delta_x > 0:
                        if up:
                            rotation = 90
                        elif down:
                            rotation = -90
                    else:
                        if up:
                            rotation = -90
                        elif down:
                            rotation = 90
                else:
                    if delta_y > 0:
                        if left:
                            rotation = 90
                        elif right:
                            rotation = -90
                    else:
                        if left:
                            rotation = -90
                        elif right:
                            rotation = 90
                movement = max(abs(delta_x), abs(delta_y))
            steps.append((rotation, movement))
            heading = self.calculate_heading(heading, rotation)

        return steps

    def convert_path_to_deltas_max_3(self, start, path):
        '''
        Break down the path to the x/y difference between each node in the
        path with a maximum change of 3. This will ensure that maximum movement
        made by the Robot while navigating path is not exceeded.
        '''
        x, y = start
        deltas = []
        for node_x, node_y in path:
            if y == node_y:
                # Horizontal leg: emit strides of +/-3 until remainder fits.
                step = node_x - x
                while step > 3 or step < -3:
                    if step > 0:
                        deltas.append((3, 0))
                        step -= 3
                    else:
                        deltas.append((-3, 0))
                        step += 3
                deltas.append((step, 0))
            else:
                # Vertical leg: same striding along y.
                step = node_y - y
                while step > 3 or step < -3:
                    if step > 0:
                        deltas.append((0, 3))
                        step -= 3
                    else:
                        deltas.append((0, -3))
                        step += 3
                deltas.append((0, step))

            x, y = node_x, node_y
        return deltas

    def found_optimal_path(self):
        '''
        Determine whether the optimal path through the maze has been found.
        If this is the first time the optimal path has been found, save it.
        '''
        if not self.mapper.goal_found():
            return False
        goal_location = self.mapper.goal_location

        # print "Goal Location is: {}".format(goal_location)

        if self.optimal_path != None:
            return True

        known_maze_graph = self.convert_maze_map_to_graph(True, True)
        if goal_location not in known_maze_graph.keys():
            print "Goal not yet navigable!"
            return False

        open_maze_graph = self.convert_maze_map_to_graph(True, False)

        # Compare the best path through the maze assuming all unknown walls
        # are walls vs all unknown walls are opennings. If the path lengths are
        # the same, the optimal path has been found.
        shortest_known_path = self.best_path_through_graph(
            known_maze_graph, self.start_location, goal_location)
        shortest_possible_path = self.best_path_through_graph(
            open_maze_graph, self.start_location, goal_location)
        optimal_path_found = len(shortest_known_path) == len(
            shortest_possible_path)

        if optimal_path_found:
            self.optimal_path = shortest_known_path
            self.optimal_steps = self.convert_path_to_steps(
                self.optimal_path, 'up')
        return optimal_path_found

    def print_maze_with_path_costs(self):
        '''
        Print the explored map including the path costs for each explored cell.
        '''
        if not self.mapper.goal_found():
            print "Can not print maze with path costs. The goal has not been found."
            return False
        known_maze_graph = self.convert_maze_map_to_graph(True, True)
        self.best_path_through_graph(known_maze_graph, self.start_location,
                                     self.mapper.goal_location, True)

    # Navigation utility methods:

    def distance_between_nodes(self, a, b):
        ''' Return the distance between the two given nodes. '''
        xa, ya = a
        xb, yb = b
        return math.hypot(xb - xa, yb - ya)

    def calculate_node(self, location, heading, instructions):
        '''
        Given a location and heading, determine which node a set of instructions
        would lead to.
        '''
        rotation, movement = instructions
        x, y = location
        up, right, down, left, ccw, fwd, cw = self.heading_rotation_bools(
            heading, rotation)
        # Each (heading, rotation) pair resolves to one of the four axes.
        if (up and ccw) or (down and cw) or (left and fwd):
            x -= movement
        elif (up and fwd) or (right and ccw) or (left and cw):
            y += movement
        elif (up and cw) or (right and fwd) or (down and ccw):
            x += movement
        elif (right and cw) or (down and fwd) or (left and ccw):
            y -= movement

        return (x, y)

    def calculate_heading(self, heading, rotation):
        '''
        Given a heading and rotation, what would the new heading be if the
        rotation was made?
        '''
        up, right, down, left, ccw, fwd, cw = self.heading_rotation_bools(
            heading, rotation)
        if fwd:
            return heading
        if (ccw and up) or (cw and down):
            return 'left'
        if (ccw and right) or (cw and left):
            return 'up'
        if (ccw and down) or (cw and up):
            return 'right'
        if (ccw and left) or (cw and right):
            return 'down'

    def heading_rotation_bools(self, heading, rotation):
        '''
        Convert the heading and rotation values to booleans.
        '''
        up = heading == 'up'
        right = heading == 'right'
        down = heading == 'down'
        left = heading == 'left'
        # counterclockwise
        ccw = rotation == -90
        # forward
        fwd = rotation == 0
        # clockwise
        cw = rotation == 90
        return up, right, down, left, ccw, fwd, cw
Ejemplo n.º 43
0
# You may uncomment the smaller graphs for development and testing purposes.
# map_file = "maps/test_line.txt"
# map_file = "maps/test_cross.txt"
# map_file = "maps/test_loop.txt"
# map_file = "maps/test_loop_fork.txt"
map_file = "maps/main_maze.txt"

# Loads the map into a dictionary.
# Fix: use a context manager so the file handle is closed promptly instead
# of being leaked by an anonymous open(...).read().
with open(map_file, "r") as map_fh:
    room_graph = literal_eval(map_fh.read())
world.load_graph(room_graph)
# print(room_graph)

# Print an ASCII map
world.print_rooms()

mapper = Mapper(world.starting_room)

## Map Dictionary
# this is a map of every room in the graph and the directions it has in order to use it to help with traversal while walking the maze
map_dict = {}

## Labyrinth String
# Add each step made through the maze, until no new directions available (and not pivot paths). Start at end of lab string and step back until stack direction is available
lab_string = []

## Opposites Table
# Used for swapping directionals around for connections
opposites = {'n': 's', 'e': 'w', 's': 'n', 'w': 'e'}

## Cannot use stack??
# stack:{000, E}, {104, S}
Ejemplo n.º 44
0
class ProviderManager(BaseProviderClass):
    """Provider manager backed by libcloud: validates the provider
    configuration and provisions / tears down the management topology.
    """

    schema = PROVIDER_CONFIG_SCHEMA

    # Config entries whose values get adjusted by the base class.
    # NOTE(review): exact semantics are defined in BaseProviderClass — confirm.
    CONFIG_NAMES_TO_MODIFY = (
        ('networking', 'agents_security_group'),
        ('networking', 'management_security_group'),
        ('compute', 'management_server', 'instance'),
        ('compute', 'management_server', 'management_keypair'),
        ('compute', 'agent_servers', 'agents_keypair'),
    )

    # Config entries that hold filesystem paths.
    CONFIG_FILES_PATHS_TO_MODIFY = (
        ('compute', 'agent_servers', 'private_key_path'),
        ('compute', 'management_server', 'management_keypair',
         'private_key_path'),
    )

    def __init__(self, provider_config=None, is_verbose_output=False):
        """Resolve the configured cloud provider name and build its Mapper."""
        super(ProviderManager, self).\
            __init__(provider_config,
                     is_verbose_output)
        provider_name = provider_config['connection']['cloud_provider_name']
        provider_name = transfer_cloud_provider_name(provider_name)
        # Imported lazily, matching the original module's import strategy.
        from mapper import Mapper
        self.mapper = Mapper(provider_name)

    def validate(self):
        """Validate the cloud resources for the configured provider.

        Returns a dict of validation errors; empty when validation succeeded.
        Raises RuntimeError when the provider mapper failed to initialize.
        """
        connection_conf = self.provider_config['connection']
        if not self.mapper.is_initialized():
            raise RuntimeError('Error during trying to create context'
                               ' for a cloud provider: {0}'.format(
                                   connection_conf['cloud_provider_name']))

        connector = LibcloudConnector(connection_conf)
        validation_errors = {}

        validator = self.mapper.generate_validator(connector,
                                                   self.provider_config,
                                                   validation_errors,
                                                   self.logger)

        validator.validate()

        # Fixed: a conditional *expression* was being used as a statement
        # purely for its logging side effects, which obscured control flow.
        if validation_errors:
            self.logger.error('resource validation failed!')
        else:
            self.logger.info('resources validated successfully')

        return validation_errors

    def provision(self):
        """Create the management topology and copy setup files onto it.

        Returns (public_ip, private_ip, ssh_key, ssh_user, provider_context).
        """
        driver = self.get_driver(self.provider_config)
        public_ip, private_ip, ssh_key, ssh_user, provider_context = \
            driver.create_topology()
        driver.copy_files_to_manager(public_ip, ssh_key, ssh_user)
        return public_ip, private_ip, ssh_key, ssh_user, provider_context

    def teardown(self, provider_context, ignore_validation=False):
        """Delete every resource recorded in provider_context['resources']."""
        driver = self.get_driver(self.provider_config)
        driver.delete_topology(ignore_validation,
                               provider_context['resources'])

    def get_driver(self, provider_config, provider_context=None):
        """Build the cosmo driver for the given config and (optional) context."""
        provider_context = provider_context if provider_context else {}
        connector = LibcloudConnector(provider_config['connection'])
        return self.mapper.generate_cosmo_driver(connector, provider_context,
                                                 provider_config)
Ejemplo n.º 45
0
import numpy as np
from mapper import Mapper

# Load a pre-saved point cloud of a lion model.
# NOTE(review): assumed to be an (N, 3) float array — confirm the .npy file.
points = np.load('point_clouds/lion.npy')

# Mapper configured with 7 overlapping bins over a distance-from-point filter.
mapper = Mapper(bins=7, overlap=0.1, filter_function='distance_from_point')

# Fit, then render every diagnostic view of the resulting complex.
graph = mapper.fit(points)
mapper.plot_vertices()
mapper.plot_intervals()
mapper.plot_clusters()
mapper.plot_graph()
mapper.plot_graph_in_plane()  # Disconnected tail.
mapper.plot_persistence_homology()
Ejemplo n.º 46
0
    d = np.linspace(0, 2 * np.pi, points, endpoint=False)
    x = np.cos(d)
    y = np.sin(d)
    z = np.zeros(points)

    # Add Gaussian noise.
    x = np.random.normal(x, noise)
    y = np.random.normal(y, noise)

    return np.vstack((x, y, z)).T


# Sample a noisy circle and run it through Mapper.
points = noisy_circle()
print(points.shape)

# Filter on the y coordinate (coordinate=1) with 3 bins; cluster each bin
# with average-linkage agglomerative clustering at distance threshold 1.5.
mapper = Mapper(
    coordinate=1,
    bins=3,
    clustering_function="agglomerative",
    linkage="average",
    distance=1.5,
)

# Fit, then render every diagnostic view of the resulting complex.
graph = mapper.fit(points)
mapper.plot_vertices()
mapper.plot_intervals()
mapper.plot_clusters()
mapper.plot_graph()
mapper.plot_graph_in_plane()
mapper.plot_persistence_homology()
Ejemplo n.º 47
0
import numpy as np
from mapper import Mapper

# Read the raw point cloud: one space-separated coordinate tuple per line.
with open('point_clouds/chair.txt') as f:
    data = f.readlines()

points = np.array([[float(value) for value in line.strip().split(' ')]
                   for line in data])

# Filter on the y coordinate with 8 bins and a small 5% overlap.
mapper = Mapper(overlap=0.05, bins=8, coordinate=1)

# Fit, then render every diagnostic view of the resulting complex.
graph = mapper.fit(points)
mapper.plot_vertices()
mapper.plot_intervals()
mapper.plot_clusters()
mapper.plot_graph()
mapper.plot_graph_in_plane(seed=22)
mapper.plot_persistence_homology()
Ejemplo n.º 48
0
 def setUp(self):
     # Fresh Mapper per test, backed by the mm10 refFlat annotation file.
     self.mapper = Mapper("data/refFlatMm10.txt")
Ejemplo n.º 49
0
 def requires(self):
     # Dependency hook: this task requires the Mapper task to run first.
     # NOTE(review): presumably a luigi.Task requires() override — confirm.
     return Mapper()
Ejemplo n.º 50
0
from prometheus_client import make_wsgi_app
from utils import get_env_vars
from pytimeparse import parse
from logger import Logger
from mapper import Mapper
from state import State
from time import sleep
from datetime import datetime
import traceback
import threading

# Read configuration from the environment once at startup.
env_vars = get_env_vars()

# Shared singletons: logger, metric state store, and payload->state mapper.
log = Logger(env_vars)
st = State(env_vars)
ma = Mapper(env_vars, log)

# Define the endpoint for the state manipulation
# NOTE(review): Flask / request / jsonify are not imported in the visible
# import block — presumably imported elsewhere in this file; confirm.
app = Flask(__name__)


@app.route('/states', methods=['POST'])
def states():
    # Apply the posted payload to the mapper, update the counters, and
    # return the resulting state as JSON.
    try:
        log.info(request.get_data())
        req_data = request.get_json(force=True)
        ma.apply_mapping(req_data)
        st.check_inc_dec(req_data)
        return jsonify(st.current_state)
    except Exception as err:
        # NOTE(review): this handler appears truncated in this excerpt —
        # `now` is computed but nothing is logged or returned on the error
        # path; confirm the remainder of the except block in the full file.
        now = datetime.utcnow()
Ejemplo n.º 51
0
    def parseFile(self):
        """Build the block set from self.lines and derive its path map."""
        self.blockSet = BlockSet()
        self.blockSet.build(self.lines)
        self.mapper = Mapper(self.blockSet)
        self.mapper.buildPaths()
Ejemplo n.º 52
0
class Test_Mapper_Preserve_Mapping(unittest.TestCase):
    """Tests for Mapper.generate_mapping in "Preserve" mode: existing
    UID->ethnicity assignments must survive re-generation across overlapping
    data sets."""

    def setUp(self):
        """Build a Mapper with a synthetic ethnicity map and parse fixtures."""
        # TODO: we need prf_map, prf_imgs and prf_eth_map
        self.rtfparser = RTF_Parser()
        self.pm = Profile_Manager("No Profile", "newganmanager/testing/")
        self.mapper = Mapper("newganmanager/test/", self.pm)
        self.pm.prf_cfg["img_dir"] = "newganmanager/test/"
        # data: UID, first_nat, sec_nat, eth-code
        self.data_simple = self.rtfparser.parse_rtf(
            "newganmanager/test/test_simple.rtf")
        self.data_all_cases = self.rtfparser.parse_rtf(
            "newganmanager/test/test_allcases.rtf")
        self.data_subset1 = self.rtfparser.parse_rtf(
            "newganmanager/test/allcases_subset1.rtf")
        self.data_subset2 = self.rtfparser.parse_rtf(
            "newganmanager/test/allcases_subset2.rtf")
        self.data_exclusive = self.rtfparser.parse_rtf(
            "newganmanager/test/test_exclusive.rtf")
        for eth in [
                "African", "Asian", "EECA", "Italmed", "SAMed",
                "South American", "SpanMed", "YugoGreek", "MENA", "MESA",
                "Caucasian", "Central European", "Scandinavian", "Seasian"
        ]:
            # Fixed: the local was named `map`, shadowing the builtin.
            eth_faces = [eth + str(i) for i in range(20)]
            self.mapper.eth_map[eth] = eth_faces

    def tearDown(self):
        """Restore the pristine config/user fixtures mutated by each test."""
        shutil.rmtree("newganmanager/testing/.config/")
        shutil.copytree("newganmanager/.config/",
                        "newganmanager/testing/.config/")
        shutil.rmtree("newganmanager/testing/.user/")
        shutil.copytree("newganmanager/.user/", "newganmanager/testing/.user/")
        with open("newganmanager/test/config.xml", "w") as cfg:
            cfg.write('OUTSIDE')

    def test_preserve_mapping_simple(self):
        """Two-entry fixture maps to the expected (UID, ethnicity) pairs."""
        mapping = self.mapper.generate_mapping(self.data_simple, "Preserve")
        self.assertEqual("SpanMed", mapping[0][1])
        self.assertEqual("1915714540", mapping[0][0])
        self.assertEqual("MESA", mapping[1][1])
        self.assertEqual("1915576430", mapping[1][0])

    def test_preserve_mapping_all_cases(self):
        """Every ethnicity code in the full fixture maps as expected."""
        mapping = self.mapper.generate_mapping(self.data_all_cases, "Preserve")
        self.assertEqual("SpanMed", mapping[0][1])
        self.assertEqual("1915714540", mapping[0][0])
        self.assertEqual("MESA", mapping[1][1])
        self.assertEqual("1915576430", mapping[1][0])
        self.assertEqual("Italmed", mapping[2][1])
        self.assertEqual("1915576432", mapping[2][0])
        self.assertEqual("EECA", mapping[3][1])
        self.assertEqual("1915576433", mapping[3][0])
        self.assertEqual("SAMed", mapping[4][1])
        self.assertEqual("1915576434", mapping[4][0])
        self.assertEqual("South American", mapping[5][1])
        self.assertEqual("1915576435", mapping[5][0])
        self.assertEqual("YugoGreek", mapping[6][1])
        self.assertEqual("1915576436", mapping[6][0])
        self.assertEqual("African", mapping[7][1])
        self.assertEqual("1915576437", mapping[7][0])
        self.assertEqual("African", mapping[8][1])
        self.assertEqual("1915576438", mapping[8][0])
        self.assertEqual("African", mapping[9][1])
        self.assertEqual("1915576439", mapping[9][0])
        self.assertEqual("African", mapping[10][1])
        self.assertEqual("1915576440", mapping[10][0])
        self.assertEqual("African", mapping[11][1])
        self.assertEqual("1915576441", mapping[11][0])
        self.assertEqual("Asian", mapping[12][1])
        self.assertEqual("1915576442", mapping[12][0])
        self.assertEqual("MENA", mapping[13][1])
        self.assertEqual("1915576444", mapping[13][0])
        self.assertEqual("Seasian", mapping[14][1])
        self.assertEqual("1915576445", mapping[14][0])
        self.assertEqual("Scandinavian", mapping[15][1])
        self.assertEqual("1915576446", mapping[15][0])
        self.assertEqual("Caucasian", mapping[16][1])
        self.assertEqual("1915576447", mapping[16][0])
        self.assertEqual("Central European", mapping[17][1])
        self.assertEqual("1915576448", mapping[17][0])
        self.assertEqual("MESA", mapping[18][1])
        self.assertEqual("1915576450", mapping[18][0])

    def test_preserve_mapping_double(self):
        """Re-generating the same data yields an identical mapping."""
        simple_mapping = self.mapper.generate_mapping(self.data_simple,
                                                      "Preserve")
        self.pm.write_xml(simple_mapping)
        next_mapping = self.mapper.generate_mapping(self.data_simple,
                                                    "Preserve")
        self.pm.write_xml(next_mapping)
        self.assertSequenceEqual(simple_mapping, next_mapping)

    def test_preserve_mapping_double_exclusive(self):
        """Disjoint data appends new entries while preserving old ones."""
        simple_mapping = self.mapper.generate_mapping(self.data_simple,
                                                      "Preserve")
        self.pm.write_xml(simple_mapping)
        next_mapping = self.mapper.generate_mapping(self.data_exclusive,
                                                    "Preserve")
        self.pm.write_xml(next_mapping)
        self.assertEqual(simple_mapping, next_mapping[2:])
        self.assertEqual(len(next_mapping), 4)

    def test_preserve_mapping_complete_subset(self):
        """A superset re-run keeps the earlier subset's assignments first."""
        simple_mapping = self.mapper.generate_mapping(self.data_simple,
                                                      "Preserve")
        self.pm.write_xml(simple_mapping)
        next_mapping = self.mapper.generate_mapping(self.data_all_cases,
                                                    "Preserve")
        self.pm.write_xml(next_mapping)
        self.assertEqual(simple_mapping, next_mapping[:2])

    def test_preserve_mapping_complete_subset_reverse(self):
        """Re-running a subset after the superset reproduces its entries."""
        next_mapping = self.mapper.generate_mapping(self.data_all_cases,
                                                    "Preserve")
        self.pm.write_xml(next_mapping)
        simple_mapping = self.mapper.generate_mapping(self.data_simple,
                                                      "Preserve")
        self.pm.write_xml(simple_mapping)
        self.assertEqual(simple_mapping, next_mapping)

    def test_preserve_mapping_partial_subset(self):
        """Overlapping subsets share assignments for their common UIDs."""
        sub2_mapping = self.mapper.generate_mapping(self.data_subset2,
                                                    "Preserve")
        self.pm.write_xml(sub2_mapping)
        sub1_mapping = self.mapper.generate_mapping(self.data_subset1,
                                                    "Preserve")
        self.pm.write_xml(sub1_mapping)
        self.assertEqual(sub1_mapping[:5], sub2_mapping[:5])
        self.assertIn(sub2_mapping[5], sub1_mapping)
        self.assertIn(sub2_mapping[6], sub1_mapping)
        self.assertIn(sub2_mapping[7], sub1_mapping)
        self.assertIn(sub2_mapping[8], sub1_mapping)
        self.assertIn(sub2_mapping[9], sub1_mapping)
        self.assertEqual(len(sub1_mapping), 12)

    def test_preserve_mapping_partial_subset_reverse(self):
        """Same as above with the subset order reversed."""
        sub1_mapping = self.mapper.generate_mapping(self.data_subset1,
                                                    "Preserve")
        self.pm.write_xml(sub1_mapping)
        sub2_mapping = self.mapper.generate_mapping(self.data_subset2,
                                                    "Preserve")
        self.pm.write_xml(sub2_mapping)
        self.assertEqual(sub1_mapping[:5], sub2_mapping[:5])
        self.assertEqual(len(sub2_mapping), 12)
Ejemplo n.º 53
0
class GameMode():
    """Manage game rules and flow: world state, input and the frame surface."""

    def __init__(self):
        """Create the render surface, the world map and the input controller."""
        self.gameScreen = pygame.Surface(settings.SCREEN_SIZE)
        self.world = Mapper()
        self.turnTime = settings.TURN_TIME
        self.controller = Controller()
        self.running = False

    def startGame(self):
        """Load the map and spawn the initial worker near the table corner."""
        self.world.load_map()
        worker = Worker()
        self.world.spawn_object(
            worker,
            (settings.TABLE_SIZE[0]-5, settings.TABLE_SIZE[1]-15))

    def update_all(self):
        """Advance the world one tick and redraw it onto the game surface."""
        self.world.update()
        self.gameScreen.blit(self.world.get_surface(), (0, 0))

    def quit(self):
        """Signal the main loop to stop."""
        self.running = False

    def mouse_action(self, event):
        """Dispatch a pygame mouse event to the controller."""
        # Event types are mutually exclusive, so elif avoids re-testing
        # once a branch has matched.
        if event.type == pygame.MOUSEBUTTONDOWN:
            self.controller.mouse_button_down(
                self.world,
                event.button,
                event.pos)
        elif event.type == pygame.MOUSEBUTTONUP:
            self.controller.mouse_button_up(
                self.world,
                event.button,
                event.pos)
        elif event.type == pygame.MOUSEMOTION:
            self.controller.mouse_motion(
                self.world,
                event.buttons,
                event.pos,
                event.rel)

    def save(self):
        """Persist the current map."""
        self.world.save_map()

    def load(self):
        """Reload the map from storage."""
        self.world.load_map()

    # Backward-compatible alias: the method was originally misspelled
    # "laod"; keep the old name working for existing callers.
    laod = load

    def get_game_screen(self):
        """Return the surface the game is rendered onto."""
        return self.gameScreen
 def __init__(self):
     # Create the Mapper instance this object delegates mapping work to.
     # NOTE(review): Mapper comes from a module-level import not visible here.
     self.mapper = Mapper()
Ejemplo n.º 55
0
 def _mapper_instantiation(self, template):
     """Instantiate a Mapper from *template* and return its topology.

     NOTE(review): the original read ``mapper.keylists`` into an unused
     local and its docstring claimed to return the keylist; the dead read
     has been dropped — confirm the attribute access had no side effects.
     """
     logger.debug("instantiation of mapper and retrieve keylists")
     mapper = Mapper(template)
     return mapper.topology
Ejemplo n.º 56
0
def get_mapper(arguments):
    """Build a Mapper from the ``<config>`` CLI argument.

    The config file name is resolved against the ``configs`` directory
    that sits next to this module.
    """
    config_file = arguments['<config>']
    dir_path = os.path.dirname(os.path.realpath(__file__))
    # os.path.join handles path separators portably instead of
    # hand-concatenating '/' into the string.
    config_path = os.path.join(dir_path, 'configs', config_file)
    return Mapper(config_path)
Ejemplo n.º 57
0
def make_conversion_mapper(period, conversion):
    """Build a Mapper describing one conversion metric for *period*."""
    result = Mapper()
    result.prop('type', conversion.get('type'))

    prefix = conversion.get('prefix')

    # Current and previous values share the "<prefix>_<which>_<period>"
    # key naming scheme.
    current_key = '_'.join([prefix, 'current', period])
    result.project_one('value', make_data_path(current_key))

    previous_key = '_'.join([prefix, 'previous', period])
    result.project_one('prevValue', make_data_path(previous_key))

    # Only flag the mapper as final when the conversion says so.
    if conversion.get('final', False):
        result.prop('final', True)

    for indicator_conf in conversion.get('indicators', []):
        result.project_list('indicators',
                            make_indicator_mapper(period, indicator_conf))

    return result
Ejemplo n.º 58
0
class Cam:
    """Thermal camera UI.

    Reads an AMG88XX IR sensor, interpolates its readings to a 32x32
    grid and renders a false-color image, a color scale and temperature
    statistics on an SSD display. Four switches adjust the displayed
    temperature range; long presses select auto-range or animal ("hog")
    detection mode.
    """

    def __init__(self, txt_rf_ms, verbose):
        """txt_rf_ms: text refresh period in ms. verbose: print diagnostics."""
        self.txt_rf_ms = txt_rf_ms
        self.verbose = verbose
        self.tmax = 30  # Initial temperature range
        self.tmin = 15
        self.mode = _NORM
        # Enable initial update
        self.rf_disp = True
        self.rf_txt = True

        # Instantiate color mapper
        self.mapper = Mapper(self.tmin, self.tmax)

        # Instantiate switches
        self.timer = Delay_ms(duration=2000)  # Long press delay
        # Release arg rarg enables calling switch to be identified.
        for item in (('X4', self.chmax, 5, self.ar, 0), ('Y1', self.chmax, -5,
                                                         self.ar, 1),
                     ('X5', self.chmin, 5, eliza, 2), ('X6', self.chmin, -5,
                                                       eliza, 3)):
            sw, func, arg, long_func, rarg = item
            cs = Switch(Pin(sw, Pin.IN, Pin.PULL_UP))
            cs.close_func(self.press, (func, arg))
            cs.open_func(self.release, (long_func, rarg))

        # Instantiate display
        pdc = Pin('X1', Pin.OUT_PP, value=0)
        pcs = Pin('X2', Pin.OUT_PP, value=1)
        prst = Pin('X3', Pin.OUT_PP, value=1)
        # In practice baudrate made no difference to update rate which is
        # dominated by interpolation time
        spi = SPI(2, baudrate=13_500_000)
        verbose and print('SPI:', spi)
        ssd = SSD(spi, pcs, pdc, prst)  # Create a display instance
        ssd.fill(0)
        ssd.show()

        self.avg = 0.0
        # Instantiate PIR temperature sensor
        i2c = I2C(2)
        pir = AMG88XX(i2c)
        pir.ma_mode(True)  # Moving average mode

        # Run the camera
        asyncio.create_task(self.run(pir, ssd))

    # A switch was pressed. Change temperature range.
    def press(self, func, arg):
        self.timer.trigger()
        # BUGFIX: the original assigned self.mode = _NORM *before* testing
        # it against _AUTO, so the auto-clear branch could never run.
        # Capture the prior mode first.
        was_auto = self.mode == _AUTO
        self.mode = _NORM  # Any press cancels the current special mode
        if was_auto:  # Short press clears auto range, leaves range unchanged
            self.rf_disp = True
            self.rf_txt = True
        else:
            self.rf_disp = False  # Disable display updates in case it's a long press
            func(arg)  # Change range

    # Change .tmax
    def chmax(self, val):
        if self.tmax + val > self.tmin:  # val can be -ve
            self.tmax += val

    # Change .tmin
    def chmin(self, val):
        if self.tmin + val < self.tmax:
            self.tmin += val

    def release(self, func, arg):
        if self.timer.running():  # Brief press: re-enable display
            self.rf_txt = True  # Show changed range
            self.rf_disp = True
        else:
            func(arg)  # eliza will leave it with rf_disp False

    def ar(self, sw):  # Long press callback, top switch
        if sw:  # Animal detection mode
            self.mode = _HOG
            self.tmin = self.avg
            self.tmax = self.avg + 5
        else:  # Auto range
            self.mode = _AUTO
        self.rf_disp = True
        self.rf_txt = True  # Show changed range

    # Draw color scale at right of display
    def draw_scale(self, ssd):
        col = 75
        val = self.tmax
        dt = (self.tmax - self.tmin) / 31
        for row in range(32):
            ssd.rect(col, row * 2, 15, 2, ssd.rgb(*self.mapper(val)))
            val -= dt

    # Refreshing text is slow so do it periodically to maximise mean image framerate
    async def refresh_txt(self):
        while True:
            await asyncio.sleep_ms(self.txt_rf_ms)
            self.rf_txt = True

    # Run the camera
    async def run(self, pir, ssd):
        # Define colors
        white = ssd.rgb(255, 255, 255)
        black = ssd.rgb(0, 0, 0)
        red = ssd.rgb(255, 0, 0)
        blue = ssd.rgb(0, 0, 255)
        yellow = ssd.rgb(255, 255, 0)
        green = ssd.rgb(0, 255, 0)

        # Instantiate CWriters
        wri_l = CWriter(ssd, font, green, black, self.verbose)  # Large font.
        wri_s = CWriter(ssd, arial10, white, black, self.verbose)  # Small text

        # Instantiate interpolator and draw the scale
        interp = Interpolator(pir)
        self.draw_scale(ssd)

        while True:
            t = ticks_ms()  # For verbose timing
            self.mapper.set_range(self.tmin, self.tmax)
            interp.refresh()  # Acquire data
            max_t = -1000
            min_t = 1000
            sum_t = 0
            for row in range(32):
                for col in range(32):
                    # Transpose, reflect and invert
                    val = interp((31 - col) / 31, row / 31)
                    max_t = max(max_t, val)
                    min_t = min(min_t, val)
                    sum_t += val
                    ssd.rect(col * 2, row * 2, 2, 2,
                             ssd.rgb(*self.mapper(val)))
                await asyncio.sleep(0)
            self.avg = round(sum_t / 1024)
            if self.mode == _AUTO:
                self.tmin = round(min_t)
                self.tmax = round(max_t)
            if self.rf_disp:
                if self.rf_txt:
                    wri_l.set_textpos(ssd, 66, 0)
                    wri_l.printstring('Max:{:+4d}C\n'.format(int(max_t)))
                    wri_l.printstring('Min:{:+4d}C\n'.format(int(min_t)))
                    wri_l.printstring('Avg:{:+4d}C'.format(self.avg))
                    wri_s.set_textpos(ssd, 128 - arial10.height(), 64)
                    wri_s.setcolor(yellow, black)
                    wri_s.printstring('Chip:{:5.1f}C'.format(
                        pir.temperature()))
                    wri_s.set_textpos(ssd, 0, 90)
                    wri_s.setcolor(red, black)
                    wri_s.printstring('{:4d}C '.format(self.tmax))
                    wri_s.set_textpos(ssd, 28, 95)
                    wri_s.setcolor(green, black)
                    if self.mode == _HOG:
                        wri_s.printstring('Hog  ')
                    elif self.mode == _NORM:
                        wri_s.printstring('Norm  ')
                    else:
                        wri_s.printstring('Auto  ')
                    wri_s.set_textpos(ssd, 64 - arial10.height(), 90)
                    wri_s.setcolor(blue, black)
                    wri_s.printstring('{:4d}C '.format(self.tmin))
                    self.rf_txt = False
                ssd.show()
            self.verbose and print(ticks_diff(ticks_ms(), t))
            gc.collect()
Ejemplo n.º 59
0

# This is for figuring out if there's a wall in front of the robot
def wall_sense(cell, direction):
    """Return True if the maze blocks movement from *cell* in *direction*.

    Relies on the module-level ``testmaze`` set up in ``__main__``.
    """
    # Directly negate the permissibility test instead of the
    # flag-variable pattern the original used.
    return not testmaze.is_permissible(cell, direction)


if __name__ == '__main__':

    # Initialize the blank 'maze.' Requires test_maze_01 template included in file
    testmap = Mapper(str(sys.argv[1]))

    # Intitialize Mazey herself
    testmaze = Maze(testmap.walls)

    for run in range(60000):
        # Fresh robot every run, with a random color
        testrobot = Robot()
        # Reset maze to empty configuration without resetting Qtable
        testmaze.walls = np.copy(testmap.walls)
        # Reset Mazey's valid actions without resetting Qtable
        testmaze.reset()
        # Adjust exploration factor as runs apporach 60,000
        if run > 5000 and run < 10000:
            testmaze.epsilon = 0.2
        if run > 10000 and run < 15000:
Ejemplo n.º 60
0
 def setUp(self):
     """Create a Mapper with no configuration dictionary."""
     self.mapo = Mapper(None)