Example #1
    def setup_controls(self):

        self.channel_faders = bidict()
        self.bump_button_on = bidict()
        self.bump_button_off = bidict()
        self.mask_buttons = bidict()
        self.look_indicators = bidict()

        # add controls for all mixer channels for this page
        offset = self.page * self.page_size

        for chan in xrange(self.page_size):
            # tricky; need to offset the internal channel while keeping the midi
            # channel in the range 0-7 to match the APC layout.
            self.channel_faders[chan+offset] = ControlChangeMapping(chan, 0x7)
            self.bump_button_on[chan+offset] = NoteOnMapping(chan, 0x32)
            self.bump_button_off[chan+offset] = NoteOffMapping(chan, 0x32)
            self.mask_buttons[chan+offset] = NoteOnMapping(chan, 0x31)
            self.look_indicators[chan+offset] = NoteOnMapping(chan, 0x30)

        # update the controls
        self.set_callback_for_mappings(
            self.channel_faders.itervalues(), self.handle_channel_fader)
        self.set_callback_for_mappings(
            self.bump_button_on.itervalues(), self.handle_bump_button_on)
        self.set_callback_for_mappings(
            self.bump_button_off.itervalues(), self.handle_bump_button_off)
        self.set_callback_for_mappings(
            self.mask_buttons.itervalues(), self.handle_mask_button)

        # configure video channel select
        # broken out as a method as we will probably want to move this eventually
        self.setup_video_channel_select()
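
Each of these maps is a bidict so a handler can later resolve an incoming MIDI mapping back to its logical channel. A minimal sketch of that reverse lookup, using plain tuples as hypothetical stand-ins for the ControlChangeMapping objects:

from bidict import bidict

channel_faders = bidict()
page, page_size = 1, 8
offset = page * page_size
for chan in range(page_size):
    # stand-in for ControlChangeMapping(chan, 0x7)
    channel_faders[chan + offset] = ("cc", chan, 0x7)

assert channel_faders[8] == ("cc", 0, 0x7)        # logical channel -> MIDI mapping
assert channel_faders.inv[("cc", 0, 0x7)] == 8    # MIDI mapping -> logical channel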
Example #2
File: base.py Project: MariyaS/neurokernel
    def __init__(self, port_data=PORT_DATA, port_ctrl=PORT_CTRL,
                 port_time=PORT_TIME):

        # Unique object ID:
        self.id = uid()

        # Set logger name:
        LoggerMixin.__init__(self, 'man %s' % self.id)

        self.port_data = port_data
        self.port_ctrl = port_ctrl
        self.port_time = port_time

        # Set up a router socket to communicate with other topology
        # components; linger period is set to 0 to prevent hanging on
        # unsent messages when shutting down:
        self.zmq_ctx = zmq.Context()
        self.sock_ctrl = self.zmq_ctx.socket(zmq.ROUTER)
        self.sock_ctrl.setsockopt(zmq.LINGER, LINGER_TIME)
        self.sock_ctrl.bind("tcp://*:%i" % self.port_ctrl)
        
        # Data structures for instances of objects that correspond to processes
        # keyed on object IDs (bidicts are used to enable retrieval of
        # broker/module IDs from object instances):
        self.brokers = bidict.bidict()
        self.modules = bidict.bidict()

        # Set up a dynamic table to contain the routing table:
        self.routing_table = RoutingTable()

        # Number of emulation steps to run:
        self.max_steps = float('inf')

        # Set up process to handle time data:
        self.time_listener = TimeListener(self.port_ctrl, self.port_time)
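
The comment above is the reason bidicts are used for self.brokers and self.modules: the same table answers both lookups, instance from ID and ID from instance. A minimal sketch of that two-way lookup, with a bare object standing in for a module instance:

import bidict

modules = bidict.bidict()      # module ID -> module instance
mod = object()                 # hypothetical module instance
modules['mod_0'] = mod

assert modules['mod_0'] is mod       # ID -> instance
assert modules.inv[mod] == 'mod_0'   # instance -> ID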
Example #3
File: base.py Project: azukas/neurokernel
    def __init__(self, port_data=PORT_DATA, port_ctrl=PORT_CTRL):

        # Unique object ID:
        self.id = uid()

        self.logger = twiggy.log.name('manage %s' % self.id)
        self.port_data = port_data
        self.port_ctrl = port_ctrl

        # Set up a router socket to communicate with other topology
        # components; linger period is set to 0 to prevent hanging on
        # unsent messages when shutting down:
        self.zmq_ctx = zmq.Context()
        self.sock_ctrl = self.zmq_ctx.socket(zmq.ROUTER)
        self.sock_ctrl.setsockopt(zmq.LINGER, LINGER_TIME)
        self.sock_ctrl.bind("tcp://*:%i" % self.port_ctrl)

        # Set up a poller for detecting acknowledgements to control messages:
        self.ctrl_poller = zmq.Poller()
        self.ctrl_poller.register(self.sock_ctrl, zmq.POLLIN)
        
        # Data structures for storing broker, module, and connectivity instances:
        self.brok_dict = bidict.bidict()
        self.mod_dict = bidict.bidict()
        self.conn_dict = bidict.bidict()

        # Set up a dynamic table to contain the routing table:
        self.routing_table = RoutingTable()

        # Number of emulation steps to run:
        self.steps = np.inf
Example #4
def main(trials=10, card=100000, hi_lo=10000000):
    # Example with int: str
    total_time_bidict = 0
    total_time_revdict = 0
    for i in range(trials):
        ints_to_strs = construct_ints_to_strs(card, hi_lo)
        start_time = time.time()
        b = B.bidict(ints_to_strs)
        total_time_bidict += time.time() - start_time
        start_time = time.time()
        rev = {v: k for k, v in ints_to_strs}
        total_time_revdict += time.time() - start_time
    print('Example with int: str.')
    print('''In {} trials, average time for a list of cardinality {}:\n'''
            '''    bidict:        {:.4f} sec.\n'''
            '''    reversed dict: {:.4f} sec.'''.
            format(trials, card, total_time_bidict/trials,
                total_time_revdict/trials))
    # Example with str: int
    total_time_bidict = 0
    total_time_revdict = 0
    for i in range(trials):
        strs_to_ints = construct_strs_to_ints(card, hi_lo)
        start_time = time.time()
        b = B.bidict(strs_to_ints)
        total_time_bidict += time.time() - start_time
        start_time = time.time()
        rev = {v: k for k, v in strs_to_ints}
        total_time_revdict += time.time() - start_time
    print('\nExample with str: int.')
    print('''In {} trials, average time for a list of cardinality {}:\n'''
            '''    bidict:        {:.4f} sec.\n'''
            '''    reversed dict: {:.4f} sec.'''.
            format(trials, card, total_time_bidict/trials,
                total_time_revdict/trials))
Example #5
def test_delete():
    element_by_symbol = B.bidict(H='hydrogen')
    element_by_symbol['He':] = 'helium'
    element_by_symbol[:'lithium'] = 'Li'
    del element_by_symbol['H':]
    del element_by_symbol[:'lithium']
    assert element_by_symbol == B.bidict({'He': 'helium'})
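
This test is written against bidict's slice syntax, where element_by_symbol['He':] = 'helium' writes through the forward mapping and element_by_symbol[:'lithium'] = 'Li' writes through the inverse. A rough equivalent via the .inverse view of a current bidict release:

from bidict import bidict

element_by_symbol = bidict(H='hydrogen')
element_by_symbol['He'] = 'helium'             # was: element_by_symbol['He':] = 'helium'
element_by_symbol.inverse['lithium'] = 'Li'    # was: element_by_symbol[:'lithium'] = 'Li'
del element_by_symbol['H']                     # was: del element_by_symbol['H':]
del element_by_symbol.inverse['lithium']       # was: del element_by_symbol[:'lithium']
assert element_by_symbol == bidict({'He': 'helium'})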
Example #6
    def train(self, samples, labels):
        """
        Train LDA model.
        Parameters
        ----------
        samples : ndarray, shape = [n_vowelsamples, n_MFCCsamples, n_MFCCcoefficients]
          Training ndarray, where n_vowelsamples is the number of vowels sampled,
          n_MFCCsamples is the number of MFCCs extracted from one vowel and
          n_MFCCcoefficients is the number of coefficients in each MFCC vector
        labels : ndarray, shape = [n_vowelsamples]
          Vowel labels (string or integer)
        Returns
        ----------
        LDAobject

        Leaving out the check for sample size match between samples and labels, since LDA does that for us.
        """
        nsamples, nframes, ncepstra = samples.shape
        samples = np.resize(samples,(nsamples, nframes*ncepstra))
        
        # build label map
        label_set = sorted(list(set(labels)))
        self._label_map = bidict(zip(range(len(label_set)), label_set))

        self.lda.fit(samples, map(lambda x:self._label_map[:x], labels))
        return self
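
The _label_map bidict gives the estimator integer class indices for fitting and lets predictions be mapped back to vowel labels; _label_map[:x] is the inverse (label-to-index) lookup in the slice syntax this project uses. A small sketch of the same round trip with the current .inverse view, using hypothetical vowel labels:

from bidict import bidict

labels = ['a', 'e', 'i', 'o', 'u']                    # hypothetical vowel labels
label_map = bidict(zip(range(len(labels)), labels))   # index -> label

encoded = [label_map.inverse[lab] for lab in ['i', 'a']]   # label -> index (was label_map[:x])
assert encoded == [2, 0]
assert [label_map[i] for i in encoded] == ['i', 'a']       # index -> label for predictions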
Example #7
    def __init__(self, required_args=["sel", "sel_in", "sel_out", "sel_gpot", "sel_spike"], ctrl_tag=CTRL_TAG):
        super(Manager, self).__init__(ctrl_tag)

        # Required constructor args:
        self.required_args = required_args

        # One-to-one mapping between MPI rank and module ID:
        self.rank_to_id = bidict.bidict()

        # Unique object ID:
        self.id = uid()

        # Set up a dynamic table to contain the routing table:
        self.routing_table = RoutingTable()

        # Number of emulation steps to run:
        self.steps = np.inf

        # Variables for timing run loop:
        self.start_time = 0.0
        self.stop_time = 0.0

        # Variables for computing throughput:
        self.counter = 0
        self.total_sync_time = 0.0
        self.total_sync_nbytes = 0.0
        self.received_data = {}

        # Average step synchronization time:
        self._average_step_sync_time = 0.0

        # Computed throughput (only updated after an emulation run):
        self._average_throughput = 0.0
        self._total_throughput = 0.0
        self.log_info("manager instantiated")
Example #8
    def __init__(self, filename):
        corpus_file = open(filename)
        corpus_data = json.load(corpus_file)
        corpus_file.close()

        if not type(corpus_data) is dict:
            raise Exception("Invalid Corpus Format")

        num_keys = len(corpus_data)
        self.mapping = bidict()
        self.matrix = lil_matrix((num_keys,num_keys))

        highest_empty_index = 0

        for cur_key, cur_dict in corpus_data.items():

            if not type(cur_dict) is dict:
                raise Exception("Invalid Corpus Format")

            if not cur_key in self.mapping:
                self.mapping[cur_key] = highest_empty_index
                highest_empty_index += 1
            
            start_index = self.mapping[cur_key]

            for target, probability in cur_dict.items():
                if not target in self.mapping:
                    self.mapping[target] = highest_empty_index
                    highest_empty_index += 1
                
                target_index = self.mapping[target]

                self.matrix[start_index, target_index] = probability
Example #9
 def __init__(self, description="", version="2010-09-09"):
   self.description = description
   self.version = version
   self.namespace = bidict()
   self.parameters = []
   self.mappings = []
   self.resources = []
Example #10
File: task_map.py Project: DC23/scriptabit
    def __init__(self, filename=None):
        """ Initialise the TaskMap instance.

        Args:
            filename (str): The optional filename to load from.
        """
        super().__init__()

        # try to load from the file, defaulting to empty bidict if the load
        # fails for any reason
        try:
            self.__bidict = bidict()
            with open(filename, 'r') as f:
                self.__bidict = bidict(json.load(f))
        except:
            self.__bidict = bidict()
Example #11
def test_bidirectional_mappings(d):
    b = bidict(d)
    for k, v in b.items():
        v_ = b[k]
        k_ = b.inv[v]
        assert k == k_ or both_nan(k, k_)
        assert v == v_ or both_nan(v, v_)
Example #12
File: marz.py Project: Lexandro92/marz
 def __init__(self, routes, riot):
     self.riot = riot
     riot.register_udp(self)
     self.routes = routes
     self.ports = [port for port in self.routes.keys()]
     self.sockets = dict()
     self.connections = bidict()
Example #13
def full_cost(tree_adj, tree, gt_graph, X):
    import graph_tool.centrality as gt
    from bidict import bidict
    vbetween, ebetween = gt.betweenness(gt_graph)
    info, prt = augmented_ancestor(tree_adj, X)
    root = None
    for u, parent in prt.items():
        if parent is None:
            root = u
            break
    assert root is not None
    Xbar = set(tree_adj.keys()) - X
    edge_to_vertex = bidict({})
    raw_edge_features = {}
    for edge in tree:
        tail = edge_tail(edge, prt)[0]
        edge_to_vertex[edge] = tail
        raw_edge_features[edge] = [ebetween[gt_graph.edge(*edge)],
                                   vbetween[gt_graph.vertex(tail)],
                                   info[tail][1]/len(tree_adj),
                                   info[tail][2]/len(X), [], []]
    distances = []
    for i in X:
        for j in Xbar:
            path = actual_tree_path(i, j, prt)
            path_len = len(path)
            distances.append(path_len)
            if j != root:
                raw_edge_features[edge_to_vertex[:j]][4].append(path_len)
            for edge in path:
                raw_edge_features[edge][5].append(path_len)
    return raw_edge_features, sum(distances)/len(distances)
Example #14
 def __init__(self,parent,proto):
     #super(ClientPacketFactory,self).__init__()
     self.parent = parent
     self.proto = proto
     self.clients = bidict.bidict()
     self.sesscounter = 0
     self.cllock = threading.Lock()
Example #15
def load_corpus():
    global typeid2label
    global corpus
    global targetid2label
    # load corpus from hard disk

    logging.info("Loading the mapping file...")
    # load the typeid2label
    with open("./SogouC.ClassList.txt", "r", encoding="cp936") as c:
        for line in c:
            _id, _name = line.strip().split()
            typeid2label[_id] = _name

    # build the target-label bidict
    labels = list(typeid2label.values())
    targetid2label = bidict([(i, labels[i]) for i in range(0, len(labels))])  # need to be saved

    # load the corpus
    logging.info("loading the corpus...")
    for dp, dn, fn in os.walk("./ClassFile"):
        for f in fn:
            full_path = os.path.join(dp, f)
            par_name = os.path.basename(os.path.dirname(full_path))
            try:
                with open(full_path, "r", encoding="cp936") as d:
                    text = d.read().strip()
                    data = (text, targetid2label[: typeid2label[par_name]])
                    corpus.append(data)
            except Exception as e:
                logging.error(e)
                continue

    logging.info("Total corpus #{}".format(len(corpus)))
    return
Example #16
def update_s2idx(s2idx, removal_list):
    '''
    The purpose of this function is to update the s2idx
    bidictionary after the removal of some species, and 
    return a list of the indices to keep

    >>> s2idx = bidict( zip(['manzanita', 'baldEagle', 'fogMoisture', 'skunk', 'understoryPlants'],range(5)) )
    >>> removal_list = ['skunk','manzanita']
    >>> s2idx_new, keepIdxs = update_s2idx(s2idx,removal_list)
    >>> s2idx_new.inv[0]
    'baldEagle'
    >>> s2idx_new.inv[1]
    'fogMoisture'
    >>> s2idx_new.inv[2]
    'understoryPlants'
    >>> keepIdxs
    [1, 2, 4]

    '''

    n = len(s2idx)

    spp_list = get_spp_list(s2idx)

    spp_list_new = list( filter(lambda x: x not in removal_list, spp_list) )
    keepIdxs = sorted( [s2idx[s] for s in spp_list_new] )
    s2idx_new = bidict( zip(spp_list_new, range(len(spp_list_new))) )

    return s2idx_new, keepIdxs
Example #17
def _build_grid_button_map(page):
    mapping = {}
    col_offset = BeamMatrixMinder.col_per_page * page
    for row in xrange(BeamMatrixMinder.n_rows):
        for column in xrange(BeamMatrixMinder.col_per_page):
            mapping[(row, column+col_offset)] = NoteOnMapping(column, row + 0x35)
    return bidict(mapping)
Example #18
    def add_controls(self, control_map, callback):
        """Attach a control map to a specified callback.

        Returns the bidirectional version of the control map.
        """
        self.set_callback_for_mappings(control_map.itervalues(), callback)
        return bidict(control_map)
Example #19
def test_update_withdup(benchmark):
    elements_ = bidict(elements)

    def runner():
        with pytest.raises(ValueDuplicationError):
            elements_.update(update_withdupval)

    benchmark(runner)
Example #20
def test_put_withdup(benchmark):
    elements_ = bidict(elements)

    def runner():
        with pytest.raises(ValueDuplicationError):
            elements_.put('key_with_dup_val', 'hydrogen')

    benchmark(runner)
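
elements and update_withdupval come from elsewhere in the benchmark module and are not shown here. What both benchmarks time is bidict's refusal to silently create a duplicate value; a self-contained sketch of that behavior with hypothetical element data:

from bidict import bidict, ValueDuplicationError

b = bidict({'H': 'hydrogen'})
try:
    b.put('Hy', 'hydrogen')        # 'hydrogen' is already mapped from 'H'
except ValueDuplicationError:
    pass

b.forceput('Hy', 'hydrogen')       # explicitly overwrite the ('H', 'hydrogen') pair
assert 'H' not in b and b['Hy'] == 'hydrogen'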
Example #21
	def __init__(self, start_time, end_time, *args, **kwargs):
		'''
		@param start_time: The file start time.
		@param end_time: The file end time.
		'''
		TimeSeriesData.__init__(self, start_time, end_time, *args, **kwargs)
		BiDirectionalLookupNonTerminalNode.__init__(self, None, *args, **kwargs)
		self._tiers_by_name = bidict()
Example #22
File: session.py Project: mardix/libmunin
    def __init__(self, name, mask, config=DEFAULT_CONFIG):
        """Create a new session:

        :param name: The name of the session. Used to load it again from disk.
        :param mask: The mask. See :term:`Mask`
        :param config: A dictionary with config values. See :class:`DefaultConfig` for available keys.
        """
        self._config = config
        self._name = name

        # Publicly readable attribute.
        self.mapping = {}

        # Make access to the mask more efficient
        self._mask = copy(mask)
        self._attribute_list = sorted(mask)
        self._listidx_to_key = {k: i for i, k in enumerate(self._attribute_list)}

        # Lookup tables for those attributes (fast access is crucial here)
        def make_index(idx, default_func):
            index = {}
            for key, descr in self._mask.items():
                if descr[idx] is not None:
                    index[key] = descr[idx]
                else:
                    index[key] = default_func(key)

            return index

        # Import this locally, since we might get a circular import otherwise:
        from munin.distance import DistanceFunction
        from munin.provider import Provider

        # Build indices and set default values:
        self._key_to_providers = make_index(0,
                lambda key: Provider()
        )
        self._key_to_distfuncs = make_index(1,
                lambda key: DistanceFunction(self._key_to_providers[key])
        )
        self._key_to_weighting = make_index(2,
                lambda key: 1.0
        )

        # Sum of the individual weights, pre-calculated once.
        self._weight_sum = sum((descr[2] for descr in mask.values()))

        # Create the associated database.
        self._database = Database(self)

        # Filtering related:
        self._filtering_enabled = config['recom_history_sieving']
        self._recom_history = RecommendationHistory(
            penalty_map=config['recom_history_penalty']
        )

        # Publicly readable attribute.
        self.mapping = bidict()
Example #23
File: __init__.py Project: mardix/libmunin
    def __init__(self, compress=False):
        """Create a new Provider with the following attributes:

        :param compress: Deduplicate data?
        """
        self.compress = compress
        if compress:
            self._store = bidict()
            self._last_id = 0
Example #24
File: indexer.py Project: fenekku/Masters
    def __init__(self, indexfilepath):
        self.indexfilepath = indexfilepath
        self._id_to_i = bidict()
        self._id_index = 0

        if exists(self.indexfilepath):
            with open(self.indexfilepath, "rb") as f:
                self._id_to_i = cPickle.load(f)
                self._id_index = len(self._id_to_i)
Example #25
 def __init__(self, title=None):
     self.title = title
     self.programs = []
     self.listItems = []
     self.programsByListItem = bidict.bidict()
     self.episodesDone = False
     self.postersDone = False
     self.backgroundsDone = False
     self.index = 0
Example #26
 def __init__(self, pathfile, item_id='id', class_id='class'):
     self.dict = {'name': '',
                  'attributes': {},
                  'attribute_map': bidict({}),
                  'data': None,
                  'targets': [],
                  'target_names': [],
                  'target_map': bidict({}),
                  'item_names': [],
                  'item_map': bidict({})}
     self.arff_attribute_id_counter = 0
     self.matrix_attribute_id_counter = 0
     self.item_id = item_id
     self.item_id_nr = None
     self.class_id = class_id
     self.class_id_nr = None
     self.map_arff_aid_to_matrix_col = bidict({})
     self._load_sparse_arff(pathfile)
Example #27
 def restrict_feature_set_to(self, feature_name_filter_func):
     r = deepcopy(self)
     mask = np.array([1 if feature_name_filter_func(r.columns[i]) else 0
                      for i in sorted(r.columns.keys())], dtype=bool)
     r.X = csr_matrix(r.X.todense()[:, mask])
     remaining_feature_names = [
         r.columns[i] for i, v in enumerate(mask) if v == 1]
     r.columns = bidict({
         i: v for i, v in enumerate(remaining_feature_names)})
     return r
Example #28
def get_fuzzy_name_mapping(billboard_path):
    mapping = bidict()
    track_ids = read_billboard_track_ids(billboard_path)
    for track_id in track_ids:
        with open(mcgill_path(billboard_path, track_id, 'echonest.json')) as f:
            meta = json.load(f)['meta']
            artist = meta['artist']
            title = meta['title']
            mapping[(fuzzy(artist), fuzzy(title))] = track_id
    return mapping
Example #29
 def __init__(self):
         self.filePath = "./damai/"
         self.mcid={'演唱会':1,'音乐会':2,'话剧歌剧':3,'舞蹈芭蕾':4,'曲苑杂坛':5,'体育比赛':6,'度假休闲':7}
         self.ccid={'流行':9,'摇滚':10,'民族':11,'音乐节':12,'其他演唱会':13,
                   '管弦乐':14, '独奏':15,'室内乐及古乐':16, '声乐及合唱':17, '其他音乐会':18,
                   '话剧 ':19,'歌剧 ':20,'歌舞剧 ':21,'音乐剧 ':22,'儿童剧 ':23,
                   '舞蹈 ':24,'芭蕾 ':25,'舞剧 ':26,
                   '相声 ':27,'魔术 ':28,'马戏 ':29,'杂技 ':30,'戏曲 ':31,'其他曲苑杂坛 ':32,
                   '球类运动':33,'搏击运动':34,'其它竞技':35,
                   '主题公园':36, '风景区':37, '展会':38, '特色体验':39, '温泉':40, '滑雪':41, '游览线路':42, '度假村':43, '代金券':44, '酒店住宿':45
                   }
         self.mcidDict=~bidict(self.mcid)
         self.ccidDict=~bidict(self.ccid)
         self.geoCodingDict = {}
         self.geoCodingDictFile = "./geoCodingDict.txt"
         #self.readGeoCodingDict(self.geoCodingDictFile)
         self.geoCoder = GeoCoder()
         self.avosClassName = "damai"
         self.avosManager = AvosManager()
Example #30
 def __init__(self):
     self.mcid={u'演唱会':1,u'音乐会':2,u'话剧歌剧':3,u'舞蹈芭蕾':4,u'曲苑杂坛':5,u'体育比赛':6,u'度假休闲':7}
     self.ccid={'流行':9,'摇滚':10,'民族':11,'音乐节':12,'其他演唱会':13,
               '管弦乐':14, '独奏':15,'室内乐及古乐':16, '声乐及合唱':17, '其他音乐会':18,
               '话剧 ':19,'歌剧 ':20,'歌舞剧 ':21,'音乐剧 ':22,'儿童剧 ':23,
               '舞蹈 ':24,'芭蕾 ':25,'舞剧 ':26,
               '相声 ':27,'魔术 ':28,'马戏 ':29,'杂技 ':30,'戏曲 ':31,'其他曲苑杂坛 ':32,
               '球类运动':33,'搏击运动':34,'其它竞技':35,
               '主题公园':36, '风景区':37, '展会':38, '特色体验':39, '温泉':40, '滑雪':41, '游览线路':42, '度假村':43, '代金券':44, '酒店住宿':45
               }
     self.mcidDict=~bidict(self.mcid)
     self.ccidDict=~bidict(self.ccid)
     self.geoCodingDict = {}
     self.geoCodingDictFile = "./geoCodingDict.txt"
     #self.readGeoCodingDict(self.geoCodingDictFile)
     self.geoCoder = GeoCoder()
     self.start_urls = []
     self.init_start_url()
     self.host = 'http://www.damai.cn'
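
Both constructors invert the category tables with ~bidict(...), relying on the bidict release these projects were written against; an equivalent id-to-name table can also be built through the .inverse view. A trimmed-down sketch:

from bidict import bidict

mcid = {'演唱会': 1, '音乐会': 2}      # trimmed-down category table
mcidDict = bidict(mcid).inverse        # replaces ~bidict(mcid)
assert mcidDict[1] == '演唱会'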
Example #31
from bidict import bidict
from enum import Enum


class ProductLayerType(Enum):
    UNKNOWN = 0
    BATHYMETRY = 1
    UNCERTAINTY = 2
    DESIGNATED = 3
    MOSAIC = 4


layer_type_prefix = bidict({
    ProductLayerType.UNKNOWN: "UNK",
    ProductLayerType.BATHYMETRY: "BAT",
    ProductLayerType.UNCERTAINTY: "UNC",
    ProductLayerType.DESIGNATED: "DES",
    ProductLayerType.MOSAIC: "MOS"
})
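
A short usage sketch continuing from the definitions above, showing the two-way lookup this bidict provides between layer types and their file-name prefixes:

assert layer_type_prefix[ProductLayerType.BATHYMETRY] == "BAT"        # enum -> prefix
assert layer_type_prefix.inv["UNC"] is ProductLayerType.UNCERTAINTY   # prefix -> enum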
Example #32
                                Str)
from middlewared.validators import Range
from middlewared.service import (CallError, CRUDService, SystemServiceService,
                                 ValidationErrors, private)
from middlewared.utils import run
from middlewared.async_validators import check_path_resides_within_volume

import bidict
import errno
import ipaddress
import re
import os

AUTHMETHOD_LEGACY_MAP = bidict.bidict({
    'None': 'NONE',
    'CHAP': 'CHAP',
    'CHAP Mutual': 'CHAP_MUTUAL',
})
RE_IP_PORT = re.compile(r'^(.+?)(:[0-9]+)?$')
RE_TARGET_NAME = re.compile(r'^[-a-z0-9\.:]+$')


class ISCSIGlobalService(SystemServiceService):
    class Config:
        datastore_extend = 'iscsi.global.config_extend'
        datastore_prefix = 'iscsi_'
        service = 'iscsitarget'
        service_model = 'iscsitargetglobalconfiguration'
        namespace = 'iscsi.global'

    @private
Example #33
    original).
    :param flags:
    :param image: Surface to create a colorized copy of
    :param new_color: RGB color to use (original alpha values are preserved)
    :return: New colorized Surface instance
    """
    image = image.copy()
    # add in new RGB values
    image.fill(new_color[0:3] + (0, ), None, flags)
    return image


tile_names = bidict({
    "S_WALL": 0,
    "S_FLOOR": 1,
    "W_WALL": 2,
    "W_FLOOR": 3,
    "H_WALL": 4,
    "H_FLOOR": 5,
})


# noinspection PyArgumentEqualDefault
class Assets:
    def __init__(self):
        # FONTS#
        self.FONT_DEBUG_MESSAGE = pygame.font.Font("data/joystix.ttf", 20)
        self.FONT_MESSAGE_TEXT = pygame.font.Font("data/joystix.ttf", 20)
        self.FONT_CURSOR_TEXT = pygame.font.Font("data/joystix.ttf",
                                                 constants.CELL_HEIGHT)
        self.FONT_FANTY = pygame.font.Font(
            "data/fanty.ttf", int(round(constants.CELL_HEIGHT / 2)))
Example #34
 def __init__(self):
     self._data = dict()
     self._data[TAGS] = bidict()
     self._data[OBJECTS] = bidict()
     self._data[FIGURES] = bidict()
     self._data[VIDEOS] = bidict()
Example #35
 def att_class_array_opts_undershorts(cls):
     options_by_value = bidict({
         0: PlayerAttributeOption.OPT_N,
         128: PlayerAttributeOption.OPT_Y,
     })
     return options_by_value
Example #36
empty_attributes = {
    "condition_type": -1,
    "quantity": -1,
    "attribute": -1,
    "unit_object": -1,
    "next_object": -1,
    "object_list": -1,
    "source_player": -1,
    "technology": -1,
    "timer": -1,
    "area_1_x": -1,
    "area_1_y": -1,
    "area_2_x": -1,
    "area_2_y": -1,
    "object_group": -1,
    "object_type": -1,
    "ai_signal": -1,
    "inverted": -1,
    "variable": -1,
    "comparison": -1,
    "target_player": -1,
    "unit_ai_action": -1,
    "xs_function": ""
}

# Set using the version json files
condition_names = bidict()
default_attributes = {}
attributes = {}
Example #37
from bidict import bidict


events = bidict([
    ('wink', 1), ('double blink', 2), ('close your eyes', 4), ('jump', 8)])


def is_valid_number(number):
    if isinstance(number, str):
        return all(c in "01" for c in number)
    return 0 < number < 32


def is_valid_code(secret_code):
    return all(e in events for e in secret_code)


def handshake(number):
    if not is_valid_number(number):
        return []
    if isinstance(number, str):
        number = int(number, 2)
    sequence = [e for e, i in events.items() if number & i]
    return sequence[::1 if (not number & 16) else -1]


def code(secret_code):
    if not is_valid_code(secret_code):
        return '0'
    sequence = [events[e] for e in secret_code]
    r = (sequence != sorted(sequence))
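
A quick check of handshake()'s bit-mask logic over the events table above (this assumes the bidict preserves insertion order, as recent releases on Python 3.7+ do):

assert handshake(3) == ['wink', 'double blink']
assert handshake(19) == ['double blink', 'wink']   # bit 0b10000 reverses the sequence
assert handshake(0) == []                          # out of range -> empty list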
Example #38
unit_names = bidict({
    0: "moveable_map_revealer",
    1: "legionary",
    4: "archer",
    5: "hand_cannoneer",
    6: "elite_skirmisher",
    7: "skirmisher",
    8: "longbowman",
    11: "mangudai",
    13: "fishing_ship",
    15: "junk",
    17: "trade_cog",
    21: "war_galley",
    24: "crossbowman",
    25: "teutonic_knight",
    36: "bombard_cannon",
    38: "knight",
    39: "cavalry_archer",
    40: "cataphract",
    41: "huskarl",
    42: "trebuchet",
    46: "janissary",
    52: "royal_janissary",
    56: "fisherman",
    73: "chu_ko_nu",
    74: "militia",
    75: "man_at_arms",
    77: "long_swordsman",
    83: "villager_male",
    93: "spearman",
    118: "builder",
    120: "forager",
    122: "hunter",
    123: "lumberjack",
    124: "stone_miner",
    125: "monk",
    128: "trade_cart_empty",
    156: "repairer",
    185: "slinger",
    188: "flamethrower",
    204: "trade_cart_full",
    207: "imperial_camel_rider",
    232: "woad_raider",
    239: "war_elephant",
    250: "longboat",
    259: "farmer",
    275: "centurion",
    279: "scorpion",
    280: "mangonel",
    281: "throwing_axeman",
    282: "mameluke",
    283: "cavalier",
    286: "monk_with_relic",
    291: "samurai",
    293: "villager_female",
    299: "bandit",
    305: "llama",
    329: "camel_rider",
    330: "heavy_camel_rider",
    331: "trebuchet_packed",
    358: "pikeman",
    359: "halberdier",
    361: "norse_warrior",
    420: "cannon_galleon",
    422: "capped_ram",
    434: "king",
    440: "petard",
    441: "hussar",
    442: "galleon",
    448: "scout_cavalry",
    473: "two_handed_swordsman",
    474: "heavy_cavalry_archer",
    492: "arbalester",
    493: "heavy_crossbowman",
    527: "demolition_ship",
    528: "heavy_demolition_ship",
    529: "fire_ship",
    530: "elite_longbowman",
    531: "elite_throwing_axeman",
    532: "fast_fire_ship",
    533: "elite_longboat",
    534: "elite_woad_raider",
    539: "galley",
    542: "heavy_scorpion",
    545: "transport_ship",
    546: "light_cavalry",
    548: "siege_ram",
    550: "onager",
    553: "elite_cataphract",
    554: "elite_teutonic_knight",
    555: "elite_huskarl",
    556: "elite_mameluke",
    557: "elite_janissary",
    558: "elite_war_elephant",
    559: "elite_chu_ko_nu",
    560: "elite_samurai",
    561: "elite_mangudai",
    567: "champion",
    569: "paladin",
    579: "gold_miner",
    588: "siege_onager",
    592: "shepherd",
    594: "sheep",
    639: "penguin",
    691: "elite_cannon_galleon",
    692: "berserk",
    694: "elite_berserk",
    705: "cow_a",
    725: "jaguar_warrior",
    726: "elite_jaguar_warrior",
    748: "cobra_car",
    751: "eagle_scout",
    752: "elite_eagle_warrior",
    753: "eagle_warrior",
    755: "tarkan",
    757: "elite_tarkan",
    763: "plumed_archer",
    765: "elite_plumed_archer",
    771: "conquistador",
    773: "elite_conquistador",
    775: "missionary",
    778: "canoe",
    814: "horse_a",
    825: "amazon_warrior",
    827: "war_wagon",
    829: "elite_war_wagon",
    831: "turtle_ship",
    832: "elite_turtle_ship",
    833: "turkey",
    846: "donkey",
    850: "amazon_archer",
    854: "torch_a_convertable",
    860: "furious_the_monkey_boy",
    866: "genoese_crossbowman",
    868: "elite_genoese_crossbowman",
    869: "magyar_huszar",
    871: "elite_magyar_huszar",
    873: "elephant_archer",
    875: "elite_elephant_archer",
    876: "boyar",
    878: "elite_boyar",
    879: "kamayuk",
    881: "elite_kamayuk",
    882: "condottiero",
    892: "heavy_pikeman",
    894: "eastern_swordsman",
    897: "camel",
    1001: "organ_gun",
    1003: "elite_organ_gun",
    1004: "caravel",
    1006: "elite_caravel",
    1007: "camel_archer",
    1009: "elite_camel_archer",
    1010: "genitour",
    1012: "elite_genitour",
    1013: "gbeto",
    1015: "elite_gbeto",
    1016: "shotel_warrior",
    1018: "elite_shotel_warrior",
    1023: "priest",
    1060: "goat",
    1103: "fire_galley",
    1104: "demolition_raft",
    1105: "siege_tower",
    1120: "ballista_elephant",
    1122: "elite_ballista_elephant",
    1123: "karambit_warrior",
    1125: "elite_karambit_warrior",
    1126: "arambai",
    1128: "elite_arambai",
    1129: "rattan_archer",
    1131: "elite_rattan_archer",
    1132: "battle_elephant",
    1134: "elite_battle_elephant",
    1142: "water_buffalo",
    1145: "ninja",
    1155: "imperial_skirmisher",
    1222: "sharkatzor",
    1225: "konnik",
    1227: "elite_konnik",
    1228: "keshik",
    1230: "elite_keshik",
    1231: "kipchak",
    1233: "elite_kipchak",
    1234: "leitis",
    1236: "elite_leitis",
    1237: "bactrian_camel",
    1243: "goose",
    1245: "pig",
    1252: "konnik_dismounted",
    1253: "elite_konnik_dismounted",
    1258: "battering_ram",
    1263: "flaming_camel",
    1271: "ox_cart",
    1273: "ox_wagon",
    1275: "khan",
    1291: "invisible_object",
    1292: "queen",
    1300: "alfred_the_alpaca",
    1302: "dragon_ship",
    1304: "relic_cart",
    1338: "cart",
    1356: "horse_b",
    1370: "steppe_lancer",
    1372: "elite_steppe_lancer",
    1374: "iroquois_warrior",
    1377: "torch_b_convertable",
    1400: "priest_with_relic",
    1570: "xolotl_warrior",
    1572: "merchant",
    1577: "photonman",
    1596: "cow_b",
    1598: "cow_c",
    1600: "cow_d",
    1602: "horse_c",
    1604: "horse_d",
    1606: "horse_e",
})
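
A brief usage note on the table above: because it is a bidict, scenario code can translate in both directions between numeric unit IDs and readable names.

assert unit_names[4] == "archer"        # unit ID -> name
assert unit_names.inv["archer"] == 4    # name -> unit ID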
Example #39
def init_comm():
    #identify which port belongs to tvc, the other belongs to mfc-n2-1.
    # right now we just assign it blindly.
    chan_d['mfc-n2-1']['port'] = serial.Serial(
        assoc_port['mfc-n2-1'])  #, timeout=chan_d['tvc']['timeout'])
    chan_d['tvc']['port'] = serial.Serial(assoc_port['tvc'],
                                          timeout=chan_d['tvc']['timeout'])


def finish_comm():
    for k in chan_d.keys():
        chan_d[k]['port'].close()


_cmd_mod = chan_d['tvc']['cmd_mod']
_cmd_status = bidict({'0': 'No error', '1': 'Unrecognised command', '2': 'Bad data value',\
        '3': 'Command ignored', '4': 'Reserved for future use'})


def _cmd_io(cmdstr, eps=50):
    res = ''
    with chan_d['tvc']['lock']:
        chan_d['tvc']['port'].write(_cmd_mod + cmdstr + '\n')
        if eps == 0: return
        else: res = chan_d['tvc']['port'].read(eps)[:-1]
    #if len(res)==0: # time out occurred
    #    raise TVC_Error('Timeout occurred', '')
    if res[0] != '0': raise TVC_Error(_cmd_status[res[0]], res)
    return res[1:]


class sierra_mfc():
Example #40
DATA_TYPES = bidict({  # allowed values: 0x00 - 0x7f
    # Layer
    ('layer', 'control'):      0x00,
    ('layer', 'version'):      0x01,
    ('layer', 'capabilities'): 0x02,
    ('layer', 'error'):        0x03,
    ('layer', 'warn'):         0x04,
    ('layer', 'info'):         0x05,
    ('layer', 'debug'):        0x06,
    ('layer', 'trace'):        0x07,
    ('layer', 'metrics'):      0x08,

    # Bytes
    ('bytes', 'buffer'): 0x10,
    ('bytes', 'stream'): 0x11,
    ('bytes', 'chunk'):  0x12,

    # Transport
    ('transport', 'frame'):              0x20,
    ('transport', 'datagram'):           0x21,
    ('transport', 'validated_datagram'): 0x22,
    ('transport', 'reliable_buffer'):    0x23,
    ('transport', 'ported_buffer'):      0x24,

    # Presentation
    ('presentation', 'document'): 0x40,

    # Application Framework
    ('application', 'pubsub'): 0x60,
    ('application', 'rpc'):    0x61,
    ('application', 'rest'):   0x62,

})
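
A short sketch of how a table like this is typically used on both sides of the wire, continuing from the definition above: the forward direction encodes a (layer, name) pair to its byte, and the inverse decodes a received byte.

assert DATA_TYPES[('transport', 'datagram')] == 0x21        # encode
assert DATA_TYPES.inv[0x21] == ('transport', 'datagram')    # decode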
Example #41
    'Stockholm', 'Barcelona'
]
_TIMEZONES = [
    'America/New_York', 'America/New_York', 'America/Los_Angeles',
    'America/New_York', 'America/Indiana/Indianapolis', 'America/Los_Angeles',
    'America/Los_Angeles', 'America/Chicago', 'America/Chicago',
    'America/Chicago', 'Europe/London', 'Europe/Paris', 'Europe/Berlin',
    'Europe/Rome', 'Europe/Prague', 'Europe/Moscow', 'Europe/Amsterdam',
    'Europe/Helsinki', 'Europe/Stockholm', 'Europe/Madrid'
]

#NAMES = ['Chicago','Barcelona']
#_TIMEZONES = ['America/Chicago','Europe/Madrid']
UTC_TZ = pytz.utc
SHORT_KEY = [short_name(city) for city in NAMES]
FULLNAMES = bidict.bidict(zip(SHORT_KEY, NAMES))
get_tz = lambda tz: pytz.timezone(tz).localize(dt.utcnow()).tzinfo
TZ = {city: get_tz(tz) for tz, city in zip(_TIMEZONES, SHORT_KEY)}
INDEX = {short_name(city): _id for _id, city in enumerate(NAMES)}
middle = lambda bbox: (.5 * (bbox[0] + bbox[2]), (.5 * (bbox[1] + bbox[3])))
GEO_TO_2D = {
    name: lc.LocalCartesian(*middle(city)).forward
    for name, city in zip(SHORT_KEY, US + EU)
}
BBOXES = dict(zip(SHORT_KEY, [bbox_to_polygon(b) for b in US + EU]))


def euclidean_to_geo(city, coords):
    """Convert back from 2D `coords` [lat, lng] to latitude and longitude
    within `city` using an external program (so it's not fast)."""
    print 'cities.py/euclidean_to_geo'
Example #42
    def __build_content_embeddings__(self, nodes: List[Node], edges: List[Edge],
                                     node_data: Dict[Node, Dict[FeatureName, object]], n_dims):
        self.log.debug("ContentRecommendation::__build_embeddings__:: Started...")
        all_embeddings = None
        node_to_idx_internal = bidict()
        for nt in self.node_types:
            nt_embedding = None
            nt_nodes = list(filter(lambda n: n.node_type == nt, nodes))
            assert len(set(nt_nodes) - set(node_data.keys())) == 0 or len(set(nt_nodes) - set(node_data.keys())) == len(
                set(nt_nodes))
            assert len(set(nt_nodes)) == len(nt_nodes)
            if len(set(nt_nodes) - set(node_data.keys())) == len(set(nt_nodes)):
                nt_embedding = np.zeros((len(nt_nodes), 1))
            else:
                nt_nodes_features: List[Dict[FeatureName, object]] = [node_data[ntn] for ntn in nt_nodes]
                feature_names = list(nt_nodes_features[0].keys())

                for f in feature_names:
                    feature = [ntnf[f] for ntnf in nt_nodes_features]
                    embedding = self.embedding_mapper[nt][f].fit_transform(feature)
                    if nt_embedding is None:
                        nt_embedding = embedding
                    else:
                        nt_embedding = np.concatenate((nt_embedding, embedding), axis=1)
                nt_embedding = unit_length(nt_embedding, axis=1)

            #
            cur_len = len(node_to_idx_internal)
            node_to_idx_internal.update(bidict(zip(nt_nodes, range(cur_len, cur_len + len(nt_nodes)))))
            if all_embeddings is None:
                all_embeddings = nt_embedding
            else:
                c1 = np.concatenate((all_embeddings, np.zeros((all_embeddings.shape[0], nt_embedding.shape[1]))),
                                    axis=1)
                c2 = np.concatenate((np.zeros((nt_embedding.shape[0], all_embeddings.shape[1])), nt_embedding), axis=1)
                all_embeddings = np.concatenate((c1, c2), axis=0)

        all_embeddings = all_embeddings[[node_to_idx_internal[n] for n in nodes]]
        nts = np.array([n.node_type for n in nodes]).reshape((-1, 1))
        ohe_node_types = OneHotEncoder(sparse=False).fit_transform(nts)
        all_embeddings = np.concatenate((all_embeddings, ohe_node_types), axis=1)
        self.log.debug(
            "ContentRecommendation::__build_embeddings__:: AutoEncoder with dims = %s" % str(all_embeddings.shape))
        n_dims = n_dims if n_dims is not None and not np.isinf(n_dims) else 2 ** int(np.log2(all_embeddings.shape[1]))
        from sklearn.decomposition import IncrementalPCA
        all_embeddings = IncrementalPCA(n_components=n_dims, batch_size=2**16).fit_transform(all_embeddings)
        all_embeddings = unit_length(all_embeddings, axis=1)
        extra_dims = 2 ** int(np.ceil(np.log2(ohe_node_types.shape[1]))) - ohe_node_types.shape[1]
        if extra_dims != 0:
            ohe_node_types = np.concatenate((ohe_node_types, np.zeros((ohe_node_types.shape[0], extra_dims))), axis=1)
        all_embeddings = np.concatenate((all_embeddings, ohe_node_types), axis=1)
        self.log.info("ContentRecommendation::__build_embeddings__:: Built Content Embedding with dims = %s" % str(
            all_embeddings.shape))
        edges = list(edges) + [Edge(n, n, 1.0) for n in nodes]
        adjacency_list = defaultdict(list)
        for src, dst, w in edges:
            adjacency_list[src].append(dst)
            adjacency_list[dst].append(src)
        nodes_to_idx = self.nodes_to_idx
        adjacent_vectors = np.vstack([all_embeddings[[nodes_to_idx[adj] for adj in adjacency_list[n]]].mean(0) for n in nodes])
        assert adjacent_vectors.shape == all_embeddings.shape
        all_embeddings = (all_embeddings + adjacent_vectors)/2.0
        return all_embeddings
Example #43
 def handle_join_game(self, join_game_packet):
     self.logger.info('Connected and joined game as entity id %d',
                      join_game_packet.entity_id)
     self.player_list = bidict()
     self.connection_retries = 0
Example #44
    def __init__(self, config_path):
        self.config = Configuration(config_path)
        if self.config.debugging_enabled:
            debugpy.listen(
                (self.config.debugging_ip, self.config.debugging_port))
        self.credentials = (self.config.mc_username, self.config.mc_password)
        self.return_code = os.EX_OK
        self.session_token = ""
        self.uuid_cache = bidict()
        self.webhooks = []
        self.bot_username = ""
        self.next_message_time = datetime.now(timezone.utc)
        self.previous_message = ""
        self.player_list = bidict()
        self.previous_player_list = bidict()
        self.accept_join_events = False
        self.tab_header = ""
        self.tab_footer = ""
        # Initialize the discord part
        self.discord_bot = discord.Client()
        self.connection_retries = 0
        self.auth_token = None
        self.connection = None
        self.setup_logging(self.config.logging_level)
        self.database_session = DatabaseSession()
        self.logger = logging.getLogger("bridge")
        self.database_session.initialize(self.config)
        self.bot_perms = discord.Permissions()
        self.bot_perms.update(manage_messages=True, manage_webhooks=True)
        # Async http request pool
        self.req_future_session = FuturesSession(max_workers=100)
        self.reactor_thread = Thread(target=self.run_auth_server,
                                     args=(self.config.auth_port, ))
        self.aioloop = asyncio.get_event_loop()
        # We need to import twisted after setting up the logger because twisted hijacks our logging
        from . import auth_server
        auth_server.DATABASE_SESSION = self.database_session
        if self.config.es_enabled:
            if self.config.es_auth:
                self.es_logger = ElasticsearchLogger(self.req_future_session,
                                                     self.config.es_url,
                                                     self.config.es_username,
                                                     self.config.es_password)
            else:
                self.es_logger = ElasticsearchLogger(self.req_future_session,
                                                     self.config.es_url)

        @self.discord_bot.event
        async def on_ready():  # pylint: disable=W0612
            self.logger.info("Discord bot logged in as %s (%s)",
                             self.discord_bot.user.name,
                             self.discord_bot.user.id)
            self.logger.info(
                "Discord bot invite link: %s",
                discord.utils.oauth_url(client_id=self.discord_bot.user.id,
                                        permissions=self.bot_perms))
            await self.discord_bot.change_presence(
                activity=discord.Game("mc!help for help"))
            self.webhooks = []
            session = self.database_session.get_session()
            channels = session.query(DiscordChannel).all()
            session.close()
            for channel in channels:
                channel_id = channel.channel_id
                discord_channel = self.discord_bot.get_channel(channel_id)
                if discord_channel is None:
                    session = self.database_session.get_session()
                    session.query(DiscordChannel).filter_by(
                        channel_id=channel_id).delete()
                    session.close()
                    continue
                channel_webhooks = await discord_channel.webhooks()
                found = False
                for webhook in channel_webhooks:
                    if webhook.name == "_minecraft" and webhook.user == self.discord_bot.user:
                        self.webhooks.append(webhook.url)
                        found = True
                    self.logger.debug("Found webhook %s in channel %s",
                                      webhook.name, discord_channel.name)
                if not found:
                    # Create the hook
                    await discord_channel.create_webhook(name="_minecraft")

        @self.discord_bot.event
        async def on_message(message):  # pylint: disable=W0612
            # We do not want the bot to reply to itself
            if message.author == self.discord_bot.user:
                return
            this_channel = message.channel.id

            # PM Commands
            if message.content.startswith("mc!help"):
                try:
                    send_channel = message.channel
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    msg = self.get_discord_help_string()
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = f"{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            elif message.content.startswith("mc!register"):
                try:
                    send_channel = message.channel
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    session = self.database_session.get_session()
                    discord_account = session.query(DiscordAccount).filter_by(
                        discord_id=message.author.id).first()
                    if not discord_account:
                        new_discord_account = DiscordAccount(message.author.id)
                        session.add(new_discord_account)
                        session.commit()
                        discord_account = session.query(
                            DiscordAccount).filter_by(
                                discord_id=message.author.id).first()

                    new_token = self.generate_random_auth_token(16)
                    account_link_token = AccountLinkToken(
                        message.author.id, new_token)
                    discord_account.link_token = account_link_token
                    session.add(account_link_token)
                    session.commit()
                    msg = f"Please connect your minecraft account to " \
                          f"`{new_token}.{self.config.auth_dns}:{self.config.auth_port}`" \
                          "in order to link it to this bridge!"
                    session.close()
                    del session
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = f"{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            # Global Commands
            elif message.content.startswith("mc!chathere"):
                if isinstance(message.channel, discord.abc.PrivateChannel):
                    msg = "Sorry, this command is only available in public channels."
                    await message.channel.send(msg)
                    return
                if message.author.id not in self.config.admin_users:
                    await message.delete()
                    try:
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        dm_channel = message.author.dm_channel
                        msg = "Sorry, you do not have permission to execute that command!"
                        await dm_channel.send(msg)
                        return
                    except discord.errors.Forbidden:
                        if isinstance(message.author, discord.abc.User):
                            msg = f"{message.author.mention}, please allow private messages from this bot."
                            error_msg = await message.channel.send(msg)
                            await asyncio.sleep(3)
                            await error_msg.delete()
                        return
                session = self.database_session.get_session()
                channels = session.query(DiscordChannel).filter_by(
                    channel_id=this_channel).all()
                if not channels:
                    new_channel = DiscordChannel(this_channel)
                    session.add(new_channel)
                    session.commit()
                    session.close()
                    del session
                    webhook = await message.channel.create_webhook(
                        name="_minecraft")
                    self.webhooks.append(webhook.url)
                    msg = "The bot will now start chatting here! To stop this, run `mc!stopchathere`."
                    await message.channel.send(msg)
                else:
                    msg = "The bot is already chatting in this channel! To stop this, run `mc!stopchathere`."
                    await message.channel.send(msg)
                    return

            elif message.content.startswith("mc!stopchathere"):
                if isinstance(message.channel, discord.abc.PrivateChannel):
                    msg = "Sorry, this command is only available in public channels."
                    await message.channel.send(msg)
                    return
                if message.author.id not in self.config.admin_users:
                    await message.delete()
                    try:
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        dm_channel = message.author.dm_channel
                        msg = "Sorry, you do not have permission to execute that command!"
                        await dm_channel.send(msg)
                        return
                    except discord.errors.Forbidden:
                        if isinstance(message.author, discord.abc.User):
                            msg = f"{message.author.mention}, please allow private messages from this bot."
                            error_msg = await message.channel.send(msg)
                            await asyncio.sleep(3)
                            await error_msg.delete()
                        return
                session = self.database_session.get_session()
                deleted = session.query(DiscordChannel).filter_by(
                    channel_id=this_channel).delete()
                session.commit()
                session.close()
                for webhook in await message.channel.webhooks():
                    if webhook.name == "_minecraft" and webhook.user == self.discord_bot.user:
                        # Copy the list to avoid problems since
                        # we're deleting indices from it as we loop
                        # through it
                        if webhook.url in self.webhooks[:]:
                            self.webhooks.remove(webhook.url)
                        await webhook.delete()
                if deleted < 1:
                    msg = "The bot was not chatting here!"
                    await message.channel.send(msg)
                    return
                else:
                    msg = "The bot will no longer chat here!"
                    await message.channel.send(msg)
                    return

            elif message.content.startswith("mc!tab"):
                send_channel = message.channel
                try:
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    player_list = ", ".join(
                        list(map(lambda x: x[1], self.player_list.items())))
                    msg = f"{self.escape_markdown(self.strip_colour(self.tab_header))}\n" \
                        f"Players online: {self.escape_markdown(self.strip_colour(player_list))}\n" \
                        f"{self.escape_markdown(self.strip_colour(self.tab_footer))}"
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = f"{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            elif message.content.startswith("mc!botlink"):
                send_channel = message.channel
                try:
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    msg = "Use the following link to invite this bot to a guild:\n" \
                          f"{discord.utils.oauth_url(client_id=self.discord_bot.user.id, permissions=self.bot_perms)}"
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = "{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            elif message.content.startswith("mc!about"):
                send_channel = message.channel
                try:
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    msg = f"This bot is running minecraft-discord-bridge version  " \
                          f"{minecraft_discord_bridge.__version__}.\n" \
                          "The source code is available at https://github.com/starcraft66/minecraft-discord-bridge"
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = f"{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            elif message.content.startswith("mc!"):
                # Catch-all
                send_channel = message.channel
                try:
                    if isinstance(message.channel, discord.abc.GuildChannel):
                        await message.delete()
                        dm_channel = message.author.dm_channel
                        if not dm_channel:
                            await message.author.create_dm()
                        send_channel = message.author.dm_channel
                    msg = "Unknown command, type `mc!help` for a list of commands."
                    await send_channel.send(msg)
                    return
                except discord.errors.Forbidden:
                    if isinstance(message.author, discord.abc.User):
                        msg = f"{message.author.mention}, please allow private messages from this bot."
                        error_msg = await message.channel.send(msg)
                        await asyncio.sleep(3)
                        await error_msg.delete()
                    return

            elif not message.author.bot:
                session = self.database_session.get_session()
                channel_should_chat = session.query(DiscordChannel).filter_by(
                    channel_id=this_channel).first()
                if channel_should_chat:
                    await message.delete()
                    discord_user = session.query(DiscordAccount).filter_by(
                        discord_id=message.author.id).first()
                    if discord_user:
                        if discord_user.minecraft_account:
                            minecraft_uuid = discord_user.minecraft_account.minecraft_uuid
                            session.close()
                            del session
                            minecraft_username = self.mc_uuid_to_username(
                                minecraft_uuid)

                            # Max chat message length: 256, bot username does not count towards this
                            # Does not count|Counts
                            # <BOT_USERNAME> minecraft_username: message
                            padding = 2 + len(minecraft_username)

                            message_to_send = self.remove_emoji(
                                message.clean_content.encode('utf-8').decode(
                                    'ascii', 'replace')).strip()
                            message_to_discord = self.escape_markdown(
                                message.clean_content)

                            total_len = padding + len(message_to_send)
                            if total_len > 256:
                                message_to_send = message_to_send[:(256 -
                                                                    padding)]
                                message_to_discord = message_to_discord[:(
                                    256 - padding)]
                            elif not message_to_send:
                                return

                            session = self.database_session.get_session()
                            channels = session.query(DiscordChannel).all()
                            session.close()
                            del session
                            if message_to_send == self.previous_message or \
                                    datetime.now(timezone.utc) < self.next_message_time:
                                send_channel = message.channel
                                try:
                                    if isinstance(message.channel,
                                                  discord.abc.GuildChannel):
                                        dm_channel = message.author.dm_channel
                                        if not dm_channel:
                                            await message.author.create_dm()
                                        send_channel = message.author.dm_channel
                                    msg = f"Your message \"{message.clean_content}\" has been rate-limited."
                                    await send_channel.send(msg)
                                    return
                                except discord.errors.Forbidden:
                                    if isinstance(message.author,
                                                  discord.abc.User):
                                        msg = f"{message.author.mention}, please allow private messages from this bot."
                                        error_msg = await message.channel.send(
                                            msg)
                                        await asyncio.sleep(3)
                                        await error_msg.delete()
                                    return

                            self.previous_message = message_to_send
                            self.next_message_time = datetime.now(
                                timezone.utc) + timedelta(
                                    seconds=self.config.message_delay)

                            self.logger.info(
                                "Outgoing message from discord: Username: %s Message: %s",
                                minecraft_username, message_to_send)

                            for channel in channels:
                                discord_channel = self.discord_bot.get_channel(
                                    channel.channel_id)
                                if not discord_channel:
                                    session = self.database_session.get_session()
                                    session.query(DiscordChannel).filter_by(
                                        channel_id=channel.channel_id).delete()
                                    session.close()
                                    continue
                                webhooks = await discord_channel.webhooks()
                                for webhook in webhooks:
                                    if webhook.name == "_minecraft":
                                        await webhook.send(
                                            username=minecraft_username,
                                            avatar_url=f"https://visage.surgeplay.com/face/160/{minecraft_uuid}",
                                            content=message_to_discord)

                            packet = serverbound.play.ChatPacket()
                            packet.message = f"{minecraft_username}: {message_to_send}"
                            self.connection.write_packet(packet)
                    else:
                        send_channel = message.channel
                        try:
                            if isinstance(message.channel,
                                          discord.abc.GuildChannel):
                                dm_channel = message.author.dm_channel
                                if not dm_channel:
                                    await message.author.create_dm()
                                send_channel = message.author.dm_channel
                            msg = "Unable to send chat message: there is no Minecraft account linked to this discord " \
                                "account, please run `mc!register`."
                            await send_channel.send(msg)
                            return
                        except discord.errors.Forbidden:
                            if isinstance(message.author, discord.abc.User):
                                msg = f"{message.author.mention}, please allow private messages from this bot."
                                error_msg = await message.channel.send(msg)
                                await asyncio.sleep(3)
                                await error_msg.delete()
                            return
                        finally:
                            session.close()
                            del session
                else:
                    session.close()
                    del session
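
A standalone sketch of the length cap described in the comments above (an illustration with a hypothetical helper name, not part of the bridge itself): Minecraft's 256-character chat limit has to cover "minecraft_username: " plus the message body, while the webhook's bot username does not count.

def cap_chat_message(minecraft_username, message, limit=256):
    # ": " separator (2 chars) plus the username count toward the limit
    padding = 2 + len(minecraft_username)
    return message[:limit - padding]

assert len(f"Notch: {cap_chat_message('Notch', 'x' * 300)}") == 256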
Example #45
0
"""
from time import sleep
from ctypes import c_uint32

from uio.ti.icss import Icss
from uio.device import Uio
from bidict import bidict
import Adafruit_BBIO.GPIO as GPIO

IRQ = 2  # range 2 .. 9
PRU0_ARM_INTERRUPT = 19  # range 16 .. 31

# constants needed from  laser-scribe-constants.h
COMMANDS = ['CMD_EMPTY', 'CMD_SCAN_DATA', 'CMD_SCAN_DATA_NO_SLED']
COMMANDS += ['CMD_EXIT', 'CMD_DONE']
COMMANDS = bidict(enumerate(COMMANDS))
ERRORS = ['ERROR_NONE', 'ERROR_DEBUG_BREAK', 'ERROR_MIRROR_SYNC']
ERRORS += ['ERROR_TIME_OVERRUN']
ERRORS = bidict(enumerate(ERRORS))
RPM = 2400
FACETS = 4
SCANLINE_DATA_SIZE = 937
SCANLINE_HEADER_SIZE = 1
SCANLINE_ITEM_SIZE = SCANLINE_HEADER_SIZE + SCANLINE_DATA_SIZE
TICKS_PER_MIRROR_SEGMENT = 12500
QUEUE_LEN = 8
ERROR_RESULT_POS = 0
SYNC_FAIL_POS = 1
START_RINGBUFFER = 5
SINGLE_FACET = False
DURATION = 10  # seconds
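
A small usage sketch (not part of the original script): because the lists are wrapped in bidict(enumerate(...)), command and error codes can be resolved in both directions when exchanging data with the PRU firmware.

assert COMMANDS[3] == 'CMD_EXIT'              # index -> command name
assert COMMANDS.inv['CMD_SCAN_DATA'] == 1     # command name -> index
assert ERRORS.inv['ERROR_MIRROR_SYNC'] == 2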
Example #46
0
def _unify(initial_exprs: List[ir.Expr],
           local_var_definitions: Mapping[str, ir.Expr],
           patterns: List[ir.Expr], expr_variables: Set[str],
           pattern_variables: Set[str], identifier_generator: Iterable[str],
           verbose: bool) -> UnificationResult:
    # We need to replace local literals before doing the unification, to avoid assuming that e.g. T in an expr
    # is equal to T in a pattern just because they have the same name.

    lhs_type_literal_names = set(local_var_definitions.keys())
    for expr in itertools.chain(initial_exprs, local_var_definitions.values()):
        for expr_literal in expr.get_free_vars():
            lhs_type_literal_names.add(expr_literal.cpp_type)

    unique_var_name_by_expr_type_literal_name = bidict({
        lhs_type_literal_name: next(identifier_generator)
        for lhs_type_literal_name in lhs_type_literal_names
    })

    unique_var_name_by_pattern_type_literal_name = bidict({
        pattern_literal.cpp_type: next(identifier_generator)
        for pattern in patterns for pattern_literal in pattern.get_free_vars()
    })

    unique_var_names = set()
    for expr_var_name, unique_var_name in unique_var_name_by_expr_type_literal_name.items():
        if expr_var_name in expr_variables or expr_var_name in local_var_definitions:
            unique_var_names.add(unique_var_name)
    for pattern_var_name, unique_var_name in unique_var_name_by_pattern_type_literal_name.items():
        if pattern_var_name in pattern_variables:
            unique_var_names.add(unique_var_name)

    literal_expr_by_unique_name: Dict[str, ir.AtomicTypeLiteral] = dict()

    lhs = [
        _replace_var_names_in_expr(expr,
                                   unique_var_name_by_expr_type_literal_name)
        for expr in initial_exprs
    ]
    rhs = [
        _replace_var_names_in_expr(
            pattern, unique_var_name_by_pattern_type_literal_name)
        for pattern in patterns
    ]
    context = [
        (unique_var_name_by_expr_type_literal_name[local_var_name],
         _replace_var_names_in_expr(value,
                                    unique_var_name_by_expr_type_literal_name))
        for local_var_name, value in local_var_definitions.items()
    ]

    lhs = [
        _unpack_if_variable(expr, unique_var_names,
                            literal_expr_by_unique_name) for expr in lhs
    ]
    rhs = [
        _unpack_if_variable(pattern, unique_var_names,
                            literal_expr_by_unique_name) for pattern in rhs
    ]
    context = {
        _unpack_if_variable(var, unique_var_names,
                            literal_expr_by_unique_name):
        _unpack_if_variable(expr, unique_var_names,
                            literal_expr_by_unique_name)
        for var, expr in context
    }

    unification_strategy = _ExprUnificationStrategy(
        unique_var_names,
        set(unique_var_name_by_pattern_type_literal_name.inv.keys()),
        literal_expr_by_unique_name)
    try:
        var_expr_equations, expanded_var_expr_equations = unify(
            [(lhs, rhs)], context, unification_strategy)
    except UnificationFailedException:
        if verbose:
            print(
                'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nReturning IMPOSSIBLE due to exception: %s'
                %
                (', '.join(expr_to_cpp_simple(expr)
                           for expr in initial_exprs), ', '.join(
                               '%s = %s' % (var, expr_to_cpp_simple(expr))
                               for var, expr in local_var_definitions.items()),
                 ', '.join(
                     expr_to_cpp_simple(pattern)
                     for pattern in patterns), ', '.join(
                         expr_variable
                         for expr_variable in expr_variables), ', '.join(
                             pattern_variable
                             for pattern_variable in pattern_variables),
                 unique_var_name_by_expr_type_literal_name,
                 unique_var_name_by_pattern_type_literal_name,
                 traceback.format_exc()))
        return UnificationResult(UnificationResultKind.IMPOSSIBLE)
    except UnificationAmbiguousException:
        if verbose:
            print(
                'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nReturning POSSIBLE due to exception: %s'
                %
                (', '.join(expr_to_cpp_simple(expr)
                           for expr in initial_exprs), ', '.join(
                               '%s = %s' % (var, expr_to_cpp_simple(expr))
                               for var, expr in local_var_definitions.items()),
                 ', '.join(
                     expr_to_cpp_simple(pattern)
                     for pattern in patterns), ', '.join(
                         expr_variable
                         for expr_variable in expr_variables), ', '.join(
                             pattern_variable
                             for pattern_variable in pattern_variables),
                 unique_var_name_by_expr_type_literal_name,
                 unique_var_name_by_pattern_type_literal_name,
                 traceback.format_exc()))
        return UnificationResult(UnificationResultKind.POSSIBLE)
    except AssertionError as e:
        if verbose:
            print(
                'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nAssertionError'
                %
                (', '.join(expr_to_cpp_simple(expr)
                           for expr in initial_exprs), ', '.join(
                               '%s = %s' % (var, expr_to_cpp_simple(expr))
                               for var, expr in local_var_definitions.items()),
                 ', '.join(
                     expr_to_cpp_simple(pattern)
                     for pattern in patterns), ', '.join(
                         expr_variable
                         for expr_variable in expr_variables), ', '.join(
                             pattern_variable
                             for pattern_variable in pattern_variables),
                 unique_var_name_by_expr_type_literal_name,
                 unique_var_name_by_pattern_type_literal_name))
        raise

    try:
        var_expr_equations = canonicalize(var_expr_equations,
                                          expanded_var_expr_equations,
                                          unification_strategy)
    except CanonicalizationFailedException:
        if verbose:
            print(
                'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nReturning POSSIBLE due to exception: %s'
                %
                (', '.join(expr_to_cpp_simple(expr)
                           for expr in initial_exprs), ', '.join(
                               '%s = %s' % (var, expr_to_cpp_simple(expr))
                               for var, expr in local_var_definitions.items()),
                 ', '.join(
                     expr_to_cpp_simple(pattern)
                     for pattern in patterns), ', '.join(
                         expr_variable
                         for expr_variable in expr_variables), ', '.join(
                             pattern_variable
                             for pattern_variable in pattern_variables),
                 unique_var_name_by_expr_type_literal_name,
                 unique_var_name_by_pattern_type_literal_name,
                 traceback.format_exc()))
        return UnificationResult(UnificationResultKind.POSSIBLE)
    except AssertionError as e:
        if verbose:
            print(
                'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nvar_expr_equations = %s\nAssertionError'
                %
                (', '.join(expr_to_cpp_simple(expr)
                           for expr in initial_exprs), ', '.join(
                               '%s = %s' % (var, expr_to_cpp_simple(expr))
                               for var, expr in local_var_definitions.items()),
                 ', '.join(
                     expr_to_cpp_simple(pattern)
                     for pattern in patterns), ', '.join(
                         expr_variable
                         for expr_variable in expr_variables), ', '.join(
                             pattern_variable
                             for pattern_variable in pattern_variables),
                 unique_var_name_by_expr_type_literal_name,
                 unique_var_name_by_pattern_type_literal_name,
                 var_expr_equations))
        raise

    var_expr_equations = [
        (_pack_if_variable(var, literal_expr_by_unique_name), [
            _pack_if_variable(expr, literal_expr_by_unique_name)
            for expr in exprs
        ] if isinstance(exprs, list) else _pack_if_variable(
            exprs, literal_expr_by_unique_name))
        for var, exprs in var_expr_equations
    ]

    # At this point all equations should be of the form var=expr, with var a variable from a pattern and expr containing
    # no vars from patterns.
    for lhs_var, exprs in var_expr_equations:
        if isinstance(lhs_var, ir.VariadicTypeExpansion):
            lhs_var = lhs_var.expr
        if lhs_var.cpp_type in unique_var_name_by_pattern_type_literal_name.inv:
            if isinstance(exprs, list):
                for expr in exprs:
                    for rhs_var in expr.get_free_vars():
                        assert rhs_var.cpp_type not in unique_var_name_by_pattern_type_literal_name.inv
            else:
                for rhs_var in exprs.get_free_vars():
                    assert rhs_var.cpp_type not in unique_var_name_by_pattern_type_literal_name.inv

    # We reverse the var renaming done above
    result_var_expr_equations: List[Tuple[ir.AtomicTypeLiteral,
                                          List[ir.Expr]]] = []
    result_expanded_var_expr_equations: List[Tuple[ir.AtomicTypeLiteral,
                                                   List[ir.Expr]]] = []
    for var, exprs in var_expr_equations:
        if isinstance(var, ir.VariadicTypeExpansion):
            assert isinstance(exprs, list)
            result_expanded_var_expr_equations.append(
                (_replace_var_names_in_expr(
                    var, unique_var_name_by_pattern_type_literal_name.inv), [
                        _replace_var_names_in_expr(
                            expr,
                            unique_var_name_by_expr_type_literal_name.inv)
                        for expr in exprs
                    ]))
        else:
            result_var_expr_equations.append(
                (_replace_var_names_in_expr(
                    var, unique_var_name_by_pattern_type_literal_name.inv),
                 _replace_var_names_in_expr(
                     exprs, unique_var_name_by_expr_type_literal_name.inv)))

    for var, exprs in var_expr_equations:
        for expr in (exprs if isinstance(exprs, list) else (exprs, )):
            if var.expr_type != expr.expr_type:
                if verbose:
                    print(
                        'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nReturning IMPOSSIBLE due to type mismatch:\n%s\nwith type:\n%s\n=== vs ===\n%s\nwith type:\n%s'
                        %
                        (', '.join(
                            expr_to_cpp_simple(expr)
                            for expr in initial_exprs),
                         ', '.join(
                             '%s = %s' % (var, expr_to_cpp_simple(expr))
                             for var, expr in local_var_definitions.items()),
                         ', '.join(
                             expr_to_cpp_simple(pattern)
                             for pattern in patterns), ', '.join(
                                 expr_variable
                                 for expr_variable in expr_variables),
                         ', '.join(pattern_variable
                                   for pattern_variable in pattern_variables),
                         unique_var_name_by_expr_type_literal_name,
                         unique_var_name_by_pattern_type_literal_name,
                         expr_to_cpp_simple(var), str(var.expr_type),
                         expr_to_cpp_simple(expr), str(expr.expr_type)))
                return UnificationResult(UnificationResultKind.IMPOSSIBLE)

    for var, _ in result_var_expr_equations:
        assert isinstance(var, ir.AtomicTypeLiteral)

    for var, _ in result_expanded_var_expr_equations:
        assert isinstance(var, ir.VariadicTypeExpansion) and isinstance(
            var.expr, ir.AtomicTypeLiteral)

    if verbose:
        print(
            'unify(exprs=[%s], local_var_definitions={%s}, patterns=[%s], expr_variables=[%s], pattern_variables=[%s], ...):\nUsing name mappings: %s, %s\nReturning CERTAIN with result_var_expr_equations:\n%s\nresult_expanded_var_expr_equations:\n%s'
            % (', '.join(expr_to_cpp_simple(expr)
                         for expr in initial_exprs), ', '.join(
                             '%s = %s' % (var, expr_to_cpp_simple(expr))
                             for var, expr in local_var_definitions.items()),
               ', '.join(expr_to_cpp_simple(pattern)
                         for pattern in patterns), ', '.join(
                             expr_variable
                             for expr_variable in expr_variables), ', '.join(
                                 pattern_variable
                                 for pattern_variable in pattern_variables),
               unique_var_name_by_expr_type_literal_name,
               unique_var_name_by_pattern_type_literal_name, '\n'.join(
                   expr_to_cpp_simple(var) + ' = [' + ', '.join(
                       expr_to_cpp_simple(expr) for expr in
                       (exprs if isinstance(exprs, list) else (exprs, ))) + ']'
                   for var, exprs in result_var_expr_equations), '\n'.join(
                       expr_to_cpp_simple(var) + ' = [' + ', '.join(
                           expr_to_cpp_simple(expr)
                           for expr in (exprs if isinstance(exprs, list) else
                                        (exprs, ))) + ']'
                       for var, exprs in result_expanded_var_expr_equations)))

    return UnificationResult(UnificationResultKind.CERTAIN,
                             result_var_expr_equations,
                             result_expanded_var_expr_equations)
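
An illustrative sketch (hypothetical names, not the project's own helpers) of the renaming pattern _unify relies on: a bidict maps original literal names to fresh unique names, and its .inv view restores the original names once unification is done.

from bidict import bidict

renaming = bidict({'T': '__u0', 'U': '__u1'})
renamed = [renaming[name] for name in ('T', 'U')]      # ['__u0', '__u1']
restored = [renaming.inv[name] for name in renamed]    # back to ['T', 'U']
assert restored == ['T', 'U']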
Example #47
0
    def __init__(self, heteroatom_list):
        super().__init__(heteroatom_list)
        self.dict = {}
        for i, v in enumerate(self.heteroatom_list):
            self.dict[v] = i
        self.dict = bidict(self.dict)
Example #48
0
Gan = ["甲", "乙", "丙", "丁", "戊", "己", "庚", "辛", "壬", "癸"]
ten_deities = {
    '甲':
    bidict({
        '甲': '比肩',
        "乙": '劫',
        "丙": '食',
        "丁": '伤',
        "戊": '偏财',
        "己": '财',
        "庚": '杀',
        "辛": '官',
        "壬": '偏印',
        "癸": '印',
        "子": '沐浴',
        "丑": '冠带',
        "寅": '建',
        "卯": '帝旺',
        "辰": '衰',
        "巳": '病',
        "午": '死',
        "未": '墓',
        "申": '绝',
        "酉": '胎',
        "戌": '养',
        "亥": '长生'
    }),
    '乙':
    bidict({
        '甲': '劫',
        "乙": '比肩',
Example #49
0
the doctor is displeased
other situations
an abnormality is found


'''
# patient
classlist = [
    '打招呼', '描述病情', '个人信息', '询问病情', '怎么治疗', '询问医院地址', '怎么收费', '尝试过哪些治疗方法',
    '不方便接电话', '我的顾虑', '介绍其他医院费用低廉', '询问治疗效果', '不方便过来', '询问医保', '准备过来',
    '我考虑下关闭', '愤怒关闭'
]
classdict = {}
for cate in classlist:
    classdict[cate] = classlist.index(cate)

categories = bidict(classdict)

maxlen = 500  #400
embedding_dims = 60  #60
maxFeature = 5000  #10000
test_size = 0.05
train_epochs = 300
class_length = len(categories)

tokenizer = Tokenizer(filters='!"#$%&()*+,-./:;<=>?@[\]^_`{|}~\t\n!,。();’‘',
                      split=" ",
                      num_words=maxFeature)  #创建一个Tokenizer对象
modelfile = r'D:\dev\Command\datamining\embedding\word2vec\word2vec.model'
corpusfile = 'segment'
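
An equivalent, slightly more direct construction (a sketch, not part of the original script): enumerate() avoids the repeated list.index() scans and yields the same invertible mapping.

categories_alt = bidict({name: idx for idx, name in enumerate(classlist)})
assert categories_alt == categories
assert categories_alt.inv[0] == '打招呼'     # index -> category name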
Example #50
0
# TS 102 221 Section 9.5.1 / Table 9.3
pin_names = bidict({
    0x01: 'PIN1',
    0x02: 'PIN2',
    0x03: 'PIN3',
    0x04: 'PIN4',
    0x05: 'PIN5',
    0x06: 'PIN6',
    0x07: 'PIN7',
    0x08: 'PIN8',
    0x0a: 'ADM1',
    0x0b: 'ADM2',
    0x0c: 'ADM3',
    0x0d: 'ADM4',
    0x0e: 'ADM5',
    0x11: 'UNIVERSAL_PIN',
    0x81: '2PIN1',
    0x82: '2PIN2',
    0x83: '2PIN3',
    0x84: '2PIN4',
    0x85: '2PIN5',
    0x86: '2PIN6',
    0x87: '2PIN7',
    0x88: '2PIN8',
    0x8a: 'ADM6',
    0x8b: 'ADM7',
    0x8c: 'ADM8',
    0x8d: 'ADM9',
    0x8e: 'ADM10',
})
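
A brief usage sketch (not part of the original module): the same table answers both directions of the lookup.

assert pin_names[0x0a] == 'ADM1'                 # key reference -> name
assert pin_names.inv['UNIVERSAL_PIN'] == 0x11    # name -> key reference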
Example #51
0
def cut_until_limit(scc):
	global edges_to_remove
	global graph

	if len (scc) <= 1:
		return []
	elif len(scc) <= scc_size_limit:
		return [scc]
	else:
		try:
			sg = graph.subgraph(scc) # this graph is frozen. Cannot be modified!!!
			sg2 = graph.subgraph(scc) # this graph is frozen. Cannot be modified!!!
			sg = nx.DiGraph(sg)
			sg2 = nx.DiGraph(sg2)
			sg.remove_edges_from(edges_to_remove)
			print ('cutting this scc of size: #nodes', len(scc))
			print ('cutting this scc of size: #edges', sg.number_of_edges())

			# num_partitions =  int(len(scc) / ( scc_size_limit ))
			num_partitions = 2
			print ('to be divded into ', num_partitions, ' partitions')
			ele_to_index = bidict()
			index = 0
			for n in sg.nodes():
				if n not in ele_to_index.keys():
					ele_to_index[n] = index
					index += 1

			adjacency = {}
			for m in sg.nodes():
				adjacency.setdefault(ele_to_index[m], [])

			for (s, t) in sg.edges:
				adjacency.setdefault(ele_to_index[s], []).append(ele_to_index[t])

			# print ('adj-lst = ', len(adjacency))

			cuts, part_vert  = pymetis.part_graph(num_partitions, adjacency=adjacency)
			# print ('cuts = ', cuts)
			# print ('part ', part_vert)
			print ('finished!', flush=True)

			partition_sccs = []
			partitions = []
			for c in range(num_partitions):
				col_nodes = []
				for p in range(len(part_vert)):
					if part_vert[p] == c:
						col_nodes.append(ele_to_index.inverse[p])

				print (c, ' has size ', len(col_nodes))
				partitions.append(col_nodes)

			collect_edges_partition = []
			for p in partitions:
				sg_p = sg.subgraph(p)
				partition_sccs += compute_scc_from_graph(sg_p)
				collect_edges_partition += sg_p.edges()

			sg.remove_edges_from(collect_edges_partition)

			print ('edges removed during partitioning: ', len(sg.edges()))
			edges_to_remove += list(sg.edges()) # remove all those left
			graph.remove_edges_from(edges_to_remove)

			for s in partition_sccs:
				if len (s) >10:
					print ('partition scc size = ', len (s))

			collect_sccs = []
			for s in partition_sccs:
				collect_sccs += cut_until_limit(s)
			return collect_sccs
			# print ('=================', flush=True)

		except Exception as e:
			print ('error: ', e)
Example #52
0
atomics = reduce(atomic_split, corpus)

# split the corpus into words
def word_split(param1, param2):
    if isinstance(param1, list):
        return param1 + param2.split()
    else:
        return word_split(word_split([], param1), param2)

words = reduce(word_split, corpus)

# compute word frequencies and build the index
fd = FreqDist(words)

index = bidict()
pos = 0
for k, c in fd.items():
    index[k] = pos
    pos = pos + 1

# ===== use nltk's bigrams function to build the bigram matrix =====
grams = list(bigrams(words))

gc = np.zeros((fd.B(), fd.B()), dtype=np.int32)

# count bigram occurrences
for p1, p2 in grams:
    gc[index[p1], index[p2]] += 1

# compute bigram probabilities
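
The code for this last step is not included above; a minimal sketch of one way it could look (an assumption, not the original author's code), row-normalizing the count matrix gc so each row becomes a conditional probability distribution:

row_sums = gc.sum(axis=1, keepdims=True)
gp = np.divide(gc, row_sums, out=np.zeros(gc.shape, dtype=np.float64),
               where=row_sums != 0)   # rows with no outgoing bigrams stay zero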
Example #53
0
def generate_embedding(stations, G, top_k=6):
    station_id_to_idx = bidict()
    station_id_to_exp_idx = MyBidict()

    # station_id = 408134
    station_id = 400000
    station_data = read_station_data(station_id, stations.loc[station_id, "Lanes"])
    days_groups = get_days_datapoints(station_data)

    # one hot encoders
    _, day_label_encoder, day_one_hot_encoder = generate_one_hot_encoding(station_data.index.day)
    _, hour_label_encoder, hour_one_hot_encoder = generate_one_hot_encoding(station_data.index.hour)
    _, minutes_label_encoder, minutes_one_hot_encoder = generate_one_hot_encoding(station_data.index.minute)

    nodes = list(filter(lambda x: type(x) == int, G.nodes))
    num_exp = day_one_hot_encoder.active_features_.size
    seq_len = days_groups[0].size - 6
    features_len = 4 + day_one_hot_encoder.active_features_.size + hour_one_hot_encoder.active_features_.size + minutes_one_hot_encoder.active_features_.size

    input_embeddings = torch.FloatTensor(num_exp * len(nodes), seq_len, features_len).zero_()
    target_embeddings = torch.FloatTensor(num_exp * len(nodes), seq_len, 1).zero_()
    neighbor_embeddings = torch.FloatTensor(num_exp * len(nodes), top_k, seq_len, features_len).zero_()
    edge_type = torch.ones(num_exp * len(nodes), top_k, seq_len, 1)
    neigh_mask = torch.zeros(num_exp * len(nodes), top_k).byte()

    nodes_data = {}
    for node_idx, node in enumerate(nodes):
        if node in nodes_data:
            node_data = nodes_data[node]
        else:
            node_data = read_station_data(node, stations.loc[station_id, "Lanes"])
            assert not np.isnan(node_data.values).any()
            nodes_data[node] = node_data

        neighbors_data = []
        for neighbor_id, distance in G.neighbors(node):
            if neighbor_id in nodes_data:
                neighbor_data = nodes_data[neighbor_id]
            else:
                neighbor_data = read_station_data(neighbor_id, stations.loc[station_id, "Lanes"])
                assert not np.isnan(neighbor_data.values).any()
                nodes_data[neighbor_id] = neighbor_data
            neighbors_data.append((neighbor_id, neighbor_data))

        station_id_to_idx[node] = node_idx

        # node embedding
        for day_idx, day_timestep in enumerate(days_groups):
            day_one_hot, _, _ = one_hot_conversion(day_timestep.day, day_label_encoder, day_one_hot_encoder)
            hour_one_hot, _, _ = one_hot_conversion(day_timestep.hour, hour_label_encoder, hour_one_hot_encoder)
            minute_one_hot, _, _ = one_hot_conversion(day_timestep.minute, minutes_label_encoder, minutes_one_hot_encoder)

            node_data_value = np.concatenate([node_data.loc[day_timestep].values, day_one_hot, hour_one_hot, minute_one_hot], axis=1)
            input_embeddings[((node_idx * num_exp) + day_idx):((node_idx * num_exp) + day_idx + 1)] = torch.from_numpy(node_data_value[:-6])
            target_embeddings[((node_idx*num_exp)+day_idx):((node_idx*num_exp)+day_idx+1)] = torch.from_numpy(node_data_value[6:, 0])

            # neighbor embedding
            for neighbor_idx, (neighbor_id, neighbor_data) in enumerate(neighbors_data):
                try:
                    neighbor_data_value = np.concatenate([neighbor_data.loc[day_timestep].values, day_one_hot, hour_one_hot, minute_one_hot], axis=1)
                    neighbor_embeddings[((node_idx*num_exp)+day_idx), neighbor_idx] = torch.from_numpy(neighbor_data_value[:-6])
                except Exception as e:
                    print(neighbor_idx, neighbor_id, day_idx)
                    print(e)
                    raise e

        station_id_to_exp_idx[node] = list(range(node_idx * num_exp, (node_idx + 1) * num_exp))

        if node_idx % 10 == 0:
            print(node_idx)

    return input_embeddings, target_embeddings, neighbor_embeddings, edge_type, neigh_mask, station_id_to_idx, station_id_to_exp_idx
Example #54
0
def transfer_labels(x: AnnData,
                    ref: AnnData,
                    method: str = 'Scanpy Ingest',
                    inplace: Optional[bool] = True,
                    **kwargs) -> Optional[Union[AnnData, np.ndarray]]:
    """
    Transfer labels using a reference dataset.

    Parameters
    ----------
    x: AnnData object containing the data.

    ref: AnnData object with ref.obs['labels'] populated.

    method: String specifying the label transfer method to use. See
        https://github.com/ferrocactus/cellar/tree/master/doc for
        a full list of methods available.

    inplace: If set to True, update x.obs['labels'] in place; otherwise,
        return a new AnnData object.

    **kwargs: Additional parameters that will get passed to the
        clustering object as specified in method. For a full list
        see the documentation of the corresponding method.

    Returns
    -------
    If x is an AnnData object, will either return an AnnData object
    or None depending on the value of inplace.
    """
    # Validations
    is_AnnData = isinstance(x, AnnData) and isinstance(ref, AnnData)
    if not is_AnnData:
        raise InvalidArgument("x is not in AnnData format.")
    adata = x.copy() if not inplace else x

    if 'labels' not in ref.obs:
        raise InvalidArgument("labels not found in reference dataset.")

    _method_exists('align', method)

    # Create alignment object and get labels
    labels = wrap("align", method)().get(
        adata.X,
        adata.var.index.to_numpy().astype('U'), ref.X,
        ref.var.index.to_numpy().astype('U'),
        ref.obs['labels'].to_numpy().astype(np.int)).astype(np.int)

    # Populate entries
    adata.obs['labels'] = labels
    adata.uns['cluster_info'] = {}
    unq_labels = np.unique(labels)
    adata.uns['cluster_info']['unique_labels'] = unq_labels
    adata.uns['cluster_info']['n_clusters'] = len(unq_labels)
    adata.uns['cluster_info']['method'] = method
    adata.uns['cluster_info']['kwargs'] = kwargs
    adata.uns['cluster_names'] = bidict({i: str(i) for i in unq_labels})

    populate_subsets(adata)

    if not inplace:
        return adata
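
A hedged usage sketch (x and ref are hypothetical AnnData objects, with ref.obs['labels'] already populated):

labeled = transfer_labels(x, ref, method='Scanpy Ingest', inplace=False)
print(labeled.uns['cluster_info']['n_clusters'])
print(labeled.uns['cluster_names'])    # bidict: label id <-> display name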
Example #55
0
            return TERM.on_color(code)(' ')
    return TERM.on_color(7)(' ')


def print_map():
    order = range(1, 9)
    for y in order:
        chars = (ap_at_state(x, y, in_ascii=True) for x in order)
        print(''.join(chars))


# ==============  TRACES ========================

ACTION2ARROW = bidict({
    GW.NORTH_C: '↑',
    GW.SOUTH_C: '↓',
    GW.WEST_C: '←',
    GW.EAST_C: '→',
})


def print_trc(trc):
    actions, states = trc
    obs = (ap_at_state(*pos, in_ascii=True) for pos in states)
    print(''.join(''.join(x) for x in zip(actions, obs)))


def str2actions(vals):
    return [ACTION2ARROW.inv[c] for c in vals]


ACTIONS0 = "↑↑→↑↑↑→←←←←←←←←"
Example #56
0
File: calc.py Project: pikers/piker
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Handy financial calculations.
"""
import math
import itertools

from bidict import bidict

_mag2suffix = bidict({3: 'k', 6: 'M', 9: 'B'})


def humanize(number: float, digits: int = 1) -> str:
    '''
    Convert large numbers to something with at most ``digits`` and
    a letter suffix (eg. k: thousand, M: million, B: billion).

    '''
    try:
        float(number)
    except ValueError:
        return '0'

    if not number or number <= 0:
        return str(round(number, ndigits=digits))
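
The rest of humanize() is cut off here; a generic sketch (an assumption, not piker's actual implementation) of how a magnitude-to-suffix bidict like _mag2suffix can drive the conversion:

def humanize_sketch(number: float, digits: int = 1) -> str:
    mag = 3 * int(math.log10(abs(number)) // 3) if abs(number) >= 1 else 0
    mag = min(mag, max(_mag2suffix))             # clamp to the largest known suffix
    suffix = _mag2suffix.get(mag, '')
    return f"{round(number / 10**mag, ndigits=digits)}{suffix}"

assert humanize_sketch(1234) == '1.2k'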
Example #57
0
from bidict import bidict

ELE_TO_NUM = bidict({
    'H': 1,
    'C': 6,
    'N': 7,
    'O': 8,
    'F': 9,
    'P': 15,
    'S': 16,
    'Cl': 17,
})
TYPE_ORDER = ['1', 'Ar', '2', '3']
BOND_LENGTHS = {
    "C": {
        "3":   0.62,
        "2":   0.69,
        "Ar": 0.72,
        "1":   0.85,
    },
    "Cl": {
        "1":   1.045,
    },
    "F": {
        "1":   1.23,
    },
    "H": {
        "1":   0.6,
    },
    "N": {
        "3":   0.565,
Example #58
0
from bidict import bidict

classes = bidict({
    'Axis': 0,
    'Bearing': 1,
    'Bearing_Box': 2,
    'Distance_Tube': 3,
    'F20_20_B': 4,
    'F20_20_G': 5,
    'M20': 6,
    'M20_100': 7,
    'M30': 8,
    'Motor': 9,
    'R20': 10,
    'S40_40_B': 11,
    'S_40_40_G': 12,
    'nothing': 13,
    'unspecified': 255
})  #255 is translated to 13 at some point

BATCH_SIZE = 6
PRINT_EVERYTHING = False
OUTPUT_LAYERS = 2  # classes['nothing'] + 1 # 13 image types + nothing = 14

#HOLDOUT_OBJECT_DURING_TRAINING = None # [0.0, 4.0] # MUST BE list of DOUBLE, or None
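
A sketch of the remap hinted at in the comment above (an assumption about how it is done elsewhere, not code from this config): 'unspecified' (255) is folded into 'nothing' (13) before training.

def remap_label(raw_label):
    if raw_label == classes['unspecified']:
        return classes['nothing']
    return raw_label

assert classes.inv[remap_label(255)] == 'nothing'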
Example #59
0
class CatConverter:
    """Manages entries of .cat files"""

    mapper = bidict.bidict([('a', '-1'), ('b', '-2'), ('c', '-3'), ('d', '-4'),
                            ('e', '-5'), ('P', '25'), ('f', '-6'), ('g', '-7'),
                            ('h', '-8'), ('i', '-9'), ('j', '-10'),
                            ('k', '-11'), ('l', '-12'), ('m', '-13'),
                            ('n', '-14'), ('o', '-15'), ('p', '-16'),
                            ('A', '10'), ('B', '11'), ('C', '12'), ('D', '13'),
                            ('E', '14'), ('F', '15'), ('G', '16'), ('H', '17'),
                            ('I', '18'), ('J', '19'), ('K', '20'), ('L', '21'),
                            ('M', '22'), ('N', '23'), ('O', '24')])

    @staticmethod
    def __decode_quant(str_q):
        """replace a -> -1, A -> 10, etc."""

        str_s = str_q[0:1]
        if str_s in CatConverter.mapper:
            str_q = str_q.replace(str_s, CatConverter.mapper[str_s], 1)

        return str_q

    @staticmethod
    def __encode_quant(str_q, thresh=3):
        """replace -1 -> a, 10 -> A, etc."""

        if len(str_q) >= thresh:
            str_s = str_q[0:2]
            if (str_s in CatConverter.mapper.inv):
                str_q = str_q.replace(str_s, CatConverter.mapper.inv[str_s], 1)

        return str_q

    @staticmethod
    def __read_quanta(str_quanta, int_fmt):
        """convert quanta from .cat to dict 
           returns (dict_upper, dict_lower) 
        """
        dict_ql = {}
        dict_qu = {}

        INT_C = 6
        for i in range(0, INT_C):

            str_qu = str_quanta[i * 2:(i + 1) * 2]
            str_ql = str_quanta[(i + INT_C) * 2:(i + INT_C + 1) * 2]

            if str_qu != "  " and str_ql != "  ":
                str_qu = CatConverter.__decode_quant(str_qu)
                str_ql = CatConverter.__decode_quant(str_ql)

                headers = quanta_headers(int_fmt)
                dict_ql[headers[i]] = int(str_ql)
                dict_qu[headers[i]] = int(str_qu)
            else:
                break

        return (dict_qu, dict_ql)

    @staticmethod
    def __write_quanta(dict_qu, dict_ql, int_fmt):
        """convert quanta from (dict,dict) to .cat str"""

        INT_C = 6
        str_quanta = ""

        headers = quanta_headers(int_fmt)[0:len(dict_qu)]
        for str_q in ["%2d" % dict_qu[x] for x in headers]:
            str_q = CatConverter.__encode_quant(str_q)
            str_quanta += str_q
        for i in range(len(headers), INT_C):
            str_quanta += "  "

        headers = quanta_headers(int_fmt)[0:len(dict_ql)]
        for str_q in ["%2d" % dict_ql[x] for x in headers]:
            str_q = CatConverter.__encode_quant(str_q)
            str_quanta += str_q
        for i in range(len(headers), INT_C):
            str_quanta += "  "

        return str_quanta

    @staticmethod
    def str2line(str_line):
        """str to Line object"""

        obj_line = Line()

        obj_line.freq = float(str_line[0:13])
        obj_line.freq_err = float(str_line[13:21])

        obj_line.log_I = float(str_line[21:29])
        obj_line.int_deg_freedom = int(str_line[29:31])

        obj_line.E = float(str_line[31:41])
        obj_line.g = int(CatConverter.__decode_quant(str_line[41:44]))

        obj_line.int_cat_tag = int(str_line[44:51])

        str_q = str_line[55:79]
        int_fmt = int(str_line[51:55])
        dict_qu, dict_ql = CatConverter.__read_quanta(str_q, int_fmt)

        obj_line.q_upper = dict_qu
        obj_line.q_lower = dict_ql
        obj_line.int_fmt = int_fmt

        return obj_line

    @staticmethod
    def line2str(obj_line):
        """Line object to str"""

        str_out = ""

        str_quanta = CatConverter.__write_quanta(obj_line.q_upper,
                                                 obj_line.q_lower,
                                                 obj_line.int_fmt)

        str_out += "%13.4f%8.4f" % (obj_line.freq, obj_line.freq_err)
        str_out += "%8.4f%2d" % (obj_line.log_I, obj_line.int_deg_freedom)
        str_out += "%10.4f%3s" % (
            obj_line.E, CatConverter.__encode_quant(str(obj_line.g), thresh=4))
        str_out += "%7d" % (obj_line.int_cat_tag)
        str_out += "%4d%s " % (obj_line.int_fmt, str_quanta)

        return str_out
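
A round-trip sketch of the quantum-number packing above (driving the class-level mapper directly instead of the name-mangled private helpers): the leading letter of a packed quantum number expands to a two-character integer and back.

assert CatConverter.mapper['a'] == '-1' and CatConverter.mapper.inv['10'] == 'A'
decoded = 'a1'.replace('a', CatConverter.mapper['a'], 1)                            # 'a1' -> '-11'
encoded = decoded.replace(decoded[0:2], CatConverter.mapper.inv[decoded[0:2]], 1)   # '-11' -> 'a1'
assert decoded == '-11' and encoded == 'a1'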
Example #60
0
import pytest
from collections import Counter, OrderedDict, defaultdict
from bidict import bidict, frozenbidict, namedbidict
from itertools import product

d = dict(H='hydrogen', He='helium')
c = Counter(d)
o = OrderedDict(d)
dd = defaultdict(int, d)


class dictsubclass(dict):
    pass


s = dictsubclass(d)

b = bidict(d)
f = frozenbidict(d)
n = namedbidict('named', 'keys', 'vals')(d)

dicts = (d, c, o, dd, s)
bidicts = (b, f, n)


@pytest.mark.parametrize('d, b', product(dicts, bidicts))
def test_eq(d, b):
    assert d == b
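
A companion sketch (not in the original test module) checking that each bidict's inverse view stays consistent with the forward mapping:

@pytest.mark.parametrize('b', bidicts)
def test_inv_roundtrip(b):
    assert all(b.inv[v] == k for k, v in b.items())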