def __init__(self, sign, mod, name, *keys):
    """Create a difficulty level and register it in the shared registry.

    The instance is stored in ``Difficulte.MAP`` under the normalized
    (``util.snorm``) form of ``name`` and of every alias in ``keys``.
    """
    self.sign = sign
    self.mod = mod
    self.name = name
    # Register the canonical name first, then each alias, all normalized.
    for alias in (name,) + keys:
        Difficulte.MAP[util.snorm(alias)] = self
def __init__(self, name, degats, aptitude, *keys):
    """Create a weapon and register it in ``Arme.TOUTES``.

    The weapon is indexed under the normalized form of its name and of
    every alias in ``keys``; the damage expression is parsed once here.
    """
    self.name = name
    self.degats = degats
    self.aptitude = aptitude
    # Pre-parse the damage dice expression at construction time.
    self.parsed_degats = regles.parse_dice(degats)
    for alias in (name,) + keys:
        Arme.TOUTES[util.snorm(alias)] = self
    self.keys = keys
def _junk(self, raw):
    """Classify a free-form command token.

    Routing order: known difficulty name -> known character name ->
    (for the current character) stat/skill reference, advantage,
    disadvantage -> otherwise forwarded to ``self.junk``.

    Bug fix: the original tested membership with the *normalized* token
    but then indexed the maps with the *raw* token
    (``Difficulte.MAP[raw]``, ``ref_map[raw]``), which raises KeyError
    whenever normalization changes the spelling. Lookups now reuse the
    same normalized key that passed the membership test.
    """
    norm = util.snorm(raw)
    if norm in regles.Difficulte.MAP:
        self.difficulte(raw, regles.Difficulte.MAP[norm])
    elif self.client.has_perso(raw):
        self.persos.append((self.client.get_perso(raw), None))
    else:
        le_perso, _uid = self.le_perso()
        if le_perso is None:
            self.junk(raw)
        elif norm in le_perso.ref_map:
            self.score(raw, le_perso.ref_map[norm])
        elif norm in le_perso.avantages.value:
            self.bonus(raw)
        elif norm in le_perso.desavantages.value:
            self.malus(raw)
        else:
            self.junk(raw)
def parse_line(self, line):
    """Parse one character-sheet line.

    Tries ``Perso.SHORT_PATTERN`` first (all named groups become values),
    then falls back to the generic key/value ``Perso.LINE_PATTERN``.
    Lines matching neither pattern are silently ignored.
    """
    short = Perso.SHORT_PATTERN.match(line)
    if short is not None:
        for key, value in short.groupdict().items():
            self.setv(key, value)
        return
    generic = Perso.LINE_PATTERN.match(line)
    if generic is not None:
        self.setv(util.snorm(generic.group('k')),
                  generic.group('v').strip())
async def get_reply(self, message):
    """Handle a 'reload' command; return a status tuple, or () if not ours.

    Tokens after the 6-character 'reload' prefix choose what to refresh
    ('pj' and/or 'pnj').
    """
    content = message.content
    if not content.startswith('reload'):
        return ()
    tokens = {util.snorm(t) for t in content[6:].split(' ')}
    update_pj = 'pj' in tokens
    update_pnj = 'pnj' in tokens
    # NOTE(review): `client` is resolved from an enclosing scope, not
    # `self` — presumably a module-level client object; confirm in file.
    npj, npnj = client.load_data(update_pj, update_pnj)
    return ('%d PJ, %d PNJ' % (npj, npnj),)
def load_data(self, update_pj=False, update_pnj=False):
    """(Re)load the MJ user id and the PJ/PNJ character sheets.

    Existing entries are kept unless the matching ``update_*`` flag is
    True. Returns ``(number of PJ loaded, number of PNJ loaded)``.
    """
    with open(self.mj_file) as f:
        self.mj_userid = int(f.read().strip())
    npj = 0
    for pj, path in perso.load(self.pj_path):
        pj.niveau.value = 'pj'
        # The file name minus a 4-char suffix encodes the owner's user id
        # — presumably a '.txt'-like extension; confirm against data dir.
        userid = int(os.path.basename(path)[:-4])
        if update_pj or userid not in self.pj_par_userid:
            self.pj_par_userid[userid] = pj
            self.add_perso(pj)
            npj += 1
    npnj = 0
    for pnj, _path in perso.load(self.pnj_path):
        if update_pnj or util.snorm(pnj.nom.value) not in self.persos_par_nom:
            self.add_perso(pnj)
            npnj += 1
    return npj, npnj
# NOTE(review): truncated script chunk — the text below is cut off inside the
# `feed = {` dict literal, so it cannot be safely reformatted or rewritten here.
# Python-2-era TensorFlow training loop: iterates `training_iters` epochs over
# `train_path`, normalizing each line with util.snorm and feeding fixed-size
# windows from util.next_batch into the graph (x, y_, istate placeholders).
# `line.decode('utf-8')` and the commented-out `print` statements indicate
# Python 2 — TODO confirm before porting.
checkpoint_file = 'segm.ckpt' if validation_path: validation_data = util.get_validation_data(validation_path, char_dic, vocab_size, n_steps, padd) seq = 0 while seq < training_iters: c_istate = np.zeros((batch_size, 2 * n_hidden)) i = 0 fid = util.open_file(train_path, 'r') for line in fid: line = line.strip() if line == "": continue line = line.decode('utf-8') sentence = util.snorm(line) pos = 0 while pos != -1: batch_xs, batch_ys, next_pos, count = util.next_batch( sentence, pos, char_dic, vocab_size, n_steps, padd) ''' print 'window : ' + sentence[pos:pos+n_steps].encode('utf-8') print 'count : ' + str(count) print 'next_pos : ' + str(next_pos) print batch_ys print batch_xs ''' feed = { x: batch_xs, y_: batch_ys, istate: c_istate,
def has_perso(self, nom):
    """Return True if a character is registered under this (normalized) name."""
    key = util.snorm(nom)
    return key in self.persos_par_nom
def get_perso(self, nom):
    """Return the character registered under this name (normalized).

    Raises KeyError if no such character is known.
    """
    key = util.snorm(nom)
    return self.persos_par_nom[key]
def add_perso(self, p):
    """Register a character under the normalized form of its name."""
    key = util.snorm(p.nom.value)
    self.persos_par_nom[key] = p
# NOTE(review): script chunk that runs past the visible text — `seq` is never
# incremented in this span, so the `while seq < training_iters` loop's tail
# (and presumably checkpoint saving via `saver`) lies outside this view; not
# safe to reformat or rewrite here. Embedding variant of the training loop:
# feeds windows from util.next_batch_emb (char ids + id2emb embeddings) into
# the graph and runs one optimizer step per window. `line.decode('utf-8')`
# and the commented-out `print` statements indicate Python 2 — TODO confirm.
saver = tf.train.Saver() # save all variables checkpoint_dir = model_dir checkpoint_file = 'segm.ckpt' validation_data = util.get_validation_data_emb(validation_path, char_dic, id2emb, n_steps, padd) seq = 0 while seq < training_iters : c_istate = np.zeros((batch_size, 2*n_hidden)) i = 0 fid = util.open_file(train_path, 'r') for line in fid : line = line.strip() if line == "" : continue line = line.decode('utf-8') sentence = util.snorm(line) pos = 0 while pos != -1 : batch_xs, batch_ys, next_pos, count = util.next_batch_emb(sentence, pos, char_dic, id2emb, n_steps, padd) ''' print 'window : ' + sentence[pos:pos+n_steps].encode('utf-8') print 'count : ' + str(count) print 'next_pos : ' + str(next_pos) print batch_ys print batch_xs ''' feed={x: batch_xs, y_: batch_ys, istate: c_istate, early_stop:count} sess.run(optimizer, feed_dict=feed) pos = next_pos sys.stderr.write('%s th sentence in %s th iterations ... done\n' % (i, seq)) i += 1