def checking(infos):
    """Match the lowercased message text against this bot's trigger file.

    Returns the matching trigger string, False when nothing matches,
    or None (implicitly) when an exception occurs — the error is logged.
    """
    try:
        kl = infos.text.lower()
        # Load the trigger definitions ("t") for this bot and language.
        cont = LowLevel.jfile("t", infos.bid, infos.user.lang_n)
        # Exact-match ("equals") triggers; "|" separates alternatives.
        for trigger in cont["equals"]:
            if "|" in trigger:
                for part in trigger.split("|"):
                    if re.search("^%s$" % regexa(escape(part)), kl):
                        return trigger
            elif re.search("^%s$" % regexa(escape(trigger)), kl):
                return trigger
        # Substring-style ("contents") triggers, delegated to cicler().
        for content in cont["contents"]:
            x = cicler(content, kl)
            if x:
                return x
        return False
    except Exception as err:
        Log.e(err)
def status(bot, update):
    """Notify the owner when this bot is added to or removed from a group.

    `bot` is the bot's own user dict (must contain "id"); `update` is a
    raw Telegram update. Any error is logged together with the update.
    """
    try:
        g_name = update["message"]["chat"]["title"]
        by = update["message"]["from"]["username"]
        byid = update["message"]["from"]["id"]
        gid = update["message"]["chat"]["id"]
        if update["message"]["new_chat_members"]:
            join_user_name = update["message"]["new_chat_members"][0]['first_name']
            if "username" in update["message"]["new_chat_members"][0]:
                join_user_username = update["message"]["new_chat_members"][0]['username']
            else:
                # Fall back to the first name when no username is set.
                join_user_username = join_user_name
            join_user_id = update["message"]["new_chat_members"][0]['id']
            # Only report when the joined member is this bot itself.
            if join_user_id == bot["id"]:
                text = "Aggiunta a: %s\nUtente: @%s" % (g_name, by)
                bpht = None  # ToDo Get Propic Method
                if bpht:
                    HTTPLL.sendPhoto(Manager.get_main_bot_token(), Manager.get_owner_id(), bpht, caption=text)
                else:
                    HTTPLL.sendMessage(Manager.get_main_bot_token(), Manager.get_owner_id(), text)
        elif update["message"]["left_chat_member"]:
            left_user_name = update["message"]["left_chat_member"]['first_name']
            left_user_id = update["message"]["left_chat_member"]['id']
            if left_user_id == bot["id"]:
                HTTPLL.sendMessage(Manager.get_main_bot_token(), Manager.get_owner_id(),
                                   text="Rimossa da: %s\nUtente @%s" % (g_name, by))
                Log.a("[%s] Rimossa da un gruppo da %s" % (bot["first_name"], by))
    except Exception as err:
        Log.e(err)
        pprint.pprint(update)
def set_symbol(infos):
    """Set the single-character command symbol for this bot.

    Validates that the message text is exactly one character, then
    persists it under "bot_comm_symbol" in the language-appropriate
    triggers JSON file.
    """
    say = infos.reply
    try:
        if infos.user.lang_n != 0:
            triggers = "triggers_eng.json"
        else:
            triggers = "triggers.json"
        if infos.text == "":
            return say("Devi dirmi il simbolo con cui riconoscero' i comandi!")
        if len(infos.text) > 1:
            return say("Il simbolo puo' essere di un solo carattere!")
        trigs = json.loads(
            open("Files/bot_files/%s/%s" % (infos.bid, triggers)).read())
        trigs["bot_comm_symbol"] = infos.text
        with open("Files/bot_files/%s/%s" % (infos.bid, triggers), "w") as file_d:
            file_d.write(json.dumps(trigs))
        say("*%s* impostato come simbolo per i comandi!" % infos.text, markdown=True)
    except Exception as err:
        say("S-si è verificato un errore...")
        Log.e(err)
def cicler(inter, kl, interact=False):
    """Match the trigger expression `inter` against lowercased text `kl`.

    "&" joins required parts (all must be present); "|" joins
    alternatives (any one suffices). A plain trigger matches on a word
    boundary; when `interact` is False a whole-message exact match is
    rejected (presumably those are handled as "equals" triggers
    elsewhere — see checking()). Returns `inter` on a match, else None.
    """
    try:
        if "&" in inter:
            full = True
            for part in inter.split("&"):
                if not re.search(boundary(regexa(escape(part))), kl):
                    full = False
            if full:
                return inter
        elif "|" in inter:
            for part in inter.split("|"):
                if re.search(boundary(regexa(escape(part))), kl):
                    return inter
        else:
            if re.search(boundary(regexa(escape(inter))), kl):
                if not interact:
                    # Reject an exact whole-message match in non-interactive mode.
                    if not re.search(
                            "^" + boundary(regexa(escape(inter))) + r"$", kl):
                        return inter
                else:
                    return inter
        return None
    except Exception as err:
        Log.e("Errore: %s inter: %s" % (err, inter))
        return None
def _parse_douban(page):
    """Parse a Douban group discussion listing page into Link records.

    NOTE(review): legacy Python-2-era code — hashlib.md5(url) requires
    bytes on Python 3, and BeautifulSoup(page) without an explicit
    parser is deprecated usage; confirm the target interpreter.
    """
    soup = BeautifulSoup(page)
    ret = []
    try:
        table = soup.findAll('table', {'class': 'olt'})[0]
        '''
        nexturl = soup.findAll('div', {'class':'paginator'})[0]\
            .findAll('span', {'class':'next'})[0]
        try:
            nexturl = nexturl.findAll('link')[0]['href']
        except:
            nexturl = ''
        '''
        for row in PageParser._clean(table)[1:]:  # skip table header
            try:
                url, _, _, reply_time = PageParser._clean(row)
            except:
                continue
            url = PageParser._clean(url)[0]['href'].strip()
            reply_time = PageParser._clean(reply_time)[0].strip()
            # Same-year posts show only "MM-DD HH:MM"; prepend the year.
            if ':' in reply_time:
                year = time.strftime('%Y-', time.localtime(time.time()))
                reply_time = time.strptime(year + reply_time, '%Y-%m-%d %H:%M')
            else:
                reply_time = time.strptime(reply_time, '%Y-%m-%d')
            reply_time = int(time.mktime(reply_time))
            hashurl = hashlib.md5(url).hexdigest()
            ret.append(Link(
                hashurl=hashurl,
                url=url,
                reply_time=reply_time
            ))
    except IndexError:
        Logger.error('index error!')
    return ret  # , nexturl
def make_reply(self, msg, username, first_name):
    """Build a canned reply for a few recognized messages.

    Returns the reply string, or None when `msg` is None or matches
    nothing. Each recognized message is also logged via Log.a.
    """
    reply = None
    if msg is not None:
        if msg == "ping":
            Log.a("pong")
            reply = "pong"
            return reply
        if msg == "info":
            Log.a("info")
            reply = "Shamiko-Project, version 0.0.5.1"
            return reply
        # BUGFIX: the original tested `"give" and "cookie" in msg`, which
        # evaluates as `"cookie" in msg` because the literal "give" is
        # always truthy; both words must actually appear in the message.
        if "give" in msg and "cookie" in msg:
            Log.a("cookie")
            reply = "Of course!"
            return reply
        if msg == "hello there":
            Log.a("Hello there")
            reply = "Hello @" + username + ". I am Yuko ( ^ ω ^)"
            return reply
        if msg == "hello":
            Log.a("hello")
            reply = "Hi " + first_name + " ^^"
            return reply
    return reply
def jfile(desc, bid, lang):
    """Load one of a bot's JSON data files by descriptor.

    desc: "d" = dialogs, "t" = triggers, "c" = conditions.
    lang 0 selects the Italian file, anything else the English one.
    Returns the parsed data, False for a null bid, or None on error.
    """
    try:
        if not bid:
            print("Impossibile leggere con bid nullo.")
            return False
        lang = int(lang)
        f = None
        if desc == "d":
            if lang == 0:
                try:
                    return read("dialogs.json", bid)
                except:
                    pass
            # English dialogs, falling back to the Italian file on error.
            try:
                return read("dialogs_eng.json", bid)
            except:
                return read("dialogs.json", bid)
        if desc == "t":
            if lang == 0:
                return read("triggers.json", bid)
            return read("triggers_eng.json", bid)
        if desc == "c":
            if lang == 0:
                return read("condizioni.json", bid)
            return read("condizioni_eng.json", bid)
        # NOTE(review): f is never reassigned, so any unknown descriptor
        # prints and returns None here; the json.loads line below is
        # unreachable dead code — left unchanged.
        if not f:
            return print("Descrittore errato (%s)." % desc)
        return json.loads(open("Files/bot_files/%s/%s" % (bid, f)).read())
    except Exception as err:
        Log.w("File non trovato? %s" % err)
        return None
def del_risps(token, section, lang_n=None):
    """Delete every reply stored under a trigger/section.

    Returns a (message, success) tuple; the message is user-facing
    Italian text. The bot id is the part of the token before ":".
    """
    try:
        bid = token.split(":")[0]
        if not lang_n:
            lang_n = 0
        if lang_n != 0:
            dialogs = "dialogs_eng.json"
        else:
            dialogs = "dialogs.json"
        if section == "":
            return "Devi specificare la sezione/trigger da cui eliminare le risposte.", False
        trigger = section.lower()
        dials = json.loads(
            open("Files/bot_files/%s/%s" % (bid, dialogs)).read())
        if trigger not in dials:
            return "non ho trovato il trigger richiesto (%s)" % trigger, False
        if not dials[trigger]:
            return "Non ci sono gia' frasi in questa sezione.", False
        dials[trigger] = []
        with open("Files/bot_files/%s/%s" % (bid, dialogs), "w") as file_d:
            file_d.write(json.dumps(dials))
        return "Risposte dal trigger %s eliminate." % trigger, True
    except Exception as err:
        Log.e(err)
        return "S-si è verificato un errore...", False
def main_stage1():
    """Stage-1 training: initialize backbone, the two branches and centroids.

    Builds DFPNet + DFPLoss, optionally resumes from a checkpoint,
    trains for args.stage1_es epochs, then evaluates on the test set.
    Returns {"net": net, "distance": distance_results}.
    """
    print(f"\nStart Stage-1 training ...\n")
    # for initializing backbone, two branches, and centroids.
    start_epoch = 0  # start from epoch 0 or last checkpoint epoch
    # Model
    print('==> Building model..')
    net = DFPNet(backbone=args.arch, num_classes=args.train_class_num,
                 embed_dim=args.embed_dim, distance=args.distance, scaled=args.scaled)
    criterion = DFPLoss(alpha=args.alpha, beta=args.beta)
    optimizer = optim.SGD(net.parameters(), lr=args.stage1_lr, momentum=0.9, weight_decay=5e-4)
    net = net.to(device)
    if device == 'cuda':
        net = torch.nn.DataParallel(net)
        cudnn.benchmark = True
    if args.stage1_resume:
        # Load checkpoint.
        if os.path.isfile(args.stage1_resume):
            print('==> Resuming from checkpoint..')
            checkpoint = torch.load(args.stage1_resume)
            net.load_state_dict(checkpoint['net'])
            start_epoch = checkpoint['epoch']
            logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'), resume=True)
        else:
            # NOTE(review): message prints args.resume, not args.stage1_resume;
            # logger is also unbound on this path — confirm intended.
            print("=> no checkpoint found at '{}'".format(args.resume))
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'))
        logger.set_names(['Epoch', 'Train Loss', 'Softmax Loss', 'Within Loss',
                          'Between Loss', 'Train Acc.'])
    if not args.evaluate:
        for epoch in range(start_epoch, args.stage1_es):
            adjust_learning_rate(optimizer, epoch, args.stage1_lr, step=15)
            print('\nStage_1 Epoch: %d | Learning rate: %f ' % (epoch + 1, optimizer.param_groups[0]['lr']))
            train_out = stage1_train(net, trainloader, optimizer, criterion, device)
            save_model(net, epoch, os.path.join(args.checkpoint, 'stage_1_last_model.pth'))
            logger.append([epoch + 1, train_out["train_loss"], train_out["cls_loss"],
                           train_out["dis_loss_within"], train_out["dis_loss_between"],
                           train_out["accuracy"]])
            if args.plot:
                plot_feature(net, trainloader, device, args.plotfolder1, epoch=epoch,
                             plot_class_num=args.train_class_num, maximum=args.plot_max,
                             plot_quality=args.plot_quality, normalized=args.plot_normalized)
        if args.plot:
            # plot the test set
            plot_feature(net, testloader, device, args.plotfolder1,
                         epoch="test", plot_class_num=args.train_class_num + 1,
                         maximum=args.plot_max, plot_quality=args.plot_quality,
                         normalized=args.plot_normalized)
        # calculating distances for last epoch
        distance_results = plot_distance(net, trainloader, device, args)
        logger.close()
    print(f"\nFinish Stage-1 training...\n")
    print("===> Evaluating ...")
    stage1_test(net, testloader, device)
    # NOTE(review): distance_results is unbound when args.evaluate is set.
    return {"net": net, "distance": distance_results}
def pingt():
    """Ping the configured IP once (40-byte payload); return avg RTT in ms."""
    response_list = ping(pars.ReadSettings("LowLevel", "ping", "ping_ip"), size=40, count=1)
    pingr = response_list.rtt_avg_ms
    Log.i("response time: " + str(pingr) + " ms")
    return pingr
def __init__(self, token, cid, uid):
    """Resolve admin status and permission flags of user `uid` in chat `cid`.

    Positive chat ids denote private chats, where no admin rights apply.
    A "creator" gets every permission in the module-level `permissions`
    list; otherwise each flag is copied from the admin dict (missing
    keys default to False). On error is_admin is reset to False.
    """
    if cid > 0:
        self.is_admin = False
        return
    admin = get_admin(token, cid, uid)
    if not admin:
        self.is_admin = False
        return
    self.is_admin = True
    try:
        if admin["status"] == "creator":
            for permission in permissions:
                setattr(self, permission, True)
        else:
            for permission in permissions:
                if permission in admin:
                    setattr(self, permission, admin[permission])
                else:
                    setattr(self, permission, False)
    except Exception as err:
        self.is_admin = False
        Log.e(err)
        pprint(admin)
        time.sleep(1)
def set_antispam_time(infos):
    """Set the bot's antispam interval (1.2–10 seconds) in the triggers file."""
    say = infos.reply
    try:
        if infos.user.lang_n != 0:
            triggers = "triggers_eng.json"
        else:
            triggers = "triggers.json"
        if infos.text == "":
            return say("Devi dirmi un numero che indica il tempo di antispam!")
        try:
            infos.text = float(infos.text)
        except ValueError:
            return say(
                "L'antispam time deve essere un numero intero o un decimale!")
        if infos.text > 10:
            return say("L'antispam time deve essere compreso tra 1.2 e 10!")
        if infos.text < 1.2:
            return say("L'antispam time deve essere compreso tra 1.2 e 10!")
        trigs = json.loads(
            open("Files/bot_files/%s/%s" % (infos.bid, triggers)).read())
        trigs["antispam time"] = infos.text
        with open("Files/bot_files/%s/%s" % (infos.bid, triggers), "w") as file_d:
            file_d.write(json.dumps(trigs))
        say("*%s* impostato come antispam time!" % infos.text, markdown=True)
    except Exception as err:
        say("S-si è verificato un errore...")
        Log.e(err)
def main_stage1():
    """Stage-1 plain cross-entropy training of the backbone network.

    Builds the Network, optionally resumes from a checkpoint, trains for
    args.stage1_es epochs logging per-epoch stats, and returns the net.
    """
    print(f"\nStart Stage-1 training...\n")
    start_epoch = 0  # start from epoch 0 or last checkpoint epoch
    # data loader
    # Model
    print('==> Building model..')
    net = Network(backbone=args.arch, embed_dim=512, num_classes=args.train_class_num,
                  use_fc=False, attmodule=False, classifier='dotproduct',
                  backbone_fc=False, data_shape=4)
    # net = models.__dict__[args.arch](num_classes=args.train_class_num)  # CIFAR 100
    net = net.to(device)
    if device == 'cuda':
        net = torch.nn.DataParallel(net)
        cudnn.benchmark = True
    if args.stage1_resume:
        # Load checkpoint.
        if os.path.isfile(args.stage1_resume):
            print('==> Resuming from checkpoint..')
            checkpoint = torch.load(args.stage1_resume)
            net.load_state_dict(checkpoint['net'])
            # best_acc = checkpoint['acc']
            # print("BEST_ACCURACY: "+str(best_acc))
            start_epoch = checkpoint['epoch']
            logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'), resume=True)
        else:
            # NOTE(review): prints args.resume, not args.stage1_resume, and
            # leaves logger unbound — confirm intended.
            print("=> no checkpoint found at '{}'".format(args.resume))
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'))
        logger.set_names(
            ['Epoch', 'Learning Rate', 'Train Loss', 'Train Acc.'])
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=5e-4)
    for epoch in range(start_epoch, args.stage1_es):
        print('\nStage_1 Epoch: %d Learning rate: %f' % (epoch + 1, optimizer.param_groups[0]['lr']))
        adjust_learning_rate(optimizer, epoch, args.lr, step=10)
        train_loss, train_acc = stage1_train(net, trainloader, optimizer, criterion, device)
        save_model(net, None, epoch, os.path.join(args.checkpoint, 'stage_1_last_model.pth'))
        logger.append([
            epoch + 1, optimizer.param_groups[0]['lr'], train_loss, train_acc
        ])
    logger.close()
    print(f"\nFinish Stage-1 training...\n")
    return net
def rnd_elab(infos, text):
    """Expand every "rnd[min,max]" token in `text` with a random integer.

    At most 10 substitutions are performed; exceeding that limit, or a
    min greater than max, deliberately raises (via 0/0) into the except
    handler, which warns the bot owner and returns None.
    """
    try:
        x = 0
        while "rnd[" in text:
            if x > 10:
                x = 0 / 0  # intentional ZeroDivisionError: too many tokens
            minmax = text.split("rnd[")[1]
            minn = int(minmax.split(",")[0])
            maxx = int(minmax.split(",")[1].split("]")[0])
            if minn > maxx:
                x = 0 / 0  # intentional: invalid range
            num = random.randint(minn, maxx)
            # The token may be written with or without a space after the comma.
            if "rnd[%s,%s]" % (minn, maxx) in text:
                text = text.replace("rnd[%s,%s]" % (minn, maxx), str(num), 1)
            elif "rnd[%s, %s]" % (minn, maxx) in text:
                text = text.replace("rnd[%s, %s]" % (minn, maxx), str(num), 1)
            x += 1
        return text
    except Exception:
        warn = get_phrase("rnd[ err")
        HTTPLL.sendMessage(infos.token, chat_id=Manager.get_prop_id(infos.token), text=warn)
        Log.d("[Bot: @%s | %s] rnd error" % (infos.bid, infos.username))
        return None
def restore(content, bid, lang_n):
    """Restore a bot's dialogs and triggers from an encoded backup blob.

    Returns an (ok, message) tuple; the message is user-facing Italian.
    """
    try:
        if lang_n != 0:
            dialogs = "dialogs_eng.json"
            triggers = "triggers_eng.json"
        else:
            dialogs = "dialogs.json"
            triggers = "triggers.json"
        # Backups are encoded with the "kitsu" key (see backup()).
        new = decode("kitsu", content)
        x = json.loads(new)
        Log.d("Decode ok! JSON ok!")
        if "dialogs" not in x or "triggers" not in x:
            return False, "File di backup invalido!"
        with open("Files/bot_files/%s/%s" % (bid, dialogs), "w") as f:
            f.write(json.dumps(x["dialogs"]))
        with open("Files/bot_files/%s/%s" % (bid, triggers), "w") as f:
            f.write(json.dumps(x["triggers"]))
        return True, "Backup ripristinato con successo!"
    except Exception as err:
        Log.e(err)
        return False, str(err)
def get_trigger_list(token, lang_n=0):
    """Collect every trigger of a bot, grouped by section.

    Returns a dict with one list per section plus "total" (trigger
    count) and "symbol" (the bot's command symbol); on error returns
    str(err) instead.
    """
    try:
        result = {}
        tot = 0
        bid = Manager.get_botid_from_token(token)
        dic = LowLevel.get_triggers(
            bid, "triggers.json" if lang_n == 0 else "triggers_eng.json")
        sections = [
            "contents", "equals", "interactions", "eteractions",
            "bot_commands", "admin_actions"
        ]
        for section in sections:
            tot += len(dic[section])
            result[section] = []
            for trig in dic[section]:
                result[section].append(trig)
        result["total"] = tot
        result["symbol"] = get_com_symbol(bid)
        return result
    except Exception as err:
        Log.e(err)
        return str(err)
def update_handler(bot, update):
    """Top-level dispatcher for a single Telegram update.

    Builds an Infos wrapper, filters blacklisted users, routes group
    join/leave events to the status handlers, then hands ordinary
    messages and commands to the Elaborator chain.
    """
    try:
        infos = Infos.Infos(bot, update)
        if infos.error:
            return Log.d("Errore owo!")
        if infos.user.uid in json.loads(open("Files/jsons/blacklist.json").read()) or infos.skip:
            return
        if "message" in update:
            # NOTE(review): this returns early for ANY message lacking the
            # "new_chat_members" key — confirm upstream always injects it,
            # otherwise ordinary messages never reach the code below.
            if "new_chat_members" not in update["message"]:
                return
            if update["message"]["new_chat_members"] or "left_chat_member" in update["message"]:
                # Main bot handles its own status changes; child bots via BotsFoos.
                if bot["id"] == Manager.get_main_bot_id():
                    return Foos.status(bot, update)
                return BotsFoos.status(bot, update)
        if infos.user.message.what != "command":
            return Elaborator.reader(infos)
        if infos.user.message.command == "report":
            return Foos.report(infos)
        ok = Elaborator.command_reader(infos)
        if ok != "procedi":
            return
        if infos.user.message.pers_command:
            return Elaborator.pers_commands(infos)
    except Exception as err:
        Log.e("Ho trovato un errore: riga %s %s %s" %
              (sys.exc_info()[-1].tb_lineno, type(err).__name__, err))
def reply_to_usermessage(self, msg, sendname, takename, chat_id, from_id, user_id):
    """Route a parsed user message to the matching dialog branch.

    Returns the reply string ("{}" placeholders are formatted with
    `takename`) or None when nothing matched or an error occurred.
    """
    reply = None
    Log.d("Running RTU")
    if msg is not None:
        msg, branch = pars.ReadReply(msg)
        try:
            if branch == "admin_commands":
                reply = Manage.admin_commands(msg, chat_id, from_id, user_id, takename, branch)
                return reply
            if branch == "simple_interactions":
                # NOTE(review): this branch only answers for the hard-coded
                # name "MayaChan" — confirm intended.
                if takename == "MayaChan":
                    reply = pars.LoadDialog(msg, branch)
                    pars.Usage(branch)
                    return reply
            if branch == "user_interactions":
                reply = pars.LoadDialog(msg, branch)
                pars.Usage(branch)
        except:
            branch = None
        if reply is not None:
            if "{}" in reply:
                reply = reply.format(takename)
    return reply
def set_nascita(infos):
    """Set the bot's birth date (DD/MM/YYYY) in the dialogs file."""
    say = infos.reply
    try:
        data_nascita = infos.text.lower()
        if infos.user.lang_n != 0:
            dialogs = "dialogs_eng.json"
        else:
            dialogs = "dialogs.json"
        if infos.text == "":
            return say("Devi dirmi la data di nasciata nel formato DD/MM/YYYY")
        dialogs_f = json.loads(
            open("Files/bot_files/%s/%s" % (infos.bid, dialogs)).read())
        # Validate by constructing a real datetime.date from the parts.
        try:
            d, m, y = data_nascita.split("/")
            datetime.date(int(y), int(m), int(d))
        except Exception as err:
            return say("A quanto pare %s non e' una data reale (%s)" % (infos.text, err))
        dialogs_f["data_nascita"] = data_nascita
        with open("Files/bot_files/%s/%s" % (infos.bid, dialogs), "w") as file_d:
            file_d.write(json.dumps(dialogs_f))
        say("Data di nascita impostata a %s!" % infos.text)
    except Exception as err:
        say("S-si è verificato un errore...")
        Log.e(err)
def doTheStuff():
    """Smoke-test pipeline: read, print, serialize and deserialize the short
    test bible and then the full bible, timing and labeling each step."""
    log.turnOff()
    log.dbgMode(False)
    t1 = timeStart()
    bible = readBible("Texts/Tests/bbl_short_test_old.txt", "Texts/Tests/bbl_short_test_new.txt")
    timeStop(t1, "read short version")
    printOutput("output_tst.txt", bible)
    t1 = timeStart()
    serial.serialize("data.bin", bible)
    timeStop(t1, "serialize short version")
    t1 = timeStart()
    deserializedBible = serial.deserialize("data.bin")
    timeStop(t1, "deserialize short version")
    printOutput("deserialized.txt", deserializedBible)
    t1 = timeStart()
    bible = readBible("Texts/Bible/bbl_old.txt", "Texts/Bible/bbl_new.txt")
    timeStop(t1, "read full version")
    printOutput("output.txt", bible)
    t1 = timeStart()
    serial.serialize("data_full.bin", bible)
    timeStop(t1, "serialize full version")
    t1 = timeStart()
    deserializedBible = serial.deserialize("data_full.bin")
    timeStop(t1, "deserialize full version")
    printOutput("deserialized_full.txt", deserializedBible)
def get_empty_bot(infos):
    """Owner command: purge bots that have (almost) no triggers.

    Bots with fewer than 5 triggers are deleted outright; those with
    fewer than 20 get a warning message to their master and are then
    deleted as well. A summary is sent back to the caller.
    """
    try:
        infos.reply("Ok, master.")
        d = 0   # deleted (zero-trigger) bots
        w = 0   # warned-and-deleted bots
        wb = 0  # warns that failed (master blocked the bot)
        u_bots = "Zero-Trigger bots:"
        bids = Manager.get_bots_id()
        for bid in bids:
            tot = Manager.trigger_count(bid)
            toke = Manager.get_token_from_bot_id(bid)
            if tot < 5:
                # Cache getMe results to avoid repeated API calls.
                if toke not in BotCache.bots:
                    BotCache.bots[toke] = HTTPLL.getMe(toke)
                bot = BotCache.bots[toke]
                u_bots += "\n%s - %s" % (bid, bot["username"])
                Manager.delete_bot(bid)
                d += 1
            elif tot < 20:
                try:
                    HTTPLL.sendMessage(toke, Manager.get_prop_id(toke),
                                       "Master, t-ti sei dimenticato di me...?")
                except Exception:
                    wb += 1
                Manager.delete_bot(bid)
                w += 1
        u_bots += "\n\n%s unactive bots detached.\n%s warns sent to the bot masters but %s of them had blocked their bot." % (d, w, wb)
        infos.reply(u_bots)
    except Exception as err:
        Log.e("Ho trovato un errore: riga %s %s %s (%s)" %
              (sys.exc_info()[-1].tb_lineno, type(err).__name__, err, infos.text))
        infos.reply("M-master... Controlla il log... k-kitsu! ><")
def run_linkspider(db, meta):
    """Crawl each configured Douban group for new discussion links.

    Python 2 code (dict.has_key). The per-group high-water mark
    'reply_time' is read from `meta` before the crawl and written back
    afterwards so only newer links are fetched next time.
    """
    source = 'douban'
    baseurls = [
        'http://www.douban.com/group/beijingzufang/discussion',
        'http://www.douban.com/group/fangzi/discussion',
        'http://www.douban.com/group/262626/discussion',
        'http://www.douban.com/group/276176/discussion',
        'http://www.douban.com/group/26926/discussion',
        'http://www.douban.com/group/sweethome/discussion',
        'http://www.douban.com/group/242806/discussion',
        'http://www.douban.com/group/257523/discussion',
        'http://www.douban.com/group/279962/discussion',
        'http://www.douban.com/group/334449/discussion',
    ]
    for baseurl in baseurls:
        Logger.info('start ' + baseurl)
        # The group id is the path segment between /group/ and /discussion.
        groupid = baseurl\
            .replace('http://www.douban.com/group/', '')\
            .replace('/discussion', '')
        reply_time = 0
        if meta.has(source, groupid)\
                and meta.get(source, groupid).has_key('reply_time'):
            reply_time = meta.get(source, groupid)['reply_time']
        linkspider = LinkSpider(
            baseurl=baseurl,
            db=db,
        )
        reply_time = linkspider.crawl(source=source, reply_time=reply_time,
                                      ext={'groupid': groupid})
        meta.set(source, groupid, {'reply_time': reply_time})
        meta.write()
def readBible(oldFile, newFile):
    """Parse the old- and new-testament text files into a Bible database.

    Unparseable lines are counted per file and logged; they are not fatal.
    Returns the populated database.Bible instance.
    """
    contentOld = fileReader.getFileContentRemEmptys(oldFile)
    contentNew = fileReader.getFileContentRemEmptys(newFile)
    bible = database.Bible()
    ommitedLinesOld = 0
    ommitedLinesNew = 0
    for line in contentOld:
        try:
            parsed = parser.parse(line)
            bible.addVerset("Stary Testament", parsed[0], parsed[1], [parsed[2], parsed[3]])
        except ValueError:
            ommitedLinesOld = ommitedLinesOld + 1
    for line in contentNew:
        try:
            parsed = parser.parse(line)
            # BUGFIX: new-testament verses were filed under "Stary Testament"
            # (old testament) due to a copy-paste error.
            bible.addVerset("Nowy Testament", parsed[0], parsed[1], [parsed[2], parsed[3]])
        except ValueError:
            ommitedLinesNew = ommitedLinesNew + 1
    log.inf("Ommited: old " + str(ommitedLinesOld))
    log.inf("Ommited: new " + str(ommitedLinesNew))
    return bible
def backup(infos):
    """Create an encoded backup of dialogs+triggers and send it as a document."""
    say = infos.reply
    try:
        if infos.user.lang_n != 0:
            dialogs = "dialogs_eng.json"
            triggers = "triggers_eng.json"
        else:
            dialogs = "dialogs.json"
            triggers = "triggers.json"
        name = "Files/bot_files/%s/backup - %s.kb" % (
            infos.bid, time.strftime("%A - %H:%M:%S"))
        d = json.loads(
            open("Files/bot_files/%s/%s" % (infos.bid, dialogs)).read())
        t = json.loads(
            open("Files/bot_files/%s/%s" % (infos.bid, triggers)).read())
        f = json.dumps({"dialogs": d, "triggers": t})
        with open(name, "w") as fl:
            # Encode with the "kitsu" key (must match restore()).
            fl.write(encode("kitsu", f))
        HTTPLL.sendFileDocument(infos.token, infos.cid, name, caption="Ecco il backup.")
    except Exception as err:
        say("S-si è verificato un errore...")
        say(str(err))
        Log.e(err)
def add_risposta(token, trigger, replies, lang_n):
    """Add one or more replies to a trigger/section of a bot's dialogs file.

    `replies` is either a list of phrases or a "trigger | phrase | ..."
    string. Validates [private] tag placement and HTML-like tag balance
    before persisting. Returns a (message, success) tuple; the message
    is user-facing Italian text.
    """
    try:
        bid = token.split(":")[0]
        if not lang_n:
            lang_n = 0
        if lang_n != 0:
            dialogs = "dialogs_eng.json"
        else:
            dialogs = "dialogs.json"
        if replies == "":
            return "Devi specificare la sezione/trigger a cui aggiungere la/e risposte.", False
        if isinstance(replies, list):
            frasi = replies
        else:
            # String form: first " | "-separated part is the trigger name.
            com = replies
            if " | " not in com:
                return "Devi dirmi le frasi o la frase da aggiungere a trigger", False
            comps = com.split(" | ")
            trigger = comps[0]
            trigger = trigger.lower()
            frasi = comps[1:]
        for frase in frasi:
            if frase.count("[private]") > 1:
                return "Ogni risposta puo' contenere un solo tag \"[private]\"!", False
            if frase.endswith("[private]"):
                return "Il tag [private] puo' andare tra due risposte o ad inizio risposta.", False
        dials = json.loads(
            open("Files/bot_files/%s/%s" % (bid, dialogs)).read())
        try:
            dials[trigger].extend(frasi)
        except Exception:
            return "Non ho trovato il trigger/sezione %s." % trigger, False
        # Validate tag balance across all new phrases at once.
        com = " ".join(frasi)
        if com.count("<b>") != com.count("</b>") or com.count("<i>") != com.count("</i>") \
                or com.count("<c>") != com.count("</c>") or com.count("<link>") != com.count("</link>"):
            return "Formattazione non valida.", False
        if "<link>" in com:
            if com.count(":>") != com.count("<link>") or ":></link>" in com:
                return "Descrizione del link mancante.", False
        with open("Files/bot_files/%s/%s" % (bid, dialogs), "w") as file_d:
            file_d.write(json.dumps(dials))
        return "Nuove frasi imparate! Vai a provarle~", True
    except Exception as err:
        Log.e(err)
        # BUGFIX: every other exit returns a (message, success) tuple; the
        # original returned a bare string here, crashing callers that unpack.
        return "S-si è verificato un errore...", False
def get(self, key: str):
    """Look up *key* in the current language's translation table.

    Falls back to the en_US table, logging each miss. Tab characters are
    stripped from the returned string. Returns "[Missing String]" when
    no table contains the key.
    """
    local_table = translations[self.lang]
    if key in local_table:
        return local_table[key].replace('\t', '')
    Logger.err("String {} is not translated to {}!".format(key, self.lang))
    fallback = translations["en_US"]
    if key not in fallback:
        Logger.err("String {} is not translated at all!".format(key))
        return "[Missing String]"
    return fallback[key].replace('\t', '')
def reply_to_usermessage(self, msg, sendname, takename):
    """Answer the single hard-coded "yuko pat her" phrase.

    Returns the reply string on a match; otherwise returns None
    (implicitly, by falling off the end).
    """
    if msg is not None:
        if "yuko pat her" in msg:
            Log.a("pat her")
            reply = "Hai! *smiles and jumps on " + takename + "'s arms*"
            return reply
def temp_mute(infos, minutes):
    """Sleep for `minutes`, then lift the target user's chat restriction.

    NOTE(review): blocks the calling thread for the whole duration —
    presumably invoked from a worker thread; confirm against callers.
    On failure an apology is posted in the chat and the error logged.
    """
    try:
        time.sleep(minutes * 60)
        HTTPLL.restrictChatMember(infos.token, infos.cid, infos.to_user.uid)
        send(infos, "user_unmuted")
    except Exception as err:
        HTTPLL.sendMessage(infos.token, infos.cid,
                           "Non ho potuto unmutare l'utente con ID %s..." % infos.to_user.uid)
        Log.d("Errore nel unmute: %s" % err)
def main_stage2(net, mid_energy):
    """Stage-2 fine-tuning: optionally resume, then train, plot and log.

    NOTE(review): this function reads args.stage1_resume, args.stage1_es,
    the stage-1 learning-rate knobs and writes 'log_stage1.txt' — it
    looks copy-pasted from stage 1. Those reads are left unchanged to
    preserve behavior; confirm which knobs stage 2 should actually use.
    """
    print("Starting stage-2 fine-tuning ...")
    # BUGFIX: start_epoch was only bound on a successful checkpoint resume,
    # causing a NameError at range() below on every other path.
    start_epoch = 0
    if args.stage2_resume:
        # Load checkpoint.
        if os.path.isfile(args.stage1_resume):
            print('==> Resuming from checkpoint..')
            checkpoint = torch.load(args.stage1_resume)
            net.load_state_dict(checkpoint['net'])
            start_epoch = checkpoint['epoch']
            logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'), resume=True)
        else:
            # NOTE(review): logger stays unbound on this path — confirm.
            print("=> no checkpoint found at '{}'".format(args.resume))
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log_stage1.txt'))
        logger.set_names(['Epoch', 'Train Loss', 'Train Acc.'])
    # after resume
    criterion = DFPLoss(temperature=args.temperature)
    optimizer = torch.optim.SGD(net.parameters(), lr=args.stage1_lr, momentum=0.9, weight_decay=5e-4)
    if not args.evaluate:
        for epoch in range(start_epoch, args.stage1_es):
            adjust_learning_rate(optimizer, epoch, args.stage1_lr,
                                 factor=args.stage1_lr_factor, step=args.stage1_lr_step)
            print('\nStage_1 Epoch: %d | Learning rate: %f ' % (epoch + 1, optimizer.param_groups[0]['lr']))
            train_out = stage1_train(net, trainloader, optimizer, criterion, device)
            save_model(net, epoch, os.path.join(args.checkpoint, 'stage_1_last_model.pth'))
            logger.append(
                [epoch + 1, train_out["train_loss"], train_out["accuracy"]])
            if args.plot:
                plot_feature(net, args, trainloader, device, args.plotfolder,
                             epoch=epoch, plot_class_num=args.train_class_num,
                             plot_quality=args.plot_quality)
                plot_feature(net, args, testloader, device, args.plotfolder,
                             epoch="test" + str(epoch),
                             plot_class_num=args.train_class_num + 1,
                             plot_quality=args.plot_quality, testmode=True)
    logger.close()
    print(f"\nFinish Stage-1 training...\n")
def main():
    """Full training loop: build model, optionally resume, train and test."""
    print(f"\nStart training ...\n")
    start_epoch = 0  # start from epoch 0 or last checkpoint epoch
    print('==> Building model..')
    net = BuildNet(backbone=args.arch, num_classes=args.train_class_num, embed_dim=args.embed_dim)
    net = net.to(device)
    if device == 'cuda':
        net = torch.nn.DataParallel(net)
        cudnn.benchmark = True
    optimizer = torch.optim.SGD(net.parameters(), lr=args.lr, momentum=0.9, weight_decay=5e-4)
    if args.resume:
        # Load checkpoint.
        if os.path.isfile(args.resume):
            print('==> Resuming from checkpoint..')
            checkpoint = torch.load(args.resume)
            net.load_state_dict(checkpoint['net'])
            optimizer.load_state_dict(checkpoint['optimizer'])
            start_epoch = checkpoint['epoch']
            logger = Logger(os.path.join(args.checkpoint, 'log.txt'), resume=True)
        else:
            # NOTE(review): logger stays unbound on this path — confirm.
            print("=> no checkpoint found at '{}'".format(args.resume))
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log.txt'))
        logger.set_names(
            ['Epoch', 'Train Loss', 'Train Acc.', "Test F1", 'threshold'])
    if not args.evaluate:
        for epoch in range(start_epoch, args.es):
            adjust_learning_rate(optimizer, epoch, args.lr, factor=args.lr_factor, step=args.lr_step)
            print('\nEpoch: %d | Learning rate: %f ' % (epoch + 1, optimizer.param_groups[0]['lr']))
            train_out = train(net, trainloader, optimizer, criterion, device)
            save_model(net, optimizer, epoch, os.path.join(args.checkpoint, 'last_model.pth'))
            test_out = test(net, testloader, criterion, device)
            logger.append([
                epoch + 1, train_out["train_loss"], train_out["accuracy"],
                test_out["best_F1"], test_out["best_thres"]
            ])
        logger.close()
        print(f"\nFinish training...\n")
    else:
        print("===> Evaluating ...")
        test(net, testloader, criterion, device)
def diff_task(linkdb, output, pagelist): page_filenames = [] #trick :) if os.path.exists(pagelist): Logger.info('Use pagelist indead of page files!') with open(pagelist, 'r') as f: for line in f: line = line.strip() if not line: break filename = line page_filenames.append(filename) else: Logger.info('CANNOT find pagelist file: %s' % pagelist) tasks = {} sources = [] with open(linkdb, 'r') as f: for line in f: line = line.strip() if line: try: hashurl, url, reply_time, source = line.split('\t')[:4] except: continue filename = '%s' % (hashurl) tasks.update({ filename:{ 'url':url, 'source':source } }) sources.append(source) if not pagelist: for source in set(sources): source = os.path.join(output, source) if os.path.exists(source): filenames = os.listdir(source) for filename in filenames: tasks.pop(filename) else: os.mkdir(source) else: for source in set(sources): source = os.path.join(output, source) if not os.path.exists(source): os.mkdir(source) for filename in page_filenames: try: tasks.pop(filename) except: print 'Skip', filename return tasks
def open(self, url, delay=0.1): response = None try: response = self.br.open(url, timeout=20.0) except urllib2.HTTPError, e: while e.code != 404: interval = Interval.val() time.sleep(interval) Logger.info('sleep %ds error %d %s' % (interval, e.code, url)) try: response = self.br.open(url, timeout=20.0) Logger.info('skip 403 ' + url) break except urllib2.HTTPError, e: if e.code != 404: continue except:
def parse(self, **args):
    """Extract fields from a post's title+text and insert a record.

    Python 2 code (dict.has_key). On a parse error the hashurl and
    error are logged and nothing is inserted.
    """
    hashurl = args['hashurl']
    title = args['title']
    text = args['text']
    ret = TextParser.parse(title + ' ' + text)
    if ret.has_key('error'):
        Logger.info(hashurl + ' ' + ret['error'])
        return
    # Tab-separated record; the pinyin keys are presumably rooms/phone/
    # rent/address/subway — confirm against TextParser.
    record = '\t'.join([
        hashurl,
        title,
        text,
        ret['jushi'],
        ret['shouji'],
        ret['zujin'],
        ret['dizhi'],
        ret['ditie'],
    ])
    self._db.insert(record)
def parse(self, **args):
    """Parse a fetched page and insert a UTF-8 tab-separated record.

    Python 2 code (dict.has_key). On a parse error the hashurl and
    error are logged and nothing is inserted.
    """
    page = args['page']
    source = args['source']
    hashurl = args['hashurl']
    ret = PageParser.parse(page, source)
    if ret.has_key('error'):
        Logger.info(hashurl + ' ' + ret['error'])
        return
    record = '\t'.join([
        hashurl,
        # Prefer the secondary title when present.
        ret['title2'] if ret['title2']\
            else ret['title'],
        json.dumps(ret['author']),
        json.dumps(ret['images']),
        json.dumps(ret['links']),
        ret['text'],
        ret['pub_time'],
    ]).encode('utf-8')
    self._db.insert(record)
def crawl(self, **args):
    """Walk the group's discussion pages, inserting links newer than the
    `reply_time` cutoff; returns the newest reply_time seen.

    Stops as soon as a link at or before the cutoff appears (the listing
    is ordered by reply time) or when there is no next page to follow.
    """
    source = args['source']
    ext = args['ext']
    reply_time = args['reply_time']
    br = Browser()
    page = br.open(self.baseurl)
    new_reply_time = reply_time
    while True:
        links = PageParser.parse(page, source)
        for i, link in enumerate(links):
            if reply_time < link.reply_time:
                # BUGFIX: the original used `i is 0`, which compares object
                # identity and only works because CPython caches small ints
                # (SyntaxWarning on Python 3.8+); use equality.
                if i == 0:
                    new_reply_time = link.reply_time
                self.db.insert('\t'.join([str(link), source, json.dumps(ext)]))
            else:
                return new_reply_time
        try:
            page = br.follow_link(text='后页>')
        except:
            Logger.info('finished!')
            break
    return new_reply_time
def run_linkspider(db, meta):
    """Crawl each configured Douban group for new discussion links.

    Python 2 code (dict.has_key). The per-group 'reply_time' high-water
    mark is read from `meta` and written back after each crawl.
    NOTE(review): appears to duplicate another run_linkspider in this
    codebase — confirm which copy is live.
    """
    source = 'douban'
    baseurls = [
        'http://www.douban.com/group/beijingzufang/discussion',
        'http://www.douban.com/group/fangzi/discussion',
        'http://www.douban.com/group/262626/discussion',
        'http://www.douban.com/group/276176/discussion',
        'http://www.douban.com/group/26926/discussion',
        'http://www.douban.com/group/sweethome/discussion',
        'http://www.douban.com/group/242806/discussion',
        'http://www.douban.com/group/257523/discussion',
        'http://www.douban.com/group/279962/discussion',
        'http://www.douban.com/group/334449/discussion',
    ]
    for baseurl in baseurls:
        Logger.info('start '+baseurl)
        groupid = baseurl\
            .replace('http://www.douban.com/group/', '')\
            .replace('/discussion', '')
        reply_time = 0
        if meta.has(source, groupid)\
                and meta.get(source, groupid).has_key('reply_time'):
            reply_time = meta.get(source, groupid)['reply_time']
        linkspider = LinkSpider(
            baseurl=baseurl,
            db=db,
        )
        reply_time = linkspider.crawl(
            source=source,
            reply_time=reply_time,
            ext={
                'groupid': groupid
            }
        )
        meta.set(source, groupid, {
            'reply_time': reply_time
        })
        meta.write()
from SketchFramework.SketchGUI import DummyGUI
from SketchFramework.Stroke import Stroke
from Utils import GeomUtils
from Utils import Logger
from xml.etree import ElementTree as ET
import datetime
import pdb
import sys
import threading
from Utils.GeomUtils import pointDistance

logger = Logger.getLogger('Board', Logger.WARN )

#--------------------------------------------
class BoardException (Exception):
    """A custom exception class to handle errors within the framework"""
    def __init__(self, message):
        """input: message for the error"""
        self.message = message
    def __str__(self):
        return repr(self.message)

#--------------------------------------------
class BoardObserver(object):
    "The Board Observer Class from which all other Board Observers should be derived"
    def __init__(self, board):
        self._parentBoard = board
        self.AnnoFuncs = {}
        # Set by the Board with a call to "RegisterBoardObserver"
        self._targetAnnotations = None
# XrdTest is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with XrdTest. If not, see <http://www.gnu.org/licenses/>. # # ------------------------------------------------------------------------------- # # File: Daemon # Desc: TODO # ------------------------------------------------------------------------------- from Utils import Logger LOGGER = Logger(__name__).setup() try: import os import signal import sys import logging except ImportError, e: LOGGER.error(str(e)) sys.exit(1) class DaemonException(Exception): """ General Exception raised by Daemon. """
import datetime
from Utils.Hacks import type
from Utils import Logger

logger = Logger.getLogger("Annotation/AnnoObject", Logger.DEBUG)

#--------------------------------------------
class Annotation(object):
    "Base Annotation Class, it is a container for data placed on sets of strokes."
    def __init__(self):
        self.Strokes = []  # list of strokes that this annotates
        self.Time = datetime.datetime.utcnow()  # time used for debuging replay
    def isType( self, arg):
        "Input: either a classobj, or a list of classobjs. Return true if this class is one of the classobjs listed"
        # NOTE: `type` here is Utils.Hacks.type (imported above), which
        # presumably returns a string like "list" rather than the builtin
        # type object — confirm against Utils.Hacks.
        if type(arg) == "list":
            clist = arg
        else:
            clist = [arg]
        if self.__class__ in clist:
            return True
        else:
            return False
    def classname( self ):
        "Returns a string with the name of this type of annotation"
        return self.__class__.__name__
#--------------------------------------------
class DrawAction(object):
    "Base class for a draw action"

    def __init__(self, action_type):
        # Tag describing what primitive this action draws (e.g. "Circle");
        # subclasses pass their own tag.
        self.action_type = action_type

    def xml(self):
        """Serialize this action to XML. Subclasses must override.

        The original raised ``NotImplemented`` — the comparison sentinel,
        not an exception class — which itself fails with a TypeError when
        raised; ``NotImplementedError`` is the correct exception.
        """
        raise NotImplementedError
>>> d.trackAnnotation(myAnnotation) FIXME: need some way to actually trigger the proper events to actually test that the visualizer is called correctly """ import time import pdb from Utils import Logger from Utils import GeomUtils from SketchFramework.Point import Point from SketchFramework.Board import BoardObserver from SketchFramework.Annotation import Annotation, AnnotatableObject logger = Logger.getLogger('DiGraphObserver', Logger.WARN ) #------------------------------------- class DebugObserver( BoardObserver ): "Watches for all annotations, and draws them" def __init__(self, board): BoardObserver.__init__(self, board) self.getBoard().AddBoardObserver( self, [] ) self.watchSet = set([]) # set of annotation types to track self.seenBefore = {} # set of particular annotation that we have already drawn def trackAnnotation(self,annoType): logger.debug("debugObserver adding %s", annoType.__name__ ); # add this annotation type to the list to track self.watchSet.add(annoType)
import time import math import sys import pdb from Utils import Logger from Utils import GeomUtils from SketchFramework.Point import Point from SketchFramework.Stroke import Stroke from SketchFramework.Board import BoardObserver from SketchFramework.Annotation import Annotation, AnimateAnnotation from Observers import ObserverBase logger = Logger.getLogger('TestObserver', Logger.DEBUG) #------------------------------------- class TestAnnotation(AnimateAnnotation): def __init__(self): Annotation.__init__(self) self.dt = 0 self.pattern = [1,1,1,1,1,0,0,0,0,0,2,0,0,0,0,0] self.idx = 0 def step(self,dt): "Test animator, tracks the time since this was last called" self.idx = (self.idx + 1) % len(self.pattern) self.dt += dt
class Point(AnnotatableObject):
    "Point defined by X, Y, T. X,Y Cartesian Coords, T as Time"

    def __init__(self, xLoc, yLoc, drawTime=0):
        AnnotatableObject.__init__(self)
        # Coordinates and timestamp are stored as floats (the old int
        # casts survived only as commented-out code and were removed).
        self.X = float(xLoc)
        self.Y = float(yLoc)
        self.T = float(drawTime)

    def distance(self, point2):
        """Deprecated: use GeomUtils.pointDist instead.

        Kept so legacy callers log an error instead of crashing;
        always returns 0.0.
        """
        logger.error("Point.distance deprecated, use GeomUtils.pointDist")
        return 0.0

    def copy(self):
        "Return a new Point carrying the same X, Y, T."
        return Point(self.X, self.Y, self.T)

    def __str__(self):
        # One-decimal rendering, e.g. "(1.0,2.0)"; T is intentionally omitted.
        return "(" + ("%.1f" % self.X) + "," + ("%.1f" % self.Y) + ")"
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with XrdTest. If not, see <http://www.gnu.org/licenses/>. # #------------------------------------------------------------------------------- # # File: EmailNotifier.py # Desc: Functionality for sending email notifications to a set of email # addresses in case of test suite success/failure, based on policies # about the frequency and type of notifications desired. # #------------------------------------------------------------------------------- from Utils import Logger LOGGER = Logger(__name__).setup() try: import sys import os import smtplib from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText except ImportError, e: LOGGER.error(str(e)) sys.exit(1) class EmailNotifierException(Exception): ''' General Exception raised by EmailNotifier. '''
def main(**args):
    """Rebuild the sim DB from the cut final DB.

    Expects ``modulepath`` in *args*; any existing sim.db is backed up
    and removed before being regenerated from final.db.2.
    """
    base = args['modulepath']
    final_cut_path = os.path.join(base, '../output/final.db.2')
    sim_path = os.path.join(base, '../output/sim.db')
    if os.path.exists(sim_path):
        backup(sim_path)
        os.remove(sim_path)
    sim_merge(final_cut_path, DB(sim_path))
#-------------------------------------
class EquationAnnotation(Annotation):
    def __init__(self, scale, type, number = 0):
        "Create a Text annotation. text is the string, and scale is an appropriate size"
        # NOTE: ``type`` shadows the builtin but is part of the public
        # signature, so it is kept as-is.
        Annotation.__init__(self)
        self.scale = scale    # approximate visual "size" of the text
        self.type = type      # equation element kind (semantics set by callers)
        self.number = number  # numeric payload, defaults to 0

    def xml(self):
        "Serialize via the Annotation base element, tagging on our scale."
        node = Annotation.xml(self)
        node.attrib['scale'] = str(self.scale)
        return node
from Utils import Rubine from Observers import CircleObserver from Observers import LineObserver from Observers import ObserverBase from SketchFramework.Point import Point from SketchFramework.Stroke import Stroke from SketchFramework.Board import BoardObserver from SketchFramework.Annotation import Annotation, AnnotatableObject from xml.etree import ElementTree as ET from numpy import * logger = Logger.getLogger('RubineObserver', Logger.DEBUG ) #------------------------------------- class RubineAnnotation(Annotation): def __init__(self, scores): "Create a Rubin annotation." Annotation.__init__(self) #self.type = type # Deprecated #self.accuracy = accuracy Deprecated #self.scale = scale # Deprecated self.scores = scores self.name = "" if len(self.scores) > 0: self.name = scores[0]['symbol']
def SketchGUISingleton():
    # Lazily build the single GUI instance; the module-level _Singleton
    # caches it so every call returns the same _PyjSketchGUI object.
    global _Singleton
    if _Singleton is None:
        _Singleton = _PyjSketchGUI()
    return _Singleton
#------------------------------------------------------------
class Stroke:
    # Class-wide counter used to hand out unique ids to new strokes.
    COUNT = 0

    def __init__(self, points=None, id=None):
        """Create a stroke from an optional point list and optional id.

        ``points`` previously defaulted to a shared mutable list (``[]``),
        so every default-constructed Stroke aliased the same list; a fresh
        list is now created per instance.  ``id`` shadows the builtin but
        is kept for caller compatibility; when omitted the next value of
        Stroke.COUNT is assigned.
        """
        if id is None:
            self.id = Stroke.COUNT
        else:
            self.id = id
        Stroke.COUNT = self.id + 1  # try to avoid id conflicts
        self.points = points if points is not None else []
>>> m = MyCollector([Annotation],Annotation2) """ #------------------------------------- import math from Utils import Logger from Utils import GeomUtils from SketchFramework.Point import Point from SketchFramework.Stroke import Stroke from SketchFramework.Board import BoardObserver, BoardSingleton from SketchFramework.Annotation import Annotation, AnnotatableObject logger = Logger.getLogger('ObserverBase', Logger.WARN ) #------------------------------------- class Visualizer( BoardObserver ): "Watches for annotations, draws them" def __init__(self, anno_type): BoardSingleton().AddBoardObserver( self ) BoardSingleton().RegisterForAnnotation( anno_type, self ) self.annotation_list = [] def onAnnotationAdded( self, strokes, annotation ): logger.debug("anno added %s", annotation ) self.annotation_list.append(annotation)
from Utils.ImageUtils import findCalibrationChessboard from multiprocessing.queues import Queue from sketchvision import ImageStrokeConverter as ISC import cv import gobject import gtk import multiprocessing import pdb import pygtk import threading pygtk.require('2.0') log = Logger.getLogger("CamArea", Logger.DEBUG) MAXCAPSIZE = (2592, 1944) HD1080 = (1920, 1080) HD720 = (1280, 720) DEFAULT_CORNERS = [ (777.6, 239.76000000000002), (2080, 533), (2235.6, 1506.6000000000001), (625.32, 1441.8000000000002), ] if __name__ == "__main__": DEBUG = True else: DEBUG = False class CamArea (ImageArea):
#-------------------------------------
class LineAnnotation(Annotation):
    "Annotation marking a stroke as a line, with its fit quality and geometry."

    def __init__(self, linearity, angle, start_point, end_point):
        Annotation.__init__(self)
        self.linearity = linearity      # how line-like the stroke is
        self.angle = angle              # orientation of the fitted line
        self.start_point = start_point  # endpoints of the fitted segment
        self.end_point = end_point
#-------------------------------------
class MultAnnotation(Annotation):
    def __init__(self, scale):
        "Create a Text annotation. text is the string, and scale is an appropriate size"
        Annotation.__init__(self)
        # Approximate visual "size" of the annotated text.
        self.scale = scale

    def xml(self):
        "Serialize through the Annotation base element, adding our scale."
        node = Annotation.xml(self)
        node.attrib['scale'] = str(self.scale)
        return node

#-------------------------------------
#from SketchFramework.strokeout import imageBufferToStrokes, imageToStrokes #from SketchFramework.NetworkReceiver import ServerThread from Utils.StrokeStorage import StrokeStorage from Utils.GeomUtils import getStrokesIntersection from Utils import Logger from Observers.ObserverBase import Animator # Constants WIDTH = 1000 HEIGHT = 800 MID_W = WIDTH/2 MID_H = HEIGHT/2 logger = Logger.getLogger("TkSketchGUI", Logger.DEBUG) class TkSketchGUI(_SketchGUI): Singleton = None def __init__(self): "Set up members for this GUI" global HEIGHT, WIDTH self.sketchFrame = None TkSketchGUI.Singleton = self self.run() def run(self): root = Tk() root.title("Sketchy/Scratch") self.sketchFrame = TkSketchFrame(master = root)
from Observers.ObserverBase import Animator import Config from functools import partial # Constants WIDTH = 950 HEIGHT = 640 #WIDTH = 1680 #HEIGHT = 1050 MID_W = WIDTH/2 MID_H = HEIGHT/2 logger = Logger.getLogger("TkSketchFrame", Logger.DEBUG) class ImgProcThread (threading.Thread): """A Thread that continually pulls image data from imgQ and puts the resulting strokes in strokeQ""" def __init__(self, imgQ, strokeQ): threading.Thread.__init__(self) self.daemon = True self.img_queue = imgQ self.stk_queue = strokeQ def run(self): while True: image = StringIO.StringIO(self.img_queue.get()) logger.debug("Processing net image") stks = imageBufferToStrokes(image)
class StrokeStorage(object):
    # Flat-file persistence for strokes: one "#STROKE"/"#ENDSTROKE"
    # delimited record per stroke, with one "X Y T" line per point.
    # NOTE(review): this fragment uses Python 2 ``print >>`` syntax.
    def __init__(self, filename = "strokes.dat"):
        # File used for both save and load; overwritten on every save.
        self._fname = filename

    def saveStrokes(self, strokelist):
        # Serialize every stroke in strokelist to self._fname, truncating
        # any previous contents.
        fd = open(self._fname, "w")
        for strk in strokelist:
            print >> fd, "#STROKE"
            for p in strk.Points:
                print >> fd, " %s %s %s" % ( p.X, p.Y, p.T)
            print >> fd, "#ENDSTROKE"
            logger.debug("Saved Stroke with %s points" % (len(strk.Points)) )
        fd.close()

    def loadStrokes(self):
        # Generator: stream strokes back from self._fname in save order.
        # NOTE(review): truncated in this view — the point-line parsing
        # after split() is not visible here.
        fd = open(self._fname, "r")
        curStroke = None
        for line in fd.readlines():
            if line.startswith("#STROKE"):
                curStroke = Stroke()
            elif line.startswith("#ENDSTROKE"):
                logger.debug("Loaded Stroke with %s points" % (len(curStroke.Points)) )
                yield curStroke
                curStroke = None
            else:
                fields = line.split()
class ArrowHeadAnnotation( Class1Annotation ):
    "Class-1 annotation tagging the three key points of an arrowhead."

    def __init__(self, end1, cusp, end2):
        Class1Annotation.__init__(self)
        # The two barb endpoints plus the tip ("cusp") between them,
        # registered in the same order as the original code.
        for label, pt in (("end1", end1), ("cusp", cusp), ("end2", end2)):
            self.addKeyPoint(label, pt)
#--------------------------------------------------------------
# Functions on Points
# FIXME: maybe these should be functions on "cordinates" rather than points

def pointDistanceSquared(X1, Y1, X2, Y2):
    "Input: two points. Returns the squared distance between the points"
    dx = X2 - X1
    dy = Y2 - Y1
    return dx * dx + dy * dy

def pointDistance(X1, Y1, X2, Y2):
    "Input: two points. Returns the euclidian distance between the points"
    return math.sqrt(float(pointDistanceSquared(X1, Y1, X2, Y2)))
import random import pdb from SketchFramework import SketchGUI from Utils import Logger from Utils import GeomUtils from SketchFramework.Point import Point from SketchFramework.Stroke import Stroke from SketchFramework.Board import BoardObserver, BoardSingleton from SketchFramework.Annotation import Annotation, AnnotatableObject from Observers import ObserverBase #------------------------------------- ssv_logger = Logger.getLogger("SplitStrokekVisualizer", Logger.DEBUG) class SplitStrokeVisualizer( ObserverBase.Visualizer ): COLORMAP = {"#00AAAA": None, "#AA00AA": None, "#AAAA00": None, "#AAAAAA": None, "#AAFFFF": None, "#FFAAFF": None, "#FFAAFF": None, "#22AAAA": None, "#AA22AA": None, "#AA22AA": None, } def __init__(self): ObserverBase.Visualizer.__init__( self, SplitStrokeAnnotation )