async def main():
    global timer
    logging('Valine-Cheker starting initialization.', prnt=True)
    init()
    while True:
        check()
        await asyncio.sleep(config['interval'])
def printover(self, _in, nt):
    # Recursively log a nested dict/list/tuple, indenting one tab per level.
    t = '\t' * nt
    if isinstance(_in, dict):
        for _key, _val in _in.items():
            if not isinstance(_val, (dict, list, tuple)):
                lg.logging(t + str(_key) + ': ' + str(_val))
            else:
                lg.logging(t + 'Elements in %s :' % (str(_key)), 'SPECIAL')
                self.printover(_val, nt + 1)
    elif isinstance(_in, (list, tuple)):
        for _val in _in:
            if not isinstance(_val, (dict, list, tuple)):
                lg.logging(t + str(_val))
            else:
                lg.logging(t + 'Elements in list :', 'SPECIAL')
                self.printover(_val, nt + 1)
    else:
        lg.logging(t + str(_in))
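# A minimal usage sketch for printover (the config dict and its keys below are
# hypothetical, for illustration only; printover walks any nested structure
# and emits it via lg.logging):
#
#   cfg = {'General': {'lumi': '140000', 'inputs': ['a.root', 'b.root']}}
#   self.printover(cfg, 0)
#   # logs "Elements in General :", then "lumi: 140000" one tab deeper,
#   # then "Elements in inputs :" and each list element another tab deeper.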
def SetHists(self, hist, TYPE):
    if TYPE == 'DATA':
        self.DATA = hist
    elif TYPE == 'STACK':
        self.STACK[hist.GetName()] = hist
    elif TYPE == 'SINGLE':
        self.SINGLE[hist.GetName()] = hist
    else:
        lg.logging('CANNOT FIND TYPE %s' % (TYPE), 'WARN')
def Get(self, _path):
    spath = _path.strip().split('/')
    try:
        _out = self.fConfig
        for sp in spath:
            if sp in ('', ' ', '\n'):  # skip empty path segments ('is' comparison replaced with membership test)
                continue
            _out = _out[sp]
    except (KeyError, TypeError):
        lg.logging('Failed to get value, wrong path', 'WARN')
        _out = None
    return _out
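# A minimal usage sketch (assuming this Get() lives on HJB.HtbJsonBase, which
# loads the JSON input file into self.fConfig; the file name, path and key
# names below are hypothetical):
#
#   base = HJB.HtbJsonBase('plots.json')
#   val = base.Get('/Cut1/mBB/xname')    # walks fConfig['Cut1']['mBB']['xname']
#   missing = base.Get('/no/such/key')   # logs a WARN and returns None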
def fit(self, dataset):
    self.model.train()
    bar = tqdm(dataset, smoothing=0)
    avgLoss = Average('Loss', num=20)
    acc = Average('TAcc')
    for i, (x, y) in enumerate(bar):
        x, y = [createVariable(z, self.use_cuda) for z in [x, y]]
        prob = self.model(x)
        loss = F.cross_entropy(prob, y)
        avgLoss.append(toList(loss)[0])
        pred = torch.max(prob.data, 1)[1]
        corr = (pred == y.data).sum()
        total = y.size(0)
        acc.append(corr / total)
        self.optimizer.zero_grad()
        loss.backward()
        torch.nn.utils.clip_grad_norm(self.model.parameters(), 10)
        self.optimizer.step()
        logs = logging((avgLoss, acc))
        bar.desc = logs
    bar.close()
    return [avgLoss, acc]
def fit(self, dataset):
    bar = tqdm(dataset, smoothing=0)
    avgGLoss = Average('GL', num=4)
    for i, (x, y) in enumerate(bar):
        self.step += 1
        batchSZ = y.size(0)
        x, y = [createVariable(z, self.use_cuda) for z in [x, y]]
        true = createVariable(torch.ones(batchSZ).float(), self.use_cuda)
        false = createVariable(torch.zeros(batchSZ).float(), self.use_cuda)

        # b/w: grayscale target built with the standard luma weights
        # coff = torch.rand(3)
        # coff /= coff.sum()
        coff = [0.299, 0.587, 0.114]
        # sign = torch.rand(3)
        # bw = sum(x[:, i] * coff[i] if sign[i] > 0.5 else (1.0 - x[:, i]) * coff[i] for i in range(3))
        bw = sum(x[:, i] * coff[i] for i in range(3))
        bw = bw.unsqueeze(1)
        c = x

        # lr decay: halve the generator learning rate every 10000 steps
        if self.step % 10000 == 0:
            for param_group in self.optimG.param_groups:
                param_group['lr'] = param_group['lr'] * 0.5

        self.optimG.zero_grad()
        self.G.train()
        gloss = 0

        # reconstruction loss (the original comment said "l1", but F.mse_loss is used)
        x = self.G(y[:, 0], y[:, 1], bw)
        if self.step % 15 == 0:
            # periodically dump a side-by-side image: generated | original | grayscale
            imb = bw.data[0].repeat(3, 1, 1)
            img = c.data[0]
            img = torch.cat([x.data[0], img, imb], 1)
            img = img.cpu().numpy()
            img, org = toImage(img)
            img.save(os.path.join('output', 'training', 'cnorm', '%d-0.jpg' % (self.step)))
            org.save(os.path.join('output', 'training', 'corig', '%d-0.jpg' % (self.step)))
        loss = F.mse_loss(x, c)
        gloss += loss.data.cpu().numpy().tolist()[0]
        loss.backward()
        avgGLoss.append(gloss)
        torch.nn.utils.clip_grad_norm(self.G.parameters(), 1)
        self.optimG.step()
        logs = logging((avgGLoss, ))
        bar.desc = logs
    bar.close()
    return [avgGLoss]
def get_spots(lat, lng, genre, skima_time, user_id, spots_amount):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    # Parameterized queries avoid SQL injection from user-supplied values.
    cursor.execute("select * from visited_spots where user_id = %s", (user_id,))
    visited_spots = cursor.fetchall()
    if lat is None or lng is None:
        cursor.execute("select * from spots;")
        spots = cursor.fetchall()
        cursor.close()
        connector.close()
        return response_spots(spots, visited_spots, lat, lng, skima_time,
                              spots_amount)
    if genre is None:
        cursor.execute("select * from spots;")
    else:
        cursor.execute("select * from spots where genre = %s;", (genre,))
    spots = cursor.fetchall()
    cursor.close()
    connector.close()
    response = response_spots(spots, visited_spots, lat, lng, skima_time,
                              spots_amount)
    if user_id is not None:
        session_id = get_session_id()
        response.update({"session_id": session_id})
        logging(user_id, "GET /spots", session_id)
    return response
def list_users():
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("select * from users;")
    result = cursor.fetchall()
    cursor.close()
    connector.close()
    logging('null', "GET /users", get_session_id())
    return response_users(result)
def get_user(user_id):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("select * from users where user_id = %s", (user_id,))
    result = cursor.fetchall()
    cursor.close()
    connector.close()
    logging(user_id, "GET /users/{user_id}", get_session_id())
    return response_users(result)
def __init__(self, _inputF):
    lg.logging('Start Initializing %s' % (self.__class__.__name__), 'SPECIAL')
    HJB.HtbJsonBase.__init__(self, _inputF)
    self.MAIN = None
    self.STACK = {}   # dicts, keyed by histogram name in SetHists()
    self.SINGLE = {}
    self.DecorateConfig()
    self.General = self.fConfig['General']
    del self.fConfig['General']
    self.CutList = list(self.fConfig.keys())
    self.Text = {}
    self.Vars = {}
    for cut in self.CutList:
        self.Text[cut] = self.fConfig[cut]['TEXT']
        self.Vars[cut] = list(self.fConfig[cut].keys())
        self.Vars[cut].remove('TEXT')
def check():
    try:
        lst = check_new_comments()
        send_emails(lst)
        logging('Waiting %d seconds...' % config['interval'])
    except Exception:
        logging('Error encountered:', level='error', prnt=True)
        for line in traceback.format_exc().split('\n'):
            logging(line, level='error', prnt=True)
        logging('Logging in to leancloud again...', prnt=True)
        init()
def import_records(rc_project, form_status, overwrite):
    """Import records into a REDCap project.

    rc_project  : the redcap.Project() object
    form_status : array of dicts, csv/xml string, or ``pandas.DataFrame``
    overwrite   : 'normal' (default) or 'overwrite';
                  ``'overwrite'`` will erase values previously stored in the
                  database if not specified in the to_import dictionaries.
    """
    import_response = {}
    try:
        import_response = rc_project.import_records(form_status,
                                                    overwrite=overwrite)
    except redcap.RedcapError as e:
        error = 'Failed to import data into Redcap Project'
        logger.logging('22_sri_import_records@sibis', error,
                       data_to_import=form_status, redcap_error=str(e))
    return import_response
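# A minimal usage sketch (the URL, token and field names below are
# hypothetical; redcap.Project and its import_records method are the real
# PyCap API that this wrapper delegates to):
#
#   import redcap
#   project = redcap.Project('https://redcap.example.org/api/', 'MY_API_TOKEN')
#   records = [{'study_id': '1001', 'consent_complete': '2'}]
#   response = import_records(project, records, 'normal')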
def get_visited_spots(user_id):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("select * from visited_spots where user_id = %s;", (user_id,))
    visited_spots = cursor.fetchall()
    cursor.close()
    connector.close()
    logging(user_id, "GET /users/{user_id}/visited", get_session_id())
    return response_visited_spots(visited_spots)
def add_user(user):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("insert into users values (0, %s, %s);",
                   (user.age, user.gender))
    connector.commit()
    cursor.execute("select * from users;")
    result = cursor.fetchall()
    cursor.close()
    connector.close()
    logging('null', "POST /users", get_session_id())
    return response_user_id(result)
def XsecInit(xfilename):
    xsecf = open(xfilename)
    output = {}
    for ln in xsecf.readlines():
        sln = ln.strip()
        if sln == '' or sln[0] == '#':  # skip blank lines and comments
            continue
        if '\t' in sln:
            sln = sln.expandtabs(1)
        ss = [i for i in sln.split(' ') if i != '']
        try:
            # cross-section times k-factor, keyed by sample ID
            output[str(ss[0])] = float(ss[1]) * float(ss[2])
        except (IndexError, ValueError):
            print('XSection file error, ID %s' % (str(ss[0])))
    lg.logging('Done', 'SUCCESS')
    return output
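# The parser above expects whitespace-separated lines of
# "sampleID  cross-section  k-factor", with '#' comments and blank lines
# ignored. A hypothetical input file (the IDs and numbers are made up,
# purely for illustration):
#
#   # DSID    xsec      k-factor
#   410470    729.77    1.139
#   364100    1630.0    0.976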
def update_user(user_id, user):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("update users set age = %s, gender = %s where user_id = %s;",
                   (user.age, user.gender, user_id))
    connector.commit()
    cursor.execute("select * from users where user_id = %s", (user_id,))
    result = cursor.fetchall()
    cursor.close()
    connector.close()
    logging(user_id, "POST /users/{user_id}", get_session_id())
    return response_user_id(result)
def add_spots(spot):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute(
        "insert into spots values (0, %s, %s, %s, %s, %s, %s, %s);",
        (spot.name, spot.longitude, spot.latitude, spot.genre,
         spot.stay_time, spot.comment, spot.plan))
    connector.commit()
    cursor.execute("select * from spots;")
    spots = cursor.fetchall()
    cursor.close()
    connector.close()
    logging('null', "POST /spots", get_session_id())
    return spot_id(spots)
def add_visited_spots(user_id, number):
    connector = MySQLdb.connect(user=DB.user, passwd=DB.password,
                                host=DB.host, db=DB.name, charset=DB.charset)
    cursor = connector.cursor()
    cursor.execute("insert into visited_spots values (%s, %s);",
                   (user_id, number.spot_id))
    connector.commit()
    cursor.execute("select * from visited_spots;")
    visited_spots = cursor.fetchall()
    cursor.close()
    connector.close()
    logging(user_id, "POST /users/{user_id}/visited", number.session_id)
    return visited_spot_id(visited_spots)
def score(self, dataset):
    self.model.eval()
    bar = tqdm(dataset, smoothing=0)
    acc = Average('Acc')
    for i, (x, y) in enumerate(bar):
        x, y = [createVariable(z, self.use_cuda) for z in [x, y]]
        prob = self.model(x)
        pred = torch.max(prob.data, 1)[1]
        corr = (pred == y.data).sum()
        total = y.size(0)
        acc.append(corr / total)
        logs = logging((acc, ))
        bar.desc = logs
    bar.close()
    return [acc]
def init():
    global query, user, akismet_enabled
    logging('Loading config file...', prnt=True)
    load_config()
    lc.init(config['app_id'], master_key=config['master_key'])
    if 'akismet_key' in config and config['akismet_key'] != '':
        logging('Verifying akismet key...', prnt=True)
        if not akismet.init(config['akismet_key'], config['site_url']):
            logging('akismet key verification failed, please check your akismet key',
                    level='error', prnt=True)
            exit(1)
        akismet_enabled = True
    query = lc.Query('Comment')
def send_emails(lst):
    if len(lst) == 0:
        return
    prepare_smtp_server()
    for c in lst:
        if akismet_enabled:
            # the last character of the comment is '\n', so strip it for the log
            logging('Checking comment for spam via akismet: %s' % c.get('comment')[:-1])
            if akismet.check(config['site_url'], c.get('ip'), c.get('ua'),
                             config['site_url'] + c.get('url'),
                             c.get('comment'), c.get('nick'),
                             c.get('mail'), c.get('link')):
                logging('Spam comment detected, skipping email')
                acl = lc.ACL()
                acl.set_public_read_access(False)
                c.set_acl(acl)
                c.set('isSpam', True)
                c.save()
                continue
        if c.get('pid') is None:
            # notify the blogger
            func = send_admin_email
            logging('Notifying the blogger: objectId = %s' % c.id)
        else:
            # notify the author of the comment being replied to
            func = send_replay_email
            logging('Notifying the replied-to commenter: objectId = %s' % c.id)
        if func(c):
            logging('Email sent successfully!')
            c.set('isNotified', True)
            c.save()
        else:
            logging('Failed to send email!', level='error', prnt=True)
            exit(1)
    logging('Logging out of SMTP server...', prnt=True)
    server.quit()
def check_new_comments() -> list:
    query.not_equal_to('isNotified', True)
    query.not_equal_to('isSpam', True)
    unnotified_list = query.find()
    logging('Checked for new comments; found %d new comments.' % len(unnotified_list), prnt=True)
    return unnotified_list
def PlotCompare(Config):
    rootfile = rt.TFile(Config.General['input'], 'READ')
    for cut in Config.CutList:
        lg.logging('Processing cut %s' % (cut))
        for var in Config.Vars[cut]:
            lg.logging('\tProcessing var %s' % (var))
            rt.gROOT.ProcessLine('SetAtlasStyle()')
            canvas = RC.HtbCompCanvas()
            canvas.canvas.Draw()
            canvas.pad1.Draw()
            canvas.pad2.Draw()
            canvas.pad1.cd()

            fHists = util.GetHists(Config, rootfile, cut, var)
            hStack = rt.THStack('hStack', 'hStack')
            h_tot = None
            g_tot = None
            xtitle = Config.fConfig[cut][var]['xname']
            ytitle = Config.fConfig[cut][var]['yname']
            h_data = None
            hasData = False
            if fHists.DATA:
                h_data = fHists.DATA
                hasData = True

            # Upper pad: build the MC stack and the total-MC histogram.
            i_color = 2
            for _name, hist in fHists.STACK.items():
                if i_color == 5 or i_color == 8:
                    i_color += 1
                if i_color == 10:
                    i_color = 41
                hist.SetFillColor(i_color)
                hist.SetLineWidth(0)
                hist.SetLineColor(rt.kBlack)
                hStack.Add(hist)
                if h_tot is None:
                    h_tot = hist.Clone('allmc')
                else:
                    h_tot.Add(hist)
                i_color += 1

            b_ShowYields = Config.General['ShowYields']
            b_logy = False
            if 'logy' in Config.fConfig[cut][var]:
                b_logy = Config.fConfig[cut][var]['logy']

            rt.gStyle.SetEndErrorSize(4.0)
            h_dummy = h_tot.Clone('h_dummy')
            h_dummy.Scale(0)
            h_dummy.Draw('HIST')
            hStack.Draw('same HIST')

            # Total-MC uncertainty band.
            g_tot = rt.TGraphAsymmErrors(h_tot)
            g_tot.SetFillStyle(3354)
            g_tot.SetFillColor(rt.kBlack)
            g_tot.SetLineColor(rt.kWhite)
            g_tot.SetLineWidth(0)
            g_tot.SetMarkerSize(0)
            g_tot.Draw('same E2')

            g_data = None
            if hasData:
                h_data.SetMarkerStyle(20)
                h_data.SetLineColor(rt.kBlack)
                h_data.SetLineWidth(2)
                h_data.SetMarkerSize(1.4)
                g_data = rt.TGraphAsymmErrors(h_data)
                g_data.SetMarkerSize(h_data.GetMarkerSize())
                g_data.SetMarkerColor(h_data.GetMarkerColor())
                g_data.SetMarkerStyle(h_data.GetMarkerStyle())
                g_data.SetLineWidth(h_data.GetLineWidth())
            else:
                h_data = h_tot.Clone('dummyData')
                h_data.SetTitle('Asimov Data')
                g_data = rt.TGraphAsymmErrors(h_data)

            # Overlay single histograms, normalized to the total-MC yield.
            if fHists.SINGLE:
                i_color = 2
                for _name, hist in fHists.SINGLE.items():
                    if i_color == 5:
                        i_color += 1
                    hist.SetLineColor(rt.TColor.GetColorBright(i_color))
                    hist.SetLineStyle(2)
                    hist.SetLineWidth(3)
                    ntotal = h_tot.Integral()
                    nhist = hist.Integral()
                    if nhist != 0:
                        hist.Scale(ntotal / nhist)
                    hist.Draw('same HIST')
                    i_color += 1

            if hasData:
                g_data.Draw('same Ep1')

            h_dummy.GetXaxis().SetTitle(xtitle)
            h_dummy.GetYaxis().SetTitle(ytitle)
            # h_dummy.GetYaxis().SetTitleOffset(2.3)
            if b_logy:
                h_dummy.SetMinimum(0.1)
            else:
                h_dummy.SetMinimum(0)

            # Choose the y-axis range from the tallest of data, total MC and singles.
            if hasData:
                ymax = rt.TMath.Max(h_tot.GetMaximum(), h_data.GetMaximum())
                if fHists.SINGLE:
                    for _name, hist in fHists.SINGLE.items():
                        if hist.Integral() != 0:
                            hist_max = hist.GetMaximum() * h_tot.Integral() / hist.Integral()
                            if ymax < hist_max:
                                ymax = hist_max
                if b_logy:
                    h_dummy.SetMaximum(800 * ymax)
                    canvas.pad1.SetLogy(True)
                else:
                    h_dummy.SetMaximum(1.5 * ymax)
            else:
                ymax = h_tot.GetMaximum()
                if fHists.SINGLE:
                    for _name, hist in fHists.SINGLE.items():
                        if hist.Integral() != 0:  # guard added to mirror the hasData branch
                            hist_max = hist.GetMaximum() * h_tot.Integral() / hist.Integral()
                            if ymax < hist_max:
                                ymax = hist_max
                if b_logy:
                    h_dummy.SetMaximum(500 * ymax)
                    canvas.pad1.SetLogy(True)
                else:
                    h_dummy.SetMaximum(1.5 * ymax)

            canvas.pad1.RedrawAxis()
            canvas.pad1.SetTickx()
            canvas.pad1.SetTicky()

            # Legend, optionally with a second column of yields.
            legX1 = 1 - 0.41 * (596.0 / canvas.pad1.GetWw()) - 0.08
            legX2 = 0.91
            legXmid = legX1 + 0.5 * (legX2 - legX1)
            if b_ShowYields:
                legXmid = legX1 + 0.6 * (legX2 - legX1)
                leg = rt.TLegend(
                    legX1,
                    0.93 - (len(fHists.STACK) + len(fHists.SINGLE) + 2) * 0.04,
                    legXmid, 0.93
                )
                leg1 = rt.TLegend(legXmid, leg.GetY1(), legX2, leg.GetY2())
                leg.SetFillStyle(0)
                leg.SetBorderSize(0)
                leg.SetTextAlign(32)
                leg.SetTextFont(rt.gStyle.GetTextFont())
                leg.SetTextSize(rt.gStyle.GetTextSize() * 0.6)
                leg.SetMargin(0.22)
                leg1.SetFillStyle(0)
                leg1.SetBorderSize(0)
                leg1.SetTextAlign(32)
                leg1.SetTextFont(rt.gStyle.GetTextFont())
                leg1.SetTextSize(rt.gStyle.GetTextSize() * 0.6)
                leg1.SetMargin(0.0)
                if hasData:
                    leg.AddEntry(h_data, 'DATA', 'lep')
                    leg1.AddEntry(None, str('%.1f' % (h_data.Integral())), '')
                if fHists.SINGLE:
                    for _name, hist in fHists.SINGLE.items():
                        leg.AddEntry(hist, _name, 'f')
                        leg1.AddEntry(None, str('%.1f' % (hist.Integral())), '')
                if fHists.STACK:
                    for _name, hist in fHists.STACK.items():
                        leg.AddEntry(hist, _name, 'f')
                        leg1.AddEntry(None, str('%.1f' % (hist.Integral())), '')
                leg.AddEntry(None, 'Total', '')
                leg1.AddEntry(None, str('%.1f' % (h_tot.Integral())), '')
                leg.AddEntry(g_tot, 'Uncertainty', 'f')
                leg1.AddEntry(None, ' ', '')
                leg.Draw()
                leg1.Draw()
            else:
                leg = rt.TLegend(
                    legX1,
                    0.93 - ((len(fHists.STACK) + len(fHists.SINGLE) + 2) / 2) * 0.06,
                    legX2, 0.93
                )
                leg.SetNColumns(2)
                leg.SetFillStyle(0)
                leg.SetBorderSize(0)
                leg.SetTextAlign(32)
                leg.SetTextFont(rt.gStyle.GetTextFont())
                leg.SetTextSize(rt.gStyle.GetTextSize() * 0.55)
                leg.SetMargin(0.22)
                if hasData:
                    leg.AddEntry(h_data, 'DATA', 'lep')
                if fHists.SINGLE:
                    for _name, hist in fHists.SINGLE.items():
                        leg.AddEntry(hist, _name, 'f')
                if fHists.STACK:
                    for _name, hist in fHists.STACK.items():
                        leg.AddEntry(hist, _name, 'f')
                leg.AddEntry(g_tot, 'Uncertainty', 'f')
                leg.Draw()

            # Cut-specific text and the standard ATLAS labels.
            for textObj in Config.Text[cut]:
                canvas.DrawText(textObj)
            sqrts = {"text": "#sqrt{s} = 13TeV",
                     "xPos": 0.18, "yPos": 0.82, "size": 0.035, "color": 1}
            atlas = {"text": "#bf{#it{ATLAS}} Work in Progress",
                     "xPos": 0.16, "yPos": 0.89, "size": 0.06, "color": 1}
            lumi = {"text": "#intLdt =" + Config.General['lumi'] + " pb^{-1}",
                    "xPos": 0.32, "yPos": 0.82, "size": 0.035, "color": 1}
            canvas.DrawText(sqrts)
            canvas.DrawText(atlas)
            canvas.DrawText(lumi)

            # Lower pad: data/MC ratio.
            canvas.pad2.cd()
            canvas.pad2.GetFrame().SetY1(2)
            h_dummy2 = h_tot.Clone('h_dummy2')
            h_dummy2.Scale(0)
            h_dummy2.Draw('HIST')
            # h_dummy2.GetYaxis().SetTitleOffset(
            #     1.0 * h_dummy.GetYaxis().GetTitleOffset()
            # )
            h_ratio = h_data.Clone('h_ratio')
            h_tot_noerr = h_tot.Clone('h_tot_noerr')
            for i_bin in range(1, h_tot_noerr.GetNbinsX() + 1):
                h_tot_noerr.SetBinError(i_bin, 0)

            # Rescale the uncertainty band to the total-MC prediction.
            g_ratio2 = g_tot.Clone('g_ratio2')
            for i_bin in range(1, h_tot_noerr.GetNbinsX() + 1):
                if h_tot_noerr.GetBinContent(i_bin) == 0:
                    continue
                g_ratio2.SetPoint(
                    i_bin - 1,
                    g_ratio2.GetX()[i_bin - 1],
                    g_ratio2.GetY()[i_bin - 1] / h_tot_noerr.GetBinContent(i_bin)
                )
                g_ratio2.SetPointEXlow(i_bin - 1, g_ratio2.GetEXlow()[i_bin - 1])
                g_ratio2.SetPointEXhigh(i_bin - 1, g_ratio2.GetEXhigh()[i_bin - 1])
                g_ratio2.SetPointEYlow(
                    i_bin - 1,
                    g_ratio2.GetEYlow()[i_bin - 1] / h_tot_noerr.GetBinContent(i_bin)
                )
                g_ratio2.SetPointEYhigh(
                    i_bin - 1,
                    g_ratio2.GetEYhigh()[i_bin - 1] / h_tot_noerr.GetBinContent(i_bin)
                )

            h_dummy2.SetTitle('Data/MC')
            h_dummy2.GetYaxis().CenterTitle()
            h_dummy2.GetYaxis().SetTitle('Data/Bkg.')
            # h_dummy2.GetYaxis().SetLabelSize(
            #     1.0 * h_ratio.GetYaxis().GetLabelSize()
            # )
            h_dummy2.GetYaxis().SetLabelOffset(0.02)
            h_dummy.GetYaxis().SetLabelOffset(0.02)
            h_dummy2.GetYaxis().SetNdivisions(504, False)
            rt.gStyle.SetEndErrorSize(4.0)
            canvas.pad1.SetTicky()
            h_ratio.Divide(h_tot_noerr)
            h_ratio.SetMarkerStyle(20)
            h_ratio.SetMarkerSize(1.4)
            h_ratio.SetMarkerColor(rt.kBlack)
            h_ratio.SetLineWidth(2)
            g_ratio = rt.TGraphAsymmErrors(h_ratio)
            g_ratio.SetMarkerStyle(h_ratio.GetMarkerStyle())
            g_ratio.SetMarkerSize(h_ratio.GetMarkerSize())
            g_ratio.SetMarkerColor(h_ratio.GetMarkerColor())
            g_ratio.SetLineWidth(h_ratio.GetLineWidth())
            g_ratio.SetLineColor(h_ratio.GetLineColor())
            g_ratio.SetLineStyle(h_ratio.GetLineStyle())
            hline = rt.TLine(h_dummy2.GetXaxis().GetXmin(), 1,
                             h_dummy2.GetXaxis().GetXmax(), 1)
            hline.SetLineColor(rt.kRed)
            hline.SetLineWidth(2)
            hline.SetLineStyle(2)
            if hasData:
                g_ratio.Draw('Ep1 same')
            hline.Draw()
            h_dummy2.SetMinimum(0.5)
            h_dummy2.SetMaximum(1.5)
            h_dummy2.GetXaxis().SetTitle(h_dummy.GetXaxis().GetTitle())
            # h_dummy2.GetXaxis().SetTitleOffset(5.0)
            h_dummy.GetXaxis().SetTitle('')
            h_dummy.GetXaxis().SetLabelSize(0)
            labelsize = h_dummy.GetYaxis().GetLabelSize()
            titlesize = h_dummy.GetYaxis().GetTitleSize()
            titleoffset = h_dummy.GetYaxis().GetTitleOffset()
            h_dummy.GetYaxis().SetLabelSize(0.7 * labelsize)
            h_dummy2.GetYaxis().SetLabelSize(1.5 * labelsize)
            h_dummy.GetYaxis().SetTitleSize(0.75 * titlesize)
            h_dummy2.GetYaxis().SetTitleSize(1.7 * titlesize)
            h_dummy2.GetXaxis().SetTitleSize(2.0 * titlesize)
            h_dummy2.GetXaxis().SetLabelSize(1.7 * labelsize)
            h_dummy2.GetYaxis().SetTitleOffset(0.45 * titleoffset)
            h_dummy.GetYaxis().SetTitleOffset(1.1 * titleoffset)
            h_dummy2.GetXaxis().SetLabelOffset(0.02)
            g_ratio2.Draw('same E2')
            canvas.pad2.RedrawAxis()

            plotname = var + '.png'
            outDir = util.checkDir(Config.General['plotdir'])
            outDir = outDir + cut + '/'
            util.MakeDir(outDir)
            canvas.SavePrint(outDir + plotname)
            lg.logging('\t%s Done' % (var), 'SUCCESS')
            del canvas
        lg.logging('%s Done' % (cut), 'SUCCESS')
    rootfile.Close()
def prepare_smtp_server():
    logging('Logging in to SMTP server...', prnt=True)
    m = login_to_smtp()
    if m != '':
        logging(m, level='error', prnt=True)
        exit(1)
#!/usr/bin/env python
import HtbPlotter
import HtbPlotConfig
import util
import logger as lg
import ROOT as rt
import sys
import os

if __name__ == '__main__':
    data_path = os.environ['MYANALYSIS'] + '/Plotter/data'
    rt.gROOT.SetMacroPath(data_path)
    rt.gROOT.LoadMacro('atlasstyle/AtlasStyle.C')
    rt.gROOT.SetBatch(True)
    Config = HtbPlotConfig.HtbPlotConfig(sys.argv[1])
    PlotFunc = Config.General['TYPE']
    if not hasattr(HtbPlotter, PlotFunc):
        lg.logging('CANNOT FIND FUNCTION!!!', 'WARN')
        sys.exit(1)
    Run = getattr(HtbPlotter, PlotFunc)
    Run(Config)
def add_logger(self):
    if not os.path.isdir(self.configure["paths"]["log_dir"]):
        os.mkdir(self.configure["paths"]["log_dir"])
    log_file = os.path.join(self.configure["paths"]["log_dir"], "pipeline.log")
    self.configure["logger"] = logger.logging(log_file)
def fit(self, dataset):
    bar = tqdm(dataset, smoothing=0)
    avgDLoss = Average('DL', num=4)
    realRealAcc = Average('DR', num=4)
    avgGLoss = Average('GL', num=4)
    fakeRealAcc = Average('GR', num=4)
    realIlluAcc = Average('TI', num=4)
    fakeIlluAcc = Average('GI', num=4)
    for i, (x, y) in enumerate(bar):
        self.step += 1
        batchSZ = y.size(0)
        x, y = [createVariable(z, self.use_cuda) for z in [x, y]]
        true = createVariable(torch.ones(batchSZ).float(), self.use_cuda)
        false = createVariable(torch.zeros(batchSZ).float(), self.use_cuda)

        # lr decay: halve both learning rates every 50000 steps
        if self.step % 50000 == 0:
            for param_group in self.optimD.param_groups:
                param_group['lr'] = param_group['lr'] * 0.5
            for param_group in self.optimG.param_groups:
                param_group['lr'] = param_group['lr'] * 0.5

        # tagger pretrain (kept from an earlier experiment; disabled)
        # if self.step < 4000:
        #     self.G.eval()
        #     self.D.train()
        #     self.optimD.zero_grad()
        #     dloss = 0
        #     # Real data
        #     isReal, tags = self.D(x)
        #     lossHair = F.cross_entropy(tags[:, 0, :], y[:, 0])
        #     lossEyes = F.cross_entropy(tags[:, 1, :], y[:, 1])
        #     realHairAcc.append(toList((torch.max(tags[:, 0, :], 1)[1] == y[:, 0]).sum())[0] / batchSZ)
        #     realEyesAcc.append(toList((torch.max(tags[:, 1, :], 1)[1] == y[:, 1]).sum())[0] / batchSZ)
        #     lossRealTags = lossHair * 0.6 + lossEyes
        #     loss = lossRealTags
        #     dloss += loss.data.cpu().numpy().tolist()[0]
        #     loss.backward()
        #     # Gradient penalty
        #     alpha = createVariable(torch.rand(batchSZ, 1, 1, 1), self.use_cuda)
        #     beta = createVariable(torch.randn(x.size()), self.use_cuda)
        #     gradientPenalty = 0
        #     x = alpha * x + (1 - alpha) * (x + 0.5 * x.std() * beta)
        #     x = x.detach()
        #     x.requires_grad = True
        #     isReal, tags = self.D(x)
        #     hair = tags[:, 0, :12]
        #     eyes = tags[:, 1, :11]
        #     hairGrad = createVariable(torch.ones(batchSZ, 12).float(), self.use_cuda)
        #     hairGrad = grad(hair, x, hairGrad, create_graph=True,
        #                     retain_graph=True, only_inputs=True)[0].view(batchSZ, -1)
        #     gradientPenalty += ((hairGrad.norm(p=2, dim=1) - 1)**2).mean()
        #     eyesGrad = createVariable(torch.ones(batchSZ, 11).float(), self.use_cuda)
        #     eyesGrad = grad(eyes, x, eyesGrad, create_graph=True,
        #                     retain_graph=True, only_inputs=True)[0].view(batchSZ, -1)
        #     gradientPenalty += ((eyesGrad.norm(p=2, dim=1) - 1)**2).mean()
        #     gradientPenalty *= 0.5
        #     dloss += gradientPenalty.data.cpu().numpy().tolist()[0]
        #     gradientPenalty.backward()
        #     avgDLoss.append(dloss)
        #     torch.nn.utils.clip_grad_norm(self.D.parameters(), 1)
        #     self.optimD.step()
        #     logs = logging((avgDLoss, avgGLoss, realRealAcc, fakeRealAcc, realHairAcc, fakeHairAcc, realEyesAcc, fakeEyesAcc))
        #     bar.desc = logs
        #     continue

        lambdaAdvMax = 1
        # lambdaAdv = min(1, self.step / 4000) ** 2
        # lambdaAdv = lambdaAdv * 0.8 + 0.2
        # lambdaAdv = lambdaAdv * lambdaAdvMax
        lambdaAdv = lambdaAdvMax

        # gap skip: randomly skip D updates when D is far ahead of G
        skipD = False
        if lambdaAdv >= lambdaAdvMax - 1e-10:
            gap = max(realRealAcc.value() - fakeRealAcc.value(), 0)
            gap = min(1, gap * 2)
            r = random.random()
            if r > 1 - gap * 0.9:
                skipD = True

        if not skipD:
            for _ in range(1):
                # Training Discriminator
                self.G.eval()
                self.D.train()
                self.optimD.zero_grad()
                self.optimG.zero_grad()
                dloss = 0

                # Real data
                isReal, illum = self.D(x)
                lossRealLabel = F.binary_cross_entropy_with_logits(isReal, true)
                realRealAcc.append(toList(F.sigmoid(isReal).mean())[0])
                lossIllu = F.mse_loss(illum, y)
                realIlluAcc.append(toList(lossIllu)[0])
                loss = lossRealLabel * lambdaAdv + lossIllu
                dloss += loss.data.cpu().numpy().tolist()[0]
                loss.backward()

                # Gradient penalty
                alpha = createVariable(torch.rand(batchSZ, 1, 1, 1), self.use_cuda)
                beta = createVariable(torch.randn(x.size()), self.use_cuda)
                gradientPenalty = 0
                x = alpha * x + (1 - alpha) * (x + 0.5 * x.std() * beta)
                x = x.detach()
                x.requires_grad = True
                isReal, illum = self.D(x)
                # isReal = F.sigmoid(isReal)
                realGrad = grad(isReal, x, true, create_graph=True,
                                retain_graph=True, only_inputs=True)[0].view(batchSZ, -1)
                gradientPenalty += ((realGrad.norm(p=2, dim=1) - 1)**2).mean()
                gradientPenalty *= 0.5
                dloss += gradientPenalty.data.cpu().numpy().tolist()[0]
                gradientPenalty.backward()

                # Fake data
                noise = createVariable(torch.randn(batchSZ, noiseDim), self.use_cuda)
                illum = createVariable(
                    torch.FloatTensor(batchSZ).uniform_(0.3, 1), self.use_cuda)
                x = self.G(noise, illum)
                # x = torch.clamp(x, 0, 1)
                x = x.detach()
                isReal, illum = self.D(x)
                lossRealLabel = F.binary_cross_entropy_with_logits(isReal, false)
                loss = lossRealLabel * lambdaAdv
                loss = loss * 0.1
                dloss += loss.data.cpu().numpy().tolist()[0]
                loss.backward()

                # Fake data history
                if len(self.memory) > batchSZ:
                    x = random.sample(self.memory, batchSZ)
                    x = createVariable(torch.stack(x, 0), self.use_cuda)
                    isReal, illum = self.D(x)
                    lossRealLabel = F.binary_cross_entropy_with_logits(isReal, false)
                    loss = lossRealLabel * lambdaAdv
                    loss = loss * 0.9
                    dloss += loss.data.cpu().numpy().tolist()[0]
                    loss.backward()

                avgDLoss.append(dloss)
                torch.nn.utils.clip_grad_norm(self.D.parameters(), 1)
                self.optimD.step()

        # Training Generator
        for i in range(1):
            self.optimD.zero_grad()
            self.optimG.zero_grad()
            self.D.eval()
            self.G.train()
            noise = createVariable(torch.randn(batchSZ, noiseDim), self.use_cuda)
            illum = createVariable(
                torch.FloatTensor(batchSZ).uniform_(0.3, 1), self.use_cuda)
            gloss = 0
            x = self.G(noise, illum)
            isReal, _illum = self.D(x)
            self.memory.append(x[0].data.cpu())
            if len(self.memory) > 1e6:
                self.memory = self.memory[-int(1e6):]  # slice index must be an int, not the float 1e6
            if self.step % 15 == 0 and i == 0:
                img = x.data[0].cpu().numpy()
                img, org = toImage(img)
                try:
                    img.save(os.path.join('output', 'training', 'norm',
                                          '%d-0.jpg' % (self.step)))
                    org.save(os.path.join('output', 'training', 'orig',
                                          '%d-0.jpg' % (self.step)))
                except OSError:  # output dirs may not exist yet
                    pass
            lossRealLabel = F.binary_cross_entropy_with_logits(isReal, true)
            fakeRealAcc.append(toList(F.sigmoid(isReal).mean())[0])
            lossIllu = F.mse_loss(_illum, illum)
            fakeIlluAcc.append(toList(lossIllu)[0])
            loss = lossRealLabel * lambdaAdv + lossIllu
            gloss += loss.data.cpu().numpy().tolist()[0]
            loss.backward()
            avgGLoss.append(gloss)
            torch.nn.utils.clip_grad_norm(self.G.parameters(), 1)
            self.optimG.step()

        logs = logging((avgDLoss, avgGLoss, realRealAcc, fakeRealAcc,
                        realIlluAcc, fakeIlluAcc))
        bar.desc = logs
    bar.close()
    return [avgDLoss, avgGLoss, realRealAcc, fakeRealAcc, realIlluAcc,
            fakeIlluAcc]
except: print("Cannot initiate LCD") print(openPorts) while 1: try: line = None gps = None if settings.isProduction: line = sp.readFromPort(openPorts["Battery"]) gps = sp.readFromPort(openPorts["Generic"]) else: gps = sp.readFromPort(openPorts["Generic"]) if line != b'' and line != None: print(str(line)) parsed = utils.parseBatteryData(str(line), lastValues) lg.logging(parsed) packData = utils.packData(parsed) if len(packData) > 0: for data in packData: print(data) sp.writeToPort(openPorts["XBee"], data.encode()) if gps != b'': parsed = gpsController.parseGPSData(str(gps), lastValues) packData = utils.packData(parsed) if len(packData) > 0: try: #lcdController.lcd_string("MAYMUNCUK <",0x80) line1 = "S:{} T:{}".format(lastValues["lastSpeed"][-1], lastValues["lastTemps"][0][-1]) line2 = "%:{} A:{}".format(lastValues["lastPercentages"][0][-1], lastValues["lastAmps"][-1]) lcdController.lcd_string(str(line1), 0x80) lcdController.lcd_string(str(line2), 0xC0)