def clsCustom(wspace, data, nToys = 100, smOnly = None, testStatType = None, note = "", plots = True) :
    """Compute CLs for the signal-strength variable "f" using toy pseudo-experiments.

    Fits the data under b (f=0), s+b (f=1) and floating-f ("fHat") hypotheses,
    saving a workspace snapshot and nToys pseudo-datasets for each; then
    evaluates the test statistic on data and toys to form CLb, CLsb and
    CLs = CLsb/CLb.  Python 2 code (.iteritems()).
    """
    assert not smOnly
    toys = {}
    for label,f in {"b":0.0, "sb":1.0, "fHat":None}.iteritems() :
        if f!=None :
            # fixed-f hypothesis: pin the signal strength for this fit
            wspace.var("f").setVal(f)
            wspace.var("f").setConstant()
        else :
            # floating-f hypothesis ("fHat"): let the fit determine f
            wspace.var("f").setVal(1.0)
            wspace.var("f").setConstant(False)
        results = utils.rooFitResults(common.pdf(wspace), data)
        # snapshot of post-fit parameter values, referenced below by name
        wspace.saveSnapshot("snap_%s"%label, wspace.allVars())
        toys[label] = common.pseudoData(wspace, nToys)
        utils.delete(results)
    args = {"wspace": wspace, "testStatType": testStatType, "snapSb": "snap_sb", "snapB": "snap_b", "snapfHat": "snap_fHat"}
    obs = ts(data = data, **args)  # test statistic observed on the real data
    out = {}
    values = {}
    for label in ["b", "sb"] :
        values[label] = []
        for toy in toys[label] :
            values[label].append(ts(data = toy, **args))
        # fraction of toys at least as extreme as the observed value
        out["CL%s"%label] = 1.0-utils.indexFraction(obs, values[label])
    if plots :
        plotting.clsCustomPlots(obs = obs, valuesDict = values, note = "TS%d_%s"%(testStatType, note))
    # 9.9 is a sentinel for the undefined ratio when CLb == 0
    out["CLs"] = out["CLsb"]/out["CLb"] if out["CLb"] else 9.9
    return out
def del_local_file(params):
    """Ask the user for confirmation, then delete the local NZB file and refresh."""
    log("del_local_file: params: %s" % params)
    target = utils.unquote_plus(params.get("nzb"))
    confirmed = xbmcgui.Dialog().yesno("Pneumatic", "Delete:", "%s" % target)
    if not confirmed:
        return
    log("del_local_file: delete: %s" % target)
    utils.delete(target)
    xbmc.executebuiltin("Container.Refresh")
def _archive(self, dest, **kwargs):
    """
    Creates a tarball from output_paths by using the _copy
    function and stores the logfile in the tarball.

    :param dest: destination directory for the tarball (trailing '/' stripped).

    FIX: the temporary directory is now removed in a ``finally`` block so it
    is no longer leaked when _copy/copy/run/mktar raise.
    """
    dest = dest.rstrip('/')
    # make a temp directory to stage artifacts in
    tempath = tempfile.mkdtemp()
    try:
        # use monotool._copy to get all our artifacts
        # in to the temp directory (tempath).
        self._copy(tempath)
        # copy the logfile into tempath so we have
        # it for later.
        lf = logfile()
        filename = lf.split('/')[-1]
        copy(lf, '%s/%s' % (tempath, filename))
        # use git hash of our current directory as part of the
        # tarball name.
        ghash, _, _ = run('git rev-parse HEAD')
        ghash = ghash.rstrip()  # remove new line character.
        app = self.__get_app_name()
        # create tarball
        self.logger.info('Saving tarball %s/%s.%s.tar.gz' % (dest, app, ghash))
        mktar(tempath, '%s/%s.%s.tar.gz' % (dest, app, ghash))
    finally:
        # clean up by deleting temp directory, even on failure.
        delete(tempath)
def ts1(wspace = None, data = None, snapSb = None, snapB = None, snapfHat = None, verbose = False) :
    """Test statistic: -2*(lnL at the S+B snapshot minus lnL at the B snapshot)."""
    logLikes = []
    for snap in (snapSb, snapB):
        wspace.loadSnapshot(snap)
        nll = common.pdf(wspace).createNLL(data)
        logLikes.append(-nll.getVal())
        utils.delete(nll)
    sbLl, bLl = logLikes
    return -2.0*(sbLl-bLl)
def del_local_file_in_dir(params):
    """After user confirmation, delete every file in the NZB's directory, then
    remove the directory itself and refresh the container.

    FIX: the loop variable was named ``file``, shadowing the builtin; renamed.
    """
    log("del_local_file_in_dir: params: %s" % params)
    local_file = utils.unquote_plus(params.get("nzb"))
    local_path = os.path.dirname(local_file)
    if xbmcgui.Dialog().yesno("Pneumatic", "Delete:", "%s" % local_path):
        for filename in utils.listdir_files(local_path):
            local_file_path = utils.join(local_path, filename)
            log("del_local_file_in_dir: delete: %s" % local_file_path)
            utils.delete(local_file_path)
        log("del_local_file_in_dir: rmdir: %s" % local_path)
        utils.rmdir(local_path)
        xbmc.executebuiltin("Container.Refresh")
def ts40(wspace = None, data = None, snapSb = None, snapB = None, snapfHat = None, verbose = False) :
    # Test statistic 40: minus the minimized NLL from a background-only fit.
    # Python 2 code (print statements).
    wspace.loadSnapshot(snapB)
    results = utils.rooFitResults(common.pdf(wspace), data)
    if verbose :
        print " B "
        print "---"
        results.Print("v")
    out = -results.minNll()
    utils.delete(results)  # free the RooFitResult
    if verbose :
        print "TS:",out
    return out
def delete():
    """Delete the record named by the 'id' query arg; requires a login session."""
    if not session:
        return redirect('/')
    record_id = request.args.get('id')
    # audit trail: record which logged-in user performed the deletion
    util.WriteLog("delete","/tmp/info.log").info("delete:%s"%session['username'])
    payload = utils.delete(table,record_id)
    return json.dumps(payload)
def delete():
    # Delete a row from `table` using the 'id' query arg; requires a session.
    # Python 2 code (print statement); the print is leftover debug output.
    if not session:
        return redirect('/')
    uid = request.args.get('id')
    print uid, table
    data = utils.delete(table, uid)
    return json.dumps(data)
def get_utxo_pool(self):
    """
    Rebuild this client's UTXO pool from the consensus blockchain.

    Runs consensus against the peers, then walks every block: outputs
    addressed to our public key are added to the pool, and pool entries
    later spent as transaction inputs are removed again.  Also tracks and
    logs per-block messaging statistics.
    """
    # one message per peer for the request below
    self.total_number_messages += len(self.peers)
    if self.name == 0:
        # return as it is original client has no pool and no check
        log(
            "PERFORMANCE",
            f'Original client total number of message sent = {self.total_number_messages} messages'
        )
        return
    blockchain = Blockchain()
    blockchain = consensus(blockchain, self.peers)
    # consensus costs a further two messages per peer
    self.total_number_messages += len(self.peers) * 2
    log("get_utxo_pool", f'blockchain chain: {blockchain.chain}')
    log(
        "PERFORMANCE",
        f'Average number of message sent per block for client {self.name} = {self.total_number_messages / len(blockchain.chain)} messages/block'
    )
    self.utxo_pool = []
    for block in blockchain.chain:
        for tx in block.transactions:
            if contains_in_list(tx.recipients, self.public_key):
                i = index(tx.recipients, self.public_key)
                new_UTXO = UTXO(tx.hash, i, tx.values[i], tx.recipients[i])
                self.utxo_pool.append(new_UTXO)
            inputs = tx.inputs
            # any pool entry consumed as an input is spent — drop it
            for utxo_input in inputs:
                if contains_in_list(self.utxo_pool, utxo_input):
                    self.utxo_pool = delete(self.utxo_pool, utxo_input)
def get_utxo_pool(self, sender): """ get chain loop transaction get output transactions that has public key of client :return: """ #TODO check race condition of all APIs utxo_pool = [] for block in self.blockchain.chain: for tx in block.transactions: log("get_utxo_pool", f"checking transaction {tx.__dict__}") log("get_utxo_pool", f"is sender in recipients list? {contains_in_list(tx.recipients, sender)}") if contains_in_list(tx.recipients, sender): i = index(tx.recipients, sender) if i == -1: raise Exception("public key is not found!!") log("get_utxo_pool", f"index of sender is found at {i}") new_UTXO = UTXO(tx.hash, i, tx.values[i], tx.recipients[i]) utxo_pool.append(new_UTXO) inputs = tx.inputs log("get_utxo_pool", f"check transaction input") for utxo_input in inputs: log("get_utxo_pool", f"check utxo input {utxo_input.__dict__}") log("get_utxo_pool", f"is input in utxo pool? {contains_in_list(utxo_pool, utxo_input)}") if contains_in_list(utxo_pool, utxo_input): log("get_utxo_pool", f"remove input utxo from utxo pool") utxo_pool = delete(utxo_pool, utxo_input) log("get_utxo_pool", f"utxo pool resulted: {utxo_pool}") return utxo_pool
def ts1(wspace=None, data=None, snapSb=None, snapB=None, snapfHat=None, verbose=False):
    """Test statistic: -2 * (lnL at the S+B snapshot minus lnL at the B snapshot)."""
    lls = {}
    for key, snap in (("sb", snapSb), ("b", snapB)):
        wspace.loadSnapshot(snap)
        nll = common.pdf(wspace).createNLL(data)
        lls[key] = -nll.getVal()
        utils.delete(nll)
    return -2.0 * (lls["sb"] - lls["b"])
def ts40(wspace=None, data=None, snapSb=None, snapB=None, snapfHat=None, verbose=False):
    # Test statistic 40: minus the minimized NLL from a background-only fit.
    # Python 2 code (print statements).
    wspace.loadSnapshot(snapB)
    results = utils.rooFitResults(common.pdf(wspace), data)
    if verbose:
        print " B "
        print "---"
        results.Print("v")
    out = -results.minNll()
    utils.delete(results)  # free the RooFitResult
    if verbose:
        print "TS:", out
    return out
def deleteuser():
    """Delete the 'user' row given by the GET 'id' arg, then return to the list."""
    if not session:
        return redirect("/login")
    if request.method != 'GET':
        return
    user_id = request.args.get('id', '')
    delete('user', user_id)
    return redirect('/userlist/')
def deletecn():
    # Delete a 'cabinet' row by the POSTed form 'id'; requires a session.
    # Python 2 code (print statement); the print is leftover debug output.
    if not session:
        return redirect('/login/')
    if request.method == 'POST':
        uid = request.form.get('id')
        result = utils.delete('cabinet', uid)
        print result
        return json.dumps(result)
def delete_task():
    """Delete a task matching the query-string filters; return status as JSON.

    Query args (all optional, default ''): start_ts, end_ts, submit_user, en_name.

    FIX: removed a leftover ``print '???'`` debug statement (Python-2-only
    syntax that also polluted stdout).
    """
    start_ts = request.args.get('start_ts', '')
    end_ts = request.args.get('end_ts', '')
    submit_user = request.args.get('submit_user', '')
    en_name = request.args.get('en_name', '')
    status = delete(en_name, start_ts, end_ts, submit_user)
    return json.dumps(status)
def delid():
    """Delete the 'user2' row given by the GET 'id' arg; redirect on success."""
    if request.method != 'GET':
        return
    target = int(request.args.get('id'))
    outcome = delete('user2', target)
    if outcome['code'] != 0:
        # NOTE(review): returns the raw result dict on failure — presumably
        # handled by the caller/framework; confirm.
        return outcome
    return redirect("/userlist/")
def serverdelete():
    """Delete a 'server' row named by the POSTed form 'id'; JSON result."""
    if not session:
        return redirect('/login/')
    if request.method != 'POST':
        return
    server_id = request.form.get('id')
    return json.dumps(utils.delete('server',server_id))
def delne():
    """Delete from "user" using the request's query args; return the JSON result.

    FIXES: removed an unreachable ``return redirect('/login/')`` that followed
    the return statement, and a leftover ``print data`` debug statement
    (Python-2-only syntax).
    """
    if not session:
        return redirect('/login/')
    # flatten multi-valued query args to their first value
    data = {k: v[0] for k, v in dict(request.args).items()}
    # NOTE(review): `filed` must be defined elsewhere in this module (possibly
    # a typo for `field`) — confirm before relying on it.
    res = delete("user", filed, data)
    return json.dumps(res)
def server_delete():
    """Delete a server row by the 'id' query arg; requires a login session."""
    if not session:
        return redirect('/')
    server_id = request.args.get('id')
    # audit trail: log which user triggered the deletion
    util.WriteLog("server_delete").info("server_delete:%s" % session['username'])
    return json.dumps(utils.delete(server_table, server_id))
def delete_task():
    """Delete a task matching the query-string filters; return status as JSON.

    Query args (all optional, default ''): start_ts, end_ts, submit_user, en_name.

    FIX: removed a leftover ``print '???'`` debug statement (Python-2-only
    syntax that also polluted stdout).
    """
    start_ts = request.args.get('start_ts','')
    end_ts = request.args.get('end_ts','')
    submit_user = request.args.get('submit_user','')
    en_name = request.args.get('en_name','')
    status = delete(en_name,start_ts,end_ts,submit_user)
    return json.dumps(status)
def plInterval(dataset, modelconfig, wspace, note, smOnly, cl=None, makePlots=True, poiList=[]):
    # Profile-likelihood confidence interval for the first POI in poiList.
    # Returns {"lowerLimit": ..., "upperLimit": ...}; optionally saves a plot.
    # NOTE(review): mutable default poiList=[] is never mutated here, so it is
    # harmless, but callers must pass a non-empty list (asserted below).
    assert poiList
    out = {}
    calc = r.RooStats.ProfileLikelihoodCalculator(dataset, modelconfig)
    calc.SetConfidenceLevel(cl)
    lInt = calc.GetInterval()
    assert wspace.var(poiList[0]), "%s not in workspace" % poiList[0]
    out["lowerLimit"] = lInt.LowerLimit(wspace.var(poiList[0]))
    out["upperLimit"] = lInt.UpperLimit(wspace.var(poiList[0]))
    ##doesn't work
    #status = r.std.vector('bool')()
    #status.push_back(False)
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"), status.front())
    #out["status"] = status.at(0)
    ##doesn't work
    #status = array.array('c', ["a"])
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"), status)
    #out["status"] = ord(status[0])
    ##perhaps works but offers no information
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"))
    #out["status"] = lInt.FindLimits(wspace.var("f"), r.Double(), r.Double())
    if makePlots:
        canvas = r.TCanvas()
        canvas.SetTickx()
        canvas.SetTicky()
        psFile = "%s/intervalPlot_%s_%g.pdf" % (directories.plot(), note, 100 * cl)
        plot = r.RooStats.LikelihoodIntervalPlot(lInt)
        plot.Draw()
        # Python 2: bare `print` emits a blank line before the canvas is saved
        print
        canvas.Print(psFile)
    utils.delete(lInt)  # free the interval object
    return out
def clsCustom(wspace, data, nToys=100, smOnly=None, testStatType=None, note="", plots=True):
    """Compute CLs for the signal-strength variable "f" using toy pseudo-experiments.

    Fits the data under b (f=0), s+b (f=1) and floating-f ("fHat") hypotheses,
    saving a workspace snapshot and nToys pseudo-datasets for each; then
    evaluates the test statistic on data and toys to form CLb, CLsb and
    CLs = CLsb/CLb.  Python 2 code (.iteritems()).
    """
    assert not smOnly
    toys = {}
    for label, f in {"b": 0.0, "sb": 1.0, "fHat": None}.iteritems():
        if f != None:
            # fixed-f hypothesis: pin the signal strength for this fit
            wspace.var("f").setVal(f)
            wspace.var("f").setConstant()
        else:
            # floating-f hypothesis ("fHat"): let the fit determine f
            wspace.var("f").setVal(1.0)
            wspace.var("f").setConstant(False)
        results = utils.rooFitResults(common.pdf(wspace), data)
        # snapshot of post-fit parameter values, referenced below by name
        wspace.saveSnapshot("snap_%s" % label, wspace.allVars())
        toys[label] = common.pseudoData(wspace, nToys)
        utils.delete(results)
    args = {
        "wspace": wspace,
        "testStatType": testStatType,
        "snapSb": "snap_sb",
        "snapB": "snap_b",
        "snapfHat": "snap_fHat"
    }
    obs = ts(data=data, **args)  # test statistic observed on the real data
    out = {}
    values = {}
    for label in ["b", "sb"]:
        values[label] = []
        for toy in toys[label]:
            values[label].append(ts(data=toy, **args))
        # fraction of toys at least as extreme as the observed value
        out["CL%s" % label] = 1.0 - utils.indexFraction(obs, values[label])
    if plots:
        plotting.clsCustomPlots(obs=obs, valuesDict=values, note="TS%d_%s" % (testStatType, note))
    # 9.9 is a sentinel for the undefined ratio when CLb == 0
    out["CLs"] = out["CLsb"] / out["CLb"] if out["CLb"] else 9.9
    return out
def serverdelete():
    """Delete a 'server' row; the id comes from the form on POST, else from the query string."""
    source = request.form if request.method == 'POST' else request.args
    record_id = source.get('id')
    return json.dumps(utils.delete('server',record_id))
def delete():
    # Delete a row from `table` using the 'id' query arg; requires a session.
    # Python 2 code (print statement); the print is leftover debug output.
    if not session:
        return redirect('/')
    uid = request.args.get('id')
    print uid, table
    data = utils.delete(table, uid)
    return json.dumps(data)
def ntupleOfFitToys(wspace = None, data = None, nToys = None, cutVar = ("",""), cutFunc = None ) :
    # Fit the observed data, then fit each pseudo-dataset (toy).
    # Returns (obs, toys).  If cutVar/cutFunc select a toy, returns early with
    # (obs, results, iToy) for that toy instead, after saving snapshot "snapA".
    results = utils.rooFitResults(pdf(wspace), data)
    wspace.saveSnapshot("snap", wspace.allVars())  # remember the data-fit point
    obs = collect(wspace, results, extraStructure = True)
    toys = []
    for i,dataSet in enumerate(pseudoData(wspace, nToys)) :
        wspace.loadSnapshot("snap")  # start every toy fit from the data-fit point
        #dataSet.Print("v")
        results = utils.rooFitResults(pdf(wspace), dataSet)
        # early exit when the user-supplied cut selects this toy
        if all(cutVar) and cutFunc and cutFunc(getattr(wspace,cutVar[0])(cutVar[1]).getVal()) :
            wspace.allVars().assignValueOnly(dataSet.get())
            wspace.saveSnapshot("snapA", wspace.allVars())
            return obs,results,i
        toys.append( collect(wspace, results) )
        utils.delete(results)  # free the RooFitResult each iteration
    return obs,toys
def delete_task(task_id):
    """Issue a DELETE for the task via the local REST API; raise RuntimeError on non-200."""
    url = 'http://localhost:8080/api/v1/task/{id}'.format(id=task_id)
    response = utils.delete(url)
    if response.status_code != 200:
        raise RuntimeError('status: {code} data: {text}'.format(
            code=response.status_code, text=response.text))
    print('Delete task status:', response.status_code, 'data:', response.text)
def delete_user():
    """Delete user `id` (query arg); admins return to the list, others to their info page.

    FIX: the failure branch called ``redirect('/userinfo/', result=result)``,
    but Flask's ``redirect()`` accepts no ``result`` keyword — that path always
    raised TypeError.  A message cannot ride along on a redirect, so the
    failure path now simply redirects.
    """
    if not session:
        return redirect('/login/')
    uid = int(request.args.get('id'))
    result = delete('user1', uid)
    if result['code'] == 0 and session['role'] == 0:
        return redirect('/userlist/')
    if result['code'] == 0 and session['role'] == 1:
        return redirect('/userinfo/')
    return redirect('/userinfo/')
def plIntervalQcd(dataset, modelconfig, wspace, note, cl = None, makePlots = True) :
    # Profile-likelihood confidence interval for the QCD normalisation "A_qcd".
    # Returns {"lowerLimit": ..., "upperLimit": ...}; optionally saves a plot.
    # Python 2 code (print statement).
    out = {}
    calc = r.RooStats.ProfileLikelihoodCalculator(dataset, modelconfig)
    calc.SetConfidenceLevel(cl)
    lInt = calc.GetInterval()
    out["upperLimit"] = lInt.UpperLimit(wspace.var("A_qcd"))
    out["lowerLimit"] = lInt.LowerLimit(wspace.var("A_qcd"))
    lInt.Print()
    if makePlots :
        canvas = r.TCanvas()
        canvas.SetTickx()
        canvas.SetTicky()
        psFile = "intervalPlot_%s_%g.ps"%(note, 100*cl)
        plot = r.RooStats.LikelihoodIntervalPlot(lInt)
        plot.Draw(); print canvas.Print(psFile)
        utils.ps2pdf(psFile)  # convert the PostScript output to PDF
    utils.delete(lInt)  # free the interval object
    return out
def addFields(self, table):
    """Apply the downloaded per-table SQL (schema-patched) to the database.

    Tables with no registered SQL location are skipped.  The SQL file is
    downloaded, every ' osmm_topo.' schema reference is rewritten to the temp
    schema, the statement is executed, and the downloaded file is removed.

    FIXES: replaced the ``not table in d.keys()`` anti-idiom with
    ``table not in d``, and replaced the quadratic ``+=`` string build with a
    single ``join`` (output string is identical, line + "\\n" per input line).
    """
    # Only style if it looks like a topo layer
    if table not in self.sqlLocations:
        return
    self.prepare(table)
    # Download SQL file
    sqlPath = utils.download(self.sqlLocations[table], table + '.sql')
    # also patch, we need different schema name
    with open(sqlPath, "rt") as fin:
        sqlQuery = "".join(
            line.replace(' osmm_topo.', " " + self.tmpSchema + ".") + "\n"
            for line in fin
        )
    self.cur.execute(sqlQuery, {})
    self.cleanUp(table)
    utils.delete(sqlPath)
def ntupleOfFitToys(wspace = None, data = None, nToys = None, cutVar = ("",""), cutFunc = None, toyNumberMod = 5) :
    # Fit the observed data, then fit each pseudo-dataset (toy); progress is
    # printed every toyNumberMod toys.  Python 2 code (print statement).
    # Returns (obs, toys), or (obs, results, iToy) early if cutVar/cutFunc
    # select a toy (snapshot "snapA" is saved first).
    results = utils.rooFitResults(common.pdf(wspace), data)
    wspace.saveSnapshot("snap", wspace.allVars())  # remember the data-fit point
    obs = collect(wspace, results, extraStructure = True)
    toys = []
    for i,dataSet in enumerate(common.pseudoData(wspace, nToys)) :
        if not (i%toyNumberMod) :
            print "iToy = %d"%i
        wspace.loadSnapshot("snap")  # start every toy fit from the data-fit point
        #dataSet.Print("v")
        results = utils.rooFitResults(common.pdf(wspace), dataSet)
        wspace.allVars().assignValueOnly(dataSet.get()) #store this toy's observations, needed for (a) computing chi2 in collect(); (b) making "snapA"
        if all(cutVar) and cutFunc and cutFunc(getattr(wspace,cutVar[0])(cutVar[1]).getVal()) :
            wspace.saveSnapshot("snapA", wspace.allVars())
            return obs,results,i
        toys.append( collect(wspace, results) )
        utils.delete(results)  # free the RooFitResult each iteration
    return obs,toys
def ts10(wspace = None, data = None, snapSb = None, snapB = None, snapfHat = None, verbose = False) :
    # Test statistic 10: -2*(lnL_sb - lnL_b) with both log-likelihoods taken
    # from full fits (minNll), unlike ts1 which evaluates at the snapshots.
    # Python 2 code (print statements).
    wspace.loadSnapshot(snapSb)
    results = utils.rooFitResults(pdf(wspace), data)
    if verbose :
        print "S+B"
        print "---"
        results.Print("v")
    sbLl = -results.minNll()
    utils.delete(results)
    wspace.loadSnapshot(snapB)
    results = utils.rooFitResults(pdf(wspace), data)
    if verbose :
        print " B "
        print "---"
        results.Print("v")
    bLl = -results.minNll()
    utils.delete(results)
    out = -2.0*(sbLl-bLl)
    if verbose :
        print "TS:",out
    return out
def go(outer={}, inner={}, outputFile="", mapOptions={}, options={}):
    # Compare "outer" vs "inner" event streams and write histograms/graphs to
    # a ROOT file.  Python 2 code (print statement, filter/lambda).
    # NOTE(review): mutable default args ({}) are mutated below (outer.update,
    # mapOptions[...] = ...) — state can leak between calls; confirm intended.
    raw.setup_root()
    import_plugins(outer["plugins"])
    outer.update(fileSpec(outer["fileNames"]))
    if inner:
        inner.update(fileSpec(inner["fileNames"]))
        # same files on both sides => entries map one-to-one
        if inner["fileNames"] == outer["fileNames"]:
            mapOptions["identityMap"] = True
    chain = raw.tchain(outer)
    oMapF, oMapB, oMapBcn = eventMaps(chain, outer, mapOptions["identityMap"])
    chainI, innerEvent, iMapF, iMapB, iMapBcn = inner_vars(
        outer, inner, mapOptions, oMapF, oMapB, oMapBcn)
    book = loop(chain=chain, chainI=chainI, outer=outer, inner=inner,
                innerEvent=innerEvent, oMapF=oMapF, options=options)
    utils.delete(chain)
    if chainI:
        utils.delete(chainI)
    # write results to a ROOT file
    dirName = os.path.dirname(outputFile)
    if not os.path.exists(dirName):
        print "Creating directory '%s'" % dirName
        os.mkdir(dirName)
    f = r.TFile(outputFile, "RECREATE")
    if not f.IsZombie():
        write_category_graphs(
            category_vs_time(oMap=oMapF, oMapBcn=oMapBcn,
                             iMap=iMapF, iMapBcn=iMapBcn,
                             innerEvent=innerEvent),
            outer, inner)
        for h in book.values():
            h.Write()
    f.Close()
    for h in book.values():
        utils.delete(h)  # free histograms after writing
    if printEventSummary(outer, inner):
        s = "%s: %4s = %6d" % (outputFile, outer["label"], len(oMapF))
        if inner:
            nBoth = len(filter(lambda x: x is not None, innerEvent.values()))
            s += ", %4s = %6d, both = %6d" % (inner["label"], len(iMapB), nBoth)
        printer.msg(s)
    oFeds = sorted(outer.get("wargs", {}).keys())
    iFeds = sorted(inner.get("wargs", {}).keys()) if inner else []
    # success flag is "no outer events found" inverted
    return not len(oMapF), oFeds, iFeds
def plInterval(dataset, modelconfig, wspace, note, smOnly, cl = None, makePlots = True, poiList = []) :
    # Profile-likelihood confidence interval for the first POI in poiList.
    # Returns {"lowerLimit": ..., "upperLimit": ...}; optionally saves a plot.
    # Python 2 code (print statement).
    # NOTE(review): mutable default poiList=[] is never mutated here, so it is
    # harmless, but callers must pass a non-empty list (asserted below).
    assert poiList
    out = {}
    calc = r.RooStats.ProfileLikelihoodCalculator(dataset, modelconfig)
    calc.SetConfidenceLevel(cl)
    lInt = calc.GetInterval()
    assert wspace.var(poiList[0]), "%s not in workspace"%poiList[0]
    out["lowerLimit"] = lInt.LowerLimit(wspace.var(poiList[0]))
    out["upperLimit"] = lInt.UpperLimit(wspace.var(poiList[0]))
    ##doesn't work
    #status = r.std.vector('bool')()
    #status.push_back(False)
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"), status.front())
    #out["status"] = status.at(0)
    ##doesn't work
    #status = array.array('c', ["a"])
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"), status)
    #out["status"] = ord(status[0])
    ##perhaps works but offers no information
    #out["upperLimit"] = lInt.UpperLimit(wspace.var("f"))
    #out["status"] = lInt.FindLimits(wspace.var("f"), r.Double(), r.Double())
    if makePlots :
        canvas = r.TCanvas()
        canvas.SetTickx()
        canvas.SetTicky()
        psFile = "%s/intervalPlot_%s_%g.pdf"%(directories.plot(), note, 100*cl)
        plot = r.RooStats.LikelihoodIntervalPlot(lInt)
        plot.Draw(); print canvas.Print(psFile)
    utils.delete(lInt)  # free the interval object
    return out
def go(outer={}, inner={}, outputFile="", mapOptions={}, options={}):
    # Compare "outer" vs "inner" event streams and write histograms/graphs to
    # a ROOT file (Python 3 variant of this routine).
    # NOTE(review): mutable default args ({}) are mutated below (outer.update,
    # mapOptions[...] = ...) — state can leak between calls; confirm intended.
    raw.setup_root()
    import_plugins(outer["plugins"])
    outer.update(fileSpec(outer["fileNames"]))
    if inner:
        inner.update(fileSpec(inner["fileNames"]))
        # same files on both sides => entries map one-to-one
        if inner["fileNames"] == outer["fileNames"]:
            mapOptions["identityMap"] = True
    chain = raw.tchain(outer)
    oMapF, oMapB, oMapBcn = eventMaps(chain, outer, mapOptions["identityMap"])
    chainI, innerEvent, iMapF, iMapB, iMapBcn = inner_vars(outer, inner, mapOptions, oMapF, oMapB, oMapBcn)
    book = loop(chain=chain, chainI=chainI, outer=outer, inner=inner, innerEvent=innerEvent, oMapF=oMapF, options=options)
    utils.delete(chain)
    if chainI:
        utils.delete(chainI)
    # write results to a ROOT file
    dirName = os.path.dirname(outputFile)
    if not os.path.exists(dirName):
        print("Creating directory '%s'" % dirName)
        os.mkdir(dirName)
    f = r.TFile(outputFile, "RECREATE")
    if not f.IsZombie():
        write_category_graphs(category_vs_time(oMap=oMapF, oMapBcn=oMapBcn, iMap=iMapF, iMapBcn=iMapBcn, innerEvent=innerEvent), outer, inner)
        for h in book.values():
            h.Write()
    f.Close()
    for h in book.values():
        utils.delete(h)  # free histograms after writing
    if printEventSummary(outer, inner):
        s = "%s: %4s = %6d" % (outputFile, outer["label"], len(oMapF))
        if inner:
            nBoth = len([x for x in list(innerEvent.values()) if x is not None])
            s += ", %4s = %6d, both = %6d" % (inner["label"], len(iMapB), nBoth)
        printer.msg(s)
    oFeds = sorted(outer.get("wargs", {}).keys())
    iFeds = sorted(inner.get("wargs", {}).keys()) if inner else []
    # success flag is "no outer events found" inverted
    return not len(oMapF), oFeds, iFeds
def delete_group(groupid):
    '''Delete a scaling group.'''
    # Python 2 code (print statements).  The group's config is first zeroed
    # (min/maxEntities = 0) so existing entities are scaled away, then the
    # group itself is deleted; both HTTP responses are printed.
    data = {
        "name": "workers",
        "cooldown": 60,
        "minEntities": 0,
        "maxEntities": 0,
        "metadata": {
            "firstkey": "this is a string",
            "secondkey": "1"
        }
    }
    print 'Updating config for group %s...' % groupid
    put = utils.put(endpoint % (auth_data['id'], 'groups/%s/config' % groupid),
                    data=data, token=auth_data['token'])
    print put.status_code, json.dumps(put.json, indent=4)
    delete = utils.delete(endpoint % (auth_data['id'], 'groups/%s' % groupid),
                          token=auth_data['token'])
    print delete.status_code, json.dumps(delete.json, indent=4)
def create_workflow(dnac, device_ip, interface, file_id):
    """Create a PnP onboarding workflow (replacing any same-named one); return its id.

    The workflow is named "<device_ip>:<interface>" with '/' mapped to '-'
    (a bare "*" interface collapses to an empty string), and contains a single
    config-download task referencing *file_id*.

    FIX: replaced the ``old_workflow != []`` comparison with a truthiness
    check — idiomatic, and also safe should find_workflow ever return None.
    """
    if interface == "*":
        interface = ""
    safe_interface = interface.replace("/", "-")
    name = '{}:{}'.format(device_ip, safe_interface)
    old_workflow = find_workflow(dnac, name)
    if old_workflow:
        response = delete(
            dnac,
            "dna/intent/api/v1/onboarding/pnp-workflow/{}".format(
                old_workflow[0]['id']))
        print("Deleting Old workflow:{}".format(name))
        logging.debug(json.dumps(response.json()))
    payload = {
        "name": name,
        "description": "",
        "currTaskIdx": 0,
        "tasks": [{
            "configInfo": {
                "saveToStartUp": True,
                "connLossRollBack": True,
                "fileServiceId": file_id
            },
            "type": "Config",
            "currWorkItemIdx": 0,
            "name": "Config Download",
            "taskSeqNo": 0
        }],
        "addToInventory": True
    }
    logging.debug(json.dumps(payload))
    response = post(dnac, "dna/intent/api/v1/onboarding/pnp-workflow", payload)
    logging.debug(json.dumps(response.json()))
    workflow_id = response.json()['id']
    print("Workflow:{} created, id:{}".format(name, workflow_id))
    return workflow_id
def get_utxo_pool(self, sender):
    """Scan this node's chain and return the unspent outputs addressed to *sender*.

    Outputs paid to *sender* are collected; any collected output that a later
    transaction consumes as an input is removed again, so only unspent
    outputs remain.  Raises if the recipient index lookup fails.
    """
    #TODO check race condition of all APIs
    pool = []
    for blk in self.chain:
        for txn in blk.transactions:
            if contains_in_list(txn.recipients, sender):
                pos = index(txn.recipients, sender)
                if pos == -1:
                    raise Exception("public key is not found!!")
                pool.append(UTXO(txn.hash, pos, txn.values[pos], txn.recipients[pos]))
            for spent in txn.inputs:
                if contains_in_list(pool, spent):
                    pool = delete(pool, spent)
    return pool
def DownloadFile(name, src, dst, image=None):
    # Download (or auto-play) a remote file; playlist sources (.txt / .SRC)
    # with multiple entries are delegated to AddPlaylistToPlaylist.
    # Python 2 code (urllib.quote_plus).
    orignalSrc = src
    playlist = False
    isSrc = src.lower().endswith('.txt') or src.lower().endswith('.%s' % SRC)
    if isSrc:
        # resolve the playlist file through S3/cloud and fetch its contents
        src = urllib.quote_plus(src)
        src = s3.getURL(src)
        src = s3.convertToCloud(src)
        src = utils.GetHTML(src, maxAge=7*86400)
        src = src.replace('\r', '')
        src = src.split('\n')
        if len(src) > 1:
            # multi-entry playlist: hand off and start playback
            repeatMode = GetRepeatMode()
            AddPlaylistToPlaylist(name, image, orignalSrc, AMAZON_FILE, isFirst=True)
            xbmc.executebuiltin('PlayerControl(%s)' % repeatMode)
            return
        src = src[0]
    #replace extension on destination
    dst = sfile.removeextension(dst) + '.' + sfile.getextension(src)
    autoPlay = True
    repeatMode = False
    thumb = ICON
    exists = sfile.exists(dst)
    if exists:
        # already downloaded: optionally re-download, otherwise play local copy
        if (not REPEAT) and (not utils.DialogYesNo(GETTEXT(30099), GETTEXT(30100))):
            exists = False
            utils.delete(dst, APPLICATION)
        else:
            autoPlay = True
            repeatMode = GetRepeatMode()
            name, thumb = utils.GetTitleAndImage(dst)
    if not exists:
        # not on disk: ask whether to download or stream directly
        autoPlay = False
        if REPEAT or not utils.DialogYesNo(GETTEXT(30085), GETTEXT(30086), noLabel=GETTEXT(30111), yesLabel=GETTEXT(30112)):
            autoPlay = True
            repeatMode = GetRepeatMode()
    if autoPlay:
        downloaded = 0
    else:
        downloaded = utils.DoDownload(name, dst, src, image, orignalSrc)
    if downloaded > 0: #not successful
        if downloaded == 1:#failed NOT cancelled
            utils.DialogOK(name, utils.GETTEXT(30081))
        return
    if autoPlay:
        src = 'AMAZON@' + src
        #download any related files??
        AddToPlaylist(name, thumb, src, AMAZON_FILE, isFirst=True)
        xbmc.executebuiltin('PlayerControl(%s)' % repeatMode)
        xbmcgui.Window(10000).setProperty('LB_AUTOPLAY', 'True')
        APPLICATION.containerRefresh()
    else:
        utils.DialogOK(name, utils.GETTEXT(30082))
        APPLICATION.containerRefresh()
def ts4(wspace = None, data = None, snapSb = None, snapB = None, snapfHat = None, verbose = False) :
    """Test statistic 4: log-likelihood of the data at the background-only snapshot."""
    wspace.loadSnapshot(snapB)
    negLogLike = pdf(wspace).createNLL(data)
    logLikelihood = -negLogLike.getVal()
    utils.delete(negLogLike)
    return logLikelihood
def delete_policy(polid, groupid):
    '''Deletes a scaling policy'''
    # Python 2 code (print statement).  Issues the DELETE and prints the
    # HTTP status plus the pretty-printed JSON response.
    resp = utils.delete(endpoint % (auth_data['id'], 'groups/%s/policies/%s' % (groupid, polid)),
                        token=auth_data['token'])
    print resp.status_code, json.dumps(resp.json, indent=4)
def msg_delete():
    """Delete the user message named by the GET 'id' arg; redirect on success."""
    if request.method != 'GET':
        return
    # flatten multi-valued query args to their first value
    args = {k: v[0] for k, v in dict(request.args).items()}
    if delete('user_messages', args['id']):
        return redirect("/userlist/")
def delete_user():
    """Delete the 'user1' row whose id arrives as the 'id' query arg, then return to the list."""
    target = int(request.args.get('id'))
    delete('user1', target)
    return redirect('/userlist/')
# Continuation of a mode-dispatch chain whose opening `if` is outside this
# view.  Python 2 code (`except Exception, e`).
elif mode == LOCAL_FILE:
    # Play a locally stored file; title/image are optional URL-encoded params.
    try:
        try:
            title = urllib.unquote_plus(params['title'])
        except:
            title = ''
        try:
            image = urllib.unquote_plus(params['image'])
        except:
            image = ''
        PlayResolvedVideo(mode, url, title, image)
    except Exception, e:
        utils.Log('Error in LOCAL_FILE mode - %s' % str(e))
elif mode == DELETE_LOCAL_FILE:
    utils.delete(url, APPLICATION)
elif mode == DELETE_LOCAL_FOLDER:
    # same deletion helper handles files and folders
    utils.delete(url, APPLICATION)
elif mode == SERVER_FOLDER or mode == AMAZON_FOLDER:
    try:
        ParseRemoteFolder(url, mode)
        if url == utils.GetClient() and showVimeo():
            ExaminationRoom()
    except Exception, e:
        utils.DialogOK(str(e))
        utils.Log('Error in SERVER_FOLDER mode(%d) - %s' % (mode, str(e)))
if __name__ == '__main__':
    # Entry point: configure logging, then pick regions either from the JSON
    # config (1 CLI arg) or from a custom FIRE bounding box (4 CLI args).
    logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
    if len(sys.argv) == 2:
        pass  # use the domains already configured in etc/rtma_cycler.json
    elif len(sys.argv) == 5:
        code = 'FIRE'
        cfg.regions = {"Fire domain": {"code": code, "bbox": sys.argv[1:5]}}
        # wipe any previous workspace artifacts for this domain; best-effort
        try:
            os.remove(osp.join(cfg.workspace_path, code + '-geo.nc'))
        except Exception as e:
            logging.warning(e)
        try:
            delete(osp.join(cfg.workspace_path, code))
        except Exception as e:
            logging.warning(e)
    else:
        print('Usage: to use domains configured in etc/rtma_cycler.json:')
        print('./rtma_cycler.sh anything')
        print('To use a custom domain named FIRE by giving a bounding box:')
        print('./rtma_cycler.sh lat1 lon1 lat2 lon2')
        print('Example: ./rtma_cycler.sh 42, -124.6, 49, -116.4')
        exit(1)
    logging.info('regions: %s' % json.dumps(cfg.regions))
    #logging.info('regions: %s' % json.dumps(cfg.regions, indent=1, separators=(',',':')))
    # current time
    now = datetime.now(pytz.UTC)
def delete_device(dnac, deviceId):
    # Delete a PnP onboarding device by id; the JSON response is printed
    # (debug output).  Python 2 code (print statement).
    response = delete(dnac, "onboarding/pnp-device/{}".format(deviceId))
    print response.json()
def delete_device(dnac, deviceId):
    """Delete a PnP device by id; return the device's reported state string."""
    reply = delete(dnac, "onboarding/pnp-device/{}".format(deviceId))
    body = reply.json()
    return body['deviceInfo']['state']
def delete(user="", title=""):
    """Delete *title* for *user* when a non-guest session exists; else bounce home."""
    logged_in = 'un' in session and session['un'] != 0
    if not logged_in:
        return redirect(url_for("home"))
    utils.delete(user, title)
    return redirect(url_for("profile"))
def delete():
    # Delete a row by the 'id' query arg, then send the user to the login page.
    # Python 2 code (print statement); the print is leftover debug output.
    uid=request.args.get('id')
    print uid,table
    # NOTE(review): deletes via `User` while the debug print references
    # `table` — confirm which the deletion should target.
    utils.delete(User,uid)
    return redirect('/login/')
def deleteChains(self) :
    # Free up memory held by ROOT objects
    # (http://wlav.web.cern.ch/wlav/pyroot/memory.html).
    # Python 2 code (.iteritems()).
    for chain in self.chains.values() :
        utils.delete(chain)
    for step in self.steps :
        # list(...) snapshots the items so deletion can't disturb iteration
        for name,hist in list(step.book.iteritems()) :
            utils.delete(hist)
def go(outer={}, inner={}, outputFile="", mapOptions={}, loopOptions={}, printEventSummary=None):
    # Compare "outer" vs "inner" event streams and write histograms/graphs to
    # a ROOT file.  Python 2 code (print statement, .iteritems(), filter).
    # NOTE(review): mutable default args ({}) could leak state between calls
    # if mutated by the helpers — confirm.
    innerEvent = {}
    deltaOrn = {}
    chain = tchain(outer)
    # by default:
    nMapMin = 0  # start from beginning
    nMapMax = None  # look at all entries
    nEv = outer["nEventsMax"] + outer["nEventsSkip"]
    if nEv:
        if mapOptions["identityMap"]:
            nMapMin = outer["nEventsSkip"]
            nMapMax = nEv
        else:
            nMapMax = nEv * 2  # a guess for how far to look not to miss out-of-order events
    oMapF, oMapB, oMapBcn = eventMaps(chain, outer, nMapMin=nMapMin, nMapMax=nMapMax)
    iMapF = iMapB = iMapBcn = {}
    if inner.get("fileNames") == outer["fileNames"]:
        # identical inputs: reuse the same chain, no cross-mapping needed
        chainI = chain
        innerEvent = {}
    elif inner:
        chainI = tchain(inner)
        if mapOptions["identityMap"]:
            iMapF = oMapF
            iMapB = oMapB
            iMapBcn = oMapBcn
        else:
            iMapF, iMapB, iMapBcn = eventMaps(chainI, inner)
            innerEvent = eventToEvent(oMapF, iMapB)
            if set(innerEvent.values()) == set([None]):
                sys.exit("No common events found. Consider passing --identity-map.")
        if mapOptions['printEventMap']:
            for oEntry, iEntry in sorted(innerEvent.iteritems()):
                printer.msg(", ".join(["oEntry = %s" % str(oEntry),
                                       "oEvnOrn = %s" % str(oMapF[oEntry]),
                                       "iEntry = %s" % str(iEntry),
                                       ]))
    else:
        chainI = None
    book = loop(chain=chain, chainI=chainI, outer=outer, inner=inner,
                innerEvent=innerEvent, options=loopOptions)
    utils.delete(chain)
    if chainI:
        utils.delete(chainI)
    # write results to a ROOT file
    dirName = os.path.dirname(outputFile)
    if not os.path.exists(dirName):
        print "Creating directory '%s'" % dirName
        os.mkdir(dirName)
    f = r.TFile(outputFile, "RECREATE")
    d = category_vs_time(oMap=oMapF, oMapBcn=oMapBcn, iMap=iMapF, iMapBcn=iMapBcn, innerEvent=innerEvent)
    if outer["fedIds"]:
        iFeds = inner.get("fedIds", [])
        if not iFeds:
            iFeds = [None]
        for iGraph, gr in enumerate(graphs(d, oFed=outer["fedIds"][0], iFed=iFeds[0])):
            # first two graphs carry descriptive titles; the rest keep defaults
            if iGraph == 0:
                gr.SetTitle("_".join(["only %s" % inner.get("label", ""),
                                      "only %s" % outer.get("label", ""),
                                      "both"]))
            if iGraph == 1:
                gr.SetTitle(",".join(outer["fileNames"]))
            gr.Write()
    for h in book.values():
        h.Write()
    f.Close()
    for h in book.values():
        utils.delete(h)  # free histograms after writing
    if printEventSummary:
        s = "%s: %4s = %6d" % (outputFile, outer["label"], len(oMapF))
        if inner:
            nBoth = len(filter(lambda x: x is not None, innerEvent.values()))
            s += ", %4s = %6d, both = %6d" % (inner["label"], len(iMapB), nBoth)
        printer.msg(s)
    # success flag is "no outer events found" inverted
    return not len(oMapF)
def AddPlaylistToPlaylist(title, image, url, mode, isFirst=False):
    # Ensure the playlist source exists locally (downloading and caching it,
    # plus its description and artwork, if necessary), then queue each entry.
    # Python 2 code (urllib.quote_plus).  Recurses once after caching.
    src = url
    playlist = []
    root = ''
    while not sfile.exists(src):
        loc = utils.getDownloadLocation()
        client = utils.GetClient() + s3.DELIMETER
        dst = os.path.join(loc, url.replace(client, ''))
        if sfile.exists(dst):
            # a cached copy exists: reuse it, or delete and re-fetch
            if utils.DialogYesNo(GETTEXT(30101), GETTEXT(30100)):
                src = dst
                break
            else:
                utils.delete(dst, APPLICATION)
        # fetch the remote playlist text via S3/cloud
        src = urllib.quote_plus(src)
        src = s3.getURL(src)
        src = s3.convertToCloud(src)
        src = utils.GetHTML(src, maxAge=7*86400)
        src = src.replace('\r', '')
        src = src.split('\n')
        # rewrite each entry with the AMAZON@ prefix and cache it locally
        content = ''
        root = 'AMAZON@'
        for video in src:
            if len(video.strip()) > 0:
                content += root
                content += video
                content += '\r\n'
        sfile.write(dst, content)
        src = utils.removeExtension(url)
        root = utils.removeExtension(dst)
        # cache the description alongside the playlist
        plotFile = root + '.%s' % DSC
        plot = utils.getAmazonContent(url, DSC)
        sfile.write(plotFile, plot)
        # try each known artwork extension and download whichever exists
        imageTypes = IMG_EXT
        imageTypes.append('.gif')
        for ext in imageTypes:
            image = src + ext
            image = s3.getURL(urllib.quote_plus(image))
            image = s3.convertToCloud(image)
            utils.DownloadIfExists(image, root+ext)
        # recurse with the now-local copy
        return AddPlaylistToPlaylist(title, image, dst, mode, isFirst)
    playlist = utils.getPlaylistFromLocalSrc(src)
    for video in playlist:
        utils.Log('Adding to playlist: %s' % video)
        title, thumb = utils.GetTitleAndImage(video)
        if not thumb:
            thumb = image
        # best-effort: a single bad entry must not abort the whole playlist
        try:
            AddToPlaylist(title, thumb, video, mode=mode, isFirst=isFirst)
        except:
            pass
        isFirst = False
def delete(user="", title=""):
    """Remove *title* belonging to *user*; guests are sent back to the home page."""
    if 'un' in session and session['un'] != 0:
        utils.delete(user, title)
        return redirect(url_for("profile"))
    return redirect(url_for("home"))
def delete_device(dnac, deviceId):
    """Delete a PnP device; return its state, or the API error message on HTTP 400."""
    resp = delete(dnac, "onboarding/pnp-device/{}".format(deviceId))
    if resp.status_code != 400:
        return resp.json()['deviceInfo']['state']
    error_body = json.loads(resp.text)
    return error_body['response']['message']