def runPipelines(id):
    # Load the pipeline document and advance it task by task, in "order".
    filter = {'_id': ObjectId(id)}
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    data = db.query(filter=filter, collection="pipelines")
    tasks = data[0]["tasks"]
    df = pd.DataFrame(tasks)
    df = df.sort_values(by=["order"])  # sort_values returns a copy; keep the result
    current_task = -1
    for index, task in df.iterrows():
        if current_task == task["order"] or current_task < 0:
            if task["state"] in STATES_PIPELINE:
                if task["state"] == STATE_COMPLETED or task["state"] == STATE_ERROR:
                    # Completed (or failed) tasks hand over to the next order slot.
                    current_task = task["order"] + 1
                elif task["state"] == STATE_RUNNING or task["state"] == STATE_INITIATED:
                    current_task = task["order"]
            else:
                # Task not started yet: mark it initiated and dispatch its actor,
                # looked up by name among the module-level actors.
                updatePipeline(id, task["action"], STATE_INITIATED)
                task["params"]["IDpipeLine"] = id
                logger.info("runPipelines:: Run {0} action".format(task["action"]))
                globals()[task["action"]].send(**task["params"])
                if not task["skip"]:
                    break
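# A minimal sketch of the task documents runPipelines expects, inferred from the
# field accesses above (action, order, state, skip, params); the concrete values
# shown here are illustrative, not the production schema.
#
# example_task = {
#     "action": "crossMatchCollection",  # must name a module-level actor
#     "order": 0,                        # execution position in the pipeline
#     "state": "",                       # one of STATES_PIPELINE once started
#     "skip": False,                     # True lets the pipeline continue past this task
#     "params": {"collection": "lastdetections7"},
# }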
def tnsUpdate(**kwargs):
    # Cross-match every source in the collection against TNS within a fixed
    # radius and store the match under "crossmatch.tns".
    collection = current_collection
    projection = {}
    radio = 5
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    filter = {}
    if "collection" in kwargs.keys() and kwargs["collection"] != "":
        collection = kwargs["collection"]
    if "filter" in kwargs.keys() and kwargs["filter"] != "":
        filter = kwargs["filter"]
    db.setCollection(collection)
    data = db.getData(filter=filter, projection=projection)
    cont = 0
    for index, row in enumerate(data):
        ra = row["ra"]
        dec = row["dec"]
        tns = tnsxmatch(ra, dec, radio)
        if tns is not None:
            updated = db.update(filter={"id": row["id"]},
                                query={"$set": {"crossmatch.tns": tns}})
            print("tns update", row["id"])
            cont += 1
    print("updated {} of {}".format(cont, len(data)))
def getAll(filter={}, projection=""): db = MongodbManager() config = Config() dbconfig = config.getDatabase("mongodb") db.setDatabase(dbconfig["dbname"]) db.setCollection("tnssn") data = db.getData(filter=filter, projection=projection) return data
def getAggegation(collection, pipeline):
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    data = db.command(collection=collection, pipeline=pipeline)
    return data
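# Usage sketch for getAggegation: any standard MongoDB aggregation pipeline can
# be passed through; the stages and collection below are illustrative only.
#
# pipeline = [
#     {"$match": {"broker": "alerce"}},
#     {"$group": {"_id": "$state", "count": {"$sum": 1}}},
# ]
# counts = getAggegation("lastdetections7", pipeline)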
def createPipeline(tasks):
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    id = db.saveData(tasks, collection="pipelines")
    logger.info("createPipeline:: created PIPELINE {0}".format(str(id)))
    return id
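# Hypothetical end-to-end usage: persist a two-task pipeline and kick it off.
# The task documents follow the sketch shown after runPipelines above; the
# actions and params here are illustrative.
#
# tasks = {"tasks": [
#     {"action": "checkLastDetections", "order": 0, "state": "", "skip": False,
#      "params": {"days_ago": 15}},
#     {"action": "crossMatchCollection", "order": 1, "state": "", "skip": True,
#      "params": {}},
# ]}
# pipeline_id = createPipeline(tasks)
# runPipelines.send(str(pipeline_id))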
def getLightCurve(ztfid):
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection("lastdetections7")
    filter = {"id": ztfid}
    projection = {"lightpeak.lightcurve": 1}
    data = db.getData(filter=filter, projection=projection)
    return data
def scoreCandidates(collection, filter={}):
    lasairarchive = LasairArchive()
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    data = db.getData(filter=filter)
    for indx, row in enumerate(data):
        print("ID score", row["id"])
def getByID(id="", filter="", collection="lastdetections7"): db = MongodbManager() config = Config() dbconfig = config.getDatabase("mongodb") db.setDatabase(dbconfig["dbname"]) db.setCollection(collection) if filter == "": filter = {"id": {'$eq': id}} data = db.getData(filter=filter) return data
def calcABMagCandidates():
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(current_collection)
    data = db.getData()
    for index, row in enumerate(data):
        try:
            if "crossmatch" in row.keys() and len(row["crossmatch"]) > 0:
                ab_mags = calcAbsoluteMagnitud(row)
                db.update(filter={"id": row["id"]}, query={"$set": {"abmag": ab_mags}})
                logger.info("calcABMagCandidates:: updated {0}".format(row["id"]))
        except Exception as ex:
            logger.error("calcABMagCandidates:: error getting abmags {0} error {1}".format(row["id"], str(ex)))
def calcABMagnitud():
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection("tnssn")
    # Only TNS supernovae with a positive redshift and a ZTF internal name.
    data = db.getData(
        filter={"Redshift": {'$gt': 0}, "DiscInternalName": {"$regex": '^ZTF'}},
        projection={"DiscInternalName": 1, "lightcurve.candidates": 1,
                    "Redshift": 1, "id": 1, "Name": 1})
    for index, row in enumerate(data):
        try:
            dt = pd.DataFrame(row["lightcurve"]["candidates"])
            magsd = dt["magpsf"].tolist()
            ab = Convertion.aparentToAbsoluteMagnitud(magsd, z=row["Redshift"])
            db.update({"id": row["id"]}, query={"$set": {"abmag": ab.tolist()}})
        except Exception as err:
            logger.error("calcABMagnitud:: Can't calculate AB magnitudes for {0}: {1}".format(row["id"], str(err)))
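# NOTE: Convertion.aparentToAbsoluteMagnitud is defined elsewhere; the helper
# below is only an illustrative sketch of the standard distance-modulus
# conversion it presumably performs, M = m - 5*log10(d_L / 10 pc), assuming a
# flat LambdaCDM cosmology via astropy. It is not used by this module.
def _apparent_to_absolute_sketch(mags, z, H0=70, Om0=0.3):
    from astropy.cosmology import FlatLambdaCDM
    import numpy as np
    cosmo = FlatLambdaCDM(H0=H0, Om0=Om0)
    # luminosity distance at redshift z, converted to parsecs
    d_pc = cosmo.luminosity_distance(z).to("pc").value
    return np.asarray(mags) - 5 * np.log10(d_pc / 10.0)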
def getQuery(collection, filter={}, projection={}):
    # filter and projection arrive as JSON strings (e.g. from an HTTP endpoint)
    # and are decoded here before querying.
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    if filter != {} and filter != "":
        filter = json.loads(filter)
    else:
        filter = {}
    if projection != {} and projection != "":
        projection = json.loads(projection)
    else:
        projection = {}
    data = db.getData(filter=filter, projection=projection)
    return data
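# Usage sketch: getQuery takes the filter and projection as JSON strings; the
# collection and field names below are illustrative.
#
# rows = getQuery("lastdetections7",
#                 filter='{"broker": "alerce"}',
#                 projection='{"id": 1, "lastmjd": 1}')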
def calcRedshiftCandidates(**kwargs):
    collection = current_collection
    if "collection" in kwargs.keys() and kwargs["collection"] != "":
        collection = kwargs["collection"]
    filter = {}
    if "filter" in kwargs.keys() and kwargs["filter"] != "":
        filter = kwargs["filter"]
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "calcRedshiftCandidates", STATE_RUNNING)
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    data = db.getData(filter=filter)
    for index, row in enumerate(data):
        try:
            print("get redshift to " + row["id"])
            if "crossmatch" in row.keys() and len(row["crossmatch"].keys()) > 0:
                good_spec, good_photo, photo_spec, redshift = getRedshifts(row["crossmatch"])
                sncos = False  # SNCosmo fallback fitting is disabled by default
                if len(redshift) <= 0 and sncos:
                    # Fall back to an SNCosmo fit when both bands have enough detections.
                    if "g" in row["lightpeak"]["lightcurve"] and "r" in row["lightpeak"]["lightcurve"]:
                        if row["lightpeak"]["lightcurve"]["g"]["detections"] >= 2 and row["lightpeak"]["lightcurve"]["r"]["detections"] >= 2:
                            snclasifier = getSNCosmosFit(row["lightcurve"], id=row["id"])
                            if snclasifier is not None:
                                redshift["sncosmos"] = snclasifier
                query = {"redshift": redshift, "best_photo_z": good_photo, "best_spec_z": good_spec}
                query.update(photo_spec)
                up = db.update(filter={"id": row["id"]}, query={"$set": query})
                print("update " + row["id"])
        except Exception as ex:
            logger.error("calcRedshiftCandidates:: Can't calculate redshift for {0} error {1}".format(row["id"], str(ex)))
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "calcRedshiftCandidates", STATE_COMPLETED)
def updatePipeline(pipelineID, taskname, status, error=""): #pipelineID=ObjectId(pipelineID) db = MongodbManager() config = Config() dbconfig = config.getDatabase("mongodb") db.setDatabase(dbconfig["dbname"]) now= datetime.now().timestamp() filter={'_id': ObjectId(pipelineID),"tasks.action":taskname} query={"tasks.$.state":status} db.update(filter=filter, query={"$set":query}, collection="pipelines") msg="" filter = {'_id': ObjectId(pipelineID)} if error != "": msg=error query = {"$addToSet": {"activities": {"task": taskname, "state": status, "date": now,"msg":msg}}} taskup = db.update(filter=filter, query=query, collection="pipelines") logger.info("updatePipeline:: Updated {0} task to {1}".format(taskname,status)) if status == STATE_COMPLETED or status == STATE_ERROR: #runPipelines(pipelineID) runPipelines.send(pipelineID)
def classifyCandidate():
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(current_collection)
    data = db.getData(filter={"$or": [{"lightpeak.lightcurve.g.magab": {"$exists": True}},
                                      {"lightpeak.lightcurve.r.magab": {"$exists": True}}]})
    for indx, row in enumerate(data):
        print("row", row["id"])
        db.setCollection("tnssn")
        probabilities = {}
        filters = []
        if ("g" in row["peak"]["stats"].keys() and "magab" in row["peak"]["stats"]["g"]) or \
           ("r" in row["peak"]["stats"].keys() and "magab" in row["peak"]["stats"]["r"]):
            # Use the g-band archives when present, otherwise fall back to r-band.
            try:
                keys = row["peak"]["stats"]["g"]["magab"].keys()
            except Exception as err:
                keys = row["peak"]["stats"]["r"]["magab"].keys()
            for archive in keys:
                try:
                    maxg = min(row["peak"]["stats"]["g"]["magab"][archive])
                    filters.append({"peak.stats.g.abmag": {"$lte": maxg}})
                except Exception:
                    print("not g band", row["id"])
                try:
                    maxr = min(row["peak"]["stats"]["r"]["magab"][archive])
                    filters.append({"peak.stats.r.abmag": {"$lte": maxr}})
                except Exception:
                    print("not r band", row["id"])
                if len(filters) > 0:
                    # Look up known TNS supernovae at least as bright at peak.
                    classtypes = db.getData({"$or": filters},
                                            projection={"Redshift": 1, "ObjType": 1, "id": 1,
                                                        "peak": 1, "DiscInternalName": 1})
                    if len(classtypes) > 0:
                        classify = []
                        for idx, classtype in enumerate(classtypes):
                            if idx > 10:
                                break  # keep only the first matches per archive
                            data_classifier = {"redshift": classtype["Redshift"],
                                               "ObjType": classtype["ObjType"],
                                               "id": classtype["id"],
                                               "ztfid": classtype["DiscInternalName"]}
                            if "g" in classtype["peak"]["stats"]:
                                data_classifier["slope_g"] = classtype["peak"]["stats"]["g"]["slope"]
                                data_classifier["abmagpeak_g"] = min(classtype["peak"]["stats"]["g"]["abmag"])
                                data_classifier["magpeak_g"] = min(classtype["peak"]["stats"]["g"]["y"])
                            if "r" in classtype["peak"]["stats"]:
                                data_classifier["slope_r"] = classtype["peak"]["stats"]["r"]["slope"]
                                data_classifier["abmagpeak_r"] = min(classtype["peak"]["stats"]["r"]["abmag"])
                                data_classifier["magpeak_r"] = min(classtype["peak"]["stats"]["r"]["y"])
                            classify.append(data_classifier)
                        probabilities[archive] = classify
        if len(probabilities.keys()) > 0:
            try:
                db.setCollection(current_collection)
                upd = db.update(filter={"id": row["id"]},
                                query={"$set": {"probabilities": probabilities}})
                logger.info("classifyCandidate:: classifier update {0}".format(row["id"]))
            except Exception as err:
                logger.error("classifyCandidate:: failed to save probabilities for {0}".format(row["id"]))
        del probabilities
def crossMatchCollection(**kwargs):
    collection = current_collection
    filter = {'crossmatch.check': False}
    projection = {}
    forcecrossmatch = False
    radio = 5
    if "collection" in kwargs.keys() and kwargs["collection"] != "":
        collection = kwargs["collection"]
    if "forcecrossmatch" in kwargs.keys() and kwargs["forcecrossmatch"] != "":
        forcecrossmatch = kwargs["forcecrossmatch"]
    if "filter" in kwargs.keys() and kwargs["filter"] != "":
        filter = kwargs["filter"]
    if "radio" in kwargs.keys() and kwargs["radio"] != "":
        radio = kwargs["radio"]
    if "projection" in kwargs.keys() and kwargs["projection"] != "":
        projection = kwargs["projection"]
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "crossMatchCollection", STATE_RUNNING)
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    # Make sure the fields needed for the cross-match are always projected.
    if "crossmatch" not in projection:
        projection["crossmatch"] = 1
    if "id" not in projection:
        projection["id"] = 1
    if "ra" not in projection:
        projection["ra"] = 1
    if "dec" not in projection:
        projection["dec"] = 1
    data = db.getData(filter=filter, projection=projection)
    print("cross match sources to update", len(data))
    for index, row in enumerate(data):
        if forcecrossmatch or ("crossmatch" not in row.keys() or row["crossmatch"]["check"] == False):
            try:
                id = row["id"]
                logger.info("try cross match..." + id)
                print("try cross match..." + id, index, row["crossmatch"]["check"])
                ra = row["ra"]
                dec = row["dec"]
                current = row["crossmatch"]
                print("cross match", id)
                crossdata = crossMatch(ra, dec, radio=radio)
                # Preserve the existing lasair matches and mark the source as checked.
                crossdata["lasair"] = current["lasair"]
                crossdata["check"] = True
                logger.info("check follow up candidates and update ZTF light curves..." + id)
                updated = db.update(filter={"id": id}, query={"$set": {"crossmatch": crossdata}})
                print("id {0} updated {1}".format(id, updated))
            except Exception as err:
                print("error cross match", err)
                logger.error("crossMatchCollection:: Can't crossmatch {0}".format(row["id"]))
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "crossMatchCollection", STATE_COMPLETED)
def checkLastDetections(**kwargs):
    allrecords = 0
    collection = current_collection
    days_ago = 15
    if "collection" in kwargs.keys() and kwargs["collection"] != "":
        collection = kwargs["collection"]
    if "days_ago" in kwargs.keys() and kwargs["days_ago"] != "":
        days_ago = kwargs["days_ago"]
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "checkLastDetections", STATE_RUNNING)
    logger.info("checkLastDetections:: getting the last ZTF detections from brokers...")
    lasairarchive = LasairArchive()
    # Connect to the database
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    # Get the latest candidates and update previous detections and light curves
    bestCandidates = BestCandidates()
    table_candidates, alerceDF, lasairDF = bestCandidates.searchCadidates(days_ago)
    # Collect every ZTF id, check which candidates already exist in the
    # database, and drop those rows from the candidate table.
    listcandidates = table_candidates["id"]
    filter = {"oid": {"$in": listcandidates.data.tolist()}}
    projection = {"oid": 1, "lastmjd": 1, "last_update": 1}
    current_data = db.getData(filter=filter, projection=projection)
    for remove_data in current_data:
        oid = remove_data["oid"]
        print("get info for", oid)
        table_candidates.remove_rows(table_candidates["id"] == oid)
    # Get DESI photometric redshifts for the remaining candidates
    dataarchive = SussexArchive()
    desi_targetsvo, desi_targetstable = dataarchive.getDesiPhotoZfromTable(table_candidates)
    alerceTable = QTable.from_pandas(alerceDF)
    lasairTable = QTable.from_pandas(lasairDF)
    alerceTable.rename_column("oid", "id")
    lasairTable.rename_column("oid", "id")
    alerceTable["id"] = alerceTable["id"].astype(str)
    lasairTable["id"] = lasairTable["id"].astype(str)
    desi_targetstable["id"] = desi_targetstable["id"].astype(str)
    desi_targetstable["desidec"].mask = False
    desi_targetstable["desira"].mask = False
    # Calculate the separation between each candidate and its DESI source
    ra_ref = desi_targetstable["ramean"].tolist()
    dec_ref = desi_targetstable["decmean"].tolist()
    cref = SkyCoord(ra_ref, dec_ref, frame='icrs', unit='deg')
    ra_desi = desi_targetstable["desira"].tolist()
    dec_desi = desi_targetstable["desidec"].tolist()
    c1 = SkyCoord(ra_desi, dec_desi, frame='icrs', unit='deg')
    desi_distance = cref.separation(c1).arcsec
    desi_targetstable["separation"] = desi_distance
    # Merge all tables into one record set to store in Mongo
    desi_targetstable = Table(desi_targetstable, masked=False)
    alerceTable = Table(alerceTable, masked=False)
    lasairTable = Table(lasairTable, masked=False)
    alerceTable["broker"] = "alerce"
    lasairTable["broker"] = "lasair"
    update_alerce_table = join(alerceTable, lasairTable, join_type='outer', keys='id')
    merge_table = join(update_alerce_table, desi_targetstable, join_type='outer', keys='id')
    merge_table["desiid"] = merge_table["desiid"].astype(str)
    merge_table["field"] = merge_table["field"].astype(str)
    lastItems = merge_table.to_pandas()
    newItems = lastItems.fillna('', axis=1)
    dic_result = newItems.to_dict('records')
    newCandidates = 0
    logger.info("checkLastDetections:: Ingested {0} candidates".format(str(len(dic_result))))
    allrecords = len(dic_result)
    now = datetime.now().timestamp()  # keep a value even if dic_result is empty
    for index, row in enumerate(dic_result):
        id = row["id"]
        print("saving candidate", id)
        row["comments"] = {}
        row["snh_score"] = 0.0
        if row["broker_1"] != "":
            # reported by alerce (and possibly lasair as well)
            if row["broker_2"] != "":
                row["broker"] = row["broker_1"] + "/" + row["broker_2"]
            else:
                row["broker"] = row["broker_1"]
            row["meanra"] = row["meanra_1"]
            row["meandec"] = row["meandec_1"]
            row["lastmjd"] = row["lastmjd_1"]
        else:
            # reported by lasair only
            row["broker"] = row["broker_2"]
            row["meanra"] = row["meanra_2"]
            row["meandec"] = row["meandec_2"]
            row["lastmjd"] = row["lastmjd_2"]
        try:
            # remove the duplicated per-broker fields
            del row["broker_1"]
            del row["broker_2"]
            del row["meanra_1"]
            del row["meandec_1"]
            del row["meanra_2"]
            del row["meandec_2"]
            del row["lastmjd_2"]
            del row["lastmjd_1"]
        except KeyError as er:
            print("key error", er, id)
        # If the candidate already exists and was updated recently, skip it.
        currentdata = db.getData(filter={"id": id}, projection={"nobs": 1, "last_update": 1, "id": 1})
        now = datetime.now().timestamp()
        rowupdated = {}
        if len(currentdata) > 0:
            currentdata = currentdata[0]
            days_from_update = ((now - float(currentdata["last_update"])) / 3600) / 24
            if days_from_update < 0.6:
                print("last detection is recent, skipping service/classification update", id)
                logger.info("checkLastDetections:: {0} last detection is recent, skipping service/classification update".format(id))
                continue
        classification = getClassification(id)
        rowupdated["ra"] = row["meanra"]
        rowupdated["dec"] = row["meandec"]
        rowupdated["lasair_clas"] = classification["lasair_clas"]
        rowupdated["alerce_clas"] = classification["alerce_clas"]
        rowupdated["alerce_early_class"] = classification["alerce_early_class"]
        rowupdated["alerce_late_class"] = classification["alerce_late_class"]
        rowupdated["crossmatch"] = {"lasair": classification["light_curve"]["crossmatches"], "check": False}
        rowupdated["lightcurve"] = classification["light_curve"]["candidates"]
        rowupdated["report"] = row
        rowupdated["broker"] = row["broker"]
        rowupdated["nobs"] = row["nobs"]
        rowupdated["lastmjd"] = row["lastmjd"]
        rowupdated["sigmara"] = row["sigmara"]
        rowupdated["sigmadec"] = row["sigmadec"]
        rowupdated["last_magpsf_g"] = row["last_magpsf_g"]
        rowupdated["last_magpsf_r"] = row["last_magpsf_r"]
        rowupdated["first_magpsf_g"] = row["first_magpsf_g"]
        rowupdated["first_magpsf_r"] = row["first_magpsf_r"]
        rowupdated["sigma_magpsf_g"] = row["sigma_magpsf_g"]
        rowupdated["sigma_magpsf_r"] = row["sigma_magpsf_r"]
        rowupdated["max_magpsf_g"] = row["max_magpsf_g"]
        rowupdated["max_magpsf_r"] = row["max_magpsf_r"]
        rowupdated["id"] = row["id"]
        # Check again whether the candidate exists: update its light curve if
        # it does, otherwise insert it as a new candidate.
        currentdata = db.getData(filter={"id": id}, projection={"nobs": 1, "last_update": 1, "id": 1})
        now = datetime.now().timestamp()
        if len(currentdata) > 0:
            # update the existing record
            try:
                if currentdata[0]["nobs"] < rowupdated["nobs"]:
                    peak = lasairarchive.getPeakLightCurve(classification["light_curve"]["candidates"])
                    rowupdated["lightpeak"] = peak
                    update_query = {"last_update": now,
                                    "lightcurve": rowupdated["lightcurve"],
                                    "lightpeak": peak,
                                    "lasair_clas": rowupdated["lasair_clas"],
                                    "alerce_clas": rowupdated["alerce_clas"],
                                    "nobs": rowupdated["nobs"],
                                    "state": "updated"}
                    update_id = db.update(filter={"id": id}, query={"$set": update_query})
                    print("updated source", id, update_id.raw_result)
                else:
                    print("no new observations, skipping update", id)
            except Exception as err:
                print("Error updating", id, currentdata[0]["nobs"], rowupdated["nobs"])
                logger.error("checkLastDetections:: {0} Error updating..".format(str(id)))
        else:
            peak = lasairarchive.getPeakLightCurve(classification["light_curve"]["candidates"])
            rowupdated["lightpeak"] = peak
            # insert the new candidate
            print("save new candidate")
            rowupdated["state"] = "new"
            rowupdated["last_update"] = now
            db.saveData(rowupdated)
            logger.info("checkLastDetections:: {0} Saved candidate with {1} observations".format(id, rowupdated["nobs"]))
            newCandidates += 1
    logger.info("checkLastDetections:: {0} candidates stored..".format(str(len(dic_result))))
    logger.info("checkLastDetections:: alerce table detections {0}".format(str(len(alerceTable))))
    logger.info("checkLastDetections:: lasair table detections {0}".format(str(len(lasairTable))))
    logger.info("checkLastDetections:: desi detections {0}".format(str(len(desi_targetstable))))
    logger.info("checkLastDetections:: new candidates {0}".format(str(newCandidates)))
    db.saveData(data={"date": now, "newcandidates": newCandidates, "allrecords": allrecords,
                      "alerce_records": len(alerceTable), "lasair_records": len(lasairTable),
                      "desi_matchs": len(desi_targetstable), "process": "lastdetections"},
                collection="tasks")
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "checkLastDetections", STATE_COMPLETED)
def getPeaks(**kwargs):
    collection = current_collection
    filter = {"lightcurve": {"$exists": True}}
    projection = {}
    if "collection" in kwargs.keys() and kwargs["collection"] != "":
        collection = kwargs["collection"]
    if "filter" in kwargs.keys() and kwargs["filter"] != "":
        filter = kwargs["filter"]
    if "projection" in kwargs.keys() and kwargs["projection"] != "":
        projection = kwargs["projection"]
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "getPeaks", STATE_RUNNING)
    lasairarchive = LasairArchive()
    db = MongodbManager()
    config = Config()
    dbconfig = config.getDatabase("mongodb")
    db.setDatabase(dbconfig["dbname"])
    db.setCollection(collection)
    data = db.getData(filter=filter)
    for indx, row in enumerate(data):
        print("try to get peak", row["id"])
        if len(row["lightcurve"]) > 0:
            peak = lasairarchive.getPeakLightCurve(row["lightcurve"])
            query = {}
            # 999 is the sentinel for "no absolute magnitude available"
            query["best_photoz_gabmag"] = 999
            query["best_photoz_rabmag"] = 999
            query["best_specz_gabmag"] = 999
            query["best_specz_rabmag"] = 999
            if "Redshift" in row or ("redshift" in row and len(row["redshift"].keys()) > 0):
                if "Redshift" in row:
                    z = row["Redshift"]
                    redshifts_archives = ["tns"]
                else:
                    redshifts_archives = row["redshift"].keys()
                for z_key in redshifts_archives:
                    if z_key == "sncosmos":
                        if "best" in row["redshift"]["sncosmos"] and "redshift" in row["redshift"]["sncosmos"]["best"]:
                            z = row["redshift"]["sncosmos"]["best"]["redshift"]
                        else:
                            continue
                    elif "Redshift" not in row:
                        # For TNS rows z was already taken from row["Redshift"] above.
                        z = row["redshift"][z_key]
                    # Store absolute magnitudes per redshift archive, per band.
                    if "g" in peak["stats"].keys():
                        if "magab" not in peak["stats"]["g"]:
                            peak["stats"]["g"]["magab"] = {}
                        peak["stats"]["g"]["magab"][z_key] = Convertion.aparentToAbsoluteMagnitud(peak["stats"]["g"]["y"], z=z).tolist()
                    if "r" in peak["stats"].keys():
                        if "magab" not in peak["stats"]["r"]:
                            peak["stats"]["r"]["magab"] = {}
                        peak["stats"]["r"]["magab"][z_key] = Convertion.aparentToAbsoluteMagnitud(peak["stats"]["r"]["y"], z=z).tolist()
                    if "g" in peak["lightcurve"].keys():
                        if "magab" not in peak["lightcurve"]["g"]:
                            peak["lightcurve"]["g"]["magab"] = {}
                        peak["lightcurve"]["g"]["magab"][z_key] = Convertion.aparentToAbsoluteMagnitud(peak["lightcurve"]["g"]["mag"], z=z).tolist()
                    if "r" in peak["lightcurve"].keys():
                        if "magab" not in peak["lightcurve"]["r"]:
                            peak["lightcurve"]["r"]["magab"] = {}
                        peak["lightcurve"]["r"]["magab"][z_key] = Convertion.aparentToAbsoluteMagnitud(peak["lightcurve"]["r"]["mag"], z=z).tolist()
            peaks = []
            gmag = False
            rmag = False
            if "g" in peak["stats"].keys():
                peak_g = peak["stats"]["g"]["peakmag"]
                peaks.append(peak_g)
                gmag = True
            if "r" in peak["stats"].keys():
                peak_r = peak["stats"]["r"]["peakmag"]
                peaks.append(peak_r)
                rmag = True
            if "best_photo_z" in row.keys() and len(row["best_photo_z"]) > 0:
                photoz = row["best_photo_z"]["photo_z"]
                best_photomagab = Convertion.aparentToAbsoluteMagnitud(peaks, z=photoz).tolist()
                if gmag:
                    query["best_photoz_gabmag"] = best_photomagab[0]
                if rmag:
                    # the r-band peak sits at index 1 only when a g-band peak precedes it
                    idxphotorbest = 1 if gmag else 0
                    query["best_photoz_rabmag"] = best_photomagab[idxphotorbest]
            if "best_spec_z" in row.keys() and len(row["best_spec_z"]) > 0:
                specz = row["best_spec_z"]["spec_z"]
                best_specmagab = Convertion.aparentToAbsoluteMagnitud(peaks, z=specz).tolist()
                if gmag:
                    query["best_specz_gabmag"] = best_specmagab[0]
                if rmag:
                    idxspecbest = 1 if gmag else 0
                    query["best_specz_rabmag"] = best_specmagab[idxspecbest]
            query["lightpeak"] = peak
            if "g" in peak["status"].keys():
                query["g_state"] = peak["status"]["g"]
            if "r" in peak["status"].keys():
                query["r_state"] = peak["status"]["r"]
            update = db.update(filter={"id": row["id"]}, query={"$set": query})
            print("update peak", row["id"], update)
    if "IDpipeLine" in kwargs.keys() and kwargs["IDpipeLine"] != "":
        updatePipeline(kwargs["IDpipeLine"], "getPeaks", STATE_COMPLETED)