def searchtaxo():
    """Autocomplete endpoint for taxonomy categories (select2 'q' query).

    For short queries (<=2 chars) returns the authenticated user's MRU
    (most-recently-used) classification list instead of querying the DB.
    Longer queries search taxonomy display names; '<' separates
    child<ancestor filters and '*'/' ' act as '%' wildcards. When a
    'projid' is given, categories belonging to the project's preset list
    are flagged (pr=1) and sorted first. Returns JSON [{id, text, pr}].
    """
    term=gvg("q")
    if len(term)<=2:
        # return "[]"
        if not current_user.is_authenticated:
            return "[]"
        # current_user.id
        with app.MRUClassif_lock:
            # app.MRUClassif[current_user.id]=[{"id": 2904, "pr": 0, "text": "Teranympha (Eucomonymphidae-Teranymphidae)"},
            # {"id": 12488, "pr": 0, "text": "Teranympha mirabilis "},
            # {"id": 76677, "pr": 0, "text": "Terasakiella (Methylocystaceae)"},
            # {"id": 82969, "pr": 0, "text": "Terasakiella pusilla "}]
            return json.dumps(app.MRUClassif.get(current_user.id,[]))
    # manages the MRU using the classifications
    ltfound=term.find('<')>0  # NOTE(review): computed but never used below
    SQLWith="""
    """
    # '*' and space both behave as '%' wildcards
    terms=[x.lower().replace("*","%").replace(" ","%")+R"%" for x in term.split('<')]
    param={'term':terms[0]}  # the first term is always applied to the display name
    ExtraWhere=ExtraFrom=""
    if len(terms)>1:
        # remaining terms filter on the '<'-joined ancestry expression
        ExtraFrom = SQLTreeJoin
        terms = ['%%<'+x.replace("%","%%").replace("*","%%").replace(" ","%%") for x in terms[1:]]
        # QuotedString protects against SQL injection since the value is inlined in the SQL text
        termsSQL=QuotedString("".join(terms)).getquoted().decode('iso-8859-15','strict')
        ExtraWhere= ' and '+SQLTreeExp+" ilike "+termsSQL
    sql="""SELECT tf.id, tf.display_name as name ,0 FROM taxonomy tf {0} WHERE lower(tf.display_name) LIKE %(term)s {1} order by lower(tf.display_name) limit 200""".format(ExtraFrom,ExtraWhere)
    PrjId=gvg("projid")
    if PrjId!="":
        PrjId=int(PrjId)
        Prj=database.Projects.query.filter_by(projid=PrjId).first()
        if ntcv(Prj.initclassiflist) != "":
            # build a VALUES list "(id), (id), ..." from the project's preset categories
            InitClassif=Prj.initclassiflist
            InitClassif=", ".join(["("+x.strip()+")" for x in InitClassif.split(",") if x.strip()!=""])
            # ,tf.name||case when p1.name is not null and tf.name not like '%% %%' then ' ('||p1.name||')' else ' ' end as name
            sql=""" SELECT tf.id ,tf.display_name as name , case when id2 is null then 0 else 1 end inpreset
            FROM taxonomy tf join (select t.id id1,c.id id2 FROM taxonomy t full JOIN (VALUES """+InitClassif+""") c(id) ON t.id = c.id WHERE lower(display_name) LIKE %(term)s) tl2 on tf.id=coalesce(id1,id2)
            """+ExtraFrom+"""
            WHERE lower(tf.display_name) LIKE %(term)s """+ExtraWhere+"""
            order by inpreset desc,lower(tf.display_name),name limit 200 """
    res = GetAll(sql, param,debug=False)
    return json.dumps([dict(id=r[0],text=r[1],pr=r[2]) for r in res])
def routetaxobrowse():
    """Taxonomy browse/management page.

    Optionally triggers a full taxonomy sync (updatestat=Y), builds a
    "back" button when reached from a project or an import task, denies
    access to users lacking creator/administrator rights, then lists up
    to 400 taxa (non-admins only see the taxa they created).
    """
    BackProjectBtn = ''
    if gvp('updatestat') == 'Y':
        # DoSyncStatUpdate()
        DoFullSync()
    if gvg('fromprj'):
        BackProjectBtn = "<a href='/prj/{}' class='btn btn-default btn-primary'>{} Back to project</a> ".format(
            int(gvg('fromprj')), FAIcon('arrow-left'))
    if gvg('fromtask'):
        BackProjectBtn = "<a href='/Task/Question/{}' class='btn btn-default btn-primary'>{} Back to importation task</a> ".format(
            int(gvg('fromtask')), FAIcon('arrow-left'))
    if not (current_user.has_role(database.AdministratorLabel)
            or current_user.has_role(database.ProjectCreatorLabel)):
        # /prj/653
        # Typo fix: message previously said "tanonomy"
        txt = "You cannot create taxonomy category, you must request to your project manager (check project page)"
        if gvg('fromprj'):
            txt += "<br>" + BackProjectBtn
        return PrintInCharte(FormatError(txt, DoNotEscape=True))
    g.taxoserver_url = app.config.get('TAXOSERVER_URL')
    if current_user.has_role(database.AdministratorLabel):
        ExtraWhereClause = ""
    else:
        # NOTE(review): the email comes from the authenticated account, but it
        # is interpolated directly into the SQL; parameterizing would be safer.
        ExtraWhereClause = "and t.creator_email='{}'".format(current_user.email)
    lst = GetAll(
        """select t.id,t.parent_id,t.display_name as name,case t.taxotype when 'M' then 'Morpho' when 'P' then 'Phylo' else t.taxotype end taxotype,t.taxostatus,t.creator_email,t.id_source
        ,to_char(t.creation_datetime,'yyyy-mm-dd hh24:mi') creation_datetime,to_char(t.lastupdate_datetime,'yyyy-mm-dd hh24:mi') lastupdate_datetime,{}
        from taxonomy t
        {}
        where t.id_instance ={} {}
        order by case t.taxostatus when 'N' then 1 else 2 end,t.id
        LIMIT 400 """.format(SQLTreeSelect, SQLTreeJoin,
                             app.config.get('TAXOSERVER_INSTANCE_ID'),
                             ExtraWhereClause))
    for lstitem in lst:
        # lstitem['tree']=PackTreeTxt(lstitem['tree']) #evite les problèmes de safe
        if lstitem['parent_id'] is None:
            lstitem['parent_id'] = ""  # the template expects a string, not None
    # nbrtaxon=GetAll("select count(*) from taxonomy")[0][0]
    # return render_template('browsetaxo.html',lst=lst,nbrtaxon=nbrtaxon)
    return PrintInCharte(
        render_template('taxonomy/browse.html', lst=lst,
                        BackProjectBtn=BackProjectBtn))
def searchsamples():
    """Sample autocomplete: samples of the given project(s) whose orig_id
    matches the 'q' pattern ('*' acts as a wildcard). JSON by default."""
    pattern = ("%" + gvg("q") + "%").lower().replace('*', '%')
    project_ids = ""
    if gvg("projid") != "":
        project_ids = str(int(gvg("projid")))
    if gvg("projid[]") != "":
        project_ids = ",".join(str(int(p)) for p in request.args.getlist("projid[]"))
    if project_ids == "":
        return "[]"
    rows = database.GetAll(
        """SELECT sampleid, orig_id FROM samples WHERE projid in ({0}) and orig_id like %s order by orig_id limit 2000""".format(project_ids),
        (pattern,))
    if gvg("format", 'J') == 'J':  # JSON output is the default
        return json.dumps([{'id': row[0], 'text': row[1]} for row in rows])
    return render_template('search/samples.html', samples=rows)
def searchtaxo():
    """Autocomplete endpoint for taxonomy names.

    The query is split on '*': the last fragment matches the taxon name
    itself (parameterized LIKE), each preceding fragment must match one
    of the five ancestor levels p1..p5. Returns JSON [{id, text, pr}].
    """
    term = gvg("q")
    if len(term) <= 2:
        return "[]"
    terms = [x.strip().lower() + R"%" for x in term.split('*')]
    # psycopg2.extensions.QuotedString("""c'est ok "ici" à """).getquoted()
    param = {'term': terms[-1]}  # the last term is always part of the query
    # ancestor fragments are inlined in the SQL text: quote them and escape
    # '%' so the later %(term)s substitution leaves them intact
    terms = [
        QuotedString(x).getquoted().decode('iso-8859-15', 'strict').replace("%", "%%")
        for x in terms[0:-1]
    ]
    ExtraWhere = ExtraFrom = ""
    if terms:
        for t in terms:
            ExtraWhere += "\n and ("  # no SQL injection risk: protected by QuotedString
            ExtraWhere += ' or '.join(
                ("lower(p{0}.name) like {1}".format(i, t) for i in range(1, 6))) + ")"
        # chain the ancestor self-joins p2..p5 (p1 is always joined below)
        ExtraFrom = "\n".join([
            "left join taxonomy p{0} on p{1}.parent_id=p{0}.id".format(
                i, i - 1) for i in range(2, 6)
        ])
    sql = """SELECT tf.id, tf.display_name as name ,0 FROM taxonomy tf left join taxonomy p1 on tf.parent_id=p1.id {0} WHERE lower(tf.name) LIKE %(term)s {1} order by tf.name limit 200""".format(ExtraFrom, ExtraWhere)
    res = GetAll(sql, param, debug=False)
    return json.dumps([dict(id=r[0], text=r[1], pr=r[2]) for r in res])
def ajaxorganisationlist():
    """Return, as a JSON list, the distinct user organisations matching the
    'term' request argument (case-insensitive substring search)."""
    pattern = {'term': '%' + gvg('term', '') + '%'}
    rows = database.GetAll(
        "select * from (select distinct organisation from users where organisation ilike %(term)s)q order by upper(organisation)",
        pattern)
    return json.dumps([row['organisation'] for row in rows])
def searchusers():
    """Active-user autocomplete: JSON [{id, text}] for names matching 'q'."""
    query = gvg("q")
    if len(query) < 2:
        return "[]"
    pattern = R"%" + query + R"%"
    rows = database.GetAll(
        "SELECT id, name FROM users WHERE name ILIKE %s and active=true order by name limit 1000",
        (pattern,), debug=False)
    return json.dumps([{'id': row[0], 'text': row[1]} for row in rows])
def QuestionProcessScreenSelectSourceTaxo(self,Prj):
    """Second configuration screen: choose the taxa used from the source.

    Fetches the categories and their validated-object counts in the
    base/learning projects, then pre-checks the ones kept by a previously
    saved 'seltaxo' setting (or all of them when nothing was saved).
    """
    # sql = """select n.classif_id,t.name||case when p1.name is not null and t.name not like '%% %%' then ' ('||p1.name||')' else ' ' end as name
    sql = """select n.classif_id,t.display_name as name ,n.nbr
    from (select o.classif_id,count(*) nbr from obj_head o where projid in ({0}) and classif_qual='V' group by classif_id) n
    JOIN taxonomy t on n.classif_id=t.id
    left join taxonomy p1 on t.parent_id=p1.id
    order by nbr desc,name""".format(database.CSVIntStringToInClause(gvp('src', gvg('src'))))
    g.TaxoList = GetAll(sql, None, cursor_factory=None)
    s = sum([r[2] for r in g.TaxoList])  # total object count across categories
    d = DecodeEqualList(Prj.classifsettings)
    TaxoCSV = d.get('seltaxo')
    if TaxoCSV:
        TaxoList = {int(x) for x in TaxoCSV.split(',')}
    else:
        TaxoList = set()  # fix: was {} (an empty dict), for consistency with the set above
    # append the percentage per category and the 'checked' flag for the form
    g.TaxoList = [[r[0], r[1], r[2], round(100 * r[2] / s, 1),
                   'checked' if len(TaxoList) == 0 or r[0] in TaxoList else '']
                  for r in g.TaxoList]
    ExtraHeader = "<input type='hidden' name='src' value='{}'>".format(gvp('src', gvg('src')))
    ExtraHeader += self.GetFilterText()
    return render_template('task/classifauto2_create_lsttaxo.html'
                           , url=request.query_string.decode('utf-8')
                           , ExtraHeader=ExtraHeader, prj=Prj)
def ListTasks(owner=None):
    """Task monitor page.

    Admins may view everybody's tasks with ?seeall=Y. The cleandone /
    cleanerror / cleanall query flags purge the matching tasks before
    the list is rendered.
    """
    g.headcenter = "<H3>Task Monitor</h3>"
    AddTaskSummaryForTemplate()
    seeall = ""
    if current_user.has_role(
            database.AdministratorLabel) and gvg("seeall") == 'Y':
        tasks = Task.query.filter_by().order_by("id").all()
        seeall = '&seeall=Y'
    else:
        tasks = Task.query.filter_by(
            owner_id=current_user.id).order_by("id").all()
    txt = ""
    if gvg("cleandone") == 'Y' or gvg("cleanerror") == 'Y' or gvg(
            "cleanall") == 'Y':
        txt = "Cleaning process result :<br>"  # typo fix: was "Cleanning"
        for t in tasks:
            if (gvg("cleandone")=='Y' and t.taskstate=='Done') or (gvg("cleanall")=='Y') \
                    or (gvg("cleanerror")=='Y' and t.taskstate=='Error') :
                txt += DoTaskClean(t.id)
        # Refresh the list after cleaning, keeping the same scope as above.
        # Fix: previously this always re-queried only the current user's
        # tasks, silently dropping the admin 'seeall' view.
        if seeall:
            tasks = Task.query.filter_by().order_by("id").all()
        else:
            tasks = Task.query.filter_by(
                owner_id=current_user.id).order_by("id").all()
    # txt += "<a class='btn btn-default' href=?cleandone=Y>Clean All Done</a> <a class='btn btn-default' href=?cleanerror=Y>Clean All Error</a> <a class='btn btn-default' href=?cleanall=Y>Clean All (warning !!!)</a> Task count : "+str(len(tasks))
    return render_template('task/listall.html',
                           tasks=tasks,
                           header=txt,
                           len_tasks=len(tasks),
                           seeall=seeall,
                           IsAdmin=current_user.has_role(
                               database.AdministratorLabel))
def ajaxcoutrylist():
    """Country autocomplete for select2: matches 'term' against countrylist
    and always appends an 'Other' entry."""
    matches = database.GetAll(
        "select countryname from countrylist where countryname ilike %(term)s order by countryname",
        ({'term': '%' + gvg('term', '') + '%'}))
    results = [{'id': row['countryname'], 'text': row['countryname']}
               for row in matches]
    results.append({'id': 'Other', 'text': 'Other'})
    return json.dumps({"results": results})
def dbadmin_merge2taxon():
    """Admin tool: merge taxon 'src' into taxon 'dest'.

    Without both ids it renders the selection form. Otherwise it repoints
    every reference (manual & automatic classifications, history, child
    taxa) from src to dest, deletes the src taxonomy node and reports the
    affected row counts.
    """
    if gvg("src","")=="" or gvg("dest","")=="":
        # NOTE(review): txt is built but never passed to the template
        txt="Select source Taxon (will be deleted after merge) :"
        txt+="<br>Select Target Taxon :"
        return render_template('search/merge2taxo.html')
    TaxoSrc=database.Taxonomy.query.filter_by(id=int(gvg("src",""))).first()
    TaxoDest=database.Taxonomy.query.filter_by(id=int(gvg("dest",""))).first()
    # repoint every usage of the source taxon; each ExecSQL returns the affected row count
    N1=ExecSQL("update obj_head set classif_id=%(dest)s where classif_id=%(src)s",{"src":TaxoSrc.id,"dest":TaxoDest.id})
    N2=ExecSQL("update obj_head set classif_auto_id=%(dest)s where classif_auto_id=%(src)s",{"src":TaxoSrc.id,"dest":TaxoDest.id})
    N3=ExecSQL("update objectsclassifhisto set classif_id=%(dest)s where classif_id=%(src)s",{"src":TaxoSrc.id,"dest":TaxoDest.id})
    N4=ExecSQL("update taxonomy set parent_id=%(dest)s where parent_id=%(src)s",{"src":TaxoSrc.id,"dest":TaxoDest.id})
    # finally drop the now-unreferenced source node
    N5=ExecSQL("delete from taxonomy where id=%(src)s",{"src":TaxoSrc.id,"dest":TaxoDest.id})
    return PrintInCharte("""Merge of '%s' in '%s' done <br>%d Objects Manuel classification updated <br>%d Objects Automatic classification updated <br>%d Objects classification historical updated <br>%d Taxonomy child updated <br>%d Taxonomy Node deleted """%(TaxoSrc.name,TaxoDest.name,N1,N2,N3,N4,N5))
def searchinstrumlist():
    """Popup listing the distinct instruments found in acquisitions
    (optionally restricted to one project); clicking an entry fills
    #filt_instrum and closes the popup."""
    query = "select DISTINCT lower(instrument) from acquisitions where instrument is not null and instrument!='' "
    if gvg("projid") != "":
        query += " and projid=" + str(int(gvg("projid")))
    rows = database.GetAll(query + " order by 1")
    parts = ["List of available Intruments : <hr><ul id=InstrumList>"]
    for row in rows:
        parts.append("\n<li>{0}</li>".format(row[0]))
    parts.append("""</ul>
<hr>
<button type="button" class="btn btn-default btn-" onclick="$('#PopupDetails').modal('hide');">Close</button>
<br><br>
<script>
$('#InstrumList li').click(function(){
$('#filt_instrum').val($(this).text());
$('#PopupDetails').modal('hide');
}).css('cursor','pointer');
</script>
""")
    return "".join(parts)
def taxotreerootjson():
    """jstree JSON feed for the taxonomy tree: children of node 'id'
    ('#' requests the root nodes)."""
    node = gvg("id")
    query = """SELECT id, name,parent_id,coalesce(nbrobj,0)+coalesce(nbrobjcum,0)
    ,exists(select 1 from taxonomy te where te.parent_id=taxonomy.id)
    FROM taxonomy WHERE """
    if node == '#':
        query += "parent_id is null"
    else:
        query += "parent_id =%d" % (int(node))
    query += " order by name "
    rows = GetAll(query)
    # print(rows)
    nodes = []
    for row in rows:
        label = ("<span class=v>" + row[1] + "</span> (" + str(row[3]) +
                 ") <span class='TaxoSel label label-default'><span class='glyphicon glyphicon-ok'></span></span>")
        nodes.append(dict(id=str(row[0]), text=label,
                          parent=row[2] or "#", children=row[4]))
    return json.dumps(nodes)
def indexExplore():
    """Public 'Explore' page.

    Collects the filter parameters from the query string into `data` and
    pre-builds the selected <option> HTML for the sample/project/taxon
    select2 widgets plus the month and day-period dropdowns, then renders
    the explore template around the common filter panel.
    """
    data = {'pageoffset': gvg("pageoffset", "0")}
    # start from the generic filter defaults, overridden by the query string
    for k, v in FilterList.items():
        data[k] = gvg(k, v)
    data['inexplore'] = True
    data["projid"] = gvg("projid", 0)
    data["taxochild"] = gvg("taxochild", '1')
    data["sample_for_select"] = ""
    if data["samples"]:
        # rebuild the selected sample options so select2 shows them on load
        for r in GetAll(
                "select sampleid,orig_id from samples where sampleid =any(%s)",
                ([int(x) for x in data["samples"].split(',')], ),
                doXSSEscape=True):
            data[
                "sample_for_select"] += "\n<option value='{sampleid}' selected>{orig_id}</option> ".format(
                    **r)
    data["projects_for_select"] = ""
    if data["projid"]:
        # only publicly visible projects can be pre-selected
        for r in GetAll(
                "select projid,title from projects where projid =any(%s) and visible=true",
                ([int(x) for x in data["projid"].split(',')], ),
                doXSSEscape=True):
            data[
                "projects_for_select"] += "\n<option value='{projid}' selected>{title}</option> ".format(
                    **r)
    data["taxo_for_select"] = ""
    if gvg("taxo[]"):
        print(gvg("taxo[]"))
        for r in GetAll(
                "SELECT id, display_name FROM taxonomy WHERE id =any(%s) order by name",
                ([int(x) for x in request.args.getlist("taxo[]")], ),
                debug=False):
            data[
                "taxo_for_select"] += "\n<option value='{id}' selected>{display_name}</option> ".format(
                    **r)
    data["month_for_select"] = ""
    # month numbers (1..12) are compared against the CSV 'month' filter value
    for (k, v) in enumerate(
        ('January', 'February', 'March', 'April', 'May', 'June', 'July',
         'August', 'September', 'October', 'November', 'December'),
            start=1):
        data[
            "month_for_select"] += "\n<option value='{1}' {0}>{2}</option> ".format(
                'selected' if str(k) in data['month'].split(',') else '', k, v)
    data["daytime_for_select"] = ""
    for (k, v) in database.DayTimeList.items():
        data[
            "daytime_for_select"] += "\n<option value='{1}' {0}>{2}</option> ".format(
                'selected' if str(k) in data['daytime'].split(',') else '', k,
                v)
    right = 'dodefault'
    classiftab = ""
    appli.AddTaskSummaryForTemplate()
    filtertab = getcommonfilters(data)
    return render_template('search/explore.html',
                           top="",
                           lefta=classiftab,
                           leftb=filtertab,
                           right=right,
                           data=data)
def part_prj():
    """Particle projects list page.

    Builds the listing SQL with optional title and instrument filters
    (the '%%'||%(x)s||'%%' idiom produces ILIKE patterns under psycopg2
    %-escaping), restricted to the user's own projects for non-admins.
    """
    params={}
    sql="""select pprojid,ptitle,up.ownerid,u.name,u.email,rawfolder,instrumtype,ep.title
    ,(select count(*) from part_samples where pprojid=up.pprojid) samplecount
    from part_projects up
    left JOIN projects ep on up.projid=ep.projid
    LEFT JOIN users u on ownerid=u.id
    """
    sql += " where 1=1 "
    if not current_user.has_role(database.AdministratorLabel):
        # non-admins only see their own particle projects
        sql+=" and ownerid=%d"%(current_user.id,)
    if gvg('filt_title','')!='':
        # matches the particle project title/id or the linked EcoTaxa project title/id
        sql +=" and ( up.ptitle ilike '%%'||%(title)s ||'%%' or to_char(up.pprojid,'999999') like '%%'||%(title)s or ep.title ilike '%%'||%(title)s ||'%%' or to_char(ep.projid,'999999') like '%%'||%(title)s) "
        params['title']=gvg('filt_title')
    if gvg('filt_instrum','')!='':
        sql +=" and up.instrumtype ilike '%%'||%(filt_instrum)s ||'%%' "
        params['filt_instrum']=gvg('filt_instrum')
    sql+=" order by lower(ep.title),lower(ptitle)"
    res = GetAll(sql,params) #,debug=True
    # app.logger.info("res=%s",res)
    CanCreate=False
    if current_user.has_role(database.AdministratorLabel) or current_user.has_role(database.ProjectCreatorLabel):
        CanCreate=True
    g.headcenter = "<h4>Particle Projects management</h4><a href='/part/'>Particle Module Home</a>"
    return PrintInCharte(
        render_template('part/list.html', PrjList=res, CanCreate=CanCreate, AppManagerMailto=appli.GetAppManagerMailto()
                        , filt_title=gvg('filt_title'), filt_subset=gvg('filt_subset'), filt_instrum=gvg('filt_instrum')))
def TaskShow(TaskID):
    """Task detail page.

    Serves the raw log with ?log=Y, delegates to the task's custom screen
    with ?CustomDetails=Y, shows a download link when the task exposes a
    result file, decodes the per-step errors, and links back to the
    related project when one is attached to the task parameters.
    """
    AddTaskSummaryForTemplate()
    try:
        task = LoadTask(TaskID)
    except:
        # NOTE(review): bare except kept on purpose — any load failure is
        # presented as a purged task
        return PrintInCharte(
            "This task doesn't exists anymore, peraphs it was automaticaly purged"
        )
    txt = ""
    if gvg('log') == "Y":
        WorkingDir = task.GetWorkingDir()
        # app.send_static_file(os.path.join(WorkingDir,"TaskLog.txt"))
        return flask.send_from_directory(WorkingDir, "TaskLog.txt")
    if gvg('CustomDetails') == "Y":
        return task.ShowCustomDetails()
    # only some task classes produce a downloadable result file
    if "GetResultFile" in dir(task):
        f = task.GetResultFile()
        if f is None:
            txt += "Error, final file not available"
        else:
            txt += "<a href='/Task/GetFile/%d/%s' class='btn btn-primary btn-sm ' role='button'>Get file %s</a>" % (
                TaskID, f, f)
    CustomDetailsAvail = "ShowCustomDetails" in dir(task)
    try:
        decodedsteperrors = json.loads(task.task.inputparam).get("steperrors")
    except:
        # corrupted / non-JSON input parameters: surface a generic error
        decodedsteperrors = ["Task Decoding Error"]
    ProjectID = getattr(task.param, 'ProjectId', None)
    if ProjectID:
        Prj = database.Projects.query.filter_by(projid=ProjectID).first()
        g.headcenter = "<h4>Project : <a href='/prj/{0}'>{1}</a></h4>".format(
            Prj.projid, XSSEscape(Prj.title))
    return render_template('task/show.html',
                           task=task.task,
                           steperror=decodedsteperrors,
                           CustomDetailsAvail=CustomDetailsAvail,
                           extratext=txt)
def TaskClean(TaskID):
    """Purge a task and show the result.

    With ?thengotoproject=Y the related project id is captured first so a
    client-side redirect back to the project can be emitted after cleanup.
    """
    AddTaskSummaryForTemplate()
    if gvg('thengotoproject') == 'Y':
        task = LoadTask(TaskID)
        ProjectID = getattr(task.param, 'ProjectId', None)
    else:
        ProjectID = ''
    Msg = DoTaskClean(TaskID)
    Msg += '<br><a href="/Task/listall"><span class="label label-info"> Back to Task List</span></a>'
    if ProjectID:
        # fix: the string previously started with an extra '"' (""""<script>)
        # which rendered a stray quote character before the redirect script
        Msg += """<script> window.location.href = "/prj/%s" </script>""" % (
            ProjectID, )
    return PrintInCharte(Msg)
def ServerFolderSelectJSON():
    """jstree JSON feed over the server import area.

    Lists the sub-folders of node 'id' ('#' = SERVERLOADAREA root) and,
    with their size/date, the .zip files they contain. With ZipOnly=Y the
    folders themselves are not selectable.
    """
    ServerRoot=Path(app.config['SERVERLOADAREA'])
    CurrentPath=ServerRoot
    parent=gvg("id")
    if parent!='#':
        CurrentPath=ServerRoot.joinpath(Path(parent))
    res=[]
    for x in CurrentPath.iterdir():
        rr=x.relative_to(ServerRoot).as_posix()   # id: path relative to the root
        rc=x.relative_to(CurrentPath).as_posix()  # label: name relative to the node
        try:
            if x.is_dir():
                if gvg('ZipOnly')=='Y':
                    res.append(dict(id=rr,text="<span class=v>"+rc+"</span> ",parent=parent,children=True))
                else:
                    res.append(dict(id=rr,text="<span class=v>"+rc+"</span> <span class='TaxoSel label label-default'>Select</span>",parent=parent,children=True))
            if x.suffix.lower()==".zip":
                fi=x.stat()  # fix: pathlib call instead of os.stat(x.as_posix())
                res.append(dict(id=rr,text="<span class=v>"+"%s (%.1f Mb : %s)"%(rc,fi.st_size/1048576,time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(fi.st_mtime)))+"</span> <span class='TaxoSel label label-default'>Select</span>",parent=parent,children=False))
        except OSError:
            # fix: was a bare 'except: None' — walking files can fail, e.g. on
            # 'System Volume Information'; only swallow filesystem errors
            pass
    res.sort(key=lambda val: str.upper(val['id']),reverse=False)
    return json.dumps(res)
def searchgettaxomapping():
    """Return the project's post-classification taxonomy mapping.

    Reads 'posttaxomapping' ("src:dest,src:dest,...") from the project's
    classifsettings and resolves the target ids to display names.
    JSON shape: {'mapping': {src: dest}, 'taxo': {id: name}}.
    """
    Prj = database.Projects.query.filter_by(projid=int(gvg("projid"))).first()
    classifsettings = DecodeEqualList(Prj.classifsettings)
    PostTaxoMapping=classifsettings.get("posttaxomapping","")
    res={'mapping':{},'taxo':{}}
    if PostTaxoMapping!='':
        res['mapping'] = {el[0].strip(): el[1].strip() for el in [el.split(':') for el in PostTaxoMapping.split(',') if el != '']}
        # '%% %%' is a literal '% %' after psycopg2 placeholder escaping:
        # append the parent name unless the taxon name already contains a space
        sql = """SELECT tf.id, tf.name||case when p1.name is not null and tf.name not like '%% %%' then ' ('||p1.name||')' else ' ' end as name
        FROM taxonomy tf left join taxonomy p1 on tf.parent_id=p1.id
        WHERE tf.id = any (%s) order by tf.name limit 2000"""
        res['taxo'] = {x[0]:x[1] for x in database.GetAll(sql,([int(x) for x in res['mapping'].values()],))}
    return json.dumps(res)
def part_readprojectmeta():
    """Read the metadata of a UVP project folder under the server load area.

    Extracts operator/contact info from config/cruise_info.txt, the
    instrument type and serial number from the folder name
    ('<type>_<rest>', serial = first '_'-free chunk of <rest>), and the
    cruise/ship from the first row of the '<type>_header_<rest>.txt' file.
    Returns a JSON dict of defaults for the project-creation form.
    """
    res = {}
    ServerRoot = Path(app.config['SERVERLOADAREA'])
    DossierUVPPath = ServerRoot / gvg('path')
    DirName = DossierUVPPath.name
    CruiseFile = DossierUVPPath / "config/cruise_info.txt"
    # app.logger.info("CruiseFile=%s",CruiseFile.as_posix())
    if CruiseFile.exists():
        CruiseInfoData = appli.DecodeEqualList(CruiseFile.open('r').read())
        res['op_name'] = CruiseInfoData.get('op_name')
        res['op_email'] = CruiseInfoData.get('op_email')
        res['cs_name'] = CruiseInfoData.get('cs_name')
        res['cs_email'] = CruiseInfoData.get('cs_email')
        res['do_name'] = CruiseInfoData.get('do_name')
        res['do_email'] = CruiseInfoData.get('do_email')
        res['prj_info'] = CruiseInfoData.get('gen_info')
        res['prj_acronym'] = CruiseInfoData.get('acron')
    # ConfigFile = DossierUVPPath / "config/uvp5_settings/uvp5_configuration_data.txt"
    # if ConfigFile.exists():
    #     ConfigInfoData = appli.DecodeEqualList(ConfigFile.open('r').read())
    #     res['default_aa']=ConfigInfoData.get('aa_calib')
    #     res['default_exp'] = ConfigInfoData.get('exp_calib')
    #     res['default_volimage'] = ConfigInfoData.get('img_vol')
    m = re.search(R"([^_]+)_(.*)", DirName)
    if m.lastindex == 2:
        FichierHeader = DossierUVPPath / "meta" / (m.group(1) + "_header_" +
                                                   m.group(2) + ".txt")
        res['instrumtype'] = m.group(1)
        # m is rebound here: the serial number is the first chunk of <rest>
        m = re.search(R"([^_]+)", m.group(2))
        res['default_instrumsn'] = m.group(1)
        if FichierHeader.exists():
            LstSamples = []
            with FichierHeader.open() as FichierHeaderHandler:
                F = csv.DictReader(FichierHeaderHandler, delimiter=';')
                for r in F:
                    LstSamples.append(r)
            # print(LstSamples)
            if len(LstSamples) > 0:
                res['cruise'] = LstSamples[0].get('cruise')
                res['ship'] = LstSamples[0].get('ship')
    res['default_depthoffset'] = 1.2
    return json.dumps(res)
def browsetaxo():
    """Taxonomy browse page: first 200 taxa with their tree path, the
    global taxon count, and the list of admin contacts."""
    rows = GetAll(
        """select t.id,t.parent_id,t.display_name as name,t.taxotype,t.taxostatus,t.creator_email,t.id_source
        ,to_char(t.creation_datetime,'yyyy-mm-dd hh24:mi') creation_datetime,to_char(t.lastupdate_datetime,'yyyy-mm-dd hh24:mi') lastupdate_datetime,{}
        from taxonomy t
        {}
        order by t.id
        LIMIT 200 """.format(SQLTreeSelect, SQLTreeJoin))
    for row in rows:
        # row['tree']=PackTreeTxt(row['tree']) — avoids 'safe' escaping issues
        if row['parent_id'] is None:
            row['parent_id'] = ""  # template expects a string, not None
    nbrtaxon = GetAll("select count(*) from taxonomy")[0][0]
    g.AdminLists = GetAll(
        "select email,name from users where email like '%@%' and active=TRUE order by 2"
    )
    return render_template('browsetaxo.html',
                           lst=rows,
                           nbrtaxon=nbrtaxon,
                           taxon_id=gvg('id'))
def QuestionProcess(self):
    """Interactive screens of the text-file import task.

    Step 0: task-creation form and validation — project admin rights,
    input via HTTP upload or a server-side path, optional taxonomy
    remapping ("old=new" lines) — then StartTask.
    Step 1: manual resolution of the taxa and users that the automatic
    lookup could not match, then resume the task at step 2.
    """
    ServerRoot = Path(app.config['SERVERLOADAREA'])
    txt = "<h1>Text File Importation Task</h1>"
    errors = []
    if self.task.taskstep == 0:
        txt += "<h3>Task Creation</h3>"
        Prj = database.Projects.query.filter_by(projid=gvg("p")).first()
        g.prjtitle = Prj.title
        g.prjprojid = Prj.projid
        g.prjmanagermailto = Prj.GetFirstManagerMailto()
        txt = ""
        if Prj.CheckRight(2) == False:
            return PrintInCharte("ACCESS DENIED for this project")
        if gvp('starttask') == "Y":
            FileToSave = None
            FileToSaveFileName = None
            self.param.ProjectId = gvg("p")
            self.param.updateclassif = gvp("updateclassif")
            # parse the "old=new" taxonomy remapping lines
            TaxoMap = {}
            for l in gvp('TxtTaxoMap').splitlines():
                ls = l.split('=', 1)
                if len(ls) != 2:
                    errors.append(
                        "Taxonomy Mapping : Invalid format for line %s" % (l))
                else:
                    TaxoMap[ls[0].strip().lower()] = ls[1].strip().lower()
            # check the input consistency
            uploadfile = request.files.get("uploadfile")
            if uploadfile is not None and uploadfile.filename != '':
                # file uploaded over HTTP: the copy happens later, because at
                # this point the task's working directory does not exist yet
                FileToSave = uploadfile
                FileToSaveFileName = "uploaded.zip"
                self.param.InData = "uploaded.zip"
            elif len(gvp("ServerPath")) < 2:
                errors.append("Input Folder/File Too Short")
            else:
                sp = ServerRoot.joinpath(Path(gvp("ServerPath")))
                if not sp.exists():  # check that the folder/file exists
                    errors.append("Input Folder/File Invalid")
                else:
                    self.param.InData = sp.as_posix()
            if len(errors) > 0:
                for e in errors:
                    flash(e, "error")
            else:
                self.param.TaxoMap = TaxoMap  # store the dict, not the raw string
                return self.StartTask(
                    self.param,
                    FileToSave=FileToSave,
                    FileToSaveFileName=FileToSaveFileName)
        else:
            # default values for the creation form
            self.param.ProjectId = gvg("p")
        return render_template('task/importupdate_create.html',
                               header=txt,
                               data=self.param,
                               ServerPath=gvp("ServerPath"),
                               TxtTaxoMap=gvp("TxtTaxoMap"))
    if self.task.taskstep == 1:
        PrjId = self.param.ProjectId
        Prj = database.Projects.query.filter_by(projid=PrjId).first()
        g.prjtitle = Prj.title
        g.appmanagermailto = GetAppManagerMailto()
        # self.param.TaxoFound['agreia pratensis']=None # for TESTS, TO DELETE
        NotFoundTaxo = [
            k for k, v in self.param.TaxoFound.items() if v == None
        ]
        NotFoundUsers = [
            k for k, v in self.param.UserFound.items() if v.get('id') == None
        ]
        app.logger.info("Pending Taxo Not Found = %s", NotFoundTaxo)
        app.logger.info("Pending Users Not Found = %s", NotFoundUsers)
        if gvp('starttask') == "Y":
            app.logger.info("Form Data = %s", request.form)
            # the original name is in origXX and the chosen value in taxolbXX
            for i in range(1, 1 + len(NotFoundTaxo)):
                orig = gvp("orig%d" % (i))
                newvalue = gvp("taxolb%d" % (i))
                if orig in NotFoundTaxo and newvalue != "":
                    t = database.Taxonomy.query.filter(
                        database.Taxonomy.id == int(newvalue)).first()
                    app.logger.info(orig + " associated to " + t.name)
                    self.param.TaxoFound[orig] = t.id
                else:
                    errors.append(
                        "Taxonomy Manual Mapping : Invalid value '%s' for '%s'"
                        % (newvalue, orig))
            # same pattern for unresolved users: origuserXX -> userlbXX
            for i in range(1, 1 + len(NotFoundUsers)):
                orig = gvp("origuser%d" % (i))
                newvalue = gvp("userlb%d" % (i))
                if orig in NotFoundUsers and newvalue != "":
                    t = database.users.query.filter(
                        database.users.id == int(newvalue)).first()
                    app.logger.info("User " + orig + " associated to " + t.name)
                    self.param.UserFound[orig]['id'] = t.id
                else:
                    errors.append(
                        "User Manual Mapping : Invalid value '%s' for '%s'" %
                        (newvalue, orig))
            app.logger.info("Final Taxofound = %s", self.param.TaxoFound)
            self.UpdateParam()  # persist what has been accepted so far
            # resume the task only if every mapping was resolved
            if len(errors) == 0:
                return self.StartTask(self.param, step=2)
            for e in errors:
                flash(e, "error")
            # recompute the remaining unresolved entries for redisplay
            NotFoundTaxo = [
                k for k, v in self.param.TaxoFound.items() if v == None
            ]
            NotFoundUsers = [
                k for k, v in self.param.UserFound.items()
                if v.get('id') == None
            ]
        return render_template('task/import_question1.html',
                               header=txt,
                               taxo=NotFoundTaxo,
                               users=NotFoundUsers,
                               task=self.task)
    return PrintInCharte(txt)
def PrjMerge(PrjId):
    """Merge another project into project PrjId (three screens).

    1. Without 'src': list the candidate projects the user may manage.
    2. With 'src': show mapping-compatibility warnings + confirmation.
    3. With 'merge=Y': move acquisitions/process/samples/objects/particle
       data to the target, merge privileges (keeping the highest level),
       delete the source project, then recompute the target's statistics.
    """
    Prj = database.Projects.query.filter_by(projid=PrjId).first()
    if Prj is None:
        flash("Project doesn't exists", 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    if not Prj.CheckRight(2):  # Level 0 = Read, 1 = Annotate, 2 = Admin
        flash('You cannot edit settings for this project', 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    g.headcenter = "<h4><a href='/prj/{0}'>{1}</a></h4>".format(
        Prj.projid, XSSEscape(Prj.title))
    txt = "<h3>Project Merge / Fusion </h3>"
    if not gvg('src'):
        # Screen 1: pick the source project to merge into the current one
        txt += """<ul><li>You are allowed to merge projects that you are allowed to manage
        <li>User privileges from both projects will be added
        <li>This tool allow to merge two projects in a single projet (called Current project). The added project will then be automatically deleted. If object data are not consistent between both projects :
        <ul><li>New data fields are added to the Current project
        <li>The resulting project will thus contain partially documented datafields.
        </ul><li>Note : Next screen will indicate compatibility issues (if exists) and allow you to Confirm the merging operation.
        </ul>
        """
        sql = "select p.projid,title,status,coalesce(objcount,0) objcount,coalesce(pctvalidated,0) pctvalidated,coalesce(pctclassified,0) pctclassified from projects p"
        if not current_user.has_role(database.AdministratorLabel):
            # restrict to projects the user is a member of
            sql += " Join projectspriv pp on p.projid = pp.projid and pp.member=%d" % (
                current_user.id, )
        sql += " where p.projid!=%d order by title" % Prj.projid
        res = GetAll(sql, doXSSEscape=True)  #,debug=True
        txt += """<table class='table table-bordered table-hover table-verycondensed'>
        <tr><th width=120>ID</td><th>Title</td><th width=100>Status</th><th width=100>Nbr Obj</th>
        <th width=100>% Validated</th><th width=100>% Classified</th></tr>"""
        for r in res:
            txt += """<tr><td><a class="btn btn-primary" href='/prj/merge/{activeproject}?src={projid}'>Select</a> {projid}</td>
            <td>{title}</td>
            <td>{status}</td>
            <td>{objcount:0.0f}</td>
            <td>{pctvalidated:0.2f}</td>
            <td>{pctclassified:0.2f}</td>
            </tr>""".format(activeproject=Prj.projid, **r)
        txt += "</table>"
        return PrintInCharte(txt)
    PrjSrc = database.Projects.query.filter_by(projid=int(gvg('src'))).first()
    if PrjSrc is None:
        flash("Source project doesn't exists", 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    if not PrjSrc.CheckRight(2):  # Level 0 = Read, 1 = Annotate, 2 = Admin
        flash('You cannot merge for this project', 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    txt += """<h4>Source Project : {0} - {1} (This project will be destroyed)</h4>
    """.format(PrjSrc.projid, XSSEscape(PrjSrc.title))
    if not gvg('merge'):
        # Screen 2: the source has been chosen and checked — warn about
        # mapping differences, then ask for confirmation
        if PrjSrc.mappingobj != Prj.mappingobj:
            flash("Object mapping differ With source project ", "warning")
        if PrjSrc.mappingsample != Prj.mappingsample:
            flash("Sample mapping differ With source project ", "warning")
        if PrjSrc.mappingacq != Prj.mappingacq:
            flash("Acquisition mapping differ With source project ", "warning")
        if PrjSrc.mappingprocess != Prj.mappingprocess:
            flash("Process mapping differ With source project ", "warning")
        txt += FormatError(
            """ <span class='glyphicon glyphicon-warning-sign'></span>
            Warning project {1} - {2}<br>
            Will be destroyed, its content will be transfered in the target project.<br>
            This operation is irreversible</p>
            <br><a class='btn btn-lg btn-warning' href='/prj/merge/{0}?src={1}&merge=Y'>Start Project Fusion</a>
            """,
            Prj.projid,
            PrjSrc.projid,
            XSSEscape(PrjSrc.title),
            DoNotEscape=True)
        return PrintInCharte(txt)
    if gvg('merge') == 'Y':
        # Screen 3: actually move every child table to the target project
        ExecSQL("update acquisitions set projid={0} where projid={1}".format(
            Prj.projid, PrjSrc.projid))
        ExecSQL("update process set projid={0} where projid={1}".format(
            Prj.projid, PrjSrc.projid))
        ExecSQL("update samples set projid={0} where projid={1}".format(
            Prj.projid, PrjSrc.projid))
        ExecSQL("update obj_head set projid={0} where projid={1}".format(
            Prj.projid, PrjSrc.projid))
        ExecSQL("update part_projects set projid={0} where projid={1}".format(
            Prj.projid, PrjSrc.projid))
        # for members of both projects, keep the highest privilege of the two
        ExecSQL("""UPDATE projectspriv ppdst set privilege=case when 'Manage' in (ppsrc.privilege,ppdst.privilege) then 'Manage'
        when 'Annotate' in (ppsrc.privilege,ppdst.privilege) then 'Annotate' else 'View' end
        from projectspriv ppsrc
        where ppsrc.projid={1} and ppdst.projid={0} and ppsrc.member=ppdst.member"""
                .format(Prj.projid, PrjSrc.projid), debug=True)
        # transfer the privileges of members only present in the source project
        ExecSQL("""update projectspriv set projid={0} where projid={1} and member not in (select member from projectspriv where projid={0})"""
                .format(Prj.projid, PrjSrc.projid))
        # remove the privileges that existed on both sides
        ExecSQL("delete from projectspriv where projid={0}".format(
            PrjSrc.projid))
        ExecSQL("delete from projects where projid={0}".format(PrjSrc.projid))
        appli.project.main.RecalcProjectTaxoStat(Prj.projid)
        appli.project.main.UpdateProjectStat(Prj.projid)
        txt += "<div class='alert alert-success' role='alert'>Fusion Done successfully</div>"
        txt += "<br><a class='btn btn-lg btn-primary' href='/prj/%s'>Back to target project</a>" % Prj.projid
        return PrintInCharte(txt)
def part_drawchart():
    """Render a PNG with one sub-chart per requested series for the filtered samples.

    Request parameters (GET):
        gpr    - reduced particle histogram series (``clNN`` = class, ``bvNN`` = biovolume)
        gpd    - detailed particle histogram series (same encoding)
        ctd    - CTD column keys
        taxolb - taxonomy ids to plot as depth histograms
        filt_depthmin / filt_depthmax - optional depth window
        XScale - 'I' linear, 'O' log, 'S' symlog for the particle charts

    Returns a Flask ``send_file`` response with the matplotlib figure as PNG.
    On any error, the PNG contains the traceback text instead of charts.
    """
    # Fixed palette, cycled when there are more projects than colors.
    Couleurs = ("#FF0000", "#4385FF", "#00BE00", "#AA6E28", "#FF9900", "#FFD8B1", "#808000",
                "#FFEA00", "#FFFAC8", "#BEFF00", "#AAFFC3", "#008080", "#64FFFF", "#000080",
                "#800000", "#820096", "#E6BEFF", "#FF00FF", "#808080", "#FFC9DE", "#000000")
    PrjColorMap = {}     # pprojid -> curve color (used only when several projects are shown)
    PrjSampleCount = {}  # pprojid -> number of samples plotted
    PrjTitle = {}        # pprojid -> project title (for the legend chart)
    try:
        gpr = request.args.getlist('gpr')
        gpd = request.args.getlist('gpd')
        gctd = request.args.getlist('ctd')
        gtaxo = request.args.getlist('taxolb')
        NbrChart = len(gpr) + len(gpd) + len(gctd) + len(gtaxo)
        samples = umain.GetFilteredSamples(GetVisibleOnly=True, RequiredPartVisibility='V')
        for S in samples:
            if S['pprojid'] not in PrjColorMap:
                PrjColorMap[S['pprojid']] = Couleurs[len(PrjColorMap) % len(Couleurs)]
                PrjTitle[S['pprojid']] = S['ptitle']
            PrjSampleCount[S['pprojid']] = PrjSampleCount.get(S['pprojid'], 0) + 1
        if len(PrjColorMap) > 1:
            # Extra chart at the end: per-project sample count, doubling as a color legend.
            NbrChart += 1
        # Grid layout: at most 4 columns, as many rows as needed.
        FigSizeX = NbrChart
        if NbrChart > 4:
            FigSizeX = 4
        FigSizeY = math.ceil(NbrChart / FigSizeX)
        font = {'family': 'arial', 'weight': 'normal', 'size': 10}
        plt.rc('font', **font)
        plt.rcParams['lines.linewidth'] = 0.5
        Fig = plt.figure(figsize=(FigSizeX * 4, FigSizeY * 5), dpi=100)
        chartid = 0
        # Optional depth window, appended verbatim to every per-sample query.
        DepthFilter = ""
        if gvg('filt_depthmin'):
            DepthFilter += " and depth>=%d" % int(gvg('filt_depthmin'))
        if gvg('filt_depthmax'):
            DepthFilter += " and depth<=%d" % int(gvg('filt_depthmax'))
        # --- Reduced particle charts ---
        if len(gpr) > 0:
            sql = "select depth y "
            # sql+=''.join([',case when watervolume>0 then class%02d/watervolume else 0 end as c%s'%(int(c[2:]),i)
            #               for i,c in enumerate(gpr) if c[0:2]=="cl"])
            # 'clNN' -> concentration (count/volume); NULL when no water volume so the
            # point is simply skipped instead of plotted as zero.
            sql += ''.join([
                ',case when watervolume>0 then class%02d/watervolume else null end as c%s' % (int(c[2:]), i)
                for i, c in enumerate(gpr) if c[0:2] == "cl"
            ])
            sql += ''.join([
                ',coalesce(biovol%02d) as c%s' % (int(c[2:]), i)
                for i, c in enumerate(gpr) if c[0:2] == "bv"
            ])
            sql += """ from part_histopart_reduit where psampleid=%(psampleid)s {} order by Y""".format(DepthFilter)
            graph = list(range(0, len(gpr)))  # placeholder list, replaced by Axes objects below
            for i, c in enumerate(gpr):
                graph[i] = Fig.add_subplot(FigSizeY, FigSizeX, chartid + 1)
                if c[0:2] == "cl":
                    graph[i].set_xlabel(
                        'Particle red. class %s (%s) #/l' % (c, GetClassLimitTxt(PartRedClassLimit, int(c[2:]))))
                if c[0:2] == "bv":
                    graph[i].set_xlabel(
                        'Biovolume red. class %s (%s) µl/l' % (c, GetClassLimitTxt(PartRedClassLimit, int(c[2:]))))
                chartid += 1
            for rs in samples:
                DBData = database.GetAll(sql, {'psampleid': rs['psampleid']})
                data = np.empty((len(DBData), 2))
                for i, c in enumerate(gpr):
                    xcolname = "c%d" % i
                    valcount = 0
                    # Keep only rows with a value; depth is negated so the surface is at the top.
                    for rnum, r in enumerate(DBData):
                        if r[xcolname] is None:
                            continue
                        data[valcount] = (-r['y'], r[xcolname])
                        valcount += 1
                    # data = data[~np.isnan(data[:,1]),:]
                    # Dropping rows with a NaN value performs a de-facto linear interpolation;
                    # without it the NULLs of the 'cl' columns become NaN and are not drawn
                    # (line break). The alternative would be to handle this at import time.
                    graph[i].plot(data[:valcount, 1], data[:valcount, 0],
                                  color=PrjColorMap[rs['pprojid']] if len(PrjColorMap) > 1 else None)
            # Done after the plots so the X scale fits the data, and to avoid log-scale
            # errors when the first series has no values.
            for i, c in enumerate(gpr):
                if gvg('XScale') != 'I':
                    try:
                        if gvg('XScale') == 'O':
                            graph[i].set_xscale('log')
                        if gvg('XScale') == 'S':
                            graph[i].set_xscale('symlog')
                    except Exception as e:
                        # Sometimes log scale is impossible without data; force linear
                        # otherwise it crashes further on.
                        graph[i].set_xscale('linear')
                else:
                    graph[i].set_xlim(left=0)
        # --- Detailed particle charts (same logic as the reduced ones) ---
        if len(gpd) > 0:
            sql = "select depth y "
            # NOTE(review): here missing watervolume yields 0, not NULL as in the reduced
            # branch above — presumably intentional, but worth confirming.
            sql += ''.join([
                ',case when watervolume>0 then class%02d/watervolume else 0 end as c%s' % (int(c[2:]), i)
                for i, c in enumerate(gpd) if c[0:2] == "cl"
            ])
            sql += ''.join([
                ',coalesce(biovol%02d) as c%s' % (int(c[2:]), i)
                for i, c in enumerate(gpd) if c[0:2] == "bv"
            ])
            sql += """ from part_histopart_det where psampleid=%(psampleid)s {} order by Y""".format(DepthFilter)
            graph = list(range(0, len(gpd)))
            for i, c in enumerate(gpd):
                graph[i] = Fig.add_subplot(FigSizeY, FigSizeX, chartid + 1)
                if c[0:2] == "cl":
                    graph[i].set_xlabel(
                        'Particle det. class %s (%s) #/l' % (c, GetClassLimitTxt(PartDetClassLimit, int(c[2:]))))
                if c[0:2] == "bv":
                    graph[i].set_xlabel(
                        'Biovolume det. class %s (%s) µl/l' % (c, GetClassLimitTxt(PartDetClassLimit, int(c[2:]))))
                chartid += 1
            for rs in samples:
                DBData = database.GetAll(sql, {'psampleid': rs['psampleid']})
                data = np.empty((len(DBData), 2))
                for i, c in enumerate(gpd):
                    xcolname = "c%d" % i
                    valcount = 0
                    for rnum, r in enumerate(DBData):
                        if r[xcolname] is None:
                            continue
                        data[valcount] = (-r['y'], r[xcolname])
                        valcount += 1
                    graph[i].plot(data[:valcount, 1], data[:valcount, 0],
                                  color=PrjColorMap[rs['pprojid']] if len(PrjColorMap) > 1 else None)
            # Done after the plots so the X scale fits the data, and to avoid log-scale
            # errors when the first series has no values.
            for i, c in enumerate(gpd):
                if gvg('XScale') != 'I':
                    try:
                        if gvg('XScale') == 'O':
                            graph[i].set_xscale('log')
                        if gvg('XScale') == 'S':
                            graph[i].set_xscale('symlog')
                    except Exception as e:
                        # Sometimes log scale is impossible without data; force linear
                        # otherwise it crashes further on.
                        graph[i].set_xscale('linear')
                else:
                    graph[i].set_xlim(left=0)
        # --- CTD charts ---
        if len(gctd) > 0:
            # CTD column names come from CTDFixedColByKey-mapped keys, injected as columns.
            sql = "select depth y ," + ','.join(
                ['%s as c%d' % (c, i) for i, c in enumerate(gctd)])
            sql += """ from part_ctd where psampleid=%(psampleid)s {} order by lineno""".format(DepthFilter)
            graph = list(range(0, len(gctd)))
            for i, c in enumerate(gctd):
                graph[i] = Fig.add_subplot(FigSizeY, FigSizeX, chartid + 1)
                graph[i].set_xlabel('CTD %s ' % (CTDFixedColByKey.get(c)))
                chartid += 1
            for rs in samples:
                DBData = database.GetAll(sql, {'psampleid': rs['psampleid']})
                data = np.empty((len(DBData), 2))
                for i, c in enumerate(gctd):
                    xcolname = "c%d" % i
                    valcount = 0
                    for rnum, r in enumerate(DBData):
                        if r[xcolname] is None:
                            continue
                        data[valcount] = (-r['y'], r[xcolname])
                        valcount += 1
                    graph[i].plot(data[:valcount, 1], data[:valcount, 0],
                                  color=PrjColorMap[rs['pprojid']] if len(PrjColorMap) > 1 else None)
        # --- Taxonomy charts ---
        if len(gtaxo) > 0:
            # sql = "select depth y ,1000*nbr/watervolume as x from part_histocat h "
            sql = "select depth y ,nbr as x from part_histocat h "
            if gvg('taxochild') == '1':
                # Walk up to 14 ancestry levels so children of the chosen taxon match too.
                sql += " join taxonomy t0 on h.classif_id=t0.id "
                for i in range(1, 15):
                    sql += " left join taxonomy t{0} on t{1}.parent_id=t{0}.id ".format(i, i - 1)
            # sql += " where psampleid=%(psampleid)s and ( classif_id = %(taxoid)s and watervolume>0"
            sql += " where psampleid=%(psampleid)s and ( classif_id = %(taxoid)s "
            if gvg('taxochild') == '1':
                for i in range(1, 15):
                    sql += " or t{}.id= %(taxoid)s".format(i)
            sql += " ){} order by Y" "".format(DepthFilter)
            # Water volume per depth slice, to convert counts into concentrations.
            sqlWV = """ select {0} tranche,sum(watervolume) from part_histopart_det
                where psampleid=%(psampleid)s {1}
                group by tranche """.format(GetTaxoHistoWaterVolumeSQLExpr("depth"), DepthFilter)
            graph = list(range(0, len(gtaxo)))
            for i, c in enumerate(gtaxo):
                NomTaxo = database.GetAll(
                    """select concat(t.name,' (',p.name,')') nom from taxonomy t left JOIN taxonomy p on t.parent_id=p.id where t.id= %(taxoid)s""",
                    {'taxoid': c})[0]['nom']
                if gvg('taxochild') == '1':
                    NomTaxo += " and children"
                graph[i] = Fig.add_subplot(FigSizeY, FigSizeX, chartid + 1)
                graph[i].set_xlabel('%s #/m3' % (NomTaxo))
                # graph[i].set_yscale('log')
                def format_fn(tick_val, tick_pos):
                    # Map negative tick positions back to depth-slice labels (unused: the
                    # set_major_formatter call below is commented out).
                    if -int(tick_val) < len(DepthTaxoHistoLimit) and -int(tick_val) >= 0:
                        return DepthTaxoHistoLimit[-int(tick_val)]
                    else:
                        return ''
                ##graph[i].yaxis.set_major_formatter(FuncFormatter(format_fn))
                # graph[i].set_yticklabels(GetTaxoHistoLimit(20000))
                # graph[i].set_yticklabels(["a","b","c"])
                # graph[i].yticks(np.arange(5), ('Tom', 'Dick', 'Harry', 'Sally', 'Sue'))
                ##graph[i].set_yticks(np.arange(0,-20,-1))
                chartid += 1
                for isample, rs in enumerate(samples):
                    if rs['visibility'][1] >= 'V':  # Visible or exportable
                        DBData = database.GetAll(sql, {
                            'psampleid': rs['psampleid'],
                            'taxoid': c
                        })
                        WV = database.GetAssoc2Col(
                            sqlWV, {'psampleid': rs['psampleid']})
                    else:
                        # No permission: behave as if there were no data.
                        DBData = []
                        WV = {}
                    # print("{} =>{}".format(rs['psampleid'],WV))
                    if len(DBData) > 0:
                        data = np.empty((len(DBData), 2))
                        for rnum, r in enumerate(DBData):
                            data[rnum] = (r['y'], r['x'])
                        # hist,edge=np.histogram(data[:,0],bins=GetTaxoHistoLimit(data[:,0].max()),weights=data[:,1])
                        # Y=(edge[:-1]+edge[1:])/2
                        # graph[i].step(hist,Y)
                        # graph[i].hist(data[:,0],bins=GetTaxoHistoLimit(data[:,0].max()),weights=data[:,1],histtype ='step',orientation ='horizontal')
                        bins = GetTaxoHistoLimit(data[:, 0].max())
                        categ = -np.arange(len(bins) - 1)  #-isample*0.1
                        hist, edge = np.histogram(data[:, 0], bins=bins, weights=data[:, 1])
                        # print(hist)
                        # Convert per-slice counts into #/m3 using the water volume of the slice.
                        for ih, h in enumerate(hist):
                            if h > 0:
                                if WV.get(edge[ih], 0) > 0:
                                    hist[ih] = 1000 * h / WV.get(edge[ih])
                                else:
                                    hist[ih] = 0
                        # print(hist,edge)
                        # Y=-(edge[:-1]+edge[1:])/2 computes the middle of the interval
                        Y = -edge[:-1]
                        # Y=categ
                        graph[i].step(hist, Y,
                                      color=PrjColorMap[rs['pprojid']] if len(PrjColorMap) > 1 else None)
                    # bottom, top=graph[i].get_ylim()
                    # bottom=min(bottom,categ.min()-1)
                    # graph[i].set_ylim(bottom, top)
                    # Clamp the Y axis to the depth filter, or at least to the sampled depths.
                    bottom, top = graph[i].get_ylim()
                    if gvg('filt_depthmin'):
                        top = -float(gvg('filt_depthmin'))
                    if gvg('filt_depthmax'):
                        bottom = -float(gvg('filt_depthmax'))
                    elif len(WV) > 0:
                        bottom = min(bottom, -max(WV.keys()))
                    if top > 0:
                        top = 0
                    if bottom >= top:
                        bottom = top - 10
                    graph[i].set_ylim(bottom, top)
        # --- Chart listing the projects (doubles as a legend) ---
        if len(PrjColorMap) > 1:
            data = np.empty((len(PrjSampleCount), 2))
            PrjLabel = []
            GColor = []
            for i, (k, v) in enumerate(PrjSampleCount.items()):
                data[i] = i, v
                PrjLabel.append(PrjTitle[k])
                GColor.append(PrjColorMap[k])
            graph = Fig.add_subplot(FigSizeY, FigSizeX, chartid + 1)
            graph.barh(data[:, 0], data[:, 1], color=GColor)
            graph.set_yticks(np.arange(len(PrjLabel)) + 0.4)
            graph.set_yticklabels(PrjLabel)
            graph.set_xlabel("Sample count per project + Legend")
        Fig.tight_layout()
    except Exception as e:
        # On any failure, ship the traceback as the image so the user sees something.
        Fig = plt.figure(figsize=(8, 6), dpi=100)
        tb_list = traceback.format_tb(e.__traceback__)
        s = "%s - %s " % (str(e.__class__), str(e))
        for m in tb_list[::-1]:
            s += "\n" + m
        Fig.text(0, 0.5, s)
        print(s)
    png_output = io.BytesIO()
    Fig.savefig(png_output)
    png_output.seek(0)
    return send_file(png_output, mimetype='image/png')
def searchtaxotree(): res = GetAll("SELECT id, name FROM taxonomy WHERE parent_id is null order by name ") # print(res) return render_template('search/taxopopup.html',root_elements=res,targetid=gvg("target","taxolb"))
def QuestionProcess(self):
    """Build (and possibly launch) the TSV text-export task for a project.

    On first display, renders the export-options form with defaults; when the
    form is posted with starttask=Y, copies every option into self.param,
    validates it, and either re-renders with flashed errors or starts the task.
    Requires at least Annotate (level 1) rights on the project.
    """
    Prj=database.Projects.query.filter_by(projid=gvg("projid")).first()
    txt="<a href='/prj/%d'>Back to project</a>"%Prj.projid
    if not Prj.CheckRight(1):
        return PrintInCharte("ACCESS DENIED for this project<br>"+txt)
    txt+="<h3>Text export Task creation</h3>"
    txt+="<h5>Exported Project : #%d - %s</h5>"%(Prj.projid,XSSEscape(Prj.title))
    errors=[]
    # Capture the active shared filters so the export works on the same subset.
    self.param.filtres = {}
    for k in sharedfilter.FilterList:
        if gvg(k, "") != "":
            self.param.filtres[k] = gvg(k, "")
    if len(self.param.filtres) > 0:
        TxtFiltres = ",".join([k + "=" + v for k, v in self.param.filtres.items() if v != ""])
    else:
        TxtFiltres=""
    if self.task.taskstep==0:
        # Base project already chosen: second screen, or validation of the second screen.
        if gvp('starttask')=="Y":
            # Validation of the second screen: harvest every form option.
            self.param.ProjectId=gvg("projid")
            self.param.what=gvp("what")
            self.param.samplelist=gvp("samplelist")
            self.param.objectdata=gvp("objectdata")
            self.param.processdata=gvp("processdata")
            self.param.acqdata=gvp("acqdata")
            self.param.sampledata=gvp("sampledata")
            self.param.histodata=gvp("histodata")
            self.param.commentsdata=gvp("commentsdata")
            self.param.usecomasepa=gvp("usecomasepa")
            self.param.sumsubtotal=gvp("sumsubtotal")
            self.param.internalids = gvp("internalids")
            self.param.use_internal_image_name = gvp("use_internal_image_name")
            self.param.exportimagesbak = gvp("exportimagesbak")
            self.param.exportimagesdoi = gvp("exportimagesdoi")
            self.param.typeline = gvp("typeline")
            self.param.splitcsvby = gvp("splitcsvby")
            self.param.putfileonftparea = gvp("putfileonftparea")
            if self.param.splitcsvby=='sample':
                # Splitting by sample requires the sample data columns.
                self.param.sampledata='1'
            # Consistency checks on the collected options.
            # errors.append("TEST ERROR")
            if self.param.what=='' :
                errors.append("You must select What you want to export")
            if len(errors)>0:
                for e in errors:
                    flash(e,"error")
            else:
                # No error: launch the task.
                return self.StartTask(self.param)
        else:
            # Default values for the first rendering of the form.
            self.param.what ="TSV"
            self.param.objectdata = "1"
            self.param.processdata = "1"
            self.param.acqdata = "1"
            self.param.sampledata = "1"
            self.param.splitcsvby=""
        # Fetch the samples for the sample-selection widget.
        sql="""select sampleid,orig_id from samples where projid =%(projid)s order by orig_id"""
        g.SampleList=GetAll(sql,{"projid":gvg("projid")},cursor_factory=None)
        g.headcenter="<h4>Project : <a href='/prj/{0}'>{1}</a></h4>".format(Prj.projid,XSSEscape(Prj.title));
        if TxtFiltres!="":
            # Propagate the active filters in the back-link query string.
            g.headcenter = "<h4>Project : <a href='/prj/{0}?{2}'>{1}</a></h4>".format(
                Prj.projid, Prj.title,
                "&".join([k + "=" + v for k, v in self.param.filtres.items() if v != ""]))
        # Application administrators (role_id=2) with a plausible e-mail address.
        LstUsers = database.GetAll("""select distinct u.email,u.name,Lower(u.name) FROM users_roles ur join users u on ur.user_id=u.id where ur.role_id=2 and u.active=TRUE and email like '%@%' order by Lower(u.name)""")
        # NOTE(review): the "******" separator below looks like a redaction artefact
        # (the trailing </li> suggests list items were intended) — confirm against VCS.
        g.LstUser = "******".join(["<a href='mailto:{0}'>{0}</a></li> ".format(*r) for r in LstUsers])
        return render_template('task/textexport_create.html',header=txt,data=self.param,TxtFiltres=TxtFiltres)
def PrjResetToPredicted(PrjId):
    """Reset Validated/Dubious objects of a project back to Predicted status.

    GET shows a red warning (with the object count when filters are active) and
    the confirmation form; POST with process=Y archives the current
    classification into objectsclassifhisto, flips classif_qual to 'P' and
    refreshes the project statistics. Requires Admin (level 2) rights.
    """
    request.form  # Force reading the POST data, otherwise a 504 error occurs.
    Prj = database.Projects.query.filter_by(projid=PrjId).first()
    if Prj is None:
        flash("Project doesn't exists", 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    if not Prj.CheckRight(2):  # Level 0 = Read, 1 = Annotate, 2 = Admin
        flash('You cannot edit settings for this project', 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    g.headcenter = "<h4><a href='/prj/{0}'>{1}</a></h4>".format(
        Prj.projid, XSSEscape(Prj.title))
    txt = "<h3>Reset status to predicted</h3>"
    sqlparam = {}
    # Active shared filters from the query string restrict the affected objects.
    filtres = {}
    for k in sharedfilter.FilterList:
        if gvg(k):
            filtres[k] = gvg(k, "")
    process = gvp('process')
    if process == 'Y':
        # Archive the current V/D classification before overwriting it
        # (skipping entries already present in the history).
        sqlhisto = """insert into objectsclassifhisto(objid,classif_date,classif_type,classif_id,classif_qual,classif_who)
                      select objid,classif_when,'M', classif_id,classif_qual,classif_who
                        from objects o
                        where projid=""" + str(
            Prj.projid
        ) + """ and classif_when is not null and classif_qual in ('V','D')
                        and not exists(select 1 from objectsclassifhisto och where och.objid=o.objid and och.classif_date=o.classif_when) """
        sqlhisto += sharedfilter.GetSQLFilter(filtres, sqlparam, str(current_user.id))
        ExecSQL(sqlhisto, sqlparam)
        # Then flip the status to Predicted on the same object set.
        sqlhisto = """update obj_head set classif_qual='P'
                      where projid={0} and objid in (select objid from objects o  where projid={0} and classif_qual in ('V','D') {1}) """.format(
            Prj.projid,
            sharedfilter.GetSQLFilter(filtres, sqlparam, str(current_user.id)))
        ExecSQL(sqlhisto, sqlparam)
        # flash('Data updated', 'success')
        txt += "<a href='/prj/%s' class='btn btn-primary'>Back to project</a> " % (
            Prj.projid)
        appli.project.main.RecalcProjectTaxoStat(Prj.projid)
        appli.project.main.UpdateProjectStat(Prj.projid)
        return PrintInCharte(txt)
    # GET path: warn about the scope of the operation before confirmation.
    sql = "select objid FROM objects o where projid=" + str(Prj.projid)
    if len(filtres):
        sql += sharedfilter.GetSQLFilter(filtres, sqlparam, str(current_user.id))
        ObjList = GetAll(sql, sqlparam)
        ObjListTxt = "\n".join((str(r['objid']) for r in ObjList))
        txt += "<span style='color:red;font-weight:bold;font-size:large;'>USING Active Project Filters, {0} objects</span>".format(
            len(ObjList))
    else:
        txt += "<span style='color:red;font-weight:bold;font-size:large;'>Apply to ALL OBJETS OF THE PROJECT (NO Active Filters)</span>"
    Lst = GetFieldList(Prj)
    # txt+="%s"%(Lst,)
    return PrintInCharte(
        render_template("project/prjresettopredicted.html", Lst=Lst, header=txt))
def QuestionProcess(self):
    """Prepare (and possibly launch) the UVP/ZooScan particle folder import task.

    Locates the project raw folder under SERVERLOADAREA, parses the
    <serial>_header_<name>.txt metadata file, matches its profiles against the
    part_samples already in the database, then either renders the
    profile-selection form or, on starttask=Y, launches the import task.
    """
    ServerRoot = Path(app.config['SERVERLOADAREA'])
    txt = "<h1>Particle ZooScan folder Importation Task</h1>"
    errors = []
    txt += "<h3>Task Creation</h3>"
    Prj = partdatabase.part_projects.query.filter_by(
        pprojid=gvg("p")).first()
    if Prj is None:
        return PrintInCharte(ErrorFormat("This project doesn't exists"))
    if Prj.instrumtype not in LstInstrumType:
        return PrintInCharte(
            ErrorFormat("Instrument type '%s' not in list : %s" %
                        (Prj.instrumtype, ','.join(LstInstrumType))))
    g.prjtitle = Prj.ptitle
    g.prjprojid = Prj.pprojid
    # g.prjowner=Prj.owneridrel.name
    DossierUVPPath = ServerRoot / Prj.rawfolder
    self.param.DossierUVP = DossierUVPPath.as_posix()
    txt = ""
    # TODO security handling
    # if Prj.CheckRight(2)==False:
    #     return PrintInCharte("ACCESS DENIED for this project");
    self.param.pprojid = gvg("p")
    # Folder name convention: <serial>_<project name>; the header file derives from it.
    DirName = DossierUVPPath.name
    m = re.search(R"([^_]+)_(.*)", DirName)
    if m.lastindex != 2:
        return PrintInCharte(
            ErrorFormat("Le repertoire projet n'as pas un nom standard"))
    else:
        FichierHeader = DossierUVPPath / "meta" / (
            m.group(1) + "_header_" + m.group(2) + ".txt")
        if not FichierHeader.exists():
            # NOTE(review): the source is corrupted here — the "******" below looks like
            # a redaction that swallowed code; the error call was presumably closed and
            # followed by something like `dbsample = database.GetAssoc(`, since dbsample
            # is consumed further down. Recover the original text from version control.
            return PrintInCharte(
                ErrorFormat("Le fichier header n'existe pas :"******"""select profileid,psampleid,filename,stationid,firstimage,lastimg,lastimgused,comment,histobrutavailable
                ,(select count(*) from part_histopart_det where psampleid=s.psampleid) nbrlinedet
                ,(select count(*) from part_histopart_reduit where psampleid=s.psampleid) nbrlinereduit
                ,(select count(*) from part_histocat where psampleid=s.psampleid) nbrlinetaxo
                from part_samples s where pprojid=%s""" % (self.param.pprojid))
        # print("ouverture de " + FichierHeader)
        # Parse the header file and enrich each profile row with the DB state
        # of the matching sample (if already imported).
        with open(FichierHeader.as_posix()) as FichierHeaderHandler:
            F = csv.DictReader(FichierHeaderHandler, delimiter=';')
            for r in F:
                r['psampleid'] = None
                if r['profileid'] in dbsample:
                    r['psampleid'] = dbsample[
                        r['profileid']]['psampleid']
                    r['histobrutavailable'] = dbsample[
                        r['profileid']]['histobrutavailable']
                    r['nbrlinedet'] = dbsample[
                        r['profileid']]['nbrlinedet']
                    r['nbrlinereduit'] = dbsample[
                        r['profileid']]['nbrlinereduit']
                    r['nbrlinetaxo'] = dbsample[
                        r['profileid']]['nbrlinetaxo']
                self.param.profilelistinheader.append(r)
        # self.param.profilelistinheader[r['profileid']]=r
        # Sort by the profileid column.
        self.param.profilelistinheader = sorted(
            self.param.profilelistinheader, key=lambda r: r['profileid'])
        if gvp('starttask') == "Y":
            # Form validated: collect the options and the selected profiles.
            self.param.ProcessOnlyMetadata = (gvp('onlymeta', 'N') == 'Y')
            self.param.user_name = current_user.name
            self.param.user_email = current_user.email
            for f in request.form:
                self.param.profiletoprocess[request.form.get(f)] = "Y"
            if len(self.param.profiletoprocess) == 0:
                errors.append("No sample to process selected")
            if len(errors) > 0:
                for e in errors:
                    flash(e, "error")
            else:
                return self.StartTask(self.param)
        else:
            # Default values / first rendering.
            if len(self.param.profilelistinheader) == 0:
                return PrintInCharte(
                    ErrorFormat("No sample available in file %s" %
                                (FichierHeader.as_posix())))
        print("%s" % (self.param.profilelistinheader))
        return render_template('task/uvpzooscanimport_create.html',
                               header=txt,
                               data=self.param,
                               ServerPath=gvp("ServerPath"),
                               TxtTaxoMap=gvp("TxtTaxoMap"))
def PrjConfusionMatrix(PrjId):
    """Show the confusion matrix (true vs. predicted category) of a project.

    Uses only Validated objects that carry an automatic prediction. With
    ?astsv=Y the matrix is returned as a TSV attachment; otherwise an HTML
    table plus two heat-map images (row-normalized = recall on the diagonal,
    column-normalized = precision on the diagonal) are rendered in the charte.

    :param PrjId: numeric project id (from the route, used as an int in SQL).

    Fix vs. previous revision: the per-row HTML emitted a malformed
    "</td class='margin'><td>" (attribute on a closing tag); the class now
    sits on the opening tag of the third margin cell.
    """
    # Category labels: species names ("genus species") are kept as-is, single
    # names are disambiguated with their parent as "name(parent)".
    sql = """select lower(case when tr.name like '% %' then tr.name else concat(tr.name,'(',trp.name,')') end ) ClassifReel
                   ,lower(case when tp.name like '% %' then tp.name else concat(tp.name,'(',tpp.name,')') end) ClassifPredict
            from objects o
            join taxonomy tp on tp.id=o.classif_auto_id
            join taxonomy tr on tr.id=o.classif_id
            left join taxonomy tpp on tp.parent_id=tpp.id
            left join taxonomy trp on tr.parent_id=trp.id
            where projid ={} and classif_qual='V'""".format(PrjId)
    DBRes = np.array(GetAll(sql))
    txtbacktoproject = "<a href='/prj/%d'>Back to project</a>" % PrjId
    Prj = database.Projects.query.filter_by(projid=PrjId).first()
    g.headcenter = "<h4><a href='/prj/{0}'>{1}</a></h4>".format(
        Prj.projid, XSSEscape(Prj.title))
    if len(DBRes) == 0:
        flash("No validated objects with prediction", 'error')
        return PrintInCharte(txtbacktoproject)
    CatTrue = DBRes[:, 0]
    CatPred = DBRes[:, 1]
    CatAll = [x for x in set(CatPred) | set(CatTrue)]
    CatAll.sort()
    cm = metrics.confusion_matrix(y_pred=CatPred, y_true=CatTrue)
    # Row sums (true counts) and column sums (predicted counts); the *NoZero
    # variants replace 0 by 999999 to avoid division by zero in the rates.
    SommeH = cm.sum(axis=1)
    SommeV = cm.sum(axis=0)
    SommeVNoZero = cm.sum(axis=0)
    SommeVNoZero[SommeVNoZero == 0] = 999999
    SommeHNoZero = cm.sum(axis=1)
    SommeHNoZero[SommeHNoZero == 0] = 999999
    TotalObj = CatPred.shape[0]
    D = np.diag(cm)  # correctly predicted counts per category
    if gvg("astsv"):
        # TSV export: header row, one row per true category, then the
        # predicted-count / percentage / precision margin rows.
        t = [""]  # empty top-left cell
        for c in CatAll:
            t.append("\t%s" % c)
        t.append("\tNb. true\t% true\tRecall")
        for c, cml, s, recall in zip(CatAll, cm, SommeH,
                                     100 * D / SommeHNoZero):
            t.append("\n%s" % c)
            for v in cml:
                t.append("\t%s" % v)
            # Row total, its share of all objects, and the recall.
            t.append("\t%s\t%0.1f\t%0.1f" % (s, 100 * s / TotalObj, recall))
        t.append("\nNb. predicted")
        for s in SommeV:
            t.append("\t%s" % (s))  # column totals
        t.append("\n% of predicted")
        for s in SommeV:
            t.append("\t%0.1f" % (100 * s / TotalObj))  # column percentages
        t.append("\nPrecision")
        for s in 100 * D / SommeVNoZero:
            t.append("\t%0.1f" % (s))  # precision per predicted category
        t.append("\n")
        return Response("".join(t),
                        mimetype="text/tsv",
                        headers={
                            "Content-Disposition":
                            "attachment; filename=confusionmatrix_%s.tsv" % Prj.projid
                        })
    # HTML rendering: CSS for the rotated headers, then the table skeleton.
    t = [
        """<style>
        th { vertical-align: bottom !important; background-color: #ddd }
        .table > tbody > tr > th.rotate { height: 140px; white-space: nowrap; }
        .table > tbody > tr > th.row_header{ height: 140px; white-space: nowrap; vertical-align: top !important; }
        th.rotate > div { transform: rotate(270deg); width: 15px; }
        th.row_header > div { transform: translate(0px, 200px) rotate(270deg); width: 15px; }
        .margin { font-style: italic; }
        </style>
        <h2>Confusion matrix - <a href='?astsv=Y' style='font-size:medium' class='btn btn-primary btn-xs'>TSV Export</a> </h2>
        <p>This matrix is refreshed every time you access it. For more information on confusion statistics, please see the <a href='https://en.wikipedia.org/wiki/Precision_and_recall'>very well written Wikipedia page</a>.</p>
        <table class='table table-bordered table-hover table-condensed' style='font-size:12px;'>
        <tr>
        <th> </th>
        <th> </th>
        <th class='column_header' colspan='1000'>Predicted category</th>
        </tr>
        <tr>
        <th class='row_header' rowspan='1000'><div>True category</div></th>
        <th> </th>
        """
    ]
    # Header row with one rotated title per category.
    for c in CatAll:
        t.append("<th class='rotate'><div>%s</div></th>" % c)
    t.append(
        "<th>Nb. true</th><th>% true</th><th><a href='https://en.wikipedia.org/wiki/Precision_and_recall#Recall' target='_blank'>Recall</a></th>"
    )
    for c, cml, s, recall in zip(CatAll, cm, SommeH, 100 * D / SommeHNoZero):
        t.append("</tr><tr><th>%s</th>" % c)
        for v in cml:
            t.append("<td>%s</td>" % v)
        # Row total, percentage and recall (BUGFIX: was "</td class='margin'><td>").
        t.append(
            "<td class='margin'>%s</td><td class='margin'>%0.1f</td><td class='margin'>%0.1f</td>"
            % (s, 100 * s / TotalObj, recall))
    t.append("</tr><tr><th>Nb. predicted</th>")
    for s in SommeV:
        t.append("<td class='margin'>%s</td>" % (s))  # column totals
    t.append("</tr><tr><th>% of predicted</th>")
    for s in SommeV:
        t.append("<td class='margin'>%0.1f</td>" % (100 * s / TotalObj))
    t.append(
        "</tr><tr><th><a href='https://en.wikipedia.org/wiki/Precision_and_recall#Precision' target='_blank' >Precision</a></th>"
    )
    for s in 100 * D / SommeVNoZero:
        t.append("<td class='margin'>%0.1f</td>" % (s))
    t.append("</tr></table>")
    # Heat map 1: matrix divided by the row sums => diagonal shows recall.
    cm_normalized = cm.astype('float') / SommeHNoZero[:, np.newaxis]
    FigSize = int(SommeHNoZero.shape[0] / 3)
    if FigSize < 8:
        FigSize = 8  # 800x800 px minimum
    g.Fig = plt.figure(figsize=(FigSize, FigSize), dpi=100)
    plot_confusion_matrix(cm_normalized, CatAll)
    RamImage = io.BytesIO()
    g.Fig.savefig(RamImage, dpi=100, format='png')
    t.append(
        "<h3>Confusion matrix divided by sum of lines</h3><p>The diagonal contains the <a href='https://en.wikipedia.org/wiki/Precision_and_recall#Recall' target='_blank'>recall</a> rate.</p><img src='data:image/png;base64,{}'/>"
        .format(base64.encodebytes(RamImage.getvalue()).decode()))
    # Heat map 2: matrix divided by the column sums => diagonal shows precision
    # (no division by zero possible thanks to SommeVNoZero).
    cm_normalized = cm.astype('float') / SommeVNoZero
    # plt.figure(figsize=(8,8), dpi=100) # 800x800 px
    g.Fig.clf()
    plot_confusion_matrix(cm_normalized, CatAll)
    RamImage = io.BytesIO()
    g.Fig.savefig(RamImage, dpi=100, format='png')
    t.append(
        "<h3>Confusion matrix divided by sum of columns</h3><p>The diagonal contains the <a href='https://en.wikipedia.org/wiki/Precision_and_recall#Precision' target='_blank'>precision</a> rate.</p><img src='data:image/png;base64,{}'/>"
        .format(base64.encodebytes(RamImage.getvalue()).decode()))
    # t.append("<br>"+txtbacktoproject)
    return PrintInCharte("\n".join(t))
def PrjEditDataMass(PrjId):
    """Mass-edit one field of the project's objects/samples/acquisitions/process rows.

    GET shows the field-selection form (scope warning depends on active shared
    filters); POST with 'field' and 'newvalue' runs a bulk UPDATE on the table
    encoded by the first letter of 'field' (f/h/s/a/p), archiving the previous
    classification first when the edited field is classif_id. Requires Admin
    (level 2) rights.

    Fix vs. previous revision: 'field' comes straight from the request and was
    spliced into the UPDATE statement unchecked (SQL injection via the column
    name) and an unknown table code raised KeyError; both are now validated.
    """
    request.form  # Force reading the POST data, otherwise a 504 error occurs.
    Prj = database.Projects.query.filter_by(projid=PrjId).first()
    if Prj is None:
        flash("Project doesn't exists", 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    if not Prj.CheckRight(2):  # Level 0 = Read, 1 = Annotate, 2 = Admin
        flash('You cannot edit settings for this project', 'error')
        return PrintInCharte("<a href=/prj/>Select another project</a>")
    g.headcenter = "<h4><a href='/prj/{0}'>{1}</a></h4>".format(
        Prj.projid, XSSEscape(Prj.title))
    txt = "<h3>Project Mass data edition </h3>"
    sqlparam = {}
    # Active shared filters from the query string restrict the affected objects.
    filtres = {}
    for k in sharedfilter.FilterList:
        if gvg(k):
            filtres[k] = gvg(k, "")
    field = gvp('field')
    if field and gvp('newvalue'):
        tables = {
            'f': 'obj_field',
            'h': 'obj_head',
            's': 'samples',
            'a': 'acquisitions',
            'p': 'process'
        }
        # The target table is encoded as the first letter of 'field';
        # the rest is the column name.
        tablecode = field[0]
        table = tables.get(tablecode)
        field = field[1:]
        # SECURITY: both values come from the request and are spliced into the
        # SQL text — accept only known table codes and plain identifiers.
        if table is None or not field.isidentifier():
            flash('Invalid field specification', 'error')
            return PrintInCharte("<a href=/prj/>Select another project</a>")
        sql = "update " + table + " set " + field + "=%(newvalue)s "
        if field == 'classif_id':
            # Manual classification changes also stamp who/when.
            sql += " ,classif_when=current_timestamp,classif_who=" + str(
                current_user.id)
        sql += " where "
        if tablecode == "h":
            sql += " objid in ( select objid from objects o "
        elif tablecode == "f":
            sql += " objfid in ( select objid from objects o "
        elif tablecode == "s":
            sql += " sampleid in ( select distinct sampleid from objects o "
        elif tablecode == "a":
            sql += " acquisid in ( select distinct acquisid from objects o "
        elif tablecode == "p":
            sql += " processid in ( select distinct processid from objects o "
        sql += " where projid=" + str(Prj.projid)
        sqlparam['newvalue'] = gvp('newvalue')
        if len(filtres):
            sql += " " + sharedfilter.GetSQLFilter(filtres, sqlparam,
                                                   str(current_user.id))
        sql += ")"
        if field == 'classif_id':
            # Archive the current classification before overwriting it.
            sqlhisto = """insert into objectsclassifhisto(objid,classif_date,classif_type,classif_id,classif_qual,classif_who)
                          select objid,classif_when,'M', classif_id,classif_qual,classif_who
                            from objects o
                            where projid=""" + str(
                Prj.projid) + " and classif_when is not null "
            sqlhisto += sharedfilter.GetSQLFilter(filtres, sqlparam,
                                                  str(current_user.id))
            ExecSQL(sqlhisto, sqlparam)
        ExecSQL(sql, sqlparam)
        flash('Data updated', 'success')
        if field == 'latitude' or field == 'longitude' or gvp('recompute') == 'Y':
            # Keep the per-sample position in sync with its objects' positions.
            ExecSQL(
                """update samples s set latitude=sll.latitude,longitude=sll.longitude
                  from (select o.sampleid,min(o.latitude) latitude,min(o.longitude) longitude
                          from obj_head o
                          where projid=%(projid)s and o.latitude is not null and o.longitude is not null
                          group by o.sampleid) sll
                  where s.sampleid=sll.sampleid and projid=%(projid)s """,
                {'projid': Prj.projid})
            flash('sample latitude and longitude updated', 'success')
    # Render the form, warning about the scope of a future edit.
    sql = "select objid FROM objects o where projid=" + str(Prj.projid)
    if len(filtres):
        sql += sharedfilter.GetSQLFilter(filtres, sqlparam,
                                         str(current_user.id))
        ObjList = GetAll(sql, sqlparam)
        ObjListTxt = "\n".join((str(r['objid']) for r in ObjList))
        txt += "<span style='color:red;font-weight:bold;font-size:large;'>USING Active Project Filters, {0} objects</span>".format(
            len(ObjList))
    else:
        txt += "<span style='color:red;font-weight:bold;font-size:large;'>Apply to ALL OBJETS OF THE PROJECT (NO Active Filters)</span>"
    Lst = GetFieldList(Prj)
    # txt+="%s"%(Lst,)
    return PrintInCharte(
        render_template("project/prjeditdatamass.html", Lst=Lst, header=txt))
def QuestionProcess(self): Prj=database.Projects.query.filter_by(projid=gvg("projid")).first() if not Prj.CheckRight(1): return PrintInCharte("ACCESS DENIED for this project<br>") g.prjtitle=Prj.title for k in sharedfilter.FilterList: self.param.filtres[k] = gvg(k, "") g.headcenter="<h4><a href='/prj/{0}'>{1}</a></h4>".format(Prj.projid,XSSEscape(Prj.title)) txt="" errors=[] # Le projet de base est choisi second écran ou validation du second ecran if gvp('starttask')=="Y": # validation du second ecran self.param.ProjectId=gvg("projid") if gvg("src",gvp("src",""))!="": self.param.BaseProject=database.CSVIntStringToInClause(gvg("src",gvp("src",""))) self.param.CritVar=gvp("CritVar") self.param.Perimeter=gvp("Perimeter") self.param.usemodel_foldername = gvp('modeldir', '') if gvp('ReadPostTaxoMappingFromLB') =="Y": self.param.PostTaxoMapping = ",".join((x[6:] + ":" + gvp(x) for x in request.form if x[0:6] == "taxolb")) else: self.param.PostTaxoMapping = gvp("PostTaxoMapping") self.param.learninglimit = gvp("learninglimit") self.param.keeplog=gvp("keeplog") self.param.savemodel_foldername = gvp("savemodel_foldername") self.param.savemodel_title = gvp("savemodel_title") self.param.savemodel_comments = gvp("savemodel_comments") self.param.usescn=gvp("usescn","") # self.param.Taxo=",".join( (x[4:] for x in request.form if x[0:4]=="taxo") ) self.param.Taxo =gvp('Taxo') self.param.CustSettings=DecodeEqualList(gvp("TxtCustSettings")) g.TxtCustSettings=gvp("TxtCustSettings") # Verifier la coherence des données if self.param.usemodel_foldername=='': if self.param.CritVar=='' and self.param.usescn=="": errors.append("You must select some variable") if self.param.Taxo=='' : errors.append("You must select some category") if len(errors)>0: for e in errors: flash(e,"error") else: # Pas d'erreur, on memorize les parametres dans le projet et on lance la tache # On ajoute les valeurs dans CustSettings pour les sauver dans le ClassifSettings du projet PrjCS = 
DecodeEqualList(Prj.classifsettings) d=self.param.CustSettings.copy() if gvg("src", gvp("src", "")) != "": # on écrase que si les données sont saisies, sinon on prend dans le projet d['critvar']=self.param.CritVar d['baseproject']=self.param.BaseProject d['seltaxo'] = self.param.Taxo if "usemodel_foldername" in PrjCS: d["usemodel_foldername"]=PrjCS["usemodel_foldername"] else: d['usemodel_foldername']=self.param.usemodel_foldername if "critvar" in PrjCS: d["critvar"]=PrjCS["critvar"] if "baseproject" in PrjCS: d["baseproject"]=PrjCS["baseproject"] if "seltaxo" in PrjCS: d["seltaxo"] = PrjCS["seltaxo"] d['posttaxomapping'] =self.param.PostTaxoMapping Prj.classifsettings=EncodeEqualList(d) return self.StartTask(self.param) else: # valeurs par default if gvp('frommodel', gvg('frommodel')) == "Y": if gvp('modeldir')=='': return self.QuestionProcessScreenSelectModel(Prj) elif gvp('displaytaxomap')=='Y': return self.QuestionProcessScreenSelectModelTaxo(Prj) else: if gvp('src', gvg('src')) == "": return self.QuestionProcessScreenSelectSource(Prj) elif gvp('seltaxo', gvg('seltaxo')) == "": return self.QuestionProcessScreenSelectSourceTaxo(Prj) d=DecodeEqualList(Prj.classifsettings) # Certaines variable on leur propre zone d'edition, les autres sont dans la zone texte custom settings self.param.CritVar=d.get("critvar","") self.param.Taxo=d.get("seltaxo","") self.param.Perimeter="nmc" self.param.learninglimit = int(gvp("learninglimit","5000")) if "critvar" in d : del d["critvar"] if "perimeter" in d : del d["perimeter"] if "methode" in d: del d["methode"] if "learninglimit" in d: del d["learninglimit"] if "seltaxo" in d : del d["seltaxo"] if "PostTaxoMapping" in d: del d["PostTaxoMapping"] if "baseproject" in d : del d["baseproject"] g.TxtCustSettings=EncodeEqualList(d) self.param.Taxo = ",".join((x[4:] for x in request.form if x[0:4] == "taxo" and x[0:6] != "taxolb")) self.param.PostTaxoMapping = ",".join((x[6:]+":"+gvp(x) for x in request.form if x[0:6] == "taxolb")) # 
Determination des criteres/variables utilisées par l'algo de learning revobjmap = self.GetReverseObjMap(Prj) PrjListInClause=database.CSVIntStringToInClause(gvp("src",gvg("src"))) LstPrjSrc=GetAll("select projid,mappingobj from projects where projid in({0})".format(PrjListInClause)) revobjmapbaseByProj={} CommonKeys = set(revobjmap.keys()) for PrjBase in LstPrjSrc: revobjmapbaseByProj[PrjBase['projid']] = self.GetReverseObjMap(PrjBase) CommonKeys = CommonKeys.intersection(set(revobjmapbaseByProj[PrjBase['projid']].keys())) # critlist[NomCol] 0:NomCol , 1:LS % validé rempli , 2:LS Nbr distincte ,3:Cible % rempli ,4:Cible % NV Rempli Inutile ? critlist={k:[k,0,0,0,0] for k in CommonKeys} # Calcul des stat des projets du LearningSet sql="select count(*) nbrtot" for k in CommonKeys: case="case " for PrjBase in LstPrjSrc: case +=" when projid={0} then {1} ".format(PrjBase['projid'],revobjmapbaseByProj[PrjBase['projid']][k])