def post_nr2(usr,topdt,new_dt,sin_dt,level): sql1="insert into SEMANTIC_OBJECT_DT(username,object,dt,topico,LEV) values(?,?,?,?,?)" try: conn.sqlX (sql1,([usr,topdt,new_dt,sin_dt,level])) except: print 'Erro ao post:',topdt,new_dt,sin_dt,level log.exception("================")
def post_nr(usr, tp, level=1, id_top=1):
    # Recursively persist topic node `tp` and its synapse children into
    # SEMANTIC_OBJECT_DT, threading the parent row id through `id_top`.
    # NOTE(review): `nameo` is a free variable (not a parameter) -- it must be
    # bound by the enclosing/posting context before this runs, otherwise the
    # select below raises NameError outside any try block.
    tp_Dt = ''
    for d in tp.dt:          # tp.dt appears to be an iterable of string fragments
        tp_Dt += d
    tp_name = tp_Dt          # NOTE(review): assigned but never used
    for sn in tp.sinapses:
        sn_dt = ''
        for s1 in sn.nr.dr:  # NOTE(review): `.dr` -- confirm this is not a typo for `.dt`
            sn_dt += s1
        sql1 = "insert into SEMANTIC_OBJECT_DT(username,object,dt,topico,LEV,id_top) values(?,?,?,?,?,?)"
        try:
            conn.sqlX(sql1, ([usr, nameo, tp_Dt, sn_dt, level, id_top]))
        except:
            # NOTE(review): bare except hides the real failure; consider
            # `except Exception` plus log.exception as sibling code does.
            print 'Erro ao post:', nameo, tp_Dt, sn_dt
        #==========
        if True:
            # NOTE(review): this select binds 6 parameters against only 5
            # placeholders, and filters on columns (uid, sin) that do not
            # appear in the insert above -- it looks copied from the
            # SEMANTIC_OBJECT_DT3 variant and cannot succeed as written.
            sqlcc = 'select id_top from SEMANTIC_OBJECT_DT where uid=? and topico=? and lev=? and sin=? and id_top=?'
            res = conn.sqlX( sqlcc, ([usr, nameo, tp_Dt, sn_dt, level, id_top]))
            id_top = 1
            for ns in res:
                id_top = ns[0]   # take the first returned id as the new parent
                break
            post_nr(usr, sn.nr, level + 1, id_top)
def clear_termo(username): sql1 = " delete from SEMANTIC_OBJECT_DT3 where username = ? " try: conn.sqlX(sql1, ([username])) except Exception, err: print 'Erro ao del(OBJECT-DT):', err, [username]
def insert_cache(url):
    """Insert a URL into the cache and remove its pending-queue row.

    url: a [url_string, rec_id] pair; rec_id identifies the `url` work-table
    row to delete once the insert succeeds.  Failures are logged, not raised.
    """
    try:
        [url1, rec_id] = url
        conn.sqlX(sql_insert_p, [url1])
        # int() coercion keeps the string-built statement safe even if
        # rec_id ever arrives as a non-numeric string
        cursor2.execute("delete from url where rec_id=" + str(int(rec_id)))
    except Exception:
        # narrowed from a bare except; stays best-effort but logged
        log.exception('ERROR INSERT')
def post_nr2(usr, topdt, new_dt, sin_dt, level, id_top, sinapse): sql1 = "insert into SEMANTIC_OBJECT_DT3(UID,dt,topico,LEV,sin,id_top,username) values(?,?,?,?,?,?,?)" try: conn.sqlX( sql1, ([uid, topdt, new_dt, sin_dt, level, sinapse, id_top, usr])) except: print 'Erro ao post:', topdt, new_dt, sin_dt, level
def post_datah_state(state_type, obj, composicao, rels, usr):
    """Record INFOSTATE rows for each state against compositions and relations.

    BUG FIX: the second inner loop iterated `composicao` again while
    rebinding (shadowing) the `rels` parameter, so it inserted duplicate rows
    of the last composition and the `rels` argument was never used.  It now
    iterates `rels`, as the signature implies -- verify against callers.
    """
    sql = 'insert into SEMANTIC_INFOSTATE( USERNAME,OBJECT,TOPICO,INDI_STATE ) VALUES(?,?,?,?)'
    for sti in state_type:
        for compo in composicao:
            conn.sqlX(sql, [usr, obj, compo, sti])
        #==
        for rel in rels:
            conn.sqlX(sql, [usr, obj, rel, sti])
def rem_results(usr, sente): sql1 = "delete from INBOX_MSG where USERNAME=? and SENTECE = ?" try: conn.sqlX(sql1, ([usr, sente])) except Exception, err: print 'Erro ao del(MSG_D):', err
def post_l(usr, msga, source, referencias, sente): msg = msga sql1 = "delete from INBOX_MSG where USERNAME =? and source = ? " try: conn.sqlX(sql1, ([usr, source])) except Exception, err: print 'Erro ao post(MSG_D):', err
def post_object_by_data3p(layer, cenario, usr, termo, foco, posted_objs, senti, l_p_ant):
    # Persist the object row for a semantic layer into SEMANTIC_OBJECT3,
    # skipping layers without topics and objects already posted for this
    # sentiment.  `posted_objs` is mutated in place as a dedup cache.
    if layer.name == '':
        return
    def get_top_level(obj, foc, usr, termo_s):
        # NOTE(review): defined but never called in this function body.
        rts = []
        resultSet = conn.sqlX(
            "SELECT lev,id_top FROM SEMANTIC_OBJECT_DT3 where OBJECT = ? and TOPICO= ? and USERNAME = ? and UID= ? order by LEV ",
            ([obj, foc, usr, termo_s]))
        for results in resultSet:
            i = results[0]
            id_top = results[1]
            rts.append([i, id_top])
        return rts
    #=======================
    nameo = layer.name
    # fall back to the previous layer's name when this one is blank
    if umisc.trim(nameo) == '' or umisc.trim(nameo) == '\n':
        if l_p_ant != None:
            nameo = l_p_ant.name
    if umisc.trim(nameo) == '' or umisc.trim(nameo) == '\n':
        return
    fnd_tops = False
    l_p_ant = parametro = None  # (comment only -- see NOTE below)
    l_p_ant = layer
    print 'POST:LR:', nameo
    print '++------------------------------------------'
    for s in layer.topicos:
        print 'DT:', s.dt
        fnd_tops = True
        for d in s.sinapses:
            print d.nr.dt
    print '++------------------------------------------'
    if not fnd_tops:
        # nothing to persist for a topic-less layer
        return
    print 'Post-obj:[', nameo, ']'
    no_post_o = False
    for [s, st] in posted_objs:
        if s == nameo and st == senti:
            no_post_o = True
    # NOTE(review): appended unconditionally, so duplicates accumulate in
    # posted_objs even when the pair was already present.
    posted_objs.append([nameo, senti])
    #==========
    #if not no_post_o and len(layer.topicos)>0:
    if not no_post_o:
        sql1 = "insert into SEMANTIC_OBJECT3(username,objeto,cenar,senti) values(?,?,?,?)"
        try:
            conn.sqlX(sql1, ([usr, nameo, cenario, senti]))
        except Exception, err:
            print 'Erro ao post(OBJECT):', err
def post_cn2(its): print 'POST.LEN:', len(its) sql = ' insert into web_cache3(PG,PROCESSED,TERMO,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)' #for [PG,PROCESSED,TERM,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL] in its: for [ PG, PROCESSED, TERM, USR, PURPOSE, URL_ICON, URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE, DOC_ID, TP, PHONE, STREET, CITY, COUNTRY, ZIP, LATITUDE, LONGITUDE, TPS, URL ] in its: conn.sqlX(sql, [ PG, PROCESSED, TERM, USR, PURPOSE, URL_ICON, URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE, DOC_ID, TP, PHONE, STREET, CITY, COUNTRY, ZIP, LATITUDE, LONGITUDE, TPS, URL ]) conn.commit()
def get_top_level(obj, foc, usr):
    """Return every LEV value for (obj, foc, usr) in SEMANTIC_OBJECT_DT2, ascending."""
    rows = conn.sqlX(
        "SELECT LEV FROM SEMANTIC_OBJECT_DT2 where OBJECT = ? and TOPICO= ? and USERNAME = ? order by LEV ",
        [obj, foc, usr])
    return [row[0] for row in rows]
def get_pages(usr, start_c): pages = [] if len(entry_doc) > 0: return entry_doc #sql='select URL,PG,I,TITLE,PURPOSE from WEB_CACHE where USR = ? and PROCESSED <> \'S\' and termo= ? and pg is not null and length(pg) > 1 order by i ' #resultSet = conn.sqlX (sql,([usr,termo])) #== resultSet = conn.sqlX(" call ret_pages (:usr,:starti )", [usr, start_c]) #== obj_nm = None idx = 1 print 'Start index:', start_c for results in resultSet: pg_add = results[0].read() pg_txt = results[1].read() pg_txt = pg_txt.replace('\n', ' ') pg_txt = pg_txt.replace('\r', ' ') id_p = results[2] title = results[3].read() purp = results[4] #print 'Page:',idx pg_txt = (title + ' ! ' + pg_txt) idx += 1 pages.append([Task_C(pg_add, pg_txt), id_p]) return pages
def get_parsers(purpose=None):
    """Return the layout-parser DT entries for a layout ontology, ordered by i.

    purpose: ontology to filter on.  Defaults to the module-level `purpos`
    global, which the original implicitly depended on -- the parameter makes
    the dependency explicit while staying backward compatible with existing
    zero-argument callers.
    """
    if purpose is None:
        purpose = purpos
    sql = "SELECT DT from knowledge_manager where USERNAME = '******' and typ=2 and DEST=\'postLayout\' and layout_onto = ? order by i "
    return [row[0] for row in conn.sqlX(sql, [purpose])]
def get_class(purp, usr):
    """Return the Label of the first classification entry for *purp*, or ''.

    BUG FIX: the predicate was written "DT= s%", which is not a valid
    placeholder (likely a mangled "DT = ?" or "DT like ?"); the bound
    parameter was never used and the statement could not run.  It is now a
    proper positional placeholder.
    NOTE(review): `usr` is accepted but unused -- kept for interface
    compatibility.
    """
    sql = "SELECT Label from knowledge_manager where USERNAME = '******' and typ=4 and DT= ? order by i "
    resultSet = conn.sqlX(sql, [purp])
    for results in resultSet:
        return results[0]   # first row wins (ordered by i)
    return ''
def post_nr(usr, tp, level=1): tp_Dt = '' for d in tp.dt: tp_Dt += d tp_name = tp_Dt for sn in tp.sinapses: sn_dt = '' for s1 in sn.nr.dr: sn_dt += s1 sql1 = "insert into SEMANTIC_OBJECT_DT(username,object,dt,topico,LEV) values(?,?,?,?,?)" try: conn.sqlX(sql1, ([usr, nameo, tp_Dt, sn_dt, level])) except: print 'Erro ao post:', nameo, tp_Dt, sn_dt #========== post_nr(usr, sn.nr, level + 1)
def ret_usr(sessao): params=[ sessao] resultSet = conn.sqlX ("SELECT USERNAME from usuarios where SESSAO= ? ",(params)) print resultSet for results in resultSet: i=results[0] return i
def get_top_level(obj, foc, usr, termo_s):
    """Return [lev, id_top] pairs for (obj, foc, usr, termo_s), ordered by LEV."""
    rows = conn.sqlX(
        "SELECT lev,id_top FROM SEMANTIC_OBJECT_DT3 where OBJECT = ? and TOPICO= ? and USERNAME = ? and UID= ? order by LEV ",
        [obj, foc, usr, termo_s])
    return [[row[0], row[1]] for row in rows]
def ret_usr_inter(sessao):
    # Return the list of [cenario] values for a user's interaction records.
    # NOTE(review): the predicate column is spelled "useranme" -- confirm the
    # interaction_usr schema really uses that spelling, otherwise this query
    # fails on every call.  Also, action_def (results[0]) is selected but
    # never read, and each appended element is a single-item list.
    params = [sessao]
    resultSet = conn.sqlX("SELECT action_def,cenario from interaction_usr where useranme= ? ", (params))
    rt = []
    for results in resultSet:
        j = results[1]
        rt.append([j])
    return rt
def hava_not_indexed(USER, TERMO):
    """Return True when at least one un-indexed 'T'-type cache row exists."""
    sqlcc = 'select * from web_cache where USR=? and termo= ? and indexed = \'N\' and tps=\'T\' and rowno < 2 '
    found = False
    for _row in conn.sqlX(sqlcc, [USER, TERMO]):
        found = True   # drain the cursor fully, as the original did
    return found
def get_pages_len(usr, termo, purpose):
    """Count unprocessed WEB_CACHE rows for (usr, purpose, termo), capped at 500.

    Dead locals (`pages`, `obj_nm`) from the original were removed.
    """
    sql = 'select count(*) from WEB_CACHE where USR = ? and PROCESSED <> \'S\' and purpose= ? and termo = ? '
    ind = 0
    for results in conn.sqlX(sql, [usr, purpose, termo]):
        ind = results[0]
    if ind > 500:
        ind = 500   # hard cap on batch size
    return ind
def post_nr(usr,tp,level=1,id_top=1,just_sin=False): try: if not just_sin: tp_Dt='' try: for d in tp.dt: if type(d) == type([] ): tp_Dt+=d[0] else: tp_Dt+=d except Exception,e: print 'Err:-nr.post(2):',tp.dt,'->',e tp_name=tp_Dt if len(tp.sinapses)==0: sql1="insert into SEMANTIC_OBJECT_DT3(UID,topico,LEV,sin,dt,id_top,username) values(?,?,1,\'\',\'\',?,?)" try: conn.sqlX (sql1,([uid,tp_Dt,id_top,usr])) except Exception,escp: print 'Erro ao post(OBJ_DT):',escp,' DT:',nameo,tp_Dt
def get_objectdt_by(objs, usr):
    """For each object, collect its first distinct topic that is not SPECIAL-PURPOSE."""
    sql = 'select distinct TOPICO from SEMANTIC_OBJECT_DT where OBJECT=? and USERNAME = ? '
    topics = []
    for obj in objs:
        for row in conn.sqlX(sql, [obj, usr]):
            topic = row[0]
            if topic.upper() != 'SPECIAL-PURPOSE':
                topics.append(topic)
                break   # one topic per object
    return topics
def clear_termo(username, cenario):
    # Purge a scenario for a user: relations first, then object DTs, then the
    # objects themselves (children before parents).
    # NOTE(review): deletes are split across two connections (conn5 for
    # relations/objects, conn for the DTs) but only conn.commit() is called
    # at the end -- confirm conn5 is autocommit, otherwise its deletes are
    # never committed.
    print 'Clear termo:', cenario, ',', username
    sql1 = " delete from SEMANTIC_RELACTIONS3 where username = ? and \"UID\" in ( select I from SEMANTIC_OBJECT3 where username = ? and cenar=? ) "
    try:
        conn5.sqlX(sql1, ([username, username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT-REL):', err, [username, username, cenario]
    sql1 = " delete from SEMANTIC_OBJECT_DT3 where username = ? and \"UID\" in ( select I from SEMANTIC_OBJECT3 where username = ? and cenar=? ) "
    try:
        conn.sqlX(sql1, ([username, username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT-DT):', err, [username, cenario]
    sql1 = " delete from SEMANTIC_OBJECT3 where username = ? and cenar = ? "
    try:
        conn5.sqlX(sql1, ([username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT):', err, [username, cenario]
    conn.commit()
def collect_objs_dest(sins, i, usr):
    """Collect [origin, opcode, foco, foco_d] for every relation ending at object *i*.

    sins: iterable of opcodes to query; NULL foco columns come back as ''.
    """
    gathered = []
    for opcode in sins:
        rows = conn.sqlX(
            "SELECT OBJ_ORIG,FOCO,FOCO_D FROM SEMANTIC_RELACTIONS where OBJ_DEST = ? and OPCODE = ? and USERNAME = ? ",
            [i, opcode, usr])
        for row in rows:
            origin = row[0]
            foco = row[1] if row[1] is not None else ''
            foco_d = row[2] if row[2] is not None else ''
            gathered.append([origin, opcode, foco, foco_d])
    return gathered
def get_objs_purp(obj, purspe, usr):
    """Return objects whose SPECIAL-PURPOSE topic carries DT equal to *purspe*.

    BUG FIX: the original selected only two columns yet read results[2]
    (guaranteed IndexError on any row) and bound two parameters against a
    single placeholder.  OBJECT is now selected as the third column and both
    parameters are bound; *purspe* is expected upper-cased as before.
    """
    rts = []
    sql = 'select DT, TOPICO, OBJECT from SEMANTIC_OBJECT_DT where OBJECT = ? and USERNAME = ? '
    for results in conn.sqlX(sql, [obj, usr]):
        DT = results[0]
        TOP = results[1]
        OB = results[2]
        if TOP.upper() == 'SPECIAL-PURPOSE' and DT.upper() == purspe:
            rts.append(OB)
    return rts
def get_pages(usr, start_c, termo, purpose):
    """Fetch one 15-row batch of unprocessed cached pages for (usr, termo, purpose).

    start_c: row offset for the LIMIT clause.  The offset must be built into
    the SQL string here, so it is coerced with int() to keep the statement
    safe if a non-integer ever reaches it.  Dead locals (`obj_nm`, `idx`)
    were removed.
    """
    sql = ('select URL,PG from WEB_CACHE where USR = ? and PROCESSED <> \'S\' and termo= ? and purpose = ? LIMIT '
           + str(int(start_c)) + ' , 15 ')
    pages = []
    for results in conn.sqlX(sql, [usr, termo, purpose]):
        pages.append(Task_C(results[0], results[1]))
    return pages
def get_pages_len(usr):
    """Count unprocessed WEB_CACHE rows for *usr*, capped at 500.

    When the module-level `entry_doc` override is populated, its length wins.
    Dead locals (`pages`, `obj_nm`) from the original were removed.
    """
    if len(entry_doc) > 0:
        return len(entry_doc)
    sql = 'select count(*) from WEB_CACHE where USR = ? and PROCESSED <> \'S\' '
    ind = 0
    for results in conn.sqlX(sql, [usr]):
        ind = results[0]
    if ind > 500:
        ind = 500   # hard cap on batch size
    return ind
def insere_usr(user_name, id, u_name):
    """Best-effort insert of a user row via the module-level sql_insert_p.

    Failures (e.g. duplicate keys) are deliberately ignored, preserving the
    original best-effort semantics.
    """
    try:
        conn.sqlX(sql_insert_p, [user_name, id, u_name])
    except Exception:
        # narrowed from a bare except so Ctrl-C / SystemExit still propagate
        pass
def get_by_keyword(is2):
    """Collect web_cache rows (ids in the comma-separated string *is2*) whose
    text carries purchase-intent keywords, skipping social-network noise.

    Side effects: every examined row is recorded in PROC_DS and deleted from
    web_cache so the reprocessing job never reconsiders it.  The batch is
    committed on success; on any error it is rolled back and [] is returned.
    NOTE(review): *is2* is concatenated into the SELECT -- it must only ever
    contain trusted, numeric ids.
    """
    # token stems that mark purchase intent (quer/precis/pode/compra/...)
    intent_stems = ('quer', 'precis', 'poderia', 'pode', 'podi', 'gostar',
                    'pensand', 'comprar', 'adquirir', 'pens', 'pegar',
                    'encontr', 'indicar')
    # page-level markers of social noise (friend notices, likes, bare links)
    noise_markers = ('are now friends', 'is now friends with', 'likes', '{like}')

    def _lob(value):
        # LOB columns come back as readable objects; None means empty
        return value.read() if value is not None else ''

    isd = []
    try:
        cursor = conn.sql(
            "SELECT PG,PROCESSED,TERMO,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL,i from web_cache where i in("
            + is2 + ") ")
        for results in cursor:
            I = results[22]
            PG = _lob(results[0])
            PROCESSED = results[1]
            TERMO = results[2]
            USR = results[3]
            PURPOSE = results[4]
            URL_ICON = _lob(results[5])
            URL_PICTURE = _lob(results[6])
            ID_USR = results[7]
            NAME_USR = results[8]
            STORY = _lob(results[9])
            TITLE = _lob(results[10])
            DOC_ID = results[11]
            TP = results[12]
            PHONE = results[13]
            STREET = results[14]
            CITY = results[15]
            COUNTRY = results[16]
            ZIP = results[17]
            LATITUDE = results[18]
            LONGITUDE = results[19]
            TPS = results[20]
            URL = _lob(results[21])
            #==========
            fnd2 = PG[:7] == 'http://' or any(m in PG for m in noise_markers)
            fnd = any(stem in w for w in tokeniz(PG) for stem in intent_stems)
            if umisc.trim(PG) == '':
                fnd = False   # blank pages never match
            if fnd and not fnd2:
                isd.append([PG, PROCESSED, TERMO, USR, PURPOSE, URL_ICON,
                            URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE,
                            DOC_ID, TP, PHONE, STREET, CITY, COUNTRY, ZIP,
                            LATITUDE, LONGITUDE, TPS, URL])
            # Move the examined row to PROC_DS (keeping only I,DOC_ID) so the
            # reprocessing job will not consider it again.
            # NOTE(review): the collapsed original was ambiguous about whether
            # this applied only to matches; the source comment says these
            # documents must never be reconsidered, so it runs for every row.
            conn.sqlX('insert into PROC_DS (ID,DOC_ID) values(?,?)', [I, DOC_ID])
            conn.sqlX('delete from web_cache where I=?', [I])
    except Exception:
        log.exception("")
        conn.rollback()
        return []
    conn.commit()
    return isd
def post_object_by_data(layer, usr, termo, foco):
    # Persist a semantic layer: the object row, its topic tree, and its links.
    # NOTE(review): several latent bugs are flagged inline below but left
    # untouched because the intended fix is not determinable from this file.
    def get_top_level(obj, foc, usr):
        # Return the LEV values recorded for (obj, foc, usr), ascending.
        rts = []
        resultSet = conn4.sqlX(
            "SELECT LEV FROM SEMANTIC_OBJECT_DT where OBJECT = ? and TOPICO= ? and USERNAME = ? order by LEV ",
            ([obj, foc, usr]))
        for results in resultSet:
            i = results[0]
            rts.append(i)
        return rts
    #=======================
    nameo = layer.name
    sql1 = "insert into SEMANTIC_OBJECT(username,objeto,TERMO) values(?,?,?)"
    # BUG(review): `sql` is undefined here -- almost certainly meant `sql1`;
    # as written this raises NameError before anything is inserted.
    conn.sqlX(sql, ([usr, nameo, termo]))
    # BUG(review): `topicos` is a free name -- presumably layer.topicos.
    for tp in topicos:
        #========== ===============================================
        def post_nr(usr, tp, level=1):
            # Recursively insert the topic subtree rooted at `tp`.
            tp_Dt = ''
            for d in tp.dt:
                tp_Dt += d
            tp_name = tp_Dt   # NOTE(review): assigned but never used
            for sn in tp.sinapses:
                sn_dt = ''
                for s1 in sn.nr.dr:
                    sn_dt += s1
                sql1 = "insert into SEMANTIC_OBJECT_DT(username,object,dt,topico,LEV) values(?,?,?,?,?)"
                try:
                    conn.sqlX(sql1, ([usr, nameo, tp_Dt, sn_dt, level]))
                except:
                    print 'Erro ao post:', nameo, tp_Dt, sn_dt
                    log.exception("================")
                #==========
                post_nr(usr, sn.nr, level + 1)
        #========== ===============================================
        def post_nr2(usr, topdt, new_dt, sin_dt, level):
            # Insert a single pre-flattened topic row.
            sql1 = "insert into SEMANTIC_OBJECT_DT(username,object,dt,topico,LEV) values(?,?,?,?,?)"
            try:
                conn.sqlX(sql1, ([usr, topdt, new_dt, sin_dt, level]))
            except:
                print 'Erro ao post:', topdt, new_dt, sin_dt, level
                log.exception("================")
        #========== ===============================================
        if len(foco) == 0:
            post_nr(usr, tp)
        else:
            for l in foco:
                # find the topic and its level
                dts = ''
                for ldt in l.dt:
                    dts += ldt
                # NOTE(review): `l` (an object) is passed where get_top_level
                # expects a topic value -- confirm intended.
                level = get_top_level(nameo, l, usr)
                if len(level) > 0:
                    for level_s in level:
                        # NOTE(review): tp.dt is a list, yet post_nr2 binds it
                        # to a scalar column -- confirm intended.
                        post_nr2(usr, dts, tp.dt, l.opcode, level_s)
    #===============================================
    for lnk in layer.links:
        sqlc = 'insert into SEMANTIC_RELACTIONS(OBJ_ORIG,OBJ_DEST,OPCODE,USERNAME,FOCO,FOCO_D) values(?,?,?,?,?,?)'
        #====================
        def get_nr_dts1(nrs):
            # Flatten a list of nodes into a comma-separated dt string.
            d = ''
            for nr in nrs:
                for n in nr.dt:
                    d += n
                d += ','
            return d
        #====================
        foco_o = get_nr_dts1(lnk.foco_o)
        foco_d = get_nr_dts1(lnk.foco_d)
        conn.sqlX(sqlc, ([nameo, lnk.lr.name, lnk.opcode, usr, foco_o, foco_d]))
        #===============
        # BUG(review): `post_object_data` is undefined -- probably a recursive
        # call to post_object_by_data with the termo/foco arguments missing.
        post_object_data(lnk.lr, usr)