def entry(usr): print 'start base table....' mdER5.process_base_tb() print 'Start compile:', usr mdER5.cclear() mdER5.compile_rct(usr) conn.commit()
def mathcDetail2DB(): cursor.execute( 'select matchId from matchDetail order by matchid desc limit 1') r1 = cursor.fetchall() cursor.execute( 'select distinct matchId from accountPlayed where matchId > %s' % r1[0]['matchId']) r2 = cursor.fetchall() count = 1 for item in r2: detail = api.get_match_details(item['matchId']) matchId = detail['match_id'] startTime = detail['start_time'] duration = detail['duration'] / 60 #单位:分钟 firstBloodTime = detail['first_blood_time'] gameMode = detail['game_mode'] value = [matchId, startTime, duration, firstBloodTime, gameMode] cursor.execute( 'INSERT INTO matchDetail (matchId,startTime,duration,firstBloodTime,gameMode) values(%s,%s,%s,%s,%s)', value) conn.commit() count = count + 1 print count
def reopen(ids): print 're-open:',ids try: conn.sql("insert into to_reopen(ids) values( "+str(ids) +")" ) conn.commit() except: print 'Eror reopen:' log.exception("")
def reopen2(ids): print 're-open:',ids try: conn.sql("update fcb_users set indexed='N' where ID=\'"+str(ids) +'\'' ) conn.commit() except: print 'Eror reopen:' log.exception("")
def post_cmd(arr,usr,u_nm,idk): try: print 'insert rows.id(',idk,'):',len(arr) for its in arr: [a,u_id,u_name]=its insere_usr('',u_id,u_name) conn.commit() except: log.exception("error insert rowds.id("+str(idk)+")") print 'insert',idk,' OK!!'
def reopen(ids): a_lock.acquire() print 're-open:',ids try: conn.sql("update fcb_users set indexed='N' where ID=\'"+str(ids) +'\'' ) conn.commit() except: print 'Eror reopen:' log.exception("") a_lock.release()
def hero2DB(): heroes = api.get_heroes() for i in xrange(0, 113): heroId = heroes['heroes'][i]['id'] localized_name = heroes['heroes'][i]['localized_name'] nickName = cf.get('heroname', localized_name) value = [heroId, nickName] cursor.execute('INSERT INTO hero (heroId,nickName) values(%s,%s)', value) print heroId, nickName conn.commit()
def played2DB():
    # For every friend account, page through its match history (at most 100
    # pages) and insert one accountPlayed row per match with that player's
    # per-match stats.  Commits after every insert.
    for k, v in friden.items():
        print k
        matchHistory = api.get_match_history(account_id=v)
        for i in xrange(0, 100):
            for l in xrange(0, len(matchHistory['matches'])):
                matchId = matchHistory['matches'][l]['match_id']
                # Find which of the 10 player slots belongs to this account.
                # NOTE(review): if the account is not among the 10 players,
                # `floor`/`heroId` keep their value from the previous match
                # (or are unbound on the first one) — confirm the API always
                # includes the queried account.
                for j in xrange(0, 10):
                    if v == matchHistory['matches'][l]['players'][j][
                            'account_id']:
                        floor = j + 1
                        heroId = matchHistory['matches'][l]['players'][j][
                            'hero_id']
                        break
                if floor > 5:
                    isDirOrRadin = 1  # slots 6-10: Dire side
                else:
                    isDirOrRadin = 0  # slots 1-5: Radiant side
                matchDetail = api.get_match_details(matchId)
                isRadinWin = matchDetail['radiant_win']
                # XOR to decide this match's win/loss: bool(xxx) != bool(xxx)
                WinorLoss = isDirOrRadin != isRadinWin
                if WinorLoss:
                    flag = 1
                else:
                    flag = 0
                # Per-player stats live at index floor-1 of the details.
                assists = matchDetail['players'][floor - 1]['assists']
                deaths = matchDetail['players'][floor - 1]['deaths']
                denies = matchDetail['players'][floor - 1]['denies']
                heroDamage = matchDetail['players'][floor - 1]['hero_damage']
                kills = matchDetail['players'][floor - 1]['kills']
                lastHits = matchDetail['players'][floor - 1]['last_hits']
                goldSpent = matchDetail['players'][floor - 1]['gold_spent']
                goldPerMin = matchDetail['players'][floor - 1]['gold_per_min']
                xpPerMin = matchDetail['players'][floor - 1]['xp_per_min']
                heroHealing = matchDetail['players'][floor - 1]['hero_healing']
                towerDamage = matchDetail['players'][floor - 1]['tower_damage']
                value = [
                    v, matchId, heroId, floor, flag, assists, denies, deaths,
                    goldPerMin, kills, lastHits, heroDamage, heroHealing,
                    goldSpent, xpPerMin, towerDamage
                ]
                cursor.execute(
                    'INSERT INTO accountPlayed (accountId,matchId,heroId,floor,winOrLoss,assists,denies,deaths,goldPerMin,kills,lastHits,heroDamage,heroHealing,goldSpent,xpPerMin,towerDamage) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)',
                    value)
                conn.commit()
            # Page forward from the last match seen; when the last id does
            # not advance we have reached the end of this account's history.
            lastHistory = matchHistory['matches'][len(matchHistory['matches'])
                                                  - 1]['match_id']
            matchHistory = api.get_match_history(account_id=v,
                                                 start_at_match_id=lastHistory)
            if lastHistory == matchHistory['matches'][
                    len(matchHistory['matches']) - 1]['match_id']:
                break
def get_competition_id():
    # Scrape 17 pages of Kaggle's competitions JSON endpoint and insert each
    # previously unseen competition into userdb.kaggle_competitions.
    # NOTE(review): the INSERT is built by string formatting; the .replace()
    # calls below only strip single quotes and do not fully escape values.
    for i in range(1, 18):
        url = 'https://www.kaggle.com/competitions.json?sortBy=recentlycreated&pageSize=20&page=' + str(
            i)
        content = requests.get(url, headers=headers).content
        js = json.loads(content).get('pagedCompetitionGroup').get(
            'competitions')
        for e in js:
            # `id` shadows the builtin; kept as-is to leave code unchanged.
            id = e.get('competitionId')
            # Skip competitions already stored (execute() returns a rowcount).
            if cur.execute(
                    'select * from userdb.kaggle_competitions where id =' +
                    str(id) + ';'):
                continue
            title = e.get('competitionTitle')
            description = e.get('competitionDescription')
            url = e.get('competitionUrl')
            team_num = e.get('totalTeams')
            kernel_num = e.get('totalKernels')
            reward = e.get('rewardDisplay')
            level = e.get('hostSegment')
            enabledata = e.get('enabledDate')[0:10]   # keep date part only
            deadline = e.get('deadline')[0:10]
            datatype = []
            analysis = []
            problemtype = []
            category = []
            em = e.get("evaluationMetric")
            # Split the category tree into data-type / analysis / problem-type
            # buckets based on the 'fullPath' prefix ("xxx > name").
            for c in e.get('categories').get('categories'):
                cid = c.get('id')            # NOTE(review): unused
                cfullpath = c.get('fullPath')
                cname = c.get('name')
                category.append(cname)
                if ('data type' in cfullpath):
                    datatype.append(cfullpath[cfullpath.find('>') + 2:])
                if ('analysis' in cfullpath):
                    analysis.append(cfullpath[cfullpath.find('>') + 2:])
                if ('problem type' in cfullpath):
                    problemtype.append(cfullpath[cfullpath.find('>') + 2:])
            sql = "insert into userdb.kaggle_competitions values (" \
                "'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                    id, title.replace("'", ' '), reward, level,
                    str(datatype).replace("'", '"'),
                    str(problemtype).replace("'", '"'),
                    str(analysis).replace("'", '"'),
                    str(category).replace("'", '"'),
                    description.replace("'", '"'), url.replace("'", '"'),
                    team_num, kernel_num, enabledata, deadline, em)
            try:
                cur.execute(sql)
                conn.commit()
            except:
                # Failed inserts are only echoed, not retried or logged.
                print(sql)
def f1(): count = 1 cursor.execute('select matchId from matchDetail order by matchid desc ') r1 = cursor.fetchall() for matchId in r1: detail = api.get_match_details(matchId['matchId']) gameMode = detail['game_mode'] matchId = detail['match_id'] value = [gameMode, matchId] cursor.execute( 'Update matchDetail set gameMode = %s where matchId = %s', value) conn.commit() count = count + 1 print count
def c_reopen():
    """Drain up to 500 rows from to_reopen: re-open each id, then delete the rows."""
    a_lock.acquire()
    try:
        cur = conn.sql("select i,ids from to_reopen where rowno <= 500")
        dels = []
        for re in cur:
            i = re[0]
            dels.append(i)
            ids = re[1]
            reopen2(ids)
        conn.commit()
        for c in dels:
            # BUG FIX: the original referenced the undefined name 'ic' here,
            # raising NameError on the first delete; the loop variable is 'c'.
            conn.sql("delete from to_reopen where i=" + str(c))
        conn.commit()
    finally:
        # Release under finally so a failure cannot leave the lock held.
        a_lock.release()
def clean_all():
    """Reset web_cache3 index state and empty the derived index tables."""
    global usr2
    # Run the reset plus the three table purges in sequence, then commit once.
    statements = [
        "update web_cache3 set indexed='N' where USR='******' ",
        " delete from WEB_CACHE3_IDX2__2 ",
        " delete from WEB_CACHE3_IDX ",
        " delete from WEB_CACHE3_IDX2 ",
    ]
    for sq in statements:
        cursor = conn.sql(sq)
    conn.commit()
def account2DB(): for name in ['brave', 'bear', 'monkey1', 'man', 'monkey2', 'beard']: accountId = cf.getint('constant', name) playdetail = api.get_player_summaries(accountId) if not len(playdetail['players']): continue avatar = playdetail['players'][0]['avatarfull'] nickName = playdetail['players'][0]['personaname'] timeCreated = playdetail['players'][0]['timecreated'] value = [accountId, str(nickName), str(avatar), timeCreated] cursor.execute( 'INSERT INTO account (accountId,nickName,avatar,timeCreated) values(%s,%s,%s,%s)', value) print accountId, nickName, avatar conn.commit()
def post_cmd(arr, usr, u_nm): a_lock.acquire() try: print 'insert rows.id:', len(arr) for its in arr: [a, u_id, u_name] = its insere_usr('', u_id, u_name) if True: conn.sql("update fcb_users set indexed='S' where i= " + str(usr)) conn.commit() print 'Close usr:', u_nm except: pass a_lock.release()
def post_cn2(its): print 'POST.LEN:', len(its) sql = ' insert into web_cache3(PG,PROCESSED,TERMO,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)' #for [PG,PROCESSED,TERM,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL] in its: for [ PG, PROCESSED, TERM, USR, PURPOSE, URL_ICON, URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE, DOC_ID, TP, PHONE, STREET, CITY, COUNTRY, ZIP, LATITUDE, LONGITUDE, TPS, URL ] in its: conn.sqlX(sql, [ PG, PROCESSED, TERM, USR, PURPOSE, URL_ICON, URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE, DOC_ID, TP, PHONE, STREET, CITY, COUNTRY, ZIP, LATITUDE, LONGITUDE, TPS, URL ]) conn.commit()
def get_dist_u_next(): a_lock.acquire() isd=[] try: cursor = conn.sql("SELECT distinct ID,i from fcb_users where indexed='N' and rowno < 2 ") for results in cursor: ids=results[0] i=results[1] isd=[ids,i] conn.sql("update fcb_users set indexed='S' where i= "+str(i)) conn.commit() print 'Close usr(1):',ids break except: pass a_lock.release() return isd
# NOTE(review): this block has been mangled by an automated redaction pass —
# the '******' runs replaced what were presumably print arguments and part of
# a conn.sql("update fcb_users set indexed='S' ...") call, leaving the line
# syntactically invalid Python.  Recover the original from version control
# before editing; the code is kept byte-identical below.
def index_subs(): c=0 while c < 100000 : [usrs,fc]=get_dist_u() c2=0 for u in usrs: print 'Process usr:'******'Close usr:'******'S' where i= "+str(fc[c2])) conn.commit() err=False except: pass c2+=1 c+=1
def enen(ccount, cid, last_id):
    """Page through a competition's kernels and insert unseen ones, recursing
    with the last kernel id as the pagination cursor.

    ccount  -- caller's counter, passed through unchanged to the recursion
    cid     -- Kaggle competition id
    last_id -- kernel id to page after ('&after=' cursor)

    NOTE(review): the recursion has no explicit termination guard beyond the
    `len(content) < 3` early return; confirm the endpoint eventually returns
    an empty page.
    """
    print(ccount, cid, last_id)
    url = 'https://www.kaggle.com/kernels.json?sortBy=scoreDescending&group=everyone&pageSize=20&after=' + str(
        last_id) + '&competitionId=' + str(cid)
    try:
        content = requests.get(url, headers=headers).content
    except:
        print('cannot get kernels of competition : id' + str(cid))
    try:
        # Empty/near-empty payload means no more pages.
        if len(content) < 3:
            return
    except:
        # NOTE(review): `get_kernels` is not defined in this file — presumably
        # an older name for this function; confirm before relying on it.
        get_kernels(cid, last_id)
    js = json.loads(content)
    for i in js:
        kid = i.get('id')
        last_id = kid
        # Skip kernels already stored (execute() returns a rowcount).
        if cur.execute(
                'select * from userdb.kaggle_competitions_kernels where kernel_id ='
                + str(kid) + ';'):
            continue
        language = i.get('aceLanguageName')
        medal = i.get('medal')
        best_score = str(i.get('bestPublicScore'))
        title = i.get('title').replace("'", ' ')
        url = i.get('scriptUrl')
        votes = i.get('totalVotes')
        content = ""
        sql = "insert into userdb.kaggle_competitions_kernels values (" \
            "'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                kid, cid, title, votes, best_score, medal, language, url,
                content, content)
        try:
            cur.execute(sql)
            conn.commit()
        except:
            print(sql)
            # BUG FIX: the original passed mode 'classifier' to codecs.open,
            # which raises ValueError inside this error handler; 'a' (append)
            # is the mode consistent with accumulating failed statements.
            with codecs.open("error.txt", 'a', 'utf-8') as f:
                f.writelines(sql)
                f.write('\n')
    enen(ccount, cid, last_id)
# NOTE(review): this block (a threaded variant of index_subs that spawns page
# workers, waits for all of them via the .finished flag, inserts the collected
# rows and closes each user) has been mangled by an automated redaction pass —
# the '******' runs replaced print arguments and part of a
# conn.sql("update fcb_users set indexed='S' ...") call, leaving the line
# syntactically invalid Python.  Recover the original from version control
# before editing; the code is kept byte-identical below.
def index_subs(): c=0 while c < 1 : [usrs,fc]=get_dist_u() ths=[] for u in usrs: print 'Process usr:'******'',ths[len(ths)-1] ) ) #== except:pass ind_col=0 while True: print 'wait for pages...',len(ths)-ind_col fnds_t=False ind_col=0 for ths1 in ths: if not ths1.finished:fnds_t=True if ths1.finished: ind_col+=1 if fnds_t: time.sleep(10) continue else: break #============================= print 'insert rows.id:',len(to_ins) for its in to_ins: [a,u_id,u_name]=its insere_usr('',u_id,u_name) time.sleep( 2 ) indc2=0 for fcs in fc: print 'Close usr:'******'S' where i= "+str(fcs)) conn.commit() indc2+=1 c+=1 to_ins=[]
def clear_termo(username, cenario):
    # Remove one semantic scenario for a user: relations first, then the
    # per-object data rows, then the objects themselves.  Each step is
    # best-effort with its own error report so one failure does not block
    # the following deletes.
    print 'Clear termo:', cenario, ',', username
    sql1 = " delete from SEMANTIC_RELACTIONS3 where username = ? and \"UID\" in ( select I from SEMANTIC_OBJECT3 where username = ? and cenar=? ) "
    try:
        conn5.sqlX(sql1, ([username, username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT-REL):', err, [username, username, cenario]
    sql1 = " delete from SEMANTIC_OBJECT_DT3 where username = ? and \"UID\" in ( select I from SEMANTIC_OBJECT3 where username = ? and cenar=? ) "
    try:
        # NOTE(review): this delete runs on `conn` while the other two use
        # `conn5` — confirm whether the connection mix is intentional.
        conn.sqlX(sql1, ([username, username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT-DT):', err, [username, cenario]
    sql1 = " delete from SEMANTIC_OBJECT3 where username = ? and cenar = ? "
    try:
        conn5.sqlX(sql1, ([username, cenario]))
    except Exception, err:
        print 'Erro ao del(OBJECT):', err, [username, cenario]
    # NOTE(review): only `conn` is committed here; conn5's deletes are not —
    # verify conn5 autocommits or is committed elsewhere.
    conn.commit()
def get_dist_u_next2(): a_lock.acquire() isd=[] closes=[] try: cursor = conn.sql("SELECT distinct ID,i from fcb_users where indexed='N' and rowno <= 1 ") isd=[] for results in cursor: ids=results[0] i=results[1] isd.append([ids,i]) closes.append([i,ids]) #============== for [cl,u_nm] in closes: conn.sql("update fcb_users set indexed='S' where i= "+str(cl)) print 'Close usr(2):',u_nm conn.commit() #== except Exception, err2: print 'Error collect:',err2
def entry(args): usuario='' ext='' try: start_c=0 usr=0 usr=args[0] sentence=args[1] purp=args[2] tp_n=args[3] # type of neural objects termo=args[4] # type of neural objects mdNeural.kstermo=[termo,False] #print 'Cmd line :',usr mdNeural.self_usr=usr mdNeural.type_coll=tp_n usuario=Gentry(usr,sentence,purp) conn.commit () except Exception,err: log.exception( 'Error process sentences:' ) ext='Error process sentences:'+err.__str__()
def commitTransaction(self):
    """Commit the pending transaction on the default Qt SQL connection."""
    db = QtSql.QSqlDatabase.database()
    db.commit()
# NOTE(review): this top-level script tail (an inlined copy of the index_subs
# loop followed by a get_feeds(sys.argv[1]) driver) has been mangled by an
# automated redaction pass — the '******' runs replaced print arguments and
# part of a conn.sql("update fcb_users set indexed='S' ...") call, leaving it
# syntactically invalid Python.  Recover the original from version control
# before editing; the code is kept byte-identical below.
[usrs,fc]=get_dist_u() c2=0 for u in usrs: print 'Process usr:'******'Close usr:'******'S' where i= "+str(fc[c2])) conn.commit() err=False except: pass c2+=1 c+=1 a=sys.argv[1] try: get_feeds(a) except: pass conn.commit() #get_usrs("100002272680690")
def get_by_keyword(is2):
    # Searches for certain keywords to extract 'samples' — code samples used to
    # calibrate and train the fuzzy processor.  `is2` is a comma-separated id
    # list spliced into the IN(...) clause.
    # NOTE(review): string-built SQL; safe only while is2 is program-generated.
    isd = []
    try:
        cursor = conn.sql(
            "SELECT PG,PROCESSED,TERMO,USR,PURPOSE,URL_ICON,URL_PICTURE,ID_USR,NAME_USR,STORY,TITLE,DOC_ID,TP,PHONE,STREET,CITY,COUNTRY,ZIP,LATITUDE,LONGITUDE,TPS,URL,i from web_cache where i in(" + is2 + ") ")
        for results in cursor:
            I = results[22]  # row id, used for the move-to-processed below
            PG = results[0]
            PROCESSED = results[1]
            TERMO = results[2]
            USR = results[3]
            PURPOSE = results[4]
            URL_ICON = results[5]
            URL_PICTURE = results[6]
            ID_USR = results[7]
            NAME_USR = results[8]
            STORY = results[9]
            TITLE = results[10]
            DOC_ID = results[11]
            TP = results[12]
            PHONE = results[13]
            STREET = results[14]
            CITY = results[15]
            COUNTRY = results[16]
            ZIP = results[17]
            LATITUDE = results[18]
            LONGITUDE = results[19]
            TPS = results[20]
            URL = results[21]
            # LOB columns: .read() materializes the value; None becomes ''.
            if PG != None:
                PG = PG.read()
            else:
                PG = ''
            if URL_ICON != None:
                URL_ICON = URL_ICON.read()
            else:
                URL_ICON = ''
            if URL_PICTURE != None:
                URL_PICTURE = URL_PICTURE.read()
            else:
                URL_PICTURE = ''
            if STORY != None:
                STORY = STORY.read()
            else:
                STORY = ''
            if TITLE != None:
                TITLE = TITLE.read()
            else:
                TITLE = ''
            if URL != None:
                URL = URL.read()
            else:
                URL = ''
            words = tokeniz(PG)
            fnd = False   # True when an intent keyword is found
            fnd2 = False  # True for noise posts (friendship/like/bare links)
            if 'are now friends' in PG:
                fnd2 = True
            elif 'is now friends with' in PG:
                fnd2 = True
            elif PG[:7] == 'http://':
                fnd2 = True
            elif 'likes' in PG:
                fnd2 = True
            elif '{like}' in PG:
                fnd2 = True
            # Portuguese intent stems: want/need/could/like/thinking/buy/
            # acquire/get/find/recommend.
            for w in words:
                if 'quer' in w: fnd = True
                elif 'precis' in w: fnd = True
                elif 'poderia' in w: fnd = True
                elif 'pode' in w: fnd = True
                elif 'podi' in w: fnd = True
                elif 'gostar' in w: fnd = True
                elif 'pensand' in w: fnd = True
                elif 'comprar' in w: fnd = True
                elif 'adquirir' in w: fnd = True
                elif 'pens' in w: fnd = True
                elif 'pegar' in w: fnd = True
                elif 'encontr' in w: fnd = True
                elif 'indicar' in w: fnd = True
            if umisc.trim(PG) == '':
                fnd = False
            if fnd and not fnd2:
                isd.append([
                    PG, PROCESSED, TERMO, USR, PURPOSE, URL_ICON, URL_PICTURE,
                    ID_USR, NAME_USR, STORY, TITLE, DOC_ID, TP, PHONE, STREET,
                    CITY, COUNTRY, ZIP, LATITUDE, LONGITUDE, TPS, URL
                ])
            # Delete the item, recording only (I, DOC_ID) in the processed
            # table so the reprocessing pass no longer considers these docs.
            # NOTE(review): placement at loop level (every scanned row, not
            # only matches) inferred from the flattened source — confirm.
            conn.sqlX('insert into PROC_DS (ID,DOC_ID) values(?,?)', [I, DOC_ID])
            conn.sqlX('delete from web_cache where I=?', [I])
    except:
        log.exception("")
        conn.rollback()
        return []
    conn.commit()
    return isd
def commit(self):
    """Flush pending work on the shared module-level connection."""
    conn.commit()
def clean(USERNAME, termo, purpose):
    """Delete the temporary (tps='T') web_cache rows for this user and term.

    `purpose` is accepted for interface compatibility but not used here.
    """
    params = [USERNAME, termo]
    conn.sqlX(" delete from web_cache where usr=? and termo=? and tps='T' ",
              params)
    conn.commit()
def post_termo2(it, termo, purpose, user):
    # Normalize one scraped post record into the 21-value web_cache row shape.
    # NOTE(review): the actual INSERT is commented out below — as written this
    # function only normalizes the data and commits; confirm whether the
    # prepare/execute was disabled intentionally.
    [
        msg_id, from_msg, msg_story, msg_caption, msg_description, msg_picture,
        msg_link, msg_name, msg_icon, msg_type, msg_likes, msg_message,
        msg_location, msg_phone
    ] = it
    # Replace missing fields with empty strings so later .encode/.replace
    # calls do not blow up on None.
    if msg_phone == None: msg_phone = ''
    if msg_message == None: msg_message = ''
    if msg_caption == None: msg_caption = ''
    if msg_description == None: msg_description = ''
    if msg_name == None: msg_name = ''
    if msg_story == None: msg_story = ''
    if msg_picture == None: msg_picture = ''
    if msg_link == None: msg_link = ''
    if msg_icon == None: msg_icon = ''
    if msg_type == None: msg_type = ''
    # Fall back to a synthetic author entry when the post has no sender.
    if len(from_msg) == 0:
        from_msg.append(['0', msg_name])
    msg_caption = msg_caption + ' ' + msg_name.encode('latin-1', 'ignore')
    # Best-effort latin-1 transcoding of every text field; failures keep the
    # original value.
    try:
        msg_link = msg_link.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_message = msg_message.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_description = msg_description.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_icon = msg_icon.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_picture = msg_picture.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_story = msg_story.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_caption = msg_caption.encode('latin-1', 'ignore')
    except:
        pass
    try:
        msg_type = msg_type.encode('latin-1', 'ignore')
    except:
        pass
    street = ''
    city = ''
    country = ''
    zip = ''
    latitude = ''
    longitude = ''
    if len(msg_location) > 0:
        [street, city, country, zip, latitude, longitude] = msg_location
    # Page body = message + description; strip literal and escaped newlines.
    rpg = (msg_message + ' ' + msg_description)
    msg_link = msg_link.replace('\n', '')
    rpg = rpg.replace('\n', '')
    msg_icon = msg_icon.replace('\n', '')
    msg_picture = msg_picture.replace('\n', '')
    msg_story = msg_story.replace('\n', '')
    msg_caption = msg_caption.replace('\n', '')
    msg_link = msg_link.replace('\\n', '')
    rpg = rpg.replace('\\n', '')
    msg_icon = msg_icon.replace('\\n', '')
    msg_picture = msg_picture.replace('\\n', '')
    msg_story = msg_story.replace('\\n', '')
    msg_caption = msg_caption.replace('\\n', '')
    # Row in web_cache column order (url, pg, termo, usr, purpose, processed,
    # icons, author id/name, story, title, doc_id, type, phone, location).
    mt = [
        msg_link, rpg, termo, user, purpose, 'N', msg_icon, msg_picture,
        from_msg[0], from_msg[1].encode('latin-1', 'ignore'), msg_story,
        msg_caption, msg_id, msg_type, msg_phone, street, city, country, zip,
        latitude, longitude
    ]
    #prep=conn.prepare(" insert into web_cache(url,pg,termo,usr,purpose,processed,url_icon,url_picture,id_usr,name_usr,story,title,doc_id,tp,phone,street,city,country,zip,latitude,longitude) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) ")
    #prep.execute(mt)
    conn.commit()
def entry(usr): print 'Start compile:', usr mdER.compile_rct(usr) conn.commit()
def get_by_keyword(is2):
    # Searches for certain keywords to extract 'samples' — code samples used to
    # calibrate and train the fuzzy processor.  Key/value-store variant of the
    # earlier SQL version: `is2` is an iterable of w_cache keys.
    # NOTE(review): this redefines get_by_keyword() from earlier in the file.
    isd = []
    cnc = False
    for ch in is2:
        try:
            cnc = False
            try:
                rows = w_cache.get(ch)
            except:
                cnc = True  # missing key: skip this id
            if cnc:
                continue
            if True:
                print 'Print pg:', ch
                PG = rows[u'pg']
                PROCESSED = rows[u'processed']
                TERMO = rows[u'termo']
                USR = rows[u'usr']
                PURPOSE = rows[u'purpose']
                URL_ICON = rows[u'url_icon']
                URL_PICTURE = rows[u'url_picture']
                ID_USR = float(rows[u'id_usr'])
                NAME_USR = rows[u'name_usr']
                STORY = rows[u'story']
                TITLE = rows[u'title']
                DOC_ID = rows[u'doc_id']
                TP = rows[u'tp']
                # Location fields are not stored in this cache; blank them.
                PHONE = ''
                STREET = ''
                CITY = ''
                COUNTRY = ''
                ZIP = ''
                LATITUDE = ''
                LONGITUDE = ''
                TPS = rows['tps']
                URL = rows['url']
                # None-guards kept from the LOB version; here values are
                # already plain strings so only the None→'' branch matters.
                if PG != None:
                    pass
                else:
                    PG = ''
                if URL_ICON != None:
                    pass
                else:
                    URL_ICON = ''
                if URL_PICTURE != None:
                    pass
                else:
                    URL_PICTURE = ''
                if STORY != None:
                    pass
                else:
                    STORY = ''
                if TITLE != None:
                    pass
                else:
                    TITLE = ''
                if URL != None:
                    pass
                else:
                    URL = ''
                words = tokeniz(PG)
                fnd = False   # True when an intent keyword is found
                fnd2 = False  # True for noise posts (friendship/like/links)
                if 'are now friends' in PG:
                    fnd2 = True
                elif 'is now friends with' in PG:
                    fnd2 = True
                elif PG[:7] == 'http://':
                    fnd2 = True
                elif 'likes' in PG:
                    fnd2 = True
                elif '{like}' in PG:
                    fnd2 = True
                # Portuguese intent stems: want/need/could/like/thinking/buy/
                # acquire/get/find/recommend.
                for w in words:
                    if 'quer' in w: fnd = True
                    elif 'precis' in w: fnd = True
                    elif 'poderia' in w: fnd = True
                    elif 'pode' in w: fnd = True
                    elif 'podi' in w: fnd = True
                    elif 'gostar' in w: fnd = True
                    elif 'pensand' in w: fnd = True
                    elif 'comprar' in w: fnd = True
                    elif 'adquirir' in w: fnd = True
                    elif 'pens' in w: fnd = True
                    elif 'pegar' in w: fnd = True
                    elif 'encontr' in w: fnd = True
                    elif 'indicar' in w: fnd = True
                if umisc.trim(PG) == '':
                    fnd = False
                if fnd and not fnd2:
                    isd.append([
                        PG, PROCESSED, TERMO, USR, PURPOSE, URL_ICON,
                        URL_PICTURE, ID_USR, NAME_USR, STORY, TITLE, DOC_ID,
                        TP, PHONE, STREET, CITY, COUNTRY, ZIP, LATITUDE,
                        LONGITUDE, TPS, URL
                    ])
                # Delete the item, recording its key in the processed store so
                # the reprocessing pass no longer considers these documents.
                #I=0
                #conn.sqlX('insert into PROC_DS (ID,DOC_ID) values(?,?)',[I,ch])
                proc_ds.insert(ch, {'ch': ch})
                w_cache.remove(ch)
        except:
            log.exception("")
            conn.rollback()
            return []
    conn.commit()
    return isd
def post_termo(it, termo, purpose, user, prep):
    # Normalize a batch of scraped posts and insert each into web_cache via a
    # prepared statement (tps fixed to 'F').  The `prep` parameter is shadowed
    # by the conn.prepare() below.
    print 'post text:', len(it)
    try:
        #ac.append( [ from_id,object_id,msg_story,msg_caption,msg_description,msg_picture,link,msg_name,icon,type,msg_likes,message ] )
        prep = conn.prepare(" insert into web_cache(url,pg,termo,usr,purpose,processed,url_icon,url_picture,id_usr,name_usr,story,title,doc_id,tp,tps) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,'F') ")
        for its in it:
            [
                from_msg, msg_id, msg_story, msg_caption, msg_description,
                msg_picture, msg_link, msg_name, msg_icon, msg_type, msg_likes,
                msg_message
            ] = its
            # Replace missing fields with '' so encode/replace below is safe.
            if msg_message == None: msg_message = ''
            if msg_caption == None: msg_caption = ''
            if msg_description == None: msg_description = ''
            if msg_name == None: msg_name = ''
            if msg_story == None: msg_story = ''
            if msg_picture == None: msg_picture = ''
            if msg_link == None: msg_link = ''
            if msg_icon == None: msg_icon = ''
            if msg_type == None: msg_type = ''
            # Synthetic author entry when the post has no sender.
            if len(from_msg) == 0:
                from_msg.append(['0', msg_name])
            # Best-effort latin-1 transcoding; failures keep the original.
            try:
                msg_caption = msg_caption.encode('latin-1', 'ignore')
            except:
                pass
            try:
                m1 = msg_name.encode('latin-1', 'ignore')
            except Exception, err:
                print 'Erro msg_caption(1):', err, msg_caption, msg_name
            try:
                msg_caption = msg_caption + ' ' + m1
            except Exception, err:
                print 'Erro msg_caption(2):', err, msg_caption, msg_name
            try:
                msg_link = msg_link.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_message = msg_message.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_description = msg_description.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_icon = msg_icon.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_picture = msg_picture.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_story = msg_story.encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_type = msg_type.encode('latin-1', 'ignore')
            except:
                pass
            try:
                from_msg[1] = from_msg[1].encode('latin-1', 'ignore')
            except:
                pass
            try:
                msg_id = msg_id.encode('latin-1', 'ignore')
            except:
                pass
            # Strip literal and escaped newlines from every text field.
            msg_link = msg_link.replace('\n', '')
            msg_icon = msg_icon.replace('\n', '')
            msg_picture = msg_picture.replace('\n', '')
            msg_story = msg_story.replace('\n', '')
            msg_caption = msg_caption.replace('\n', '')
            msg_message = msg_message.replace('\n', '')
            msg_description = msg_description.replace('\n', '')
            msg_link = msg_link.replace('\\n', '')
            msg_message = msg_message.replace('\\n', '')
            msg_description = msg_description.replace('\\n', '')
            msg_icon = msg_icon.replace('\\n', '')
            msg_picture = msg_picture.replace('\\n', '')
            msg_story = msg_story.replace('\\n', '')
            msg_caption = msg_caption.replace('\\n', '')
            all_msg = msg_message + ' ' + msg_description
            # Status posts carry their text in the story; mark the owner link.
            if msg_type == u'status':
                all_msg = msg_story
                msg_link = '{owner}'
            #from_msg[0]=float(from_msg[0])
            #if umisc.trim(msg_id) == '' : msg_id='-'
            #conn.sqlX("insert into web_cache_sign(ID,ID_USR) values(?,?)",([msg_id,0]))
            # Only persist posts that have some body or caption text.
            if umisc.trim(all_msg) <> '' or umisc.trim(msg_caption) <> '':
                # '-' placeholders keep NOT NULL-ish columns populated.
                if umisc.trim(msg_link) == '': msg_link = '-'
                if umisc.trim(all_msg) == '': all_msg = '-'
                if umisc.trim(termo) == '': termo = '-'
                if umisc.trim(user) == '': user = '******'
                if umisc.trim(purpose) == '': purpose = '-'
                if umisc.trim(msg_icon) == '': msg_icon = '-'
                if umisc.trim(msg_picture) == '': msg_picture = '-'
                if umisc.trim(from_msg[0]) == '': from_msg[0] = '0'
                if umisc.trim(from_msg[1]) == '': from_msg[1] = '-'
                if umisc.trim(msg_story) == '': msg_story = '-'
                if umisc.trim(msg_type) == '': msg_type = '-'
                # NOTE(review): likely bug — from_msg is a list here, so
                # float(from_msg) raises TypeError (caught and logged by the
                # inner/outer handlers, so no row is ever inserted).  The
                # commented-out line above suggests the intent was
                # from_msg[0]=float(from_msg[0]).  Confirm before fixing.
                from_msg = float(from_msg)
                if umisc.trim(msg_id) == '': msg_id = '-'
                #insert into web_cache(url,pg,termo,usr,purpose,processed,url_icon,url_picture,id_usr,name_usr,story,title,doc_id,tp) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?)
                mt = [msg_link, all_msg, termo, user, purpose, 'N', msg_icon,
                      msg_picture, from_msg, '', msg_story, msg_caption,
                      msg_id, msg_type]
                #mt=[msg_link ,all_msg]
                try:
                    #prep=conn.prepare(" insert into web_cache(url,pg) values(?,?) ")
                    prep.execute(mt)
                    conn.commit()
                    #conn.sqlX(" insert into web_cache(url,pg,termo,usr,purpose,processed,url_icon,url_picture,id_usr,name_usr,story,title,doc_id,tp) values(?,?,?,?,?,?,?,?,?,?,?,?,?,?) ",mt)
                except Exception, err:
                    print 'Error post facebook item:', err, mt
                    log.exception("")
            else:
                pass
    except:
        log.exception("")
    print 'post.term OK.'