def get_local(self, filter):
    """
    Search the local torrent database for torrent files by keyword.

    :param filter: (Optional) keyword filter.
    :return: List of torrents in dictionary format.
    """
    keywords = split_into_keywords(unicode(filter))
    # Single-character keywords are too generic to be useful search terms.
    keywords = [keyword for keyword in keywords if len(keyword) > 1]

    # Column layout expected by Torrent/Channel construction below: the first
    # 11 columns are torrent details, the last 10 are channel details.
    TORRENT_REQ_COLUMNS = ['T.torrent_id', 'infohash', 'swift_hash', 'swift_torrent_hash',
                           'T.name', 'torrent_file_name', 'length', 'category_id', 'status_id',
                           'num_seeders', 'num_leechers', 'C.id', 'T.dispersy_id', 'C.name',
                           'T.name', 'C.description', 'C.time_stamp', 'C.inserted']

    @forceAndReturnDBThread
    def local_search(keywords):
        begintime = time()

        results = self._torrent_db.searchNames(keywords, doSort=False, keys=TORRENT_REQ_COLUMNS)
        begintuples = time()

        if len(results) > 0:
            def create_channel(a):
                return Channel(*a)

            # Build each unique channel only once; the channel id is the
            # first of the trailing 10 channel columns.
            channels = {}
            for a in results:
                channel_details = a[-10:]
                if channel_details[0] and channel_details[0] not in channels:
                    channels[channel_details[0]] = create_channel(channel_details)

            def create_torrent(a):
                # Wrap the raw row into a Torrent; the relevance input sits
                # at index -11 (right before the channel columns).
                t = Torrent(*a[:11] + [False])
                t.misc_db = self._misc_db
                t.torrent_db = self._torrent_db
                t.channelcast_db = self._channelcast_db
                t.assignRelevance(a[-11])
                return t

            results = map(create_torrent, results)

        _logger.debug('TorrentSearchGridManager: _doSearchLocalDatabase took: %s of which tuple creation took %s',
                      time() - begintime, time() - begintuples)
        return results

    return self._prepare_torrents(local_search(keywords))
def callback(delayedResult, permid, query):
    """Forward additions produced by a remote channel query to the channel search manager."""
    additions = delayedResult.get()
    if not additions:
        return
    # 22/01/10 boudewijn: use the split_into_keywords function to split.
    # This ensures the keywords are unicode and split on all 'splittable'
    # characters. The "CHANNEL x " protocol prefix is stripped first.
    keywords = split_into_keywords(query[len("CHANNEL x "):])
    self.channelsearch_manager.gotRemoteHits(permid, keywords, additions)
def sesscb_got_remote_hits(self, permid, query, hits):
    """Handle remote torrent search replies (runs on the SessionCallback thread)."""
    if DEBUG:
        print >> sys.stderr, "GUIUtil: sesscb_got_remote_hits", len(hits)
    # Strip the 'SIMPLE ' protocol prefix; split_into_keywords guarantees
    # unicode keywords split on all 'splittable' characters.
    keywords = split_into_keywords(query[len('SIMPLE '):])
    self.torrentsearch_manager.gotRemoteHits(permid, keywords, hits)
def description_for(self, hitsgroup):
    """Return a human-readable description for a group of similar hits."""
    # Precondition: the group contains at least one hit.
    max_len = LevGrouping.MAX_LEN
    first_hit = hitsgroup.hits[0]
    key = ' '.join(split_into_keywords(first_hit.name))
    if len(key) > max_len:
        # Only add an ellipsis when the cut point falls inside a word.
        cutting_inside_word = key[max_len - 1] != ' ' and key[max_len] != ' '
        if cutting_inside_word:
            key = key[:max_len] + '...'
        else:
            key = key[:max_len].rstrip()
    return u'Names of these items resemble "%s"' % key
def description_for(self, hitsgroup):
    """Build the display label for a group of hits with resembling names."""
    limit = LevGrouping.MAX_LEN
    # The group is assumed non-empty; its first hit's normalized name is the label.
    name_key = ' '.join(split_into_keywords(hitsgroup.hits[0].name))
    if len(name_key) > limit:
        if name_key[limit - 1] == ' ' or name_key[limit] == ' ':
            # The cut falls on a word boundary; just trim trailing spaces.
            name_key = name_key[:limit].rstrip()
        else:
            # The cut falls inside a word; mark the truncation with an ellipsis.
            name_key = name_key[:limit] + '...'
    return u'Names of these items resemble "%s"' % name_key
def process_query(self, permid, d, selversion):
    """
    Process an incoming remote query message and send a reply.

    :param permid: permid of the querying peer.
    :param d: decoded query message; d['q'] holds the query string and
        d['id'] the query identifier.
    :param selversion: overlay protocol version selected for this peer.
    """
    hits = None
    p = None
    sendtorrents = False

    netwq = d['q']
    if netwq.startswith("SIMPLE"):
        # remote query
        # Format: 'SIMPLE '+string of space separated keywords or
        # 'SIMPLE+METADATA' +string of space separated keywords
        #
        # In the future we could support full SQL queries:
        # SELECT infohash,torrent_name FROM torrent_db WHERE status = ALIVE
        if netwq.startswith('SIMPLE+METADATA'):
            q = d['q'][len('SIMPLE+METADATA '):]
            sendtorrents = True
        else:
            q = d['q'][len('SIMPLE '):]

        uq = self.clean_netwq(q)
        kws = split_into_keywords(uq)
        hits = self.search_torrents(kws, maxhits=MAX_RESULTS, sendtorrents=sendtorrents)
        p = self.create_remote_query_reply(d['id'], hits, selversion)
    elif netwq.startswith("CHANNEL"):
        # channel query
        if DEBUG:
            print >> sys.stderr, "Incoming channel query", d['q']
        q = d['q'][len('CHANNEL '):]
        uq = self.clean_netwq(q, channelquery=True)
        hits = self.channelcast_db.searchChannels(uq)
        p = self.create_channel_query_reply(d['id'], hits, selversion)
    else:
        # Unknown query type. Previously this fell through with p still None
        # and crashed on QUERY_REPLY+p (and referenced an unbound q when
        # logging). Ignore such queries instead.
        if DEBUG:
            print >> sys.stderr, "Ignoring remote query of unknown type", repr(netwq)
        return

    # log incoming query, if logfile is set
    if self.logfile:
        self.log(permid, q)

    m = QUERY_REPLY + p

    if self.overlay_log:
        nqueries = self.get_peer_nqueries(permid)
        # RECV_MSG PERMID OVERSION NUM_QUERIES MSG
        self.overlay_log('RECV_QRY', show_permid(permid), selversion, nqueries, repr(d))
        # RPLY_QRY PERMID NUM_HITS MSG
        self.overlay_log('RPLY_QRY', show_permid(permid), len(hits), repr(p))

    self.overlay_bridge.send(permid, m, self.send_callback)
    self.inc_peer_nqueries(permid)
def extractTerms(self, name_or_keywords):
    """
    Extracts the terms from a torrent name.

    @param name_or_keywords The name of the torrent. Alternatively, you may
    pass a list of keywords (i.e., the name split into words using
    split_into_keywords).
    @return A list of extracted terms in order of occurence. The list may
    contain duplicates if a term occurs multiple times in the name.
    """
    # Accept either a raw name (split it ourselves) or a pre-split keyword list.
    candidates = (split_into_keywords(name_or_keywords)
                  if isinstance(name_or_keywords, basestring)
                  else name_or_keywords)
    return [term for term in candidates if self.isSuitableTerm(term)]
def assignRelevance(self, matches):
    """
    Assigns a relevance score to this Torrent.

    @param matches A dict containing sets stored under the keys 'swarmname',
    'filenames' and 'fileextensions'.
    """
    swarmname_matches = matches['swarmname']

    # The earliest position of a matching keyword in the torrent name yields
    # the positional score (earlier match == higher, i.e. less negative).
    pos_score = None
    if swarmname_matches:
        for index, term in enumerate(split_into_keywords(self.name)):
            if term in swarmname_matches:
                pos_score = -index
                break

    self.relevance_score = [len(swarmname_matches),
                            pos_score,
                            len(matches['filenames']),
                            len(matches['fileextensions']),
                            0]
def _set_keywords(self, keywords):
    """
    Set the keywords that a next search should use. This clears the previous
    keywords and results.

    :param keywords: Keyword string that should be searched for.
    :return: Boolean indicating success.
    """
    # Keep only keywords of at least two characters; shorter ones are too generic.
    new_keywords = [kw for kw in split_into_keywords(unicode(keywords)) if len(kw) > 1]

    # Unchanged query: leave the accumulated results alone.
    if new_keywords == self._keywords:
        return True

    self._remote_lock.acquire()
    try:
        self._keywords = new_keywords
        self._results = []
        self._result_cids = []
    finally:
        self._remote_lock.release()

    return True
def _set_keywords(self, keywords):
    """
    Set the keywords that a next search should use. This clears the previous
    keywords and results.

    :param keywords: Keyword string that should be searched for.
    :return: Boolean indicating success.
    """
    split = split_into_keywords(unicode(keywords))
    filtered = [word for word in split if len(word) > 1]

    if filtered == self._keywords:
        # Same query as before; keep the existing state untouched.
        return True

    try:
        self._remote_lock.acquire()
        # Reset all search state under the lock so concurrent result handlers
        # never observe a half-updated query.
        self._keywords = filtered
        self._results = []
        self._result_infohashes = []
    finally:
        self._remote_lock.release()
    return True
def assignRelevance(self, matches):
    """
    Assigns a relevance score to this Torrent.

    @param matches A dict containing sets stored under the keys 'swarmname',
    'filenames' and 'fileextensions'.
    """
    # Position score of the first name keyword that matched; None when the
    # swarm name contributed no matches at all.
    first_match_score = None
    if matches['swarmname']:
        terms = split_into_keywords(self.name)
        hits = matches['swarmname']
        index = 0
        while index < len(terms):
            if terms[index] in hits:
                first_match_score = -index
                break
            index += 1

    self.relevance_score = [
        len(matches['swarmname']),
        first_match_score,
        len(matches['filenames']),
        len(matches['fileextensions']),
        0,
    ]
def sesscb_got_channel_hits(self, permid, query, hits):
    """
    Handle the reply to a remote channel query.

    Called by the SessionCallback thread from
    RemoteQueryMsgHandler.process_query_reply.

    @param permid: the peer who returned the answer to the query
    @param query: the keywords of the query that originated the answer
    @param hits: the complete answer returned by the peer
    """
    if DEBUG:
        print >> sys.stderr, "GUIUtil: sesscb_got_channel_hits", len(hits)

    # Let channelcast handle inserting items etc.
    additions = BuddyCastFactory.getInstance().channelcast_core.updateChannel(permid, query, hits)

    # 22/01/10 boudewijn: use the split_into_keywords function to split.
    # This ensures the keywords are unicode and split on all 'splittable'
    # characters. The "CHANNEL x " protocol prefix is stripped first.
    keywords = split_into_keywords(query[len("CHANNEL x "):])
    self.channelsearch_manager.gotRemoteHits(permid, keywords, additions)
def dosearch(self, input=None):
    # Run a search for `input`, or for the contents of the GUI search field
    # when no input is passed. URL-like inputs (http/magnet/swift) start a
    # download instead of a keyword search.
    if input == None:
        # No explicit query: read it from the search field.
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        # Treat the input as a torrent URL and start the download directly.
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith(SWIFT_URL_SCHEME):
        if self.frame.startDownloadFromSwift(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        # Keyword search: single-character keywords are dropped as too generic.
        keywords = split_into_keywords(input)
        keywords = [keyword for keyword in keywords if len(keyword) > 1]
        if len(keywords) == 0:
            self.Notify('Please enter a search term', wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = keywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", keywords, time()

            self.frame.searchlist.Freeze()

            self.torrentsearch_manager.setSearchKeywords(keywords)
            self.channelsearch_manager.setSearchKeywords(keywords)
            self.frame.searchlist.Reset()
            self.ShowPage('search_results', keywords)

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())

            def db_thread():
                # Runs off the GUI thread: refresh local results, then kick
                # off the remote (Dispersy) torrent and channel searches.
                self.torrentsearch_manager.refreshGrid()
                nr_peers_connected = self.torrentsearch_manager.searchDispersy()
                self.channelsearch_manager.searchDispersy()
                return nr_peers_connected

            def wx_thread(delayedResult):
                # Back on the wx thread: show how many replies to expect
                # (the local result counts as one).
                nr_peers_connected = delayedResult.get()
                self.frame.searchlist.SetMaxResults(nr_peers_connected + 1, keywords)
                self.frame.searchlist.NewResult()

            startWorker(wx_thread, db_thread, priority=1024)
def key(self, hit, context_state):
    """Return the grouping key for a hit: its normalized name, capped at MAX_LEN."""
    normalized = split_into_keywords(hit.name)
    return ' '.join(normalized)[:LevGrouping.MAX_LEN]
def dosearch(self, input = None):
    # Search entry point: URL/magnet inputs start a download directly;
    # anything else triggers a local keyword search plus remote peer queries.
    if input == None:
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        wantkeywords = split_into_keywords(input)
        if len(' '.join(wantkeywords)) == 0:
            # Splitting left nothing searchable.
            self.Notify('Please enter a search term', wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = wantkeywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", wantkeywords

            self.frame.searchlist.Freeze()
            self.ShowPage('search_results')

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.setSearchKeywords(wantkeywords, 'filesMode')
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.setSearchKeywords(wantkeywords)
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.torrentsearch_manager.refreshGrid()

            # Start remote search
            # Arno, 2010-02-03: Query starts as Unicode
            q = u'SIMPLE '
            for kw in wantkeywords:
                q += kw + u' '
            q = q.strip()

            self.utility.session.query_connected_peers(q, self.sesscb_got_remote_hits, self.max_remote_queries)

            if len(input) > 1:
                # do not perform remote channel search for single character inputs
                q = 'CHANNEL k '
                for kw in wantkeywords:
                    q += kw + ' '
                self.utility.session.query_connected_peers(q, self.sesscb_got_channel_hits)

            # After 10s, check whether the search produced any results.
            wx.CallLater(10000, self.CheckSearch, wantkeywords)
def _gotRemoteHits(self, permid, kws, answers):
    # Process remote search answers from one peer: convert each hit to the
    # NEWDB dict format, filter out known/spam/banned results, merge with
    # previously received hits, and refresh the grid when anything was added.
    # Returns True when the answers matched the current query, else False.
    try:
        if DEBUG:
            print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHist: got", len(answers), "unfiltered results for", kws, bin2str(permid), time()

        # Always store the results, only display when in filesMode
        # We got some replies. First check if they are for the current query
        if self.searchkeywords == kws:
            numResults = 0
            catobj = Category.getInstance()
            for key, value in answers.iteritems():
                if self.torrent_db.hasTorrent(key):
                    if DEBUG:
                        print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHist: Ignoring hit for", `value['content_name']`, "already got it"
                    # do not show results we have ourselves
                    continue

                # First, check if it matches the word boundaries, that belongs to previous version
                # Convert answer fields as per
                # Session.query_connected_peers() spec. to NEWDB format
                newval = {}
                newval['name'] = value['content_name']
                newval['infohash'] = key
                newval['torrent_file_name'] = ''
                newval['length'] = value['length']
                newval['creation_date'] = time()  # None gives '?' in GUI
                newval['relevance'] = 0
                newval['source'] = 'RQ'
                newval['category'] = value['category'][0]
                # We trust the peer
                newval['status'] = 'good'
                newval['num_seeders'] = value['seeder'] or 0
                newval['num_leechers'] = value['leecher'] or 0

                # OLPROTO_VER_NINETH includes a torrent_size. Set to
                # -1 when not available.
                if 'torrent_size' in value:
                    newval['torrent_size'] = value['torrent_size']
                else:
                    newval['torrent_size'] = -1

                # OLPROTO_VER_ELEVENTH includes channel_permid, channel_name fields.
                if 'channel_permid' not in value:
                    # just to check if it is not OLPROTO_VER_ELEVENTH version
                    # if so, check word boundaries in the swarm name
                    ls = split_into_keywords(value['content_name'])
                    if DEBUG:
                        print >> sys.stderr, "TorrentSearchGridManager: ls is", `ls`
                        print >> sys.stderr, "TorrentSearchGridManager: kws is", `kws`
                    # Every query keyword must occur in the swarm name.
                    flag = False
                    for kw in kws:
                        if kw not in ls:
                            flag = True
                            break
                    if flag:
                        continue

                if 'channel_permid' in value:
                    newval['channel_permid'] = value['channel_permid']
                else:
                    newval['channel_permid'] = ""

                if 'channel_name' in value:
                    newval['channel_name'] = value['channel_name']
                else:
                    newval['channel_name'] = ""

                if 'channel_permid' in value:
                    # Hits from unpopular channels (votes below VOTE_LIMIT)
                    # are treated as spam and dropped.
                    newval['neg_votes'] = self.votecastdb.getNegVotes(value['channel_permid'])
                    newval['subscriptions'] = self.votecastdb.getNumSubscriptions(value['channel_permid'])
                    if newval['subscriptions'] - newval['neg_votes'] < VOTE_LIMIT:
                        # now, this is SPAM
                        continue
                else:
                    newval['subscriptions'] = 0
                    newval['neg_votes'] = 0

                # Extra field: Set from which peer this info originates
                newval['query_permids'] = [permid]

                # Filter out results from unwanted categories
                flag = False
                for cat in value['category']:
                    rank = catobj.getCategoryRank(cat)
                    if rank == -1:
                        if DEBUG:
                            print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHits: Got", `newval['name']`, "from banned category", cat, ", discarded it."
                        flag = True
                        self.filteredResults += 1
                        break
                if flag:
                    continue

                if newval['infohash'] in self.remoteHits:
                    if DEBUG:
                        print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHist: merging hit", `newval['name']`

                    # merge this result with previous results
                    oldval = self.remoteHits[newval['infohash']]
                    for query_permid in newval['query_permids']:
                        if not query_permid in oldval['query_permids']:
                            oldval['query_permids'].append(query_permid)

                    # if a hit belongs to a more popular channel, then replace the previous
                    if newval['channel_permid'] != "" and newval['channel_name'] != "" and newval['subscriptions'] - newval['neg_votes'] > oldval['subscriptions'] - oldval['neg_votes']:
                        oldval['subscriptions'] = newval['subscriptions']
                        oldval['neg_votes'] = newval['neg_votes']
                        oldval['channel_permid'] = newval['channel_permid']
                        oldval['channel_name'] = newval['channel_name']
                else:
                    if DEBUG:
                        print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHist: appending hit", `newval['name']`
                    self.remoteHits[newval['infohash']] = newval
                    numResults += 1
                    # if numResults % 5 == 0:
                    # self.refreshGrid()

            if numResults > 0:
                self.refreshGrid()
                if DEBUG:
                    print >> sys.stderr, 'TorrentSearchGridManager: gotRemoteHits: Refresh grid after new remote torrent hits came in'
            return True
        elif DEBUG:
            print >> sys.stderr, "TorrentSearchGridManager: gotRemoteHits: got hits for", kws, "but current search is for", self.searchkeywords
        return False
    except:
        # NOTE(review): broad except deliberately kept — this runs on a
        # callback thread where an escaping exception would be lost anyway.
        print_exc()
        return False
def dosearch(self, input=None):
    # Run a search for `input` (or the GUI search field contents). Inputs
    # that look like http/magnet/swift/ppsp URLs start a download instead.
    if input == None:
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith(SWIFT_URL_SCHEME) or input.startswith("ppsp://"):
        if self.frame.startDownloadFromSwift(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        # Keyword search: drop single-character keywords as too generic.
        keywords = split_into_keywords(input)
        keywords = [keyword for keyword in keywords if len(keyword) > 1]
        if len(keywords) == 0:
            self.Notify('Please enter a search term', "Your search term '%s' was either to small or to general." % input, icon=wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = keywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", keywords, time()

            self.frame.searchlist.Freeze()

            self.torrentsearch_manager.setSearchKeywords(keywords)
            self.channelsearch_manager.setSearchKeywords(keywords)

            # We set oldkeywords to '', which will trigger a reset in SetKeywords (called from ShowPage). This avoid calling reset twice.
            # Niels: 17-09-2012, unfortunately showpage calls show(true) which results in the dirty items being refreshed.
            # We need to call Reset in order to prevent this from happening
            self.frame.searchlist.Reset()
            self.ShowPage('search_results', keywords)

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())

            def db_thread():
                # Off the GUI thread: refresh local results, then start the
                # remote (Dispersy) torrent and channel searches.
                self.torrentsearch_manager.refreshGrid()
                nr_peers_connected = self.torrentsearch_manager.searchDispersy()
                self.channelsearch_manager.searchDispersy()
                return nr_peers_connected

            def wx_thread(delayedResult):
                # Back on the wx thread: announce the expected number of
                # replies (the local result counts as one).
                nr_peers_connected = delayedResult.get()
                self.frame.searchlist.SetMaxResults(nr_peers_connected + 1, keywords)
                self.frame.searchlist.NewResult()

            startWorker(wx_thread, db_thread, priority=1024)
def dosearch(self, input = None):
    # Search for `input`, or for the GUI search field contents when omitted.
    # URL-like inputs (http/magnet/swift) are downloaded instead of searched.
    if input == None:
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith(SWIFT_URL_SCHEME):
        if self.frame.startDownloadFromSwift(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        # Keyword search; single-character keywords are dropped.
        keywords = split_into_keywords(input)
        keywords = [keyword for keyword in keywords if len(keyword) > 1]
        if len(keywords) == 0:
            self.Notify('Please enter a search term', wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = keywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", keywords, time()

            self.frame.searchlist.Freeze()

            self.torrentsearch_manager.setSearchKeywords(keywords)
            self.channelsearch_manager.setSearchKeywords(keywords)
            self.frame.searchlist.Reset()
            self.ShowPage('search_results', keywords)

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())

            def db_thread():
                # Database thread: refresh local results and launch the
                # remote (Dispersy) searches.
                self.torrentsearch_manager.refreshGrid()
                nr_peers_connected = self.torrentsearch_manager.searchDispersy()
                self.channelsearch_manager.searchDispersy()
                return nr_peers_connected

            def wx_thread(delayedResult):
                # wx thread: publish how many replies to expect (local
                # result counts as one).
                nr_peers_connected = delayedResult.get()
                self.frame.searchlist.SetMaxResults(nr_peers_connected + 1, keywords)
                self.frame.searchlist.NewResult()

            startWorker(wx_thread, db_thread, priority=1024)
def dosearch(self, input = None):
    # Search entry point (fts3-aware variant): URL/magnet inputs start a
    # download; other input is preprocessed for fts3 features, searched
    # locally, then fanned out to connected peers.
    if input == None:
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        # Separate fts3 features (e.g. operators) from plain keywords;
        # only the plain keywords are sent to remote peers.
        fts3feaures, old_keywords = fts3_preprocess(input)

        remotekeywords = split_into_keywords(old_keywords)
        remotekeywords = [keyword for keyword in remotekeywords if len(keyword) > 1]

        safekeywords = ' '.join(remotekeywords + fts3feaures)
        if len(safekeywords) == 0:
            self.Notify('Please enter a search term', wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = remotekeywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", remotekeywords, time()

            self.frame.searchlist.Freeze()

            self.torrentsearch_manager.setSearchKeywords(remotekeywords, fts3feaures)
            self.channelsearch_manager.setSearchKeywords(remotekeywords)
            self.frame.searchlist.Reset()
            self.ShowPage('search_results', safekeywords)

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.torrentsearch_manager.refreshGrid()

            if len(remotekeywords) > 0:
                # Start remote search
                # Arno, 2010-02-03: Query starts as Unicode
                q = u'SIMPLE '
                for kw in remotekeywords:
                    q += kw + u' '
                q = q.strip()

                nr_peers_connected = self.utility.session.query_connected_peers(q, self.sesscb_got_remote_hits, self.max_remote_queries)

                # Indicate expected nr replies in gui, use local result as first
                self.frame.searchlist.SetMaxResults(nr_peers_connected + 1)
                self.frame.searchlist.NewResult()

                if len(input) > 1:
                    # do not perform remote channel search for single character inputs
                    q = 'CHANNEL k '
                    for kw in remotekeywords:
                        q += kw + ' '
                    self.utility.session.query_connected_peers(q, self.sesscb_got_channel_hits)

                # After 10s, verify whether the search yielded results.
                wx.CallLater(10000, self.CheckSearch, remotekeywords)
def dosearch(self, input = None):
    # Search for `input` (or the GUI search field contents). Inputs that look
    # like http/magnet/swift/ppsp URLs start a download instead of a search.
    if input == None:
        sf = self.frame.top_bg.searchField
        if sf is None:
            return
        input = sf.GetValue()

    if input:
        input = input.strip()
        if input == '':
            return
    else:
        return

    self.frame.top_bg.searchField.SetValue(input)

    if input.startswith("http://"):
        if self.frame.startDownloadFromUrl(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith("magnet:"):
        if self.frame.startDownloadFromMagnet(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    elif input.startswith(SWIFT_URL_SCHEME) or input.startswith("ppsp://"):
        if self.frame.startDownloadFromSwift(str(input)):
            self.frame.top_bg.searchField.Clear()
            self.ShowPage('my_files')
    else:
        # Keyword search; single-character keywords are dropped as too generic.
        keywords = split_into_keywords(input)
        keywords = [keyword for keyword in keywords if len(keyword) > 1]
        if len(keywords) == 0:
            self.Notify('Please enter a search term', "Your search term '%s' was either to small or to general." % input , icon = wx.ART_INFORMATION)
        else:
            self.frame.top_bg.StartSearch()
            self.current_search_query = keywords
            if DEBUG:
                print >> sys.stderr, "GUIUtil: searchFiles:", keywords, time()

            self.frame.searchlist.Freeze()

            self.torrentsearch_manager.setSearchKeywords(keywords)
            self.channelsearch_manager.setSearchKeywords(keywords)

            # We set oldkeywords to '', which will trigger a reset in SetKeywords (called from ShowPage). This avoid calling reset twice.
            # Niels: 17-09-2012, unfortunately showpage calls show(true) which results in the dirty items being refreshed.
            # We need to call Reset in order to prevent this from happening
            self.frame.searchlist.Reset()
            self.ShowPage('search_results', keywords)

            # We now have to call thaw, otherwise loading message will not be shown.
            self.frame.searchlist.Thaw()

            # Peform local search
            self.torrentsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())
            self.channelsearch_manager.set_gridmgr(self.frame.searchlist.GetManager())

            def db_thread():
                # Off the GUI thread: refresh local results and start the
                # remote (Dispersy) torrent and channel searches.
                self.torrentsearch_manager.refreshGrid()
                nr_peers_connected = self.torrentsearch_manager.searchDispersy()
                self.channelsearch_manager.searchDispersy()
                return nr_peers_connected

            def wx_thread(delayedResult):
                # On the wx thread: publish the expected number of replies
                # (the local result counts as one).
                nr_peers_connected = delayedResult.get()
                self.frame.searchlist.SetMaxResults(nr_peers_connected + 1, keywords)
                self.frame.searchlist.NewResult()

            startWorker(wx_thread, db_thread, priority = 1024)
def get_local(self, filter):
    """
    Search the local torrent database for torrent files by keyword.

    :param filter: (Optional) keyword filter.
    :return: List of torrents in dictionary format.
    """
    keywords = split_into_keywords(unicode(filter))
    # Single-character keywords are too generic to be useful search terms.
    keywords = [keyword for keyword in keywords if len(keyword) > 1]

    # Column layout required by the Torrent/Channel construction below:
    # the first 11 columns describe the torrent, the last 10 the channel.
    TORRENT_REQ_COLUMNS = [
        'T.torrent_id', 'infohash', 'swift_hash', 'swift_torrent_hash',
        'T.name', 'torrent_file_name', 'length', 'category_id', 'status_id',
        'num_seeders', 'num_leechers', 'C.id', 'T.dispersy_id', 'C.name',
        'T.name', 'C.description', 'C.time_stamp', 'C.inserted'
    ]

    @forceAndReturnDBThread
    def local_search(keywords):
        begintime = time()

        results = self._torrent_db.searchNames(keywords, doSort=False, keys=TORRENT_REQ_COLUMNS)
        begintuples = time()

        if len(results) > 0:
            def create_channel(a):
                return Channel(*a)

            # Construct each unique channel only once; the channel id is the
            # first of the trailing 10 channel columns.
            channels = {}
            for a in results:
                channel_details = a[-10:]
                if channel_details[0] and channel_details[0] not in channels:
                    channels[channel_details[0]] = create_channel(channel_details)

            def create_torrent(a):
                # Wrap the raw row into a Torrent object; the relevance
                # input sits at index -11, just before the channel columns.
                t = Torrent(*a[:11] + [False])
                t.misc_db = self._misc_db
                t.torrent_db = self._torrent_db
                t.channelcast_db = self._channelcast_db
                t.assignRelevance(a[-11])
                return t

            results = map(create_torrent, results)

        _logger.debug(
            'TorrentSearchGridManager: _doSearchLocalDatabase took: %s of which tuple creation took %s',
            time() - begintime, time() - begintuples)
        return results

    return self._prepare_torrents(local_search(keywords))