def test_remote_cache_check(self):
    # hand the cache a pickled URL list, as the manager normally would
    arr = pickle.dumps(['http://yahoo.com','http://yahoo.com','3'])
    qUtils.send_message(arr, qUtils.CACHE_PORT)
    #self.myCache.main_cache()
    print "foo"
    # a cache miss should be forwarded to the fetcher port
    response = qUtils.recv_message(qUtils.FETCH_PORT)
    print response
    time.sleep(3)
    # answer in the fetcher's place with a local path for the saved page
    qUtils.send_message('/localpath/tothe/file/yahoo', qUtils.FETCH_PORT)

    # the cache should relay a pickled [url, local_path] pair to the manager port...
    response2 = pickle.loads(qUtils.recv_message(qUtils.MGR_PORT))
    print response2

    # ...followed by the plain 'END' sentinel once the whole list has been handled
    response3 = qUtils.recv_message(qUtils.MGR_PORT)
    print response3
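Every snippet in this listing leans on a small qUtils helper module (send_message, recv_message, open_listener, build_standard_response, and the *_PORT constants) that is not part of the listing itself. The sketch below is only a guess at what the messaging half of that module might look like, assuming plain one-shot TCP connections on localhost; the port numbers are placeholders and build_standard_response is omitted because nothing in the excerpts shows its format.

import socket

# hypothetical port assignments; the original values are not shown anywhere in the listing
BROWSER_PORT = 8000
MGR_PORT     = 8001
CACHE_PORT   = 8002
FETCH_PORT   = 8003
PROX_PORT    = 8004

def open_listener(port):
  # bind a listening socket on localhost; the caller accept()s connections itself
  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
  s.bind(('localhost', port))
  s.listen(5)
  return s

def send_message(message, port):
  # connect to whoever is listening on the port, push one message, and close
  s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  s.connect(('localhost', port))
  s.sendall(message)
  s.close()

def recv_message(port):
  # listen on the port, accept a single connection, and return its payload
  listener = open_listener(port)
  conn, addr = listener.accept()
  data = conn.recv(65536)
  conn.close()
  listener.close()
  return data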
Example #2
 def run(self):
   print 'SECOND Thread'
   print 'QUERY 2 %s' %self.search_query
   search_results = self.execute_search(self.search_query)
   url_list_tmp = list(set(self.parse_results_for_urls(search_results)))
   url_list = url_list_tmp[:10]   # keep at most the first ten unique URLs
   print 'LIST %s'%url_list

   # the lock serializes this send/receive exchange with the manager, so concurrent
   # search threads cannot interleave their traffic on the shared ports (see the sketch after this example)
   if url_list != [] :
     self.threadLock.acquire()
     qUtils.send_message(cPickle.dumps(url_list),qUtils.MGR_PORT)
     print 'SEND to MGR'
     data = cPickle.loads(qUtils.recv_message(qUtils.PROX_PORT))
     print 'RECV %s'%data
     self.url_results = data
     self.threadLock.release()
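The reason the lock covers both calls is that the reply arrives on a different port than the request goes out on: the pickled URL list is pushed to MGR_PORT and the answer is collected from PROX_PORT, and those two steps must stay paired per thread. Below is a small stand-alone illustration of that pattern, assuming the hypothetical qUtils sketch above and a running manager (Example #4); the SearchWorker class is invented for this illustration.

import threading
import cPickle
import qUtils   # hypothetical sketch from above

class SearchWorker(threading.Thread):   # illustrative only, not part of the original code
  def __init__(self, urls, lock):
    threading.Thread.__init__(self)
    self.urls = urls
    self.lock = lock
    self.results = None

  def run(self):
    # the critical section spans the send *and* the matching receive, so two
    # workers can never interleave their messages on MGR_PORT / PROX_PORT
    with self.lock:
      qUtils.send_message(cPickle.dumps(self.urls), qUtils.MGR_PORT)
      self.results = cPickle.loads(qUtils.recv_message(qUtils.PROX_PORT))

lock = threading.Lock()
workers = [SearchWorker(['http://yahoo.com'], lock) for _ in range(2)]
for w in workers: w.start()
for w in workers: w.join()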
Example #4
import threading
import cPickle

import qUtils

class qManager(threading.Thread):
  def __init__(self,threadID, request):
    threading.Thread.__init__(self)
    self.threadID = threadID
    self.request = request

  def run(self):
    # ask qCache whether it already has the data for this request
    global web_list
    qUtils.send_message(self.request, qUtils.CACHE_PORT)
    if self.request != "!KILLPROXY":
      # collect the cache's pickled replies until it signals 'END'
      while True:
        data = qUtils.recv_message(qUtils.MGR_PORT)
        if data != 'END':
          data = cPickle.loads(data)
          web_list.append(data)  # add each reply to web_list
        else:
          break

if __name__ == '__main__':
  while True:
    # receive the proxy's request; this is still the pickled URL list and is forwarded to the cache as-is
    url_list = qUtils.recv_message(qUtils.MGR_PORT)
    web_list = []
    pThread = qManager(1,url_list)
    pThread.start()
    pThread.join()
    if url_list == "!KILLPROXY": break
    print 'FIN THREAD %s'%web_list
    # hand the collected results back to the proxy
    qUtils.send_message(cPickle.dumps(web_list),qUtils.PROX_PORT)
    print 'SEND TO PROXY'
Example #5
   # collect the page's asset references: images, stylesheets, and external scripts
   self.imgs = list(set(self.soup.find_all('img')))
   self.css = self.soup.find_all('link', rel="stylesheet")
   self.js = self.soup.find_all('script', src=re.compile(".*"))

   # save the raw page locally; this path is what eventually gets reported to the cache
   with open(self.saved_page_dir + '/index.html','w') as f:
     f.write(self.mainpage)

   #self.fetch_images()
   #self.fetch_css_and_js()
   #self.rewrite_links()

   print 'sending the cache a local file at ' + self.saved_page_dir + '/index.html'
   #qUtils.send_message(self.saved_page_dir+'/index.html', qUtils.CACHE_PORT)

# wait for a fetch request from the cache, dispatch a fetcher thread, and finally
# send the resulting local path back to the cache (a sketch of the fetcher thread itself follows this example)
if __name__=='__main__':
  while True : 
    #state = False
    #data = ' '
    print 'LISTEN CACHE'
    data = qUtils.recv_message(qUtils.FETCH_PORT)
    if data == "!KILLPROXY":
      break
    pThread = qFetcherThread(5,data)
    pThread.start()
    time.sleep(1)
    pThread.join()
    qUtils.send_message(pThread.saved_page_dir + '/index.html', qUtils.CACHE_PORT)
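
Example #5 starts in the middle of the fetcher's run() method, and the main loop above constructs a qFetcherThread(5, data) whose class is never shown. The skeleton below is a guess at that class, consistent only with the attributes used above (self.mainpage, self.soup, self.saved_page_dir) and assuming Python 2 with urllib2 and BeautifulSoup; everything else about it is invented.

import os
import re
import urllib2
import threading

from bs4 import BeautifulSoup

class qFetcherThread(threading.Thread):
  # hypothetical reconstruction: only the attribute names come from the excerpt above
  def __init__(self, threadID, url):
    threading.Thread.__init__(self)
    self.threadID = threadID
    self.url = url
    # derive a directory name for the saved copy of the page
    self.saved_page_dir = './pages/' + re.sub(r'\W+', '_', url)

  def run(self):
    # download the page and parse it so the asset lists above can be built
    self.mainpage = urllib2.urlopen(self.url).read()
    self.soup = BeautifulSoup(self.mainpage, 'html.parser')
    if not os.path.isdir(self.saved_page_dir):
      os.makedirs(self.saved_page_dir)
    with open(self.saved_page_dir + '/index.html', 'w') as f:
      f.write(self.mainpage)

Once the thread has joined, the loop above forwards self.saved_page_dir + '/index.html' to the cache port.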

Example #6
     print 'SEND to MGR'
     data = cPickle.loads(qUtils.recv_message(qUtils.PROX_PORT))
     print 'RECV %s'%data
     self.url_results = data
     self.threadLock.release()

if __name__ == '__main__':
  threadLock = threading.Lock()
  search_query = ' '
  s = qUtils.open_listener(qUtils.BROWSER_PORT)
  print 'starting proxy '
  while True:  
    conn,addr = s.accept()
    request = conn.recv(8192)
    print request
    search_query = qProxyManager.parse_request_for_query(request)
    print 'QUERY %s'%search_query
    # acknowledge the browser right away with an empty standard response
    senddata = qUtils.build_standard_response([])
    conn.sendall(senddata)

    if "KILLPROXY" in search_query:
      qUtils.send_message("!KILLPROXY", qUtils.MGR_PORT)
      break

    pThread = qProxyManager(1,search_query,threadLock)
    pThread.start()
    #pThread.join()
    # note: with join() left disabled, url_results may still be empty when it is checked here
    if pThread.url_results != []:
      conn.sendall(qUtils.build_standard_response(pThread.url_results))
    conn.close()
Example #7
 def run(self):
   print 'starting client thread'
   qUtils.send_message(self.message,self.port)
   return
Example #8
 def run(self):
   # cache miss: forward the request to the fetcher
   print "not in the cache "
   qUtils.send_message(self.request, qUtils.FETCH_PORT)
   if self.request == "!KILLPROXY" or self.request == 'END':
     print 'END'
   else:
Example #9
 def run(self):
   qUtils.send_message(self.request, qUtils.MGR_PORT)  # send the address to the mgr
Example #10
      pThread.start()
      pThread.join()
      print "send a file "

if __name__ == '__main__':
  myTable = dict()   # maps a URL to the local path of its cached copy
  while True:
    url_list = qUtils.recv_message(qUtils.CACHE_PORT)
    if url_list == "!KILLPROXY":
      # propagate the shutdown to the fetcher before exiting
      pThread = qCacheFetcher(2,url_list)
      pThread.start()
      pThread.join()
      break
    else: url_list = pickle.loads(url_list)
    print 'TABLE %s' %myTable

    for url in url_list:
      if url in myTable:
        # cache hit: send the local address to the manager (see the sketch after this example)
        print "yes in the cache"
        ayfile = pickle.dumps([url, myTable.get(url)])
        pThread = qCacheManager(1,ayfile)
        pThread.start()
        pThread.join()
      else:
        # cache miss: ask the fetcher to download the page
        pThread = qCacheFetcher(2,url)
        pThread.start()
        pThread.join()
    print 'FIN THREAD %s'%myTable
    # tell the manager that the reply for this URL list is complete
    qUtils.send_message("END", qUtils.MGR_PORT)
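
The cache's main loop constructs qCacheManager(1, ayfile) and qCacheFetcher(2, url) threads whose class scaffolding is not part of this listing; Example #9 shows only a run() body that forwards the request to the manager port, and Example #8 a run() body for the fetch path. Below is a minimal skeleton for qCacheManager consistent with those calls, modelled on qManager from Example #4; its layout is an assumption, not the original source.

import threading

import qUtils   # project helper module assumed by every example in this listing

class qCacheManager(threading.Thread):
  # hypothetical scaffolding: only the constructor arguments and the run() body
  # are taken from the excerpts above
  def __init__(self, threadID, request):
    threading.Thread.__init__(self)
    self.threadID = threadID
    self.request = request    # pickled [url, local_path] pair

  def run(self):
    # cache hit: hand the pickled pair to the manager, as in Example #9
    qUtils.send_message(self.request, qUtils.MGR_PORT)

qCacheFetcher would follow the same constructor pattern, with the run() body shown in Example #8.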