def initiate():
    """Main crawl loop: drain the URL frontier, fetch each page within the
    configured depth limit, store its contents, enqueue newly discovered
    links, and write a per-URL summary record.

    Relies on module-level collaborators (URLProvider, SiteHandler,
    URLFetcher, utils, AttributeExtraction, pagestoragehandler, infologger,
    valid_mime_type, valid_protocol_request, urlsplit) and on the module
    globals `count` (pages processed so far) and `search_max_depth`.
    Returns nothing; all results are emitted via the storage/log handlers.
    """
    global count, search_max_depth
    while URLProvider.isIncomplete():
        # curr_url is a (url, depth) pair pulled from the frontier.
        curr_url = URLProvider.get_URL()
        if int(curr_url[1]) <= int(search_max_depth):
            # Debug trace; the %-formatted print() call emits exactly the
            # same text as the original Py2 `print curr_url, count`
            # statement while also being valid Python 3.
            print('%s %s' % (curr_url, count))
            # Default summary record, overwritten on any successful branch:
            # (code, headers, stored_tuple, n_links, size, depth).
            # NOTE(review): the last field is 0 here but curr_url[1] (the
            # depth) in every other branch — possibly an oversight; kept
            # as-is to preserve the logged format. TODO confirm.
            my_tuple = ('00000', {'customError': 'NO ACCESS'}, None, None, 0, 0)
            if SiteHandler.is_Valid(curr_url[0]):
                # Process further.
                response_data = URLFetcher.getURL(curr_url[0])
                if response_data == 1:  # URLFetcher's fetch-failure sentinel
                    my_tuple = ('2222', {'customError': 'Unable to Fetch Correctly'},
                                None, None, 0, curr_url[1])
                else:
                    myMime = response_data.headers.get('Content-Type')
                    if valid_mime_type(myMime):
                        base_url = utils.getBaseUrl(curr_url[0])
                        mystring = response_data.read()
                        links = AttributeExtraction.extract_Links(base_url, mystring)
                        stored_tup = pagestoragehandler.write_log(curr_url[0], mystring)
                        my_tuple = (response_data.code, response_data.headers,
                                    stored_tup, len(links), len(mystring), curr_url[1])
                        # Enqueue each discovered link at most once; set()
                        # removes duplicates, not_in_list() avoids revisits.
                        for link in set(links):
                            if valid_protocol_request(urlsplit(link)[0]):
                                if infologger.not_in_list(link):
                                    URLProvider.add_URL(link)
                    else:
                        # We keep a log of data stored. If a log of data
                        # downloaded is required, the response body's size
                        # could be used instead; headers sometimes omit the
                        # size, so it is computed from the body when read.
                        my_tuple = ('1111', {'customError': 'Unsupported Mime'},
                                    None, None, 0, curr_url[1])
            infologger.write_summary(curr_url[0], my_tuple)
            count += 1
    # Force writes and commits once the frontier is exhausted.
    pagestoragehandler.store_log()
    infologger.store_log()
def initial_seeding(url_list):
    """Seed the crawl frontier with the given starting URLs.

    Parameters:
        url_list: iterable of seed URL entries; each entry is passed
            verbatim to URLProvider.add_URL.
    """
    # Iterate directly instead of keeping a manual counter and
    # re-indexing the list on every step, as the original did
    # (its loop variable `url` was bound but never used).
    for url in url_list:
        URLProvider.add_URL(url)