def save_display(host, displayable):
    """Persist the display flag for *host* and propagate a matching
    interest flag to every URL stored under that host.

    Parameters:
        host: host identifier, passed through to MemexMongoUtils.
        displayable: truthy -> mark every URL interesting; falsy -> not.

    Returns:
        Whatever MemexMongoUtils.save_display returns for (host, displayable).
    """
    mmu = MemexMongoUtils()
    # The original duplicated the whole loop in both branches, differing
    # only in the boolean literal written — compute the flag once instead.
    interested = bool(displayable)
    # NOTE(review): limit=100000000 is an effectively-unbounded fetch;
    # confirm whether list_urls supports a "no limit" sentinel instead.
    for url_doc in mmu.list_urls(host=host, limit=100000000):
        mmu.set_interest(url_doc["url"], interested)
    return mmu.save_display(host, displayable)
def save_display(host, displayable):
    """Save the display setting for *host*, syncing every stored URL's
    interest flag to match it.

    Returns the result of MemexMongoUtils.save_display(host, displayable).
    """
    utils = MemexMongoUtils()
    flag = bool(displayable)
    # Sweep all URLs recorded for this host and stamp the interest flag.
    for doc in utils.list_urls(host=host, limit=100000000):
        utils.set_interest(doc["url"], flag)
    return utils.save_display(host, displayable)
def urls_handler(host=None, which_collection="crawl-data"):
    """Put together URL documents for use with the urls endpoint.

    Parameters:
        host: optional host filter forwarded to list_urls.
        which_collection: Mongo collection name (default "crawl-data").

    Returns:
        URL dicts with the Mongo "_id" removed and "crawled_at" reformatted
        as "YYYY-MM-DD HH:MM:SS", or None when missing or not a datetime.
    """
    mmu = MemexMongoUtils(which_collection=which_collection)
    url_dics = mmu.list_urls(host=host, limit=1000)
    for url_dic in url_dics:
        # Default avoids a KeyError if a document lacks "_id".
        url_dic.pop("_id", None)
        # Narrowed from a bare "except:": KeyError when "crawled_at" is
        # absent, AttributeError/TypeError when it is not datetime-like.
        try:
            url_dic["crawled_at"] = url_dic["crawled_at"].strftime("%Y-%m-%d %H:%M:%S")
        except (KeyError, AttributeError, TypeError):
            url_dic["crawled_at"] = None
    return url_dics
def urls_handler(host=None, which_collection="crawl-data"):
    """Assemble URL documents for the urls endpoint.

    Parameters:
        host: optional host filter forwarded to list_urls.
        which_collection: Mongo collection name (default "crawl-data").

    Returns:
        URL dicts stripped of the Mongo "_id", with "crawled_at" formatted
        as "YYYY-MM-DD HH:MM:SS" or None when missing / not a datetime.
    """
    mmu = MemexMongoUtils(which_collection=which_collection)
    url_dics = mmu.list_urls(host=host, limit=1000)
    for url_dic in url_dics:
        # Default avoids a KeyError if a document lacks "_id".
        url_dic.pop("_id", None)
        try:
            crawled = url_dic["crawled_at"]
            url_dic["crawled_at"] = crawled.strftime("%Y-%m-%d %H:%M:%S")
        except (KeyError, AttributeError, TypeError):
            # Was a bare "except:" — narrowed to the failures this body
            # can actually produce so real bugs are no longer swallowed.
            url_dic["crawled_at"] = None
    return url_dics
def urls_handler(host=None, which_collection="crawl-data"):
    """Assemble URL documents for the urls endpoint (ISO-8601 variant).

    Parameters:
        host: optional host filter forwarded to list_urls.
        which_collection: Mongo collection name (default "crawl-data").

    Returns:
        URL dicts stripped of the Mongo "_id"; "crawled_at" becomes its
        isoformat() string, str(value) for non-datetime values, or None
        when the key is absent.
    """
    mmu = MemexMongoUtils(which_collection=which_collection)
    url_dics = mmu.list_urls(host=host)
    for url_dic in url_dics:
        # Default avoids a KeyError if a document lacks "_id".
        url_dic.pop("_id", None)
        # BUG FIX: the key lookup used to sit OUTSIDE the try block, so a
        # document without "crawled_at" raised KeyError instead of being
        # handled like the other urls_handler variants do.
        try:
            date = url_dic["crawled_at"]
        except KeyError:
            url_dic["crawled_at"] = None
            continue
        try:
            url_dic["crawled_at"] = date.isoformat()
        except AttributeError:
            # Not datetime-like; fall back to its plain string form.
            url_dic["crawled_at"] = str(date)
    return url_dics