def testGetPosts(sub): # spawn an action print "Get Posts:" posts = a.get_posts(sub) print "Passed" if posts else "Failed" return posts != None
def test_scan_sub():
    """Integration test for the ScanSub scanner.

    Seeds a fresh test database with one whitelisted, one blacklisted, and one
    unknown channel, submits three youtube posts, then checks:
    1) the debug policy reposts them with the expected 0//whitelist and
       1//blacklist titles,
    2) the scanned posts were recorded in the reddit table,
    3) a rescan after marking the newest entry as seen reports FoundOld.

    Returns False on the first failed check.
    NOTE(review): requires live reddit credentials and network access -- verify
    TestCredentials.cred exists before running.
    """
    # start from a clean slate: remove any leftover test database
    try:
        os.remove("test_database.db")
    except:
        pass
    credentials = CRImport("TestCredentials.cred")
    credentials["SUBREDDIT"] = "centralscrutinizer"
    # clear old posts from both test subreddits
    u.clear_sub(credentials, "thewhitezone")
    u.clear_sub(credentials, "centralscrutinizer")
    # get subreddit handles through a multiprocess-capable praw instance
    mypraw = u.create_multiprocess_praw(credentials)
    wz = u.get_subreddit(credentials, mypraw, "thewhitezone")
    cz = u.get_subreddit(credentials, mypraw, "centralscrutinizer")
    pol = Policies.DebugPolicy(wz)
    print "Starting ScanSub tests..."
    print "Simple blacklist identification:"
    # h'ok here we go.
    # first we'll create three channel entries: one whitelisted, one
    # blacklisted, and one not found
    with DataBase.DataBaseWrapper("test_database.db") as db:
        entries = [("arghdos", "youtube.com", "http://www.youtube.com/user/arghdos",
                    Blacklist.BlacklistEnums.Whitelisted, 0),
                   ("IGN", "youtube.com", "http://www.youtube.com/user/IGN",
                    Blacklist.BlacklistEnums.Blacklisted, 0),
                   ("Karen Jones", "youtube.com", "http://www.youtube.com/user/Karen Jones",
                    Blacklist.BlacklistEnums.NotFound, 0)]
        db.add_channels(entries)
        # create scrutinizer (owns the ScanSub instance under test)
        cs = CentralScrutinizer.CentralScrutinizer(credentials, pol, "test_database.db")
        ss = cs.ss
        # now make posts: one per url, titled by its index
        urls = ["https://www.youtube.com/watch?v=-vihDAj5VkY",
                "https://m.youtube.com/watch?v=G4ApQrbhQp8",
                "http://youtu.be/Cg9PWSHL4Vg"]
        ids = []
        for i in range(len(urls)):
            ids.append(a.make_post_url(cz, url=urls[i], title=str(i)).id)
        # ok, now scan
        ss.scan(3)
        # next check for a 0//whitelist and 1//blacklist repost made by the
        # debug policy into thewhitezone
        posts = a.get_posts(wz, 3)
        one = posts.next()
        two = posts.next()
        if (one.title == "0//whitelist") and (two.title == "1//blacklist"):
            print "Passed"
        else:
            print "Failed"
            return False
        print "Reddit record:"
        # the scan should have recorded exactly two of our submitted posts
        # (whitelisted + blacklisted; the NotFound channel is not recorded)
        results = db.get_reddit(date_added=(datetime.datetime.now() - datetime.timedelta(days=1)))
        if len([p for p in results if p[0] in ids]) == 2:
            print "Passed"
        else:
            print "Failed"
            return False
        # mark the newest db entry as already seen by the scanner
        result = db.newest_reddit_entries(1)
        ss.last_seen = result.next()[0]
        print "Found old:"
        # rescanning now should stop early and report FoundOld
        result = ss.scan(3)
        if result == ScanSub.scan_result.FoundOld:
            print "Passed"
        else:
            print "Failed"
            return False
def get_posts(self, lim):
    """Fetch up to *lim* posts from this scanner's subreddit.

    Thin pass-through to Actions.get_posts.
    """
    subreddit = self.sub
    return Actions.get_posts(subreddit, lim)
def testGetComments(sub): print "Get Comments:" post = a.get_posts(sub, 1) comments = a.get_comments(post.next()) print "Passed" if comments != None and len(comments) and comments[0].body == "test comment" else "Failed" return comments != None
            # --- tail of get_historial_posts (its def is above this chunk) ---
            # error handling for the reddit-analytics request
            if str(e).startswith("No JSON object"):
                # NOTE(review): this message presumably means reddit-analytics
                # returned an empty/non-JSON response, i.e. the service is down
                logging.error("Reddit-Analytics is down, retrying historical scan after pause...")
            else:
                logging.error(str(e))
                if __debug__:
                    logging.exception(e)
            #temporary fix to avoid endless waiting while RA is down
            return []
        except Exception, e:
            # any other failure: log it and bail out the same way
            logging.error(str(e))
            if __debug__:
                logging.exception(e)
            #temporary fix to avoid endless waiting while RA is down
            return []
        else:
            # request path succeeded: fall back to pulling posts directly
            posts = Actions.get_posts(self.sub, 900)
            if posts is None:
                return None
            return posts

    def historical_scan(self, goto):
        """Scan backwards through historical posts until *goto* is reached.

        :param goto: target passed through to get_historial_posts
        :return: scan_result.FoundOld when posts were processed,
                 scan_result.Error otherwise
        """
        posts = self.get_historial_posts(goto)
        if posts is not None and len(posts):
            # keep only link posts; self posts carry no external url to check
            post_data = [(post.created_utc, post.name, post.url, Actions.get_username(post), post)
                         for post in posts if not post.is_self]
            self.__process_post_list(post_data)
            return scan_result.FoundOld
        return scan_result.Error

    def get_posts(self, lim):
        """Return up to *lim* posts from this scanner's subreddit."""
        return Actions.get_posts(self.sub, lim)