def test_mark_as_read(self):
    """Marking a message read removes it from the unread listing."""
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    # pylint: disable-msg=E1101
    message = six_next(other.user.get_unread(limit=1))
    message.mark_as_read()
    self.assertFalse(message in other.user.get_unread(limit=5))
def test_mark_multiple_as_read(self):
    """Batch mark_as_read clears every passed message from unread."""
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    messages = list(other.user.get_unread(limit=2))
    self.assertEqual(2, len(messages))
    self.r.user.mark_as_read(messages)
    unread = other.user.get_unread(limit=5)
    for message in messages:
        self.assertFalse(message in unread)
def test_mark_as_unread(self):
    """Marking a read message unread puts it back in the unread listing."""
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    found = None
    for message in other.user.get_inbox():
        if not message.new:
            found = message
            message.mark_as_unread()
            break
    else:
        self.fail('Could not find a read message.')
    self.assertTrue(found in other.user.get_unread())
def parse(ignore_duty=True, ignore_resolutions=True):
    """Poll GovTrack's introduced-bills RSS feed and submit new bills.

    Keyword arguments:
    ignore_duty -- when True, skip temporary duty-suspension bills
    ignore_resolutions -- when True, skip House/Senate resolutions
    """
    govfeed = feedparser.parse('http://www.govtrack.us/events/events.rss?'
                               'feeds=misc%3Aintroducedbills')
    r = Reddit(user_agent='WatchingCongress/1.0')
    r.login('FuturistBot', '<BOTPASS>')
    for entry in govfeed.entries:
        # BUG FIX: str.find() returns -1 when absent, so the old
        # ``not entry['guid'].find('guid')`` only fired when 'guid' was at
        # index 0.  The log message shows the intent is "GUID missing".
        if 'guid' not in entry['guid']:
            logging.info("Couldn't find GUID")
            continue
        if not entry['title']:
            logging.info("No title for bill: {0}".format(entry['guid']))
            continue
        # Skip anything we already posted.
        if house_collection.find_one({'guid': entry['guid']}):
            logging.info("Already created story: {0}".format(entry['title']))
            continue
        if ignore_duty and 'duty' in entry['title'] and 'temporar' in entry['title']:
            logging.info("Ignored boring bill: {0}".format(entry['title']))
            continue
        if ignore_resolutions and '.Res' in entry['title']:
            logging.info("Ignored resolution: {0}".format(entry['title']))
            continue
        record = {
            'title': entry['title'],
            'description': entry['description'],
            'link': entry['link'],
            'guid': entry['guid'],
        }
        # e.g. "H.R. 1234: ..." -> "H.R. 1234"
        bill_number = entry['title'].split(':')[0]
        try:
            news_stories = find_news_stories(bill_number)
        except Exception as e:
            # Best effort: submit without related news on failure.
            news_stories = []
            logging.error("Couldn't parse Google News: {}".format(unicode(e)))
        try:
            text = template.render(description=entry['description'],
                                   link=entry['link'],
                                   news_stories=news_stories)
            r.submit('futuristparty', entry['title'], text=text)
            house_collection.insert(record)
            logging.info("Created story: {0}".format(entry['title']))
        except Exception as e:
            logging.error("Exception occured: {0}".format(unicode(e)))
        # Throttle submissions to stay under reddit's rate limit.
        time.sleep(2)
def test_mark_multiple_as_read(self):
    """Batch mark_as_read clears every passed message from unread.

    Collects two unread messages not authored by the test account, marks
    them read, then verifies neither remains unread.
    """
    oth = Reddit(USER_AGENT)
    oth.login('PyApiTestUser3', '1111')
    messages = []
    for msg in oth.user.get_unread(limit=None):
        if msg.author != oth.user.name:
            messages.append(msg)
            # BUG FIX: this was ``return``, which exited the test method
            # before any assertion ran, silently passing the test.
            if len(messages) >= 2:
                break
    self.assertEqual(2, len(messages))
    self.r.user.mark_as_read(messages)
    unread = oth.user.get_unread(limit=5)
    for msg in messages:
        self.assertTrue(msg not in unread)
def test_report(self):
    """Reporting a submission makes it show up in the reports listing."""
    # Log in as a second user to do the reporting.
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    subreddit = other.get_subreddit(self.sr)
    submission = None
    for submission in subreddit.get_new_by_date():
        if not submission.hidden:
            break
    if not submission or submission.hidden:
        self.fail('Could not find a non-reported submission.')
    submission.report()
    # Verify the moderator account now sees it among the reports.
    for report in self.r.get_subreddit(self.sr).get_reports():
        if report.id == submission.id:
            break
    else:
        self.fail('Could not find reported submission.')
def parse():
    """Poll GovTrack's introduced-bills RSS feed and post each new bill."""
    govfeed = feedparser.parse('http://www.govtrack.us/events/events.rss?'
                               'feeds=misc%3Aintroducedbills')
    r = Reddit(user_agent='WatchingCongress/1.0')
    r.login('congressbot', '<BOTPASS>')
    for entry in govfeed.entries:
        # BUG FIX: str.find() returns -1 when absent, so the old
        # ``not entry['guid'].find('guid')`` only fired when 'guid' was at
        # index 0.  The log message shows the intent is "GUID missing".
        if 'guid' not in entry['guid']:
            logging.info("Couldn't find GUID")
            continue
        if not entry['title']:
            logging.info("No title for bill: {}".format(entry['guid']))
            continue
        # Skip anything we already posted.
        if house_collection.find_one({'guid': entry['guid']}):
            logging.info("Already created story: {}".format(entry['title']))
            continue
        if 'duty' in entry['title'] and 'temporar' in entry['title']:
            logging.info("Ignored boring bill: {}".format(entry['title']))
            continue
        if '.Res' in entry['title']:
            logging.info("Ignored resolution: {}".format(entry['title']))
            continue
        record = {
            'title': entry['title'],
            'description': entry['description'],
            'link': entry['link'],
            'guid': entry['guid'],
        }
        try:
            text = template.render(description=entry['description'],
                                   link=entry['link'])
            r.submit('watchingcongress', entry['title'], text=text)
            house_collection.insert(record)
            logging.info("Created story: {}".format(entry['title']))
        except Exception as e:
            logging.error("Exception occured: {}".format(unicode(e)))
        # Throttle submissions to stay under reddit's rate limit.
        time.sleep(2)
def authenticate(self, username, password, request=None):
    """Authenticate ``username`` against reddit, falling back to the
    locally stored password hash when reddit is unreachable.

    Returns the Django ``User`` on success, ``None`` otherwise.  Unknown
    users are auto-created when they meet the karma/account-age rules.
    """
    try:
        reddit = Reddit(user_agent=USER_AGENT)
        reddit.login(username, password)
        r_user = reddit.user
    except urllib2.URLError:
        # Reddit unreachable: fall back to the local password check below.
        log.warning("Could not reach reddit. Is it down?")
        r_user = None
    # BUG FIX: the logger method is ``info``; ``log.Info`` raised
    # AttributeError on every bad-credential login.  The two previously
    # duplicated handlers are merged.
    except (InvalidUserPass, urllib2.HTTPError):
        log.info(_('User "%s" tried to login without valid credentials')
                 % username)
        return None
    try:
        db_user = User.objects.get(username__iexact=username)
        if not r_user and not db_user.check_password(password):
            return None
        if not db_user.is_active:
            # Instead of deleting users, disable them.
            return None
    except User.DoesNotExist:
        # Rules for joining: enough karma and an old-enough account.
        # BUG FIX: ``created_utc`` is a UTC timestamp, so the age must be
        # measured from utcnow(), not the local-time now().
        if r_user and r_user.comment_karma >= JOIN_MIN_COMMENT_KARMA \
                and r_user.link_karma >= JOIN_MIN_LINK_KARMA \
                and (datetime.utcnow() -
                     datetime.utcfromtimestamp(r_user.created_utc)) \
                >= JOIN_MIN_MEMBER_TIME:
            db_user = User(username=username, is_active=True)
        else:
            return None
    db_user.set_password(password)  # Hash and store for offline logins.
    db_user.backend = self.__class__.__name__
    db_user.save()
    return db_user
def main(subreddit): print "Subreddit :", subreddit rsub = url_data("http://www.reddit.com/r/%s/new/.json?sort=new"%subreddit, json=True) children = rsub['data']['children'] r = Reddit(USERNAME, PASSWORD) session = r.login() f = open('history.txt', 'r') history = f.read() f.close() for child in children: is_self = child['data']['is_self'] thread_id = child['data']['name'] print thread_id if thread_id in history: print "Thread: %s already in history"%thread_id pass else: if not is_self: img_url = child['data']['url'] thread_id = child['data']['name'] repost = karmadecay(img_url) if repost: text = form_comment(repost) r_resp = r.post(session, thread_id, text) if r_resp != None: error = r_resp['json']['errors'] delay = find_digit(error[0][1]) print "waiting: %s seconds" %delay*60 time.sleep(delay*60) r.post(session, thread_id, text) f = open('history.txt', 'a') f.write("\n%s"%thread_id) print text f.close() time.sleep(1) print "Comment Posted:", thread_id else: pass else: pass print "Finished" return
def get_messages():
    """Log in as the flair bot and return its reddit inbox messages."""
    session = Reddit(user_agent='calpoly-flair')
    session.login(user=REDDIT_USERNAME, password=REDDIT_PASSWORD)
    return session.get_inbox().get_messages()
class AnagramBot: OUT_STDOUT = 1 OUT_MAINTAINER = 2 OUT_REPLY = 4 OUT_DEBUG_REPLY = 8 def __init__(self): self._reddit = Reddit(user_agent='anagram_bot') self._anagram = Wordplay() self._maintainer = None self._output = AnagramBot.OUT_STDOUT def setMaintainer(self, username): self._maintainer = username def setOutput(self, outputMode): self._output = outputMode def login(self, username, password): self._reddit.login(username, password) def postPalindrome(self): comments = list(self._fetchComments()) for comment in comments: palindrome = self._anagram.pickRandomPalindrome(comment.body) if palindrome != None: print palindrome else: print "Nope:", comment.body[:70].replace("\n", "") def makeFunny(self): comments = list(self._fetchComments()) attempts = [] anagrams = [] maxAttempts = 20 i = 0 while len(attempts) < 10 and i < maxAttempts: i += 1 comment = random.choice(comments) anagrams = self._attempt(comment.body) anagrams = sorted(anagrams, key=lambda x: -len(x[1])) if len(anagrams) > 0: attempts.append( (comment,anagrams) ) if len(attempts) == 0: return attempts = sorted(attempts, key=lambda x: -len(x[1][0][1])) (comment, anagrams) = attempts[0] anagrams = filter(lambda x: len(x[1]) > 3, anagrams) reply = self._replace(comment.body, anagrams) self._sendFunny(comment, reply) def _sendFunny(self, comment, reply): if self._output & AnagramBot.OUT_STDOUT: self._printReply(comment, reply) if self._output & AnagramBot.OUT_MAINTAINER: self._debugPM(comment.permalink + "\n\n" + reply) if self._output & AnagramBot.OUT_DEBUG_REPLY: self._moderatedReply(comment, reply) if self._output & AnagramBot.OUT_REPLY: comment.reply( reply ) def _debugPM(self, message): if self._maintainer == None: raise ValueError("No maintainer is set! 
Use setMaintainer(str).") self._reddit.compose_message(self._maintainer, "AnagramBot debug", message) def _printReply(self, comment, reply): print comment.body print "===================" print reply def _moderatedReply(self, comment, reply): self._printReply(comment,reply) print comment.permalink response = raw_input("Send this [YES/NO]? ") if response.strip() == "YES": print "Sending reply..." comment.reply(reply) else: print "Aborted." def _replace(self, text, anagrams): for anagram in anagrams: pattern = "([^A-Za-z'0-9])" + anagram[0] + "([^A-Za-z'0-9])" replace = "\\1" + anagram[1] + "\\2" text = re.sub(pattern, replace, text) return text def _attempt(self, text): result = [] noMatches = True for match in re.findall("[A-Za-z'0-9]+", text): for anagram in self._anagram.solveRandomAnagram(match, 5): if anagram != None and anagram != match.upper(): anagram = _matchCase(match, anagram) result.append( (match, anagram) ) return result def _fetchComments(self): return self._reddit.get_all_comments()
class SubRedditStats(object):
    """Collects submission/comment statistics for a subreddit and
    publishes a formatted markdown summary back to reddit."""

    VERSION = '0.2.0'
    # Markdown fragments used to assemble the stats submission.
    post_prefix = 'Subreddit Stats:'
    post_header = '---\n###%s\n'
    post_footer = ('>Generated with [BBoe](/user/bboe)\'s [Subreddit Stats]'
                   '(https://github.com/bboe/subreddit_stats) \n%s'
                   'SRS Marker: %d')
    # Matches the timestamp marker embedded in a previous stats post.
    re_marker = re.compile('SRS Marker: (\d+)')

    @staticmethod
    def _previous_max(submission):
        # Extract the last SRS marker timestamp from a previous stats
        # submission; abort the program when it is missing.
        try:
            val = SubRedditStats.re_marker.findall(submission.selftext)[-1]
            return float(val)
        except (IndexError, TypeError):
            print 'End marker not found in previous submission. Aborting'
            sys.exit(1)

    @staticmethod
    def _permalink(permalink):
        # Shorten a full permalink URL to a relative /comments/ link.
        tokens = permalink.split('/')
        if tokens[8] == '':  # submission
            return '/comments/%s/_/' % (tokens[6])
        else:  # comment
            return '/comments/%s/_/%s?context=1' % (tokens[6], tokens[8])

    @staticmethod
    def _user(user):
        # Markdown link to a user page, escaping markdown underscores.
        return '[%s](/user/%s)' % (user.replace('_', '\_'), user)

    def __init__(self, subreddit, site, verbosity):
        self.reddit = Reddit(str(self), site)
        self.subreddit = self.reddit.get_subreddit(subreddit)
        self.verbosity = verbosity
        self.submissions = []
        self.comments = []
        # author name -> list of submissions / comments by that author
        self.submitters = defaultdict(list)
        self.commenters = defaultdict(list)
        self.min_date = 0
        # Exclude the last three days; young posts' scores are unstable.
        self.max_date = time.time() - DAYS_IN_SECONDS * 3
        self.prev_srs = None
        # Config
        self.reddit.config.comment_limit = -1  # Fetch max comments possible
        self.reddit.config.comment_sort = 'top'

    def __str__(self):
        return 'BBoe\'s SubRedditStats %s' % self.VERSION

    def login(self, user, pswd):
        if self.verbosity > 0:
            print 'Logging in'
        self.reddit.login(user, pswd)

    def msg(self, msg, level, overwrite=False):
        """Write ``msg`` when verbosity is at least ``level``.

        With ``overwrite`` the line ends in a carriage return so the next
        message replaces it (progress-style output).
        """
        if self.verbosity >= level:
            sys.stdout.write(msg)
            if overwrite:
                sys.stdout.write('\r')
                sys.stdout.flush()
            else:
                sys.stdout.write('\n')

    def prev_stat(self, prev_url):
        # Seed min_date from an explicitly provided previous stats post.
        submission = self.reddit.get_submission(prev_url)
        self.min_date = self._previous_max(submission)
        self.prev_srs = prev_url

    def fetch_recent_submissions(self, max_duration, after, exclude_self,
                                 since_last=True):
        '''Fetches recent submissions in subreddit with boundaries.

        Does not include posts within the last three days as their scores
        may not be representative.

        Keyword arguments:
        max_duration -- When set, specifies the number of days to include
        after -- When set, fetch all submission after this submission id.
        exclude_self -- When true, don't include self posts.
        since_last -- When true use info from last submission to determine
                      the stop point
        '''
        if max_duration:
            self.min_date = self.max_date - DAYS_IN_SECONDS * max_duration
        url_data = {'after': after} if after else None
        self.msg('DEBUG: Fetching submissions', 1)
        for submission in self.subreddit.get_new_by_date(limit=None,
                                                         url_data=url_data):
            if submission.created_utc > self.max_date:
                continue
            if submission.created_utc <= self.min_date:
                break
            if (since_last and str(submission.author) == str(self.reddit.user)
                    and submission.title.startswith(self.post_prefix)):
                # Use info in this post to update the min_date
                # And don't include this post
                self.msg('Found previous: %s' % submission.title, 2)
                if self.prev_srs == None:  # Only use the most recent
                    self.min_date = max(self.min_date,
                                        self._previous_max(submission))
                    self.prev_srs = submission.permalink
                continue
            if exclude_self and submission.is_self:
                continue
            self.submissions.append(submission)
        self.msg('DEBUG: Found %d submissions' % len(self.submissions), 1)
        if len(self.submissions) == 0:
            return False
        # Update real min and max dates
        self.submissions.sort(key=lambda x: x.created_utc)
        self.min_date = self.submissions[0].created_utc
        self.max_date = self.submissions[-1].created_utc
        return True

    def fetch_top_submissions(self, top, exclude_self):
        '''Fetches top 1000 submissions by some top value.

        Keyword arguments:
        top -- One of week, month, year, all
        exclude_self -- When true, don't include self posts.
        '''
        if top not in ('day', 'week', 'month', 'year', 'all'):
            raise TypeError('%r is not a valid top value' % top)
        self.msg('DEBUG: Fetching submissions', 1)
        url_data = {'t': top}
        for submission in self.subreddit.get_top(limit=None,
                                                 url_data=url_data):
            if exclude_self and submission.is_self:
                continue
            self.submissions.append(submission)
        self.msg('DEBUG: Found %d submissions' % len(self.submissions), 1)
        if len(self.submissions) == 0:
            return False
        # Update real min and max dates
        self.submissions.sort(key=lambda x: x.created_utc)
        self.min_date = self.submissions[0].created_utc
        self.max_date = self.submissions[-1].created_utc
        return True

    def process_submitters(self):
        # Group the fetched submissions by author name.
        self.msg('DEBUG: Processing Submitters', 1)
        for submission in self.submissions:
            if submission.author:
                self.submitters[str(submission.author)].append(submission)

    def process_commenters(self):
        # Fetch all comments for each submission and group them by author.
        num = len(self.submissions)
        self.msg('DEBUG: Processing Commenters on %d submissions' % num, 1)
        for i, submission in enumerate(self.submissions):
            self.msg('%d/%d submissions' % (i + 1, num), 2, overwrite=True)
            if submission.num_comments == 0:
                continue
            try:
                self.comments.extend(submission.all_comments_flat)
            except Exception as exception:
                print 'Exception fetching comments on %r: %s' % (
                    submission.content_id, str(exception))
            # Include comments whose parents could not be resolved.
            for orphans in submission._orphaned.values():
                self.comments.extend(orphans)
        for comment in self.comments:
            if comment.author:
                self.commenters[str(comment.author)].append(comment)

    def basic_stats(self):
        """Return a markdown table of totals, unique users, and votes."""
        sub_ups = sum(x.ups for x in self.submissions)
        sub_downs = sum(x.downs for x in self.submissions)
        comm_ups = sum(x.ups for x in self.comments)
        comm_downs = sum(x.downs for x in self.comments)
        sub_up_perc = sub_ups * 100 / (sub_ups + sub_downs)
        comm_up_perc = comm_ups * 100 / (comm_ups + comm_downs)
        values = [('Total', len(self.submissions), '',
                   len(self.comments), ''),
                  ('Unique Redditors', len(self.submitters), '',
                   len(self.commenters), ''),
                  ('Upvotes', sub_ups, '%d%%' % sub_up_perc,
                   comm_ups, '%d%%' % comm_up_perc),
                  ('Downvotes', sub_downs, '%d%%' % (100 - sub_up_perc),
                   comm_downs, '%d%%' % (100 - comm_up_perc))]
        retval = '||Submissions|%|Comments|%|\n:-:|--:|--:|--:|--:\n'
        for quad in values:
            retval += '__%s__|%d|%s|%d|%s\n' % quad
        return '%s\n' % retval

    def top_submitters(self, num, num_submissions):
        """Return markdown listing the top ``num`` submitters and up to
        ``num_submissions`` of each one's best submissions."""
        num = min(num, len(self.submitters))
        if num <= 0:
            return ''
        # Rank by total score, ties broken by submission count.
        top_submitters = sorted(self.submitters.items(), reverse=True,
                                key=lambda x: (sum(y.score for y in x[1]),
                                               len(x[1])))[:num]
        retval = self.post_header % 'Top Submitters\' Top Submissions'
        for (author, submissions) in top_submitters:
            retval += '0. %d pts, %d submissions: %s\n' % (
                sum(x.score for x in submissions), len(submissions),
                self._user(author))
            for sub in sorted(submissions, reverse=True,
                              key=lambda x: x.score)[:num_submissions]:
                title = sub.title.replace('\n', ' ').strip()
                if sub.permalink != sub.url:  # link post: link the URL
                    retval += ' 0. [%s](%s)' % (title, sub.url)
                else:
                    retval += ' 0. %s' % title
                retval += ' (%d pts, [%d comments](%s))\n' % (
                    sub.score, sub.num_comments,
                    self._permalink(sub.permalink))
            retval += '\n'
        return retval

    def top_commenters(self, num):
        """Return markdown listing the top ``num`` commenters by score."""
        score = lambda x: x.ups - x.downs
        num = min(num, len(self.commenters))
        if num <= 0:
            return ''
        top_commenters = sorted(self.commenters.items(), reverse=True,
                                key=lambda x: (sum(score(y) for y in x[1]),
                                               len(x[1])))[:num]
        retval = self.post_header % 'Top Commenters'
        for author, comments in top_commenters:
            retval += '0. %s (%d pts, %d comments)\n' % (
                self._user(author), sum(score(x) for x in comments),
                len(comments))
        return '%s\n' % retval

    def top_submissions(self, num):
        """Return markdown listing the top ``num`` submissions by score."""
        num = min(num, len(self.submissions))
        if num <= 0:
            return ''
        top_submissions = sorted(self.submissions, reverse=True,
                                 key=lambda x: x.score)[:num]
        retval = self.post_header % 'Top Submissions'
        for sub in top_submissions:
            author = str(sub.author)
            title = sub.title.replace('\n', ' ').strip()
            if sub.permalink != sub.url:  # link post: link the URL
                retval += '0. [%s](%s)' % (title, sub.url)
            else:
                retval += '0. %s' % title
            retval += ' by %s (%d pts, [%d comments](%s))\n' % (
                self._user(author), sub.score, sub.num_comments,
                self._permalink(sub.permalink))
        return '%s\n' % retval

    def top_comments(self, num):
        """Return markdown listing the top ``num`` comments by score."""
        score = lambda x: x.ups - x.downs
        num = min(num, len(self.comments))
        if num <= 0:
            return ''
        top_comments = sorted(self.comments, reverse=True, key=score)[:num]
        retval = self.post_header % 'Top Comments'
        for comment in top_comments:
            author = str(comment.author)
            title = comment.submission.title.replace('\n', ' ').strip()
            retval += ('0. %d pts: %s\'s [comment](%s) in %s\n'
                       % (score(comment), self._user(author),
                          self._permalink(comment.permalink), title))
        return '%s\n' % retval

    def publish_results(self, subreddit, submitters, commenters, submissions,
                        comments, top, debug=False):
        """Assemble the full stats body and submit it to ``subreddit``,
        asking for confirmation first; with ``debug`` just print it."""
        def timef(timestamp):
            # NOTE(review): formats in the machine's local time with a
            # hard-coded 'PDT' label -- confirm the host runs in PDT.
            dtime = datetime.fromtimestamp(timestamp)
            return dtime.strftime('%Y-%m-%d %H:%M PDT')
        title = '%s %s %ssubmissions from %s to %s' % (
            self.post_prefix, str(self.subreddit), 'top ' if top else '',
            timef(self.min_date), timef(self.max_date))
        if self.prev_srs:
            prev = '[Previous Stat](%s) \n' % self._permalink(self.prev_srs)
        else:
            prev = ''
        basic = self.basic_stats()
        t_commenters = self.top_commenters(commenters)
        t_submissions = self.top_submissions(submissions)
        t_comments = self.top_comments(comments)
        footer = self.post_footer % (prev, self.max_date)
        body = ''
        num_submissions = 10
        # Shrink the submitters section until the body fits reddit's limit.
        while body == '' or len(body) > MAX_BODY_SIZE and num_submissions > 2:
            t_submitters = self.top_submitters(submitters, num_submissions)
            body = (basic + t_submitters + t_commenters + t_submissions +
                    t_comments + footer)
            num_submissions -= 1
        if len(body) > MAX_BODY_SIZE:
            print 'The resulting message is too big. Not submitting.'
            debug = True
        if not debug:
            msg = ('You are about to submit to subreddit %s as %s.\n'
                   'Are you sure? yes/[no]: ' % (subreddit,
                                                 str(self.reddit.user)))
            if raw_input(msg).lower() not in ['y', 'yes']:
                print 'Submission aborted'
            else:
                try:
                    self.reddit.submit(subreddit, title, text=body)
                    return
                except Exception, error:
                    print 'The submission failed:', error
        # We made it here either to debug=True or an error.
        print title
        print body
class ModUtils(object): VERSION = '0.1.dev' def __init__(self, subreddit, site=None, verbose=None): self.reddit = Reddit(str(self), site) self.sub = self.reddit.get_subreddit(subreddit) self.verbose = verbose self._current_flair = None def __str__(self): return 'BBoe\'s ModUtils %s' % self.VERSION def add_users(self, category): mapping = {'banned': 'ban', 'contributors': 'make_contributor', 'moderators': 'make_moderator'} if category not in mapping: print '%r is not a valid option for --add' % category return func = getattr(self.sub, mapping[category]) print 'Enter user names (any separation should suffice):' data = sys.stdin.read().strip() for name in re.split('[^A-Za-z_]+', data): func(name) print 'Added %r to %s' % (name, category) def current_flair(self): if self._current_flair is None: self._current_flair = [] if self.verbose: print 'Fetching flair list for %s' % self.sub for flair in self.sub.flair_list(): self._current_flair.append(flair) yield flair else: for item in self._current_flair: yield item def flair_template_sync(self, editable, limit, # pylint: disable-msg=R0912 static, sort, use_css, use_text): # Parameter verification if not use_text and not use_css: raise Exception('At least one of use_text or use_css must be True') sorts = ('alpha', 'size') if sort not in sorts: raise Exception('Sort must be one of: %s' % ', '.join(sorts)) # Build current flair list along with static values if static: counter = dict((x, limit) for x in static) else: counter = {} if self.verbose: sys.stdout.write('Retrieving current flair') sys.stdout.flush() for flair in self.current_flair(): if self.verbose: sys.stdout.write('.') sys.stdout.flush() if use_text and use_css: key = (flair['flair_text'], flair['flair_css_class']) elif use_text: key = flair['flair_text'] else: key = flair['flair_css_class'] if key in counter: counter[key] += 1 else: counter[key] = 1 if self.verbose: print # Sort flair list items according to the specified sort if sort == 'alpha': items = 
sorted(counter.items()) else: items = sorted(counter.items(), key=lambda x: x[1], reverse=True) # Clear current templates and store flair according to the sort if self.verbose: print 'Clearing current flair templates' self.sub.clear_flair_templates() for key, count in items: if not key or count < limit: continue if use_text and use_css: text, css = key elif use_text: text, css = key, '' else: text, css = '', key if self.verbose: print 'Adding template: text: "%s" css: "%s"' % (text, css) self.sub.add_flair_template(text, css, editable) def login(self, user, pswd): if self.verbose: print 'Logging in' self.reddit.login(user, pswd) if self.verbose: print 'Fetching moderator list for %s' % self.sub if str(self.sub).lower() not in [str(x).lower() for x in self.reddit.user.my_moderation()]: raise Exception('You do not moderate %s' % self.sub) def message(self, category, subject, msg_file): users = getattr(self.sub, 'get_%s' % category)() if not users: print 'There are no %s on %s.' % (category, str(self.sub)) return if msg_file: try: msg = open(msg_file).read() except IOError, error: print str(error) return else:
def test_mark_as_read(self):
    """Marking a message read removes it from the unread listing."""
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    # pylint: disable-msg=E1101
    message = other.user.get_unread(limit=1).next()
    message.mark_as_read()
    self.assertFalse(message in list(other.user.get_unread(limit=5)))
class Phoebe(Thread):
    """Background playback controller: manages a reddit-sourced playlist,
    an mplayer backend thread, and a download (buffering) thread."""

    def __init__(self, config_dir, logger=Null()):
        self.logger = logger
        self.log = self.logger.getLogger('phoebe.Phoebe')
        self.log.debug('Phoebe Thread initialized')
        self.log.debug('config_dir: %s' % config_dir)
        Thread.__init__(self)
        self.idx = 0  # index of the current playlist item
        self.playlist = []
        self.playing = False
        self.buffering = False
        if not path.isdir(config_dir):
            mkdir(config_dir)
        self.config_dir = config_dir
        self.log.debug('Loading history file')
        self.history = LocalStorage(path.join(config_dir, 'history.json'),
                                    logger=self.logger)
        self.log.debug('Loading settings file')
        self.settings = LocalStorage(path.join(config_dir, 'settings.json'),
                                     logger=self.logger)
        self.reddit = Reddit(logger=self.logger)
        if ('reddit_username' in self.settings.keys()) \
                and ('reddit_password' in self.settings.keys()):
            self.reddit.login(self.settings['reddit_username'],
                              self.settings['reddit_password'])
        if 'download_dir' not in self.settings.keys():
            self.settings['download_dir'] = path.join(path.expanduser('~'),
                                                      'Downloads', 'phoebe')
        if not path.isdir(self.settings['download_dir']):
            mkdir(self.settings['download_dir'])
        self.mpq = Queue()  # command queue for the mplayer thread
        # TODO: vlc backend support. There should also be an auto-detected
        # fallback
        if 'backend' not in self.settings.keys():
            self.settings['backend'] = 'mplayer'
        if self.settings['backend'] == 'mplayer':
            self.mp = MPlayerThread(queue=self.mpq, logger=self.logger)
            self.mp.daemon = True
            self.mp.start()
        self.playtime = 0
        self.dlq = Queue()  # work queue for the download thread
        self.dl = DLThread(self.dlq, logger=self.logger)
        self.dl.daemon = True
        self.dl.start()

    @property
    def has_next(self):
        """True when another playlist entry follows the current one."""
        return len(self.playlist) > self.idx + 1

    def run(self):
        """Poll playback state and auto-advance at end of file/playlist."""
        self.log.debug('Running Phoebe thread')
        while True:
            sleep(1)
            if self.playing and self.has_next and (
                    self.mp.properties['time_left'] == 1) and \
                    not self.buffering:
                self.log.debug('End of file. Sleep 1')
                sleep(1)
                self.next()
            elif self.playing and not self.has_next and not (
                    # BUG FIX: was ``self.mp_properties`` (AttributeError);
                    # the mplayer properties live on ``self.mp``.
                    self.mp.properties['filename'] or self.buffering):
                self.log.debug('End of playlist')
                self.playing = False

    def shuffle(self):
        """Shuffle the playlist in place, re-pointing self.idx at the
        currently playing item."""
        self.log.debug('Shuffling playlist')
        current_id = self.playlist[self.idx]['id']
        random.shuffle(self.playlist)
        idx = 0
        for item in self.playlist:
            if item['id'] == current_id:
                # BUG FIX: a stray ``self.idx = item`` assigned the dict
                # itself here before being overwritten below.
                break
            idx += 1
        self.idx = idx

    def download(self, idx):
        """Queue playlist item ``idx`` for download unless already cached."""
        f_path = path.join(self.settings['download_dir'],
                           self.playlist[idx]['id'])
        if path.isfile(f_path):
            self.log.debug('Playlist item exists. Skipping download: %s' % idx)
        else:
            self.log.debug('Putting playlist item in download queue: %s' % idx)
            self.dlq.put({
                'id': self.playlist[idx]['id'],
                'url': self.playlist[idx]['url'].replace('&amp;', '&'),
                'download_dir': self.settings['download_dir']
            })

    def play(self, idx):
        """Play playlist item ``idx``, buffering it first if necessary."""
        self.log.debug('Playing playlist item: %s' % idx)
        self.log.debug('Stopping first, before playing: %s' % idx)
        self.stop()
        self.playing = True
        # TODO: handle IndexError
        f_path = path.join(self.settings['download_dir'],
                           self.playlist[idx]['id'])
        if path.isfile(f_path):
            self.log.debug('File exists. Loading file to mplayer process: %s'
                           % idx)
            self.idx = idx
            self.buffering = False
            self.playtime = time()
            self.mpq.put('loadfile %s' % f_path)
            self.mpq.put('get_property filename')
            # Preserve any existing vote when rewriting the history entry.
            if self.playlist[idx]['id'] in self.history.keys():
                voted = self.history[self.playlist[idx]['id']]['voted']
            else:
                voted = 0
            self.history[self.playlist[idx]['id']] = {
                'playtime': self.playtime,
                'voted': voted,
                'subreddit': self.playlist[idx]['subreddit'],
            }
            # Prefetch the next track while this one plays.
            self.download(idx + 1)
        else:
            self.log.debug(
                'File does not exist. Need to buffer/download first: %s' % idx)
            if self.playlist[idx]['id'] in self.dl.downloads.keys():
                status = self.dl.downloads[self.playlist[idx]['id']]
                # BUG FIX: compare strings with ``!=`` rather than the
                # identity operator ``is not``.
                if (status['process'].poll() is not None) and \
                        (status['status'] != 'complete'):
                    self.log.info(
                        'Previous download attempt failed. Skipping: %s' % idx)
                    self.log.info('Error from download process: %s'
                                  % status['error'])
                    self.playlist[idx]['filter'] = 'download_failed'
                    self.next()
                    return None
            print("Buffering...")
            self.buffering = True
            self.download(idx)
            # Block until the download finishes or fails.
            while True:
                sleep(1)
                dlid = self.playlist[idx]['id']
                status = self.dl.downloads[dlid]
                if status['status'] == 'downloading':
                    print(status['percent'])
                if (status['status'] == 'complete') or \
                        (status['status'] == 'idle'):
                    break
                if (status['process'].poll() is not None) and \
                        (status['status'] != 'complete'):
                    self.log.info('Download attempt failed. Skipping: %s'
                                  % idx)
                    self.log.info('Error from download process: %s'
                                  % status['error'])
                    self.playlist[idx]['filter'] = 'download_failed'
                    self.next()
                    return None
            self.log.debug(
                'Download finished. Running play function again: %s' % idx)
            self.play(idx)

    def next(self):
        """Advance to the next playlist item that is not filtered out."""
        self.log.debug('Next')
        if self.has_next:
            for idx in range(self.idx + 1, len(self.playlist)):
                self.idx = idx
                if not 'filter' in self.playlist[idx].keys():
                    self.play(idx)
                    break
                else:
                    reason = self.playlist[idx]['filter']
                    self.log.debug(
                        'Skipping playlist item %s due to filter: %s'
                        % (idx, reason))

    def previous(self):
        """Restart the current track, or go back one if near its start."""
        if time() - self.playtime > 10:
            self.log.debug('Seeking back to back to beginning')
            self.mpq.put('seek 0 2')
            self.playtime = time()
        elif self.idx != 0:
            self.log.debug('Previous')
            self.play(self.idx - 1)

    def pause(self):
        self.log.debug('Pause')
        self.mpq.put('pause')

    def stop(self):
        self.log.debug('Stop')
        self.mpq.put('stop')
        self.playing = False

    def upvote(self):
        """Record an upvote locally and on reddit when logged in."""
        if self.playlist[self.idx]['id'] in self.history.keys():
            self.log.debug('upvoting')
            hist = self.history[self.playlist[self.idx]['id']]
            hist['voted'] = 1
            self.history[self.playlist[self.idx]['id']] = hist
            if self.reddit.logged_in:
                self.reddit.upvote(self.playlist[self.idx]['id'])

    def downvote(self):
        """Record a downvote and skip to the next track."""
        if self.playlist[self.idx]['id'] in self.history.keys():
            self.log.debug('downvoting')
            hist = self.history[self.playlist[self.idx]['id']]
            hist['voted'] = -1
            self.history[self.playlist[self.idx]['id']] = hist
            if self.reddit.logged_in:
                self.reddit.downvote(self.playlist[self.idx]['id'])
            # NOTE(review): next() placement inferred from the flattened
            # source -- confirm it should only fire for tracked items.
            self.next()
def test_mark_as_read(self):
    """Marking a message read removes it from the unread listing."""
    other = Reddit('reddit_api test suite')
    other.login('PyApiTestUser3', '1111')
    # pylint: disable-msg=E1101
    message = other.user.get_unread(limit=1).next()
    message.mark_as_read()
    self.assertFalse(message in list(other.user.get_unread(limit=5)))
def test_moderator_requried(self):  # (sic) name kept: it is the test's id
    """Requesting subreddit settings as a non-mod raises
    ModeratorRequired."""
    other = Reddit(USER_AGENT)
    other.login('PyApiTestUser3', '1111')
    self.assertRaises(errors.ModeratorRequired, other.get_settings, self.sr)
def post_to_reddit(self):
    """Submit this item's URL to /r/woot as the bot account."""
    session = Reddit(user_agent='wootbot/1.0')
    session.login(bot_username, bot_password)
    session.submit('woot', str(self), url=self.url)