Example #1
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(
        is_self=False,
        title=title,
        content=url,
        author=account,
        sr=subreddit,
        ip=ip,
        spam=False,
    )

    try:
        # force the thumbnail before scraper_q gets in the mix
        image_data = urllib.urlopen(thumb_url).read()
        force_thumbnail(link, image_data)
    except Exception:
        # the thumbnail fetch is best-effort; don't fail the submission over it
        pass

    # various backend processing things
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()
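All of these examples end by blocking on worker.join(), which waits until reddit's in-process amqp queue has been drained before the script carries on. A minimal, hypothetical invocation of the helper above; the user name, subreddit, and URLs are placeholders and assume a working local reddit install where that account and subreddit already exist:

# hypothetical call; every argument here is a placeholder
submit_link(
    user='some_admin',
    subreddit='pics',
    title='a test submission',
    url='http://example.com/article',
    thumb_url='http://example.com/thumb.png',
)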
Example #2
File: health.py  Projects: aguamar/reddit, XieConnect/reddit
        def _shutdown():
            #give busy threads 30 seconds to finish up
            for s in xrange(30):
                busy = thread_pool.track_threads()['busy']
                if not busy:
                    break
                time.sleep(1)

            thread_pool.shutdown()
            worker.join()
            os._exit(3)
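_shutdown polls the thread pool for up to 30 seconds, waiting for busy threads to finish, then shuts the pool down, drains the amqp worker, and hard-exits the process with status 3. How health.py actually triggers this handler is not shown on this page; a minimal sketch, assuming it were wired to a POSIX signal:

import signal

def _on_term(signum, frame):
    # hypothetical wiring; the real trigger in health.py may differ
    _shutdown()

signal.signal(signal.SIGTERM, _on_term)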
Example #3
def main():
    now = datetime.datetime.now(TIMEZONE)

    # calculate and store the new day's gold goal
    determine_gold_goal(now.date())

    # post a new thread if we met our revenue goal
    yesterday = (now - datetime.timedelta(days=1)).date()
    post_if_goal_reached(yesterday)

    # look at old (now complete) threads if any
    activate_requested_names(but_not=yesterday)

    # wait until all our amqp / permacache changes are flushed from the
    # in-process queue.
    worker.join()
    g.reset_caches()

    # update the sidebar with a list of names
    update_sidebar()
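main() reads like a daily maintenance job: it stores the new day's gold goal, posts a thread if yesterday's revenue goal was met, activates names from completed threads, and only after worker.join() has flushed the pending amqp / permacache work and g.reset_caches() has cleared local caches does it rewrite the sidebar. A minimal, hypothetical entry point for running it as a standalone script (the real wrapper is not shown here):

if __name__ == '__main__':
    main()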
Example #4
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(title, url, account, subreddit, ip, spam=False)

    # force the thumbnail before scraper_q gets in the mix
    image_data = urllib.urlopen(thumb_url).read()
    force_thumbnail(link, image_data)

    # various backend processing things
    queries.queue_vote(account, link, UPVOTE, ip)
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()