Example #1
    def process_queue(self, worker_id):
        """Continuously processes tasks on the queue."""

        # Create a new logs instance (with its own httplib2 instance) so that
        # there is a separate one for each thread.
        logs = Logs("twitter-listener-worker-%s" % worker_id,
                    to_cloud=self.logs_to_cloud)

        logs.debug("Started worker thread: %s" % worker_id)
        while not self.stop_event.is_set():
            try:
                data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S)
                start_time = time()
                self.handle_data(logs, data)
                self.queue.task_done()
                end_time = time()
                qsize = self.queue.qsize()
                logs.debug("Worker %s took %.f ms with %d tasks remaining." %
                           (worker_id, end_time - start_time, qsize))
            except Empty:
                # Timed out on an empty queue.
                continue
            except Exception:
                # The main loop doesn't catch and report exceptions from
                # background threads, so do that here.
                logs.catch()
        logs.debug("Stopped worker thread: %s" % worker_id)
Example #2
    def process_queue(self, worker_id):
        """Continuously processes tasks on the queue."""

        # Create a new logs instance (with its own httplib2 instance) so that
        # there is a separate one for each thread.
        logs = Logs("twitter-listener-worker-%s" % worker_id,
                    to_cloud=self.logs_to_cloud)

        logs.debug("Started worker thread: %s" % worker_id)
        while not self.stop_event.is_set():
            # The main loop doesn't catch and report exceptions from background
            # threads, so do that here.
            try:
                size = self.queue.qsize()
                logs.debug("Processing queue of size: %s" % size)
                data = self.queue.get(block=True)
                self.handle_data(logs, data)
                self.queue.task_done()
            except BaseException as exception:
                logs.catch(exception)
        logs.debug("Stopped worker thread: %s" % worker_id)
Example #3
class Main:
    """A wrapper for the main application logic and retry loop."""

    def __init__(self):
        self.logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD)
        self.twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)

    def twitter_callback(self, tweet):
        """Analyzes Trump tweets, trades stocks, and tweets about it."""

        # Initialize the Analysis, Logs, Trading, and Twitter instances inside
        # the callback to create separate httplib2 instances per thread.
        analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
        logs = Logs(name="main-callback", to_cloud=LOGS_TO_CLOUD)

        # Analyze the tweet.
        companies = analysis.find_companies(tweet)
        logs.info("Using companies: %s" % companies)
        if not companies:
            return

        # Trade stocks.
        trading = Trading(logs_to_cloud=LOGS_TO_CLOUD)
        # trading.make_trades(companies)

        # Tweet about it.
        twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
        twitter.tweet(companies, tweet)

    def run_session(self):
        """Runs a single streaming session. Logs and cleans up after
        exceptions.
        """

        self.logs.info("Starting new session.")
        try:
            self.twitter.start_streaming(self.twitter_callback)
        except BaseException:
            self.logs.catch()
        finally:
            self.twitter.stop_streaming()
            self.logs.info("Ending session.")

    def backoff(self, tries):
        """Sleeps an exponential number of seconds based on the number of
        tries.
        """

        delay = BACKOFF_STEP_S * pow(2, tries)
        self.logs.warn("Waiting for %.1f seconds." % delay)
        sleep(delay)

    def run(self):
        """Runs the main retry loop with exponential backoff."""

        tries = 0
        while True:

            # The session blocks until an error occurs.
            self.run_session()

            # Remember the first time a backoff sequence starts.
            now = datetime.now()
            if tries == 0:
                self.logs.debug("Starting first backoff sequence.")
                backoff_start = now

            # Reset the backoff sequence if the last error was long ago.
            if (now - backoff_start).total_seconds() > BACKOFF_RESET_S:
                self.logs.debug("Starting new backoff sequence.")
                tries = 0
                backoff_start = now

            # Give up after the maximum number of tries.
            if tries >= MAX_TRIES:
                self.logs.warn("Exceeded maximum retry count.")
                break

            # Wait according to the progression of the backoff sequence.
            self.backoff(tries)

            # Increment the number of tries for the next error.
            tries += 1
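The backoff method sleeps BACKOFF_STEP_S * 2 ** tries seconds, and run resets tries to zero once more than BACKOFF_RESET_S seconds have passed since the sequence started. The standalone sketch below shows just the schedule; the constant values here are chosen purely for illustration, not taken from the original module.

# Illustrative values only; the original module defines its own constants.
BACKOFF_STEP_S = 0.1
MAX_TRIES = 12

for tries in range(MAX_TRIES):
    delay = BACKOFF_STEP_S * 2 ** tries
    print("Try %d: wait %.1f seconds" % (tries, delay))
# Try 0 waits 0.1 s, try 1 waits 0.2 s, ..., try 11 waits 204.8 s.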
Example #4
class TwitterListener(StreamListener):
    """A listener class for handling streaming Twitter data."""

    def __init__(self, callback, logs_to_cloud):
        self.logs_to_cloud = logs_to_cloud
        self.logs = Logs(name="twitter-listener", to_cloud=self.logs_to_cloud)
        self.callback = callback
        self.error_status = None
        self.start_queue()

    def start_queue(self):
        """Creates a queue and starts the worker threads."""

        self.queue = Queue()
        self.stop_event = Event()
        self.logs.debug("Starting %s worker threads." % NUM_THREADS)
        self.workers = []
        for worker_id in range(NUM_THREADS):
            worker = Thread(target=self.process_queue, args=[worker_id])
            worker.daemon = True
            worker.start()
            self.workers.append(worker)

    def stop_queue(self):
        """Shuts down the queue and worker threads."""

        # First stop the queue.
        if self.queue:
            self.logs.debug("Stopping queue.")
            self.queue.join()
        else:
            self.logs.warn("No queue to stop.")

        # Then stop the worker threads.
        if self.workers:
            self.logs.debug("Stopping %d worker threads." % len(self.workers))
            self.stop_event.set()
            for worker in self.workers:
                # Block until the thread terminates.
                worker.join()
        else:
            self.logs.warn("No worker threads to stop.")

    def process_queue(self, worker_id):
        """Continuously processes tasks on the queue."""

        # Create a new logs instance (with its own httplib2 instance) so that
        # there is a separate one for each thread.
        logs = Logs("twitter-listener-worker-%s" % worker_id,
                    to_cloud=self.logs_to_cloud)

        logs.debug("Started worker thread: %s" % worker_id)
        while not self.stop_event.is_set():
            try:
                data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S)
                start_time = time()
                self.handle_data(logs, data)
                self.queue.task_done()
                end_time = time()
                qsize = self.queue.qsize()
                logs.debug("Worker %s took %.f ms with %d tasks remaining." %
                           (worker_id, end_time - start_time, qsize))
            except Empty:
                # Timed out on an empty queue.
                continue
            except Exception:
                # The main loop doesn't catch and report exceptions from
                # background threads, so do that here.
                logs.catch()
        logs.debug("Stopped worker thread: %s" % worker_id)
Example #5
def twitter_callback(text, link):
    """Analyzes Trump tweets, makes stock trades, and sends tweet alerts."""

    # Initialize these here to create separate httplib2 instances per thread.
    analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
    trading = Trading(logs_to_cloud=LOGS_TO_CLOUD)

    companies = analysis.find_companies(text)
    logs.debug("Using companies: %s" % companies)
    if companies:
        trading.make_trades(companies)
        twitter.tweet(companies, link)


if __name__ == "__main__":
    logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD)

    # Restart in a loop if there are any errors so we stay up.
    while True:
        logs.info("Starting new session.")

        twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
        try:
            twitter.start_streaming(twitter_callback)
        except BaseException as exception:
            logs.catch(exception)
        finally:
            twitter.stop_streaming()
            logs.info("Ending session.")
Example #6
def twitter_callback(tweet):
    """Analyzes Trump tweets, makes stock trades, and sends tweet alerts."""

    # Initialize these here to create separate httplib2 instances per thread.
    analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
    trading = Trading(logs_to_cloud=LOGS_TO_CLOUD)

    companies = analysis.find_companies(tweet)
    logs.debug("Using companies: %s" % companies)
    if companies:
        trading.make_trades(companies)
        twitter.tweet(companies, tweet)


if __name__ == "__main__":
    logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD)

    # Restart in a loop if there are any errors so we stay up.
    while True:
        logs.info("Starting new session.")

        twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
        try:
            twitter.start_streaming(twitter_callback)
        except BaseException as exception:
            logs.catch(exception)
        finally:
            twitter.stop_streaming()
            logs.info("Ending session.")
Example #7
def twitter_callback(tweet):
    """Analyzes Trump tweets, makes stock trades, and sends tweet alerts."""

    # Initialize these here to create separate httplib2 instances per thread.
    analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
    trading = Trading(logs_to_cloud=LOGS_TO_CLOUD)

    companies = analysis.find_companies(tweet)
    logs.debug("Using companies: %s" % companies)
    if companies:
        trading.make_trades(companies)
        twitter.tweet(companies, tweet)


if __name__ == "__main__":
    logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD)

    # Restart in a loop if there are any errors so we stay up.
    while True:
        logs.info("Starting new session.")

        twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
        try:
            twitter.start_streaming(twitter_callback)
        except Exception:
            logs.catch()
        finally:
            twitter.stop_streaming()
            logs.info("Ending session.")
Example #8
class Main:
    """A wrapper for the main application logic and retry loop."""

    def __init__(self):
        self.logs = Logs(name="main", to_cloud=LOGS_TO_CLOUD)
        self.twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)

    def twitter_callback(self, tweet):
        """Analyzes Trump tweets, trades stocks, and tweets about it."""

        # Initialize the Analysis, Logs, Trading, and Twitter instances inside
        # the callback to create separate httplib2 instances per thread.
        analysis = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
        logs = Logs(name="main-callback", to_cloud=LOGS_TO_CLOUD)

        # Analyze the tweet.
        companies = analysis.find_companies(tweet)
        logs.info("Using companies: %s" % companies)
        if not companies:
            return

        # Trade stocks.
        trading = Trading(logs_to_cloud=LOGS_TO_CLOUD)
        trading.make_trades(companies)

        # Tweet about it.
        twitter = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
        twitter.tweet(companies, tweet)

    def run_session(self):
        """Runs a single streaming session. Logs and cleans up after
        exceptions.
        """

        self.logs.info("Starting new session.")
        try:
            self.twitter.start_streaming(self.twitter_callback)
        except BaseException:
            self.logs.catch()
        finally:
            self.twitter.stop_streaming()
            self.logs.info("Ending session.")

    def backoff(self, tries):
        """Sleeps an exponential number of seconds based on the number of
        tries.
        """

        delay = BACKOFF_STEP_S * pow(2, tries)
        self.logs.warn("Waiting for %.1f seconds." % delay)
        sleep(delay)

    def run(self):
        """Runs the main retry loop with exponential backoff."""

        tries = 0
        while True:

            # The session blocks until an error occurs.
            self.run_session()

            # Remember the first time a backoff sequence starts.
            now = datetime.now()
            if tries == 0:
                self.logs.debug("Starting first backoff sequence.")
                backoff_start = now

            # Reset the backoff sequence if the last error was long ago.
            if (now - backoff_start).total_seconds() > BACKOFF_RESET_S:
                self.logs.debug("Starting new backoff sequence.")
                tries = 0
                backoff_start = now

            # Give up after the maximum number of tries.
            if tries >= MAX_TRIES:
                self.logs.warn("Exceeded maximum retry count.")
                break

            # Wait according to the progression of the backoff sequence.
            self.backoff(tries)

            # Increment the number of tries for the next error.
            tries += 1
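For completeness, a hypothetical entry point for the Main class above; the module's actual __main__ block is not included in this excerpt.

if __name__ == "__main__":
    monitor = Main()  # Assumes the Main class defined above is in scope.
    monitor.run()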
Example #9
class Main:
    """A wrapper for the main application logic and retry loop."""

    def __init__(self):
        self.logs = Logs(name="main")
        self.twitter = Twitter()


    def toList(self, tweet, alltweets):
        """Converts a tweet to a dict and appends it to alltweets."""
        item = {}
        cleaner_source = re.search(r">.+<", tweet._json["source"]).group(0)
        clean_source = cleaner_source[1:-1]
        item["source"] = clean_source
        item["id_str"] = tweet._json["id_str"]
        item["text"] = tweet._json["full_text"]
        item["created_at"] = tweet._json["created_at"]
        item["retweet_count"] = tweet._json["retweet_count"]
        item["in_reply_to_user_id_str"] = tweet._json["in_reply_to_user_id_str"]
        item["favorite_count"] = tweet._json["favorite_count"]
        item["is_retweet"] = tweet._json["retweeted"]
        alltweets.append(item)


    def writeToFile(self, alltweets, screen_name):
        """Writes the collected tweets to a JSON file."""
        with open("%s_tweets.json" % screen_name, mode="w", newline="",
                  encoding="utf-8") as f:
            json.dump(alltweets, f)

    
    def writeToDB(self, alltweets, screen_name):
        """Writes the collected tweets to MongoDB, skipping duplicates."""
        if screen_name == "realDonaldTrump":
            with MongoClient("localhost", 27017) as connection:
                db = connection["charon"]
                collection = db["trump_tweets"]
                collection.create_index("id_str", unique=True)
                for item in alltweets:
                    try:
                        # insert_many aborts on the first duplicate key, so
                        # insert documents one at a time instead.
                        collection.insert_one(item)
                    except Exception:
                        # Duplicate key: the tweet is already stored.
                        continue
                print("Total trump tweets in DB =",
                      collection.count_documents({}))


    def twitter_callback(self, tweet):
        """Analyzes Trump tweets, trades stocks, and tweets about it."""

        # Save the tweet.
        alltweets = []
        screen_name = "realDonaldTrump"
        self.toList(tweet, alltweets)
        self.writeToFile(alltweets, screen_name)
        self.writeToDB(alltweets, screen_name)

        # Initialize the Analysis, Logs, Trading, and Twitter instances inside
        # the callback to create separate httplib2 instances per thread.
        analysis = Analysis()
        logs = Logs(name="main-callback")
        self.logs.info("twitter_callback starts")

        # Analyze the tweet.
        companies = analysis.find_companies(tweet)
        logs.info("Using companies: %s" % companies)
        if not companies:
            return

        # Trade stocks.
        # trading = Trading()
        # trading.make_trades(companies)

        # Tweet about it.
        # twitter = Twitter()
        # twitter.tweet(companies, tweet)

    def run_session(self):
        """Runs a single streaming session. Logs and cleans up after
        exceptions.
        """

        self.logs.info("Starting new session.")
        try:
            self.twitter.start_streaming(self.twitter_callback)
        except BaseException:
            self.logs.catch()
        finally:
            self.twitter.stop_streaming()
            self.logs.info("Ending session.")

    def backoff(self, tries):
        """Sleeps an exponential number of seconds based on the number of
        tries.
        """

        delay = BACKOFF_STEP_S * pow(2, tries)
        self.logs.warn("Waiting for %.1f seconds." % delay)
        sleep(delay)

    def run(self):
        """Runs the main retry loop with exponential backoff."""

        tries = 0
        while True:

            # The session blocks until an error occurs.
            self.run_session()

            # Remember the first time a backoff sequence starts.
            now = datetime.now()
            if tries == 0:
                self.logs.debug("Starting first backoff sequence.")
                backoff_start = now

            # Reset the backoff sequence if the last error was long ago.
            if (now - backoff_start).total_seconds() > BACKOFF_RESET_S:
                self.logs.debug("Starting new backoff sequence.")
                tries = 0
                backoff_start = now

            # Give up after the maximum number of tries.
            if tries >= MAX_TRIES:
                self.logs.warn("Exceeded maximum retry count.")
                break

            # Wait according to the progression of the backoff sequence.
            self.backoff(tries)

            # Increment the number of tries for the next error.
            tries += 1
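The writeToDB helper above inserts documents one at a time and treats duplicate-key errors as a signal to skip. An alternative that avoids relying on exceptions is an upsert keyed on id_str; the hedged sketch below uses pymongo's update_one and reuses the example's database and collection names, but it is not part of the original code.

from pymongo import MongoClient

def upsert_tweets(alltweets):
    """Upserts tweets keyed on id_str so reruns never raise duplicates."""
    with MongoClient("localhost", 27017) as connection:
        collection = connection["charon"]["trump_tweets"]
        collection.create_index("id_str", unique=True)
        for item in alltweets:
            collection.update_one({"id_str": item["id_str"]},
                                  {"$set": item},
                                  upsert=True)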