Example #1
import logging
import shutil

from lithops import Storage
from lithops.utils import setup_lithops_logger

logger = logging.getLogger(__name__)


def get_object(bucket, key, backend, debug):
    # Configure logging verbosity before creating the storage client
    log_level = logging.DEBUG if debug else logging.INFO
    setup_lithops_logger(log_level)
    storage = Storage(backend=backend)
    logger.info('Downloading object {} from bucket {}'.format(key, bucket))
    # Stream the object and copy it into a local file named after the key
    data_stream = storage.get_object(bucket, key, stream=True)
    with open(key, 'wb') as out:
        shutil.copyfileobj(data_stream, out)
    logger.info('Object downloaded successfully')
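
For context, a minimal sketch of how this helper might be invoked; the bucket, key, and backend names below are illustrative placeholders, not values from the original example.

# Hypothetical call: downloads 'data.csv' from 'my-bucket' on an AWS S3 backend
get_object('my-bucket', 'data.csv', backend='aws_s3', debug=True)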
Example #2
import csv
import json

import mtranslate
from lithops import Storage
# SentimentIntensityAnalyzer is assumed to come from the vaderSentiment package
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

# STORAGEBUCKET is a module-level constant defined elsewhere in the original project


def analyze_tweets(keyword, location):
    # Download the previously stored tweets from cloud object storage
    storage = Storage()
    json_tweets = storage.get_object(bucket=STORAGEBUCKET,
                                     key=keyword + location + ".json")
    packed_tweets = json.loads(json_tweets)

    analisador = SentimentIntensityAnalyzer()

    # CSV columns: URL, Fecha (date), Sentiment
    with open(keyword + location + ".csv", 'w') as file:
        writer = csv.writer(file)
        writer.writerow(["URL", "Fecha", "Sentiment"])
        # Iterate over the tweets downloaded from the cloud, run sentiment
        # analysis on each one, and write the result to the CSV file
        for tweet in packed_tweets["tweets"]:
            # Translate the tweet text to English before scoring it
            tweetstr = mtranslate.translate(str(tweet["Texto tweet"]), "en",
                                            "auto")
            writer.writerow([
                str(tweet["URL"]),
                str(tweet["Fecha"]),
                str(analisador.polarity_scores(tweetstr)['compound'])
            ])
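
As a rough illustration (not part of the original example), the function could be driven for several keyword/location pairs like this; the keyword and location values are hypothetical.

# Hypothetical driver loop over illustrative keyword/location pairs
for keyword, location in [("lithops", "Madrid"), ("serverless", "London")]:
    analyze_tweets(keyword, location)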