Example #1
def saveData():
    while True:
        conditions = Conditions()
        current_conditions = {
            "fecha": getCurrentTime(),
            "temperatura_int": conditions.arduino['temp'],
            "temperatura_ext": conditions.yahoo['temp']
        }
        log.debug(current_conditions)
        db.insert_value(current_conditions['fecha'],
                        current_conditions['temperatura_int'],
                        current_conditions['temperatura_ext'])
        time.sleep(2 * 60)  # poll every two minutes
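The getCurrentTime helper itself is not shown on this page. A minimal sketch that would fit the "fecha" field stored here (a hypothetical implementation, assuming a human-readable timestamp):

from datetime import datetime

def getCurrentTime():
    # Hypothetical helper: formatted local timestamp for the "fecha" field
    return datetime.now().strftime('%Y-%m-%d %H:%M:%S')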
Example #2
    def getArduinoConditions(self):
        url = self.config['url_arduino']
        req = requests.get(url)

        statusCode = req.status_code
        if statusCode == 200:
            weather_json = req.json()
            arduino_data = ArduinoData(weather_json)
            condiciones_actuales = {
                "fecha": getCurrentTime(),
                "temp": arduino_data.temperatura,
                "hum": arduino_data.humedad
            }
            return condiciones_actuales
        else:
            print("Status Code %d" % statusCode)
Example #3
    def getYahooConditions(self):
        code = self.config['code'] if len(self.config['code']) > 0 else self.getYahooCode()
        yahoo_url = self.obtain_yahoo_url(code)
        req = requests.get(yahoo_url)

        statusCode = req.status_code
        if statusCode == 200:
            weather_json = req.json()
            tiempo = InfoTiempo(weather_json)
            condiciones_actuales = {
                "fecha": getCurrentTime(),
                "temp": tiempo.condiciones_actuales.temp
            }
            return condiciones_actuales
        else:
            print("Status Code %d" % statusCode)
Example #4
def updateCurrentSentiment():
    sentimentValues = []
    sentimentTimes = []
    # Iterate over a copy: removing items from a list while iterating it skips elements.
    for sentiment in sentiments[:]:
        timeDiff = utilities.getCurrentTime() - sentiment.get('timeStamp')
        if timeDiff > TWENTY_MINUTES_MS:
            sentiments.remove(sentiment)
        else:
            sentimentValues.append(sentiment.get('compound') / 10)
            sentimentTimes.append(
                (sentiment.get('timeStamp') - CUR_TIME_MS) / 1000000)
    if len(sentimentValues) > 1:
        averageSentiment = s.mean(sentimentValues)
        currentSentiment.append(averageSentiment)
        currentTime.append(sentimentTimes[-1])
        return averageSentiment
        # print("sentimentValues:", sentimentValues)
        # print("Current sentiment: ", currentTime)
        # graph.updateGraph(currentTime, currentSentiment)
    else:
        return None
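A minimal driver for this sliding 20-minute window (an assumed setup: sentiments, CUR_TIME_MS, TWENTY_MINUTES_MS, and s live at module scope, as in Example #6):

import utilities

# Feed one weighted score into the window, then recompute the running mean.
sentiments.append({"compound": 0.4, "timeStamp": utilities.getCurrentTime()})
print(updateCurrentSentiment())  # None until more than one sample is in the window

Example #5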
def process():
    with open('../data/tripexpertData/raw/destinations.json', 'r') as f:
        availableCities = json.loads(f.read())['response']['destinations']

    listings = []
    for city in availableCities:
        countryName = city['country_name']
        cityName = city['name']
        coordinates = '{},{}'.format(city['latitude'], city['longitude'])
        if 'None' in coordinates:
            coordinates = None

        crawler = 'tripexpertConvertor'
        sourceURL = 'https://api.tripexpert.com/v1/destinations?api_key=6cb54d22babb25cc64ae730f17455338&limit=10000'
        crawlTimestamp = getCurrentTime()

        cityListing = CityListing(crawler=crawler,
                                  sourceURL=sourceURL,
                                  crawlTimestamp=crawlTimestamp,
                                  countryName=countryName,
                                  cityName=cityName,
                                  coordinates=coordinates)

        imageURL: str = city['index_photo']
        if '?' in imageURL:
            imageURL = imageURL[:imageURL.index('?')]

        imageListing = ImageResource(crawler=crawler,
                                     sourceURL=sourceURL,
                                     crawlTimestamp=crawlTimestamp,
                                     countryName=countryName,
                                     cityName=cityName,
                                     imageURL=imageURL)

        listings.append(cityListing)
        listings.append(imageListing)

    with open('../data/tripexpertData/cities.json', 'w') as f:
        f.write(json.dumps(listings, cls=EnhancedJSONEncoder))
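process() serializes the listing objects with EnhancedJSONEncoder, whose definition is not shown here. If CityListing and ImageResource are dataclasses, a minimal sketch along these lines would fit (an assumption about the real encoder):

import dataclasses
import json

class EnhancedJSONEncoder(json.JSONEncoder):
    # Hypothetical: fall back to dataclasses.asdict for dataclass listings
    def default(self, o):
        if dataclasses.is_dataclass(o):
            return dataclasses.asdict(o)
        return super().default(o)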
Example #6
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import dateutil.parser
from datetime import datetime
import statistics as s
import pandas as pd
import utilities
# import graph
# from matplotlib.pyplot import figure, draw

analyzer = SentimentIntensityAnalyzer()

sentiments = []
currentSentiment = []
currentTime = []
col_names = ['timeStamp', 'tweet', 'numFollowers']
TWENTY_MINUTES_MS = 1200000  # 20 minutes in milliseconds
tweet_collections = pd.DataFrame(columns=col_names)
CUR_TIME_MS = utilities.getCurrentTime()


# print(analyzeSentiment(tweet.get['text']))
def trackSentiment(tweet, user, time):
    vs = analyzer.polarity_scores(tweet)
    compound = vs['compound']
    weightedCompound = compound * (user['followers_count'] / 100)
    # Store the dict shape that updateCurrentSentiment() expects; appending the
    # bare float would crash on sentiment.get(...). The timestamp source is assumed.
    sentiments.append({"compound": weightedCompound,
                       "timeStamp": utilities.getCurrentTime()})
    updateCurrentSentiment()


def analyzeSentiment(tweet, numFollowers, timeStamp):
    vs = analyzer.polarity_scores(tweet)
    msTime = dateutil.parser.parse(timeStamp).timestamp() * 1000
    sentiments.append({"compound": vs['compound'], "timeStamp": msTime})
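The arithmetic against TWENTY_MINUTES_MS implies that utilities.getCurrentTime() returns epoch time in milliseconds here. A sketch consistent with that usage (hypothetical; the real utilities module is not shown):

import time

def getCurrentTime():
    # Hypothetical: wall-clock epoch time in milliseconds
    return int(time.time() * 1000)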
Example #7
def processAll():
    listings: t.List[JEL] = readAllListingsFromFiles()

    print('Processing.')

    countryListings: t.List[JKL] = [
        JKL(listing) for listing in listings
        if listing['_listingType'] == 'country'
    ]
    cityListings: t.List[JCL] = [
        JCL(listing) for listing in listings
        if listing['_listingType'] == 'city'
    ]
    pointListings: t.List[JPL] = [
        JPL(listing) for listing in listings
        if listing['_listingType'] == 'point'
    ]

    pointIDs: t.List[PointID] = [getPointID(point) for point in pointListings]
    cityIDs: t.List[CityID] = [getCityID(city) for city in cityListings]
    countryIDs: t.List[CountryID] = [
        getCountryID(country) for country in countryListings
    ]

    for listing in listings:
        if listing['_listingType'] in ['review', 'imageResource']:
            if forCountry(listing):
                countryIDs.append(getCountryID(listing))
            elif forCity(listing):
                cityIDs.append(getCityID(listing))
            elif forPoint(listing):
                pointIDs.append(getPointID(listing))

    if cachedPointAliases and os.path.exists('../PointAliasesCache/cache.pkl'):
        with open('../PointAliasesCache/cache.pkl', 'rb') as f:
            bestPointIDMap, bestCityIDMap, bestCountryIDMap = pickle.load(f)
    else:
        if not os.path.exists(
                os.path.dirname('../PointAliasesCache/cache.pkl')):
            try:
                os.makedirs(os.path.dirname('../PointAliasesCache/cache.pkl'))
            except OSError as exc:  # Guard against race condition
                if exc.errno != errno.EEXIST:
                    raise
        bestPointIDMap, bestCityIDMap, bestCountryIDMap = clusterAllIDs(
            pointIDs, cityIDs, countryIDs)
        with open('../PointAliasesCache/cache.pkl', 'wb') as f:
            pickle.dump((bestPointIDMap, bestCityIDMap, bestCountryIDMap), f)

    revPoint, revCity, revCountry = map(
        makeReverseMap, [bestPointIDMap, bestCityIDMap, bestCountryIDMap])

    toAggregateData = collectAllListings(listings, bestPointIDMap,
                                         bestCityIDMap, bestCountryIDMap)
    aggregatedListings, categoriesFound, allPointScores, allDiffablePointOrders = aggregateAllListings(
        toAggregateData, revPoint, revCity, revCountry)

    for timestamp in [getCurrentTime().replace(':', '.'), 'latest']:
        print('Saving results')
        saveData('../aggregatedData/{}/data.json'.format(timestamp),
                 aggregatedListings)
        print('Saving config')
        saveData('../aggregatedData/{}/config.json'.format(timestamp),
                 fullConfig)

        print('Saving debug info')
        debugInfo = {
            'bestPointIDMap':
                {str(key): str(val) for key, val in bestPointIDMap.items()},
            'bestCityIDMap':
                {str(key): str(val) for key, val in bestCityIDMap.items()},
            'bestCountryIDMap':
                {str(key): str(val) for key, val in bestCountryIDMap.items()},
            'toAggregateData': toAggregateData,
            'categoriesFound': list(categoriesFound),
            'allPointScores': allPointScores,
            'allDiffablePointOrders': allDiffablePointOrders,
        }
        for key, val in debugInfo.items():
            saveData(
                '../aggregatedData/{}/debug/{}.json'.format(timestamp, key),
                val)

    print('All done. Exit')
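makeReverseMap is applied above to invert the three alias maps. A minimal sketch of what it might do, assuming each map sends a raw ID to its canonical (best) ID:

def makeReverseMap(bestIDMap):
    # Hypothetical inverse: canonical ID -> list of raw IDs that collapse into it
    reverse = {}
    for rawID, bestID in bestIDMap.items():
        reverse.setdefault(bestID, []).append(rawID)
    return reverse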