Example no. 1
def getAllStations():
    """Return every known station serialized as a JSON string.

    Spins up the Spark environment, loads the station data, serializes
    it, and always tears the environment down again.
    """
    initEnvironment()
    loadStations()
    try:
        # NOTE(review): unlike the other routes this returns the raw JSON
        # string rather than jsonify(...) — confirm callers expect that.
        return generalFunctions.dataFrameToJsonStr(stations)
    finally:
        # Guarantee the Spark context is released even if serialization
        # raises (the original leaked it on failure; it also computed an
        # unused `timestamp`, now removed).
        stopEnvironment(sc)
Example no. 2
def getMeasurement():
    """Return the weather measurement for a station/date as JSON.

    Query params:
        date: day to look up.
        station_id: target station.
        allStations: flag requesting data for every station instead.
    """
    initEnvironment()
    loadCleanDaily()
    date = request.args.get('date')
    station_id = request.args.get('station_id')
    # Renamed from `getAllStations`: the old local shadowed the sibling
    # route function of the same name.
    all_stations = request.args.get('allStations')

    weatherData = sparkFunctions.getConcreteWeatherData(clean_daily, station_id, date, all_stations)
    weatherJson = generalFunctions.dataFrameToJsonStr(weatherData)

    stopEnvironment(sc)
    return jsonify(weatherJson)
Example no. 3
def getStats():
    """Return weather limit statistics as JSON.

    Expects a JSON request body with keys: allStations, allTime,
    dateFrom, dateTo, station_id.  Dispatches to one of four Spark
    queries depending on the allTime / allStations flags.
    """
    initEnvironment()

    data = request.data
    dataStr = str(data, 'utf-8')
    dataDict = json.loads(dataStr)

    allStations = dataDict['allStations']
    allTime = dataDict['allTime']
    dateFrom = dataDict['dateFrom']
    dateTo = dataDict['dateTo']
    station_id = dataDict['station_id']

    try:
        # Pick the data frame for the requested scope; the serialize /
        # stop / return tail is shared (the original repeated it four times).
        if allTime:
            loadGlobalWeatherStats()
            if allStations:
                tmpDf = stations_limits
            else:
                tmpDf = sparkFunctions.getLimitsForStation(stations_limits, station_id)
        else:
            loadCleanDaily()
            if allStations:
                tmpDf = sparkFunctions.getLimitsAllStationsWithInterval(clean_daily, dateFrom, dateTo)
            else:
                tmpDf = sparkFunctions.getLimitsStationWithInterval(clean_daily, station_id, dateFrom, dateTo)
        returnJson = generalFunctions.dataFrameToJsonStr(tmpDf)
        stopEnvironment(sc)
        return jsonify(returnJson)
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt); log the traceback, not just a message.
        logger.exception("Ooops, something went wrong getting the stats")
        stopEnvironment(sc)
Example no. 4
def getEarthquakes():
    """Return earthquakes for a given date within a lat/lon bounding box as JSON."""
    initEnvironment()
    loadEarthquakes()

    args = request.args
    date = args.get('date')
    max_lat = args.get('max_lat')
    min_lat = args.get('min_lat')
    max_lon = args.get('max_lon')
    min_lon = args.get('min_lon')

    # Time the Spark query and the JSON conversion separately.
    t0 = time.time()
    quakesDf = sparkFunctions.getConcreteEarhquakesData(earthquakes, date, max_lat, min_lat, max_lon, min_lon)
    logger.info("--- %s seconds getting the data ---" % (time.time() - t0))

    t0 = time.time()
    earthquakesJson = generalFunctions.dataFrameToJsonStr(quakesDf)
    logger.info("--- %s seconds parsing the data to json---" % (time.time() - t0))

    stopEnvironment(sc)
    return jsonify(earthquakesJson)
Example no. 5
def getEarthquakesIntervalWithQuadrants():
    """Return earthquakes in a date interval, restricted to an x/y quadrant box, as JSON."""
    initEnvironment()
    loadEarthquakes()

    args = request.args
    dateFrom = args.get('dateFrom')
    dateTo = args.get('dateTo')
    max_y = args.get('maxY')
    min_y = args.get('minY')
    max_x = args.get('maxX')
    min_x = args.get('minX')

    # Time the Spark query and the JSON conversion separately.
    t0 = time.time()
    quakesDf = sparkFunctions.getConcreteEarhquakesIntervalDataWithQuadrants(earthquakes, dateFrom, dateTo, max_y, min_y, max_x, min_x)
    logger.info("--- %s seconds getting the data ---" % (time.time() - t0))

    t0 = time.time()
    earthquakesJson = generalFunctions.dataFrameToJsonStr(quakesDf)
    logger.info("--- %s seconds parsing the data to json---" % (time.time() - t0))

    stopEnvironment(sc)
    return jsonify(earthquakesJson)
Example no. 6
def getWeatherDataInterval():
    """Return a station's weather data over a date interval as JSON.

    Expects a JSON request body with keys: dateFrom, dateTo, station_id.
    """
    initEnvironment()

    data = request.data
    dataStr = str(data, 'utf-8')
    dataDict = json.loads(dataStr)

    dateFrom = dataDict['dateFrom']
    dateTo = dataDict['dateTo']
    logger.info("From: " + str(dateFrom))
    logger.info("To: " + str(dateTo))
    station_id = dataDict['station_id']

    try:
        loadCleanDaily()
        tmpDf = sparkFunctions.getWeatherDataInterval(clean_daily, station_id, dateFrom, dateTo)
        returnJson = generalFunctions.dataFrameToJsonStr(tmpDf)
        stopEnvironment(sc)
        return jsonify(returnJson)
    except Exception:
        # Narrowed from a bare `except:`; the original message was
        # copy-pasted from getStats and wrongly said "the stats".
        logger.exception("Ooops, something went wrong getting the weather data interval")
        stopEnvironment(sc)
Example no. 7
def getPredictionStats():
    """Predict weather stats for a station on a given date and return them as JSON.

    Expects a JSON request body with keys: station_id, fecha.
    Returns the string "No Prediction" (jsonified) when the model
    produces nothing.
    """
    try:
        data = request.data
        dataStr = str(data, 'utf-8')
        dataDict = json.loads(dataStr)
        initEnvironment()
        loadCleanDaily()

        station_id = dataDict["station_id"]
        fecha = dataDict['fecha']

        # Restrict the daily data to the requested station before predicting.
        station_daily = clean_daily.filter(clean_daily.station_id == station_id)
        weatherPrediction = sparkFunctions.predictStats(fecha, station_id, station_daily)

        if weatherPrediction:
            predictionJson = generalFunctions.dataFrameToJsonStr(weatherPrediction)
        else:
            predictionJson = "No Prediction"
        stopEnvironment(sc)
        return jsonify(predictionJson)
    except KeyError as ke:
        logger.error(ke)
        stopEnvironment(sc)
    except Exception:
        # The original only handled KeyError, so any other failure leaked
        # the Spark context; stop it here as well.
        logger.exception("Ooops, something went wrong getting the prediction stats")
        stopEnvironment(sc)