import json
import os
import time
from collections import OrderedDict
from datetime import datetime

import numpy as np

# Project-local dependencies used throughout these excerpts (import paths are not
# shown in the original source): BusProducer, Top104_Metric_Report and the helper
# functions extract_station_location, extract_station_datastream,
# extract_from_WS_Sensors, compare_value_scale_thresholds and
# Overall_Crisis_Level_WeatherStations.


def generateTopic():
    owd = os.getcwd()
    producer = BusProducer()

    os.chdir('zonesGen/sample_json/')
    with open('TOP106_Sample.json') as f:
        msg = json.load(f)
    dataStreamID = msg['body']['dataStreamID']

    polygons = [
        'polygon1.json', 'polygon2.json', 'polygon3.json', 'polygon4.json'
    ]
    for idx, jsonFile in enumerate(polygons, 1):
        with open(jsonFile) as f:
            plgn = json.load(f)

        # Derive a per-zone dataStreamID from the first four digits of the
        # scaled absolute centre coordinates.
        msg['body']['dataStreamID'] = dataStreamID + '_' + str(
            10000 * abs(plgn['center']['latitude']))[:4] + str(
                10000 * abs(plgn['center']['longitude']))[:4]
        polygonStruct = []
        polygonStruct.append(plgn['polygon'])
        msg['body']['polygons'] = polygonStruct
        msg['body']['position'] = plgn['center']
        msg['body']['dataStreamName'] = 'ZONE ' + str(idx).zfill(2)
        plgn['polygon']['properties']['label'] = 'ZONE ' + str(idx).zfill(2)
        plgn['polygon']['properties']['color'] = {"r": 0, "g": 0, "b": 0}
        msg['body']['dataStreamDescription'] = plgn['Name']

        msg['header']['sentUTC'] = '{}Z'.format(
            datetime.utcnow().replace(microsecond=0).isoformat())
        msg['header']['msgIdentifier'] = msg['body']['dataStreamName'] = \
            'ZONE' + str(idx).zfill(2) + '_' + msg['header']['sentUTC']

        print(json.dumps(msg))
        #with open('ZONE' + str(iter) + '.json', 'w') as outfile:
        #    outfile.write(msg)
        #outfile.close()
        producer.send(msg["header"]["topicName"], json.dumps(msg))

    os.chdir(owd)
    #print('round ended')


#generateTopic()
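
# Illustrative only -- the minimal structure each polygon file above is assumed
# to have, based on the keys accessed in generateTopic (values are made up; any
# further GeoJSON content under 'polygon' is passed through unchanged):
#
#   {
#       "Name": "Zone description",
#       "center": {"latitude": 40.6401, "longitude": 22.9444},
#       "polygon": {"properties": {"label": "", "color": {}}, ...}
#   }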

# =========================== Example #2 ===========================

def topic104HeatWave(HOCL, First_HWCrisis_Event, Max_HWCrisis_Event, directory,
                     points, center_points):

    # A) Create the TOPIC 104 (json format) for Heatwave Overall Crisis Level per day
    #       in the Thessaloniki region
    #
    counter_topics = 0

    producer = BusProducer()
    #
    #Decorate terminal
    print('\033[95m' + "\n***********************")
    print("*** CRCL SERVICE v1.0 ***")
    print("***********************\n" + '\033[0m')

    valid_values = ['Hot', 'Very Hot', 'Extreme']

    for it, item in enumerate(HOCL, 1):

        if item['note'] in valid_values:

            dataStreamGener = "CRCL"
            dataStreamName = "PHWCL_Predicted HeatWave Crisis Level"
            dataStreamID = 'HWCR_3001_PCL'
            dataStreamDescript = "Heatwave Overall Crisis Level per day for the selected points. Date: " + str(
                item['DateTime'])
            lang = "en-US"
            dataStreamCategory = "Met"
            dataStreamSubCategory = "Heatwave"

            # Position (long/lat)
            position = [round(center_points[0], 5), round(center_points[1], 5)]

            # Set variables for the header of the message
            district = "Thessaloniki"

            # Unique message identifier
            msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
                "-", "").replace(".", "MS")

            sent_dateTime = datetime.utcnow().replace(
                microsecond=0).isoformat() + 'Z'
            status = "Actual"
            actionType = "Update"
            scope = "Public"
            code = 20190617001

            # Call the class Top104_Metric_Report to create an object data of this class
            hocl_msg = Top104_Metric_Report(msgIdent, sent_dateTime, status,
                                            actionType, scope, district, code,
                                            dataStreamGener, dataStreamID,
                                            dataStreamName, dataStreamDescript,
                                            lang, dataStreamCategory,
                                            dataStreamSubCategory, position)

            # Record the thresholds for each weather indicator in the header note
            hocl_msg.topic_note = " "

            # create the header of the object
            hocl_msg.create_dictHeader()

            # create the measurements of the object
            #
            hocl_msg.topic_yValue = [item['val']]

            hocl_msg.topic_measurementID = [int(round(time.time() * 1000))]
            hocl_msg.topic_measurementTimeStamp = [
                datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
            ]

            hocl_msg.topic_dataSeriesID = [
                str(int(position[1] * 100)) + str(int(position[0] * 100))
            ]
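            # dataSeriesID above: latitude and longitude scaled by 100, truncated to
            # integers and concatenated into a single identifier string.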
            hocl_msg.topic_dataSeriesName = ["HOCL_dataSeries"]

            hocl_msg.topic_xValue = [str(item['DateTime'])]
            hocl_msg.topic_meas_color = [item['color']]
            hocl_msg.topic_meas_note = [item['note']]

            # call class function
            hocl_msg.create_dictMeasurements()

            # create the body of the object
            hocl_msg.create_dictBody()

            # create the TOP104_METRIC_REPORT as json
            top104_hocl = OrderedDict()
            top104_hocl['header'] = hocl_msg.header
            top104_hocl['body'] = hocl_msg.body

            # write json (top104_forecast) to output file
            flname = directory + "/" + 'TOP104_Heatwave_OverallCrisisLevel_' + str(
                it) + ".txt"
            with open(flname, 'w') as outfile:
                json.dump(top104_hocl, outfile, indent=4)

            print(
                'Send message: Heatwave Overall Crisis Level has been forwarded to logger!'
            )
            producer.send("TOP104_METRIC_REPORT", top104_hocl)

            counter_topics += 1

    #----------------------------------------------------------------------------
    # B) Create TOPIC 104 for the first DI value per point that exceeds the
    #    'Most population feels discomfort' category

    dataStreamGener = "CRCL"
    dataStreamName = "PDI_Predicted Discomfort Index"
    dataStreamID = 'HWCR_3002_PDI'
    dataStreamDescript = "Discomfort Index for Heatwave -- First DI value per point that exceeds the 'Most population feels discomfort' category"
    lang = "en-US"
    dataStreamCategory = "Met"
    dataStreamSubCategory = "Heatwave"

    # Set variables for the header of the message
    district = "Thessaloniki"

    # Unique message identifier
    msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
        "-", "").replace(".", "MS")

    sent_dateTime = datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
    status = "Actual"
    actionType = "Update"
    scope = "Public"
    code = 20190617001

    for fst in range(len(First_HWCrisis_Event)):

        # Position (long/lat)
        position = [
            round(First_HWCrisis_Event[fst]['long'], 5),
            round(First_HWCrisis_Event[fst]['lat'], 5)
        ]

        name_place = First_HWCrisis_Event[fst]['Name']

        # Call the class Top104_Metric_Report to create an object data of this class
        fhwcr = Top104_Metric_Report(msgIdent, sent_dateTime, status,
                                     actionType, scope, district, code,
                                     dataStreamGener, dataStreamID,
                                     dataStreamName, dataStreamDescript, lang,
                                     dataStreamCategory, dataStreamSubCategory,
                                     position)

        # Record the thresholds for each weather indicator in the header note
        fhwcr.topic_note = " "

        # create the header of the object
        fhwcr.create_dictHeader()

        # create the measurements of the object
        #
        fhwcr.topic_yValue = [round(First_HWCrisis_Event[fst]['DI'], 3)]

        fhwcr.topic_measurementID = [int(round(time.time() * 1000))]
        fhwcr.topic_measurementTimeStamp = [
            datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
        ]

        dsid = str(int(float(position[1]) * 100)) + str(
            int(float(position[0]) * 100))
        fhwcr.topic_dataSeriesID = [dsid]

        fhwcr.topic_dataSeriesName = ['First DI value in ' + name_place[0]]

        fhwcr.topic_xValue = [str(First_HWCrisis_Event[fst]['DateTime'])]
        fhwcr.topic_meas_color = [First_HWCrisis_Event[fst]['Color']]
        fhwcr.topic_meas_note = [First_HWCrisis_Event[fst]['DI_Category']]

        # call class function
        fhwcr.create_dictMeasurements()

        # create the body of the object
        fhwcr.create_dictBody()

        # create the TOP104_METRIC_REPORT as json
        top104_di = OrderedDict()
        top104_di['header'] = fhwcr.header
        top104_di['body'] = fhwcr.body

        # write json (top104_forecast) to output file
        flname = directory + "/" + 'TOP104_DiscomfortIndex_' + dsid + ".txt"
        with open(flname, 'w') as outfile:
            json.dump(top104_di, outfile, indent=4)

        print('Send message: Discomfort Index has been forwarded to logger!')
        producer.send("TOP104_METRIC_REPORT", top104_di)

        counter_topics += 1

    #----------------------------------------------------------------------------
    # C) Create TOPIC 104 for the maximum DI value per point

    dataStreamGener = "CRCL"
    dataStreamName = "maxPDI_Predicted Discomfort Index"
    dataStreamID = 'HWCR_3003_maxPDI'
    dataStreamDescript = "Discomfort Index for Heatwave -- Maximum DI value per point"
    lang = "en-US"
    dataStreamCategory = "Met"
    dataStreamSubCategory = "Heatwave"

    # Set variables for the header of the message
    district = "Thessaloniki"

    # Unique message identifier
    msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
        "-", "").replace(".", "MS")

    sent_dateTime = datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
    status = "Actual"
    actionType = "Update"
    scope = "Public"
    code = 20190617001

    for fst in range(len(Max_HWCrisis_Event)):

        # Position (long/lat)
        position = [
            round(Max_HWCrisis_Event[fst]['long'], 5),
            round(Max_HWCrisis_Event[fst]['lat'], 5)
        ]

        name_place = Max_HWCrisis_Event[fst]['Name']

        # Call the class Top104_Metric_Report to create an object data of this class
        maxhwcr = Top104_Metric_Report(msgIdent, sent_dateTime, status,
                                       actionType, scope, district, code,
                                       dataStreamGener, dataStreamID,
                                       dataStreamName, dataStreamDescript,
                                       lang, dataStreamCategory,
                                       dataStreamSubCategory, position)

        # Record the thresholds for each weather indicator in the header note
        maxhwcr.topic_note = " "

        # create the header of the object
        maxhwcr.create_dictHeader()

        # create the measurements of the object
        #
        maxhwcr.topic_yValue = [round(Max_HWCrisis_Event[fst]['DI'], 3)]

        maxhwcr.topic_measurementID = [int(round(time.time() * 1000))]
        maxhwcr.topic_measurementTimeStamp = [
            datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
        ]

        dsid = str(int(float(position[1]) * 100)) + str(
            int(float(position[0]) * 100))
        maxhwcr.topic_dataSeriesID = [dsid]

        maxhwcr.topic_dataSeriesName = ['Max DI value in ' + name_place[0]]

        maxhwcr.topic_xValue = [str(Max_HWCrisis_Event[fst]['DateTime'])]
        maxhwcr.topic_meas_color = [Max_HWCrisis_Event[fst]['Color']]
        maxhwcr.topic_meas_note = [Max_HWCrisis_Event[fst]['DI_Category']]

        # call class function
        maxhwcr.create_dictMeasurements()

        # create the body of the object
        maxhwcr.create_dictBody()

        # create the TOP104_METRIC_REPORT as json
        top104_mxdi = OrderedDict()
        top104_mxdi['header'] = maxhwcr.header
        top104_mxdi['body'] = maxhwcr.body

        # write json (top104_forecast) to output file
        flname = directory + "/" + 'TOP104_Max_DiscomfortIndex_' + dsid + ".txt"
        with open(flname, 'w') as outfile:
            json.dump(top104_mxdi, outfile, indent=4)

        print(
            'Send message: Max Discomfort Index has been forwarded to logger!')
        producer.send("TOP104_METRIC_REPORT", top104_mxdi)

        counter_topics += 1

    return counter_topics
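
# The msgIdent / sent_dateTime header fields above are rebuilt inline for every
# topic. A small helper reproducing the same formats (sketch only, not part of
# the original code):
def make_header_times():
    """Return (msgIdent, sent_dateTime) in the formats used by the topics above.

    msgIdent strips ':' and '-' from the UTC ISO timestamp and replaces '.'
    with 'MS'; sent_dateTime is the UTC ISO timestamp at seconds precision
    with a trailing 'Z'.
    """
    now = datetime.utcnow()
    msg_ident = now.isoformat().replace(":", "").replace("-", "").replace(".", "MS")
    sent_date_time = now.replace(microsecond=0).isoformat() + 'Z'
    return msg_ident, sent_date_time
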
outfl_WL.close()
outfl_PR.close()

#--------------------------------------------------------------------------------------------
#  STEP 3: Creates the TOPIC_104_METRIC_REPORT
#--------------------------------------------------------------------------------------------
#
# Create the TOPIC 104 (json format) for each Weather Station and each Datastream
# (Water Level and Precipitation). Each datastream will consist of real values
# retrieved from the sensors at a particular Weather Station, plus another
# dataSeries metric which presents the scale.
#
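# For orientation (field names inferred from the code in these excerpts, not an
# authoritative schema): each TOP104_METRIC_REPORT is a dict with
#   'header': msgIdentifier, sentUTC, topicName, status, actionType, scope,
#             district, code, ...
#   'body'  : the dataStream generator/ID/name/description, language, category,
#             sub-category, position, and a list of measurements, each carrying a
#             measurementID, timestamp, dataSeriesID, dataSeriesName, xValue,
#             yValue, color and note.
#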
#----------------------------------------------------------------------------------------
# Create new Producer instance using provided configuration message (dict data).
#
producer = BusProducer()

# Decorate terminal
print('\033[95m' + "\n***********************")
print("*** CRCL SERVICE v1.0 ***")
print("***********************\n" + '\033[0m')

#-----------------------------------------------

# for each Weather Station
for iter, item_WS in enumerate(WSDS):

    # List to store all the Topics 104
    Topics104 = []

    # Set variables for the header of the message

# =========================== Example #4 ===========================

# call class function
data.create_dictMeasurements()

# create the body of the object
data.create_dictBody()

# create the TOP104_METRIC_REPORT as json
top104_forecast_oneday = {'header': data.header, 'body': data.body}

# write json (top104_forecast_oneday) to output file
with open('TOP104_forecast_oneday_outjson.txt', 'w') as outfile:
    json.dump(top104_forecast_oneday, outfile, indent=4)

#----------------------------------------------------------------------------------------
# Create new Producer instance using provided configuration message (dict data).
producer = BusProducer()

# Decorate terminal
print('\033[95m' + "\n***********************")
print("*** CRCL SERVICE v1.0 ***")
print("***********************\n" + '\033[0m')

print('\n First message: \n')
producer.send("TOP104_METRIC_REPORT", top104)


#print('\n Second message: \n')
#producer.send("TOP104_METRIC_REPORT", top104_forecast)


#print('\n Third message: \n')
def topic104Fire(directory, df, df_max, FOCL_list, categories, interp_method):

    counter_topics = 0

    print(" df_1st = ", df['Fire_Danger'].unique())
    print(" df_max = ", df_max['Fire_Danger'].unique())

    producer = BusProducer()

    # Decorate terminal
    print('\033[95m' + "\n***********************")
    print("*** CRCL SERVICE v1.0 ***")
    print("***********************\n" + '\033[0m')

    # ----------------------------------------------------------------------------------
    # B) PFWI_Predicted Fire Weather Index --- first FWI value per point that
    #    exceeds the Moderate Fire Danger category
    for i in range(df.shape[0]):

        if df['Fire_Danger'].iloc[i] in categories[2:]:
            dataStreamGener = "CRCL"
            dataStreamName = "PFWI_Predicted Fire Weather Index"
            dataStreamID = 'FRCR_2002_PFWI'
            dataStreamDescript = "Canadian Fire Weather Index (FWI) per point of interest forwarding the first measurement which exceeds the Moderate Fire Danger category"
            lang = "en-US"
            dataStreamCategory = "Fire"
            dataStreamSubCategory = ""

            # Position (long/lat)
            LON_V = df['Long'].iloc[i]
            LAT_V = df['Lat'].iloc[i]
            position = [round(LON_V, 5), round(LAT_V, 5)]

            # Set variables for the header of the message
            district = "Valencia"

            # Unique message identifier
            msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
                "-", "").replace(".", "MS")

            sent_dateTime = datetime.utcnow().replace(
                microsecond=0).isoformat() + 'Z'
            status = "Actual"
            actionType = "Update"
            scope = "Public"
            code = 20190617001

            # Call the class Top104_Metric_Report to create an object data of this class
            fwi_msg = Top104_Metric_Report(msgIdent, sent_dateTime, status,
                                           actionType, scope, district, code,
                                           dataStreamGener, dataStreamID,
                                           dataStreamName, dataStreamDescript,
                                           lang, dataStreamCategory,
                                           dataStreamSubCategory, position)

            # Record the thresholds for each weather indicator in the header note
            fwi_msg.topic_note = " "

            # create the header of the object
            fwi_msg.create_dictHeader()

            # create the measurements of the object
            #
            col_name = 'FWI' + "_" + interp_method
            fwi_msg.topic_yValue = [round(df.loc[i, col_name], 3)]

            fwi_msg.topic_measurementID = [int(round(time.time() * 1000))]

            # Measurement TimeStamp is equal with the Date/Time in which the predictions take place
            fwi_msg.topic_measurementTimeStamp = [
                str(df['Date'].iloc[0]) + 'Z'
            ]

            fwi_msg.topic_dataSeriesID = [
                str(int(abs(position[1]) * 100)) +
                str(int(abs(position[0]) * 100))
            ]
            fwi_msg.topic_dataSeriesName = [df['Name'].iloc[i]]

            fwi_msg.topic_xValue = [str(df['Date'].iloc[i]) + 'Z']
            fwi_msg.topic_meas_color = [df['Color'].iloc[i]]
            fwi_msg.topic_meas_note = [df['Fire_Danger'].iloc[i]]

            # call class function
            fwi_msg.create_dictMeasurements()

            # create the body of the object
            fwi_msg.create_dictBody()

            # create the TOP104_METRIC_REPORT as json
            top104_fwi = OrderedDict()
            top104_fwi['header'] = fwi_msg.header
            top104_fwi['body'] = fwi_msg.body

            # write json (top104_forecast) to output file
            flname = directory + "/" + 'FireWeatherIndex_' + str(i) + ".txt"
            with open(flname, 'w') as outfile:
                json.dump(top104_fwi, outfile, indent=4)

            print('Send ' + str(i) +
                  ' message: Fire Weather Index has been forwarded to logger!')
            producer.send("TOP104_METRIC_REPORT", top104_fwi)
            counter_topics += 1

    # ----------------------------------------------------------------------------------
    # C) maxPFWI_Predicted Fire Weather Index --- maximum FWI value per point
    for i in range(df_max.shape[0]):

        if df_max['Fire_Danger'].iloc[i] in categories[2:]:
            dataStreamGener = "CRCL"
            dataStreamName = "maxPFWI_maxPredicted Fire Weather Index"
            dataStreamID = 'FRCR_2003_maxPFWI'
            dataStreamDescript = "Canadian Fire Weather Index (FWI) per point of interest forwarding the max measurement which exceeds the Moderate Fire Danger category"
            lang = "en-US"
            dataStreamCategory = "Fire"
            dataStreamSubCategory = ""

            # Position (long/lat)
            LON_V = df_max['Long'].iloc[i]
            LAT_V = df_max['Lat'].iloc[i]
            position = [round(LON_V, 5), round(LAT_V, 5)]

            # Set variables for the header of the message
            district = "Valencia"

            # Unique message identifier
            msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
                "-", "").replace(".", "MS")

            sent_dateTime = datetime.utcnow().replace(
                microsecond=0).isoformat() + 'Z'
            status = "Actual"
            actionType = "Update"
            scope = "Public"
            code = 20190617001

            # Call the class Top104_Metric_Report to create an object data of this class
            maxfwi_msg = Top104_Metric_Report(
                msgIdent, sent_dateTime, status, actionType, scope, district,
                code, dataStreamGener, dataStreamID, dataStreamName,
                dataStreamDescript, lang, dataStreamCategory,
                dataStreamSubCategory, position)

            # Record the thresholds for each weather indicator in the header note
            maxfwi_msg.topic_note = " "

            # create the header of the object
            maxfwi_msg.create_dictHeader()

            # create the measurements of the object
            #
            col_name = 'FWI' + "_" + interp_method
            maxfwi_msg.topic_yValue = [round(df_max.loc[i, col_name], 3)]

            maxfwi_msg.topic_measurementID = [int(round(time.time() * 1000))]

            # Measurement TimeStamp is equal with the Date/Time in which the predictions take place
            maxfwi_msg.topic_measurementTimeStamp = [
                str(df_max['Date'].iloc[0]) + 'Z'
            ]

            maxfwi_msg.topic_dataSeriesID = [
                str(int(abs(position[1]) * 100)) +
                str(int(abs(position[0]) * 100))
            ]
            maxfwi_msg.topic_dataSeriesName = [df_max['Name'].iloc[i]]

            maxfwi_msg.topic_xValue = [str(df_max['Date'].iloc[i]) + 'Z']
            maxfwi_msg.topic_meas_color = [df_max['Color'].iloc[i]]
            maxfwi_msg.topic_meas_note = [df_max['Fire_Danger'].iloc[i]]

            # call class function
            maxfwi_msg.create_dictMeasurements()

            # create the body of the object
            maxfwi_msg.create_dictBody()

            # create the TOP104_METRIC_REPORT as json
            top104_maxfwi = OrderedDict()
            top104_maxfwi['header'] = maxfwi_msg.header
            top104_maxfwi['body'] = maxfwi_msg.body

            # write json (top104_forecast) to output file
            flname = directory + "/" + 'FireWeatherIndex_MAX_' + str(
                i) + ".txt"
            with open(flname, 'w') as outfile:
                json.dump(top104_maxfwi, outfile, indent=4)

            print(
                'Send ' + str(i) +
                ' message: Max Fire Weather Index has been forwarded to logger!'
            )
            producer.send("TOP104_METRIC_REPORT", top104_maxfwi)
            counter_topics += 1

    # ------------------------------------------------------------
    # A) Create TOP104 for PFRCL_Predicted Fire Crisis Level
    #
    dataStreamGener = "CRCL"
    dataStreamName = "PFRCL_Predicted Fire Crisis Level"
    dataStreamID = 'FRCR_2001_PCL'
    dataStreamDescript = "Overall Predicted Fire Crisis Level estimated per day over the FWI values of all points"
    lang = "en-US"
    dataStreamCategory = "Fire"
    dataStreamSubCategory = ""

    #valid_values = ['Moderate', 'High', 'Very High', 'Extreme']
    valid_values = ['High', 'Very High', 'Extreme']

    #valid_values = df['Fire_Danger'].unique().split()[0]
    print("valid values= ", valid_values)

    for it, item in enumerate(FOCL_list, 1):

        if item['note'] in valid_values:
            # Position (long/lat)
            LAT_V = float(item['Position'][0])
            LON_V = float(item['Position'][1])
            position = [round(LON_V, 5), round(LAT_V, 5)]
            name_place = "Parc Natural de l'Albufera"

            # Set variables for the header of the message
            district = "Valencia"

            # Unique message identifier
            msgIdent = datetime.utcnow().isoformat().replace(":", "").replace(
                "-", "").replace(".", "MS")

            sent_dateTime = datetime.utcnow().replace(
                microsecond=0).isoformat() + 'Z'
            status = "Actual"
            actionType = "Update"
            scope = "Public"
            code = 20190617001

            # Call the class Top104_Metric_Report to create an object data of this class
            ovfcl_msg = Top104_Metric_Report(
                msgIdent, sent_dateTime, status, actionType, scope, district,
                code, dataStreamGener, dataStreamID, dataStreamName,
                dataStreamDescript, lang, dataStreamCategory,
                dataStreamSubCategory, position)

            # Record the thresholds for each weather indicator in the header note
            ovfcl_msg.topic_note = " "

            # create the header of the object
            ovfcl_msg.create_dictHeader()

            # create the measurements of the object
            #
            #ovfcl_msg.topic_yValue = [item['val']]
            ovfcl_msg.topic_yValue = [round(item['val_rescale'], 2)]

            ovfcl_msg.topic_measurementID = [int(round(time.time() * 1000))]

            # Measurement TimeStamp is equal with the Date/Time in which the predictions take place
            ovfcl_msg.topic_measurementTimeStamp = [str(item['Date']) + 'Z']

            ovfcl_msg.topic_dataSeriesID = [
                str(int(abs(LAT_V) * 100)) + str(int(abs(LON_V) * 100))
            ]
            ovfcl_msg.topic_dataSeriesName = [name_place]

            ovfcl_msg.topic_xValue = [str(item['Date']) + 'Z']
            ovfcl_msg.topic_meas_color = [item['color']]
            ovfcl_msg.topic_meas_note = [item['note']]

            # call class function
            ovfcl_msg.create_dictMeasurements()

            # create the body of the object
            ovfcl_msg.create_dictBody()

            # create the TOP104_METRIC_REPORT as json
            top104_fcl = OrderedDict()
            top104_fcl['header'] = ovfcl_msg.header
            top104_fcl['body'] = ovfcl_msg.body

            # write json (top104_forecast) to output file
            flname = directory + "/" + 'FireCrisisLevel_' + str(
                counter_topics) + ".txt"
            with open(flname, 'w') as outfile:
                json.dump(top104_fcl, outfile, indent=4)

            print(
                'Send ' + str(counter_topics) +
                ' message: Overall Fire Crisis Level has been forwarded to logger!'
            )
            producer.send("TOP104_METRIC_REPORT", top104_fcl)
            counter_topics += 1

    print("Number of topics forwarded: ", counter_topics)
    return counter_topics
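
# Illustrative only -- the inputs topic104Fire expects, with column and key names
# taken from the accesses above (all values and the category list are made up):
#
#   import pandas as pd
#   df = pd.DataFrame({
#       'Date': ['2019-06-18 12:00:00'], 'Lat': [39.3262], 'Long': [-0.3557],
#       'Name': ['Point_01'], 'Color': ['#FFA500'],
#       'Fire_Danger': ['High'], 'FWI_linear': [18.352],
#   })
#   categories = ['Low', 'Moderate', 'High', 'Very High', 'Extreme']
#   FOCL_list = [{'Date': '2019-06-18 12:00:00', 'val_rescale': 0.71,
#                 'color': '#FFA500', 'note': 'High',
#                 'Position': [39.3262, -0.3557]}]
#   topic104Fire('.', df, df, FOCL_list, categories, interp_method='linear')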

# =========================== Example #6 ===========================

sel_vals = {
    'dstr_sel': ['id', 'name', 'properties'],
    'obs_sel': ['result', 'phenomenonTime', 'id', 'parameters']
}
filt_args = {'obs_filt': ['phenomenonTime']}
dates = ['2018-01-26T08:00:00.000Z', '2018-01-28T14:00:00.000Z']
filt_vals = {'obs_filt_vals': dates}
ord_vals = ['phenomenonTime']
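
# For orientation only: with the OGC SensorThings API, selections, filters and
# ordering like the ones above typically end up as OData-style query options on
# the service URL, roughly of the form
#   .../Datastreams?$select=id,name,properties
#       &$expand=Observations($select=result,phenomenonTime,id,parameters;
#                             $filter=phenomenonTime ge 2018-01-26T08:00:00.000Z
#                                     and phenomenonTime le 2018-01-28T14:00:00.000Z;
#                             $orderby=phenomenonTime)
# The exact URLs are built by the project's extract_* helper functions (not shown here).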

flag_last_run = True  #False

#----------------------------------------------------------------------------------------
# Create new Producer instance using provided configuration message (dict data).

producer = BusProducer()

# Decorate terminal
print('\033[95m' + "\n***********************")
print("*** CRCL SERVICE v1.0 ***")
print("***********************\n" + '\033[0m')

# Array in which the frequency of each scale value is stored
# freqs = [N0, N1, N2, N3] where
#   N0 : number of river sections which have scale 0
#   Ni : number of river sections which have scale i, i={1,2,3}

scales = [0, 1, 2, 3]
freqs = [0] * len(scales)
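# The loop body is not included in this excerpt; presumably each iteration
# increments the frequency of that river section's scale, e.g.
#     freqs[section_scales[counter]] += 1
# where section_scales is a hypothetical list holding one scale value per section.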

for counter in range(0, count):
    pass  # loop body not shown in this excerpt


# Fragment from a separate snippet: constructor that records the SQLite message
# store and creates a bus producer.
def __init__(self):
    self.database = 'messages.sqlite'

    # Create producer
    self.producer = BusProducer()


def CrisisClassificationFlood_Emerg():

    ver = 'Ver8_2nd_Period'

    # Create a directory to store the output files and TOPICS
    #root_path = Path.cwd()

    # Create a path
    current_dirs_parent = os.getcwd()
    root_path = current_dirs_parent + "/" + "CRCL/FloodCRisisCLassification" + "/"

    now = datetime.now()
    directory = root_path + "TOPICS_fromSensors_2010" + "_" + ver + "_" + str(now.year) + "_" + str(now.month) + "_" + str(now.day)

    os.makedirs(directory, exist_ok=True)

    #-----------------------------------------------------------------------------------
    # Fetch data from the OGC SensorThings API
    #
    # User defined values in order to formulate the query
    #
    service_root_URI = 'https://beaware.server.de/SensorThingsService/v1.0/'

    SensorThingEntities = ['Things', 'Locations', 'HistoricalLocations',
                            'Datastreams', 'Sensor', 'Observations',
                            'ObservedProperties', 'FeaturesOfInterest', 'MultiDatastreams']

    SensorThings = [SensorThingEntities[0], SensorThingEntities[3], SensorThingEntities[5]]

    # Initialise arrays to store the results of comparison for each weather station and each datastream (WL or PR)
    meas_ColNote_WL = []
    meas_ColNote_PR = []

    #--------------------------------------------------------------------------------------
    # Creates the thresholds for each one of the Weather Stations of interest
    #
    Weather_Stations_Ids = [45, 47, 374]

    Thresholds_WL = [{'ID': 45, 'Alarm1': 4.36, 'Alarm2': 4.86, 'Alarm3': 5.66},
                     {'ID': 47, 'Alarm1': 3.00, 'Alarm2': 4.60, 'Alarm3': 5.40},
                     {'ID': 374, 'Alarm1': 1.63, 'Alarm2': 3.03, 'Alarm3': 3.43}
                    ]

    #Thresholds_PR = [{'ID': 47, 'Alarm1': 50, 'Alarm2': 100, 'Alarm3': 150},
    #                 {'ID': 49, 'Alarm1': 50, 'Alarm2': 100, 'Alarm3': 150},
    #                 {'ID': 374, 'Alarm1': 50, 'Alarm2': 100, 'Alarm3': 150}
    #                ]

    # Tweaked thresholds (for testing)
    #Thresholds_WL = [{'ID': 45, 'Alarm1': 0.36, 'Alarm2': 0.6, 'Alarm3': 0.66},
    #                 {'ID': 47, 'Alarm1': 0.01, 'Alarm2': 0.60, 'Alarm3': 0.80},
    #                 {'ID': 374, 'Alarm1': 0.03, 'Alarm2': 0.03, 'Alarm3': 0.43}
    #                ]
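
    # Sketch only (the real project implementation may differ): the kind of mapping
    # that compare_value_scale_thresholds, called in Step 2 below, is assumed to
    # perform for a Water Level value against a station's alarm thresholds.
    def _illustrative_wl_scale(value, thresholds_row):
        """Return an integer scale 0-3 according to the highest alarm level exceeded."""
        if value >= thresholds_row['Alarm3']:
            return 3
        if value >= thresholds_row['Alarm2']:
            return 2
        if value >= thresholds_row['Alarm1']:
            return 1
        return 0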

    # Start Timing Step 1
    start_step1 = time.time()

    # Store the time steps
    time_duration_step = []

    #---------------------------------------------------------------------------------------------------
    # Step 1: Extract the Weather Stations that have Water Level among their Datastreams

    flag_last_measurement = True  # or False

    # List of dictionaries containing the id of each WS and its Datastreams.
    # For a WS where one of the Datastreams is missing, None is filled in.
    WSDS = []

    # dates_WL=[]
    # flag_phenTime = True

    # Specify the period date/time for each Weather Station
    flag_phenTime = False
    #dates_WL = [{'ID': 45, 'PhenDateTime': ['2010-10-31T21:00:00.000Z', '2010-11-02T22:00:00.000Z']},
    #            {'ID': 47, 'PhenDateTime': ['2010-10-31T19:00:00.000Z', '2010-11-02T23:00:00.000Z']},
    #            {'ID': 374, 'PhenDateTime': ['2010-10-31T13:00:00.000Z', '2010-11-02T23:00:00.000Z']}
    #            ]

    dates_WL = [{'ID': 45, 'PhenDateTime': ['2010-11-01T07:00:00.000Z']},
                {'ID': 47, 'PhenDateTime': ['2010-11-01T10:00:00.000Z']},
                {'ID': 374, 'PhenDateTime': ['2010-11-02T12:00:00.000Z']}
                ]
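    # When flag_phenTime is False the hard-coded timestamps above are used to filter
    # the Observations; when it is True, PhenDateTime is instead filled in from each
    # Datastream's phenomenonTime (see the update inside the loop below).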

    for i, StationID in enumerate(Weather_Stations_Ids):

        WSDS_dict = {'ID': StationID}

        # extract the location of the station
        SensThings_Loc = [SensorThingEntities[0], SensorThingEntities[1]]
        selVals = {'thing_sel': ['id', 'name'], 'loc_sel': ['location']}
        filt_args = {'thing_filt': ['id']}
        filt_vals = {'thing_filt': str(StationID)}

        resp_station_loc = extract_station_location(service_root_URI, SensThings_Loc, selVals, filt_args, filt_vals)

        SensThings = [SensorThingEntities[0], SensorThingEntities[3]]
        selVals = {'dstr_sel': ['id', 'name', 'phenomenonTime']}
        filt_args={'thing_filt': ['id'], 'dstr_filt': ['name']}
        filt_vals_WL={'thing_filt': str(StationID), 'dstr_filt': ['Water']}

        resp_station_datastream_WL = extract_station_datastream(service_root_URI, SensThings, selVals, filt_args, filt_vals_WL)

        # Update WSDS with Weather Station name
        WSDS_dict.update({'WS_name': resp_station_datastream_WL['value'][0]['name']})

        # Keep elements and values for Water Level
        if len(resp_station_datastream_WL['value'][0]['Datastreams']) == 0:
            WSDS_dict.update({'WL': None})
            WSDS_dict.update({'WL_name': None})
        else:
            WSDS_dict.update({'WL': resp_station_datastream_WL['value'][0]['Datastreams'][0]['@iot.id']})
            WSDS_dict.update({'WL_name': resp_station_datastream_WL['value'][0]['Datastreams'][0]['name']})

            # Update the date/time to equal the phenomenonTime of the Datastream
            if flag_phenTime:
                dates_WL_dict = {'ID': StationID}
                PhenDateTime = resp_station_datastream_WL['value'][0]['Datastreams'][0]['phenomenonTime']
                dates_WL_dict.update({'PhenDateTime': PhenDateTime[(PhenDateTime.find("/") + 1):]})
                dates_WL += [dates_WL_dict]

        # Add station's location to WSDS_dict
        WSDS_dict.update({'Coordinates': resp_station_loc['value'][0]['Locations'][0]['location']['coordinates']})

        # Update the WSDS with the new dictionary for the WS
        WSDS += [ WSDS_dict ]

    # print("\n ----------------------- ")
    # print("WSDS =", WSDS )
    # print("--------------------------\n")


    # End Timing Step 1
    end_step1 = time.time()
    time_duration_step.append( end_step1 - start_step1 )

    #-----------------------------------------------------------------------------------
    #   Step 2: Extract real measurements from Sensors at the specific Weather Station
    # and
    #   Step 3: Create and send the Topic104
    #-----------------------------------------------------------------------------------

    # Start Timing Step 2
    start_step2 = time.time()

    # Open files to store the query responses
    flname_WL = directory + "/" + 'response_Sensors_WL.txt'
    outfl_WL = open(flname_WL, 'w')

    # Arrays to keep the query responses
    response_sensors_WL = []

    # List to store all the Topics 104
    Topics104 = []

    for i, StationID in enumerate(WSDS):

        filt_args={'thing_filt': ['id'], 'dstr_filt': ['name'], 'obs_filt': ['phenomenonTime']}
        sel_vals = {'thing_sel': ['id','name', 'description'],
                    'dstr_sel': ['id', 'name', 'phenomenonTime'],
                    'obs_sel': ['result', 'phenomenonTime', 'id']}
        ord_vals = ['phenomenonTime']

        # For WL datastream do:
        if StationID['WL'] != None:

            # Find the corresponding PhenomenonTimeDate for WL of the Station
            for j in dates_WL:
                if j['ID'] == StationID['ID'] and 'PhenDateTime' in j:
                    dt = j['PhenDateTime']
                    filt_vals_WL = {'thing_filt': [str(StationID['ID'])], 'dstr_filt': ['Water'], 'obs_filt_vals': dt}

            # Call function to extract the measurement of WL from specific Station
            item_WL = extract_from_WS_Sensors(service_root_URI, SensorThings, sel_vals, ord_vals, filt_args, filt_vals_WL)
            response_sensors_WL.append(item_WL)

            msg_WL = "\n Station ID = " + str(StationID['ID']) + " and Datastream ID = " + str(StationID['WL']) + "\n"
            outfl_WL.write(msg_WL)
            json.dump(item_WL, outfl_WL)
            outfl_WL.write("\n ------------------------------ \n")

            # For each observation CRCL finds its scale
            lenObs = len(item_WL['value'][0]['Datastreams'][0]['Observations'])
            value = []
            for iter_obs in range(0, lenObs):

                value.append(item_WL['value'][0]['Datastreams'][0]['Observations'][iter_obs]['result'])

                # call function to compare the value with alarm thresholds
                if value[iter_obs] > 0.0:
                    color_note_WL = compare_value_scale_thresholds(value[iter_obs] , filt_vals_WL['thing_filt'], filt_vals_WL['dstr_filt'], Thresholds_WL)
                    meas_ColNote_WL_dict = {'ID': StationID['ID'], 'col': color_note_WL[0], 'note': color_note_WL[1],
                                            'scale': color_note_WL[2], 'note_scale': color_note_WL[3]}
                else:  # non-positive value: no colour/scale can be assigned
                    meas_ColNote_WL_dict = {'ID': StationID['ID'], 'col': None, 'note': None, 'scale': None, 'note_scale': None}

                meas_ColNote_WL += [meas_ColNote_WL_dict]

        # print("\n ----------------------- ")
        # print("meas_ColNote_WL =", meas_ColNote_WL )
        # print("--------------------------\n")

        #--------------------------------------------------------------------------------------------
        #  STEP 3: Creates the TOPIC_104_METRIC_REPORT
        #--------------------------------------------------------------------------------------------
        #
        # Create the TOPIC 104 (json format) for each Weather Station and the Water
        # Level datastream. The datastream will consist of real values retrieved from
        # the sensors at that Weather Station, plus another dataSeries metric which
        # presents the scale.

        # Set variables for the header of the message
        district = "Vicenza"

        sent_dateTime = datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
        status = "Actual"
        actionType = "Update"
        scope = "Public"
        code = 20190617001

        # Set variables for the body of the message
        dataStreamGener = "CRCL"
        lang = "en-US"
        dataStreamCategory = "Met"
        dataStreamSubCategory = "Flood"

        # Position of the Weather Station
        position = [ StationID['Coordinates'][0], StationID['Coordinates'][1] ]

        #-------------------------------------------------------------------------
        # If the Water Level datastream exists in the specific Weather Station

        # Initialize temporary arrays
        measurement_ID = []
        measurement_TimeStamp = []
        dataSeriesID = []
        dataSeriesName = []
        dsmeas_color = []
        dsmeas_note = []
        yValues = []
        xVals = []

        if StationID['WL'] != None:

            dataStreamName = 'OWLm_Observed Water Level Measurement'
            dataStreamDescript = StationID['WL_name'] + ' ,real measurement'
            dataStreamID = "FLCR_1012_OWLm"

            #print("\n dataStreamName = ", dataStreamName, " dataStreamID = ", dataStreamID)

            # Unique message identifier
            msgIdent = datetime.utcnow().isoformat().replace(":","").replace("-","").replace(".","MS")

            # Call the class Top104_Metric_Report to create an object data of this class
            data_WL = Top104_Metric_Report(msgIdent, sent_dateTime, status, actionType, scope, district, code,
                                dataStreamGener, dataStreamID, dataStreamName, dataStreamDescript,
                                lang, dataStreamCategory, dataStreamSubCategory, position)

            # Create the header of the object (message)
            data_WL.create_dictHeader()

            # Create the body and the measurements of the object (message)
            #
            # Extract values from 'response_sensors_WL'
            pos = [j for j, x in enumerate(response_sensors_WL) if x['value'][0]['@iot.id'] == StationID['ID'] ]

            val_measurement_ID = str(response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'][0]['@iot.id']) + "_1"
            measurement_ID += [ val_measurement_ID ]
            measurement_TimeStamp += [ datetime.utcnow().replace(microsecond=0).isoformat() + 'Z' ]

            # find the position of the station and datastream in the meas_ColNote_WL list
            pos_meas = [j for j, x in enumerate(meas_ColNote_WL) if x['ID'] == StationID['ID'] ]

            # First measurement - real values
            dataSeriesID += [ str(StationID['ID']) + str(StationID['WL']) ]
            dataSeriesName += [ StationID['WL_name'] ]

            dsmeas_color += [ meas_ColNote_WL[pos_meas[0]]['col'][0] ]
            dsmeas_note  += [ meas_ColNote_WL[pos_meas[0]]['note'][0] ]

            # Store values to yValues array
            lenObs_yV = len( response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'] )
            yValues = []
            for iter_obs in range(0, lenObs_yV):
                yValues.append( response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'][iter_obs]['result'])

            # Second measurement for the scale
            val_measurement_ID = str(response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'][0]['@iot.id']) + "_2"
            measurement_ID += [ val_measurement_ID ]
            measurement_TimeStamp += [ response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'][0]['phenomenonTime'] ]

            dataSeriesID += [ str(StationID['ID']) + str(StationID['WL']) ]
            dataSeriesName += [ StationID['WL_name'] ]  # scale series

            yValues += [ meas_ColNote_WL[pos_meas[0]]['scale'] ]

            xVals = [ response_sensors_WL[pos[0]]['value'][0]['Datastreams'][0]['Observations'][0]['phenomenonTime'] ]*len(yValues)

            dsmeas_color += [""]
            dsmeas_note += [ meas_ColNote_WL[pos_meas[0]]['note_scale'][0] ]

            # Set values to the data_WL attributes from temporary arrays
            data_WL.topic_measurementID = measurement_ID
            data_WL.topic_measurementTimeStamp = measurement_TimeStamp
            data_WL.topic_dataSeriesID = dataSeriesID
            data_WL.topic_dataSeriesName = dataSeriesName
            data_WL.topic_xValue = xVals
            data_WL.topic_yValue = yValues

            data_WL.topic_meas_color = dsmeas_color
            data_WL.topic_meas_note = dsmeas_note

            # call class function
            data_WL.create_dictMeasurements()

            # create the body of the object
            data_WL.create_dictBody()

            # create the TOP104_METRIC_REPORT as json
            top104_WL = OrderedDict()
            top104_WL['header']= data_WL.header
            top104_WL['body']= data_WL.body

            # write json (top104_WL) to output file
            flname = directory + "/" + 'TOP104_Water_Level_' + 'WeatherStation_' + str(StationID['ID']) + '.txt'
            with open(flname, 'w') as outfile:
                json.dump(top104_WL, outfile, indent=4)

            Topics104 += [ top104_WL ]


    # End Timing Step 2 & 3
    end_step2 = time.time()
    time_duration_step.append( end_step2 - start_step2 )

    #------------------------------------------------------------------------------------
    # STEP 4: Calculate the Overall Crisis Level index for all Weather Stations

    # Start Timing Step 4
    start_step4 = time.time()

    ocl_ws_val = Overall_Crisis_Level_WeatherStations( meas_ColNote_WL )
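    # ocl_ws_val is expected to be a dict carrying at least the keys
    # 'ocl_val', 'name', 'color' and 'note' (see the assignments below).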

    print("\n ----------------------- ")
    print("ocl_ws_val =", ocl_ws_val )
    print("--------------------------\n")

    # Create the TOP104 for Overall Crisis Classification Index
    #
    dataStreamGener = "CRCL"
    dataStreamName = "OFLCL_Observed Flood Crisis Level"
    lang = "en-US"
    dataStreamCategory = "Met"
    dataStreamSubCategory = "Flood"
    dataStreamID = "FLCR_1011_OCL"
    dataStreamDescript = "Overall Crisis Level Index for Vicenza region -- emergency phase"

    # Position of the specific river section
    position = ["11.53885", "45.54497"]

    # Set variables for the header of the message
    district = "Vicenza"

    # Unique message identifier
    msgIdent = datetime.utcnow().isoformat().replace(":","").replace("-","").replace(".","MS")

    sent_dateTime = datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
    status = "Actual"
    actionType = "Update"
    scope = "Public"
    code = 20190617001

    ocl_msg = Top104_Metric_Report(msgIdent, sent_dateTime, status, actionType, scope, district, code,
                                    dataStreamGener, dataStreamID, dataStreamName, dataStreamDescript,
                                    lang, dataStreamCategory, dataStreamSubCategory, position)

    # create the header of the object
    ocl_msg.create_dictHeader()

    # create the measurements of the object
    #
    #ocl_msg.topic_yValue = [ocl_ws_val['ocl']]
    ocl_msg.topic_yValue = [ocl_ws_val['ocl_val']]

    ocl_msg.topic_measurementID = ['OCL_ID_104000000']
    ocl_msg.topic_measurementTimeStamp = [sent_dateTime]
    ocl_msg.topic_dataSeriesID = ['WS_OCL_ID_104']
    ocl_msg.topic_dataSeriesName = [ocl_ws_val['name']]
    ocl_msg.topic_xValue = [sent_dateTime]
    ocl_msg.topic_meas_color = [ocl_ws_val['color']]
    ocl_msg.topic_meas_note = [ocl_ws_val['note']]

    # call class function
    ocl_msg.create_dictMeasurements()

    # create the body of the object
    ocl_msg.create_dictBody()

    # create the TOP104_METRIC_REPORT as json
    top104_ocl = OrderedDict()
    top104_ocl['header']= ocl_msg.header
    top104_ocl['body']= ocl_msg.body

    # write json (top104_ocl) to output file
    flname = directory + "/" + 'TOP104_OverallCrisisLevel' + '.txt'
    with open(flname, 'w') as outfile:
        json.dump(top104_ocl, outfile, indent=4)


    Topics104 += [ top104_ocl ]


    # End Timing Step 4
    end_step4 = time.time()
    time_duration_step.append( end_step4 - start_step4 )

    #--------------------------------------------------------------------------------------
    # Send all collected Topics 104 (Weather Stations + Overall Crisis Level) to the bus

    # Create new Producer instance using provided configuration message (dict data).
    #
    producer = BusProducer()

    # Decorate terminal
    print('\033[95m' + "\n***********************")
    print("*** CRCL SERVICE v1.0 ***")
    print("***********************\n" + '\033[0m')


    if len(Topics104) != 0:
        print('Messages from the Weather Stations have been forwarded to logger!')
        for it in range(len(Topics104)):
            producer.send("TOP104_METRIC_REPORT", Topics104[it])
    else:
        print('No messages will be forwarded to logger from the Weather Stations!')

    # Close files
    outfl_WL.close()


    #---------------------------------------------------------------------------
    total_time = np.array(time_duration_step).sum()

    print("\n ****** EXECUTION TIME: **** ")
    print(" Time for Step 1. Data Acquisition: ", round(time_duration_step[0], 3), " seconds")
    print(" Time for Steps 2 & 3. Calculate WL scale & create Topics 104 for Weather Stations: ", round(time_duration_step[1], 3), " seconds")
    print(" Time for Step 4. Calculate OFLCL & create Topics 104: ", round(time_duration_step[2], 3), " seconds")
    print(" Total Execution Time: ", round(total_time/60.0, 3), " minutes")