Example 1
def __init__(self, app_config):
    super(GroupHandler, self).__init__()
    self.app_config = app_config
    self.group_config = get_config('groups')
    self.root_url = 'http://%s:%s' % (app_config['hostname'],
                                      app_config['port'])
    self.dpid = self.app_config['dpid']
Example 2
def getCurrentState(endpoint, topic="", token=None):
    config = get_config()
    get_address = config['table_urls'][endpoint] + topic

    try:
        if token is None or endpoint == -1 or endpoint is None:
            r = requests.get(get_address,
                             headers={'Content-Type': 'application/json'})
        else:
            r = requests.get(get_address,
                             headers={
                                 'Content-Type':
                                 'application/json',
                                 'Authorization':
                                 'Bearer {}'.format(token).rstrip()
                             })
        if r.status_code != 200:
            print("could not get from cityIO", get_address)
            print("Error code", r.status_code)
            return {}
    # exit on request exception (cityIO down)
    except requests.exceptions.RequestException as e:
        print("CityIO seems down. " + str(e))

        return None

    return r.json()
Example 3
    def __init__(self, endpoint=-1, token=None):
        try:
            self.result = cityio_socket.getCurrentState("", endpoint, token)
            self.start_cell_origin = (Point(
                self.result['header']['spatial']['longitude'],
                self.result['header']['spatial']['latitude']))
        # use local debugging table as fallback
        except Exception:
            print("cannot read from CityIO server. Using local fallback json")
            cwd = os.path.dirname(os.path.abspath(__file__))
            debug_json_path = os.path.abspath(
                cwd + "/__debugging_virtual_table.json")
            self.result = json.load(open(debug_json_path))
            self.start_cell_origin = (Point(
                self.result['header']['spatial']['longitude'],
                self.result['header']['spatial']['latitude']))

        self.table_rotation = self.result['header']['spatial'][
            'rotation']  # TODO can the table rotation be different from the cell rotation??
        self.table_cell_size = self.result['header']['spatial']['cellSize']
        self.table_row_count = self.result['header']['spatial']['nrows']
        self.table_column_count = self.result['header']['spatial']['ncols']
        self.table_mapping = self.result['header']['mapping']['type']
        self.table_cell_content = self.result['header']['block']
        # todo enter mapping to get street id

        # get projections from config.ini
        config = get_config()
        # if the table origin is flipped to the southeast, instead of the regular northwest
        self.table_flipped = config['CITY_SCOPE'].getboolean('TABLE_FLIPPED')
        self.origin_epsg = config['CITY_SCOPE']['GLOBAL_EPSG']
        self.local_epsg = config['CITY_SCOPE']['LOCAL_EPSG']
Example 4
def __init__(self, app_config):
    super(FlowHandler, self).__init__()
    self.app_config = app_config
    self.flow_config = get_config('flows')
    self.root_url = 'http://%s:%s' % (app_config['hostname'],
                                      app_config['port'])
    self.dpid = self.app_config['dpid']
    self.table_id = self.flow_config['table_id']
Example 5
def get_default_config(rename=False):
    c_config = config_loader.complete_config(
        config_loader.get_config("config.yaml"))
    env_config = c_config['env']
    config = {k: v["default"] for k, v in env_config.items()}
    # also expose each value under its alias name
    if rename:
        for k, v in alias.items():
            config[v] = config[k]
    return config
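
A minimal usage sketch (assumed, not from the source): alias is a module-level mapping from original config keys to alternate names, so rename=True exposes each value under both keys.

# hypothetical alias table; the real one is defined elsewhere in the module
alias = {"n_agents": "num_agents"}

config = get_default_config(rename=True)
assert config["num_agents"] == config["n_agents"]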
Example 6
def attach_meter(self, flow_name, meter_id):
    flow = find_flow(self.flow_config, flow_name)
    meter = find_meter(get_config('meters'), meter_id)
    if flow is not None and meter is not None:
        flow_dict = dict_builder(self.dpid, self.table_id, flow)
        meter_action = {"type": "METER", "meter_id": meter_id}
        flow_dict['actions'].insert(0, meter_action)
        self._flow_op(flow_dict, 'mod')
    else:
        print("Unable to attach meter | Flow %s | Meter %s" %
              (flow, meter))
Example 7
def save_buildings_from_city_scope(endpoint=-1, token=None):
    # dynamic input data from designer
    table = CityScopeTable.CityScopeTable(endpoint, token)
    grid_of_cells = create_grid_of_cells(table)
    geo_json = create_buildings_json(table, grid_of_cells)
    geo_json_merged = merge_adjacent_buildings(geo_json)

    # save geojson
    with open(get_config()['NOISE_SETTINGS']['INPUT_JSON_BUILDINGS'],
              'wb') as f:
        json.dump(geo_json_merged, f)
Example 8
def getCurrentState(topic="", endpoint=-1, token=None):
    config = get_config()
    if endpoint == -1 or endpoint is None:
        get_address = config['CITY_SCOPE']['TABLE_URL_INPUT']+topic
    else:
        get_address = json.loads(config['CITY_SCOPE']['TABLE_URL_INPUT_LIST'])[endpoint]+topic

    try:
        if token is None or endpoint == -1 or endpoint is None:
            r = requests.get(get_address, headers={'Content-Type': 'application/json'})
        else:
            r = requests.get(get_address, headers={'Content-Type': 'application/json',
                                                   'Authorization': 'Bearer {}'.format(token).rstrip()})
        if r.status_code != 200:
            print("could not get from cityIO", get_address)
            print("Error code", r.status_code)
            return {}

        return r.json()
    
    except requests.exceptions.RequestException as e:
        print("CityIO error while GETting!" + str(e))
        return {}
Example 9
def sendToCityIO(data, endpoint=-1, token=None):
    config = get_config()
    if endpoint == -1 or endpoint is None:
        post_address = config['CITY_SCOPE']['TABLE_URL_RESULT_POST'] # default endpoint
    else:
        post_address = json.loads(config['CITY_SCOPE']['TABLE_URL_RESULT_POST_LIST'])[endpoint] # user endpoint

    try:
        if token is None or endpoint == -1 or endpoint is None:
            r = requests.post(post_address, json=data, headers={'Content-Type': 'application/json'})
        else: # with authentication
            r = requests.post(post_address, json=data,
                              headers={'Content-Type': 'application/json',
                                       'Authorization': 'Bearer {}'.format(token).rstrip()})
        print(r)
        if r.status_code != 200:
            print("could not post result to cityIO", post_address)
            print("Error code", r.status_code)
        else:
            print("Successfully posted to cityIO", post_address, r.status_code)

    except requests.exceptions.RequestException as e:
        print("CityIO error while POSTing!" + str(e))
        return
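
A minimal round-trip sketch using the two helpers above; the endpoint index, topic, and payload are illustrative assumptions, not values from the source.

state = getCurrentState(topic="grid", endpoint=0)
if state:  # an empty dict signals a failed GET
    # post a trivial derived result back; real payloads follow the cityIO table schema
    sendToCityIO({"grid_cells": len(state.get("grid", []))}, endpoint=0)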
Example 10
from influxdb import InfluxDBClient
from config_loader import get_config

host = get_config('INFLUXDB', 'host')
port = get_config('INFLUXDB', 'port')
username = get_config('INFLUXDB', 'username')
password = get_config('INFLUXDB', 'password')
database = get_config('INFLUXDB', 'database')


def influxdb_publish(json_data):
    client = InfluxDBClient(host=host, port=port, username=username,
                            password=password, database=database)

    client.write_points(json_data)
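
For reference, write_points takes the standard influxdb-python point format: a list of dicts with measurement, tags, time, and fields. The names below are made up for illustration.

points = [{
    "measurement": "requests",        # hypothetical measurement name
    "tags": {"zone": "example.com"},  # hypothetical tag
    "time": "2020-01-01T00:00:00Z",   # RFC3339 timestamp
    "fields": {"count": 42}
}]
influxdb_publish(points)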
Example 11
def calculate_noise_result(cursor):
    # Scenario sample
    # Sending/receiving geometry data over an ODBC connection is very slow.
    # It is advised to use a shapefile or another storage format, e.g. the SHPREAD or FILETABLE sql functions

    print("make buildings table ..")

    cursor.execute("""
    drop table if exists buildings;
    create table buildings ( the_geom GEOMETRY );
    """)

    buildings_queries = get_building_queries()
    for building in buildings_queries:
        print('building:', building)
        # Inserting building into database
        cursor.execute("""
        -- Insert 1 building from automated string
        INSERT INTO buildings (the_geom) VALUES (ST_GeomFromText({0}));
        """.format(building))

    print("Make roads table (just geometries and road type)..")
    cursor.execute("""
        drop table if exists roads_geom;
        create table roads_geom ( the_geom GEOMETRY, NUM INTEGER, node_from INTEGER, node_to INTEGER, road_type INTEGER);
        """)
    roads_queries = get_road_queries()
    for road in roads_queries:
        print('road:', road)
        cursor.execute("""{0}""".format(road))

    print("Make traffic information table..")
    cursor.execute("""
    drop table if exists roads_traffic;
     create table roads_traffic ( 
	node_from INTEGER,
	node_to INTEGER,
	load_speed DOUBLE,
	junction_speed DOUBLE,
	max_speed DOUBLE,
	lightVehicleCount DOUBLE,
	heavyVehicleCount DOUBLE,
	train_speed DOUBLE,
	trains_per_hour DOUBLE,
	ground_type INTEGER,
	has_anti_vibration BOOLEAN
	);
    """)

    traffic_queries = get_traffic_queries()
    for traffic_query in traffic_queries:
        cursor.execute("""{0}""".format(traffic_query))

    print(
        "Duplicate geometries to give sound level for each traffic direction.."
    )
    cursor.execute("""
    drop table if exists roads_dir_one;
    drop table if exists roads_dir_two;
    CREATE TABLE roads_dir_one AS SELECT the_geom,road_type,load_speed,junction_speed,max_speed,lightVehicleCount,heavyVehicleCount, train_speed, trains_per_hour, ground_type, has_anti_vibration FROM roads_geom as geo,roads_traffic traff WHERE geo.node_from=traff.node_from AND geo.node_to=traff.node_to;
    CREATE TABLE roads_dir_two AS SELECT the_geom,road_type,load_speed,junction_speed,max_speed,lightVehicleCount,heavyVehicleCount, train_speed, trains_per_hour, ground_type, has_anti_vibration FROM roads_geom as geo,roads_traffic traff WHERE geo.node_to=traff.node_from AND geo.node_from=traff.node_to;
    -- Collapse the two directions into one table
    drop table if exists roads_geo_and_traffic;
    CREATE TABLE roads_geo_and_traffic AS select * from roads_dir_one UNION select * from roads_dir_two;"""
                   )

    print("Compute the sound level for each segment of roads..")

    # compute the power of the noise source and add it to the table roads_src_global
    # for railroads (road_type = 99) use the function BTW_EvalSource (TW = Tramway)
    # for car roads use the function BR_EvalSource
    cursor.execute("""
    drop table if exists roads_src_global;
    CREATE TABLE roads_src_global AS SELECT the_geom, 
    CASEWHEN(
        road_type = 99,
        BTW_EvalSource(train_speed, trains_per_hour, ground_type, has_anti_vibration),
        BR_EvalSource(load_speed,lightVehicleCount,heavyVehicleCount,junction_speed,max_speed,road_type,ST_Z(ST_GeometryN(ST_ToMultiPoint(the_geom),1)),ST_Z(ST_GeometryN(ST_ToMultiPoint(the_geom),2)),ST_Length(the_geom),False)
        ) as db_m from roads_geo_and_traffic;
	""")

    print("Apply frequency repartition of road noise level..")

    cursor.execute("""
    drop table if exists roads_src;
    CREATE TABLE roads_src AS SELECT the_geom,
    BR_SpectrumRepartition(100,1,db_m) as db_m100,
    BR_SpectrumRepartition(125,1,db_m) as db_m125,
    BR_SpectrumRepartition(160,1,db_m) as db_m160,
    BR_SpectrumRepartition(200,1,db_m) as db_m200,
    BR_SpectrumRepartition(250,1,db_m) as db_m250,
    BR_SpectrumRepartition(315,1,db_m) as db_m315,
    BR_SpectrumRepartition(400,1,db_m) as db_m400,
    BR_SpectrumRepartition(500,1,db_m) as db_m500,
    BR_SpectrumRepartition(630,1,db_m) as db_m630,
    BR_SpectrumRepartition(800,1,db_m) as db_m800,
    BR_SpectrumRepartition(1000,1,db_m) as db_m1000,
    BR_SpectrumRepartition(1250,1,db_m) as db_m1250,
    BR_SpectrumRepartition(1600,1,db_m) as db_m1600,
    BR_SpectrumRepartition(2000,1,db_m) as db_m2000,
    BR_SpectrumRepartition(2500,1,db_m) as db_m2500,
    BR_SpectrumRepartition(3150,1,db_m) as db_m3150,
    BR_SpectrumRepartition(4000,1,db_m) as db_m4000,
    BR_SpectrumRepartition(5000,1,db_m) as db_m5000 from roads_src_global;""")

    print("Please wait, sound propagation from sources through buildings..")

    cursor.execute(
        """drop table if exists tri_lvl; create table tri_lvl as SELECT * from BR_TriGrid((select 
    st_expand(st_envelope(st_accum(the_geom)), 750, 750) the_geom from ROADS_SRC),'buildings','roads_src','DB_M','',
    {max_prop_distance},{max_wall_seeking_distance},{road_with},{receiver_densification},{max_triangle_area},
    {sound_reflection_order},{sound_diffraction_order},{wall_absorption}); """.
        format(**get_settings()))

    print("Computation done !")

    print(
        "Create isocontour and save it as a geojson in the working folder..")

    cursor.execute("""
    drop table if exists tricontouring_noise_map;
    -- create table tricontouring_noise_map AS SELECT * from ST_SimplifyPreserveTopology(ST_TriangleContouring('tri_lvl','w_v1','w_v2','w_v3',31622, 100000, 316227, 1000000, 3162277, 1e+7, 31622776, 1e+20));
    create table tricontouring_noise_map AS SELECT * from ST_TriangleContouring('tri_lvl','w_v1','w_v2','w_v3',31622, 100000, 316227, 1000000, 3162277, 1e+7, 31622776, 1e+20);
    -- Merge adjacent triangle into polygons (multiple polygon by row, for unique isoLevel and cellId key)
    drop table if exists multipolygon_iso;
    create table multipolygon_iso as select ST_UNION(ST_ACCUM(the_geom)) the_geom ,idiso, CELL_ID from tricontouring_noise_map GROUP BY IDISO, CELL_ID;
    -- Explode each row to keep only a polygon by row
    drop table if exists simple_noise_map;
    -- example from internet: CREATE TABLE roads2 AS SELECT id_way, ST_PRECISIONREDUCER(ST_SIMPLIFYPRESERVETOPOLOGY(THE_GEOM),0.1),1) the_geom, highway_type t FROM roads; 
    -- ST_SimplifyPreserveTopology(geometry geomA, float tolerance);
    -- create table simple_noise_map as select ST_SIMPLIFYPRESERVETOPOLOGY(the_geom, 2) the_geom, idiso, CELL_ID from multipolygon_iso;
    drop table if exists contouring_noise_map;
    -- create table CONTOURING_NOISE_MAP as select ST_Transform(ST_SETSRID(the_geom,{0}),{1}),idiso, CELL_ID from ST_Explode('simple_noise_map'); 
    create table CONTOURING_NOISE_MAP as select ST_Transform(ST_SETSRID(the_geom,{0}),{1}),idiso, CELL_ID from ST_Explode('multipolygon_iso'); 
    drop table multipolygon_iso;""".format(
        get_config()['CITY_SCOPE']['LOCAL_EPSG'],
        get_config()['CITY_SCOPE']['OUTPUT_EPSG']))

    cwd = get_cwd()

    # export result from database to geojson
    # time_stamp = str(datetime.now()).split('.', 1)[0].replace(' ', '_').replace(':', '_')

    geojson_path = get_result_path()
    cursor.execute("CALL GeoJsonWrite('" + geojson_path +
                   "', 'CONTOURING_NOISE_MAP');")

    with open(geojson_path) as f:
        resultdata = json.load(f)

        return resultdata
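
calculate_noise_result only needs a DB-API style cursor. A hypothetical driver sketch, assuming H2GIS is exposed over the PostgreSQL wire protocol (H2 server mode), since the BR_*/BTW_* calls above are H2GIS/NoiseModelling SQL functions; host, port, and credentials are placeholders.

import psycopg2

conn = psycopg2.connect(host="localhost", port=5435,  # assumed H2 pg-server port
                        user="sa", password="sa", dbname="h2gis_db")
noise_geojson = calculate_noise_result(conn.cursor())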
Example 12
if __name__ == "__main__":
    number_of_endpoints = 8
    old_grid_hashes = [{} for _ in range(number_of_endpoints)]  # independent dicts (avoid [{}] * n aliasing)

    cwd = os.path.dirname(os.path.abspath(__file__))
    backups_dir = cwd + "/backups"

    if not os.path.exists(backups_dir):
        os.makedirs(backups_dir)

    sleep_interval = 180
    city_io_down_interval = 1800

    while True:
        for endpoint in range(number_of_endpoints):
            table_name = config_loader.get_config()['table_names'][endpoint]
            token = getToken(endpoint)

            oldHash = old_grid_hashes[endpoint]

            gridHash = cityio_socket.getCurrentState(int(endpoint),
                                                     "meta/hashes/grid", token)

            if not gridHash:  # cityIO has crashed
                timestamp = datetime.now().strftime("%Y-%m-%d__%H:%M")
                permanent_backup_dir = backups_dir + '/' + str(timestamp)
                print("permanent backup to ", permanent_backup_dir)

                # load the latest saved file for this table and copy it to a permanent backup directory
                with open(backups_dir + '/' + table_name + '.json',
                          'r') as jsonfile:
Example 13
        for endpoint_name in data_type['endpoints']:
            endpoint = data_type['endpoints'][endpoint_name]

            url = root_url + endpoint['url']
            interval = endpoint['interval']

            prom_key = "%s:%s" % (data_type_name, endpoint_name)
            labels = endpoint['labels']
            values = endpoint['values']
            print(data_type_name, endpoint_name)
            pc = PrometheusClient(c['prometheus'], prom_key, labels, values)
            csv = CSVWriter(c['csvwriter'], prom_key, labels + values)

            if data_type_name == 'queue' and endpoint_name == 'config':
                worker = QueueConfigCollector(c['dpid'], url, interval, pc,
                                              csv, filter_dict)
                worker.start()
                continue

            worker = Collector(c['dpid'], url, interval, pc, csv, filter_dict)
            worker.start()


if __name__ == '__main__':
    c = get_config()
    spawn_collectors(c)
    print("Starting http server...")
    start_http_server(c['prometheus']['port'])
    while True:
        time.sleep(10)
Example 14
    print("Error in command")
    print("=============USAGE===================")
    print("python main.py (loadallconfig | loadallflows | loadallgroups |\
     loadallmeters | deleteallflows | deleteallgroups )")
    print("or")
    print(
        "python main.py (add | mod | delete) (flow | group | meter) (flow_name | meter_id)"
    )
    print("or")
    print(
        "python main.py attach (flow_name) (meter | queue) (meter_id | queue_id)"
    )


if __name__ == '__main__':
    app_config = get_config('app')
    flow_handler = FlowHandler(app_config)
    meter_handler = MeterHandler(app_config)
    group_handler = GroupHandler(app_config)

    #parser = argparse.ArgumentParser(description='Ryu REST API interactor for QoS Experiments')

    if len(sys.argv) == 2:
        command = sys.argv[1]
        if command == "loadallconfig":
            group_handler.load_all_groups()
            flow_handler.load_all_flows()
            meter_handler.load_all_meters()
        elif command == "loadallflows":
            group_handler.load_all_groups()
            flow_handler.load_all_flows()
Example 15
import CloudFlare
import json
import requests
from config_loader import get_config
import influx_json_templates as ijt
import time
import datetime

fetch_interval = int(get_config('APP', 'fetch_interval'))
cf_token = get_config('CLOUDFLARE', 'token')
cf_api_url = get_config('CLOUDFLARE', 'api_url')
cf_analytics_uri_prefix = get_config('CLOUDFLARE', 'analytics_uri_prefix')
cf_freeplans = get_config('CLOUDFLARE', 'free_plans')


def cf_zone_info():
    zone_info = []
    cf = CloudFlare.CloudFlare(token=cf_token)
    zones = cf.zones.get()
    for zone in zones:
        zone_data = {'zone_name': zone['name'], 'zone_id': zone['id']}
        if zone['name'] not in cf_freeplans:  # skip zones on free plans
            zone_info.append(zone_data)
    return zone_info


def http_request(uri):
    headers = {"Authorization": "Bearer %s" % cf_token}
    addr = cf_api_url
Example 16
#!/usr/bin/env python2.7

import json
import os
import numpy
from geomet import wkt
import RoadInfo
from config_loader import get_config

cwd = os.path.dirname(os.path.abspath(__file__))

# TODO: all coordinates for roads and buildings are currently set to z level 0

# settings for the static input data
config = get_config()

include_rail_road = config['NOISE_SETTINGS'].getboolean('INCLUDE_RAILROAD')
include_lower_main_road = config['NOISE_SETTINGS'].getboolean(
    'INCLUDE_LOWER_MAIN_ROAD')
upper_main_road_as_multi_line = config['NOISE_SETTINGS'].getboolean(
    'UPPER_MAIN_ROAD_AS_MULTI_LINE')

# dynamic input data from designer
buildings_json = config['NOISE_SETTINGS']['INPUT_JSON_BUILDINGS']

# static input data
road_network_json = config['NOISE_SETTINGS']['INPUT_JSON_ROAD_NETWORK']
railroad_multi_line_json = os.path.abspath(
    cwd + '/input_geojson/static/roads/railroads.json')

# road names from Julia's shapefile; road_type_ids from IFSTTAR NoiseModelling