# (excerpt begins with the tail of a pd.read_csv call)
#                    index_col=0)

# Base profiles
print('\tloading base profiles')
folder_profiles = r'c:\user\U546416\Documents\PhD\Data\MVGrids\Boriette\Profiles\\'
profiles_load = pd.read_csv(folder_profiles + r'profiles_iris.csv',
                            engine='python',
                            index_col=0)
# Load IRIS polygons
print('Loading IRIS polygons')
folder_iris = r'c:\user\U546416\Documents\PhD\Data\DataGeo\\'
file_iris = 'IRIS_all_geo_' + str(2016) + '.csv'
iris_poly = pd.read_csv(folder_iris + file_iris, engine='python', index_col=0)
# Plot supply zone
iris_ss = net.load.zone.astype(int).unique()
polygons = util.do_polygons(iris_poly.loc[iris_ss], plot=False)
polys = util.list_polygons(polygons, iris_ss)
cmap = plt.get_cmap('plasma')
nb_bt_b = net.load[net.load.type_load == 'Base'].groupby(
    'zone').type_load.count()[
        iris_ss]  # Number of supplied LV trafos per IRIS by SS
nb_bt = lv_iris.Nb_BT[iris_ss]  # Total number of trafos per IRIS
supply = 1 - ((nb_bt - nb_bt_b) / nb_bt)  # Share of each IRIS' LV trafos supplied by this SS (equals nb_bt_b / nb_bt)
colors = cmap(supply[iris_ss])

ax = util.plot_polygons(polys,
                        color=colors,
                        edgecolor='darkgrey',
                        linestyle='--')
plot_lines(net.line_geodata, col='coords', ax=ax, color='k', linewidth=0.3)
plt.plot(net.bus_geodata.x[0], net.bus_geodata.y[0], 'o', color='red')
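# Optional follow-up (a sketch, not part of the original snippet): cmap() maps
# values in [0, 1] to RGBA colours, so the supply ratio can be given a colorbar
# via a ScalarMappable built on the same 'plasma' colormap.
import matplotlib as mpl
sm = mpl.cm.ScalarMappable(cmap=cmap, norm=mpl.colors.Normalize(vmin=0, vmax=1))
sm.set_array([])  # needed on older matplotlib versions
plt.colorbar(sm, ax=ax, label='Share of LV trafos supplied by the SS')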
Example #2
# (excerpt begins with the tail of a pd.read_csv call, presumably
#  iris = pd.read_csv(<path>, engine='python', sep=';', decimal=','))
comms = iris[iris.TYPE == 'Commune'].set_index('CODE')
comms = comms[comms.OPERATEUR.isin(['EdF-SEI', 'Régie électrique de Villarlurin']) == False]
comms.index = comms.index.astype(int)

iris = iris[iris.TYPE == 'IRIS'].set_index('CODE')
iris.index = iris.index.astype(int)

print('Polygons')
iris_poly = pd.read_csv(r'c:\user\U546416\Documents\PhD\Data\DataGeo\IRIS_all_geo_2016.csv',
                        engine='python', index_col=0)

#%% Constructing polygons
print('Constructing polygons')
print('IRIS polygons')
polygons = util.do_polygons(iris_poly, plot=True)


def get_comm_str(inseecode):
    """Left-pad an INSEE commune code to 5 characters (e.g. 1053 -> '01053')."""
    comm = str(inseecode)
    return '000'[0:5-len(comm)] + comm
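# Usage sketch (illustrative code, not from the original script): get_comm_str
# left-pads an INSEE code to 5 characters, i.e. the same as str.zfill(5).
assert get_comm_str(1053) == '01053' == str(1053).zfill(5)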
##%% 
#iris_poly.columns = ['COMM_CODE', 'COMM_NAME', 'IRIS_NAME', 'IRIS_TYPE', 'PolygonType',
#       'Polygon', 'Lon', 'Lat', 'GRD']

#%%
GRD = iris[iris.OPERATEUR != 'RTE'].OPERATEUR.loc[iris_poly.index]
GRDnull = GRD[GRD.isnull()]

# remove repeated IRIS
a = pd.DataFrame([GRD.index, GRD.values]).T
Example #3
@author: U546416
"""

import util
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np

#polygons = util.load_polygons_iris()
#%% Read data

# IRIS info, 2016 geography (INSEE géographie 2016)
folder = r'c:\user\U546416\Documents\PhD\Data\DataGeo\\'
file = 'IRIS_all_geo_2016.csv'
iris_poly = pd.read_csv(folder + file, engine='python', index_col=0)
polygons = util.do_polygons(iris_poly, plot=False)
# AU info
AU = pd.read_excel(
    r'c:\user\U546416\Documents\PhD\Data\Mobilité\Data_Base\AU_coms.xlsx')
AU_taille = pd.read_excel(
    r'c:\user\U546416\Documents\PhD\Data\Mobilité\Data_Base\AU.xls')

# Modifications of communes since 2016
all_comms = pd.read_csv(
    r'c:\user\U546416\Documents\PhD\Data\DataGeo\Modifs_commune\France2018.txt',
    engine='python',
    sep='\t')
all_comms = all_comms.drop(all_comms[all_comms.DEP.isin(
    ['2A', '2B', '971', '972'])].index,
                           axis=0)
code_comms = all_comms.DEP.astype(int) * 1000 + all_comms.COM
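# Worked example (illustrative values, not from the original data): the 5-digit
# INSEE commune code is departement * 1000 + commune number,
# e.g. DEP=45, COM=234 -> 45 * 1000 + 234 = 45234.
assert 45 * 1000 + 234 == 45234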
Example #4
Created on Tue Jan  7 10:29:29 2020

@author: U546416
"""
import pandas as pd
import util
polygons_comms = pd.read_json(r'c:\user\U546416\Documents\PhD\Data\DataGeo\communes-20190101.json')

#%%
df = {}
for i in polygons_comms.features:
    comm = i['properties']['insee']
    name = i['properties']['nom']
    surf = i['properties']['surf_ha'] / 100
    ptype = i['geometry']['type']
    polygon = i['geometry']['coordinates']
    if ptype == 'MultiPolygon':
        polygon = [p[0] for p in polygon]
    if not (comm[0:2] in ['2A', '2B', '96', '97']):
        df[int(comm)] = {'COMM_NAME':name,
          'SURF_KM2' : surf,
          'Polygon':  polygon,
          'Polygon_Type' : ptype}
    
df = pd.DataFrame(df).T

df.COMM_NAME = util.fix_wrong_encoding_str(df.COMM_NAME)

polygons_c = util.do_polygons(df)
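# Quick visual check (a sketch; mirrors how util.list_polygons/util.plot_polygons
# are called in the other examples on this page):
# util.plot_polygons(util.list_polygons(polygons_c, df.index),
#                    edgecolors='k', linewidth=0.3)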

df.to_csv(r'c:\user\U546416\Documents\PhD\Data\DataGeo\COMM_all_geo_2019.csv')
Example #5
# (excerpt begins with the tail of a pd.read_csv call)
#                       engine='python',
#                       index_col=0)

#%% showing data
n0 = ss.node.iloc[0]
# Restrict candidate polygons to +/- 0.5 degrees of latitude/longitude around the data extent
dt = 0.5
lonmin, lonmax = nodes.xGPS.min(), nodes.xGPS.max()
latmin, latmax = nodes.yGPS.min(), nodes.yGPS.max()
polys = iris_poly[(iris_poly.Lon > lonmin - dt) & (iris_poly.Lon < lonmax + dt)
                  & (iris_poly.Lat > latmin - dt) &
                  (iris_poly.Lat < latmax + dt)][[
                      'IRIS_NAME', 'Polygon', 'Lon', 'Lat'
                  ]]
polys.columns = ['Name', 'Polygon', 'xGPS', 'yGPS']
polys.Polygon = pd.Series(util.do_polygons(polys, plot=False))

#plot_quick(lines, lv, ss, nodes, GPS=True)

off = on_off_lines(lines,
                   n0,
                   ss=ss,
                   lv=lv,
                   GPS=True,
                   geo=polys,
                   tech=tech,
                   nodes=nodes)

#%% Transforming data in pandapower DataFrames
v = util.input_y_n(
    'Do you want to create the grid from loaded data (Y) or load existing grid (N):')
Example #6
# (excerpt begins inside a loop over substations `ss`)
    polys = [p for irs in irises for p in polygons[irs]]
    if len(irises) == 0:
        edges[ss] = [[]]
        continue
    sil = get_silhouette(polys, use4=True)
    edges[ss] = sil
    if (i + 2) % 200 == 0:
        f, ax = plt.subplots()
        util.plot_polygons(polys, ax, facecolors='lightgreen')
        util.plot_segments(sil, ax, color='k', linestyle='--')

edges = {ss: [p for p in edges[ss]] for ss in edges}
edges = pd.DataFrame(edges, index=['Polygon']).T

#%% Create SS polygons (& Plot SS)
polygons_ss = util.do_polygons(edges)
util.plot_polygons(util.list_polygons(polygons_ss, polygons_ss.keys()),
                   edgecolors='k',
                   linewidth=0.5)
util.plot_polygons(util.list_polygons(
    polygons_ss, SS[SS.Departement.isin(util.deps_idf)].index),
                   edgecolors='k',
                   linewidth=0.5)

#%% Do one - define outside shapes:
d = 0.00001
edges = {}
i = 0
ss = 'ITTEVILLE'

irises = iris[iris.SS == ss].index
Example #7
#plt.scatter(data[0].IncreaseDemand_pu, data[0].IncreasePeak_pu-data[1].IncreasePeak_pu, s=1, c=colors)
#plt.ylabel('Peak load reduction')
#plt.xlabel('Demand increase')
#plt.tight_layout()
##plt.legend()

#%% Loading IRIS polygons
# loading iris polygons
print('Loading IRIS polygons')
folder_polys = r'c:\user\U546416\Documents\PhD\Data\DataGeo\\'
file_iris = 'IRIS_all_geo_' + str(2016) + '.csv'
iris_poly = pd.read_csv(folder_polys + file_iris, engine='python', index_col=0)
dep_polys = pd.read_csv(folder_polys + 'departements_polygons.csv',
                        engine='python',
                        index_col=0)
polys_dep = util.do_polygons(dep_polys)
comms = pd.read_csv(
    r'c:\user\U546416\Documents\PhD\Data\Mobilité\Data_Base\geoRefs.csv',
    engine='python',
    index_col=0,
    sep=';')

folder_consodata = r'c:\user\U546416\Documents\PhD\Data\Mobilité\Data_Traitee\Conso'
iris = pd.read_csv(folder_consodata + r'\IRIS_enedis_2017.csv',
                   engine='python',
                   index_col=0)

ies = iris[iris.SS.isin(data[0].index)].index
idfdeps = [75, 78, 77, 91, 92, 93, 94, 95]
iesidf = iris[(iris.SS.isin(data[0].index))
              & (iris.Departement.isin(idfdeps))].index
Example #8
dep_parc_prod.columns = util.fix_wrong_encoding_str(
    pd.Series(dep_parc_prod.columns))
for c in dep_parc_prod.columns:
    if dep_parc_prod[c].dtype == 'object':
        try:
            dep_parc_prod[c] = util.fix_wrong_encoding_str(dep_parc_prod[c])
        except:
            pass

# Getting polygons for department

try:
    dep_polys = pd.read_csv(folder_polys + 'departements_polygons.csv',
                            engine='python',
                            index_col=0)
    polys = util.do_polygons(dep_polys)
except:
    dep_polys = {}
    for i, t in dep_parc_prod.iterrows():
        if type(t['Geo Point']) == str:
            data = {}
            data['Polygon'] = eval(t['Geo Shape'])['coordinates']
            data['GeoPoint'] = eval(t['Geo Point'])
            data['DEP_NAME'] = t['Nom Département']
            data['REG_NAME'] = t['Région']
            data['REG_CODE'] = t['Code Région']
            dep_polys[t['Code Département']] = data

    dep_polys = pd.DataFrame(dep_polys).T

    dep_polys.Polygon = dep_polys.Polygon.apply(lambda x: [y[0] for y in x])
Example #9
print('Conso naff:', c_naff.sum(), '\n', c_naff)

#%% Correcting wrong encodings:
names = ['IRIS_NAME', 'COMM_NAME', 'DEP_NAME', 'EPCI_NAME', 'REGION_NAME']
for n in names:
    iris_aff[n] = util.fix_wrong_encoding_str(iris_aff[n])

#%% Add IRIS that are not included in the Enedis file, but should be.
# They will have 0 conso (helps for mapping)

iris_poly = pd.read_csv(
    r'c:\user\U546416\Documents\PhD\Data\DataGeo\IRIS_all_geo_2016.csv',
    engine='python',
    index_col=0)
print('Finished reading')
polygons = util.do_polygons(iris_poly)
print('Adding missing IRIS')
extra_iris = iris_poly[(iris_poly.GRD == 'Enedis')
                       & (iris_poly.index.isin(iris_aff.index) == False)][[
                           'COMM_CODE', 'COMM_NAME', 'IRIS_NAME', 'IRIS_TYPE'
                       ]]

iris_aff = iris_aff.append(extra_iris).replace(np.nan, 0)
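# Note (not part of the original): DataFrame.append was removed in pandas 2.0;
# on recent pandas an equivalent is:
# iris_aff = pd.concat([iris_aff, extra_iris]).replace(np.nan, 0)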

#%% Correcting population data, from INSEE 20xx Recensement de la population

print('Correcting population data')
population = pd.read_csv(
    r'c:\user\U546416\Documents\PhD\Data\DataGeo\base-ic-evol-struct-pop-2014.csv',
    engine='python',
    index_col=0)  # (remaining read_csv arguments truncated in this excerpt)
Example #10
# Polygons:
ts.append(time.time())
#print('load iris')
#iris = pd.read_csv(r'c:\user\U546416\Documents\PhD\Data\DataGeo\IRIS_all_geo_2016.csv',
#                        engine='python', index_col=0)
#ts.append(time.time())
#print('Time: {}s'.format(round(ts[-1]-ts[-2],1)))
#print('polygons')
#polygons = util.do_polygons(iris)
#polygons_ss = util.load_polygons_SS()
ts.append(time.time())
print('Time: {}s'.format(round(ts[-1]-ts[-2],1)))
print('communes')
comms = pd.read_csv(r'c:\user\U546416\Documents\PhD\Data\DataGeo\COMM_all_geo_2019.csv',
                        engine='python', index_col=0)
polygons_comm = util.do_polygons(comms)
ts.append(time.time())
print('Time: {}s'.format(round(ts[-1]-ts[-2],1)))

# Histograms of distance data
print('histograms')
folder_hdata = r'c:\user\U546416\Documents\PhD\Data\Mobilité'
hhome = pd.read_csv(folder_hdata + r'\HistHomeModal.csv', 
                    engine='python', index_col=0)
hwork = pd.read_csv(folder_hdata + r'\HistWorkModal.csv', 
                    engine='python', index_col=0)
hhome = hhome.drop(['ZE', 'Status', 'UU', 'Dep'], axis=1)
hwork = hwork.drop(['ZE', 'Status', 'UU', 'Dep'], axis=1)

comm2019 = pd.read_csv(r'c:\user\U546416\Documents\PhD\Data\DataGeo\Modifs_commune\Comms2016_2019.csv',
                       engine='python', index_col=0)
Example #11
import coord_transform as ct

folder = r'c:\user\U546416\Documents\PhD\Data\Conso-Reseau\Réseau\\'
postesbt = pd.read_csv(folder + 'poste-electrique.csv',
                       engine='python',
                       sep=';')
# formatting
postesbt = postesbt['Geo Shape'].apply(lambda x: eval(x)['coordinates'])
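# Note (a sketch, not in the original): the 'Geo Shape' column appears to hold
# GeoJSON strings, so json.loads is a safer drop-in for eval here:
# import json
# postesbt = postesbt['Geo Shape'].apply(lambda x: json.loads(x)['coordinates'])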

#%% Load IRIS polygon data
print('Loading IRIS polygons')
# TODO: Load IRIS polygons
folder_iris = r'c:\user\U546416\Documents\PhD\Data\DataGeo\\'
file_iris = 'IRIS_all_geo_' + str(2016) + '.csv'
iris_poly = pd.read_csv(folder_iris + file_iris, engine='python', index_col=0)
iris_poly.Polygon = pd.Series(util.do_polygons(iris_poly, plot=False))

iris_polys = iris_poly[['Polygon', 'IRIS_NAME', 'Lon', 'Lat']]
iris_polys.columns = ['Polygon', 'Name', 'xGPS', 'yGPS']
print('\tDone loading polygons')

#%% Transform to WGPS

xyGPS = postesbt.apply(lambda x: ct.point_LAMB93CC_WGS84((x[0], x[1]), cc=8))
postesbt = pd.DataFrame(
    [xyGPS.apply(lambda x: x[0]),
     xyGPS.apply(lambda x: x[1])],
    index=['xGPS', 'yGPS']).T
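# Equivalent construction (a sketch): build the frame directly from the
# (lon, lat) tuples returned by the coordinate transform.
# postesbt = pd.DataFrame(xyGPS.tolist(), columns=['xGPS', 'yGPS'], index=xyGPS.index)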

#%% Searching polygon
assign_polys(postesbt, iris_polys, 0.05)