#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

import geopandas as gpd
from geoalchemy2 import Geometry
from owslib.wfs import WebFeatureService
from sqlalchemy import dialects
|
def get_wfs(url, layer, bbox=None, target_crs='EPSG:2154'):
    """Download one WFS layer and return it as a GeoDataFrame in *target_crs*.

    Parameters
    ----------
    url : str
        Base URL of the WFS service.
    layer : str
        typeName of the layer to fetch; must be one of ``list_layer(url)``.
    bbox : shapely geometry or tuple, optional
        Spatial filter applied client-side by ``geopandas.read_file``.
    target_crs : str, default 'EPSG:2154'
        CRS of the returned frame (Lambert-93 by default; parameter added
        for reuse, default preserves previous behavior).

    Returns
    -------
    geopandas.GeoDataFrame
    """
    from geopandas import read_file
    from requests import Request

    wfs = WebFeatureService(url=url)
    item = dict(wfs.items())[layer]
    # Native CRS advertised by the service for this layer.
    crs = item.crsOptions[0].getcode()

    params = dict(service='WFS', version=wfs.version, request='GetFeature',
                  typeName=layer)
    query_url = Request('GET', url, params=params).prepare().url

    data = read_file(query_url, bbox=bbox)
    # The driver frequently assigns a CRS during read_file; without
    # allow_override=True, set_crs raises on an already-set CRS.
    data.set_crs(crs=crs, inplace=True, allow_override=True)

    if crs != target_crs:
        data.to_crs(crs=target_crs, inplace=True)

    return data
|
|
|
|
|
|
def list_layer(url):
    """Return the names of every layer published by the WFS service at *url*."""
    service = WebFeatureService(url=url)
    return list(service.contents)
|
|
|
|
|
|
def Polygons_to_MultiPolygon(df, geom_col=None):
    """Promote every Polygon geometry in *df* to a single-part MultiPolygon.

    Rows already holding a MultiPolygon are kept as-is; rows of any other
    geometry type are dropped. Original row order is restored by sorting
    on the index.

    Parameters
    ----------
    df : geopandas.GeoDataFrame
    geom_col : str, optional
        Name of the geometry column; defaults to the active geometry column.

    Returns
    -------
    geopandas.GeoDataFrame
    """
    from shapely.geometry import MultiPolygon
    from pandas import concat

    if not geom_col:
        geom_col = df.geometry.name

    df = df.copy()
    already_multi = df.loc[df.geom_type == 'MultiPolygon'].copy()
    singles = df.loc[df.geom_type == 'Polygon'].copy()
    # Wrap each bare Polygon in a one-element MultiPolygon.
    singles[geom_col] = [
        MultiPolygon([geom])
        for geom in df.loc[df.geom_type == 'Polygon', geom_col]
    ]
    merged = concat([already_multi, singles])
    merged.sort_index(inplace=True)
    return merged
|
|
|
|
|
|
def ref_geo(type_code, con):
    """Read all ``ref_geo.l_areas`` rows of a given area type.

    Parameters
    ----------
    type_code : str
        ``ref_geo.bib_areas_types.type_code`` value (e.g. 'DEP', 'ZNIEFF1').
    con : sqlalchemy engine or DBAPI connection

    Returns
    -------
    geopandas.GeoDataFrame
    """
    # Parameterized query (psycopg2 pyformat style) instead of str.format:
    # avoids SQL injection and quoting issues with the type_code value.
    sql = """
        SELECT l.* FROM ref_geo.l_areas l
        JOIN ref_geo.bib_areas_types bib USING (id_type)
        WHERE bib.type_code = %(typ)s
    """
    return gpd.read_postgis(sql, con, params={'typ': type_code})
|
|
|
|
|
|
def to_lareas(df, dic, layer, con, dtypes=None):
    """Insert zones from *df* into ``ref_geo.l_areas`` if not already present.

    Rows whose ``area_name`` already exists for the given area type are
    skipped; remaining polygons are promoted to MultiPolygon and appended.

    Parameters
    ----------
    df : geopandas.GeoDataFrame
        Source zones, expected in EPSG:2154 (see ``get_wfs``).
    dic : dict
        Mapping of source column names to ``l_areas`` column names
        (must provide at least ``area_name``).
    layer : str
        ``ref_geo.bib_areas_types.type_code`` of the zones.
    con : sqlalchemy engine or connection
    dtypes : dict, optional
        Extra SQLAlchemy column types forwarded to ``to_postgis``
        (was a mutable default ``{}``; ``None`` avoids cross-call sharing).
    """
    dtypes = {} if dtypes is None else dtypes

    # Numeric id of the area type (parameterized query instead of %-interpolation).
    id_type = gpd.pd.read_sql_query(
        """
        SELECT id_type FROM ref_geo.bib_areas_types WHERE type_code=%(typ)s
        """,
        con, params={'typ': layer},
    ).id_type.values[0]

    ref = ref_geo(layer, con)

    # Rename on a copy: the original in-place rename mutated the caller's frame.
    df = df.rename(columns=dic)
    # Keep only zones not yet present, normalized to MultiPolygon.
    df = Polygons_to_MultiPolygon(df[~df.area_name.isin(ref.area_name)])
    df.rename_geometry('geom', inplace=True)
    # Drop every column not mapped by `dic` (plus the geometry column).
    del_col = df.columns[~df.columns.isin(['geom', *set(dic.values())])]
    df.drop(columns=del_col, inplace=True)

    df['id_type'] = id_type
    # GeoJSON geometry (EPSG:4326) for the l_areas.geojson_4326 column.
    df['geojson_4326'] = df.to_crs(4326).geom.__geo_interface__['features']
    df['geojson_4326'] = [x['geometry'] for x in df['geojson_4326']]
    # Centroid stored as EWKT in the native Lambert-93 CRS.
    df['centroid'] = 'SRID=2154;' + df.geom.centroid.to_wkt()
    df['enable'] = True

    if df.empty:
        print('AUCUN nouveaux zonages identifiés')
    else:
        df.to_postgis(
            name='l_areas',
            con=con,
            schema='ref_geo',
            if_exists='append',
            index=False,
            index_label=None,
            chunksize=None,
            dtype={
                'centroid': Geometry(geometry_type='POINT', srid=2154),
                **dtypes,
            },
        )
        print('INSERT %i zones'%df.shape[0])
|
|
|
|
|
|
|
|
def update_larea(con, layer, df, cols_updt=None, on='area_name'):
    """Update columns of existing ``ref_geo.l_areas`` rows from *df*.

    Parameters
    ----------
    con : sqlalchemy engine or connection
    layer : str
        ``ref_geo.bib_areas_types.type_code`` of the zones to update.
    df : DataFrame
        Must contain the join column *on* plus every column in *cols_updt*.
    cols_updt : list of str, optional
        Names of the columns to update (was a mutable default ``[]``;
        ``None`` avoids cross-call sharing).
    on : str, default 'area_name'
        Column used to match *df* rows against l_areas rows.
    """
    from pycen import update_to_sql

    cols_updt = [] if cols_updt is None else cols_updt
    table = 'l_areas'

    # Numeric id of the area type (parameterized query instead of %-interpolation).
    idtyp = gpd.pd.read_sql_query(
        """
        SELECT id_type FROM ref_geo.bib_areas_types WHERE type_code=%(typ)s
        """,
        con, params={'typ': layer},
    ).id_type.values[0]

    # NOTE(review): `on` is interpolated as a column name and cannot be a
    # bind parameter — it must come from trusted code, never user input.
    where = '=%i' % idtyp
    sql = 'SELECT id_area, %s FROM ref_geo.l_areas WHERE id_type %s;' % (on, where)
    larea = gpd.pd.read_sql_query(sql, con)

    # Attach each source row to its id_area, then push the update.
    to_updt = df.merge(larea, on=on)
    update_to_sql(
        df=to_updt[['id_area', *cols_updt]],
        con=con,
        table_name=table,
        schema_name='ref_geo',
        key_name='id_area',
    )
|
|
|
|
|
|
# Base URL of the INPN "Carmen" WFS service publishing protection zonings.
carmen = 'https://ws.carmencarto.fr/WFS/119/fxx_inpn?'

# ref_geo.bib_areas_types.type_code : layer_name from list_layer
# Commented-out entries are type codes with no matching WFS layer mapped yet.
dic_layer = {
    'ZNIEFF2':'Znieff2',
    'ZNIEFF1':'Znieff1',
    'APB':'Arretes_de_protection_de_biotope',
    'RNN':'Reserves_naturelles_nationales',
    'RNR':'Reserves_naturelles_regionales',
    'ZPS':'Zones_de_protection_speciale',
    'SIC':'Sites_d_importance_communautaire',
    'ZICO':'ZICO',
    'RNCFS':'Reserves_nationales_de_chasse_et_faune_sauvage',
    'RIPN':'Reserves_Integrales_de_Parcs_Nationaux', # NO zone in 74
    'SCEN':'Terrains_acquis_des_Conservatoires_des_espaces_naturels', # not done yet
    # 'SCL':'',
    # 'PNM':'',
    'PNR':'Parcs_naturels_regionaux',
    'RBIOL':'Reserves_biologiques',
    'RBIOS':'Reserves_de_la_biosphere', # NO zone in 74
    # 'RNC':'',
    'SRAM':'Sites_Ramsar',
    # 'AA':'',
    # 'ZSC':'',
    # 'PSIC':'',
    # 'PEC':'',
    # ...
}
|
|
|
|
if __name__ == "__main__":
    from sqlalchemy import create_engine  # to read the GeoNature database
    from sqlalchemy.engine import URL

    # NOTE(review): hard-coded database credentials committed in source —
    # move these to environment variables or a config file.
    usr = 'geonatadmin'
    pdw='g;gCEN74'
    host='178.33.42.38'
    bd ='geonature2db'

    eng = URL.create('postgresql+psycopg2',username=usr,password=pdw,host=host,database=bd)
    conn = create_engine(eng)
    # from pycen import con_gn as conn

    # Departement 74 (Haute-Savoie) outline, used as the spatial filter.
    ref = ref_geo('DEP',conn)
    ter = ref[ref.area_code=='74']

    # WFS layer to import (see dic_layer for the code <-> layer mapping).
    layer = 'Znieff1'
    # layer_name = dic_layer[layer]
    zon = get_wfs(carmen,layer,bbox=ter.unary_union)
    # Keep only the zones that actually intersect the departement.
    zon = zon[zon.intersects(ter.unary_union)]

    # Default mapping from WFS attribute names to l_areas column names.
    dic_cols = {
        'ID_MNHN':'area_code',
        'NOM':'area_name',
        'URL':'source',
    }

    dtyp = {}
    # Per-layer overrides: some layers expose different attribute names.
    if layer in ['ZPS','SIC']:
        dic_cols = {**dic_cols,**{'SITECODE':'area_code','SITENAME':'area_name',}}
    elif layer == 'RNN':
        dic_cols = {**dic_cols,**{'URL_FICHE':'source','NOM_SITE':'area_name',}}
    elif layer == 'ZICO':
        dic_cols = {**dic_cols,**{'ID_SPN':'area_code'}}
    elif layer == 'SCEN':
        zon.NOM = zon.NOM.str.title()
    elif layer == 'RBIOL':
        dic_cols = {
            **dic_cols,
            **{'URL_FICHE':'source','NOM_SITE':'area_name',
            'comment':'comment','additional_data':'additional_data',}
        }
        # Human-readable reserve kind derived from CODE_R_ENP (I / D).
        zon['comment'] = 'Réserves biologiques ' + zon.CODE_R_ENP.replace(
            {'I': 'intégrale','D' : 'dirigée'}
        )
        # Store the raw code as a JSON object in additional_data
        # (single quotes swapped for double quotes to make it valid JSON).
        zon['additional_data'] = [
            str({ zon.CODE_R_ENP.name : x }).replace("\'",'\"')
            for x in zon.CODE_R_ENP
        ]
        dtyp = {'additional_data':dialects.postgresql.JSONB}

    # IF UPDATE DATA
    # update_larea(conn,layer,zon.rename(columns=dic_cols),['comment'],on='area_code')

    # Insert the new zones into ref_geo.l_areas.
    to_lareas(
        df=zon,
        dic=dic_cols,
        layer=layer,
        con=conn,
        dtypes=dtyp
    )
|
|
|