#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Python_scripts/0_FONCIER/TdB_FEDE/parcelles_TdB_fede.py
# requires: pandas, geopandas, sqlalchemy, psycopg2-binary, openpyxl
from os import getlogin
import pandas as pd
import geopandas as gpd
from sqlalchemy.sql import text
from sqlalchemy.engine import URL
from sqlalchemy import create_engine
from pycen import con_bdcen as con, con_fon
from functools import reduce
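# Builds the parcel-level layers/tables for the annual FCEN federation report ("bilan fédé")
# of CEN Isère, cross-referencing the FCEN data dictionary workbook, the CEN databases
# (con_bdcen / con_fon from pycen) and a local cadastre VM.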
annee = 2022
path_source = '/media/{login}/SRV/FICHIERS/OUTILS/BASES DE DONNEES/BILAN_FEDE_CEN/{an}/Docs_SIG_joints{an}/Docs_SIG_joints{an}/'.format(login=getlogin(),an=annee)
dico_data = 'Dico_DATA_sites_CEN_v%i_v1.xlsx' % annee
sheet_par = 'parcelles_cen_XX_%i' % annee
sheet_sit = 'sites_cen_XX_%i' % annee
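# Read the value lists (listes de valeurs) and the expected parcel column list from the FCEN
# data-dictionary workbook; header/usecols/nrows target the cell range of each list.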
lv_cen = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=1, usecols='A:D',nrows=25)
lv_typmaitr = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=1, usecols='F:K',nrows=23)
lv_typmilie = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=1, usecols='M:N',nrows=16)
lv_echelnum = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=1, usecols='P:Q',nrows=8)
lv_typhab = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=13, usecols='P:Q',nrows=5)
lv_sourcegeom = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=1, usecols='S:U',nrows=12)
lv_lienres = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=20, usecols='M:N',nrows=4)
lv_typprop = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=45, usecols='A:H',nrows=12)
lv_codgeol = pd.read_excel(path_source+dico_data,sheet_name='listes_valeurs (copy&modif)',header=36, usecols='M:T',nrows=19)
lst_cols_parc = pd.read_excel(path_source+dico_data,sheet_name=sheet_par,header=0, usecols='F',nrows=26)\
['nom du champ'].tolist()
user = 'postgres'
pwd = 'postgres'
adr = '172.17.0.2'
base = 'postgres'
url = URL.create('postgresql+psycopg2',
username=user,
password=pwd,
host=adr,
database=base,
)
con_vm = create_engine(url)
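# Optional sanity check (commented out, sketch only): confirm the cadastre VM answers
# before running the heavier queries below.
# with con_vm.connect() as c:
#     print(c.execute(text('SELECT 1')).scalar())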
fadd = '/home/colas/Documents/tmp/FONCIER_FEDE/add_parc.gpkg'
add = gpd.read_file(fadd)
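# Extra parcels listed in add_parc.gpkg: fetch their geometry, cadastral area (dcntpa)
# and aggregated owner information from the cadastre VM (schema 38_202207).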
sql0 = '''SELECT
t1.geo_parcelle, substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) par_id,
t1.annee, t1.update_dat, t4.ccopre, t4.ccosec, t4.dcntpa, t1.geom, STRING_AGG(t3.gtoper::text,',') gtoper, STRING_AGG(t3.ccogrm::text,',') ccogrm,
STRING_AGG(CONCAT(t3.gtoper::text||COALESCE('_'||t3.ccogrm::text,'')),',') type_prop, STRING_AGG(t3.ddenom::text,',') ddenom
FROM "{sch}"."{t1}" t1
LEFT JOIN ("{sch}".parcelle_info t2
LEFT JOIN "{sch}".proprietaire t3 USING (comptecommunal))
USING (geo_parcelle)
LEFT JOIN "{sch}".parcelle t4 ON t1.geo_parcelle = t4.parcelle
'''.format(
sch='38_202207',
t1='v_geo_parcelle')
sql1 = '''WHERE substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) IN ('%s')
GROUP BY 1,2,3,4,5,6,7,8'''%"','".join(add.par_id)
sql = sql0 + sql1
parcx = gpd.read_postgis(text(sql),con=con_vm)
cols_parcx = parcx.columns
for col in ['ccopre','ccosec','type_prop','ddenom']:
    parcx[col] = parcx[col].str.strip()
    parcx[col] = parcx[col].replace({'': None})  # dict form: empty strings really become null (value=None would trigger pad-fill)
parcx.loc[parcx.ccopre.isna(),'ccopre'] = parcx.loc[parcx.ccopre.isna(),'par_id'].str[5:8]
parcx.loc[parcx.ccosec.isna(),'ccosec'] = parcx.loc[parcx.ccosec.isna(),'par_id'].str[8:10]
parcx.loc[parcx.ccosec.str.len()==1,'ccosec'] = '0'+parcx.loc[parcx.ccosec.str.len()==1,'ccosec']
parcx.loc[parcx.dcntpa.isna(),'dcntpa'] = parcx.loc[parcx.dcntpa.isna(),'geom'].area.round().astype(int)
parcx.loc[parcx.gtoper.isna(),['gtoper','ccogrm','type_prop']] = ['2','0','2_0']
parcx.loc[parcx.par_id=='380740000B0705','ddenom'] = "CONSERVATOIRE D'ESPACES NATURELS ISERE"
parcx.dcntpa = parcx.dcntpa.astype(int)
###### Retrieve the parcels of the Île de la Platière nature reserve (RN)
f_plaRN = '/home/colas/Documents/tmp/FONCIER_FEDE/PLATIERE/Maitrise_Foncier_RN.shp'
f_plaENS = '/home/colas/Documents/tmp/FONCIER_FEDE/PLATIERE/Maitrise_Foncier_ENS.shp'
RNNP_rn = gpd.read_file(f_plaRN)
RNNP_ens = gpd.read_file(f_plaENS)
RNNP_rn.rename_geometry('geom',inplace=True)
RNNP_ens.rename_geometry('geom',inplace=True)
RNNP_rn.rename(columns={'Date_sign':'annee_acq','surf_acqui':'surface_ac','surf_parce':'superficie'},inplace=True)
RNNP_ens.rename(columns={'code_commu':'code_comm','année_acq':'annee_acq','année':'anne'},inplace=True)
RNNP_rn = RNNP_rn[RNNP_rn.code_comm.str.len()==5]
RNNP_ens = RNNP_ens[RNNP_ens.code_comm.str.len()==5]
RNNP_rn['section'] = RNNP_rn.parcelle.str[:2]
RNNP_ens['section'] = RNNP_ens.parcelle.str[:2]
RNNP_rn['num_parc'] = RNNP_rn.parcelle.str[2:]
RNNP_ens['num_parc'] = RNNP_ens.parcelle.str[2:]
RNNP_rn['num_parc'] = RNNP_rn['num_parc'].str.strip()
RNNP_ens['num_parc'] = RNNP_ens['num_parc'].str.strip()
RNNP_rn['num_parc'] = RNNP_rn['num_parc'].str.zfill(4)
RNNP_ens['num_parc'] = RNNP_ens['num_parc'].str.zfill(4)
RNNP_rn.loc[RNNP_rn.code_comm.str.startswith('39'),'code_comm'] = '39' + RNNP_rn.loc[RNNP_rn.code_comm.str.startswith('39'),'code_comm'].str[2:]
RNNP_ens.loc[RNNP_ens.code_comm.str.startswith('39'),'code_comm'] = '39' + RNNP_ens.loc[RNNP_ens.code_comm.str.startswith('39'),'code_comm'].str[2:]
RNNP_rn['par_id'] = RNNP_rn.code_comm+'000'+RNNP_rn.section+RNNP_rn.num_parc
RNNP_ens['par_id'] = RNNP_ens.code_comm+'000'+RNNP_ens.section+RNNP_ens.num_parc
lst_par_id_rnnp = [*RNNP_rn.par_id,*RNNP_ens.par_id]
RNNP = pd.concat([RNNP_rn,RNNP_ens])
RNNP['surf_parc_maitrise_m2'] = RNNP.surface_ac
RNNP['pour_part'] = 0
RNNP.loc[(~RNNP.proportion.isna())&(RNNP.proportion!=100),'pour_part'] = 1 # picked up again later, see dfgere
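# NB: parcelles_RNNP_tofede.gpkg is produced further down in this script (parca1.to_file),
# then re-read here to fill in the FCEN attributes.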
frnnp_tofede = '/home/colas/Documents/tmp/FONCIER_FEDE/PLATIERE/parcelles_RNNP_tofede.gpkg'
rnnp_tofede = gpd.read_file(frnnp_tofede,layer='parcelles_RNNP_tofede')
rnnp_tofede.rename_geometry('geom', inplace=True)
rnnp_tofede['id_site_cen_parc'] = 0
rnnp_tofede['id_site_fcen_parc'] = None # OK
rnnp_tofede['insee_dep'] = rnnp_tofede.par_id.str[:2]
rnnp_tofede['insee_com'] = rnnp_tofede.par_id.str[2:5]
rnnp_tofede['num_section'] = rnnp_tofede.ccosec
rnnp_tofede['num_parcelle'] = rnnp_tofede.par_id
# rnnp_tofede['code_mfu1'] = None # filled later, see dfgere
# rnnp_tofede['code_mfu2'] = None # filled later, see dfgere
# rnnp_tofede['type_prop'] = None # filled later, see parcb
# rnnp_tofede['mesure_compens'] = None # filled later, see dfgere
rnnp_tofede['surf_ore_m2'] = 0
rnnp_tofede['date_debut_ore'] = None
rnnp_tofede['date_fin_ore'] = None
rnnp_tofede['doc_foncier'] = 0
rnnp_tofede['source_doc_foncier'] = 0
rnnp_tofede['parc_gestion_rnx'] = 1
rnnp_tofede['surf_parc_maitrise_m2'] = rnnp_tofede.dcntpa
rnnp_tofede['source_geom_parc_nature'] = 2
rnnp_tofede['source_geom_parc_annee'] = None # filled later, see parcb
rnnp_tofede['echelle_num_parc'] = 0
rnnp_tofede['source_surf_parc'] = 1 # OK
rnnp_tofede['date_maj_parcelle'] = None # filled later, see parcb
rnnp_tofede['bnd'] = None # filled later, see dfgere
rnnp_tofede['pour_part'] = None # filled later, see dfgere
rnnp_tofede['domaine_public'] = 0
rnnp_tofede['id_proprietaire'] = None # filled later, see parcb
# RNNP_ens[(~RNNP_ens.num_parcel.isna())&(RNNP_ens.num_parcel!=RNNP_ens.parcelle)]
###### Retrieve the conservatory (CEN) data
sqlsit = '''SELECT * FROM %s.%s WHERE date_fin is NULL OR date_fin >= '%i-01-01' ;'''%('sites','c_sites_zonages',annee)
dfsite = gpd.read_postgis(sql=sqlsit,geom_col='geom',con=con)
# Isolate all CEN sites (management / assistance / military)
dfsite[
    ~dfsite.code_site.duplicated() |
    (dfsite.code_site.duplicated() & (dfsite.type_zonage == 'ZI'))
]
rngl = dfsite[(dfsite.code_site=='RNGL')&(dfsite.type_zonage=='ZI')]
sqlfon = 'SELECT * FROM %s.%s'%('foncier','c_animation_fonciere_sites')
dffonc = gpd.read_postgis(sql=sqlfon,geom_col='geom',con=con)
##### Data identified for the federation (FCEN) reporting
ffede = '/home/colas/Documents/tmp/FONCIER_FEDE/extract_parc_for_fede.gpkg'
tofede = gpd.read_file(ffede)
tofede.rename_geometry('geom', inplace=True)
cols_tofede = tofede.columns
cols_join = [*cols_parcx[cols_parcx.isin(cols_tofede)]]
tofede = pd.concat([tofede,parcx[cols_join]],ignore_index=True)
tofede.reset_index(drop=True,inplace=True)
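# Fill the FCEN parcel attributes with defaults; fields flagged "filled later" are
# completed from dfgere / parcb / parcc further down.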
if 'type_prop' in tofede.columns:
    tofede.drop(columns=['type_prop'], inplace=True)
# tofede['id_site_cen_parc'] = 0
tofede['id_site_fcen_parc'] = None # OK
tofede['insee_dep'] = tofede.par_id.str[:2]
tofede['insee_com'] = tofede.par_id.str[2:5]
tofede['num_section'] = tofede.ccosec
tofede['num_parcelle'] = tofede.par_id
# tofede['code_mfu1'] = None # filled later, see dfgere
# tofede['code_mfu2'] = None # filled later, see dfgere
# tofede['type_prop'] = None # filled later, see parcb
# tofede['mesure_compens'] = None # filled later, see dfgere
tofede['surf_ore_m2'] = 0
tofede['date_debut_ore'] = None
tofede['date_fin_ore'] = None
tofede['doc_foncier'] = 0
tofede['source_doc_foncier'] = 0
tofede['parc_gestion_rnx'] = 0
tofede.loc[tofede.geom.intersects(rngl.unary_union),'parc_gestion_rnx'] = 1
tofede['surf_parc_maitrise_m2'] = tofede.dcntpa
tofede['source_geom_parc_nature'] = 2
# tofede['source_geom_parc_annee'] = None # filled later, see parcc
tofede['echelle_num_parc'] = 0
tofede['source_surf_parc'] = 1 # OK
# tofede['date_maj_parcelle'] = None # filled later, see parcc
# tofede['bnd'] = None # filled later, see dfgere
# tofede['pour_part'] = None # filled later, see dfgere
tofede['domaine_public'] = 0
# tofede['id_proprietaire'] = None # filled later, see parcb
# c_fede = tofede[['par_id','geom']].copy()
# c_fede.geom = tofede.representative_point()
# pb_parcel = dffonc[~dffonc.intersects(c_fede.unary_union)]
# c_fede.to_file('/home/colas/Documents/tmp/FONCIER_FEDE/centroid_parcelle_for_fede.gpkg', driver="GPKG")
# pb_parcel.to_file('/home/colas/Documents/tmp/FONCIER_FEDE/polys_NoIntersect_parcelles.gpkg', driver="GPKG")
###### Retrieve the conservatory (CEN) data
# sqlsit = 'SELECT * FROM %s.%s'%('sites','c_sites_zonages')
# dfsite = gpd.read_postgis(sql=sqlsit,geom_col='geom',con=con)
sqlfon = 'SELECT * FROM %s.%s'%('foncier','c_animation_fonciere_sites')
dffonc = gpd.read_postgis(sql=sqlfon,geom_col='geom',con=con)
# dffonc.drop(columns=[], inplace=True)
#### 1 geometry becomes 2
# id 7037
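# The land-tenure record 7037 now covers two cadastral parcels: re-point it to A2233 and
# append a copy (add_p, id=0) carrying the same attributes for A2234. Same pattern for id 1932.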
dffonc.loc[dffonc.id==7037, ['code_dep','code_com','section','numero','geom']] = tofede.loc[tofede.par_id=='381000000A2233',['insee_dep','insee_com','num_section','dnupla','geom']].values[0]
add_p = gpd.GeoDataFrame(
[tofede.loc[tofede.par_id=='381000000A2234',['insee_dep','insee_com','num_section','dnupla','geom']].values[0]],
columns=['code_dep','code_com','section','numero','geom']
)
oth_cols = dffonc.columns[~dffonc.columns.isin(add_p.columns)].drop('id')
add_p[[*oth_cols]] = dffonc.loc[dffonc.id==7037, [*oth_cols]].values[0]
add_p['id'] = 0
add_p.set_geometry('geom', crs=2154, inplace=True)
dffonc = pd.concat([dffonc,add_p])
# id 1932
dffonc.loc[dffonc.id==1932, ['code_dep','code_com','section','numero','geom']] = tofede.loc[tofede.par_id=='38140000AM0190',['insee_dep','insee_com','num_section','dnupla','geom']].values[0]
add_p = gpd.GeoDataFrame(
[tofede.loc[tofede.par_id=='38140000AM0189',['insee_dep','insee_com','num_section','dnupla','geom']].values[0]],
columns=['code_dep','code_com','section','numero','geom']
)
oth_cols = dffonc.columns[~dffonc.columns.isin(add_p.columns)].drop('id')
add_p[[*oth_cols]] = dffonc.loc[dffonc.id==1932, [*oth_cols]].values[0]
add_p['id'] = 0
add_p.set_geometry('geom', crs=2154, inplace=True)
dffonc = pd.concat([dffonc,add_p])
####
dropline = dffonc[dffonc.id.isin([7144,7106,7107,7108,7109,7110,6666])].index.values # drop duplicated geometries
dffonc.drop([*dropline], inplace=True)
dffonc['numero'] = dffonc.numero.str.zfill(4)
dffonc['section'] = dffonc.section.str.zfill(2)
dffonc.loc[dffonc.id == 7101,['code_dep','code_com','section','numero']] = ['38','037','0D','0130']
dffonc.loc[dffonc.id == 7102,['code_dep','code_com','section','numero']] = ['38','037','0D','0131']
dffonc.loc[dffonc.id == 7103,['code_dep','code_com','section','numero']] = ['38','037','0D','0132']
dffonc.loc[dffonc.id == 7104,['code_dep','code_com','section','numero']] = ['38','037','0D','0133']
dffonc.loc[dffonc.id == 7105,['code_dep','code_com','section','numero']] = ['38','037','0D','0134']
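# Manual fixes: assign the INSEE commune code (code_com) for specific land-tenure records
# (the ccopre notes below flag absorbed-commune section prefixes).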
dffonc.loc[dffonc.id.isin([
61,62,70,69,68,72,67,66,63,64,60,57,56,59,6,7,8,9,65,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,
26,27,28,29,30,58,31,32,33,34,35,36,37,38,39,40,41,55,54,42,43,71,44,45,46,47,48,49,50,51,52,53,4,5,
1350,1349,1345,1346,1347,1352,1353,1348,1354,1351, # ccopre == 121
2433,2447,2449,2432,2448 # ccopre == 274
]),'code_com'] = '479'
dffonc.loc[dffonc.id.isin([
2604,2605,2535,2599,2603,2602,2601,2639,2534,2536,2537,2532,2538,2540,2539,2635,2641,2533,2563,2562,
2561,2556,2560,2555,2597,2607,2614,2591,2622,2623,2620,2621,2619,2593,2592,2550,2547,2544,2551,2553,
2552,2545,2541,2546,2548,2549,2543,2583,2566,2596,2608,2567,2559,2584,2568,2615,2629,2618,2631,2571,
2609,2572,2590,2632,2569,2570,2595,2558,2636,2633,2606,2574,2573,2564,2579,2580,2577,2578,2565,2557,
2576,2581,2613,2598,2626,2627,2625,2611,2624,2642,2554,2628,2582,2585,2610,2542,2594,2589,2575,2616,
2612,2600,2617,2630,2587,2588,2586,2638,2637,2640,2634
]),'code_com'] = '292'
dffonc.loc[dffonc.id.isin([7114,7115,7116,7117,7118,7119,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135]),'code_com'] = '064'
dffonc.loc[dffonc.id.isin([7120,7121,7122,7123,7124,7125]),'code_com'] = '401'
dffonc.loc[dffonc.id.isin([7136,7137,7138,7139]),['code_com','section']] = ['341','0B']
dffonc.loc[dffonc.id.isin([7142,7143]),'code_com'] = '052'
dffonc.loc[dffonc.id.isin([1010288,1041363]),'code_com'] = '517'
dffonc.loc[dffonc.id.isin([1182503,1182640]),'code_com'] = '147'
dffonc.loc[dffonc.id.isin([1243657]),'code_com'] = '448'
dffonc.loc[dffonc.id.isin([1629034,1629035]),'code_com'] = '257'
dffonc.loc[dffonc.code_dep.isna(),'code_dep'] = '38'
#####
dffonc.loc[dffonc.id==113, 'numero'] = '0389'
dffonc.drop(dffonc[dffonc.id==114].index[0],inplace=True)
dffonc.loc[dffonc.id==151, 'numero'] = '0369'
dffonc.loc[dffonc.id==591, 'numero'] = '0705' # numero '0542' is an old reference
dffonc.sort_values(['code_dep','code_com','section','numero'],inplace=True)
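# Rebuild par_id as dep + com + ccopre placeholder ('___') + section + numero; the placeholder
# is resolved case by case below, then through the attribute and spatial joins.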
dffonc['par_id'] = dffonc.code_dep + dffonc.code_com + '___' + dffonc.section + dffonc.numero
dffonc['parid_start'] = dffonc.code_dep + dffonc.code_com
dffonc['parid_end'] = dffonc.section + dffonc.numero
dffonc.loc[dffonc.id.isin([1634,1638,1606,1607,1608]),'par_id'] = dffonc.loc[dffonc.id.isin([1634,1638,1606,1607,1608]),'par_id'].replace('___','227',regex=True)
dffonc.loc[dffonc.id.isin([1495,1673,1675]),'par_id'] = dffonc.loc[dffonc.id.isin([1495,1673,1675]),'par_id'].replace('___','000',regex=True)
# dffonc.loc[dffonc.id.isin([1495,1675]),'par_id']
# Join by parid_start & parid_end
dfpar_id = tofede[['par_id']].copy()
dfpar_id['parid_start'] = dfpar_id.par_id.str[:5]
dfpar_id['parid_end'] = dfpar_id.par_id.str[-6:]
lst_parid_ok = [1495,1634,1638,1673,1675,1606,1607,1608] # parcels whose full par_id was rebuilt above
parid_ok = dffonc.loc[dffonc.id.isin(lst_parid_ok)]
parid_join = pd.merge(
dffonc.loc[~dffonc.id.isin(lst_parid_ok)].drop(columns=['par_id']),
dfpar_id.loc[~dfpar_id.par_id.isin(parid_ok.par_id)],
on = ['parid_start','parid_end'],
how='left'
)
parid_join.geom = parid_join.buffer(0)
parid_ok = pd.concat([ parid_ok, parid_join[~parid_join.par_id.isna()] ])
parid_ok.drop(columns=['parid_start','parid_end'], inplace=True)
parid_na = parid_join[parid_join.par_id.isna()]
# Join by centroid (representative point)
dfpar_id = tofede.loc[~tofede.par_id.isin(parid_ok.par_id),['par_id','geom']].copy()
dfpar_id.geom = dfpar_id.representative_point()
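# Match the remaining records by containment of the parcel representative point, then keep
# only matches whose par_id is consistent (same commune prefix and same section).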
parid_join = gpd.sjoin(parid_na.drop(columns=['par_id']),dfpar_id,op='contains', how='left').drop(columns=['index_right'])
parid_join_ok = parid_join[ [str(x).startswith(y) and z in str(x) for x,y,z in zip(parid_join['par_id'], parid_join['parid_start'],parid_join['section'])] ].copy()
# parid_join[ [str(x).endswith(z) for x,y,z in zip(parid_join['par_id'], parid_join['parid_start'],parid_join['parid_end'])] ]
parid_join_ok.drop(columns=['parid_start','parid_end'], inplace=True)
# Final land-tenure (animation foncière) layers
parid_OK = pd.concat([parid_ok,parid_join_ok])
parid_NA = dffonc[~dffonc.id.isin(parid_OK.id.unique())]
# Retrieve the BND (biens non délimités) info
sqlmod = 'SELECT * FROM %s'%('animation_fonciere_pour_fcen')
dfmodf = gpd.read_postgis(sql=sqlmod,geom_col='geom',con=con)
del dfmodf['id_0']
dffon_OK = pd.merge(
parid_OK,
dfmodf[['id','Parc_MCE','docs_off_num','Nbr_lots_BND','partiel_conv_ou_acq']],
on='id',how='left')
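# Columns brought in from animation_fonciere_pour_fcen: Parc_MCE ('OUI' -> mesure_compens),
# Nbr_lots_BND (row duplicated once per BND lot below), docs_off_num, partiel_conv_ou_acq.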
#
lst_prop = ['Acquis par le CEN ou CREN','ACQUIS&CONVENTION', 'PARTIELLEMENT MAITRISE']
lst_conv = ["CONVENTION D'USAGE",'CONVENTION']
lst_gere = ['BAIL EMPHYTEOTIQUE', *lst_conv, *lst_prop]
dfgere = dffon_OK[
# (
# (dffon_OK.geom.intersects(dfsite[(dfsite.type_zonage=='ZI')|(dfsite.type_site=='MC')].unary_union))|
# (~dffon_OK.Parc_MCE.isna())|(~dffon_OK.Nbr_lots_BND.isna())|
# (~dffon_OK.partiel_conv_ou_acq.isna())|(~dffon_OK.docs_off_num.isna())
# )
# &
(dffon_OK.maitrise.isin(lst_gere))
].copy()
dfautr = dffon_OK[~dffon_OK.par_id.isin(dfgere.par_id)].copy()
# id_site_cen_parc
dfgere['id_site_cen_parc'] = dfgere.id
# code_mfu1 & code_mfu2
dfgere['code_mfu1'] = None
dfgere['code_mfu2'] = None
dfgere.loc[dfgere.maitrise.isin(lst_prop),'code_mfu1'] = 'P'
dfgere.loc[dfgere.maitrise.isin(lst_conv),'code_mfu1'] = 'C'
dfgere.loc[dfgere.maitrise == 'BAIL EMPHYTEOTIQUE','code_mfu1'] = 'L'
dfgere.loc[dfgere.maitrise.isin(lst_prop),'code_mfu2'] = 'P1'
dfgere.loc[dfgere.maitrise.isin(lst_conv),'code_mfu2'] = 'C7'
dfgere.loc[dfgere.maitrise == 'BAIL EMPHYTEOTIQUE','code_mfu2'] = 'L1'
# BND
dfgere['bnd'] = 0
dfgere.loc[dfgere.indivision=='BND','bnd'] = 1
tmp = pd.DataFrame(columns=dfgere.columns)
for i,row in dfgere.loc[~dfgere.Nbr_lots_BND.isna()].iterrows():
    # duplicate the row so there is one line per BND lot (DataFrame.append was removed in pandas 2.0)
    tmp = pd.concat([tmp, pd.DataFrame([row]*(int(row.Nbr_lots_BND)-1))])
dfgere = pd.concat([dfgere,tmp]).sort_index()
# MC
dfgere['mesure_compens'] = 0
dfgere.loc[dfgere.Parc_MCE=='OUI','mesure_compens'] = 1
# pour_part
dfgere['pour_part'] = 0
dfgere.loc[dfgere.maitrise=='PARTIELLEMENT MAITRISE','pour_part'] = 1
# Military parcels
dic = {
'ccosec':'section',
}
dfmili = dfsite[dfsite.type_site=='MILI'].copy()
sqlmil = "SELECT * FROM %s.%s WHERE ST_Contains('SRID=2154;%s',geom)" % ('cadastre', 'parcelles_38',dfmili.buffer(10).unary_union)
miliparc = gpd.read_postgis(sqlmil,con=con_fon)
miliparc['code_dep'] = miliparc.codcom.str[:2]
miliparc['code_com'] = miliparc.codcom.str[2:]
dfmili = miliparc[['par_id','geom','ccosec','code_dep','code_com']].copy()
dfmili.rename(columns=dic, inplace=True)
dfmili['id_site_cen_parc'] = 0
dfmili['code_mfu1'] = 'C'
dfmili['code_mfu2'] = 'C12'
dfmili['bnd'] = 0
dfmili['mesure_compens'] = 0
dfmili['pour_part'] = 0
dfgere = pd.concat([dfgere,dfmili])
# from pycen import update_to_sql
# tmpdffonc = dffonc[['id','nom_propri']]
# tmpdffonc["nom_propri"] = tmpdffonc["nom_propri"].str.strip()
# tmpdffonc.loc[tmpdffonc.nom_propri== '',"nom_propri"] = None
# update_to_sql(df=tmpdffonc, con=con, table_name='c_animation_fonciere_sites', schema_name='foncier', key_name='id', geom_col=None)
#####################
##### Parcels ##### 2017380474000AN
# Does not include Platière or the Grand Lemps
#####################
# Retrieve info from the bd_cen database (#Foncier)
# RNNP
sql = '''SELECT DISTINCT ON (t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,t1.ccosec,t1.dnupla)
t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,
t1.ccosec,t1.dnupla::text, STRING_AGG(t6.gtoper::text,',') gtoper, STRING_AGG(t6.ccogrm::text,',') ccogrm,
STRING_AGG(CONCAT(t6.gtoper::text||COALESCE('_'||t6.ccogrm::text,'')),',') type_prop, STRING_AGG(t6.ddenom::text,',') ddenom
FROM {sch}."{t1}" t1
JOIN ({sch}."lots_38" t2
JOIN ({sch}."cadastre_38" t3
JOIN ({sch}."cptprop_38" t4
JOIN ({sch}."r_prop_cptprop_38" t5
JOIN {sch}."proprios_38" t6 USING (dnuper))
USING (dnupro))
USING (dnupro))
USING (lot_id))
USING (par_id)
WHERE par_id IN ('{par_ids}') GROUP BY 1,2,3,4,5,6'''.format(
sch='cadastre',
t1='parcelles_38',
par_ids="','".join(lst_par_id_rnnp))
parca1 = gpd.read_postgis(text(sql),con=con_fon)
parca1.to_file('/home/colas/Documents/tmp/FONCIER_FEDE/PLATIERE/parcelles_RNNP_tofede.gpkg', driver='GPKG')
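# The GeoPackage written above is the one re-read near the top of the script (rnnp_tofede).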
# Retrieve info from the bd_cen database (#Foncier)
sql = '''SELECT DISTINCT ON (t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,t1.ccosec,t1.dnupla)
t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,
t1.ccosec,t1.dnupla::text, STRING_AGG(t6.gtoper::text,',') gtoper, STRING_AGG(t6.ccogrm::text,',') ccogrm,
STRING_AGG(CONCAT(t6.gtoper::text||COALESCE('_'||t6.ccogrm::text,'')),',') type_prop, STRING_AGG(t6.ddenom::text,',') ddenom
FROM {sch}."{t1}" t1
JOIN ({sch}."lots_38" t2
JOIN ({sch}."cadastre_38" t3
JOIN ({sch}."cptprop_38" t4
JOIN ({sch}."r_prop_cptprop_38" t5
JOIN {sch}."proprios_38" t6 USING (dnuper))
USING (dnupro))
USING (dnupro))
USING (lot_id))
USING (par_id)
WHERE par_id IN ('{par_ids}') GROUP BY 1,2,3,4,5,6'''.format(
sch='cadastre',
t1='parcelles_38',
par_ids="','".join(tofede.par_id))
parca2 = gpd.read_postgis(text(sql),con=con_fon)
cols_parca2 = parca2.columns
cols_join = [*cols_parcx[cols_parcx.isin(cols_parca2)]]
parcb = pd.concat([
parca1,
parca2[~parca2.par_id.isin(parcx.par_id)],
parcx[cols_join]
])
# n = 10
# while n!= 0:
# parcb.type_prop = parcb.type_prop.replace(',,',',',regex=True)
# n = n-1
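# Normalise the aggregated owner codes into a single FCEN owner type per parcel:
# 2_x -> moral persons (PUB/PRI/COM/MIX), 1 -> private individuals, 3_CEN -> the CEN itself.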
parcb.loc[parcb.type_prop.str.contains('2_0'),['type_prop','gtoper','ccogrm']] = ['2_0_MIX','2','0']
parcb.loc[parcb.type_prop.isin(['2_7,2_3,','2_7,2_3,2_3,']),['type_prop','gtoper','ccogrm']] = ['2_0_MIX','2','0']
parcb.loc[parcb.type_prop.str.contains('2_9'),['type_prop','gtoper','ccogrm']] = ['2_9_PUB','2','9']
parcb.loc[parcb.type_prop.str.contains('2_8'),['type_prop','gtoper','ccogrm']] = ['2_8_PRI','2','8']
parcb.loc[parcb.type_prop.str.contains('2_7'),['type_prop','gtoper','ccogrm']] = ['2_7_PRI','2','7']
parcb.loc[parcb.type_prop.str.contains('2_6'),['type_prop','gtoper','ccogrm']] = ['2_6_PRI','2','6']
parcb.loc[parcb.type_prop.str.contains('2_5'),['type_prop','gtoper','ccogrm']] = ['2_5_PUB','2','5']
parcb.loc[parcb.type_prop.str.contains('2_4'),['type_prop','gtoper','ccogrm']] = ['2_4_COM','2','4']
parcb.loc[parcb.type_prop.str.contains('2_3'),['type_prop','gtoper','ccogrm']] = ['2_3_PUB','2','3']
parcb.loc[parcb.type_prop.str.contains('2_2'),['type_prop','gtoper','ccogrm']] = ['2_2_PUB','2','2']
parcb.loc[parcb.type_prop.str.contains('2_1'),['type_prop','gtoper','ccogrm']] = ['2_1_PUB','2','1']
parcb.loc[parcb.type_prop.str.contains('1,1'),['type_prop','gtoper','ccogrm']] = ['1_PRI','1',None]
parcb.loc[parcb.type_prop == '1',['type_prop','gtoper','ccogrm']] = ['1_PRI','1',None]
lstterm_cen38 = ["CONSERVATOIRE D'ESPACES NATURELS ISERE","AVENIR CONSERVATOIRE DES ESPACES NATURELS DE L'ISERE","CONSERVATOIRE D'ESPACES NATURELS ISERE AVENIR DIT CEN ISERE"]
lstterm_cenra = ["CONSERVATOIRE RHONE ALPES DES ESPACES NATURELS","CONSERVATOIRE RH ALP ESPACES NATURELS"]
parcb.loc[parcb.ddenom.isin([*lstterm_cen38,*lstterm_cenra]),['type_prop','gtoper','ccogrm']] = ['3_CEN','3',None]
# parcb.loc[parcb.type_prop.str.len()==3,'gtoper'] = parcb[parcb.type_prop.str.len()==3].type_prop.str[0]
# parcb.loc[parcb.type_prop.str.len()==3,'ccogrm'] = parcb[parcb.type_prop.str.len()==3].type_prop.str[-1]
parcb['dnupla'] = parcb.dnupla.str.zfill(4)
parcb['id_proprietaire'] = 0
parcb.loc[parcb.ddenom.str.contains('|'.join(lstterm_cenra),na=False),'id_proprietaire'] = '28'
# Retrieve info from the cadastre VM (#Foncier)
# sql = '''SELECT geo_parcelle, substring(geo_parcelle from 1 for 2)||substring(geo_parcelle from 4 for 12) par_id, geom
# FROM "38_202207".geo_parcelle'''
# 38517000AW0032 # par_id
# 380517000AW0032 # geo_parcelle
sql = '''SELECT
substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) par_id,
t1.annee, t1.update_dat, t1.geom
FROM "{sch}"."{t1}" t1
WHERE substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) IN ('{par_ids}')
'''.format(
sch='38_202207',
t1='v_geo_parcelle',
par_ids="','".join(tofede[~tofede.par_id.isin(parcx.par_id)].par_id))
parcc0 = gpd.read_postgis(text(sql),con=con_vm)
sql = '''SELECT
substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) par_id,
t1.annee, t1.update_dat, t1.geom
FROM "{sch}"."{t1}" t1
WHERE substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 12) IN ('{par_ids}')
'''.format(
sch='26_202007',
t1='v_geo_parcelle',
par_ids="','".join(tofede[~tofede.par_id.isin(parcx.par_id)].par_id))
parcc1 = gpd.read_postgis(text(sql),con=con_vm)
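# parcc provides, per parcel, the cadastral vintage (annee) and last update date from the
# 38 and 26 cadastre VMs, plus the parcels already fetched in parcx.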
cols_parcc = parcc0.columns
cols_join = [*cols_parcx[cols_parcx.isin(cols_parcc)]]
parcc = pd.concat([parcc0,parcc1,parcx[cols_join]])
parcc['source_geom_parc_annee'] = parcc.annee
parcc['date_maj_parcelle'] = parcc.update_dat
l1 = dfgere.columns[dfgere.columns.isin(lst_cols_parc)]
l2 = tofede.columns[tofede.columns.isin(lst_cols_parc)]
l3 = parcb.columns[parcb.columns.isin(lst_cols_parc)]
l4 = parcc.columns[parcc.columns.isin(lst_cols_parc)]
p1 = dfgere[['par_id',*l1]].copy()
p2 = tofede[['par_id',*l2,'geom']].copy()
p3 = parcb[['par_id',*l3]].copy()
p4 = parcc[['par_id',*l4]].copy()
dfs = [p1,p2,p3,p4]
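# Merge the four attribute sources on par_id: land tenure (p1), federation extract with
# geometry (p2), owners (p3) and cadastral vintage (p4).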
df_parcs = reduce(lambda left,right: pd.merge(left,right,on=['par_id'],
how='left'), dfs)
for par_id in df_parcs[df_parcs.par_id.duplicated(keep=False)].par_id.unique():
    # split the mastered surface equally between the duplicated rows of a same par_id
    x = df_parcs[df_parcs.par_id==par_id].shape[0]
    df_parcs.loc[df_parcs.par_id==par_id,'surf_parc_maitrise_m2'] = round(df_parcs.loc[df_parcs.par_id==par_id,'surf_parc_maitrise_m2']/x).astype(int)
df_parcs.date_maj_parcelle = df_parcs.date_maj_parcelle.astype(str)
gdf_tofede = df_parcs[['id_site_fcen_parc','num_parcelle','date_maj_parcelle','geom']].copy()
gdf_tofede = gdf_tofede.set_geometry('geom', crs=2154)
gdf_tofede.rename(columns={
'id_site_fcen_parc':'id_fcen',
'num_parcelle':'num_parc',
'date_maj_parcelle':'date_maj_p'
},inplace=True)
gdf_tofede.to_file('/home/colas/Documents/tmp/FONCIER_FEDE/SEND/Parcelles_CEN_38_%i.shp' % annee)
df_tofede = df_parcs[lst_cols_parc].copy()
df_tofede.loc[df_tofede.date_debut_ore.isna(),'date_debut_ore']
df_tofede.to_csv('/home/colas/Documents/tmp/FONCIER_FEDE/SEND/Parcelles_CEN_38_%i.csv' % annee)
# parcb.loc[(parcb.ddenom.str.contains("AVENIR CONSERVATOIRE DES ESPACES NATURELS DE L'ISERE",case=False))].ddenom.unique()
# parcb.loc[(parcb.ddenom.str.contains('CONSERVATOIRE|CEN',case=False))&(parcb.ddenom.str.contains('ISERE',case=False))].ddenom.unique()
# parcb.loc[parcb.gtoper.isin(['1,1','2,1','1,2','1,1,1']),'gtoper'] = '1'
# parcb.loc[parcb.gtoper.isin(['1,1,1,1','2,1,1,1,1,1,1','2,1,1,1,1,1,1,1,1,1,1,1,1,1,1','2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1']),'gtoper'] = '1'
# parcb.loc[parcb.gtoper.isin(['2,2']),'gtoper'] = '2'
# parcb.loc[parcb.gtoper=='1','ccogrm'] = None
parcb[parcb.par_id.duplicated(keep=False)]
dic_col = {
'code_dep' : 'insee_dep',
'section' : 'num_section',
'num_unique_v2' : 'num_parcelle', # ????
}
dic_parc = {
'id':'id_site_cen_parc',
}
lst_prop = ['Acquis par le CEN ou CREN','ACQUIS&CONVENTION', 'PARTIELLEMENT MAITRISE']
lst_conv = ["CONVENTION D'USAGE",'CONVENTION']
lst_gere = ['BAIL EMPHYTEOTIQUE', *lst_conv, *lst_prop]
sql = '''SELECT DISTINCT
t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,
t1.ccosec,t1.dnupla::text, t6.gtoper::text, t6.ccogrm::text
FROM {sch}."{t1}" t1
JOIN ({sch}."lots_38" t2
JOIN ({sch}."cadastre_38" t3
JOIN ({sch}."cptprop_38" t4
JOIN ({sch}."r_prop_cptprop_38" t5
JOIN {sch}."proprios_38" t6 USING (dnuper))
USING (dnupro))
USING (dnupro))
USING (lot_id))
USING (par_id)
WHERE ST_Within(t1.geom, 'SRID=2154;{geom}') '''.format(
sch='cadastre',
t1='parcelles_38',
geom=dffonc.unary_union)
sql = '''SELECT DISTINCT
t1.par_id,t1.geom, t1.dcntpa,t1.typprop_id,t1.ccopre,
t1.ccosec,t1.dnupla::text, t6.gtoper::text, t6.ccogrm::text
FROM {sch}."{t1}" t1
JOIN ({sch}."lots_38" t2
JOIN ({sch}."cadastre_38" t3
JOIN ({sch}."cptprop_38" t4
JOIN ({sch}."r_prop_cptprop_38" t5
JOIN {sch}."proprios_38" t6 USING (dnuper))
USING (dnupro))
USING (dnupro))
USING (lot_id))
USING (par_id)
WHERE ST_Intersects(t1.geom, 'SRID=2154;{geom}') '''.format(
sch='cadastre',
t1='parcelles_38',
geom=dffonc.unary_union)
sql = sql + " AND t1.par_id LIKE ANY (array['%s']);"% "','".join(dffonc[~dffonc.par_id.isna()].par_id)
parc = gpd.read_postgis(sql=text(sql),geom_col='geom',con=con_fon)
parc['dnupla'] = parc.dnupla.str.zfill(4)
# parc = gpd.read_file('/home/colas/Documents/tmp/FONCIER_FEDE/extract_parc_for_fede.gpkg')
# parc.rename_geometry('geom',inplace=True)
# parc.to_file('/home/colas/Documents/tmp/FONCIER_FEDE/extract_parc_for_fede.gpkg',driver='GPKG')
parc['semi_par_id'] = parc.par_id.str[:5] + parc.ccosec + parc.dnupla
######################
# HARMONISATION of the FONCIER (land-tenure) layer
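# Buffered copies of the land-tenure layer, used to check the spatial match between the
# foncier polygons and the cadastral parcels (parc) and to spot mismatches.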
# independent copies (a chained assignment would make all four names share one GeoDataFrame)
dffonc_b1 = dffonc.copy()
dffonc_b5 = dffonc.copy()
dffonc_b20 = dffonc.copy()
dffonc_b50 = dffonc.copy()
dffonc_b1.geom = dffonc_b1.buffer(1)
dffonc_b5.geom = dffonc_b5.buffer(5)
dffonc_b20.geom = dffonc_b20.buffer(20)
dffonc_b50.geom = dffonc_b50.buffer(50)
tmp = gpd.sjoin(dffonc_b50[~dffonc_b50.numero.isna()],parc,op='contains',how='left')
tmp2 = tmp[tmp.numero == tmp.dnupla]
crsp = tmp2.id.tolist()
tmp[~tmp.id.isin(crsp)]
tmp[tmp.id.duplicated(keep=False)]
##############
##############
dfparc = parc[['par_id','gtoper','ccogrm','geom']]
dfgere = dffonc[dffonc.maitrise.isin(lst_gere)].copy() # managed parcels (maîtrise foncière)
dfgere['code_mfu1'] = None
dfgere['code_mfu2'] = None
dfgere.loc[dfgere.maitrise.isin(lst_prop),'code_mfu1'] = 'P'
dfgere.loc[dfgere.maitrise.isin(lst_conv),'code_mfu1'] = 'C'
dfgere.loc[dfgere.maitrise == 'BAIL EMPHYTEOTIQUE','code_mfu1'] = 'L'
dfgere.loc[dfgere.maitrise.isin(lst_prop),'code_mfu2'] = 'P1'
dfgere.loc[dfgere.maitrise.isin(lst_conv),'code_mfu2'] = 'C7'
dfgere.loc[dfgere.maitrise == 'BAIL EMPHYTEOTIQUE','code_mfu2'] = 'L1'
dfgere.loc[dfgere.maitrise == 'PARTIELLEMENT MAITRISE','bnd'] = 1
dfgere.loc[dfgere.maitrise != 'PARTIELLEMENT MAITRISE','bnd'] = 0
# Intervention parcels
dftmp = dffonc[~dffonc.maitrise.isin(lst_gere)].copy() # temporary layer of non-managed parcels
sittmp = dfsite[(dfsite.assist_cnv)&(dfsite.assist_col)] # sites under assistance
intersect = dftmp.intersects(sittmp.unary_union) # intersection: [temp layer] vs [assistance sites]
dfintr = dftmp[intersect] # intervention parcels
# Military parcels
dic = {
'ccosec':'num_section',
'par_id':'num_parcelle'
}
dfmili = dfsite[dfsite.type_site=='MILI'].copy()
sqlmil = "SELECT * FROM %s.%s WHERE ST_Contains('SRID=2154;%s',geom)" % ('cadastre', 'parcelles_38',dfmili.buffer(10).unary_union)
miliparc = gpd.read_postgis(sqlmil,con=con_fon)
miliparc['code_dep'] = miliparc.codcom.str[:2]
miliparc['code_com'] = miliparc.codcom.str[2:]
dfmili = miliparc[['par_id','geom','ccosec','code_dep','code_com']].copy()
dfmili.rename(columns=dic, inplace=True)
# All parcels
df = pd.concat([dfgere,dfintr,dfmili], ignore_index=True)
keep_col = df.columns[df.columns.isin(lst_cols_parc)]
df = df[keep_col]
df.reset_index(drop=True, inplace=True)
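# Fetch the cadastral parcels and owner info intersecting the representative points of the
# selected parcels (cadastre.parcelles_38 and ref_cadastre.parcelle_info).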
sql = '''SELECT par_id,geom, dcntpa,typprop_id FROM %s."%s" WHERE ST_Intersects(geom, 'SRID=2154;%s') ''' % ('cadastre', 'parcelles_38',df.representative_point().unary_union)
parc = gpd.read_postgis(sql=sql,geom_col='geom',con=con_fon)
sql = '''SELECT geo_parcelle,geo_section,idu,proprietaire,proprietaire_info,geom FROM %s."%s" WHERE ST_Intersects(geom, 'SRID=2154;%s') ''' % ('ref_cadastre', 'parcelle_info',df.representative_point().unary_union)
prop = gpd.read_postgis(sql=sql,geom_col='geom',con=con)
df['insee_com'] = df.code_dep + df.code_com
tmp = df[['id','geom']]
gpd.sjoin(df,parc[['par_id','geom']],how='left',op='intersects')
# df['num_parcelle'] =
df['surf_ore_m2'] = 0
df['date_debut_ore'] = None
df['date_fin_ore'] = None
lst_doc = []
df.loc[df.id.isin(lst_doc),'doc_foncier'] = 1
df.loc[~df.id.isin(lst_doc),'doc_foncier'] = 0
df.loc[df.doc_foncier == 1,'source_doc_foncier'] = 'CEN Isère'
df['pour_part'] = 0
df['domaine_public'] = 0
df['id_proprietaire'] = 0
df['parc_gestion_rnx'] = None # ?????????????????
df['source_geom_parc_nature'] = 2
df['source_geom_parc_annee'] = 2017
df['source_surf_parc'] = 1
# 3800010000A0001
# 38566AK00981000
# 596060000A0012
# 073490000C0103
###########
### Other #
###########
df = pd.DataFrame()
df['id_cen'] = 14
df['id_site_cen'] = ''