init

parent 05cb232bec
commit fe028eb812

0_FONCIER/foncier_insert_cadastre_V4.py (new file, 760 lines)
@@ -0,0 +1,760 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

from pycen import con_fon
from sqlalchemy.sql import text
from sqlalchemy.engine import URL
from sqlalchemy import create_engine
import pandas as pd
import geopandas as gpd

# user = 'cgeier'
# pwd = 'adm1n*bdCen'
# adr = '91.134.194.221'
# base = 'cadastre'
user = 'postgres'
pwd = 'foncier_test1'
adr = '172.17.0.2'
base = 'postgres'
url = URL.create('postgresql+psycopg2',
                 username=user,
                 password=pwd,
                 host=adr,
                 database=base,
                 )
con_cad = create_engine(url)


def recup_cols_table(table, con, schema='38_202207'):
    lst_cols = con.dialect.get_columns(con, table, schema)
    return [x['name'] for x in lst_cols]


def __get_pkey__(engine, table_name, schema):
    pk = engine.dialect.get_pk_constraint(engine, table_name=table_name, schema=schema)
    return pk
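
# Minimal usage sketch for the two introspection helpers above. The schema and
# table names are only examples of what the cadastre dump is expected to contain:
# cols = recup_cols_table('parcelle', con_cad, schema='38_202207')
# pk = __get_pkey__(con_cad, 'parcelle', '38_202207')
# print(cols)
# print(pk.get('constrained_columns'))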


def _where_parcelle(sql0, schema, list_parid):

    if list_parid is not None:
        chunk = None
        if isinstance(list_parid, str):
            list_parid = [list_parid]

        LIST_ID = str(tuple(list_parid)).replace(',)', ')')

        sql1 = '''
        WHERE p.parcelle IN {list_id}
        ;'''.format(sch=schema, list_id=LIST_ID)
    else:
        chunk = 200000
        sql1 = ';'

    sql = sql0 + sql1
    df = pd.read_sql_query(text(sql), con=con_cad, chunksize=chunk)

    # if chunk is not None:
    #     for d in df:
    #         print(d.shape[0])
    #         d.drop_duplicates(inplace=True)
    #         print(d.drop_duplicates().shape[0])
    # else:
    #     df.drop_duplicates(inplace=True)

    return df


def _get_chunk(df1, df2):
    # cptp1 = pd.DataFrame()
    list_DF1 = []
    for d1 in df1:
        list_DF1.append(d1)

    DF1 = pd.concat(list_DF1)

    # cptp2 = pd.DataFrame()
    list_DF2 = []
    for d2 in df2:
        list_DF2.append(d2)

    DF2 = pd.concat(list_DF2)

    return pd.concat([DF1, DF2]).drop_duplicates()
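
# Sketch of how the chunked readers above are meant to be combined: with
# list_parid=None, _where_parcelle returns an iterator of 200 000-row chunks,
# and _get_chunk merges two such iterators into a single deduplicated DataFrame.
# The SELECT prefixes and schema names below are illustrative only:
# sql_38 = 'SELECT p.parcelle, p.dnupro FROM "38_202207".parcelle p'
# sql_07 = 'SELECT p.parcelle, p.dnupro FROM "07_202207".parcelle p'
# parcelles = _get_chunk(_where_parcelle(sql_38, '38_202207', None),
#                        _where_parcelle(sql_07, '07_202207', None))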


def __get_parcelles__(sql0, list_parid):

    if list_parid is not None:
        chunk = None
        if isinstance(list_parid, str):
            list_parid = [list_parid]

        LIST_ID = str(tuple(list_parid)).replace(',)', ')')

        sql1 = '''
        WHERE t1.geo_parcelle IN {list_id}
        ;'''.format(list_id=LIST_ID)
    else:
        chunk = None
        sql1 = ';'

    sql = sql0 + sql1
    # print(text(sql))
    return gpd.read_postgis(sql=sql, con=con_cad, chunksize=chunk)


def _get_parcelles1(schema='38_202207', list_parid=None):
    sql0 = '''SELECT DISTINCT ON (t1.geo_parcelle)
        t1.geo_parcelle,
        case when t1.geom is null then t2.geom
            else t1.geom
        end geom,
        substring(p.parcelle from 1 for 2)||substring(p.parcelle from 4 for 12) par_id,
        substring(p.parcelle from 1 for 2)||substring(p.parcelle from 4 for 3) codcom,
        substring(p.parcelle from 1 for 2) ccodep,
        substring(p.parcelle from 4 for 3) ccocom,
        substring(p.parcelle from 7 for 3) ccopre,
        substring(p.parcelle from 10 for 2) ccosec,
        substring(p.parcelle from 12 for 4) dnupla,
        p.annee annee_pci,
        t1.update_dat,
        p.dparpi,
        p.dcntpa,
        p.ccocomm,
        p.ccoprem,
        p.ccosecm,
        p.dnuplam,
        p.ccovoi,
        p.ccoriv,
        p.type_filiation "type",
        substring(t1.geo_parcelle from 1 for 2)||substring(t1.geo_parcelle from 4 for 3)||p.ccovoi vl_id,
        (SELECT STRING_AGG(DISTINCT gtoper::text,',') FROM "{sch}".proprietaire WHERE (ccodep,ccocom,dnupro) = (t3.ccodep,t3.ccocom,t3.dnupro)) gtoper,
        (SELECT STRING_AGG(DISTINCT ccogrm::text,',') FROM "{sch}".proprietaire WHERE (ccodep,ccocom,dnupro) = (t3.ccodep,t3.ccocom,t3.dnupro)) ccogrm,
        (SELECT STRING_AGG(DISTINCT CONCAT(gtoper::text||COALESCE('_'||ccogrm::text,'')),',') FROM "{sch}".proprietaire WHERE (ccodep,ccocom,dnupro) = (t3.ccodep,t3.ccocom,t3.dnupro)) typprop,
        (SELECT STRING_AGG(DISTINCT TRIM(ddenom)::text,',') FROM "{sch}".proprietaire WHERE (ccodep,ccocom,dnupro) = (t3.ccodep,t3.ccocom,t3.dnupro)) ddenom
    FROM "{sch}".{t1} p
    LEFT JOIN "{sch}".parcelle_info t2 ON t2.geo_parcelle = p.parcelle
    LEFT JOIN "{sch}"."geo_parcelle" t1 ON t1.geo_parcelle = p.parcelle
    LEFT JOIN "{sch}".proprietaire t3 USING (ccodep,ccocom,dnupro)
    '''.format(
        sch=schema,
        t1='parcelle')

    return __get_parcelles__(sql0, list_parid)


def drop_tables(con):
    sql = '''
    TRUNCATE TABLE cadastre.parcelles RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.cadastre RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.cptprop RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.r_prop_cptprop RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.lots RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.lots_natcult RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.proprios RESTART IDENTITY CASCADE;
    TRUNCATE TABLE cadastre.vl RESTART IDENTITY CASCADE;
    ALTER SEQUENCE
        cadastre.cadastre_cad_id_seq RESTART WITH 1;
    ALTER SEQUENCE
        cadastre.lots_natcult_lotnatcult_id_seq RESTART WITH 1;
    '''
    with con.begin() as cnx:
        cnx.execute(sql)


# CENRA 2020
def _insert_voie1(schema='38_202207', list_parid=None):
    '''Insert roads and named places into cadastre.vl from the voie table.
    Only roads actually assigned to parcels are kept, so that cancelled roads (annul = 'O') are not carried along.
    '''
    sql0 = '''set work_mem='512MB';
    INSERT into cadastre.vl
    SELECT
        ccodep||ccocom||codvoi AS vl_id, --character varying(10) NOT NULL, -->>> not the right character count, but no duplicates observed in Auvergne
        libvoi AS libelle,--character varying(50),
        null AS geom -- geom geometry(MultiPolygon,2154),
    FROM "{sch}".voie
    WHERE ccodep||ccocom||codvoi IN (
        SELECT DISTINCT ccodep||ccocom||ccovoi FROM "{sch}".parcelle )
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql0)


def _insert_voie2(schema='38_202207', list_parid=None):
    '''Insert the missing roads based on their ccoriv --> 12
    '''
    sql0 = '''set work_mem='512MB';
    INSERT into cadastre.vl
    SELECT DISTINCT
        parcelle.ccodep||parcelle.ccocom||parcelle.ccovoi AS vl_id,
        libvoi
    FROM "{sch}".parcelle
    JOIN "{sch}".voie ON voie.ccoriv = parcelle.ccoriv AND voie.ccocom = parcelle.ccocom
    WHERE parcelle.ccodep||parcelle.ccocom||parcelle.ccovoi NOT IN (
        SELECT ccodep||ccocom||codvoi AS vl_id FROM "{sch}".voie)
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql0)


def _insert_voie3(schema='38_202207', list_parid=None):
    '''Manually add the remaining roads referenced in parcelle but missing from voie (neither by codvoi nor by ccoriv) --> 0
    '''
    sql0 = '''set work_mem='512MB';
    INSERT into cadastre.vl
    SELECT DISTINCT ccodep||ccocom||ccovoi AS vl_id, 'NON TROUVEE' AS libelle
    FROM "{sch}".parcelle
    WHERE ccodep||ccocom||ccovoi NOT IN (SELECT vl_id from cadastre.vl)
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql0)


def insert_voie(schema='38_202207', list_parid=None):
    _insert_voie1(schema, list_parid)
    _insert_voie2(schema, list_parid)
    _insert_voie3(schema, list_parid)
    print('INSERT voie OK')


# def _manage_parc_without_geom(schema='38_202207'):
#     sql = '''CREATE TEMP TABLE parcelles_sans_geom AS (
#         SELECT parcelle FROM "{sch}".parcelle a LEFT JOIN "{sch}".geo_parcelle b ON b.geo_parcelle = a.parcelle WHERE b.geo_parcelle is null
#     '''.format(
#         sch=schema)
#     with con_fon.begin() as cnx:
#         cnx.execute(sql)

# def add_old_geom(schema='38_202207'):
#     sql = '''
#     '''


def _insert_parcelle1(schema='38_202207', list_parid=None):
    '''01/ parcels'''
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.parcelles
    (
    SELECT DISTINCT ON (ccodep || ccocom || replace(ccopre, ' ', '0') || replace(ccosec, ' ', '0') || dnupla)
        ccodep || ccocom || replace(ccopre, ' ', '0') || replace(ccosec, ' ', '0') || dnupla AS par_id,
        -- Parcel identifier
        ccodep || ccocom AS codcom,
        -- INSEE code of the commune
        --CASE WHEN btrim(ccovoi) <> '' THEN ccodep || ccocom || ccovoi ELSE NULL END AS vl_id,
        ccodep || ccocom || ccovoi AS vl_id,
        --character varying(10) NOT NULL, -->>> left empty when ccovoi is not filled in the parcelle table
        btrim(ccopre) as ccopre,
        -- Section prefix or served district
        btrim(ccosec) as ccosec,
        -- Section code
        CASE WHEN btrim(dnupla) = ''
            THEN NULL :: INTEGER
            ELSE ltrim(btrim(dnupla), '0') :: INTEGER END AS dnupla,
        -- Parcel number
        CASE WHEN btrim(dparpi) = ''
            THEN NULL :: INTEGER
            ELSE ltrim(btrim(dparpi), '0') :: INTEGER END AS dparpi,
        -- Former parcel number (primitive parcel)
        dcntpa :: INTEGER,
        -- Cadastral area of the parcel
        NULL AS typprop_id,
        -- Property type of the parcel >>>>>>>>>>>>>>>>>>>>>>>>>>>>>> to be filled in later (see the cadastre update tasks)
        geom,
        -- Parcel geometry
        ccocomm,
        -- Commune of the parent parcel
        ccoprem,
        -- Prefix of the parent parcel
        btrim(ccosecm) as ccosecm,
        -- Section of the parent parcel
        CASE WHEN dnuplam = ''
            THEN NULL :: INTEGER
            ELSE ltrim(dnuplam, '0') :: INTEGER
        END AS dnuplam,
        -- Number of the parent parcel
        type_filiation AS type -- Filiation type
    FROM "{sch}".parcelle a
    JOIN "{sch}".geo_parcelle ON geo_parcelle.geo_parcelle = a.parcelle
    )
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


# TO BE REWORKED
def _insert_parcelle2(schema='38_202207', list_parid=None):
    '''2/ Missing geometries
    MAJIC parcels that no longer have a geometry in EDIGEO because they were merged, split or transferred.
    EDIGEO holds the geometry of the new parcels, but MAJIC has nothing.
    The geometry is recovered from an earlier EDIGEO release.'''
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.parcelles
    ( WITH t1 as(
    SELECT DISTINCT ON (a.ccodep || a.ccocom || replace(a.ccopre, ' ', '0') || replace(a.ccosec, ' ', '0') || a.dnupla)
        parcelle,
        a.ccodep || a.ccocom || replace(a.ccopre, ' ', '0') || replace(a.ccosec, ' ', '0') || a.dnupla AS par_id,
        -- Parcel identifier
        a.ccodep || a.ccocom AS codcom,
        -- INSEE code of the commune
        --CASE WHEN btrim(ccovoi) <> '' THEN ccodep || ccocom || ccovoi ELSE NULL END AS vl_id,
        a.ccodep || a.ccocom || a.ccovoi AS vl_id,
        --character varying(10) NOT NULL, -->>> left empty when ccovoi is not filled in the parcelle table
        replace(a.ccopre, ' ', '0') AS ccopre,
        -- Section prefix or served district
        -- btrim(a.ccosec) AS ccosec,
        replace(a.ccosec, ' ', '0') AS ccosec,
        -- Section code
        CASE WHEN btrim(a.dnupla) = ''
            THEN NULL :: INTEGER
            ELSE ltrim(btrim(a.dnupla), '0') :: INTEGER
        END AS dnupla,
        -- Parcel number
        CASE WHEN btrim(a.dparpi) = ''
            THEN NULL :: INTEGER
            ELSE ltrim(btrim(a.dparpi), '0') :: INTEGER
        END AS dparpi,
        -- Former parcel number (primitive parcel)
        a.dcntpa :: INTEGER,
        -- Cadastral area of the parcel
        NULL AS typprop_id,
        -- Property type of the parcel >>>>>>>>>>>>>>>>>>>>>>>>>>>>>> to be filled in later (see the cadastre update tasks)
        -- parcelles_cen.geom,
        -- Parcel geometry
        a.ccocomm,
        -- Commune of the parent parcel
        a.ccoprem,
        -- Prefix of the parent parcel
        btrim(a.ccosecm) AS ccosecm,
        -- Section of the parent parcel
        CASE WHEN a.dnuplam = ''
            THEN NULL :: INTEGER
            ELSE ltrim(a.dnuplam, '0') :: INTEGER END AS dnuplam,
        -- Number of the parent parcel
        a.type_filiation AS "type" -- Filiation type
    FROM "{sch}".parcelle a )
    SELECT
        t1.par_id,
        t1.codcom,
        t1.vl_id,
        t1.ccopre,
        t1.ccosec,
        t1.dnupla,
        t1.dparpi,
        t1.dcntpa,
        t1.typprop_id,
        CASE WHEN b.geom IS NULL THEN parcelles_cen.geom
            ELSE b.geom END geom,
        t1.ccocomm,
        t1.ccoprem,
        t1.ccosecm,
        t1.dnuplam,
        t1.type
    FROM t1
    LEFT JOIN "{sch}".parcelle_info b ON t1.parcelle = b.geo_parcelle -- parcels without a geometry
    LEFT JOIN cadastre.parcelles_cen ON t1.par_id = parcelles_cen.par_id
    WHERE t1.par_id NOT IN (SELECT par_id FROM cadastre.parcelles)
    )
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_parcelle(schema='38_202207'):
    _insert_parcelle1(schema)
    _insert_parcelle2(schema)
    print('INSERT parcelle OK')


def checkortho_proprio_sym(con, colonne, schema='38_202207'):
    sql = '''
    SELECT DISTINCT '{dep}' AS insee_dep, dnuper, string_agg(DISTINCT TRIM({column}),' / ') AS orthographes_voisines
    FROM "{sch}".proprietaire
    GROUP BY dnuper HAVING count(DISTINCT TRIM({column})) > 1
    '''.format(dep=schema[:2],
               sch=schema,
               column=colonne)
    return pd.read_sql(sql, con=con)


def update_synonyme_proprio(con, data, colonne, schema='38_202207'):
    data['split_ortho'] = (data.orthographes_voisines
                           .str.replace("'", "''")
                           .str.split(' / '))
    for i, row in data.iterrows():
        sql = '''UPDATE "{sch}".proprietaire SET {column} = '{dd1}' WHERE btrim({column}) {sym} {dd2} AND dnuper = '{dnup}';'''.format(
            sch=schema,
            dnup=row.dnuper,
            column=colonne,
            dd1=row.split_ortho[0],
            sym='IN' if len(row.split_ortho[1:]) > 1 else '=',
            dd2=tuple(row.split_ortho[1:]) if len(row.split_ortho[1:]) > 1 else "'%s'" % row.split_ortho[1])

        with con.begin() as cnx:
            cnx.execute(sql)
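
# Shape of the frame handed from checkortho_proprio_sym to update_synonyme_proprio
# (the dnuper and the spellings below are invented for illustration): each row lists
# the variant spellings of one owner joined with ' / '; the update keeps the first
# spelling and rewrites the others.
# sym = checkortho_proprio_sym(con_fon, 'ddenom', '38_202207')
# #   insee_dep   dnuper    orthographes_voisines
# #   38          P1234     DUPONT JEAN / DUPOND JEAN
# if not sym.empty:
#     update_synonyme_proprio(con_fon, sym, 'ddenom', '38_202207')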


def check_proprio(schema='38_202207', con=con_fon):
    is_synonyme = checkortho_proprio_sym(con, 'ddenom', schema)
    if not is_synonyme.empty:
        update_synonyme_proprio(con, is_synonyme, 'ddenom', schema)

    is_diff_lab = checkortho_proprio_sym(con, 'dldnss', schema)
    if not is_diff_lab.empty:
        update_synonyme_proprio(con, is_diff_lab, 'dldnss', schema)

    is_incomplete = checkortho_proprio_sym(con, 'jdatnss', schema)
    if not is_incomplete.empty:
        update_synonyme_proprio(con, is_incomplete, 'jdatnss', schema)

    is_diff_ccogrm = checkortho_proprio_sym(con, 'ccogrm', schema)
    if not is_diff_ccogrm.empty:
        update_synonyme_proprio(con, is_diff_ccogrm, 'ccogrm', schema)

    is_diff_dsglpm = checkortho_proprio_sym(con, 'dsglpm', schema)
    if not is_diff_dsglpm.empty:
        update_synonyme_proprio(con, is_diff_dsglpm, 'dsglpm', schema)

    is_diff_dnatpr = checkortho_proprio_sym(con, 'dnatpr', schema)
    if not is_diff_dnatpr.empty:
        update_synonyme_proprio(con, is_diff_dnatpr, 'dnatpr', schema)

    is_diff_dprnus = checkortho_proprio_sym(con, 'dprnus', schema)
    if not is_diff_dprnus.empty:
        update_synonyme_proprio(con, is_diff_dprnus, 'dprnus', schema)

    is_diff_dprnlp = checkortho_proprio_sym(con, 'dprnlp', schema)
    if not is_diff_dprnlp.empty:
        update_synonyme_proprio(con, is_diff_dprnlp, 'dprnlp', schema)

    is_diff_dnomus = checkortho_proprio_sym(con, 'dnomus', schema)
    if not is_diff_dnomus.empty:
        update_synonyme_proprio(con, is_diff_dnomus, 'dnomus', schema)


def _insert_proprio(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.proprios
    SELECT DISTINCT
        ccodep||dnuper dnuper,
        ccoqua::integer,
        btrim(ddenom) AS ddenom,
        jdatnss,
        btrim(dldnss) AS dldnss,
        btrim(dsglpm) AS dsglpm,
        btrim(dlign3) AS dlign3,
        btrim(dlign4) AS dlign4,
        btrim(dlign5) AS dlign5,
        btrim(dlign6) AS dlign6,
        btrim(dnatpr) AS dnatpr,
        gtoper::integer,
        ccogrm::integer,
        btrim(dqualp) AS dqualp,
        btrim(dnomlp) AS dnomlp,
        btrim(dprnlp) AS dprnlp,
        btrim(dnomus) AS dnomus,
        btrim(dprnus) AS dprnus,
        btrim(dforme) AS dforme
    FROM "{sch}".proprietaire
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_no_proprio(schema='38_202207'):
    sql = '''
    INSERT INTO cadastre.proprios (dnuper, ddenom)
    VALUES ('{dep}Y99999', ' PROPRIETAIRES NON RENSEIGNES');
    '''.format(
        dep=schema[:2])
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_proprio(schema='38_202207'):
    check_proprio(schema, con_fon)
    _insert_proprio(schema)
    insert_no_proprio(schema)
    print('INSERT proprio OK')


def _insert_cptprop1(schema='38_202207'):
    '''
    Insert the property accounts taken from the proprietaire table.
    '''
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.cptprop
    SELECT DISTINCT ccodep || ccocom || dnupro
    FROM "{sch}".proprietaire WHERE btrim(dnupro) <> ''
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def _insert_cptprop2(schema='38_202207'):
    '''
    Insert the property accounts used in
    lots/parcelle/suf but not referenced in proprietaire.
    '''
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.cptprop
    SELECT DISTINCT ccodep || ccocom || dnuprol
    FROM "{sch}".lots WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    UNION
    SELECT DISTINCT ccodep || ccocom || dnupro
    FROM "{sch}".parcelle WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    UNION
    SELECT DISTINCT ccodep || ccocom || dnupro
    FROM "{sch}".suf WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_cptprop(schema='38_202207'):
    _insert_cptprop1(schema)
    _insert_cptprop2(schema)
    print('INSERT cptprop OK')


def _insert_r_prop_cptprop0(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.r_prop_cptprop
    SELECT DISTINCT
        ccodep||dnuper,
        ccodep || ccocom || dnupro,
        btrim(dnomlp),
        btrim(dprnlp),
        CASE WHEN btrim(epxnee) = ''
            THEN NULL::text
            ELSE btrim(epxnee)
        END AS epxnee,
        btrim(dnomcp),
        btrim(dprncp),
        btrim(ccodro),
        btrim(ccodem)
    FROM "{sch}".proprietaire WHERE btrim(dnupro) <> ''
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def _insert_r_prop_cptprop1(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.r_prop_cptprop
    SELECT DISTINCT '{dep}Y99999', ccodep || ccocom || dnuprol
    FROM "{sch}".lots WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    UNION
    SELECT DISTINCT '{dep}Y99999', ccodep || ccocom || dnupro
    FROM "{sch}".parcelle WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    UNION
    SELECT DISTINCT '{dep}Y99999', ccodep || ccocom || dnupro
    FROM "{sch}".suf WHERE comptecommunal NOT IN (SELECT DISTINCT comptecommunal FROM "{sch}".proprietaire)
    '''.format(
        sch=schema,
        dep=schema[:2])
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_r_prop_cptprop(schema='38_202207'):
    _insert_r_prop_cptprop0(schema)
    _insert_r_prop_cptprop1(schema)
    print('INSERT r_prop_cptprop OK')


def _insert_lot1(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.lots (lot_id, par_id, dnulot, dcntlo)
    -- parcels divided into lots, from the lots table
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot AS id_lot, -- Lot identifier character varying(21)
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla AS par_id, -- Parcel identifier
        dnulot, -- Lot number
        max(dcntlo) AS dcntlo -- Cadastral area (m²): take the largest area when the same id_lot carries different dcntlo values
    FROM "{sch}".lots
    JOIN "{sch}".geo_parcelle ON lots.parcelle = geo_parcelle.geo_parcelle -- keep only lots on parcels whose geometry is available
    WHERE ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot IN
        ( -- select only lots that carry a fiscal subdivision, so as to exclude built lots
        SELECT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot FROM "{sch}".suf WHERE btrim(dnulot) <> '')
    GROUP BY ccodep, ccocom, ccopre, ccosec, dnupla, dnulot
    UNION -- lots from the suf table that are not in the lots table
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot AS id_lot, -- Lot identifier character varying(21)
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla AS par_id, -- Parcel identifier
        dnulot, -- Lot number
        sum(dcntsf) AS dcntlo -- Cadastral area (m²) = sum of the areas of the suf rows making up the lot
    FROM "{sch}".suf
    JOIN "{sch}".geo_parcelle ON suf.parcelle = geo_parcelle.geo_parcelle -- keep only suf rows on parcels whose geometry is available
    WHERE btrim(dnulot) <> '' AND ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot NOT IN (
        SELECT DISTINCT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot FROM "{sch}".lots WHERE btrim(dnulot)<>'')
    GROUP BY ccodep, ccocom, ccopre, ccosec, dnupla, dnulot
    UNION -- single-lot parcels
    -- select parcels that carry a fiscal subdivision (single or multiple) even if divided into lots, since in that case they are most likely co-ownerships (built lots, gardens, ...)
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla AS id_lot, -- Fictitious lot identifier (= parcel id)
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla AS par_id, -- Parcel identifier
        null AS dnulot, -- Lot number
        dcntpa AS dcntlo -- Cadastral area (m²)
    FROM "{sch}".parcelle
    JOIN "{sch}".geo_parcelle ON parcelle.parcelle = geo_parcelle.geo_parcelle -- keep only parcels whose geometry is available
    WHERE ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla IN
        (SELECT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||trim(dnulot) FROM "{sch}".suf); -- all parcels whose dnulot is NULL
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def _insert_lot2(schema='38_202207'):
    sql = '''set work_mem='512MB';
    insert into cadastre.lots (lot_id, par_id, dnulot, dcntlo, geom)
    with t1 as (
        SELECT DISTINCT
            -- CASE WHEN TRIM(t.dnulot) = '' OR TRIM(t.dnulot) IS NULL
            --     THEN substring(t.parcelle from 1 for 2)||substring(t.parcelle from 4 for 12)||TRIM(t.ccosub)
            --     ELSE substring(t.parcelle from 1 for 2)||substring(t.parcelle from 4 for 12)||TRIM(t.dnulot)
            -- END lot_id,
            --substring(t.parcelle from 1 for 2)||substring(t.parcelle from 4 for 12)||TRIM(t.dnulot) lot_id,
            ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||TRIM(dnupla)||TRIM(dnulot) AS lot_id,
            parcelle,
            t.ccosub,
            t.dcntsf,
            --substring(t.parcelle from 1 for 2)||substring(t.parcelle from 4 for 12) par_id,
            ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||TRIM(dnupla) AS par_id,
            CASE WHEN TRIM(t.dnulot) = '' OR TRIM(t.dnulot) IS NULL
                THEN TRIM(t.ccosub)
                ELSE TRIM(t.dnulot)
            END dnulot
        FROM "{sch}"."suf" t
    )
    select distinct on (t1.lot_id, t1.par_id)
        t1.lot_id,
        t1.par_id,
        t1.dnulot,
        CASE WHEN t1.dcntsf IS NULL
            THEN l.dcntlo
            ELSE t1.dcntsf
        END dcntlo,
        CASE WHEN geo_sub1.geom IS NULL
            THEN geo_sub2.geom
            ELSE geo_sub1.geom
        END geom
    from t1
    JOIN "{sch}".parcelle p USING (parcelle)
    LEFT JOIN "{sch}".geo_parcelle geo_p ON geo_p.geo_parcelle = p.parcelle
    LEFT JOIN "{sch}".lots l USING (parcelle)
    --LEFT JOIN "{sch}".lotslocaux ll USING (lots)
    LEFT JOIN "{sch}".geo_subdfisc_parcelle geo_sub_p ON p.parcelle = geo_sub_p.geo_parcelle
    --LEFT JOIN "{sch}".geo_subdfisc geo_sub USING (geo_subdfisc)
    LEFT JOIN "{sch}".geo_subdfisc geo_sub1
        ON (geo_sub1.geo_subdfisc = geo_sub_p.geo_subdfisc AND LOWER(geo_sub1.tex) = LOWER(t1.ccosub))
    LEFT JOIN "{sch}".geo_subdfisc geo_sub2
        ON (geo_sub2.geo_subdfisc = geo_sub_p.geo_subdfisc AND TRIM(LOWER(geo_sub2.tex)) = '')
    WHERE (t1.par_id, t1.lot_id) not IN ( select par_id, lot_id from cadastre.lots )
        OR t1.par_id not IN ( select par_id from cadastre.lots )
    order by 2,1,5
    ;
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)


def insert_lot(schema='38_202207'):
    _insert_lot1(schema)
    _insert_lot2(schema)
    print('INSERT lot OK')


def insert_cadastre(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT into cadastre.cadastre (lot_id, dnupro)
    -- the pdl
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot AS lot_id, -- Lot identifier character varying(21)
        ccodep || ccocom || dnuprol AS dnupro
    FROM "{sch}".lots
    JOIN "{sch}".geo_parcelle ON lots.parcelle = geo_parcelle.geo_parcelle -- keep only lots on parcels whose geometry is available
    WHERE
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||trim(dnulot) IN ( -- select only lots that carry a fiscal subdivision
        SELECT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||trim(dnulot)
        FROM "{sch}".suf)
    UNION
    -- lots from the suf table that are not in the lots table
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||dnulot AS id_lot, -- Lot identifier character varying(21)
        ccodep || ccocom || dnupro AS dnupro
    FROM "{sch}".suf
    JOIN "{sch}".geo_parcelle ON suf.parcelle = geo_parcelle.geo_parcelle -- keep only suf rows on parcels whose geometry is available
    WHERE
        trim(dnulot) <> '' AND ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||btrim(dnulot)
        NOT IN (
        SELECT DISTINCT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||btrim(dnulot) FROM "{sch}".lots)
    UNION
    --- parcels without a lot
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla AS id_lot, -- Fictitious lot identifier (= parcel id)
        ccodep || ccocom || dnupro AS dnupro
    FROM "{sch}".parcelle
    JOIN "{sch}".geo_parcelle ON parcelle.parcelle = geo_parcelle.geo_parcelle -- keep only parcels whose geometry is available
    WHERE
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla IN
        (SELECT ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||btrim(dnulot) FROM "{sch}".suf); -- all parcels whose dnulot is NULL
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)

    print('INSERT cadastre OK')


def insert_lotnatcult(schema='38_202207'):
    sql = '''set work_mem='512MB';
    INSERT INTO cadastre.lots_natcult (lot_id, dsgrpf, cnatsp, dclssf, ccosub, dcntsf)
    SELECT DISTINCT
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||btrim(dnulot) AS lot_id, -- Lot identifier (fine as is: when there is no lot, the identifier is written the same way)
        btrim(dsgrpf) AS dsgrpf, -- Crop-type subgroup
        btrim(cnatsp) AS cnatsp, -- Special crop-type code
        CASE WHEN trim(dclssf) = '' OR dclssf = '00'
            THEN NULL::integer
            ELSE ltrim(dclssf,'0')::integer
        END AS dclssf, -- Class within the group and tariff series
        btrim(ccosub) AS ccosub, -- Indicative letters of the suf
        dcntsf -- Area of the suf
    FROM "{sch}".suf
    WHERE
        ccodep||ccocom||replace(ccopre, ' ', '0')||replace(ccosec, ' ', '0')||dnupla||trim(dnulot) IN (SELECT lot_id FROM cadastre.lots)
    '''.format(
        sch=schema)
    with con_fon.begin() as cnx:
        cnx.execute(sql)

    print('INSERT lotnatcult OK')


def update_typprop(con=con_fon):
    sql = '''set work_mem='512MB';
    UPDATE
        cadastre.parcelles
    SET typprop_id = gtoper||'_'||ccogrm
    FROM cadastre.proprios
    JOIN cadastre.r_prop_cptprop USING (dnuper)
    JOIN cadastre.cptprop USING (dnupro)
    JOIN cadastre.cadastre USING (dnupro)
    JOIN cadastre.lots USING (lot_id)
    WHERE lots.par_id = parcelles.par_id
    '''
    with con.begin() as cnx:
        cnx.execute(sql)

    print('UPDATE typprop OK')


if __name__ == "__main__":

    from pycen import update_to_sql
    # par = '3805050000E0523'
    par = None
    sch = '38_202307'
    drop_tables(con_fon)

    lst_sch = ['07_202307', '26_202307', '42_202307', '38_202307']
    for sch in lst_sch:
        print(' INIT ', sch)
        insert_voie(sch)
        insert_parcelle(sch)
        insert_proprio(sch)
        insert_cptprop(sch)
        insert_r_prop_cptprop(sch)
        insert_lot(sch)
        insert_cadastre(sch)
        insert_lotnatcult(sch)
        update_typprop(con_fon)

    # pg_restore -h 172.17.0.2 -U postgres --dbname="bd_cen" -c /home/colas/Documents/9_PROJETS/0_FONCIER/DUMP/bd_cen_20240418_16h50_beforemaj2023.dump


3_AZALEE/tmp/manip_invZH_Huez.py (new file, 9 lines)
@@ -0,0 +1,9 @@
import geopandas as gpd
import os


if __name__ == "__main__":
    from pycen import con

    PATH = "/home/colas/Documents/9_PROJETS/1_ZH/MAJ/GERECO - Schéma_strat_ZH_Huez"


5_GEONATURE/IMPORTS/CHAR_amphi_2011.py (new file, 18 lines)
@@ -0,0 +1,18 @@
import geopandas as gpd
import os

PATH = '/media/colas/SRV/FICHIERS/SITES/SITES GERES/CHAR_CHARVAS/Scientifique-technique/Faune/AMPHIBIENS'
file = 'Inventaires amphibiens Charvas 2011.xlsx'


df = gpd.pd.read_excel(os.path.join(PATH, file))
df.DATE = df.DATE.replace('\n', '', regex=True)
df["NOM DU POINT D'EAU"] = df["NUMER O DU POINT D'EAU 2"].astype(str) + ' - ' + df["NOM DU POINT D'EAU"]
df[['LOCALISATION PRECISE (X)', 'LOCALISATION PRECISE (Y)']] = df[['LOCALISATION PRECISE (X)', 'LOCALISATION PRECISE (Y)']].replace({',': '.', ' ': '', '\n': ''}, regex=True)
# Reproject the point coordinates from Lambert II étendu (EPSG:27572) to Lambert 93 (EPSG:2154)
pts = gpd.GeoSeries(gpd.points_from_xy(df['LOCALISATION PRECISE (X)'], df['LOCALISATION PRECISE (Y)']), crs=27572).to_crs(2154)
df['X_93'] = pts.x
df['Y_93'] = pts.y

df.to_excel(os.path.join(PATH, file), index=False)


df = gpd.pd.read_excel(os.path.join(PATH, file))


5_GEONATURE/MIGRATION/PLATIERE/correct_cdnom.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from pycen import con_gn
import pycen
import pandas as pd
import os


def get_taxref(con):
    sql = '''SELECT cd_nom,cd_ref,nom_complet,lb_nom FROM taxonomie.taxref;'''
    return pd.read_sql_query(sql, con)


def get_cd_nom(con, lb):
    sql = '''SELECT cd_nom,cd_ref,nom_complet,lb_nom FROM taxonomie.taxref
    WHERE nom_complet='{label}' or nom_complet ilike '%%{label}%%';'''.format(label=lb)
    return pd.read_sql_query(sql, con)


dict_taxo = {
    'Brachythecium rutabulum (Hedw.) Schimp. var. rutabulum': 434653,
    'Hypnum cupressiforme Hedw. var. cupressiforme': 434662,
    'Orthotrichum affine Brid.': 5014,
    'Plagiomnium cuspidatum ( Hedw.) T.J.Kop.': 4934,
    'Leucodon sciuroides (Hedw.) Schwägr var. sciuroides': None,
    'Brachythecium albicans (Hedw. ) Schimp.': 5807,
    'Phascum cuspidatum Hedw. var. cuspidatum': 434590,
    'Syntrichia ruralis (Hedw.) F.Weber & D.Mohr var. ruralis': 434602,
    'Brachythecium salebrosum (Hoffm. ex F.Weber & D.Mohr) Schimp. nom. cons.': 5828,
    'Lunularia cruciata (L.) Lindb.': 6167,
    'Rhynchostegium megapolitanum (Blandow ex F.Weber & D.Mohr) Schimp.': 5908,
    'Orthotrichum cupulatum Hoffm. ex Brid. var. riparium Huebener': 434606,
    'Syntrichia montana Nees var. montana': 434600,
    'Abietinella abietina (Hedw.) M.Fleisch. var. abietina': 434642,
    'Ceratodon purpureus (Hedw.) Brid. subsp. purpureus': 434562,
    'Hedwigia ciliata (Hedw.) P.Beauv. var. leucophaea Bruch & Schimp.': 434612,
    'Hypnum cupressiforme Hedw. var. filiforme Brid.': 6046,
    'Hedwigia ciliata (Hedw.) P.Beauv. var. ciliata': 434611,
    'Orthotrichum speciosum Nees var. speciosum': None,
    'Brachytheciastrum velutinum (Hedw.) Ignatov & Huttunen var. velutinum': 434659
}
dict_taxo2 = {
    'Leucodon sciuroides (Hedw.) Schwägr var. sciuroides': 5104,
    'Orthotrichum speciosum Nees var. speciosum': 5041,
}


if __name__ == "__main__":

    PATH = '/media/colas/Disk2/tmp'
    file = '(RNN Platière) Inventaire Bryophytes L93.csv'
    file_out = '(RNN Platière) Inventaire Bryophytes L93 (correct cdnom).csv'
    df = pd.read_csv(os.path.join(PATH, file), encoding='Windows-1252', sep=';')
    tax = get_taxref(con_gn)

    for n in df.NOM_COMPLET.unique():

        res = get_cd_nom(con_gn, n)
        df.loc[df.NOM_COMPLET == n, 'CD_NOM2'] = res.cd_nom.values[0] if not res.empty else None
        if res.shape[0] > 1:
            print(n)

    df.CD_NOM2 = df.CD_NOM2.astype('Int64')
    cd_isna = df.CD_NOM2.isna()
    df.loc[cd_isna, 'CD_NOM2'] = df.loc[cd_isna, 'NOM_COMPLET'].replace(dict_taxo).astype('Int64')
    cd_isna = df.CD_NOM2.isna()
    df.loc[cd_isna, 'remarque_obs'] = "Nom complet d'origine : " + df[cd_isna].NOM_COMPLET
    df.loc[cd_isna, 'CD_NOM2'] = df.loc[cd_isna, 'NOM_COMPLET'].replace(dict_taxo2).astype('Int64')
    df.CD_NOM2 = df.CD_NOM2.astype(int)
    df.to_csv(os.path.join(PATH, file_out), encoding='Windows-1252', sep=';', index=False)


5_GEONATURE/MIGRATION/TAXREF/taxref_14to16.py (new file, 104 lines)
@@ -0,0 +1,104 @@
from pycen import con_gn
import pandas as pd
import os


def get_synthese(con, lst):
    sql = '''
    SELECT * FROM gn_synthese.v_synthese_for_export
    WHERE cd_nom IN {lst_cdnom}
    '''.format(lst_cdnom=tuple(lst))
    return pd.read_sql_query(sql, con)


if __name__ == "__main__":
    PATH = '/media/colas/SRV/homer_geonat/home/geonatureadmin/taxhub/tmp'
    file = 'liste_changements.csv'
    chgt = pd.read_csv(os.path.join(PATH, file))

    updt_cdnom = chgt.grappe_change == 'cas1'
    cd_change = (chgt[~updt_cdnom].i_cd_nom_list
                 .str.removeprefix('[')
                 .str.removesuffix(']')
                 .str.split(', ')
                 .explode()
                 .unique())
    df = get_synthese(con_gn, cd_change)

    test = df.cd_nom == 134367
    df[test]
    df[test].observateurs
    df[test].nom_valide
    df[test].nom_vernaculaire

    ######
    ### CHANGES TO APPLY IN THE DATABASE BEFORE MOVING TO THE NEW TAXONOMIC REFERENCE
    ######

    # 100142 OK - Unchanged, its synonym becomes a subspecies
    # 135069 Becomes a subspecies of 100142    49 observations - GUEDOU Alix (CENI) & GOURGUES Frédéric (Gentiana) & ...
    ### [Geranium robertianum L., 1753 --> Geranium robertianum subsp. robertianum L., 1753]
    ### Update the observations 135069 --> 100142 Geranium robertianum L., 1753

    # 126650 OK - Unchanged, synonym 141769 becomes a subspecies
    # 141769 Becomes a subspecies of 126650    1 observation 2008/10/30 - Raspail Loïc (AAIP)
    ### [Tilia platyphyllos Scop., 1771 --> Tilia platyphyllos subsp. pseudorubra C.K.Schneid., 1909]
    ### Update the observations 141769 --> 126650 Tilia platyphyllos
    ### DONE (changed in the source import)

    ######
    ### LET THESE RUN THEIR COURSE
    ######

    # FLORA
    ########
    # 145374 OK - Remains the var. CD_REF, synonym 619051 becomes a species-level synonym. 619051 not observed.
    # 33901 OK - Remains CD_REF, 33781 becomes a synonym. 33781 not observed.
    # 465265 OK - Remains a synonym of 966674. 966675 becomes a 2nd synonym. 966675 not observed.
    # 53451 OK - Remains CD_REF, 9 synonyms become species. Synonyms not observed.
    # 89840 OK - Remains CD_REF, 619051 becomes a synonym. 619051 not observed.

    # 108770 Becomes a synonym of 762076    4 observations - BIRON Nicolas (CENI) & GRANGE Benjamin (CENI)
    ### [Monotropa hypopitys L., 1753 --> Hypopitys monotropa Crantz, 1766]
    # Check that the species is present in Isère .. Yes

    # 113620 OK - Remains CD_REF, taxa 138815,923321 become synonyms
    # 138815 Merged with 113620    11 observations - BIRON Nicolas (CENI)
    ### [Pinguicula grandiflora subsp. grandiflora Lam., 1789 --> Pinguicula grandiflora Lam., 1789]
    # Check the remark field in case it is not tagged rosea .. No
    # Check that the species is present in Isère .. Yes

    # 116759 OK - Remains CD_REF, taxon 139596 becomes a synonym
    # 139596 Merged with 116759    1 observation 2004/12/01 - GILIOTTI Sébastien (Liparis SG)
    ### [Quercus robur var. robur L., 1753 --> Quercus robur L., 1753]

    # 97508 OK - Remains CD_REF, taxon 134367 becomes a synonym
    # 134367 Merged with 97508    1 observation 2018/07/11 - Pont Bernard (AAIP)
    ### [Euphorbia esula subsp. esula L., 1753 --> Euphorbia esula L., 1753]
    ### Anomaly - changed in the source import

    # FUNGI
    ########
    # 814216 OK - Remains CD_REF, 465268 becomes a synonym
    # 465268 Becomes a synonym of 814216    3 observations 2015/xx/xx - RIVOIRE Bernard (MYCOPOLYDEV)
    ### [Porodaedalea conchata (Pers.) Fiasson & Niemelä, 1984 --> Phellinopsis conchata (Pers.) Y.C.Dai, 2010]

    # 44992 OK - Remains CD_REF, 470268 becomes a synonym
    # 470268 Becomes a synonym of 44992    2 observations 2015/xx/xx - RIVOIRE Bernard (MYCOPOLYDEV)
    ### [Tremella mesenterica f. crystallina Ew.Gerhardt, 1997 --> Tremella mesenterica Retz., 1769]


    lstjdd_plt = [
        '[CNR Restauration] Prospection flore vasculaire',
        '[ENS méandre des Oves] Observation opportuniste par observation directe',
        '[MC CNR Inspira] Inventaire dendrométrique + DMH',
        '[MC CNR Inspira] Relevés phytosociologiques et sondages pédologiques',
        '[N2000 Platière] Inventaire dendrométrique',
        '[RNN Platière] Observation opportuniste par observation directe',
        '[RNN Platière] Inventaire dendrométrique en plein',
        '[RNN Platière] Suivi phytosciologique grèves alluviales',
        '[RNN Platière] Prospection flore vasculaire',
        '[CNPE] Suivi phytosciologique des milieux ouverts',
        '[CNPE] Observation opportuniste par observation directe',
        '[ABC St-Maurice] Observation Opportuniste par observation directe'
    ]


manage_bdd.py (new file, 64 lines)
@@ -0,0 +1,64 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Name        : manage_bdd.py
# Description : management of a PostgreSQL database
# Copyright   : 2024, CEN 38
# Author      : Colas Geier
import os


def dump_bdd(file_out, host, base, user, passwd=None, schema=None):

    pwd = 'export PGPASSWORD="{pwd}";'.format(pwd=passwd)

    cmd = 'pg_dump -h {H} -d {bdd} -U {U} -s > "{f}"'.format(
        H=host,
        bdd=base,
        U=user,
        f=file_out
    )
    if schema:
        cmd += ' --schema="{s}"'.format(s=schema)
    os.system(pwd + cmd if passwd else cmd)


def copy_2another_server(host_in, base_in, user_in, host_out, base_out, user_out, passwd_in=None, schema_in=None, passwd_out=None, schema_out=None):
    # RESTORE UNDER THE SOURCE DATABASE NAME
    # pg_dump -C -h localhost -U localuser dbname | psql -h remotehost -U remoteuser dbname
    # pg_dump -C -h 172.17.0.2 -U postgres -d postgres --schema="07_202107" | psql -h 91.134.194.221 -U cgeier -d bd_cen
    # RESTORE UNDER THE TARGET DATABASE NAME
    # pg_dump -C -h 172.17.0.2 -U postgres -d postgres --schema="*_202207" --format=custom | pg_restore -h 91.134.194.221 -U cgeier --dbname="bd_cen"
    pwd_in = 'export PGPASSWORD="{pwd}";'.format(pwd=passwd_in)

    cmd_in = 'pg_dump -C -h {H} -d {bdd} -U {U}'.format(
        H=host_in,
        bdd=base_in,
        U=user_in
    )
    if schema_in:
        cmd_in += ' --schema="{s}"'.format(s=schema_in)

    CMD_IN = pwd_in + cmd_in if passwd_in else cmd_in
    pwd_out = 'export PGPASSWORD="{pwd}";'.format(pwd=passwd_out)

    cmd_out = 'psql -h {H} -d {bdd} -U {U}'.format(
        H=host_out,
        bdd=base_out,
        U=user_out
    )
    if schema_out:
        cmd_out += ' --schema="{s}"'.format(s=schema_out)

    CMD_out = pwd_out + cmd_out if passwd_out else cmd_out

    os.system(CMD_IN + '|' + CMD_out)
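
# Example call for copy_2another_server (a sketch only; the target host, database,
# user and passwords below are placeholders, not values taken from this repository):
# copy_2another_server(
#     host_in='172.17.0.2', base_in='postgres', user_in='postgres', passwd_in='***',
#     schema_in='38_202207',
#     host_out='remote.example.org', base_out='bd_cen', user_out='someuser', passwd_out='***')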

if __name__ == "__main__":

    dump_bdd(
        file_out='/media/colas/SRV/FICHIERS/OUTILS/BASES DE DONNEES/FONCIER/CEN73/V4/_backup_V3/bd_cen_V3.sql',
        host='91.134.194.221',
        base='bd_cen',
        user='cgeier',
        passwd='adm1n*bdCen')


tmp/CVB_bourbre_MC_penible.py (new file, 256 lines)
@@ -0,0 +1,256 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

import os
from pathlib import Path
import pandas as pd
import geopandas as gpd


path_fichsite = '/media/colas/SRV/FICHIERS/OPERATIONS (EX ETUDES)/ETUDES EN COURS/CORRIDORS/CVB_Bourbre/#42_Mesures compensatoires/BM/Evaluation des sites'
path_geom = '/media/colas/SRV/FICHIERS/OUTILS/CARTOGRAPHIE/ESPACE DE TRAVAIL/ETUDES/CVB_Bourbre/Cartographie CVB Bourbre 2021/Mesures compensatoires/0_FINAL'
c_geom = 'mc_isere_CVBbourbre.gpkg'
c_geom_det = 'mc_isere_CVBbourbre_détaillé.gpkg'
pathout = '/home/colas/Documents/tmp/CVB_Bourbre'

dic = {
    'Code cadastral': 'N° cadast',
    "Date d'évaluation": 'date',
    "Date d'Evaluation": 'date',
    'RESULTAT': 'Note',
    'APPARTENANCE A UN CORRIDOR SRCE': 'corridor',
    'PATRIMOINE NATUREL': 'patrim_nat',
    'CONNECTIVITE': 'connectiv',
    'POTENTIEL DE RESTAURATION': 'potentiel_restau',
    'Foncier ?': 'foncier',
}

gdf = gpd.read_file(Path(path_geom) / c_geom)
gdf.loc[(gdf['N° cadast'] == '0A0816'), 'Note'] = 35.59
gdf.loc[gdf['N° cadast'] == 'AB0063', 'Note'] = 48.18
gdf.loc[gdf['N° cadast'] == 'AO0775', 'N° cadast'] = '0A0775'
gdf.loc[gdf['N° cadast'] == 'A0321', 'N° cadast'] = '0A0321'
gdf.loc[gdf['N° cadast'] == 'D0430', 'N° cadast'] = '0D0430'
gdf.loc[gdf['N° cadast'] == 'D0719', 'N° cadast'] = '0D0719'
gdf.loc[gdf['N° cadast'] == 'AB0006', 'N° cadast'] = 'AB0009'
gdf.loc[gdf['N° cadast'] == 'AM0154', 'N° cadast'] = 'CM0154'
gdf.loc[gdf['N° cadast'] == 'AM1032', 'N° cadast'] = 'AM0132'
gdf.loc[gdf['N° cadast'] == 'AK0098', 'N° cadast'] = 'AI0098'
gdf.loc[gdf['N° cadast'] == 'AI00109', 'N° cadast'] = 'AI0109'
gdf.loc[gdf['N° cadast'] == '00645', 'N° cadast'] = '0F0645'
gdf.loc[gdf['N° cadast'] == 'DS0252', 'N° cadast'] = '0C0252'
gdf.loc[gdf['Commune'] == 'Trep', 'Commune'] = 'Trept'
gdf.loc[gdf['Commune'] == 'Villefoantaine', 'Commune'] = 'Villefontaine'
gdf.loc[gdf['Commune'] == 'Villefonatine', 'Commune'] = 'Villefontaine'
gdf.loc[gdf['Commune'] == 'La Verpillère', 'Commune'] = 'La Verpillière'
gdf.loc[gdf['Commune'] == 'La Veprillière', 'Commune'] = 'La Verpillière'
gdf.loc[gdf['Commune'] == 'Vaulx_Milieu', 'Commune'] = 'Vaulx-Milieu'
gdf.loc[gdf['Commune'] == 'Valx-Milieu', 'Commune'] = 'Vaulx-Milieu'
gdf.loc[gdf['Commune'] == 'Saint-Qentin-Fallavier', 'Commune'] = 'Saint-Quentin-Fallavier'


def recup_details_note_parcelle(path):
    DF = pd.DataFrame()
    for roots, dirs, files in os.walk(path):
        print(dirs)
        for f in files:
            if not f.endswith('.xlsx'):
                continue
            if f in ['fichier de saisie.xlsx', 'MC-Evaluation des sites.xlsx']:
                continue
            if f.startswith('.~'):
                continue
            if f.__contains__('origine'):
                continue

            print(f)
            xl = pd.ExcelFile(Path(roots) / f)
            xl.sheet_names
            for sht in xl.sheet_names:
                if sht.__contains__('AM0132'):
                    continue

                df = pd.read_excel(Path(roots) / f, usecols="C:D", nrows=50, sheet_name=sht)
                df.drop_duplicates(inplace=True)
                if df.empty:
                    continue

                index = df.columns[0]
                df.set_index(index, inplace=True)
                lst_index = [*dic.keys()]
                df = df[df.index.isin(lst_index)]
                if df.empty:
                    continue
                df = df.T
                df.reset_index(inplace=True, drop=True)
                df.rename(columns=dic, inplace=True)
                df.columns.name = None
                df['sites'] = f
                df['feuillet'] = sht
                if df.columns.duplicated(keep=False).any():
                    merge_col = df.columns[df.columns.duplicated(keep=False)]
                if not DF.empty and not df['N° cadast'].isin(DF['N° cadast']).all():
                    DF = pd.concat([DF, df], ignore_index=True)
                else:
                    DF = pd.concat([DF, df], ignore_index=True)
    return DF


def normalize_colcadast(col):
    col = col.replace([', ', ' \+ ', ' \(', '\)'], ['/', '/', '', ''], regex=True)
    col = col.replace(['\+'], ['/'], regex=True)
    return col


def split_cadast(df, col):
    tmp = df[col].str.split('/', expand=True).stack().reset_index(-1, drop=True)
    tmp.name = 'split'
    df = pd.merge(df, tmp, right_index=True, left_index=True)
    df.drop_duplicates(inplace=True)
    df.drop_duplicates(subset=['Note', 'split'], inplace=True)
    del df[col]
    df.rename(columns={'split': col}, inplace=True)
    df.reset_index(inplace=True, drop=True)
    return df
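
# Small illustration of the two helpers above (the parcel codes are made up):
# ex = pd.DataFrame({'Note': [35.59], 'N° cadast': ['0A0816, 0A0817 + 0A0818']})
# ex['N° cadast'] = normalize_colcadast(ex['N° cadast'])  # -> '0A0816/0A0817/0A0818'
# ex = split_cadast(ex, 'N° cadast')                      # -> one row per parcel code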


df21 = recup_details_note_parcelle(Path(path_fichsite) / '2021')
df20 = recup_details_note_parcelle(Path(path_fichsite) / '2020')
df19 = recup_details_note_parcelle(Path(path_fichsite) / '2019')
df21['N° cadast'] = normalize_colcadast(df21['N° cadast'])
df20['N° cadast'] = normalize_colcadast(df20['N° cadast'])
df19['N° cadast'] = normalize_colcadast(df19['N° cadast'])
df21 = split_cadast(df21, 'N° cadast')
df20 = split_cadast(df20, 'N° cadast')
df19 = split_cadast(df19, 'N° cadast')

# Scores produced in 2021 but published under their original year
N19 = ['AB0008']
N20 = ['0C0366']
DF21 = df21[~df21['N° cadast'].isin([*N19, *N20])]

# Scores not present in 2021
tmp20 = df20[
    (~df20['N° cadast'].isin(df21['N° cadast']))
    & (df20['N° cadast'].isin(gdf['N° cadast']))
]
tmp19 = df19[
    (~df19['N° cadast'].isin(df21['N° cadast']))
    & (~df19['N° cadast'].isin(df20['N° cadast']))
    & (df19['N° cadast'].isin(gdf['N° cadast']))
]

df = pd.concat([df21, df20, df19])
df.reset_index(inplace=True, drop=True)
df.loc[df['N° cadast'] == '0C1003', 'N° cadast'] = '0C1003.4.5.6'
lsttmp = df[df['N° cadast'].isin(['0C1004', '0C1005', '0C1006'])].index
df.drop(lsttmp, inplace=True)
df.reset_index(inplace=True, drop=True)
cols_note = ['Note', 'corridor', 'patrim_nat', 'connectiv', 'potentiel_restau', 'foncier']
df[cols_note] = df[cols_note].astype(float).round(2)
df[cols_note] = df[cols_note].fillna(0)
# df.drop(columns=['sites','feuillet'], inplace=True)
df['N_withFon'] = round(df['corridor'] + df['patrim_nat'] + df['connectiv'] + df['potentiel_restau'] + df['foncier'], 2)
df['N_withoutFon'] = round(df['corridor'] + df['patrim_nat'] + df['connectiv'] + df['potentiel_restau'], 2)
c_df = df.copy()

GDF = pd.merge(gdf, c_df, on=['N° cadast', 'Note'], how='left')


df['T_withFon'] = df['N_withFon'].eq(df['Note'])
df['T_withoutFon'] = df['N_withoutFon'].eq(df['Note'])
GDF['T_withFon'] = GDF['N_withFon'].eq(GDF['Note'])
GDF['T_withoutFon'] = GDF['N_withoutFon'].eq(GDF['Note'])


TT = GDF[~(GDF['T_withFon'] | GDF['T_withoutFon'])]
tt = df[~(df['T_withFon'] | df['T_withoutFon'])]

TT[(TT.sites.isna()) & (TT.annee == 2021)]
TT[(TT.sites.isna()) & (TT.annee == 2020)]
TT[(TT.sites.isna()) & (TT.annee == 2019)]
TT[(~TT.sites.isna())]

#####################
#####################
#####################
gdf.loc[gdf['N° cadast'] == 'AO0775', 'N° cadast'] = '0A0775'
gdf.loc[gdf['N° cadast'] == 'A0321', 'N° cadast'] = '0A0321'
gdf.loc[gdf['N° cadast'] == 'D0430', 'N° cadast'] = '0D0430'
gdf.loc[gdf['N° cadast'] == 'D0719', 'N° cadast'] = '0D0719'
gdf.loc[gdf['N° cadast'] == 'AB0006', 'N° cadast'] = 'AB0009'
gdf.loc[gdf['N° cadast'] == 'AM0154', 'N° cadast'] = 'CM0154'
gdf.loc[gdf['N° cadast'] == 'AM1032', 'N° cadast'] = 'AM0132'
gdf.loc[gdf['N° cadast'] == 'AK0098', 'N° cadast'] = 'AI0098'
gdf.loc[gdf['N° cadast'] == 'AI00109', 'N° cadast'] = 'AI0109'
gdf.loc[gdf['N° cadast'] == '00645', 'N° cadast'] = '0F0645'
gdf.loc[gdf['N° cadast'] == 'DS0252', 'N° cadast'] = '0C0252'
gdf.loc[gdf['Commune'] == 'Trep', 'Commune'] = 'Trept'
gdf.loc[gdf['Commune'] == 'Villefoantaine', 'Commune'] = 'Villefontaine'

gdf.to_file(pathout + '/' + c_geom, driver='GPKG')


#####################
#####################
#####################
path = '/media/colas/SRV/FICHIERS/OUTILS/CARTOGRAPHIE/ESPACE DE TRAVAIL/ETUDES/CVB_Bourbre/Cartographie CVB Bourbre 2021/Mesures compensatoires'
l1 = 'LOT 1.shp'
l2 = 'LOT 2.shp'
l3 = 'LOT 3.shp'
l4 = 'LOT 4 VALIDE PARTENAIRE.shp'
gdf1 = gpd.read_file(Path(path) / l1)
gdf2 = gpd.read_file(Path(path) / l2)
gdf3 = gpd.read_file(Path(path) / l3)
gdf4 = gpd.read_file(Path(path) / 'SIG lot 4 SAFER' / l4)

print(gdf1[gdf1.Parcelle.str.contains('AI0098|AK0068|DS0252')])
print(gdf2[gdf2.Parcelle.str.contains('AI0098|AK0068|DS0252')])
print(gdf3[gdf3.Parcelle.str.contains('AI0098|AK0068|DS0252')])
print(gdf4[gdf4.Parcelle.str.contains('AI0098|AK0068|DS0252', na=False)])


#####################
#####################
#####################
#####################

dic = {
    'corridor': 'N_corridor',
    'patrim_nat': 'N_patrim_nat',
    'connectiv': 'N_connectiv',
    'potentiel_restau': 'N_pot_rest',
    'foncier': 'N_foncier',
    'T_withFon': 'N_withFon',
}
GDF.rename(columns=dic, inplace=True)

tmp = DF['N° cadast'].str.split('/', expand=True).stack().reset_index(-1, drop=True)
tmp.name = 'split'
df2019 = pd.merge(DF, tmp, right_index=True, left_index=True)


df2020 = pd.merge(DF, tmp, right_index=True, left_index=True)
df2020.drop_duplicates(inplace=True)
df2020.drop_duplicates(subset=['Note', 'split'], inplace=True)


tmp = df2020[(~df2020.split.isin(df21.split)) & (df2020.split.isin(gdf['N° cadast']))]

tmp = df2019[(~df2019.split.isin(df2021.split)) & (~df2019.split.isin(df2020.split)) & (df2019.split.isin(gdf['N° cadast']))]


df2021 = pd.merge(DF, tmp, right_index=True, left_index=True)
df2021.drop_duplicates(inplace=True)
df2021.drop_duplicates(subset=['Note', 'split'], inplace=True)

DF_FINAL = pd.concat([df2021, tmp])

DF_FINAL = pd.concat([DF_FINAL, tmp])


df2021[~df2021['split'].isin(gdf['N° cadast'])].shape
df2021[df2021['split'].isin(gdf['N° cadast'])].shape