init file

Colas Geier 2025-09-18 16:53:04 +02:00
parent 017d40db6a
commit 535f3dce1c
13 changed files with 992 additions and 0 deletions

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from pycen import con_fon
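# DDL for the sites schema: dictionary tables, site relations, actions and notes, executed through pycen's con_fon engine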
d_typinfosite = '''
CREATE TABLE IF NOT EXISTS sites.d_typinfosite (
typinfosite_id serial NOT NULL,
typinfosite_lib varchar(50) NOT NULL COLLATE pg_catalog."default",
CONSTRAINT d_typinfosite_pkey PRIMARY KEY (typinfosite_id)
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.d_typinfosite OWNER to grp_sig;
GRANT SELECT ON TABLE sites.d_typinfosite TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.d_typinfosite TO grp_foncier;
GRANT ALL ON TABLE sites.d_typinfosite TO grp_sig;
'''
ins_typinfosite = '''
INSERT INTO sites.d_typinfosite (typinfosite_lib) VALUES
('Essentiel'),
('Historique'),
('Patrimoine'),
('Objectifs'),
('Enjeux')
;
'''
r_sites_infos = '''
DROP TABLE IF EXISTS sites.r_sites_infos;
CREATE TABLE IF NOT EXISTS sites.r_sites_infos (
infosite_id serial NOT NULL,
site_id varchar(10) NOT NULL,
typinfosite_id integer NOT NULL,
site_info text,
maj_user text,
maj_date varchar(20),
CONSTRAINT r_sites_infos_pkey PRIMARY KEY (infosite_id),
CONSTRAINT r_sites_infos_site_id_fkey FOREIGN KEY (site_id)
REFERENCES sites.sites (site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT r_sites_infos_typinfosite_id_fkey FOREIGN KEY (typinfosite_id)
REFERENCES sites.d_typinfosite (typinfosite_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.r_sites_infos OWNER to grp_sig;
GRANT SELECT ON TABLE sites.r_sites_infos TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.r_sites_infos TO grp_foncier;
GRANT ALL ON TABLE sites.r_sites_infos TO grp_sig;
CREATE INDEX IF NOT EXISTS fki_r_sites_infos_infosite_id_fkey
ON sites.r_sites_infos USING btree
(infosite_id ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS fki_r_sites_infos_site_id_fkey
ON sites.r_sites_infos USING btree
(site_id ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS fki_r_sites_infos_typinfosite_id_fkey
ON sites.r_sites_infos USING btree
(typinfosite_id ASC NULLS LAST)
TABLESPACE pg_default;
'''
with con_fon.begin() as cnx:
    cnx.execute(d_typinfosite)
    cnx.execute(ins_typinfosite)
    cnx.execute(r_sites_infos)
secteurs = '''
CREATE TABLE IF NOT EXISTS sites.secteurs (
id_secteur serial NOT NULL,
site_id varchar(10) NOT NULL,
secteur_nom text NOT NULL COLLATE pg_catalog."default",
CONSTRAINT secteurs_pkey PRIMARY KEY (id_secteur),
CONSTRAINT secteurs_site_id_fkey FOREIGN KEY (site_id)
REFERENCES sites.sites (site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.secteurs OWNER to grp_sig;
GRANT SELECT ON TABLE sites.secteurs TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.secteurs TO grp_foncier;
GRANT ALL ON TABLE sites.secteurs TO grp_sig;
CREATE INDEX IF NOT EXISTS fki_secteurs_site_id_fkey
ON sites.secteurs USING btree
(site_id ASC NULLS LAST)
TABLESPACE pg_default;
'''
d_typpartenaire = '''
DROP TABLE IF EXISTS sites.d_typpartenaire;
CREATE TABLE IF NOT EXISTS sites.d_typpartenaire (
typpartenaire_id serial NOT NULL,
typpartenaire_lib text NOT NULL,
CONSTRAINT d_typpartenaire_pkey PRIMARY KEY (typpartenaire_id)
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.d_typpartenaire OWNER to grp_sig;
GRANT SELECT ON TABLE sites.d_typpartenaire TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.d_typpartenaire TO grp_foncier;
GRANT ALL ON TABLE sites.d_typpartenaire TO grp_sig;
'''
ins_typpartenaire = '''
INSERT INTO sites.d_typpartenaire (typpartenaire_lib) VALUES
('Financeurs'),
('Partenaires')
;
'''
r_sites_partenaires = '''
CREATE TABLE IF NOT EXISTS sites.r_sites_partenaires (
partenaire_id serial NOT NULL,
site_id varchar(10) NOT NULL,
typpartenaire_id integer NOT NULL,
maj_user text,
maj_date varchar(20),
CONSTRAINT r_sites_partenaires_pkey PRIMARY KEY (partenaire_id,site_id,typpartenaire_id),
CONSTRAINT r_sites_partenaires_site_id_fkey FOREIGN KEY (site_id)
REFERENCES sites.sites (site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT r_sites_partenaires_typpartenaire_id_fkey FOREIGN KEY (typpartenaire_id)
REFERENCES sites.d_typpartenaire (typpartenaire_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.r_sites_partenaires OWNER to grp_sig;
GRANT SELECT ON TABLE sites.r_sites_partenaires TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.r_sites_partenaires TO grp_foncier;
GRANT ALL ON TABLE sites.r_sites_partenaires TO grp_sig;
CREATE INDEX IF NOT EXISTS fki_r_sites_partenaires_site_id_fkey
ON sites.r_sites_partenaires USING btree
(site_id ASC NULLS LAST)
TABLESPACE pg_default;
'''
d_typactnotsite = '''
CREATE TABLE IF NOT EXISTS sites.d_typactnotsite (
typactnotsite_id varchar(6) NOT NULL,
typactnotsite_lib varchar(10) NOT NULL,
CONSTRAINT d_typactnotsite_pkey PRIMARY KEY (typactnotsite_id)
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.d_typactnotsite OWNER to grp_sig;
GRANT SELECT ON TABLE sites.d_typactnotsite TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.d_typactnotsite TO grp_foncier;
GRANT ALL ON TABLE sites.d_typactnotsite TO grp_sig;
'''
actions_sites = '''
CREATE TABLE IF NOT EXISTS sites.actions_sites (
action_site_id serial NOT NULL,
action_site_date varchar(20) NOT NULL,
action_site_datemef varchar(20) NOT NULL,
typactnotsite_id varchar(6) NOT NULL,
action_site_lib text NOT NULL,
CONSTRAINT actions_sites_pkey PRIMARY KEY (action_site_id),
CONSTRAINT r_sites_partenaires_typactnotsite_id_fkey FOREIGN KEY (typactnotsite_id)
REFERENCES sites.d_typactnotsite (typactnotsite_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.actions_sites OWNER to grp_sig;
GRANT SELECT ON TABLE sites.actions_sites TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.actions_sites TO grp_foncier;
GRANT ALL ON TABLE sites.actions_sites TO grp_sig;
'''
r_sites_actions = '''
CREATE TABLE IF NOT EXISTS sites.r_sites_actions (
action_site_id serial NOT NULL,
site_id varchar(10) NOT NULL,
maj_user text,
maj_date varchar(20),
actif varchar(1) NOT NULL,
CONSTRAINT r_sites_actions_pkey PRIMARY KEY (action_site_id,site_id),
CONSTRAINT r_sites_actions_site_id_fkey FOREIGN KEY (site_id)
REFERENCES sites.sites (site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION,
CONSTRAINT r_sites_actions_action_site_id_fkey FOREIGN KEY (action_site_id)
REFERENCES sites.actions_sites (action_site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.r_sites_actions OWNER to grp_sig;
GRANT SELECT ON TABLE sites.r_sites_actions TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.r_sites_actions TO grp_foncier;
GRANT ALL ON TABLE sites.r_sites_actions TO grp_sig;
'''
notes_sites = '''
CREATE TABLE IF NOT EXISTS sites.notes_sites (
note_site_id serial NOT NULL,
site_id varchar(10) NOT NULL,
note_site_date varchar(20) NOT NULL,
observateur_id integer NOT NULL,
typactnotsite_id integer NOT NULL,
note_site_lib text NOT NULL,
maj_user text,
maj_date varchar(20),
CONSTRAINT notes_sites_pkey PRIMARY KEY (note_site_id),
CONSTRAINT notes_sites_site_id_fkey FOREIGN KEY (site_id)
REFERENCES sites.sites (site_id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
TABLESPACE pg_default;
ALTER TABLE IF EXISTS sites.notes_sites OWNER to grp_sig;
GRANT SELECT ON TABLE sites.notes_sites TO cen_user;
GRANT DELETE, INSERT, UPDATE ON TABLE sites.notes_sites TO grp_foncier;
GRANT ALL ON TABLE sites.notes_sites TO grp_sig;
'''
with con_fon.begin() as cnx:
    cnx.execute(d_typactnotsite)
    cnx.execute(actions_sites)
    cnx.execute(r_sites_actions)

@@ -0,0 +1,68 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from pycen import con_fon as con
from sqlalchemy import text
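# Rebuild the cr_line_saisie_travaux foreign table on the fdw_bd-cen-38 server and expose it through the travaux_ligne view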
db = 'bd_cen'
host = '91.134.194.221'
port = '5432'
srv_name = 'fdw_bd-cen-38'
tab_name = 'cr_line_saisie_travaux'
sch_name = 'travaux'
view_name = 'travaux_ligne'
user = 'cen_admin'
pwd = '#CEN38@venir'
dict_cols = {
'gid': 'serial4',
'geom': 'public.geometry(linestring, 2154)',
'id_site': 'varchar',
'annee_trav': 'int4',
'id_gestion': 'int4',
'id_presta': 'int4',
'etat_trav': 'varchar',
'nom_presta': 'varchar',
'date_debut': 'date',
'duree_trav': 'float8',
'nb_etp': 'float8',
'nb_balle': 'int4',
'val_mat': 'int4',
'legende': 'varchar',
'rmq': 'varchar',
'prestation': 'int4',
'long_m': 'float8',
'crea_date': 'date',
'date_update': 'date',
'date_fin': 'date',
}
col_table = ','.join([' '.join([key,val]) for key, val in dict_cols.items()])
table = '''
DROP FOREIGN TABLE IF EXISTS {sch_name}.{tab_name} CASCADE;
CREATE FOREIGN TABLE IF NOT EXISTS {sch_name}.{tab_name} (
{columns}
)
SERVER "{fgn_server}"
OPTIONS (schema_name '{sch_name}', table_name '{tab_name}');
'''.format(
sch_name=sch_name, tab_name=tab_name, columns=col_table, fgn_server=srv_name
)
col_view = ','.join(dict_cols.keys())
view = '''
CREATE OR REPLACE VIEW {sch_name}.{v_name} AS
SELECT
{columns}
FROM {sch_name}.{tab_name};
ALTER TABLE {sch_name}.{v_name} OWNER TO cen_admin;
GRANT ALL ON TABLE {sch_name}.{v_name} TO cen_admin;
GRANT SELECT ON TABLE {sch_name}.{v_name} TO grp_sig;
GRANT SELECT ON TABLE {sch_name}.{v_name} TO cen_user;
'''.format(
sch_name=sch_name, tab_name=tab_name, v_name=view_name, columns=col_view
)
with con.begin() as cnx:
    cnx.execute(text(table))
    cnx.execute(text(view))

@@ -0,0 +1,126 @@
from pycen import con_fon
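# Declare the postgres_fdw server and user mapping for the azalee database, then mirror zones_humides.v_zoneshumides as inventaires.inventaire_zh plus a local view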
foreign_server = """
CREATE SERVER fdw_azalee
FOREIGN DATA WRAPPER postgres_fdw
OPTIONS (dbname 'azalee', host '91.134.194.221', port '5432')
"""
user_mapping = """
CREATE USER MAPPING
FOR cen_admin
SERVER fdw_azalee
OPTIONS (user 'cen_admin', password '#CEN38@venir')
"""
foreign_table = """
-- DROP FOREIGN TABLE inventaires.inventaire_zh;
CREATE FOREIGN TABLE inventaires.inventaire_zh (
site_code varchar(10) NULL,
nom varchar NULL,
auteur_site varchar NULL,
auteur_geom varchar NULL,
auteur_last_maj varchar NULL,
date_site date NULL,
date_geom date NULL,
date_last_maj date NULL,
type_milieu varchar NULL,
type_site varchar NULL,
typo_sdage varchar NULL,
rmq_site text NULL,
rmq_fct_majeur text NULL,
rmq_interet_patri text NULL,
rmq_bilan_menace text NULL,
rmq_orient_act text NULL,
rmq_usage_process text NULL,
code_cb varchar NULL,
lib_cb text NULL,
activ_hum varchar NULL,
impact varchar NULL,
"position" varchar NULL,
rmq_activ_hum text NULL,
connexion varchar NULL,
subm_orig varchar NULL,
subm_freq varchar NULL,
subm_etend varchar NULL,
fct_bio varchar NULL,
fct_hydro varchar NULL,
int_patri varchar NULL,
"val_socioEco" varchar NULL,
crit_delim varchar NULL,
crit_def_esp varchar NULL,
entree_eau_reg varchar NULL,
entree_eau_perm varchar NULL,
entree_eau_topo varchar NULL,
sortie_eau_reg varchar NULL,
sortie_eau_perm varchar NULL,
sortie_eau_topo varchar NULL,
geom public.geometry(geometry, 2154) NULL
)
SERVER fdw_azalee
OPTIONS (schema_name 'zones_humides', table_name 'v_zoneshumides');
-- Permissions
ALTER TABLE inventaires.inventaire_zh OWNER TO cen_admin;
GRANT ALL ON TABLE inventaires.inventaire_zh TO cen_admin;
"""
view_v_zoneshumides = """
-- inventaires.v_zoneshumides source
CREATE OR REPLACE VIEW inventaires.v_zoneshumides
AS SELECT inventaire_zh.site_code,
inventaire_zh.nom,
inventaire_zh.auteur_site,
inventaire_zh.auteur_geom,
inventaire_zh.auteur_last_maj,
inventaire_zh.date_site,
inventaire_zh.date_geom,
inventaire_zh.date_last_maj,
inventaire_zh.type_milieu,
inventaire_zh.type_site,
inventaire_zh.typo_sdage,
inventaire_zh.rmq_site,
inventaire_zh.rmq_fct_majeur,
inventaire_zh.rmq_interet_patri,
inventaire_zh.rmq_bilan_menace,
inventaire_zh.rmq_orient_act,
inventaire_zh.rmq_usage_process,
inventaire_zh.code_cb,
inventaire_zh.lib_cb,
inventaire_zh.activ_hum,
inventaire_zh.impact,
inventaire_zh."position",
inventaire_zh.rmq_activ_hum,
CASE
WHEN inventaire_zh.rmq_activ_hum ~~ '%remblai%'::text THEN 't'::text
ELSE 'f'::text
END AS remblais,
inventaire_zh.connexion,
inventaire_zh.subm_orig,
inventaire_zh.subm_freq,
inventaire_zh.subm_etend,
inventaire_zh.fct_bio,
inventaire_zh.fct_hydro,
inventaire_zh.int_patri,
inventaire_zh."val_socioEco",
inventaire_zh.crit_delim,
inventaire_zh.crit_def_esp,
inventaire_zh.entree_eau_reg,
inventaire_zh.entree_eau_perm,
inventaire_zh.entree_eau_topo,
inventaire_zh.sortie_eau_reg,
inventaire_zh.sortie_eau_perm,
inventaire_zh.sortie_eau_topo,
inventaire_zh.geom
FROM inventaires.inventaire_zh;
-- Permissions
ALTER TABLE inventaires.v_zoneshumides OWNER TO cen_admin;
GRANT ALL ON TABLE inventaires.v_zoneshumides TO cen_admin;
GRANT SELECT ON TABLE inventaires.v_zoneshumides TO grp_sig;
GRANT SELECT ON TABLE inventaires.v_zoneshumides TO cen_user;
"""
with con_fon.begin() as cnx:
    cnx.execute(foreign_server)
    cnx.execute(user_mapping)
    cnx.execute(foreign_table)
    cnx.execute(view_v_zoneshumides)

0_FONCIER/new_sites.py (new file)
@@ -0,0 +1,36 @@
from pycen import con_fon, update_to_sql
import geopandas as gpd
from os import path
def milieu(x):
    # Map milieu labels to their milieu_id from the sites.d_milieux dictionary table
    d_milieu = gpd.pd.read_sql('SELECT * FROM sites.d_milieux', con_fon)
    dict_m = dict(zip(d_milieu.milieu_lib, d_milieu.milieu_id))
    x.replace({'milieu': dict_m}, inplace=True)
    x.rename(columns={'milieu': 'milieu_id'}, inplace=True)

def typ_site(x):
    # Map site-type labels to their typsite_id from the sites.d_typsite dictionary table
    d_site = gpd.pd.read_sql('SELECT * FROM sites.d_typsite', con_fon)
    dict_s = dict(zip(d_site.typsite_lib, d_site.typsite_id))
    x.replace({'typ_site': dict_s}, inplace=True)
    x.rename(columns={'typ_site': 'typsite_id'}, inplace=True)

if __name__ == "__main__":
    PATH = '/home/colas/Documents/tmp/Foncier/LONE'
    file = 'LONE.xlsx'
    geofile = 'Contour_site_lônes.shp'
    df = (gpd.pd.read_excel(path.join(PATH, file))
          .rename(columns={'id_site': 'site_id'}))
    milieu(df)
    typ_site(df)
    # Attach the site outline (merged into a single geometry) to the attribute table
    _gdf = gpd.read_file(path.join(PATH, geofile))
    gdf = gpd.GeoDataFrame(df, geometry=[_gdf.unary_union], crs=_gdf.crs)
    gdf.rename_geometry('geom', inplace=True)
    gdf['surf_m2'] = gdf.area
    gdf.to_postgis('sites', con_fon, 'sites', if_exists='append', index=False)
    gdf.rename_geometry('geom_ecolo', inplace=True)
    update_to_sql(gdf, con_fon, 'sites', 'sites', 'site_id')

14_ANIMATION/r_ter_com.py (new file)
@@ -0,0 +1,14 @@
from pycen import con_anm
import geopandas as gpd
from datetime import datetime as dt
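# Rebuild the commune/territoire relation by spatial join and append it to territoires.r_ter_com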
com = gpd.read_postgis('SELECT * FROM territoires.vm_communes',con_anm)
ter = gpd.read_postgis('SELECT * FROM territoires.vm_territoires',con_anm)
ter_com = gpd.sjoin(com,ter,how='inner')[['code_insee','territoire_id']]
ter_com['actif'] = True
ter_com['date_maj'] = dt.now().date()
ter_com['utilisateur_id'] = 'cgeier'
ter_com.drop_duplicates(inplace=True)
ter_com.to_sql('r_ter_com',con_anm,schema='territoires',index=False,if_exists='append')

@@ -0,0 +1,32 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from pycen import con
# import pycen
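# Trigger: when a gn_pr_zh.t_zh record is deleted, stamp sites.sites.date_fin for the matching site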
drop = '''
DROP TRIGGER IF EXISTS trigger_end_zh ON gn_pr_zh.t_zh;
DROP FUNCTION IF EXISTS zones_humides.end_zh();
'''
s2 = '''CREATE OR REPLACE FUNCTION zones_humides.end_zh()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
BEGIN
IF (TG_OP = 'DELETE') then
UPDATE sites.sites
SET date_fin = current_timestamp
WHERE id = OLD.code;
END IF;
RETURN NULL;
END;
$function$
;'''
s1 = '''CREATE TRIGGER trigger_end_zh BEFORE
DELETE ON
gn_pr_zh.t_zh FOR EACH ROW EXECUTE FUNCTION zones_humides.end_zh()'''
with con.begin() as cnx:
    cnx.execute(drop)
    cnx.execute(s2)
    cnx.execute(s1)

@@ -0,0 +1,36 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from pycen import con
# import pycen
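# Trigger: copy each new gn_pr_zh.t_zh record into sites.sites and sites.r_sites_geom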
drop = '''
DROP TRIGGER IF EXISTS trigger_insert_new_zh ON gn_pr_zh.t_zh;
DROP FUNCTION IF EXISTS zones_humides.insert_new_zh();
'''
s2 = '''CREATE OR REPLACE FUNCTION zones_humides.insert_new_zh()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
BEGIN
INSERT into sites.sites (id,nom,date_deb,id_typo_sdage,remarques)
VALUES (new.code,new.main_name,new.create_date,(SELECT cd_nomenclature FROM gn_pr_zh.t_nomenclatures WHERE id = new.id_sdage),NULL);
INSERT into sites.r_sites_geom (id_site,geom,date,id_lot,id_origine)
VALUES (new.code,ST_TRANSFORM(new.geom,2154),new.create_date,NULL,new.zh_uuid::text);
RETURN NULL;
END;
$function$
;'''
s1 = '''CREATE TRIGGER trigger_insert_new_zh
AFTER INSERT OR UPDATE
ON gn_pr_zh.t_zh
FOR EACH ROW
EXECUTE FUNCTION zones_humides.insert_new_zh();
'''
with con.begin() as cnx:
    cnx.execute(drop)
    cnx.execute(s2)
    cnx.execute(s1)

@@ -0,0 +1,158 @@
from pycen import zh as ZH
import geopandas as gpd
import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy.engine import URL
import os
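# Compare the current wetland inventory (zh.con) with the azalee_restore snapshot (con): pull the rows added since the snapshot and summarise new vs. updated sites per provider lot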
zh = ZH()
# isin_bdd = True
# # Parametres bdd CEN38 OUT
user = 'cen_admin'
pwd = "#CEN38@venir"
adr = '91.134.194.221'
base = 'azalee_restore'
url = URL.create("postgresql+psycopg2", username=user, password=pwd, host=adr, database=base)
con = create_engine(url)
lst_table = [
'r_sites_geom',
'r_site_critdelim',
'r_site_fctecosociopatri',
'r_site_habitat',
'r_site_reghydro',
'r_site_sub',
'r_site_type_connect',
'r_site_usageprocess',
]
result = {}
for table in lst_table:
    sql_old = "SELECT max(t.id) FROM {sch}.{tab} t".format(tab=table, sch='sites' if table == 'r_sites_geom' else 'zones_humides')
    if table == 'r_sites_geom':
        sql_old += ' JOIN sites.sites ON sites.id = t.id_site AND sites.id_type_milieu = 1'
    # else:
    #     sql_old += ''' JOIN sites.sites ON sites.id = r_sites_geom.id_site AND sites.id_type_milieu = 1'''
    id_old = pd.read_sql(sql_old, con)
    sql = "SELECT t.* FROM {sch}.{tab} t".format(tab=table, sch='sites' if table == 'r_sites_geom' else 'zones_humides')
    if table == 'r_sites_geom':
        sql += ' JOIN sites.sites ON sites.id = t.id_site AND sites.id_type_milieu = 1'
    sql += " WHERE t.id > {id} AND id_site != '26PNRV0208'".format(id=id_old.values[0][0])
    result[table] = pd.read_sql(sql, zh.con)
    if table == 'r_sites_geom':
        sql_lot = "SELECT * FROM sites.lots WHERE id_lot IN {id}".format(id=tuple(result[table].id_lot.dropna().unique()))
        lot = pd.read_sql(sql_lot, zh.con).dropna(how='all', axis=1)
        result[table] = result[table].merge(lot, on='id_lot', how='left')
r_geo = result['r_sites_geom']
sql = '''
with lots as (
SELECT DISTINCT ON (id_site)
id_site,lots.*
FROM sites.r_sites_geom
LEFT JOIN sites.lots USING (id_lot)
ORDER BY id_site, "date" DESC
)
SELECT v.*, lots.libelle FROM zones_humides.v_zoneshumides v
JOIN lots ON v.site_code = lots.id_site
WHERE v.site_code IN {lst_code}
'''.format(lst_code=str(tuple(r_geo.id_site.unique())))
vzh = gpd.read_postgis(sql,zh.con).dropna(subset=['libelle']).dropna(how='all',axis=1)
vzh_old = gpd.read_postgis('SELECT * FROM zones_humides.v_zoneshumides',con).dropna(how='all',axis=1)
vzh_new = gpd.read_postgis('SELECT * FROM zones_humides.v_zoneshumides',zh.con).dropna(how='all',axis=1)
date_cols = vzh.columns[vzh.columns.str.contains('date|heure')]
vzh[date_cols] = vzh[date_cols].astype(str)
vzh_old[['old_code',*date_cols]] = vzh_old[['old_code',*date_cols]].astype(str)
vzh_new[['old_code',*date_cols]] = vzh_new[['old_code',*date_cols]].astype(str)
PATH = '/home/colas/Documents/9_PROJETS/1_ZH/2024'
vzh_old.to_file(os.path.join(PATH,'20250415_avancement.gpkg'),driver='GPKG',layer='Inventaire_initiale')
vzh_new.to_file(os.path.join(PATH,'20250415_avancement.gpkg'),driver='GPKG',layer='Inv_actualisee_20250415')
for _lib in vzh.libelle.unique():
    lib = (_lib.removesuffix('2024')
           .removeprefix('ZH')
           .replace('actualisation', 'CENIsere'))
    tmp = vzh[vzh.libelle == _lib]
    new = tmp[~tmp.site_code.isin(vzh_old.site_code.tolist())]
    upt = tmp[tmp.site_code.isin(vzh_old.site_code.tolist())]
    if not new.empty:
        print(f'NEW {lib} : ', str(new.site_code.tolist()))
        # new.to_file(os.path.join(PATH,'20250415_avancement.gpkg'),driver='GPKG',layer='newzh'+lib+str(new.shape[0]))
    if not upt.empty:
        print(f'UPDT {lib} : ', str(upt.site_code.tolist()))
        # upt.to_file(os.path.join(PATH,'20250415_avancement.gpkg'),driver='GPKG',layer='majzh'+lib+str(upt.shape[0]))
vzh_reduc = (
    vzh[vzh.site_code.isin(vzh_old.site_code.tolist())]
    .sort_values('site_code').set_index('site_code').area
    .sub(
        vzh_old[vzh_old.site_code.isin(vzh.site_code.tolist())]
        .sort_values('site_code').set_index('site_code').area,
        fill_value=0,
    )
)
vzh_reduc[vzh_reduc<0].sum() / 10000
vzh_reduc[vzh_reduc>0].sum() / 10000
area_maj = (vzh[vzh.site_code.isin(vzh_old.site_code.tolist())].area.sum() - vzh_old[vzh_old.site_code.isin(vzh.site_code.tolist())].area.sum()) / 10000
area_new = vzh[~vzh.site_code.isin(vzh_old.site_code.tolist())].area.sum() / 10000
cbna = vzh[vzh.libelle=='ZH_CBNA_2024']
new_cbna = cbna[(~cbna.site_code.isin(vzh_old.site_code.tolist()))&(vzh.libelle=='ZH_CBNA_2024')].area.sum() / 10000
maj_cbna = (cbna[cbna.site_code.isin(vzh_old.site_code.tolist())].area.sum() - vzh_old[vzh_old.site_code.isin(cbna.site_code.tolist())].area.sum()) / 10000
pgszh = vzh[vzh.libelle=='ZH_pgszhBelledonne_2024']
new_pgszh = pgszh[(~pgszh.site_code.isin(vzh_old.site_code.tolist()))&(vzh.libelle=='ZH_pgszhBelledonne_2024')].area.sum() / 10000
maj_pgszh = (pgszh[pgszh.site_code.isin(vzh_old.site_code.tolist())].area.sum() - vzh_old[vzh_old.site_code.isin(pgszh.site_code.tolist())].area.sum()) / 10000
evin = vzh[vzh.libelle=='ZH_EVINERUDE_2024']
new_evin = evin[(~evin.site_code.isin(vzh_old.site_code.tolist()))&(vzh.libelle=='ZH_EVINERUDE_2024')].area.sum() / 10000
maj_evin = (evin[evin.site_code.isin(vzh_old.site_code.tolist())].area.sum() - vzh_old[vzh_old.site_code.isin(evin.site_code.tolist())].area.sum()) / 10000
lst_attrs = []
r_geo = result['r_sites_geom']
for table in result.keys():
    if table == 'r_sites_geom':
        continue
    df = result[table]
    no_geo = df[~df.id_site.isin(r_geo.id_site.unique())]
    lst_attrs += no_geo.id_site.unique().tolist()
lst_attrs = [x for x in list(set(lst_attrs)) if x is not None]
r_crit = result['r_site_critdelim']
r_fct = result['r_site_fctecosociopatri']
r_hab = result['r_site_habitat']
r_hyd = result['r_site_reghydro']
r_sub = result['r_site_sub']
r_connect = result['r_site_type_connect']
r_usg = result['r_site_usageprocess']
new_r_crit = r_crit[r_crit.id_site.isin(lst_attrs)]
new_r_fct = r_fct[r_fct.id_site.isin(lst_attrs)]
new_r_hab = r_hab[r_hab.id_site.isin(lst_attrs)]
new_r_hyd = r_hyd[r_hyd.id_site.isin(lst_attrs)]
new_r_sub = r_sub[r_sub.id_site.isin(lst_attrs)]
new_r_connect = r_connect[r_connect.id_site.isin(lst_attrs)]
new_r_usg = r_usg[r_usg.id_site.isin(lst_attrs)]
len_new_crit = new_r_crit.shape[0]
len_new_fct = new_r_fct.shape[0]
len_new_hab = new_r_hab.shape[0]
len_new_hyd = new_r_hyd.shape[0]
len_new_sub = new_r_sub.shape[0]
len_new_connect = new_r_connect.shape[0]
len_new_usg = new_r_usg.shape[0]
lst_new_crit = new_r_crit.id_site.unique().tolist()
lst_new_fct = new_r_fct.id_site.unique().tolist()
lst_new_hab = new_r_hab.id_site.unique().tolist()
lst_new_hyd = new_r_hyd.id_site.unique().tolist()
lst_new_sub = new_r_sub.id_site.unique().tolist()
lst_new_connect = new_r_connect.id_site.unique().tolist()
lst_new_usg = new_r_usg.id_site.unique().tolist()
tmp = gpd.read_file('/home/colas/Documents/9_PROJETS/1_ZH/MAJ/Actu 2024/BE EPODE - LEZE/KIT INVENTAIRE38 MODIFIE/a envoyer prêt/GEOM/38CG0105_Epode2025.gpkg')
tmp_old = gpd.read_postgis('SELECT * FROM zones_humides.v_zoneshumides WHERE site_code = \'38CG0105\'',con)

@@ -0,0 +1,52 @@
import requests
import geopandas as gpd
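# Biodiv'AURA (ORB) synthese API: authenticate, fetch point observations (optionally clipped to a geometry) and explode them per cd_nom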
def get_orb_token():
    login = 'cgeier'
    pwd = 'adm1n*cEn38'
    url = "https://donnees.biodiversite-auvergne-rhone-alpes.fr/api/auth/login"
    return requests.post(
        url,
        json={"login": login, "password": pwd},
        headers={'Content-Type': 'application/json'}
    )

def get_orb_sp(tok, params: dict = None, geo=None):
    import json
    print("LOAD ORB DATA ...")
    url = "https://donnees.biodiversite-auvergne-rhone-alpes.fr/api/synthese/for_web?"
    params = dict(params or {})
    if geo is not None:
        params.update({
            # "geoIntersection":{"type":"Feature","properties":{},"geometry":_json_geo['features'][0]['geometry']},
            "geoIntersection": geo.to_crs(4326).unary_union.wkt,
            "with_areas": False
        })
    res = requests.post(
        url,
        headers={'Content-Type': 'application/json'},
        cookies=tok.cookies,
        json=params)
    _orb_geo = gpd.read_file(res.text)
    orb_geo = (
        _orb_geo[_orb_geo.geom_type == 'Point']
        .reset_index(drop=True)
        .to_crs(2154))
    # One row per observed taxon: extract every cd_nom from the JSON 'observations' column
    orb_geo['cd_nom'] = [
        [cd['cd_nom'] for cd in json.loads(o)]
        for o in orb_geo.observations
    ]
    orb = orb_geo.explode('cd_nom')
    # orb.drop(['observations','cd_nom'],axis=1,inplace=True)
    return orb

if __name__ == "__main__":
    tok = get_orb_token()
    orb = get_orb_sp(tok, params={'regne': 'Animalia'})

@@ -0,0 +1,22 @@
import geopandas as gpd
from pycen import con_gn, update_to_sql
from sqlalchemy import DateTime
from os import path
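# Backfill update_date and update_author on pr_zh.t_zh from the corrected 2024 DDT wetland layer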
PATH = '/home/colas/Documents/9_PROJETS/1_ZH/2024/CARTO/Couches SIG'
FILE = 'ZH_zh DDT 2024_MJ_modif 2025.shp'
df = (gpd.read_file(path.join(PATH, FILE))
      .rename(columns={'Code ZH': 'code', 'date': 'update_date'}))
df['update_author'] = 24  # 24 = Mathieu Juton
update_to_sql(
    df[['code', 'update_author', 'update_date']],
    con_gn,
    't_zh',
    'pr_zh',
    'code',
    dtype={
        'update_date': DateTime
    }
)

@@ -0,0 +1,73 @@
# UsersHub
VERSION=2.4.7
cd
wget https://github.com/PnX-SI/UsersHub/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/usershub_old
mv /home/`whoami`/usershub/ /home/`whoami`/usershub_old/
mv UsersHub-$VERSION /home/`whoami`/usershub/
cp /home/`whoami`/usershub_old/config/config.py /home/`whoami`/usershub/config/config.py
cp /home/`whoami`/usershub_old/config/settings.ini /home/`whoami`/usershub/config/settings.ini
cd usershub
./install_app.sh
# Monitoring
VERSION=1.0.3
cd
wget https://github.com/PnX-SI/gn_module_monitoring/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_monitoring_old
mv ~/gn_module_monitoring/ ~/gn_module_monitoring_old/
mv ~/gn_module_monitoring-$VERSION ~/gn_module_monitoring
# Export
VERSION=1.7.2
cd
wget https://github.com/PnX-SI/gn_module_export/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_export_old
mv ~/gn_module_export/ ~/gn_module_export_old/
mv ~/gn_module_export-$VERSION ~/gn_module_export
# ~/gn_module_export_old/backend/templates/swagger
# ZH
VERSION=1.4.0
cd
wget https://github.com/PnX-SI/gn_module_ZH/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_ZH_old
mv ~/gn_module_ZH/ ~/gn_module_ZH_old/
mv ~/gn_module_ZH-$VERSION ~/gn_module_ZH
# GeoNature
VERSION=2.15.4
wget https://github.com/PnX-SI/GeoNature/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/geonature_old
mv ~/geonature/ ~/geonature_old/
mv ~/GeoNature-$VERSION ~/geonature/
cd ~/geonature
./install/migration/migration.sh 2>&1 | tee install/migration/migration.log
1. Remove API_TAXHUB
2. Update taxhub_url in /geonature/backend/media/mobile/occtax/settings.json
3. geonature db downgrade taxhub-standalone@base (a consolidated command sketch for steps 3, 6 and 7 follows the list)
4. Check the TaxHub media in GeoNature, then rm ~/taxhub
5. Update the TaxHub Apache config:
``` sh
sudo nano /etc/apache2/conf-available/taxhub.conf
# Case where TaxHub and GeoNature share the same subdomain
RewriteEngine on
RewriteRule "^/taxhub/static/medias/(.+)" "/geonature/api/medias/taxhub/$1" [R,L]
```
~6. geonature monitorings update_module_available_permissions
~7. RUN data/upgrade_modules_permissions.sql
8. Update the Monitorings views
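
A minimal sketch of the database commands behind steps 3, 6 and 7, assuming the backend virtualenv of this install and a default database name (geonature2db) and script path — adjust to the actual setup:
``` sh
# activate the GeoNature backend virtualenv
source ~/geonature/backend/venv/bin/activate
# Step 3: fold the standalone TaxHub alembic branch back to base (TaxHub is now served by GeoNature)
geonature db downgrade taxhub-standalone@base
# Step 6: refresh the permissions exposed by the Monitorings module
geonature monitorings update_module_available_permissions
# Step 7: apply the permissions upgrade script (database name and script path assumed)
psql -d geonature2db -f ~/gn_module_monitoring/data/upgrade_modules_permissions.sql
```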

@@ -0,0 +1,128 @@
# Monitoring
VERSION=1.1.0
cd
wget https://github.com/PnX-SI/gn_module_monitoring/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_monitoring_old
mv ~/gn_module_monitoring/ ~/gn_module_monitoring_old/
mv ~/gn_module_monitoring-$VERSION ~/gn_module_monitoring
# Export
VERSION=1.8.0
cd
wget https://github.com/PnX-SI/gn_module_export/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_export_old
mv ~/gn_module_export/ ~/gn_module_export_old/
mv ~/gn_module_export-$VERSION ~/gn_module_export
# ~/gn_module_export_old/backend/templates/swagger
# DASHBOARD
VERSION=1.6.0
cd
wget https://github.com/PnX-SI/gn_module_dashboard/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/gn_module_dashboard_old
mv ~/gn_module_dashboard/ ~/gn_module_dashboard_old/
mv ~/gn_module_dashboard-$VERSION ~/gn_module_dashboard
# GeoNature
VERSION=2.16.2
wget https://github.com/PnX-SI/GeoNature/archive/$VERSION.zip
unzip $VERSION.zip
rm $VERSION.zip
sudo rm -R ~/geonature_old
mv ~/geonature/ ~/geonature_old/
mv ~/GeoNature-$VERSION ~/geonature/
cd ~/geonature
./install/migration/migration.sh 2>&1 | tee install/migration/migration.log
# LOG WARNING
Considering dependency proxy for proxy_http:
Module proxy already enabled
Module proxy_http already enabled
Considering dependency filter for deflate:
Module filter already enabled
Module deflate already enabled
AH00112: Warning: DocumentRoot [/home/geonatureadmin/geonature/frontend/dist] does not exist
Syntax OK
Set API_ENDPOINT to https://geonature.cen-isere.fr/geonature/api in frontend configuration file...
Mise à jour des fichiers de configuration frontend et rebuild du frontend…
Génération de la configuration du frontend :
# LOG ERROR
Mise à jour de la base de données…
/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/config.py:560: DeprecationWarning: No path_separator found in configuration; falling back to legacy splitting on spaces/commas for version_locations. Consider adding path_separator=os to Alembic config.
util.warn_deprecated(
/home/geonatureadmin/geonature/backend/geonature/migrations/versions/7dfd0a813f86_insert_inpn_sensitivity_referential.py:14: DeprecationWarning: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives
from distutils.util import strtobool
/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/config.py:560: DeprecationWarning: No path_separator found in configuration; falling back to legacy splitting on spaces/commas for version_locations. Consider adding path_separator=os to Alembic config.
util.warn_deprecated(
INFO [alembic.runtime.migration] Context impl PostgresqlImpl.
INFO [alembic.runtime.migration] Will assume transactional DDL.
/home/geonatureadmin/geonature/backend/geonature/migrations/versions/7dfd0a813f86_insert_inpn_sensitivity_referential.py:14: DeprecationWarning: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives
from distutils.util import strtobool
ERROR [flask_migrate] Error: Can't locate revision identified by 'd85b87a1ca62'
/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/utils_flask_sqla/commands.py:89: DeprecationWarning: The 'db' attribute is deprecated and will be removed in Flask-SQLAlchemy 3.1. The extension is registered directly as 'app.extensions["sqlalchemy"]'.
db = current_app.extensions["sqlalchemy"].db
/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/config.py:560: DeprecationWarning: No path_separator found in configuration; falling back to legacy splitting on spaces/commas for version_locations. Consider adding path_separator=os to Alembic config.
util.warn_deprecated(
/home/geonatureadmin/geonature/backend/geonature/migrations/versions/7dfd0a813f86_insert_inpn_sensitivity_referential.py:14: DeprecationWarning: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives
from distutils.util import strtobool
Traceback (most recent call last):
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/base.py", line 217, in _catch_revision_errors
yield
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/base.py", line 285, in get_all_current
return cast(Set[Script], self.revision_map._get_all_current(id_))
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 1531, in _get_all_current
top_revs = set(self.get_revisions(id_))
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 542, in get_revisions
return sum([self.get_revisions(id_elem) for id_elem in id_], ())
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 542, in <listcomp>
return sum([self.get_revisions(id_elem) for id_elem in id_], ())
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 565, in get_revisions
return tuple(
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 566, in <genexpr>
self._revision_for_ident(rev_id, branch_label)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/revision.py", line 637, in _revision_for_ident
raise ResolutionError(
alembic.script.revision.ResolutionError: No such revision or branch 'd85b87a1ca62'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/geonatureadmin/geonature/backend/venv/bin/geonature", line 7, in <module>
sys.exit(main())
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 1161, in __call__
return self.main(*args, **kwargs)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 1082, in main
rv = self.invoke(ctx)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 1697, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 1697, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 1443, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 788, in invoke
return __callback(*args, **kwargs)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/decorators.py", line 33, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/flask/cli.py", line 400, in decorator
return ctx.invoke(f, *args, **kwargs)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/click/core.py", line 788, in invoke
return __callback(*args, **kwargs)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/utils_flask_sqla/commands.py", line 97, in autoupgrade
current_heads = set(map(lambda rev: rev.revision, script.get_all_current(current_heads)))
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/base.py", line 284, in get_all_current
with self._catch_revision_errors():
File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/geonatureadmin/geonature/backend/venv/lib/python3.10/site-packages/alembic/script/base.py", line 249, in _catch_revision_errors
raise util.CommandError(resolution) from re
alembic.util.exc.CommandError: Can't locate revision identified by 'd85b87a1ca62'

@@ -0,0 +1,8 @@
from pycen import con_gn
import pandas as pd
tab = pd.read_excel('/home/colas/Documents/9_PROJETS/6_GEONATURE/citizen/liste_reptiles_rngl.xlsx')
sql = 'select cd_nom,lb_nom from taxonomie.taxref where lb_nom in {}'.format(tuple(tab['lb_nom']))
res = pd.read_sql(sql,con_gn)
lst_cdnom = res.cd_nom.astype(str).tolist()