# ======================================================================
# file: 0_FONCIER/TdB_FEDE/site_TdB_2024_complete.py
# ======================================================================
#!/usr/bin/env python
# -*- coding: UTF-8 -*-

# Complement the 2024 FCEN federation survey: take the columns that exist
# in the 2023 answer file but are missing from the 2024 materialised view,
# and push them to a helper table in the _tdbfcen schema.

from pycen import con_fon
import pandas as pd

dico_data = '/media/colas/SRV/FICHIERS/OUTILS/BASES DE DONNEES/BILAN_FEDE_CEN/2024/TDB2024_enquete_SIG/Dico_DATA_sites_CEN_v2024.xlsx'
bilan_2023 = '/media/colas/SRV/FICHIERS/OUTILS/BASES DE DONNEES/BILAN_FEDE_CEN/2024/TDB2024_enquete_SIG/DATA N-1/Sites_CEN_38_2023.csv'

# Column F of the data dictionary lists the expected 2024 field names.
dic = pd.read_excel(dico_data, sheet_name='sites_cen_xx_2024', header=0, usecols='F', nrows=50)
dic_head_name = dic.columns[0]

bil2023 = pd.read_csv(bilan_2023, sep=',', header=0, encoding='utf-8')
# Parse every column whose name contains 'date' as datetimes.
date_cols = bil2023.columns[bil2023.columns.str.contains('date')]
bil2023[date_cols] = bil2023[date_cols].apply(pd.to_datetime)
# TODO: clean-up of the remq_sensibilite column (announced by the original
# comment but never implemented).

vm_site = pd.read_sql_table('vm_sites_cen_2024_csv', con_fon, '_tdbfcen')

# Dictionary fields that the 2024 materialised view does not provide.
dic_missing = (dic.loc[~dic[dic_head_name].isin(vm_site.columns), dic_head_name]
               .tolist())
# NOTE(review): assumes every missing field exists as a column of the 2023
# CSV — a KeyError is raised otherwise; confirm against the source files.

(bil2023[['id_site_cen', *dic_missing]]
 .merge(vm_site[['id_site_cen']], how='inner', on='id_site_cen')
 .to_sql(
     '_sites_cen_2023_csv_complement',
     con_fon,
     schema='_tdbfcen',
     if_exists='replace',
     index=False))

# ======================================================================
# file: 3_AZALEE/rpg_to_db.py
# ======================================================================
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-

# Load the regional RPG (graphic parcel register) shapefile into PostGIS,
# archive the previous vintage, set the grants and (re)create the
# Isère-restricted view.

from pycen import con
import geopandas as gpd
from datetime import datetime as dt

file = '/home/colas/Documents/tmp/rpg/RPG_2-0__SHP_LAMB93_R84_2022-01-01.7z/RPG_2-0__SHP_LAMB93_R84_2022-01-01/RPG/1_DONNEES_LIVRAISON_2023-08-01/RPG_2-0_SHP_LAMB93_R84-2022/PARCELLES_GRAPHIQUES.shp'

name_table = 'rpg_parcelles_graphiques_reg'
name_schema = 'ref_territoire'
# RPG vintages are published with a two-year lag.
thisyear = dt.now().year - 2

df = gpd.read_file(file)
df.columns = df.columns.str.lower()
df.rename_geometry('geom', inplace=True)

# Force Lambert-93 (EPSG:2154): declare it when missing, reproject otherwise.
if not df.crs:
    df.set_crs(epsg=2154, inplace=True)

if df.crs.srs != 'epsg:2154':
    df.to_crs(epsg=2154, inplace=True)

# Archive the previous vintage under a year-suffixed name before replacing it.
exist_table = con.dialect.get_table_names(con, schema=name_schema)
if name_table in exist_table:
    # BUG FIX: in PostgreSQL the new name given to RENAME TO must NOT be
    # schema-qualified (the table stays in its schema); the original
    # "RENAME TO {sch}.{othertab}" is a syntax error.
    sql = """ALTER TABLE {sch}.{tab} RENAME TO {othertab};
    """.format(sch=name_schema, tab=name_table,
               othertab=name_table.replace('rpg', 'rpg' + str(thisyear - 1)))
    with con.begin() as cnx:
        cnx.execute(sql)

df.to_postgis(
    name=name_table,
    con=con,
    schema=name_schema,
    if_exists='replace',
    index=False
)

# Ownership / grants, then the table comment.
sql = """ALTER TABLE {sch}.{tab} OWNER TO cen_admin;
GRANT ALL ON TABLE {sch}.{tab} TO cen_admin;
GRANT ALL ON TABLE {sch}.{tab} TO grp_admin;
GRANT SELECT ON TABLE {sch}.{tab} TO grp_consult;
""".format(sch=name_schema, tab=name_table)
comment = """COMMENT ON TABLE {sch}.{tab} IS 'Version {v} 2.1 - Table des parcelles graphiques de la région Auvergne Rhône-Alpes.'""".format(sch=name_schema, tab=name_table, v=thisyear)
with con.begin() as cnx:
    cnx.execute(sql)
    cnx.execute(comment)

if 'rpg' in name_table:
    # View restricted to the Isère department by spatial intersection.
    v_tab = 'v_' + name_table.replace('reg', '') + 'isere'
    lst_col = ','.join(tuple('s.' + df.columns))

    sql = """CREATE OR REPLACE VIEW {sch}.{v_tab}
    AS SELECT {cols}
    FROM {sch}.{tab} s,
    ref_territoire.dept_isere reg
    WHERE st_intersects(s.geom, reg.geom);

    ALTER TABLE {sch}.{v_tab} OWNER TO cen_admin;
    GRANT ALL ON TABLE {sch}.{v_tab} TO cen_admin;
    GRANT ALL ON TABLE {sch}.{v_tab} TO grp_admin;
    GRANT SELECT ON TABLE {sch}.{v_tab} TO grp_consult;""".format(sch=name_schema, tab=name_table, v_tab=v_tab, cols=lst_col)
    comment = """COMMENT ON VIEW {sch}.{v_tab} IS 'Version {v} 2.1 - Table des parcelles graphiques de l''isere.'""".format(sch=name_schema, v_tab=v_tab, v=thisyear)
    with con.begin() as cnx:
        cnx.execute(sql)
cnx.execute(comment) diff --git a/5_GEONATURE/insert_liste_alerte.py b/5_GEONATURE/insert_liste_alerte.py new file mode 100644 index 0000000..aee0413 --- /dev/null +++ b/5_GEONATURE/insert_liste_alerte.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python3 +# -*- coding: UTF-8 -*- + +def test_data(con,tab,col,status): + sql = ''' + SELECT count({col}) FROM {sch}.{tab} WHERE {col} = '{status}' + ;'''.format(sch='taxonomie',tab=tab,col=col,status=status) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def test_cor_values(con,vals): + sql = ''' + SELECT count(id_value_text) FROM {sch}.{tab} WHERE (id_value,id_text) = {vals} + ;'''.format(sch='taxonomie',tab='bdc_statut_cor_text_values',vals=vals) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def test_status_type(con,col,status): + sql = ''' + SELECT count({col}) FROM {sch}.{tab} WHERE {col} = '{status}' + ;'''.format(sch='taxonomie',tab='bdc_statut_type',col=col,status=status) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def insert_status_alerte(con): + if test_data(con,'bdc_statut_type','cd_type_statut','AL') > 0: + # if test_status_type(con,'cd_type_statut','AL') > 0: + print('ALERTE STATUS ALREADY EXISTS') + else: + sql = ''' + INSERT INTO {sch}.{tab} (cd_type_statut,lb_type_statut,regroupement_type,thematique,type_value) VALUES + ('AL','Liste d''alerte départementale','Alerte','STATUTS','VALUE') + ;'''.format(sch='taxonomie',tab='bdc_statut_type') + with con_gn.begin() as cnx: + cnx.execute(sql) + +def insert_status_values(con): + vals = [ + ['RE','Disparue au niveau départemental'], + ['AS-1','Quasi menacée (localisées sans signe de déclin)'], + ['AS-2','Quasi menacée (répandues mais en déclin)'], + ['AS-3','Quasi menacée (répandues, déclin à confirmer)'] + ] + for val in vals: + if test_data(con,'bdc_statut_values','label_statut',val[1]) > 0: + print('ALERTE VALUE STATUS ALREADY EXISTS : ',val[1]) + else: + sql = ''' + INSERT INTO {sch}.{tab} 
(code_statut,label_statut) VALUES + ('{val0}','{val1}') + '''.format(sch='taxonomie',tab='bdc_statut_values',val0=val[0],val1=val[1]) + with con_gn.begin() as cnx: + cnx.execute(sql) + +def get_text_id(con,cd_doc): + sql = ''' + SELECT id_text FROM {sch}.{tab} WHERE cd_doc = '{cd_doc}' + ;'''.format(sch='taxonomie',tab='bdc_statut_text',cd_doc=cd_doc) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def get_area_id(con,area): + sql = ''' + SELECT id_area FROM {sch}.{tab} WHERE area_name = '{area}' + ;'''.format(sch='ref_geo',tab='l_areas',area=area) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def get_values_id(con,col,terme): + if isinstance(terme,int|str): + terme = [terme] + else: + terme = list(terme) + sql = ''' + SELECT id_value FROM {sch}.{tab} WHERE {col} IN {terme} AND label_statut <> 'Disparue au niveau régional' + ;'''.format(sch='taxonomie',tab='bdc_statut_values',col=col,terme=tuple(terme)).replace(',)',')') + with con.begin() as cnx: + return cnx.execute(sql).all() + +def insert_status_cor_text_area(con,id_doc): + id_text = get_text_id(con,id_doc) + id_area = get_area_id(con,'Isère') + + check_sql = ''' + SELECT count(id_text) FROM {sch}.{tab} WHERE (id_text,id_area) = ({id_text},{id_area}) + ;'''.format(sch='taxonomie',tab='bdc_statut_cor_text_area',id_text=id_text,id_area=id_area) + with con.begin() as cnx: + if cnx.execute(check_sql).one()[0] == 0: + sql = (''' + INSERT INTO {sch}.{tab} (id_text,id_area) VALUES ({id_text},{id_area}) + ;'''.format(sch='taxonomie',tab='bdc_statut_cor_text_area',id_text=id_text,id_area=id_area)) + cnx.execute(sql) + + +def insert_status_cor_text_values(con,id_doc,values): + insert_status_values(con) + id_text = get_text_id(con,id_doc) + id_vals = [x[0] for x in get_values_id(con,'code_statut',values)] + + zip_vals = tuple(zip(id_vals,[id_text]*len(id_vals))) + cor_vals = [x for x in zip_vals if test_cor_values(con,x)==0] + + sql = (''' + INSERT INTO {sch}.{tab} (id_value,id_text) 
VALUES {values} + ;''' + .format(sch='taxonomie',tab='bdc_statut_cor_text_values',values=cor_vals) + .replace(r'[','') + .replace(r']','')) + + with con_gn.begin() as cnx: + cnx.execute(sql) + +def get_id_status_cor_text_values(con,id_doc,values): + id_text = get_text_id(con,id_doc) + id_vals = [x[0] for x in get_values_id(con,'code_statut',values)] + + zip_vals = tuple(zip(id_vals,[id_text]*len(id_vals))) + cor_vals = tuple(x for x in zip_vals if test_cor_values(con,x)>0) + + sql = (''' + SELECT id_value_text FROM {sch}.{tab} WHERE (id_value,id_text) IN {cor_vals} + ;''' + .format(sch='taxonomie',tab='bdc_statut_cor_text_values',cor_vals=cor_vals) + .replace('),)','))')) + + with con.begin() as cnx: + return cnx.execute(sql).all() + +def test_status_text(con,col,cd_doc): + sql = ''' + SELECT count({col}) FROM {sch}.{tab} WHERE {col} = '{cd_doc}' + ;'''.format(sch='taxonomie',tab='bdc_statut_text',col=col,cd_doc=cd_doc) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def insert_statut_text(con,cd_doc,doc): + if test_data(con,'bdc_statut_text','cd_doc',cd_doc['id_doc']) > 0: + # if test_status_text(con,'cd_doc',cd_doc) > 0: + print('ALERTE TEXT STATUS ALREADY EXISTS : ',doc) + else: + sql = ''' + INSERT INTO {sch}.{tab} (cd_type_statut,cd_sig,cd_doc,niveau_admin,lb_adm_tr,doc_url,enable) VALUES + ('AL','{cd_doc}','INSEED38','Département','Isère','{doc}',TRUE) + ;'''.format(sch='taxonomie',tab='bdc_statut_text',cd_doc=cd_doc['id_doc'],doc=doc) + with con_gn.begin() as cnx: + cnx.execute(sql) + + insert_status_cor_text_area(con,cd_doc['id_doc']) + insert_status_cor_text_values(con,cd_doc['id_doc'],cd_doc['id_values']) + +def get_cd_ref(con,cd_nom): + sql = ''' + SELECT cd_ref FROM {sch}.{tab} WHERE cd_nom = '{cd_nom}' + ;'''.format(sch='taxonomie',tab='taxref',cd_nom=cd_nom) + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + +def get_max_idstatuttaxo(con): + sql = ''' + SELECT max(id) FROM {sch}.{tab} + 
;'''.format(sch='taxonomie',tab='bdc_statut_taxons') + with con.begin() as cnx: + return cnx.execute(sql).one()[0] + + +def insert_status_taxo(con,cd_nom,cd_doc,status): + id_statut_cor = get_id_status_cor_text_values(con,cd_doc,status)[0][0] + cd_ref = get_cd_ref(con,cd_nom) + id_statut_taxo = get_max_idstatuttaxo(con) + 1 + + sql_check = ''' + SELECT count(id) FROM {sch}.{tab} + WHERE id_value_text = '{id_statut_cor}' AND cd_nom = '{cd_nom}' AND cd_ref = '{cd_ref}' + ;'''.format(sch='taxonomie',tab='bdc_statut_taxons',cd_ref=cd_ref,cd_nom=cd_nom,id_statut_cor=id_statut_cor) + with con.begin() as cnx: + check = cnx.execute(sql_check).one()[0] + + if check == 0: + sql = ''' + INSERT INTO {sch}.{tab} (id,id_value_text,cd_nom,cd_ref) VALUES + ('{id_statut_taxo}','{id_statut_cor}','{cd_nom}','{cd_ref}') + ;'''.format(sch='taxonomie',tab='bdc_statut_taxons',id_statut_taxo=id_statut_taxo,cd_ref=cd_ref,cd_nom=cd_nom,id_statut_cor=id_statut_cor) + with con_gn.begin() as cnx: + cnx.execute(sql) + +def get_status_type(con,col,status): + sql = ''' + SELECT * FROM {sch}.{tab} WHERE {col} = '{status}' + ;'''.format(sch='taxonomie',tab='bdc_statut_type',col=col,status=status) + return pd.read_sql(sql,con) + +def get_taxonomie(con,cd_nom): + if isinstance(cd_nom,int): + cd_nom = [cd_nom] + else: + cd_nom = list(cd_nom) + sql = ''' + SELECT cd_nom,cd_ref,cd_sup,lb_nom,lb_auteur,nom_complet_html,nom_valide,regne,phylum,classe,ordre,famille,group1_inpn,group2_inpn FROM {sch}.{tab} WHERE cd_nom IN {cd_nom} + ;'''.format(sch='taxonomie',tab='taxref',cd_nom=tuple(cd_nom)).replace(",)",")") + return pd.read_sql(sql,con) + +if __name__ == "__main__": + from pycen import con_gn, ref_hydro + import pandas as pd + file = '/home/colas/Documents/9_PROJETS/6_GEONATURE/listes_alertes_isère.xlsx' + + insert_status_alerte(con_gn) + cd_doc = { + 'Statut_de_conservation_des_poissons_et_écrevisses_en_Isère_2015':{'id_doc':999990,'id_values':['RE','CR','EN','VU','NT','LC','DD','NA',]}, + 
'Liste_d’alerte_sur_les_orthoptères_menacés_en_Isère_2014':{'id_doc':999991,'id_values':['RE','CR','EN','VU','AS-1','AS-2','AS-3','LC','DD','NA']}, + 'Statuts_de_conservation_de_la_faune_sauvage_en_isere_2016':{'id_doc':999992,'id_values':['RE','CR','EN','VU','NT','LC','DD','NA','NE',]}, + } + + df = (pd.read_excel(file,keep_default_na=False) + .rename(columns={ + 'Statut':'code_statut', + 'CD_NOM':'cd_nom', + 'Source':'source', + 'Source_url':'doc_url', + })) + df = df.loc[df.doc_url!=''] + for d in df.source.unique(): + doc_url = df.loc[df.source==d,'doc_url'].unique()[0] + insert_statut_text(con_gn,cd_doc[d],doc_url) + + # INSERTION dans la table bdc_statut_taxons + for row in df.itertuples(): + id_doc = cd_doc[row.source]['id_doc'] + insert_status_taxo(con=con_gn,cd_nom=row.cd_nom,cd_doc=id_doc,status=row.code_statut) + + st = get_status_type(con_gn,'cd_type_statut','AL') + for c in st: + df[c] = st[c][0] + tax = get_taxonomie(con_gn,df['cd_nom']) + del tax['nom_valide'] + del df['source'] + del df['nom_français'] + del df['nom_latin'] + df = df.merge(tax,how='inner',on='cd_nom') + df['cd_sig'] = 'INSEED38' + df['lb_adm_tr'] = 'Isère' + df['niveau_admin'] = 'Département' + df.to_sql('bdc_statut',con_gn,schema='taxonomie',if_exists='append',index=False)