# Source code for wolfhece.mar.Interface_MAR_WOLF_objet
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 3 16:31:47 2023
@author: jbrajkovic
"""
import numpy as np
import matplotlib.pyplot as plt
import commontools as ct
import xarray as xr
import matplotlib as mpl
import matplotlib.cm as cm
import glob as glob
import pyproj
import geopandas as gpd
from shapely.geometry import Polygon
from fiona.crs import from_epsg
import datetime
from datetime import timedelta
import os
import pandas as pd
from zipfile import ZipFile
import struct
import fiona
from pyproj import Proj, transform
import matplotlib
from scipy import stats as st
from tqdm import tqdm
import geopandas as gpd
import numpy as np
from shapely.ops import unary_union
[docs]
class MAR_input_data:
    """
    Extraction of MAR regional-climate-model data over a rectangular zone.

    NOTE(review): this listing comes from a documentation scrape. Several
    attributes read below (self.fn, self.directory, self.lons, self.lats,
    self.mask, self.vec_data, self.xsummits, ...) have no visible assignment
    here — their defining lines appear to be missing from the scrape.
    Confirm against the original module before relying on this block.
    """
    # NOTE: the default arguments below (np.zeros(0), datetime(...)) are
    # evaluated once at definition time; they are only read, never mutated.
    def __init__(self,xsummits=np.zeros(0),ysummits=np.zeros(0),
    date_debut=datetime.datetime(2020,7,11,5),
    date_fin=datetime.datetime(2020,7,11,5),
    directory='~/BUP_srv7/',
    directory_hist_sim='~/BUP_srv7/',
    model_name='MIROC6',
    var='MBRR',
    var_unb='E',
    UnborNot=0,
    syu=1981,eyu=2010,
    mod_ydays=1,
    generate_quantiles=1):
        """
        xsummits : Lambert-72 abscissas of the extraction rectangle
        ysummits : idem for the ordinates
        date_debut : start date of the extracted time series
        date_fin : idem for the end date
        directory : directory of the yearly NetCDF files
        directory_hist_sim : directory of the yearly NetCDF files of the
            historical simulation period (for unbiasing)
        var : name of the MAR variable to extract; for total
            evapotranspiration (all components) use 'MBEP'
        var_unb : name of the variable used for unbiasing in the IRM
            NetCDF files
        UnborNot : 1 to unbias, 0 for raw data
        syu, eyu : first and last year of the future period used to compare
            model and observations
        mod_ydays : 1 if the model has leap years, 0 otherwise
        """
        # NOTE(review): self.fn is read before any visible assignment —
        # the line building the initial file list is missing from this scrape.
        if 'IRM_grid' in self.fn[0]:
            print('Hajde Hajduce')
            self.fn= glob.glob(self.directory+"*MAR_grid*"+str(date_debut.year)+"**nc*")
            print(self.directory,date_debut.year)
            print(self.fn)
        # Pixel-centre coordinates converted to Belgian Lambert 72.
        self.x_Lb72, self.y_Lb72 = self.Lb72(self.lons,self.lats)
        # self.plot_mask()
        # self.historical_matrix=
        [docs]
        # Pre-computed 1981-2010 quantile files, used by MAR_unbiasing when
        # generate_quantiles == 0.  ([docs] lines are doc-generator artifacts.)
        self.fn_quant_ev='/srv7_tmp1/jbrajkovic/These/Unbiasing/evapotranspiration_quantiles_1981_2010.nc'
        [docs]
        self.fn_quant_pr='/srv7_tmp1/jbrajkovic/These/Unbiasing/precipitation_quantiles_1981_2010.nc'
        # MAR model time step, stored as a timedelta (hours or minutes).
        time_step=self.find_timestep()
        if time_step[1]=='hours': self.MAR_timestep=datetime.timedelta(hours=int(time_step[0]))
        elif time_step[1]=='minutes': self.MAR_timestep=datetime.timedelta(minutes=int(time_step[0]))
        siguns=True  # NOTE(review): local variable, never used afterwards
        format = "<bbhbbbd"  # NOTE(review): unused local; shadows builtin 'format'
[docs]
def mask_rectangles(self):
    """
    Creates the rectangular mask
    so MAR values can be extracted only for the
    precised zone

    Uses the 4 rectangle summits (self.xsummits / self.ysummits, Lambert 72)
    and the pixel-centre coordinates (self.x_Lb72 / self.y_Lb72).

    Returns
    -------
    mask : boolean matrix, True for pixels inside the rectangle.
    """
    i=0
    xmin=np.min(self.xsummits);xmax=np.max(self.xsummits)
    ymin=np.min(self.ysummits);ymax=np.max(self.ysummits)
    x=self.x_Lb72;y=self.y_Lb72
    mask=np.zeros([x.shape[0],x.shape[1]])
    # Sort the 4 summits in place by increasing x; each y follows its summit.
    while i<3:
        j=i+1
        while j<4:
            if self.xsummits[j]<self.xsummits[i]:
                tempx=self.xsummits[i]
                tempy=self.ysummits[i]
                self.xsummits[i]=self.xsummits[j]
                self.ysummits[i]=self.ysummits[j]
                self.xsummits[j]=tempx
                self.ysummits[j]=tempy
                j=i+1  # after a swap, rescan the remaining candidates
            j=j+1
        i=i+1
    # Rotated-rectangle case: test each pixel against the 4 edge lines.
    # NOTE(review): after the ascending sort, xsummits[0]-xsummits[1] <= 0,
    # so this condition can never exceed 0.01 — the rotated branch looks
    # unreachable as written; the test was probably intended on
    # xsummits[1]-xsummits[0].  Confirm before changing.
    if (self.xsummits[0]-self.xsummits[1])>0.01:
        # Slopes of the 4 edges between the x-ordered summits a,b,c,d.
        pab=((self.ysummits[1]-self.ysummits[0])/(self.xsummits[1]-self.xsummits[0]))
        pac=((self.ysummits[2]-self.ysummits[0])/(self.xsummits[2]-self.xsummits[0]))
        pbd=((self.ysummits[3]-self.ysummits[1])/(self.xsummits[3]-self.xsummits[1]))
        pcd=((self.ysummits[3]-self.ysummits[2])/(self.xsummits[3]-self.xsummits[2]))
        for i in range(0,x.shape[0]):
            for j in range(0,y.shape[1]):
                # Case 1: pixel outside the bounding box — skip immediately.
                xp=x[i,j];yp=y[i,j]
                if xp>xmax or xp<xmin or yp>ymax or yp<ymin:
                    continue
                if self.ysummits[1]>self.ysummits[0]:
                    # Rectangle leaning one way: upper/lower bounding edges
                    # depend on which x band the pixel falls in.
                    if xp>self.xsummits[0] and xp<self.xsummits[1]:
                        yhaut=self.ysummits[0]+pab*(xp-self.xsummits[0])
                        ybas=self.ysummits[0]+pac*(xp-self.xsummits[0])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
                    elif xp>self.xsummits[1] and xp<self.xsummits[2]:
                        yhaut=self.ysummits[1]+pbd*(xp-self.xsummits[1])
                        ybas=self.ysummits[0]+pac*(xp-self.xsummits[0])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
                    else:
                        ybas=self.ysummits[2]+pcd*(xp-self.xsummits[2])
                        yhaut=self.ysummits[1]+pbd*(xp-self.xsummits[1])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
                else:
                    # Rectangle leaning the other way: edge roles swap.
                    if xp>self.xsummits[0] and xp<self.xsummits[1]:
                        ybas=self.ysummits[0]+pab*(xp-self.xsummits[0])
                        yhaut=self.ysummits[0]+pac*(xp-self.xsummits[0])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
                    elif xp>self.xsummits[1] and xp<self.xsummits[2]:
                        ybas=self.ysummits[1]+pbd*(xp-self.xsummits[1])
                        yhaut=self.ysummits[0]+pac*(xp-self.xsummits[0])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
                    elif xp>self.xsummits[2] and xp<self.xsummits[3] :
                        yhaut=self.ysummits[2]+pcd*(xp-self.xsummits[2])
                        ybas=self.ysummits[1]+pbd*(xp-self.xsummits[1])
                        if yp<=yhaut and yp>=ybas:mask[i,j]=1
                        else:continue
    else:
        # Axis-aligned rectangle: simple bounding-box membership.
        mask=((x>=xmin)&(x<=xmax))&((y>=ymin)&(y<=ymax))
    mask=mask==1
    print(mask[mask==True].shape)
    return(mask)
[docs]
def plot_mask(self):
    """Quick-look map of the extraction mask (1 = selected pixel, 0 = outside)."""
    flags = np.zeros_like(self.mask)
    flags[self.mask] = 1
    flags[self.mask == False] = 0
    levels = np.arange(0, 1.5, .5)
    palette = cm.Greens
    empty_overlay = np.zeros_like(flags)
    ct.quick_map_plot(self.lons, self.lats, flags, levels, palette, empty_overlay)
    # plt.show() / plt.savefig('mask.png') left to the caller
"Séléction des données entre les deux dates pour le masque rectangulaire"
[docs]
def select_MARdata(self):
    '''
    Select the MAR hydro variable for the pixels of the mask.

    Inputs (taken from the instance):
        self.var         : name of the MAR hydro variable (string)
        self.date_debut  : first date of the extraction
        self.date_fin    : last date of the extraction
        self.directory   : directory with the yearly MAR NetCDF files
        self.mask        : boolean spatial mask of the zone of interest

    Returns
    -------
    values : 2-D matrix (n_masked_pixels, n_timesteps) with all the MAR
        values for every time step, e.g. 5 time steps and 100 pixels ->
        shape (100, 5).
    '''
    varnames=['PRECIP_QUANTITY','E','MBRR','Precip','MBRO3c','MBEPc','ST2c','MBSF','MBRO1','MBRO2','MBRO3','MBRO4',
              'MBRO5','MBRO6','MBCC','MBEP','MBET','MBSL','MBSC','MBM','MBSN']
    var=self.var
    mask=self.mask
    for i in range(0,np.size(varnames)):
        if var==varnames[i]:var_index=i
    if var_index>6:
        # Sector variables (MBSF and beyond) are stored per sub-pixel sector
        # and must be weighted by the sector cover fractions (FRV, in %).
        # Bug fix: the original only loaded the covers for var_index > 7,
        # so 'MBSF' (index 7) reached the sector branches below with
        # 'covers' undefined (NameError).
        var_subpixel_cover="FRV"
        covers=xr.open_dataset(glob.glob(self.directory+"*"+str(self.date_debut.year)+"**nc*")[0])
        covers=np.transpose(np.array(covers[var_subpixel_cover]))/100.
        covers=covers[mask]
    if self.date_debut.year==self.date_fin.year:
        # ------------------------------------------------------------------
        # Whole period inside a single yearly file.
        # ------------------------------------------------------------------
        year=self.date_debut.year;day=self.date_debut.day;month=self.date_debut.month;hour=self.date_debut.hour
        fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
        if 'IRM_grid' in fn[0]:
            fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
        print(fn[0])
        ds=xr.open_dataset(fn[0])
        # Consistency fix: honour mod_ydays like the multi-year branch below.
        JJ=ct.date2JJ(day, month, year,type_mod=self.mod_ydays)
        # Bug fix: the time axis is the LAST axis of the transposed array;
        # shape[2] is the sector axis for 4-D sector variables.
        MAR_time_step=np.transpose(np.array(ds[self.var])).shape[-1]
        # Consistency fix: a 365-day model never has 366-day years.
        if self.mod_ydays==1:
            if ct.isbis(year)==1:ndays=366
            else:ndays=365
        else:
            ndays=365
        MAR_time_step=float(ndays)/float(MAR_time_step)
        MAR_time_step_hours=(MAR_time_step*24)
        if MAR_time_step==1.:
            # Daily data: indices are day-of-year based.
            indice_debut=JJ-1
            indice_fin=ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)
        else:
            # Sub-daily data: indices combine day-of-year and hour.
            indice_debut=(JJ-1)*(int(24/MAR_time_step_hours))+(int(hour\
                /MAR_time_step_hours))
            indice_fin=(ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)-1)*\
                (int(24/MAR_time_step_hours))+(int(self.date_fin.hour\
                /MAR_time_step_hours))+1
        print('indice debut',indice_debut,indice_fin)
        if var_index>6:
            if var=='MBEP':
                # In MAR, total evapotranspiration = MBEP + MBET + MBSL,
                # summed per sector then cover-weighted.
                values1=(np.transpose(np.array(ds[var]))[:,:,0,indice_debut:indice_fin+1])[mask] +\
                    (np.transpose(np.array(ds['MBET']))[:,:,0,indice_debut:indice_fin+1])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,0,indice_debut:indice_fin+1])[mask]
                values2=(np.transpose(np.array(ds[var]))[:,:,1,indice_debut:indice_fin+1])[mask]+\
                    (np.transpose(np.array(ds['MBET']))[:,:,1,indice_debut:indice_fin+1])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,1,indice_debut:indice_fin+1])[mask]
                values3=(np.transpose(np.array(ds[var]))[:,:,2,indice_debut:indice_fin+1])[mask] +\
                    (np.transpose(np.array(ds['MBET']))[:,:,2,indice_debut:indice_fin+1])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,2,indice_debut:indice_fin+1])[mask]
                # Bug fix: after masking, the arrays are 2-D (pixel, time);
                # the original indexed them (and the covers) with three
                # indices, which raised IndexError.  Same weighting as the
                # multi-year branch below.
                for j in range(np.shape(values1)[-1]):
                    values1[:,j]=values1[:,j]*covers[:,0]
                    values2[:,j]=values2[:,j]*covers[:,1]
                    values3[:,j]=values3[:,j]*covers[:,2]
                values=values1+values2+values3
            else:
                values1=np.transpose(np.array(ds[var]))[:,:,0,indice_debut:indice_fin+1][mask]
                values2=np.transpose(np.array(ds[var]))[:,:,1,indice_debut:indice_fin+1][mask]
                values3=np.transpose(np.array(ds[var]))[:,:,2,indice_debut:indice_fin+1][mask]
                # Bug fix: masked arrays are 2-D, so iterate their LAST axis
                # (the original asked for np.shape(values1)[2] -> IndexError).
                for j in range(np.shape(values1)[-1]):
                    values1[:,j]=values1[:,j]*covers[:,0]
                    values2[:,j]=values2[:,j]*covers[:,1]
                    values3[:,j]=values3[:,j]*covers[:,2]
                values=values1+values2+values3
        else:
            # Non-sector variable: plain 3-D slice.
            values=np.transpose(np.array(ds[var]))[:,:,indice_debut:indice_fin+1][mask]
    else:
        # ------------------------------------------------------------------
        # Period spans several yearly files: first year from indice_debut,
        # whole intermediate years, last year up to indice_fin, concatenated
        # along the time axis.
        # ------------------------------------------------------------------
        year=self.date_debut.year;day=self.date_debut.day;month=self.date_debut.month;hour=self.date_debut.hour
        print(year,month,day,hour)
        print(self.date_fin)
        fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
        if 'IRM_grid' in fn[0]:
            fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
        ds=xr.open_dataset(fn[0])
        JJ=ct.date2JJ(day, month, year,type_mod=self.mod_ydays)
        MAR_time_step=np.transpose(np.array(ds[self.var])).shape[-1]
        if self.mod_ydays==1:
            if ct.isbis(year)==1:ndays=366
            else:ndays=365
        else:
            ndays=365
        MAR_time_step=ndays/float(MAR_time_step)
        MAR_time_step_hours=(MAR_time_step*24)
        if MAR_time_step==1.:
            indice_debut=JJ-1
            indice_fin=ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)
        else:
            indice_debut=(JJ-1)*(int(24/MAR_time_step_hours))+(int(hour\
                /MAR_time_step_hours))
            indice_fin=(ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)-1)*\
                (int(24/MAR_time_step_hours))+(int(self.date_fin.hour\
                /MAR_time_step_hours))+1
        print("indices début et fin",MAR_time_step_hours,indice_debut,indice_fin)
        if var_index>6:
            if var=='MBEP':
                # Total evapotranspiration = MBEP + MBET + MBSL per sector.
                values1=(np.transpose(np.array(ds[var]))[:,:,0,indice_debut:])[mask] +\
                    (np.transpose(np.array(ds['MBET']))[:,:,0,indice_debut:])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,0,indice_debut:])[mask]
                values2=(np.transpose(np.array(ds[var]))[:,:,1,indice_debut:])[mask]+\
                    (np.transpose(np.array(ds['MBET']))[:,:,1,indice_debut:])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,1,indice_debut:])[mask]
                values3=(np.transpose(np.array(ds[var]))[:,:,2,indice_debut:])[mask] +\
                    (np.transpose(np.array(ds['MBET']))[:,:,2,indice_debut:])[mask]+\
                    (np.transpose(np.array(ds['MBSL']))[:,:,2,indice_debut:])[mask]
                for j in range(np.shape(values1)[-1]):
                    values1[:,j]=values1[:,j]*covers[:,0]
                    values2[:,j]=values2[:,j]*covers[:,1]
                    values3[:,j]=values3[:,j]*covers[:,2]
                values=(values1+values2+values3)
                for y in range(year+1,self.date_fin.year+1):
                    print(y)
                    if y<self.date_fin.year:
                        fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
                        if 'IRM_grid' in fn[0]:
                            # Bug fix: the fallback glob used 'year' (the
                            # first year) instead of the loop year 'y'.
                            fn = glob.glob(self.directory+"*MAR_grid*"+str(y)+"**nc*")
                        ds=xr.open_dataset(fn[0])
                        values1=(np.transpose(np.array(ds[var]))[:,:,0,:])[mask]+\
                            (np.transpose(np.array(ds['MBET']))[:,:,0,:])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,0,:])[mask]
                        values2=(np.transpose(np.array(ds[var]))[:,:,1,:])[mask]+\
                            (np.transpose(np.array(ds['MBET']))[:,:,1,:])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,1,:])[mask]
                        values3=(np.transpose(np.array(ds[var]))[:,:,2,:])[mask]+\
                            (np.transpose(np.array(ds['MBET']))[:,:,2,:])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,2,:])[mask]
                        for j in range(0,np.shape(values1)[-1]):
                            values1[:,j]=values1[:,j]*covers[:,0]
                            values2[:,j]=values2[:,j]*covers[:,1]
                            values3[:,j]=values3[:,j]*covers[:,2]
                        values=np.append(values,(values1+values2+values3),axis=1)
                    else:
                        fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
                        if 'IRM_grid' in fn[0]:
                            # Bug fix: 'y', not 'year' (see above).
                            fn = glob.glob(self.directory+"*MAR_grid*"+str(y)+"**nc*")
                        ds=xr.open_dataset(fn[0])
                        values1=(np.transpose(np.array(ds[var]))[:,:,0,:indice_fin])[mask] +\
                            (np.transpose(np.array(ds['MBET']))[:,:,0,:indice_fin])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,0,:indice_fin])[mask]
                        values2=(np.transpose(np.array(ds[var]))[:,:,1,:indice_fin])[mask]+\
                            (np.transpose(np.array(ds['MBET']))[:,:,1,:indice_fin])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,1,:indice_fin])[mask]
                        values3=(np.transpose(np.array(ds[var]))[:,:,2,:indice_fin])[mask]+\
                            (np.transpose(np.array(ds['MBET']))[:,:,2,:indice_fin])[mask]+\
                            (np.transpose(np.array(ds['MBSL']))[:,:,2,:indice_fin])[mask]
                        for j in range(0,np.shape(values1)[-1]):
                            values1[:,j]=values1[:,j]*covers[:,0]
                            values2[:,j]=values2[:,j]*covers[:,1]
                            values3[:,j]=values3[:,j]*covers[:,2]
                        values=np.append(values,(values1+values2+values3),axis=1)
            else:
                # Other sector variables: cover-weighted sum of the 3 sectors.
                values1=np.transpose(np.array(ds[var]))[:,:,0,indice_debut:][mask]
                values2=np.transpose(np.array(ds[var]))[:,:,1,indice_debut:][mask]
                values3=np.transpose(np.array(ds[var]))[:,:,2,indice_debut:][mask]
                for j in range(np.shape(values1)[-1]):
                    values1[:,j]=values1[:,j]*covers[:,0]
                    values2[:,j]=values2[:,j]*covers[:,1]
                    values3[:,j]=values3[:,j]*covers[:,2]
                values=(values1+values2+values3)
                print(self.var,values.shape)
                for y in range(year+1,self.date_fin.year+1):
                    print(y)
                    if y<self.date_fin.year:
                        fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
                        if 'IRM_grid' in fn[0]:
                            # Bug fix: 'y', not 'year'.
                            fn = glob.glob(self.directory+"*MAR_grid*"+str(y)+"**nc*")
                        ds=xr.open_dataset(fn[0])
                        values1=np.transpose(np.array(ds[var]))[:,:,0,:][mask]
                        values2=np.transpose(np.array(ds[var]))[:,:,1,:][mask]
                        values3=np.transpose(np.array(ds[var]))[:,:,2,:][mask]
                        for j in range(np.shape(values1)[-1]):
                            values1[:,j]=values1[:,j]*covers[:,0]
                            values2[:,j]=values2[:,j]*covers[:,1]
                            values3[:,j]=values3[:,j]*covers[:,2]
                        values=np.append(values,(values1+values2+values3),axis=1)
                        print(self.var,values.shape)
                    else:
                        fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
                        if 'IRM_grid' in fn[0]:
                            # Bug fix: 'y', not 'year'.
                            fn = glob.glob(self.directory+"*MAR_grid*"+str(y)+"**nc*")
                        ds=xr.open_dataset(fn[0])
                        values1=np.transpose(np.array(ds[var]))[:,:,0,:indice_fin][mask]
                        values2=np.transpose(np.array(ds[var]))[:,:,1,:indice_fin][mask]
                        values3=np.transpose(np.array(ds[var]))[:,:,2,:indice_fin][mask]
                        print(np.shape(values1)[-1])
                        for j in range(np.shape(values1)[-1]):
                            values1[:,j]=values1[:,j]*covers[:,0]
                            values2[:,j]=values2[:,j]*covers[:,1]
                            values3[:,j]=values3[:,j]*covers[:,2]
                        values=np.append(values,(values1+values2+values3),axis=1)
                        print(self.var,values.shape)
        else:
            # Non-sector variable across several years.
            values=np.transpose(np.array(ds[var]))[:,:,indice_debut:][mask]
            print(self.var,values.shape)
            for y in range(year+1,self.date_fin.year+1):
                fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
                if 'IRM_grid' in fn[0]:
                    # Bug fix: 'y', not 'year'.
                    fn = glob.glob(self.directory+"*MAR_grid*"+str(y)+"**nc*")
                ds=xr.open_dataset(fn[0])
                print(y)
                if y<self.date_fin.year:
                    values=np.append(values,
                                     np.transpose(np.array(ds[var]))[:,:,:][mask],
                                     axis=1)
                else:
                    values=np.append(values,
                                     np.transpose(np.array(ds[var]))[:,:,:indice_fin][mask],
                                     axis=1)
                print(self.var,values.shape)
    return(values)
"Definition of the mar time-step"
"A modifier par la suite si le pas temporel du MAR est inférieur à l'heure"
[docs]
def MAR_unbiasing(self):
    """
    Quantile-mapping unbiasing of the extracted MAR series.

    Builds (or loads) 0..100% quantiles of the biased (MAR historical run)
    and unbiased (IRM observations) daily series, derives per-quantile
    correction coefficients, applies them to the daily sums of the
    requested period, then redistributes the corrected daily amounts back
    to the model time step.

    NOTE(review): the daily aggregation assumes an hourly model time step
    (tsd = 24 is only set inside the 'hours' branches); with a 'minutes'
    time step several variables below would be undefined.
    """
    th_drizzle=.1  # daily amounts below this threshold are treated as dry
    print("letsgo")
    "**********************************************************"
    "Lecture des données sur la période historiqe de simulation"
    "**********************************************************"
    # Observed (unbiased) 1981-2010 historical series.
    # NOTE(review): self.directory_unbiasing has no visible assignment in
    # this class (the __init__ argument is named directory_hist_sim) —
    # confirm which attribute is really set.
    if self.generate_quantiles==1:
        historical_matrix_unbias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                                                date_debut=datetime.datetime(1981,1,1,0),
                                                date_fin=datetime.datetime(2010,12,31,23),
                                                directory=self.directory_unbiasing, var=self.var_unb).vec_data
    # Future comparison window [syu, eyu].
    date_debutu=datetime.datetime(self.syu,1,1,0)
    date_finu=datetime.datetime(self.eyu,12,31,23)
    if self.var_unb=='PRECIP_QUANTITY':
        # Precipitation: biased series = rain (MBRR) + snowfall (MBSF).
        if self.generate_quantiles==1:
            print('on va le faire')
            historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                                                  date_debut=datetime.datetime(1981,1,1,0),
                                                  date_fin=datetime.datetime(2010,12,31,23),
                                                  directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
                MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
                               date_debut=datetime.datetime(1981,1,1,0),
                               date_fin=datetime.datetime(2010,12,31,23),
                               directory=self.directory, var='MBSF',
                               mod_ydays=self.mod_ydays).vec_data
        biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                                   date_debut=self.date_debut,
                                   date_fin=self.date_fin,
                                   directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
            MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
                           date_debut=self.date_debut,
                           date_fin=self.date_fin,
                           directory= self.directory,var= 'MBSF',mod_ydays=self.mod_ydays).vec_data
        print(self.date_debut,self.date_fin)
        print('biased data shape',biased_data.shape)
        # Future (still biased) series used to locate values in the
        # future distribution.
        FutUnb=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                              date_debut=date_debutu,
                              date_fin=date_finu,
                              directory=self.directory, var='MBRR',
                              mod_ydays=self.mod_ydays).vec_data+\
            MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                           date_debut=date_debutu,
                           date_fin=date_finu,
                           directory=self.directory, var='MBSF',
                           mod_ydays=self.mod_ydays).vec_data
    elif self.var_unb=='E':
        # Evapotranspiration: biased series = MBEP (all components).
        if self.generate_quantiles==1:
            historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                                                  date_debut=datetime.datetime(1981,1,1,0),
                                                  date_fin=datetime.datetime(2010,12,31,23),
                                                  directory=self.directory, var='MBEP',
                                                  mod_ydays=self.mod_ydays).vec_data
        biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                                   date_debut=self.date_debut,
                                   date_fin=self.date_fin,
                                   directory=self.directory, var='MBEP',
                                   mod_ydays=self.mod_ydays).vec_data
        FutUnb=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
                              date_debut=date_debutu,
                              date_fin=date_finu,
                              directory=self.directory, var='MBEP',
                              mod_ydays=self.mod_ydays).vec_data
    "****************************************************"
    "Calcul des quantiles historiques simulés et observés"
    "****************************************************"
    # 101 quantiles (0%..100% by 1%) per masked pixel.
    quant_mat=np.zeros([biased_data.shape[0],101])
    quant_mat_bias=np.zeros([biased_data.shape[0],101])
    quant_coeffs=np.zeros([biased_data.shape[0],101])
    if self.generate_quantiles==1:
        historical_matrix_unbias[historical_matrix_unbias<th_drizzle]=0
        if self.find_timestep()[1]=='hours':
            tsd=24  # time steps per day
            # Aggregate the biased hourly series to daily sums.
            historical_matrix_bias_d=np.zeros([historical_matrix_bias.shape[0],
                                               int(historical_matrix_bias.shape[1]/tsd)])
            for i in range(historical_matrix_bias_d.shape[0]):
                for d in range(historical_matrix_bias_d.shape[1]):
                    historical_matrix_bias_d[i,d]=np.sum(historical_matrix_bias[i,d*tsd:(d+1)*tsd])
            historical_matrix_bias_d[historical_matrix_bias_d<th_drizzle]=0
            # Quantiles of wet days only (> drizzle threshold).
            for i in range(historical_matrix_unbias.shape[0]):
                quant_mat_bias[i,:]=np.quantile(historical_matrix_bias_d[i,:]\
                    [historical_matrix_bias_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
                quant_mat[i,:]=np.quantile(historical_matrix_unbias[i,:][historical_matrix_unbias[i,:]>th_drizzle],np.arange(0,1.01,0.01))
                for j in range(quant_mat.shape[1]):quant_coeffs[i,j]=quant_mat[i,j]/quant_mat_bias[i,j]
    else:
        # Load pre-computed 1981-2010 quantiles instead of recomputing.
        if self.var_unb=='E':fn_quant=self.fn_quant_ev
        else:fn_quant=self.fn_quant_pr
        quant_mat_bias=ct.marray(xr.open_dataset(fn_quant),self.model_name)[self.mask]
        quant_mat=ct.marray(xr.open_dataset(fn_quant),'IRM')[self.mask]
        for i in range(quant_mat.shape[0]):
            for j in range(quant_mat.shape[1]):quant_coeffs[i,j]=quant_mat[i,j]/quant_mat_bias[i,j]
    # biased_data_var=np.array(self.vec_data)
    "******************************************"
    "****Débiaisage des données daily**********"
    "******************************************"
    "Future quantiles to assess value location"
    if self.find_timestep()[1]=='hours':
        tsd=24
        # Daily sums of the future series, then their wet-day quantiles.
        FutUnb_d=np.zeros([biased_data.shape[0],
                           int(FutUnb.shape[1]/tsd)])
        for i in range(biased_data.shape[0]):
            for d in range(FutUnb_d.shape[1]):
                FutUnb_d[i,d]=np.sum(FutUnb[i,d*tsd:(d+1)*tsd])
        quant_mat_fut=np.zeros_like(quant_mat)
        for i in range(FutUnb.shape[0]):
            quant_mat_fut[i,:]=np.quantile(FutUnb_d[i,:]\
                [FutUnb_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
    print(self.find_timestep()[1])
    if self.find_timestep()[1]=='hours':
        # Daily sums of the period to unbias.
        biased_data_d=np.zeros([biased_data.shape[0],
                                int(biased_data.shape[1]/24)+1])
        for i in range(biased_data.shape[0]):
            for d in range(biased_data_d.shape[1]):
                biased_data_d[i,d]=np.sum(biased_data[i,d*tsd:(d+1)*tsd])
    # Apply the per-quantile correction to each daily value.
    for i in range(self.vec_data.shape[0]):
        for j in range(biased_data_d.shape[1]):
            if biased_data_d[i,j]>th_drizzle:
                for k in range(quant_mat.shape[1]):
                    if k==quant_mat.shape[1]-1:
                        if biased_data_d[i,j]>=quant_mat_fut[i,k]:
                            "Si en dehors de la distribution facteur du quantile 99 appliqué"
                            # NOTE(review): indexes the HOURLY array
                            # biased_data with the day index j — probably
                            # biased_data_d[i,j] was intended; confirm.
                            biased_data_d[i,j]=biased_data[i,j]*quant_coeffs[i,k-1]
                    elif k<quant_mat.shape[1]-1:
                        # Linear interpolation of the coefficient between
                        # the two surrounding future quantiles.
                        if biased_data_d[i,j]>=quant_mat_fut[i,k] and biased_data_d[i,j]<=quant_mat_fut[i,k+1]:
                            biased_data_d[i,j]=(quant_coeffs[i,k]*(biased_data_d[i,j]-quant_mat_fut[i,k])/\
                                (quant_mat_fut[i,k+1]-quant_mat_fut[i,k])+quant_coeffs[i,k+1]*(quant_mat_fut[i,k+1]-biased_data_d[i,j])/\
                                (quant_mat_fut[i,k+1]-quant_mat_fut[i,k]))*biased_data_d[i,j]
                            break
            else:
                biased_data_d[i,j]=0
            if pd.isna(biased_data_d[i,j]):
                biased_data_d[i,j]=0.
    Unbiased_data_d=np.array(biased_data_d)
    "*****************************************"
    "**Redistribution au pas de temps horaire**"
    "*****************************************"
    ydays=biased_data_d.shape[1]
    Unbiased_data=np.zeros_like(biased_data)
    print ("redistributing on the daily time-step")
    if self.var_unb=='PRECIP_QUANTITY':
        "Si ce sont les pluies qui sont débiasées"
        "On débiaise sur tout l'événement et non pas jour après jour"
        # Rain: redistribute over whole multi-day wet events, weighting the
        # corrected event total by the original hourly pattern.
        for i in range(self.vec_data.shape[0]):
            d=0
            while d<ydays:
                if Unbiased_data_d[i,d]<=0.1:d+=1
                else:
                    d1=d
                    ndays=0
                    # Extend the event while consecutive days stay wet.
                    while d1<ydays and Unbiased_data_d[i,d1]>.1 :
                        d1+=1;ndays+=1
                    precip_sum_d=np.sum(Unbiased_data_d[i,d:d+ndays])
                    biased_sum=np.sum(biased_data\
                        [i,d*tsd:(d+ndays)*tsd])
                    biased_hourly=(biased_data)\
                        [i,d*tsd:(d+ndays)*tsd]
                    weights=biased_hourly/biased_sum
                    Unbiased_data[i,d*tsd:(d+ndays)*tsd]=\
                        precip_sum_d*weights
                    d+=ndays
    else:
        "Débiai<-sage jour après jour pour l'évapotranspiration notamment"
        # Evapotranspiration etc.: redistribute each day independently.
        for i in range(self.vec_data.shape[0]):
            d=0
            while d<ydays:
                if Unbiased_data_d[i,d]<=0.1:d+=1
                else:
                    precip_sum_d=Unbiased_data_d[i,d]
                    biased_sum=np.sum(biased_data\
                        [i,d*tsd:(d+1)*tsd])
                    biased_hourly=(biased_data)\
                        [i,d*tsd:(d+1)*tsd]
                    weights=biased_hourly/biased_sum
                    Unbiased_data[i,d*tsd:(d+1)*tsd]=\
                        precip_sum_d*weights
                    d+=1
    if self.var=='MBRO3' or self.var=='MBRR' or self.var=='MBSF':
        # For precipitation components, rescale the unbiased total rain by
        # the component's share of the biased total.
        biased_data_var=self.vec_data
        print("biased data var shape ",biased_data_var.shape)
        print('unbiased data shape' ,Unbiased_data.shape)
        propor2var=(biased_data_var/biased_data)
        Unbiased_data=Unbiased_data*propor2var
    "**** 2 méthodes******"
    # Dry hours produce 0/0 weights above; force NaNs to zero.
    Unbiased_data[pd.isna(Unbiased_data)]=0.
    return(Unbiased_data)
[docs]
def find_timestep(self):
    """
    Find the MAR time step from the first year's NetCDF file.

    Returns
    -------
    vec_out : list of two strings ``[value, unit]`` where unit is
        ``'hours'`` or ``'minutes'`` and value is the step expressed in
        that unit.
    """
    year=self.date_debut.year
    fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
    ds=xr.open_dataset(fn[0])
    vec_out=['','']
    # Number of records along the time axis.  Bug fix: use the LAST axis
    # of the transposed array — for 4-D sector variables (e.g. MBEP)
    # shape[2] is the sector dimension, not time; the multi-year branch of
    # select_MARdata already uses shape[-1].
    MAR_time_step=np.transpose(np.array(ds[self.var])).shape[-1]
    if self.mod_ydays==1:
        if ct.isbis(year)==1:ndays=366
        else:ndays=365
    else:
        ndays=365
    MAR_time_step=ndays/MAR_time_step          # step in days
    MAR_time_step_hours=24*MAR_time_step       # step in hours
    if MAR_time_step_hours<1:vec_out[1]='minutes';vec_out[0]=str(int(MAR_time_step_hours*60))
    else:vec_out[1]='hours';vec_out[0]=str(int(MAR_time_step_hours))
    return(vec_out)
[docs]
def make_time(self):
    """
    Format a matrix with the date of every time step as
    [hour, day, month, year] rows.

    To be extended if the time step becomes shorter than one hour.

    NOTE(review): unlike the rest of the class, this routine indexes
    self.date_debut / self.date_fin as [hour, day, month, year] vectors,
    not datetime objects — confirm which representation the callers use.
    """
    time_step=self.find_timestep()
    if time_step[1]=='hours':
        time_step=int(time_step[0])
    date=np.array([self.date_debut])
    end_month=[31,28,31,30,31,30,31,31,30,31,30,31]
    i=0
    datec=np.array(self.date_debut)
    # Advance step by step until the current date equals date_fin.
    while ((self.date_fin[0] != datec[0]) or (self.date_fin[1] != datec[1]) \
           or (self.date_fin[2] != datec[2]) or (self.date_fin[3] != datec[3])):
        if i!=0:datec=date[i,:]
        new_hour=datec[0]+time_step
        # Hour rollover -> next day.
        if new_hour>=24.:new_day=datec[1]+1;new_hour=new_hour-24
        else:new_day=datec[1]
        # February length depends on leap years.
        if datec[2]==2.:
            if self.mod_ydays!=9999:
                if ct.isbis(datec[3]):end_month[1]=29
                else:end_month[1]=28
        # Day rollover -> next month (and possibly next year).
        # NOTE(review): on a plain month rollover (new_month <= 12) the
        # variable new_year is never assigned in this branch; the first such
        # rollover would raise NameError unless a previous iteration set it.
        if new_day>end_month[int(datec[2])-1]:
            new_month=datec[2]+1
            new_day=1
            if new_month>12:
                new_year=datec[3]+1
                new_month=1
        else:new_month=datec[2];new_year=datec[3]
        new_vec=np.array([[new_hour,new_day,new_month,new_year]])
        date=np.append(date,new_vec,axis=0)
        datec=np.array([new_hour,new_day,new_month,new_year])
        i=i+1
    date=np.append(date,np.array([self.date_fin]),axis=0)
    return(date)
"Calcul des sommets des pixels MAR"
[docs]
def MAR_summits(self):
    """
    Compute the Lambert-72 coordinates of the 4 corners of every MAR pixel.

    Each corner is the average of the pixel-centre longitude/latitude and
    its three neighbouring centres, converted to Lambert 72 with self.Lb72.

    Returns
    -------
    (summits_x, summits_y) : arrays of shape (ny, nx, 4); border pixels
        are left at zero.
    """
    ny, nx = self.lons.shape[0], self.lons.shape[1]
    corner_lon = np.zeros([ny, nx, 4])
    corner_lat = np.zeros([ny, nx, 4])
    # Neighbour offsets averaged with the centre for each of the 4 corners
    # (order preserved from the original so float summation is identical).
    corner_offsets = (((-1, 0), (-1, -1), (0, -1)),
                      ((-1, 0), (-1, 1), (0, 1)),
                      ((0, 1), (1, 0), (1, 1)),
                      ((0, -1), (1, -1), (1, 0)))
    for r in range(1, ny - 1):
        for c in range(1, nx - 1):
            for k, offsets in enumerate(corner_offsets):
                lon_acc = self.lons[r, c]
                lat_acc = self.lats[r, c]
                for dr, dc in offsets:
                    lon_acc = lon_acc + self.lons[r + dr, c + dc]
                    lat_acc = lat_acc + self.lats[r + dr, c + dc]
                corner_lon[r, c, k] = lon_acc / 4
                corner_lat[r, c, k] = lat_acc / 4
    summits_x, summits_y = self.Lb72(corner_lon, corner_lat)
    return(summits_x, summits_y)
"Sortie shapefile"
[docs]
def MAR_shapefile(self,name,dirout1):
    """
    Export the masked MAR pixels as polygons in a shapefile.

    Parameters
    ----------
    name : str
        Shapefile name; written in the 'Grid/' sub-folder of dirout1.
    dirout1 : str
        Output directory (created if missing, along with 'Grid/').
    """
    MASK=self.mask_rectangles()
    sommets_x,sommets_y=self.MAR_summits()
    # Stack the 4 corner coordinates of every masked pixel: (n_pixels, 4).
    xs=np.array([sommets_x[:,:,0][MASK]])
    ys=np.array([sommets_y[:,:,0][MASK]])
    for i in range(1,4):
        xs=np.append(xs,np.array([sommets_x[:,:,i][MASK]]),axis=0)
        ys=np.append(ys,np.array([sommets_y[:,:,i][MASK]]),axis=0)
    xs=np.transpose(xs);ys=np.transpose(ys)
    newdata = gpd.GeoDataFrame()
    newdata['geometry'] = None
    # One quadrilateral per pixel, identified by a 1-based 'polyID'.
    for i in range(0,xs.shape[0]):
        coordinates=[(xs[i,0],ys[i,0]),(xs[i,1],ys[i,1]),
                     (xs[i,2],ys[i,2]),(xs[i,3],ys[i,3])]
        poly = Polygon(coordinates)
        newdata.loc[i, 'geometry'] = poly
        newdata.loc[i, 'polyID'] = str(i+1)
    # Bug fix: the original assigned 'newdata.crss' (typo), which only set
    # an arbitrary Python attribute and left the GeoDataFrame without a
    # coordinate reference system.  EPSG:31370 is Belgian Lambert 72.
    newdata.crs = from_epsg(31370)
    if os.path.exists(dirout1)==False:os.mkdir(dirout1)
    if os.path.exists(dirout1+'Grid/')==False:os.mkdir(dirout1+'Grid/')
    outfp=dirout1+'Grid/'+name
    newdata.to_file(outfp)
"sortie fichiers textes"
[docs]
def MAR_TextOutputs(self,dirout1,extension='.rain.txt'):
    """
    Text output: one file per polygon, named '<polygon ID><extension>',
    written in the 'DATA/' sub-folder of dirout1.

    Each line holds day, month, year, hour, minute, second and the value
    ("%.3f"), tab-separated.  For 365-day models (mod_ydays != 1) a zero
    is inserted on every real-calendar 29th of February so the written
    series follows the true calendar.

    Bug fixes vs the original leap-year branch: the Feb-29 mask used '&'
    (which dropped the 29th of EVERY month), the outer loop ran over time
    steps instead of polygons, and the mask was indexed with model-calendar
    indices.  Rebuilt on the scheme of MAR_BinaryOutputs_V2.
    """
    time_step=self.find_timestep()
    if not self.UnborNot:vec_data=self.vec_data
    else:vec_data=self.MAR_unbiasing()
    date_debut=self.date_debut
    if os.path.exists(dirout1)==False:os.mkdir(dirout1)
    if os.path.exists(dirout1+'DATA/')==False:os.mkdir(dirout1+'DATA/')
    if time_step[1]=='hours': MAR_timestep=datetime.timedelta(hours=int(time_step[0]))
    elif time_step[1]=='minutes': MAR_timestep=datetime.timedelta(minutes=int(time_step[0]))
    # Real-calendar time axis of the requested period.
    time = pd.date_range(self.date_debut, self.date_fin, freq='H')
    print(time.shape, vec_data.shape)
    if self.mod_ydays == 1:
        full_data = vec_data
    else:
        # True everywhere except on a real-calendar 29th of February;
        # model values fill those positions, Feb-29 stays at zero.
        mask_366 = (time.day != 29) | (time.month != 2)
        full_data = np.zeros((vec_data.shape[0], time.shape[0]),
                             dtype=vec_data.dtype)
        full_data[:, mask_366] = vec_data
    for i in tqdm(range(full_data.shape[0]), desc="Writing text data"):
        filename = str(i+1) + extension
        with open(dirout1+"DATA/"+filename, 'w') as f:
            date_move = date_debut
            for j in range(full_data.shape[1]):
                if j!=0:date_move=date_move+MAR_timestep
                fields=[str(date_move.day),str(date_move.month),str(date_move.year),
                        str(date_move.hour),str(date_move.minute),str(date_move.second),
                        "{:.3f}".format(full_data[i,j])]
                # Original layout: every field followed by a tab, then EOL.
                f.write("\t".join(fields)+"\t\n")
[docs]
def MAR_BinaryOutputs(self,dirout1,extension='.rain.dat'):
    """
    Binary output: one file per polygon, named '<polygon ID><extension>',
    written in the 'IRM/' sub-folder of dirout1.

    File layout (little endian):
      header  '<iiii'    : polygon ID, 1, 7, number of records
      records '<bbhbbbd' : day, month, year, hour, minute, second, value

    For 365-day models (mod_ydays != 1) a zero record is inserted on every
    real-calendar 29th of February.

    Bug fixes vs the original leap-year branch: the Feb-29 mask used '&'
    (dropping the 29th of EVERY month), the outer loop ran over time steps
    instead of polygons, and the header declared vec_data.shape[1] records
    while a different count was written.  Aligned with MAR_BinaryOutputs_V2.
    """
    if os.path.exists(dirout1)==False:os.mkdir(dirout1)
    if os.path.exists(dirout1+'IRM/')==False:os.mkdir(dirout1+'IRM/')
    if not self.UnborNot:vec_data=self.vec_data
    else:vec_data=self.MAR_unbiasing()
    time=self.time
    print('!!!!!!!!!!!!!!!\nVEC and time shapes\n',time.shape, vec_data.shape)
    date_debut=self.date_debut
    MAR_timestep=self.MAR_timestep
    if self.mod_ydays != 1:
        # True everywhere except on a real-calendar Feb 29; those positions
        # stay at zero, model values fill the rest.
        mask_366 = (time.day != 29) | (time.month != 2)
        full_data = np.zeros((vec_data.shape[0], time.shape[0]),
                             dtype=vec_data.dtype)
        full_data[:, mask_366] = vec_data
    else:
        full_data = vec_data
    fmt = "<bbhbbbd"
    for i in tqdm(range(full_data.shape[0]), desc="Writing binary data"):
        filename = str(i+1) + extension
        with open(dirout1 + "IRM/" + filename, 'wb') as f:
            # Header: record count now matches what is actually written.
            f.write(struct.pack('<iiii', i+1, 1, 7, full_data.shape[1]))
            date_move = date_debut
            for j in range(full_data.shape[1]):
                if j != 0:
                    date_move = date_move + MAR_timestep
                f.write(struct.pack(
                    fmt,
                    date_move.day, date_move.month, date_move.year,
                    date_move.hour, date_move.minute, date_move.second,
                    round(full_data[i, j], 3)
                ))
[docs]
def MAR_BinaryOutputs_V2(self, dirout1, extension='.rain.dat'):
    """
    Write one binary file per polygon (filename = polygon ID + extension).

    Binary layout per file:
      header : '<iiii'    -> (polygon ID, 1, 7, number of records)
      record : '<bbhbbbd' -> (day, month, year, hour, minute, second, value)

    For 365-day model calendars (mod_ydays != 1) the series is expanded
    once onto the real calendar, inserting 0 on Feb 29.

    Parameters
    ----------
    dirout1 : str
        Output directory (an 'IRM/' subdirectory is created inside it).
    extension : str
        Suffix appended to the polygon ID to build each filename.
    """
    # --- Output directories (idempotent)
    os.makedirs(dirout1, exist_ok=True)
    os.makedirs(dirout1 + 'IRM/', exist_ok=True)
    # --- Raw or unbiased data
    vec_data = self.vec_data if not self.UnborNot else self.MAR_unbiasing()
    time = self.time
    print('!!!!!!!!!!!!!!!\nVEC and time shapes\n', time.shape, vec_data.shape)
    date_debut = self.date_debut  # (was assigned twice in the original)
    MAR_timestep = self.MAR_timestep
    # --- Leap-year handling (done once): expand onto the full calendar
    if self.mod_ydays != 1:
        mask_366 = (time.day != 29) | (time.month != 2)  # True = not Feb 29
        full_data = np.zeros((vec_data.shape[0], time.shape[0]),
                             dtype=vec_data.dtype)
        full_data[:, mask_366] = vec_data
    else:
        full_data = vec_data
    # --- Binary record format, compiled once instead of per record
    rec = struct.Struct("<bbhbbbd")
    # --- Writing loop
    for i in tqdm(range(full_data.shape[0]), desc="Writing binary data"):
        filename = f"{i+1}{extension}"
        with open(dirout1 + "IRM/" + filename, "wb") as f:
            # Header
            f.write(struct.pack('<iiii', i + 1, 1, 7, full_data.shape[1]))
            date_move = date_debut
            for j in range(full_data.shape[1]):
                if j != 0:
                    date_move += MAR_timestep
                f.write(rec.pack(
                    date_move.day, date_move.month, date_move.year,
                    date_move.hour, date_move.minute, date_move.second,
                    round(full_data[i, j], 3)))
[docs]
def MAR_BinaryOutputs_temperatures(self, dirout1, extension='.rain.dat'):
    """
    Write one binary file per polygon (filename = polygon ID + extension).

    Binary layout per file:
      header : '<iiii'     -> (polygon ID, 2, 8, number of records)
      record : '<bbhbbbdd' -> (day, month, year, hour, minute, second,
                               value, value)

    NOTE(review): the same value is written twice per record — presumably
    placeholders for a min/max temperature pair; confirm with the reader
    side before changing.
    """
    # --- Output directories (idempotent)
    os.makedirs(dirout1, exist_ok=True)
    os.makedirs(dirout1 + 'IRM/', exist_ok=True)
    # --- Raw or unbiased data
    vec_data = self.vec_data if not self.UnborNot else self.MAR_unbiasing()
    time = self.time
    print('!!!!!!!!!!!!!!!\nVEC and time shapes\n', time.shape, vec_data.shape)
    date_debut = self.date_debut  # (was assigned twice in the original)
    MAR_timestep = self.MAR_timestep
    # --- Leap-year handling (done once): expand onto the full calendar,
    # inserting 0 on Feb 29 for 365-day model calendars.
    if self.mod_ydays != 1:
        mask_366 = (time.day != 29) | (time.month != 2)  # True = not Feb 29
        full_data = np.zeros((vec_data.shape[0], time.shape[0]),
                             dtype=vec_data.dtype)
        full_data[:, mask_366] = vec_data
    else:
        full_data = vec_data
    # --- Binary record format, compiled once instead of per record
    rec = struct.Struct("<bbhbbbdd")
    # --- Writing loop
    for i in tqdm(range(full_data.shape[0]), desc="Writing binary data"):
        filename = f"{i+1}{extension}"
        with open(dirout1 + "IRM/" + filename, "wb") as f:
            # Header
            f.write(struct.pack('<iiii', i + 1, 2, 8, full_data.shape[1]))
            date_move = date_debut
            for j in range(full_data.shape[1]):
                if j != 0:
                    date_move += MAR_timestep
                val = round(full_data[i, j], 3)  # rounded once, written twice
                f.write(rec.pack(
                    date_move.day, date_move.month, date_move.year,
                    date_move.hour, date_move.minute, date_move.second,
                    val, val))
[docs]
def MAR_BinaryOutputs_temperatures_daily(self, dirout1, extension='.rain.dat'):
    """
    Write one binary file per polygon with DAILY max/min values
    (filename = polygon ID + extension).

    Binary layout per file:
      header : '<iiii'  -> (polygon ID, 2, 5, number of days)
      record : '<bbhdd' -> (day, month, year, daily max, daily min)
    """
    # --- Output directories (idempotent)
    os.makedirs(dirout1, exist_ok=True)
    irm_dir = dirout1 + "IRM/"
    os.makedirs(irm_dir, exist_ok=True)
    # --- Raw or unbiased data
    if not self.UnborNot:
        vec_data = self.vec_data
    else:
        vec_data = self.MAR_unbiasing()
    time = pd.DatetimeIndex(self.time)
    print("!!!!!!!!!!!!!!!\nVEC and time shapes\n", time.shape, vec_data.shape)
    # --- Leap-year handling: drop Feb 29 from both data and time axis
    if self.mod_ydays != 1:
        mask_366 = ~((time.month == 2) & (time.day == 29))
        # Fancy indexing already returns a copy; the original's extra
        # zeros allocation followed by a full assignment was redundant.
        full_data = vec_data[:, mask_366]
        time = time[mask_366]
    else:
        full_data = vec_data
    # --- Hourly -> daily reshape
    # NOTE(review): assumes an hourly timestep starting at hour 0 — confirm.
    hours_per_day = 24
    n_days = len(time) // hours_per_day
    full_data = full_data[:, :n_days * hours_per_day]  # drop trailing partial day
    data_daily = full_data.reshape(
        full_data.shape[0],
        n_days,
        hours_per_day
    )
    days = time.normalize()[::hours_per_day]  # one midnight stamp per day
    # --- Binary record format, compiled once
    rec = struct.Struct("<bbhdd")
    # --- Writing loop
    for i in tqdm(range(data_daily.shape[0]), desc="Writing binary data"):
        filename = f"{i+1}{extension}"
        with open(irm_dir + filename, "wb") as f:
            # Header
            f.write(struct.pack('<iiii', i + 1, 2, 5, n_days))
            for j in range(n_days):
                daily_vector = data_daily[i, j, :]
                f.write(rec.pack(
                    days[j].day,
                    days[j].month,
                    days[j].year,
                    round(np.nanmax(daily_vector), 3),
                    round(np.nanmin(daily_vector), 3)))
[docs]
def MAR_BinaryOutputs_temperatures_daily_text(self, dirout1, extension='.rain.dat'):
    """
    Write one TEXT file per polygon with daily max/min values
    (filename = polygon ID + extension).  (The original docstring said
    'binary'; this variant writes tab-separated text.)

    File layout:
      4 header lines : polygon ID, 2, 5, number of days
      per-day lines  : day<TAB>month<TAB>year<TAB>daily max<TAB>daily min
    """
    # --- Output directories (idempotent)
    os.makedirs(dirout1, exist_ok=True)
    irm_dir = dirout1 + "IRM/"
    os.makedirs(irm_dir, exist_ok=True)
    # --- Raw or unbiased data
    if not self.UnborNot:
        vec_data = self.vec_data
    else:
        vec_data = self.MAR_unbiasing()
    time = pd.DatetimeIndex(self.time)
    print("!!!!!!!!!!!!!!!\nVEC and time shapes\n", time.shape, vec_data.shape)
    # --- Leap-year handling: drop Feb 29 from both data and time axis
    if self.mod_ydays != 1:
        mask_366 = ~((time.month == 2) & (time.day == 29))
        # Fancy indexing already returns a copy; the original's extra
        # zeros allocation followed by a full assignment was redundant.
        full_data = vec_data[:, mask_366]
        time = time[mask_366]
    else:
        full_data = vec_data
    # --- Hourly -> daily reshape
    # NOTE(review): assumes an hourly timestep starting at hour 0 — confirm.
    hours_per_day = 24
    n_days = len(time) // hours_per_day
    full_data = full_data[:, :n_days * hours_per_day]  # drop trailing partial day
    data_daily = full_data.reshape(
        full_data.shape[0],
        n_days,
        hours_per_day
    )
    days = time.normalize()[::hours_per_day]  # one midnight stamp per day
    # --- Writing loop
    for i in tqdm(range(data_daily.shape[0]), desc="Writing binary data"):
        filename = f"{i+1}{extension}"
        with open(irm_dir + filename, "w") as f:
            f.write(f'{int(i+1)}\n2\n5\n{int(n_days)}\n')
            for j in range(n_days):
                daily_vector = data_daily[i, j, :]
                l2write = f'{int(days[j].day)}\t{int(days[j].month)}\t{int(days[j].year)}' + \
                          f'\t{np.nanmax(daily_vector):.3f}\t{np.nanmin(daily_vector):.3f}\n'
                f.write(l2write)
[docs]
def mask_subbasin(self,bassin_shp='',buff=100):
    """
    Flag which of the selected MAR grid points fall inside the polygon
    read from ``bassin_shp``.

    Heuristic inclusion test: a point is declared inside when polygon
    vertices are found on all four sides of it, searching within a strip
    of half-width ``buff`` metres along each axis (Lambert 72).

    Parameters
    ----------
    bassin_shp : str
        Path to the sub-basin shapefile (a single polygon is expected).
    buff : float
        Search strip half-width, in metres, around each grid point.

    Returns
    -------
    tuple
        ``(mask, xlb, ylb, xpo, ypo)``: 1/0 flag per selected grid point,
        the point coordinates in Lambert 72, and the polygon outline
        coordinates.
    """
    c = fiona.open(bassin_shp)
    # Outer ring of the first polygon in the shapefile -> (n, 2) array.
    coords = [np.array(poly['geometry']['coordinates'])
              for poly in c.values()]
    coords=coords[0][0,:,:]
    lb=pyproj.Proj(projparams='epsg:31370')  # WGS84 lon/lat -> Lambert 72
    # Only the grid points pre-selected by self.mask are tested.
    lon=self.lons[self.mask]
    lat=self.lats[self.mask]
    suiv=1  # debug counter of points passing the bounding-box test
    mask=np.zeros_like(lon)
    xlb,ylb=lb(lon,lat)
    print(lon,lat)
    for i in range(lon.shape[0]):
        "************************"
        "******Recherche en y****"
        "************************"
        "Zone de recherche en x ?"
        # Quick rejection: point outside the polygon's bounding box.
        if ylb[i]<np.min(coords[:,1]) or ylb[i]>np.max(coords[:,1]) or\
        xlb[i]<np.min(coords[:,0]) or xlb[i]>np.max(coords[:,0]) :
            continue
        suiv+=1
        "Recherche des intersections à y0"
        # Polygon vertices lying within `buff` m of the point along x.
        lon_test=(abs(coords[:,0]-xlb[i])<=buff)
        pt_lon=coords[:,0][lon_test]
        pt_lat=coords[:,1][lon_test]
        "sorting lons and lats"
        # ct.sort2: project helper — presumably sorts the first array and
        # reorders the second accordingly (TODO confirm in commontools).
        pt_lat,pt_lon=ct.sort2(pt_lat,pt_lon)
        # Count boundary vertices above / below the point.
        n_sup=pt_lat[pt_lat>ylb[i]].shape[0]
        n_dow=pt_lat[pt_lat<ylb[i]].shape[0]
        "Recherche des intersections à x0"
        # Same test along y: vertices within `buff` m of the point.
        lat_test=(abs(coords[:,1]-ylb[i])<=buff)
        pt_lon=coords[:,0][lat_test]
        pt_lat=coords[:,1][lat_test]
        pt_lon,pt_lat=ct.sort2(pt_lon,pt_lat)
        # Count boundary vertices right / left of the point.
        n_rig=pt_lon[pt_lon>xlb[i]].shape[0]
        n_lef=pt_lon[pt_lon<xlb[i]].shape[0]
        print('hajmo')
        # Inside when boundary points are found on all four sides.
        if (n_sup>0 and n_dow>0 and n_lef>0 and n_rig>0):
            print('in',suiv,'\n',pt_lon,'\n',pt_lat,'\n',n_sup,n_dow)
            mask[i]=1
        else:
            print('out',suiv,'\n',pt_lon,'\n',pt_lat,'\n',n_rig,n_lef)
        # mask[i,j]=suiv
        print('\n')
    # Leftovers from a debugging/plotting session; kept as in the original.
    cmap=cm.viridis
    bounds=np.arange(-.5,2.5,1)
    # fig=plt.figure(1);ax=fig.add_subplot()
    lon1=np.zeros([lon.shape[0],1]);lon1[:,0]=lon
    lat1=np.zeros([lon.shape[0],1]);lat1[:,0]=lat
    mask1=np.zeros([lon.shape[0],1]);mask1[:,0]=mask
    # plt.imshow(mask1)
    # plt.show()
    # color=['green','red','blue','purple']
    # scatter = ax.scatter(xlb, ylb,
    #                      c=mask,
    #                      cmap=matplotlib.colors.ListedColormap(color[:2]))
    # ax.scatter()
    # mapa,ma=ct.quick_map_plot2(lon1, lat1, mask1, bounds, cmap,ax)
    # cbar=plt.colorbar(mapa)
    # inProj=Proj(init='epsg:31370')
    # outProj = Proj(init='epsg:4326')
    # Polygon outline, returned for plotting by the callers.
    xpo=coords[:,0]
    ypo=coords[:,1]
    # ax.plot(xpo,ypo)
    # lons,lats=transform(inProj,outProj,xpo,ypo)
    # lons,lats=ma(lons,lats)
    # ma.plot(lons,lats)
    # plt.show()
    return(mask,xlb,ylb,xpo,ypo)
[docs]
def cumulated_volume(self,fp='/climato_tmp1/jbrajkovic/BUP_srv7/Drainage_basin_tabreux.shp'):
    """
    Plot the cumulated rainfall volume over the sub-basin described by
    shapefile ``fp``, with the per-timestep volume as bars on a secondary
    axis and an inset locating the MAR pixels inside the basin.

    Assumes hourly data with rainfall in l/m²/h — TODO confirm against
    the extraction settings.
    """
    "Area of the shapefile"
    data = gpd.read_file(fp)
    print(data['geometry'].head())
    # 100**3 == 1e6, the m² -> km² factor (odd spelling, numerically right).
    print('L\'aire du sous-bassin vaut '+'{:.2f}'.format(data.area[0]/(100**3))+' km²')
    "Average rain per hour for the pixels inside the subbasin"
    vec_data=self.vec_data
    ntimesteps=vec_data.shape[1]
    avg_rain_tstep=np.mean(vec_data,axis=1)
    # Pixels flagged (1) as inside the sub-basin polygon.
    msk,xlb,ylb,xpo,ypo=self.mask_subbasin(bassin_shp=fp)
    msk=msk==1
    avg_rain_tstep=avg_rain_tstep[msk]
    avg_rain=np.mean(avg_rain_tstep) # litres per square metre per hour
    aire=data.area[0]
    volum_per_hour=aire*avg_rain
    volume_tot=volum_per_hour*ntimesteps
    "plotting the evolution"
    time=pd.date_range(self.date_debut,self.date_fin,freq='H')[:ntimesteps]
    rain_basin=vec_data[msk]
    # Basin-average rain per timestep converted to a volume (litres).
    rain_basin=np.mean(rain_basin,axis=0)*aire
    rain_basin1=np.cumsum(rain_basin)
    fig, ax = plt.subplots()
    # ax=fig.add_subplot()
    ax.plot(time,rain_basin1,color='darkblue')
    txt='Volume cumulé sur la période = ' + '{:.0f}'.format(volume_tot)+ ' litres\n'+\
        ' = ' + '{:.0f}'.format(volume_tot/1000)+ ' m³\n'
    ax.text(.1,1.01,txt,transform=ax.transAxes)
    ax.set_ylim([-.15*np.max(rain_basin1),1.1*np.max(rain_basin1)])
    ax.set_ylabel('Volume cumulé (litres)')
    # Secondary axis: per-timestep volume as bars.
    ax2 = ax.twinx()
    # ax1.plot(x, y1, 'g-')
    ax2.bar(time, rain_basin,width=10)
    ax2.set_ylim([0,3*np.max(rain_basin)])
    ax2.set_ylabel('volume par pas de temps (litres)')
    # Inset axes: basin outline + pixel positions coloured by msk.
    ax1=fig.add_axes([.15,.60,.20,.20])
    ax1.axis('off')
    color=['green','red','blue','purple']
    scatter = ax1.scatter(xlb, ylb,
                          c=msk,
                          cmap=matplotlib.colors.ListedColormap(color[:2]),s=1.5)
    ax1.plot(xpo,ypo)
    plt.show()
[docs]
def precip_time(self,time_ch=datetime.datetime(2000,1,1,0),maille_num=5):
vec_data=self.vec_data
ntimesteps=vec_data.shape[1]
time=pd.date_range(self.date_debut,self.date_fin,freq='H')[:ntimesteps]
msk_time=time==time_ch
val2show=vec_data[maille_num-1,:][msk_time][0]
# print(val2show)
print('la valeur associée à la maille '+str(maille_num)+\
' à la date du '+str(time_ch)+' vaut ' +'{:.1f}'.format(val2show))
print('ok')
[docs]
def test_prepro(self,xor1,yor1,dimx,dimy,res=100):
    """
    Check whether volumes match those generated by WOLF on a regular grid.

    Builds a ``dimx`` x ``dimy`` target grid of resolution ``res`` metres
    (Lambert 72, origin ``(xor1, yor1)``) and computes, for each target
    cell, which MAR pixels overlap it and with which weight.

    Returns
    -------
    np.ndarray
        ``weights`` of shape ``(dimy, dimx, 4, 2)``: up to 4 contributing
        MAR pixels per cell as (pixel index, weight) pairs, NaN where unused.
    """
    "***********************"
    "Définition de la grille"
    "***********************"
    # Target grid of cell-centre coordinates.
    # dimx=int((xor2-xor1)/res+1)
    # dimy=int((yor2-yor1)/res+1)
    print(dimx,dimy)
    xgr=np.zeros([dimy,dimx])
    ygr=np.zeros([dimy,dimx])
    xgr[:,0]=xor1+res/2
    ygr[0,:]=yor1+res/2
    for i in range(xgr.shape[0]):
        xgr[i,:]=np.arange(xgr[i,0],(dimx-1)*res+xor1+res/2+res,res)
    for i in range(ygr.shape[1]):
        ygr[:,i]=np.arange(ygr[0,i],ygr[0,i]+(dimy-1)*res+res,+res)
    # fig = plt.figure();ax=fig.add_subplot()
    # ax.scatter(xgr,ygr,color='purple')
    fp=fp='/climato_tmp1/jbrajkovic/BUP_srv7/Drainage_basin_tabreux.shp'
    msk,xlb,ylb,xpo,ypo=self.mask_subbasin(bassin_shp=fp)
    # ax.plot(xpo,ypo)
    # print(xgr.shape)
    # "plotting both grids"
    inProj=Proj(init='epsg:31370')
    outProj = Proj(init='epsg:4326')
    lonsgr,latsgr=transform(inProj,outProj,xgr,ygr)
    cmap=cm.viridis
    # bounds=np.arange(np.min(xgr),np.max(xgr),1000)
    # ct.quick_map_plot(lonsgr, latsgr, latsgr, bounds, cmap)
    # plt.show()
    bounds=np.arange(3)
    lons=self.lons;lats=self.lats
    mask=self.mask
    # Checkerboard helper matrix (debug plotting only).
    m2pl=np.ones_like(lons)*2
    m2pl[::2,::2]=0
    m2pl[mask==False]=float('nan')
    # "***********************************"
    # "Looking for the intersecting pixels"
    # "***********************************"
    # Edge coordinates of the target cells (left/right/up/down).
    xgr_l=xgr-res/2
    xgr_r=xgr+res/2
    ygr_u=ygr+res/2
    ygr_d=ygr-res/2
    # # print(ygr_u[:7,0],ygr_d[:7,0],ygr[:7,0])
    # # print('\nhajmo\n')
    # # print(xgr_l[0,:7],xgr_r[0,:7],xgr[0,:7])
    lb=pyproj.Proj(projparams='epsg:31370')
    xlb,ylb=self.lons,self.lats
    # 'Coordonnées des sommets des pixels MAR'
    xlb,ylb=lb(xlb,ylb)
    xlb=xlb[mask];ylb=ylb[mask]
    # ax.scatter(xlb,ylb,color='red')
    # plt.show()
    # Corner coordinates (ul, ur, dr, dl) of every selected MAR pixel.
    summits_x,summits_y=self.MAR_summits()
    summits_x=summits_x[mask,:];summits_y=summits_y[mask,:]
    xlb_ul=summits_x[:,0];ylb_ul=summits_y[:,0];
    xlb_ur=summits_x[:,1];ylb_ur=summits_y[:,1];
    xlb_dr=summits_x[:,2];ylb_dr=summits_y[:,2];
    xlb_dl=summits_x[:,3];ylb_dl=summits_y[:,3];
    # plt.scatter(xlb_ul,ylb_ul,color='pink')
    # plt.scatter(xlb_dl,ylb_dl,color='orange')
    # plt.scatter(xgr,ygr,s=.1)
    # plt.show()
    'Recherche des pixels Wolf aux multiples intersections'
    # ninter[y, x, k]: index of the MAR pixel containing test point k of
    # each target cell (k=0 centre, k=1..4 the four corners).
    ninter=np.zeros_like(xgr)*float('nan')
    ninter=np.zeros([xgr.shape[0],xgr.shape[1],5])*float('nan')
    intersecting_pixels=np.zeros([xgr.shape[0],ygr.shape[0],16])*float('nan')
    intersecting_pixels_s=np.zeros([xgr.shape[0],ygr.shape[0],16,2])*float('nan')
    for i in range(xlb_ul.shape[0]):
        # Four corners of MAR pixel i.
        cots_x=np.array([xlb_ul[i],xlb_ur[i],xlb_dr[i],xlb_dl[i]])
        cots_y=np.array([ylb_ul[i],ylb_ur[i],ylb_dr[i],ylb_dl[i]])
        # Sort corners 1..3 by increasing x (corner 0 left in place);
        # `k=0;break` restarts the scan after every swap.
        k=1
        while k<4:
            j=k+1
            while j<4:
                if cots_x[j]<cots_x[k]:
                    re=cots_x[k]
                    cots_x[k]=cots_x[j]
                    cots_x[j]=re
                    re=cots_y[k]
                    cots_y[k]=cots_y[j]
                    cots_y[j]=re
                    k=0;break
                j+=1
            k+=1
        # print(cots_x,cots_y)
        # Decompose the (possibly rotated) quadrilateral into 3 x-bands,
        # each bounded by edges (x1,y1)-(x2,y2) and (x3,y3)-(x4,y4) over
        # [xbg, xbd] — geometry inferred from the index pattern, confirm.
        for j in range(3):
            if j==0:
                x1=cots_x[0];y1=cots_y[0]
                x2=cots_x[1];y2=cots_y[1]
                x3=x1;y3=y1
                x4=cots_x[2];y4=cots_y[2]
                xbg=x1
                xbd=np.min(np.array([x2,x4]))
            elif j==1:
                x1=cots_x[1];y1=cots_y[1]
                x2=cots_x[-1];y2=cots_y[-1]
                x3=cots_x[0];y3=cots_y[0]
                x4=cots_x[2];y4=cots_y[2]
                xbg=x1
                xbd=np.min(np.array([x2,x4]))
            elif j==2:
                x1=cots_x[1];y1=cots_y[1]
                x2=cots_x[-1];y2=cots_y[-1]
                x3=cots_x[2];y3=cots_y[2]
                x4=cots_x[-1];y4=cots_y[-1]
                xbg=x3
                xbd=np.max(np.array([x2,x4]))
            # print(j+1)
            # print(np.array([x1,x2,x3,x4]))
            # print(np.array([y1,y2,y3,y4]))
            # print('\n')
            # Test the 5 characteristic points of every target cell.
            for k in range(5):
                if k==0:
                    xinter=np.array(xgr)
                    yinter=np.array(ygr)
                elif k==1:
                    xinter=np.array(xgr_l)
                    yinter=np.array(ygr_u)
                elif k==2:
                    xinter=np.array(xgr_r)
                    yinter=np.array(ygr_u)
                elif k==3:
                    xinter=np.array(xgr_r)
                    yinter=np.array(ygr_d)
                elif k==4:
                    xinter=np.array(xgr_l)
                    yinter=np.array(ygr_d)
                # Point between the two band edges and within [xbg, xbd].
                e1=y1+(xinter-x1)*(y2-y1)/(x2-x1)
                e2=y3+(xinter-x3)*(y4-y3)/(x4-x3)
                ma1=(((e1>=yinter)&(yinter>=e2))|((e1<=yinter)&(yinter<=e2)))
                ma1=ma1&((xinter>=xbg)&(xinter<=xbd))
                ninter[ma1,k]=i
    # weights[y, x, c, :] = (MAR pixel index, fraction of the 5 test
    # points falling in that pixel) for up to 4 contributing pixels c.
    weights=np.zeros([ninter.shape[0],ninter.shape[1],4,2])*float('nan')
    mod_pix=st.mode(ninter,axis=2,nan_policy='omit')[0]
    nrep=st.mode(ninter,axis=2)[1]
    for i in range(ninter.shape[0]):
        for j in range(ninter.shape[1]):
            v2c=np.array(ninter[i,j,:])
            ov=np.array(v2c[v2c!=mod_pix[i,j]])  # pixels other than the modal one
            ov=ov[np.isfinite(ov)]
            ref_score=v2c[np.isfinite(v2c)].shape[0]
            tv=v2c[np.isfinite(v2c)]
            if ov.shape[0]==0 and tv.shape[0]!=0:
                # Single contributing pixel: full weight.
                weights[i,j,0,0]=mod_pix[i,j]
                weights[i,j,0,1]=1
            if ov.shape[0]!=0 and tv.shape[0]!=0:
                #other vals
                # Modal pixel first, then each distinct other pixel with
                # a weight proportional to its number of hits.
                nvm=v2c[v2c==mod_pix[i,j]].shape[0]
                weights[i,j,0,0]=mod_pix[i,j]
                weights[i,j,0,1]=nvm/ref_score
                vad=np.zeros(0)
                suiv=0
                for k in range(ov.shape[0]):
                    if ov[k] in vad : continue
                    suiv+=1
                    nvm=v2c[v2c==ov[k]].shape[0]
                    weights[i,j,int(suiv),0]=float(ov[k])
                    # print('hajde hajduce',weights[i,j,0,0],ov[k],nvm,suiv, weights[i,j,int(suiv),0])
                    weights[i,j,int(suiv),1]=float(nvm/ref_score)
                    vad=np.append(vad,ov[k])
    # plt.imshow(weights[:,:,0,1])
    # plt.show()
    return(weights)
[docs]
def test_prepro_mat(self, xor1, yor1, dimx, dimy, res=100,
                    sdate=datetime.datetime(2020, 1, 1, 0),
                    edate=datetime.datetime(2020, 1, 1, 0),
                    out_fn='',
                    pref_out=''):
    """
    Save to ``.npy`` the data interpolated onto the ``res``-metre grid
    between ``sdate`` and ``edate`` (inclusive).

    The weights come from :meth:`test_prepro`; each output cell is the
    weighted sum of its contributing MAR pixel series.

    Parameters
    ----------
    xor1, yor1 : float
        Grid origin (Lambert 72).
    dimx, dimy : int
        Grid dimensions.
    res : float
        Grid resolution in metres.
    sdate, edate : datetime.datetime
        Time window to extract (assumes hourly data).
    out_fn, pref_out : str
        Output directory/prefix; the dates are appended to the filename.
    """
    # FIXED: forward `res` — the original ignored the parameter and always
    # used test_prepro's default resolution.
    weight_mat = self.test_prepro(xor1, yor1, dimx, dimy, res=res)
    vec_data = self.vec_data
    ntimesteps = vec_data.shape[1]
    time = pd.date_range(self.date_debut, self.date_fin, freq='H')[:ntimesteps]
    print(vec_data.shape)
    # Restrict the series to the requested window.  (An unused `dim2`
    # variable computing the same count was removed.)
    time_mask = (time >= sdate) & (time <= edate)
    vec_data = vec_data[:, time_mask]
    out_mat = np.zeros([weight_mat.shape[0], weight_mat.shape[1],
                        vec_data.shape[1]]) * float('nan')
    # Weighted sum of the contributing MAR pixels for every output cell;
    # entries in weight_mat are NaN-terminated (pixel index, weight) pairs.
    for i in range(weight_mat.shape[0]):
        for j in range(weight_mat.shape[1]):
            k = 0
            while k < weight_mat.shape[2] and np.isfinite(weight_mat[i, j, k, 0]):
                contrib = vec_data[int(weight_mat[i, j, k, 0]), :] * weight_mat[i, j, k, 1]
                if k == 0:
                    out_mat[i, j, :] = contrib
                else:
                    out_mat[i, j, :] = out_mat[i, j, :] + contrib
                k += 1
    date1 = sdate.strftime("%Y-%m-%d")
    date2 = edate.strftime("%Y-%m-%d")
    out_fn = out_fn + pref_out + '_' + date1 + '_' + date2 + '.npy'
    np.save(out_fn, out_mat)
[docs]
def test_vect(self):
    """Visual sanity check: plot one MAR pixel polygon over the basin outline."""
    shp_path = '/climato_tmp1/jbrajkovic/BUP_srv7/Drainage_basin_tabreux.shp'
    # Area of the shapefile
    basin = gpd.read_file(shp_path)
    print(basin['geometry'].head())
    print('L\'aire du sous-bassin vaut '+'{:.2f}'.format(basin.area[0]/(100**3))+' km²')
    # Basin outline and pixel summits restricted to the selection mask.
    msk, xlb, ylb, xpo, ypo = self.mask_subbasin(bassin_shp=shp_path)
    sx, sy = self.MAR_summits()
    inside = self.mask
    sx = sx[inside, :]
    sy = sy[inside, :]
    pixel = 40  # arbitrary pixel chosen for the plot
    values = self.vec_data
    for idx, row in enumerate(values, start=1):
        print(idx, row[0])
    fig = plt.figure()
    ax = fig.add_subplot()
    ax.plot(xpo, ypo)
    ax.plot(sx[pixel, :], sy[pixel, :])
    ax.set_title(str(values[pixel, :2]) + ' mm/h')
    plt.show()
[docs]
def compute_coverage_and_coords(self, fn1, fn2, epsg=31370):
    """
    Compute coverage fraction and return coordinates of intersected polygons.

    Returns
    -------
    fractions : np.ndarray
        Fraction (0–1) of each polygon in fn2 covered by fn1
    clipped_coords : list
        List of (x, y) coordinate arrays for each intersected polygon
        (None if no intersection)
    """
    # 1. Load both layers.
    layer_mask = gpd.read_file(fn1)
    layer_grid = gpd.read_file(fn2)
    # 2. Harmonise CRS: assign the default EPSG when missing, then reproject.
    if layer_mask.crs is None:
        layer_mask = layer_mask.set_crs(epsg=epsg)
    if layer_grid.crs is None:
        layer_grid = layer_grid.set_crs(epsg=epsg)
    layer_mask = layer_mask.to_crs(epsg)
    layer_grid = layer_grid.to_crs(epsg)
    # 3. Repair invalid geometries (zero-width buffer trick).
    layer_mask["geometry"] = layer_mask.buffer(0)
    layer_grid["geometry"] = layer_grid.buffer(0)
    # 4. Dissolve the mask layer into a single geometry.
    merged = unary_union(layer_mask.geometry)
    # 5. Intersection areas -> coverage fractions per grid polygon.
    pieces = layer_grid.geometry.intersection(merged)
    fractions = (pieces.area / layer_grid.geometry.area).fillna(0).to_numpy()

    # 6. Exterior coordinates of each clipped geometry.
    def _ring(poly):
        # One (x, y) pair of arrays from a polygon exterior.
        ring_x, ring_y = poly.exterior.xy
        return np.array(ring_x), np.array(ring_y)

    clipped_coords = []
    for geom in pieces:
        if geom.is_empty:
            clipped_coords.append(None)
        elif geom.geom_type == "MultiPolygon":
            clipped_coords.append([_ring(part) for part in geom.geoms])
        elif geom.geom_type == "Polygon":
            clipped_coords.append(_ring(geom))
        else:
            # Lines/points from degenerate intersections carry no area.
            clipped_coords.append(None)
    return fractions, clipped_coords
[docs]
def check_input_volume(self,fn1,fn2,epsg=31370,dir_out=None):
    """
    Plot the coverage-weighted basin-average series of self.vec_data
    (cumulative + raw for non-temperature variables) and optionally save
    the figure to ``dir_out``.

    Parameters
    ----------
    fn1 : str
        Shapefile of the basin (coverage mask).
    fn2 : str
        Shapefile of the extraction grid polygons.
    epsg : int
        CRS passed along (NOTE: not forwarded to compute_coverage_and_coords).
    dir_out : str or None
        Directory where the PNG is written; no file saved when None.
    """
    fracts, clipped_coords = self.compute_coverage_and_coords(fn1, fn2)
    sum_watershed=0  # NOTE(review): never used below
    # Coverage-weighted average over the polygons that intersect the basin.
    output_vec=np.zeros(self.vec_data.shape[-1])
    for i in range(self.vec_data.shape[-1]):
        output_vec[i]=np.nansum((self.vec_data[:,i]*fracts)[fracts>0])/np.sum(fracts[fracts>0])
    fig=plt.figure(figsize=(8,5))
    # NOTE(review): `from_epsg(...)` (a fiona CRS helper) passed as a
    # matplotlib `projection=` — matplotlib expects a registered projection
    # name/axes class (cartopy CRS intended?); confirm this runs.
    cax=fig.add_axes([.98,.1,.5,.5],projection=from_epsg(31370));cax.axis('off')
    for coords in clipped_coords:
        if coords is None:
            continue
        # If MultiPolygon
        if isinstance(coords, list):
            for x, y in coords:
                # NOTE(review): same concern for `transform=from_epsg(...)`.
                cax.plot(x, y, color="red",transform=from_epsg(31370))
        else:
            x, y = coords
            cax.plot(x, y, color="red",transform=from_epsg(31370))
    ax=fig.add_subplot()
    time=self.time
    time1=time
    # NOTE(review): other methods build the non-Feb-29 mask with '|'
    # ((day != 29) | (month != 2)); the '&' here marks almost every
    # timestep and np.delete then removes them — verify intent.
    mask_366 = (time.day != 29) & (time.month != 2)
    # cax=fig.add_axes([.2,.6,.15,.15])
    if self.mod_ydays==0:
        time1 = np.delete(time, mask_366)
    label=self.var
    if self.var != 'ST2c':
        # Non-temperature variables: cumulative curve + raw series on an
        # inverted secondary axis.
        label += ' cumulated'
        output_vec1 = np.cumsum(output_vec)
        # Create secondary y-axis
        ax1 = ax.twinx()
        # Put primary axis in front
        ax.set_zorder(2)
        ax1.set_zorder(1)
        # Remove background of top axis so it doesn't hide lines
        ax.patch.set_visible(False)
        # Plot cumulative on primary axis
        ax1.plot(time1, output_vec, color='tab:red', label=self.var,zorder=1)
        ax.plot(time1, output_vec1, color='tab:blue', label='Cumulative',lw=2,zorder=2)
        # Plot original on secondary axis
        ax1.set_ylim([np.nanmin(output_vec),2.5*np.nanmax(output_vec)])
        # Invert secondary axis
        ax1.invert_yaxis()
        # Optional: better axis labeling
        # ax.set_ylabel(label)
        ax1.set_ylabel(self.var)
    else:
        ax.plot(time1, output_vec)
        ax.set_ylabel(label)
    if dir_out is not None:
        plt.savefig(f'{dir_out}Check_volumes_{self.var}.png',bbox_inches='tight',dpi=300)
"Test de l'objet"
if __name__ == "__main__":
    # ------------------------------------------------------------------
    # Driver script: extract one MAR variable over the Ourthe rectangle
    # and dump it in the WOLF input formats implemented above.
    # ------------------------------------------------------------------
    dir_hist='/phypc11_tmp3/MARv3.14/MARv3.14-EUi-MIROC6-5km-ssp585/'
    dir_stock='/home/josip/Desktop/scripts/Netcdfs/test_writing/'
    # MAR simulation directory prefixes, one per driving GCM.
    dir_ins=['MARv3.14-EUh-MPI-ESM1-2-HR-5km-',
             'MARv3.14-EUi-MIROC6-5km-',
             'MARv3.14-EUm-EC-Earth3-Veg-5km-',
             'MARv3.14-EUk-NorESM2-MM-5km-',
             'MARv3.14-EUq-CMCC-CM2-SR5-5km-',
             'MARv3.14-EUl-IPSL-CM6A-LR-5km-'
             ]
    mod_names=['MPI-ESM1',
               'MIROC6',
               'EC3',
               'NorESM2',
               'CMCC-CM2-SR5',
               'IPSL'
               ]
    mod_racs=['MPI','MIR','EC3','NOR','CMC','IPSL']
    scens=['ssp126','ssp245','ssp370','ssp585']
    "dates entre lesquelles sélectionner les données (Heures,jour,mois,annee)"
    "code à retravailler si simulations futures avec pas de temps inférieur à l'heure"
    date_debut1=datetime.datetime(1982,1,1,0)
    date_fin1=datetime.datetime(2023,12,31,23)
    variant='_classical_seasonal'
    hourly_exponent=1.75
    GCM='ERA5'
    # ERA5-driven runs live in a different tree than GCM-driven ones.
    if 'ERA' in GCM:
        GCM=f'{GCM}corr{hourly_exponent}{variant}'
        dir_stock=f'/srv5_tmp2/jbrajkovic/Unbiasing/{GCM}/'
    else:
        dir_stock=f'/srv5_tmp1/jbrajkovic/Unbiasing/{GCM}{variant}/'
    # 365-day calendar models (no Feb 29).
    if 'CMCC-CM' in GCM or 'NorESM2' in GCM:
        myd=0
    else:
        myd=1
    var2extract='ST2c'
    # Output extension depends on the variable family
    # (rain / evapotranspiration / temperature).
    if var2extract=='Precip' or var2extract=='MBRO3c':
        ext='.rain.dat'
    elif var2extract=='MBEPc':
        ext='.etp.dat'
    elif var2extract=='ST2c':
        ext='.temp'
    dir_pref = 'precip' if var2extract=='Precip' or var2extract=='MBRO3c' else 'evapo'
    dirout=f"/srv7_tmp1/jbrajkovic/These/forWOLF/input_data_{GCM}{variant}_{date_debut1.year}_{date_fin1.year}_Ourthe/"#-MPI_1981-2010/" #dossier outputs
    filenameshp="Grid_radar.shp" #nom du shapefile en sortie
    # NOTE(review): `format` shadows the builtin and is unused below.
    format = "<bbhbbbd"
    "Définition d'un rectangle"
    # Extraction rectangle corners, Lambert 72 coordinates.
    xs=np.array([200000,200000,
                 272000,272000.])
    ys=np.array([63000,152000,
                 152000,63000])
    dat_types=[1,1,1,0,0,1]
    sc=3
    # Single-model run (MIROC6 slot); the multi-model/scenario loop is
    # kept commented out below.
    for mod in range(2,3):
        # for sc in range(4):
        # dirin=dir_stock+dir_ins[mod]+scens[sc]+'/'
        dirin=dir_stock
        print(dirin)
        objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
                                 date_debut=date_debut1,
                                 date_fin=date_fin1,
                                 directory=dirin,
                                 directory_hist_sim=dir_hist,
                                 var=var2extract,
                                 var_unb='Precip',
                                 UnborNot=0,
                                 syu=date_debut1.year,
                                 eyu=date_fin1.year,
                                 mod_ydays=myd,#dat_types[mod]#leap year or not
                                 model_name='ERA5',
                                 generate_quantiles=0)
        print('ok')
        # if date_fin1.year>2015:
        #     dirout1=dirout+'-'+mod_racs[mod]+'_'+scens[sc]+'_'+str(date_debut1.year)+'-'+\
        #         str(date_fin1.year)+'/'
        # else:
        #     dirout1=dirout+'-'+mod_racs[mod]+'_'+str(date_debut1.year)+'-'+\
        #         str(date_fin1.year)+'/'
        print(dir_stock)
        dirout1=dirout
        # Shapefile of the extraction grid, then the data dump itself.
        objet_MAR.MAR_shapefile(filenameshp,dirout1)
        if var2extract=='ST2c':
            objet_MAR.MAR_BinaryOutputs_temperatures_daily_text(dirout1,extension=ext)
        else:
            objet_MAR.MAR_BinaryOutputs_V2(dirout1,extension=ext)
        # if var2extract != 'ST2c':
        # Sanity plot: basin-averaged series + cumulative volume.
        objet_MAR.check_input_volume('/srv7_tmp1/jbrajkovic/BV_ourthe.shp',f'{dirout1}/Grid/Grid_radar.shp',
                                     epsg=31370,dir_out=dirout1)
        # objet_MAR.MAR_TextOutputs(dirout1,extension='.rain.txt')
        # objet_MAR.cumulated_volume()
        # objet_MAR.test_prepro(206000, 65000, 612, 606)
        # objet_MAR.test_prepro_mat(207000, 66400, 612, 606,
        #                           sdate=datetime.datetime(1999,11,9,0),
        #                           edate=datetime.datetime(1999,11,13,0),
        #                           out_fn=dirout1,
        #                           pref_out='evapo')
        # objet_MAR.precip_time()
        # objet_MAR.test_vect()