print('sys')
import sys, glob, os
print('json')
import json
# --- calculations
print('numpy')
import numpy as np
print('scipy')
from scipy import interpolate
from scipy.spatial import cKDTree
# --- reading data 
print('netcdf datetime')
from netCDF4 import Dataset, num2date, date2num
import datetime
# --- plotting
print('matplotlib')
import matplotlib.pyplot as plt
import matplotlib
# --- debugging
print('mybreak')
#from ipdb import set_trace as mybreak

print('pandas')
import pandas as pd
print('xarray')
import xarray as xr
print('done xarray')

"""
pyicon
30
31
#  icon_to_regular_grid
#  icon_to_section
nbruegge's avatar
nbruegge committed
32
33
34
  apply_ckdtree
  ckdtree_hgrid
  ckdtree_section
35
  calc_ckdtree
nbruegge's avatar
nbruegge committed
36
37
  haversine_dist
  derive_section_points
38
39
40
41
42
  timing
  conv_gname
  identify_grid
  crop_tripolar_grid
  crop_regular_grid
nbruegge's avatar
nbruegge committed
43
44
45
  get_files_of_timeseries
  get_varnames
  get_timesteps
46
47
48
49
50
51
52
53
54

  ?load_data
  ?load_grid

  ?hplot
  ?update_hplot
  ?vplot
  ?update_vplot

nbruegge's avatar
nbruegge committed
55
  #IconDataFile
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79

  IconData
  IP_hor_sec_rect

  QuickPlotWebsite

  IDa: Icon data set (directory of files)
    - info about tsteps
    - info about vars
    - info about grid
    - IGr: Icon grid
    - IVa: Icon variable if loaded
  IIn: Icon interpolator class

  IPl: Icon plot class

IDa = pyic.IconData(fpath or path)
IDa.load_grid()
IDa.show()

IPl = pyic.hplot(IDa, 'var', iz, tstep, IIn)

"""

class pyicon_configure(object):
  def __init__(self, fpath_config):
    """ Load a json configuration file and make each of its entries available as an attribute. """
    with open(fpath_config) as file_json:
      Dsettings = json.load(file_json)
    for key in Dsettings.keys():
      setattr(self, key, Dsettings[key])
    return

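# Illustrative usage sketch for pyicon_configure (the file name and the "path_data"
# entry are only assumed examples of what such a config json might contain):
#
#   conf = pyicon_configure('pyicon_config.json')
#   print(conf.path_data)
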
#def icon_to_regular_grid(data, shape, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  data_interpolated = data_interpolated.reshape(shape)
#  return data_interpolated
#
#def icon_to_section(data, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  return data_interpolated

"""
Routines to apply interpolation weights
"""
def apply_ckdtree_base(data, inds, distances, radius_of_influence=1000e3):
  """ Apply pre-computed ckdtree indices and distances to interpolate data onto the target points. """
  if distances.ndim == 1:
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    if data.ndim==1:
      if isinstance(data, xr.core.dataarray.DataArray):
        data_interpolated = data.load()[inds]
      else:
        data_interpolated = data[inds]
      data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      if isinstance(data, xr.core.dataarray.DataArray):
        data_interpolated = data.load()[:,inds]
      else:
        data_interpolated = data[:,inds]
      data_interpolated[:,distances>=radius_of_influence] = np.nan
  else:
    #raise ValueError("::: distances.ndim>1 is not properly supported yet. :::")
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    weights = 1.0 / distances**2
    if data.ndim==1:
      data_interpolated = np.ma.sum(weights * data[inds], axis=1) / np.ma.sum(weights, axis=1)
      #data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      data_interpolated = np.ma.sum(weights[np.newaxis,:,:] * data[:,inds], axis=2) / np.ma.sum(weights[np.newaxis,:,:], axis=2)
      #data_interpolated[:,distances>=radius_of_influence] = np.nan
  data_interpolated = np.ma.masked_invalid(data_interpolated)
  return data_interpolated

def apply_ckdtree(data, fpath_ckdtree, mask=None, coordinates='clat clon', radius_of_influence=1000e3):
  """
  * credits
    function modified from pyfesom (Nikolay Koldunov)
  """
  ddnpz = np.load(fpath_ckdtree)
  #if coordinates=='clat clon':
  if ('clon' in coordinates) or (coordinates==''):
    distances = ddnpz['dckdtree_c']
    inds = ddnpz['ickdtree_c'] 
  #elif coordinates=='elat elon':
  elif 'elon' in coordinates:
    distances = ddnpz['dckdtree_e']
    inds = ddnpz['ickdtree_e'] 
  #elif coordinates=='vlat vlon':
  elif 'vlon' in coordinates:
    distances = ddnpz['dckdtree_v']
    inds = ddnpz['ickdtree_v'] 
  else:
    raise ValueError('::: Error: Unsupported coordinates: %s! ::: ' % (coordinates))

  if mask is not None:
    #if data.ndim==1:
    #  data = data[mask]
    #elif data.ndim==2:
    #  data = data[:,mask]
    if inds.ndim==1:
      inds = inds[mask]
      distances = distances[mask]
    elif inds.ndim==2:
      #raise ValueError('::: Warning: This was never checked! Please check carefully and remove this warning.:::')
      inds = inds[:,mask]
      distances = distances[:,mask]

  data_interpolated = apply_ckdtree_base(data, inds, distances, radius_of_influence)
  return data_interpolated

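# Minimal usage sketch for apply_ckdtree (hedged; the ckdtree file name is only a
# placeholder). `data` is a 1D array on ICON cells; the result is reshaped with the
# lon/lat vectors stored in the same npz file:
#
#   ddnpz = np.load(fpath_ckdtree)
#   datai = apply_ckdtree(data, fpath_ckdtree, coordinates='clat clon')
#   datai = datai.reshape(ddnpz['lat'].size, ddnpz['lon'].size)
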
def interp_to_rectgrid(data, fpath_ckdtree, 
                       lon_reg=None, lat_reg=None,             # for new way of cropping
                       indx='all', indy='all', mask_reg=None,  # for old way of cropping
                       coordinates='clat clon'):
  """ Interpolate data from the ICON triangular grid to a rectangular grid using a pre-computed ckdtree file. """
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  # --- old way of cropping
  if not isinstance(indx, str):
    lon = lon[indx]
    lat = lat[indy]
  # --- prepare cropping the data to a region
  if lon_reg is not None:
    indx = np.where((lon>=lon_reg[0]) & (lon<lon_reg[1]))[0]
    indy = np.where((lat>=lat_reg[0]) & (lat<lat_reg[1]))[0]
    Lon, Lat = np.meshgrid(lon, lat) # full grid
    lon = lon[indx]
    lat = lat[indy]
    ind_reg = ((Lon>=lon_reg[0]) & (Lon<lon_reg[1]) & (Lat>=lat_reg[0]) & (Lat<lat_reg[1])).flatten()
    mask_reg = ind_reg
    Lon, Lat = np.meshgrid(lon, lat) # cropped grid
  datai = apply_ckdtree(data, fpath_ckdtree, mask=mask_reg, coordinates=coordinates)
  if datai.ndim==1:
    datai = datai.reshape(lat.size, lon.size)
  else:
    datai = datai.reshape([data.shape[0], lat.size, lon.size])
  datai[datai==0.] = np.ma.masked
  return lon, lat, datai

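# Hedged usage sketch for interp_to_rectgrid: `data` is a (ncells,) or (nz, ncells)
# array on the ICON cell grid and fpath_ckdtree points to a rectangular-grid ckdtree
# file created by ckdtree_hgrid (region values are placeholders):
#
#   lon, lat, datai = interp_to_rectgrid(data, fpath_ckdtree,
#                                        lon_reg=[-80., 0.], lat_reg=[20., 60.])
#   plt.pcolormesh(lon, lat, datai)
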
def interp_to_rectgrid_xr(arr, fpath_ckdtree, 
                          lon_reg=None, lat_reg=None,
                          coordinates='clat clon',
                          radius_of_influence=1000e3,
                          compute=True,
                          mask_out_of_range=True,
                          mask_out_of_range_before=False,
                         ):

  # --- load interpolation indices
  ds_ckdt = xr.open_dataset(fpath_ckdtree)
  if ('clon' in coordinates) or (coordinates==''):
    inds = ds_ckdt.ickdtree_c
    dist = ds_ckdt.dckdtree_c
  elif 'elon' in coordinates:
    inds = ds_ckdt.ickdtree_e
    dist = ds_ckdt.dckdtree_e
  elif 'vlon' in coordinates:
    inds = ds_ckdt.ickdtree_v
    dist = ds_ckdt.dckdtree_v
  else:
    raise ValueError('::: Error: Unsupported coordinates: %s! ::: ' % (coordinates))
  lon = ds_ckdt.lon.compute().data
  lat = ds_ckdt.lat.compute().data
  if lon_reg is not None:
    indx = np.where((lon>=lon_reg[0]) & (lon<lon_reg[1]))[0]
    indy = np.where((lat>=lat_reg[0]) & (lat<lat_reg[1]))[0]
    lon = lon[indx]
    lat = lat[indy]
    dist = dist.isel(lon=indx, lat=indy)
    inds = inds.isel(lon=indx, lat=indy)
    
  #if lon_reg is not None:
  #  indx = np.where((lon>=lon_reg[0]) & (lon<lon_reg[1]))[0]
  #  indy = np.where((lat>=lat_reg[0]) & (lat<lat_reg[1]))[0]
  #  Lon, Lat = np.meshgrid(lon, lat) # full grid
  #  lon = lon[indx]
  #  lat = lat[indy]
  #  ind_reg = ((Lon>=lon_reg[0]) & (Lon<lon_reg[1]) & (Lat>=lat_reg[0]) & (Lat<lat_reg[1])).flatten()
  #  mask_reg = ind_reg
  #  Lon, Lat = np.meshgrid(lon, lat) # cropped grid
  dist = dist.compute()
  inds = inds.compute().data.flatten()

  # --- interpolate by nearest neighbor
  arr_interp = arr.isel(ncells=inds)

  # --- reshape
  arr_interp = arr_interp.assign_coords(ncells=pd.MultiIndex.from_product([lat, lon], names=("lat", "lon"))
                                ).unstack()

  # --- mask values where nearest neighbor is too far away (before compute)
  # (masking after compute seems to be faster, see below) FIXME check that!
  if mask_out_of_range_before:
    arr_interp = arr_interp.where(dist<radius_of_influence)

  # --- compute data otherwise a lazy object is returned
  if compute:
    arr_interp = arr_interp.compute()

  # --- mask values where nearest neighbor is too far away
  # (doing this after compute seems to be faster) FIXME check that!
  if mask_out_of_range:
    arr_interp = arr_interp.where(dist<radius_of_influence)

  return  arr_interp

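# Hedged usage sketch for interp_to_rectgrid_xr, assuming `ds` is an xarray dataset
# with an 'ncells' dimension and fpath_ckdtree is a ckdtree file readable by
# xr.open_dataset (the variable name 'to' is only an example):
#
#   arr_interp = interp_to_rectgrid_xr(ds['to'].isel(time=0), fpath_ckdtree,
#                                      lon_reg=[-80., 0.], lat_reg=[20., 60.])
#   arr_interp.plot()
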
def interp_to_section(data, fpath_ckdtree, coordinates='clat clon'):
  ddnpz = np.load(fpath_ckdtree)
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 
  datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
  datai[datai==0.] = np.ma.masked
  return lon_sec, lat_sec, dist_sec, datai

""" 
Routines for zonal averaging
"""
def zonal_average(fpath_data, var, basin='global', it=0, fpath_fx='', fpath_ckdtree=''):

  for fp in [fpath_data, fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  f = Dataset(fpath_data, 'r')
  nz = f.variables[var].shape[1]
  coordinates = f.variables[var].coordinates
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    #print('k = %d/%d'%(k,nz))
    # --- load data
    data = f.variables[var][it,k,:]
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    if isinstance(data, np.ma.core.MaskedArray):
      data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  f.close()
  return lat_sec, data_zave

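# Hedged usage sketch for zonal_average (all file names are placeholders; `depth`
# would come from the data or fx file):
#
#   lat_sec, data_zave = zonal_average('ocean_data.nc', 'to', basin='atl',
#                                      fpath_fx='ocean_fx.nc',
#                                      fpath_ckdtree='r2b4_res1.00_180W-180E_90S-90N.npz')
#   plt.pcolormesh(lat_sec, depth, data_zave)
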
def zonal_average_3d_data(data3d, basin='global', it=0, coordinates='clat clon', fpath_fx='', fpath_ckdtree=''):
  """ Like zonal_average, but the data array itself is passed instead of a path to the data. This only works if the whole data array fits into memory.
  """

  for fp in [fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  nz = data3d.shape[0]
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    data = 1.*data3d[k,:]
    #print('k = %d/%d'%(k,nz))
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    if isinstance(data, np.ma.core.MaskedArray):
      data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  return lat_sec, data_zave

def zonal_average_atmosphere(data3d, ind_lev, fac, fpath_ckdtree='', coordinates='clat clon',):
  icall = np.arange(data3d.shape[1],dtype=int)
  datavi = data3d[ind_lev,icall]*fac+data3d[ind_lev+1,icall]*(1.-fac)
  lon, lat, datavihi = interp_to_rectgrid(datavi, fpath_ckdtree, coordinates=coordinates)
  data_zave = datavihi.mean(axis=2)
  return lat, data_zave

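# Hedged sketch: zonal_average_atmosphere can be combined with
# calc_vertical_interp_weights (defined further below), which provides ind_lev and fac:
#
#   icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)
#   lat, data_zave = zonal_average_atmosphere(data3d, ind_lev, fac,
#                                             fpath_ckdtree=fpath_ckdtree)
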
def zonal_section_3d_data(data3d, fpath_ckdtree, coordinates):
  """
  (
   lon_sec, lat_sec, dist_sec, data_sec 
  ) = pyic.zonal_section_3d_data(tbias, 
    fpath_ckdtree=path_ckdtree+'sections/r2b4_nps100_30W80S_30W80N.npz',
    coordinates='clat clon')
  """
  # --- load ckdtree
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 

  nz = data3d.shape[0]
  data_sec = np.ma.zeros((nz,dist_sec.size))
  for k in range(nz):
    data_sec[k,:] = apply_ckdtree(data3d[k,:], fpath_ckdtree, coordinates=coordinates)
  return lon_sec, lat_sec, dist_sec, data_sec

def lonlat2str(lon, lat):
  if lon<0:
    lon_s = '%gW'%(-lon)
  else:
    lon_s = '%gE'%(lon)
  if lat<0:
    lat_s = '%gS'%(-lat)
  else:
    lat_s = '%gN'%(lat)
  return lon_s, lat_s

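# Example: lonlat2str(-30.5, 40.) returns ('30.5W', '40N').
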
"""
Routines to calculate interpolation weights:

  | ckdtree_hgrid
  | ckdtree_section
  |-->| ckdtree_points
      |--> calc_ckdtree
"""

def ckdtree_hgrid(lon_reg, lat_reg, res, 
                 #fpath_grid_triangular='', 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='',
                 gname='',
                 tgname='',
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 n_nearest_neighbours=1,
                 n_jobs=1,
                 ):
  """ Compute nearest-neighbour interpolation indices from an ICON triangular grid to a regular lon/lat grid and save them to an npz file. """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(lon_reg[0], lat_reg[0])
  lon2str, lat2str = lonlat2str(lon_reg[1], lat_reg[1])

  if n_nearest_neighbours==1:
    fname = '%s_res%3.2f_%s-%s_%s-%s.npz'%(tgname, res, lon1str, lon2str, lat1str, lat2str) 
  else:
    fname = '%s_res%3.2f_%dnn_%s-%s_%s-%s.npz'%(tgname, res, n_nearest_neighbours, lon1str, lon2str, lat1str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  # --- make rectangular grid 
  lon = np.arange(lon_reg[0],lon_reg[1],res)
  lat = np.arange(lat_reg[0],lat_reg[1],res)
  Lon, Lat = np.meshgrid(lon, lat)

  lon_o = Lon.flatten()
  lat_o = Lat.flatten()
  
  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid,
                             n_nearest_neighbours=n_nearest_neighbours, n_jobs=n_jobs)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon=lon,
            lat=lat,
            sname=sname,
            gname=gname,
            tgname=tgname,
            **Dind_dist,
           )
  return

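# Hedged usage sketch for ckdtree_hgrid (paths and grid file name are placeholders;
# the resulting npz file name is constructed from tgname, res and the region):
#
#   ckdtree_hgrid(lon_reg=[-180., 180.], lat_reg=[-90., 90.], res=1.0,
#                 fname_tgrid='icon_grid_0013_R02B04_G.nc',
#                 path_tgrid='/path/to/tgrids/',
#                 path_ckdtree='/path/to/ckdtree/rectgrids/',
#                 sname='global_1.0', gname='r2b4_atm_r0013')
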
def ckdtree_section(p1, p2, npoints=101, 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='auto',
                 gname='',
                 tgname='',
                 n_nearest_neighbours=1,
                 n_jobs=1,
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 ):
  """ Compute nearest-neighbour interpolation indices from an ICON triangular grid to a section between points p1 and p2 and save them to an npz file. """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(p1[0], p1[1])
  lon2str, lat2str = lonlat2str(p2[0], p2[1])

  fname = '%s_nps%d_%s%s_%s%s.npz'%(tgname, npoints, lon1str, lat1str, lon2str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  if sname=='auto':
    sname = fpath_ckdtree.split('/')[-1][:-4]

  # --- derive section points
  lon_sec, lat_sec, dist_sec = derive_section_points(p1, p2, npoints)
  lon_o = lon_sec
  lat_o = lat_sec

  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid, n_nearest_neighbours=n_nearest_neighbours,
                             n_jobs=n_jobs)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon_sec=lon_sec,
            lat_sec=lat_sec,
            dist_sec=dist_sec,
            sname=sname,
            gname=gname,
            **Dind_dist
           )
  return Dind_dist['dckdtree_c'], Dind_dist['ickdtree_c'], lon_sec, lat_sec, dist_sec

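# Hedged usage sketch for ckdtree_section: interpolation points along a section from
# p1=(lon, lat) to p2=(lon, lat); paths are placeholders:
#
#   dckd_c, ickd_c, lon_sec, lat_sec, dist_sec = ckdtree_section(
#       [-30., -80.], [-30., 80.], npoints=101,
#       fname_tgrid='icon_grid_0013_R02B04_G.nc',
#       path_tgrid='/path/to/tgrids/',
#       path_ckdtree='/path/to/ckdtree/sections/')
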
def ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=True, load_egrid=True, load_vgrid=True, n_nearest_neighbours=1, n_jobs=1):
  """ Compute ckdtree indices and distances from the cell, edge and vertex points of an ICON grid file to the target points lon_o, lat_o. """
  # --- load triangular grid
  f = Dataset(fpath_tgrid, 'r')
  if load_cgrid:
    clon = f.variables['clon'][:] * 180./np.pi
    clat = f.variables['clat'][:] * 180./np.pi
  if load_egrid:
    elon = f.variables['elon'][:] * 180./np.pi
    elat = f.variables['elat'][:] * 180./np.pi
  if load_vgrid:
    vlon = f.variables['vlon'][:] * 180./np.pi
    vlat = f.variables['vlat'][:] * 180./np.pi
  f.close()

  # --- ckdtree for cells, edges and vertices
  if load_cgrid:
    dckdtree_c, ickdtree_c = calc_ckdtree(lon_i=clon, lat_i=clat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          n_jobs=n_jobs,
                                          )
  if load_egrid:
    dckdtree_e, ickdtree_e = calc_ckdtree(lon_i=elon, lat_i=elat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          n_jobs=n_jobs,
                                          )
  if load_vgrid:
    dckdtree_v, ickdtree_v = calc_ckdtree(lon_i=vlon, lat_i=vlat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          n_jobs=n_jobs,
                                          )

  # --- save dict
  Dind_dist = dict()
  if load_cgrid: 
    Dind_dist['dckdtree_c'] = dckdtree_c
    Dind_dist['ickdtree_c'] = ickdtree_c
  if load_egrid: 
    Dind_dist['dckdtree_e'] = dckdtree_e
    Dind_dist['ickdtree_e'] = ickdtree_e
  if load_vgrid: 
    Dind_dist['dckdtree_v'] = dckdtree_v
    Dind_dist['ickdtree_v'] = ickdtree_v
  return Dind_dist

def calc_ckdtree(lon_i, lat_i, lon_o, lat_o, n_nearest_neighbours=1, n_jobs=1, use_npconcatenate=True):
  """ Build a cKDTree from the source points (lon_i, lat_i) and query the nearest neighbours for the target points (lon_o, lat_o). """
  # --- do ckdtree
  if False:
    lzip_i = list(zip(lon_i, lat_i))
    tree = cKDTree(lzip_i)
    lzip_o = list(zip(lon_o, lat_o))
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=1)
  else:
    #print('calc_ckdtree by cartesian distances')
    xi, yi, zi = spherical_to_cartesian(lon_i, lat_i)
    xo, yo, zo = spherical_to_cartesian(lon_o, lat_o)

    if not use_npconcatenate:
      lzip_i = list(zip(xi, yi, zi))
      lzip_o = list(zip(xo, yo, zo))
    else:
      # This option seems to be much faster but needs to be tested also for big grids
      lzip_i = np.concatenate((xi[:,np.newaxis],yi[:,np.newaxis],zi[:,np.newaxis]), axis=1)
      lzip_o = np.concatenate((xo[:,np.newaxis],yo[:,np.newaxis],zo[:,np.newaxis]), axis=1) 
    tree = cKDTree(lzip_i)
    # note: newer SciPy versions expect `workers` instead of `n_jobs` in cKDTree.query
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=n_jobs)
  return dckdtree, ickdtree

def calc_vertical_interp_weights(zdata, levs):
  """ Calculate vertical interpolation weights and indices.

Call example:
icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)

Afterwards do interpolation like this:
datai = data[ind_lev,icall]*fac+data[ind_lev+1,icall]*(1.-fac)
  """
  nza = zdata.shape[0]
  # --- initializations
  ind_lev = np.zeros((levs.size,zdata.shape[1]),dtype=int)
  icall = np.arange(zdata.shape[1],dtype=int)
  icall = icall[np.newaxis,:]
  fac = np.ma.zeros((levs.size,zdata.shape[1]))
  for k, lev in enumerate(levs):
    #print(f'k = {k}')
    # --- find level below critical level
    ind_lev[k,:] = (zdata<levs[k]).sum(axis=0)-1
    ind_lev[k,ind_lev[k,:]==(nza-1)]=-1
    # --- zdata below and above lev 
    zd1 = zdata[ind_lev[k,:],icall]
    zd2 = zdata[ind_lev[k,:]+1,icall]
    # --- linear interpolation to get weight (fac=1 if lev=zd1)
    fac[k,:] = (0.-1.)/(zd2-zd1)*(levs[k]-zd1)+1.
  # --- mask values which are out of range
  fac[ind_lev==-1] = np.ma.masked 
  return icall, ind_lev, fac

"""
Routines to calculate grids and sections
"""

def derive_section_points(p1, p2, npoints=101,):
  # --- derive section points
  if p1[0]==p2[0]:
    lon_sec = p1[0]*np.ones((npoints)) 
    lat_sec = np.linspace(p1[1],p2[1],npoints)
  else:
    lon_sec = np.linspace(p1[0],p2[0],npoints)
    lat_sec = (p2[1]-p1[1])/(p2[0]-p1[0])*(lon_sec-p1[0])+p1[1]
  dist_sec = haversine_dist(lon_sec[0], lat_sec[0], lon_sec, lat_sec)
  return lon_sec, lat_sec, dist_sec

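# Example: a meridional section along 30W from 80S to 80N with 101 points:
#
#   lon_sec, lat_sec, dist_sec = derive_section_points([-30., -80.], [-30., 80.], npoints=101)
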
def calc_north_pole_interp_grid_points(lat_south=60., res=100e3):
  """
  Compute grid points optimized for plotting the North Pole area.

  Parameters:
  -----------
  lat_south : float
      Southern latitude of target grid.
  res : float
      resolution of target grid

  Returns:
  --------
  Lon_np, Lat_np: ndarray
      Longitude and latitude of target grid as 2d array.

  Examples:
  ---------
  Lon_np, Lat_np = calc_north_pole_interp_grid_points(lat_south=60., res=100e3)

  """
  R = 6371e3
  x1, y1, z1 = spherical_to_cartesian(  0., lat_south)
  x2, y2, z2 = spherical_to_cartesian( 90., lat_south)
  x3, y3, z3 = spherical_to_cartesian(180., lat_south)
  x4, y4, z4 = spherical_to_cartesian(270., lat_south)

  lon1, lat1 = cartesian_to_spherical(x1, y1, z1)
  lon2, lat2 = cartesian_to_spherical(x2, y2, z2)
  lon3, lat3 = cartesian_to_spherical(x3, y3, z3)
  lon4, lat4 = cartesian_to_spherical(x4, y4, z4)

  #x1 = R * np.cos(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y1 = R * np.sin(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z1 = R * np.sin(lat_south*np.pi/180.)
  #x2 = R * np.cos( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y2 = R * np.sin( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z2 = R * np.sin(lat_south*np.pi/180.)
  #x3 = R * np.cos(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y3 = R * np.sin(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z3 = R * np.sin(lat_south*np.pi/180.)
  #x4 = R * np.cos(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y4 = R * np.sin(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z4 = R * np.sin(lat_south*np.pi/180.)
  #
  #lat1 = np.arcsin(z1/np.sqrt(x1**2+y1**2+z1**2)) * 180./np.pi
  #lon1 = np.arctan2(y1,x1) * 180./np.pi
  #lat2 = np.arcsin(z2/np.sqrt(x2**2+y2**2+z2**2)) * 180./np.pi
  #lon2 = np.arctan2(y2,x2) * 180./np.pi
  #lat3 = np.arcsin(z3/np.sqrt(x3**2+y3**2+z3**2)) * 180./np.pi
  #lon3 = np.arctan2(y3,x3) * 180./np.pi
  #lat4 = np.arcsin(z4/np.sqrt(x4**2+y4**2+z4**2)) * 180./np.pi
  #lon4 = np.arctan2(y4,x4) * 180./np.pi
  
  xnp = np.arange(x3, x1+res, res)
  ynp = np.arange(y4, y2+res, res)
  
  Xnp, Ynp = np.meshgrid(xnp, ynp)
  Znp = R * np.sin(lat1*np.pi/180.) * np.ones((ynp.size,xnp.size))
  Lon_np = np.arctan2(Ynp,Xnp) * 180./np.pi
  Lat_np = np.arcsin(Znp/np.sqrt(Xnp**2+Ynp**2+Znp**2)) * 180./np.pi
  return Lon_np, Lat_np

"""
Routines related to spherical geometry
"""
def haversine_dist(lon_ref, lat_ref, lon_pts, lat_pts, degree=True):
  # for details see http://en.wikipedia.org/wiki/Haversine_formula
  r = 6378.e3
  if degree:
    lon_ref = lon_ref * np.pi/180.
    lat_ref = lat_ref * np.pi/180.
    lon_pts = lon_pts * np.pi/180.
    lat_pts = lat_pts * np.pi/180.
  arg = np.sqrt(   np.sin(0.5*(lat_pts-lat_ref))**2 
                 + np.sin(0.5*(lon_pts-lon_ref))**2
                 * np.cos(lat_ref)*np.cos(lat_pts) )
  dist = 2*r * np.arcsin(arg)
  return dist

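# Example: one degree of longitude along the equator is roughly 111 km:
#
#   haversine_dist(0., 0., 1., 0.)   # ~1.11e5 (metres)
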
def spherical_to_cartesian(lon, lat):
  earth_radius = 6371e3
  x = earth_radius * np.cos(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  y = earth_radius * np.sin(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  z = earth_radius * np.sin(lat*np.pi/180.)
  return x, y, z

def cartesian_to_spherical(x, y, z):
  lat = np.arcsin(z/np.sqrt(x**2+y**2+z**2)) * 180./np.pi
  lon = np.arctan2(y,x) * 180./np.pi
  return lon, lat

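# Example round trip between spherical and cartesian coordinates:
#
#   x, y, z = spherical_to_cartesian(30., 45.)
#   cartesian_to_spherical(x, y, z)   # -> (30.0, 45.0) up to rounding
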
"""
Routines to load data
"""
def load_hsnap(fpath, var, it=0, iz=0, iw=None, fpath_ckdtree='', verbose=True):
  f = Dataset(fpath, 'r')
  if verbose:
    print("Loading %s from %s" % (var, fpath))
  if f.variables[var].ndim==2:
    data = f.variables[var][it,:]
  else:
    data = f.variables[var][it,iz,:]
  if iw is not None:
    data = np.concatenate((data[:,iw:],data[:,:iw]),axis=1)
  f.close()

  data[data==0.] = np.ma.masked
  return data

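# Hedged usage sketch for load_hsnap (file and variable names are placeholders):
# load one horizontal snapshot of 'to' at time step 0 and depth level 0.
#
#   data = load_hsnap('ocean_data.nc', 'to', it=0, iz=0)
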
def datetime64_to_float(dates):
  years  = (dates.astype('datetime64[Y]').astype(int) + 1970).astype(int)
  months = (dates.astype('datetime64[M]').astype(int) % 12 + 1).astype(int)
  days   = (dates - dates.astype('datetime64[M]') + 1).astype(int)
  return years, months, days

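# Example for datetime64_to_float:
#
#   dates = np.array(['2001-03-15'], dtype='datetime64[D]')
#   years, months, days = datetime64_to_float(dates)   # -> arrays [2001], [3], [15]
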
def time_average(IcD, var, t1='none', t2='none', it_ave=[], iz='all', always_use_loop=False, verbose=False, use_xr=False, load_xr_data=False, dimension_from_file='first'):
  it_ave = np.array(it_ave)
  # --- if no it_ave is given use t1 and t2 to determine averaging indices it_ave
  if it_ave.size==0:
    # --- if t2=='none' set t2=t1 and no time average will be applied
    if isinstance(t2, str) and t2=='none':
      t2 = t1

    # --- convert to datetime64 objects if necessary
    if isinstance(t1, str):
      t1 = np.datetime64(t1)
    if isinstance(t2, str):
      t2 = np.datetime64(t2)

    # --- determine averaging interval
    it_ave = np.where( (IcD.times>=t1) & (IcD.times<=t2) )[0]
  else:
    t1 = IcD.times[it_ave[0]]
    t2 = IcD.times[it_ave[-1]]

  if it_ave.size==0:
    raise ValueError(f'::: Could not find any time steps in interval t1={t1} and t2={t2}! :::')
  
  ## --- decide whether the file consists of monthly or yearly averages (or something else)
  #dt1 = (IcD.times[it_ave][1]-IcD.times[it_ave][0]).astype(float)/(86400)
  #if dt1==365 or dt1==366:
  #  ave_mode = 'yearly'
  #elif dt1==28 or dt1==29 or dt1==30 or dt1==31:
  #  ave_mode = 'monthly'
  #else:
  #  ave_mode = 'unknown'

  dt64type = IcD.times[0].dtype
  time_bnds = IcD.times[it_ave]
  yy, mm, dd = datetime64_to_float(time_bnds[0])
  if t1!=t2:
    if IcD.output_freq=='yearly':
      time_bnds = np.concatenate(([np.datetime64(f'{yy-1:04d}-{mm:02d}-{dd:02d}').astype(dt64type)],time_bnds))
    elif IcD.output_freq=='monthly':
      if mm==1:
        yy += -1
        mm = 13
      time_bnds = np.concatenate(([np.datetime64(f'{yy:04d}-{mm-1:02d}-{dd:02d}').astype(dt64type)],time_bnds))
    elif IcD.output_freq=='unknown':
      time_bnds = np.concatenate(([time_bnds[0]-(time_bnds[1]-time_bnds[0])], time_bnds))
    dt = np.diff(time_bnds).astype(IcD.dtype)
  else:
    # load single time instance
    dt = np.array([1])
  #dt = np.ones((it_ave.size), dtype=IcD.dtype)
  #print('Warning dt set to ones!!!')

  # --- get dimensions to allocate data
  if dimension_from_file=='first':
    dimension_from_file = IcD.flist_ts[0]
  elif dimension_from_file=='last':
    dimension_from_file = IcD.flist_ts[-1]
  f = Dataset(dimension_from_file, 'r')
  # FIXME: If == ('time', 'lat', 'lon') works well use it everywhere
  load_hfl_type = False
  load_moc_type = False
  if f.variables[var].dimensions == ('time', 'lat', 'lon'): # e.g. for heat fluxes
    nt, nc, nx = f.variables[var].shape
    nz = 0
    load_hfl_type = True
  elif f.variables[var].dimensions == ('time', 'depth', 'lat', 'lon'): # e.g. for MOC 
    nt, nz, nc, ndummy = f.variables[var].shape 
    load_moc_type = True
  elif f.variables[var].ndim==3:
    nt, nz, nc = f.variables[var].shape
  elif f.variables[var].ndim==2: # e.g. for 2D variables like zos and mld
    nt, nc = f.variables[var].shape
    nz = 0
  f.close()

  # --- set iz to all levels
  if isinstance(iz,str) and iz=='all':
    iz = np.arange(nz)
  #else:
  #  iz = np.array([iz])

  # --- if all data is coming from one file take faster approach
  fpaths = np.unique(IcD.flist_ts[it_ave])
  if use_xr:
    #print(dt)
    if load_hfl_type:
      data_ave = (IcD.ds[var][it_ave,:,0]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif load_moc_type:
      data_ave = (IcD.ds[var][it_ave,:,:,0]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and isinstance(iz,(int,np.integer)): # data has no depth dim afterwards
      #data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis]).sum(axis=0)/dt.sum()
      data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and not isinstance(iz,(int,np.integer)): # data has depth dim afterwards
      data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    else:
      data_ave = (IcD.ds[var][it_ave,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    #dataxr = dsxr[var][it_ave,:,:].mean(axis=0)
    if load_xr_data:
      data_ave = data_ave.load().data
  elif (fpaths.size==1) and not always_use_loop:
    f = Dataset(fpaths[0], 'r')
    if load_hfl_type:
      data_ave = (f.variables[var][IcD.its[it_ave],:,0]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif load_moc_type:
      data_ave = (f.variables[var][IcD.its[it_ave],:,:,0]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and isinstance(iz,(int,np.integer)): # data has no depth dim afterwards
      data_ave = (f.variables[var][IcD.its[it_ave],iz,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and not isinstance(iz,(int,np.integer)): # data has depth dim afterwards
      data_ave = (f.variables[var][IcD.its[it_ave],iz,:]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    else:
      data_ave = (f.variables[var][IcD.its[it_ave],:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    f.close()
  # --- otherwise a loop over all files is needed
  else:
    # --- allocate data
    if isinstance(iz,(int,np.integer)) or nz==0:
      data_ave = np.ma.zeros((nc), dtype=IcD.dtype)
    else:
      data_ave = np.ma.zeros((iz.size,nc), dtype=IcD.dtype)

    # --- average by looping over all files and time steps
    for ll, it in enumerate(it_ave):
      f = Dataset(IcD.flist_ts[it], 'r')
      if load_hfl_type:
        data_ave += f.variables[var][IcD.its[it],:,0]*dt[ll]/dt.sum()
      elif load_moc_type:
        data_ave += f.variables[var][IcD.its[it],:,:,0]*dt[ll]/dt.sum()
      elif nz>0:
        data_ave += f.variables[var][IcD.its[it],iz,:]*dt[ll]/dt.sum()
      else:
        data_ave += f.variables[var][IcD.its[it],:]*dt[ll]/dt.sum()
      f.close()
  data_ave = data_ave.astype(IcD.dtype)
  if verbose:
    #print(f'pyicon.time_average: var={var}: it_ave={it_ave}')
    print(f'pyicon.time_average: var={var}: it_ave={IcD.times[it_ave]}')
  return data_ave, it_ave

def timing(ts, string='', verbose=True):
  if ts[0]==0:
    ts = np.array([datetime.datetime.now()])
  else:
    ts = np.append(ts, [datetime.datetime.now()])
    if verbose:
      print(ts[-1]-ts[-2], ' ', (ts[-1]-ts[0]), ' '+string)
  return ts

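# Hedged usage sketch for timing: start with ts=[0], then pass the returned array to
# subsequent calls to print incremental and total wall-clock times:
#
#   ts = timing(np.array([0]))
#   # ... do some work ...
#   ts = timing(ts, 'after step 1')
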
def conv_gname(gname):
  gname = gname[:-4]

  ogrid = gname.split('_')[0]
  res = float(gname.split('_')[1][1:])

  lo1 = gname.split('_')[2]
  if lo1[-1]=='w':
    lo1 = -float(lo1[:-1])
  else:
    lo1 = float(lo1[:-1])
  lo2 = gname.split('_')[3]
  if lo2[-1]=='w':
    lo2 = -float(lo2[:-1])
  else:
    lo2 = float(lo2[:-1])

  la1 = gname.split('_')[4]
  if la1[-1]=='s':
    la1 = -float(la1[:-1])
  else:
    la1 = float(la1[:-1])
  la2 = gname.split('_')[5]
  if la2[-1]=='s':
    la2 = -float(la2[:-1])
  else:
    la2 = float(la2[:-1])

  lon_reg = [lo1, lo2]
  lat_reg = [la1, la2]
  return ogrid, res, lon_reg, lat_reg

"""
Grid related functions
"""
def identify_grid(path_grid, fpath_data):
  """ Identifies ICON grid from fpath_data which can be either a path to 
  a file containing a 'clon' variable or an xarray dataset or array where
  the cell dimension name is either 'ncells' or 'cell'.

  r2b4:  160km:    15117: OceanOnly_Icos_0158km_etopo40.nc
  r2b4a: 160km:    20480: /pool/data/ICON/grids/public/mpim/0013/icon_grid_0013_R02B04_G.nc
  r2b6:   40km:   327680: OCEANINP_pre04_LndnoLak_039km_editSLOHH2017_G.nc
  r2b8:   10km:  3729001: OceanOnly_Global_IcosSymmetric_0010km_rotatedZ37d_modified_srtm30_1min.nc
  r2b9:    5km: 14886338: OceanOnly_IcosSymmetric_4932m_rotatedZ37d_modified_srtm30_1min.nc
  r2b9a:   5km: 20971520: /pool/data/ICON/grids/public/mpim/0015/icon_grid_0015_R02B09_G.nc
  """
  
  Dgrid_list = dict()

  grid_name = 'r2b4_oce_r0003'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Icos_0158km_etopo40'
  Dgrid_list[grid_name]['size'] = 15117
  #Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  Dgrid_list[grid_name]['fpath_grid'] = f'{path_grid}/{grid_name}/{grid_name}_tgrid.nc'
  
  grid_name = 'r2b4_oce_r0004'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Icos_0158km_etopo40'
  Dgrid_list[grid_name]['size'] = 15105
  #Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  Dgrid_list[grid_name]['fpath_grid'] = f'{path_grid}/{grid_name}/{grid_name}_tgrid.nc'
 
  grid_name = 'r2b4_atm_r0013'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'icon_grid_0013_R02B04_G'
  Dgrid_list[grid_name]['size'] = 20480
  #Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  Dgrid_list[grid_name]['fpath_grid'] = f'{path_grid}/{grid_name}/{grid_name}_tgrid.nc'

  grid_name = 'r2b6old'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name