# pyicon_tb.py
#print('sys')
import sys, glob, os
import json
# --- calculations
import numpy as np
#print('scipy')
from scipy import interpolate
from scipy.spatial import cKDTree
# --- reading data 
from netCDF4 import Dataset, num2date, date2num
import datetime
# --- plotting
#print('matplotlib')
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import ticker
#import my_toolbox as my
#print('cartopy')
import cartopy
import cartopy.crs as ccrs
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import cmocean
# --- debugging
from ipdb import set_trace as mybreak  
#from importlib import reload
#print('xarray')
import xarray as xr
#print('done loading')

"""
pyicon
#  icon_to_regular_grid
#  icon_to_section
  apply_ckdtree
  ckdtree_hgrid
  ckdtree_section
  calc_ckdtree
  haversine_dist
  derive_section_points
  timing
  conv_gname
  identify_grid
  crop_tripolar_grid
  crop_regular_grid
  get_files_of_timeseries
  get_varnames
  get_timesteps

  ?load_data
  ?load_grid

  ?hplot
  ?update_hplot
  ?vplot
  ?update_vplot

  #IconDataFile

  IconData
  IP_hor_sec_rect

  QuickPlotWebsite

  IDa: Icon data set (directory of files)
    - info about tsteps
    - info about vars
    - info about grid
    - IGr: Icon grid
    - IVa: Icon variable if loaded
  IIn: Icon interpolator class

  IPl: Icon plot class

IDa = pyic.IconData(fpath or path)
IDa.load_grid()
IDa.show()

IPl = pyic.hplot(IDa, 'var', iz, tstep, IIn)

"""

class pyicon_configure(object):
  def __init__(self, fpath_config):
    with open(fpath_config) as file_json:
      Dsettings = json.load(file_json)
    for key in Dsettings.keys():
      setattr(self, key, Dsettings[key])
    return

#def icon_to_regular_grid(data, shape, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  data_interpolated = data_interpolated.reshape(shape)
#  return data_interpolated
#
#def icon_to_section(data, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  return data_interpolated

"""
Routines to apply interpolation weights
"""
def apply_ckdtree_base(data, inds, distances, radius_of_influence=1000e3):
  if distances.ndim == 1:
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    if data.ndim==1:
      if isinstance(data, xr.core.dataarray.DataArray):
        data_interpolated = data.load()[inds]
      else:
        data_interpolated = data[inds]
      data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      if isinstance(data, xr.core.dataarray.DataArray):
        data_interpolated = data.load()[:,inds]
      else:
        data_interpolated = data[:,inds]
      data_interpolated[:,distances>=radius_of_influence] = np.nan
  else:
    #raise ValueError("::: distances.ndim>1 is not properly supported yet. :::")
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    weights = 1.0 / distances**2
    if data.ndim==1:
      data_interpolated = np.ma.sum(weights * data[inds], axis=1) / np.ma.sum(weights, axis=1)
      #data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      data_interpolated = np.ma.sum(weights[np.newaxis,:,:] * data[:,inds], axis=2) / np.ma.sum(weights[np.newaxis,:,:], axis=2)
      #data_interpolated[:,distances>=radius_of_influence] = np.nan
  data_interpolated = np.ma.masked_invalid(data_interpolated)
  return data_interpolated

def apply_ckdtree(data, fpath_ckdtree, mask=None, coordinates='clat clon', radius_of_influence=1000e3):
  """
  * credits
    function modified from pyfesom (Nikolay Koldunov)
  """
  ddnpz = np.load(fpath_ckdtree)
  #if coordinates=='clat clon':
  if ('clon' in coordinates) or (coordinates==''):
    distances = ddnpz['dckdtree_c']
    inds = ddnpz['ickdtree_c'] 
  #elif coordinates=='elat elon':
  elif 'elon' in coordinates:
    distances = ddnpz['dckdtree_e']
    inds = ddnpz['ickdtree_e'] 
  #elif coordinates=='vlat vlon':
  elif 'vlon' in coordinates:
    distances = ddnpz['dckdtree_v']
    inds = ddnpz['ickdtree_v'] 
  else:
    raise ValueError('::: Error: Unsupported coordinates: %s! ::: ' % (coordinates))

  if mask is not None:
    #if data.ndim==1:
    #  data = data[mask]
    #elif data.ndim==2:
    #  data = data[:,mask]
    if inds.ndim==1:
      inds = inds[mask]
      distances = distances[mask]
    elif inds.ndim==2:
      #raise ValueError('::: Warning: This was never checked! Please check carefully and remove this warning.:::')
      inds = inds[:,mask]
      distances = distances[:,mask]

  data_interpolated = apply_ckdtree_base(data, inds, distances, radius_of_influence)
  return data_interpolated
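
# Usage sketch (added, not part of the original module): apply precomputed
# interpolation weights to a cell-based field and reshape the result to the
# regular lon/lat grid stored in the (hypothetical) npz weight file.
def _example_apply_ckdtree(data, fpath_ckdtree):
  ddnpz = np.load(fpath_ckdtree)
  lon, lat = ddnpz['lon'], ddnpz['lat']
  # nearest-neighbour interpolation of a 1D cell field to the target points
  datai = apply_ckdtree(data, fpath_ckdtree, coordinates='clat clon')
  # the target points come from a rectangular grid, so reshape to 2D
  return lon, lat, datai.reshape(lat.size, lon.size)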

def interp_to_rectgrid(data, fpath_ckdtree, indx='all', indy='all', mask_reg=None, coordinates='clat clon'):
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  if not isinstance(indx, str):
    lon = lon[indx]
    lat = lat[indy]
  datai = apply_ckdtree(data, fpath_ckdtree, mask=mask_reg, coordinates=coordinates)
  if datai.ndim==1:
    datai = datai.reshape(lat.size, lon.size)
  else:
    datai = datai.reshape([data.shape[0], lat.size, lon.size])
  datai[datai==0.] = np.ma.masked
  return lon, lat, datai

def interp_to_section(data, fpath_ckdtree, coordinates='clat clon'):
  ddnpz = np.load(fpath_ckdtree)
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 
  datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
  datai[datai==0.] = np.ma.masked
  return lon_sec, lat_sec, dist_sec, datai
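
# Usage sketch (added): interpolate a cell-based field with the wrappers above
# and plot it; fpath_ckdtree is a placeholder for a weight file created with
# ckdtree_hgrid further below.
def _example_interp_and_plot(data, fpath_ckdtree):
  lon, lat, datai = interp_to_rectgrid(data, fpath_ckdtree)
  hm = plt.pcolormesh(lon, lat, datai)
  plt.colorbar(hm)
  return hm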

""" 
Routines for zonal averaging
"""
def zonal_average(fpath_data, var, basin='global', it=0, fpath_fx='', fpath_ckdtree=''):

  for fp in [fpath_data, fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  f = Dataset(fpath_data, 'r')
  nz = f.variables[var].shape[1]
  coordinates = f.variables[var].coordinates
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    #print('k = %d/%d'%(k,nz))
    # --- load data
    data = f.variables[var][it,k,:]
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    if isinstance(data, np.ma.core.MaskedArray):
      data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  f.close()
  return lat_sec, data_zave
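
# Usage sketch (added): zonally average a 3D ocean field over the Atlantic;
# the variable name 'to' and all paths are placeholders.
def _example_zonal_average(fpath_data, fpath_fx, fpath_ckdtree):
  lat_sec, data_zave = zonal_average(fpath_data, 'to', basin='atl', it=0,
                                     fpath_fx=fpath_fx, fpath_ckdtree=fpath_ckdtree)
  return lat_sec, data_zave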

def zonal_average_3d_data(data3d, basin='global', it=0, coordinates='clat clon', fpath_fx='', fpath_ckdtree=''):
  """ Like zonal_average but here data instead of path to data is given. This can only work if the whole data array fits into memory.
  """

  for fp in [fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  nz = data3d.shape[0]
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    data = 1.*data3d[k,:]
    #print('k = %d/%d'%(k,nz))
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    if isinstance(data, np.ma.core.MaskedArray):
      data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  return lat_sec, data_zave

def zonal_average_atmosphere(data3d, ind_lev, fac, fpath_ckdtree='', coordinates='clat clon',):
  icall = np.arange(data3d.shape[1],dtype=int)
  datavi = data3d[ind_lev,icall]*fac+data3d[ind_lev+1,icall]*(1.-fac)
  lon, lat, datavihi = interp_to_rectgrid(datavi, fpath_ckdtree, coordinates=coordinates)
  data_zave = datavihi.mean(axis=2)
  return lat, data_zave
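
# Usage sketch (added): typical atmosphere workflow, assuming zdata holds the
# vertical coordinate of every column at each model level and levs are the
# target levels; the weights come from calc_vertical_interp_weights below.
def _example_zonal_average_atmosphere(data3d, zdata, levs, fpath_ckdtree):
  icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)
  lat, data_zave = zonal_average_atmosphere(data3d, ind_lev, fac,
                                            fpath_ckdtree=fpath_ckdtree)
  return lat, data_zave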

def zonal_section_3d_data(data3d, fpath_ckdtree, coordinates):
  """
  (
   lon_sec, lat_sec, dist_sec, data_sec 
  ) = pyic.zonal_section_3d_data(tbias, 
    fpath_ckdtree=path_ckdtree+'sections/r2b4_nps100_30W80S_30W80N.npz')
  """
  # --- load ckdtree
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 

  nz = data3d.shape[0]
  data_sec = np.ma.zeros((nz,dist_sec.size))
  for k in range(nz):
    data_sec[k,:] = apply_ckdtree(data3d[k,:], fpath_ckdtree, coordinates=coordinates)
  return lon_sec, lat_sec, dist_sec, data_sec

def lonlat2str(lon, lat):
  if lon<0:
    lon_s = '%gW'%(-lon)
  else:
    lon_s = '%gE'%(lon)
  if lat<0:
    lat_s = '%gS'%(-lat)
  else:
    lat_s = '%gN'%(lat)
  return lon_s, lat_s
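
# Illustration (added): negative longitudes/latitudes are encoded with W/S
# suffixes, e.g. lonlat2str(-30.5, 80.) returns ('30.5W', '80N'); the strings
# are used below to build the file names of the interpolation weights.
def _example_lonlat2str():
  lon_s, lat_s = lonlat2str(-30.5, 80.)
  return lon_s, lat_s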

"""
Routines to calculate interpolation weights:

  | ckdtree_hgrid
  | ckdtree_section
  |-->| ckdtree_points
      |--> calc_ckdtree
"""

def ckdtree_hgrid(lon_reg, lat_reg, res, 
                 #fpath_grid_triangular='', 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='',
                 gname='',
                 tgname='',
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 n_nearest_neighbours=1,
                 ):
  """
  """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(lon_reg[0], lat_reg[0])
  lon2str, lat2str = lonlat2str(lon_reg[1], lat_reg[1])

  if n_nearest_neighbours==1:
    fname = '%s_res%3.2f_%s-%s_%s-%s.npz'%(tgname, res, lon1str, lon2str, lat1str, lat2str) 
  else:
    fname = '%s_res%3.2f_%dnn_%s-%s_%s-%s.npz'%(tgname, res, n_nearest_neighbours, lon1str, lon2str, lat1str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  # --- make rectangular grid 
  lon = np.arange(lon_reg[0],lon_reg[1],res)
  lat = np.arange(lat_reg[0],lat_reg[1],res)
  Lon, Lat = np.meshgrid(lon, lat)

  lon_o = Lon.flatten()
  lat_o = Lat.flatten()
  
  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid, n_nearest_neighbours=n_nearest_neighbours)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon=lon,
            lat=lat,
            sname=sname,
            gname=gname,
            tgname=tgname,
            **Dind_dist,
           )
  return
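
# Usage sketch (added): precompute 0.3 degree interpolation weights for a
# global domain; the grid file name matches the r2b4a entry of identify_grid
# further below, while the directories are placeholders.
def _example_ckdtree_hgrid():
  ckdtree_hgrid(lon_reg=[-180., 180.], lat_reg=[-90., 90.], res=0.3,
                fname_tgrid='icon_grid_0013_R02B04_G.nc',
                path_tgrid='/path/to/tgrids/',
                path_ckdtree='/path/to/ckdtrees/',
                sname='global_0.3deg', gname='r2b4a')
  return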

def ckdtree_section(p1, p2, npoints=101, 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='auto',
                 gname='',
                 tgname='',
                 n_nearest_neighbours=1,
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 ):
  """
  """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(p1[0], p1[1])
  lon2str, lat2str = lonlat2str(p2[0], p2[1])

  fname = '%s_nps%d_%s%s_%s%s.npz'%(tgname, npoints, lon1str, lat1str, lon2str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  # --- derive sname from the ckdtree file name
  if sname=='auto':
    sname = fpath_ckdtree.split('/')[-1][:-4]

nbruegge's avatar
nbruegge committed
440
441
  # --- derive section points
  lon_sec, lat_sec, dist_sec = derive_section_points(p1, p2, npoints)
  lon_o = lon_sec
  lat_o = lat_sec

  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid, n_nearest_neighbours=n_nearest_neighbours)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon_sec=lon_sec,
            lat_sec=lat_sec,
            dist_sec=dist_sec,
            sname=sname,
            gname=gname,
            **Dind_dist
           )
  return Dind_dist['dckdtree_c'], Dind_dist['ickdtree_c'], lon_sec, lat_sec, dist_sec

def ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=True, load_egrid=True, load_vgrid=True, n_nearest_neighbours=1):
  """
  """
  # --- load triangular grid
  f = Dataset(fpath_tgrid, 'r')
  if load_cgrid:
    clon = f.variables['clon'][:] * 180./np.pi
    clat = f.variables['clat'][:] * 180./np.pi
  if load_egrid:
    elon = f.variables['elon'][:] * 180./np.pi
    elat = f.variables['elat'][:] * 180./np.pi
  if load_vgrid:
    vlon = f.variables['vlon'][:] * 180./np.pi
    vlat = f.variables['vlat'][:] * 180./np.pi
  f.close()

  # --- ckdtree for cells, edges and vertices
  if load_cgrid:
    dckdtree_c, ickdtree_c = calc_ckdtree(lon_i=clon, lat_i=clat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )
  if load_egrid:
    dckdtree_e, ickdtree_e = calc_ckdtree(lon_i=elon, lat_i=elat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )
  if load_vgrid:
    dckdtree_v, ickdtree_v = calc_ckdtree(lon_i=vlon, lat_i=vlat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )

  # --- save dict
  Dind_dist = dict()
  if load_cgrid: 
    Dind_dist['dckdtree_c'] = dckdtree_c
    Dind_dist['ickdtree_c'] = ickdtree_c
  if load_egrid: 
    Dind_dist['dckdtree_e'] = dckdtree_e
    Dind_dist['ickdtree_e'] = ickdtree_e
  if load_vgrid: 
    Dind_dist['dckdtree_v'] = dckdtree_v
    Dind_dist['ickdtree_v'] = ickdtree_v
  return Dind_dist

def calc_ckdtree(lon_i, lat_i, lon_o, lat_o, n_nearest_neighbours=1):
  """
  """
  # --- do ckdtree
  if False:
    lzip_i = list(zip(lon_i, lat_i))
    tree = cKDTree(lzip_i)
    lzip_o = list(zip(lon_o, lat_o))
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=1)
  else:
    #print('calc_ckdtree by cartesian distances')
    xi, yi, zi = spherical_to_cartesian(lon_i, lat_i)
    xo, yo, zo = spherical_to_cartesian(lon_o, lat_o)

    lzip_i = list(zip(xi, yi, zi))
    tree = cKDTree(lzip_i)
    lzip_o = list(zip(xo, yo, zo))
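    # note (added): newer SciPy versions have renamed the n_jobs argument of
    # cKDTree.query to workers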
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=1)
  return dckdtree, ickdtree
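
# Illustration (added): nearest-neighbour lookup from three source points on
# the equator to a single target point at 9E; the point at 10E is closest.
def _example_calc_ckdtree():
  lon_i = np.array([0., 10., 20.])
  lat_i = np.zeros(3)
  lon_o = np.array([9.])
  lat_o = np.zeros(1)
  dckdtree, ickdtree = calc_ckdtree(lon_i, lat_i, lon_o, lat_o)
  return dckdtree, ickdtree   # ickdtree -> array([1])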

def calc_vertical_interp_weights(zdata, levs):
  """ Calculate vertical interpolation weights and indices.

Call example:
icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)

Afterwards do interpolation like this:
datai = data[ind_lev,icall]*fac+data[ind_lev+1,icall]*(1.-fac)
  """
  nza = zdata.shape[0]
  # --- initializations
  ind_lev = np.zeros((levs.size,zdata.shape[1]),dtype=int)
  icall = np.arange(zdata.shape[1],dtype=int)
  icall = icall[np.newaxis,:]
  fac = np.ma.zeros((levs.size,zdata.shape[1]))
  for k, lev in enumerate(levs):
    #print(f'k = {k}')
    # --- find level below critical level
    ind_lev[k,:] = (zdata<levs[k]).sum(axis=0)-1
    ind_lev[k,ind_lev[k,:]==(nza-1)]=-1
    # --- zdata below and above lev 
    zd1 = zdata[ind_lev[k,:],icall]
    zd2 = zdata[ind_lev[k,:]+1,icall]
    # --- linear interpolation to get weight (fac=1 if lev=zd1)
    fac[k,:] = (0.-1.)/(zd2-zd1)*(levs[k]-zd1)+1.
  # --- mask values which are out of range
  fac[ind_lev==-1] = np.ma.masked 
  return icall, ind_lev, fac
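
# Worked illustration (added): interpolate a 3-level profile to the 15 m level
# for four columns; the result is 2.0 everywhere (half way between 1. and 3.).
def _example_calc_vertical_interp_weights():
  zdata = np.tile(np.array([10., 20., 30.])[:, np.newaxis], (1, 4))  # level depths
  data  = np.tile(np.array([ 1.,  3.,  5.])[:, np.newaxis], (1, 4))  # profile values
  levs  = np.array([15.])
  icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)
  datai = data[ind_lev, icall]*fac + data[ind_lev+1, icall]*(1.-fac)
  return datai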

"""
Routines to calculate grids and sections
"""

def derive_section_points(p1, p2, npoints=101,):
  # --- derive section points
  if p1[0]==p2[0]:
    lon_sec = p1[0]*np.ones((npoints)) 
    lat_sec = np.linspace(p1[1],p2[1],npoints)
  else:
    lon_sec = np.linspace(p1[0],p2[0],npoints)
    lat_sec = (p2[1]-p1[1])/(p2[0]-p1[0])*(lon_sec-p1[0])+p1[1]
  dist_sec = haversine_dist(lon_sec[0], lat_sec[0], lon_sec, lat_sec)
  return lon_sec, lat_sec, dist_sec

def calc_north_pole_interp_grid_points(lat_south=60., res=100e3):
  """
  Compute grid points optimized for plotting the North Pole area.

  Parameters:
  -----------
  lat_south : float
      Southern latitude of target grid.
  res : float
      Resolution of target grid in meters.

  Returns:
  --------
  Lon_np, Lat_np: ndarray
      Longitude and latitude of target grid as 2d array.

  Examples:
  ---------
  Lon_np, Lat_np = calc_north_pole_interp_grid_points(lat_south=60., res=100e3)

  """
  R = 6371e3
  x1, y1, z1 = spherical_to_cartesian(  0., lat_south)
  x2, y2, z2 = spherical_to_cartesian( 90., lat_south)
  x3, y3, z3 = spherical_to_cartesian(180., lat_south)
  x4, y4, z4 = spherical_to_cartesian(270., lat_south)

  lon1, lat1 = cartesian_to_spherical(x1, y1, z1)
  lon2, lat2 = cartesian_to_spherical(x2, y2, z2)
  lon3, lat3 = cartesian_to_spherical(x3, y3, z3)
  lon4, lat4 = cartesian_to_spherical(x4, y4, z4)

  #x1 = R * np.cos(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y1 = R * np.sin(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z1 = R * np.sin(lat_south*np.pi/180.)
  #x2 = R * np.cos( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y2 = R * np.sin( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z2 = R * np.sin(lat_south*np.pi/180.)
  #x3 = R * np.cos(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y3 = R * np.sin(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z3 = R * np.sin(lat_south*np.pi/180.)
  #x4 = R * np.cos(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y4 = R * np.sin(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z4 = R * np.sin(lat_south*np.pi/180.)
  #
  #lat1 = np.arcsin(z1/np.sqrt(x1**2+y1**2+z1**2)) * 180./np.pi
  #lon1 = np.arctan2(y1,x1) * 180./np.pi
  #lat2 = np.arcsin(z2/np.sqrt(x2**2+y2**2+z2**2)) * 180./np.pi
  #lon2 = np.arctan2(y2,x2) * 180./np.pi
  #lat3 = np.arcsin(z3/np.sqrt(x3**2+y3**2+z3**2)) * 180./np.pi
  #lon3 = np.arctan2(y3,x3) * 180./np.pi
  #lat4 = np.arcsin(z4/np.sqrt(x4**2+y4**2+z4**2)) * 180./np.pi
  #lon4 = np.arctan2(y4,x4) * 180./np.pi
  
  xnp = np.arange(x3, x1+res, res)
  ynp = np.arange(y4, y2+res, res)
  
  Xnp, Ynp = np.meshgrid(xnp, ynp)
  Znp = R * np.sin(lat1*np.pi/180.) * np.ones((ynp.size,xnp.size))
  Lon_np = np.arctan2(Ynp,Xnp) * 180./np.pi
  Lat_np = np.arcsin(Znp/np.sqrt(Xnp**2+Ynp**2+Znp**2)) * 180./np.pi
  return Lon_np, Lat_np

"""
Routines related to spherical geometry
"""
def haversine_dist(lon_ref, lat_ref, lon_pts, lat_pts, degree=True):
  # for details see http://en.wikipedia.org/wiki/Haversine_formula
  r = 6378.e3
  if degree:
    lon_ref = lon_ref * np.pi/180.
    lat_ref = lat_ref * np.pi/180.
    lon_pts = lon_pts * np.pi/180.
    lat_pts = lat_pts * np.pi/180.
  arg = np.sqrt(   np.sin(0.5*(lat_pts-lat_ref))**2 
                 + np.sin(0.5*(lon_pts-lon_ref))**2
                 * np.cos(lat_ref)*np.cos(lat_pts) )
  dist = 2*r * np.arcsin(arg)
  return dist
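
# Illustration (added): distance from (0E, 0N) to (90E, 0N) along the equator,
# roughly a quarter of Earth's circumference (about 1.0e7 m for r = 6378 km).
def _example_haversine_dist():
  return haversine_dist(0., 0., np.array([90.]), np.array([0.]))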

def spherical_to_cartesian(lon, lat):
  earth_radius = 6371e3
  x = earth_radius * np.cos(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  y = earth_radius * np.sin(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  z = earth_radius * np.sin(lat*np.pi/180.)
  return x, y, z

def cartesian_to_spherical(x, y, z):
  lat = np.arcsin(z/np.sqrt(x**2+y**2+z**2)) * 180./np.pi
  lon = np.arctan2(y,x) * 180./np.pi
  return lon, lat
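
# Illustration (added): converting to Cartesian coordinates and back recovers
# the original longitude/latitude.
def _example_spherical_cartesian_roundtrip():
  x, y, z = spherical_to_cartesian(np.array([30.]), np.array([45.]))
  lon, lat = cartesian_to_spherical(x, y, z)
  return lon, lat   # -> (array([30.]), array([45.]))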

"""
Routines to load data
"""
def load_hsnap(fpath, var, it=0, iz=0, iw=None, fpath_ckdtree='', verbose=True):
  f = Dataset(fpath, 'r')
  if verbose:
    print("Loading %s from %s" % (var, fpath))
  if f.variables[var].ndim==2:
    data = f.variables[var][it,:]
  else:
    data = f.variables[var][it,iz,:]
  if iw is not None:
    data = np.concatenate((data[:,iw:],data[:,:iw]),axis=1)
  f.close()

  data[data==0.] = np.ma.masked
  return data

def datetime64_to_float(dates):
  years  = (dates.astype('datetime64[Y]').astype(int) + 1970).astype(int)
  months = (dates.astype('datetime64[M]').astype(int) % 12 + 1).astype(int)
  days   = (dates - dates.astype('datetime64[M]') + 1).astype(int)
  return years, months, days
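
# Illustration (added): split a datetime64 value into integer year, month and
# day, e.g. np.datetime64('2010-03-15') -> (2010, 3, 15).
def _example_datetime64_to_float():
  return datetime64_to_float(np.datetime64('2010-03-15'))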

def time_average(IcD, var, t1='none', t2='none', it_ave=[], iz='all', always_use_loop=False, verbose=False, use_xr=False, load_xr_data=False):
  it_ave = np.array(it_ave)
  # --- if no it_ave is given use t1 and t2 to determine averaging indices it_ave
  if it_ave.size==0:
    # --- if t2=='none' set t2=t1 and no time average will be applied
    if isinstance(t2, str) and t2=='none':
      t2 = t1

    # --- convert to datetime64 objects if necessary
    if isinstance(t1, str):
      t1 = np.datetime64(t1)
    if isinstance(t2, str):
      t2 = np.datetime64(t2)

    # --- determine averaging interval
    it_ave = np.where( (IcD.times>=t1) & (IcD.times<=t2) )[0]

  if it_ave.size==0:
    raise ValueError(f'::: Could not find any time steps in interval t1={t1} and t2={t2}! :::')
  
  ## --- decide whether the file consists of monthly or yearly averages (or something else)
  #dt1 = (IcD.times[it_ave][1]-IcD.times[it_ave][0]).astype(float)/(86400)
  #if dt1==365 or dt1==366:
  #  ave_mode = 'yearly'
  #elif dt1==28 or dt1==29 or dt1==30 or dt1==31:
  #  ave_mode = 'monthly'
  #else:
  #  ave_mode = 'unknown'
       
  dt64type = IcD.times[0].dtype
  time_bnds = IcD.times[it_ave]
  yy, mm, dd = datetime64_to_float(time_bnds[0])
  if IcD.output_freq=='yearly':
    time_bnds = np.concatenate(([np.datetime64(f'{yy-1:04d}-{mm:02d}-{dd:02d}').astype(dt64type)],time_bnds))
  elif IcD.output_freq=='monthly':
    if mm==1:
      yy += -1
      mm = 13
    time_bnds = np.concatenate(([np.datetime64(f'{yy:04d}-{mm-1:02d}-{dd:02d}').astype(dt64type)],time_bnds))
  elif IcD.output_freq=='unknown':
    time_bnds = np.concatenate(([time_bnds[0]-(time_bnds[1]-time_bnds[0])], time_bnds))
  dt = np.diff(time_bnds).astype(IcD.dtype)
  #dt = np.ones((it_ave.size), dtype=IcD.dtype)
  #print('Warning dt set to ones!!!')

  # --- get dimensions to allocate data
  f = Dataset(IcD.flist_ts[0], 'r')
  # FIXME: If == ('time', 'lat', 'lon') works well use it everywhere
  load_hfl_type = False
  load_moc_type = False
  if f.variables[var].dimensions == ('time', 'lat', 'lon'): # e.g. for heat fluxes
    nt, nc, nx = f.variables[var].shape
    nz = 0
    load_hfl_type = True
  elif f.variables[var].dimensions == ('time', 'depth', 'lat', 'lon'): # e.g. for MOC 
    nt, nz, nc, ndummy = f.variables[var].shape 
    load_moc_type = True
  elif f.variables[var].ndim==3:
    nt, nz, nc = f.variables[var].shape
  elif f.variables[var].ndim==2: # e.g. for 2D variables like zos and mld
    nt, nc = f.variables[var].shape
    nz = 0
  f.close()

  # --- set iz to all levels
  if isinstance(iz,str) and iz=='all':
    iz = np.arange(nz)
  #else:
  #  iz = np.array([iz])

  # --- if all data is coming from one file take faster approach
  fpaths = np.unique(IcD.flist_ts[it_ave])
  if use_xr:
    #print(dt)
    if load_hfl_type:
      data_ave = (IcD.ds[var][it_ave,:,0]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif load_moc_type:
      data_ave = (IcD.ds[var][it_ave,:,:,0]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and isinstance(iz,(int,np.integer)): # data has no depth dim afterwards
      #data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis]).sum(axis=0)/dt.sum()
      data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and not isinstance(iz,(int,np.integer)): # data has depth dim afterwards
      data_ave = (IcD.ds[var][it_ave,iz,:]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    else:
      data_ave = (IcD.ds[var][it_ave,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    #dataxr = dsxr[var][it_ave,:,:].mean(axis=0)
    if load_xr_data:
      data_ave = data_ave.load().data
  elif (fpaths.size==1) and not always_use_loop:
    f = Dataset(fpaths[0], 'r')
    if load_hfl_type:
      data_ave = (f.variables[var][IcD.its[it_ave],:,0]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif load_moc_type:
      data_ave = (f.variables[var][IcD.its[it_ave],:,:,0]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and isinstance(iz,(int,np.integer)): # data has no depth dim afterwards
      data_ave = (f.variables[var][IcD.its[it_ave],iz,:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    elif nz>0 and not isinstance(iz,(int,np.integer)): # data has depth dim afterwards
      data_ave = (f.variables[var][IcD.its[it_ave],iz,:]*dt[:,np.newaxis,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    else:
      data_ave = (f.variables[var][IcD.its[it_ave],:]*dt[:,np.newaxis]).sum(axis=0, dtype='float64')/dt.sum()
    f.close()
  # --- otherwise a loop over all files is needed
  else:
    # --- allocate data
    if isinstance(iz,(int,np.integer)) or nz==0:
      data_ave = np.ma.zeros((nc), dtype=IcD.dtype)
    else:
      data_ave = np.ma.zeros((iz.size,nc), dtype=IcD.dtype)

    # --- average by looping over all files and time steps
    for ll, it in enumerate(it_ave):
      f = Dataset(IcD.flist_ts[it], 'r')
      if load_hfl_type:
        data_ave += f.variables[var][IcD.its[it],:,0]*dt[ll]/dt.sum()
      elif load_moc_type:
        data_ave += f.variables[var][IcD.its[it],:,:,0]*dt[ll]/dt.sum()
      elif nz>0:
        data_ave += f.variables[var][IcD.its[it],iz,:]*dt[ll]/dt.sum()
      else:
        data_ave += f.variables[var][IcD.its[it],:]*dt[ll]/dt.sum()
      f.close()
  data_ave = data_ave.astype(IcD.dtype)
  if verbose:
    #print(f'pyicon.time_average: var={var}: it_ave={it_ave}')
    print(f'pyicon.time_average: var={var}: it_ave={IcD.times[it_ave]}')
  return data_ave, it_ave
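
# Usage sketch (added): average the (placeholder) variable 'to' at the surface
# level over one year, assuming IcD is a pyicon.IconData object that provides
# times, flist_ts, its, output_freq and dtype.
def _example_time_average(IcD):
  data_ave, it_ave = time_average(IcD, 'to', t1='2010-01-01', t2='2010-12-31', iz=0)
  return data_ave, it_ave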

def timing(ts, string='', verbose=True):
  if ts[0]==0:
    ts = np.array([datetime.datetime.now()])
  else:
    ts = np.append(ts, [datetime.datetime.now()])
    if verbose:
      print(ts[-1]-ts[-2], ' ', (ts[-1]-ts[0]), ' '+string)
  return ts
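
# Usage sketch (added): initialise the timer with a zero element, then pass the
# growing array back in to print the time spent between consecutive calls.
def _example_timing():
  ts = timing(np.array([0]))
  # ... do some work ...
  ts = timing(ts, 'after some work')
  return ts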

def conv_gname(gname):
  gname = gname[:-4]

  ogrid = gname.split('_')[0]
  res = float(gname.split('_')[1][1:])

  lo1 = gname.split('_')[2]
  if lo1[-1]=='w':
    lo1 = -float(lo1[:-1])
  else:
    lo1 = float(lo1[:-1])
  lo2 = gname.split('_')[3]
  if lo2[-1]=='w':
    lo2 = -float(lo2[:-1])
  else:
    lo2 = float(lo2[:-1])

  la1 = gname.split('_')[4]
  if la1[-1]=='s':
    la1 = -float(la1[:-1])
  else:
    la1 = float(la1[:-1])
  la2 = gname.split('_')[5]
  if la2[-1]=='s':
    la2 = -float(la2[:-1])
  else:
    la2 = float(la2[:-1])

  lon_reg = [lo1, lo2]
  lat_reg = [la1, la2]
  return ogrid, res, lon_reg, lat_reg

"""
Grid related functions
"""
def identify_grid(path_grid, fpath_data):
  """ Identifies ICON grid in depending on clon.size in fpath_data.
  
  r2b4:  160km:    15117: OceanOnly_Icos_0158km_etopo40.nc
  r2b4a: 160km:    20480: /pool/data/ICON/grids/public/mpim/0013/icon_grid_0013_R02B04_G.nc
  r2b6:   40km:   327680: OCEANINP_pre04_LndnoLak_039km_editSLOHH2017_G.nc
  r2b8:   10km:  3729001: OceanOnly_Global_IcosSymmetric_0010km_rotatedZ37d_modified_srtm30_1min.nc
  r2b9:    5km: 14886338: OceanOnly_IcosSymmetric_4932m_rotatedZ37d_modified_srtm30_1min.nc
  r2b9a:   5km: 20971520: /pool/data/ICON/grids/public/mpim/0015/icon_grid_0015_R02B09_G.nc
  """
  
  Dgrid_list = dict()
  
  grid_name = 'r2b4'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Icos_0158km_etopo40'
  Dgrid_list[grid_name]['size'] = 15117
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
 
  grid_name = 'r2b4a'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'icon_grid_0013_R02B04_G'
  Dgrid_list[grid_name]['size'] = 20480
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b6old'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '40km'
  Dgrid_list[grid_name]['long_name'] = 'OCEANINP_pre04_LndnoLak_039km_editSLOHH2017_G'
  Dgrid_list[grid_name]['size'] = 327680
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  grid_name = 'r2b6'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '40km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 235403 
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b8'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '10km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Global_IcosSymmetric_0010km_rotatedZ37d_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 3729001
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  grid_name = 'r2b9'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '5km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_IcosSymmetric_4932m_rotatedZ37d_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 14886338
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b9a'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '5km'
  Dgrid_list[grid_name]['long_name'] = 'icon_grid_0015_R02B09_G'
  Dgrid_list[grid_name]['size'] = 20971520
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  f = Dataset(fpath_data, 'r')
  gsize = f.variables['clon'].size
  f.close()
  for grid_name in Dgrid_list.keys():
    if gsize == Dgrid_list[grid_name]['size']:
      Dgrid = Dgrid_list[grid_name]
      break
  #fpath_grid = '/pool/data/ICON/oes/input/r0003/' + Dgrid['long_name'] +'/' + Dgrid['long_name'] + '.nc'
  return Dgrid
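
# Usage sketch (added): look up which of the known ICON grids matches the
# number of cells in a data file; both arguments are placeholder paths.
def _example_identify_grid(path_grid='/path/to/grids/', fpath_data='/path/to/data.nc'):
  Dgrid = identify_grid(path_grid, fpath_data)
  print(Dgrid['name'], Dgrid['res'], Dgrid['fpath_grid'])
  return Dgrid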

def mask_big_triangles(vlon, vertex_of_cell, Tri):
  mask_bt = (
      (np.abs(  vlon[vertex_of_cell[:,0]] 
              - vlon[vertex_of_cell[:,1]])>180.)
    | (np.abs(  vlon[vertex_of_cell[:,0]] 
              - vlon[vertex_of_cell[:,2]])>180.)
                )
  Tri.set_mask(mask_bt)
  return Tri, mask_bt

def crop_tripolar_grid(lon_reg, lat_reg,
                       clon, clat, vertex_of_cell, edge_of_cell):
  ind_reg = np.where(   (clon>lon_reg[0]) 
                      & (clon<=lon_reg[1]) 
                      & (clat>lat_reg[0]) 
                      & (clat<=lat_reg[1]) )[0]
  clon = clon[ind_reg]
  clat = clat[ind_reg]
  vertex_of_cell = vertex_of_cell[ind_reg,:]
  edge_of_cell   = edge_of_cell[ind_reg,:]
  ind_reg = ind_reg
  return clon, clat, vertex_of_cell, edge_of_cell, ind_reg

def crop_regular_grid(lon_reg, lat_reg, Lon, Lat):
  # this does not work since Lon[ind_reg].shape = (64800, 360)
  # cropping probably needs to be done along each dimension separately;
  # in that case a cropping function for the data is needed as well
  lon = Lon[0,:]
  lat = Lat[:,0]
  indx = np.where((lon>=lon_reg[0]) & (lon<lon_reg[1]))[0]
  indy = np.where((lat>=lat_reg[0]) & (lat<lat_reg[1]))[0]
  lon = lon[indx]
  lat = lat[indy]
  #ind_reg = np.where(   (Lon>=lon_reg[0]) 
  #                    & (Lon <lon_reg[1]) 
  #                    & (Lat>=lat_reg[0]) 
  #                    & (Lat <lat_reg[1]) )[0]
  ind_reg = ((Lon>=lon_reg[0]) & (Lon<lon_reg[1]) & (Lat>=lat_reg[0]) & (Lat<lat_reg[1])).flatten()
  Lon, Lat = np.meshgrid(lon, lat)
  #Lon = Lon[ind_reg]
  #Lat = Lat[ind_reg]
  return Lon, Lat, lon, lat, ind_reg, indx, indy

"""
Routines related to time steps of data set
"""
def get_files_of_timeseries(path_data, fname):
  flist = np.array(glob.glob(path_data+fname))
  flist.sort()
  times_flist = np.zeros(flist.size, dtype='datetime64[s]')
  #for l, fpath in enumerate(flist):
  #  tstr = fpath.split('/')[-1].split('_')[-1][:-4]
  #  times_flist[l] = '%s-%s-%sT%s:%s:%s' % ( (tstr[:4], tstr[4:6], tstr[6:8], 
  #                                      tstr[9:11], tstr[11:13], tstr[13:15]))
  if flist.size==0:
    raise ValueError('::: Error: No file found matching %s!:::' % (path_data+fname))
  return times_flist, flist

def nctime2numpy(ncv):
  np_time = num2date(ncv[:], units=ncv.units, calendar=ncv.calendar
                  ).astype("datetime64[s]")
  return np_time

def nctime_to_datetime64(ncv_time, time_mode='num2date'):
  if time_mode=='num2date':
    np_time = num2date(ncv_time[:], units=ncv_time.units, calendar=ncv_time.calendar
                    ).astype("datetime64[s]")
  elif time_mode=='float2date':
    tps = ncv_time[:]
    secs_tot = np.round(86400.*(tps-np.floor(tps)))
    hours = np.floor(secs_tot/3600.)
    mins = np.floor((secs_tot-hours*3600.)/60.) 