import sys, glob, os
import json
# --- calculations
import numpy as np
from scipy import interpolate
from scipy.spatial import cKDTree
# --- reading data 
from netCDF4 import Dataset, num2date, date2num
import datetime
# --- plotting
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import ticker
#import my_toolbox as my
import cartopy
import cartopy.crs as ccrs
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
import cmocean
# --- debugging
from ipdb import set_trace as mybreak  
#from importlib import reload

"""
pyicon
#  icon_to_regular_grid
#  icon_to_section
  apply_ckdtree
  ckdtree_hgrid
  ckdtree_section
  calc_ckdtree
  haversine_dist
  derive_section_points
  timing
  conv_gname
  identify_grid
  crop_tripolar_grid
  crop_regular_grid
  get_files_of_timeseries
  get_varnames
  get_timesteps

  ?load_data
  ?load_grid

  ?hplot
  ?update_hplot
  ?vplot
  ?update_vplot

  #IconDataFile

  IconData
  IP_hor_sec_rect

  QuickPlotWebsite

  IDa: Icon data set (directory of files)
    - info about tsteps
    - info about vars
    - info about grid
    - IGr: Icon grid
    - IVa: Icon variable if loaded
  IIn: Icon interpolator class

  IPl: Icon plot class

IDa = pyic.IconData(fpath or path)
IDa.load_grid()
IDa.show()

IPl = pyic.hplot(IDa, 'var', iz, tstep, IIn)

"""

class pyicon_configure(object):
  def __init__(self, fpath_config):
    with open(fpath_config) as file_json:
      Dsettings = json.load(file_json)
    for key in Dsettings.keys():
      setattr(self, key, Dsettings[key])
    return

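# Usage sketch for pyicon_configure (file path and key name below are hypothetical,
# not part of pyicon): every top-level key of the JSON file becomes an attribute.
#   config = pyicon_configure('/path/to/pyicon_settings.json')
#   print(config.path_grid)
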
#def icon_to_regular_grid(data, shape, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  data_interpolated = data_interpolated.reshape(shape)
#  return data_interpolated
#
#def icon_to_section(data, distances=None, \
#                  inds=None, radius_of_influence=1000e3):
#  """
#  """
#  data_interpolated = apply_ckdtree(data, distances=distances, inds=inds, 
#                                    radius_of_influence=radius_of_influence)
#  return data_interpolated

"""
Routines to apply interpolation weights
"""
def apply_ckdtree_base(data, inds, distances, radius_of_influence=1000e3):
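  # Two interpolation modes (see branches below): 1D inds/distances means a plain
  # nearest-neighbour lookup, with target points farther away than
  # radius_of_influence masked; 2D inds/distances (several neighbours per target
  # point) means an inverse-distance-squared weighted average over the neighbours.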
  if distances.ndim == 1:
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    if data.ndim==1:
      data_interpolated = data[inds]
      data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      data_interpolated = data[:,inds]
      data_interpolated[:,distances>=radius_of_influence] = np.nan
  else:
    #raise ValueError("::: distances.ndim>1 is not properly supported yet. :::")
    #distances_ma = np.ma.masked_greater(distances, radius_of_influence)
    weights = 1.0 / distances**2
    if data.ndim==1:
      data_interpolated = np.ma.sum(weights * data[inds], axis=1) / np.ma.sum(weights, axis=1)
      #data_interpolated[distances>=radius_of_influence] = np.nan
    elif data.ndim==2:
      data_interpolated = np.ma.sum(weights[np.newaxis,:,:] * data[:,inds], axis=2) / np.ma.sum(weights[np.newaxis,:,:], axis=2)
      #data_interpolated[:,distances>=radius_of_influence] = np.nan
  data_interpolated = np.ma.masked_invalid(data_interpolated)
  return data_interpolated

def apply_ckdtree(data, fpath_ckdtree, mask=None, coordinates='clat clon', radius_of_influence=1000e3):
  """
  * credits
    function modified from pyfesom (Nikolay Koldunov)
  """
  ddnpz = np.load(fpath_ckdtree)
  #if coordinates=='clat clon':
  if ('clon' in coordinates) or (coordinates==''):
    distances = ddnpz['dckdtree_c']
    inds = ddnpz['ickdtree_c'] 
  #elif coordinates=='elat elon':
  elif 'elon' in coordinates:
    distances = ddnpz['dckdtree_e']
    inds = ddnpz['ickdtree_e'] 
  #elif coordinates=='vlat vlon':
  elif 'vlon' in coordinates:
    distances = ddnpz['dckdtree_v']
    inds = ddnpz['ickdtree_v'] 
  else:
    raise ValueError('::: Error: Unsupported coordinates: %s! ::: ' % (coordinates))

  if mask is not None:
    #if data.ndim==1:
    #  data = data[mask]
    #elif data.ndim==2:
    #  data = data[:,mask]
    if inds.ndim==1:
      inds = inds[mask]
      distances = distances[mask]
    elif inds.ndim==2:
      #raise ValueError('::: Warning: This was never checked! Please check carefully and remove this warning.:::')
      inds = inds[mask,:]
      distances = distances[mask,:]

  data_interpolated = apply_ckdtree_base(data, inds, distances, radius_of_influence)
  return data_interpolated

def interp_to_rectgrid(data, fpath_ckdtree, indx='all', indy='all', mask_reg=None, coordinates='clat clon'):
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  if not isinstance(indx, str):
    lon = lon[indx]
    lat = lat[indy]
  datai = apply_ckdtree(data, fpath_ckdtree, mask=mask_reg, coordinates=coordinates)
  if datai.ndim==1:
    datai = datai.reshape(lat.size, lon.size)
  else:
    datai = datai.reshape([data.shape[0], lat.size, lon.size])
  datai[datai==0.] = np.ma.masked
  return lon, lat, datai

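# Usage sketch for interp_to_rectgrid (the ckdtree path is hypothetical): data is a
# 1D field on ICON cells, fpath_ckdtree an interpolation file created by ckdtree_hgrid.
#   lon, lat, datai = interp_to_rectgrid(data, '/path/to/ckdtree/rectgrids/r2b6_res0.30_180W-180E_90S-90N.npz')
#   plt.pcolormesh(lon, lat, datai)
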
def interp_to_section(data, fpath_ckdtree, coordinates='clat clon'):
  ddnpz = np.load(fpath_ckdtree)
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 
  datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
  datai[datai==0.] = np.ma.masked
  return lon_sec, lat_sec, dist_sec, datai

""" 
Routines for zonal averaging
"""
def zonal_average(fpath_data, var, basin='global', it=0, fpath_fx='', fpath_ckdtree=''):

  for fp in [fpath_data, fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  f = Dataset(fpath_data, 'r')
  nz = f.variables[var].shape[1]
  coordinates = f.variables[var].coordinates
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    #print('k = %d/%d'%(k,nz))
    # --- load data
    data = f.variables[var][it,k,:]
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  f.close()
  return lat_sec, data_zave

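# Usage sketch for zonal_average (paths and the variable name are examples, not
# shipped with pyicon): compute the Atlantic zonal mean of variable 'to'.
#   lat_sec, data_zave = zonal_average('/path/to/icon_output.nc', 'to', basin='atl', it=0,
#                                      fpath_fx='/path/to/oce_fx.nc',
#                                      fpath_ckdtree='/path/to/ckdtree/rectgrids/r2b6_res0.30_180W-180E_90S-90N.npz')
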
def zonal_average_3d_data(data3d, basin='global', it=0, coordinates='clat clon', fpath_fx='', fpath_ckdtree=''):
  """ Like zonal_average, but the data array is passed directly instead of a path to the data file. This only works if the whole data array fits into memory.
  """

  for fp in [fpath_fx, fpath_ckdtree]:
    if not os.path.exists(fp):
      raise ValueError('::: Error: Cannot find file %s! :::' % (fp))

  f = Dataset(fpath_fx, 'r')
  basin_c = f.variables['basin_c'][:]
  mask_basin = np.zeros(basin_c.shape, dtype=bool)
  if basin.lower()=='atlantic' or basin=='atl':
    mask_basin[basin_c==1] = True 
  elif basin.lower()=='pacific' or basin=='pac':
    mask_basin[basin_c==3] = True 
  elif basin.lower()=='southern ocean' or basin=='soc' or basin=='so':
    mask_basin[basin_c==6] = True 
  elif basin.lower()=='indian ocean' or basin=='ind' or basin=='io':
    mask_basin[basin_c==7] = True 
  elif basin.lower()=='global' or basin=='glob' or basin=='glo':
    mask_basin[basin_c!=0] = True 
  elif basin.lower()=='indopacific' or basin=='indopac':
    mask_basin[(basin_c==3) | (basin_c==7)] = True 
  elif basin.lower()=='indopacso':
    mask_basin[(basin_c==3) | (basin_c==7) | (basin_c==6)] = True 
  f.close()
  
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon = ddnpz['lon'] 
  lat = ddnpz['lat'] 
  shape = [lat.size, lon.size]
  lat_sec = lat
  
  nz = data3d.shape[0]
  data_zave = np.ma.zeros((nz,lat_sec.size))
  for k in range(nz):
    data = 1.*data3d[k,:]
    #print('k = %d/%d'%(k,nz))
    # --- mask land points
    data[data==0] = np.ma.masked
    # --- mask not-this-basin points
    data[mask_basin==False] = np.ma.masked
    # --- go to normal np.array (not np.ma object)
    data = data.filled(0.)
    # --- interpolate to rectangular grid
    datai = apply_ckdtree(data, fpath_ckdtree, coordinates=coordinates)
    datai = datai.reshape(shape)
    # --- go back to masked array
    datai = np.ma.array(datai, mask=datai==0.)
    # --- do zonal average
    data_zave[k,:] = datai.mean(axis=1)
  return lat_sec, data_zave

def zonal_average_atmosphere(data3d, ind_lev, fac, fpath_ckdtree='', coordinates='clat clon',):
  icall = np.arange(data3d.shape[1],dtype=int)
  datavi = data3d[ind_lev,icall]*fac+data3d[ind_lev+1,icall]*(1.-fac)
  lon, lat, datavihi = interp_to_rectgrid(datavi, fpath_ckdtree, coordinates=coordinates)
  data_zave = datavihi.mean(axis=2)
  return lat, data_zave

def zonal_section_3d_data(data3d, fpath_ckdtree, coordinates):
  """
  (
   lon_sec, lat_sec, dist_sec, data_sec 
  ) = pyic.zonal_section_3d_data(tbias, 
    fpath_ckdtree=path_ckdtree+'sections/r2b4_nps100_30W80S_30W80N.npz')
  """
  # --- load ckdtree
  ddnpz = np.load(fpath_ckdtree)
  #dckdtree = ddnpz['dckdtree']
  #ickdtree = ddnpz['ickdtree'] 
  lon_sec = ddnpz['lon_sec'] 
  lat_sec = ddnpz['lat_sec'] 
  dist_sec = ddnpz['dist_sec'] 

  nz = data3d.shape[0]
  data_sec = np.ma.zeros((nz,dist_sec.size))
  for k in range(nz):
    data_sec[k,:] = apply_ckdtree(data3d[k,:], fpath_ckdtree, coordinates=coordinates)
  return lon_sec, lat_sec, dist_sec, data_sec

def lonlat2str(lon, lat):
  if lon<0:
    lon_s = '%gW'%(-lon)
  else:
    lon_s = '%gE'%(lon)
  if lat<0:
    lat_s = '%gS'%(-lat)
  else:
    lat_s = '%gN'%(lat)
  return lon_s, lat_s

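# Example: lonlat2str(-30.5, 45.) returns ('30.5W', '45N'); negative longitudes/latitudes
# are labelled W/S, positive ones E/N ('%g' drops trailing zeros).
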
"""
Routines to calculate interpolation weights:

  | ckdtree_hgrid
  | ckdtree_section
  |-->| ckdtree_points
      |--> calc_ckdtree
"""

def ckdtree_hgrid(lon_reg, lat_reg, res, 
                 #fpath_grid_triangular='', 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='',
                 gname='',
                 tgname='',
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 n_nearest_neighbours=1,
                 ):
  """
  """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(lon_reg[0], lat_reg[0])
  lon2str, lat2str = lonlat2str(lon_reg[1], lat_reg[1])

  if n_nearest_neighbours==1:
    fname = '%s_res%3.2f_%s-%s_%s-%s.npz'%(tgname, res, lon1str, lon2str, lat1str, lat2str) 
  else:
    fname = '%s_res%3.2f_%dnn_%s-%s_%s-%s.npz'%(tgname, res, n_nearest_neighbours, lon1str, lon2str, lat1str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  # --- make rectangular grid 
  lon = np.arange(lon_reg[0],lon_reg[1],res)
  lat = np.arange(lat_reg[0],lat_reg[1],res)
  Lon, Lat = np.meshgrid(lon, lat)

  lon_o = Lon.flatten()
  lat_o = Lat.flatten()
  
  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid, n_nearest_neighbours=n_nearest_neighbours)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon=lon,
            lat=lat,
            sname=sname,
            gname=gname,
            tgname=tgname,
            **Dind_dist,
           )
  return

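# Usage sketch for ckdtree_hgrid (all paths and names are hypothetical): compute and
# store nearest-neighbour weights from a triangular ICON grid to a regular 0.3 deg grid.
#   ckdtree_hgrid(lon_reg=[-180., 180.], lat_reg=[-90., 90.], res=0.3,
#                 fname_tgrid='icon_grid_0013_R02B04_G.nc',
#                 path_tgrid='/path/to/grids/',
#                 path_ckdtree='/path/to/ckdtree/rectgrids/',
#                 sname='global_0.3', gname='r2b4a')
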
def ckdtree_section(p1, p2, npoints=101, 
                 fname_tgrid='',
                 path_tgrid='',
                 path_ckdtree='',
                 sname='auto',
                 gname='',
                 tgname='',
                 n_nearest_neighbours=1,
                 load_cgrid=True,
                 load_egrid=True,
                 load_vgrid=True,
                 ):
  """
  """
  if tgname=='':
    Drgrid = identify_grid(path_tgrid, path_tgrid+fname_tgrid) 
    tgname = Drgrid['name']
  lon1str, lat1str = lonlat2str(p1[0], p1[1])
  lon2str, lat2str = lonlat2str(p2[0], p2[1])

  fname = '%s_nps%d_%s%s_%s%s.npz'%(tgname, npoints, lon1str, lat1str, lon2str, lat2str) 
  fpath_ckdtree = path_ckdtree+fname
  fpath_tgrid   = path_tgrid+fname_tgrid

  # --- derive sname from the file name (moved here: fpath_ckdtree is only defined at this point)
  if sname=='auto':
    sname = fpath_ckdtree.split('/')[-1][:-4]

  # --- derive section points
  lon_sec, lat_sec, dist_sec = derive_section_points(p1, p2, npoints)
  lon_o = lon_sec
  lat_o = lat_sec

  # --- calculate ckdtree
  Dind_dist = ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=load_cgrid, load_egrid=load_egrid, load_vgrid=load_vgrid, n_nearest_neighbours=n_nearest_neighbours)

  # --- save grid
  print('Saving grid file: %s' % (fpath_ckdtree))
  np.savez(fpath_ckdtree,
            lon_sec=lon_sec,
            lat_sec=lat_sec,
            dist_sec=dist_sec,
            sname=sname,
            gname=gname,
            **Dind_dist
           )
  return Dind_dist['dckdtree_c'], Dind_dist['ickdtree_c'], lon_sec, lat_sec, dist_sec

def ckdtree_points(fpath_tgrid, lon_o, lat_o, load_cgrid=True, load_egrid=True, load_vgrid=True, n_nearest_neighbours=1):
  """
  """
  # --- load triangular grid
  f = Dataset(fpath_tgrid, 'r')
  if load_cgrid:
    clon = f.variables['clon'][:] * 180./np.pi
    clat = f.variables['clat'][:] * 180./np.pi
  if load_egrid:
    elon = f.variables['elon'][:] * 180./np.pi
    elat = f.variables['elat'][:] * 180./np.pi
  if load_vgrid:
    vlon = f.variables['vlon'][:] * 180./np.pi
    vlat = f.variables['vlat'][:] * 180./np.pi
  f.close()

  # --- ckdtree for cells, edges and vertices
  if load_cgrid:
    dckdtree_c, ickdtree_c = calc_ckdtree(lon_i=clon, lat_i=clat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )
  if load_egrid:
    dckdtree_e, ickdtree_e = calc_ckdtree(lon_i=elon, lat_i=elat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )
  if load_vgrid:
    dckdtree_v, ickdtree_v = calc_ckdtree(lon_i=vlon, lat_i=vlat,
                                          lon_o=lon_o, lat_o=lat_o,
                                          n_nearest_neighbours=n_nearest_neighbours,
                                          )

  # --- save dict
  Dind_dist = dict()
  if load_cgrid: 
    Dind_dist['dckdtree_c'] = dckdtree_c
    Dind_dist['ickdtree_c'] = ickdtree_c
  if load_egrid: 
    Dind_dist['dckdtree_e'] = dckdtree_e
    Dind_dist['ickdtree_e'] = ickdtree_e
  if load_vgrid: 
    Dind_dist['dckdtree_v'] = dckdtree_v
    Dind_dist['ickdtree_v'] = ickdtree_v
  return Dind_dist

def calc_ckdtree(lon_i, lat_i, lon_o, lat_o, n_nearest_neighbours=1):
  """
  """
  # --- do ckdtree
  if False:
    lzip_i = list(zip(lon_i, lat_i))
    tree = cKDTree(lzip_i)
    lzip_o = list(zip(lon_o, lat_o))
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=1)
  else:
    #print('calc_ckdtree by cartesian distances')
    xi, yi, zi = spherical_to_cartesian(lon_i, lat_i)
    xo, yo, zo = spherical_to_cartesian(lon_o, lat_o)

    lzip_i = list(zip(xi, yi, zi))
    tree = cKDTree(lzip_i)
    lzip_o = list(zip(xo, yo, zo))
    dckdtree, ickdtree = tree.query(lzip_o , k=n_nearest_neighbours, n_jobs=1)
  return dckdtree, ickdtree

def calc_vertical_interp_weights(zdata, levs):
  """ Calculate vertical interpolation weights and indices.

Call example:
icall, ind_lev, fac = calc_vertical_interp_weights(zdata, levs)

Afterwards do interpolation like this:
datai = data[ind_lev,icall]*fac+data[ind_lev+1,icall]*(1.-fac)
  """
  nza = zdata.shape[0]
  # --- initializations
  ind_lev = np.zeros((levs.size,zdata.shape[1]),dtype=int)
  icall = np.arange(zdata.shape[1],dtype=int)
  icall = icall[np.newaxis,:]
  fac = np.ma.zeros((levs.size,zdata.shape[1]))
  for k, lev in enumerate(levs):
    #print(f'k = {k}')
    # --- find level below critical level
    ind_lev[k,:] = (zdata<levs[k]).sum(axis=0)-1
    ind_lev[k,ind_lev[k,:]==(nza-1)]=-1
    # --- zdata below and above lev 
    zd1 = zdata[ind_lev[k,:],icall]
    zd2 = zdata[ind_lev[k,:]+1,icall]
    # --- linear interpolation to get weight (fac=1 if lev=zd1)
    fac[k,:] = (0.-1.)/(zd2-zd1)*(levs[k]-zd1)+1.
  # --- mask values which are out of range
  fac[ind_lev==-1] = np.ma.masked 
  return icall, ind_lev, fac

"""
Routines to calculate grids and sections
"""

def derive_section_points(p1, p2, npoints=101,):
  # --- derive section points
  if p1[0]==p2[0]:
    lon_sec = p1[0]*np.ones((npoints)) 
    lat_sec = np.linspace(p1[1],p2[1],npoints)
  else:
    lon_sec = np.linspace(p1[0],p2[0],npoints)
    lat_sec = (p2[1]-p1[1])/(p2[0]-p1[0])*(lon_sec-p1[0])+p1[1]
  dist_sec = haversine_dist(lon_sec[0], lat_sec[0], lon_sec, lat_sec)
  return lon_sec, lat_sec, dist_sec

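# Example: a meridional section along 30W from 80S to 80N with 101 points; dist_sec
# is the haversine distance (in m) of each point from the first one.
#   lon_sec, lat_sec, dist_sec = derive_section_points(p1=[-30., -80.], p2=[-30., 80.], npoints=101)
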
def calc_north_pole_interp_grid_points(lat_south=60., res=100e3):
  """
  Compute grid points optimized for plotting the North Pole area.

  Parameters:
  -----------
  lat_south : float
      Southern latitude of target grid.
  res : float
      resolution of target grid

  Returns:
  --------
  Lon_np, Lat_np: ndarray
      Longitude and latitude of target grid as 2d array.

  Examples:
  ---------
  Lon_np, Lat_np = calc_north_pole_interp_grid_points(lat_south=60., res=100e3)

  """
  R = 6371e3
  x1, y1, z1 = spherical_to_cartesian(  0., lat_south)
  x2, y2, z2 = spherical_to_cartesian( 90., lat_south)
  x3, y3, z3 = spherical_to_cartesian(180., lat_south)
  x4, y4, z4 = spherical_to_cartesian(270., lat_south)

  lon1, lat1 = cartesian_to_spherical(x1, y1, z1)
  lon2, lat2 = cartesian_to_spherical(x2, y2, z2)
  lon3, lat3 = cartesian_to_spherical(x3, y3, z3)
  lon4, lat4 = cartesian_to_spherical(x4, y4, z4)

  #x1 = R * np.cos(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y1 = R * np.sin(  0.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z1 = R * np.sin(lat_south*np.pi/180.)
  #x2 = R * np.cos( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y2 = R * np.sin( 90.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z2 = R * np.sin(lat_south*np.pi/180.)
  #x3 = R * np.cos(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y3 = R * np.sin(180.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z3 = R * np.sin(lat_south*np.pi/180.)
  #x4 = R * np.cos(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #y4 = R * np.sin(270.*np.pi/180.) * np.cos(lat_south*np.pi/180.)
  #z4 = R * np.sin(lat_south*np.pi/180.)
  #
  #lat1 = np.arcsin(z1/np.sqrt(x1**2+y1**2+z1**2)) * 180./np.pi
  #lon1 = np.arctan2(y1,x1) * 180./np.pi
  #lat2 = np.arcsin(z2/np.sqrt(x2**2+y2**2+z2**2)) * 180./np.pi
  #lon2 = np.arctan2(y2,x2) * 180./np.pi
  #lat3 = np.arcsin(z3/np.sqrt(x3**2+y3**2+z3**2)) * 180./np.pi
  #lon3 = np.arctan2(y3,x3) * 180./np.pi
  #lat4 = np.arcsin(z4/np.sqrt(x4**2+y4**2+z4**2)) * 180./np.pi
  #lon4 = np.arctan2(y4,x4) * 180./np.pi
  
  xnp = np.arange(x3, x1+res, res)
  ynp = np.arange(y4, y2+res, res)
  
  Xnp, Ynp = np.meshgrid(xnp, ynp)
  Znp = R * np.sin(lat1*np.pi/180.) * np.ones((ynp.size,xnp.size))
  Lon_np = np.arctan2(Ynp,Xnp) * 180./np.pi
  Lat_np = np.arcsin(Znp/np.sqrt(Xnp**2+Ynp**2+Znp**2)) * 180./np.pi
  return Lon_np, Lat_np

"""
Routines related to spherical geometry
"""
def haversine_dist(lon_ref, lat_ref, lon_pts, lat_pts, degree=True):
  # for details see http://en.wikipedia.org/wiki/Haversine_formula
  r = 6378.e3
  if degree:
    lon_ref = lon_ref * np.pi/180.
    lat_ref = lat_ref * np.pi/180.
    lon_pts = lon_pts * np.pi/180.
    lat_pts = lat_pts * np.pi/180.
  arg = np.sqrt(   np.sin(0.5*(lat_pts-lat_ref))**2 
                 + np.sin(0.5*(lon_pts-lon_ref))**2
                 * np.cos(lat_ref)*np.cos(lat_pts) )
  dist = 2*r * np.arcsin(arg)
  return dist

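# Example: the distance between (0E, 0N) and (90E, 0N) is a quarter of Earth's
# circumference, about 1.002e7 m (the routine uses r = 6378 km and returns metres).
#   haversine_dist(0., 0., 90., 0.)
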
def spherical_to_cartesian(lon, lat):
  earth_radius = 6371e3
  x = earth_radius * np.cos(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  y = earth_radius * np.sin(lon*np.pi/180.) * np.cos(lat*np.pi/180.)
  z = earth_radius * np.sin(lat*np.pi/180.)
  return x, y, z

def cartesian_to_spherical(x, y, z):
  lat = np.arcsin(z/np.sqrt(x**2+y**2+z**2)) * 180./np.pi
  lon = np.arctan2(y,x) * 180./np.pi
  return lon, lat
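
# Example: the two routines are inverse to each other (up to round-off):
#   x, y, z = spherical_to_cartesian(30., 45.)
#   cartesian_to_spherical(x, y, z)   # -> (~30.0, ~45.0)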

"""
Routines to load data
"""
def load_hsnap(fpath, var, it=0, iz=0, fpath_ckdtree=''):
  f = Dataset(fpath, 'r')
  print("Loading %s from %s" % (var, fpath))
  if f.variables[var].ndim==2:
    data = f.variables[var][it,:]
  else:
    data = f.variables[var][it,iz,:]
  f.close()

  data[data==0.] = np.ma.masked
  return data

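# Usage sketch for load_hsnap (path and variable name are hypothetical): load one
# horizontal snapshot of 'to' at time index it=0 and depth index iz=0.
#   data = load_hsnap('/path/to/icon_output.nc', 'to', it=0, iz=0)
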
def time_average(IcD, var, t1='none', t2='none', it_ave=[], iz='all', always_use_loop=False):
  it_ave = np.array(it_ave)
  # --- if no it_ave is given use t1 and t2 to determine averaging indices it_ave
  if it_ave.size==0:
    # --- if t2=='none' set t2=t1 and no time average will be applied
    if isinstance(t2, str) and t2=='none':
      t2 = t1

    # --- convert to datetime64 objects if necessary
    if isinstance(t1, str):
      t1 = np.datetime64(t1)
    if isinstance(t2, str):
      t2 = np.datetime64(t2)

    # --- determine averaging interval
    it_ave = np.where( (IcD.times>=t1) & (IcD.times<=t2) )[0]

  if it_ave.size==0:
    raise ValueError(f'::: Could not find any time steps in interval t1={t1} and t2={t2}! :::')

  # --- get dimensions to allocate data
  f = Dataset(IcD.flist_ts[0], 'r')
  # FIXME: If == ('time', 'lat', 'lon') works well use it everywhere
  load_hfl_type = False
  load_moc_type = False
  if f.variables[var].dimensions == ('time', 'lat', 'lon'): # e.g. for heat fluxes
    nt, nc, nx = f.variables[var].shape
    nz = 0
    load_hfl_type = True
  elif f.variables[var].dimensions == ('time', 'depth', 'lat', 'lon'): # is the case for moc data
    nt, nz, nc, ndummy = f.variables[var].shape 
    load_moc_type = True
  elif f.variables[var].ndim==3:
    nt, nz, nc = f.variables[var].shape
  elif f.variables[var].ndim==2: # for 2D variables like zos and mld
    nt, nc = f.variables[var].shape
    nz = 0
  f.close()

  # --- set iz to all levels
  if isinstance(iz,str) and iz=='all':
    iz = np.arange(nz)
  #else:
  #  iz = np.array([iz])

  # --- if all data is coming from one file take faster approach
  fpaths = np.unique(IcD.flist_ts[it_ave])
  if (fpaths.size==1) and not always_use_loop:
    f = Dataset(fpaths[0], 'r')
    if load_hfl_type:
      data_ave = f.variables[var][IcD.its[it_ave],:,0].mean(axis=0)
    elif load_moc_type:
      data_ave = f.variables[var][IcD.its[it_ave],:,:,0].mean(axis=0)
    elif nz>0:
      data_ave = f.variables[var][IcD.its[it_ave],iz,:].mean(axis=0)
    else:
      data_ave = f.variables[var][IcD.its[it_ave],:].mean(axis=0)
    f.close()
  # --- otherwise a loop over all files is needed
  else:
    # --- allocate data
    if isinstance(iz,(int,np.integer)) or nz==0:
      data_ave = np.ma.zeros((nc))
    else:
      data_ave = np.ma.zeros((iz.size,nc))

    # --- average by looping over all files and time steps
    for l in it_ave:
      f = Dataset(IcD.flist_ts[l], 'r')
      if load_hfl_type:
        data_ave += f.variables[var][IcD.its[l],:,0]/it_ave.size
      elif load_moc_type:
        data_ave += f.variables[var][IcD.its[l],:,:,0]/it_ave.size
      elif nz>0:
        data_ave += f.variables[var][IcD.its[l],iz,:]/it_ave.size
      else:
        data_ave += f.variables[var][IcD.its[l],:]/it_ave.size
      f.close()
  print(f'pyicon.time_average: var={var}: it_ave={it_ave}')
  return data_ave, it_ave

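# Usage sketch for time_average; IcD is assumed to be a pyicon IconData object
# providing times, flist_ts and its (the dates are made up):
#   data_ave, it_ave = time_average(IcD, 'to', t1='2001-01-01', t2='2001-12-31', iz='all')
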
def timing(ts, string=''):
  if ts[0]==0:
    ts = np.array([datetime.datetime.now()])
  else:
    ts = np.append(ts, [datetime.datetime.now()])
    print(ts[-1]-ts[-2], ' ', (ts[-1]-ts[0]), ' '+string)
  return ts

def conv_gname(gname):
  gname = gname[:-4]

  ogrid = gname.split('_')[0]
  res = float(gname.split('_')[1][1:])

  lo1 = gname.split('_')[2]
  if lo1[-1]=='w':
    lo1 = -float(lo1[:-1])
  else:
    lo1 = float(lo1[:-1])
  lo2 = gname.split('_')[3]
  if lo2[-1]=='w':
    lo2 = -float(lo2[:-1])
  else:
    lo2 = float(lo2[:-1])

  la1 = gname.split('_')[4]
  if la1[-1]=='s':
    la1 = -float(la1[:-1])
  else:
    la1 = float(la1[:-1])
  la2 = gname.split('_')[5]
  if la2[-1]=='s':
    la2 = -float(la2[:-1])
  else:
    la2 = float(la2[:-1])

  lon_reg = [lo1, lo2]
  lat_reg = [la1, la2]
  return ogrid, res, lon_reg, lat_reg

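# Example (assuming an old-style grid name of the form
# '<ogrid>_r<res>_<lon1><e/w>_<lon2><e/w>_<lat1><n/s>_<lat2><n/s>.npz'):
#   conv_gname('r2b6_r0.3_30w_30e_30s_30n.npz')
#   # -> ('r2b6', 0.3, [-30.0, 30.0], [-30.0, 30.0])
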
"""
Grid related functions
"""
def identify_grid(path_grid, fpath_data):
  """ Identify the ICON grid depending on clon.size in fpath_data.
  
  r2b4:  160km:    15117: OceanOnly_Icos_0158km_etopo40.nc
  r2b4a: 160km:    20480: /pool/data/ICON/grids/public/mpim/0013/icon_grid_0013_R02B04_G.nc
  r2b6:   40km:   327680: OCEANINP_pre04_LndnoLak_039km_editSLOHH2017_G.nc
  r2b8:   10km:  3729001: OceanOnly_Global_IcosSymmetric_0010km_rotatedZ37d_modified_srtm30_1min.nc
  r2b9:    5km: 14886338: OceanOnly_IcosSymmetric_4932m_rotatedZ37d_modified_srtm30_1min.nc
  r2b9a:   5km: 20971520: /pool/data/ICON/grids/public/mpim/0015/icon_grid_0015_R02B09_G.nc
  """
  
  Dgrid_list = dict()
  
  grid_name = 'r2b4'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Icos_0158km_etopo40'
  Dgrid_list[grid_name]['size'] = 15117
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
 
  grid_name = 'r2b4a'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '160km'
  Dgrid_list[grid_name]['long_name'] = 'icon_grid_0013_R02B04_G'
  Dgrid_list[grid_name]['size'] = 20480
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b6old'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '40km'
  Dgrid_list[grid_name]['long_name'] = 'OCEANINP_pre04_LndnoLak_039km_editSLOHH2017_G'
  Dgrid_list[grid_name]['size'] = 327680
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  grid_name = 'r2b6'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '40km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 235403 
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b8'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '10km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_Global_IcosSymmetric_0010km_rotatedZ37d_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 3729001
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  grid_name = 'r2b9'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '5km'
  Dgrid_list[grid_name]['long_name'] = 'OceanOnly_IcosSymmetric_4932m_rotatedZ37d_modified_srtm30_1min'
  Dgrid_list[grid_name]['size'] = 14886338
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '/' + Dgrid_list[grid_name]['long_name'] + '.nc'

  grid_name = 'r2b9a'; Dgrid_list[grid_name] = dict()
  Dgrid_list[grid_name]['name'] = grid_name
  Dgrid_list[grid_name]['res'] = '5km'
  Dgrid_list[grid_name]['long_name'] = 'icon_grid_0015_R02B09_G'
  Dgrid_list[grid_name]['size'] = 20971520
  Dgrid_list[grid_name]['fpath_grid'] = path_grid + Dgrid_list[grid_name]['long_name'] + '.nc'
  
  f = Dataset(fpath_data, 'r')
  gsize = f.variables['clon'].size
  f.close()
  Dgrid = None
  for grid_name in Dgrid_list.keys():
    if gsize == Dgrid_list[grid_name]['size']:
      Dgrid = Dgrid_list[grid_name]
      break
  if Dgrid is None:
    raise ValueError('::: Error: Could not identify grid: no known grid has clon.size = %d! :::' % (gsize))
  #fpath_grid = '/pool/data/ICON/oes/input/r0003/' + Dgrid['long_name'] +'/' + Dgrid['long_name'] + '.nc'
  return Dgrid

def mask_big_triangles(vlon, vertex_of_cell, Tri):
  mask_bt = (
      (np.abs(  vlon[vertex_of_cell[:,0]] 
              - vlon[vertex_of_cell[:,1]])>180.)
    | (np.abs(  vlon[vertex_of_cell[:,0]] 
              - vlon[vertex_of_cell[:,2]])>180.)
                )
  Tri.set_mask(mask_bt)
  return Tri, mask_bt

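# Usage sketch for mask_big_triangles: mask triangles that wrap around the periodic
# longitude boundary before plotting the triangular grid (Tri is a matplotlib Triangulation):
#   from matplotlib.tri import Triangulation
#   Tri = Triangulation(vlon, vlat, triangles=vertex_of_cell)
#   Tri, mask_bt = mask_big_triangles(vlon, vertex_of_cell, Tri)
#   plt.tripcolor(Tri, data)
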
def crop_tripolar_grid(lon_reg, lat_reg,
                       clon, clat, vertex_of_cell, edge_of_cell):
  ind_reg = np.where(   (clon>lon_reg[0]) 
                      & (clon<=lon_reg[1]) 
                      & (clat>lat_reg[0]) 
                      & (clat<=lat_reg[1]) )[0]
  clon = clon[ind_reg]
  clat = clat[ind_reg]
  vertex_of_cell = vertex_of_cell[ind_reg,:]
  edge_of_cell   = edge_of_cell[ind_reg,:]
  return clon, clat, vertex_of_cell, edge_of_cell, ind_reg

def crop_regular_grid(lon_reg, lat_reg, Lon, Lat):
  # this does not work since Lon[ind_reg].shape = (64800, 360)
  # cropping probably needs to be done separately along each dimension;
  # in that case a cropping function for the data is needed as well
  lon = Lon[0,:]
  lat = Lat[:,0]
  indx = np.where((lon>=lon_reg[0]) & (lon<lon_reg[1]))[0]
  indy = np.where((lat>=lat_reg[0]) & (lat<lat_reg[1]))[0]
  lon = lon[indx]
  lat = lat[indy]
  #ind_reg = np.where(   (Lon>=lon_reg[0]) 
  #                    & (Lon <lon_reg[1]) 
  #                    & (Lat>=lat_reg[0]) 
  #                    & (Lat <lat_reg[1]) )[0]
  ind_reg = ((Lon>=lon_reg[0]) & (Lon<lon_reg[1]) & (Lat>=lat_reg[0]) & (Lat<lat_reg[1])).flatten()
  Lon, Lat = np.meshgrid(lon, lat)
  #Lon = Lon[ind_reg]
  #Lat = Lat[ind_reg]
  return Lon, Lat, lon, lat, ind_reg, indx, indy

"""
Routines related to time steps of data set
"""
def get_files_of_timeseries(path_data, fname):
  flist = np.array(glob.glob(path_data+fname))
  flist.sort()
  times_flist = np.zeros(flist.size, dtype='datetime64[s]')
  #for l, fpath in enumerate(flist):
  #  tstr = fpath.split('/')[-1].split('_')[-1][:-4]
  #  times_flist[l] = '%s-%s-%sT%s:%s:%s' % ( (tstr[:4], tstr[4:6], tstr[6:8], 
  #                                      tstr[9:11], tstr[11:13], tstr[13:15]))
  if flist.size==0:
    raise ValueError('::: Error: No file found matching %s!:::' % (path_data+fname))
  return times_flist, flist

def nctime2numpy(ncv):
  np_time = num2date(ncv[:], units=ncv.units, calendar=ncv.calendar
                  ).astype("datetime64[s]")
  return np_time

def nctime_to_datetime64(ncv_time, time_mode='num2date'):
  if time_mode=='num2date':
    np_time = num2date(ncv_time[:], units=ncv_time.units, calendar=ncv_time.calendar
                    ).astype("datetime64[s]")
  elif time_mode=='float2date':
    tps = ncv_time[:]
    secs_tot = np.round(86400.*(tps-np.floor(tps)))
    hours = np.floor(secs_tot/3600.)
    mins = np.floor((secs_tot-hours*3600.)/60.) 
    secs = secs_tot - hours*3600. - mins*60.
    tstrs = [0]*tps.size
    for l in range(tps.size):
      tp = tps[l]
      tstr = '%s-%s-%sT%02d:%02d:%02d' % (str(tp)[:4], str(tp)[4:6], str(tp)[6:8], hours[l], mins[l], secs[l]) 
      tstrs[l] = tstr
    np_time = np.array(tstrs, dtype='datetime64')
  else:
    raise ValueError('::: Error: Wrong time_mode %s in get_timesteps! :::' % time_mode)
  return np_time
  

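# Example for time_mode='float2date': judging from the parsing above, float time
# stamps are interpreted as YYYYMMDD.<fraction of day>, e.g. 20010101.5 becomes
# numpy.datetime64('2001-01-01T12:00:00').
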
def get_timesteps(flist, time_mode='num2date'):
  #f = Dataset(flist[0], 'r')
  #nt = f.variables['time'].size 
  #f.close()
  #times = np.zeros((len(flist)*nt))
  #times = np.array(['2010']*(len(flist)*nt), dtype='datetime64[s]')
  #its = np.zeros((len(flist)*nt), dtype='int')
  #flist_ts = np.zeros((len(flist)*nt), dtype='<U200')
  times = np.array([], dtype='datetime64[s]')
  its = np.array([], dtype='int')
  flist_ts = np.array([], dtype='<U200')
  for nn, fpath in enumerate(flist):
    f = Dataset(fpath, 'r')
    ncv_time = f.variables['time']
    nt = f.variables['time'].size 
    np_time = nctime_to_datetime64(ncv_time, time_mode=time_mode)
    ##if time_mode=='num2date':
    ##  np_time = num2date(ncv[:], units=ncv.units, calendar=ncv.calendar
    ##                  ).astype("datetime64[s]")
    ##elif time_mode=='float2date':
    ##  tps = ncv[:]
    ##  secs_tot = np.round(86400.*(tps-np.floor(tps)))
    ##  hours = np.floor(secs_tot/3600.)
    ##  mins = np.floor((secs_tot-hours*3600.)/60.) 
    ##  secs = secs_tot - hours*3600. - mins*60.
    ##  tstrs = [0]*tps.size
    ##  for l in range(tps.size):
    ##    tp = tps[l]
    ##    tstr = '%s-%s-%sT%02d:%02d:%02d' % (str(tp)[:4], str(tp)[4:6], str(tp)[6:8], hours[l], mins[l], secs[l]) 
    ##    tstrs[l] = tstr
    ##  np_time = np.array(tstrs, dtype='datetime64')
    ##else:
    ##  raise ValueError('::: Error: Wrong time_mode %s in get_timesteps! :::' % time_mode)
    #mybreak()
    #times[nn*nt:(nn+1)*nt] = np_time
    #flist_ts[nn*nt:(nn+1)*nt] = np.array([fpath]*nt)
    #its[nn*nt:(nn+1)*nt] = np.arange(nt)
    times    = np.concatenate((times, np_time))
    flist_ts = np.concatenate((flist_ts, np.array([fpath]*nt).astype('<U200')))
    its      = np.concatenate((its, np.arange(nt, dtype='int')))
    f.close()
  return times, flist_ts, its

def get_varnames(fpath, skip_vars=[]):
  f = Dataset(fpath, 'r')
  varnames = f.variables.keys()
  f.close()
  #varnames = [var for var in varnames if not var.startswith('clon')]
  for skip_var in skip_vars:
    varnames = [var for var in varnames if not var.startswith(skip_var)]
  return varnames

def indfind(elements, vector):
  """ Return indices of the entries in vector that most closely match elements.
  """
  # convert elements to np array                                                    
  if type(elements) is int or type(elements) is float:                              
    elements = np.array([elements])
  elif type(elements) is list: