@@ -41,24 +41,24 @@ if xarray_enabled():
     from xarray import DataArray

-_COORD_PAIR_MAP = {"XLAT" : ("XLAT", "XLONG"),
-                   "XLONG" : ("XLAT", "XLONG"),
-                   "XLAT_M" : ("XLAT_M", "XLONG_M"),
-                   "XLONG_M" : ("XLAT_M", "XLONG_M"),
-                   "XLAT_U" : ("XLAT_U", "XLONG_U"),
-                   "XLONG_U" : ("XLAT_U", "XLONG_U"),
-                   "XLAT_V" : ("XLAT_V", "XLONG_V"),
-                   "XLONG_V" : ("XLAT_V", "XLONG_V"),
-                   "CLAT" : ("CLAT", "CLONG"),
-                   "CLONG" : ("CLAT", "CLONG")}
+_COORD_PAIR_MAP = {"XLAT": ("XLAT", "XLONG"),
+                   "XLONG": ("XLAT", "XLONG"),
+                   "XLAT_M": ("XLAT_M", "XLONG_M"),
+                   "XLONG_M": ("XLAT_M", "XLONG_M"),
+                   "XLAT_U": ("XLAT_U", "XLONG_U"),
+                   "XLONG_U": ("XLAT_U", "XLONG_U"),
+                   "XLAT_V": ("XLAT_V", "XLONG_V"),
+                   "XLONG_V": ("XLAT_V", "XLONG_V"),
+                   "CLAT": ("CLAT", "CLONG"),
+                   "CLONG": ("CLAT", "CLONG")}

 _COORD_VARS = ("XLAT", "XLONG", "XLAT_M", "XLONG_M", "XLAT_U", "XLONG_U",
                "XLAT_V", "XLONG_V", "CLAT", "CLONG")

 _LAT_COORDS = ("XLAT", "XLAT_M", "XLAT_U", "XLAT_V", "CLAT")

-_LON_COORDS = ("XLONG", "XLONG_M", "XLONG_U","XLONG_V", "CLONG")
+_LON_COORDS = ("XLONG", "XLONG_M", "XLONG_U", "XLONG_V", "CLONG")

 _TIME_COORD_VARS = ("XTIME",)
@@ -210,11 +210,11 @@ def _generator_copy(gen):
     if module is not None:
         try:
             try:
-                argd = {key:argvals.locals[key] for key in argvals.args}
+                argd = {key: argvals.locals[key] for key in argvals.args}
                 res = module.get(funcname)(**argd)
             except AttributeError:
                 res = getattr(module, funcname)(**argd)
-        except:
+        except Exception:
             # This is the old way it used to work, but it looks like this was
             # fixed by Python.
             try:
@@ -226,9 +226,9 @@ def _generator_copy(gen):
         import __main__

         try:
-            argd = {key:argvals.locals[key] for key in argvals.args}
+            argd = {key: argvals.locals[key] for key in argvals.args}
             res = getattr(__main__, funcname)(**argd)
-        except:
+        except Exception:
             # This was the old way it used to work, but appears to have
             # been fixed by Python.
             res = getattr(__main__, funcname)(**argvals.locals)
@@ -237,7 +237,7 @@ def _generator_copy(gen):
 def test():
-    q = [1,2,3]
+    q = [1, 2, 3]
     for i in q:
         yield i
@@ -388,13 +388,13 @@ def get_iterable(wrfseq):
         if isinstance(wrfseq, (list, tuple, IterWrapper)):
             return wrfseq
         else:
-            return IterWrapper(wrfseq) # generator/custom iterable class
+            return IterWrapper(wrfseq)  # generator/custom iterable class

     else:
         if isinstance(wrfseq, dict):
             return wrfseq
         else:
-            return dict(wrfseq) # generator/custom iterable dict class
+            return dict(wrfseq)  # generator/custom iterable dict class


 # Helper to extract masked arrays from DataArrays that convert to NaN
@@ -433,9 +433,9 @@ def to_np(array):
     try:
         fill_value = array.attrs["_FillValue"]
     except AttributeError:
-        result = array # Not a DataArray
+        result = array  # Not a DataArray
     except KeyError:
-        result = array.values # Does not have missing values
+        result = array.values  # Does not have missing values
     else:
         result = ma.masked_invalid(array.values, copy=False)
         result.set_fill_value(fill_value)
@@ -496,7 +496,7 @@ class either(object):
                 return varname

         raise ValueError("{} are not valid variable names".format(
-            self.varnames))
+            self.varnames))


 # This should look like:
@@ -661,25 +661,25 @@ def _corners_moved(wrfnc, first_ll_corner, first_ur_corner, latvar, lonvar):
     # Need to check all times
     for i in py3range(lats.shape[-3]):
-        start_idxs = [0]*len(lats.shape) # PyNIO does not support ndim
+        start_idxs = [0] * len(lats.shape)  # PyNIO does not support ndim
         start_idxs[-3] = i
         start_idxs = tuple(start_idxs)

-        end_idxs = [-1]*len(lats.shape)
+        end_idxs = [-1] * len(lats.shape)
         end_idxs[-3] = i
         end_idxs = tuple(end_idxs)

-        if (first_ll_corner[0] != lats[start_idxs] or
-            first_ll_corner[1] != lons[start_idxs] or
-            first_ur_corner[0] != lats[end_idxs] or
-            first_ur_corner[1] != lons[end_idxs]):
+        if (first_ll_corner[0] != lats[start_idxs]
+                or first_ll_corner[1] != lons[start_idxs]
+                or first_ur_corner[0] != lats[end_idxs]
+                or first_ur_corner[1] != lons[end_idxs]):
             return True

     return False


 def is_moving_domain(wrfin, varname=None, latvar=either("XLAT", "XLAT_M"),
                      lonvar=either("XLONG", "XLONG_M"), _key=None):
     """Return True if the domain is a moving nest.
@@ -877,7 +877,7 @@ def extract_global_attrs(wrfin, attrs):
         entry = wrfin[next(iter(viewkeys(wrfin)))]
         return extract_global_attrs(entry, attrs)

-    return {attr:_get_global_attr(wrfin, attr) for attr in attrlist}
+    return {attr: _get_global_attr(wrfin, attr) for attr in attrlist}


 def extract_dim(wrfin, dim):
@@ -907,17 +907,18 @@ def extract_dim(wrfin, dim):
     d = wrfin.dimensions[dim]
     if not isinstance(d, int):
         try:
-            return len(d) #netCDF4
-        except TypeError: #scipy.io.netcdf
+            return len(d)  # netCDF4
+        except TypeError:  # scipy.io.netcdf
             # Scipy can't handled unlimited dimensions, so now we have to
             # figure it out
             try:
                 s = wrfin.variables["P"].shape
-                return s[-4]
-            except:
+            except Exception:
                 raise ValueError("unsupported NetCDF reader")
+            else:
+                return s[-4]

-    return d # PyNIO
+    return d  # PyNIO


 def _combine_dict(wrfdict, varname, timeidx, method, meta, _key):
@@ -981,15 +982,15 @@ def _combine_dict(wrfdict, varname, timeidx, method, meta, _key):
     _cache_key = _key[first_key] if _key is not None else None

     first_array = _extract_var(wrfdict[first_key], varname,
                                timeidx, is_moving=is_moving, method=method,
                                squeeze=False, cache=None, meta=meta,
                                _key=_cache_key)

     # Create the output data numpy array based on the first array
     outdims = [numkeys]
     outdims += first_array.shape
     outdata = np.empty(outdims, first_array.dtype)
-    outdata[0,:] = first_array[:]
+    outdata[0, :] = first_array[:]

     idx = 1
     while True:
@@ -1007,8 +1008,8 @@ def _combine_dict(wrfdict, varname, timeidx, method, meta, _key):
         if outdata.shape[1:] != vardata.shape:
             raise ValueError("data sequences must have the "
                              "same size for all dictionary keys")
-        outdata[idx,:] = to_np(vardata)[:]
+        outdata[idx, :] = to_np(vardata)[:]
         idx += 1

     if xarray_enabled() and meta:
@@ -1044,12 +1045,10 @@ def _combine_dict(wrfdict, varname, timeidx, method, meta, _key):
         # make it so that key_0 is leftmost
         outdims = key_coordnames + list(first_array.dims[existing_cnt:])

         # Create the new 'key_n', value pairs
         for coordname, coordval in zip(key_coordnames, coord_vals):
             outcoords[coordname] = coordval

         outattrs = OrderedDict(first_array.attrs)

         outarr = DataArray(outdata, name=outname, coords=outcoords,
@@ -1268,7 +1267,7 @@ def _build_data_array(wrfnc, varname, timeidx, is_moving_domain, is_multifile,
     for dkey, val in viewitems(var.__dict__):
         # scipy.io adds these but don't want them
         if dkey in ("data", "_shape", "_size", "_typecode", "_attributes",
-                    "maskandscale", "dimensions"):
+                    "maskandscale", "dimensions"):
             continue

         _dkey = dkey if isinstance(dkey, str) else dkey.decode()
@@ -1276,7 +1275,7 @@ def _build_data_array(wrfnc, varname, timeidx, is_moving_domain, is_multifile,
             _val = val
         else:
             if isinstance(val, bytes):
-                _val = val.decode() # scipy.io.netcdf
+                _val = val.decode()  # scipy.io.netcdf
             else:
                 _val = val
@@ -1292,7 +1291,6 @@ def _build_data_array(wrfnc, varname, timeidx, is_moving_domain, is_multifile,
         except IndexError:
             pass

     coords = OrderedDict()

     # Handle lat/lon coordinates and projection information if available
@@ -1354,17 +1352,17 @@ def _build_data_array(wrfnc, varname, timeidx, is_moving_domain, is_multifile,
             else:
                 coords[lon_coord] = (lon_coord_dims[1:],
-                                     lon_coord_vals[0,:])
+                                     lon_coord_vals[0, :])
                 coords[lat_coord] = (lat_coord_dims[1:],
-                                     lat_coord_vals[0,:])
+                                     lat_coord_vals[0, :])

             if time_coord is not None:
                 coords[time_coord] = (lon_coord_dims[0], time_coord_vals)
         else:
             coords[lon_coord] = (lon_coord_dims[1:],
-                                 lon_coord_vals[timeidx,:])
+                                 lon_coord_vals[timeidx, :])
             coords[lat_coord] = (lat_coord_dims[1:],
-                                 lat_coord_vals[timeidx,:])
+                                 lat_coord_vals[timeidx, :])

             if time_coord is not None:
                 coords[time_coord] = (lon_coord_dims[0],
@@ -1513,7 +1511,7 @@ def _find_reverse(wrfseq, varname, timeidx, is_moving, meta, _key):
                                          is_moving, True, _key)
             else:
                 result = wrfnc.variables[varname][filetimeidx, :]
-                return result[np.newaxis, :] # So that nosqueeze works
+                return result[np.newaxis, :]  # So that nosqueeze works
         else:
             comboidx += numtimes
@@ -1618,8 +1616,6 @@ def _cat_files(wrfseq, varname, timeidx, is_moving, squeeze, meta, _key):
         return _find_arr_for_time(wrfseq, varname, timeidx, is_moving, meta,
                                   _key)

-    #time_idx_or_slice = timeidx if not multitime else slice(None)
-
     # If all times are requested, need to build a new array and cat together
     # all of the arrays in the sequence
     wrf_iter = iter(wrfseq)
@@ -1665,7 +1661,8 @@ def _cat_files(wrfseq, varname, timeidx, is_moving, squeeze, meta, _key):
             outxtimes = get_cached_item(_key, timekey)
             if outxtimes is None:
                 outxtimes = np.empty(outdims[0])
-                outxtimes[startidx:endidx] = to_np(first_var.coords[timename][:])
+                outxtimes[startidx:endidx] = to_np(
+                    first_var.coords[timename][:])
             else:
                 timecached = True
@@ -1677,7 +1674,8 @@ def _cat_files(wrfseq, varname, timeidx, is_moving, squeeze, meta, _key):
             outlats = get_cached_item(_key, latkey)
             if outlats is None:
                 outlats = np.empty(outcoorddims, first_var.dtype)
-                outlats[startidx:endidx, :] = to_np(first_var.coords[latname][:])
+                outlats[startidx:endidx, :] = to_np(
+                    first_var.coords[latname][:])
             else:
                 latcached = True
@@ -1685,11 +1683,11 @@ def _cat_files(wrfseq, varname, timeidx, is_moving, squeeze, meta, _key):
             outlons = get_cached_item(_key, lonkey)
             if outlons is None:
                 outlons = np.empty(outcoorddims, first_var.dtype)
-                outlons[startidx:endidx, :] = to_np(first_var.coords[lonname][:])
+                outlons[startidx:endidx, :] = to_np(
+                    first_var.coords[lonname][:])
             else:
                 loncached = True

     startidx = endidx
     while True:
         try:
@@ -1940,7 +1938,7 @@ def _join_files(wrfseq, varname, timeidx, is_moving, meta, _key):
             else:
                 loncached = True

-    file_idx=1
+    file_idx = 1
     while True:
         try:
             wrfnc = next(wrf_iter)
@@ -1964,8 +1962,8 @@ def _join_files(wrfseq, varname, timeidx, is_moving, meta, _key):
             # For join, the times are a function of fileidx
             file_times = extract_times(wrfnc, ALL_TIMES, meta=False,
                                        do_xtime=False)
-            time_coord[file_idx, 0:numtimes] = np.asarray(file_times,
-                                                          "datetime64[ns]")[:]
+            time_coord[file_idx, 0:numtimes] = np.asarray(
+                file_times, "datetime64[ns]")[:]

             if timename is not None and not timecached:
                 xtimedata = wrfnc.variables[timename][:]
@@ -2219,8 +2217,8 @@ def _extract_var(wrfin, varname, timeidx, is_moving,
                                        multifile, _key)
         else:
             if not multitime:
-                result = wrfin.variables[varname][timeidx,:]
-                result = result[np.newaxis, :] # So that no squeeze works
+                result = wrfin.variables[varname][timeidx, :]
+                result = result[np.newaxis, :]  # So that no squeeze works
             else:
                 result = wrfin.variables[varname][:]
     else:
@@ -2288,8 +2286,8 @@ def extract_vars(wrfin, timeidx, varnames, method="cat", squeeze=True,
     else:
         varlist = varnames

-    return {var:_extract_var(wrfin, var, timeidx, None,
-                             method, squeeze, cache, meta, _key)
+    return {var: _extract_var(wrfin, var, timeidx, None,
+                              method, squeeze, cache, meta, _key)
             for var in varlist}
@@ -2322,7 +2320,7 @@ def _make_time(timearr):
     """
     try:
         return dt.datetime.strptime("".join(npbytes_to_str(timearr)),
-                                    "%Y-%m-%d_%H:%M:%S")
+                                    "%Y-%m-%d_%H:%M:%S")
     except ValueError:
         return np.datetime64("NaT")
@@ -2351,9 +2349,9 @@ def _file_times(wrfin, do_xtime):
     """
     if not do_xtime:
-        times = wrfin.variables["Times"][:,:]
+        times = wrfin.variables["Times"][:, :]
         for i in py3range(times.shape[0]):
-            yield _make_time(times[i,:])
+            yield _make_time(times[i, :])
     else:
         xtimes = wrfin.variables["XTIME"][:]
         for i in py3range(xtimes.shape[0]):
@@ -2390,7 +2388,7 @@ def _extract_time_map(wrfin, timeidx, do_xtime, meta=False):
         otherwise the sequence is :class:`numpy.ndarray`.

     """
-    return {key : extract_times(wrfseq, timeidx, do_xtime, meta)
+    return {key: extract_times(wrfseq, timeidx, do_xtime, meta)
             for key, wrfseq in viewitems(wrfin)}
@@ -2478,19 +2476,19 @@ def extract_times(wrfin, timeidx, method="cat", squeeze=True, cache=None,
             num_cols = len(time_list[0])

             time_arr = np.full((num_rows, num_cols), fill_value, dtype=dt)
-            for i,row in enumerate(time_list):
+            for i, row in enumerate(time_list):
                 if len(row) == num_cols:
-                    time_arr[i,:] = row[:]
+                    time_arr[i, :] = row[:]
                 else:
-                    for j,val in enumerate(row):
-                        time_arr[i,j] = val
+                    for j, val in enumerate(row):
+                        time_arr[i, j] = val

             time_arr = ma.masked_values(time_arr, fill_value)

         else:
             raise ValueError("invalid method argument '{}'".format(method))
     except KeyError:
-        return None # Thrown for pre-3.7 XTIME not existing
+        return None  # Thrown for pre-3.7 XTIME not existing

     if xarray_enabled() and meta:
         outattrs = OrderedDict()
@@ -2514,10 +2512,8 @@ def extract_times(wrfin, timeidx, method="cat", squeeze=True, cache=None,
         outname = "XTIME"

         outarr = DataArray(time_arr, name=outname, coords=outcoords,
                            dims=outdimnames, attrs=outattrs)
     else:
         outarr = time_arr
@@ -2670,7 +2666,7 @@ def get_right_slices(var, right_ndims, fixed_val=0):
                  [slice(None)]*right_ndims)


-def get_proj_params(wrfin):#, timeidx=0, varname=None):
+def get_proj_params(wrfin):
     """Return a tuple of latitude, longitude, and projection parameters from
     a WRF output file object or a sequence of WRF output file objects.
@@ -2697,12 +2693,13 @@ def get_proj_params(wrfin):#, timeidx=0, varname=None):
         longitude coordinate, and global projection attributes.

     """
-    proj_params = extract_global_attrs(wrfin, attrs=("MAP_PROJ",
-                                                     "CEN_LAT", "CEN_LON",
-                                                     "TRUELAT1", "TRUELAT2",
-                                                     "MOAD_CEN_LAT", "STAND_LON",
-                                                     "POLE_LAT", "POLE_LON",
-                                                     "DX", "DY"))
+    proj_params = extract_global_attrs(wrfin,
+                                       attrs=("MAP_PROJ",
+                                              "CEN_LAT", "CEN_LON",
+                                              "TRUELAT1", "TRUELAT2",
+                                              "MOAD_CEN_LAT", "STAND_LON",
+                                              "POLE_LAT", "POLE_LON",
+                                              "DX", "DY"))

     return proj_params
@@ -2710,7 +2707,7 @@ def get_proj_params(wrfin):#, timeidx=0, varname=None):
 def from_args(func, argnames, *args, **kwargs):
     """Return a mapping of argument name to value for the called function.

-    This function parses the function \*args and \*\*kwargs to obtain the \
+    This function parses the function args and kwargs to obtain the
     desired argument value. If the argument has not been passed in, the value
     is taken from the default keyword argument value.
@@ -2719,7 +2716,7 @@ def from_args(func, argnames, *args, **kwargs):
     Note:

         This function currently does not work with functions that contain
-        \*args or \*\*kwargs arguments.
+        variable length args or kwargs arguments.

     Args:
@@ -2763,7 +2760,7 @@ def _args_to_list2(func, args, kwargs):
     Note:

         This function currently does not work with functions that contain
-        *args or **kwargs arguments.
+        variable length args or kwargs arguments.

     Args:
@@ -2784,15 +2781,15 @@ def _args_to_list2(func, args, kwargs):
     # Build the full tuple with defaults filled in
     outargs = [None]*len(argspec.args)
     if argspec.defaults is not None:
-        for i,default in enumerate(argspec.defaults[::-1], 1):
+        for i, default in enumerate(argspec.defaults[::-1], 1):
             outargs[-i] = default

     # Add the supplied args
-    for i,arg in enumerate(args):
+    for i, arg in enumerate(args):
         outargs[i] = arg

     # Fill in the supplied kargs
-    for argname,val in viewitems(kwargs):
+    for argname, val in viewitems(kwargs):
         argidx = argspec.args.index(argname)
         outargs[argidx] = val
@@ -2850,7 +2847,7 @@ def _args_to_list3(func, args, kwargs):
     Note:

         This function currently does not work with functions that contain
-        *args or **kwargs arguments.
+        variable length args or kwargs arguments.

     Args:
@@ -2885,7 +2882,7 @@ def args_to_list(func, args, kwargs):
     Note:

         This function currently does not work with functions that contain
-        \*args or \*\*kwargs arguments.
+        variable length args or kwargs arguments.

     Args:
@@ -3039,13 +3036,27 @@ def psafilepath():
 def get_filepath(obj):
+    """Return the file path for the specified object.
+
+    This is used to return the file path for a netcdf object. If the
+    particular object does not have the appropriate file path information,
+    then one is created based on the timestep in the file.
+
+    Args:
+
+        obj: An object.
+
+    Returns:
+
+        :obj:`str`: A string for a file path.
+
+    """
     try:
         path = obj.filepath()
     except AttributeError:
         try:
             path = obj.file.path
-        except:
+        except AttributeError:
             # Let's make up a filename from the first file time
             found = False
             times = extract_times(obj, None, meta=False, do_xtime=False)
@@ -3059,6 +3070,7 @@ def get_filepath(obj):
     return path


 def get_id(obj, prefix=''):
     """Return the cache id.
@@ -3091,11 +3103,11 @@ def get_id(obj, prefix=''):
     # For each key in the mapping, recursively call get_id until
     # until a non-mapping is found
-    return {key : get_id(val, prefix) for key,val in viewitems(obj)}
+    return {key: get_id(val, prefix) for key, val in viewitems(obj)}


 def geo_bounds(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
                squeeze=True, cache=None):
     """Return the geographic boundaries for the variable or file(s).

     When using a :class:`xarray.DataArray` as the *var* parameter, the variable
@@ -3234,6 +3246,7 @@ def geo_bounds(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
     # Non-moving domains
     return GeoBounds(lats=lats, lons=lons)


 def _get_wrf_proj_geobnds(var, wrfin, varname, timeidx, method, squeeze,
                           cache):
     """Return the :class:`wrf.WrfProj` subclass and :class:`wrf.GeoBounds`.
@@ -3301,7 +3314,7 @@ def _get_wrf_proj_geobnds(var, wrfin, varname, timeidx, method, squeeze,
                          "information")
     else:
         geobnds = geo_bounds(wrfin=wrfin, varname=varname, timeidx=timeidx,
-                             method=method, cache=cache)
+                             method=method, cache=cache)
         proj_params = get_proj_params(wrfin)
         wrf_proj = getproj(**proj_params)
@@ -3376,7 +3389,7 @@ def _get_proj_obj(ob_type, var, wrfin, varname, timeidx, method, squeeze,
     elif ob_type == "basemap":
         try:
             _ = len(geobnds)
-        except TypeError: # Only a single object
+        except TypeError:  # Only a single object
             proj_obj = wrf_proj.basemap(geobnds, **kwargs)
         else:
             proj_obj = np.empty(geobnds.shape, np.object)
@@ -3386,7 +3399,7 @@ def _get_proj_obj(ob_type, var, wrfin, varname, timeidx, method, squeeze,
     elif ob_type == "pyngl":
         try:
             _ = len(geobnds)
-        except TypeError: # Only a single object
+        except TypeError:  # Only a single object
             proj_obj = wrf_proj.pyngl(geobnds, **kwargs)
         else:
             proj_obj = np.empty(geobnds.shape, np.object)
@@ -3443,7 +3456,7 @@ def latlon_coords(var, as_np=False):
 def get_cartopy(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
-                squeeze=True, cache=None):
+                squeeze=True, cache=None):
     """Return a :class:`cartopy.crs.Projection` subclass for the
     map projection.
@@ -3511,7 +3524,7 @@ def get_cartopy(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
 def get_basemap(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
-                squeeze=True, cache=None, **kwargs):
+                squeeze=True, cache=None, **kwargs):
     """Return a :class:`matplotlib.mpl_toolkits.basemap.Basemap` object
     for the map projection.
@@ -3737,7 +3750,8 @@ def cartopy_xlim(var=None, geobounds=None, wrfin=None, varname=None, timeidx=0,
     """
     wrf_proj, native_geobnds = _get_wrf_proj_geobnds(var, wrfin, varname,
-                                                     timeidx, method, squeeze, cache)
+                                                     timeidx, method, squeeze,
+                                                     cache)
     if geobounds is not None:
         return wrf_proj.cartopy_xlim(geobounds)
@@ -3823,7 +3837,8 @@ def cartopy_ylim(var=None, geobounds=None, wrfin=None, varname=None, timeidx=0,
     """
     wrf_proj, native_geobnds = _get_wrf_proj_geobnds(var, wrfin, varname,
-                                                     timeidx, method, squeeze, cache)
+                                                     timeidx, method, squeeze,
+                                                     cache)
     if geobounds is not None:
         return wrf_proj.cartopy_ylim(geobounds)
@@ -3854,8 +3869,8 @@ def ll_points(lat, lon):
         object or a list of :class:`wrf.CoordPair` objects.

     """
-    latvals = np.ravel(to_np(lat)[...,0,0])
-    lonvals = np.ravel(to_np(lon)[...,0,0])
+    latvals = np.ravel(to_np(lat)[..., 0, 0])
+    lonvals = np.ravel(to_np(lon)[..., 0, 0])

     if latvals.shape[0] == 1:
         return CoordPair(lat=float(latvals), lon=float(lonvals))
|
|
|
|