forked from 3rdparty/wrf-python
11 changed files with 698 additions and 582 deletions
@ -0,0 +1,227 @@
@@ -0,0 +1,227 @@
|
||||
from __future__ import (absolute_import, division, print_function, |
||||
unicode_literals) |
||||
|
||||
from collections import Iterable |
||||
|
||||
import numpy as np |
||||
|
||||
from .constants import Constants |
||||
from .extension import computeij, computell |
||||
from .util import (extract_vars, extract_global_attrs, |
||||
either, _is_moving_domain, _is_multi_time_req, |
||||
iter_left_indexes, _is_mapping, _is_multi_file, |
||||
viewkeys) |
||||
|
||||
def _lat_varname(wrfnc, stagger): |
||||
if stagger is None or stagger.lower() == "m": |
||||
varname = either("XLAT", "XLAT_M")(wrfnc) |
||||
elif stagger.lower() == "u" or stagger.lower() == "v": |
||||
varname = "XLAT_{}".format(stagger.upper()) |
||||
else: |
||||
raise ValueError("invalid 'stagger' value") |
||||
|
||||
return varname |
||||
|
||||
def _lon_varname(wrfnc, stagger): |
||||
if stagger is None or stagger.lower() == "m": |
||||
varname = either("XLONG", "XLONG_M")(wrfnc) |
||||
elif stagger.lower() == "u" or stagger.lower() == "v": |
||||
varname = "XLONG_{}".format(stagger.upper()) |
||||
else: |
||||
raise ValueError("invalid 'stagger' value") |
||||
|
||||
return varname |
||||
|
||||
def _get_proj_params(wrfnc, timeidx, stagger, method, squeeze, cache): |
||||
if timeidx < 0: |
||||
raise ValueError("'timeidx' must be greater than 0") |
||||
|
||||
attrs = extract_global_attrs(wrfnc, attrs=("MAP_PROJ", "TRUELAT1", |
||||
"TRUELAT2", "STAND_LON", |
||||
"DX", "DY")) |
||||
map_proj = attrs["MAP_PROJ"] |
||||
truelat1 = attrs["TRUELAT1"] |
||||
truelat2 = attrs["TRUELAT2"] |
||||
stdlon = attrs["STAND_LON"] |
||||
dx = attrs["DX"] |
||||
dy = attrs["DY"] |
||||
|
||||
if map_proj == 6: |
||||
pole_attrs = extract_global_attrs(wrfnc, attrs=("POLE_LAT", |
||||
"POLE_LON")) |
||||
pole_lat = pole_attrs["POLE_LAT"] |
||||
pole_lon = pole_attrs["POLE_LON"] |
||||
latinc = (dy*360.0)/2.0 / Constants.PI/Constants.WRF_EARTH_RADIUS |
||||
loninc = (dx*360.0)/2.0 / Constants.PI/Constants.WRF_EARTH_RADIUS |
||||
else: |
||||
pole_lat = 90.0 |
||||
pole_lon = 0.0 |
||||
latinc = 0.0 |
||||
loninc = 0.0 |
||||
|
||||
latvar = _lat_varname(wrfnc, stagger) |
||||
lonvar = _lon_varname(wrfnc, stagger) |
||||
|
||||
lat_timeidx = timeidx |
||||
|
||||
is_moving = _is_moving_domain(wrfnc, latvar=latvar, lonvar=lonvar) |
||||
|
||||
# Only need one file and one time if the domain is not moving |
||||
if not is_moving: |
||||
if _is_multi_time_req(timeidx): |
||||
lat_timeidx = 0 |
||||
|
||||
if _is_multi_file(wrfnc): |
||||
if not _is_mapping(wrfnc): |
||||
wrfnc = next(iter(wrfnc)) # only need one file |
||||
else: |
||||
wrfnc = wrfnc[next(iter(viewkeys(wrfnc)))] |
||||
return _get_proj_params(wrfnc, timeidx, stagger, |
||||
method, squeeze, cache) |
||||
|
||||
xlat = extract_vars(wrfnc, lat_timeidx, (latvar,), method, squeeze, cache, |
||||
nometa=True)[latvar] |
||||
xlon = extract_vars(wrfnc, lat_timeidx, (lonvar,), method, squeeze, cache, |
||||
nometa=True)[lonvar] |
||||
|
||||
ref_lat = np.ravel(xlat[...,0,0]) |
||||
ref_lon = np.ravel(xlon[...,0,0]) |
||||
|
||||
# Note: fortran index |
||||
known_i = 1.0 |
||||
known_j = 1.0 |
||||
|
||||
return (map_proj, truelat1, truelat2, stdlon, ref_lat, ref_lon, |
||||
pole_lat, pole_lon, known_i, known_j, dx, latinc, loninc) |
||||
|
||||
|
||||
def ll_to_ij(wrfnc, latitude, longitude, timeidx=0,
             stagger=None, method="cat", squeeze=True, cache=None):
    """Return grid (i, j) indexes for the given latitude/longitude values.

    *latitude*/*longitude* may be scalars or equal-length iterables.  The
    result's final dimension holds the (i, j) pair; for a moving domain
    there is one pair per reference point (i.e. per time).
    """

    # Projection parameters needed by the compiled computeij routine.
    (map_proj,truelat1,truelat2,stdlon,ref_lat,ref_lon,
     pole_lat,pole_lon,known_i,known_j,dx,latinc,
     loninc) = _get_proj_params(wrfnc, timeidx, stagger, method, squeeze, cache)

    if isinstance(latitude, Iterable):
        lats = np.asarray(latitude)
        lons = np.asarray(longitude)

        # Flatten so the points can be walked with a single 1D loop.
        if lats.ndim > 1:
            lats = lats.ravel()

        if lons.ndim > 1:
            lons = lons.ravel()

        if (lats.size != lons.size):
            raise ValueError("'latitude' and 'longitude' "
                             "must be the same length")

        if ref_lat.size == 1:
            # Fixed domain: one (i, j) pair per input point.
            outdim = [lats.size, 2]
            extra_dims = [outdim[0]]
        else:
            # Moving domain will have moving ref_lats/ref_lons
            outdim = [lats.size, ref_lat.size, 2]
            extra_dims = outdim[0:2]

        res = np.empty(outdim, np.float64)

        for left_idxs in iter_left_indexes(extra_dims):
            # Output slot for this point (and this time, when moving).
            left_and_slice_idxs = left_idxs + (slice(None), )

            if ref_lat.size == 1:
                ref_lat_val = ref_lat[0]
                ref_lon_val = ref_lon[0]
            else:
                # Moving domain: the reference point tracks the last index.
                ref_lat_val = ref_lat[left_idxs[-1]]
                ref_lon_val = ref_lon[left_idxs[-1]]

            lat = lats[left_idxs[0]]
            lon = lons[left_idxs[0]]

            ij = computeij(map_proj, truelat1, truelat2, stdlon,
                           ref_lat_val, ref_lon_val, pole_lat, pole_lon,
                           known_i, known_j, dx, latinc, loninc,
                           lat, lon)

            res[left_and_slice_idxs] = ij[:]

    else:
        # Scalar input: single call.  NOTE(review): for a moving domain
        # ref_lat/ref_lon are arrays here -- confirm computeij handles that.
        res = computeij(map_proj, truelat1, truelat2, stdlon,
                        ref_lat, ref_lon, pole_lat, pole_lon,
                        known_i, known_j, dx, latinc, loninc,
                        latitude, longitude)

    if squeeze:
        res = res.squeeze()

    return res
|
||||
def ij_to_ll(wrfnc, i, j, timeidx=0,
             stagger=None, method="cat", squeeze=True, cache=None):
    """Return latitude/longitude values for the given grid (i, j) indexes.

    *i*/*j* may be scalars or equal-length iterables.  The result's final
    dimension holds the (lat, lon) pair; for a moving domain there is one
    pair per reference point (i.e. per time).
    """

    # Projection parameters needed by the compiled computell routine.
    (map_proj,truelat1,truelat2,stdlon,ref_lat,ref_lon,
     pole_lat,pole_lon,known_i,known_j,dx,latinc,
     loninc) = _get_proj_params(wrfnc, timeidx, stagger, method, squeeze, cache)

    if isinstance(i, Iterable):
        i_arr = np.asarray(i)
        j_arr = np.asarray(j)

        # Flatten so the points can be walked with a single 1D loop.
        if i_arr.ndim > 1:
            i_arr = i_arr.ravel()

        if j_arr.ndim > 1:
            j_arr = j_arr.ravel()

        if (i_arr.size != j_arr.size):
            raise ValueError("'i' and 'j' "
                             "must be the same length")

        if ref_lat.size == 1:
            # Fixed domain: one (lat, lon) pair per input point.
            outdim = [i_arr.size, 2]
            extra_dims = [outdim[0]]
        else:
            # Moving domain will have moving ref_lats/ref_lons
            outdim = [i_arr.size, ref_lat.size, 2]
            extra_dims = outdim[0:2]

        res = np.empty(outdim, np.float64)

        for left_idxs in iter_left_indexes(extra_dims):
            # Output slot for this point (and this time, when moving).
            left_and_slice_idxs = left_idxs + (slice(None), )

            if ref_lat.size == 1:
                ref_lat_val = ref_lat[0]
                ref_lon_val = ref_lon[0]
            else:
                # Moving domain: the reference point tracks the last index.
                ref_lat_val = ref_lat[left_idxs[-1]]
                ref_lon_val = ref_lon[left_idxs[-1]]

            i_val = i_arr[left_idxs[0]]
            j_val = j_arr[left_idxs[0]]

            ll = computell(map_proj, truelat1, truelat2,
                           stdlon, ref_lat_val, ref_lon_val,
                           pole_lat, pole_lon, known_i, known_j,
                           dx, latinc, loninc,
                           i_val, j_val)

            res[left_and_slice_idxs] = ll[:]

    else:
        # Scalar input: single call.  NOTE(review): for a moving domain
        # ref_lat/ref_lon are arrays here -- confirm computell handles that.
        i_val = i
        j_val = j

        res = computell(map_proj, truelat1, truelat2,
                        stdlon, ref_lat, ref_lon,
                        pole_lat, pole_lon, known_i, known_j,
                        dx, latinc, loninc,
                        i_val, j_val)

    if squeeze:
        res = res.squeeze()

    return res
@ -0,0 +1,166 @@
@@ -0,0 +1,166 @@
|
||||
|
||||
|
||||
from .util import _unpack_sequence, is_standard_wrf_var, extract_vars |
||||
from .cape import get_2dcape, get_3dcape |
||||
from .ctt import get_ctt |
||||
from .dbz import get_dbz, get_max_dbz |
||||
from .dewpoint import get_dp, get_dp_2m |
||||
from .geoht import get_geopt, get_height |
||||
from .helicity import get_srh, get_uh |
||||
from .latlon import get_lat, get_lon |
||||
from .omega import get_omega |
||||
from .pressure import get_pressure, get_pressure_hpa |
||||
from .pw import get_pw |
||||
from .rh import get_rh, get_rh_2m |
||||
from .slp import get_slp |
||||
from .temp import get_tc, get_eth, get_temp, get_theta, get_tk, get_tv, get_tw |
||||
from .terrain import get_terrain |
||||
from .uvmet import (get_uvmet, get_uvmet10, get_uvmet10_wspd_wdir, |
||||
get_uvmet_wspd_wdir) |
||||
from .vorticity import get_avo, get_pvo |
||||
from .wind import (get_destag_wspd_wdir, get_destag_wspd_wdir10, |
||||
get_u_destag, get_v_destag, get_w_destag) |
||||
from .times import get_times |
||||
|
||||
__all__ = ["getvar"] |
||||
|
||||
# Dispatch table: product name -> diagnostic routine.  Each routine is
# called as func(wrfnc, timeidx, **kargs); the first two positional
# arguments are supplied by getvar and must not be overridden by users.
_FUNC_MAP = {
    "cape2d": get_2dcape,
    "cape3d": get_3dcape,
    "dbz": get_dbz,
    "maxdbz": get_max_dbz,
    "dp": get_dp,
    "dp2m": get_dp_2m,
    "height": get_height,
    "geopt": get_geopt,
    "srh": get_srh,
    "uhel": get_uh,
    "omega": get_omega,
    "pw": get_pw,
    "rh": get_rh,
    "rh2m": get_rh_2m,
    "slp": get_slp,
    "theta": get_theta,
    "temp": get_temp,
    "tk": get_tk,
    "tc": get_tc,
    "theta_e": get_eth,
    "tv": get_tv,
    "twb": get_tw,
    "terrain": get_terrain,
    "times": get_times,
    "uvmet": get_uvmet,
    "uvmet10": get_uvmet10,
    "avo": get_avo,
    "pvo": get_pvo,
    "ua": get_u_destag,
    "va": get_v_destag,
    "wa": get_w_destag,
    "lat": get_lat,
    "lon": get_lon,
    "pressure": get_pressure_hpa,
    "pres": get_pressure,
    "wspd_wdir": get_destag_wspd_wdir,
    "wspd_wdir10": get_destag_wspd_wdir10,
    "wspd_wdir_uvmet": get_uvmet_wspd_wdir,
    "wspd_wdir_uvmet10": get_uvmet10_wspd_wdir,
    "ctt": get_ctt,
}
||||
|
||||
_VALID_KARGS = {"cape2d" : ["missing"], |
||||
"cape3d" : ["missing"], |
||||
"dbz" : ["do_variant", "do_liqskin"], |
||||
"maxdbz" : ["do_variant", "do_liqskin"], |
||||
"dp" : ["units"], |
||||
"dp2m" : ["units"], |
||||
"height" : ["msl", "units"], |
||||
"geopt" : [], |
||||
"srh" : ["top"], |
||||
"uhel" : ["bottom", "top"], |
||||
"omega" : [], |
||||
"pw" : [], |
||||
"rh" : [], |
||||
"rh2m" : [], |
||||
"slp" : ["units"], |
||||
"temp" : ["units"], |
||||
"tk" : [], |
||||
"tc" : [], |
||||
"theta" : ["units"], |
||||
"theta_e" : ["units"], |
||||
"tv" : ["units"], |
||||
"twb" : ["units"], |
||||
"terrain" : ["units"], |
||||
"times" : [], |
||||
"uvmet" : ["units"], |
||||
"uvmet10" : ["units"], |
||||
"avo" : [], |
||||
"pvo" : [], |
||||
"ua" : ["units"], |
||||
"va" : ["units"], |
||||
"wa" : ["units"], |
||||
"lat" : [], |
||||
"lon" : [], |
||||
"pres" : ["units"], |
||||
"pressure" : ["units"], |
||||
"wspddir" : ["units"], |
||||
"wspddir_uvmet" : ["units"], |
||||
"wspddir_uvmet10" : ["units"], |
||||
"ctt" : [], |
||||
"default" : [] |
||||
} |
||||
|
||||
_ALIASES = {"cape_2d" : "cape2d", |
||||
"cape_3d" : "cape3d", |
||||
"eth" : "theta_e", |
||||
"mdbz" : "maxdbz", |
||||
"geopotential" : "geopt", |
||||
"helicity" : "srh", |
||||
"latitude" : "lat", |
||||
"longitude" : "lon", |
||||
"omg" : "omega", |
||||
"p" : "pres", |
||||
"rh2" : "rh2m", |
||||
"z": "height", |
||||
"ter" : "terrain", |
||||
"updraft_helicity" : "uhel", |
||||
"td" : "dp", |
||||
"td2" : "dp2m" |
||||
} |
||||
|
||||
class ArgumentError(Exception):
    """Raised when getvar receives an invalid variable name or keyword.

    The message is kept on *msg* for backward compatibility; it is also
    passed to Exception so that args, repr, and pickling work normally
    (the original stored msg only on the instance, leaving args empty).
    """

    def __init__(self, msg):
        super(ArgumentError, self).__init__(msg)
        self.msg = msg

    def __str__(self):
        return self.msg
||||
|
||||
def _undo_alias(alias):
    """Return the canonical name for *alias*, or *alias* itself if it
    is not an alias."""
    return _ALIASES.get(alias, alias)
||||
|
||||
def _check_kargs(var, kargs): |
||||
for arg in kargs.iterkeys(): |
||||
if arg not in _VALID_KARGS[var]: |
||||
raise ArgumentError("'%s' is an invalid keyword " |
||||
"argument for '%s'" % (arg, var)) |
||||
|
||||
|
||||
def getvar(wrfnc, var, timeidx=0,
           method="cat", squeeze=True, cache=None,
           **kargs):
    """Return the variable *var* extracted or diagnosed from *wrfnc*.

    Standard WRF output variables are extracted directly.  Otherwise the
    name (or one of its aliases) selects a diagnostic routine from
    _FUNC_MAP after validating any extra keyword arguments.

    Raises ArgumentError for an unknown name or an invalid keyword.
    """
    wrfnc = _unpack_sequence(wrfnc)

    # Variables present verbatim in the file need no computation.
    if is_standard_wrf_var(wrfnc, var):
        return extract_vars(wrfnc, timeidx, var, method, squeeze, cache)[var]

    canonical = _undo_alias(var)
    if canonical not in _VALID_KARGS:
        raise ArgumentError("'%s' is not a valid variable name" % (var))

    _check_kargs(canonical, kargs)

    handler = _FUNC_MAP[canonical]
    return handler(wrfnc, timeidx, **kargs)
Loading…
Reference in new issue